feat(ST2.UtilPackages): bump up all packages

- Refresh PackageCache with latest versions of everything
This commit is contained in:
Iristyle
2013-09-16 22:35:46 -04:00
parent 7195197f0f
commit a000ce8acc
451 changed files with 14151 additions and 8317 deletions

View File

@@ -5,7 +5,7 @@
},
{
"caption": "Package Control: Add Channel",
"command": "add_repository_channel"
"command": "add_channel"
},
{
"caption": "Package Control: Create Binary Package File",
@@ -27,6 +27,10 @@
"caption": "Package Control: Enable Package",
"command": "enable_package"
},
{
"caption": "Package Control: Grab CA Certs",
"command": "grab_certs"
},
{
"caption": "Package Control: Install Package",
"command": "install_package"

View File

@@ -0,0 +1,43 @@
----BEGIN CERTIFICATE-----
MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
+OkuE6N36B9K
-----END CERTIFICATE-----

View File

@@ -0,0 +1,4 @@
[
"221e907bdfff70d71cea42361ae209d5",
"7d0986b90061d60c8c02aa3b1cf23850"
]

View File

@@ -2,18 +2,19 @@
// A list of URLs that each contain a JSON file with a list of repositories.
// The repositories from these channels are placed in order after the
// repositories from the "repositories" setting
"repository_channels": [
"https://sublime.wbond.net/repositories.json"
"channels": [
"https://sublime.wbond.net/channel.json"
],
// A list of URLs that contain a packages JSON file. These repositories
// are placed in order before repositories from the "repository_channels"
// are placed in order before repositories from the "channels"
// setting
"repositories": [],
// A list of CA certs needed for domains. The default channel provides a
// list of domains and an identifier (the md5 hash) for the CA cert(s)
// necessary for each.
// necessary for each. Not used on Windows since the system CA cert list
// is automatically used via WinINet.
//
// If a custom cert is required for a proxy or for an alternate channel
// or repository domain name, it should be added in one of the two forms:
@@ -24,21 +25,29 @@
// In both cases the literal "*" means the cert will be checked to ensure
// it is present for accessing any URL. This is necessary for proxy
// connections, but also useful if you want to provide you own
// ca-bundle.crt file.
// Pckage Control.ca-bundle file.
//
// The "my_identifier" and "my_identifier_2" can be any unique string
// that Package Control can use as a filename, and ensures that it has
// merged the cert file with the ca-bundle.crt file in the certs/ directory
// since that is what is passed to the downloaders.
"certs": {
"api.bitbucket.org": ["d867a7b2aecc46f9c31afc4f2f50de05", ""],
"api.github.com": ["1c5282418e2cb4989cd6beddcdbab0b5", ""],
"bitbucket.org": ["897abe0b41fd2f64e9e2e351cbc36d76", ""],
"nodeload.github.com": ["1c5282418e2cb4989cd6beddcdbab0b5", ""],
"raw.github.com": ["1c5282418e2cb4989cd6beddcdbab0b5", ""],
"sublime.wbond.net": ["7f4f8622b4fd001c7f648e09aae7edaa", ""]
"api.bitbucket.org": ["7d0986b90061d60c8c02aa3b1cf23850", "https://sublime.wbond.net/certs/7d0986b90061d60c8c02aa3b1cf23850"],
"api.github.com": ["7d0986b90061d60c8c02aa3b1cf23850", "https://sublime.wbond.net/certs/7d0986b90061d60c8c02aa3b1cf23850"],
"bitbucket.org": ["7d0986b90061d60c8c02aa3b1cf23850", "https://sublime.wbond.net/certs/7d0986b90061d60c8c02aa3b1cf23850"],
"codeload.github.com": ["7d0986b90061d60c8c02aa3b1cf23850", "https://sublime.wbond.net/certs/7d0986b90061d60c8c02aa3b1cf23850"],
"downloads.sourceforge.net": ["221e907bdfff70d71cea42361ae209d5", "https://sublime.wbond.net/certs/221e907bdfff70d71cea42361ae209d5"],
"github.com": ["7d0986b90061d60c8c02aa3b1cf23850", "https://sublime.wbond.net/certs/7d0986b90061d60c8c02aa3b1cf23850"],
"nodeload.github.com": ["7d0986b90061d60c8c02aa3b1cf23850", "https://sublime.wbond.net/certs/7d0986b90061d60c8c02aa3b1cf23850"],
"raw.github.com": ["7d0986b90061d60c8c02aa3b1cf23850", "https://sublime.wbond.net/certs/7d0986b90061d60c8c02aa3b1cf23850"],
"sublime.wbond.net": ["221e907bdfff70d71cea42361ae209d5", "https://sublime.wbond.net/certs/221e907bdfff70d71cea42361ae209d5"]
},
// Install pre-release versions of packages. If this is false, versions
// under 1.0.0 will still be installed. Only packages using the SemVer
// -prerelease suffixes will be ignored.
"install_prereleases": false,
// If debugging information for HTTP/HTTPS connections should be printed
// to the Sublime Text console
"debug": false,
@@ -53,7 +62,7 @@
// the channel for aggregated statistics
"submit_usage": true,
// The URL to post install, upgrade and removal notices to
// The URL to post install, upgrade and removal notices to
"submit_url": "https://sublime.wbond.net/submit",
// If packages should be automatically upgraded when ST2 starts
@@ -69,25 +78,36 @@
// Packages to not auto upgrade
"auto_upgrade_ignore": [],
// Timeout for downloading channels, repositories and packages
// Timeout for downloading channels, repositories and packages. Doesn't
// have an effect on Windows due to a bug in WinINet.
"timeout": 30,
// The number of seconds to cache repository and package info for
"cache_length": 300,
// An HTTP proxy server to use for requests
// An HTTP proxy server to use for requests. Not used on Windows since the
// system proxy configuration is utilized via WinINet.
"http_proxy": "",
// An HTTPS proxy server to use for requests - this will inherit from
// http_proxy if it is set to "" or null and http_proxy has a value. You
// can set this to false to prevent inheriting from http_proxy.
// can set this to false to prevent inheriting from http_proxy. Not used on
// Windows since the system proxy configuration is utilized via WinINet.
"https_proxy": "",
// Username and password for both http_proxy and https_proxy
// Username and password for both http_proxy and https_proxy. May be used
// with WinINet to set credentials for system-level proxy config.
"proxy_username": "",
"proxy_password": "",
// User agent for HTTP requests
"user_agent": "Sublime Package Control",
// If HTTP responses should be cached to disk
"http_cache": true,
// Number of seconds to cache HTTP responses for, defaults to one week
"http_cache_length": 604800,
// User agent for HTTP requests. If "%s" is present, will be replaced
// with the current version.
"user_agent": "Sublime Package Control v%s",
// Setting this to true will cause Package Control to ignore all git
// and hg repositories - this may help if trying to list packages to install
@@ -97,12 +117,20 @@
// Custom paths to VCS binaries for when they can't be automatically
// found on the system and a package includes a VCS metadata directory
"git_binary": "",
"git_update_command": ["pull", "origin", "master", "--ff", "--commit"],
// This should NOT contain the name of the remote or branch - that will
// be automatically determined.
"git_update_command": ["pull", "--ff", "--commit"],
"hg_binary": "",
// Be sure to keep the remote name as the last argument
"hg_update_command": ["pull", "--update", "default"],
// For HG repositories, be sure to use "default" as the remote URL.
// This is the default behavior when cloning an HG repo.
"hg_update_command": ["pull", "--update"],
// Full path to the openssl binary, if not found on your machine. This is
// only used when running the Grab CA Certs command.
"openssl_binary": "",
// Directories to ignore when creating a package
"dirs_to_ignore": [
@@ -133,6 +161,6 @@
"__init__.py"
],
// When a package is created, copy it to this folder - defaults to Desktop
// When a package is created, copy it to this folder - defaults to Desktop
"package_destination": ""
}
}

View File

@@ -1,113 +0,0 @@
Certificate:
Data:
Version: 3 (0x2)
Serial Number:
0a:5f:11:4d:03:5b:17:91:17:d2:ef:d4:03:8c:3f:3b
Signature Algorithm: sha1WithRSAEncryption
Issuer: C=US, O=DigiCert Inc, OU=www.digicert.com, CN=DigiCert High Assurance EV Root CA
Validity
Not Before: Apr 2 12:00:00 2008 GMT
Not After : Apr 3 00:00:00 2022 GMT
Subject: C=US, O=DigiCert Inc, OU=www.digicert.com, CN=DigiCert High Assurance CA-3
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
Public-Key: (2048 bit)
Modulus:
00:bf:61:0a:29:10:1f:5e:fe:34:37:51:08:f8:1e:
fb:22:ed:61:be:0b:0d:70:4c:50:63:26:75:15:b9:
41:88:97:b6:f0:a0:15:bb:08:60:e0:42:e8:05:29:
10:87:36:8a:28:65:a8:ef:31:07:74:6d:36:97:2f:
28:46:66:04:c7:2a:79:26:7a:99:d5:8e:c3:6d:4f:
a0:5e:ad:bc:3d:91:c2:59:7b:5e:36:6c:c0:53:cf:
00:08:32:3e:10:64:58:10:13:69:c7:0c:ee:9c:42:
51:00:f9:05:44:ee:24:ce:7a:1f:ed:8c:11:bd:12:
a8:f3:15:f4:1c:7a:31:69:01:1b:a7:e6:5d:c0:9a:
6c:7e:09:9e:e7:52:44:4a:10:3a:23:e4:9b:b6:03:
af:a8:9c:b4:5b:9f:d4:4b:ad:92:8c:ce:b5:11:2a:
aa:37:18:8d:b4:c2:b8:d8:5c:06:8c:f8:ff:23:bd:
35:5e:d4:7c:3e:7e:83:0e:91:96:05:98:c3:b2:1f:
e3:c8:65:eb:a9:7b:5d:a0:2c:cc:fc:3c:d9:6d:ed:
cc:fa:4b:43:8c:c9:d4:b8:a5:61:1c:b2:40:b6:28:
12:df:b9:f8:5f:fe:d3:b2:c9:ef:3d:b4:1e:4b:7c:
1c:4c:99:36:9e:3d:eb:ec:a7:68:5e:1d:df:67:6e:
5e:fb
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Key Usage: critical
Digital Signature, Certificate Sign, CRL Sign
X509v3 Certificate Policies:
Policy: 2.16.840.1.114412.1.3.0.2
CPS: http://www.digicert.com/ssl-cps-repository.htm
User Notice:
Explicit Text:
X509v3 Basic Constraints: critical
CA:TRUE, pathlen:0
Authority Information Access:
OCSP - URI:http://ocsp.digicert.com
X509v3 CRL Distribution Points:
Full Name:
URI:http://crl3.digicert.com/DigiCertHighAssuranceEVRootCA.crl
Full Name:
URI:http://crl4.digicert.com/DigiCertHighAssuranceEVRootCA.crl
X509v3 Authority Key Identifier:
keyid:B1:3E:C3:69:03:F8:BF:47:01:D4:98:26:1A:08:02:EF:63:64:2B:C3
X509v3 Subject Key Identifier:
50:EA:73:89:DB:29:FB:10:8F:9E:E5:01:20:D4:DE:79:99:48:83:F7
Signature Algorithm: sha1WithRSAEncryption
1e:e2:a5:48:9e:6c:db:53:38:0f:ef:a6:1a:2a:ac:e2:03:43:
ed:9a:bc:3e:8e:75:1b:f0:fd:2e:22:59:ac:13:c0:61:e2:e7:
fa:e9:99:cd:87:09:75:54:28:bf:46:60:dc:be:51:2c:92:f3:
1b:91:7c:31:08:70:e2:37:b9:c1:5b:a8:bd:a3:0b:00:fb:1a:
15:fd:03:ad:58:6a:c5:c7:24:99:48:47:46:31:1e:92:ef:b4:
5f:4e:34:c7:90:bf:31:c1:f8:b1:84:86:d0:9c:01:aa:df:8a:
56:06:ce:3a:e9:0e:ae:97:74:5d:d7:71:9a:42:74:5f:de:8d:
43:7c:de:e9:55:ed:69:00:cb:05:e0:7a:61:61:33:d1:19:4d:
f9:08:ee:a0:39:c5:25:35:b7:2b:c4:0f:b2:dd:f1:a5:b7:0e:
24:c4:26:28:8d:79:77:f5:2f:f0:57:ba:7c:07:d4:e1:fc:cd:
5a:30:57:7e:86:10:47:dd:31:1f:d7:fc:a2:c2:bf:30:7c:5d:
24:aa:e8:f9:ae:5f:6a:74:c2:ce:6b:b3:46:d8:21:be:29:d4:
8e:5e:15:d6:42:4a:e7:32:6f:a4:b1:6b:51:83:58:be:3f:6d:
c7:fb:da:03:21:cb:6a:16:19:4e:0a:f0:ad:84:ca:5d:94:b3:
5a:76:f7:61
-----BEGIN CERTIFICATE-----
MIIGWDCCBUCgAwIBAgIQCl8RTQNbF5EX0u/UA4w/OzANBgkqhkiG9w0BAQUFADBs
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
ZSBFViBSb290IENBMB4XDTA4MDQwMjEyMDAwMFoXDTIyMDQwMzAwMDAwMFowZjEL
MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
LmRpZ2ljZXJ0LmNvbTElMCMGA1UEAxMcRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
Q0EtMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9hCikQH17+NDdR
CPge+yLtYb4LDXBMUGMmdRW5QYiXtvCgFbsIYOBC6AUpEIc2iihlqO8xB3RtNpcv
KEZmBMcqeSZ6mdWOw21PoF6tvD2Rwll7XjZswFPPAAgyPhBkWBATaccM7pxCUQD5
BUTuJM56H+2MEb0SqPMV9Bx6MWkBG6fmXcCabH4JnudSREoQOiPkm7YDr6ictFuf
1EutkozOtREqqjcYjbTCuNhcBoz4/yO9NV7UfD5+gw6RlgWYw7If48hl66l7XaAs
zPw82W3tzPpLQ4zJ1LilYRyyQLYoEt+5+F/+07LJ7z20Hkt8HEyZNp496+ynaF4d
32duXvsCAwEAAaOCAvowggL2MA4GA1UdDwEB/wQEAwIBhjCCAcYGA1UdIASCAb0w
ggG5MIIBtQYLYIZIAYb9bAEDAAIwggGkMDoGCCsGAQUFBwIBFi5odHRwOi8vd3d3
LmRpZ2ljZXJ0LmNvbS9zc2wtY3BzLXJlcG9zaXRvcnkuaHRtMIIBZAYIKwYBBQUH
AgIwggFWHoIBUgBBAG4AeQAgAHUAcwBlACAAbwBmACAAdABoAGkAcwAgAEMAZQBy
AHQAaQBmAGkAYwBhAHQAZQAgAGMAbwBuAHMAdABpAHQAdQB0AGUAcwAgAGEAYwBj
AGUAcAB0AGEAbgBjAGUAIABvAGYAIAB0AGgAZQAgAEQAaQBnAGkAQwBlAHIAdAAg
AEMAUAAvAEMAUABTACAAYQBuAGQAIAB0AGgAZQAgAFIAZQBsAHkAaQBuAGcAIABQ
AGEAcgB0AHkAIABBAGcAcgBlAGUAbQBlAG4AdAAgAHcAaABpAGMAaAAgAGwAaQBt
AGkAdAAgAGwAaQBhAGIAaQBsAGkAdAB5ACAAYQBuAGQAIABhAHIAZQAgAGkAbgBj
AG8AcgBwAG8AcgBhAHQAZQBkACAAaABlAHIAZQBpAG4AIABiAHkAIAByAGUAZgBl
AHIAZQBuAGMAZQAuMBIGA1UdEwEB/wQIMAYBAf8CAQAwNAYIKwYBBQUHAQEEKDAm
MCQGCCsGAQUFBzABhhhodHRwOi8vb2NzcC5kaWdpY2VydC5jb20wgY8GA1UdHwSB
hzCBhDBAoD6gPIY6aHR0cDovL2NybDMuZGlnaWNlcnQuY29tL0RpZ2lDZXJ0SGln
aEFzc3VyYW5jZUVWUm9vdENBLmNybDBAoD6gPIY6aHR0cDovL2NybDQuZGlnaWNl
cnQuY29tL0RpZ2lDZXJ0SGlnaEFzc3VyYW5jZUVWUm9vdENBLmNybDAfBgNVHSME
GDAWgBSxPsNpA/i/RwHUmCYaCALvY2QrwzAdBgNVHQ4EFgQUUOpzidsp+xCPnuUB
INTeeZlIg/cwDQYJKoZIhvcNAQEFBQADggEBAB7ipUiebNtTOA/vphoqrOIDQ+2a
vD6OdRvw/S4iWawTwGHi5/rpmc2HCXVUKL9GYNy+USyS8xuRfDEIcOI3ucFbqL2j
CwD7GhX9A61YasXHJJlIR0YxHpLvtF9ONMeQvzHB+LGEhtCcAarfilYGzjrpDq6X
dF3XcZpCdF/ejUN83ulV7WkAywXgemFhM9EZTfkI7qA5xSU1tyvED7Ld8aW3DiTE
JiiNeXf1L/BXunwH1OH8zVowV36GEEfdMR/X/KLCvzB8XSSq6PmuX2p0ws5rs0bY
Ib4p1I5eFdZCSucyb6Sxa1GDWL4/bcf72gMhy2oWGU4K8K2Eyl2Us1p292E=
-----END CERTIFICATE-----

View File

@@ -1,165 +0,0 @@
Certificate:
Data:
Version: 3 (0x2)
Serial Number: 145105 (0x236d1)
Signature Algorithm: sha1WithRSAEncryption
Issuer: C=US, O=GeoTrust Inc., CN=GeoTrust Global CA
Validity
Not Before: Feb 19 22:45:05 2010 GMT
Not After : Feb 18 22:45:05 2020 GMT
Subject: C=US, O=GeoTrust, Inc., CN=RapidSSL CA
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
RSA Public Key: (2048 bit)
Modulus (2048 bit):
00:c7:71:f8:56:c7:1e:d9:cc:b5:ad:f6:b4:97:a3:
fb:a1:e6:0b:50:5f:50:aa:3a:da:0f:fc:3d:29:24:
43:c6:10:29:c1:fc:55:40:72:ee:bd:ea:df:9f:b6:
41:f4:48:4b:c8:6e:fe:4f:57:12:8b:5b:fa:92:dd:
5e:e8:ad:f3:f0:1b:b1:7b:4d:fb:cf:fd:d1:e5:f8:
e3:dc:e7:f5:73:7f:df:01:49:cf:8c:56:c1:bd:37:
e3:5b:be:b5:4f:8b:8b:f0:da:4f:c7:e3:dd:55:47:
69:df:f2:5b:7b:07:4f:3d:e5:ac:21:c1:c8:1d:7a:
e8:e7:f6:0f:a1:aa:f5:6f:de:a8:65:4f:10:89:9c:
03:f3:89:7a:a5:5e:01:72:33:ed:a9:e9:5a:1e:79:
f3:87:c8:df:c8:c5:fc:37:c8:9a:9a:d7:b8:76:cc:
b0:3e:e7:fd:e6:54:ea:df:5f:52:41:78:59:57:ad:
f1:12:d6:7f:bc:d5:9f:70:d3:05:6c:fa:a3:7d:67:
58:dd:26:62:1d:31:92:0c:79:79:1c:8e:cf:ca:7b:
c1:66:af:a8:74:48:fb:8e:82:c2:9e:2c:99:5c:7b:
2d:5d:9b:bc:5b:57:9e:7c:3a:7a:13:ad:f2:a3:18:
5b:2b:59:0f:cd:5c:3a:eb:68:33:c6:28:1d:82:d1:
50:8b
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Key Usage: critical
Certificate Sign, CRL Sign
X509v3 Subject Key Identifier:
6B:69:3D:6A:18:42:4A:DD:8F:02:65:39:FD:35:24:86:78:91:16:30
X509v3 Authority Key Identifier:
keyid:C0:7A:98:68:8D:89:FB:AB:05:64:0C:11:7D:AA:7D:65:B8:CA:CC:4E
X509v3 Basic Constraints: critical
CA:TRUE, pathlen:0
X509v3 CRL Distribution Points:
URI:http://crl.geotrust.com/crls/gtglobal.crl
Authority Information Access:
OCSP - URI:http://ocsp.geotrust.com
Signature Algorithm: sha1WithRSAEncryption
ab:bc:bc:0a:5d:18:94:e3:c1:b1:c3:a8:4c:55:d6:be:b4:98:
f1:ee:3c:1c:cd:cf:f3:24:24:5c:96:03:27:58:fc:36:ae:a2:
2f:8f:f1:fe:da:2b:02:c3:33:bd:c8:dd:48:22:2b:60:0f:a5:
03:10:fd:77:f8:d0:ed:96:67:4f:fd:ea:47:20:70:54:dc:a9:
0c:55:7e:e1:96:25:8a:d9:b5:da:57:4a:be:8d:8e:49:43:63:
a5:6c:4e:27:87:25:eb:5b:6d:fe:a2:7f:38:28:e0:36:ab:ad:
39:a5:a5:62:c4:b7:5c:58:2c:aa:5d:01:60:a6:62:67:a3:c0:
c7:62:23:f4:e7:6c:46:ee:b5:d3:80:6a:22:13:d2:2d:3f:74:
4f:ea:af:8c:5f:b4:38:9c:db:ae:ce:af:84:1e:a6:f6:34:51:
59:79:d3:e3:75:dc:bc:d7:f3:73:df:92:ec:d2:20:59:6f:9c:
fb:95:f8:92:76:18:0a:7c:0f:2c:a6:ca:de:8a:62:7b:d8:f3:
ce:5f:68:bd:8f:3e:c1:74:bb:15:72:3a:16:83:a9:0b:e6:4d:
99:9c:d8:57:ec:a8:01:51:c7:6f:57:34:5e:ab:4a:2c:42:f6:
4f:1c:89:78:de:26:4e:f5:6f:93:4c:15:6b:27:56:4d:00:54:
6c:7a:b7:b7
-----BEGIN CERTIFICATE-----
MIID1TCCAr2gAwIBAgIDAjbRMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
YWwgQ0EwHhcNMTAwMjE5MjI0NTA1WhcNMjAwMjE4MjI0NTA1WjA8MQswCQYDVQQG
EwJVUzEXMBUGA1UEChMOR2VvVHJ1c3QsIEluYy4xFDASBgNVBAMTC1JhcGlkU1NM
IENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAx3H4Vsce2cy1rfa0
l6P7oeYLUF9QqjraD/w9KSRDxhApwfxVQHLuverfn7ZB9EhLyG7+T1cSi1v6kt1e
6K3z8Buxe037z/3R5fjj3Of1c3/fAUnPjFbBvTfjW761T4uL8NpPx+PdVUdp3/Jb
ewdPPeWsIcHIHXro5/YPoar1b96oZU8QiZwD84l6pV4BcjPtqelaHnnzh8jfyMX8
N8iamte4dsywPuf95lTq319SQXhZV63xEtZ/vNWfcNMFbPqjfWdY3SZiHTGSDHl5
HI7PynvBZq+odEj7joLCniyZXHstXZu8W1eefDp6E63yoxhbK1kPzVw662gzxigd
gtFQiwIDAQABo4HZMIHWMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUa2k9ahhC
St2PAmU5/TUkhniRFjAwHwYDVR0jBBgwFoAUwHqYaI2J+6sFZAwRfap9ZbjKzE4w
EgYDVR0TAQH/BAgwBgEB/wIBADA6BgNVHR8EMzAxMC+gLaArhilodHRwOi8vY3Js
Lmdlb3RydXN0LmNvbS9jcmxzL2d0Z2xvYmFsLmNybDA0BggrBgEFBQcBAQQoMCYw
JAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLmdlb3RydXN0LmNvbTANBgkqhkiG9w0B
AQUFAAOCAQEAq7y8Cl0YlOPBscOoTFXWvrSY8e48HM3P8yQkXJYDJ1j8Nq6iL4/x
/torAsMzvcjdSCIrYA+lAxD9d/jQ7ZZnT/3qRyBwVNypDFV+4ZYlitm12ldKvo2O
SUNjpWxOJ4cl61tt/qJ/OCjgNqutOaWlYsS3XFgsql0BYKZiZ6PAx2Ij9OdsRu61
04BqIhPSLT90T+qvjF+0OJzbrs6vhB6m9jRRWXnT43XcvNfzc9+S7NIgWW+c+5X4
knYYCnwPLKbK3opie9jzzl9ovY8+wXS7FXI6FoOpC+ZNmZzYV+yoAVHHb1c0XqtK
LEL2TxyJeN4mTvVvk0wVaydWTQBUbHq3tw==
-----END CERTIFICATE-----
Certificate:
Data:
Version: 3 (0x2)
Serial Number: 144470 (0x23456)
Signature Algorithm: sha1WithRSAEncryption
Issuer: C=US, O=GeoTrust Inc., CN=GeoTrust Global CA
Validity
Not Before: May 21 04:00:00 2002 GMT
Not After : May 21 04:00:00 2022 GMT
Subject: C=US, O=GeoTrust Inc., CN=GeoTrust Global CA
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
RSA Public Key: (2048 bit)
Modulus (2048 bit):
00:da:cc:18:63:30:fd:f4:17:23:1a:56:7e:5b:df:
3c:6c:38:e4:71:b7:78:91:d4:bc:a1:d8:4c:f8:a8:
43:b6:03:e9:4d:21:07:08:88:da:58:2f:66:39:29:
bd:05:78:8b:9d:38:e8:05:b7:6a:7e:71:a4:e6:c4:
60:a6:b0:ef:80:e4:89:28:0f:9e:25:d6:ed:83:f3:
ad:a6:91:c7:98:c9:42:18:35:14:9d:ad:98:46:92:
2e:4f:ca:f1:87:43:c1:16:95:57:2d:50:ef:89:2d:
80:7a:57:ad:f2:ee:5f:6b:d2:00:8d:b9:14:f8:14:
15:35:d9:c0:46:a3:7b:72:c8:91:bf:c9:55:2b:cd:
d0:97:3e:9c:26:64:cc:df:ce:83:19:71:ca:4e:e6:
d4:d5:7b:a9:19:cd:55:de:c8:ec:d2:5e:38:53:e5:
5c:4f:8c:2d:fe:50:23:36:fc:66:e6:cb:8e:a4:39:
19:00:b7:95:02:39:91:0b:0e:fe:38:2e:d1:1d:05:
9a:f6:4d:3e:6f:0f:07:1d:af:2c:1e:8f:60:39:e2:
fa:36:53:13:39:d4:5e:26:2b:db:3d:a8:14:bd:32:
eb:18:03:28:52:04:71:e5:ab:33:3d:e1:38:bb:07:
36:84:62:9c:79:ea:16:30:f4:5f:c0:2b:e8:71:6b:
e4:f9
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Basic Constraints: critical
CA:TRUE
X509v3 Subject Key Identifier:
C0:7A:98:68:8D:89:FB:AB:05:64:0C:11:7D:AA:7D:65:B8:CA:CC:4E
X509v3 Authority Key Identifier:
keyid:C0:7A:98:68:8D:89:FB:AB:05:64:0C:11:7D:AA:7D:65:B8:CA:CC:4E
Signature Algorithm: sha1WithRSAEncryption
35:e3:29:6a:e5:2f:5d:54:8e:29:50:94:9f:99:1a:14:e4:8f:
78:2a:62:94:a2:27:67:9e:d0:cf:1a:5e:47:e9:c1:b2:a4:cf:
dd:41:1a:05:4e:9b:4b:ee:4a:6f:55:52:b3:24:a1:37:0a:eb:
64:76:2a:2e:2c:f3:fd:3b:75:90:bf:fa:71:d8:c7:3d:37:d2:
b5:05:95:62:b9:a6:de:89:3d:36:7b:38:77:48:97:ac:a6:20:
8f:2e:a6:c9:0c:c2:b2:99:45:00:c7:ce:11:51:22:22:e0:a5:
ea:b6:15:48:09:64:ea:5e:4f:74:f7:05:3e:c7:8a:52:0c:db:
15:b4:bd:6d:9b:e5:c6:b1:54:68:a9:e3:69:90:b6:9a:a5:0f:
b8:b9:3f:20:7d:ae:4a:b5:b8:9c:e4:1d:b6:ab:e6:94:a5:c1:
c7:83:ad:db:f5:27:87:0e:04:6c:d5:ff:dd:a0:5d:ed:87:52:
b7:2b:15:02:ae:39:a6:6a:74:e9:da:c4:e7:bc:4d:34:1e:a9:
5c:4d:33:5f:92:09:2f:88:66:5d:77:97:c7:1d:76:13:a9:d5:
e5:f1:16:09:11:35:d5:ac:db:24:71:70:2c:98:56:0b:d9:17:
b4:d1:e3:51:2b:5e:75:e8:d5:d0:dc:4f:34:ed:c2:05:66:80:
a1:cb:e6:33
-----BEGIN CERTIFICATE-----
MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
-----END CERTIFICATE-----

View File

@@ -1,285 +0,0 @@
Certificate:
Data:
Version: 3 (0x2)
Serial Number:
03:37:b9:28:34:7c:60:a6:ae:c5:ad:b1:21:7f:38:60
Signature Algorithm: sha1WithRSAEncryption
Issuer: C=US, O=DigiCert Inc, OU=www.digicert.com, CN=DigiCert High Assurance EV Root CA
Validity
Not Before: Nov 9 12:00:00 2007 GMT
Not After : Nov 10 00:00:00 2021 GMT
Subject: C=US, O=DigiCert Inc, OU=www.digicert.com, CN=DigiCert High Assurance EV CA-1
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
RSA Public Key: (2048 bit)
Modulus (2048 bit):
00:f3:96:62:d8:75:6e:19:ff:3f:34:7c:49:4f:31:
7e:0d:04:4e:99:81:e2:b3:85:55:91:30:b1:c0:af:
70:bb:2c:a8:e7:18:aa:3f:78:f7:90:68:52:86:01:
88:97:e2:3b:06:65:90:aa:bd:65:76:c2:ec:be:10:
5b:37:78:83:60:75:45:c6:bd:74:aa:b6:9f:a4:3a:
01:50:17:c4:39:69:b9:f1:4f:ef:82:c1:ca:f3:4a:
db:cc:9e:50:4f:4d:40:a3:3a:90:e7:86:66:bc:f0:
3e:76:28:4c:d1:75:80:9e:6a:35:14:35:03:9e:db:
0c:8c:c2:28:ad:50:b2:ce:f6:91:a3:c3:a5:0a:58:
49:f6:75:44:6c:ba:f9:ce:e9:ab:3a:02:e0:4d:f3:
ac:e2:7a:e0:60:22:05:3c:82:d3:52:e2:f3:9c:47:
f8:3b:d8:b2:4b:93:56:4a:bf:70:ab:3e:e9:68:c8:
1d:8f:58:1d:2a:4d:5e:27:3d:ad:0a:59:2f:5a:11:
20:40:d9:68:04:68:2d:f4:c0:84:0b:0a:1b:78:df:
ed:1a:58:dc:fb:41:5a:6d:6b:f2:ed:1c:ee:5c:32:
b6:5c:ec:d7:a6:03:32:a6:e8:de:b7:28:27:59:88:
80:ff:7b:ad:89:58:d5:1e:14:a4:f2:b0:70:d4:a0:
3e:a7
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Key Usage: critical
Digital Signature, Certificate Sign, CRL Sign
X509v3 Extended Key Usage:
TLS Web Server Authentication, TLS Web Client Authentication, Code Signing, E-mail Protection, Time Stamping
X509v3 Certificate Policies:
Policy: 2.16.840.1.114412.2.1
CPS: http://www.digicert.com/ssl-cps-repository.htm
User Notice:
Explicit Text:
X509v3 Basic Constraints: critical
CA:TRUE, pathlen:0
Authority Information Access:
OCSP - URI:http://ocsp.digicert.com
CA Issuers - URI:http://www.digicert.com/CACerts/DigiCertHighAssuranceEVRootCA.crt
X509v3 CRL Distribution Points:
URI:http://crl3.digicert.com/DigiCertHighAssuranceEVRootCA.crl
URI:http://crl4.digicert.com/DigiCertHighAssuranceEVRootCA.crl
X509v3 Subject Key Identifier:
4C:58:CB:25:F0:41:4F:52:F4:28:C8:81:43:9B:A6:A8:A0:E6:92:E5
X509v3 Authority Key Identifier:
keyid:B1:3E:C3:69:03:F8:BF:47:01:D4:98:26:1A:08:02:EF:63:64:2B:C3
Signature Algorithm: sha1WithRSAEncryption
4c:7a:17:87:28:5d:17:bc:b2:32:73:bf:cd:2e:f5:58:31:1d:
f0:b1:71:54:9c:d6:9b:67:93:db:2f:03:3e:16:6f:1e:03:c9:
53:84:a3:56:60:1e:78:94:1b:a2:a8:6f:a3:a4:8b:52:91:d7:
dd:5c:95:bb:ef:b5:16:49:e9:a5:42:4f:34:f2:47:ff:ae:81:
7f:13:54:b7:20:c4:70:15:cb:81:0a:81:cb:74:57:dc:9c:df:
24:a4:29:0c:18:f0:1c:e4:ae:07:33:ec:f1:49:3e:55:cf:6e:
4f:0d:54:7b:d3:c9:e8:15:48:d4:c5:bb:dc:35:1c:77:45:07:
48:45:85:bd:d7:7e:53:b8:c0:16:d9:95:cd:8b:8d:7d:c9:60:
4f:d1:a2:9b:e3:d0:30:d6:b4:73:36:e6:d2:f9:03:b2:e3:a4:
f5:e5:b8:3e:04:49:00:ba:2e:a6:4a:72:83:72:9d:f7:0b:8c:
a9:89:e7:b3:d7:64:1f:d6:e3:60:cb:03:c4:dc:88:e9:9d:25:
01:00:71:cb:03:b4:29:60:25:8f:f9:46:d1:7b:71:ae:cd:53:
12:5b:84:8e:c2:0f:c7:ed:93:19:d9:c9:fa:8f:58:34:76:32:
2f:ae:e1:50:14:61:d4:a8:58:a3:c8:30:13:23:ef:c6:25:8c:
36:8f:1c:80
-----BEGIN CERTIFICATE-----
MIIG5jCCBc6gAwIBAgIQAze5KDR8YKauxa2xIX84YDANBgkqhkiG9w0BAQUFADBs
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
ZSBFViBSb290IENBMB4XDTA3MTEwOTEyMDAwMFoXDTIxMTExMDAwMDAwMFowaTEL
MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
LmRpZ2ljZXJ0LmNvbTEoMCYGA1UEAxMfRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
RVYgQ0EtMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPOWYth1bhn/
PzR8SU8xfg0ETpmB4rOFVZEwscCvcLssqOcYqj9495BoUoYBiJfiOwZlkKq9ZXbC
7L4QWzd4g2B1Rca9dKq2n6Q6AVAXxDlpufFP74LByvNK28yeUE9NQKM6kOeGZrzw
PnYoTNF1gJ5qNRQ1A57bDIzCKK1Qss72kaPDpQpYSfZ1RGy6+c7pqzoC4E3zrOJ6
4GAiBTyC01Li85xH+DvYskuTVkq/cKs+6WjIHY9YHSpNXic9rQpZL1oRIEDZaARo
LfTAhAsKG3jf7RpY3PtBWm1r8u0c7lwytlzs16YDMqbo3rcoJ1mIgP97rYlY1R4U
pPKwcNSgPqcCAwEAAaOCA4UwggOBMA4GA1UdDwEB/wQEAwIBhjA7BgNVHSUENDAy
BggrBgEFBQcDAQYIKwYBBQUHAwIGCCsGAQUFBwMDBggrBgEFBQcDBAYIKwYBBQUH
AwgwggHEBgNVHSAEggG7MIIBtzCCAbMGCWCGSAGG/WwCATCCAaQwOgYIKwYBBQUH
AgEWLmh0dHA6Ly93d3cuZGlnaWNlcnQuY29tL3NzbC1jcHMtcmVwb3NpdG9yeS5o
dG0wggFkBggrBgEFBQcCAjCCAVYeggFSAEEAbgB5ACAAdQBzAGUAIABvAGYAIAB0
AGgAaQBzACAAQwBlAHIAdABpAGYAaQBjAGEAdABlACAAYwBvAG4AcwB0AGkAdAB1
AHQAZQBzACAAYQBjAGMAZQBwAHQAYQBuAGMAZQAgAG8AZgAgAHQAaABlACAARABp
AGcAaQBDAGUAcgB0ACAARQBWACAAQwBQAFMAIABhAG4AZAAgAHQAaABlACAAUgBl
AGwAeQBpAG4AZwAgAFAAYQByAHQAeQAgAEEAZwByAGUAZQBtAGUAbgB0ACAAdwBo
AGkAYwBoACAAbABpAG0AaQB0ACAAbABpAGEAYgBpAGwAaQB0AHkAIABhAG4AZAAg
AGEAcgBlACAAaQBuAGMAbwByAHAAbwByAGEAdABlAGQAIABoAGUAcgBlAGkAbgAg
AGIAeQAgAHIAZQBmAGUAcgBlAG4AYwBlAC4wEgYDVR0TAQH/BAgwBgEB/wIBADCB
gwYIKwYBBQUHAQEEdzB1MCQGCCsGAQUFBzABhhhodHRwOi8vb2NzcC5kaWdpY2Vy
dC5jb20wTQYIKwYBBQUHMAKGQWh0dHA6Ly93d3cuZGlnaWNlcnQuY29tL0NBQ2Vy
dHMvRGlnaUNlcnRIaWdoQXNzdXJhbmNlRVZSb290Q0EuY3J0MIGPBgNVHR8EgYcw
gYQwQKA+oDyGOmh0dHA6Ly9jcmwzLmRpZ2ljZXJ0LmNvbS9EaWdpQ2VydEhpZ2hB
c3N1cmFuY2VFVlJvb3RDQS5jcmwwQKA+oDyGOmh0dHA6Ly9jcmw0LmRpZ2ljZXJ0
LmNvbS9EaWdpQ2VydEhpZ2hBc3N1cmFuY2VFVlJvb3RDQS5jcmwwHQYDVR0OBBYE
FExYyyXwQU9S9CjIgUObpqig5pLlMB8GA1UdIwQYMBaAFLE+w2kD+L9HAdSYJhoI
Au9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQBMeheHKF0XvLIyc7/NLvVYMR3wsXFU
nNabZ5PbLwM+Fm8eA8lThKNWYB54lBuiqG+jpItSkdfdXJW777UWSemlQk808kf/
roF/E1S3IMRwFcuBCoHLdFfcnN8kpCkMGPAc5K4HM+zxST5Vz25PDVR708noFUjU
xbvcNRx3RQdIRYW9135TuMAW2ZXNi419yWBP0aKb49Aw1rRzNubS+QOy46T15bg+
BEkAui6mSnKDcp33C4ypieez12Qf1uNgywPE3IjpnSUBAHHLA7QpYCWP+UbRe3Gu
zVMSW4SOwg/H7ZMZ2cn6j1g0djIvruFQFGHUqFijyDATI+/GJYw2jxyA
-----END CERTIFICATE-----
Certificate:
Data:
Version: 3 (0x2)
Serial Number: 1116160165 (0x428740a5)
Signature Algorithm: sha1WithRSAEncryption
Issuer: C=US, O=Entrust.net, OU=www.entrust.net/CPS incorp. by ref. (limits liab.), OU=(c) 1999 Entrust.net Limited, CN=Entrust.net Secure Server Certification Authority
Validity
Not Before: Oct 1 05:00:00 2006 GMT
Not After : Jul 26 18:15:15 2014 GMT
Subject: C=US, O=DigiCert Inc, OU=www.digicert.com, CN=DigiCert High Assurance EV Root CA
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
RSA Public Key: (2048 bit)
Modulus (2048 bit):
00:c6:cc:e5:73:e6:fb:d4:bb:e5:2d:2d:32:a6:df:
e5:81:3f:c9:cd:25:49:b6:71:2a:c3:d5:94:34:67:
a2:0a:1c:b0:5f:69:a6:40:b1:c4:b7:b2:8f:d0:98:
a4:a9:41:59:3a:d3:dc:94:d6:3c:db:74:38:a4:4a:
cc:4d:25:82:f7:4a:a5:53:12:38:ee:f3:49:6d:71:
91:7e:63:b6:ab:a6:5f:c3:a4:84:f8:4f:62:51:be:
f8:c5:ec:db:38:92:e3:06:e5:08:91:0c:c4:28:41:
55:fb:cb:5a:89:15:7e:71:e8:35:bf:4d:72:09:3d:
be:3a:38:50:5b:77:31:1b:8d:b3:c7:24:45:9a:a7:
ac:6d:00:14:5a:04:b7:ba:13:eb:51:0a:98:41:41:
22:4e:65:61:87:81:41:50:a6:79:5c:89:de:19:4a:
57:d5:2e:e6:5d:1c:53:2c:7e:98:cd:1a:06:16:a4:
68:73:d0:34:04:13:5c:a1:71:d3:5a:7c:55:db:5e:
64:e1:37:87:30:56:04:e5:11:b4:29:80:12:f1:79:
39:88:a2:02:11:7c:27:66:b7:88:b7:78:f2:ca:0a:
a8:38:ab:0a:64:c2:bf:66:5d:95:84:c1:a1:25:1e:
87:5d:1a:50:0b:20:12:cc:41:bb:6e:0b:51:38:b8:
4b:cb
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Basic Constraints: critical
CA:TRUE, pathlen:1
X509v3 Extended Key Usage:
TLS Web Server Authentication, TLS Web Client Authentication, E-mail Protection
Authority Information Access:
OCSP - URI:http://ocsp.entrust.net
X509v3 CRL Distribution Points:
URI:http://crl.entrust.net/server1.crl
X509v3 Subject Key Identifier:
B1:3E:C3:69:03:F8:BF:47:01:D4:98:26:1A:08:02:EF:63:64:2B:C3
X509v3 Key Usage:
Certificate Sign, CRL Sign
X509v3 Authority Key Identifier:
keyid:F0:17:62:13:55:3D:B3:FF:0A:00:6B:FB:50:84:97:F3:ED:62:D0:1A
1.2.840.113533.7.65.0:
0
..V7.1....
Signature Algorithm: sha1WithRSAEncryption
48:0e:2b:6f:20:62:4c:28:93:a3:24:3d:58:ab:21:cf:80:f8:
9a:97:90:6a:22:ed:5a:7c:47:36:99:e7:79:84:75:ab:24:8f:
92:0a:d5:61:04:ae:c3:6a:5c:b2:cc:d9:e4:44:87:6f:db:8f:
38:62:f7:44:36:9d:ba:bc:6e:07:c4:d4:8d:e8:1f:d1:0b:60:
a3:b5:9c:ce:63:be:ed:67:dc:f8:ba:de:6e:c9:25:cb:5b:b5:
9d:76:70:0b:df:42:72:f8:4f:41:11:64:a5:d2:ea:fc:d5:af:
11:f4:15:38:67:9c:20:a8:4b:77:5a:91:32:42:32:e7:85:b3:
df:36
-----BEGIN CERTIFICATE-----
MIIEQjCCA6ugAwIBAgIEQodApTANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEw
MDEwNTAwMDBaFw0xNDA3MjYxODE1MTVaMGwxCzAJBgNVBAYTAlVTMRUwEwYDVQQK
EwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20xKzApBgNV
BAMTIkRpZ2lDZXJ0IEhpZ2ggQXNzdXJhbmNlIEVWIFJvb3QgQ0EwggEiMA0GCSqG
SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGzOVz5vvUu+UtLTKm3+WBP8nNJUm2cSrD
1ZQ0Z6IKHLBfaaZAscS3so/QmKSpQVk609yU1jzbdDikSsxNJYL3SqVTEjju80lt
cZF+Y7arpl/DpIT4T2JRvvjF7Ns4kuMG5QiRDMQoQVX7y1qJFX5x6DW/TXIJPb46
OFBbdzEbjbPHJEWap6xtABRaBLe6E+tRCphBQSJOZWGHgUFQpnlcid4ZSlfVLuZd
HFMsfpjNGgYWpGhz0DQEE1yhcdNafFXbXmThN4cwVgTlEbQpgBLxeTmIogIRfCdm
t4i3ePLKCqg4qwpkwr9mXZWEwaElHoddGlALIBLMQbtuC1E4uEvLAgMBAAGjggET
MIIBDzASBgNVHRMBAf8ECDAGAQH/AgEBMCcGA1UdJQQgMB4GCCsGAQUFBwMBBggr
BgEFBQcDAgYIKwYBBQUHAwQwMwYIKwYBBQUHAQEEJzAlMCMGCCsGAQUFBzABhhdo
dHRwOi8vb2NzcC5lbnRydXN0Lm5ldDAzBgNVHR8ELDAqMCigJqAkhiJodHRwOi8v
Y3JsLmVudHJ1c3QubmV0L3NlcnZlcjEuY3JsMB0GA1UdDgQWBBSxPsNpA/i/RwHU
mCYaCALvY2QrwzALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8BdiE1U9s/8KAGv7
UISX8+1i0BowGQYJKoZIhvZ9B0EABAwwChsEVjcuMQMCAIEwDQYJKoZIhvcNAQEF
BQADgYEASA4rbyBiTCiToyQ9WKshz4D4mpeQaiLtWnxHNpnneYR1qySPkgrVYQSu
w2pcsszZ5ESHb9uPOGL3RDadurxuB8TUjegf0Qtgo7WczmO+7Wfc+Lrebskly1u1
nXZwC99CcvhPQRFkpdLq/NWvEfQVOGecIKhLd1qRMkIy54Wz3zY=
-----END CERTIFICATE-----
Certificate:
Data:
Version: 3 (0x2)
Serial Number: 927650371 (0x374ad243)
Signature Algorithm: sha1WithRSAEncryption
Issuer: C=US, O=Entrust.net, OU=www.entrust.net/CPS incorp. by ref. (limits liab.), OU=(c) 1999 Entrust.net Limited, CN=Entrust.net Secure Server Certification Authority
Validity
Not Before: May 25 16:09:40 1999 GMT
Not After : May 25 16:39:40 2019 GMT
Subject: C=US, O=Entrust.net, OU=www.entrust.net/CPS incorp. by ref. (limits liab.), OU=(c) 1999 Entrust.net Limited, CN=Entrust.net Secure Server Certification Authority
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
RSA Public Key: (1024 bit)
Modulus (1024 bit):
00:cd:28:83:34:54:1b:89:f3:0f:af:37:91:31:ff:
af:31:60:c9:a8:e8:b2:10:68:ed:9f:e7:93:36:f1:
0a:64:bb:47:f5:04:17:3f:23:47:4d:c5:27:19:81:
26:0c:54:72:0d:88:2d:d9:1f:9a:12:9f:bc:b3:71:
d3:80:19:3f:47:66:7b:8c:35:28:d2:b9:0a:df:24:
da:9c:d6:50:79:81:7a:5a:d3:37:f7:c2:4a:d8:29:
92:26:64:d1:e4:98:6c:3a:00:8a:f5:34:9b:65:f8:
ed:e3:10:ff:fd:b8:49:58:dc:a0:de:82:39:6b:81:
b1:16:19:61:b9:54:b6:e6:43
Exponent: 3 (0x3)
X509v3 extensions:
Netscape Cert Type:
SSL CA, S/MIME CA, Object Signing CA
X509v3 CRL Distribution Points:
DirName:/C=US/O=Entrust.net/OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/OU=(c) 1999 Entrust.net Limited/CN=Entrust.net Secure Server Certification Authority/CN=CRL1
URI:http://www.entrust.net/CRL/net1.crl
X509v3 Private Key Usage Period:
Not Before: May 25 16:09:40 1999 GMT, Not After: May 25 16:09:40 2019 GMT
X509v3 Key Usage:
Certificate Sign, CRL Sign
X509v3 Authority Key Identifier:
keyid:F0:17:62:13:55:3D:B3:FF:0A:00:6B:FB:50:84:97:F3:ED:62:D0:1A
X509v3 Subject Key Identifier:
F0:17:62:13:55:3D:B3:FF:0A:00:6B:FB:50:84:97:F3:ED:62:D0:1A
X509v3 Basic Constraints:
CA:TRUE
1.2.840.113533.7.65.0:
0
..V4.0....
Signature Algorithm: sha1WithRSAEncryption
90:dc:30:02:fa:64:74:c2:a7:0a:a5:7c:21:8d:34:17:a8:fb:
47:0e:ff:25:7c:8d:13:0a:fb:e4:98:b5:ef:8c:f8:c5:10:0d:
f7:92:be:f1:c3:d5:d5:95:6a:04:bb:2c:ce:26:36:65:c8:31:
c6:e7:ee:3f:e3:57:75:84:7a:11:ef:46:4f:18:f4:d3:98:bb:
a8:87:32:ba:72:f6:3c:e2:3d:9f:d7:1d:d9:c3:60:43:8c:58:
0e:22:96:2f:62:a3:2c:1f:ba:ad:05:ef:ab:32:78:87:a0:54:
73:19:b5:5c:05:f9:52:3e:6d:2d:45:0b:f7:0a:93:ea:ed:06:
f9:b2
-----BEGIN CERTIFICATE-----
MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1
MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE
ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j
b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF
bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg
U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA
A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/
I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3
wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC
AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb
oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5
BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p
dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk
MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp
b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu
dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0
MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi
E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa
MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI
hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN
95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd
2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI=
-----END CERTIFICATE-----

View File

@@ -1,563 +0,0 @@
Certificate:
Data:
Version: 3 (0x2)
Serial Number: 145105 (0x236d1)
Signature Algorithm: sha1WithRSAEncryption
Issuer: C=US, O=GeoTrust Inc., CN=GeoTrust Global CA
Validity
Not Before: Feb 19 22:45:05 2010 GMT
Not After : Feb 18 22:45:05 2020 GMT
Subject: C=US, O=GeoTrust, Inc., CN=RapidSSL CA
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
RSA Public Key: (2048 bit)
Modulus (2048 bit):
00:c7:71:f8:56:c7:1e:d9:cc:b5:ad:f6:b4:97:a3:
fb:a1:e6:0b:50:5f:50:aa:3a:da:0f:fc:3d:29:24:
43:c6:10:29:c1:fc:55:40:72:ee:bd:ea:df:9f:b6:
41:f4:48:4b:c8:6e:fe:4f:57:12:8b:5b:fa:92:dd:
5e:e8:ad:f3:f0:1b:b1:7b:4d:fb:cf:fd:d1:e5:f8:
e3:dc:e7:f5:73:7f:df:01:49:cf:8c:56:c1:bd:37:
e3:5b:be:b5:4f:8b:8b:f0:da:4f:c7:e3:dd:55:47:
69:df:f2:5b:7b:07:4f:3d:e5:ac:21:c1:c8:1d:7a:
e8:e7:f6:0f:a1:aa:f5:6f:de:a8:65:4f:10:89:9c:
03:f3:89:7a:a5:5e:01:72:33:ed:a9:e9:5a:1e:79:
f3:87:c8:df:c8:c5:fc:37:c8:9a:9a:d7:b8:76:cc:
b0:3e:e7:fd:e6:54:ea:df:5f:52:41:78:59:57:ad:
f1:12:d6:7f:bc:d5:9f:70:d3:05:6c:fa:a3:7d:67:
58:dd:26:62:1d:31:92:0c:79:79:1c:8e:cf:ca:7b:
c1:66:af:a8:74:48:fb:8e:82:c2:9e:2c:99:5c:7b:
2d:5d:9b:bc:5b:57:9e:7c:3a:7a:13:ad:f2:a3:18:
5b:2b:59:0f:cd:5c:3a:eb:68:33:c6:28:1d:82:d1:
50:8b
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Key Usage: critical
Certificate Sign, CRL Sign
X509v3 Subject Key Identifier:
6B:69:3D:6A:18:42:4A:DD:8F:02:65:39:FD:35:24:86:78:91:16:30
X509v3 Authority Key Identifier:
keyid:C0:7A:98:68:8D:89:FB:AB:05:64:0C:11:7D:AA:7D:65:B8:CA:CC:4E
X509v3 Basic Constraints: critical
CA:TRUE, pathlen:0
X509v3 CRL Distribution Points:
URI:http://crl.geotrust.com/crls/gtglobal.crl
Authority Information Access:
OCSP - URI:http://ocsp.geotrust.com
Signature Algorithm: sha1WithRSAEncryption
ab:bc:bc:0a:5d:18:94:e3:c1:b1:c3:a8:4c:55:d6:be:b4:98:
f1:ee:3c:1c:cd:cf:f3:24:24:5c:96:03:27:58:fc:36:ae:a2:
2f:8f:f1:fe:da:2b:02:c3:33:bd:c8:dd:48:22:2b:60:0f:a5:
03:10:fd:77:f8:d0:ed:96:67:4f:fd:ea:47:20:70:54:dc:a9:
0c:55:7e:e1:96:25:8a:d9:b5:da:57:4a:be:8d:8e:49:43:63:
a5:6c:4e:27:87:25:eb:5b:6d:fe:a2:7f:38:28:e0:36:ab:ad:
39:a5:a5:62:c4:b7:5c:58:2c:aa:5d:01:60:a6:62:67:a3:c0:
c7:62:23:f4:e7:6c:46:ee:b5:d3:80:6a:22:13:d2:2d:3f:74:
4f:ea:af:8c:5f:b4:38:9c:db:ae:ce:af:84:1e:a6:f6:34:51:
59:79:d3:e3:75:dc:bc:d7:f3:73:df:92:ec:d2:20:59:6f:9c:
fb:95:f8:92:76:18:0a:7c:0f:2c:a6:ca:de:8a:62:7b:d8:f3:
ce:5f:68:bd:8f:3e:c1:74:bb:15:72:3a:16:83:a9:0b:e6:4d:
99:9c:d8:57:ec:a8:01:51:c7:6f:57:34:5e:ab:4a:2c:42:f6:
4f:1c:89:78:de:26:4e:f5:6f:93:4c:15:6b:27:56:4d:00:54:
6c:7a:b7:b7
-----BEGIN CERTIFICATE-----
MIID1TCCAr2gAwIBAgIDAjbRMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
YWwgQ0EwHhcNMTAwMjE5MjI0NTA1WhcNMjAwMjE4MjI0NTA1WjA8MQswCQYDVQQG
EwJVUzEXMBUGA1UEChMOR2VvVHJ1c3QsIEluYy4xFDASBgNVBAMTC1JhcGlkU1NM
IENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAx3H4Vsce2cy1rfa0
l6P7oeYLUF9QqjraD/w9KSRDxhApwfxVQHLuverfn7ZB9EhLyG7+T1cSi1v6kt1e
6K3z8Buxe037z/3R5fjj3Of1c3/fAUnPjFbBvTfjW761T4uL8NpPx+PdVUdp3/Jb
ewdPPeWsIcHIHXro5/YPoar1b96oZU8QiZwD84l6pV4BcjPtqelaHnnzh8jfyMX8
N8iamte4dsywPuf95lTq319SQXhZV63xEtZ/vNWfcNMFbPqjfWdY3SZiHTGSDHl5
HI7PynvBZq+odEj7joLCniyZXHstXZu8W1eefDp6E63yoxhbK1kPzVw662gzxigd
gtFQiwIDAQABo4HZMIHWMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUa2k9ahhC
St2PAmU5/TUkhniRFjAwHwYDVR0jBBgwFoAUwHqYaI2J+6sFZAwRfap9ZbjKzE4w
EgYDVR0TAQH/BAgwBgEB/wIBADA6BgNVHR8EMzAxMC+gLaArhilodHRwOi8vY3Js
Lmdlb3RydXN0LmNvbS9jcmxzL2d0Z2xvYmFsLmNybDA0BggrBgEFBQcBAQQoMCYw
JAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLmdlb3RydXN0LmNvbTANBgkqhkiG9w0B
AQUFAAOCAQEAq7y8Cl0YlOPBscOoTFXWvrSY8e48HM3P8yQkXJYDJ1j8Nq6iL4/x
/torAsMzvcjdSCIrYA+lAxD9d/jQ7ZZnT/3qRyBwVNypDFV+4ZYlitm12ldKvo2O
SUNjpWxOJ4cl61tt/qJ/OCjgNqutOaWlYsS3XFgsql0BYKZiZ6PAx2Ij9OdsRu61
04BqIhPSLT90T+qvjF+0OJzbrs6vhB6m9jRRWXnT43XcvNfzc9+S7NIgWW+c+5X4
knYYCnwPLKbK3opie9jzzl9ovY8+wXS7FXI6FoOpC+ZNmZzYV+yoAVHHb1c0XqtK
LEL2TxyJeN4mTvVvk0wVaydWTQBUbHq3tw==
-----END CERTIFICATE-----
Certificate:
Data:
Version: 3 (0x2)
Serial Number: 144470 (0x23456)
Signature Algorithm: sha1WithRSAEncryption
Issuer: C=US, O=GeoTrust Inc., CN=GeoTrust Global CA
Validity
Not Before: May 21 04:00:00 2002 GMT
Not After : May 21 04:00:00 2022 GMT
Subject: C=US, O=GeoTrust Inc., CN=GeoTrust Global CA
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
RSA Public Key: (2048 bit)
Modulus (2048 bit):
00:da:cc:18:63:30:fd:f4:17:23:1a:56:7e:5b:df:
3c:6c:38:e4:71:b7:78:91:d4:bc:a1:d8:4c:f8:a8:
43:b6:03:e9:4d:21:07:08:88:da:58:2f:66:39:29:
bd:05:78:8b:9d:38:e8:05:b7:6a:7e:71:a4:e6:c4:
60:a6:b0:ef:80:e4:89:28:0f:9e:25:d6:ed:83:f3:
ad:a6:91:c7:98:c9:42:18:35:14:9d:ad:98:46:92:
2e:4f:ca:f1:87:43:c1:16:95:57:2d:50:ef:89:2d:
80:7a:57:ad:f2:ee:5f:6b:d2:00:8d:b9:14:f8:14:
15:35:d9:c0:46:a3:7b:72:c8:91:bf:c9:55:2b:cd:
d0:97:3e:9c:26:64:cc:df:ce:83:19:71:ca:4e:e6:
d4:d5:7b:a9:19:cd:55:de:c8:ec:d2:5e:38:53:e5:
5c:4f:8c:2d:fe:50:23:36:fc:66:e6:cb:8e:a4:39:
19:00:b7:95:02:39:91:0b:0e:fe:38:2e:d1:1d:05:
9a:f6:4d:3e:6f:0f:07:1d:af:2c:1e:8f:60:39:e2:
fa:36:53:13:39:d4:5e:26:2b:db:3d:a8:14:bd:32:
eb:18:03:28:52:04:71:e5:ab:33:3d:e1:38:bb:07:
36:84:62:9c:79:ea:16:30:f4:5f:c0:2b:e8:71:6b:
e4:f9
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Basic Constraints: critical
CA:TRUE
X509v3 Subject Key Identifier:
C0:7A:98:68:8D:89:FB:AB:05:64:0C:11:7D:AA:7D:65:B8:CA:CC:4E
X509v3 Authority Key Identifier:
keyid:C0:7A:98:68:8D:89:FB:AB:05:64:0C:11:7D:AA:7D:65:B8:CA:CC:4E
Signature Algorithm: sha1WithRSAEncryption
35:e3:29:6a:e5:2f:5d:54:8e:29:50:94:9f:99:1a:14:e4:8f:
78:2a:62:94:a2:27:67:9e:d0:cf:1a:5e:47:e9:c1:b2:a4:cf:
dd:41:1a:05:4e:9b:4b:ee:4a:6f:55:52:b3:24:a1:37:0a:eb:
64:76:2a:2e:2c:f3:fd:3b:75:90:bf:fa:71:d8:c7:3d:37:d2:
b5:05:95:62:b9:a6:de:89:3d:36:7b:38:77:48:97:ac:a6:20:
8f:2e:a6:c9:0c:c2:b2:99:45:00:c7:ce:11:51:22:22:e0:a5:
ea:b6:15:48:09:64:ea:5e:4f:74:f7:05:3e:c7:8a:52:0c:db:
15:b4:bd:6d:9b:e5:c6:b1:54:68:a9:e3:69:90:b6:9a:a5:0f:
b8:b9:3f:20:7d:ae:4a:b5:b8:9c:e4:1d:b6:ab:e6:94:a5:c1:
c7:83:ad:db:f5:27:87:0e:04:6c:d5:ff:dd:a0:5d:ed:87:52:
b7:2b:15:02:ae:39:a6:6a:74:e9:da:c4:e7:bc:4d:34:1e:a9:
5c:4d:33:5f:92:09:2f:88:66:5d:77:97:c7:1d:76:13:a9:d5:
e5:f1:16:09:11:35:d5:ac:db:24:71:70:2c:98:56:0b:d9:17:
b4:d1:e3:51:2b:5e:75:e8:d5:d0:dc:4f:34:ed:c2:05:66:80:
a1:cb:e6:33
-----BEGIN CERTIFICATE-----
MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
-----END CERTIFICATE-----
Certificate:
Data:
Version: 3 (0x2)
Serial Number:
03:37:b9:28:34:7c:60:a6:ae:c5:ad:b1:21:7f:38:60
Signature Algorithm: sha1WithRSAEncryption
Issuer: C=US, O=DigiCert Inc, OU=www.digicert.com, CN=DigiCert High Assurance EV Root CA
Validity
Not Before: Nov 9 12:00:00 2007 GMT
Not After : Nov 10 00:00:00 2021 GMT
Subject: C=US, O=DigiCert Inc, OU=www.digicert.com, CN=DigiCert High Assurance EV CA-1
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
RSA Public Key: (2048 bit)
Modulus (2048 bit):
00:f3:96:62:d8:75:6e:19:ff:3f:34:7c:49:4f:31:
7e:0d:04:4e:99:81:e2:b3:85:55:91:30:b1:c0:af:
70:bb:2c:a8:e7:18:aa:3f:78:f7:90:68:52:86:01:
88:97:e2:3b:06:65:90:aa:bd:65:76:c2:ec:be:10:
5b:37:78:83:60:75:45:c6:bd:74:aa:b6:9f:a4:3a:
01:50:17:c4:39:69:b9:f1:4f:ef:82:c1:ca:f3:4a:
db:cc:9e:50:4f:4d:40:a3:3a:90:e7:86:66:bc:f0:
3e:76:28:4c:d1:75:80:9e:6a:35:14:35:03:9e:db:
0c:8c:c2:28:ad:50:b2:ce:f6:91:a3:c3:a5:0a:58:
49:f6:75:44:6c:ba:f9:ce:e9:ab:3a:02:e0:4d:f3:
ac:e2:7a:e0:60:22:05:3c:82:d3:52:e2:f3:9c:47:
f8:3b:d8:b2:4b:93:56:4a:bf:70:ab:3e:e9:68:c8:
1d:8f:58:1d:2a:4d:5e:27:3d:ad:0a:59:2f:5a:11:
20:40:d9:68:04:68:2d:f4:c0:84:0b:0a:1b:78:df:
ed:1a:58:dc:fb:41:5a:6d:6b:f2:ed:1c:ee:5c:32:
b6:5c:ec:d7:a6:03:32:a6:e8:de:b7:28:27:59:88:
80:ff:7b:ad:89:58:d5:1e:14:a4:f2:b0:70:d4:a0:
3e:a7
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Key Usage: critical
Digital Signature, Certificate Sign, CRL Sign
X509v3 Extended Key Usage:
TLS Web Server Authentication, TLS Web Client Authentication, Code Signing, E-mail Protection, Time Stamping
X509v3 Certificate Policies:
Policy: 2.16.840.1.114412.2.1
CPS: http://www.digicert.com/ssl-cps-repository.htm
User Notice:
Explicit Text:
X509v3 Basic Constraints: critical
CA:TRUE, pathlen:0
Authority Information Access:
OCSP - URI:http://ocsp.digicert.com
CA Issuers - URI:http://www.digicert.com/CACerts/DigiCertHighAssuranceEVRootCA.crt
X509v3 CRL Distribution Points:
URI:http://crl3.digicert.com/DigiCertHighAssuranceEVRootCA.crl
URI:http://crl4.digicert.com/DigiCertHighAssuranceEVRootCA.crl
X509v3 Subject Key Identifier:
4C:58:CB:25:F0:41:4F:52:F4:28:C8:81:43:9B:A6:A8:A0:E6:92:E5
X509v3 Authority Key Identifier:
keyid:B1:3E:C3:69:03:F8:BF:47:01:D4:98:26:1A:08:02:EF:63:64:2B:C3
Signature Algorithm: sha1WithRSAEncryption
4c:7a:17:87:28:5d:17:bc:b2:32:73:bf:cd:2e:f5:58:31:1d:
f0:b1:71:54:9c:d6:9b:67:93:db:2f:03:3e:16:6f:1e:03:c9:
53:84:a3:56:60:1e:78:94:1b:a2:a8:6f:a3:a4:8b:52:91:d7:
dd:5c:95:bb:ef:b5:16:49:e9:a5:42:4f:34:f2:47:ff:ae:81:
7f:13:54:b7:20:c4:70:15:cb:81:0a:81:cb:74:57:dc:9c:df:
24:a4:29:0c:18:f0:1c:e4:ae:07:33:ec:f1:49:3e:55:cf:6e:
4f:0d:54:7b:d3:c9:e8:15:48:d4:c5:bb:dc:35:1c:77:45:07:
48:45:85:bd:d7:7e:53:b8:c0:16:d9:95:cd:8b:8d:7d:c9:60:
4f:d1:a2:9b:e3:d0:30:d6:b4:73:36:e6:d2:f9:03:b2:e3:a4:
f5:e5:b8:3e:04:49:00:ba:2e:a6:4a:72:83:72:9d:f7:0b:8c:
a9:89:e7:b3:d7:64:1f:d6:e3:60:cb:03:c4:dc:88:e9:9d:25:
01:00:71:cb:03:b4:29:60:25:8f:f9:46:d1:7b:71:ae:cd:53:
12:5b:84:8e:c2:0f:c7:ed:93:19:d9:c9:fa:8f:58:34:76:32:
2f:ae:e1:50:14:61:d4:a8:58:a3:c8:30:13:23:ef:c6:25:8c:
36:8f:1c:80
-----BEGIN CERTIFICATE-----
MIIG5jCCBc6gAwIBAgIQAze5KDR8YKauxa2xIX84YDANBgkqhkiG9w0BAQUFADBs
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
ZSBFViBSb290IENBMB4XDTA3MTEwOTEyMDAwMFoXDTIxMTExMDAwMDAwMFowaTEL
MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
LmRpZ2ljZXJ0LmNvbTEoMCYGA1UEAxMfRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
RVYgQ0EtMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPOWYth1bhn/
PzR8SU8xfg0ETpmB4rOFVZEwscCvcLssqOcYqj9495BoUoYBiJfiOwZlkKq9ZXbC
7L4QWzd4g2B1Rca9dKq2n6Q6AVAXxDlpufFP74LByvNK28yeUE9NQKM6kOeGZrzw
PnYoTNF1gJ5qNRQ1A57bDIzCKK1Qss72kaPDpQpYSfZ1RGy6+c7pqzoC4E3zrOJ6
4GAiBTyC01Li85xH+DvYskuTVkq/cKs+6WjIHY9YHSpNXic9rQpZL1oRIEDZaARo
LfTAhAsKG3jf7RpY3PtBWm1r8u0c7lwytlzs16YDMqbo3rcoJ1mIgP97rYlY1R4U
pPKwcNSgPqcCAwEAAaOCA4UwggOBMA4GA1UdDwEB/wQEAwIBhjA7BgNVHSUENDAy
BggrBgEFBQcDAQYIKwYBBQUHAwIGCCsGAQUFBwMDBggrBgEFBQcDBAYIKwYBBQUH
AwgwggHEBgNVHSAEggG7MIIBtzCCAbMGCWCGSAGG/WwCATCCAaQwOgYIKwYBBQUH
AgEWLmh0dHA6Ly93d3cuZGlnaWNlcnQuY29tL3NzbC1jcHMtcmVwb3NpdG9yeS5o
dG0wggFkBggrBgEFBQcCAjCCAVYeggFSAEEAbgB5ACAAdQBzAGUAIABvAGYAIAB0
AGgAaQBzACAAQwBlAHIAdABpAGYAaQBjAGEAdABlACAAYwBvAG4AcwB0AGkAdAB1
AHQAZQBzACAAYQBjAGMAZQBwAHQAYQBuAGMAZQAgAG8AZgAgAHQAaABlACAARABp
AGcAaQBDAGUAcgB0ACAARQBWACAAQwBQAFMAIABhAG4AZAAgAHQAaABlACAAUgBl
AGwAeQBpAG4AZwAgAFAAYQByAHQAeQAgAEEAZwByAGUAZQBtAGUAbgB0ACAAdwBo
AGkAYwBoACAAbABpAG0AaQB0ACAAbABpAGEAYgBpAGwAaQB0AHkAIABhAG4AZAAg
AGEAcgBlACAAaQBuAGMAbwByAHAAbwByAGEAdABlAGQAIABoAGUAcgBlAGkAbgAg
AGIAeQAgAHIAZQBmAGUAcgBlAG4AYwBlAC4wEgYDVR0TAQH/BAgwBgEB/wIBADCB
gwYIKwYBBQUHAQEEdzB1MCQGCCsGAQUFBzABhhhodHRwOi8vb2NzcC5kaWdpY2Vy
dC5jb20wTQYIKwYBBQUHMAKGQWh0dHA6Ly93d3cuZGlnaWNlcnQuY29tL0NBQ2Vy
dHMvRGlnaUNlcnRIaWdoQXNzdXJhbmNlRVZSb290Q0EuY3J0MIGPBgNVHR8EgYcw
gYQwQKA+oDyGOmh0dHA6Ly9jcmwzLmRpZ2ljZXJ0LmNvbS9EaWdpQ2VydEhpZ2hB
c3N1cmFuY2VFVlJvb3RDQS5jcmwwQKA+oDyGOmh0dHA6Ly9jcmw0LmRpZ2ljZXJ0
LmNvbS9EaWdpQ2VydEhpZ2hBc3N1cmFuY2VFVlJvb3RDQS5jcmwwHQYDVR0OBBYE
FExYyyXwQU9S9CjIgUObpqig5pLlMB8GA1UdIwQYMBaAFLE+w2kD+L9HAdSYJhoI
Au9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQBMeheHKF0XvLIyc7/NLvVYMR3wsXFU
nNabZ5PbLwM+Fm8eA8lThKNWYB54lBuiqG+jpItSkdfdXJW777UWSemlQk808kf/
roF/E1S3IMRwFcuBCoHLdFfcnN8kpCkMGPAc5K4HM+zxST5Vz25PDVR708noFUjU
xbvcNRx3RQdIRYW9135TuMAW2ZXNi419yWBP0aKb49Aw1rRzNubS+QOy46T15bg+
BEkAui6mSnKDcp33C4ypieez12Qf1uNgywPE3IjpnSUBAHHLA7QpYCWP+UbRe3Gu
zVMSW4SOwg/H7ZMZ2cn6j1g0djIvruFQFGHUqFijyDATI+/GJYw2jxyA
-----END CERTIFICATE-----
Certificate:
Data:
Version: 3 (0x2)
Serial Number: 1116160165 (0x428740a5)
Signature Algorithm: sha1WithRSAEncryption
Issuer: C=US, O=Entrust.net, OU=www.entrust.net/CPS incorp. by ref. (limits liab.), OU=(c) 1999 Entrust.net Limited, CN=Entrust.net Secure Server Certification Authority
Validity
Not Before: Oct 1 05:00:00 2006 GMT
Not After : Jul 26 18:15:15 2014 GMT
Subject: C=US, O=DigiCert Inc, OU=www.digicert.com, CN=DigiCert High Assurance EV Root CA
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
RSA Public Key: (2048 bit)
Modulus (2048 bit):
00:c6:cc:e5:73:e6:fb:d4:bb:e5:2d:2d:32:a6:df:
e5:81:3f:c9:cd:25:49:b6:71:2a:c3:d5:94:34:67:
a2:0a:1c:b0:5f:69:a6:40:b1:c4:b7:b2:8f:d0:98:
a4:a9:41:59:3a:d3:dc:94:d6:3c:db:74:38:a4:4a:
cc:4d:25:82:f7:4a:a5:53:12:38:ee:f3:49:6d:71:
91:7e:63:b6:ab:a6:5f:c3:a4:84:f8:4f:62:51:be:
f8:c5:ec:db:38:92:e3:06:e5:08:91:0c:c4:28:41:
55:fb:cb:5a:89:15:7e:71:e8:35:bf:4d:72:09:3d:
be:3a:38:50:5b:77:31:1b:8d:b3:c7:24:45:9a:a7:
ac:6d:00:14:5a:04:b7:ba:13:eb:51:0a:98:41:41:
22:4e:65:61:87:81:41:50:a6:79:5c:89:de:19:4a:
57:d5:2e:e6:5d:1c:53:2c:7e:98:cd:1a:06:16:a4:
68:73:d0:34:04:13:5c:a1:71:d3:5a:7c:55:db:5e:
64:e1:37:87:30:56:04:e5:11:b4:29:80:12:f1:79:
39:88:a2:02:11:7c:27:66:b7:88:b7:78:f2:ca:0a:
a8:38:ab:0a:64:c2:bf:66:5d:95:84:c1:a1:25:1e:
87:5d:1a:50:0b:20:12:cc:41:bb:6e:0b:51:38:b8:
4b:cb
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Basic Constraints: critical
CA:TRUE, pathlen:1
X509v3 Extended Key Usage:
TLS Web Server Authentication, TLS Web Client Authentication, E-mail Protection
Authority Information Access:
OCSP - URI:http://ocsp.entrust.net
X509v3 CRL Distribution Points:
URI:http://crl.entrust.net/server1.crl
X509v3 Subject Key Identifier:
B1:3E:C3:69:03:F8:BF:47:01:D4:98:26:1A:08:02:EF:63:64:2B:C3
X509v3 Key Usage:
Certificate Sign, CRL Sign
X509v3 Authority Key Identifier:
keyid:F0:17:62:13:55:3D:B3:FF:0A:00:6B:FB:50:84:97:F3:ED:62:D0:1A
1.2.840.113533.7.65.0:
0
..V7.1....
Signature Algorithm: sha1WithRSAEncryption
48:0e:2b:6f:20:62:4c:28:93:a3:24:3d:58:ab:21:cf:80:f8:
9a:97:90:6a:22:ed:5a:7c:47:36:99:e7:79:84:75:ab:24:8f:
92:0a:d5:61:04:ae:c3:6a:5c:b2:cc:d9:e4:44:87:6f:db:8f:
38:62:f7:44:36:9d:ba:bc:6e:07:c4:d4:8d:e8:1f:d1:0b:60:
a3:b5:9c:ce:63:be:ed:67:dc:f8:ba:de:6e:c9:25:cb:5b:b5:
9d:76:70:0b:df:42:72:f8:4f:41:11:64:a5:d2:ea:fc:d5:af:
11:f4:15:38:67:9c:20:a8:4b:77:5a:91:32:42:32:e7:85:b3:
df:36
-----BEGIN CERTIFICATE-----
MIIEQjCCA6ugAwIBAgIEQodApTANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEw
MDEwNTAwMDBaFw0xNDA3MjYxODE1MTVaMGwxCzAJBgNVBAYTAlVTMRUwEwYDVQQK
EwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20xKzApBgNV
BAMTIkRpZ2lDZXJ0IEhpZ2ggQXNzdXJhbmNlIEVWIFJvb3QgQ0EwggEiMA0GCSqG
SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGzOVz5vvUu+UtLTKm3+WBP8nNJUm2cSrD
1ZQ0Z6IKHLBfaaZAscS3so/QmKSpQVk609yU1jzbdDikSsxNJYL3SqVTEjju80lt
cZF+Y7arpl/DpIT4T2JRvvjF7Ns4kuMG5QiRDMQoQVX7y1qJFX5x6DW/TXIJPb46
OFBbdzEbjbPHJEWap6xtABRaBLe6E+tRCphBQSJOZWGHgUFQpnlcid4ZSlfVLuZd
HFMsfpjNGgYWpGhz0DQEE1yhcdNafFXbXmThN4cwVgTlEbQpgBLxeTmIogIRfCdm
t4i3ePLKCqg4qwpkwr9mXZWEwaElHoddGlALIBLMQbtuC1E4uEvLAgMBAAGjggET
MIIBDzASBgNVHRMBAf8ECDAGAQH/AgEBMCcGA1UdJQQgMB4GCCsGAQUFBwMBBggr
BgEFBQcDAgYIKwYBBQUHAwQwMwYIKwYBBQUHAQEEJzAlMCMGCCsGAQUFBzABhhdo
dHRwOi8vb2NzcC5lbnRydXN0Lm5ldDAzBgNVHR8ELDAqMCigJqAkhiJodHRwOi8v
Y3JsLmVudHJ1c3QubmV0L3NlcnZlcjEuY3JsMB0GA1UdDgQWBBSxPsNpA/i/RwHU
mCYaCALvY2QrwzALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8BdiE1U9s/8KAGv7
UISX8+1i0BowGQYJKoZIhvZ9B0EABAwwChsEVjcuMQMCAIEwDQYJKoZIhvcNAQEF
BQADgYEASA4rbyBiTCiToyQ9WKshz4D4mpeQaiLtWnxHNpnneYR1qySPkgrVYQSu
w2pcsszZ5ESHb9uPOGL3RDadurxuB8TUjegf0Qtgo7WczmO+7Wfc+Lrebskly1u1
nXZwC99CcvhPQRFkpdLq/NWvEfQVOGecIKhLd1qRMkIy54Wz3zY=
-----END CERTIFICATE-----
Certificate:
Data:
Version: 3 (0x2)
Serial Number: 927650371 (0x374ad243)
Signature Algorithm: sha1WithRSAEncryption
Issuer: C=US, O=Entrust.net, OU=www.entrust.net/CPS incorp. by ref. (limits liab.), OU=(c) 1999 Entrust.net Limited, CN=Entrust.net Secure Server Certification Authority
Validity
Not Before: May 25 16:09:40 1999 GMT
Not After : May 25 16:39:40 2019 GMT
Subject: C=US, O=Entrust.net, OU=www.entrust.net/CPS incorp. by ref. (limits liab.), OU=(c) 1999 Entrust.net Limited, CN=Entrust.net Secure Server Certification Authority
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
RSA Public Key: (1024 bit)
Modulus (1024 bit):
00:cd:28:83:34:54:1b:89:f3:0f:af:37:91:31:ff:
af:31:60:c9:a8:e8:b2:10:68:ed:9f:e7:93:36:f1:
0a:64:bb:47:f5:04:17:3f:23:47:4d:c5:27:19:81:
26:0c:54:72:0d:88:2d:d9:1f:9a:12:9f:bc:b3:71:
d3:80:19:3f:47:66:7b:8c:35:28:d2:b9:0a:df:24:
da:9c:d6:50:79:81:7a:5a:d3:37:f7:c2:4a:d8:29:
92:26:64:d1:e4:98:6c:3a:00:8a:f5:34:9b:65:f8:
ed:e3:10:ff:fd:b8:49:58:dc:a0:de:82:39:6b:81:
b1:16:19:61:b9:54:b6:e6:43
Exponent: 3 (0x3)
X509v3 extensions:
Netscape Cert Type:
SSL CA, S/MIME CA, Object Signing CA
X509v3 CRL Distribution Points:
DirName:/C=US/O=Entrust.net/OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/OU=(c) 1999 Entrust.net Limited/CN=Entrust.net Secure Server Certification Authority/CN=CRL1
URI:http://www.entrust.net/CRL/net1.crl
X509v3 Private Key Usage Period:
Not Before: May 25 16:09:40 1999 GMT, Not After: May 25 16:09:40 2019 GMT
X509v3 Key Usage:
Certificate Sign, CRL Sign
X509v3 Authority Key Identifier:
keyid:F0:17:62:13:55:3D:B3:FF:0A:00:6B:FB:50:84:97:F3:ED:62:D0:1A
X509v3 Subject Key Identifier:
F0:17:62:13:55:3D:B3:FF:0A:00:6B:FB:50:84:97:F3:ED:62:D0:1A
X509v3 Basic Constraints:
CA:TRUE
1.2.840.113533.7.65.0:
0
..V4.0....
Signature Algorithm: sha1WithRSAEncryption
90:dc:30:02:fa:64:74:c2:a7:0a:a5:7c:21:8d:34:17:a8:fb:
47:0e:ff:25:7c:8d:13:0a:fb:e4:98:b5:ef:8c:f8:c5:10:0d:
f7:92:be:f1:c3:d5:d5:95:6a:04:bb:2c:ce:26:36:65:c8:31:
c6:e7:ee:3f:e3:57:75:84:7a:11:ef:46:4f:18:f4:d3:98:bb:
a8:87:32:ba:72:f6:3c:e2:3d:9f:d7:1d:d9:c3:60:43:8c:58:
0e:22:96:2f:62:a3:2c:1f:ba:ad:05:ef:ab:32:78:87:a0:54:
73:19:b5:5c:05:f9:52:3e:6d:2d:45:0b:f7:0a:93:ea:ed:06:
f9:b2
-----BEGIN CERTIFICATE-----
MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1
MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE
ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j
b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF
bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg
U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA
A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/
I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3
wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC
AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb
oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5
BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p
dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk
MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp
b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu
dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0
MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi
E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa
MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI
hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN
95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd
2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI=
-----END CERTIFICATE-----
Certificate:
Data:
Version: 3 (0x2)
Serial Number:
0a:5f:11:4d:03:5b:17:91:17:d2:ef:d4:03:8c:3f:3b
Signature Algorithm: sha1WithRSAEncryption
Issuer: C=US, O=DigiCert Inc, OU=www.digicert.com, CN=DigiCert High Assurance EV Root CA
Validity
Not Before: Apr 2 12:00:00 2008 GMT
Not After : Apr 3 00:00:00 2022 GMT
Subject: C=US, O=DigiCert Inc, OU=www.digicert.com, CN=DigiCert High Assurance CA-3
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
Public-Key: (2048 bit)
Modulus:
00:bf:61:0a:29:10:1f:5e:fe:34:37:51:08:f8:1e:
fb:22:ed:61:be:0b:0d:70:4c:50:63:26:75:15:b9:
41:88:97:b6:f0:a0:15:bb:08:60:e0:42:e8:05:29:
10:87:36:8a:28:65:a8:ef:31:07:74:6d:36:97:2f:
28:46:66:04:c7:2a:79:26:7a:99:d5:8e:c3:6d:4f:
a0:5e:ad:bc:3d:91:c2:59:7b:5e:36:6c:c0:53:cf:
00:08:32:3e:10:64:58:10:13:69:c7:0c:ee:9c:42:
51:00:f9:05:44:ee:24:ce:7a:1f:ed:8c:11:bd:12:
a8:f3:15:f4:1c:7a:31:69:01:1b:a7:e6:5d:c0:9a:
6c:7e:09:9e:e7:52:44:4a:10:3a:23:e4:9b:b6:03:
af:a8:9c:b4:5b:9f:d4:4b:ad:92:8c:ce:b5:11:2a:
aa:37:18:8d:b4:c2:b8:d8:5c:06:8c:f8:ff:23:bd:
35:5e:d4:7c:3e:7e:83:0e:91:96:05:98:c3:b2:1f:
e3:c8:65:eb:a9:7b:5d:a0:2c:cc:fc:3c:d9:6d:ed:
cc:fa:4b:43:8c:c9:d4:b8:a5:61:1c:b2:40:b6:28:
12:df:b9:f8:5f:fe:d3:b2:c9:ef:3d:b4:1e:4b:7c:
1c:4c:99:36:9e:3d:eb:ec:a7:68:5e:1d:df:67:6e:
5e:fb
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Key Usage: critical
Digital Signature, Certificate Sign, CRL Sign
X509v3 Certificate Policies:
Policy: 2.16.840.1.114412.1.3.0.2
CPS: http://www.digicert.com/ssl-cps-repository.htm
User Notice:
Explicit Text:
X509v3 Basic Constraints: critical
CA:TRUE, pathlen:0
Authority Information Access:
OCSP - URI:http://ocsp.digicert.com
X509v3 CRL Distribution Points:
Full Name:
URI:http://crl3.digicert.com/DigiCertHighAssuranceEVRootCA.crl
Full Name:
URI:http://crl4.digicert.com/DigiCertHighAssuranceEVRootCA.crl
X509v3 Authority Key Identifier:
keyid:B1:3E:C3:69:03:F8:BF:47:01:D4:98:26:1A:08:02:EF:63:64:2B:C3
X509v3 Subject Key Identifier:
50:EA:73:89:DB:29:FB:10:8F:9E:E5:01:20:D4:DE:79:99:48:83:F7
Signature Algorithm: sha1WithRSAEncryption
1e:e2:a5:48:9e:6c:db:53:38:0f:ef:a6:1a:2a:ac:e2:03:43:
ed:9a:bc:3e:8e:75:1b:f0:fd:2e:22:59:ac:13:c0:61:e2:e7:
fa:e9:99:cd:87:09:75:54:28:bf:46:60:dc:be:51:2c:92:f3:
1b:91:7c:31:08:70:e2:37:b9:c1:5b:a8:bd:a3:0b:00:fb:1a:
15:fd:03:ad:58:6a:c5:c7:24:99:48:47:46:31:1e:92:ef:b4:
5f:4e:34:c7:90:bf:31:c1:f8:b1:84:86:d0:9c:01:aa:df:8a:
56:06:ce:3a:e9:0e:ae:97:74:5d:d7:71:9a:42:74:5f:de:8d:
43:7c:de:e9:55:ed:69:00:cb:05:e0:7a:61:61:33:d1:19:4d:
f9:08:ee:a0:39:c5:25:35:b7:2b:c4:0f:b2:dd:f1:a5:b7:0e:
24:c4:26:28:8d:79:77:f5:2f:f0:57:ba:7c:07:d4:e1:fc:cd:
5a:30:57:7e:86:10:47:dd:31:1f:d7:fc:a2:c2:bf:30:7c:5d:
24:aa:e8:f9:ae:5f:6a:74:c2:ce:6b:b3:46:d8:21:be:29:d4:
8e:5e:15:d6:42:4a:e7:32:6f:a4:b1:6b:51:83:58:be:3f:6d:
c7:fb:da:03:21:cb:6a:16:19:4e:0a:f0:ad:84:ca:5d:94:b3:
5a:76:f7:61
-----BEGIN CERTIFICATE-----
MIIGWDCCBUCgAwIBAgIQCl8RTQNbF5EX0u/UA4w/OzANBgkqhkiG9w0BAQUFADBs
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
ZSBFViBSb290IENBMB4XDTA4MDQwMjEyMDAwMFoXDTIyMDQwMzAwMDAwMFowZjEL
MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
LmRpZ2ljZXJ0LmNvbTElMCMGA1UEAxMcRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
Q0EtMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9hCikQH17+NDdR
CPge+yLtYb4LDXBMUGMmdRW5QYiXtvCgFbsIYOBC6AUpEIc2iihlqO8xB3RtNpcv
KEZmBMcqeSZ6mdWOw21PoF6tvD2Rwll7XjZswFPPAAgyPhBkWBATaccM7pxCUQD5
BUTuJM56H+2MEb0SqPMV9Bx6MWkBG6fmXcCabH4JnudSREoQOiPkm7YDr6ictFuf
1EutkozOtREqqjcYjbTCuNhcBoz4/yO9NV7UfD5+gw6RlgWYw7If48hl66l7XaAs
zPw82W3tzPpLQ4zJ1LilYRyyQLYoEt+5+F/+07LJ7z20Hkt8HEyZNp496+ynaF4d
32duXvsCAwEAAaOCAvowggL2MA4GA1UdDwEB/wQEAwIBhjCCAcYGA1UdIASCAb0w
ggG5MIIBtQYLYIZIAYb9bAEDAAIwggGkMDoGCCsGAQUFBwIBFi5odHRwOi8vd3d3
LmRpZ2ljZXJ0LmNvbS9zc2wtY3BzLXJlcG9zaXRvcnkuaHRtMIIBZAYIKwYBBQUH
AgIwggFWHoIBUgBBAG4AeQAgAHUAcwBlACAAbwBmACAAdABoAGkAcwAgAEMAZQBy
AHQAaQBmAGkAYwBhAHQAZQAgAGMAbwBuAHMAdABpAHQAdQB0AGUAcwAgAGEAYwBj
AGUAcAB0AGEAbgBjAGUAIABvAGYAIAB0AGgAZQAgAEQAaQBnAGkAQwBlAHIAdAAg
AEMAUAAvAEMAUABTACAAYQBuAGQAIAB0AGgAZQAgAFIAZQBsAHkAaQBuAGcAIABQ
AGEAcgB0AHkAIABBAGcAcgBlAGUAbQBlAG4AdAAgAHcAaABpAGMAaAAgAGwAaQBt
AGkAdAAgAGwAaQBhAGIAaQBsAGkAdAB5ACAAYQBuAGQAIABhAHIAZQAgAGkAbgBj
AG8AcgBwAG8AcgBhAHQAZQBkACAAaABlAHIAZQBpAG4AIABiAHkAIAByAGUAZgBl
AHIAZQBuAGMAZQAuMBIGA1UdEwEB/wQIMAYBAf8CAQAwNAYIKwYBBQUHAQEEKDAm
MCQGCCsGAQUFBzABhhhodHRwOi8vb2NzcC5kaWdpY2VydC5jb20wgY8GA1UdHwSB
hzCBhDBAoD6gPIY6aHR0cDovL2NybDMuZGlnaWNlcnQuY29tL0RpZ2lDZXJ0SGln
aEFzc3VyYW5jZUVWUm9vdENBLmNybDBAoD6gPIY6aHR0cDovL2NybDQuZGlnaWNl
cnQuY29tL0RpZ2lDZXJ0SGlnaEFzc3VyYW5jZUVWUm9vdENBLmNybDAfBgNVHSME
GDAWgBSxPsNpA/i/RwHUmCYaCALvY2QrwzAdBgNVHQ4EFgQUUOpzidsp+xCPnuUB
INTeeZlIg/cwDQYJKoZIhvcNAQEFBQADggEBAB7ipUiebNtTOA/vphoqrOIDQ+2a
vD6OdRvw/S4iWawTwGHi5/rpmc2HCXVUKL9GYNy+USyS8xuRfDEIcOI3ucFbqL2j
CwD7GhX9A61YasXHJJlIR0YxHpLvtF9ONMeQvzHB+LGEhtCcAarfilYGzjrpDq6X
dF3XcZpCdF/ejUN83ulV7WkAywXgemFhM9EZTfkI7qA5xSU1tyvED7Ld8aW3DiTE
JiiNeXf1L/BXunwH1OH8zVowV36GEEfdMR/X/KLCvzB8XSSq6PmuX2p0ws5rs0bY
Ib4p1I5eFdZCSucyb6Sxa1GDWL4/bcf72gMhy2oWGU4K8K2Eyl2Us1p292E=
-----END CERTIFICATE-----

View File

@@ -1,197 +0,0 @@
Certificate:
Data:
Version: 3 (0x2)
Serial Number:
0a:5f:11:4d:03:5b:17:91:17:d2:ef:d4:03:8c:3f:3b
Signature Algorithm: sha1WithRSAEncryption
Issuer: C=US, O=DigiCert Inc, OU=www.digicert.com, CN=DigiCert High Assurance EV Root CA
Validity
Not Before: Apr 2 12:00:00 2008 GMT
Not After : Apr 3 00:00:00 2022 GMT
Subject: C=US, O=DigiCert Inc, OU=www.digicert.com, CN=DigiCert High Assurance CA-3
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
RSA Public Key: (2048 bit)
Modulus (2048 bit):
00:bf:61:0a:29:10:1f:5e:fe:34:37:51:08:f8:1e:
fb:22:ed:61:be:0b:0d:70:4c:50:63:26:75:15:b9:
41:88:97:b6:f0:a0:15:bb:08:60:e0:42:e8:05:29:
10:87:36:8a:28:65:a8:ef:31:07:74:6d:36:97:2f:
28:46:66:04:c7:2a:79:26:7a:99:d5:8e:c3:6d:4f:
a0:5e:ad:bc:3d:91:c2:59:7b:5e:36:6c:c0:53:cf:
00:08:32:3e:10:64:58:10:13:69:c7:0c:ee:9c:42:
51:00:f9:05:44:ee:24:ce:7a:1f:ed:8c:11:bd:12:
a8:f3:15:f4:1c:7a:31:69:01:1b:a7:e6:5d:c0:9a:
6c:7e:09:9e:e7:52:44:4a:10:3a:23:e4:9b:b6:03:
af:a8:9c:b4:5b:9f:d4:4b:ad:92:8c:ce:b5:11:2a:
aa:37:18:8d:b4:c2:b8:d8:5c:06:8c:f8:ff:23:bd:
35:5e:d4:7c:3e:7e:83:0e:91:96:05:98:c3:b2:1f:
e3:c8:65:eb:a9:7b:5d:a0:2c:cc:fc:3c:d9:6d:ed:
cc:fa:4b:43:8c:c9:d4:b8:a5:61:1c:b2:40:b6:28:
12:df:b9:f8:5f:fe:d3:b2:c9:ef:3d:b4:1e:4b:7c:
1c:4c:99:36:9e:3d:eb:ec:a7:68:5e:1d:df:67:6e:
5e:fb
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Key Usage: critical
Digital Signature, Certificate Sign, CRL Sign
X509v3 Certificate Policies:
Policy: 2.16.840.1.114412.1.3.0.2
CPS: http://www.digicert.com/ssl-cps-repository.htm
User Notice:
Explicit Text:
X509v3 Basic Constraints: critical
CA:TRUE, pathlen:0
Authority Information Access:
OCSP - URI:http://ocsp.digicert.com
X509v3 CRL Distribution Points:
URI:http://crl3.digicert.com/DigiCertHighAssuranceEVRootCA.crl
URI:http://crl4.digicert.com/DigiCertHighAssuranceEVRootCA.crl
X509v3 Authority Key Identifier:
keyid:B1:3E:C3:69:03:F8:BF:47:01:D4:98:26:1A:08:02:EF:63:64:2B:C3
X509v3 Subject Key Identifier:
50:EA:73:89:DB:29:FB:10:8F:9E:E5:01:20:D4:DE:79:99:48:83:F7
Signature Algorithm: sha1WithRSAEncryption
1e:e2:a5:48:9e:6c:db:53:38:0f:ef:a6:1a:2a:ac:e2:03:43:
ed:9a:bc:3e:8e:75:1b:f0:fd:2e:22:59:ac:13:c0:61:e2:e7:
fa:e9:99:cd:87:09:75:54:28:bf:46:60:dc:be:51:2c:92:f3:
1b:91:7c:31:08:70:e2:37:b9:c1:5b:a8:bd:a3:0b:00:fb:1a:
15:fd:03:ad:58:6a:c5:c7:24:99:48:47:46:31:1e:92:ef:b4:
5f:4e:34:c7:90:bf:31:c1:f8:b1:84:86:d0:9c:01:aa:df:8a:
56:06:ce:3a:e9:0e:ae:97:74:5d:d7:71:9a:42:74:5f:de:8d:
43:7c:de:e9:55:ed:69:00:cb:05:e0:7a:61:61:33:d1:19:4d:
f9:08:ee:a0:39:c5:25:35:b7:2b:c4:0f:b2:dd:f1:a5:b7:0e:
24:c4:26:28:8d:79:77:f5:2f:f0:57:ba:7c:07:d4:e1:fc:cd:
5a:30:57:7e:86:10:47:dd:31:1f:d7:fc:a2:c2:bf:30:7c:5d:
24:aa:e8:f9:ae:5f:6a:74:c2:ce:6b:b3:46:d8:21:be:29:d4:
8e:5e:15:d6:42:4a:e7:32:6f:a4:b1:6b:51:83:58:be:3f:6d:
c7:fb:da:03:21:cb:6a:16:19:4e:0a:f0:ad:84:ca:5d:94:b3:
5a:76:f7:61
-----BEGIN CERTIFICATE-----
MIIGWDCCBUCgAwIBAgIQCl8RTQNbF5EX0u/UA4w/OzANBgkqhkiG9w0BAQUFADBs
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
ZSBFViBSb290IENBMB4XDTA4MDQwMjEyMDAwMFoXDTIyMDQwMzAwMDAwMFowZjEL
MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
LmRpZ2ljZXJ0LmNvbTElMCMGA1UEAxMcRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
Q0EtMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9hCikQH17+NDdR
CPge+yLtYb4LDXBMUGMmdRW5QYiXtvCgFbsIYOBC6AUpEIc2iihlqO8xB3RtNpcv
KEZmBMcqeSZ6mdWOw21PoF6tvD2Rwll7XjZswFPPAAgyPhBkWBATaccM7pxCUQD5
BUTuJM56H+2MEb0SqPMV9Bx6MWkBG6fmXcCabH4JnudSREoQOiPkm7YDr6ictFuf
1EutkozOtREqqjcYjbTCuNhcBoz4/yO9NV7UfD5+gw6RlgWYw7If48hl66l7XaAs
zPw82W3tzPpLQ4zJ1LilYRyyQLYoEt+5+F/+07LJ7z20Hkt8HEyZNp496+ynaF4d
32duXvsCAwEAAaOCAvowggL2MA4GA1UdDwEB/wQEAwIBhjCCAcYGA1UdIASCAb0w
ggG5MIIBtQYLYIZIAYb9bAEDAAIwggGkMDoGCCsGAQUFBwIBFi5odHRwOi8vd3d3
LmRpZ2ljZXJ0LmNvbS9zc2wtY3BzLXJlcG9zaXRvcnkuaHRtMIIBZAYIKwYBBQUH
AgIwggFWHoIBUgBBAG4AeQAgAHUAcwBlACAAbwBmACAAdABoAGkAcwAgAEMAZQBy
AHQAaQBmAGkAYwBhAHQAZQAgAGMAbwBuAHMAdABpAHQAdQB0AGUAcwAgAGEAYwBj
AGUAcAB0AGEAbgBjAGUAIABvAGYAIAB0AGgAZQAgAEQAaQBnAGkAQwBlAHIAdAAg
AEMAUAAvAEMAUABTACAAYQBuAGQAIAB0AGgAZQAgAFIAZQBsAHkAaQBuAGcAIABQ
AGEAcgB0AHkAIABBAGcAcgBlAGUAbQBlAG4AdAAgAHcAaABpAGMAaAAgAGwAaQBt
AGkAdAAgAGwAaQBhAGIAaQBsAGkAdAB5ACAAYQBuAGQAIABhAHIAZQAgAGkAbgBj
AG8AcgBwAG8AcgBhAHQAZQBkACAAaABlAHIAZQBpAG4AIABiAHkAIAByAGUAZgBl
AHIAZQBuAGMAZQAuMBIGA1UdEwEB/wQIMAYBAf8CAQAwNAYIKwYBBQUHAQEEKDAm
MCQGCCsGAQUFBzABhhhodHRwOi8vb2NzcC5kaWdpY2VydC5jb20wgY8GA1UdHwSB
hzCBhDBAoD6gPIY6aHR0cDovL2NybDMuZGlnaWNlcnQuY29tL0RpZ2lDZXJ0SGln
aEFzc3VyYW5jZUVWUm9vdENBLmNybDBAoD6gPIY6aHR0cDovL2NybDQuZGlnaWNl
cnQuY29tL0RpZ2lDZXJ0SGlnaEFzc3VyYW5jZUVWUm9vdENBLmNybDAfBgNVHSME
GDAWgBSxPsNpA/i/RwHUmCYaCALvY2QrwzAdBgNVHQ4EFgQUUOpzidsp+xCPnuUB
INTeeZlIg/cwDQYJKoZIhvcNAQEFBQADggEBAB7ipUiebNtTOA/vphoqrOIDQ+2a
vD6OdRvw/S4iWawTwGHi5/rpmc2HCXVUKL9GYNy+USyS8xuRfDEIcOI3ucFbqL2j
CwD7GhX9A61YasXHJJlIR0YxHpLvtF9ONMeQvzHB+LGEhtCcAarfilYGzjrpDq6X
dF3XcZpCdF/ejUN83ulV7WkAywXgemFhM9EZTfkI7qA5xSU1tyvED7Ld8aW3DiTE
JiiNeXf1L/BXunwH1OH8zVowV36GEEfdMR/X/KLCvzB8XSSq6PmuX2p0ws5rs0bY
Ib4p1I5eFdZCSucyb6Sxa1GDWL4/bcf72gMhy2oWGU4K8K2Eyl2Us1p292E=
-----END CERTIFICATE-----
Certificate:
Data:
Version: 3 (0x2)
Serial Number: 1116160165 (0x428740a5)
Signature Algorithm: sha1WithRSAEncryption
Issuer: C=US, O=Entrust.net, OU=www.entrust.net/CPS incorp. by ref. (limits liab.), OU=(c) 1999 Entrust.net Limited, CN=Entrust.net Secure Server Certification Authority
Validity
Not Before: Oct 1 05:00:00 2006 GMT
Not After : Jul 26 18:15:15 2014 GMT
Subject: C=US, O=DigiCert Inc, OU=www.digicert.com, CN=DigiCert High Assurance EV Root CA
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
RSA Public Key: (2048 bit)
Modulus (2048 bit):
00:c6:cc:e5:73:e6:fb:d4:bb:e5:2d:2d:32:a6:df:
e5:81:3f:c9:cd:25:49:b6:71:2a:c3:d5:94:34:67:
a2:0a:1c:b0:5f:69:a6:40:b1:c4:b7:b2:8f:d0:98:
a4:a9:41:59:3a:d3:dc:94:d6:3c:db:74:38:a4:4a:
cc:4d:25:82:f7:4a:a5:53:12:38:ee:f3:49:6d:71:
91:7e:63:b6:ab:a6:5f:c3:a4:84:f8:4f:62:51:be:
f8:c5:ec:db:38:92:e3:06:e5:08:91:0c:c4:28:41:
55:fb:cb:5a:89:15:7e:71:e8:35:bf:4d:72:09:3d:
be:3a:38:50:5b:77:31:1b:8d:b3:c7:24:45:9a:a7:
ac:6d:00:14:5a:04:b7:ba:13:eb:51:0a:98:41:41:
22:4e:65:61:87:81:41:50:a6:79:5c:89:de:19:4a:
57:d5:2e:e6:5d:1c:53:2c:7e:98:cd:1a:06:16:a4:
68:73:d0:34:04:13:5c:a1:71:d3:5a:7c:55:db:5e:
64:e1:37:87:30:56:04:e5:11:b4:29:80:12:f1:79:
39:88:a2:02:11:7c:27:66:b7:88:b7:78:f2:ca:0a:
a8:38:ab:0a:64:c2:bf:66:5d:95:84:c1:a1:25:1e:
87:5d:1a:50:0b:20:12:cc:41:bb:6e:0b:51:38:b8:
4b:cb
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Basic Constraints: critical
CA:TRUE, pathlen:1
X509v3 Extended Key Usage:
TLS Web Server Authentication, TLS Web Client Authentication, E-mail Protection
Authority Information Access:
OCSP - URI:http://ocsp.entrust.net
X509v3 CRL Distribution Points:
URI:http://crl.entrust.net/server1.crl
X509v3 Subject Key Identifier:
B1:3E:C3:69:03:F8:BF:47:01:D4:98:26:1A:08:02:EF:63:64:2B:C3
X509v3 Key Usage:
Certificate Sign, CRL Sign
X509v3 Authority Key Identifier:
keyid:F0:17:62:13:55:3D:B3:FF:0A:00:6B:FB:50:84:97:F3:ED:62:D0:1A
1.2.840.113533.7.65.0:
0
..V7.1....
Signature Algorithm: sha1WithRSAEncryption
48:0e:2b:6f:20:62:4c:28:93:a3:24:3d:58:ab:21:cf:80:f8:
9a:97:90:6a:22:ed:5a:7c:47:36:99:e7:79:84:75:ab:24:8f:
92:0a:d5:61:04:ae:c3:6a:5c:b2:cc:d9:e4:44:87:6f:db:8f:
38:62:f7:44:36:9d:ba:bc:6e:07:c4:d4:8d:e8:1f:d1:0b:60:
a3:b5:9c:ce:63:be:ed:67:dc:f8:ba:de:6e:c9:25:cb:5b:b5:
9d:76:70:0b:df:42:72:f8:4f:41:11:64:a5:d2:ea:fc:d5:af:
11:f4:15:38:67:9c:20:a8:4b:77:5a:91:32:42:32:e7:85:b3:
df:36
-----BEGIN CERTIFICATE-----
MIIEQjCCA6ugAwIBAgIEQodApTANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEw
MDEwNTAwMDBaFw0xNDA3MjYxODE1MTVaMGwxCzAJBgNVBAYTAlVTMRUwEwYDVQQK
EwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20xKzApBgNV
BAMTIkRpZ2lDZXJ0IEhpZ2ggQXNzdXJhbmNlIEVWIFJvb3QgQ0EwggEiMA0GCSqG
SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGzOVz5vvUu+UtLTKm3+WBP8nNJUm2cSrD
1ZQ0Z6IKHLBfaaZAscS3so/QmKSpQVk609yU1jzbdDikSsxNJYL3SqVTEjju80lt
cZF+Y7arpl/DpIT4T2JRvvjF7Ns4kuMG5QiRDMQoQVX7y1qJFX5x6DW/TXIJPb46
OFBbdzEbjbPHJEWap6xtABRaBLe6E+tRCphBQSJOZWGHgUFQpnlcid4ZSlfVLuZd
HFMsfpjNGgYWpGhz0DQEE1yhcdNafFXbXmThN4cwVgTlEbQpgBLxeTmIogIRfCdm
t4i3ePLKCqg4qwpkwr9mXZWEwaElHoddGlALIBLMQbtuC1E4uEvLAgMBAAGjggET
MIIBDzASBgNVHRMBAf8ECDAGAQH/AgEBMCcGA1UdJQQgMB4GCCsGAQUFBwMBBggr
BgEFBQcDAgYIKwYBBQUHAwQwMwYIKwYBBQUHAQEEJzAlMCMGCCsGAQUFBzABhhdo
dHRwOi8vb2NzcC5lbnRydXN0Lm5ldDAzBgNVHR8ELDAqMCigJqAkhiJodHRwOi8v
Y3JsLmVudHJ1c3QubmV0L3NlcnZlcjEuY3JsMB0GA1UdDgQWBBSxPsNpA/i/RwHU
mCYaCALvY2QrwzALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8BdiE1U9s/8KAGv7
UISX8+1i0BowGQYJKoZIhvZ9B0EABAwwChsEVjcuMQMCAIEwDQYJKoZIhvcNAQEF
BQADgYEASA4rbyBiTCiToyQ9WKshz4D4mpeQaiLtWnxHNpnneYR1qySPkgrVYQSu
w2pcsszZ5ESHb9uPOGL3RDadurxuB8TUjegf0Qtgo7WczmO+7Wfc+Lrebskly1u1
nXZwC99CcvhPQRFkpdLq/NWvEfQVOGecIKhLd1qRMkIy54Wz3zY=
-----END CERTIFICATE-----

View File

@@ -0,0 +1,64 @@
{
"schema_version": "2.0",
// All repositories must be an HTTP or HTTPS URL. HTTPS is vastly superior
// since verification of the source server is performed on SSL certificates.
"repositories": [
"http://sublime.wbond.net/packages.json",
"https://github.com/buymeasoda/soda-theme",
"https://github.com/SublimeText"
],
// The "packages_cache" is completely optional, but allows the
// channel to cache and deliver package data from multiple
// repositories in a single HTTP request, allowing for significantly
// improved performance.
"packages_cache": {
// The first level keys are the repository URLs
"http://sublime.wbond.net/packages.json": [
// Each repository has an array of packages with their fully
// expanded info. This means that the "details" key must be expanded
// into the various keys it provides.
{
"name": "Alignment",
"description": "Multi-line and multiple selection alignment plugin",
"author": "wbond",
"homepage": "http://wbond.net/sublime_packages/alignment",
"releases": [
{
"version": "2.0.0",
"url": "https://sublime.wbond.net/Alignment.sublime-package",
"date": "2011-09-18 20:12:41"
}
]
}
]
},
// Package Control ships with the SSL Certificate Authority (CA) cert for the
// SSL certificate that secures and identifies sublime.wbond.net. After this
// initial connection is made, the channel server provides a list of CA certs
// for the various URLs that Package Control needs to connect to. This way the
// default channel (https://sublime.wbond.net/channel.json) can provide
// real-time updates to CA certs in the case that a CA is compromised. The
// CA certs are extracted from openssl, and the server runs on an LTS version
// of Ubuntu, which automatically applies security patches from the official
// Ubuntu repositories. This architecture helps to ensure that the packages
// being downloaded are from the source listed and that users are very
// unlikely to be the subject of the man-in-the-middle attack.
"certs": {
// All certs have the domain they apply to as the key
"sublime.wbond.net": [
// The value is an array of two elements, the first being an md5
// hash of the contents of the certificate. This helps in detecting
// CA cert changes. The second element is the URL where the cert
// can be downloaded, if it is not already installed on the users
// copy of Sublime Text.
"7f4f8622b4fd001c7f648e09aae7edaa",
"https://sublime.wbond.net/certs/7f4f8622b4fd001c7f648e09aae7edaa"
]
}
}

View File

@@ -1,54 +0,0 @@
{
"schema_version": "1.2",
"packages": [
{
"name": "GitHub Example",
"description": "An example from GitHub, be sure to use the zipball URL",
"author": "John Smith",
"homepage": "http://example.com",
"last_modified": "2011-12-12 05:04:31",
"platforms": {
"*": [
{
"version": "1.1",
"url": "http://nodeload.github.com/john_smith/github_example/zipball/master"
}
]
}
},
{
"name": "BitBucket Example",
"description": "An example from BitBucket, be sure to use the zip URL",
"author": "John Smith",
"homepage": "http://example.com",
"last_modified": "2011-08-12 12:21:09",
"platforms": {
"*": [
{
"version": "1.0",
"url": "https://bitbucket.org/john_smith/bitbucket_example/get/tip.zip"
}
]
}
},
{
"name": "Tortoise",
"description": "Keyboard shortcuts and menu entries to execute TortoiseSVN, TortoiseHg and TortoiseGit commands",
"author": "Will Bond",
"homepage": "http://sublime.wbond.net",
"last_modified": "2011-11-30 22:55:52",
"platforms": {
"windows": [
{
"version": "1.0",
"url": "http://sublime.wbond.net/Tortoise.sublime-package"
}
]
}
}
],
"renamed_packages": {
"sublime-old-package": "NewPackage",
"OldPackage": "NewName"
}
}

View File

@@ -1,60 +0,0 @@
{
"schema_version": "1.2",
"repositories": [
"http://sublime.wbond.net/packages.json",
"https://github.com/buymeasoda/soda-theme",
"https://github.com/SublimeText"
],
"package_name_map": {
"soda-theme": "Theme - Soda"
},
"renamed_packages": {
"old-name": "New Name"
},
"packages": {
"http://sublime.wbond.net/packages.json": [
{
"name": "GitHub Example",
"description": "An example from GitHub, be sure to use the zipball URL",
"author": "John Smith",
"homepage": "http://example.com",
"platforms": {
"*": [
{
"version": "1.1",
"url": "http://nodeload.github.com/john_smith/github_example/zipball/master"
}
]
}
},
{
"name": "BitBucket Example",
"description": "An example from BitBucket, be sure to use the zip URL",
"author": "John Smith",
"homepage": "http://example.com",
"platforms": {
"*": [
{
"version": "1.0",
"url": "https://bitbucket.org/john_smith/bitbucket_example/get/tip.zip"
}
]
}
},
{
"name": "Tortoise",
"description": "Keyboard shortcuts and menu entries to execute TortoiseSVN, TortoiseHg and TortoiseGit commands",
"author": "Will Bond",
"homepage": "http://sublime.wbond.net",
"platforms": {
"windows": [
{
"version": "1.0",
"url": "http://sublime.wbond.net/Tortoise.sublime-package"
}
]
}
}
]
}
}

View File

@@ -0,0 +1,275 @@
{
"schema_version": "2.0",
// Packages can be specified with a simple URL to a GitHub or BitBucket
// repository, but details can be overridden for every field. It is
// also possible not to utilize GitHub or BitBucket at all, but just to
// host your packages on any server with an SSL certificate.
"packages": [
// This is what most packages should aim to model.
//
// The majority of the information about a package ("name",
// "description", "author") are all pulled from the GitHub (or
// BitBucket) repository info.
//
// If the word "sublime" exists in the repository name, the name
// can be overridden by the "name" key.
//
// A release is created from the tag that is the highest semantic
// versioning version number in the list of tags.
{
"name": "Alignment",
"details": "https://github.com/wbond/sublime_alignment",
"releases": [
{
"details": "https://github.com/wbond/sublime_alignment/tags"
}
]
},
// Here is an equivalent package being pulled from BitBucket
{
"name": "Alignment",
"details": "https://bitbucket.org/wbond/sublime_alignment",
"releases": [
{
"details": "https://bitbucket.org/wbond/sublime_alignment#tags"
}
]
},
// Pull most details from GitHub, releases from master branch.
// This form is discouraged because users will upgrade to every single
// commit you make to master.
{
"details": "https://github.com/wbond/sublime_alignment"
},
// Pull most details from a BitBucket repository and releases from
// the branch "default" or "master", depending on how your repository
// is configured.
// Similar to the above example, this form is discouraged because users
// will upgrade to every single commit you make to master.
{
"details": "https://bitbucket.org/wbond/sublime_alignment"
},
// Use a custom name instead of just the URL slug
{
"name": "Alignment",
"details": "https://github.com/wbond/sublime_alignment"
},
// You can also override the homepage and author
{
"name": "Alignment",
"details": "https://github.com/wbond/sublime_alignment",
"homepage": "http://wbond.net/sublime_packages/alignment",
"author": "wbond"
},
// It is possible to provide the URL to a readme file. This URL
// should be to the raw source of the file, not rendered HTML.
// GitHub and BitBucket repositories will automatically provide
// these.
//
// The following extensions will be rendered:
//
// .markdown, .mdown, .mkd, .md
// .textile
// .creole
// .rst
//
// All others are treated as plaintext.
{
"details": "https://github.com/wbond/sublime_alignment",
"readme": "https://raw.github.com/wbond/sublime_alignment/master/readme.creole"
},
// If a package has a public bug tracker, the URL should be
// included via the "issues" key. Both GitHub and BitBucket
// repositories will automatically provide this if they have
// issues enabled.
{
"details": "https://github.com/wbond/sublime_alignment",
"issues": "https://github.com/wbond/sublime_alignment/issues"
},
// The URL to donate to support the development of a package.
// GitHub and BitBucket repositories will default to:
//
// https://www.gittip.com/{username}/
//
// Other URLs with special integration include:
//
// https://flattr.com/profile/{username}
// https://www.dwolla.com/hub/{username}
//
// This may also contain a URL to another other donation-type site
// where users may support the author for their development of the
// package.
{
"details": "https://github.com/wbond/sublime_alignment",
"donate": "https://www.gittip.com/wbond/"
},
// The URL to purchase a license to the package
{
"details": "https://github.com/wbond/sublime_alignment",
"buy": "https://wbond.net/sublime_packages/alignment/buy"
},
// If you rename a package, you can provide the previous name(s)
// so that users with the old package name can be automatically
// upgraded to the new one.
{
"name": "Alignment",
"details": "https://github.com/wbond/sublime_alignment",
"previous_names": ["sublime_alignment"]
},
// Packages can be labelled for the purpose of creating a
// folksonomy so users may more easily find relevant packages.
// Labels should be all lower case and should use spaces instead
// of _ or - to separate words.
//
// Some suggested labels are listed below, however, anything can
// be used as a label:
//
// auto-complete
// browser integration
// build system
// code navigation
// code sharing
// color scheme
// deprecated
// diff/merge
// editor emulation
// file creation
// file navigation
// formatting
// ftp
// language syntax
// linting
// minification
// search
// snippets
// terminal/shell/repl
// testing
// text manipulation
// text navigation
// theme
// todo
// vcs
{
"details": "https://github.com/wbond/sublime_alignment",
"labels": ["text manipulation", "formatting"]
},
// In addition to the recommendation above of pulling releases
// from tags that are semantic version numbers, releases can also
// come from a custom branch.
{
"details": "https://github.com/wbond/sublime_alignment",
"releases": [
{
"details": "https://github.com/wbond/sublime_alignment/tree/custom_branch"
}
]
},
// An equivalent package being pulled from BitBucket.
{
"details": "https://bitbucket.org/wbond/sublime_alignment",
"releases": [
{
"details": "https://bitbucket.org/wbond/sublime_alignment/src/custom_branch"
}
]
},
// If your package is only compatible with specific builds of
// Sublime Text, this will cause the package to be hidden from
// users with incompatible versions.
{
"details": "https://github.com/wbond/sublime_alignment",
"releases": [
{
// Could also be >2999 for ST3. Leaving this out indicates
// the package works with both ST2 and ST3.
"sublime_text": "<3000",
"details": "https://github.com/wbond/sublime_alignment"
}
]
},
// The "platforms" key allows specifying what platform(s) the release
// is valid for. As shown, there can be multiple releases of a package
// at any given time. However, only the latest version for any given
// platform/arch will be shown to the user.
//
// The "platforms" key allows specifying a single platform, or a list
// of platforms. Valid platform identifiers include:
//
// "*"
// "windows", "windows-x64", "windows-x32"
// "osx", "osx-x64"
// "linux", "linux-x32", "linux-x64"
{
"details": "https://github.com/wbond/sublime_alignment",
"releases": [
{
// Defaults to "*", or all platforms.
"platforms": ["osx", "linux"],
"details": "https://github.com/wbond/sublime_alignment/tree/posix"
},
{
"platforms": "windows",
"details": "https://github.com/wbond/sublime_alignment/tree/win32"
}
]
},
// If you don't use a "details" key for a "releases" entry, you need to
// specify the "version", "url" and "date" manually.
{
"details": "https://github.com/wbond/sublime_alignment",
"releases": [
{
// The version number needs to be a semantic version number per
// http://semver.org 2.x.x
"version": "2.0.0",
// The URL needs to be a zip file containing the package. It is permissible
// for the zip file to contain a single root folder with any name. All
// files will be extracted out of this single root folder. This allows
// zip files from GitHub and BitBucket to be used as sources.
"url": "https://codeload.github.com/wbond/sublime_alignment/zip/v2.0.0",
// The date MUST be in the form "YYYY-MM-DD HH:MM:SS" and SHOULD be UTC
"date": "2011-09-18 20:12:41"
}
]
}
],
// If you need/want to split your repository up into multiple smaller files
// for the sake of organization, the "includes" key allows you to enter
// URL paths that will be combined together and dynamically inserted
// into the "packages" key. These URLs these can be relative or absolute.
"includes": [
// Here is an example of how relative paths work for URLs. If this file
// was loaded from:
// "https://sublime.wbond.net/example-repository.json"
// then the following files would be loaded from:
// "https://sublime.wbond.net/repository/0-9.json"
// "https://sublime.wbond.net/repository/a.json"
"./repository/0-9.json",
"./repository/a.json",
// An example of an absolute URL
"https://sublime.wbond.net/repository/b.json"
]
}

View File

@@ -1,86 +0,0 @@
# -*- coding: utf-8 -*-
# This code is copyright Konstantine Rybnikov <k-bx@k-bx.com>, and is
# available at https://github.com/k-bx/python-semver and is licensed under the
# BSD License
import re
_REGEX = re.compile('^(?P<major>[0-9]+)'
'\.(?P<minor>[0-9]+)'
'\.(?P<patch>[0-9]+)'
'(\-(?P<prerelease>[0-9A-Za-z]+(\.[0-9A-Za-z]+)*))?'
'(\+(?P<build>[0-9A-Za-z]+(\.[0-9A-Za-z]+)*))?$')
if 'cmp' not in __builtins__:
cmp = lambda a,b: (a > b) - (a < b)
def parse(version):
    """Split a SemVer string into its named parts.

    Returns a dict with keys major, minor, patch (as ints) plus
    prerelease and build (strings or None). Raises ValueError when
    the string is not valid SemVer.
    """
    m = _REGEX.match(version)
    if m is None:
        raise ValueError('%s is not valid SemVer string' % version)
    parts = m.groupdict()
    for numeric in ('major', 'minor', 'patch'):
        parts[numeric] = int(parts[numeric])
    return parts
def compare(ver1, ver2):
    """Three-way compare of two SemVer strings: -1, 0 or 1."""
    def _nat_cmp(x, y):
        # "Natural" comparison: numeric runs compare as ints, the rest
        # case-insensitively. None is treated as the empty string.
        x, y = x or '', y or ''
        _conv = lambda tok: tok.isdigit() and int(tok) or tok.lower()
        _key = lambda s: [_conv(tok) for tok in re.split('([0-9]+)', s)]
        return cmp(_key(x), _key(y))

    def _cmp_parsed(d1, d2):
        # Numeric fields dominate; first difference wins.
        for field in ('major', 'minor', 'patch'):
            outcome = cmp(d1.get(field), d2.get(field))
            if outcome:
                return outcome
        pre1, pre2 = d1.get('prerelease'), d2.get('prerelease')
        bld1, bld2 = d1.get('build'), d2.get('build')
        pre_cmp = _nat_cmp(pre1, pre2)
        bld_cmp = _nat_cmp(bld1, bld2)
        # A version without a prerelease outranks one with it.
        if not (pre1 or pre2):
            return bld_cmp
        if not pre1:
            return 1
        if not pre2:
            return -1
        return pre_cmp or bld_cmp or 0

    return _cmp_parsed(parse(ver1), parse(ver2))
def match(version, match_expr):
    """Test version against an expression like '>=1.0.0'.

    match_expr is an operator (<, >, ==, <=, >=) immediately followed
    by a SemVer string; returns True when version satisfies it.
    """
    op = match_expr[:2]
    if op in ('>=', '<=', '=='):
        target = match_expr[2:]
    elif op and op[0] in ('>', '<', '='):
        op = op[0]
        target = match_expr[1:]
    else:
        raise ValueError("match_expr parameter should be in format <op><ver>, "
                         "where <op> is one of ['<', '>', '==', '<=', '>=']. "
                         "You provided: %r" % match_expr)
    # Map each operator to the cmp() outcomes it accepts.
    acceptable = {
        '>': (1,),
        '<': (-1,),
        '==': (0,),
        '>=': (0, 1),
        '<=': (-1, 0),
    }[op]
    return compare(version, target) in acceptable

View File

@@ -1,113 +0,0 @@
# This file is part of 'NTLM Authorization Proxy Server' http://sourceforge.net/projects/ntlmaps/
# Copyright 2001 Dmitry A. Rozmanov <dima@xenon.spb.ru>
#
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/> or <http://www.gnu.org/licenses/lgpl.txt>.
# Bias added to every stored value (2**36): keeps the internal Python 2
# long strictly positive whatever 32-bit quantity is held. norm() strips
# it again, since C has no bits in the low 32. (Legacy Python 2 code --
# the `L` literals are not valid Python 3.)
C = 0x1000000000L

def norm(n):
    # Truncate n to its low 32 bits (unsigned 32-bit wraparound).
    return n & 0xFFFFFFFFL
class U32:
    """Emulated unsigned 32-bit integer (legacy Python 2 code).

    Each instance stores its value biased by the constant C so the
    internal long stays positive; norm() recovers the low 32 bits.
    Arithmetic and bitwise operators wrap modulo 2**32 like C unsigned
    ints. Relies on long/L literals, __cmp__ and __nonzero__, so it is
    Python 2 only.
    """

    # Biased internal representation: always C + (value & 0xFFFFFFFF).
    v = 0L

    def __init__(self, value = 0):
        # NOTE(review): abs() stores a negative input's magnitude, not
        # its two's-complement image -- confirm no caller relies on
        # wrapping of negative values.
        self.v = C + norm(abs(long(value)))

    def set(self, value = 0):
        # Replace the held value (same normalization as __init__).
        self.v = C + norm(abs(long(value)))

    def __repr__(self):
        return hex(norm(self.v))

    # Conversions strip the bias before handing back a plain number.
    def __long__(self): return long(norm(self.v))
    def __int__(self): return int(norm(self.v))
    def __chr__(self): return chr(norm(self.v))

    def __add__(self, b):
        # Wrapping 32-bit addition (bias bits are masked off by norm).
        r = U32()
        r.v = C + norm(self.v + b.v)
        return r

    def __sub__(self, b):
        # Wrapping 32-bit subtraction: borrow from 2**32 on underflow.
        r = U32()
        if self.v < b.v:
            r.v = C + norm(0x100000000L - (b.v - self.v))
        else: r.v = C + norm(self.v - b.v)
        return r

    def __mul__(self, b):
        r = U32()
        r.v = C + norm(self.v * b.v)
        return r

    def __div__(self, b):
        # Python 2 integer division of the unbiased values.
        r = U32()
        r.v = C + (norm(self.v) / norm(b.v))
        return r

    def __mod__(self, b):
        r = U32()
        r.v = C + (norm(self.v) % norm(b.v))
        return r

    # NOTE(review): these three return an unchanged copy -- U32(self.v)
    # re-normalizes the biased value back to the same 32-bit number, so
    # unary minus does NOT negate. Confirm this is intentional.
    def __neg__(self): return U32(self.v)
    def __pos__(self): return U32(self.v)
    def __abs__(self): return U32(self.v)

    def __invert__(self):
        # Bitwise NOT, truncated to 32 bits by norm().
        r = U32()
        r.v = C + norm(~self.v)
        return r

    def __lshift__(self, b):
        # b is a plain int shift count, not a U32.
        r = U32()
        r.v = C + norm(self.v << b)
        return r

    def __rshift__(self, b):
        # Unbias first so the bias bits are not shifted into the result.
        r = U32()
        r.v = C + (norm(self.v) >> b)
        return r

    def __and__(self, b):
        r = U32()
        r.v = C + norm(self.v & b.v)
        return r

    def __or__(self, b):
        r = U32()
        r.v = C + norm(self.v | b.v)
        return r

    def __xor__(self, b):
        r = U32()
        r.v = C + norm(self.v ^ b.v)
        return r

    def __not__(self):
        # Not a real special method in Python; must be called explicitly.
        return U32(not norm(self.v))

    def truth(self):
        # Plain-int truth value of the unbiased contents.
        return norm(self.v)

    def __cmp__(self, b):
        # Python 2 three-way comparison on the unbiased values.
        if norm(self.v) > norm(b.v): return 1
        elif norm(self.v) < norm(b.v): return -1
        else: return 0

    def __nonzero__(self):
        # Python 2 truth protocol (Python 3 would need __bool__).
        return norm(self.v)

View File

@@ -1,92 +0,0 @@
# This file is part of 'NTLM Authorization Proxy Server' http://sourceforge.net/projects/ntlmaps/
# Copyright 2001 Dmitry A. Rozmanov <dima@xenon.spb.ru>
#
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/> or <http://www.gnu.org/licenses/lgpl.txt>.
import des_c
#---------------------------------------------------------------------
class DES:
    """56-bit-key convenience wrapper around des_c.DES.

    Accepts a key of up to 7 characters, expands it into the 8-byte DES
    key layout (parity bits included), and delegates the actual cipher
    work to a des_c.DES instance.
    """

    # Underlying des_c.DES cipher object, created in __init__.
    des_c_obj = None

    def __init__(self, key_str):
        """Expand key_str to its 64-bit form and build the des_c cipher."""
        key64 = key56_to_key64(str_to_key56(key_str))
        expanded = ''.join(chr(octet & 0xFF) for octet in key64)
        self.des_c_obj = des_c.DES(expanded)

    def encrypt(self, plain_text):
        """Return plain_text encrypted under the configured key."""
        return self.des_c_obj.encrypt(plain_text)

    def decrypt(self, crypted_text):
        """Return crypted_text decrypted under the configured key."""
        return self.des_c_obj.decrypt(crypted_text)
#---------------------------------------------------------------------
#Some Helpers
#---------------------------------------------------------------------
# Legacy string-style exception identifier; kept for API compatibility
# (nothing in this view raises it).
DESException = 'DESException'

#---------------------------------------------------------------------
def str_to_key56(key_str):
    """Return key_str as a list of 7 byte values, NUL-padded to length 7.

    Only the first 7 characters are used; shorter keys are padded with
    zero bytes (56 bits of DES key material).
    """
    # The original carried a disabled type check here
    # (`if type(key_str) != type(''): pass`); it was a no-op and is dropped.
    if len(key_str) < 7:
        key_str = key_str + '\000\000\000\000\000\000\000'[:(7 - len(key_str))]
    return [ord(ch) for ch in key_str[:7]]
#---------------------------------------------------------------------
def key56_to_key64(key_56):
    """Expand a 7-byte (56-bit) key into the 8-byte DES key layout.

    Output byte i carries the tail of input byte i-1 in its high bits
    and the head of input byte i below it; the parity (low) bits are
    then fixed up by set_key_odd_parity().
    """
    key = [0] * 8
    key[0] = key_56[0]
    for i in range(1, 7):
        key[i] = ((key_56[i - 1] << (8 - i)) & 0xFF) | (key_56[i] >> i)
    key[7] = (key_56[6] << 1) & 0xFF
    return set_key_odd_parity(key)
#---------------------------------------------------------------------
def set_key_odd_parity(key):
    """Set each byte's least-significant bit so the byte has odd parity.

    DES keys reserve bit 0 of every byte as an odd-parity bit over the
    byte's 7 key bits; the cipher itself ignores these bits, so
    correcting them does not change encryption results.

    Bug fixed: the original reset its accumulator on every inner
    iteration, so it copied bit 6 into the parity position instead of
    computing the parity of the key bits.
    """
    for i in range(len(key)):
        # XOR together the seven key bits (bits 1..7).
        parity = 0
        for k in range(1, 8):
            parity ^= (key[i] >> k) & 0x1
        # Odd parity: LSB = 1 exactly when the key bits have even count.
        key[i] = (key[i] & 0xFE) | (parity ^ 1)
    return key

View File

@@ -1,328 +0,0 @@
# This file is part of 'NTLM Authorization Proxy Server' http://sourceforge.net/projects/ntlmaps/
# Copyright 2001 Dmitry A. Rozmanov <dima@xenon.spb.ru>
#
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/> or <http://www.gnu.org/licenses/lgpl.txt>.
from U32 import U32
# --NON ASCII COMMENT ELIDED--
#typedef unsigned char des_cblock[8];
#define HDRSIZE 4
def c2l(c):
    """Pack char[4] (little-endian byte values) into one U32."""
    acc = U32(c[0])
    for pos in (1, 2, 3):
        acc = acc | (U32(c[pos]) << (8 * pos))
    return acc
def c2ln(c,l1,l2,n):
    """Pack the first n (1..8) bytes of c into two 32-bit halves.

    Apparent port of the C c2ln macro: l1 receives bytes 0-3
    (little-endian), l2 receives bytes 4-7. The incoming l1/l2 values
    are discarded; the result is returned as a (l1, l2) tuple.
    """
    # NOTE(review): in C, `c += n` advances a pointer before the macro
    # indexes backwards; on a Python list `c = c + n` raises TypeError
    # (list + int), and the c[7]..c[0] indexing below assumes the
    # *unadvanced* base anyway. This function looks broken as ported --
    # confirm whether anything actually calls it.
    c = c + n
    l1, l2 = U32(0), U32(0)
    f = 0
    # Emulated C switch fall-through: once one case matches, f stays 1
    # so every following (lower-n) case executes as well.
    if n == 8:
        l2 = l2 | (U32(c[7]) << 24)
        f = 1
    if f or (n == 7):
        l2 = l2 | (U32(c[6]) << 16)
        f = 1
    if f or (n == 6):
        l2 = l2 | (U32(c[5]) << 8)
        f = 1
    if f or (n == 5):
        l2 = l2 | U32(c[4])
        f = 1
    if f or (n == 4):
        l1 = l1 | (U32(c[3]) << 24)
        f = 1
    if f or (n == 3):
        l1 = l1 | (U32(c[2]) << 16)
        f = 1
    if f or (n == 2):
        l1 = l1 | (U32(c[1]) << 8)
        f = 1
    if f or (n == 1):
        l1 = l1 | U32(c[0])
    return (l1, l2)
def l2c(l):
    """Split a U32 into a little-endian list of four byte values."""
    out = [int(l & U32(0xFF))]
    for shift in (8, 16, 24):
        out.append(int((l >> shift) & U32(0xFF)))
    return out
def n2l(c, l):
    """Big-endian ("network") char[4] -> host U32.

    The incoming l is ignored and rebuilt from the bytes of c.
    """
    result = U32(c[0] << 24)
    result = result | (U32(c[1]) << 16)
    result = result | (U32(c[2]) << 8)
    result = result | U32(c[3])
    return result
def l2n(l, c):
    """Host U32 -> big-endian ("network") char[4] list.

    The incoming c is ignored; a fresh list is returned.
    """
    out = []
    for shift in (24, 16, 8):
        out.append(int((l >> shift) & U32(0xFF)))
    out.append(int(l & U32(0xFF)))
    return out
def l2cn(l1, l2, c, n):
    """Unpack the low n (1..8) bytes of the pair (l1, l2) into c.

    Inverse of c2ln: bytes 0-3 come from l1 (little-endian), bytes 4-7
    from l2. Returns the first n bytes as a list of ints.
    """
    # NOTE(review): this appends n zero slots to the *caller's* list and
    # then writes c[0]..c[n-1] from the list start -- it only works if c
    # is empty on entry; verify callers always pass a fresh list.
    for i in range(n): c.append(0x00)
    f = 0
    # Emulated C switch fall-through (same pattern as c2ln): once one
    # case matches, all lower-numbered cases execute too.
    if f or (n == 8):
        c[7] = int((l2 >> 24) & U32(0xFF))
        f = 1
    if f or (n == 7):
        c[6] = int((l2 >> 16) & U32(0xFF))
        f = 1
    if f or (n == 6):
        c[5] = int((l2 >> 8) & U32(0xFF))
        f = 1
    if f or (n == 5):
        c[4] = int((l2 ) & U32(0xFF))
        f = 1
    if f or (n == 4):
        c[3] = int((l1 >> 24) & U32(0xFF))
        f = 1
    if f or (n == 3):
        c[2] = int((l1 >> 16) & U32(0xFF))
        f = 1
    if f or (n == 2):
        c[1] = int((l1 >> 8) & U32(0xFF))
        f = 1
    if f or (n == 1):
        c[0] = int((l1 ) & U32(0xFF))
        f = 1
    return c[:n]
# array of data
# static unsigned long des_SPtrans[8][64]={
# static unsigned long des_skb[8][64]={
from des_data import des_SPtrans, des_skb
def D_ENCRYPT(tup, u, t, s):
    """One DES round step: fold the S-box output for R xor subkey into L.

    tup -- (L, R, S): block halves plus the index of this round's subkey pair
    u, t -- scratch U32 values (returned so the caller can thread them through)
    s -- the key schedule (flat list of subkey words)
    Returns ((L, R, S), u, t, s) with L updated; R and S are unchanged.
    """
    L, R, S = tup
    u = (R ^ s[S])
    # Second subkey word mixed in, then rotated right by 4 so the odd-numbered
    # S-boxes see their 6-bit groups at the low end.
    t = R ^ s[S + 1]
    t = ((t >> 4) + (t << 28))
    L = L ^ (des_SPtrans[1][int((t ) & U32(0x3f))] | \
        des_SPtrans[3][int((t >> 8) & U32(0x3f))] | \
        des_SPtrans[5][int((t >> 16) & U32(0x3f))] | \
        des_SPtrans[7][int((t >> 24) & U32(0x3f))] | \
        des_SPtrans[0][int((u ) & U32(0x3f))] | \
        des_SPtrans[2][int((u >> 8) & U32(0x3f))] | \
        des_SPtrans[4][int((u >> 16) & U32(0x3f))] | \
        des_SPtrans[6][int((u >> 24) & U32(0x3f))])
    return ((L, R, S), u, t, s)
def PERM_OP (tup, n, m):
    """DES permutation step: swap the bits of a and b selected by mask m
    at bit-distance n.

    tup is (a, b, t); returns the updated (a, b, t).
    """
    a, b, scratch = tup
    scratch = ((a >> n) ^ b) & m
    return (a ^ (scratch << n), b ^ scratch, scratch)
def HPERM_OP (tup, n, m):
    """DES permutation step within a single word: exchange the bits of a
    selected by mask m at bit-distance 16 - n.

    tup is (a, t); returns the updated (a, t).
    """
    a, _ = tup
    dist = 16 - n
    delta = ((a << dist) ^ a) & m
    return (a ^ delta ^ (delta >> dist), delta)
# Per-round rotation schedule for the key setup: 0 -> rotate halves by 1,
# 1 -> rotate by 2 (consumed by des_set_key below).
shifts2 = [0,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0]
class DES:
    """Single-block DES in ECB mode, keyed from an 8-character key string."""

    KeySched = None  # key schedule (list of U32 subkey words) built in __init__

    def __init__(self, key_str):
        # Convert the key string to a list of byte values and expand it.
        key_bytes = [ord(ch) for ch in key_str]
        self.KeySched = des_set_key(key_bytes)

    def decrypt(self, str):
        """Decrypt one 8-byte block given as a string; returns the plaintext string."""
        block = [ord(ch) for ch in str]
        plain = des_ecb_encrypt(block, self.KeySched, 0)
        return ''.join(chr(b) for b in plain)

    def encrypt(self, str):
        """Encrypt one 8-byte block given as a string; returns the ciphertext string."""
        block = [ord(ch) for ch in str]
        cipher = des_ecb_encrypt(block, self.KeySched, 1)
        return ''.join(chr(b) for b in cipher)
#------------------------
def des_encript(input, ks, encrypt):
    """Core DES transform over one 64-bit block (name misspelling is historical;
    des_ecb_encrypt calls it by this name).

    input   -- [U32, U32]: the two 32-bit halves of the block
    ks      -- key schedule from des_set_key (32 U32 subkey words)
    encrypt -- 1 to run the 16 rounds forward, 0 to run them in reverse
    Returns [U32, U32]: the transformed halves.
    """
    l = input[0]
    r = input[1]
    t = U32(0)
    u = U32(0)
    # Initial permutation (IP), expressed as a sequence of masked bit swaps.
    r, l, t = PERM_OP((r, l, t), 4, U32(0x0f0f0f0fL))
    l, r, t = PERM_OP((l, r, t), 16, U32(0x0000ffffL))
    r, l, t = PERM_OP((r, l, t), 2, U32(0x33333333L))
    l, r, t = PERM_OP((l, r, t), 8, U32(0x00ff00ffL))
    r, l, t = PERM_OP((r, l, t), 1, U32(0x55555555L))
    # Rotate both halves left by one bit (undone after the rounds).
    t = (r << 1)|(r >> 31)
    r = (l << 1)|(l >> 31)
    l = t
    s = ks  # subkey words; D_ENCRYPT consumes two per round
    if(encrypt):
        # 16 rounds, two per iteration, subkey pairs taken forward.
        for i in range(0, 32, 4):
            rtup, u, t, s = D_ENCRYPT((l, r, i + 0), u, t, s)
            l = rtup[0]
            r = rtup[1]
            rtup, u, t, s = D_ENCRYPT((r, l, i + 2), u, t, s)
            r = rtup[0]
            l = rtup[1]
    else:
        # Decryption: identical rounds with subkey pairs in reverse order.
        for i in range(30, 0, -4):
            rtup, u, t, s = D_ENCRYPT((l, r, i - 0), u, t, s)
            l = rtup[0]
            r = rtup[1]
            rtup, u, t, s = D_ENCRYPT((r, l, i - 2), u, t, s)
            r = rtup[0]
            l = rtup[1]
    # Undo the one-bit rotation, then apply the inverse initial permutation.
    l = (l >> 1)|(l << 31)
    r = (r >> 1)|(r << 31)
    r, l, t = PERM_OP((r, l, t), 1, U32(0x55555555L))
    l, r, t = PERM_OP((l, r, t), 8, U32(0x00ff00ffL))
    r, l, t = PERM_OP((r, l, t), 2, U32(0x33333333L))
    l, r, t = PERM_OP((l, r, t), 16, U32(0x0000ffffL))
    r, l, t = PERM_OP((r, l, t), 4, U32(0x0f0f0f0fL))
    output = [l]
    output.append(r)
    # Scrub locals, mirroring the C original's key-hygiene habit.
    l, r, t, u = U32(0), U32(0), U32(0), U32(0)
    return output
def des_ecb_encrypt(input, ks, encrypt):
    """Encrypt or decrypt a single 8-byte block in ECB mode.

    input   -- list of 8 byte values (des_cblock)
    ks      -- key schedule produced by des_set_key
    encrypt -- 1 to encrypt, 0 to decrypt
    Returns a list of 8 byte values.
    """
    halves = [c2l(input[0:4]), c2l(input[4:8])]
    halves = des_encript(halves, ks, encrypt)
    result = l2c(halves[0]) + l2c(halves[1])
    # Scrub intermediates, mirroring the C original's key-hygiene habit.
    halves[0], halves[1] = U32(0), U32(0)
    return result
def des_set_key(key):
    """Expand an 8-byte DES key into the key schedule.

    key -- list of 8 byte values (des_cblock)
    Returns the schedule: a list of 32 U32 words, two per round.
    """
    k = []
    c = c2l(key[0:4])
    d = c2l(key[4:8])
    t = U32(0)
    # PC-1 (permuted choice 1), expressed as masked bit swaps on the halves.
    d, c, t = PERM_OP((d, c, t), 4, U32(0x0f0f0f0fL))
    c, t = HPERM_OP((c, t), -2, U32(0xcccc0000L))
    d, t = HPERM_OP((d, t), -2, U32(0xcccc0000L))
    d, c, t = PERM_OP((d, c, t), 1, U32(0x55555555L))
    c, d, t = PERM_OP((c, d, t), 8, U32(0x00ff00ffL))
    d, c, t = PERM_OP((d, c, t), 1, U32(0x55555555L))
    d = (((d & U32(0x000000ffL)) << 16)|(d & U32(0x0000ff00L))|((d & U32(0x00ff0000L)) >> 16)|((c & U32(0xf0000000L)) >> 4))
    c = c & U32(0x0fffffffL)
    for i in range(16):
        # Rotate both 28-bit halves left by 1 or 2 per the shifts2 schedule.
        if (shifts2[i]):
            c = ((c >> 2)|(c << 26))
            d = ((d >> 2)|(d << 26))
        else:
            c = ((c >> 1)|(c << 27))
            d = ((d >> 1)|(d << 27))
        c = c & U32(0x0fffffffL)
        d = d & U32(0x0fffffffL)
        # PC-2: select this round's 48 subkey bits via the des_skb tables.
        s= des_skb[0][int((c ) & U32(0x3f))]|\
            des_skb[1][int(((c>> 6) & U32(0x03))|((c>> 7) & U32(0x3c)))]|\
            des_skb[2][int(((c>>13) & U32(0x0f))|((c>>14) & U32(0x30)))]|\
            des_skb[3][int(((c>>20) & U32(0x01))|((c>>21) & U32(0x06)) | ((c>>22) & U32(0x38)))]
        t= des_skb[4][int((d ) & U32(0x3f) )]|\
            des_skb[5][int(((d>> 7) & U32(0x03))|((d>> 8) & U32(0x3c)))]|\
            des_skb[6][int((d>>15) & U32(0x3f) )]|\
            des_skb[7][int(((d>>21) & U32(0x0f))|((d>>22) & U32(0x30)))]
        # Interleave the two halves into this round's pair of subkey words.
        k.append(((t << 16)|(s & U32(0x0000ffffL))) & U32(0xffffffffL))
        s = ((s >> 16)|(t & U32(0xffff0000L)))
        s = (s << 4)|(s >> 28)
        k.append(s & U32(0xffffffffL))
    schedule = k
    return schedule

View File

@@ -1,348 +0,0 @@
# This file is part of 'NTLM Authorization Proxy Server' http://sourceforge.net/projects/ntlmaps/
# Copyright 2001 Dmitry A. Rozmanov <dima@xenon.spb.ru>
#
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/> or <http://www.gnu.org/licenses/lgpl.txt>.
from U32 import U32
# static unsigned long des_SPtrans[8][64]={
des_SPtrans =\
[
#nibble 0
[
U32(0x00820200L), U32(0x00020000L), U32(0x80800000L), U32(0x80820200L),
U32(0x00800000L), U32(0x80020200L), U32(0x80020000L), U32(0x80800000L),
U32(0x80020200L), U32(0x00820200L), U32(0x00820000L), U32(0x80000200L),
U32(0x80800200L), U32(0x00800000L), U32(0x00000000L), U32(0x80020000L),
U32(0x00020000L), U32(0x80000000L), U32(0x00800200L), U32(0x00020200L),
U32(0x80820200L), U32(0x00820000L), U32(0x80000200L), U32(0x00800200L),
U32(0x80000000L), U32(0x00000200L), U32(0x00020200L), U32(0x80820000L),
U32(0x00000200L), U32(0x80800200L), U32(0x80820000L), U32(0x00000000L),
U32(0x00000000L), U32(0x80820200L), U32(0x00800200L), U32(0x80020000L),
U32(0x00820200L), U32(0x00020000L), U32(0x80000200L), U32(0x00800200L),
U32(0x80820000L), U32(0x00000200L), U32(0x00020200L), U32(0x80800000L),
U32(0x80020200L), U32(0x80000000L), U32(0x80800000L), U32(0x00820000L),
U32(0x80820200L), U32(0x00020200L), U32(0x00820000L), U32(0x80800200L),
U32(0x00800000L), U32(0x80000200L), U32(0x80020000L), U32(0x00000000L),
U32(0x00020000L), U32(0x00800000L), U32(0x80800200L), U32(0x00820200L),
U32(0x80000000L), U32(0x80820000L), U32(0x00000200L), U32(0x80020200L),
],
#nibble 1
[
U32(0x10042004L), U32(0x00000000L), U32(0x00042000L), U32(0x10040000L),
U32(0x10000004L), U32(0x00002004L), U32(0x10002000L), U32(0x00042000L),
U32(0x00002000L), U32(0x10040004L), U32(0x00000004L), U32(0x10002000L),
U32(0x00040004L), U32(0x10042000L), U32(0x10040000L), U32(0x00000004L),
U32(0x00040000L), U32(0x10002004L), U32(0x10040004L), U32(0x00002000L),
U32(0x00042004L), U32(0x10000000L), U32(0x00000000L), U32(0x00040004L),
U32(0x10002004L), U32(0x00042004L), U32(0x10042000L), U32(0x10000004L),
U32(0x10000000L), U32(0x00040000L), U32(0x00002004L), U32(0x10042004L),
U32(0x00040004L), U32(0x10042000L), U32(0x10002000L), U32(0x00042004L),
U32(0x10042004L), U32(0x00040004L), U32(0x10000004L), U32(0x00000000L),
U32(0x10000000L), U32(0x00002004L), U32(0x00040000L), U32(0x10040004L),
U32(0x00002000L), U32(0x10000000L), U32(0x00042004L), U32(0x10002004L),
U32(0x10042000L), U32(0x00002000L), U32(0x00000000L), U32(0x10000004L),
U32(0x00000004L), U32(0x10042004L), U32(0x00042000L), U32(0x10040000L),
U32(0x10040004L), U32(0x00040000L), U32(0x00002004L), U32(0x10002000L),
U32(0x10002004L), U32(0x00000004L), U32(0x10040000L), U32(0x00042000L),
],
#nibble 2
[
U32(0x41000000L), U32(0x01010040L), U32(0x00000040L), U32(0x41000040L),
U32(0x40010000L), U32(0x01000000L), U32(0x41000040L), U32(0x00010040L),
U32(0x01000040L), U32(0x00010000L), U32(0x01010000L), U32(0x40000000L),
U32(0x41010040L), U32(0x40000040L), U32(0x40000000L), U32(0x41010000L),
U32(0x00000000L), U32(0x40010000L), U32(0x01010040L), U32(0x00000040L),
U32(0x40000040L), U32(0x41010040L), U32(0x00010000L), U32(0x41000000L),
U32(0x41010000L), U32(0x01000040L), U32(0x40010040L), U32(0x01010000L),
U32(0x00010040L), U32(0x00000000L), U32(0x01000000L), U32(0x40010040L),
U32(0x01010040L), U32(0x00000040L), U32(0x40000000L), U32(0x00010000L),
U32(0x40000040L), U32(0x40010000L), U32(0x01010000L), U32(0x41000040L),
U32(0x00000000L), U32(0x01010040L), U32(0x00010040L), U32(0x41010000L),
U32(0x40010000L), U32(0x01000000L), U32(0x41010040L), U32(0x40000000L),
U32(0x40010040L), U32(0x41000000L), U32(0x01000000L), U32(0x41010040L),
U32(0x00010000L), U32(0x01000040L), U32(0x41000040L), U32(0x00010040L),
U32(0x01000040L), U32(0x00000000L), U32(0x41010000L), U32(0x40000040L),
U32(0x41000000L), U32(0x40010040L), U32(0x00000040L), U32(0x01010000L),
],
#nibble 3
[
U32(0x00100402L), U32(0x04000400L), U32(0x00000002L), U32(0x04100402L),
U32(0x00000000L), U32(0x04100000L), U32(0x04000402L), U32(0x00100002L),
U32(0x04100400L), U32(0x04000002L), U32(0x04000000L), U32(0x00000402L),
U32(0x04000002L), U32(0x00100402L), U32(0x00100000L), U32(0x04000000L),
U32(0x04100002L), U32(0x00100400L), U32(0x00000400L), U32(0x00000002L),
U32(0x00100400L), U32(0x04000402L), U32(0x04100000L), U32(0x00000400L),
U32(0x00000402L), U32(0x00000000L), U32(0x00100002L), U32(0x04100400L),
U32(0x04000400L), U32(0x04100002L), U32(0x04100402L), U32(0x00100000L),
U32(0x04100002L), U32(0x00000402L), U32(0x00100000L), U32(0x04000002L),
U32(0x00100400L), U32(0x04000400L), U32(0x00000002L), U32(0x04100000L),
U32(0x04000402L), U32(0x00000000L), U32(0x00000400L), U32(0x00100002L),
U32(0x00000000L), U32(0x04100002L), U32(0x04100400L), U32(0x00000400L),
U32(0x04000000L), U32(0x04100402L), U32(0x00100402L), U32(0x00100000L),
U32(0x04100402L), U32(0x00000002L), U32(0x04000400L), U32(0x00100402L),
U32(0x00100002L), U32(0x00100400L), U32(0x04100000L), U32(0x04000402L),
U32(0x00000402L), U32(0x04000000L), U32(0x04000002L), U32(0x04100400L),
],
#nibble 4
[
U32(0x02000000L), U32(0x00004000L), U32(0x00000100L), U32(0x02004108L),
U32(0x02004008L), U32(0x02000100L), U32(0x00004108L), U32(0x02004000L),
U32(0x00004000L), U32(0x00000008L), U32(0x02000008L), U32(0x00004100L),
U32(0x02000108L), U32(0x02004008L), U32(0x02004100L), U32(0x00000000L),
U32(0x00004100L), U32(0x02000000L), U32(0x00004008L), U32(0x00000108L),
U32(0x02000100L), U32(0x00004108L), U32(0x00000000L), U32(0x02000008L),
U32(0x00000008L), U32(0x02000108L), U32(0x02004108L), U32(0x00004008L),
U32(0x02004000L), U32(0x00000100L), U32(0x00000108L), U32(0x02004100L),
U32(0x02004100L), U32(0x02000108L), U32(0x00004008L), U32(0x02004000L),
U32(0x00004000L), U32(0x00000008L), U32(0x02000008L), U32(0x02000100L),
U32(0x02000000L), U32(0x00004100L), U32(0x02004108L), U32(0x00000000L),
U32(0x00004108L), U32(0x02000000L), U32(0x00000100L), U32(0x00004008L),
U32(0x02000108L), U32(0x00000100L), U32(0x00000000L), U32(0x02004108L),
U32(0x02004008L), U32(0x02004100L), U32(0x00000108L), U32(0x00004000L),
U32(0x00004100L), U32(0x02004008L), U32(0x02000100L), U32(0x00000108L),
U32(0x00000008L), U32(0x00004108L), U32(0x02004000L), U32(0x02000008L),
],
#nibble 5
[
U32(0x20000010L), U32(0x00080010L), U32(0x00000000L), U32(0x20080800L),
U32(0x00080010L), U32(0x00000800L), U32(0x20000810L), U32(0x00080000L),
U32(0x00000810L), U32(0x20080810L), U32(0x00080800L), U32(0x20000000L),
U32(0x20000800L), U32(0x20000010L), U32(0x20080000L), U32(0x00080810L),
U32(0x00080000L), U32(0x20000810L), U32(0x20080010L), U32(0x00000000L),
U32(0x00000800L), U32(0x00000010L), U32(0x20080800L), U32(0x20080010L),
U32(0x20080810L), U32(0x20080000L), U32(0x20000000L), U32(0x00000810L),
U32(0x00000010L), U32(0x00080800L), U32(0x00080810L), U32(0x20000800L),
U32(0x00000810L), U32(0x20000000L), U32(0x20000800L), U32(0x00080810L),
U32(0x20080800L), U32(0x00080010L), U32(0x00000000L), U32(0x20000800L),
U32(0x20000000L), U32(0x00000800L), U32(0x20080010L), U32(0x00080000L),
U32(0x00080010L), U32(0x20080810L), U32(0x00080800L), U32(0x00000010L),
U32(0x20080810L), U32(0x00080800L), U32(0x00080000L), U32(0x20000810L),
U32(0x20000010L), U32(0x20080000L), U32(0x00080810L), U32(0x00000000L),
U32(0x00000800L), U32(0x20000010L), U32(0x20000810L), U32(0x20080800L),
U32(0x20080000L), U32(0x00000810L), U32(0x00000010L), U32(0x20080010L),
],
#nibble 6
[
U32(0x00001000L), U32(0x00000080L), U32(0x00400080L), U32(0x00400001L),
U32(0x00401081L), U32(0x00001001L), U32(0x00001080L), U32(0x00000000L),
U32(0x00400000L), U32(0x00400081L), U32(0x00000081L), U32(0x00401000L),
U32(0x00000001L), U32(0x00401080L), U32(0x00401000L), U32(0x00000081L),
U32(0x00400081L), U32(0x00001000L), U32(0x00001001L), U32(0x00401081L),
U32(0x00000000L), U32(0x00400080L), U32(0x00400001L), U32(0x00001080L),
U32(0x00401001L), U32(0x00001081L), U32(0x00401080L), U32(0x00000001L),
U32(0x00001081L), U32(0x00401001L), U32(0x00000080L), U32(0x00400000L),
U32(0x00001081L), U32(0x00401000L), U32(0x00401001L), U32(0x00000081L),
U32(0x00001000L), U32(0x00000080L), U32(0x00400000L), U32(0x00401001L),
U32(0x00400081L), U32(0x00001081L), U32(0x00001080L), U32(0x00000000L),
U32(0x00000080L), U32(0x00400001L), U32(0x00000001L), U32(0x00400080L),
U32(0x00000000L), U32(0x00400081L), U32(0x00400080L), U32(0x00001080L),
U32(0x00000081L), U32(0x00001000L), U32(0x00401081L), U32(0x00400000L),
U32(0x00401080L), U32(0x00000001L), U32(0x00001001L), U32(0x00401081L),
U32(0x00400001L), U32(0x00401080L), U32(0x00401000L), U32(0x00001001L),
],
#nibble 7
[
U32(0x08200020L), U32(0x08208000L), U32(0x00008020L), U32(0x00000000L),
U32(0x08008000L), U32(0x00200020L), U32(0x08200000L), U32(0x08208020L),
U32(0x00000020L), U32(0x08000000L), U32(0x00208000L), U32(0x00008020L),
U32(0x00208020L), U32(0x08008020L), U32(0x08000020L), U32(0x08200000L),
U32(0x00008000L), U32(0x00208020L), U32(0x00200020L), U32(0x08008000L),
U32(0x08208020L), U32(0x08000020L), U32(0x00000000L), U32(0x00208000L),
U32(0x08000000L), U32(0x00200000L), U32(0x08008020L), U32(0x08200020L),
U32(0x00200000L), U32(0x00008000L), U32(0x08208000L), U32(0x00000020L),
U32(0x00200000L), U32(0x00008000L), U32(0x08000020L), U32(0x08208020L),
U32(0x00008020L), U32(0x08000000L), U32(0x00000000L), U32(0x00208000L),
U32(0x08200020L), U32(0x08008020L), U32(0x08008000L), U32(0x00200020L),
U32(0x08208000L), U32(0x00000020L), U32(0x00200020L), U32(0x08008000L),
U32(0x08208020L), U32(0x00200000L), U32(0x08200000L), U32(0x08000020L),
U32(0x00208000L), U32(0x00008020L), U32(0x08008020L), U32(0x08200000L),
U32(0x00000020L), U32(0x08208000L), U32(0x00208020L), U32(0x00000000L),
U32(0x08000000L), U32(0x08200020L), U32(0x00008000L), U32(0x00208020L),
],
]
#static unsigned long des_skb[8][64]={
des_skb = \
[
#for C bits (numbered as per FIPS 46) 1 2 3 4 5 6
[
U32(0x00000000L),U32(0x00000010L),U32(0x20000000L),U32(0x20000010L),
U32(0x00010000L),U32(0x00010010L),U32(0x20010000L),U32(0x20010010L),
U32(0x00000800L),U32(0x00000810L),U32(0x20000800L),U32(0x20000810L),
U32(0x00010800L),U32(0x00010810L),U32(0x20010800L),U32(0x20010810L),
U32(0x00000020L),U32(0x00000030L),U32(0x20000020L),U32(0x20000030L),
U32(0x00010020L),U32(0x00010030L),U32(0x20010020L),U32(0x20010030L),
U32(0x00000820L),U32(0x00000830L),U32(0x20000820L),U32(0x20000830L),
U32(0x00010820L),U32(0x00010830L),U32(0x20010820L),U32(0x20010830L),
U32(0x00080000L),U32(0x00080010L),U32(0x20080000L),U32(0x20080010L),
U32(0x00090000L),U32(0x00090010L),U32(0x20090000L),U32(0x20090010L),
U32(0x00080800L),U32(0x00080810L),U32(0x20080800L),U32(0x20080810L),
U32(0x00090800L),U32(0x00090810L),U32(0x20090800L),U32(0x20090810L),
U32(0x00080020L),U32(0x00080030L),U32(0x20080020L),U32(0x20080030L),
U32(0x00090020L),U32(0x00090030L),U32(0x20090020L),U32(0x20090030L),
U32(0x00080820L),U32(0x00080830L),U32(0x20080820L),U32(0x20080830L),
U32(0x00090820L),U32(0x00090830L),U32(0x20090820L),U32(0x20090830L),
],
#for C bits (numbered as per FIPS 46) 7 8 10 11 12 13
[
U32(0x00000000L),U32(0x02000000L),U32(0x00002000L),U32(0x02002000L),
U32(0x00200000L),U32(0x02200000L),U32(0x00202000L),U32(0x02202000L),
U32(0x00000004L),U32(0x02000004L),U32(0x00002004L),U32(0x02002004L),
U32(0x00200004L),U32(0x02200004L),U32(0x00202004L),U32(0x02202004L),
U32(0x00000400L),U32(0x02000400L),U32(0x00002400L),U32(0x02002400L),
U32(0x00200400L),U32(0x02200400L),U32(0x00202400L),U32(0x02202400L),
U32(0x00000404L),U32(0x02000404L),U32(0x00002404L),U32(0x02002404L),
U32(0x00200404L),U32(0x02200404L),U32(0x00202404L),U32(0x02202404L),
U32(0x10000000L),U32(0x12000000L),U32(0x10002000L),U32(0x12002000L),
U32(0x10200000L),U32(0x12200000L),U32(0x10202000L),U32(0x12202000L),
U32(0x10000004L),U32(0x12000004L),U32(0x10002004L),U32(0x12002004L),
U32(0x10200004L),U32(0x12200004L),U32(0x10202004L),U32(0x12202004L),
U32(0x10000400L),U32(0x12000400L),U32(0x10002400L),U32(0x12002400L),
U32(0x10200400L),U32(0x12200400L),U32(0x10202400L),U32(0x12202400L),
U32(0x10000404L),U32(0x12000404L),U32(0x10002404L),U32(0x12002404L),
U32(0x10200404L),U32(0x12200404L),U32(0x10202404L),U32(0x12202404L),
],
#for C bits (numbered as per FIPS 46) 14 15 16 17 19 20
[
U32(0x00000000L),U32(0x00000001L),U32(0x00040000L),U32(0x00040001L),
U32(0x01000000L),U32(0x01000001L),U32(0x01040000L),U32(0x01040001L),
U32(0x00000002L),U32(0x00000003L),U32(0x00040002L),U32(0x00040003L),
U32(0x01000002L),U32(0x01000003L),U32(0x01040002L),U32(0x01040003L),
U32(0x00000200L),U32(0x00000201L),U32(0x00040200L),U32(0x00040201L),
U32(0x01000200L),U32(0x01000201L),U32(0x01040200L),U32(0x01040201L),
U32(0x00000202L),U32(0x00000203L),U32(0x00040202L),U32(0x00040203L),
U32(0x01000202L),U32(0x01000203L),U32(0x01040202L),U32(0x01040203L),
U32(0x08000000L),U32(0x08000001L),U32(0x08040000L),U32(0x08040001L),
U32(0x09000000L),U32(0x09000001L),U32(0x09040000L),U32(0x09040001L),
U32(0x08000002L),U32(0x08000003L),U32(0x08040002L),U32(0x08040003L),
U32(0x09000002L),U32(0x09000003L),U32(0x09040002L),U32(0x09040003L),
U32(0x08000200L),U32(0x08000201L),U32(0x08040200L),U32(0x08040201L),
U32(0x09000200L),U32(0x09000201L),U32(0x09040200L),U32(0x09040201L),
U32(0x08000202L),U32(0x08000203L),U32(0x08040202L),U32(0x08040203L),
U32(0x09000202L),U32(0x09000203L),U32(0x09040202L),U32(0x09040203L),
],
#for C bits (numbered as per FIPS 46) 21 23 24 26 27 28
[
U32(0x00000000L),U32(0x00100000L),U32(0x00000100L),U32(0x00100100L),
U32(0x00000008L),U32(0x00100008L),U32(0x00000108L),U32(0x00100108L),
U32(0x00001000L),U32(0x00101000L),U32(0x00001100L),U32(0x00101100L),
U32(0x00001008L),U32(0x00101008L),U32(0x00001108L),U32(0x00101108L),
U32(0x04000000L),U32(0x04100000L),U32(0x04000100L),U32(0x04100100L),
U32(0x04000008L),U32(0x04100008L),U32(0x04000108L),U32(0x04100108L),
U32(0x04001000L),U32(0x04101000L),U32(0x04001100L),U32(0x04101100L),
U32(0x04001008L),U32(0x04101008L),U32(0x04001108L),U32(0x04101108L),
U32(0x00020000L),U32(0x00120000L),U32(0x00020100L),U32(0x00120100L),
U32(0x00020008L),U32(0x00120008L),U32(0x00020108L),U32(0x00120108L),
U32(0x00021000L),U32(0x00121000L),U32(0x00021100L),U32(0x00121100L),
U32(0x00021008L),U32(0x00121008L),U32(0x00021108L),U32(0x00121108L),
U32(0x04020000L),U32(0x04120000L),U32(0x04020100L),U32(0x04120100L),
U32(0x04020008L),U32(0x04120008L),U32(0x04020108L),U32(0x04120108L),
U32(0x04021000L),U32(0x04121000L),U32(0x04021100L),U32(0x04121100L),
U32(0x04021008L),U32(0x04121008L),U32(0x04021108L),U32(0x04121108L),
],
#for D bits (numbered as per FIPS 46) 1 2 3 4 5 6
[
U32(0x00000000L),U32(0x10000000L),U32(0x00010000L),U32(0x10010000L),
U32(0x00000004L),U32(0x10000004L),U32(0x00010004L),U32(0x10010004L),
U32(0x20000000L),U32(0x30000000L),U32(0x20010000L),U32(0x30010000L),
U32(0x20000004L),U32(0x30000004L),U32(0x20010004L),U32(0x30010004L),
U32(0x00100000L),U32(0x10100000L),U32(0x00110000L),U32(0x10110000L),
U32(0x00100004L),U32(0x10100004L),U32(0x00110004L),U32(0x10110004L),
U32(0x20100000L),U32(0x30100000L),U32(0x20110000L),U32(0x30110000L),
U32(0x20100004L),U32(0x30100004L),U32(0x20110004L),U32(0x30110004L),
U32(0x00001000L),U32(0x10001000L),U32(0x00011000L),U32(0x10011000L),
U32(0x00001004L),U32(0x10001004L),U32(0x00011004L),U32(0x10011004L),
U32(0x20001000L),U32(0x30001000L),U32(0x20011000L),U32(0x30011000L),
U32(0x20001004L),U32(0x30001004L),U32(0x20011004L),U32(0x30011004L),
U32(0x00101000L),U32(0x10101000L),U32(0x00111000L),U32(0x10111000L),
U32(0x00101004L),U32(0x10101004L),U32(0x00111004L),U32(0x10111004L),
U32(0x20101000L),U32(0x30101000L),U32(0x20111000L),U32(0x30111000L),
U32(0x20101004L),U32(0x30101004L),U32(0x20111004L),U32(0x30111004L),
],
#for D bits (numbered as per FIPS 46) 8 9 11 12 13 14
[
U32(0x00000000L),U32(0x08000000L),U32(0x00000008L),U32(0x08000008L),
U32(0x00000400L),U32(0x08000400L),U32(0x00000408L),U32(0x08000408L),
U32(0x00020000L),U32(0x08020000L),U32(0x00020008L),U32(0x08020008L),
U32(0x00020400L),U32(0x08020400L),U32(0x00020408L),U32(0x08020408L),
U32(0x00000001L),U32(0x08000001L),U32(0x00000009L),U32(0x08000009L),
U32(0x00000401L),U32(0x08000401L),U32(0x00000409L),U32(0x08000409L),
U32(0x00020001L),U32(0x08020001L),U32(0x00020009L),U32(0x08020009L),
U32(0x00020401L),U32(0x08020401L),U32(0x00020409L),U32(0x08020409L),
U32(0x02000000L),U32(0x0A000000L),U32(0x02000008L),U32(0x0A000008L),
U32(0x02000400L),U32(0x0A000400L),U32(0x02000408L),U32(0x0A000408L),
U32(0x02020000L),U32(0x0A020000L),U32(0x02020008L),U32(0x0A020008L),
U32(0x02020400L),U32(0x0A020400L),U32(0x02020408L),U32(0x0A020408L),
U32(0x02000001L),U32(0x0A000001L),U32(0x02000009L),U32(0x0A000009L),
U32(0x02000401L),U32(0x0A000401L),U32(0x02000409L),U32(0x0A000409L),
U32(0x02020001L),U32(0x0A020001L),U32(0x02020009L),U32(0x0A020009L),
U32(0x02020401L),U32(0x0A020401L),U32(0x02020409L),U32(0x0A020409L),
],
#for D bits (numbered as per FIPS 46) 16 17 18 19 20 21
[
U32(0x00000000L),U32(0x00000100L),U32(0x00080000L),U32(0x00080100L),
U32(0x01000000L),U32(0x01000100L),U32(0x01080000L),U32(0x01080100L),
U32(0x00000010L),U32(0x00000110L),U32(0x00080010L),U32(0x00080110L),
U32(0x01000010L),U32(0x01000110L),U32(0x01080010L),U32(0x01080110L),
U32(0x00200000L),U32(0x00200100L),U32(0x00280000L),U32(0x00280100L),
U32(0x01200000L),U32(0x01200100L),U32(0x01280000L),U32(0x01280100L),
U32(0x00200010L),U32(0x00200110L),U32(0x00280010L),U32(0x00280110L),
U32(0x01200010L),U32(0x01200110L),U32(0x01280010L),U32(0x01280110L),
U32(0x00000200L),U32(0x00000300L),U32(0x00080200L),U32(0x00080300L),
U32(0x01000200L),U32(0x01000300L),U32(0x01080200L),U32(0x01080300L),
U32(0x00000210L),U32(0x00000310L),U32(0x00080210L),U32(0x00080310L),
U32(0x01000210L),U32(0x01000310L),U32(0x01080210L),U32(0x01080310L),
U32(0x00200200L),U32(0x00200300L),U32(0x00280200L),U32(0x00280300L),
U32(0x01200200L),U32(0x01200300L),U32(0x01280200L),U32(0x01280300L),
U32(0x00200210L),U32(0x00200310L),U32(0x00280210L),U32(0x00280310L),
U32(0x01200210L),U32(0x01200310L),U32(0x01280210L),U32(0x01280310L),
],
#for D bits (numbered as per FIPS 46) 22 23 24 25 27 28
[
U32(0x00000000L),U32(0x04000000L),U32(0x00040000L),U32(0x04040000L),
U32(0x00000002L),U32(0x04000002L),U32(0x00040002L),U32(0x04040002L),
U32(0x00002000L),U32(0x04002000L),U32(0x00042000L),U32(0x04042000L),
U32(0x00002002L),U32(0x04002002L),U32(0x00042002L),U32(0x04042002L),
U32(0x00000020L),U32(0x04000020L),U32(0x00040020L),U32(0x04040020L),
U32(0x00000022L),U32(0x04000022L),U32(0x00040022L),U32(0x04040022L),
U32(0x00002020L),U32(0x04002020L),U32(0x00042020L),U32(0x04042020L),
U32(0x00002022L),U32(0x04002022L),U32(0x00042022L),U32(0x04042022L),
U32(0x00000800L),U32(0x04000800L),U32(0x00040800L),U32(0x04040800L),
U32(0x00000802L),U32(0x04000802L),U32(0x00040802L),U32(0x04040802L),
U32(0x00002800L),U32(0x04002800L),U32(0x00042800L),U32(0x04042800L),
U32(0x00002802L),U32(0x04002802L),U32(0x00042802L),U32(0x04042802L),
U32(0x00000820L),U32(0x04000820L),U32(0x00040820L),U32(0x04040820L),
U32(0x00000822L),U32(0x04000822L),U32(0x00040822L),U32(0x04040822L),
U32(0x00002820L),U32(0x04002820L),U32(0x00042820L),U32(0x04042820L),
U32(0x00002822L),U32(0x04002822L),U32(0x00042822L),U32(0x04042822L),
]
]

View File

@@ -1,466 +0,0 @@
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/> or <http://www.gnu.org/licenses/lgpl.txt>.
import struct
import base64
import string
import des
import hashlib
import hmac
import random
from socket import gethostname
# NTLM NegotiateFlags bit definitions (see [MS-NLMP] section 2.2.2.5).
NTLM_NegotiateUnicode = 0x00000001
NTLM_NegotiateOEM = 0x00000002
NTLM_RequestTarget = 0x00000004
NTLM_Unknown9 = 0x00000008
NTLM_NegotiateSign = 0x00000010
NTLM_NegotiateSeal = 0x00000020
NTLM_NegotiateDatagram = 0x00000040
NTLM_NegotiateLanManagerKey = 0x00000080
NTLM_Unknown8 = 0x00000100
NTLM_NegotiateNTLM = 0x00000200
NTLM_NegotiateNTOnly = 0x00000400
NTLM_Anonymous = 0x00000800
NTLM_NegotiateOemDomainSupplied = 0x00001000
NTLM_NegotiateOemWorkstationSupplied = 0x00002000
NTLM_Unknown6 = 0x00004000
NTLM_NegotiateAlwaysSign = 0x00008000
NTLM_TargetTypeDomain = 0x00010000
NTLM_TargetTypeServer = 0x00020000
NTLM_TargetTypeShare = 0x00040000
NTLM_NegotiateExtendedSecurity = 0x00080000
NTLM_NegotiateIdentify = 0x00100000
NTLM_Unknown5 = 0x00200000
NTLM_RequestNonNTSessionKey = 0x00400000
NTLM_NegotiateTargetInfo = 0x00800000
NTLM_Unknown4 = 0x01000000
NTLM_NegotiateVersion = 0x02000000
NTLM_Unknown3 = 0x04000000
NTLM_Unknown2 = 0x08000000
NTLM_Unknown1 = 0x10000000
NTLM_Negotiate128 = 0x20000000
NTLM_NegotiateKeyExchange = 0x40000000
NTLM_Negotiate56 = 0x80000000
# we send these flags with our type 1 message
NTLM_TYPE1_FLAGS = (NTLM_NegotiateUnicode | \
    NTLM_NegotiateOEM | \
    NTLM_RequestTarget | \
    NTLM_NegotiateNTLM | \
    NTLM_NegotiateOemDomainSupplied | \
    NTLM_NegotiateOemWorkstationSupplied | \
    NTLM_NegotiateAlwaysSign | \
    NTLM_NegotiateExtendedSecurity | \
    NTLM_NegotiateVersion | \
    NTLM_Negotiate128 | \
    NTLM_Negotiate56 )
# NOTE(review): flag set for a type 2 message; not referenced in the visible
# code -- presumably used by server-side/test code elsewhere, confirm.
NTLM_TYPE2_FLAGS = (NTLM_NegotiateUnicode | \
    NTLM_RequestTarget | \
    NTLM_NegotiateNTLM | \
    NTLM_NegotiateAlwaysSign | \
    NTLM_NegotiateExtendedSecurity | \
    NTLM_NegotiateTargetInfo | \
    NTLM_NegotiateVersion | \
    NTLM_Negotiate128 | \
    NTLM_Negotiate56)
# AV_PAIR AvId codes for the type 2 TargetInfo block ([MS-NLMP] 2.2.2.1).
NTLM_MsvAvEOL = 0 # terminator: last AV_PAIR in the list; AvLen MUST be 0
NTLM_MsvAvNbComputerName = 1 # server NetBIOS computer name (Unicode, not null-terminated)
NTLM_MsvAvNbDomainName = 2 # server NetBIOS domain name (Unicode, not null-terminated)
NTLM_MsvAvDnsComputerName = 3 # server AD DNS computer name (Unicode, not null-terminated)
NTLM_MsvAvDnsDomainName = 4 # server AD DNS domain name (Unicode, not null-terminated)
NTLM_MsvAvDnsTreeName = 5 # server AD DNS forest tree name (Unicode, not null-terminated)
NTLM_MsvAvFlags = 6 # 32-bit config word: 0x1 = constrained auth, 0x2 = MIC present in AUTHENTICATE_MESSAGE
NTLM_MsvAvTimestamp = 7 # server local time as a little-endian FILETIME ([MS-DTYP] 2.3.1)
NTLM_MsAvRestrictions = 8 # Restriction_Encoding: integrity level + boot-time MachineID ([MS-NLMP] 2.2.2.2)
"""
utility functions for Microsoft NTLM authentication
References:
[MS-NLMP]: NT LAN Manager (NTLM) Authentication Protocol Specification
http://download.microsoft.com/download/a/e/6/ae6e4142-aa58-45c6-8dcf-a657e5900cd3/%5BMS-NLMP%5D.pdf
[MS-NTHT]: NTLM Over HTTP Protocol Specification
http://download.microsoft.com/download/a/e/6/ae6e4142-aa58-45c6-8dcf-a657e5900cd3/%5BMS-NTHT%5D.pdf
Cntlm Authentication Proxy
http://cntlm.awk.cz/
NTLM Authorization Proxy Server
http://sourceforge.net/projects/ntlmaps/
Optimized Attack for NTLM2 Session Response
http://www.blackhat.com/presentations/bh-asia-04/bh-jp-04-pdfs/bh-jp-04-seki.pdf
"""
def dump_NegotiateFlags(NegotiateFlags):
if NegotiateFlags & NTLM_NegotiateUnicode:
print "NTLM_NegotiateUnicode set"
if NegotiateFlags & NTLM_NegotiateOEM:
print "NTLM_NegotiateOEM set"
if NegotiateFlags & NTLM_RequestTarget:
print "NTLM_RequestTarget set"
if NegotiateFlags & NTLM_Unknown9:
print "NTLM_Unknown9 set"
if NegotiateFlags & NTLM_NegotiateSign:
print "NTLM_NegotiateSign set"
if NegotiateFlags & NTLM_NegotiateSeal:
print "NTLM_NegotiateSeal set"
if NegotiateFlags & NTLM_NegotiateDatagram:
print "NTLM_NegotiateDatagram set"
if NegotiateFlags & NTLM_NegotiateLanManagerKey:
print "NTLM_NegotiateLanManagerKey set"
if NegotiateFlags & NTLM_Unknown8:
print "NTLM_Unknown8 set"
if NegotiateFlags & NTLM_NegotiateNTLM:
print "NTLM_NegotiateNTLM set"
if NegotiateFlags & NTLM_NegotiateNTOnly:
print "NTLM_NegotiateNTOnly set"
if NegotiateFlags & NTLM_Anonymous:
print "NTLM_Anonymous set"
if NegotiateFlags & NTLM_NegotiateOemDomainSupplied:
print "NTLM_NegotiateOemDomainSupplied set"
if NegotiateFlags & NTLM_NegotiateOemWorkstationSupplied:
print "NTLM_NegotiateOemWorkstationSupplied set"
if NegotiateFlags & NTLM_Unknown6:
print "NTLM_Unknown6 set"
if NegotiateFlags & NTLM_NegotiateAlwaysSign:
print "NTLM_NegotiateAlwaysSign set"
if NegotiateFlags & NTLM_TargetTypeDomain:
print "NTLM_TargetTypeDomain set"
if NegotiateFlags & NTLM_TargetTypeServer:
print "NTLM_TargetTypeServer set"
if NegotiateFlags & NTLM_TargetTypeShare:
print "NTLM_TargetTypeShare set"
if NegotiateFlags & NTLM_NegotiateExtendedSecurity:
print "NTLM_NegotiateExtendedSecurity set"
if NegotiateFlags & NTLM_NegotiateIdentify:
print "NTLM_NegotiateIdentify set"
if NegotiateFlags & NTLM_Unknown5:
print "NTLM_Unknown5 set"
if NegotiateFlags & NTLM_RequestNonNTSessionKey:
print "NTLM_RequestNonNTSessionKey set"
if NegotiateFlags & NTLM_NegotiateTargetInfo:
print "NTLM_NegotiateTargetInfo set"
if NegotiateFlags & NTLM_Unknown4:
print "NTLM_Unknown4 set"
if NegotiateFlags & NTLM_NegotiateVersion:
print "NTLM_NegotiateVersion set"
if NegotiateFlags & NTLM_Unknown3:
print "NTLM_Unknown3 set"
if NegotiateFlags & NTLM_Unknown2:
print "NTLM_Unknown2 set"
if NegotiateFlags & NTLM_Unknown1:
print "NTLM_Unknown1 set"
if NegotiateFlags & NTLM_Negotiate128:
print "NTLM_Negotiate128 set"
if NegotiateFlags & NTLM_NegotiateKeyExchange:
print "NTLM_NegotiateKeyExchange set"
if NegotiateFlags & NTLM_Negotiate56:
print "NTLM_Negotiate56 set"
def create_NTLM_NEGOTIATE_MESSAGE(user, type1_flags=NTLM_TYPE1_FLAGS):
    """Build a base64-encoded NTLM type 1 (NEGOTIATE) message.

    user        -- 'DOMAIN\\user' or plain 'user'; only the domain part is used,
                   and only when NTLM_NegotiateOemDomainSupplied is in the flags
    type1_flags -- NegotiateFlags word to advertise
    Returns the message as a single-line base64 string.
    """
    BODY_LENGTH = 40
    Payload_start = BODY_LENGTH # in bytes
    protocol = 'NTLMSSP\0' # NUL-terminated signature
    msg_type = struct.pack('<I', 1) # message type 1 (renamed from 'type': shadowed the builtin)
    flags = struct.pack('<I', type1_flags)
    Workstation = gethostname().upper().encode('ascii')
    user_parts = user.split('\\', 1)
    if type1_flags & NTLM_NegotiateOemDomainSupplied:
        DomainName = user_parts[0].upper().encode('ascii')
    else:
        DomainName = ''
    # Security-buffer descriptors: length, max length, offset into the payload,
    # which starts right after the 40-byte fixed body.
    WorkstationLen = struct.pack('<H', len(Workstation))
    WorkstationMaxLen = struct.pack('<H', len(Workstation))
    WorkstationBufferOffset = struct.pack('<I', Payload_start)
    Payload_start += len(Workstation)
    DomainNameLen = struct.pack('<H', len(DomainName))
    DomainNameMaxLen = struct.pack('<H', len(DomainName))
    DomainNameBufferOffset = struct.pack('<I', Payload_start)
    Payload_start += len(DomainName)
    # Hard-coded VERSION field: Windows XP (5.1 build 2600), NTLMSSP revision 15.
    ProductMajorVersion = struct.pack('<B', 5)
    ProductMinorVersion = struct.pack('<B', 1)
    ProductBuild = struct.pack('<H', 2600)
    VersionReserved1 = struct.pack('<B', 0)
    VersionReserved2 = struct.pack('<B', 0)
    VersionReserved3 = struct.pack('<B', 0)
    NTLMRevisionCurrent = struct.pack('<B', 15)
    msg1 = protocol + msg_type + flags + \
        DomainNameLen + DomainNameMaxLen + DomainNameBufferOffset + \
        WorkstationLen + WorkstationMaxLen + WorkstationBufferOffset + \
        ProductMajorVersion + ProductMinorVersion + ProductBuild + \
        VersionReserved1 + VersionReserved2 + VersionReserved3 + NTLMRevisionCurrent
    assert BODY_LENGTH == len(msg1), "BODY_LENGTH: %d != msg1: %d" % (BODY_LENGTH, len(msg1))
    msg1 += Workstation + DomainName
    # b64encode emits no embedded newlines, replacing the deprecated
    # base64.encodestring + string.replace(...) pair with identical output.
    return base64.b64encode(msg1)
def parse_NTLM_CHALLENGE_MESSAGE(msg2):
    """Parse a base64-encoded NTLM Type 2 (CHALLENGE) message.

    :param msg2: the base64-encoded challenge message from the server
    :return: a (ServerChallenge, NegotiateFlags) tuple -- the raw 8-byte
        server challenge and the 32-bit negotiate flags field
    """
    msg2 = base64.decodestring(msg2)
    Signature = msg2[0:8]
    msg_type = struct.unpack("<I",msg2[8:12])[0]
    assert(msg_type==2)  # must be a Type 2 message
    # Target name security buffer: length, max length, offset
    TargetNameLen = struct.unpack("<H",msg2[12:14])[0]
    TargetNameMaxLen = struct.unpack("<H",msg2[14:16])[0]
    TargetNameOffset = struct.unpack("<I",msg2[16:20])[0]
    TargetName = msg2[TargetNameOffset:TargetNameOffset+TargetNameMaxLen]
    NegotiateFlags = struct.unpack("<I",msg2[20:24])[0]
    ServerChallenge = msg2[24:32]
    Reserved = msg2[32:40]
    # Fixes some NTLM auth that don't include the target info
    # I have no idea if such a server is broken or not, but
    # this helped with my testing
    if TargetNameOffset > 40:
        TargetInfoLen = struct.unpack("<H",msg2[40:42])[0]
        TargetInfoMaxLen = struct.unpack("<H",msg2[42:44])[0]
        TargetInfoOffset = struct.unpack("<I",msg2[44:48])[0]
        TargetInfo = msg2[TargetInfoOffset:TargetInfoOffset+TargetInfoLen]
        i=0
        TimeStamp = '\0'*8
        # Walk the AV_PAIR list: 2-byte id, 2-byte length, then the value
        while(i<TargetInfoLen):
            AvId = struct.unpack("<H",TargetInfo[i:i+2])[0]
            AvLen = struct.unpack("<H",TargetInfo[i+2:i+4])[0]
            AvValue = TargetInfo[i+4:i+4+AvLen]
            i = i+4+AvLen
            if AvId == NTLM_MsvAvTimestamp:
                TimeStamp = AvValue
            #~ print AvId, AvValue.decode('utf-16')
    return (ServerChallenge, NegotiateFlags)
def create_NTLM_AUTHENTICATE_MESSAGE(nonce, user, domain, password, NegotiateFlags):
    """Build a base64-encoded NTLM Type 3 (AUTHENTICATE) message.

    :param nonce: the 8-byte server challenge from the Type 2 message
    :param user: the user name
    :param domain: the NT domain name
    :param password: the plain-text password
    :param NegotiateFlags: flags from the Type 2 message; used to decide on
        unicode (UTF-16LE) encoding and NTLM2 session security
    :return: the message as a single-line base64 string (newlines stripped)
    """
    is_unicode = NegotiateFlags & NTLM_NegotiateUnicode
    is_NegotiateExtendedSecurity = NegotiateFlags & NTLM_NegotiateExtendedSecurity
    flags = struct.pack('<I',NTLM_TYPE2_FLAGS)
    BODY_LENGTH = 72  # fixed size of the Type 3 header, in bytes
    Payload_start = BODY_LENGTH # in bytes; payload fields follow the header
    Workstation = gethostname().upper()
    DomainName = domain.upper()
    UserName = user
    EncryptedRandomSessionKey = ""
    if is_unicode:
        Workstation = Workstation.encode('utf-16-le')
        DomainName = DomainName.encode('utf-16-le')
        UserName = UserName.encode('utf-16-le')
        EncryptedRandomSessionKey = EncryptedRandomSessionKey.encode('utf-16-le')
    LmChallengeResponse = calc_resp(create_LM_hashed_password_v1(password), nonce)
    NtChallengeResponse = calc_resp(create_NT_hashed_password_v1(password), nonce)
    if is_NegotiateExtendedSecurity:
        # NTLM2 session security: recompute both responses from a random
        # 8-byte client challenge combined with the server challenge
        pwhash = create_NT_hashed_password_v1(password, UserName, DomainName)
        ClientChallenge = ""
        for i in range(8):
            ClientChallenge+= chr(random.getrandbits(8))
        (NtChallengeResponse, LmChallengeResponse) = ntlm2sr_calc_resp(pwhash, nonce, ClientChallenge) #='\x39 e3 f4 cd 59 c5 d8 60')
    Signature = 'NTLMSSP\0'
    MessageType = struct.pack('<I',3) #type 3
    # Security-buffer descriptors: (length, max length, payload offset).
    # Payload order: DomainName, UserName, Workstation, LM response,
    # NT response, session key.
    DomainNameLen = struct.pack('<H', len(DomainName))
    DomainNameMaxLen = struct.pack('<H', len(DomainName))
    DomainNameOffset = struct.pack('<I', Payload_start)
    Payload_start += len(DomainName)
    UserNameLen = struct.pack('<H', len(UserName))
    UserNameMaxLen = struct.pack('<H', len(UserName))
    UserNameOffset = struct.pack('<I', Payload_start)
    Payload_start += len(UserName)
    WorkstationLen = struct.pack('<H', len(Workstation))
    WorkstationMaxLen = struct.pack('<H', len(Workstation))
    WorkstationOffset = struct.pack('<I', Payload_start)
    Payload_start += len(Workstation)
    LmChallengeResponseLen = struct.pack('<H', len(LmChallengeResponse))
    LmChallengeResponseMaxLen = struct.pack('<H', len(LmChallengeResponse))
    LmChallengeResponseOffset = struct.pack('<I', Payload_start)
    Payload_start += len(LmChallengeResponse)
    NtChallengeResponseLen = struct.pack('<H', len(NtChallengeResponse))
    NtChallengeResponseMaxLen = struct.pack('<H', len(NtChallengeResponse))
    NtChallengeResponseOffset = struct.pack('<I', Payload_start)
    Payload_start += len(NtChallengeResponse)
    EncryptedRandomSessionKeyLen = struct.pack('<H', len(EncryptedRandomSessionKey))
    EncryptedRandomSessionKeyMaxLen = struct.pack('<H', len(EncryptedRandomSessionKey))
    EncryptedRandomSessionKeyOffset = struct.pack('<I',Payload_start)
    Payload_start += len(EncryptedRandomSessionKey)
    NegotiateFlags = flags
    # Hard-coded OS version info: Windows 5.1 build 2600, NTLM revision 15
    ProductMajorVersion = struct.pack('<B', 5)
    ProductMinorVersion = struct.pack('<B', 1)
    ProductBuild = struct.pack('<H', 2600)
    VersionReserved1 = struct.pack('<B', 0)
    VersionReserved2 = struct.pack('<B', 0)
    VersionReserved3 = struct.pack('<B', 0)
    NTLMRevisionCurrent = struct.pack('<B', 15)
    MIC = struct.pack('<IIII',0,0,0,0)  # built but not included in msg3 below
    msg3 = Signature + MessageType + \
        LmChallengeResponseLen + LmChallengeResponseMaxLen + LmChallengeResponseOffset + \
        NtChallengeResponseLen + NtChallengeResponseMaxLen + NtChallengeResponseOffset + \
        DomainNameLen + DomainNameMaxLen + DomainNameOffset + \
        UserNameLen + UserNameMaxLen + UserNameOffset + \
        WorkstationLen + WorkstationMaxLen + WorkstationOffset + \
        EncryptedRandomSessionKeyLen + EncryptedRandomSessionKeyMaxLen + EncryptedRandomSessionKeyOffset + \
        NegotiateFlags + \
        ProductMajorVersion + ProductMinorVersion + ProductBuild + \
        VersionReserved1 + VersionReserved2 + VersionReserved3 + NTLMRevisionCurrent
    # Sanity check that the fixed-size header came out the expected length
    assert BODY_LENGTH==len(msg3), "BODY_LENGTH: %d != msg3: %d" % (BODY_LENGTH,len(msg3))
    Payload = DomainName + UserName + Workstation + LmChallengeResponse + NtChallengeResponse + EncryptedRandomSessionKey
    msg3 += Payload
    msg3 = base64.encodestring(msg3)
    msg3 = string.replace(msg3, '\n', '')
    return msg3
def calc_resp(password_hash, server_challenge):
    """Generate the 24-byte LM/NTLM response for a challenge.

    The 16-byte password hash is zero-padded to 21 bytes, split into three
    7-byte DES keys, and each key encrypts the 8-byte server challenge; the
    three ciphertext blocks are concatenated.

    :param password_hash: 16-byte password hash
    :param server_challenge: 8-byte challenge from the Type 2 message
    :return: the 24-byte response
    """
    # Zero-pad the hash to 21 bytes so it splits into three DES keys
    padded = password_hash + '\0' * (21 - len(password_hash))
    challenge = server_challenge[0:8]
    blocks = []
    for start in (0, 7, 14):
        cipher = des.DES(padded[start:start + 7])
        blocks.append(cipher.encrypt(challenge))
    return ''.join(blocks)
def ComputeResponse(ResponseKeyNT, ResponseKeyLM, ServerChallenge, ServerName, ClientChallenge='\xaa'*8, Time='\0'*8):
    """Compute the NTLMv2 response pair (see [MS-NLMP] 3.3.2).

    :param ResponseKeyNT: NTLMv2 hash used to key the NT proof
    :param ResponseKeyLM: NTLMv2 hash used to key the LM response
    :param ServerChallenge: 8-byte challenge from the Type 2 message
    :param ServerName: target info blob; NOTE(review): this parameter is
        accepted but never used below -- the code inserts ServerChallenge
        into the temp blob where the spec places ServerName; confirm
        against [MS-NLMP]
    :param ClientChallenge: 8-byte client challenge
    :param Time: 8-byte timestamp
    :return: (NtChallengeResponse, LmChallengeResponse)
    """
    LmChallengeResponse = hmac.new(ResponseKeyLM, ServerChallenge+ClientChallenge).digest() + ClientChallenge
    Responserversion = '\x01'
    HiResponserversion = '\x01'
    # "temp" blob: version bytes, 6 reserved bytes, timestamp, client
    # challenge, 4 reserved bytes, server challenge, 4 reserved bytes
    temp = Responserversion + HiResponserversion + '\0'*6 + Time + ClientChallenge + '\0'*4 + ServerChallenge + '\0'*4
    NTProofStr = hmac.new(ResponseKeyNT, ServerChallenge + temp).digest()
    NtChallengeResponse = NTProofStr + temp
    # Computed but not returned; kept for parity with the spec's algorithm
    SessionBaseKey = hmac.new(ResponseKeyNT, NTProofStr).digest()
    return (NtChallengeResponse, LmChallengeResponse)
def ntlm2sr_calc_resp(ResponseKeyNT, ServerChallenge, ClientChallenge='\xaa'*8):
    """Compute the NTLM2 session response pair.

    The LM response is the client challenge padded with 16 NUL bytes. The NT
    response is the classic DES response computed over the first 8 bytes of
    the MD5 of the concatenated server and client challenges.

    :return: (NtChallengeResponse, LmChallengeResponse)
    """
    import hashlib
    lm_resp = ClientChallenge + '\0' * 16
    session_hash = hashlib.md5(ServerChallenge + ClientChallenge).digest()
    nt_resp = calc_resp(ResponseKeyNT, session_hash[:8])
    return (nt_resp, lm_resp)
def create_LM_hashed_password_v1(passwd):
    """Create the LanManager hashed password.

    The password is uppercased, truncated/NUL-padded to exactly 14 bytes,
    split into two 7-byte DES keys, and each key encrypts the LM magic
    constant; the two 8-byte ciphertexts are concatenated.

    :param passwd: the plain-text password
    :return: the 16-byte LM hash
    """
    # fix the password length to 14 bytes
    passwd = string.upper(passwd)
    lm_pw = passwd[0:14]
    # Bug fix: the original computed the NUL padding first but immediately
    # discarded it by reassigning lm_pw from the unpadded password, so
    # passwords shorter than 14 characters produced short DES key slices.
    lm_pw = lm_pw + '\0' * (14 - len(lm_pw))
    # do hash
    magic_str = "KGS!@#$%" # LM "magic" constant, page 57 in [MS-NLMP]
    res = ''
    dobj = des.DES(lm_pw[0:7])
    res = res + dobj.encrypt(magic_str)
    dobj = des.DES(lm_pw[7:14])
    res = res + dobj.encrypt(magic_str)
    return res
def create_NT_hashed_password_v1(passwd, user=None, domain=None):
    """Create the NTLMv1 password hash: MD4 over the UTF-16LE password.

    The user and domain arguments are accepted for call compatibility with
    the v2 variant but are not used by the v1 hash.
    """
    encoded = passwd.encode('utf-16le')
    return hashlib.new('md4', encoded).digest()
def create_NT_hashed_password_v2(passwd, user, domain):
    """Create the NTLMv2 password hash.

    HMAC-MD5 over the uppercased user name concatenated with the domain
    (UTF-16LE encoded), keyed with the NTLMv1 hash of the password.

    :param passwd: the plain-text password
    :param user: the user name (case-insensitive per the spec)
    :param domain: the NT domain name
    :return: the 16-byte NTLMv2 hash
    """
    digest = create_NT_hashed_password_v1(passwd)
    # Fix: the original had a second, unreachable `return digest` after this
    # statement; the dead code has been removed.
    return hmac.new(digest, (user.upper()+domain).encode('utf-16le')).digest()
def create_sessionbasekey(password):
    """Derive the NTLM session base key: MD4 of the NTLMv1 password hash."""
    nt_hash = create_NT_hashed_password_v1(password)
    return hashlib.new('md4', nt_hash).digest()
if __name__ == "__main__":
    # Self-test harness: the expected values below are the worked examples
    # from the [MS-NLMP] specification (pages 72-77).
    def ByteToHex( byteStr ):
        """
        Convert a byte string to it's hex string representation e.g. for output.
        """
        return ' '.join( [ "%02X" % ord( x ) for x in byteStr ] )

    def HexToByte( hexStr ):
        """
        Convert a string hex byte values into a byte string. The Hex Byte values may
        or may not be space separated.
        """
        bytes = []
        hexStr = ''.join( hexStr.split(" ") )
        for i in range(0, len(hexStr), 2):
            bytes.append( chr( int (hexStr[i:i+2], 16 ) ) )
        return ''.join( bytes )

    # Fixed test vectors from the spec
    ServerChallenge = HexToByte("01 23 45 67 89 ab cd ef")
    ClientChallenge = '\xaa'*8
    Time = '\x00'*8
    Workstation = "COMPUTER".encode('utf-16-le')
    ServerName = "Server".encode('utf-16-le')
    User = "User"
    Domain = "Domain"
    Password = "Password"
    RandomSessionKey = '\55'*16
    assert HexToByte("e5 2c ac 67 41 9a 9a 22 4a 3b 10 8f 3f a6 cb 6d") == create_LM_hashed_password_v1(Password)  # [MS-NLMP] page 72
    assert HexToByte("a4 f4 9c 40 65 10 bd ca b6 82 4e e7 c3 0f d8 52") == create_NT_hashed_password_v1(Password)  # [MS-NLMP] page 73
    assert HexToByte("d8 72 62 b0 cd e4 b1 cb 74 99 be cc cd f1 07 84") == create_sessionbasekey(Password)
    assert HexToByte("67 c4 30 11 f3 02 98 a2 ad 35 ec e6 4f 16 33 1c 44 bd be d9 27 84 1f 94") == calc_resp(create_NT_hashed_password_v1(Password), ServerChallenge)
    assert HexToByte("98 de f7 b8 7f 88 aa 5d af e2 df 77 96 88 a1 72 de f1 1c 7d 5c cd ef 13") == calc_resp(create_LM_hashed_password_v1(Password), ServerChallenge)
    (NTLMv1Response,LMv1Response) = ntlm2sr_calc_resp(create_NT_hashed_password_v1(Password), ServerChallenge, ClientChallenge)
    assert HexToByte("aa aa aa aa aa aa aa aa 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00") == LMv1Response  # [MS-NLMP] page 75
    assert HexToByte("75 37 f8 03 ae 36 71 28 ca 45 82 04 bd e7 ca f8 1e 97 ed 26 83 26 72 32") == NTLMv1Response
    assert HexToByte("0c 86 8a 40 3b fd 7a 93 a3 00 1e f2 2e f0 2e 3f") == create_NT_hashed_password_v2(Password, User, Domain)  # [MS-NLMP] page 76
    ResponseKeyLM = ResponseKeyNT = create_NT_hashed_password_v2(Password, User, Domain)
    (NTLMv2Response,LMv2Response) = ComputeResponse(ResponseKeyNT, ResponseKeyLM, ServerChallenge, ServerName, ClientChallenge, Time)
    assert HexToByte("86 c3 50 97 ac 9c ec 10 25 54 76 4a 57 cc cc 19 aa aa aa aa aa aa aa aa") == LMv2Response  # [MS-NLMP] page 76

    # expected failure
    # According to the spec in section '3.3.2 NTLM v2 Authentication' the NTLMv2Response should be longer than the value given on page 77 (this suggests a mistake in the spec)
    #~ assert HexToByte("68 cd 0a b8 51 e5 1c 96 aa bc 92 7b eb ef 6a 1c") == NTLMv2Response, "\nExpected: 68 cd 0a b8 51 e5 1c 96 aa bc 92 7b eb ef 6a 1c\nActual: %s" % ByteToHex(NTLMv2Response)  # [MS-NLMP] page 77

View File

@@ -1,4 +1,5 @@
{
"1.3.0": "messages/1.3.0.txt",
"1.6.0": "messages/1.6.0.txt"
}
"1.6.0": "messages/1.6.0.txt",
"2.0.0": "messages/2.0.0.txt"
}

View File

@@ -0,0 +1,64 @@
Package Control 2.0.0 Changelog:
Today I'd like to announce two big milestones:
- Package Control 2.0 for ST2 and ST3
- A new Package Control website at https://sublime.wbond.net
The full announcement about the PC 2.0 release is available on the news page at
https://sublime.wbond.net/news.
If you are running the "testing" version of Package Control (1.6.9 - 1.6.11),
you will likely need to restart Sublime Text before Package Control will work
properly.
Giving Back
Part of the new Package Control website is in-depth information about each
package. The new package pages include a link where you can give a tip to the
developer/maintainer of your favorite packages.
The donate links go to https://www.gittip.com, which is building an excellent,
and open platform for users to say "thank you" to open source developers. It
is possible to give a small amount each week, such as $0.25, however these small
amounts multiplied by the large size of the community can be a big thank you!
One of the less glamorous jobs involved with making Package Control happen is
reviewing and giving package developers feedback before adding their packages
to the default channel. The following contributors deserve a big thank you:
FichteFoll - https://www.gittip.com/FichteFoll/
joneshf - https://www.gittip.com/on/github/joneshf/
sentience - https://www.gittip.com/on/github/sentience/
Finally, I'm looking to raise some money to obtain a Mac Mini for the purposes
of supporting ST3 on OS X and a license for a Windows 8 VM. If you are inclined
to donate to those, or want to just buy me a beer, check out:
https://sublime.wbond.net/say_thanks
Notable Features
- A new Windows downloader that uses WinINet and should provide much better
proxy support
- Using operating system-supplied SSL CA certs on all platforms, with a
deprecated fallback to certificates served through the channel
- Proxy server fixes for OS X
- A completely revamped channel and repository system with support for more
information about packages including labels; readme, issues, donate and buy
URLs; tag-based releases; platform targeting without a custom packages.json
file; and Sublime Text version targeting
- Support for installing via .sublime-package files in ST3, which allows users
to easily override specific files from the package. Package developers who
need a loose folder of files may include a .no-sublime-package file in their
repo.
- In the coming days the new Package Control website will be released as open
source on GitHub

View File

@@ -1,5 +1,5 @@
{
"version": "1.6.3",
"url": "http://wbond.net/sublime_packages/package_control",
"version": "2.0.1-beta1",
"url": "https://sublime.wbond.net",
"description": "A full-featured package manager"
}
}

View File

@@ -0,0 +1,2 @@
__version__ = "2.0.1-beta1"
__version_info__ = (2, 0, 1, 'beta1')

View File

@@ -0,0 +1,215 @@
import threading
import re
import os
import datetime
import time
import sublime
from .console_write import console_write
from .package_installer import PackageInstaller
from .package_renamer import PackageRenamer
from .open_compat import open_compat, read_compat
class AutomaticUpgrader(threading.Thread):

    """
    Automatically checks for updated packages and installs them. Controlled
    by the `auto_upgrade`, `auto_upgrade_ignore`, and `auto_upgrade_frequency`
    settings.
    """

    def __init__(self, found_packages):
        """
        :param found_packages:
            A list of package names for the packages that were found to be
            installed on the machine.
        """

        self.installer = PackageInstaller()
        self.manager = self.installer.manager

        self.load_settings()

        self.package_renamer = PackageRenamer()
        self.package_renamer.load_settings()

        self.auto_upgrade = self.settings.get('auto_upgrade')
        self.auto_upgrade_ignore = self.settings.get('auto_upgrade_ignore')

        self.load_last_run()
        self.determine_next_run()

        # Detect if a package is missing that should be installed
        self.missing_packages = list(set(self.installed_packages) -
            set(found_packages))

        # Record this run immediately so overlapping instances do not both
        # decide an upgrade pass is due
        if self.auto_upgrade and self.next_run <= time.time():
            self.save_last_run(time.time())

        threading.Thread.__init__(self)

    def load_last_run(self):
        """
        Loads the last run time from disk into memory
        """

        self.last_run = None

        self.last_run_file = os.path.join(sublime.packages_path(), 'User',
            'Package Control.last-run')

        if os.path.isfile(self.last_run_file):
            with open_compat(self.last_run_file) as fobj:
                try:
                    self.last_run = int(read_compat(fobj))
                except ValueError:
                    # A corrupt last-run file is treated as "never run"
                    pass

    def determine_next_run(self):
        """
        Figure out when the next run should happen
        """

        self.next_run = int(time.time())

        frequency = self.settings.get('auto_upgrade_frequency')
        if frequency:
            if self.last_run:
                # auto_upgrade_frequency is expressed in hours
                self.next_run = int(self.last_run) + (frequency * 60 * 60)
            else:
                self.next_run = time.time()

    def save_last_run(self, last_run):
        """
        Saves a record of when the last run was

        :param last_run:
            The unix timestamp of when to record the last run as
        """

        with open_compat(self.last_run_file, 'w') as fobj:
            fobj.write(str(int(last_run)))

    def load_settings(self):
        """
        Loads the list of installed packages from the
        Package Control.sublime-settings file
        """

        self.settings_file = 'Package Control.sublime-settings'
        self.settings = sublime.load_settings(self.settings_file)
        self.installed_packages = self.settings.get('installed_packages', [])
        self.should_install_missing = self.settings.get('install_missing')
        # Guard against a malformed user settings file
        if not isinstance(self.installed_packages, list):
            self.installed_packages = []

    def run(self):
        # Thread entry point: install any missing packages first, then run
        # the upgrade pass unless one already happened within the frequency
        # window
        self.install_missing()

        if self.next_run > time.time():
            self.print_skip()
            return

        self.upgrade_packages()

    def install_missing(self):
        """
        Installs all packages that were listed in the list of
        `installed_packages` from Package Control.sublime-settings but were not
        found on the filesystem and passed as `found_packages`.
        """

        if not self.missing_packages or not self.should_install_missing:
            return

        console_write(u'Installing %s missing packages' % len(self.missing_packages), True)
        for package in self.missing_packages:
            if self.installer.manager.install_package(package):
                console_write(u'Installed missing package %s' % package, True)

    def print_skip(self):
        """
        Prints a notice in the console if the automatic upgrade is skipped
        due to already having been run in the last `auto_upgrade_frequency`
        hours.
        """

        last_run = datetime.datetime.fromtimestamp(self.last_run)
        next_run = datetime.datetime.fromtimestamp(self.next_run)
        date_format = '%Y-%m-%d %H:%M:%S'
        message_string = u'Skipping automatic upgrade, last run at %s, next run at %s or after' % (
            last_run.strftime(date_format), next_run.strftime(date_format))
        console_write(message_string, True)

    def upgrade_packages(self):
        """
        Upgrades all packages that are not currently upgraded to the latest
        version. Also renames any installed packages to their new names.
        """

        if not self.auto_upgrade:
            return

        self.package_renamer.rename_packages(self.installer)

        package_list = self.installer.make_package_list(['install',
            'reinstall', 'downgrade', 'overwrite', 'none'],
            ignore_packages=self.auto_upgrade_ignore)

        # If Package Control is being upgraded, just do that and restart
        for package in package_list:
            if package[0] != 'Package Control':
                continue

            def reset_last_run():
                # Re-save the last run time so it runs again after PC has
                # been updated
                self.save_last_run(self.last_run)
            sublime.set_timeout(reset_last_run, 1)
            package_list = [package]
            break

        if not package_list:
            console_write(u'No updated packages', True)
            return

        console_write(u'Installing %s upgrades' % len(package_list), True)

        disabled_packages = []

        def do_upgrades():
            # Wait so that the ignored packages can be "unloaded"
            time.sleep(0.5)

            # We use a function to generate the on-complete lambda because if
            # we don't, the lambda will bind to info at the current scope, and
            # thus use the last value of info from the loop
            def make_on_complete(name):
                return lambda: self.installer.reenable_package(name)

            for info in package_list:
                if info[0] in disabled_packages:
                    on_complete = make_on_complete(info[0])
                else:
                    on_complete = None
                self.installer.manager.install_package(info[0])

                # Show a human-readable version: either "vX.Y.Z" extracted
                # from the operation description, or "latest <vcs> commit"
                version = re.sub('^.*?(v[\d\.]+).*?$', '\\1', info[2])
                if version == info[2] and version.find('pull with') != -1:
                    vcs = re.sub('^pull with (\w+).*?$', '\\1', version)
                    version = 'latest %s commit' % vcs
                message_string = u'Upgraded %s to %s' % (info[0], version)
                console_write(message_string, True)
                if on_complete:
                    sublime.set_timeout(on_complete, 1)

        # Disabling a package means changing settings, which can only be done
        # in the main thread. We then create a new background thread so that
        # the upgrade process does not block the UI.
        def disable_packages():
            disabled_packages.extend(self.installer.disable_packages([info[0] for info in package_list]))
            threading.Thread(target=do_upgrades).start()

        sublime.set_timeout(disable_packages, 1)

View File

@@ -0,0 +1,378 @@
import hashlib
import os
import re
import time
import sys
from .cmd import Cli
from .console_write import console_write
from .open_compat import open_compat, read_compat
# Have somewhere to store the CA bundle, even when not running in Sublime Text
try:
    import sublime
    # Inside Sublime Text the directory is resolved lazily at run time,
    # since sublime.packages_path() is not available at import time
    ca_bundle_dir = None
except (ImportError):
    # Standalone usage: fall back to a dot-directory in the user's home
    ca_bundle_dir = os.path.join(os.path.expanduser('~'), '.package_control')
    if not os.path.exists(ca_bundle_dir):
        os.mkdir(ca_bundle_dir)
def find_root_ca_cert(settings, domain):
    """
    Fetches the SSL certificate chain for a domain and returns the root CA
    cert (looked up in the system CA bundle by the issuer of the last cert
    in the chain).

    :param settings:
        A dict to look in for `debug` and `openssl_binary` keys

    :param domain:
        The domain name to connect to on port 443

    :return:
        A [cert, cert_hash] list -- the PEM-encoded root CA certificate and
        the md5 hex digest of it
    """

    runner = OpensslCli(settings.get('openssl_binary'), settings.get('debug'))
    binary = runner.retrieve_binary()

    args = [binary, 's_client', '-showcerts', '-connect', domain + ':443']
    result = runner.execute(args, os.path.dirname(binary))

    # Collect each PEM block from the s_client output
    certs = []
    temp = []
    in_block = False
    for line in result.splitlines():
        if line.find('BEGIN CERTIFICATE') != -1:
            in_block = True
        if in_block:
            temp.append(line)
        if line.find('END CERTIFICATE') != -1:
            in_block = False
            certs.append(u"\n".join(temp))
            temp = []

    # Remove the cert for the domain itself, just leaving the
    # chain cert and the CA cert
    certs.pop(0)

    # Look for the "parent" root CA cert via the issuer of the last cert in
    # the chain. (The original code also computed the subject of that cert
    # here, but never used it -- that wasted openssl invocation is removed.)
    issuer = openssl_get_cert_issuer(settings, certs[-1])

    cert = get_ca_cert_by_subject(settings, issuer)
    cert_hash = hashlib.md5(cert.encode('utf-8')).hexdigest()

    return [cert, cert_hash]
def get_system_ca_bundle_path(settings):
    """
    Get the filesystem path to the system CA bundle. On Linux it looks in a
    number of predefined places, however on OS X it has to be programatically
    exported from the SystemRootCertificates.keychain. Windows does not ship
    with a CA bundle, but also we use WinINet on Windows, so we don't need to
    worry about CA certs.

    :param settings:
        A dict to look in for `debug` and `openssl_binary` keys

    :return:
        The full filesystem path to the .ca-bundle file, or False on error
    """

    # If the sublime module is available, we bind this value at run time
    # since the sublime.packages_path() is not available at import time
    global ca_bundle_dir

    platform = sys.platform
    debug = settings.get('debug')

    ca_path = False

    if platform == 'win32':
        console_write(u"Unable to get system CA cert path since Windows does not ship with them", True)
        return False

    # OS X
    if platform == 'darwin':
        if not ca_bundle_dir:
            ca_bundle_dir = os.path.join(sublime.packages_path(), 'User')
        ca_path = os.path.join(ca_bundle_dir, 'Package Control.system-ca-bundle')

        exists = os.path.exists(ca_path)
        # The bundle is old if it is a week or more out of date
        # (604800 seconds = 7 days)
        is_old = exists and os.stat(ca_path).st_mtime < time.time() - 604800

        if not exists or is_old:
            if debug:
                console_write(u"Generating new CA bundle from system keychain", True)
            _osx_create_ca_bundle(settings, ca_path)
            if debug:
                console_write(u"Finished generating new CA bundle at %s" % ca_path, True)
        elif debug:
            console_write(u"Found previously exported CA bundle at %s" % ca_path, True)

    # Linux
    else:
        # Common CA cert paths
        paths = [
            '/usr/lib/ssl/certs/ca-certificates.crt',
            '/etc/ssl/certs/ca-certificates.crt',
            '/etc/pki/tls/certs/ca-bundle.crt',
            '/etc/ssl/ca-bundle.pem'
        ]
        # Use the first bundle that exists; ca_path stays False otherwise
        for path in paths:
            if os.path.exists(path):
                ca_path = path
                break

        if debug and ca_path:
            console_write(u"Found system CA bundle at %s" % ca_path, True)

    return ca_path
def get_ca_cert_by_subject(settings, subject):
    """
    Scans the system CA bundle for a certificate whose subject matches the
    given subject string.

    :param settings:
        A dict to look in for `debug` and `openssl_binary` keys

    :param subject:
        The cert subject string to match, as produced by
        openssl_get_cert_subject()

    :return:
        The PEM-encoded certificate string, or False if not found
    """

    bundle_path = get_system_ca_bundle_path(settings)

    with open_compat(bundle_path, 'r') as f:
        contents = read_compat(f)

    # Walk the bundle one PEM block at a time, comparing subjects
    temp = []

    in_block = False
    for line in contents.splitlines():
        if line.find('BEGIN CERTIFICATE') != -1:
            in_block = True

        if in_block:
            temp.append(line)

        if line.find('END CERTIFICATE') != -1:
            in_block = False
            cert = u"\n".join(temp)
            temp = []

            if openssl_get_cert_subject(settings, cert) == subject:
                return cert

    return False
def openssl_get_cert_issuer(settings, cert):
    """
    Uses the openssl command line client to extract the issuer of an x509
    certificate.

    :param settings:
        A dict to look in for `debug` and `openssl_binary` keys

    :param cert:
        A string containing the PEM-encoded x509 certificate to extract the
        issuer from

    :return:
        The cert issuer
    """

    cli = OpensslCli(settings.get('openssl_binary'), settings.get('debug'))
    openssl = cli.retrieve_binary()
    command = [openssl, 'x509', '-noout', '-issuer']
    raw = cli.execute(command, os.path.dirname(openssl), cert)
    # Strip the "issuer=" prefix openssl prints before the value
    return re.sub('^issuer=\s*', '', raw)
def openssl_get_cert_name(settings, cert):
    """
    Uses the openssl command line client to extract the name of an x509
    certificate. If the commonName is set, that is used, otherwise the first
    organizationalUnitName is used. This mirrors what OS X uses for storing
    trust preferences.

    :param settings:
        A dict to look in for `debug` and `openssl_binary` keys

    :param cert:
        A string containing the PEM-encoded x509 certificate to extract the
        name from

    :return:
        The cert subject name, which is the commonName (if available), or the
        first organizationalUnitName
    """

    runner = OpensslCli(settings.get('openssl_binary'), settings.get('debug'))
    binary = runner.retrieve_binary()

    # sep_multiline/lname prints one "  longFieldName=value" line per field
    args = [binary, 'x509', '-noout', '-subject', '-nameopt',
        'sep_multiline,lname,utf8']
    result = runner.execute(args, os.path.dirname(binary), cert)

    # First look for the common name
    cn = None
    # If there is no common name for the cert, the trust prefs use the first
    # orginizational unit name
    first_ou = None

    for line in result.splitlines():
        match = re.match('^\s+commonName=(.*)$', line)
        if match:
            cn = match.group(1)
            break
        match = re.match('^\s+organizationalUnitName=(.*)$', line)
        if first_ou is None and match:
            first_ou = match.group(1)
            continue

    # This is the name of the cert that would be used in the trust prefs
    return cn or first_ou
def openssl_get_cert_subject(settings, cert):
    """
    Uses the openssl command line client to extract the subject of an x509
    certificate.

    :param settings:
        A dict to look in for `debug` and `openssl_binary` keys

    :param cert:
        A string containing the PEM-encoded x509 certificate to extract the
        subject from

    :return:
        The cert subject
    """

    cli = OpensslCli(settings.get('openssl_binary'), settings.get('debug'))
    openssl = cli.retrieve_binary()
    command = [openssl, 'x509', '-noout', '-subject']
    raw = cli.execute(command, os.path.dirname(openssl), cert)
    # Strip the "subject=" prefix openssl prints before the value
    return re.sub('^subject=\s*', '', raw)
def _osx_create_ca_bundle(settings, destination):
    """
    Uses the OS X `security` command line tool to export the system's list of
    CA certs from /System/Library/Keychains/SystemRootCertificates.keychain.
    Checks the cert names against the trust preferences, ensuring that
    distrusted certs are not exported.

    :param settings:
        A dict to look in for `debug` and `openssl_binary` keys

    :param destination:
        The full filesystem path to the destination .ca-bundle file
    """

    distrusted_certs = _osx_get_distrusted_certs(settings)

    # Export the root certs
    args = ['/usr/bin/security', 'export', '-k',
        '/System/Library/Keychains/SystemRootCertificates.keychain', '-t',
        'certs', '-p']
    result = Cli(None, settings.get('debug')).execute(args, '/usr/bin')

    # Walk the exported PEM output one certificate block at a time
    certs = []
    temp = []

    in_block = False
    for line in result.splitlines():
        if line.find('BEGIN CERTIFICATE') != -1:
            in_block = True

        if in_block:
            temp.append(line)

        if line.find('END CERTIFICATE') != -1:
            in_block = False
            cert = u"\n".join(temp)
            temp = []

            if distrusted_certs:
                # If it is a distrusted cert, we move on to the next
                cert_name = openssl_get_cert_name(settings, cert)
                if cert_name in distrusted_certs:
                    if settings.get('debug'):
                        console_write(u'Skipping root certficate %s because it is distrusted' % cert_name, True)
                    continue

            certs.append(cert)

    with open_compat(destination, 'w') as f:
        f.write(u"\n".join(certs))
def _osx_get_distrusted_certs(settings):
    """
    Uses the OS X `security` binary to get a list of admin trust settings,
    which is what is set when a user changes the trust setting on a root
    certificate. By looking at the SSL policy, we can properly exclude
    distrusted certs from out export.

    Tested on OS X 10.6 and 10.8

    :param settings:
        A dict to look in for `debug` key

    :return:
        A list of CA cert names, where the name is the commonName (if
        available), or the first organizationalUnitName
    """

    args = ['/usr/bin/security', 'dump-trust-settings', '-d']
    result = Cli(None, settings.get('debug')).execute(args, '/usr/bin')

    # Line-oriented state machine over the dump output: track the current
    # cert name and whether the trust setting being read is an SSL policy
    distrusted_certs = []
    cert_name = None
    ssl_policy = False
    for line in result.splitlines():
        if line == '':
            continue

        # Reset for each cert
        match = re.match('Cert\s+\d+:\s+(.*)$', line)
        if match:
            cert_name = match.group(1)
            continue

        # Reset for each trust setting
        if re.match('^\s+Trust\s+Setting\s+\d+:', line):
            ssl_policy = False
            continue

        # We are only interested in SSL policies
        if re.match('^\s+Policy\s+OID\s+:\s+SSL', line):
            ssl_policy = True
            continue

        distrusted = re.match('^\s+Result\s+Type\s+:\s+kSecTrustSettingsResultDeny', line)
        if ssl_policy and distrusted and cert_name not in distrusted_certs:
            if settings.get('debug'):
                console_write(u'Found SSL distrust setting for root certificate %s' % cert_name, True)
            distrusted_certs.append(cert_name)

    return distrusted_certs
class OpensslCli(Cli):

    # Name used by the Cli base class when locating/reporting the binary
    cli_name = 'openssl'

    def retrieve_binary(self):
        """
        Returns the path to the openssl executable

        :return: The string path to the executable or False on error
        """

        name = 'openssl'
        if os.name == 'nt':
            name += '.exe'

        binary = self.find_binary(name)
        # find_binary() may return a directory; resolve to the file inside it
        if binary and os.path.isdir(binary):
            full_path = os.path.join(binary, name)
            if os.path.exists(full_path):
                binary = full_path

        if not binary:
            # NOTE(review): show_error does not appear in this module's
            # visible imports -- confirm it is in scope, otherwise this
            # branch raises NameError instead of showing the error dialog
            show_error((u'Unable to find %s. Please set the openssl_binary ' +
                u'setting by accessing the Preferences > Package Settings > ' +
                u'Package Control > Settings \u2013 User menu entry. The ' +
                u'Settings \u2013 Default entry can be used for reference, ' +
                u'but changes to that will be overwritten upon next upgrade.') % name)
            return False

        return binary

View File

@@ -0,0 +1,173 @@
import time
# A cache of channel and repository info to allow users to install multiple
# packages without having to wait for the metadata to be downloaded more
# than once. The keys are managed locally by the utilizing code.
_channel_repository_cache = {}
def clear_cache():
    """Discards all entries in the in-memory channel/repository cache."""
    global _channel_repository_cache
    # Rebind (rather than mutate) so a fresh dict replaces the old one
    _channel_repository_cache = {}
def get_cache(key, default=None):
    """
    Gets an in-memory cache value

    :param key:
        The string key

    :param default:
        The value to return if the key has not been set, or the ttl expired

    :return:
        The cached value, or default
    """

    entry = _channel_repository_cache.get(key, {})
    expiry = entry.get('expires')
    # Expired (or never-set) entries fall through to the default
    if not expiry or expiry <= time.time():
        return default
    return entry.get('data')
def merge_cache_over_settings(destination, setting, key_prefix):
    """
    Take the cached value of `key` and put it into the key `setting` of
    the destination.settings dict. Merge the values by overlaying the
    cached setting over the existing info.

    :param destination:
        An object that has a `.settings` attribute that is a dict

    :param setting:
        The dict key to use when pushing the value into the settings dict

    :param key_prefix:
        The string to prefix to `setting` to make the cache key
    """

    cached = get_cache(key_prefix + '.' + setting, {})
    if not cached:
        return
    # Cached values win over what is already in the settings dict
    merged = destination.settings.get(setting, {})
    merged.update(cached)
    destination.settings[setting] = merged
def merge_cache_under_settings(destination, setting, key_prefix, list_=False):
    """
    Take the cached value of `key` and put it into the key `setting` of
    the destination.settings dict. Merge the values by overlaying the
    existing setting value over the cached info.

    :param destination:
        An object that has a `.settings` attribute that is a dict

    :param setting:
        The dict key to use when pushing the value into the settings dict

    :param key_prefix:
        The string to prefix to `setting` to make the cache key

    :param list_:
        If a list should be used instead of a dict
    """

    default = [] if list_ else {}
    existing = destination.settings.get(setting)
    value = get_cache(key_prefix + '.' + setting, default)
    if not value:
        return
    if existing:
        if list_:
            # Append existing entries not already present, preserving the
            # cached order. A set gives O(1) membership tests instead of
            # the previous dict(zip(...)) construction.
            cached_entries = set(value)
            for val in existing:
                if val in cached_entries:
                    continue
                value.append(val)
        else:
            # Existing keys win over the cached info
            value.update(existing)
    destination.settings[setting] = value
def set_cache(key, data, ttl=300):
    """
    Sets an in-memory cache value

    :param key:
        The string key

    :param data:
        The data to cache

    :param ttl:
        The integer number of seconds to cache the data for
    """

    expires_at = time.time() + ttl
    _channel_repository_cache[key] = {'data': data, 'expires': expires_at}
def set_cache_over_settings(destination, setting, key_prefix, value, ttl):
    """
    Take the value passed, and merge it over the current `setting`. Once
    complete, take the merged result and set the cache `key` and
    destination.settings `setting` to that value, using the `ttl` for
    set_cache().

    :param destination:
        An object that has a `.settings` attribute that is a dict

    :param setting:
        The dict key to use when pushing the value into the settings dict

    :param key_prefix:
        The string to prefix to `setting` to make the cache key

    :param value:
        The value to set

    :param ttl:
        The cache ttl to use
    """

    existing = destination.settings.get(setting, {})
    existing.update(value)
    # Store the merged dict - previously the merge was computed and then
    # discarded (the bare `value` was stored), silently dropping any
    # pre-existing keys from both the cache and the settings
    set_cache(key_prefix + '.' + setting, existing, ttl)
    destination.settings[setting] = existing
def set_cache_under_settings(destination, setting, key_prefix, value, ttl, list_=False):
    """
    Take the value passed, and merge the current `setting` over it. Once
    complete, take the value and set the cache `key` and destination.settings
    `setting` to that value, using the `ttl` for set_cache().

    :param destination:
        An object that has a `.settings` attribute that is a dict

    :param setting:
        The dict key to use when pushing the value into the settings dict

    :param key_prefix:
        The string to prefix to `setting` to make the cache key

    :param value:
        The value to set

    :param ttl:
        The cache ttl to use

    :param list_:
        If a list should be used instead of a dict
    """

    if not value:
        return
    existing = destination.settings.get(setting, [] if list_ else {})
    # Existing entries take precedence: they are merged on top of (dict)
    # or after (list) the new value. Note: `value` is mutated in place.
    if list_:
        value.extend(existing)
    else:
        value.update(existing)
    set_cache(key_prefix + '.' + setting, value, ttl)
    destination.settings[setting] = value

View File

@@ -0,0 +1,37 @@
import os
def clear_directory(directory, ignore_paths=None):
    """
    Tries to delete all files and folders from a directory

    :param directory:
        The string directory path

    :param ignore_paths:
        An array of paths to ignore while deleting files

    :return:
        If all of the files and folders were successfully deleted
    """

    success = True
    # Walk bottom-up so every directory is (hopefully) empty by the time
    # we try to remove it
    for root, dirs, files in os.walk(directory, topdown=False):
        entries = [os.path.join(root, name) for name in files]
        entries += [os.path.join(root, name) for name in dirs]
        for entry in entries:
            # Don't delete the metadata file, that way we have it
            # when the reinstall happens, and the appropriate
            # usage info can be sent back to the server
            if ignore_paths and entry in ignore_paths:
                continue
            try:
                if os.path.isdir(entry):
                    os.rmdir(entry)
                else:
                    os.remove(entry)
            except (OSError, IOError):
                # Keep going - report the partial failure via the return value
                success = False
    return success

View File

@@ -0,0 +1,257 @@
import re
from ..versions import version_sort, version_filter
from .json_api_client import JSONApiClient
# A predefined list of readme filenames to look for. Entries are all
# lowercase - repo directory listings are lowercased before comparison.
_readme_filenames = [
    'readme',
    'readme.txt',
    'readme.md',
    'readme.mkd',
    'readme.mdown',
    'readme.markdown',
    'readme.textile',
    'readme.creole',
    'readme.rst'
]
class BitBucketClient(JSONApiClient):

    """
    Fetches repository, tag and readme info via the BitBucket 1.0 REST API.
    fetch()/fetch_json() and self.settings are inherited from JSONApiClient.
    """

    def download_info(self, url):
        """
        Retrieve information about downloading a package

        :param url:
            The URL of the repository, in one of the forms:
              https://bitbucket.org/{user}/{repo}
              https://bitbucket.org/{user}/{repo}/src/{branch}
              https://bitbucket.org/{user}/{repo}/#tags
            If the last option, grabs the info from the newest
            tag that is a valid semver version.

        :raises:
            DownloaderException: when there is an error downloading
            ClientException: when there is an error parsing the response

        :return:
            None if no match, False if no commit, or a dict with the following keys:
              `version` - the version number of the download
              `url` - the download URL of a zip file of the package
              `date` - the ISO-8601 timestamp string when the version was published
        """

        commit_info = self._commit_info(url)
        # Propagate None (URL not recognized) or False (no usable commit)
        if not commit_info:
            return commit_info
        return {
            'version': commit_info['version'],
            'url': 'https://bitbucket.org/%s/get/%s.zip' % (commit_info['user_repo'], commit_info['commit']),
            'date': commit_info['timestamp']
        }

    def repo_info(self, url):
        """
        Retrieve general information about a repository

        :param url:
            The URL to the repository, in one of the forms:
              https://bitbucket.org/{user}/{repo}
              https://bitbucket.org/{user}/{repo}/src/{branch}

        :raises:
            DownloaderException: when there is an error downloading
            ClientException: when there is an error parsing the response

        :return:
            None if no match, or a dict with the following keys:
              `name`
              `description`
              `homepage` - URL of the homepage
              `author`
              `readme` - URL of the readme
              `issues` - URL of bug tracker
              `donate` - URL of a donate page
        """

        user_repo, branch = self._user_repo_branch(url)
        if not user_repo:
            return user_repo
        api_url = self._make_api_url(user_repo)
        info = self.fetch_json(api_url)
        issues_url = u'https://bitbucket.org/%s/issues' % user_repo
        return {
            'name': info['name'],
            'description': info['description'] or 'No description provided',
            'homepage': info['website'] or url,
            'author': info['owner'],
            'donate': u'https://www.gittip.com/on/bitbucket/%s/' % info['owner'],
            'readme': self._readme_url(user_repo, branch),
            'issues': issues_url if info['has_issues'] else None
        }

    def _commit_info(self, url):
        """
        Fetches info about the latest commit to a repository

        :param url:
            The URL to the repository, in one of the forms:
              https://bitbucket.org/{user}/{repo}
              https://bitbucket.org/{user}/{repo}/src/{branch}
              https://bitbucket.org/{user}/{repo}/#tags
            If the last option, grabs the info from the newest
            tag that is a valid semver version.

        :raises:
            DownloaderException: when there is an error downloading
            ClientException: when there is an error parsing the response

        :return:
            None if no match, False if no commit, or a dict with the following keys:
              `user_repo` - the user/repo name
              `timestamp` - the ISO-8601 UTC timestamp string
              `commit` - the branch or tag name
              `version` - the extracted version number
        """

        tags_match = re.match('https?://bitbucket.org/([^/]+/[^#/]+)/?#tags$', url)
        version = None
        if tags_match:
            user_repo = tags_match.group(1)
            tags_url = self._make_api_url(user_repo, '/tags')
            # The /tags endpoint returns a dict keyed by tag name
            tags_list = self.fetch_json(tags_url)
            tags = version_filter(tags_list.keys(), self.settings.get('install_prereleases'))
            tags = version_sort(tags, reverse=True)
            if not tags:
                return False
            commit = tags[0]
            # Strip a leading "v" so the tag "v1.2.3" yields version "1.2.3"
            version = re.sub('^v', '', commit)
        else:
            user_repo, commit = self._user_repo_branch(url)
            if not user_repo:
                return user_repo
        changeset_url = self._make_api_url(user_repo, '/changesets/%s' % commit)
        commit_info = self.fetch_json(changeset_url)
        # Keep only "YYYY-MM-DD HH:MM:SS" from the changeset timestamp
        commit_date = commit_info['timestamp'][0:19]
        if not version:
            # Branch-based URLs get a pseudo-version derived from the commit
            # date, e.g. "2013.09.16.22.35.46"
            version = re.sub('[\-: ]', '.', commit_date)
        return {
            'user_repo': user_repo,
            'timestamp': commit_date,
            'commit': commit,
            'version': version
        }

    def _main_branch_name(self, user_repo):
        """
        Fetch the name of the default branch

        :param user_repo:
            The user/repo name to get the main branch for

        :raises:
            DownloaderException: when there is an error downloading
            ClientException: when there is an error parsing the response

        :return:
            The name of the main branch - `master` or `default`
        """

        main_branch_url = self._make_api_url(user_repo, '/main-branch')
        # prefer_cached=True - the main branch name rarely changes
        main_branch_info = self.fetch_json(main_branch_url, True)
        return main_branch_info['name']

    def _make_api_url(self, user_repo, suffix=''):
        """
        Generate a URL for the BitBucket API

        :param user_repo:
            The user/repo of the repository

        :param suffix:
            The extra API path info to add to the URL

        :return:
            The API URL
        """

        return 'https://api.bitbucket.org/1.0/repositories/%s%s' % (user_repo, suffix)

    def _readme_url(self, user_repo, branch, prefer_cached=False):
        """
        Parse the root directory listing for the repo and return the URL
        to any file that looks like a readme

        :param user_repo:
            The user/repo string

        :param branch:
            The branch to fetch the readme from

        :param prefer_cached:
            If a cached directory listing should be used instead of a new HTTP request

        :raises:
            DownloaderException: when there is an error downloading
            ClientException: when there is an error parsing the response

        :return:
            The URL to the readme file, or None
        """

        listing_url = self._make_api_url(user_repo, '/src/%s/' % branch)
        root_dir_info = self.fetch_json(listing_url, prefer_cached)
        # Compare case-insensitively against the known readme filenames
        for entry in root_dir_info['files']:
            if entry['path'].lower() in _readme_filenames:
                return 'https://bitbucket.org/%s/raw/%s/%s' % (user_repo,
                    branch, entry['path'])
        return None

    def _user_repo_branch(self, url):
        """
        Extract the username/repo and branch name from the URL

        :param url:
            The URL to extract the info from, in one of the forms:
              https://bitbucket.org/{user}/{repo}
              https://bitbucket.org/{user}/{repo}/src/{branch}

        :raises:
            DownloaderException: when there is an error downloading
            ClientException: when there is an error parsing the response

        :return:
            A tuple of (user/repo, branch name) or (None, None) if not matching
        """

        repo_match = re.match('https?://bitbucket.org/([^/]+/[^/]+)/?$', url)
        branch_match = re.match('https?://bitbucket.org/([^/]+/[^/]+)/src/([^/]+)/?$', url)
        if repo_match:
            user_repo = repo_match.group(1)
            # No branch in the URL, so ask the API for the default one
            branch = self._main_branch_name(user_repo)
        elif branch_match:
            user_repo = branch_match.group(1)
            branch = branch_match.group(2)
        else:
            return (None, None)
        return (user_repo, branch)

View File

@@ -0,0 +1,5 @@
class ClientException(Exception):

    """If a client could not fetch information"""

    def __str__(self):
        # The first constructor argument holds the human-readable message
        message = self.args[0]
        return message

View File

@@ -0,0 +1,292 @@
import re
try:
# Python 3
from urllib.parse import urlencode, quote
except (ImportError):
# Python 2
from urllib import urlencode, quote
from ..versions import version_sort, version_filter
from .json_api_client import JSONApiClient
from ..downloaders.downloader_exception import DownloaderException
class GitHubClient(JSONApiClient):

    """
    Fetches repository, tag and readme info via the GitHub v3 REST API.
    fetch()/fetch_json() and self.settings are inherited from JSONApiClient.
    """

    def download_info(self, url):
        """
        Retrieve information about downloading a package

        :param url:
            The URL of the repository, in one of the forms:
              https://github.com/{user}/{repo}
              https://github.com/{user}/{repo}/tree/{branch}
              https://github.com/{user}/{repo}/tags
            If the last option, grabs the info from the newest
            tag that is a valid semver version.

        :raises:
            DownloaderException: when there is an error downloading
            ClientException: when there is an error parsing the response

        :return:
            None if no match, False if no commit, or a dict with the following keys:
              `version` - the version number of the download
              `url` - the download URL of a zip file of the package
              `date` - the ISO-8601 timestamp string when the version was published
        """

        commit_info = self._commit_info(url)
        # Propagate None (URL not recognized) or False (no usable commit)
        if not commit_info:
            return commit_info
        return {
            'version': commit_info['version'],
            # We specifically use codeload.github.com here because the download
            # URLs all redirect there, and some of the downloaders don't follow
            # HTTP redirect headers
            'url': 'https://codeload.github.com/%s/zip/%s' % (commit_info['user_repo'], quote(commit_info['commit'])),
            'date': commit_info['timestamp']
        }

    def repo_info(self, url):
        """
        Retrieve general information about a repository

        :param url:
            The URL to the repository, in one of the forms:
              https://github.com/{user}/{repo}
              https://github.com/{user}/{repo}/tree/{branch}

        :raises:
            DownloaderException: when there is an error downloading
            ClientException: when there is an error parsing the response

        :return:
            None if no match, or a dict with the following keys:
              `name`
              `description`
              `homepage` - URL of the homepage
              `author`
              `readme` - URL of the readme
              `issues` - URL of bug tracker
              `donate` - URL of a donate page
        """

        user_repo, branch = self._user_repo_branch(url)
        if not user_repo:
            return user_repo
        api_url = self._make_api_url(user_repo)
        info = self.fetch_json(api_url)
        output = self._extract_repo_info(info)
        output['readme'] = None
        readme_info = self._readme_info(user_repo, branch)
        if not readme_info:
            return output
        output['readme'] = 'https://raw.github.com/%s/%s/%s' % (user_repo,
            branch, readme_info['path'])
        return output

    def user_info(self, url):
        """
        Retrieve general information about all repositories that are
        part of a user/organization.

        :param url:
            The URL to the user/organization, in the following form:
              https://github.com/{user}

        :raises:
            DownloaderException: when there is an error downloading
            ClientException: when there is an error parsing the response

        :return:
            None if no match, or a list of dicts with the following keys:
              `name`
              `description`
              `homepage` - URL of the homepage
              `author`
              `readme` - URL of the readme
              `issues` - URL of bug tracker
              `donate` - URL of a donate page
        """

        user_match = re.match('https?://github.com/([^/]+)/?$', url)
        if user_match == None:
            return None
        user = user_match.group(1)
        # NOTE(review): _make_api_url(user) yields
        # https://api.github.com/repos/{user}, but GitHub's endpoint for
        # listing a user's repositories is /users/{user}/repos - verify
        # this request actually succeeds
        api_url = self._make_api_url(user)
        repos_info = self.fetch_json(api_url)
        output = []
        for info in repos_info:
            output.append(self._extract_repo_info(info))
        return output

    def _commit_info(self, url):
        """
        Fetches info about the latest commit to a repository

        :param url:
            The URL to the repository, in one of the forms:
              https://github.com/{user}/{repo}
              https://github.com/{user}/{repo}/tree/{branch}
              https://github.com/{user}/{repo}/tags
            If the last option, grabs the info from the newest
            tag that is a valid semver version.

        :raises:
            DownloaderException: when there is an error downloading
            ClientException: when there is an error parsing the response

        :return:
            None if no match, False if no commit, or a dict with the following keys:
              `user_repo` - the user/repo name
              `timestamp` - the ISO-8601 UTC timestamp string
              `commit` - the branch or tag name
              `version` - the extracted version number
        """

        tags_match = re.match('https?://github.com/([^/]+/[^/]+)/tags/?$', url)
        version = None
        if tags_match:
            user_repo = tags_match.group(1)
            tags_url = self._make_api_url(user_repo, '/tags')
            tags_list = self.fetch_json(tags_url)
            tags = [tag['name'] for tag in tags_list]
            tags = version_filter(tags, self.settings.get('install_prereleases'))
            tags = version_sort(tags, reverse=True)
            if not tags:
                return False
            commit = tags[0]
            # Strip a leading "v" so the tag "v1.2.3" yields version "1.2.3"
            version = re.sub('^v', '', commit)
        else:
            user_repo, commit = self._user_repo_branch(url)
            if not user_repo:
                return user_repo
        # per_page=1 - only the most recent commit on the branch/tag is needed
        query_string = urlencode({'sha': commit, 'per_page': 1})
        commit_url = self._make_api_url(user_repo, '/commits?%s' % query_string)
        commit_info = self.fetch_json(commit_url)
        # Convert "YYYY-MM-DDTHH:MM:SS..." to "YYYY-MM-DD HH:MM:SS"
        commit_date = commit_info[0]['commit']['committer']['date'][0:19].replace('T', ' ')
        if not version:
            # Branch-based URLs get a pseudo-version derived from the commit
            # date, e.g. "2013.09.16.22.35.46"
            version = re.sub('[\-: ]', '.', commit_date)
        return {
            'user_repo': user_repo,
            'timestamp': commit_date,
            'commit': commit,
            'version': version
        }

    def _extract_repo_info(self, result):
        """
        Extracts information about a repository from the API result

        :param result:
            A dict representing the data returned from the GitHub API

        :return:
            A dict with the following keys:
              `name`
              `description`
              `homepage` - URL of the homepage
              `author`
              `issues` - URL of bug tracker
              `donate` - URL of a donate page
        """

        issues_url = u'https://github.com/%s/%s/issues' % (result['owner']['login'], result['name'])
        return {
            'name': result['name'],
            'description': result['description'] or 'No description provided',
            'homepage': result['homepage'] or result['html_url'],
            'author': result['owner']['login'],
            'issues': issues_url if result['has_issues'] else None,
            'donate': u'https://www.gittip.com/on/github/%s/' % result['owner']['login']
        }

    def _make_api_url(self, user_repo, suffix=''):
        """
        Generate a URL for the GitHub API

        :param user_repo:
            The user/repo of the repository

        :param suffix:
            The extra API path info to add to the URL

        :return:
            The API URL
        """

        return 'https://api.github.com/repos/%s%s' % (user_repo, suffix)

    def _readme_info(self, user_repo, branch, prefer_cached=False):
        """
        Fetches the raw GitHub API information about a readme

        :param user_repo:
            The user/repo of the repository

        :param branch:
            The branch to pull the readme from

        :param prefer_cached:
            If a cached version of the info should be returned instead of making a new HTTP request

        :raises:
            DownloaderException: when there is an error downloading
            ClientException: when there is an error parsing the response

        :return:
            A dict containing all of the info from the GitHub API, or None if no readme exists
        """

        query_string = urlencode({'ref': branch})
        readme_url = self._make_api_url(user_repo, '/readme?%s' % query_string)
        try:
            return self.fetch_json(readme_url, prefer_cached)
        except (DownloaderException) as e:
            # A 404 simply means the repo has no readme - not an error
            if str(e).find('HTTP error 404') != -1:
                return None
            raise

    def _user_repo_branch(self, url):
        """
        Extract the username/repo and branch name from the URL

        :param url:
            The URL to extract the info from, in one of the forms:
              https://github.com/{user}/{repo}
              https://github.com/{user}/{repo}/tree/{branch}

        :return:
            A tuple of (user/repo, branch name) or (None, None) if no match
        """

        # Unlike BitBucket, GitHub URLs without a branch default to "master"
        # with no extra API request
        branch = 'master'
        branch_match = re.match('https?://github.com/[^/]+/[^/]+/tree/([^/]+)/?$', url)
        if branch_match != None:
            branch = branch_match.group(1)
        repo_match = re.match('https?://github.com/([^/]+/[^/]+)($|/.*$)', url)
        if repo_match == None:
            return (None, None)
        user_repo = repo_match.group(1)
        return (user_repo, branch)

View File

@@ -0,0 +1,64 @@
import json
try:
# Python 3
from urllib.parse import urlencode, urlparse
except (ImportError):
# Python 2
from urllib import urlencode
from urlparse import urlparse
from .client_exception import ClientException
from ..download_manager import downloader
class JSONApiClient():

    """
    Shared base for the repository API clients - fetches URLs through the
    download manager and decodes JSON responses.
    """

    def __init__(self, settings):
        self.settings = settings

    def fetch(self, url, prefer_cached=False):
        """
        Retrieves the contents of a URL

        :param url:
            The URL to download the content from

        :param prefer_cached:
            If a cached copy of the content is preferred

        :return: The bytes/string
        """

        # If there are extra params for the domain name, add them
        per_domain = self.settings.get('query_string_params')
        host = urlparse(url).netloc
        if per_domain and host in per_domain:
            separator = '&%s' if '?' in url else '?%s'
            url += separator % urlencode(per_domain[host])

        with downloader(url, self.settings) as manager:
            return manager.fetch(url, 'Error downloading repository.',
                prefer_cached)

    def fetch_json(self, url, prefer_cached=False):
        """
        Retrieves and parses the JSON from a URL

        :param url:
            The URL to download the JSON from

        :param prefer_cached:
            If a cached copy of the JSON is preferred

        :return: A dict or list from the JSON
        """

        raw = self.fetch(url, prefer_cached)
        try:
            return json.loads(raw.decode('utf-8'))
        except (ValueError):
            raise ClientException(u'Error parsing JSON from URL %s.' % url)

View File

@@ -0,0 +1,83 @@
import re
import os
import base64
try:
# Python 3
from urllib.parse import urlencode
except (ImportError):
# Python 2
from urllib import urlencode
from .json_api_client import JSONApiClient
from ..downloaders.downloader_exception import DownloaderException
# Used to map file extensions to formats. Keys are lowercase extensions
# including the leading dot; anything else is treated as plain text.
_readme_formats = {
    '.md': 'markdown',
    '.mkd': 'markdown',
    '.mdown': 'markdown',
    '.markdown': 'markdown',
    '.textile': 'textile',
    '.creole': 'creole',
    '.rst': 'rst'
}
class ReadmeClient(JSONApiClient):

    """Downloads readme files and determines their markup format."""

    def readme_info(self, url):
        """
        Retrieve the readme and info about it

        :param url:
            The URL of the readme file

        :raises:
            DownloaderException: if there is an error downloading the readme
            ClientException: if there is an error parsing the response

        :return:
            A dict with the following keys:
              `filename`
              `format` - `markdown`, `textile`, `creole`, `rst` or `txt`
              `contents` - contents of the readme as str/unicode
        """

        contents = None

        # Try to grab the contents of a GitHub-based readme by grabbing the cached
        # content of the readme API call
        github_match = re.match('https://raw.github.com/([^/]+/[^/]+)/([^/]+)/readme(\.(md|mkd|mdown|markdown|textile|creole|rst|txt))?$', url, re.I)
        if github_match:
            user_repo = github_match.group(1)
            branch = github_match.group(2)
            query_string = urlencode({'ref': branch})
            readme_json_url = 'https://api.github.com/repos/%s/readme?%s' % (user_repo, query_string)
            try:
                info = self.fetch_json(readme_json_url, prefer_cached=True)
                contents = base64.b64decode(info['content'])
            except (ValueError):
                # Fall through and download the raw URL instead
                pass

        if not contents:
            contents = self.fetch(url)

        _, ext = os.path.splitext(url)
        format = _readme_formats.get(ext.lower(), 'txt')

        try:
            contents = contents.decode('utf-8')
        except (UnicodeDecodeError):
            # Fall back to a lossy Windows-1252 decode
            contents = contents.decode('cp1252', errors='replace')

        return {
            'filename': os.path.basename(url),
            'format': format,
            'contents': contents
        }

View File

@@ -0,0 +1,167 @@
import os
import subprocess
import re
if os.name == 'nt':
from ctypes import windll, create_unicode_buffer
from .console_write import console_write
from .unicode import unicode_from_os
from .show_error import show_error
# Alias for the native text type so isinstance() checks work on both
# major Python versions
try:
    # Python 2
    str_cls = unicode
except (NameError):
    # Python 3
    str_cls = str
def create_cmd(args, basename_binary=False):
    """
    Takes an array of strings to be passed to subprocess.Popen and creates
    a string that can be pasted into a terminal

    :param args:
        The array containing the binary name/path and all arguments

    :param basename_binary:
        If only the basename of the binary should be displayed instead of
        the full path

    :return:
        The command string
    """

    if basename_binary:
        # Work on a copy so the caller's args list is not mutated
        args = [os.path.basename(args[0])] + list(args[1:])

    if os.name == 'nt':
        return subprocess.list2cmdline(args)
    else:
        escaped_args = []
        for arg in args:
            # Quote anything outside this shell-safe character set, escaping
            # embedded single quotes with the '\'' idiom
            if re.search(r'^[a-zA-Z0-9/_^\-\.:=]+$', arg) == None:
                arg = u"'" + arg.replace(u"'", u"'\\''") + u"'"
            escaped_args.append(arg)
        return u' '.join(escaped_args)
class Cli(object):

    """
    Base class for running command line apps

    :param binary:
        The full filesystem path to the executable for the version control
        system. May be set to None to allow the code to try and find it.
    """

    # Human-readable tool name (e.g. "git", "hg") - set by subclasses and
    # used for error messages and the "%s_binary" setting name
    cli_name = None

    def __init__(self, binary, debug):
        self.binary = binary
        self.debug = debug

    def execute(self, args, cwd, input=None):
        """
        Creates a subprocess with the executable/args

        :param args:
            A list of the executable path and all arguments to it

        :param cwd:
            The directory in which to run the executable

        :param input:
            The input text to send to the program

        :return: A string of the executable output, or False if the
            subprocess could not be started
        """

        startupinfo = None
        if os.name == 'nt':
            # Prevent a console window from flashing up on Windows
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
            # Make sure the cwd is ascii
            try:
                cwd.encode('ascii')
            except UnicodeEncodeError:
                # Fall back to the DOS 8.3 short path, which is ASCII-safe
                # - presumably works around subprocess unicode cwd issues;
                # TODO confirm this is still necessary
                buf = create_unicode_buffer(512)
                if windll.kernel32.GetShortPathNameW(cwd, buf, len(buf)):
                    cwd = buf.value
        if self.debug:
            console_write(u"Trying to execute command %s" % create_cmd(args), True)
        try:
            # stderr is merged into stdout so callers get a single stream
            proc = subprocess.Popen(args, stdin=subprocess.PIPE,
                stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                startupinfo=startupinfo, cwd=cwd)
            if input and isinstance(input, str_cls):
                input = input.encode('utf-8')
            output, _ = proc.communicate(input)
            output = output.decode('utf-8')
            # Normalize line endings and strip trailing whitespace/newlines
            output = output.replace('\r\n', '\n').rstrip(' \n\r')
            return output
        except (OSError) as e:
            cmd = create_cmd(args)
            error = unicode_from_os(e)
            message = u"Error executing: %s\n%s\n\nTry checking your \"%s_binary\" setting?" % (cmd, error, self.cli_name)
            show_error(message)
            return False

    def find_binary(self, name):
        """
        Locates the executable by looking in the PATH and well-known directories

        :param name:
            The string filename of the executable

        :return: The filesystem path to the executable, or None if not found
        """

        # An explicit "%s_binary" setting always wins over auto-detection
        if self.binary:
            if self.debug:
                error_string = u"Using \"%s_binary\" from settings \"%s\"" % (
                    self.cli_name, self.binary)
                console_write(error_string, True)
            return self.binary
        # Try the path first
        for dir_ in os.environ['PATH'].split(os.pathsep):
            path = os.path.join(dir_, name)
            if os.path.exists(path):
                if self.debug:
                    console_write(u"Found %s at \"%s\"" % (self.cli_name, path), True)
                return path
        # This is left in for backwards compatibility and for windows
        # users who may have the binary, albeit in a common dir that may
        # not be part of the PATH
        if os.name == 'nt':
            dirs = ['C:\\Program Files\\Git\\bin',
                'C:\\Program Files (x86)\\Git\\bin',
                'C:\\Program Files\\TortoiseGit\\bin',
                'C:\\Program Files\\Mercurial',
                'C:\\Program Files (x86)\\Mercurial',
                'C:\\Program Files (x86)\\TortoiseHg',
                'C:\\Program Files\\TortoiseHg',
                'C:\\cygwin\\bin']
        else:
            # ST seems to launch with a minimal set of environmental variables
            # on OS X, so we add some common paths for it
            dirs = ['/usr/local/git/bin', '/usr/local/bin']
        for dir_ in dirs:
            path = os.path.join(dir_, name)
            if os.path.exists(path):
                if self.debug:
                    console_write(u"Found %s at \"%s\"" % (self.cli_name, path), True)
                return path
        if self.debug:
            console_write(u"Could not find %s on your machine" % self.cli_name, True)
        return None

View File

@@ -0,0 +1,39 @@
import os
from .add_channel_command import AddChannelCommand
from .add_repository_command import AddRepositoryCommand
from .create_binary_package_command import CreateBinaryPackageCommand
from .create_package_command import CreatePackageCommand
from .disable_package_command import DisablePackageCommand
from .discover_packages_command import DiscoverPackagesCommand
from .enable_package_command import EnablePackageCommand
from .grab_certs_command import GrabCertsCommand
from .install_package_command import InstallPackageCommand
from .list_packages_command import ListPackagesCommand
from .remove_package_command import RemovePackageCommand
from .upgrade_all_packages_command import UpgradeAllPackagesCommand
from .upgrade_package_command import UpgradePackageCommand
from .package_message_command import PackageMessageCommand
# Names exported via `from package_control.commands import *`
__all__ = [
    'AddChannelCommand',
    'AddRepositoryCommand',
    'CreateBinaryPackageCommand',
    'CreatePackageCommand',
    'DisablePackageCommand',
    'DiscoverPackagesCommand',
    'EnablePackageCommand',
    'InstallPackageCommand',
    'ListPackagesCommand',
    'RemovePackageCommand',
    'UpgradeAllPackagesCommand',
    'UpgradePackageCommand',
    'PackageMessageCommand'
]

# Windows uses the wininet downloader, so it doesn't use the CA cert bundle
# and thus does not need the ability to grab the CA certs. Additionally,
# there is no openssl.exe on Windows.
if os.name != 'nt':
    __all__.append('GrabCertsCommand')

View File

@@ -0,0 +1,46 @@
import re
import sublime
import sublime_plugin
from ..show_error import show_error
class AddChannelCommand(sublime_plugin.WindowCommand):

    """
    A command to add a new channel (list of repositories) to the user's machine
    """

    def run(self):
        self.window.show_input_panel('Channel JSON URL', '', self.on_done,
            self.on_change, self.on_cancel)

    def on_done(self, input):
        """
        Input panel handler - adds the provided URL as a channel

        :param input:
            A string of the URL to the new channel
        """

        input = input.strip()

        # Only http(s) URLs can be fetched by the downloaders
        if not re.match('https?://', input, re.I):
            show_error(u"Unable to add the channel \"%s\" since it does not appear to be served via HTTP (http:// or https://)." % input)
            return

        settings = sublime.load_settings('Package Control.sublime-settings')
        channels = settings.get('channels', []) or []
        channels.append(input)
        settings.set('channels', channels)
        sublime.save_settings('Package Control.sublime-settings')
        sublime.status_message('Channel %s successfully added' % input)

    def on_change(self, input):
        pass

    def on_cancel(self):
        pass

View File

@@ -0,0 +1,46 @@
import re
import sublime
import sublime_plugin
from ..show_error import show_error
class AddRepositoryCommand(sublime_plugin.WindowCommand):

    """
    A command to add a new repository to the user's machine
    """

    def run(self):
        caption = 'GitHub or BitBucket Web URL, or Custom JSON Repository URL'
        self.window.show_input_panel(caption, '', self.on_done,
            self.on_change, self.on_cancel)

    def on_done(self, input):
        """
        Input panel handler - adds the provided URL as a repository

        :param input:
            A string of the URL to the new repository
        """

        input = input.strip()

        # Only http(s) URLs can be fetched by the downloaders
        if not re.match('https?://', input, re.I):
            show_error(u"Unable to add the repository \"%s\" since it does not appear to be served via HTTP (http:// or https://)." % input)
            return

        settings = sublime.load_settings('Package Control.sublime-settings')
        repositories = settings.get('repositories', []) or []
        repositories.append(input)
        settings.set('repositories', repositories)
        sublime.save_settings('Package Control.sublime-settings')
        sublime.status_message('Repository %s successfully added' % input)

    def on_change(self, input):
        pass

    def on_cancel(self):
        pass

View File

@@ -0,0 +1,35 @@
import sublime_plugin
from ..package_creator import PackageCreator
class CreateBinaryPackageCommand(sublime_plugin.WindowCommand, PackageCreator):

    """
    Command to create a binary .sublime-package file. Binary packages in
    general exclude the .py source files and instead include the .pyc files.
    Actual included and excluded files are controlled by settings.
    """

    def run(self):
        self.show_panel()

    def on_done(self, picked):
        """
        Quick panel user selection handler - processes the user package
        selection and create the package file

        :param picked:
            An integer of the 0-based package name index from the presented
            list. -1 means the user cancelled.
        """

        if picked == -1:
            return
        name = self.packages[picked]
        destination = self.get_package_destination()

        created = self.manager.create_package(name, destination,
            binary_package=True)
        if created:
            # Reveal the generated file in the OS file manager
            self.window.run_command('open_dir', {
                "dir": destination,
                "file": name + '.sublime-package'
            })

View File

@@ -0,0 +1,32 @@
import sublime_plugin
from ..package_creator import PackageCreator
class CreatePackageCommand(sublime_plugin.WindowCommand, PackageCreator):

    """
    Command to create a regular .sublime-package file
    """

    def run(self):
        self.show_panel()

    def on_done(self, picked):
        """
        Quick panel user selection handler - processes the user package
        selection and create the package file

        :param picked:
            An integer of the 0-based package name index from the presented
            list. -1 means the user cancelled.
        """

        if picked == -1:
            return
        name = self.packages[picked]
        destination = self.get_package_destination()

        if self.manager.create_package(name, destination):
            # Reveal the generated file in the OS file manager
            self.window.run_command('open_dir', {
                "dir": destination,
                "file": name + '.sublime-package'
            })

View File

@@ -0,0 +1,48 @@
import sublime
import sublime_plugin
from ..show_error import show_error
from ..package_manager import PackageManager
from ..preferences_filename import preferences_filename
class DisablePackageCommand(sublime_plugin.WindowCommand):

    """
    A command that adds a package to Sublime Text's ignored packages list
    """

    def run(self):
        manager = PackageManager()
        installed = manager.list_all_packages()
        self.settings = sublime.load_settings(preferences_filename())
        ignored = self.settings.get('ignored_packages') or []

        # Offer only packages that are not already disabled
        self.package_list = sorted(set(installed) - set(ignored))
        if not self.package_list:
            show_error('There are no enabled packages to disable.')
            return
        self.window.show_quick_panel(self.package_list, self.on_done)

    def on_done(self, picked):
        """
        Quick panel user selection handler - disables the selected package

        :param picked:
            An integer of the 0-based package name index from the presented
            list. -1 means the user cancelled.
        """

        if picked == -1:
            return
        package = self.package_list[picked]

        ignored = self.settings.get('ignored_packages') or []
        ignored.append(package)
        self.settings.set('ignored_packages', ignored)
        sublime.save_settings(preferences_filename())

        message = ('Package %s successfully added to list of '
            'disabled packages - restarting Sublime Text may be required') % package
        sublime.status_message(message)

View File

@@ -0,0 +1,11 @@
import sublime_plugin
class DiscoverPackagesCommand(sublime_plugin.WindowCommand):
    """
    A command that opens the community package list webpage
    """

    def run(self):
        url = 'http://wbond.net/sublime_packages/community'
        self.window.run_command('open_url', {'url': url})

View File

@@ -0,0 +1,40 @@
import sublime
import sublime_plugin
from ..show_error import show_error
from ..preferences_filename import preferences_filename
class EnablePackageCommand(sublime_plugin.WindowCommand):
    """
    A command that removes a package from Sublime Text's ignored packages list
    """

    def run(self):
        self.settings = sublime.load_settings(preferences_filename())
        # The setting may be absent (None) on a fresh install - fall back to
        # an empty list so sort() below does not raise an AttributeError
        self.disabled_packages = self.settings.get('ignored_packages') or []
        self.disabled_packages.sort()
        if not self.disabled_packages:
            show_error('There are no disabled packages to enable.')
            return
        self.window.show_quick_panel(self.disabled_packages, self.on_done)

    def on_done(self, picked):
        """
        Quick panel user selection handler - enables the selected package

        :param picked:
            An integer of the 0-based package name index from the presented
            list. -1 means the user cancelled.
        """
        if picked == -1:
            return
        package = self.disabled_packages[picked]

        # Guard against None here too so set() never sees a non-iterable
        ignored = self.settings.get('ignored_packages') or []
        self.settings.set('ignored_packages',
            list(set(ignored) - set([package])))
        sublime.save_settings(preferences_filename())
        sublime.status_message(('Package %s successfully removed from list ' +
            'of disabled packages - restarting Sublime Text may be required') %
            package)

View File

@@ -0,0 +1,69 @@
import os
import re
import sublime
from ..package_manager import PackageManager
class ExistingPackagesCommand():
    """
    Allows listing installed packages and their current version
    """

    def __init__(self):
        self.manager = PackageManager()

    def make_package_list(self, action=''):
        """
        Returns a list of installed packages suitable for displaying in the
        quick panel.

        :param action:
            An action to display at the beginning of the third element of the
            list returned for each package

        :return:
            A list of lists, each containing three strings:
              0 - package name
              1 - package description
              2 - [action] installed version; package url
        """
        prefix = action + ' ' if action else ''

        entries = []
        # Sort case-insensitively so display order matches the quick panel
        for name in sorted(self.manager.list_packages(), key=lambda s: s.lower()):
            metadata = self.manager.get_metadata(name)
            package_dir = os.path.join(sublime.packages_path(), name)

            description = metadata.get('description') or 'No description provided'

            version = metadata.get('version')
            if not version and os.path.exists(os.path.join(package_dir, '.git')):
                installed_version = 'git repository'
            elif not version and os.path.exists(os.path.join(package_dir, '.hg')):
                installed_version = 'hg repository'
            elif version:
                installed_version = 'v' + version
            else:
                installed_version = 'unknown version'

            url = metadata.get('url')
            url = '; ' + re.sub('^https?://', '', url) if url else ''

            entries.append([name, description, prefix + installed_version + url])
        return entries

View File

@@ -0,0 +1,109 @@
import os
import re
import socket
import threading
try:
# Python 3
from urllib.parse import urlparse
except (ImportError):
# Python 2
from urlparse import urlparse
import sublime
import sublime_plugin
from ..show_error import show_error
from ..open_compat import open_compat
from ..ca_certs import find_root_ca_cert
from ..thread_progress import ThreadProgress
from ..package_manager import PackageManager
class GrabCertsCommand(sublime_plugin.WindowCommand):
    """
    A command that extracts the CA certs for a domain name, allowing a user to
    fetch packages from sources other than those used by the default channel
    """

    def run(self):
        panel = self.window.show_input_panel('Domain Name', 'example.com', self.on_done,
            None, None)
        # Pre-select the placeholder text so typing replaces it
        panel.sel().add(sublime.Region(0, panel.size()))

    def on_done(self, domain):
        """
        Input panel handler - grabs the CA certs for the domain name presented

        :param domain:
            A string of the domain name
        """
        # Imported locally: this module previously referenced unicode_from_os
        # without importing it, causing a NameError whenever the DNS lookup
        # below failed
        from ..unicode import unicode_from_os

        domain = domain.strip()

        # Make sure the user enters something
        if domain == '':
            show_error(u"Please enter a domain name, or press cancel")
            self.run()
            return

        # If the user inputs a URL, extract the domain name
        if domain.find('/') != -1:
            parts = urlparse(domain)
            if parts.hostname:
                domain = parts.hostname

        # Allow _ even though it technically isn't valid, this is really
        # just to try and prevent people from typing in gibberish
        if re.match('^(?:[a-zA-Z0-9]+(?:[\-_]*[a-zA-Z0-9]+)*\.)+[a-zA-Z]{2,6}$', domain, re.I) == None:
            show_error(u"Unable to get the CA certs for \"%s\" since it does not appear to be a validly formed domain name" % domain)
            return

        # Make sure it is a real domain
        try:
            socket.gethostbyname(domain)
        except (socket.gaierror) as e:
            error = unicode_from_os(e)
            show_error(u"Error trying to lookup \"%s\":\n\n%s" % (domain, error))
            return

        manager = PackageManager()

        thread = GrabCertsThread(manager.settings, domain)
        thread.start()
        ThreadProgress(thread, 'Grabbing CA certs for %s' % domain,
            'CA certs for %s added to settings' % domain)
class GrabCertsThread(threading.Thread):
    """
    A thread to run openssl so that the Sublime Text UI does not become frozen
    """

    def __init__(self, settings, domain):
        self.settings = settings
        self.domain = domain
        threading.Thread.__init__(self)

    def run(self):
        cert, cert_hash = find_root_ca_cert(self.settings, self.domain)

        certs_dir = os.path.join(sublime.packages_path(), 'User',
            'Package Control.ca-certs')
        if not os.path.exists(certs_dir):
            os.mkdir(certs_dir)

        cert_path = os.path.join(certs_dir, self.domain + '-ca.crt')
        with open_compat(cert_path, 'w') as fobj:
            fobj.write(cert)

        # Settings may only be manipulated from the main UI thread, so the
        # save is scheduled via set_timeout
        def save_certs():
            settings = sublime.load_settings('Package Control.sublime-settings')
            certs = settings.get('certs', {}) or {}
            certs[self.domain] = [cert_hash, cert_path]
            settings.set('certs', certs)
            sublime.save_settings('Package Control.sublime-settings')
        sublime.set_timeout(save_certs, 10)

View File

@@ -0,0 +1,50 @@
import threading
import sublime
import sublime_plugin
from ..show_error import show_error
from ..package_installer import PackageInstaller
from ..thread_progress import ThreadProgress
class InstallPackageCommand(sublime_plugin.WindowCommand):
    """
    A command that presents the list of available packages and allows the
    user to pick one to install.
    """

    def run(self):
        # All repository traffic happens off the UI thread
        worker = InstallPackageThread(self.window)
        worker.start()
        ThreadProgress(worker, 'Loading repositories', '')
class InstallPackageThread(threading.Thread, PackageInstaller):
    """
    A thread to run the action of retrieving available packages in. Uses the
    default PackageInstaller.on_done quick panel handler.
    """

    def __init__(self, window):
        """
        :param window:
            An instance of :class:`sublime.Window` that represents the Sublime
            Text window to show the available package list in.
        """
        self.window = window
        self.completion_type = 'installed'
        threading.Thread.__init__(self)
        PackageInstaller.__init__(self)

    def run(self):
        actions = ['upgrade', 'downgrade', 'reinstall', 'pull', 'none']
        self.package_list = self.make_package_list(actions)

        def show_quick_panel():
            if not self.package_list:
                show_error('There are no packages available for installation')
                return
            self.window.show_quick_panel(self.package_list, self.on_done)

        # The quick panel must be opened from the main UI thread
        sublime.set_timeout(show_quick_panel, 10)

View File

@@ -0,0 +1,63 @@
import threading
import os
import sublime
import sublime_plugin
from ..show_error import show_error
from .existing_packages_command import ExistingPackagesCommand
class ListPackagesCommand(sublime_plugin.WindowCommand):
    """
    A command that shows a list of all installed packages in the quick panel
    """

    def run(self):
        # Listing packages reads the filesystem, so it runs on a thread
        worker = ListPackagesThread(self.window)
        worker.start()
class ListPackagesThread(threading.Thread, ExistingPackagesCommand):
    """
    A thread to prevent the listing of existing packages from freezing the UI
    """

    def __init__(self, window):
        """
        :param window:
            An instance of :class:`sublime.Window` that represents the Sublime
            Text window to show the list of installed packages in.
        """
        self.window = window
        threading.Thread.__init__(self)
        ExistingPackagesCommand.__init__(self)

    def run(self):
        self.package_list = self.make_package_list()

        def show_quick_panel():
            if not self.package_list:
                show_error('There are no packages to list')
                return
            self.window.show_quick_panel(self.package_list, self.on_done)

        # Quick panels may only be shown from the main UI thread
        sublime.set_timeout(show_quick_panel, 10)

    def on_done(self, picked):
        """
        Quick panel user selection handler - opens the homepage for any
        selected package in the user's browser

        :param picked:
            An integer of the 0-based package name index from the presented
            list. -1 means the user cancelled.
        """
        if picked == -1:
            return
        name = self.package_list[picked][0]

        def open_dir():
            package_dir = os.path.join(sublime.packages_path(), name)
            self.window.run_command('open_dir', {"dir": package_dir})
        sublime.set_timeout(open_dir, 10)

View File

@@ -0,0 +1,11 @@
import sublime
import sublime_plugin
class PackageMessageCommand(sublime_plugin.TextCommand):
    """
    A command to write a package message to the Package Control messaging buffer
    """

    def run(self, edit, string=''):
        # Always append at the very end of the buffer
        view = self.view
        view.insert(edit, view.size(), string)

View File

@@ -0,0 +1,88 @@
import threading
import sublime
import sublime_plugin
from ..show_error import show_error
from .existing_packages_command import ExistingPackagesCommand
from ..preferences_filename import preferences_filename
from ..thread_progress import ThreadProgress
class RemovePackageCommand(sublime_plugin.WindowCommand,
        ExistingPackagesCommand):
    """
    A command that presents a list of installed packages, allowing the user to
    select one to remove
    """

    def __init__(self, window):
        """
        :param window:
            An instance of :class:`sublime.Window` that represents the Sublime
            Text window to show the list of installed packages in.
        """
        self.window = window
        ExistingPackagesCommand.__init__(self)

    def run(self):
        self.package_list = self.make_package_list('remove')
        if not self.package_list:
            show_error('There are no packages that can be removed.')
            return
        self.window.show_quick_panel(self.package_list, self.on_done)

    def on_done(self, picked):
        """
        Quick panel user selection handler - deletes the selected package

        :param picked:
            An integer of the 0-based package name index from the presented
            list. -1 means the user cancelled.
        """
        if picked == -1:
            return
        package = self.package_list[picked][0]

        settings = sublime.load_settings(preferences_filename())
        ignored = settings.get('ignored_packages')
        if not ignored:
            ignored = []

        # Don't disable Package Control so it does not get stuck disabled
        if package != 'Package Control':
            if not package in ignored:
                ignored.append(package)
                settings.set('ignored_packages', ignored)
                sublime.save_settings(preferences_filename())
            # The list handed to the thread excludes the package being
            # removed, so the restored ignore list will not reference it
            # NOTE(review): nesting reconstructed from a whitespace-mangled
            # source - confirm save happens only when the package was newly
            # added to the ignore list
            ignored.remove(package)

        thread = RemovePackageThread(self.manager, package,
            ignored)
        thread.start()
        ThreadProgress(thread, 'Removing package %s' % package,
            'Package %s successfully removed' % package)
class RemovePackageThread(threading.Thread):
    """
    A thread to run the remove package operation in so that the Sublime Text
    UI does not become frozen
    """

    def __init__(self, manager, package, ignored):
        self.manager = manager
        self.package = package
        self.ignored = ignored
        threading.Thread.__init__(self)

    def run(self):
        self.result = self.manager.remove_package(self.package)

        # Restoring ignored_packages touches the settings API, which must be
        # done on the main UI thread
        def restore_ignored():
            settings = sublime.load_settings(preferences_filename())
            settings.set('ignored_packages', self.ignored)
            sublime.save_settings(preferences_filename())
        sublime.set_timeout(restore_ignored, 10)

View File

@@ -0,0 +1,77 @@
import time
import threading
import sublime
import sublime_plugin
from ..thread_progress import ThreadProgress
from ..package_installer import PackageInstaller, PackageInstallerThread
from ..package_renamer import PackageRenamer
class UpgradeAllPackagesCommand(sublime_plugin.WindowCommand):
    """
    A command to automatically upgrade all installed packages that are
    upgradable.
    """

    def run(self):
        renamer = PackageRenamer()
        renamer.load_settings()

        worker = UpgradeAllPackagesThread(self.window, renamer)
        worker.start()
        ThreadProgress(worker, 'Loading repositories', '')
class UpgradeAllPackagesThread(threading.Thread, PackageInstaller):
    """
    A thread to run the action of retrieving upgradable packages in.
    """

    def __init__(self, window, package_renamer):
        # window: the sublime.Window to show progress in
        # package_renamer: a PackageRenamer used to apply pending renames
        self.window = window
        self.package_renamer = package_renamer
        self.completion_type = 'upgraded'
        threading.Thread.__init__(self)
        PackageInstaller.__init__(self)

    def run(self):
        """
        Builds the list of upgradable packages, disables them all on the main
        thread, then upgrades each in the background, re-enabling as they
        complete
        """
        self.package_renamer.rename_packages(self)
        package_list = self.make_package_list(['install', 'reinstall', 'none'])

        # Filled in by disable_packages() on the main thread before
        # do_upgrades() runs
        disabled_packages = []

        def do_upgrades():
            # Pause so packages can be disabled
            time.sleep(0.5)

            # We use a function to generate the on-complete lambda because if
            # we don't, the lambda will bind to info at the current scope, and
            # thus use the last value of info from the loop
            def make_on_complete(name):
                return lambda: self.reenable_package(name)

            for info in package_list:
                if info[0] in disabled_packages:
                    on_complete = make_on_complete(info[0])
                else:
                    on_complete = None
                thread = PackageInstallerThread(self.manager, info[0],
                    on_complete)
                thread.start()
                ThreadProgress(thread, 'Upgrading package %s' % info[0],
                    'Package %s successfully %s' % (info[0],
                    self.completion_type))

        # Disabling a package means changing settings, which can only be done
        # in the main thread. We then create a new background thread so that
        # the upgrade process does not block the UI.
        def disable_packages():
            package_names = []
            for info in package_list:
                package_names.append(info[0])
            disabled_packages.extend(self.disable_packages(package_names))
            threading.Thread(target=do_upgrades).start()

        sublime.set_timeout(disable_packages, 1)

View File

@@ -0,0 +1,81 @@
import threading
import sublime
import sublime_plugin
from ..show_error import show_error
from ..thread_progress import ThreadProgress
from ..package_installer import PackageInstaller, PackageInstallerThread
from ..package_renamer import PackageRenamer
class UpgradePackageCommand(sublime_plugin.WindowCommand):
    """
    A command that presents the list of installed packages that can be upgraded
    """

    def run(self):
        renamer = PackageRenamer()
        renamer.load_settings()

        worker = UpgradePackageThread(self.window, renamer)
        worker.start()
        ThreadProgress(worker, 'Loading repositories', '')
class UpgradePackageThread(threading.Thread, PackageInstaller):
    """
    A thread to run the action of retrieving upgradable packages in.
    """

    def __init__(self, window, package_renamer):
        """
        :param window:
            An instance of :class:`sublime.Window` that represents the Sublime
            Text window to show the list of upgradable packages in.

        :param package_renamer:
            An instance of :class:`PackageRenamer`
        """
        self.window = window
        self.package_renamer = package_renamer
        self.completion_type = 'upgraded'
        threading.Thread.__init__(self)
        PackageInstaller.__init__(self)

    def run(self):
        # Apply any pending package renames before computing the upgrade list
        self.package_renamer.rename_packages(self)

        self.package_list = self.make_package_list(['install', 'reinstall',
            'none'])

        def show_quick_panel():
            if not self.package_list:
                show_error('There are no packages ready for upgrade')
                return
            self.window.show_quick_panel(self.package_list, self.on_done)

        # The quick panel can only be opened from the main UI thread
        sublime.set_timeout(show_quick_panel, 10)

    def on_done(self, picked):
        """
        Quick panel user selection handler - disables a package, upgrades it,
        then re-enables the package

        :param picked:
            An integer of the 0-based package name index from the presented
            list. -1 means the user cancelled.
        """
        if picked == -1:
            return
        name = self.package_list[picked][0]

        # NOTE(review): disable_packages() is passed a bare string here while
        # UpgradeAllPackagesThread passes a list of names - confirm the helper
        # accepts both forms
        if name in self.disable_packages(name):
            on_complete = lambda: self.reenable_package(name)
        else:
            on_complete = None

        thread = PackageInstallerThread(self.manager, name, on_complete)
        thread.start()
        ThreadProgress(thread, 'Upgrading package %s' % name,
            'Package %s successfully %s' % (name, self.completion_type))

View File

@@ -0,0 +1,20 @@
import sys
def console_write(string, prefix=False):
    """
    Writes a value to the Sublime Text console, encoding unicode to utf-8 first

    :param string:
        The value to write

    :param prefix:
        If the string "Package Control: " should be prefixed to the string
    """
    # On Python 2, unicode objects must be encoded before printing; the
    # short-circuit keeps the `unicode` name from being evaluated on Python 3
    if sys.version_info < (3,) and isinstance(string, unicode):
        string = string.encode('UTF-8')

    if prefix:
        sys.stdout.write('Package Control: ')
    print(string)

View File

@@ -0,0 +1,231 @@
import sys
import re
import socket
from threading import Lock, Timer
from contextlib import contextmanager
try:
# Python 3
from urllib.parse import urlparse
except (ImportError):
# Python 2
from urlparse import urlparse
from . import __version__
from .show_error import show_error
from .console_write import console_write
from .cache import set_cache, get_cache
from .unicode import unicode_from_os
from .downloaders import DOWNLOADERS
from .downloaders.binary_not_found_error import BinaryNotFoundError
from .downloaders.rate_limit_exception import RateLimitException
from .downloaders.no_ca_cert_exception import NoCaCertException
from .downloaders.downloader_exception import DownloaderException
from .http_cache import HttpCache
# A dict of domains - each points to a list of downloaders
_managers = {}

# How many managers are currently checked out
_in_use = 0

# Make sure connection management doesn't run into threading issues
_lock = Lock()

# A timer used to disconnect all managers after a period of no usage
_timer = None
@contextmanager
def downloader(url, settings):
    """
    A context manager that checks a DownloadManager out of the pool for the
    URL's domain, and checks it back in once the caller is done

    :param url:
        The URL the manager will be used to fetch

    :param settings:
        The settings dict used to construct a DownloadManager, if needed

    :return:
        Yields a DownloadManager object
    """
    manager = None
    try:
        manager = _grab(url, settings)
        yield manager
    finally:
        # Only release what was actually grabbed - previously, if _grab()
        # raised, the finally clause referenced an unbound local and the
        # resulting UnboundLocalError masked the original exception
        if manager is not None:
            _release(url, manager)
def _grab(url, settings):
    """
    Checks a DownloadManager out of the pool for the URL's hostname, creating
    one if none is available, and cancels any pending disconnect timer
    """
    global _managers, _lock, _in_use, _timer

    with _lock:
        if _timer:
            _timer.cancel()
            _timer = None

        hostname = urlparse(url).hostname.lower()
        pool = _managers.setdefault(hostname, [])
        if not pool:
            pool.append(DownloadManager(settings))

        _in_use += 1
        return pool.pop()
def _release(url, manager):
    """
    Returns a DownloadManager to the pool for its hostname, and schedules all
    connections to be closed once no manager has been in use for five seconds
    """
    global _managers, _lock, _in_use, _timer

    with _lock:
        hostname = urlparse(url).hostname.lower()
        _managers[hostname].insert(0, manager)

        _in_use -= 1

        if _timer:
            _timer.cancel()
            _timer = None
        if _in_use == 0:
            _timer = Timer(5.0, close_all_connections)
            _timer.start()
def close_all_connections():
    """
    Closes the connection of every pooled DownloadManager and empties the pool
    """
    global _managers, _lock, _in_use, _timer

    with _lock:
        if _timer:
            _timer.cancel()
            _timer = None

        for pool in _managers.values():
            for manager in pool:
                manager.close()
        _managers = {}
class DownloadManager(object):
    """
    Handles downloading URLs, lazily picking a downloader backend available on
    the current machine and reusing it across requests
    """

    def __init__(self, settings):
        # Cache the downloader for re-use
        self.downloader = None

        user_agent = settings.get('user_agent')
        if user_agent and user_agent.find('%s') != -1:
            settings['user_agent'] = user_agent % __version__

        self.settings = settings
        if settings.get('http_cache'):
            cache_length = settings.get('http_cache_length', 604800)
            self.settings['cache'] = HttpCache(cache_length)

    def close(self):
        # Release any persistent connection held by the backend
        if self.downloader:
            self.downloader.close()
            self.downloader = None

    def fetch(self, url, error_message, prefer_cached=False):
        """
        Downloads a URL and returns the contents

        :param url:
            The string URL to download

        :param error_message:
            The error message to include if the download fails

        :param prefer_cached:
            If cached version of the URL content is preferred over a new request

        :raises:
            DownloaderException: if there was an error downloading the URL

        :return:
            The string contents of the URL
        """
        is_ssl = re.search('^https://', url) is not None

        # Make sure we have a downloader, and it supports SSL if we need it
        if not self.downloader or (is_ssl and not self.downloader.supports_ssl()):
            for downloader_class in DOWNLOADERS:
                try:
                    downloader = downloader_class(self.settings)
                    if is_ssl and not downloader.supports_ssl():
                        continue
                    self.downloader = downloader
                    break
                except (BinaryNotFoundError):
                    pass

        if not self.downloader:
            error_string = u'Unable to download %s due to no ssl module available and no capable program found. Please install curl or wget.' % url
            show_error(error_string)
            raise DownloaderException(error_string)

        url = url.replace(' ', '%20')
        hostname = urlparse(url).hostname
        if hostname:
            hostname = hostname.lower()
        timeout = self.settings.get('timeout', 3)

        rate_limited_domains = get_cache('rate_limited_domains', [])
        no_ca_cert_domains = get_cache('no_ca_cert_domains', [])

        if self.settings.get('debug'):
            try:
                ip = socket.gethostbyname(hostname)
            except (socket.gaierror) as e:
                ip = unicode_from_os(e)
            except (TypeError) as e:
                # hostname may be None for URLs without a network location
                ip = None

            console_write(u"Download Debug", True)
            console_write(u"  URL: %s" % url)
            console_write(u"  Resolved IP: %s" % ip)
            console_write(u"  Timeout: %s" % str(timeout))

        if hostname in rate_limited_domains:
            error_string = u"Skipping due to hitting rate limit for %s" % hostname
            if self.settings.get('debug'):
                console_write(u"  %s" % error_string)
            raise DownloaderException(error_string)

        if hostname in no_ca_cert_domains:
            # Fixed: the console-indentation spaces were previously baked into
            # the exception message itself
            error_string = u"Skipping since there are no CA certs for %s" % hostname
            if self.settings.get('debug'):
                console_write(u"  %s" % error_string)
            raise DownloaderException(error_string)

        try:
            return self.downloader.download(url, error_message, timeout, 3, prefer_cached)
        except (RateLimitException) as e:
            # Stop all future requests to this domain for this session
            rate_limited_domains.append(hostname)
            set_cache('rate_limited_domains', rate_limited_domains, self.settings.get('cache_length'))
            # Fixed typo: "futher" -> "further"
            error_string = (u'Hit rate limit of %s for %s, skipping all further ' +
                u'download requests for this domain') % (e.limit, e.domain)
            console_write(error_string, True)
            raise
        except (NoCaCertException) as e:
            no_ca_cert_domains.append(hostname)
            set_cache('no_ca_cert_domains', no_ca_cert_domains, self.settings.get('cache_length'))
            # Fixed typo: "futher" -> "further"
            error_string = (u'No CA certs available for %s, skipping all further ' +
                u'download requests for this domain. If you are on a trusted ' +
                u'network, you can add the CA certs by running the "Grab ' +
                u'CA Certs" command from the command palette.') % e.domain
            console_write(error_string, True)
            raise

View File

@@ -0,0 +1,11 @@
import os

# Platform-specific list of downloader classes; DownloadManager.fetch tries
# them in order and keeps the first one that can be constructed (and that
# supports SSL, when needed)
if os.name == 'nt':
    from .wininet_downloader import WinINetDownloader
    DOWNLOADERS = [WinINetDownloader]
else:
    from .urllib_downloader import UrlLibDownloader
    from .curl_downloader import CurlDownloader
    from .wget_downloader import WgetDownloader
    DOWNLOADERS = [UrlLibDownloader, CurlDownloader, WgetDownloader]

View File

@@ -0,0 +1,62 @@
import threading
class BackgroundDownloader(threading.Thread):
    """
    Downloads information from one or more URLs in the background.
    Normal usage is to use one BackgroundDownloader per domain name.

    :param settings:
        A dict containing at least the following fields:
          `cache_length`, `debug`, `timeout`, `user_agent`,
          `http_proxy`, `https_proxy`, `proxy_username`, `proxy_password`

    :param providers:
        An array of providers that can download the URLs
    """

    def __init__(self, settings, providers):
        self.settings = settings
        self.urls = []
        self.providers = providers
        self.used_providers = {}
        threading.Thread.__init__(self)

    def add_url(self, url):
        """
        Adds a URL to the list to download

        :param url:
            The URL to download info about
        """
        self.urls.append(url)

    def get_provider(self, url):
        """
        Returns the provider for the URL specified

        :param url:
            The URL to return the provider for

        :return:
            The provider object for the URL
        """
        return self.used_providers[url]

    def run(self):
        for url in self.urls:
            for provider_class in self.providers:
                if provider_class.match_url(url):
                    provider = provider_class(url, self.settings)
                    break
            else:
                # Fixed: previously a URL with no matching provider would
                # either raise a NameError (first URL) or silently reuse the
                # provider from the previous URL; such URLs are now skipped
                continue
            provider.prefetch()
            self.used_providers[url] = provider

View File

@@ -0,0 +1,4 @@
class BinaryNotFoundError(Exception):
    """If a necessary executable is not found in the PATH on the system"""

View File

@@ -0,0 +1,185 @@
import sys
import re
import json
import hashlib
from ..console_write import console_write
class CachingDownloader(object):
    """
    A base downloader that will use a caching backend to cache HTTP requests
    and make conditional requests.
    """

    def add_conditional_headers(self, url, headers):
        """
        Add `If-Modified-Since` and `If-None-Match` headers to a request if a
        cached copy exists

        :param url:
            The URL the request will be made to

        :param headers:
            A dict with the request headers

        :return:
            The request headers dict, possibly with new headers added
        """
        if not self.settings.get('cache'):
            return headers

        info_key = self.generate_key(url, '.info')
        info_json = self.settings['cache'].get(info_key)
        if not info_json:
            return headers

        # Make sure we have the cached content to use if we get a 304
        key = self.generate_key(url)
        if not self.settings['cache'].has(key):
            return headers

        try:
            # Cached metadata is stored as UTF-8 encoded JSON bytes
            info = json.loads(info_json.decode('utf-8'))
        except ValueError:
            # Corrupt metadata - fall back to an unconditional request
            return headers

        etag = info.get('etag')
        if etag:
            headers['If-None-Match'] = etag

        last_modified = info.get('last-modified')
        if last_modified:
            headers['If-Modified-Since'] = last_modified

        return headers

    def cache_result(self, method, url, status, headers, content):
        """
        Processes a request result, either caching the result, or returning
        the cached version of the url.

        :param method:
            The HTTP method used for the request

        :param url:
            The url of the request

        :param status:
            The numeric response status of the request

        :param headers:
            A dict of reponse headers, with keys being lowercase

        :param content:
            The response content

        :return:
            The response content
        """
        debug = self.settings.get('debug', False)

        if not self.settings.get('cache'):
            if debug:
                console_write(u"Skipping cache since there is no cache object", True)
            return content

        if method.lower() != 'get':
            if debug:
                console_write(u"Skipping cache since the HTTP method != GET", True)
            return content

        status = int(status)

        # Don't do anything unless it was successful or not modified
        if status not in [200, 304]:
            if debug:
                console_write(u"Skipping cache since the HTTP status code not one of: 200, 304", True)
            return content

        key = self.generate_key(url)

        if status == 304:
            cached_content = self.settings['cache'].get(key)
            if cached_content:
                if debug:
                    console_write(u"Using cached content for %s" % url, True)
                return cached_content
            # If we got a 304, but did not have the cached content
            # stop here so we don't cache an empty response
            return content

        # If we got here, the status is 200

        # Respect some basic cache control headers
        cache_control = headers.get('cache-control', '')
        if cache_control:
            fields = re.split(',\s*', cache_control)
            for field in fields:
                if field == 'no-store':
                    return content

        # Don't ever cache zip/binary files for the sake of hard drive space
        if headers.get('content-type') in ['application/zip', 'application/octet-stream']:
            if debug:
                console_write(u"Skipping cache since the response is a zip file", True)
            return content

        etag = headers.get('etag')
        last_modified = headers.get('last-modified')
        # Without a validator there is no way to make a conditional request
        # later, so the response is not worth caching
        if not etag and not last_modified:
            return content

        struct = {'etag': etag, 'last-modified': last_modified}
        struct_json = json.dumps(struct, indent=4)

        info_key = self.generate_key(url, '.info')
        if debug:
            console_write(u"Caching %s in %s" % (url, key), True)
        self.settings['cache'].set(info_key, struct_json.encode('utf-8'))
        self.settings['cache'].set(key, content)

        return content

    def generate_key(self, url, suffix=''):
        """
        Generates a key to store the cache under

        :param url:
            The URL being cached

        :param suffix:
            A string to append to the key

        :return:
            A string key for the URL
        """
        # On Python 3 the url str is always encoded; on Python 2 only unicode
        # objects need it (the `or` short-circuits before `unicode` - which
        # does not exist on Python 3 - is ever evaluated there)
        if sys.version_info >= (3,) or isinstance(url, unicode):
            url = url.encode('utf-8')

        key = hashlib.md5(url).hexdigest()
        return key + suffix

    def retrieve_cached(self, url):
        """
        Tries to return the cached content for a URL

        :param url:
            The URL to get the cached content for

        :return:
            The cached content
        """
        key = self.generate_key(url)
        if not self.settings['cache'].has(key):
            return False

        if self.settings.get('debug'):
            console_write(u"Using cached content for %s" % url, True)

        return self.settings['cache'].get(key)

View File

@@ -0,0 +1,203 @@
import os
import re
import json
import sublime
from ..console_write import console_write
from ..open_compat import open_compat, read_compat
from ..package_io import read_package_file
from ..cache import get_cache
from ..ca_certs import get_system_ca_bundle_path
from .no_ca_cert_exception import NoCaCertException
from .downloader_exception import DownloaderException
class CertProvider(object):
    """
    A base downloader that provides access to a ca-bundle for validating
    SSL certificates.
    """

    def check_certs(self, domain, timeout):
        """
        Ensures that the SSL CA cert for a domain is present on the machine

        :param domain:
            The domain to ensure there is a CA cert for

        :param timeout:
            The int timeout for downloading the CA cert from the channel

        :raises:
            NoCaCertException: when a suitable CA cert could not be found

        :return:
            The CA cert bundle path
        """
        # Try to use the system CA bundle
        ca_bundle_path = get_system_ca_bundle_path(self.settings)
        if ca_bundle_path:
            return ca_bundle_path

        # If the system bundle did not work, fall back to our CA distribution
        # system. Hopefully this will be going away soon.
        if self.settings.get('debug'):
            console_write(u'Unable to find system CA cert bundle, falling back to certs provided by Package Control')

        cert_match = False

        certs_list = get_cache('*.certs', self.settings.get('certs', {}))

        # (Re)create the bundle in the User package from the copy distributed
        # with Package Control if it is missing or empty
        ca_bundle_path = os.path.join(sublime.packages_path(), 'User', 'Package Control.ca-bundle')
        if not os.path.exists(ca_bundle_path) or os.stat(ca_bundle_path).st_size == 0:
            bundle_contents = read_package_file('Package Control', 'Package Control.ca-bundle', True)
            if not bundle_contents:
                raise NoCaCertException(u'Unable to copy distributed Package Control.ca-bundle', domain)
            with open_compat(ca_bundle_path, 'wb') as f:
                f.write(bundle_contents)

        cert_info = certs_list.get(domain)
        if cert_info:
            cert_match = self.locate_cert(cert_info[0],
                cert_info[1], domain, timeout)

        # A wildcard entry, if present, applies to every domain
        wildcard_info = certs_list.get('*')
        if wildcard_info:
            cert_match = self.locate_cert(wildcard_info[0],
                wildcard_info[1], domain, timeout) or cert_match

        if not cert_match:
            raise NoCaCertException(u'No CA certs available for %s' % domain, domain)

        return ca_bundle_path

    def locate_cert(self, cert_id, location, domain, timeout):
        """
        Makes sure the SSL cert specified has been added to the CA cert
        bundle that is present on the machine

        :param cert_id:
            The identifier for CA cert(s). For those provided by the channel
            system, this will be an md5 of the contents of the cert(s). For
            user-provided certs, this is something they provide.

        :param location:
            An http(s) URL, or absolute filesystem path to the CA cert(s)

        :param domain:
            The domain to ensure there is a CA cert for

        :param timeout:
            The int timeout for downloading the CA cert from the channel

        :return:
            If the cert specified (by cert_id) is present on the machine and
            part of the Package Control.ca-bundle file in the User package folder
        """
        # The ca-list records which cert ids are already in the local bundle;
        # recreate it from the distributed copy if missing or empty
        ca_list_path = os.path.join(sublime.packages_path(), 'User', 'Package Control.ca-list')
        if not os.path.exists(ca_list_path) or os.stat(ca_list_path).st_size == 0:
            list_contents = read_package_file('Package Control', 'Package Control.ca-list')
            if not list_contents:
                raise NoCaCertException(u'Unable to copy distributed Package Control.ca-list', domain)
            with open_compat(ca_list_path, 'w') as f:
                f.write(list_contents)

        ca_certs = []
        with open_compat(ca_list_path, 'r') as f:
            ca_certs = json.loads(read_compat(f))

        if not cert_id in ca_certs:
            if str(location) != '':
                # URLs are downloaded; anything else is treated as a local path
                if re.match('^https?://', location):
                    contents = self.download_cert(cert_id, location, domain,
                        timeout)
                else:
                    contents = self.load_cert(cert_id, location, domain)
                if contents:
                    self.save_cert(cert_id, contents)
                    return True
            return False

        return True

    def download_cert(self, cert_id, url, domain, timeout):
        """
        Downloads CA cert(s) from a URL

        :param cert_id:
            The identifier for CA cert(s). For those provided by the channel
            system, this will be an md5 of the contents of the cert(s). For
            user-provided certs, this is something they provide.

        :param url:
            An http(s) URL to the CA cert(s)

        :param domain:
            The domain to ensure there is a CA cert for

        :param timeout:
            The int timeout for downloading the CA cert from the channel

        :return:
            The contents of the CA cert(s)
        """
        # Use a fresh downloader of the same class so this request does not
        # interfere with the request the cert is being fetched for
        cert_downloader = self.__class__(self.settings)
        if self.settings.get('debug'):
            console_write(u"Downloading CA cert for %s from \"%s\"" % (domain, url), True)
        return cert_downloader.download(url,
            'Error downloading CA certs for %s.' % domain, timeout, 1)

    def load_cert(self, cert_id, path, domain):
        """
        Copies CA cert(s) from a file path

        :param cert_id:
            The identifier for CA cert(s). For those provided by the channel
            system, this will be an md5 of the contents of the cert(s). For
            user-provided certs, this is something they provide.

        :param path:
            The absolute filesystem path to a file containing the CA cert(s)

        :param domain:
            The domain name the cert is for

        :return:
            The contents of the CA cert(s)
        """
        if os.path.exists(path):
            if self.settings.get('debug'):
                console_write(u"Copying CA cert for %s from \"%s\"" % (domain, path), True)
            with open_compat(path, 'rb') as f:
                return f.read()
        else:
            raise NoCaCertException(u"Unable to find CA cert for %s at \"%s\"" % (domain, path), domain)

    def save_cert(self, cert_id, contents):
        """
        Saves CA cert(s) to the Package Control.ca-bundle

        :param cert_id:
            The identifier for CA cert(s). For those provided by the channel
            system, this will be an md5 of the contents of the cert(s). For
            user-provided certs, this is something they provide.

        :param contents:
            The contents of the CA cert(s)
        """
        # Append to the bundle in binary mode so the PEM bytes are unchanged
        ca_bundle_path = os.path.join(sublime.packages_path(), 'User', 'Package Control.ca-bundle')
        with open_compat(ca_bundle_path, 'ab') as f:
            f.write(b"\n" + contents)
ca_list_path = os.path.join(sublime.packages_path(), 'User', 'Package Control.ca-list')
with open_compat(ca_list_path, 'r') as f:
ca_certs = json.loads(read_compat(f))
ca_certs.append(cert_id)
with open_compat(ca_list_path, 'w') as f:
f.write(json.dumps(ca_certs, indent=4))

View File

@@ -0,0 +1,81 @@
import os
import subprocess
from ..console_write import console_write
from ..cmd import create_cmd
from .non_clean_exit_error import NonCleanExitError
from .binary_not_found_error import BinaryNotFoundError
class CliDownloader(object):
    """
    Base for downloaders that use a command line program

    :param settings:
        A dict of the various Package Control settings. The Sublime Text
        Settings API is not used because this code is run in a thread.
    """

    def __init__(self, settings):
        self.settings = settings

    def clean_tmp_file(self):
        # Subclasses point self.tmp_file at a temp file used to capture
        # headers/debug output; remove it once it has been consumed
        if os.path.exists(self.tmp_file):
            os.remove(self.tmp_file)

    def find_binary(self, name):
        """
        Finds the given executable name in the system PATH

        :param name:
            The exact name of the executable to find

        :return:
            The absolute path to the executable

        :raises:
            BinaryNotFoundError when the executable can not be found
        """

        dirs = os.environ['PATH'].split(os.pathsep)
        if os.name != 'nt':
            # This is mostly for OS X, which seems to launch ST with a
            # minimal set of environmental variables
            dirs.append('/usr/local/bin')

        for dir_ in dirs:
            path = os.path.join(dir_, name)
            if os.path.exists(path):
                return path

        raise BinaryNotFoundError('The binary %s could not be located' % name)

    def execute(self, args):
        """
        Runs the executable and args and returns the result

        :param args:
            A list of the executable path and all arguments to be passed to it

        :return:
            The text output of the executable

        :raises:
            NonCleanExitError when the executable exits with an error
        """

        if self.settings.get('debug'):
            console_write(u"Trying to execute command %s" % create_cmd(args), True)

        proc = subprocess.Popen(args, stdin=subprocess.PIPE,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)

        # communicate() drains stdout and stderr concurrently. The previous
        # sequential proc.stdout.read() / proc.stderr.read() calls could
        # deadlock when the child filled the stderr pipe buffer while stdout
        # was still being read (or vice versa).
        output, self.stderr = proc.communicate()
        returncode = proc.returncode

        if returncode != 0:
            error = NonCleanExitError(returncode)
            error.stderr = self.stderr
            error.stdout = output
            raise error
        return output

View File

@@ -0,0 +1,267 @@
import tempfile
import re
import os
from ..console_write import console_write
from ..open_compat import open_compat, read_compat
from .cli_downloader import CliDownloader
from .non_clean_exit_error import NonCleanExitError
from .rate_limit_exception import RateLimitException
from .downloader_exception import DownloaderException
from .cert_provider import CertProvider
from .limiting_downloader import LimitingDownloader
from .caching_downloader import CachingDownloader
class CurlDownloader(CliDownloader, CertProvider, LimitingDownloader, CachingDownloader):
    """
    A downloader that uses the command line program curl

    :param settings:
        A dict of the various Package Control settings. The Sublime Text
        Settings API is not used because this code is run in a thread.

    :raises:
        BinaryNotFoundError: when curl can not be found
    """

    def __init__(self, settings):
        self.settings = settings
        # Resolved via CliDownloader.find_binary(); raises BinaryNotFoundError
        self.curl = self.find_binary('curl')

    def close(self):
        """
        No-op for compatibility with UrllibDownloader and WinINetDownloader
        """

        pass

    def download(self, url, error_message, timeout, tries, prefer_cached=False):
        """
        Downloads a URL and returns the contents

        :param url:
            The URL to download

        :param error_message:
            A string to include in the console error that is printed
            when an error occurs

        :param timeout:
            The int number of seconds to set the timeout to

        :param tries:
            The int number of times to try and download the URL in the case of
            a timeout or HTTP 503 error

        :param prefer_cached:
            If a cached version should be returned instead of trying a new request

        :raises:
            NoCaCertException: when no CA certs can be found for the url
            RateLimitException: when a rate limit is hit
            DownloaderException: when any other download error occurs

        :return:
            The string contents of the URL
        """

        if prefer_cached:
            cached = self.retrieve_cached(url)
            if cached:
                return cached

        # curl writes the response headers into this temp file (--dump-header)
        self.tmp_file = tempfile.NamedTemporaryFile().name
        command = [self.curl, '--user-agent', self.settings.get('user_agent'),
            '--connect-timeout', str(int(timeout)), '-sSL',
            # Don't be alarmed if the response from the server does not select
            # one of these since the server runs a relatively new version of
            # OpenSSL which supports compression on the SSL layer, and Apache
            # will use that instead of HTTP-level encoding.
            '--compressed',
            # We have to capture the headers to check for rate limit info
            '--dump-header', self.tmp_file]

        request_headers = self.add_conditional_headers(url, {})

        for name, value in request_headers.items():
            command.extend(['--header', "%s: %s" % (name, value)])

        # HTTPS URLs need a CA bundle for curl to verify the server cert
        secure_url_match = re.match('^https://([^/]+)', url)
        if secure_url_match != None:
            secure_domain = secure_url_match.group(1)
            bundle_path = self.check_certs(secure_domain, timeout)
            command.extend(['--cacert', bundle_path])

        debug = self.settings.get('debug')
        if debug:
            command.append('-v')

        http_proxy = self.settings.get('http_proxy')
        https_proxy = self.settings.get('https_proxy')
        proxy_username = self.settings.get('proxy_username')
        proxy_password = self.settings.get('proxy_password')

        if debug:
            console_write(u"Curl Debug Proxy", True)
            console_write(u"  http_proxy: %s" % http_proxy)
            console_write(u"  https_proxy: %s" % https_proxy)
            console_write(u"  proxy_username: %s" % proxy_username)
            console_write(u"  proxy_password: %s" % proxy_password)

        if http_proxy or https_proxy:
            command.append('--proxy-anyauth')

        if proxy_username or proxy_password:
            command.extend(['-U', u"%s:%s" % (proxy_username, proxy_password)])

        # curl picks up proxies from the environment, not the command line
        if http_proxy:
            os.putenv('http_proxy', http_proxy)
        if https_proxy:
            os.putenv('HTTPS_PROXY', https_proxy)

        command.append(url)

        error_string = None
        while tries > 0:
            tries -= 1
            try:
                output = self.execute(command)

                with open_compat(self.tmp_file, 'r') as f:
                    headers_str = read_compat(f)
                self.clean_tmp_file()

                message = 'OK'
                status = 200
                headers = {}
                # Parse the dumped headers; with -L following redirects there
                # may be several HTTP/ status lines - the last one wins
                for header in headers_str.splitlines():
                    if header[0:5] == 'HTTP/':
                        message = re.sub('^HTTP/\d\.\d\s+\d+\s*', '', header)
                        status = int(re.sub('^HTTP/\d\.\d\s+(\d+)(\s+.*)?$', '\\1', header))
                        continue
                    if header.strip() == '':
                        continue
                    name, value = header.split(':', 1)
                    headers[name.lower()] = value.strip()

                if debug:
                    self.print_debug(self.stderr.decode('utf-8'))

                self.handle_rate_limit(headers, url)

                if status not in [200, 304]:
                    # Synthesize curl's exit code 22 ("HTTP page not
                    # retrieved") so the handler below takes the HTTP path
                    e = NonCleanExitError(22)
                    e.stderr = "%s %s" % (status, message)
                    raise e

                output = self.cache_result('get', url, status, headers, output)

                return output

            except (NonCleanExitError) as e:
                # Stderr is used for both the error message and the debug info
                # so we need to process it to extra the debug info
                if self.settings.get('debug'):
                    if hasattr(e.stderr, 'decode'):
                        e.stderr = e.stderr.decode('utf-8')
                    e.stderr = self.print_debug(e.stderr)

                self.clean_tmp_file()

                if e.returncode == 22:
                    # Pull the numeric HTTP status out of the stderr text
                    code = re.sub('^.*?(\d+)([\w\s]+)?$', '\\1', e.stderr)
                    if code == '503' and tries != 0:
                        # GitHub and BitBucket seem to rate limit via 503
                        error_string = u'Downloading %s was rate limited' % url
                        if tries:
                            error_string += ', trying again'
                            if debug:
                                console_write(error_string, True)
                        continue

                    download_error = u'HTTP error ' + code

                elif e.returncode == 6:
                    download_error = u'URL error host not found'

                elif e.returncode == 28:
                    # GitHub and BitBucket seem to time out a lot
                    error_string = u'Downloading %s timed out' % url
                    if tries:
                        error_string += ', trying again'
                        if debug:
                            console_write(error_string, True)
                    continue

                else:
                    download_error = e.stderr.rstrip()

                error_string = u'%s %s downloading %s.' % (error_message, download_error, url)

            break

        raise DownloaderException(error_string)

    def supports_ssl(self):
        """
        Indicates if the object can handle HTTPS requests

        :return:
            If the object supports HTTPS requests
        """

        return True

    def print_debug(self, string):
        """
        Takes debug output from curl and groups and prints it

        :param string:
            The complete debug output from curl

        :return:
            A string containing any stderr output
        """

        section = 'General'
        last_section = None

        output = ''

        for line in string.splitlines():
            # Placeholder for body of request
            if line and line[0:2] == '{ ':
                continue
            if line and line[0:18] == '} [data not shown]':
                continue

            if len(line) > 1:
                subtract = 0
                # curl -v prefixes: "* " info, "> " request, "< " response
                if line[0:2] == '* ':
                    section = 'General'
                    subtract = 2
                elif line[0:2] == '> ':
                    section = 'Write'
                    subtract = 2
                elif line[0:2] == '< ':
                    section = 'Read'
                    subtract = 2
                line = line[subtract:]

                # If the line does not start with "* ", "< ", "> " or "  "
                # then it is a real stderr message
                if subtract == 0 and line[0:2] != '  ':
                    output += line.rstrip() + ' '
                    continue

            if line.strip() == '':
                continue

            if section != last_section:
                console_write(u"Curl HTTP Debug %s" % section, True)

            console_write(u'  ' + line)
            last_section = section

        return output.rstrip()

View File

@@ -0,0 +1,24 @@
import gzip
import zlib
try:
# Python 3
from io import BytesIO as StringIO
except (ImportError):
# Python 2
from StringIO import StringIO
class DecodingDownloader(object):
    """
    A base for downloaders that provides the ability to decode gzipped
    or deflated content.
    """

    def decode_response(self, encoding, response):
        """
        Decodes a response body based on its Content-Encoding.

        :param encoding:
            The content-encoding header value: 'gzip', 'deflate' or anything
            else (which is passed through untouched)

        :param response:
            The raw byte string of the response body

        :return:
            The decoded byte string
        """

        if encoding == 'deflate':
            # -MAX_WBITS tells zlib to expect a raw deflate stream with
            # no zlib header
            inflater = zlib.decompressobj(-zlib.MAX_WBITS)
            return inflater.decompress(response) + inflater.flush()

        if encoding == 'gzip':
            buf = StringIO(response)
            return gzip.GzipFile(fileobj=buf).read()

        return response

View File

@@ -0,0 +1,5 @@
class DownloaderException(Exception):
    """If a downloader could not download a URL"""

    def __str__(self):
        # The human-readable message is always the first positional arg
        message = self.args[0]
        return message

View File

@@ -0,0 +1,9 @@
class HttpError(Exception):
    """If a downloader was able to download a URL, but the result was not a 200 or 304"""

    def __init__(self, message, code):
        super(HttpError, self).__init__(message)
        # The integer HTTP status code that was returned
        self.code = code

    def __str__(self):
        message = self.args[0]
        return message

View File

@@ -0,0 +1,36 @@
try:
# Python 3
from urllib.parse import urlparse
except (ImportError):
# Python 2
from urlparse import urlparse
from .rate_limit_exception import RateLimitException
class LimitingDownloader(object):
    """
    A base for downloaders that checks for rate limiting headers.
    """

    def handle_rate_limit(self, headers, url):
        """
        Checks the headers of a response object to make sure we are obeying the
        rate limit

        :param headers:
            The dict-like object that contains lower-cased headers

        :param url:
            The URL that was requested

        :raises:
            RateLimitException when the rate limit has been hit
        """

        remaining = str(headers.get('x-ratelimit-remaining', '1'))
        if remaining != '0':
            return

        # Zero requests remaining - report the host and its limit
        limit = headers.get('x-ratelimit-limit', '1')
        hostname = urlparse(url).hostname
        raise RateLimitException(hostname, limit)

View File

@@ -0,0 +1,11 @@
from .downloader_exception import DownloaderException
class NoCaCertException(DownloaderException):
"""
An exception for when there is no CA cert for a domain name
"""
def __init__(self, message, domain):
self.domain = domain
super(NoCaCertException, self).__init__(message)

View File

@@ -0,0 +1,13 @@
class NonCleanExitError(Exception):
    """
    When an subprocess does not exit cleanly

    :param returncode:
        The command line integer return code of the subprocess
    """

    def __init__(self, returncode):
        # Callers may also attach .stdout/.stderr after construction
        self.returncode = returncode

    def __str__(self):
        code = self.returncode
        return repr(code)

View File

@@ -0,0 +1,5 @@
class NonHttpError(Exception):
    """If a downloader had a non-clean exit, but it was not due to an HTTP error"""

    def __str__(self):
        # The error description is always the first positional arg
        description = self.args[0]
        return description

View File

@@ -0,0 +1,13 @@
from .downloader_exception import DownloaderException
class RateLimitException(DownloaderException):
"""
An exception for when the rate limit of an API has been exceeded.
"""
def __init__(self, domain, limit):
self.domain = domain
self.limit = limit
message = u'Rate limit of %s exceeded for %s' % (limit, domain)
super(RateLimitException, self).__init__(message)

View File

@@ -0,0 +1,291 @@
import re
import os
import sys
from .. import http
try:
# Python 3
from http.client import HTTPException, BadStatusLine
from urllib.request import ProxyHandler, HTTPPasswordMgrWithDefaultRealm, ProxyBasicAuthHandler, ProxyDigestAuthHandler, build_opener, Request
from urllib.error import HTTPError, URLError
import urllib.request as urllib_compat
except (ImportError):
# Python 2
from httplib import HTTPException, BadStatusLine
from urllib2 import ProxyHandler, HTTPPasswordMgrWithDefaultRealm, ProxyBasicAuthHandler, ProxyDigestAuthHandler, build_opener, Request
from urllib2 import HTTPError, URLError
import urllib2 as urllib_compat
try:
# Python 3.3
import ConnectionError
except (ImportError):
# Python 2.6-3.2
from socket import error as ConnectionError
from ..console_write import console_write
from ..unicode import unicode_from_os
from ..http.validating_https_handler import ValidatingHTTPSHandler
from ..http.debuggable_http_handler import DebuggableHTTPHandler
from .rate_limit_exception import RateLimitException
from .downloader_exception import DownloaderException
from .cert_provider import CertProvider
from .decoding_downloader import DecodingDownloader
from .limiting_downloader import LimitingDownloader
from .caching_downloader import CachingDownloader
class UrlLibDownloader(CertProvider, DecodingDownloader, LimitingDownloader, CachingDownloader):
    """
    A downloader that uses the Python urllib module

    :param settings:
        A dict of the various Package Control settings. The Sublime Text
        Settings API is not used because this code is run in a thread.
    """

    def __init__(self, settings):
        # The OpenerDirector is built lazily by setup_opener() and re-used
        # across requests for keep-alive support
        self.opener = None
        self.settings = settings

    def close(self):
        """
        Closes any persistent/open connections
        """

        if not self.opener:
            return
        handler = self.get_handler()
        if handler:
            handler.close()
        self.opener = None

    def download(self, url, error_message, timeout, tries, prefer_cached=False):
        """
        Downloads a URL and returns the contents

        Uses the proxy settings from the Package Control.sublime-settings file,
        however there seem to be a decent number of proxies that this code
        does not work with. Patches welcome!

        :param url:
            The URL to download

        :param error_message:
            A string to include in the console error that is printed
            when an error occurs

        :param timeout:
            The int number of seconds to set the timeout to

        :param tries:
            The int number of times to try and download the URL in the case of
            a timeout or HTTP 503 error

        :param prefer_cached:
            If a cached version should be returned instead of trying a new request

        :raises:
            NoCaCertException: when no CA certs can be found for the url
            RateLimitException: when a rate limit is hit
            DownloaderException: when any other download error occurs

        :return:
            The string contents of the URL
        """

        if prefer_cached:
            cached = self.retrieve_cached(url)
            if cached:
                return cached

        self.setup_opener(url, timeout)

        debug = self.settings.get('debug')
        error_string = None
        while tries > 0:
            tries -= 1
            try:
                request_headers = {
                    "User-Agent": self.settings.get('user_agent'),
                    # Don't be alarmed if the response from the server does not
                    # select one of these since the server runs a relatively new
                    # version of OpenSSL which supports compression on the SSL
                    # layer, and Apache will use that instead of HTTP-level
                    # encoding.
                    "Accept-Encoding": "gzip,deflate"
                }
                request_headers = self.add_conditional_headers(url, request_headers)
                request = Request(url, headers=request_headers)
                http_file = self.opener.open(request, timeout=timeout)
                self.handle_rate_limit(http_file.headers, url)

                result = http_file.read()
                # Make sure the response is closed so we can re-use the connection
                http_file.close()

                encoding = http_file.headers.get('content-encoding')
                result = self.decode_response(encoding, result)

                return self.cache_result('get', url, http_file.getcode(),
                    http_file.headers, result)

            except (HTTPException) as e:
                # Since we use keep-alives, it is possible the other end closed
                # the connection, and we may just need to re-open
                if isinstance(e, BadStatusLine):
                    handler = self.get_handler()
                    if handler and handler.use_count > 1:
                        self.close()
                        self.setup_opener(url, timeout)
                        # This retry does not consume one of the caller's tries
                        tries += 1
                        continue

                error_string = u'%s HTTP exception %s (%s) downloading %s.' % (
                    error_message, e.__class__.__name__, unicode_from_os(e), url)

            except (HTTPError) as e:
                # Make sure the response is closed so we can re-use the connection
                e.read()
                e.close()

                # Make sure we obey Github's rate limiting headers
                self.handle_rate_limit(e.headers, url)

                # Handle cached responses
                if unicode_from_os(e.code) == '304':
                    return self.cache_result('get', url, int(e.code), e.headers, b'')

                # Bitbucket and Github return 503 a decent amount
                if unicode_from_os(e.code) == '503' and tries != 0:
                    error_string = u'Downloading %s was rate limited' % url
                    if tries:
                        error_string += ', trying again'
                        if debug:
                            console_write(error_string, True)
                    continue

                error_string = u'%s HTTP error %s downloading %s.' % (
                    error_message, unicode_from_os(e.code), url)

            except (URLError) as e:
                # Bitbucket and Github timeout a decent amount
                if unicode_from_os(e.reason) == 'The read operation timed out' \
                        or unicode_from_os(e.reason) == 'timed out':
                    error_string = u'Downloading %s timed out' % url
                    if tries:
                        error_string += ', trying again'
                        if debug:
                            console_write(error_string, True)
                    continue

                error_string = u'%s URL error %s downloading %s.' % (
                    error_message, unicode_from_os(e.reason), url)

            except (ConnectionError):
                # Handle broken pipes/reset connections by creating a new opener, and
                # thus getting new handlers and a new connection
                error_string = u'Connection went away while trying to download %s, trying again' % url
                if debug:
                    console_write(error_string, True)

                self.opener = None
                self.setup_opener(url, timeout)
                tries += 1

                continue

            break

        raise DownloaderException(error_string)

    def get_handler(self):
        """
        Get the HTTPHandler object for the current connection
        """

        if not self.opener:
            return None

        for handler in self.opener.handlers:
            if isinstance(handler, ValidatingHTTPSHandler) or isinstance(handler, DebuggableHTTPHandler):
                return handler

    def setup_opener(self, url, timeout):
        """
        Sets up a urllib OpenerDirector to be used for requests. There is a
        fair amount of custom urllib code in Package Control, and part of it
        is to handle proxies and keep-alives. Creating an opener the way
        below is because the handlers have been customized to send the
        "Connection: Keep-Alive" header and hold onto connections so they
        can be re-used.

        :param url:
            The URL to download

        :param timeout:
            The int number of seconds to set the timeout to
        """

        if not self.opener:
            http_proxy = self.settings.get('http_proxy')
            https_proxy = self.settings.get('https_proxy')
            if http_proxy or https_proxy:
                proxies = {}
                if http_proxy:
                    proxies['http'] = http_proxy
                if https_proxy:
                    proxies['https'] = https_proxy
                proxy_handler = ProxyHandler(proxies)
            else:
                proxy_handler = ProxyHandler()

            password_manager = HTTPPasswordMgrWithDefaultRealm()
            proxy_username = self.settings.get('proxy_username')
            proxy_password = self.settings.get('proxy_password')
            if proxy_username and proxy_password:
                if http_proxy:
                    password_manager.add_password(None, http_proxy, proxy_username,
                        proxy_password)
                if https_proxy:
                    password_manager.add_password(None, https_proxy, proxy_username,
                        proxy_password)

            handlers = [proxy_handler]

            basic_auth_handler = ProxyBasicAuthHandler(password_manager)
            digest_auth_handler = ProxyDigestAuthHandler(password_manager)
            handlers.extend([digest_auth_handler, basic_auth_handler])

            debug = self.settings.get('debug')

            if debug:
                console_write(u"Urllib Debug Proxy", True)
                console_write(u"  http_proxy: %s" % http_proxy)
                console_write(u"  https_proxy: %s" % https_proxy)
                console_write(u"  proxy_username: %s" % proxy_username)
                console_write(u"  proxy_password: %s" % proxy_password)

            # HTTPS connections get the cert-validating handler with the CA
            # bundle for the domain; plain HTTP gets the debuggable handler
            secure_url_match = re.match('^https://([^/]+)', url)
            if secure_url_match != None:
                secure_domain = secure_url_match.group(1)
                bundle_path = self.check_certs(secure_domain, timeout)
                bundle_path = bundle_path.encode(sys.getfilesystemencoding())
                handlers.append(ValidatingHTTPSHandler(ca_certs=bundle_path,
                    debug=debug, passwd=password_manager,
                    user_agent=self.settings.get('user_agent')))
            else:
                handlers.append(DebuggableHTTPHandler(debug=debug,
                    passwd=password_manager))
            self.opener = build_opener(*handlers)

    def supports_ssl(self):
        """
        Indicates if the object can handle HTTPS requests

        :return:
            If the object supports HTTPS requests
        """

        return 'ssl' in sys.modules and hasattr(urllib_compat, 'HTTPSHandler')

View File

@@ -0,0 +1,347 @@
import tempfile
import re
import os
from ..console_write import console_write
from ..unicode import unicode_from_os
from ..open_compat import open_compat, read_compat
from .cli_downloader import CliDownloader
from .non_http_error import NonHttpError
from .non_clean_exit_error import NonCleanExitError
from .rate_limit_exception import RateLimitException
from .downloader_exception import DownloaderException
from .cert_provider import CertProvider
from .decoding_downloader import DecodingDownloader
from .limiting_downloader import LimitingDownloader
from .caching_downloader import CachingDownloader
class WgetDownloader(CliDownloader, CertProvider, DecodingDownloader, LimitingDownloader, CachingDownloader):
    """
    A downloader that uses the command line program wget

    :param settings:
        A dict of the various Package Control settings. The Sublime Text
        Settings API is not used because this code is run in a thread.

    :raises:
        BinaryNotFoundError: when wget can not be found
    """

    def __init__(self, settings):
        self.settings = settings
        self.debug = settings.get('debug')
        # Resolved via CliDownloader.find_binary(); raises BinaryNotFoundError
        self.wget = self.find_binary('wget')

    def close(self):
        """
        No-op for compatibility with UrllibDownloader and WinINetDownloader
        """

        pass

    def download(self, url, error_message, timeout, tries, prefer_cached=False):
        """
        Downloads a URL and returns the contents

        :param url:
            The URL to download

        :param error_message:
            A string to include in the console error that is printed
            when an error occurs

        :param timeout:
            The int number of seconds to set the timeout to

        :param tries:
            The int number of times to try and download the URL in the case of
            a timeout or HTTP 503 error

        :param prefer_cached:
            If a cached version should be returned instead of trying a new request

        :raises:
            NoCaCertException: when no CA certs can be found for the url
            RateLimitException: when a rate limit is hit
            DownloaderException: when any other download error occurs

        :return:
            The string contents of the URL
        """

        if prefer_cached:
            cached = self.retrieve_cached(url)
            if cached:
                return cached

        # wget logs (headers/debug) go to the temp file via -o; the body is
        # written to stdout via -O -
        self.tmp_file = tempfile.NamedTemporaryFile().name
        command = [self.wget, '--connect-timeout=' + str(int(timeout)), '-o',
            self.tmp_file, '-O', '-', '-U', self.settings.get('user_agent')]

        request_headers = {
            # Don't be alarmed if the response from the server does not select
            # one of these since the server runs a relatively new version of
            # OpenSSL which supports compression on the SSL layer, and Apache
            # will use that instead of HTTP-level encoding.
            'Accept-Encoding': 'gzip,deflate'
        }
        request_headers = self.add_conditional_headers(url, request_headers)

        for name, value in request_headers.items():
            command.extend(['--header', "%s: %s" % (name, value)])

        secure_url_match = re.match('^https://([^/]+)', url)
        if secure_url_match != None:
            secure_domain = secure_url_match.group(1)
            bundle_path = self.check_certs(secure_domain, timeout)
            command.append(u'--ca-certificate=' + bundle_path)

        # -d produces full debug output, -S still prints server headers,
        # both of which parse_output() consumes
        if self.debug:
            command.append('-d')
        else:
            command.append('-S')

        http_proxy = self.settings.get('http_proxy')
        https_proxy = self.settings.get('https_proxy')
        proxy_username = self.settings.get('proxy_username')
        proxy_password = self.settings.get('proxy_password')

        if proxy_username:
            command.append(u"--proxy-user=%s" % proxy_username)
        if proxy_password:
            command.append(u"--proxy-password=%s" % proxy_password)

        if self.debug:
            console_write(u"Wget Debug Proxy", True)
            console_write(u"  http_proxy: %s" % http_proxy)
            console_write(u"  https_proxy: %s" % https_proxy)
            console_write(u"  proxy_username: %s" % proxy_username)
            console_write(u"  proxy_password: %s" % proxy_password)

        command.append(url)

        # wget picks up proxies from the environment, not the command line
        if http_proxy:
            os.putenv('http_proxy', http_proxy)
        if https_proxy:
            os.putenv('https_proxy', https_proxy)

        error_string = None
        while tries > 0:
            tries -= 1
            try:
                result = self.execute(command)

                general, headers = self.parse_output()
                encoding = headers.get('content-encoding')
                if encoding:
                    result = self.decode_response(encoding, result)

                result = self.cache_result('get', url, general['status'],
                    headers, result)

                return result

            except (NonCleanExitError) as e:
                try:
                    general, headers = self.parse_output()
                    self.handle_rate_limit(headers, url)

                    if general['status'] == 304:
                        return self.cache_result('get', url, general['status'],
                            headers, None)

                    if general['status'] == 503 and tries != 0:
                        # GitHub and BitBucket seem to rate limit via 503
                        error_string = u'Downloading %s was rate limited' % url
                        if tries:
                            error_string += ', trying again'
                            if self.debug:
                                console_write(error_string, True)
                        continue

                    download_error = 'HTTP error %s' % general['status']

                except (NonHttpError) as e:
                    download_error = unicode_from_os(e)

                    # GitHub and BitBucket seem to time out a lot
                    if download_error.find('timed out') != -1:
                        error_string = u'Downloading %s timed out' % url
                        if tries:
                            error_string += ', trying again'
                            if self.debug:
                                console_write(error_string, True)
                        continue

                error_string = u'%s %s downloading %s.' % (error_message, download_error, url)

            break

        raise DownloaderException(error_string)

    def supports_ssl(self):
        """
        Indicates if the object can handle HTTPS requests

        :return:
            If the object supports HTTPS requests
        """

        return True

    def parse_output(self):
        """
        Parses the wget output file, prints debug information and returns headers

        :return:
            A tuple of (general, headers) where general is a dict with the keys:
              `version` - HTTP version number (string)
              `status` - HTTP status code (integer)
              `message` - HTTP status message (string)
            And headers is a dict with the keys being lower-case version of the
            HTTP header names.

        :raises:
            NonHttpError when wget reported a non-HTTP failure in its log
        """

        with open_compat(self.tmp_file, 'r') as f:
            output = read_compat(f).splitlines()
        self.clean_tmp_file()

        error = None
        header_lines = []
        if self.debug:
            # Debug (-d) output: wget wraps the request/response in
            # ---request/response begin/end--- markers
            section = 'General'
            last_section = None
            for line in output:
                if section == 'General':
                    if self.skippable_line(line):
                        continue

                # Skip blank lines
                if line.strip() == '':
                    continue

                # Error lines
                if line[0:5] == 'wget:':
                    error = line[5:].strip()
                if line[0:7] == 'failed:':
                    error = line[7:].strip()

                if line == '---request begin---':
                    section = 'Write'
                    continue
                elif line == '---request end---':
                    section = 'General'
                    continue
                elif line == '---response begin---':
                    section = 'Read'
                    continue
                elif line == '---response end---':
                    section = 'General'
                    continue

                if section != last_section:
                    console_write(u"Wget HTTP Debug %s" % section, True)

                if section == 'Read':
                    header_lines.append(line)

                console_write(u'  ' + line)
                last_section = section

        else:
            # Non-debug (-S) output: headers are indented by two spaces
            for line in output:
                if self.skippable_line(line):
                    continue

                # Check the resolving and connecting to lines for errors
                if re.match('(Resolving |Connecting to )', line):
                    failed_match = re.search(' failed: (.*)$', line)
                    if failed_match:
                        error = failed_match.group(1).strip()

                # Error lines
                if line[0:5] == 'wget:':
                    error = line[5:].strip()
                if line[0:7] == 'failed:':
                    error = line[7:].strip()

                if line[0:2] == '  ':
                    header_lines.append(line.lstrip())

        if error:
            raise NonHttpError(error)

        return self.parse_headers(header_lines)

    def skippable_line(self, line):
        """
        Determines if a debug line is skippable - usually because of extraneous
        or duplicate information.

        :param line:
            The debug line to check

        :return:
            True if the line is skippable, otherwise None
        """

        # Skip date lines
        if re.match('--\d{4}-\d{2}-\d{2}', line):
            return True
        if re.match('\d{4}-\d{2}-\d{2}', line):
            return True
        # Skip HTTP status code lines since we already have that info
        if re.match('\d{3} ', line):
            return True
        # Skip Saving to and progress lines
        if re.match('(Saving to:|\s*\d+K)', line):
            return True
        # Skip notice about ignoring body on HTTP error
        if re.match('Skipping \d+ byte', line):
            return True

    def parse_headers(self, output=None):
        """
        Parses HTTP headers into two dict objects

        :param output:
            An array of header lines, if None, loads from temp output file

        :return:
            A tuple of (general, headers) where general is a dict with the keys:
              `version` - HTTP version number (string)
              `status` - HTTP status code (integer)
              `message` - HTTP status message (string)
            And headers is a dict with the keys being lower-case version of the
            HTTP header names.
        """

        if not output:
            with open_compat(self.tmp_file, 'r') as f:
                output = read_compat(f).splitlines()
            self.clean_tmp_file()

        # Defaults for responses that include no status line (HTTP/0.9)
        general = {
            'version': '0.9',
            'status': 200,
            'message': 'OK'
        }
        headers = {}
        for line in output:
            # When using the -S option, headers have two spaces before them,
            # additionally, valid headers won't have spaces, so this is always
            # a safe operation to perform
            line = line.lstrip()
            if line.find('HTTP/') == 0:
                match = re.match('HTTP/(\d\.\d)\s+(\d+)(?:\s+(.*))?$', line)
                general['version'] = match.group(1)
                general['status'] = int(match.group(2))
                general['message'] = match.group(3) or ''
            else:
                name, value = line.split(':', 1)
                headers[name.lower()] = value.strip()

        return (general, headers)

View File

@@ -0,0 +1,652 @@
from ctypes import windll, wintypes
import ctypes
import time
import re
import datetime
import struct
import locale
wininet = windll.wininet
try:
# Python 3
from urllib.parse import urlparse
except (ImportError):
# Python 2
from urlparse import urlparse
from ..console_write import console_write
from ..unicode import unicode_from_os
from .non_http_error import NonHttpError
from .http_error import HttpError
from .rate_limit_exception import RateLimitException
from .downloader_exception import DownloaderException
from .decoding_downloader import DecodingDownloader
from .limiting_downloader import LimitingDownloader
from .caching_downloader import CachingDownloader
class WinINetDownloader(DecodingDownloader, LimitingDownloader, CachingDownloader):

    """
    A downloader that uses the Windows WinINet DLL to perform downloads. This
    has the benefit of utilizing system-level proxy configuration and CA certs.

    :param settings:
        A dict of the various Package Control settings. The Sublime Text
        Settings API is not used because this code is run in a thread.
    """

    # General constants
    ERROR_INSUFFICIENT_BUFFER = 122

    # InternetOpen constants
    INTERNET_OPEN_TYPE_PRECONFIG = 0

    # InternetConnect constants
    INTERNET_SERVICE_HTTP = 3
    INTERNET_FLAG_EXISTING_CONNECT = 0x20000000
    INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTPS = 0x00004000

    # InternetSetOption constants
    INTERNET_OPTION_CONNECT_TIMEOUT = 2
    INTERNET_OPTION_SEND_TIMEOUT = 5
    INTERNET_OPTION_RECEIVE_TIMEOUT = 6

    # InternetQueryOption constants
    INTERNET_OPTION_SECURITY_CERTIFICATE_STRUCT = 32
    INTERNET_OPTION_PROXY = 38
    INTERNET_OPTION_PROXY_USERNAME = 43
    INTERNET_OPTION_PROXY_PASSWORD = 44
    INTERNET_OPTION_CONNECTED_STATE = 50

    # HttpOpenRequest constants
    INTERNET_FLAG_KEEP_CONNECTION = 0x00400000
    INTERNET_FLAG_RELOAD = 0x80000000
    INTERNET_FLAG_NO_CACHE_WRITE = 0x04000000
    INTERNET_FLAG_PRAGMA_NOCACHE = 0x00000100
    INTERNET_FLAG_SECURE = 0x00800000

    # HttpQueryInfo constants
    HTTP_QUERY_RAW_HEADERS_CRLF = 22

    # InternetConnectedState constants
    INTERNET_STATE_CONNECTED = 1
    INTERNET_STATE_DISCONNECTED = 2
    INTERNET_STATE_DISCONNECTED_BY_USER = 0x10
    INTERNET_STATE_IDLE = 0x100
    INTERNET_STATE_BUSY = 0x200

    def __init__(self, settings):
        """
        Stores the settings and initializes the (lazily created) WinINet
        handles and per-connection bookkeeping used for keep-alive re-use.
        """

        self.settings = settings
        self.debug = settings.get('debug')
        self.network_connection = None  # handle from InternetOpenW()
        self.tcp_connection = None      # handle from InternetConnectW()
        self.use_count = 0              # requests served over tcp_connection
        self.hostname = None
        self.port = None
        self.scheme = None
        self.was_offline = None         # True if we toggled IE out of "Work Offline"

    def close(self):
        """
        Closes any persistent/open connections
        """

        # NOTE(review): `closed` is set below but never read afterwards
        closed = False
        changed_state_back = False

        if self.tcp_connection:
            wininet.InternetCloseHandle(self.tcp_connection)
            self.tcp_connection = None
            closed = True

        if self.network_connection:
            wininet.InternetCloseHandle(self.network_connection)
            self.network_connection = None
            closed = True

        if self.was_offline:
            # Restore the "Work Offline" state that download() temporarily
            # switched to online
            dw_connected_state = wintypes.DWORD(self.INTERNET_STATE_DISCONNECTED_BY_USER)
            dw_flags = wintypes.DWORD(0)
            connected_info = InternetConnectedInfo(dw_connected_state, dw_flags)
            wininet.InternetSetOptionA(None,
                self.INTERNET_OPTION_CONNECTED_STATE, ctypes.byref(connected_info), ctypes.sizeof(connected_info))
            changed_state_back = True

        if self.debug:
            s = '' if self.use_count == 1 else 's'
            console_write(u"WinINet %s Debug General" % self.scheme.upper(), True)
            console_write(u" Closing connection to %s on port %s after %s request%s" % (
                self.hostname, self.port, self.use_count, s))
            if changed_state_back:
                console_write(u" Changed Internet Explorer back to Work Offline")

        # Reset bookkeeping so the next download() builds a fresh connection
        self.hostname = None
        self.port = None
        self.scheme = None
        self.use_count = 0
        self.was_offline = None

    def download(self, url, error_message, timeout, tries, prefer_cached=False):
        """
        Downloads a URL and returns the contents

        :param url:
            The URL to download

        :param error_message:
            A string to include in the console error that is printed
            when an error occurs

        :param timeout:
            The int number of seconds to set the timeout to

        :param tries:
            The int number of times to try and download the URL in the case of
            a timeout or HTTP 503 error

        :param prefer_cached:
            If a cached version should be returned instead of trying a new request

        :raises:
            RateLimitException: when a rate limit is hit
            DownloaderException: when any other download error occurs

        :return:
            The string contents of the URL
        """

        if prefer_cached:
            cached = self.retrieve_cached(url)
            if cached:
                return cached

        url_info = urlparse(url)

        # Derive hostname/port, defaulting the port from the scheme.
        # NOTE(review): when no port is given, `netloc` is used as the
        # hostname — this would include any userinfo (user:pass@) if present;
        # confirm URLs passed in never carry credentials
        if not url_info.port:
            port = 443 if url_info.scheme == 'https' else 80
            hostname = url_info.netloc
        else:
            port = url_info.port
            hostname = url_info.hostname

        # Rebuild the request path from path + params + query
        path = url_info.path
        if url_info.params:
            path += ';' + url_info.params
        if url_info.query:
            path += '?' + url_info.query

        request_headers = {
            'Accept-Encoding': 'gzip,deflate'
        }
        request_headers = self.add_conditional_headers(url, request_headers)

        created_connection = False
        # If we switched Internet Explorer out of "Work Offline" mode
        changed_to_online = False

        # If the user is requesting a connection to another server, close the connection
        if (self.hostname and self.hostname != hostname) or (self.port and self.port != port):
            self.close()

        # Reset the error info to a known clean state
        ctypes.windll.kernel32.SetLastError(0)

        # Save the internet setup in the class for re-use
        if not self.tcp_connection:
            created_connection = True

            # Connect to the internet if necessary
            state = self.read_option(None, self.INTERNET_OPTION_CONNECTED_STATE)
            state = ord(state)
            if state & self.INTERNET_STATE_DISCONNECTED or state & self.INTERNET_STATE_DISCONNECTED_BY_USER:
                # Track the previous state so we can go back once complete
                self.was_offline = True

                dw_connected_state = wintypes.DWORD(self.INTERNET_STATE_CONNECTED)
                dw_flags = wintypes.DWORD(0)
                connected_info = InternetConnectedInfo(dw_connected_state, dw_flags)
                wininet.InternetSetOptionA(None,
                    self.INTERNET_OPTION_CONNECTED_STATE, ctypes.byref(connected_info), ctypes.sizeof(connected_info))
                changed_to_online = True

            self.network_connection = wininet.InternetOpenW(self.settings.get('user_agent'),
                self.INTERNET_OPEN_TYPE_PRECONFIG, None, None, 0)

            if not self.network_connection:
                error_string = u'%s %s during network phase of downloading %s.' % (error_message, self.extract_error(), url)
                raise DownloaderException(error_string)

            win_timeout = wintypes.DWORD(int(timeout) * 1000)
            # Apparently INTERNET_OPTION_CONNECT_TIMEOUT just doesn't work, leaving it in hoping they may fix in the future
            wininet.InternetSetOptionA(self.network_connection,
                self.INTERNET_OPTION_CONNECT_TIMEOUT, win_timeout, ctypes.sizeof(win_timeout))
            wininet.InternetSetOptionA(self.network_connection,
                self.INTERNET_OPTION_SEND_TIMEOUT, win_timeout, ctypes.sizeof(win_timeout))
            wininet.InternetSetOptionA(self.network_connection,
                self.INTERNET_OPTION_RECEIVE_TIMEOUT, win_timeout, ctypes.sizeof(win_timeout))

            # Don't allow HTTPS sites to redirect to HTTP sites
            tcp_flags = self.INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTPS
            # Try to re-use an existing connection to the server
            tcp_flags |= self.INTERNET_FLAG_EXISTING_CONNECT
            self.tcp_connection = wininet.InternetConnectW(self.network_connection,
                hostname, port, None, None, self.INTERNET_SERVICE_HTTP, tcp_flags, 0)

            if not self.tcp_connection:
                error_string = u'%s %s during connection phase of downloading %s.' % (error_message, self.extract_error(), url)
                raise DownloaderException(error_string)

            # Normally the proxy info would come from IE, but this allows storing it in
            # the Package Control settings file.
            proxy_username = self.settings.get('proxy_username')
            proxy_password = self.settings.get('proxy_password')
            if proxy_username and proxy_password:
                username = ctypes.c_wchar_p(proxy_username)
                password = ctypes.c_wchar_p(proxy_password)
                wininet.InternetSetOptionW(self.tcp_connection,
                    self.INTERNET_OPTION_PROXY_USERNAME, ctypes.cast(username, ctypes.c_void_p), len(proxy_username))
                wininet.InternetSetOptionW(self.tcp_connection,
                    self.INTERNET_OPTION_PROXY_PASSWORD, ctypes.cast(password, ctypes.c_void_p), len(proxy_password))

            # Remember where we are connected so later calls can re-use it
            self.hostname = hostname
            self.port = port
            self.scheme = url_info.scheme

        else:
            if self.debug:
                console_write(u"WinINet %s Debug General" % self.scheme.upper(), True)
                console_write(u" Re-using connection to %s on port %s for request #%s" % (
                    self.hostname, self.port, self.use_count))

        error_string = None
        # Retry loop for timeouts and HTTP 503 responses
        while tries > 0:
            tries -= 1
            try:
                http_connection = None

                # Keep-alive for better performance
                http_flags = self.INTERNET_FLAG_KEEP_CONNECTION
                # Prevent caching/retrieving from cache
                http_flags |= self.INTERNET_FLAG_RELOAD
                http_flags |= self.INTERNET_FLAG_NO_CACHE_WRITE
                http_flags |= self.INTERNET_FLAG_PRAGMA_NOCACHE
                # Use SSL
                if self.scheme == 'https':
                    http_flags |= self.INTERNET_FLAG_SECURE

                http_connection = wininet.HttpOpenRequestW(self.tcp_connection, u'GET', path, u'HTTP/1.1', None, None, http_flags, 0)
                if not http_connection:
                    error_string = u'%s %s during HTTP connection phase of downloading %s.' % (error_message, self.extract_error(), url)
                    raise DownloaderException(error_string)

                # Serialize the extra request headers into CRLF-joined lines
                request_header_lines = []
                for header, value in request_headers.items():
                    request_header_lines.append(u"%s: %s" % (header, value))
                request_header_lines = u"\r\n".join(request_header_lines)

                success = wininet.HttpSendRequestW(http_connection, request_header_lines, len(request_header_lines), None, 0)

                if not success:
                    error_string = u'%s %s during HTTP write phase of downloading %s.' % (error_message, self.extract_error(), url)
                    raise DownloaderException(error_string)

                # If we try to query before here, the proxy info will not be available to the first request
                if self.debug:
                    proxy_struct = self.read_option(self.network_connection, self.INTERNET_OPTION_PROXY)
                    proxy = ''
                    if proxy_struct.lpszProxy:
                        proxy = proxy_struct.lpszProxy.decode('cp1252')
                    proxy_bypass = ''
                    if proxy_struct.lpszProxyBypass:
                        proxy_bypass = proxy_struct.lpszProxyBypass.decode('cp1252')

                    proxy_username = self.read_option(self.tcp_connection, self.INTERNET_OPTION_PROXY_USERNAME)
                    proxy_password = self.read_option(self.tcp_connection, self.INTERNET_OPTION_PROXY_PASSWORD)

                    console_write(u"WinINet Debug Proxy", True)
                    console_write(u" proxy: %s" % proxy)
                    console_write(u" proxy bypass: %s" % proxy_bypass)
                    console_write(u" proxy username: %s" % proxy_username)
                    console_write(u" proxy password: %s" % proxy_password)

                self.use_count += 1

                # On the first request of a new connection, dump certificate
                # info (HTTPS) and the offline/online transition when debugging
                if self.debug and created_connection:
                    if self.scheme == 'https':
                        cert_struct = self.read_option(http_connection, self.INTERNET_OPTION_SECURITY_CERTIFICATE_STRUCT)

                        if cert_struct.lpszIssuerInfo:
                            issuer_info = cert_struct.lpszIssuerInfo.decode('cp1252')
                            issuer_parts = issuer_info.split("\r\n")
                        else:
                            issuer_parts = ['No issuer info']

                        if cert_struct.lpszSubjectInfo:
                            subject_info = cert_struct.lpszSubjectInfo.decode('cp1252')
                            subject_parts = subject_info.split("\r\n")
                        else:
                            subject_parts = ["No subject info"]

                        common_name = subject_parts[-1]

                        if cert_struct.ftStart.dwLowDateTime != 0 and cert_struct.ftStart.dwHighDateTime != 0:
                            issue_date = self.convert_filetime_to_datetime(cert_struct.ftStart)
                            issue_date = issue_date.strftime('%a, %d %b %Y %H:%M:%S GMT')
                        else:
                            issue_date = u"No issue date"

                        if cert_struct.ftExpiry.dwLowDateTime != 0 and cert_struct.ftExpiry.dwHighDateTime != 0:
                            expiration_date = self.convert_filetime_to_datetime(cert_struct.ftExpiry)
                            expiration_date = expiration_date.strftime('%a, %d %b %Y %H:%M:%S GMT')
                        else:
                            expiration_date = u"No expiration date"

                        console_write(u"WinINet HTTPS Debug General", True)
                        if changed_to_online:
                            console_write(u" Internet Explorer was set to Work Offline, temporarily going online")
                        console_write(u" Server SSL Certificate:")
                        console_write(u" subject: %s" % ", ".join(subject_parts))
                        console_write(u" issuer: %s" % ", ".join(issuer_parts))
                        console_write(u" common name: %s" % common_name)
                        console_write(u" issue date: %s" % issue_date)
                        console_write(u" expire date: %s" % expiration_date)

                    elif changed_to_online:
                        console_write(u"WinINet HTTP Debug General", True)
                        console_write(u" Internet Explorer was set to Work Offline, temporarily going online")

                if self.debug:
                    console_write(u"WinINet %s Debug Write" % self.scheme.upper(), True)
                    # Add in some known headers that WinINet sends since we can't get the real list
                    console_write(u" GET %s HTTP/1.1" % path)
                    for header, value in request_headers.items():
                        console_write(u" %s: %s" % (header, value))
                    console_write(u" User-Agent: %s" % self.settings.get('user_agent'))
                    console_write(u" Host: %s" % hostname)
                    console_write(u" Connection: Keep-Alive")
                    console_write(u" Cache-Control: no-cache")

                header_buffer_size = 8192

                # Read the raw response headers, growing the buffer on
                # ERROR_INSUFFICIENT_BUFFER and retrying
                try_again = True
                while try_again:
                    try_again = False

                    to_read_was_read = wintypes.DWORD(header_buffer_size)
                    headers_buffer = ctypes.create_string_buffer(header_buffer_size)

                    success = wininet.HttpQueryInfoA(http_connection, self.HTTP_QUERY_RAW_HEADERS_CRLF, ctypes.byref(headers_buffer), ctypes.byref(to_read_was_read), None)
                    if not success:
                        if ctypes.GetLastError() != self.ERROR_INSUFFICIENT_BUFFER:
                            error_string = u'%s %s during header read phase of downloading %s.' % (error_message, self.extract_error(), url)
                            raise DownloaderException(error_string)
                        # The error was a buffer that was too small, so try again
                        header_buffer_size = to_read_was_read.value
                        try_again = True
                        continue

                    headers = b''
                    if to_read_was_read.value > 0:
                        headers += headers_buffer.raw[:to_read_was_read.value]
                    headers = headers.decode('iso-8859-1').rstrip("\r\n").split("\r\n")

                    if self.debug:
                        console_write(u"WinINet %s Debug Read" % self.scheme.upper(), True)
                        for header in headers:
                            console_write(u" %s" % header)

                # Read the response body in 64KB chunks until EOF
                buffer_length = 65536
                output_buffer = ctypes.create_string_buffer(buffer_length)
                bytes_read = wintypes.DWORD()

                result = b''
                try_again = True
                while try_again:
                    try_again = False
                    wininet.InternetReadFile(http_connection, output_buffer, buffer_length, ctypes.byref(bytes_read))
                    if bytes_read.value > 0:
                        result += output_buffer.raw[:bytes_read.value]
                        try_again = True

                general, headers = self.parse_headers(headers)
                self.handle_rate_limit(headers, url)

                if general['status'] == 503 and tries != 0:
                    # GitHub and BitBucket seem to rate limit via 503
                    error_string = u'Downloading %s was rate limited' % url
                    if tries:
                        error_string += ', trying again'
                        if self.debug:
                            console_write(error_string, True)
                    continue

                encoding = headers.get('content-encoding')
                if encoding:
                    result = self.decode_response(encoding, result)

                result = self.cache_result('get', url, general['status'],
                    headers, result)

                if general['status'] not in [200, 304]:
                    raise HttpError("HTTP error %s" % general['status'], general['status'])

                return result

            except (NonHttpError, HttpError) as e:

                # GitHub and BitBucket seem to time out a lot
                if str(e).find('timed out') != -1:
                    error_string = u'Downloading %s timed out' % url
                    if tries:
                        error_string += ', trying again'
                        if self.debug:
                            console_write(error_string, True)
                    continue

                error_string = u'%s %s downloading %s.' % (error_message, e, url)

            finally:
                # Always release the per-request handle; the TCP/network
                # handles stay open for keep-alive re-use
                if http_connection:
                    wininet.InternetCloseHandle(http_connection)

            break

        raise DownloaderException(error_string)

    def convert_filetime_to_datetime(self, filetime):
        """
        Windows returns times as 64-bit unsigned longs that are the number
        of hundreds of nanoseconds since Jan 1 1601. This converts it to
        a datetime object.

        :param filetime:
            A FileTime struct object

        :return:
            A (UTC) datetime object
        """

        # Recombine the two 32-bit halves into one 64-bit integer
        hundreds_nano_seconds = struct.unpack('>Q', struct.pack('>LL', filetime.dwHighDateTime, filetime.dwLowDateTime))[0]
        seconds_since_1601 = hundreds_nano_seconds / 10000000
        epoch_seconds = seconds_since_1601 - 11644473600 # Seconds from Jan 1 1601 to Jan 1 1970
        return datetime.datetime.fromtimestamp(epoch_seconds)

    def extract_error(self):
        """
        Retrieves and formats an error from WinINet

        :return:
            A string with a nice description of the error
        """

        error_num = ctypes.GetLastError()
        raw_error_string = ctypes.FormatError(error_num)

        error_string = unicode_from_os(raw_error_string)

        # Try to fill in some known errors
        if error_string == u"<no description>":
            error_lookup = {
                12007: u'host not found',
                12029: u'connection refused',
                12057: u'error checking for server certificate revocation',
                12169: u'invalid secure certificate',
                12157: u'secure channel error, server not providing SSL',
                12002: u'operation timed out'
            }
            if error_num in error_lookup:
                error_string = error_lookup[error_num]

        if error_string == u"<no description>":
            return u"(errno %s)" % error_num

        # Capitalize the first letter for presentation
        error_string = error_string[0].upper() + error_string[1:]

        return u"%s (errno %s)" % (error_string, error_num)

    def supports_ssl(self):
        """
        Indicates if the object can handle HTTPS requests

        :return:
            If the object supports HTTPS requests
        """

        return True

    def read_option(self, handle, option):
        """
        Reads information about the internet connection, which may be a string or struct

        :param handle:
            The handle to query for the info

        :param option:
            The (int) option to get

        :return:
            A string, or one of the InternetCertificateInfo or InternetProxyInfo structs
        """

        option_buffer_size = 8192
        # Grow the buffer and retry on ERROR_INSUFFICIENT_BUFFER
        try_again = True
        while try_again:
            try_again = False
            to_read_was_read = wintypes.DWORD(option_buffer_size)
            option_buffer = ctypes.create_string_buffer(option_buffer_size)
            ref = ctypes.byref(option_buffer)

            success = wininet.InternetQueryOptionA(handle, option, ref, ctypes.byref(to_read_was_read))
            if not success:
                if ctypes.GetLastError() != self.ERROR_INSUFFICIENT_BUFFER:
                    raise NonHttpError(self.extract_error())
                # The error was a buffer that was too small, so try again
                option_buffer_size = to_read_was_read.value
                try_again = True
                continue

            # Copy struct-typed options into the matching ctypes Structure
            if option == self.INTERNET_OPTION_SECURITY_CERTIFICATE_STRUCT:
                length = min(len(option_buffer), ctypes.sizeof(InternetCertificateInfo))
                cert_info = InternetCertificateInfo()
                ctypes.memmove(ctypes.addressof(cert_info), option_buffer, length)
                return cert_info
            elif option == self.INTERNET_OPTION_PROXY:
                length = min(len(option_buffer), ctypes.sizeof(InternetProxyInfo))
                proxy_info = InternetProxyInfo()
                ctypes.memmove(ctypes.addressof(proxy_info), option_buffer, length)
                return proxy_info
            else:
                # All other options are returned as NUL-stripped strings
                option = b''
                if to_read_was_read.value > 0:
                    option += option_buffer.raw[:to_read_was_read.value]
                return option.decode('cp1252').rstrip("\x00")

    def parse_headers(self, output):
        """
        Parses HTTP headers into two dict objects

        :param output:
            An array of header lines

        :return:
            A tuple of (general, headers) where general is a dict with the keys:
              `version` - HTTP version number (string)
              `status` - HTTP status code (integer)
              `message` - HTTP status message (string)
            And headers is a dict with the keys being lower-case version of the
            HTTP header names.
        """

        general = {
            'version': '0.9',
            'status': 200,
            'message': 'OK'
        }
        headers = {}
        for line in output:
            line = line.lstrip()
            if line.find('HTTP/') == 0:
                # NOTE(review): unlike the wget downloader's parser, this
                # regex requires a status message — a status line such as
                # "HTTP/1.1 200" would make match None and raise
                # AttributeError below; confirm WinINet always supplies one
                match = re.match('HTTP/(\d\.\d)\s+(\d+)\s+(.*)$', line)
                general['version'] = match.group(1)
                general['status'] = int(match.group(2))
                general['message'] = match.group(3)
            else:
                name, value = line.split(':', 1)
                headers[name.lower()] = value.strip()

        return (general, headers)
class FileTime(ctypes.Structure):

    """
    A Windows struct used by InternetCertificateInfo for certificate
    date information
    """

    # Field order and types mirror the Win32 FILETIME layout: a 64-bit
    # timestamp split into two 32-bit halves — do not reorder
    _fields_ = [
        ("dwLowDateTime", wintypes.DWORD),
        ("dwHighDateTime", wintypes.DWORD)
    ]
class InternetCertificateInfo(ctypes.Structure):

    """
    A Windows struct used to store information about an SSL certificate
    """

    # Mirrors the WinINet INTERNET_CERTIFICATE_INFO layout; field order
    # must match the DLL's definition since it is filled via memmove
    _fields_ = [
        ("ftExpiry", FileTime),
        ("ftStart", FileTime),
        ("lpszSubjectInfo", ctypes.c_char_p),
        ("lpszIssuerInfo", ctypes.c_char_p),
        ("lpszProtocolName", ctypes.c_char_p),
        ("lpszSignatureAlgName", ctypes.c_char_p),
        ("lpszEncryptionAlgName", ctypes.c_char_p),
        ("dwKeySize", wintypes.DWORD)
    ]
class InternetProxyInfo(ctypes.Structure):

    """
    A Windows struct used to store information about the configured proxy server
    """

    # Mirrors the WinINet INTERNET_PROXY_INFO layout; filled via memmove,
    # so field order must match the DLL's definition
    _fields_ = [
        ("dwAccessType", wintypes.DWORD),
        ("lpszProxy", ctypes.c_char_p),
        ("lpszProxyBypass", ctypes.c_char_p)
    ]
class InternetConnectedInfo(ctypes.Structure):

    """
    A Windows struct used to store information about the global internet connection state
    """

    # Mirrors the WinINet INTERNET_CONNECTED_INFO layout used with
    # InternetSetOption(INTERNET_OPTION_CONNECTED_STATE)
    _fields_ = [
        ("dwConnectedState", wintypes.DWORD),
        ("dwFlags", wintypes.DWORD)
    ]

View File

@@ -0,0 +1,4 @@
class FileNotFoundError(Exception):

    """
    Raised when a file can not be located on disk.

    NOTE(review): shares its name with the Python 3 builtin — presumably kept
    so the package has one exception type on both Python 2 and 3; confirm
    callers always import it explicitly from this module.
    """

View File

@@ -0,0 +1,65 @@
import sys

# Compatibility patches for the Python 2 urllib2/httplib stack; on Python 3
# the urllib2 import fails and the whole block is skipped.
try:
    # Python 2
    import urllib2
    import httplib

    # Monkey patch AbstractBasicAuthHandler to prevent infinite recursion
    def non_recursive_http_error_auth_reqed(self, authreq, host, req, headers):
        # `authreq` arrives as the name of the auth header to look up
        authreq = headers.get(authreq, None)

        if not hasattr(self, 'retried'):
            self.retried = 0

        # Give up after 5 attempts instead of recursing forever on a
        # server that keeps answering 401
        if self.retried > 5:
            raise urllib2.HTTPError(req.get_full_url(), 401, "basic auth failed",
                headers, None)
        else:
            self.retried += 1

        if authreq:
            mo = urllib2.AbstractBasicAuthHandler.rx.search(authreq)
            if mo:
                scheme, quote, realm = mo.groups()
                if scheme.lower() == 'basic':
                    return self.retry_http_basic_auth(host, req, realm)

    urllib2.AbstractBasicAuthHandler.http_error_auth_reqed = non_recursive_http_error_auth_reqed

    # Monkey patch urllib2.Request and httplib.HTTPConnection so that
    # HTTPS proxies work in Python 2.6.1-2
    if sys.version_info < (2, 6, 3):
        urllib2.Request._tunnel_host = None

        # Backport of the 2.6.3+ Request.set_proxy that records the tunnel
        # host for CONNECT-based HTTPS proxying
        def py268_set_proxy(self, host, type):
            if self.type == 'https' and not self._tunnel_host:
                self._tunnel_host = self.host
            else:
                self.type = type
                # The _Request prefix is to handle python private name mangling
                self._Request__r_host = self._Request__original
            self.host = host
        urllib2.Request.set_proxy = py268_set_proxy

    if sys.version_info < (2, 6, 5):
        # Backport of HTTPConnection._set_tunnel (headers parameter was
        # added in 2.6.5)
        def py268_set_tunnel(self, host, port=None, headers=None):
            """ Sets up the host and the port for the HTTP CONNECT Tunnelling.

            The headers argument should be a mapping of extra HTTP headers
            to send with the CONNECT request.
            """
            self._tunnel_host = host
            self._tunnel_port = port
            if headers:
                self._tunnel_headers = headers
            else:
                self._tunnel_headers.clear()
        httplib.HTTPConnection._set_tunnel = py268_set_tunnel
except (ImportError):
    # Python 3 does not need to be patched
    pass

View File

@@ -0,0 +1,72 @@
import os
import re
import socket
try:
# Python 3
from http.client import HTTPConnection
from urllib.error import URLError
except (ImportError):
# Python 2
from httplib import HTTPConnection
from urllib2 import URLError
from ..console_write import console_write
from .debuggable_http_response import DebuggableHTTPResponse
class DebuggableHTTPConnection(HTTPConnection):

    """
    A custom HTTPConnection that formats debugging info for Sublime Text
    """

    response_class = DebuggableHTTPResponse
    _debug_protocol = 'HTTP'

    def __init__(self, host, port=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
            **kwargs):
        """
        :param host:
            The hostname to connect to

        :param port:
            The port to connect to

        :param timeout:
            The socket timeout, defaulting to the global default

        :param kwargs:
            May contain `passwd` (stored for proxy auth), and either
            `debug` (truthy selects the custom debug level 5) or an
            explicit `debuglevel`
        """

        self.passwd = kwargs.get('passwd')

        # Python 2.6.1 on OS X 10.6 does not include these
        self._tunnel_host = None
        self._tunnel_port = None
        self._tunnel_headers = {}
        # 5 is a sentinel debug level handled by send()/the response class
        # instead of httplib's stdout-based debugging
        if 'debug' in kwargs and kwargs['debug']:
            self.debuglevel = 5
        elif 'debuglevel' in kwargs:
            self.debuglevel = kwargs['debuglevel']
        HTTPConnection.__init__(self, host, port=port, timeout=timeout)

    def connect(self):
        # -1 is the internal marker meaning "our custom debug output"
        if self.debuglevel == -1:
            console_write(u'Urllib %s Debug General' % self._debug_protocol, True)
            console_write(u" Connecting to %s on port %s" % (self.host, self.port))
        HTTPConnection.connect(self)

    def send(self, string):
        # We have to use a positive debuglevel to get it passed to the
        # HTTPResponse object, however we don't want to use it because by
        # default debugging prints to the stdout and we can't capture it, so
        # we temporarily set it to -1 for the standard httplib code
        reset_debug = False
        if self.debuglevel == 5:
            # Truthy value also remembers the level to restore afterwards
            reset_debug = 5
            self.debuglevel = -1
        HTTPConnection.send(self, string)
        if reset_debug or self.debuglevel == -1:
            if len(string.strip()) > 0:
                console_write(u'Urllib %s Debug Write' % self._debug_protocol, True)
                for line in string.strip().splitlines():
                    console_write(u' ' + line.decode('iso-8859-1'))
            if reset_debug:
                self.debuglevel = reset_debug

    def request(self, method, url, body=None, headers={}):
        # The mutable default is safe here: headers is copied before any
        # mutation
        original_headers = headers.copy()

        # By default urllib2 and urllib.request override the Connection header,
        # however, it is preferred to be able to re-use it
        original_headers['Connection'] = 'Keep-Alive'

        HTTPConnection.request(self, method, url, body, original_headers)

View File

@@ -0,0 +1,35 @@
import sys
try:
# Python 3
from urllib.request import HTTPHandler
except (ImportError):
# Python 2
from urllib2 import HTTPHandler
from .debuggable_http_connection import DebuggableHTTPConnection
from .persistent_handler import PersistentHandler
class DebuggableHTTPHandler(PersistentHandler, HTTPHandler):

    """
    A custom HTTPHandler that formats debugging info for Sublime Text
    """

    def __init__(self, debuglevel=0, debug=False, **kwargs):
        """
        :param debuglevel:
            An explicit debug level to hand to each connection

        :param debug:
            When truthy, selects the special level 5 — a value that skips
            the standard stdout debugging in favor of custom formatted output

        :param kwargs:
            May contain `passwd`, stored for proxy authentication
        """

        self._debuglevel = 5 if debug else debuglevel
        self.passwd = kwargs.get('passwd')

    def http_open(self, req):
        """
        Opens an HTTP connection for the request, injecting the stored
        password manager and debug level into the connection object.
        """

        def _build_connection(host, **conn_kwargs):
            # Forward handler-level settings to each new connection
            conn_kwargs['passwd'] = self.passwd
            conn_kwargs.setdefault('debuglevel', self._debuglevel)
            return DebuggableHTTPConnection(host, **conn_kwargs)

        return self.do_open(_build_connection, req)

View File

@@ -0,0 +1,66 @@
try:
# Python 3
from http.client import HTTPResponse, IncompleteRead
except (ImportError):
# Python 2
from httplib import HTTPResponse, IncompleteRead
from ..console_write import console_write
class DebuggableHTTPResponse(HTTPResponse):

    """
    A custom HTTPResponse that formats debugging info for Sublime Text
    """

    _debug_protocol = 'HTTP'

    def __init__(self, sock, debuglevel=0, method=None, **kwargs):
        # We have to use a positive debuglevel to get it passed to here,
        # however we don't want to use it because by default debugging prints
        # to the stdout and we can't capture it, so we use a special -1 value
        if debuglevel == 5:
            debuglevel = -1
        HTTPResponse.__init__(self, sock, debuglevel=debuglevel, method=method)

    def begin(self):
        """
        Reads the response status line and headers, echoing them to the
        console when the custom -1 debug level is active.
        """

        return_value = HTTPResponse.begin(self)
        if self.debuglevel == -1:
            console_write(u'Urllib %s Debug Read' % self._debug_protocol, True)

            # Python 2
            if hasattr(self.msg, 'headers'):
                headers = self.msg.headers
            # Python 3
            else:
                headers = []
                for header in self.msg:
                    headers.append("%s: %s" % (header, self.msg[header]))

            versions = {
                9: 'HTTP/0.9',
                10: 'HTTP/1.0',
                11: 'HTTP/1.1'
            }
            # Reconstruct the status line, since msg only holds headers
            status_line = versions[self.version] + ' ' + str(self.status) + ' ' + self.reason
            headers.insert(0, status_line)

            for line in headers:
                console_write(u" %s" % line.rstrip())
        return return_value

    def is_keep_alive(self):
        """
        :return:
            A boolean — True when the server responded with
            "Connection: keep-alive"
        """

        # Python 2
        if hasattr(self.msg, 'headers'):
            connection = self.msg.getheader('connection')
        # Python 3
        else:
            connection = self.msg['connection']
        if connection and connection.lower() == 'keep-alive':
            return True
        return False

    def read(self, *args):
        # Treat a truncated body as best-effort: return the partial data
        # instead of propagating IncompleteRead
        try:
            return HTTPResponse.read(self, *args)
        except (IncompleteRead) as e:
            return e.partial

View File

@@ -0,0 +1,9 @@
from .debuggable_http_response import DebuggableHTTPResponse
class DebuggableHTTPSResponse(DebuggableHTTPResponse):

    """
    A version of DebuggableHTTPResponse that sets the debug protocol to HTTPS
    """

    # Only the console label changes; all behavior is inherited
    _debug_protocol = 'HTTPS'

View File

@@ -0,0 +1,25 @@
try:
# Python 3
from http.client import HTTPException
from urllib.error import URLError
except (ImportError):
# Python 2
from httplib import HTTPException
from urllib2 import URLError
class InvalidCertificateException(HTTPException, URLError):

    """
    An exception for when an SSL certification is not valid for the URL
    it was presented for.
    """

    def __init__(self, host, cert, reason):
        """
        :param host:
            The hostname the certificate was presented for

        :param cert:
            The certificate that failed validation

        :param reason:
            A string description of why the certificate is invalid
        """

        HTTPException.__init__(self)
        self.host, self.cert, self.reason = host, cert, reason

    def __str__(self):
        # Report which host failed and why, followed by the raw certificate
        details = (self.host, self.reason, self.cert)
        return ('Host %s returned an invalid certificate (%s) %s\n' % details)

View File

@@ -0,0 +1,116 @@
import sys
import socket
try:
# Python 3
from urllib.error import URLError
except ImportError:
# Python 2
from urllib2 import URLError
from urllib import addinfourl
from ..console_write import console_write
class PersistentHandler:

    """
    A mixin for urllib handler classes that keeps a single HTTP connection
    open and re-uses it across requests when the server allows keep-alive.
    """

    # The currently open connection object, if any
    connection = None
    # Number of requests that have been sent over `connection`
    use_count = 0

    def close(self):
        """
        Closes and discards the persistent connection, logging the event
        when debugging is enabled.
        """

        if self.connection:
            if self._debuglevel == 5:
                s = '' if self.use_count == 1 else 's'
                console_write(u"Urllib %s Debug General" % self.connection._debug_protocol, True)
                console_write(u" Closing connection to %s on port %s after %s request%s" % (
                    self.connection.host, self.connection.port, self.use_count, s))
            self.connection.close()
            self.connection = None
            self.use_count = 0

    def do_open(self, http_class, req):
        """
        Performs the request, re-using the persistent connection when the
        target host matches; keeps the connection for later requests if the
        server answers with keep-alive.

        Large portions from Python 3.3 Lib/urllib/request.py and
        Python 2.6 Lib/urllib2.py
        """

        if sys.version_info >= (3,):
            host = req.host
        else:
            host = req.get_host()
        if not host:
            raise URLError('no host given')

        # A connection to a different host can not be re-used
        if self.connection and self.connection.host != host:
            self.close()

        # Re-use the connection if possible
        self.use_count += 1
        if not self.connection:
            h = http_class(host, timeout=req.timeout)
        else:
            h = self.connection
            if self._debuglevel == 5:
                console_write(u"Urllib %s Debug General" % h._debug_protocol, True)
                console_write(u" Re-using connection to %s on port %s for request #%s" % (
                    h.host, h.port, self.use_count))

        # Normalize the header dict; the Python 2 path also propagates the
        # debug level into the connection
        if sys.version_info >= (3,):
            headers = dict(req.unredirected_hdrs)
            headers.update(dict((k, v) for k, v in req.headers.items()
                if k not in headers))
            headers = dict((name.title(), val) for name, val in headers.items())
        else:
            h.set_debuglevel(self._debuglevel)
            headers = dict(req.headers)
            headers.update(req.unredirected_hdrs)
            headers = dict(
                (name.title(), val) for name, val in headers.items())

        # Set up CONNECT tunneling (proxied HTTPS) on a fresh connection,
        # moving any proxy auth header onto the CONNECT request
        if req._tunnel_host and not self.connection:
            tunnel_headers = {}
            proxy_auth_hdr = "Proxy-Authorization"
            if proxy_auth_hdr in headers:
                tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                del headers[proxy_auth_hdr]
            if sys.version_info >= (3,):
                h.set_tunnel(req._tunnel_host, headers=tunnel_headers)
            else:
                h._set_tunnel(req._tunnel_host, headers=tunnel_headers)

        try:
            if sys.version_info >= (3,):
                h.request(req.get_method(), req.selector, req.data, headers)
            else:
                h.request(req.get_method(), req.get_selector(), req.data, headers)
        except socket.error as err: # timeout error
            h.close()
            raise URLError(err)
        else:
            r = h.getresponse()

        # Keep the connection around for re-use
        if r.is_keep_alive():
            self.connection = h
        else:
            if self._debuglevel == 5:
                s = '' if self.use_count == 1 else 's'
                console_write(u"Urllib %s Debug General" % h._debug_protocol, True)
                console_write(u" Closing connection to %s on port %s after %s request%s" % (
                    h.host, h.port, self.use_count, s))
            self.use_count = 0
            self.connection = None

        if sys.version_info >= (3,):
            r.url = req.get_full_url()
            r.msg = r.reason
            return r

        # Python 2: wrap the raw response into an addinfourl-style object
        r.recv = r.read
        fp = socket._fileobject(r, close=True)

        resp = addinfourl(fp, r.msg, req.get_full_url())
        resp.code = r.status
        resp.msg = r.reason
        return resp

View File

@@ -0,0 +1,345 @@
import re
import socket
import base64
import hashlib
import os
import sys
try:
# Python 3
from http.client import HTTPS_PORT
from urllib.request import parse_keqv_list, parse_http_list
except (ImportError):
# Python 2
from httplib import HTTPS_PORT
from urllib2 import parse_keqv_list, parse_http_list
from ..console_write import console_write
from .debuggable_https_response import DebuggableHTTPSResponse
from .debuggable_http_connection import DebuggableHTTPConnection
from .invalid_certificate_exception import InvalidCertificateException
# The following code is wrapped in a try because the Linux versions of Sublime
# Text do not include the ssl module due to the fact that different distros
# have different versions
# The whole definition is wrapped in try/except since Linux builds of
# Sublime Text may ship without the ssl module; in that case this module
# defines nothing (see the except clause at the bottom).
try:
    import ssl

    class ValidatingHTTPSConnection(DebuggableHTTPConnection):
        """
        A custom HTTPConnection class that validates SSL certificates, and
        allows proxy authentication for HTTPS connections.
        """

        default_port = HTTPS_PORT

        response_class = DebuggableHTTPSResponse
        _debug_protocol = 'HTTPS'

        def __init__(self, host, port=None, key_file=None, cert_file=None,
                ca_certs=None, **kwargs):
            """
            :param host:
                The string hostname to connect to

            :param port:
                The int port number, defaulting to default_port

            :param key_file:
                Path to the client SSL key file

            :param cert_file:
                Path to the client SSL certificate file

            :param ca_certs:
                Path to the CA certificate bundle; when provided, server
                certificates are validated against it
            """

            # Only forward the kwargs that DebuggableHTTPConnection accepts
            passed_args = {}
            if 'timeout' in kwargs:
                passed_args['timeout'] = kwargs['timeout']
            if 'debug' in kwargs:
                passed_args['debug'] = kwargs['debug']
            DebuggableHTTPConnection.__init__(self, host, port, **passed_args)

            # A urllib password manager used for proxy authentication
            self.passwd = kwargs.get('passwd')
            self.key_file = key_file
            self.cert_file = cert_file
            self.ca_certs = ca_certs
            if 'user_agent' in kwargs:
                self.user_agent = kwargs['user_agent']
            # Certificate validation only happens when a CA bundle was given
            if self.ca_certs:
                self.cert_reqs = ssl.CERT_REQUIRED
            else:
                self.cert_reqs = ssl.CERT_NONE

        def get_valid_hosts_for_cert(self, cert):
            """
            Returns a list of valid hostnames for an SSL certificate

            :param cert: A dict from SSLSocket.getpeercert()

            :return: An array of hostnames
            """

            # subjectAltName DNS entries are preferred; the subject
            # commonName is only used when no subjectAltName is present
            if 'subjectAltName' in cert:
                return [x[1] for x in cert['subjectAltName']
                    if x[0].lower() == 'dns']
            else:
                return [x[0][1] for x in cert['subject']
                    if x[0][0].lower() == 'commonname']

        def validate_cert_host(self, cert, hostname):
            """
            Checks if the cert is valid for the hostname

            :param cert: A dict from SSLSocket.getpeercert()

            :param hostname: A string hostname to check

            :return: A boolean if the cert is valid for the hostname
            """

            hosts = self.get_valid_hosts_for_cert(cert)
            for host in hosts:
                # Turn the certificate name into a regex: dots become
                # literal, and "*" matches a single DNS label (no dots)
                host_re = host.replace('.', '\.').replace('*', '[^.]*')
                if re.search('^%s$' % (host_re,), hostname, re.I):
                    return True
            return False

        def _tunnel(self):
            """
            This custom _tunnel method allows us to read and print the debug
            log for the whole response before throwing an error, and adds
            support for proxy authentication
            """

            # Remember the proxy endpoint so host/port can be restored
            # before retrying the CONNECT with auth credentials
            self._proxy_host = self.host
            self._proxy_port = self.port
            self._set_hostport(self._tunnel_host, self._tunnel_port)

            self._tunnel_headers['Host'] = u"%s:%s" % (self.host, self.port)
            self._tunnel_headers['User-Agent'] = self.user_agent
            self._tunnel_headers['Proxy-Connection'] = 'Keep-Alive'

            request = "CONNECT %s:%d HTTP/1.1\r\n" % (self.host, self.port)
            for header, value in self._tunnel_headers.items():
                request += "%s: %s\r\n" % (header, value)
            request += "\r\n"

            # Python 3 sockets require bytes
            if sys.version_info >= (3,):
                request = bytes(request, 'iso-8859-1')
            self.send(request)

            response = self.response_class(self.sock, method=self._method)
            (version, code, message) = response._read_status()

            status_line = u"%s %s %s" % (version, code, message.rstrip())
            headers = [status_line]

            # debuglevel -1/5 are the custom values used for formatted output
            if self.debuglevel in [-1, 5]:
                console_write(u'Urllib %s Debug Read' % self._debug_protocol, True)
                console_write(u"  %s" % status_line)

            content_length = 0
            close_connection = False
            # Read the proxy response headers line by line
            while True:
                line = response.fp.readline()
                if sys.version_info >= (3,):
                    line = str(line, encoding='iso-8859-1')
                if line == '\r\n':
                    break

                headers.append(line.rstrip())

                parts = line.rstrip().split(': ', 1)
                name = parts[0].lower()
                value = parts[1].lower().strip()
                if name == 'content-length':
                    content_length = int(value)

                if name in ['connection', 'proxy-connection'] and value == 'close':
                    close_connection = True

                if self.debuglevel in [-1, 5]:
                    console_write(u"  %s" % line.rstrip())

            # Handle proxy auth for SSL connections since regular urllib punts on this
            if code == 407 and self.passwd and 'Proxy-Authorization' not in self._tunnel_headers:
                # Drain the error body so the socket can be reused
                if content_length:
                    response._safe_read(content_length)

                supported_auth_methods = {}
                for line in headers:
                    parts = line.split(': ', 1)
                    if parts[0].lower() != 'proxy-authenticate':
                        continue
                    details = parts[1].split(' ', 1)
                    supported_auth_methods[details[0].lower()] = details[1] if len(details) > 1 else ''

                username, password = self.passwd.find_user_password(None, "%s:%s" % (
                    self._proxy_host, self._proxy_port))

                # Digest is preferred over basic when the proxy offers both
                if 'digest' in supported_auth_methods:
                    response_value = self.build_digest_response(
                        supported_auth_methods['digest'], username, password)
                    if response_value:
                        self._tunnel_headers['Proxy-Authorization'] = u"Digest %s" % response_value

                elif 'basic' in supported_auth_methods:
                    response_value = u"%s:%s" % (username, password)
                    response_value = base64.b64encode(response_value).strip()
                    self._tunnel_headers['Proxy-Authorization'] = u"Basic %s" % response_value

                if 'Proxy-Authorization' in self._tunnel_headers:
                    self.host = self._proxy_host
                    self.port = self._proxy_port

                    # If the proxy wanted the connection closed, we need to make a new connection
                    if close_connection:
                        self.sock.close()
                        self.sock = socket.create_connection((self.host, self.port), self.timeout)

                    # Retry the CONNECT, now carrying the auth header
                    return self._tunnel()

            if code != 200:
                self.close()
                raise socket.error("Tunnel connection failed: %d %s" % (code,
                    message.strip()))

        def build_digest_response(self, fields, username, password):
            """
            Takes a Proxy-Authenticate: Digest header and creates a response
            header

            :param fields:
                The string portion of the Proxy-Authenticate header after
                "Digest "

            :param username:
                The username to use for the response

            :param password:
                The password to use for the response

            :return:
                None if invalid Proxy-Authenticate header, otherwise the
                string of fields for the Proxy-Authorization: Digest header
            """

            fields = parse_keqv_list(parse_http_list(fields))

            realm = fields.get('realm')
            nonce = fields.get('nonce')
            qop = fields.get('qop')
            algorithm = fields.get('algorithm')
            if algorithm:
                algorithm = algorithm.lower()
            opaque = fields.get('opaque')

            # Only MD5 (the default) and SHA-1 digests are supported
            if algorithm in ['md5', None]:
                def md5hash(string):
                    return hashlib.md5(string).hexdigest()
                hash = md5hash

            elif algorithm == 'sha':
                def sha1hash(string):
                    return hashlib.sha1(string).hexdigest()
                hash = sha1hash

            else:
                return None

            host_port = u"%s:%s" % (self.host, self.port)

            # A1/A2/response values for the HTTP digest auth scheme
            a1 = "%s:%s:%s" % (username, realm, password)
            a2 = "CONNECT:%s" % host_port
            ha1 = hash(a1)
            ha2 = hash(a2)

            if qop == None:
                response = hash(u"%s:%s:%s" % (ha1, nonce, ha2))
            elif qop == 'auth':
                # nc is a constant since only one request is made per nonce
                nc = '00000001'
                cnonce = hash(os.urandom(8))[:8]
                response = hash(u"%s:%s:%s:%s:%s:%s" % (ha1, nonce, nc, cnonce, qop, ha2))
            else:
                return None

            response_fields = {
                'username': username,
                'realm': realm,
                'nonce': nonce,
                'response': response,
                'uri': host_port
            }
            if algorithm:
                response_fields['algorithm'] = algorithm
            if qop == 'auth':
                response_fields['nc'] = nc
                response_fields['cnonce'] = cnonce
                response_fields['qop'] = qop
            if opaque:
                response_fields['opaque'] = opaque

            return ', '.join([u"%s=\"%s\"" % (field, response_fields[field]) for field in response_fields])

        def connect(self):
            """
            Adds debugging and SSL certification validation
            """

            if self.debuglevel == -1:
                console_write(u"Urllib HTTPS Debug General", True)
                console_write(u"  Connecting to %s on port %s" % (self.host, self.port))

            self.sock = socket.create_connection((self.host, self.port), self.timeout)
            if self._tunnel_host:
                self._tunnel()

            if self.debuglevel == -1:
                console_write(u"Urllib HTTPS Debug General", True)
                console_write(u"  Connecting to %s on port %s" % (self.host, self.port))
                console_write(u"  CA certs file at %s" % (self.ca_certs.decode(sys.getfilesystemencoding())))

            # Upgrade the plain socket (or the established tunnel) to SSL
            self.sock = ssl.wrap_socket(self.sock, keyfile=self.key_file,
                certfile=self.cert_file, cert_reqs=self.cert_reqs,
                ca_certs=self.ca_certs)

            if self.debuglevel == -1:
                console_write(u"  Successfully upgraded connection to %s:%s with SSL" % (
                    self.host, self.port))

            # This debugs and validates the SSL certificate
            if self.cert_reqs & ssl.CERT_REQUIRED:
                cert = self.sock.getpeercert()

                if self.debuglevel == -1:
                    # Maps X.509 subject field names to their short forms
                    # NOTE(review): 'commonName' appears twice in this dict
                    # literal; the duplicate key is harmless (later entry
                    # wins, same value) but should be removed
                    subjectMap = {
                        'organizationName': 'O',
                        'commonName': 'CN',
                        'organizationalUnitName': 'OU',
                        'countryName': 'C',
                        'serialNumber': 'serialNumber',
                        'commonName': 'CN',
                        'localityName': 'L',
                        'stateOrProvinceName': 'S'
                    }
                    subject_list = list(cert['subject'])
                    subject_list.reverse()
                    subject_parts = []
                    for pair in subject_list:
                        if pair[0][0] in subjectMap:
                            field_name = subjectMap[pair[0][0]]
                        else:
                            field_name = pair[0][0]
                        subject_parts.append(field_name + '=' + pair[0][1])

                    console_write(u"  Server SSL certificate:")
                    console_write(u"    subject: " + ','.join(subject_parts))
                    if 'subjectAltName' in cert:
                        console_write(u"    common name: " + cert['subjectAltName'][0][1])
                    if 'notAfter' in cert:
                        console_write(u"    expire date: " + cert['notAfter'])

                # NOTE(review): split(':', 0) performs no split at all, so
                # hostname is always the full value of self.host; presumably
                # split(':')[0] was intended to strip a port suffix - confirm
                hostname = self.host.split(':', 0)[0]

                if not self.validate_cert_host(cert, hostname):
                    if self.debuglevel == -1:
                        console_write(u"  Certificate INVALID")

                    raise InvalidCertificateException(hostname, cert,
                        'hostname mismatch')

                if self.debuglevel == -1:
                    console_write(u"  Certificate validated for %s" % hostname)

# The ssl module is unavailable - this module intentionally defines nothing
except (ImportError):
    pass

View File

@@ -0,0 +1,59 @@
try:
# Python 3
from urllib.error import URLError
import urllib.request as urllib_compat
except (ImportError):
# Python 2
from urllib2 import URLError
import urllib2 as urllib_compat
# The following code is wrapped in a try because the Linux versions of Sublime
# Text do not include the ssl module due to the fact that different distros
# have different versions
try:
    import ssl

    from .validating_https_connection import ValidatingHTTPSConnection
    from .invalid_certificate_exception import InvalidCertificateException
    from .persistent_handler import PersistentHandler

    if hasattr(urllib_compat, 'HTTPSHandler'):
        class ValidatingHTTPSHandler(PersistentHandler, urllib_compat.HTTPSHandler):
            """
            A urllib handler that validates SSL certificates for HTTPS requests
            """

            def __init__(self, **kwargs):
                """
                :param kwargs:
                    Options forwarded to each ValidatingHTTPSConnection;
                    `debug` (bool) or `debuglevel` (int) control debugging
                """

                # This is a special value that will not trigger the standard debug
                # functionality, but custom code where we can format the output
                self._debuglevel = 0
                if 'debug' in kwargs and kwargs['debug']:
                    self._debuglevel = 5
                elif 'debuglevel' in kwargs:
                    self._debuglevel = kwargs['debuglevel']
                # Saved so every connection is constructed with the same options
                self._connection_args = kwargs

            def https_open(self, req):
                """
                Opens an HTTPS request via a certificate-validating connection

                :param req: The urllib request object

                :raises:
                    InvalidCertificateException: when certificate validation fails
                """

                def http_class_wrapper(host, **kwargs):
                    # Merge per-request kwargs over the handler-wide ones
                    full_kwargs = dict(self._connection_args)
                    full_kwargs.update(kwargs)
                    return ValidatingHTTPSConnection(host, **full_kwargs)

                try:
                    return self.do_open(http_class_wrapper, req)
                except URLError as e:
                    # SSLError error code 1 indicates a certificate
                    # verification failure
                    if type(e.reason) == ssl.SSLError and e.reason.args[0] == 1:
                        raise InvalidCertificateException(req.host, '',
                            e.reason.args[1])
                    raise

            https_request = urllib_compat.AbstractHTTPHandler.do_request_

    else:
        raise ImportError()

except (ImportError) as e:
    # Bug fix: Python 3 unbinds the "except ... as e" target when the
    # except block exits, so re-raising `e` later produced a NameError
    # instead of the original ImportError. Stash the exception under a
    # module-level name that survives the block.
    _import_error = e

    class ValidatingHTTPSHandler():
        """
        Fallback used when the ssl module is unavailable; constructing the
        handler re-raises the original ImportError.
        """

        def __init__(self, **kwargs):
            raise _import_error

View File

@@ -0,0 +1,75 @@
import os
import time
import sublime
from .open_compat import open_compat, read_compat
class HttpCache(object):
    """
    A data store for caching HTTP response data.
    """

    def __init__(self, ttl):
        """
        :param ttl:
            The number of seconds cache entries should be kept for; older
            entries are purged immediately
        """

        self.base_path = os.path.join(sublime.packages_path(), 'User', 'Package Control.cache')
        if not os.path.exists(self.base_path):
            os.mkdir(self.base_path)
        self.clear(int(ttl))

    def clear(self, ttl):
        """
        Removes all cache entries older than the TTL

        :param ttl:
            The number of seconds a cache entry should be valid for
        """

        cutoff = time.time() - int(ttl)
        for entry in os.listdir(self.base_path):
            entry_path = os.path.join(self.base_path, entry)
            # Folders should never appear in the cache dir, but skip them
            # defensively instead of raising
            if os.path.isdir(entry_path):
                continue
            if os.stat(entry_path).st_mtime < cutoff:
                os.unlink(entry_path)

    def get(self, key):
        """
        Returns a cached value

        :param key:
            The key to fetch the cache for

        :return:
            The (binary) cached value, or False
        """

        entry_path = os.path.join(self.base_path, key)
        if not os.path.exists(entry_path):
            return False
        with open_compat(entry_path, 'rb') as handle:
            return read_compat(handle)

    def has(self, key):
        """Returns if a cache entry exists for the key."""

        return os.path.exists(os.path.join(self.base_path, key))

    def set(self, key, content):
        """
        Saves a value in the cache

        :param key:
            The key to save the cache with

        :param content:
            The (binary) content to cache
        """

        with open_compat(os.path.join(self.base_path, key), 'wb') as handle:
            handle.write(content)

View File

@@ -0,0 +1,27 @@
import os
import sys
from .file_not_found_error import FileNotFoundError
def open_compat(path, mode='r'):
    """
    Opens a file with sane defaults across Python 2 and 3.

    On Python 3, text modes use UTF-8 with replacement of undecodable
    bytes; binary modes pass no encoding. Raises FileNotFoundError when
    reading a path that does not exist.
    """

    if not os.path.exists(path) and mode in ('r', 'rb'):
        raise FileNotFoundError(u"The file \"%s\" could not be found" % path)

    if sys.version_info < (3,):
        # Python 2's open() takes no encoding arguments
        return open(path, mode)

    binary = mode in ('rb', 'wb', 'ab')
    return open(
        path,
        mode,
        encoding=None if binary else 'utf-8',
        errors=None if binary else 'replace'
    )
def read_compat(file_obj):
    """
    Reads the full contents of a file object, decoding to unicode on
    Python 2 (UTF-8, undecodable bytes replaced).
    """

    data = file_obj.read()
    if sys.version_info < (3,):
        data = unicode(data, 'utf-8', errors='replace')
    return data

View File

@@ -0,0 +1,107 @@
import threading
import os
import shutil
import sublime
from .show_error import show_error
from .console_write import console_write
from .unicode import unicode_from_os
from .clear_directory import clear_directory
from .automatic_upgrader import AutomaticUpgrader
from .package_manager import PackageManager
from .package_renamer import PackageRenamer
from .open_compat import open_compat
from .package_io import package_file_exists
class PackageCleanup(threading.Thread, PackageRenamer):
    """
    Cleans up folders for packages that were removed, but that still have files
    in use.
    """

    def __init__(self):
        self.manager = PackageManager()
        # Populates self.installed_packages from the settings file
        # (inherited from PackageRenamer)
        self.load_settings()
        threading.Thread.__init__(self)

    def run(self):
        """
        Scans the packages folder on a background thread; all UI work is
        deferred to the main thread via sublime.set_timeout().
        """

        found_pkgs = []
        installed_pkgs = list(self.installed_packages)
        for package_name in os.listdir(sublime.packages_path()):
            package_dir = os.path.join(sublime.packages_path(), package_name)

            # Cleanup packages that could not be removed due to in-use files
            cleanup_file = os.path.join(package_dir, 'package-control.cleanup')
            if os.path.exists(cleanup_file):
                try:
                    shutil.rmtree(package_dir)
                    console_write(u'Removed old directory for package %s' % package_name, True)
                except (OSError) as e:
                    # Still locked - re-create the marker file so removal
                    # is retried on the next start
                    if not os.path.exists(cleanup_file):
                        open_compat(cleanup_file, 'w').close()

                    error_string = (u'Unable to remove old directory for package ' +
                        u'%s - deferring until next start: %s') % (
                        package_name, unicode_from_os(e))
                    console_write(error_string, True)

            # Finish reinstalling packages that could not be upgraded due to
            # in-use files
            reinstall = os.path.join(package_dir, 'package-control.reinstall')
            if os.path.exists(reinstall):
                metadata_path = os.path.join(package_dir, 'package-metadata.json')
                if not clear_directory(package_dir, [metadata_path]):
                    # Clearing failed again - keep the marker and warn the
                    # user that a restart is likely needed
                    if not os.path.exists(reinstall):
                        open_compat(reinstall, 'w').close()
                    # Assigning this here prevents the callback from referencing the value
                    # of the "package_name" variable when it is executed
                    restart_message = (u'An error occurred while trying to ' +
                        u'finish the upgrade of %s. You will most likely need to ' +
                        u'restart your computer to complete the upgrade.') % package_name

                    def show_still_locked():
                        show_error(restart_message)
                    sublime.set_timeout(show_still_locked, 10)
                else:
                    self.manager.install_package(package_name)

            # This adds previously installed packages from old versions of PC
            if package_file_exists(package_name, 'package-metadata.json') and \
                    package_name not in self.installed_packages:
                installed_pkgs.append(package_name)
                params = {
                    'package': package_name,
                    'operation': 'install',
                    'version': \
                        self.manager.get_metadata(package_name).get('version')
                }
                self.manager.record_usage(params)

            found_pkgs.append(package_name)

        if int(sublime.version()) >= 3000:
            # On ST3, packages may also exist as .sublime-package archives
            package_files = os.listdir(sublime.installed_packages_path())
            found_pkgs += [file.replace('.sublime-package', '') for file in package_files]

        sublime.set_timeout(lambda: self.finish(installed_pkgs, found_pkgs), 10)

    def finish(self, installed_pkgs, found_pkgs):
        """
        A callback that can be run the main UI thread to perform saving of the
        Package Control.sublime-settings file. Also fires off the
        :class:`AutomaticUpgrader`.

        :param installed_pkgs:
            A list of the string package names of all "installed" packages,
            even ones that do not appear to be in the filesystem.

        :param found_pkgs:
            A list of the string package names of all packages that are
            currently installed on the filesystem.
        """

        self.save_packages(installed_pkgs)
        AutomaticUpgrader(found_pkgs).start()

View File

@@ -0,0 +1,39 @@
import os
from .show_error import show_error
from .package_manager import PackageManager
class PackageCreator():
    """
    Abstract class for commands that create .sublime-package files
    """

    def show_panel(self):
        """
        Shows a list of packages that can be turned into a .sublime-package file
        """

        self.manager = PackageManager()
        self.packages = self.manager.list_packages(unpacked_only=True)
        if not self.packages:
            show_error('There are no packages available to be packaged')
            return
        self.window.show_quick_panel(self.packages, self.on_done)

    def get_package_destination(self):
        """
        Retrieves the destination for .sublime-package files

        :return:
            A string - the path to the folder to save .sublime-package files in
        """

        # A truthiness check (not dict.get() with a default) is used on
        # purpose: the setting may be present but blank, which should also
        # fall through to the default destination
        configured = self.manager.settings.get('package_destination')
        if configured:
            return configured
        return os.path.join(os.path.expanduser('~'), 'Desktop')

View File

@@ -0,0 +1,247 @@
import os
import re
import threading
import sublime
from .preferences_filename import preferences_filename
from .thread_progress import ThreadProgress
from .package_manager import PackageManager
from .upgraders.git_upgrader import GitUpgrader
from .upgraders.hg_upgrader import HgUpgrader
from .versions import version_comparable
class PackageInstaller():
    """
    Provides helper functionality related to installing packages
    """

    def __init__(self):
        self.manager = PackageManager()

    def make_package_list(self, ignore_actions=[], override_action=None,
            ignore_packages=[]):
        """
        Creates a list of packages and what operation would be performed for
        each. Allows filtering by the applicable action or package name.
        Returns the information in a format suitable for displaying in the
        quick panel.

        :param ignore_actions:
            A list of actions to ignore packages by. Valid actions include:
            `install`, `upgrade`, `downgrade`, `reinstall`, `overwrite`,
            `pull` and `none`. `pull` and `none` are for Git and Hg
            repositories. `pull` is present when incoming changes are detected,
            where as `none` is selected if no commits are available. `overwrite`
            is for packages that do not include version information via the
            `package-metadata.json` file.

        :param override_action:
            A string action name to override the displayed action for all listed
            packages.

        :param ignore_packages:
            A list of packages names that should not be returned in the list

        :return:
            A list of lists, each containing three strings:
              0 - package name
              1 - package description
              2 - action; [extra info;] package url
        """

        packages = self.manager.list_available_packages()
        installed_packages = self.manager.list_packages()

        package_list = []
        # Case-insensitive sort so the quick panel ordering is intuitive
        for package in sorted(iter(packages.keys()), key=lambda s: s.lower()):
            if ignore_packages and package in ignore_packages:
                continue
            package_entry = [package]
            info = packages[package]
            download = info['download']

            if package in installed_packages:
                installed = True
                metadata = self.manager.get_metadata(package)
                if metadata.get('version'):
                    installed_version = metadata['version']
                else:
                    installed_version = None
            else:
                installed = False

            # NOTE: installed_version is only bound when installed is True;
            # the conditional below relies on short-circuit evaluation of
            # "installed and installed_version" to avoid referencing it
            installed_version_name = 'v' + installed_version if \
                installed and installed_version else 'unknown version'
            new_version = 'v' + download['version']

            vcs = None
            package_dir = self.manager.get_package_dir(package)
            settings = self.manager.settings

            if override_action:
                action = override_action
                extra = ''
            else:
                # VCS-checked-out packages are upgraded via their VCS tool
                # instead of a download, unless the user opted out
                if os.path.exists(os.path.join(package_dir, '.git')):
                    if settings.get('ignore_vcs_packages'):
                        continue
                    vcs = 'git'
                    incoming = GitUpgrader(settings.get('git_binary'),
                        settings.get('git_update_command'), package_dir,
                        settings.get('cache_length'), settings.get('debug')
                        ).incoming()
                elif os.path.exists(os.path.join(package_dir, '.hg')):
                    if settings.get('ignore_vcs_packages'):
                        continue
                    vcs = 'hg'
                    incoming = HgUpgrader(settings.get('hg_binary'),
                        settings.get('hg_update_command'), package_dir,
                        settings.get('cache_length'), settings.get('debug')
                        ).incoming()

                if installed:
                    if vcs:
                        if incoming:
                            action = 'pull'
                            extra = ' with ' + vcs
                        else:
                            action = 'none'
                            extra = ''
                    elif not installed_version:
                        # Installed but no recorded version - a download
                        # would overwrite whatever is there
                        action = 'overwrite'
                        extra = ' %s with %s' % (installed_version_name,
                            new_version)
                    else:
                        # Compare comparable version tuples, not strings
                        installed_version = version_comparable(installed_version)
                        download_version = version_comparable(download['version'])
                        if download_version > installed_version:
                            action = 'upgrade'
                            extra = ' to %s from %s' % (new_version,
                                installed_version_name)
                        elif download_version < installed_version:
                            action = 'downgrade'
                            extra = ' to %s from %s' % (new_version,
                                installed_version_name)
                        else:
                            action = 'reinstall'
                            extra = ' %s' % new_version
                else:
                    action = 'install'
                    extra = ' %s' % new_version
                extra += ';'

                if action in ignore_actions:
                    continue

            description = info.get('description')
            if not description:
                description = 'No description provided'
            package_entry.append(description)
            # Strip the scheme from the homepage URL for compact display
            package_entry.append(action + extra + ' ' +
                re.sub('^https?://', '', info['homepage']))
            package_list.append(package_entry)
        return package_list

    def disable_packages(self, packages):
        """
        Disables one or more packages before installing or upgrading to prevent
        errors where Sublime Text tries to read files that no longer exist, or
        read a half-written file.

        :param packages: The string package name, or an array of strings

        :return: A list of package names that were actually disabled
        """

        if not isinstance(packages, list):
            packages = [packages]

        # Don't disable Package Control so it does not get stuck disabled
        if 'Package Control' in packages:
            packages.remove('Package Control')

        disabled = []

        settings = sublime.load_settings(preferences_filename())
        ignored = settings.get('ignored_packages')
        if not ignored:
            ignored = []
        for package in packages:
            if not package in ignored:
                ignored.append(package)
                disabled.append(package)
        settings.set('ignored_packages', ignored)
        sublime.save_settings(preferences_filename())
        return disabled

    def reenable_package(self, package):
        """
        Re-enables a package after it has been installed or upgraded

        :param package: The string package name
        """

        settings = sublime.load_settings(preferences_filename())
        ignored = settings.get('ignored_packages')
        if not ignored:
            return
        if package in ignored:
            settings.set('ignored_packages',
                list(set(ignored) - set([package])))
            sublime.save_settings(preferences_filename())

    def on_done(self, picked):
        """
        Quick panel user selection handler - disables a package, installs or
        upgrades it, then re-enables the package

        :param picked:
            An integer of the 0-based package name index from the presented
            list. -1 means the user cancelled.
        """

        if picked == -1:
            return
        name = self.package_list[picked][0]

        # Only re-enable afterwards if this call actually disabled it
        if name in self.disable_packages(name):
            on_complete = lambda: self.reenable_package(name)
        else:
            on_complete = None

        thread = PackageInstallerThread(self.manager, name, on_complete)
        thread.start()
        ThreadProgress(thread, 'Installing package %s' % name,
            'Package %s successfully %s' % (name, self.completion_type))
class PackageInstallerThread(threading.Thread):
    """
    A thread to run package install/upgrade operations in so that the main
    Sublime Text thread does not get blocked and freeze the UI
    """

    def __init__(self, manager, package, on_complete):
        """
        :param manager:
            An instance of :class:`PackageManager`

        :param package:
            The string package name to install/upgrade

        :param on_complete:
            A callback to run after installing/upgrading the package
        """

        threading.Thread.__init__(self)
        self.manager = manager
        self.package = package
        self.on_complete = on_complete

    def run(self):
        try:
            self.result = self.manager.install_package(self.package)
        finally:
            # The callback runs on the main UI thread via set_timeout
            callback = self.on_complete
            if callback:
                sublime.set_timeout(callback, 1)

View File

@@ -0,0 +1,126 @@
import os
import zipfile
import sublime
from .console_write import console_write
from .open_compat import open_compat, read_compat
from .unicode import unicode_from_os
from .file_not_found_error import FileNotFoundError
def read_package_file(package, relative_path, binary=False, debug=False):
    """
    Reads a file from a package, checking the unpacked package folder first
    and then (on ST3) the .sublime-package archive.

    :param package: The string package name
    :param relative_path: The path to the file inside the package
    :param binary: If the contents should be returned as bytes
    :param debug: If console debug output should be written

    :return: The file contents, or False if the file could not be found
    """

    # Improvement: removed the unused `file_path` local the original
    # computed but never read
    package_dir = _get_package_dir(package)

    if os.path.exists(package_dir):
        result = _read_regular_file(package, relative_path, binary, debug)
        if result != False:
            return result

    # Only ST3 supports .sublime-package archives
    if int(sublime.version()) >= 3000:
        result = _read_zip_file(package, relative_path, binary, debug)
        if result != False:
            return result

    if debug:
        console_write(u"Unable to find file %s in the package %s" % (relative_path, package), True)

    return False
def package_file_exists(package, relative_path):
    """
    Checks if a file exists inside a package, whether unpacked or (on ST3)
    inside a .sublime-package archive.

    :param package: The string package name
    :param relative_path: The path to the file inside the package

    :return: A boolean
    """

    # Improvement: removed the unused `file_path` local the original
    # computed but never read
    package_dir = _get_package_dir(package)

    if os.path.exists(package_dir):
        result = _regular_file_exists(package, relative_path)
        if result:
            return result

    # Only ST3 supports .sublime-package archives
    if int(sublime.version()) >= 3000:
        return _zip_file_exists(package, relative_path)

    return False
def _get_package_dir(package):
    """:return: The full filesystem path to the package directory"""

    packages_root = sublime.packages_path()
    return os.path.join(packages_root, package)
def _read_regular_file(package, relative_path, binary=False, debug=False):
    """
    Reads relative_path from the unpacked package folder.

    :return: The file contents, or False when the file does not exist
    """

    full_path = os.path.join(_get_package_dir(package), relative_path)
    mode = 'rb' if binary else 'r'
    try:
        with open_compat(full_path, mode) as handle:
            return read_compat(handle)
    except (FileNotFoundError):
        if debug:
            console_write(u"Unable to find file %s in the package folder for %s" % (relative_path, package), True)
        return False
def _read_zip_file(package, relative_path, binary=False, debug=False):
    """
    Reads relative_path out of the package's .sublime-package archive.

    :param package: The string package name
    :param relative_path: The path to the file inside the archive
    :param binary: If the contents should be returned as bytes
    :param debug: If console debug output should be written

    :return: The file contents, or False on any failure
    """

    zip_path = os.path.join(sublime.installed_packages_path(),
        package + '.sublime-package')

    if not os.path.exists(zip_path):
        if debug:
            console_write(u"Unable to find a sublime-package file for %s" % package, True)
        return False

    try:
        package_zip = zipfile.ZipFile(zip_path, 'r')
    except (zipfile.BadZipfile):
        console_write(u'An error occurred while trying to unzip the sublime-package file for %s.' % package, True)
        return False

    # Improvement: the ZipFile is now always closed (the original leaked
    # the open file handle), and the unused exception bindings are gone
    try:
        contents = package_zip.read(relative_path)
        if not binary:
            contents = contents.decode('utf-8')
        return contents
    except (KeyError):
        if debug:
            console_write(u"Unable to find file %s in the sublime-package file for %s" % (relative_path, package), True)
    except (IOError):
        console_write(u'Unable to read file from sublime-package file for %s due to an invalid filename' % package, True)
    except (UnicodeDecodeError):
        console_write(u'Unable to read file from sublime-package file for %s due to an invalid filename or character encoding issue' % package, True)
    finally:
        package_zip.close()

    return False
def _regular_file_exists(package, relative_path):
    """Checks if relative_path exists inside the unpacked package folder."""

    full_path = os.path.join(_get_package_dir(package), relative_path)
    return os.path.exists(full_path)
def _zip_file_exists(package, relative_path):
    """
    Checks if relative_path exists inside the package's .sublime-package
    archive.

    :param package: The string package name
    :param relative_path: The path to the file inside the archive

    :return: A boolean
    """

    zip_path = os.path.join(sublime.installed_packages_path(),
        package + '.sublime-package')

    if not os.path.exists(zip_path):
        return False

    try:
        package_zip = zipfile.ZipFile(zip_path, 'r')
    except (zipfile.BadZipfile):
        # Bug fix: this previously referenced the undefined name
        # `package_name`, raising NameError on a corrupt archive
        console_write(u'An error occurred while trying to unzip the sublime-package file for %s.' % package, True)
        return False

    # Improvement: the ZipFile is now always closed (the original leaked
    # the open file handle)
    try:
        package_zip.getinfo(relative_path)
        return True
    except (KeyError):
        return False
    finally:
        package_zip.close()

View File

@@ -0,0 +1,117 @@
import os
import sublime
from .console_write import console_write
from .package_io import package_file_exists
class PackageRenamer():
    """
    Class to handle renaming packages via the renamed_packages setting
    gathered from channels and repositories.
    """

    def load_settings(self):
        """
        Loads the list of installed packages from the
        Package Control.sublime-settings file.
        """

        self.settings_file = 'Package Control.sublime-settings'
        self.settings = sublime.load_settings(self.settings_file)
        self.installed_packages = self.settings.get('installed_packages', [])
        # Guard against a corrupted/mistyped setting value
        if not isinstance(self.installed_packages, list):
            self.installed_packages = []

    def rename_packages(self, installer):
        """
        Renames any installed packages that the user has installed.

        :param installer:
            An instance of :class:`PackageInstaller`
        """

        # Fetch the packages since that will pull in the renamed packages list
        installer.manager.list_available_packages()
        renamed_packages = installer.manager.settings.get('renamed_packages', {})
        if not renamed_packages:
            renamed_packages = {}

        # These are packages that have been tracked as installed
        installed_pkgs = self.installed_packages
        # There are the packages actually present on the filesystem
        present_packages = installer.manager.list_packages()

        # Rename directories for packages that have changed names
        for package_name in renamed_packages:
            package_dir = os.path.join(sublime.packages_path(), package_name)
            # Only packages installed by Package Control (which have
            # package-metadata.json) are renamed
            if not package_file_exists(package_name, 'package-metadata.json'):
                continue

            new_package_name = renamed_packages[package_name]
            new_package_dir = os.path.join(sublime.packages_path(),
                new_package_name)

            changing_case = package_name.lower() == new_package_name.lower()
            case_insensitive_fs = sublime.platform() in ['windows', 'osx']

            # Since Windows and OSX use case-insensitive filesystems, we have to
            # scan through the list of installed packages if the rename of the
            # package is just changing the case of it. If we don't find the old
            # name for it, we continue the loop since os.path.exists() will return
            # true due to the case-insensitive nature of the filesystems.
            if case_insensitive_fs and changing_case:
                has_old = False
                for present_package_name in present_packages:
                    if present_package_name == package_name:
                        has_old = True
                        break
                if not has_old:
                    continue

            if not os.path.exists(new_package_dir) or (case_insensitive_fs and changing_case):
                # Windows will not allow you to rename to the same name with
                # a different case, so we work around that with a temporary name
                if os.name == 'nt' and changing_case:
                    temp_package_name = '__' + new_package_name
                    temp_package_dir = os.path.join(sublime.packages_path(),
                        temp_package_name)
                    os.rename(package_dir, temp_package_dir)
                    package_dir = temp_package_dir

                os.rename(package_dir, new_package_dir)
                installed_pkgs.append(new_package_name)

                console_write(u'Renamed %s to %s' % (package_name, new_package_name), True)
            else:
                # A package with the new name already exists, so the old
                # copy is simply removed
                installer.manager.remove_package(package_name)
                message_string = u'Removed %s since package with new name (%s) already exists' % (
                    package_name, new_package_name)
                console_write(message_string, True)

            # The old name may or may not have been tracked as installed
            try:
                installed_pkgs.remove(package_name)
            except (ValueError):
                pass

        # Defer saving via set_timeout so it happens on the main thread
        sublime.set_timeout(lambda: self.save_packages(installed_pkgs), 10)

    def save_packages(self, installed_packages):
        """
        Saves the list of installed packages (after having been appropriately
        renamed)

        :param installed_packages:
            The new list of installed packages
        """

        # De-duplicate and sort case-insensitively for stable settings output
        installed_packages = list(set(installed_packages))
        installed_packages = sorted(installed_packages,
            key=lambda s: s.lower())

        # Only write the settings file when something actually changed
        if installed_packages != self.installed_packages:
            self.settings.set('installed_packages', installed_packages)
            sublime.save_settings(self.settings_file)

View File

@@ -0,0 +1,11 @@
import sublime
def preferences_filename():
    """
    :return: The appropriate settings filename based on the version of Sublime Text
    """

    # Builds older than 2174 used the legacy Global settings filename
    legacy = int(sublime.version()) < 2174
    return 'Global.sublime-settings' if legacy else 'Preferences.sublime-settings'

View File

@@ -0,0 +1,12 @@
from .bitbucket_repository_provider import BitBucketRepositoryProvider
from .github_repository_provider import GitHubRepositoryProvider
from .github_user_provider import GitHubUserProvider
from .repository_provider import RepositoryProvider
from .channel_provider import ChannelProvider
# Providers that can turn a single repository URL into package info.
# NOTE(review): the generic RepositoryProvider is listed last, which looks
# like a deliberate fallback ordering - confirm against the consumer of
# this list before relying on it.
REPOSITORY_PROVIDERS = [BitBucketRepositoryProvider, GitHubRepositoryProvider,
    GitHubUserProvider, RepositoryProvider]

# Providers that can parse a channel; presumably a channel lists multiple
# repositories - verify against ChannelProvider.
CHANNEL_PROVIDERS = [ChannelProvider]

View File

@@ -0,0 +1,163 @@
import re
from ..clients.bitbucket_client import BitBucketClient
from ..downloaders.downloader_exception import DownloaderException
from ..clients.client_exception import ClientException
from .provider_exception import ProviderException
class BitBucketRepositoryProvider():
    """
    Allows using a public BitBucket repository as the source for a single package.
    For legacy purposes, this can also be treated as the source for a Package
    Control "repository".

    :param repo:
        The public web URL to the BitBucket repository. Should be in the format
        `https://bitbucket.org/user/package`.

    :param settings:
        A dict containing at least the following fields:
          `cache_length`,
          `debug`,
          `timeout`,
          `user_agent`
        Optional fields:
          `http_proxy`,
          `https_proxy`,
          `proxy_username`,
          `proxy_password`,
          `query_string_params`,
          `install_prereleases`
    """

    def __init__(self, repo, settings):
        # Per-instance cache of results, keyed by method name
        self.cache = {}
        self.repo = repo
        self.settings = settings
        # Maps source URL -> Exception for any source that failed to load
        self.failed_sources = {}

    @classmethod
    def match_url(cls, repo):
        """Indicates if this provider can handle the provided repo"""

        # Raw string avoids invalid-escape warnings; PEP 8 prefers
        # "is not None" over "!= None"
        return re.search(r'^https?://bitbucket.org/([^/]+/[^/]+)/?$', repo) is not None

    def prefetch(self):
        """
        Go out and perform HTTP operations, caching the result

        :raises:
            DownloaderException: when there is an issue download package info
            ClientException: when there is an issue parsing package info
        """

        # Draining the generator forces the HTTP work and fills self.cache
        [name for name, info in self.get_packages()]

    def get_failed_sources(self):
        """
        List of any URLs that could not be accessed while accessing this repository

        :return:
            A generator of ("https://bitbucket.org/user/repo", Exception()) tuples
        """

        return self.failed_sources.items()

    def get_broken_packages(self):
        """
        For API-compatibility with RepositoryProvider
        """

        return {}.items()

    def get_packages(self, invalid_sources=None):
        """
        Uses the BitBucket API to construct necessary info for a package

        :param invalid_sources:
            A list of URLs that should be ignored

        :raises:
            DownloaderException: when there is an issue download package info
            ClientException: when there is an issue parsing package info

        :return:
            A generator of
            (
                'Package Name',
                {
                    'name': name,
                    'description': description,
                    'author': author,
                    'homepage': homepage,
                    'last_modified': last modified date,
                    'download': {
                        'url': url,
                        'date': date,
                        'version': version
                    },
                    'previous_names': [],
                    'labels': [],
                    'sources': [the repo URL],
                    'readme': url,
                    'issues': url,
                    'donate': url,
                    'buy': None
                }
            )
            tuples
        """

        if 'get_packages' in self.cache:
            for key, value in self.cache['get_packages'].items():
                yield (key, value)
            return

        # Check the skip list before constructing the client so ignored
        # repos do no work at all. Bug fix: "raise StopIteration()" inside a
        # generator becomes a RuntimeError under PEP 479 (Python 3.7+); a
        # bare return is the correct way to end the generator early.
        if invalid_sources is not None and self.repo in invalid_sources:
            return

        client = BitBucketClient(self.settings)

        try:
            repo_info = client.repo_info(self.repo)
            download = client.download_info(self.repo)

            name = repo_info['name']
            details = {
                'name': name,
                'description': repo_info['description'],
                'homepage': repo_info['homepage'],
                'author': repo_info['author'],
                'last_modified': download.get('date'),
                'download': download,
                'previous_names': [],
                'labels': [],
                'sources': [self.repo],
                'readme': repo_info['readme'],
                'issues': repo_info['issues'],
                'donate': repo_info['donate'],
                'buy': None
            }
            self.cache['get_packages'] = {name: details}
            yield (name, details)

        except (DownloaderException, ClientException, ProviderException) as e:
            # Record the failure and cache an empty result so later calls
            # return cleanly; see PEP 479 note above for the bare return
            self.failed_sources[self.repo] = e
            self.cache['get_packages'] = {}
            return

    def get_renamed_packages(self):
        """For API-compatibility with RepositoryProvider"""

        return {}

    def get_unavailable_packages(self):
        """
        Method for compatibility with RepositoryProvider class. These providers
        are based on API calls, and thus do not support different platform
        downloads, making it impossible for there to be unavailable packages.

        :return: An empty list
        """

        return []

View File

@@ -0,0 +1,312 @@
import json
import os
import re
try:
# Python 3
from urllib.parse import urlparse
except (ImportError):
# Python 2
from urlparse import urlparse
from ..console_write import console_write
from .release_selector import ReleaseSelector
from .provider_exception import ProviderException
from ..downloaders.downloader_exception import DownloaderException
from ..clients.client_exception import ClientException
from ..download_manager import downloader
class ChannelProvider(ReleaseSelector):
    """
    Retrieves a channel and provides an API into the information

    The current channel/repository infrastructure caches repository info into
    the channel to improve the Package Control client performance. This also
    has the side effect of lessening the load on the GitHub and BitBucket APIs
    and getting around not-infrequent HTTP 503 errors from those APIs.

    :param channel:
        The URL of the channel

    :param settings:
        A dict containing at least the following fields:
          `cache_length`,
          `debug`,
          `timeout`,
          `user_agent`
        Optional fields:
          `http_proxy`,
          `https_proxy`,
          `proxy_username`,
          `proxy_password`,
          `query_string_params`,
          `install_prereleases`
    """

    def __init__(self, channel, settings):
        # Parsed JSON of the channel; None until fetch() succeeds
        self.channel_info = None
        self.schema_version = 0.0
        self.channel = channel
        self.settings = settings
        # Filled by get_packages() with names that have no compatible release
        self.unavailable_packages = []

    @classmethod
    def match_url(cls, channel):
        """Indicates if this provider can handle the provided channel"""

        # Fallback provider - accepts any URL or file path
        return True

    def prefetch(self):
        """
        Go out and perform HTTP operations, caching the result

        :raises:
            ProviderException: when an error occurs trying to open a file
            DownloaderException: when an error occurs trying to open a URL
        """

        self.fetch()

    def fetch(self):
        """
        Retrieves and loads the JSON for other methods to use

        :raises:
            ProviderException: when an error occurs with the channel contents
            DownloaderException: when an error occurs trying to open a URL
        """

        if self.channel_info is not None:
            return

        if re.match('https?://', self.channel, re.I):
            with downloader(self.channel, self.settings) as manager:
                channel_json = manager.fetch(self.channel,
                    'Error downloading channel.')

        # All other channels are expected to be filesystem paths
        else:
            if not os.path.exists(self.channel):
                raise ProviderException(u'Error, file %s does not exist' % self.channel)

            if self.settings.get('debug'):
                console_write(u'Loading %s as a channel' % self.channel, True)

            # We open as binary so we get bytes like the DownloadManager
            with open(self.channel, 'rb') as f:
                channel_json = f.read()

        try:
            channel_info = json.loads(channel_json.decode('utf-8'))
        except ValueError:
            raise ProviderException(u'Error parsing JSON from channel %s.' % self.channel)

        schema_error = u'Channel %s does not appear to be a valid channel file because ' % self.channel

        if 'schema_version' not in channel_info:
            raise ProviderException(u'%s the "schema_version" JSON key is missing.' % schema_error)

        try:
            self.schema_version = float(channel_info.get('schema_version'))
        except ValueError:
            raise ProviderException(u'%s the "schema_version" is not a valid number.' % schema_error)

        if self.schema_version not in [1.0, 1.1, 1.2, 2.0]:
            raise ProviderException(u'%s the "schema_version" is not recognized. Must be one of: 1.0, 1.1, 1.2 or 2.0.' % schema_error)

        self.channel_info = channel_info

    def get_name_map(self):
        """
        :raises:
            ProviderException: when an error occurs with the channel contents
            DownloaderException: when an error occurs trying to open a URL

        :return:
            A dict of the mapping for URL slug -> package name
        """

        self.fetch()

        # Schema 2.0 dropped the name map in favor of explicit package names
        if self.schema_version >= 2.0:
            return {}

        return self.channel_info.get('package_name_map', {})

    def get_renamed_packages(self):
        """
        :raises:
            ProviderException: when an error occurs with the channel contents
            DownloaderException: when an error occurs trying to open a URL

        :return:
            A dict of the packages that have been renamed
        """

        self.fetch()

        # Schema 2.0 stores renames per-package via "previous_names" instead
        if self.schema_version >= 2.0:
            return {}

        return self.channel_info.get('renamed_packages', {})

    def get_repositories(self):
        """
        :raises:
            ProviderException: when an error occurs with the channel contents
            DownloaderException: when an error occurs trying to open a URL

        :return:
            A list of the repository URLs
        """

        self.fetch()

        if 'repositories' not in self.channel_info:
            raise ProviderException(u'Channel %s does not appear to be a valid channel file because the "repositories" JSON key is missing.' % self.channel)

        # Determine a relative root so repositories can be defined
        # relative to the location of the channel file.
        if re.match('https?://', self.channel, re.I):
            url_pieces = urlparse(self.channel)
            domain = url_pieces.scheme + '://' + url_pieces.netloc
            path = '/' if url_pieces.path == '' else url_pieces.path
            if path[-1] != '/':
                path = os.path.dirname(path)
            relative_base = domain + path
        else:
            relative_base = os.path.dirname(self.channel) + '/'

        output = []
        repositories = self.channel_info.get('repositories', [])
        for repository in repositories:
            # Raw string so "\." is a regex escape, not a Python one
            if re.match(r'^\./|\.\./', repository):
                repository = os.path.normpath(relative_base + repository)
            output.append(repository)

        return output

    def get_certs(self):
        """
        Provides a secure way for distribution of SSL CA certificates

        Unfortunately Python does not include a bundle of CA certs with urllib
        to perform SSL certificate validation. To circumvent this issue,
        Package Control acts as a distributor of the CA certs for all HTTPS
        URLs of package downloads.

        The default channel scrapes and caches info about all packages
        periodically, and in the process it checks the CA certs for all of
        the HTTPS URLs listed in the repositories. The contents of the CA cert
        files are then hashed, and the CA cert is stored in a filename with
        that hash. This is a fingerprint to ensure that Package Control has
        the appropriate CA cert for a domain name.

        Next, the default channel file serves up a JSON object of the domain
        names and the hashes of their current CA cert files. If Package Control
        does not have the appropriate hash for a domain, it may retrieve it
        from the channel server. To ensure that Package Control is talking to
        a trusted authority to get the CA certs from, the CA cert for
        sublime.wbond.net is bundled with Package Control. Then when downloading
        the channel file, Package Control can ensure that the channel file's
        SSL certificate is valid, thus ensuring the resulting CA certs are
        legitimate.

        As a matter of optimization, the distribution of Package Control also
        includes the current CA certs for all known HTTPS domains that are
        included in the channel, as of the time when Package Control was
        last released.

        :raises:
            ProviderException: when an error occurs with the channel contents
            DownloaderException: when an error occurs trying to open a URL

        :return:
            A dict of {'Domain Name': ['cert_file_hash', 'cert_file_download_url']}
        """

        self.fetch()

        return self.channel_info.get('certs', {})

    def get_packages(self, repo):
        """
        Provides access to the repository info that is cached in a channel

        :param repo:
            The URL of the repository to get the cached info of

        :raises:
            ProviderException: when an error occurs with the channel contents
            DownloaderException: when an error occurs trying to open a URL

        :return:
            A dict in the format:
            {
                'Package Name': {
                    'name': name,
                    'description': description,
                    'author': author,
                    'homepage': homepage,
                    'last_modified': last modified date,
                    'download': {
                        'url': url,
                        'date': date,
                        'version': version
                    },
                    'previous_names': [old_name, ...],
                    'labels': [label, ...],
                    'readme': url,
                    'issues': url,
                    'donate': url,
                    'buy': url
                },
                ...
            }
        """

        self.fetch()

        # The 2.0 channel schema renamed the key cached package info was
        # stored under in order to be more clear to new users.
        packages_key = 'packages_cache' if self.schema_version >= 2.0 else 'packages'

        # The == False comparisons are preserved from the original code so a
        # missing key and a false-y placeholder value behave identically
        if self.channel_info.get(packages_key, False) == False:
            return {}

        if self.channel_info[packages_key].get(repo, False) == False:
            return {}

        output = {}
        for package in self.channel_info[packages_key][repo]:
            copy = package.copy()

            # In schema version 2.0, we store a list of dicts containing info
            # about all available releases. These include "version" and
            # "platforms" keys that are used to pick the download for the
            # current machine.
            if self.schema_version >= 2.0:
                copy = self.select_release(copy)
            else:
                copy = self.select_platform(copy)

            if not copy:
                self.unavailable_packages.append(package['name'])
                continue

            output[copy['name']] = copy

        return output

    def get_unavailable_packages(self):
        """
        Provides a list of packages that are unavailable for the current
        platform/architecture that Sublime Text is running on.

        This list will be empty unless get_packages() is called first.

        :return: A list of package names
        """

        return self.unavailable_packages

View File

@@ -0,0 +1,169 @@
import re
from ..clients.github_client import GitHubClient
from ..downloaders.downloader_exception import DownloaderException
from ..clients.client_exception import ClientException
from .provider_exception import ProviderException
class GitHubRepositoryProvider():
    """
    Allows using a public GitHub repository as the source for a single package.
    For legacy purposes, this can also be treated as the source for a Package
    Control "repository".

    :param repo:
        The public web URL to the GitHub repository. Should be in the format
        `https://github.com/user/package` for the master branch, or
        `https://github.com/user/package/tree/{branch_name}` for any other
        branch.

    :param settings:
        A dict containing at least the following fields:
          `cache_length`,
          `debug`,
          `timeout`,
          `user_agent`
        Optional fields:
          `http_proxy`,
          `https_proxy`,
          `proxy_username`,
          `proxy_password`,
          `query_string_params`,
          `install_prereleases`
    """

    def __init__(self, repo, settings):
        # Per-instance cache of results, keyed by method name
        self.cache = {}
        # Clean off the trailing .git to be more forgiving
        self.repo = re.sub(r'\.git$', '', repo)
        self.settings = settings
        # Maps source URL -> Exception for any source that failed to load
        self.failed_sources = {}

    @classmethod
    def match_url(cls, repo):
        """Indicates if this provider can handle the provided repo"""

        master = re.search(r'^https?://github.com/[^/]+/[^/]+/?$', repo)
        branch = re.search(r'^https?://github.com/[^/]+/[^/]+/tree/[^/]+/?$',
            repo)
        return master is not None or branch is not None

    def prefetch(self):
        """
        Go out and perform HTTP operations, caching the result

        :raises:
            DownloaderException: when there is an issue download package info
            ClientException: when there is an issue parsing package info
        """

        # Draining the generator forces the HTTP work and fills self.cache
        [name for name, info in self.get_packages()]

    def get_failed_sources(self):
        """
        List of any URLs that could not be accessed while accessing this repository

        :return:
            A generator of ("https://github.com/user/repo", Exception()) tuples
        """

        return self.failed_sources.items()

    def get_broken_packages(self):
        """
        For API-compatibility with RepositoryProvider
        """

        return {}.items()

    def get_packages(self, invalid_sources=None):
        """
        Uses the GitHub API to construct necessary info for a package

        :param invalid_sources:
            A list of URLs that should be ignored

        :raises:
            DownloaderException: when there is an issue download package info
            ClientException: when there is an issue parsing package info

        :return:
            A generator of
            (
                'Package Name',
                {
                    'name': name,
                    'description': description,
                    'author': author,
                    'homepage': homepage,
                    'last_modified': last modified date,
                    'download': {
                        'url': url,
                        'date': date,
                        'version': version
                    },
                    'previous_names': [],
                    'labels': [],
                    'sources': [the repo URL],
                    'readme': url,
                    'issues': url,
                    'donate': url,
                    'buy': None
                }
            )
            tuples
        """

        if 'get_packages' in self.cache:
            for key, value in self.cache['get_packages'].items():
                yield (key, value)
            return

        # Check the skip list before constructing the client so ignored
        # repos do no work at all. Bug fix: "raise StopIteration()" inside a
        # generator becomes a RuntimeError under PEP 479 (Python 3.7+); a
        # bare return is the correct way to end the generator early.
        if invalid_sources is not None and self.repo in invalid_sources:
            return

        client = GitHubClient(self.settings)

        try:
            repo_info = client.repo_info(self.repo)
            download = client.download_info(self.repo)

            name = repo_info['name']
            details = {
                'name': name,
                'description': repo_info['description'],
                'homepage': repo_info['homepage'],
                'author': repo_info['author'],
                'last_modified': download.get('date'),
                'download': download,
                'previous_names': [],
                'labels': [],
                'sources': [self.repo],
                'readme': repo_info['readme'],
                'issues': repo_info['issues'],
                'donate': repo_info['donate'],
                'buy': None
            }
            self.cache['get_packages'] = {name: details}
            yield (name, details)

        except (DownloaderException, ClientException, ProviderException) as e:
            # Record the failure and cache an empty result so later calls
            # return cleanly; see PEP 479 note above for the bare return
            self.failed_sources[self.repo] = e
            self.cache['get_packages'] = {}
            return

    def get_renamed_packages(self):
        """For API-compatibility with RepositoryProvider"""

        return {}

    def get_unavailable_packages(self):
        """
        Method for compatibility with RepositoryProvider class. These providers
        are based on API calls, and thus do not support different platform
        downloads, making it impossible for there to be unavailable packages.

        :return: An empty list
        """

        return []

View File

@@ -0,0 +1,172 @@
import re
from ..clients.github_client import GitHubClient
from ..downloaders.downloader_exception import DownloaderException
from ..clients.client_exception import ClientException
from .provider_exception import ProviderException
class GitHubUserProvider():
    """
    Allows using a GitHub user/organization as the source for multiple packages,
    or in Package Control terminology, a "repository".

    :param repo:
        The public web URL to the GitHub user/org. Should be in the format
        `https://github.com/user`.

    :param settings:
        A dict containing at least the following fields:
          `cache_length`,
          `debug`,
          `timeout`,
          `user_agent`
        Optional fields:
          `http_proxy`,
          `https_proxy`,
          `proxy_username`,
          `proxy_password`,
          `query_string_params`,
          `install_prereleases`
    """

    def __init__(self, repo, settings):
        # Per-instance cache of results, keyed by method name
        self.cache = {}
        self.repo = repo
        self.settings = settings
        # Maps source URL -> Exception for any source that failed to load
        self.failed_sources = {}

    @classmethod
    def match_url(cls, repo):
        """Indicates if this provider can handle the provided repo"""

        return re.search(r'^https?://github.com/[^/]+/?$', repo) is not None

    def prefetch(self):
        """
        Go out and perform HTTP operations, caching the result
        """

        # Draining the generator forces the HTTP work and fills self.cache
        [name for name, info in self.get_packages()]

    def get_failed_sources(self):
        """
        List of any URLs that could not be accessed while accessing this repository

        :raises:
            DownloaderException: when there is an issue download package info
            ClientException: when there is an issue parsing package info

        :return:
            A generator of ("https://github.com/user/repo", Exception()) tuples
        """

        return self.failed_sources.items()

    def get_broken_packages(self):
        """
        For API-compatibility with RepositoryProvider
        """

        return {}.items()

    def get_packages(self, invalid_sources=None):
        """
        Uses the GitHub API to construct necessary info for all packages

        :param invalid_sources:
            A list of URLs that should be ignored

        :raises:
            DownloaderException: when there is an issue download package info
            ClientException: when there is an issue parsing package info

        :return:
            A generator of
            (
                'Package Name',
                {
                    'name': name,
                    'description': description,
                    'author': author,
                    'homepage': homepage,
                    'last_modified': last modified date,
                    'download': {
                        'url': url,
                        'date': date,
                        'version': version
                    },
                    'previous_names': [],
                    'labels': [],
                    'sources': [the user URL],
                    'readme': url,
                    'issues': url,
                    'donate': url,
                    'buy': None
                }
            )
            tuples
        """

        if 'get_packages' in self.cache:
            for key, value in self.cache['get_packages'].items():
                yield (key, value)
            return

        # Check the skip list before constructing the client so ignored
        # repos do no work at all. Bug fix: "raise StopIteration()" inside a
        # generator becomes a RuntimeError under PEP 479 (Python 3.7+); a
        # bare return is the correct way to end the generator early.
        if invalid_sources is not None and self.repo in invalid_sources:
            return

        client = GitHubClient(self.settings)

        try:
            user_repos = client.user_info(self.repo)
        except (DownloaderException, ClientException, ProviderException) as e:
            # Bug fix: the original replaced the failed_sources dict with a
            # list and cached the exception object, which broke later
            # .items() iteration of the cache. Record the failure the same
            # way the other providers do and cache an empty result.
            self.failed_sources[self.repo] = e
            self.cache['get_packages'] = {}
            raise

        output = {}
        for repo_info in user_repos:
            try:
                name = repo_info['name']
                repo_url = 'https://github.com/' + repo_info['user_repo']

                download = client.download_info(repo_url)

                details = {
                    'name': name,
                    'description': repo_info['description'],
                    'homepage': repo_info['homepage'],
                    'author': repo_info['author'],
                    'last_modified': download.get('date'),
                    'download': download,
                    'previous_names': [],
                    'labels': [],
                    'sources': [self.repo],
                    'readme': repo_info['readme'],
                    'issues': repo_info['issues'],
                    'donate': repo_info['donate'],
                    'buy': None
                }
                output[name] = details
                yield (name, details)

            except (DownloaderException, ClientException, ProviderException) as e:
                self.failed_sources[repo_url] = e

        self.cache['get_packages'] = output

    def get_renamed_packages(self):
        """For API-compatibility with RepositoryProvider"""

        return {}

    def get_unavailable_packages(self):
        """
        Method for compatibility with RepositoryProvider class. These providers
        are based on API calls, and thus do not support different platform
        downloads, making it impossible for there to be unavailable packages.

        :return: An empty list
        """

        return []

View File

@@ -0,0 +1,5 @@
class ProviderException(Exception):
    """If a provider could not return information"""

    def __str__(self):
        # Present only the message passed to the constructor, without the
        # default "ClassName(...)" wrapper
        message = self.args[0]
        return message

View File

@@ -0,0 +1,125 @@
import re
import sublime
from ..versions import version_sort, version_exclude_prerelease
class ReleaseSelector():
    """
    A base class for finding the best version of a package for the current machine
    """

    def select_release(self, package_info):
        """
        Returns a modified package info dict for package from package schema version 2.0

        :param package_info:
            A package info dict with a "releases" key

        :return:
            The package info dict with the "releases" key deleted, and a
            "download" key added that contains a dict with "version", "url" and
            "date" keys.
            None if no compatible releases are available.
        """

        releases = version_sort(package_info['releases'])
        if not self.settings.get('install_prereleases'):
            releases = version_exclude_prerelease(releases)

        # releases are sorted newest-first, so the first compatible one wins
        for release in releases:
            platforms = release.get('platforms', '*')
            if not isinstance(platforms, list):
                platforms = [platforms]

            best_platform = self.get_best_platform(platforms)
            if not best_platform:
                continue

            if not self.is_compatible_version(release.get('sublime_text', '<3000')):
                continue

            package_info['download'] = release
            package_info['last_modified'] = release.get('date')
            del package_info['releases']

            return package_info

        return None

    def select_platform(self, package_info):
        """
        Returns a modified package info dict for package from package schema version <= 1.2

        :param package_info:
            A package info dict with a "platforms" key

        :return:
            The package info dict with the "platforms" key deleted, and a
            "download" key added that contains a dict with "version" and "url"
            keys.
            None if no compatible platforms.
        """

        platforms = list(package_info['platforms'].keys())
        best_platform = self.get_best_platform(platforms)
        if not best_platform:
            return None

        package_info['download'] = package_info['platforms'][best_platform][0]
        package_info['download']['date'] = package_info.get('last_modified')
        del package_info['platforms']

        return package_info

    def get_best_platform(self, platforms):
        """
        Returns the most specific platform that matches the current machine

        :param platforms:
            An array of platform names for a package. E.g. ['*', 'windows', 'linux-x64']

        :return: A string representing the most specific matching platform
        """

        # Most specific first: "platform-arch", then "platform", then wildcard
        ids = [sublime.platform() + '-' + sublime.arch(), sublime.platform(),
            '*']

        # "platform_id" avoids shadowing the builtin id()
        for platform_id in ids:
            if platform_id in platforms:
                return platform_id

        return None

    def is_compatible_version(self, version_range):
        """
        Checks a Sublime Text build selector against the running build

        :param version_range:
            '*', '>N', '>=N', '<N', '<=N' or 'N - M' where N and M are builds

        :return:
            True or False when the selector is recognized, None when it is
            not in a recognized format
        """

        min_version = float("-inf")
        max_version = float("inf")

        if version_range == '*':
            return True

        # Raw strings so "\d" is a regex escape, not a Python one
        gt_match = re.match(r'>(\d+)$', version_range)
        ge_match = re.match(r'>=(\d+)$', version_range)
        lt_match = re.match(r'<(\d+)$', version_range)
        le_match = re.match(r'<=(\d+)$', version_range)
        range_match = re.match(r'(\d+) - (\d+)$', version_range)

        if gt_match:
            min_version = int(gt_match.group(1)) + 1
        elif ge_match:
            min_version = int(ge_match.group(1))
        elif lt_match:
            max_version = int(lt_match.group(1)) - 1
        elif le_match:
            max_version = int(le_match.group(1))
        elif range_match:
            min_version = int(range_match.group(1))
            max_version = int(range_match.group(2))
        else:
            return None

        if min_version > int(sublime.version()):
            return False
        if max_version < int(sublime.version()):
            return False

        return True

View File

@@ -0,0 +1,454 @@
import json
import re
import os
from itertools import chain
try:
# Python 3
from urllib.parse import urlparse
except (ImportError):
# Python 2
from urlparse import urlparse
from ..console_write import console_write
from .release_selector import ReleaseSelector
from .provider_exception import ProviderException
from ..downloaders.downloader_exception import DownloaderException
from ..clients.client_exception import ClientException
from ..clients.github_client import GitHubClient
from ..clients.bitbucket_client import BitBucketClient
from ..download_manager import downloader
class RepositoryProvider(ReleaseSelector):
"""
Generic repository downloader that fetches package info
With the current channel/repository architecture where the channel file
caches info from all includes repositories, these package providers just
serve the purpose of downloading packages not in the default channel.
The structure of the JSON a repository should contain is located in
example-packages.json.
:param repo:
The URL of the package repository
:param settings:
A dict containing at least the following fields:
`cache_length`,
`debug`,
`timeout`,
`user_agent`
Optional fields:
`http_proxy`,
`https_proxy`,
`proxy_username`,
`proxy_password`,
`query_string_params`
`install_prereleases`
"""
def __init__(self, repo, settings):
self.cache = {}
self.repo_info = None
self.schema_version = 0.0
self.repo = repo
self.settings = settings
self.unavailable_packages = []
self.failed_sources = {}
self.broken_packages = {}
@classmethod
def match_url(cls, repo):
"""Indicates if this provider can handle the provided repo"""
return True
def prefetch(self):
"""
Go out and perform HTTP operations, caching the result
:raises:
DownloaderException: when there is an issue download package info
ClientException: when there is an issue parsing package info
"""
[name for name, info in self.get_packages()]
def get_failed_sources(self):
"""
List of any URLs that could not be accessed while accessing this repository
:return:
A generator of ("https://example.com", Exception()) tuples
"""
return self.failed_sources.items()
def get_broken_packages(self):
"""
List of package names for packages that are missing information
:return:
A generator of ("Package Name", Exception()) tuples
"""
return self.broken_packages.items()
def fetch(self):
"""
Retrieves and loads the JSON for other methods to use
:raises:
ProviderException: when an error occurs trying to open a file
DownloaderException: when an error occurs trying to open a URL
"""
if self.repo_info != None:
return
self.repo_info = self.fetch_location(self.repo)
if 'includes' not in self.repo_info:
return
# Allow repositories to include other repositories
if re.match('https?://', self.repo, re.I):
url_pieces = urlparse(self.repo)
domain = url_pieces.scheme + '://' + url_pieces.netloc
path = '/' if url_pieces.path == '' else url_pieces.path
if path[-1] != '/':
path = os.path.dirname(path)
relative_base = domain + path
else:
relative_base = os.path.dirname(self.repo) + '/'
includes = self.repo_info.get('includes', [])
del self.repo_info['includes']
for include in includes:
if re.match('^\./|\.\./', include):
include = os.path.normpath(relative_base + include)
include_info = self.fetch_location(include)
included_packages = include_info.get('packages', [])
self.repo_info['packages'].extend(included_packages)
def fetch_location(self, location):
"""
Fetches the contents of a URL of file path
:param location:
The URL or file path
:raises:
ProviderException: when an error occurs trying to open a file
DownloaderException: when an error occurs trying to open a URL
:return:
A dict of the parsed JSON
"""
if re.match('https?://', self.repo, re.I):
with downloader(location, self.settings) as manager:
json_string = manager.fetch(location, 'Error downloading repository.')
# Anything that is not a URL is expected to be a filesystem path
else:
if not os.path.exists(location):
raise ProviderException(u'Error, file %s does not exist' % location)
if self.settings.get('debug'):
console_write(u'Loading %s as a repository' % location, True)
# We open as binary so we get bytes like the DownloadManager
with open(location, 'rb') as f:
json_string = f.read()
try:
return json.loads(json_string.decode('utf-8'))
except (ValueError):
raise ProviderException(u'Error parsing JSON from repository %s.' % location)
def get_packages(self, invalid_sources=None):
"""
Provides access to the packages in this repository
:param invalid_sources:
A list of URLs that are permissible to fetch data from
:raises:
ProviderException: when an error occurs trying to open a file
DownloaderException: when there is an issue download package info
ClientException: when there is an issue parsing package info
:return:
A generator of
(
'Package Name',
{
'name': name,
'description': description,
'author': author,
'homepage': homepage,
'last_modified': last modified date,
'download': {
'url': url,
'date': date,
'version': version
},
'previous_names': [old_name, ...],
'labels': [label, ...],
'sources': [url, ...],
'readme': url,
'issues': url,
'donate': url,
'buy': url
}
)
tuples
"""
if 'get_packages' in self.cache:
for key, value in self.cache['get_packages'].items():
yield (key, value)
return
if invalid_sources != None and self.repo in invalid_sources:
raise StopIteration()
try:
self.fetch()
except (DownloaderException, ProviderException) as e:
self.failed_sources[self.repo] = e
self.cache['get_packages'] = {}
return
def fail(message):
exception = ProviderException(message)
self.failed_sources[self.repo] = exception
self.cache['get_packages'] = {}
return
schema_error = u'Repository %s does not appear to be a valid repository file because ' % self.repo
if 'schema_version' not in self.repo_info:
error_string = u'%s the "schema_version" JSON key is missing.' % schema_error
fail(error_string)
return
try:
self.schema_version = float(self.repo_info.get('schema_version'))
except (ValueError):
error_string = u'%s the "schema_version" is not a valid number.' % schema_error
fail(error_string)
return
if self.schema_version not in [1.0, 1.1, 1.2, 2.0]:
error_string = u'%s the "schema_version" is not recognized. Must be one of: 1.0, 1.1, 1.2 or 2.0.' % schema_error
fail(error_string)
return
if 'packages' not in self.repo_info:
error_string = u'%s the "packages" JSON key is missing.' % schema_error
fail(error_string)
return
github_client = GitHubClient(self.settings)
bitbucket_client = BitBucketClient(self.settings)
# Backfill the "previous_names" keys for old schemas
previous_names = {}
if self.schema_version < 2.0:
renamed = self.get_renamed_packages()
for old_name in renamed:
new_name = renamed[old_name]
if new_name not in previous_names:
previous_names[new_name] = []
previous_names[new_name].append(old_name)
output = {}
for package in self.repo_info['packages']:
info = {
'sources': [self.repo]
}
for field in ['name', 'description', 'author', 'last_modified', 'previous_names',
'labels', 'homepage', 'readme', 'issues', 'donate', 'buy']:
if package.get(field):
info[field] = package.get(field)
# Schema version 2.0 allows for grabbing details about a pacakge, or its
# download from "details" urls. See the GitHubClient and BitBucketClient
# classes for valid URLs.
if self.schema_version >= 2.0:
details = package.get('details')
releases = package.get('releases')
# Try to grab package-level details from GitHub or BitBucket
if details:
if invalid_sources != None and details in invalid_sources:
continue
info['sources'].append(details)
try:
github_repo_info = github_client.repo_info(details)
bitbucket_repo_info = bitbucket_client.repo_info(details)
# When grabbing details, prefer explicit field values over the values
# from the GitHub or BitBucket API
if github_repo_info:
info = dict(chain(github_repo_info.items(), info.items()))
elif bitbucket_repo_info:
info = dict(chain(bitbucket_repo_info.items(), info.items()))
else:
raise ProviderException(u'Invalid "details" value "%s" for one of the packages in the repository %s.' % (details, self.repo))
except (DownloaderException, ClientException, ProviderException) as e:
if 'name' in info:
self.broken_packages[info['name']] = e
self.failed_sources[details] = e
continue
# If no releases info was specified, also grab the download info from GH or BB
if not releases and details:
releases = [{'details': details}]
if not releases:
e = ProviderException(u'No "releases" value for one of the packages in the repository %s.' % self.repo)
if 'name' in info:
self.broken_packages[info['name']] = e
else:
self.failed_sources[self.repo] = e
continue
# This allows developers to specify a GH or BB location to get releases from,
# especially tags URLs (https://github.com/user/repo/tags or
# https://bitbucket.org/user/repo#tags)
info['releases'] = []
for release in releases:
download_details = None
download_info = {}
# Make sure that explicit fields are copied over
for field in ['platforms', 'sublime_text', 'version', 'url', 'date']:
if field in release:
download_info[field] = release[field]
if 'details' in release:
download_details = release['details']
try:
github_download = github_client.download_info(download_details)
bitbucket_download = bitbucket_client.download_info(download_details)
# Overlay the explicit field values over values fetched from the APIs
if github_download:
download_info = dict(chain(github_download.items(), download_info.items()))
# No matching tags
elif github_download == False:
download_info = {}
elif bitbucket_download:
download_info = dict(chain(bitbucket_download.items(), download_info.items()))
# No matching tags
elif bitbucket_download == False:
download_info = {}
else:
raise ProviderException(u'Invalid "details" value "%s" under the "releases" key for the package "%s" in the repository %s.' % (download_details, info['name'], self.repo))
except (DownloaderException, ClientException, ProviderException) as e:
if 'name' in info:
self.broken_packages[info['name']] = e
self.failed_sources[download_details] = e
continue
if download_info:
info['releases'].append(download_info)
info = self.select_release(info)
# Schema version 1.0, 1.1 and 1.2 just require that all values be
# explicitly specified in the package JSON
else:
info['platforms'] = package.get('platforms')
info = self.select_platform(info)
if not info:
self.unavailable_packages.append(package['name'])
continue
if 'download' not in info and 'releases' not in info:
self.broken_packages[info['name']] = ProviderException(u'No "releases" key for the package "%s" in the repository %s.' % (info['name'], self.repo))
continue
for field in ['previous_names', 'labels']:
if field not in info:
info[field] = []
for field in ['readme', 'issues', 'donate', 'buy']:
if field not in info:
info[field] = None
if 'homepage' not in info:
info['homepage'] = self.repo
if 'download' in info:
# Rewrites the legacy "zipball" URLs to the new "zip" format
info['download']['url'] = re.sub(
'^(https://nodeload.github.com/[^/]+/[^/]+/)zipball(/.*)$',
'\\1zip\\2', info['download']['url'])
# Rewrites the legacy "nodeload" URLs to the new "codeload" subdomain
info['download']['url'] = info['download']['url'].replace(
'nodeload.github.com', 'codeload.github.com')
# Extract the date from the download
if 'last_modified' not in info:
info['last_modified'] = info['download']['date']
elif 'releases' in info and 'last_modified' not in info:
# Extract a date from the newest download
date = '1970-01-01 00:00:00'
for release in info['releases']:
if 'date' in release and release['date'] > date:
date = release['date']
info['last_modified'] = date
if info['name'] in previous_names:
info['previous_names'].extend(previous_names[info['name']])
output[info['name']] = info
yield (info['name'], info)
self.cache['get_packages'] = output
def get_renamed_packages(self):
    """:return: A dict of the packages that have been renamed"""

    # Schema 1.x repositories carry renames in a dedicated top-level key
    if self.schema_version < 2.0:
        return self.repo_info.get('renamed_packages', {})

    # Schema 2.0+ attaches "previous_names" to each package entry; a bare
    # string is treated as a single-element list
    renamed = {}
    for pkg in self.repo_info['packages']:
        old = pkg.get('previous_names', [])
        for old_name in (old if isinstance(old, list) else [old]):
            renamed[old_name] = pkg['name']
    return renamed
def get_unavailable_packages(self):
    """
    List the package names that can not be installed on the
    platform/architecture Sublime Text is currently running on.

    Note: populated as a side effect of get_packages(), so the result is
    empty until that method has been called.

    :return: A list of package names
    """
    return self.unavailable_packages

View File

@@ -0,0 +1,130 @@
import sys

import sublime


# Sublime Text major version the plugin is running under (2 or 3)
st_version = 2
# With the way ST3 works, the sublime module is not "available" at startup
# which results in an empty version number
if sublime.version() == '' or int(sublime.version()) > 3000:
    st_version = 3
    # NOTE(review): this import must stay inside the ST3 branch — Python 2
    # (ST2) has no imp.reload(); reload() is a builtin there.
    from imp import reload

# Python allows reloading modules on the fly, which allows us to do live upgrades.
# The only caveat to this is that you have to reload in the dependency order.
#
# Thus if module A depends on B and we don't reload B before A, when A is reloaded
# it will still have a reference to the old B. Thus we hard-code the dependency
# order of the various Package Control modules so they get reloaded properly.
#
# There are solutions for doing this all programmatically, but this is much easier
# to understand.

# Collect the names of every currently-loaded Package Control module.
# FIX: compare against None with `is not` (identity), not `!=` (PEP 8 E711);
# sys.modules caches failed imports as None and those must be skipped.
reload_mods = []
for mod in sys.modules:
    if mod[0:15].lower().replace(' ', '_') == 'package_control' and sys.modules[mod] is not None:
        reload_mods.append(mod)

# ST3 namespaces plugin modules under the package folder name
mod_prefix = 'package_control'
if st_version == 3:
    mod_prefix = 'Package Control.' + mod_prefix

# Hard-coded dependency order: each module appears after everything it imports
mods_load_order = [
    '',
    '.sys_path',
    '.cache',
    '.http_cache',
    '.ca_certs',
    '.clear_directory',
    '.cmd',
    '.console_write',
    '.preferences_filename',
    '.show_error',
    '.unicode',
    '.thread_progress',
    '.package_io',
    '.semver',
    '.versions',

    '.http',
    '.http.invalid_certificate_exception',
    '.http.debuggable_http_response',
    '.http.debuggable_https_response',
    '.http.debuggable_http_connection',
    '.http.persistent_handler',
    '.http.debuggable_http_handler',
    '.http.validating_https_connection',
    '.http.validating_https_handler',

    '.clients',
    '.clients.client_exception',
    '.clients.bitbucket_client',
    '.clients.github_client',
    '.clients.readme_client',
    '.clients.json_api_client',

    '.providers',
    '.providers.provider_exception',
    '.providers.bitbucket_repository_provider',
    '.providers.channel_provider',
    '.providers.github_repository_provider',
    '.providers.github_user_provider',
    '.providers.repository_provider',
    '.providers.release_selector',

    '.download_manager',

    '.downloaders',
    '.downloaders.downloader_exception',
    '.downloaders.rate_limit_exception',
    '.downloaders.binary_not_found_error',
    '.downloaders.non_clean_exit_error',
    '.downloaders.non_http_error',
    '.downloaders.caching_downloader',
    '.downloaders.decoding_downloader',
    '.downloaders.limiting_downloader',
    '.downloaders.cert_provider',
    '.downloaders.urllib_downloader',
    '.downloaders.cli_downloader',
    '.downloaders.curl_downloader',
    '.downloaders.wget_downloader',
    '.downloaders.wininet_downloader',
    '.downloaders.background_downloader',

    '.upgraders',
    '.upgraders.vcs_upgrader',
    '.upgraders.git_upgrader',
    '.upgraders.hg_upgrader',

    '.package_manager',
    '.package_creator',
    '.package_installer',
    '.package_renamer',

    '.commands',
    '.commands.add_channel_command',
    '.commands.add_repository_command',
    '.commands.create_binary_package_command',
    '.commands.create_package_command',
    '.commands.disable_package_command',
    '.commands.discover_packages_command',
    '.commands.enable_package_command',
    '.commands.existing_packages_command',
    '.commands.grab_certs_command',
    '.commands.install_package_command',
    '.commands.list_packages_command',
    '.commands.package_message_command',
    '.commands.remove_package_command',
    '.commands.upgrade_all_packages_command',
    '.commands.upgrade_package_command',

    '.package_cleanup',
    '.automatic_upgrader'
]

# Reload whatever was already loaded, in dependency order
for suffix in mods_load_order:
    mod = mod_prefix + suffix
    if mod in reload_mods:
        reload(sys.modules[mod])

View File

@@ -0,0 +1,833 @@
"""pysemver: Semantic Version comparing for Python.
Provides comparing of semantic versions by using SemVer objects using rich comperations plus the
possibility to match a selector string against versions. Interesting for version dependencies.
Versions look like: "1.7.12+b.133"
Selectors look like: ">1.7.0 || 1.6.9+b.111 - 1.6.9+b.113"
Example usages:
>>> SemVer(1, 2, 3, build=13)
SemVer("1.2.3+13")
>>> SemVer.valid("1.2.3.4")
False
>>> SemVer.clean("this is unimportant text 1.2.3-2 and will be stripped")
"1.2.3-2"
>>> SemVer("1.7.12+b.133").satisfies(">1.7.0 || 1.6.9+b.111 - 1.6.9+b.113")
True
>>> SemSel(">1.7.0 || 1.6.9+b.111 - 1.6.9+b.113").matches(SemVer("1.7.12+b.133"),
... SemVer("1.6.9+b.112"), SemVer("1.6.10"))
[SemVer("1.7.12+b.133"), SemVer("1.6.9+b.112")]
>>> min(_)
SemVer("1.6.9+b.112")
>>> _.patch
9
Exported classes:
* SemVer(collections.namedtuple())
Parses semantic versions and defines methods for them. Supports rich comparisons.
* SemSel(tuple)
Parses semantic version selector strings and defines methods for them.
* SelParseError(Exception)
An error among others raised when parsing a semantic version selector failed.
Other classes:
* SemComparator(object)
* SemSelAndChunk(list)
* SemSelOrChunk(list)
Functions/Variables/Constants:
none
Copyright (c) 2013 Zachary King, FichteFoll
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute,
sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions: The above copyright notice and this
permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import re
import sys
from collections import namedtuple # Python >=2.6
__all__ = ('SemVer', 'SemSel', 'SelParseError')
# Python 2/3 compatibility shims: Python 3 removed the `basestring` type and
# the `cmp()` builtin, both of which the code below relies on.
if sys.version_info[0] == 3:
    basestring = str
    cmp = lambda a, b: (a > b) - (a < b)  # classic three-way comparison
# @functools.total_ordering would be nice here but was added in 2.7, __cmp__ is not Py3
class SemVer(namedtuple("_SemVer", 'major, minor, patch, prerelease, build')):
    """Semantic Version, consists of 3 to 5 components defining the version's adicity.

    See http://semver.org/ (2.0.0-rc.1) for the standard mainly used for this implementation, few
    changes have been made.

    Information on this particular class and their instances:
        - Immutable and hashable.
        - Subclasses `collections.namedtuple`.
        - Always `True` in boolean context.
        - len() returns an int between 3 and 5; 4 when a pre-release is set and 5 when a build is
          set. Note: Still returns 5 when build is set but not pre-release.
        - Parts of the semantic version can be accessed by integer indexing, key (string) indexing,
          slicing and getting an attribute. Returned slices are tuple. Leading '-' and '+' of
          optional components are not stripped. Supported keys/attributes:
          major, minor, patch, prerelease, build.

          Examples:
            s = SemVer("1.2.3-4.5+6")
            s[2] == 3
            s[:3] == (1, 2, 3)
            s['build'] == '-4.5'
            s.major == 1

    Short information on semantic version structure:

    Semantic versions consist of:
        * a major component (numeric)
        * a minor component (numeric)
        * a patch component (numeric)
        * a pre-release component [optional]
        * a build component [optional]

    The pre-release component is indicated by a hyphen '-' and followed by alphanumeric[1] sequences
    separated by dots '.'. Sequences are compared numerically if applicable (both sequences of two
    versions are numeric) or lexicographically. May also include hyphens. The existence of a
    pre-release component lowers the actual version; the shorter pre-release component is considered
    lower. An 'empty' pre-release component is considered to be the least version for this
    major-minor-patch combination (e.g. "1.0.0-").

    The build component may follow the optional pre-release component and is indicated by a plus '+'
    followed by sequences, just as the pre-release component. Comparing works similarly. However the
    existence of a build component raises the actual version and may also raise a pre-release. An
    'empty' build component is considered to be the highest version for this
    major-minor-patch-prerelease combination (e.g. "1.2.3+").

    [1]: Regexp for a sequence: r'[0-9A-Za-z-]+'.
    """

    # Static class variables
    # (?x) verbose mode: whitespace inside the pattern is insignificant
    _base_regex = r'''(?x)
        (?P<major>[0-9]+)
        \.(?P<minor>[0-9]+)
        \.(?P<patch>[0-9]+)
        (?:\-(?P<prerelease>(?:[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*)?))?
        (?:\+(?P<build>(?:[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*)?))?'''
    _search_regex = re.compile(_base_regex)
    _match_regex = re.compile('^%s$' % _base_regex)  # required because of $ anchor

    # "Constructor"
    def __new__(cls, *args, **kwargs):
        """There are two different constructor styles that are allowed:
        - Option 1 allows specification of a semantic version as a string and the option to "clean"
          the string before parsing it.
        - Option 2 allows specification of each component separately as one parameter.

        Note that all the parameters specified in the following sections can be passed either as
        positional or as named parameters while considering the usual Python rules for this. As
        such, `SemVer(1, 2, minor=1)` will result in an exception and not in `SemVer("1.1.2")`.

        Option 1:
            Constructor examples:
                SemVer("1.0.1")
                SemVer("this version 1.0.1-pre.1 here", True)
                SemVer(ver="0.0.9-pre-alpha+34", clean=False)

            Parameters:
                * ver (str)
                    The string containing the version.
                * clean = `False` (bool; optional)
                    If this is true in boolean context, `SemVer.clean(ver)` is called before
                    parsing.

        Option 2:
            Constructor examples:
                SemVer(1, 0, 1)
                SemVer(1, '0', prerelease='pre-alpha', patch=1, build=34)
                SemVer(**dict(minor=2, major=1, patch=3))

            Parameters:
                * major (int, str, float ...)
                * minor (...)
                * patch (...)
                    Major to patch components must be an integer or convertable to an int (e.g. a
                    string or another number type).
                * prerelease = `None` (str, int, float ...; optional)
                * build = `None` (...; optional)
                    Pre-release and build components should be a string (or number) type.
                    Will be passed to `str()` if not already a string but the final string must
                    match '^[0-9A-Za-z.-]*$'

        Raises:
            * TypeError
                Invalid parameter type(s) or combination (e.g. option 1 and 2).
            * ValueError
                Invalid semantic version or option 2 parameters unconvertable.
        """
        ver, clean, comps = None, False, None
        kw, l = kwargs.copy(), len(args) + len(kwargs)

        def inv():
            raise TypeError("Invalid parameter combination: args=%s; kwargs=%s" % (args, kwargs))

        # Do validation and parse the parameters
        if l == 0 or l > 5:
            raise TypeError("SemVer accepts at least 1 and at most 5 arguments (%d given)" % l)
        elif l < 3:
            # 1 or 2 arguments: option 1 (version string, optional clean flag)
            if len(args) == 2:
                ver, clean = args
            else:
                ver = args[0] if args else kw.pop('ver', None)
                clean = kw.pop('clean', clean)
                if kw:
                    inv()
        else:
            # 3 to 5 arguments: option 2 (individual components)
            comps = list(args) + [kw.pop(cls._fields[k], None) for k in range(len(args), 5)]
            if kw or any(comps[i] is None for i in range(3)):
                inv()

            # major/minor/patch must be ints, prerelease/build must be strings
            typecheck = (int,) * 3 + (basestring,) * 2
            for i, (v, t) in enumerate(zip(comps, typecheck)):
                if v is None:
                    continue
                elif not isinstance(v, t):
                    try:
                        if i < 3:
                            v = typecheck[i](v)
                        else:  # The real `basestring` can not be instatiated (Py2)
                            v = str(v)
                    except ValueError as e:
                        # Modify the exception message. I can't believe this actually works
                        e.args = ("Parameter #%d must be of type %s or convertable"
                                  % (i, t.__name__),)
                        raise
                    else:
                        comps[i] = v
                if t is basestring and not re.match(r"^[0-9A-Za-z.-]*$", v):
                    raise ValueError("Build and pre-release strings must match '^[0-9A-Za-z.-]*$'")

        # Final adjustments
        if not comps:
            if ver is None or clean is None:
                inv()
            ver = clean and cls.clean(ver) or ver
            comps = cls._parse(ver)

        # Create the obj
        return super(SemVer, cls).__new__(cls, *comps)

    # Magic methods
    def __str__(self):
        # Re-attach the leading '-'/'+' that the parse regex stripped
        return ('.'.join(map(str, self[:3]))
                + ('-' + self.prerelease if self.prerelease is not None else '')
                + ('+' + self.build if self.build is not None else ''))

    def __repr__(self):
        # Use the shortest representation - what would you prefer?
        return 'SemVer("%s")' % str(self)
        # return 'SemVer(%s)' % ', '.join('%s=%r' % (k, getattr(self, k)) for k in self._fields)

    def __len__(self):
        # 5 when build is set (regardless of prerelease), 4 when only
        # prerelease is set, 3 otherwise
        return 3 + (self.build is not None and 2 or self.prerelease is not None)

    # Magic rich comparing methods
    def __gt__(self, other):
        return self._compare(other) == 1 if isinstance(other, SemVer) else NotImplemented

    def __eq__(self, other):
        return self._compare(other) == 0 if isinstance(other, SemVer) else NotImplemented

    # The remaining comparisons are derived from __gt__ and __eq__
    def __lt__(self, other):
        return not (self > other or self == other)

    def __ge__(self, other):
        return not (self < other)

    def __le__(self, other):
        return not (self > other)

    def __ne__(self, other):
        return not (self == other)

    # Utility (class-)methods
    def satisfies(self, sel):
        """Alias for `bool(sel.matches(self))` or `bool(SemSel(sel).matches(self))`.

        See `SemSel.__init__()` and `SemSel.matches(*vers)` for possible exceptions.

        Returns:
            * bool: `True` if the version matches the passed selector, `False` otherwise.
        """
        if not isinstance(sel, SemSel):
            sel = SemSel(sel)  # just "re-raise" exceptions

        return bool(sel.matches(self))

    @classmethod
    def valid(cls, ver):
        """Check if `ver` is a valid semantic version. Classmethod.

        Parameters:
            * ver (str)
                The string that should be stripped.

        Raises:
            * TypeError
                Invalid parameter type.

        Returns:
            * bool: `True` if it is valid, `False` otherwise.
        """
        if not isinstance(ver, basestring):
            raise TypeError("%r is not a string" % ver)

        if cls._match_regex.match(ver):
            return True
        else:
            return False

    @classmethod
    def clean(cls, vers):
        """Remove everything before and after a valid version string. Classmethod.

        Parameters:
            * vers (str)
                The string that should be stripped.

        Raises:
            * TypeError
                Invalid parameter type.

        Returns:
            * str: The stripped version string. Only the first version is matched.
            * None: No version found in the string.
        """
        if not isinstance(vers, basestring):
            raise TypeError("%r is not a string" % vers)
        m = cls._search_regex.search(vers)
        if m:
            return vers[m.start():m.end()]
        else:
            return None

    # Private (class-)methods
    @classmethod
    def _parse(cls, ver):
        """Private. Do not touch. Classmethod.
        """
        if not isinstance(ver, basestring):
            raise TypeError("%r is not a string" % ver)

        match = cls._match_regex.match(ver)

        if match is None:
            raise ValueError("'%s' is not a valid SemVer string" % ver)

        g = list(match.groups())
        for i in range(3):
            g[i] = int(g[i])

        return g  # Will be passed as namedtuple(...)(*g)

    def _compare(self, other):
        """Private. Do not touch.
        self > other: 1
        self = other: 0
        self < other: -1
        """
        # Shorthand lambdas
        cp_len = lambda t, i=0: cmp(len(t[i]), len(t[not i]))

        for i, (x1, x2) in enumerate(zip(self, other)):
            if i > 2:
                # i == 3 is the prerelease component, i == 4 is the build
                if x1 is None and x2 is None:
                    continue

                # self is greater when other has a prerelease but self doesn't
                # self is less when other has a build but self doesn't
                # (i - 3.5 maps prerelease to -1 and build to +1)
                if x1 is None or x2 is None:
                    return int(2 * (i - 3.5)) * (1 - 2 * (x1 is None))

                # self is less when other's build is empty
                if i == 4 and (not x1 or not x2) and x1 != x2:
                    return 1 - 2 * bool(x1)

                # Split by '.' and use numeric comp or lexicographical order
                t2 = [x1.split('.'), x2.split('.')]
                for y1, y2 in zip(*t2):
                    if y1.isdigit() and y2.isdigit():
                        y1 = int(y1)
                        y2 = int(y2)
                    if y1 > y2:
                        return 1
                    elif y1 < y2:
                        return -1

                # The "longer" sub-version is greater
                d = cp_len(t2)
                if d:
                    return d
            else:
                if x1 > x2:
                    return 1
                elif x1 < x2:
                    return -1

        # The versions equal
        return 0
class SemComparator(object):
    """Couples a comparison operator with a SemVer and can test versions against the pair.

    Constructor: SemComparator('<=', SemVer("1.2.3"))

    Methods:
        * matches(ver)
    """
    # Private properties
    _ops = {
        '>=': '__ge__',
        '<=': '__le__',
        '>': '__gt__',
        '<': '__lt__',
        '=': '__eq__',
        '!=': '__ne__'
    }
    _ops_satisfy = ('~', '!')

    # Constructor
    def __init__(self, op, ver):
        """Constructor examples:
            SemComparator('<=', SemVer("1.2.3"))
            SemComparator('!=', SemVer("2.3.4"))

        Parameters:
            * op (str, False, None)
                One of [>=, <=, >, <, =, !=, !, ~] or evaluates to `False` which defaults to '~'.
                '~' means a "satisfy" operation where pre-releases and builds are ignored.
                '!' is a negative "~".
            * ver (SemVer)
                Holds the version to compare with.

        Raises:
            * ValueError
                Invalid `op` parameter.
            * TypeError
                Invalid `ver` parameter.
        """
        super(SemComparator, self).__init__()

        if op and op not in self._ops_satisfy and op not in self._ops:
            raise ValueError("Invalid value for `op` parameter.")
        if not isinstance(ver, SemVer):
            raise TypeError("`ver` parameter is not instance of SemVer.")

        # A falsy operator means a "satisfy" comparison
        if not op:
            op = '~'
        # "Satisfy" ops are meaningless when a pre-release/build is present;
        # fall back to exact (in)equality in that case
        if len(ver) != 3:
            op = {'~': '=', '!': '!='}.get(op, op)

        self.op = op
        self.ver = ver

    # Magic methods
    def __str__(self):
        return '%s%s' % (self.op or '', self.ver)

    # Utility methods
    def matches(self, ver):
        """Match the internal version (constructor) against `ver`.

        Parameters:
            * ver (SemVer)

        Raises:
            * TypeError
                Could not compare `ver` against the version passed in the constructor with the
                passed operator.

        Returns:
            * bool
                `True` if the version matched the specified operator and internal version, `False`
                otherwise.
        """
        if self.op in self._ops_satisfy:
            # "Satisfy": compare major/minor/patch only; '!' negates the result
            return (self.ver[:3] == ver[:3]) != (self.op == '!')

        result = getattr(ver, self._ops[self.op])(self.ver)
        if result == NotImplemented:
            raise TypeError("Unable to compare %r with operator '%s'" % (ver, self.op))
        return result
class SemSelAndChunk(list):
    """A list of SemComparator objects that must ALL match for the chunk to match.

    New elements should be added by calling `.add_child(op, ver)` which creates a SemComparator
    instance and adds that to itself.

    Methods:
        * matches(ver)
        * add_child(op, ver)
    """

    # Magic methods
    def __str__(self):
        return ' '.join(str(comparator) for comparator in self)

    # Utitlity methods
    def matches(self, ver):
        """Match all of the added children against `ver`.

        Parameters:
            * ver (SemVer)

        Raises:
            * TypeError
                Invalid `ver` parameter.

        Returns:
            * bool:
                `True` if *all* of the SemComparator children match `ver`, `False` otherwise.
        """
        if not isinstance(ver, SemVer):
            raise TypeError("`ver` parameter is not instance of SemVer.")

        for comparator in self:
            if not comparator.matches(ver):
                return False
        return True

    def add_child(self, op, ver):
        """Create a SemComparator instance with the given parameters and appends that to self.

        Parameters:
            * op (str)
            * ver (SemVer)
                Both parameters are forwarded to `SemComparator.__init__`, see there for a more
                detailed description.

        Raises:
            Exceptions raised by `SemComparator.__init__`.
        """
        self.append(SemComparator(op, SemVer(ver)))
class SemSelOrChunk(list):
    """A list of SemSelAndChunk objects of which ANY may match for the chunk to match.

    New elements should be added by calling `.new_child()` which returns a SemSelAndChunk
    instance.

    Methods:
        * matches(ver)
        * new_child()
    """

    # Magic methods
    def __str__(self):
        return ' || '.join(str(chunk) for chunk in self)

    # Utility methods
    def matches(self, ver):
        """Match all of the added children against `ver`.

        Parameters:
            * ver (SemVer)

        Raises:
            * TypeError
                Invalid `ver` parameter.

        Returns:
            * bool
                `True` if *any* of the SemSelAndChunk children matches `ver`.
                `False` otherwise.
        """
        if not isinstance(ver, SemVer):
            raise TypeError("`ver` parameter is not instance of SemVer.")

        for chunk in self:
            if chunk.matches(ver):
                return True
        return False

    def new_child(self):
        """Creates a new SemSelAndChunk instance, appends it to self and returns it.

        Returns:
            * SemSelAndChunk: An empty instance.
        """
        child = SemSelAndChunk()
        self.append(child)
        return child
class SelParseError(Exception):
    """Raised when a semantic version selector string can not be parsed."""
    pass
# Subclass `tuple` because this is a somewhat simple method to make this immutable
class SemSel(tuple):
    """A Semantic Version Selector, holds a selector and can match it against semantic versions.

    Information on this particular class and their instances:
        - Immutable but not hashable because the content within might have changed.
        - Subclasses `tuple` but does not behave like one.
        - Always `True` in boolean context.
        - len() returns the number of containing *and chunks* (see below).
        - Iterable, iterates over containing *and chunks*.

    When talking about "versions" it refers to a semantic version (SemVer). For information on how
    versions compare to one another, see SemVer's doc string.

    List for **comparators**:
        "1.0.0"            matches the version 1.0.0 and all its pre-release and build variants
        "!1.0.0"           matches any version that is not 1.0.0 or any of its variants
        "=1.0.0"           matches only the version 1.0.0
        "!=1.0.0"          matches any version that is not 1.0.0
        ">=1.0.0"          matches versions greater than or equal 1.0.0
        "<1.0.0"           matches versions smaller than 1.0.0
        "1.0.0 - 1.0.3"    matches versions greater than or equal 1.0.0 thru 1.0.3
        "~1.0"             matches versions greater than or equal 1.0.0 thru 1.0.9999 (and more)
        "~1", "1.x", "1.*" match versions greater than or equal 1.0.0 thru 1.9999.9999 (and more)
        "~1.1.2"           matches versions greater than or equal 1.1.2 thru 1.1.9999 (and more)
        "~1.1.2+any"       matches versions greater than or equal 1.1.2+any thru 1.1.9999 (and more)
        "*", "~", "~x"     match any version

    Multiple comparators can be combined by using ' ' spaces and every comparator must match to make
    the **and chunk** match a version.
    Multiple and chunks can be combined to **or chunks** using ' || ' and match if any of the and
    chunks split by these matches.

    A complete example would look like:
        ~1 || 0.0.3 || <0.0.2 >0.0.1+b.1337 || 2.0.x || 2.1.0 - 2.1.0+b.12 !=2.1.0+b.9

    Methods:
        * matches(*vers)
    """
    # Private properties
    _fuzzy_regex = re.compile(r'''(?x)^
        (?P<op>[<>]=?|~>?=?)?
        (?:(?P<major>\d+)
           (?:\.(?P<minor>\d+)
              (?:\.(?P<patch>\d+)
                 (?P<other>[-+][a-zA-Z0-9-+.]*)?
              )?
           )?
        )?$''')
    _xrange_regex = re.compile(r'''(?x)^
        (?P<op>[<>]=?|~>?=?)?
        (?:(?P<major>\d+|[xX*])
           (?:\.(?P<minor>\d+|[xX*])
              (?:\.(?P<patch>\d+|[xX*]))?
           )?
        )
        (?P<other>.*)$''')
    _split_op_regex = re.compile(r'^(?P<op>=|[<>!]=?)?(?P<ver>.*)$')

    # "Constructor"
    def __new__(cls, sel):
        """Constructor examples:
            SemSel(">1.0.0")
            SemSel("~1.2.9 !=1.2.12")

        Parameters:
            * sel (str)
                A version selector string.

        Raises:
            * TypeError
                `sel` parameter is not a string.
            * ValueError
                A version in the selector could not be matched as a SemVer.
            * SemParseError
                The version selector's syntax is unparsable; invalid ranges (fuzzy, xrange or
                explicit range) or invalid '||'
        """
        chunk = cls._parse(sel)
        return super(SemSel, cls).__new__(cls, (chunk,))

    # Magic methods
    def __str__(self):
        return str(self._chunk)

    def __repr__(self):
        return 'SemSel("%s")' % self._chunk

    def __len__(self):
        # What would you expect?
        return len(self._chunk)

    def __iter__(self):
        return iter(self._chunk)

    # Read-only (private) attributes
    @property
    def _chunk(self):
        # The single SemSelOrChunk stored in the tuple at construction time
        return self[0]

    # Utility methods
    def matches(self, *vers):
        """Match the selector against a selection of versions.

        Parameters:
            * *vers (str, SemVer)
                Versions can be passed as strings and SemVer objects will be created with them.
                May also be a mixed list.

        Raises:
            * TypeError
                A version is not an instance of str (basestring) or SemVer.
            * ValueError
                A string version could not be parsed as a SemVer.

        Returns:
            * list
                A list with all the versions that matched, may be empty. Use `max()` to determine
                the highest matching version, or `min()` for the lowest.
        """
        ret = []
        for v in vers:
            # FIX: accept basestring (unicode on Py2) as the docstring promises,
            # not just str; on Py3 `basestring` is aliased to str above
            if isinstance(v, basestring):
                t = self._chunk.matches(SemVer(v))
            elif isinstance(v, SemVer):
                t = self._chunk.matches(v)
            else:
                raise TypeError("Invalid parameter type '%s': %s" % (v, type(v)))
            if t:
                ret.append(v)

        return ret

    # Private methods
    @classmethod
    def _parse(cls, sel):
        """Private. Do not touch.

        1. split by whitespace into tokens
            a. start new and_chunk on ' || '
            b. parse " - " ranges
            c. replace "xX*" ranges with "~" equivalent
            d. parse "~" ranges
            e. parse unmatched token as comparator
            ~. append to current and_chunk
        2. return SemSelOrChunk

        Raises TypeError, ValueError or SelParseError.
        """
        if not isinstance(sel, basestring):
            raise TypeError("Selector must be a string")
        if not sel:
            raise ValueError("String must not be empty")

        # Split selector by spaces and crawl the tokens
        tokens = sel.split()
        i = -1
        or_chunk = SemSelOrChunk()
        and_chunk = or_chunk.new_child()

        while i + 1 < len(tokens):
            i += 1
            t = tokens[i]

            # Replace x ranges with ~ selector
            m = cls._xrange_regex.match(t)
            m = m and m.groups('')
            if m and any(not x.isdigit() for x in m[1:4]) and not m[0].startswith('>'):
                # (do not match '>1.0' or '>*')
                if m[4]:
                    raise SelParseError("XRanges do not allow pre-release or build components")

                # Only use digit parts and fail if digit found after non-digit
                mm, xran = [], False
                for x in m[1:4]:
                    if x.isdigit():
                        if xran:
                            raise SelParseError("Invalid fuzzy range or XRange '%s'" % tokens[i])
                        mm.append(x)
                    else:
                        xran = True
                t = m[0] + '.'.join(mm)

                # Append "~" if not already present
                if not t.startswith('~'):
                    t = '~' + t

            # switch t:
            if t == '||':
                if i == 0 or tokens[i - 1] == '||' or i + 1 == len(tokens):
                    raise SelParseError("OR range must not be empty")
                # Start a new and_chunk
                and_chunk = or_chunk.new_child()

            elif t == '-':
                # ' - ' range
                i += 1
                invalid = False
                try:
                    # If these result in exceptions, you know you're doing it wrong
                    t = tokens[i]
                    c = and_chunk[-1]
                except IndexError:
                    # FIX: narrowed from a bare `except:` — only a missing
                    # right-hand version or an empty and_chunk can fail here,
                    # and a bare except would also swallow e.g. KeyboardInterrupt
                    raise SelParseError("Invalid ' - ' range position")

                # If there is an op in front of one of the bound versions
                invalid = (c.op not in ('=', '~')
                           or cls._split_op_regex.match(t).group(1) not in (None, '='))
                if invalid:
                    raise SelParseError("Invalid ' - ' range '%s - %s'"
                                        % (tokens[i - 2], tokens[i]))

                c.op = ">="
                and_chunk.add_child('<=', t)

            elif t == '':
                # Multiple spaces
                pass

            elif t.startswith('~'):
                m = cls._fuzzy_regex.match(t)
                if not m:
                    raise SelParseError("Invalid fuzzy range or XRange '%s'" % tokens[i])
                mm, m = m.groups('')[1:4], m.groupdict('')  # mm: major to patch

                # Minimum requirement
                # FIX: `cls._split_op_regex(...)` called the compiled pattern
                # object directly, which raises TypeError (patterns are not
                # callable); `.match()` was missing. Triggered by fuzzy ranges
                # with a pre-release/build component such as "~1.1.2+any".
                min_ver = ('.'.join(x or '0' for x in mm) + '-'
                           if not m['other']
                           else cls._split_op_regex.match(t[1:]).group('ver'))
                and_chunk.add_child('>=', min_ver)

                if m['major']:
                    # Increase version before none (or second to last if '~1.2.3')
                    e = [0, 0, 0]
                    for j, d in enumerate(mm):
                        if not d or j == len(mm) - 1:
                            e[j - 1] = e[j - 1] + 1
                            break
                        e[j] = int(d)
                    and_chunk.add_child('<', '.'.join(str(x) for x in e) + '-')
                # else: just plain '~' or '*', or '~>X' which are already handled

            else:
                # A normal comparator
                m = cls._split_op_regex.match(t).groupdict()  # this regex can't fail
                and_chunk.add_child(**m)

        # Finally return the or_chunk
        return or_chunk

Some files were not shown because too many files have changed in this diff Show More