feat(ST2.UtilPackages): bump up all packages

- Refresh PackageCache with latest versions of everything
This commit is contained in:
Iristyle
2013-09-16 22:35:46 -04:00
parent 7195197f0f
commit a000ce8acc
451 changed files with 14151 additions and 8317 deletions

View File

@@ -0,0 +1,12 @@
from .bitbucket_repository_provider import BitBucketRepositoryProvider
from .github_repository_provider import GitHubRepositoryProvider
from .github_user_provider import GitHubUserProvider
from .repository_provider import RepositoryProvider
from .channel_provider import ChannelProvider

# Providers tried in order when resolving a repository URL: the first one
# whose match_url() accepts the URL wins. RepositoryProvider is listed last
# because its match_url() always returns True (catch-all).
REPOSITORY_PROVIDERS = [BitBucketRepositoryProvider, GitHubRepositoryProvider,
    GitHubUserProvider, RepositoryProvider]

# ChannelProvider.match_url() also always returns True, so it handles every
# channel URL or file path.
CHANNEL_PROVIDERS = [ChannelProvider]

View File

@@ -0,0 +1,163 @@
import re
from ..clients.bitbucket_client import BitBucketClient
from ..downloaders.downloader_exception import DownloaderException
from ..clients.client_exception import ClientException
from .provider_exception import ProviderException
class BitBucketRepositoryProvider():
    """
    Allows using a public BitBucket repository as the source for a single package.
    For legacy purposes, this can also be treated as the source for a Package
    Control "repository".

    :param repo:
        The public web URL to the BitBucket repository. Should be in the format
        `https://bitbucket.org/user/package`.

    :param settings:
        A dict containing at least the following fields:
          `cache_length`, `debug`, `timeout`, `user_agent`
        Optional fields:
          `http_proxy`, `https_proxy`, `proxy_username`, `proxy_password`,
          `query_string_params`, `install_prereleases`
    """

    def __init__(self, repo, settings):
        # Maps method name -> previously computed result ('get_packages')
        self.cache = {}
        self.repo = repo
        self.settings = settings
        # Maps source URL -> Exception raised while fetching it
        self.failed_sources = {}

    @classmethod
    def match_url(cls, repo):
        """Indicates if this provider can handle the provided repo"""

        return re.search(r'^https?://bitbucket.org/([^/]+/[^/]+)/?$', repo) is not None

    def prefetch(self):
        """
        Go out and perform HTTP operations, caching the result

        :raises:
            DownloaderException: when there is an issue downloading package info
            ClientException: when there is an issue parsing package info
        """

        # Draining the generator populates self.cache
        [name for name, info in self.get_packages()]

    def get_failed_sources(self):
        """
        List of any URLs that could not be accessed while accessing this repository

        :return:
            A generator of ("https://bitbucket.org/user/repo", Exception()) tuples
        """

        return self.failed_sources.items()

    def get_broken_packages(self):
        """
        For API-compatibility with RepositoryProvider
        """

        return {}.items()

    def get_packages(self, invalid_sources=None):
        """
        Uses the BitBucket API to construct necessary info for a package

        :param invalid_sources:
            A list of URLs that should be ignored

        :raises:
            DownloaderException: when there is an issue downloading package info
            ClientException: when there is an issue parsing package info

        :return:
            A generator of ('Package Name', details) tuples, where details is
            a dict with the keys: name, description, author, homepage,
            last_modified, download ({url, date, version}), previous_names,
            labels, sources, readme, issues, donate, buy.
        """

        if 'get_packages' in self.cache:
            for key, value in self.cache['get_packages'].items():
                yield (key, value)
            return

        client = BitBucketClient(self.settings)

        if invalid_sources is not None and self.repo in invalid_sources:
            # A bare return ends the generator; raising StopIteration inside
            # a generator is a RuntimeError as of PEP 479 (Python 3.7+)
            return

        try:
            repo_info = client.repo_info(self.repo)
            download = client.download_info(self.repo)

            name = repo_info['name']
            details = {
                'name': name,
                'description': repo_info['description'],
                'homepage': repo_info['homepage'],
                'author': repo_info['author'],
                'last_modified': download.get('date'),
                'download': download,
                'previous_names': [],
                'labels': [],
                'sources': [self.repo],
                'readme': repo_info['readme'],
                'issues': repo_info['issues'],
                'donate': repo_info['donate'],
                'buy': None
            }
            self.cache['get_packages'] = {name: details}
            yield (name, details)

        except (DownloaderException, ClientException, ProviderException) as e:
            # Record the failure and cache an empty result so later calls
            # return quickly without re-hitting the API
            self.failed_sources[self.repo] = e
            self.cache['get_packages'] = {}
            return

    def get_renamed_packages(self):
        """For API-compatibility with RepositoryProvider"""

        return {}

    def get_unavailable_packages(self):
        """
        Method for compatibility with RepositoryProvider class. These providers
        are based on API calls, and thus do not support different platform
        downloads, making it impossible for there to be unavailable packages.

        :return: An empty list
        """

        return []

View File

@@ -0,0 +1,312 @@
import json
import os
import re
try:
# Python 3
from urllib.parse import urlparse
except (ImportError):
# Python 2
from urlparse import urlparse
from ..console_write import console_write
from .release_selector import ReleaseSelector
from .provider_exception import ProviderException
from ..downloaders.downloader_exception import DownloaderException
from ..clients.client_exception import ClientException
from ..download_manager import downloader
class ChannelProvider(ReleaseSelector):
    """
    Retrieves a channel and provides an API into the information

    The current channel/repository infrastructure caches repository info into
    the channel to improve the Package Control client performance. This also
    has the side effect of lessening the load on the GitHub and BitBucket APIs
    and getting around not-infrequent HTTP 503 errors from those APIs.

    :param channel:
        The URL of the channel

    :param settings:
        A dict containing at least the following fields:
          `cache_length`, `debug`, `timeout`, `user_agent`
        Optional fields:
          `http_proxy`, `https_proxy`, `proxy_username`, `proxy_password`,
          `query_string_params`, `install_prereleases`
    """

    def __init__(self, channel, settings):
        # Parsed JSON of the channel; None until fetch() succeeds
        self.channel_info = None
        # The "schema_version" from the channel JSON, as a float
        self.schema_version = 0.0
        self.channel = channel
        self.settings = settings
        # Filled by get_packages() with names that had no compatible download
        self.unavailable_packages = []

    @classmethod
    def match_url(cls, channel):
        """Indicates if this provider can handle the provided channel"""

        # Catch-all: this provider accepts any channel URL or file path
        return True

    def prefetch(self):
        """
        Go out and perform HTTP operations, caching the result

        :raises:
            ProviderException: when an error occurs trying to open a file
            DownloaderException: when an error occurs trying to open a URL
        """

        self.fetch()

    def fetch(self):
        """
        Retrieves and loads the JSON for other methods to use

        :raises:
            ProviderException: when an error occurs with the channel contents
            DownloaderException: when an error occurs trying to open a URL
        """

        if self.channel_info is not None:
            return

        if re.match(r'https?://', self.channel, re.I):
            with downloader(self.channel, self.settings) as manager:
                channel_json = manager.fetch(self.channel,
                    'Error downloading channel.')

        # All other channels are expected to be filesystem paths
        else:
            if not os.path.exists(self.channel):
                raise ProviderException(u'Error, file %s does not exist' % self.channel)

            if self.settings.get('debug'):
                console_write(u'Loading %s as a channel' % self.channel, True)

            # We open as binary so we get bytes like the DownloadManager
            with open(self.channel, 'rb') as f:
                channel_json = f.read()

        try:
            channel_info = json.loads(channel_json.decode('utf-8'))
        except ValueError:
            raise ProviderException(u'Error parsing JSON from channel %s.' % self.channel)

        schema_error = u'Channel %s does not appear to be a valid channel file because ' % self.channel

        if 'schema_version' not in channel_info:
            raise ProviderException(u'%s the "schema_version" JSON key is missing.' % schema_error)

        try:
            self.schema_version = float(channel_info.get('schema_version'))
        except ValueError:
            raise ProviderException(u'%s the "schema_version" is not a valid number.' % schema_error)

        if self.schema_version not in [1.0, 1.1, 1.2, 2.0]:
            raise ProviderException(u'%s the "schema_version" is not recognized. Must be one of: 1.0, 1.1, 1.2 or 2.0.' % schema_error)

        self.channel_info = channel_info

    def get_name_map(self):
        """
        :raises:
            ProviderException: when an error occurs with the channel contents
            DownloaderException: when an error occurs trying to open a URL

        :return:
            A dict of the mapping for URL slug -> package name
        """

        self.fetch()

        # The name map is a pre-2.0 concept only
        if self.schema_version >= 2.0:
            return {}

        return self.channel_info.get('package_name_map', {})

    def get_renamed_packages(self):
        """
        :raises:
            ProviderException: when an error occurs with the channel contents
            DownloaderException: when an error occurs trying to open a URL

        :return:
            A dict of the packages that have been renamed
        """

        self.fetch()

        # As of schema 2.0, renames come from "previous_names" on packages
        if self.schema_version >= 2.0:
            return {}

        return self.channel_info.get('renamed_packages', {})

    def get_repositories(self):
        """
        :raises:
            ProviderException: when an error occurs with the channel contents
            DownloaderException: when an error occurs trying to open a URL

        :return:
            A list of the repository URLs
        """

        self.fetch()

        if 'repositories' not in self.channel_info:
            raise ProviderException(u'Channel %s does not appear to be a valid channel file because the "repositories" JSON key is missing.' % self.channel)

        # Determine a relative root so repositories can be defined
        # relative to the location of the channel file.
        if re.match(r'https?://', self.channel, re.I):
            url_pieces = urlparse(self.channel)
            domain = url_pieces.scheme + '://' + url_pieces.netloc
            path = '/' if url_pieces.path == '' else url_pieces.path
            if path[-1] != '/':
                path = os.path.dirname(path)
            relative_base = domain + path
        else:
            relative_base = os.path.dirname(self.channel) + '/'

        output = []
        repositories = self.channel_info.get('repositories', [])
        for repository in repositories:
            # Expand "./" and "../" entries against the channel location
            if re.match(r'^\./|\.\./', repository):
                repository = os.path.normpath(relative_base + repository)
            output.append(repository)

        return output

    def get_certs(self):
        """
        Provides a secure way for distribution of SSL CA certificates

        Python does not bundle CA certs with urllib for SSL certificate
        validation, so Package Control distributes the CA certs for all
        HTTPS package-download URLs. The default channel hashes each CA
        cert file and serves a JSON object mapping domain names to the
        hash and download URL of their current CA cert. The channel is
        itself fetched over a connection validated against a bundled CA
        cert, so the resulting certs can be trusted.

        :raises:
            ProviderException: when an error occurs with the channel contents
            DownloaderException: when an error occurs trying to open a URL

        :return:
            A dict of {'Domain Name': ['cert_file_hash', 'cert_file_download_url']}
        """

        self.fetch()
        return self.channel_info.get('certs', {})

    def get_packages(self, repo):
        """
        Provides access to the repository info that is cached in a channel

        :param repo:
            The URL of the repository to get the cached info of

        :raises:
            ProviderException: when an error occurs with the channel contents
            DownloaderException: when an error occurs trying to open a URL

        :return:
            A dict of 'Package Name' -> details, where details is a dict with
            the keys: name, description, author, homepage, last_modified,
            download ({url, date, version}), previous_names, labels, readme,
            issues, donate, buy.
        """

        self.fetch()

        # The 2.0 channel schema renamed the key cached package info was
        # stored under in order to be more clear to new users.
        packages_key = 'packages_cache' if self.schema_version >= 2.0 else 'packages'

        if self.channel_info.get(packages_key, False) == False:
            return {}

        if self.channel_info[packages_key].get(repo, False) == False:
            return {}

        output = {}
        for package in self.channel_info[packages_key][repo]:
            copy = package.copy()

            # In schema version 2.0, we store a list of dicts containing info
            # about all available releases. These include "version" and
            # "platforms" keys that are used to pick the download for the
            # current machine.
            if self.schema_version >= 2.0:
                copy = self.select_release(copy)
            else:
                copy = self.select_platform(copy)

            if not copy:
                # No compatible download for this machine
                self.unavailable_packages.append(package['name'])
                continue

            output[copy['name']] = copy

        return output

    def get_unavailable_packages(self):
        """
        Provides a list of packages that are unavailable for the current
        platform/architecture that Sublime Text is running on.

        This list will be empty unless get_packages() is called first.

        :return: A list of package names
        """

        return self.unavailable_packages

View File

@@ -0,0 +1,169 @@
import re
from ..clients.github_client import GitHubClient
from ..downloaders.downloader_exception import DownloaderException
from ..clients.client_exception import ClientException
from .provider_exception import ProviderException
class GitHubRepositoryProvider():
    """
    Allows using a public GitHub repository as the source for a single package.
    For legacy purposes, this can also be treated as the source for a Package
    Control "repository".

    :param repo:
        The public web URL to the GitHub repository. Should be in the format
        `https://github.com/user/package` for the master branch, or
        `https://github.com/user/package/tree/{branch_name}` for any other
        branch.

    :param settings:
        A dict containing at least the following fields:
          `cache_length`, `debug`, `timeout`, `user_agent`
        Optional fields:
          `http_proxy`, `https_proxy`, `proxy_username`, `proxy_password`,
          `query_string_params`, `install_prereleases`
    """

    def __init__(self, repo, settings):
        # Maps method name -> previously computed result ('get_packages')
        self.cache = {}
        # Clean off the trailing .git to be more forgiving
        self.repo = re.sub(r'\.git$', '', repo)
        self.settings = settings
        # Maps source URL -> Exception raised while fetching it
        self.failed_sources = {}

    @classmethod
    def match_url(cls, repo):
        """Indicates if this provider can handle the provided repo"""

        master = re.search(r'^https?://github.com/[^/]+/[^/]+/?$', repo)
        branch = re.search(r'^https?://github.com/[^/]+/[^/]+/tree/[^/]+/?$',
            repo)
        return master is not None or branch is not None

    def prefetch(self):
        """
        Go out and perform HTTP operations, caching the result

        :raises:
            DownloaderException: when there is an issue downloading package info
            ClientException: when there is an issue parsing package info
        """

        # Draining the generator populates self.cache
        [name for name, info in self.get_packages()]

    def get_failed_sources(self):
        """
        List of any URLs that could not be accessed while accessing this repository

        :return:
            A generator of ("https://github.com/user/repo", Exception()) tuples
        """

        return self.failed_sources.items()

    def get_broken_packages(self):
        """
        For API-compatibility with RepositoryProvider
        """

        return {}.items()

    def get_packages(self, invalid_sources=None):
        """
        Uses the GitHub API to construct necessary info for a package

        :param invalid_sources:
            A list of URLs that should be ignored

        :raises:
            DownloaderException: when there is an issue downloading package info
            ClientException: when there is an issue parsing package info

        :return:
            A generator of ('Package Name', details) tuples, where details is
            a dict with the keys: name, description, author, homepage,
            last_modified, download ({url, date, version}), previous_names,
            labels, sources, readme, issues, donate, buy.
        """

        if 'get_packages' in self.cache:
            for key, value in self.cache['get_packages'].items():
                yield (key, value)
            return

        client = GitHubClient(self.settings)

        if invalid_sources is not None and self.repo in invalid_sources:
            # A bare return ends the generator; raising StopIteration inside
            # a generator is a RuntimeError as of PEP 479 (Python 3.7+)
            return

        try:
            repo_info = client.repo_info(self.repo)
            download = client.download_info(self.repo)

            name = repo_info['name']
            details = {
                'name': name,
                'description': repo_info['description'],
                'homepage': repo_info['homepage'],
                'author': repo_info['author'],
                'last_modified': download.get('date'),
                'download': download,
                'previous_names': [],
                'labels': [],
                'sources': [self.repo],
                'readme': repo_info['readme'],
                'issues': repo_info['issues'],
                'donate': repo_info['donate'],
                'buy': None
            }
            self.cache['get_packages'] = {name: details}
            yield (name, details)

        except (DownloaderException, ClientException, ProviderException) as e:
            # Record the failure and cache an empty result so later calls
            # return quickly without re-hitting the API
            self.failed_sources[self.repo] = e
            self.cache['get_packages'] = {}
            return

    def get_renamed_packages(self):
        """For API-compatibility with RepositoryProvider"""

        return {}

    def get_unavailable_packages(self):
        """
        Method for compatibility with RepositoryProvider class. These providers
        are based on API calls, and thus do not support different platform
        downloads, making it impossible for there to be unavailable packages.

        :return: An empty list
        """

        return []

View File

@@ -0,0 +1,172 @@
import re
from ..clients.github_client import GitHubClient
from ..downloaders.downloader_exception import DownloaderException
from ..clients.client_exception import ClientException
from .provider_exception import ProviderException
class GitHubUserProvider():
    """
    Allows using a GitHub user/organization as the source for multiple packages,
    or in Package Control terminology, a "repository".

    :param repo:
        The public web URL to the GitHub user/org. Should be in the format
        `https://github.com/user`.

    :param settings:
        A dict containing at least the following fields:
          `cache_length`, `debug`, `timeout`, `user_agent`
        Optional fields:
          `http_proxy`, `https_proxy`, `proxy_username`, `proxy_password`,
          `query_string_params`, `install_prereleases`
    """

    def __init__(self, repo, settings):
        # Maps method name -> previously computed result ('get_packages')
        self.cache = {}
        self.repo = repo
        self.settings = settings
        # Maps source URL -> Exception raised while fetching it
        self.failed_sources = {}

    @classmethod
    def match_url(cls, repo):
        """Indicates if this provider can handle the provided repo"""

        return re.search(r'^https?://github.com/[^/]+/?$', repo) is not None

    def prefetch(self):
        """
        Go out and perform HTTP operations, caching the result
        """

        # Draining the generator populates self.cache
        [name for name, info in self.get_packages()]

    def get_failed_sources(self):
        """
        List of any URLs that could not be accessed while accessing this repository

        :return:
            A generator of ("https://github.com/user/repo", Exception()) tuples
        """

        return self.failed_sources.items()

    def get_broken_packages(self):
        """
        For API-compatibility with RepositoryProvider
        """

        return {}.items()

    def get_packages(self, invalid_sources=None):
        """
        Uses the GitHub API to construct necessary info for all packages

        :param invalid_sources:
            A list of URLs that should be ignored

        :raises:
            DownloaderException: when there is an issue downloading package info
            ClientException: when there is an issue parsing package info

        :return:
            A generator of ('Package Name', details) tuples, where details is
            a dict with the keys: name, description, author, homepage,
            last_modified, download ({url, date, version}), previous_names,
            labels, sources, readme, issues, donate, buy.
        """

        if 'get_packages' in self.cache:
            for key, value in self.cache['get_packages'].items():
                yield (key, value)
            return

        client = GitHubClient(self.settings)

        if invalid_sources is not None and self.repo in invalid_sources:
            # A bare return ends the generator; raising StopIteration inside
            # a generator is a RuntimeError as of PEP 479 (Python 3.7+)
            return

        try:
            user_repos = client.user_info(self.repo)
        except (DownloaderException, ClientException, ProviderException) as e:
            # Bug fix: the original rebound failed_sources to a list
            # (breaking the dict contract of get_failed_sources()) and cached
            # the exception object itself, which made any later call crash on
            # `.items()`. Record the failure and cache an empty dict instead.
            self.failed_sources[self.repo] = e
            self.cache['get_packages'] = {}
            raise

        output = {}
        for repo_info in user_repos:
            try:
                name = repo_info['name']
                repo_url = 'https://github.com/' + repo_info['user_repo']

                download = client.download_info(repo_url)

                details = {
                    'name': name,
                    'description': repo_info['description'],
                    'homepage': repo_info['homepage'],
                    'author': repo_info['author'],
                    'last_modified': download.get('date'),
                    'download': download,
                    'previous_names': [],
                    'labels': [],
                    'sources': [self.repo],
                    'readme': repo_info['readme'],
                    'issues': repo_info['issues'],
                    'donate': repo_info['donate'],
                    'buy': None
                }
                output[name] = details
                yield (name, details)

            except (DownloaderException, ClientException, ProviderException) as e:
                # Skip just this repo, but remember why it failed
                self.failed_sources[repo_url] = e

        self.cache['get_packages'] = output

    def get_renamed_packages(self):
        """For API-compatibility with RepositoryProvider"""

        return {}

    def get_unavailable_packages(self):
        """
        Method for compatibility with RepositoryProvider class. These providers
        are based on API calls, and thus do not support different platform
        downloads, making it impossible for there to be unavailable packages.

        :return: An empty list
        """

        return []

View File

@@ -0,0 +1,5 @@
class ProviderException(Exception):
    """Raised when a provider could not return information."""

    def __str__(self):
        # The first positional argument carries the human-readable message
        message = self.args[0]
        return message

View File

@@ -0,0 +1,125 @@
import re
import sublime
from ..versions import version_sort, version_exclude_prerelease
class ReleaseSelector():
    """
    A base class for finding the best version of a package for the current machine
    """

    def select_release(self, package_info):
        """
        Returns a modified package info dict for package from package schema version 2.0

        :param package_info:
            A package info dict with a "releases" key

        :return:
            The package info dict with the "releases" key deleted, and a
            "download" key added that contains a dict with "version", "url" and
            "date" keys.
            None if no compatible releases are available.
        """

        releases = version_sort(package_info['releases'])
        if not self.settings.get('install_prereleases'):
            releases = version_exclude_prerelease(releases)

        # Releases are sorted newest-first; take the first compatible one
        for release in releases:
            platforms = release.get('platforms', '*')
            if not isinstance(platforms, list):
                platforms = [platforms]

            best_platform = self.get_best_platform(platforms)
            if not best_platform:
                continue

            if not self.is_compatible_version(release.get('sublime_text', '<3000')):
                continue

            package_info['download'] = release
            package_info['last_modified'] = release.get('date')
            del package_info['releases']
            return package_info

        return None

    def select_platform(self, package_info):
        """
        Returns a modified package info dict for package from package schema version <= 1.2

        :param package_info:
            A package info dict with a "platforms" key

        :return:
            The package info dict with the "platforms" key deleted, and a
            "download" key added that contains a dict with "version" and "url"
            keys.
            None if no compatible platforms.
        """

        platforms = list(package_info['platforms'].keys())
        best_platform = self.get_best_platform(platforms)
        if not best_platform:
            return None

        package_info['download'] = package_info['platforms'][best_platform][0]
        package_info['download']['date'] = package_info.get('last_modified')
        del package_info['platforms']

        return package_info

    def get_best_platform(self, platforms):
        """
        Returns the most specific platform that matches the current machine

        :param platforms:
            An array of platform names for a package. E.g. ['*', 'windows', 'linux-x64']

        :return: A string representing the most specific matching platform
        """

        # Most specific first: "platform-arch", then "platform", then "*"
        ids = [sublime.platform() + '-' + sublime.arch(), sublime.platform(),
            '*']

        for platform_id in ids:
            if platform_id in platforms:
                return platform_id

        return None

    def is_compatible_version(self, version_range):
        """
        Checks a Sublime Text version range (e.g. '*', '>3000', '3000 - 3999')
        against the running Sublime Text build.

        :param version_range:
            The range string from the release's "sublime_text" key

        :return:
            True or False for a recognized range; None when the range string
            is not in a recognized format
        """

        min_version = float("-inf")
        max_version = float("inf")

        if version_range == '*':
            return True

        gt_match = re.match(r'>(\d+)$', version_range)
        ge_match = re.match(r'>=(\d+)$', version_range)
        lt_match = re.match(r'<(\d+)$', version_range)
        le_match = re.match(r'<=(\d+)$', version_range)
        range_match = re.match(r'(\d+) - (\d+)$', version_range)

        if gt_match:
            min_version = int(gt_match.group(1)) + 1
        elif ge_match:
            min_version = int(ge_match.group(1))
        elif lt_match:
            max_version = int(lt_match.group(1)) - 1
        elif le_match:
            max_version = int(le_match.group(1))
        elif range_match:
            min_version = int(range_match.group(1))
            max_version = int(range_match.group(2))
        else:
            # Unrecognized format - preserved as None (falsy) for callers
            # that test the result with `if not ...`
            return None

        if min_version > int(sublime.version()):
            return False
        if max_version < int(sublime.version()):
            return False

        return True

View File

@@ -0,0 +1,454 @@
import json
import re
import os
from itertools import chain
try:
# Python 3
from urllib.parse import urlparse
except (ImportError):
# Python 2
from urlparse import urlparse
from ..console_write import console_write
from .release_selector import ReleaseSelector
from .provider_exception import ProviderException
from ..downloaders.downloader_exception import DownloaderException
from ..clients.client_exception import ClientException
from ..clients.github_client import GitHubClient
from ..clients.bitbucket_client import BitBucketClient
from ..download_manager import downloader
class RepositoryProvider(ReleaseSelector):
"""
Generic repository downloader that fetches package info
With the current channel/repository architecture where the channel file
caches info from all includes repositories, these package providers just
serve the purpose of downloading packages not in the default channel.
The structure of the JSON a repository should contain is located in
example-packages.json.
:param repo:
The URL of the package repository
:param settings:
A dict containing at least the following fields:
`cache_length`,
`debug`,
`timeout`,
`user_agent`
Optional fields:
`http_proxy`,
`https_proxy`,
`proxy_username`,
`proxy_password`,
`query_string_params`
`install_prereleases`
"""
def __init__(self, repo, settings):
    # Maps method name -> previously computed result ('get_packages')
    self.cache = {}
    # Parsed JSON of the repository file; None until fetch() has run
    self.repo_info = None
    # The "schema_version" from the repository JSON, as a float (e.g. 2.0)
    self.schema_version = 0.0
    self.repo = repo
    self.settings = settings
    # Package names with no download compatible with this machine
    self.unavailable_packages = []
    # Maps source URL -> Exception raised while fetching it
    self.failed_sources = {}
    # Maps package name -> Exception describing why its info is unusable
    self.broken_packages = {}
@classmethod
def match_url(cls, repo):
    """Indicates if this provider can handle the provided repo"""

    # Catch-all provider: accepts any URL or file path. It is listed last
    # in REPOSITORY_PROVIDERS so more specific providers are tried first.
    return True
def prefetch(self):
    """
    Go out and perform HTTP operations, caching the result

    :raises:
        DownloaderException: when there is an issue download package info
        ClientException: when there is an issue parsing package info
    """

    # Drain the generator purely for its caching side effect
    for _name, _info in self.get_packages():
        pass
def get_failed_sources(self):
    """
    List of any URLs that could not be accessed while accessing this repository

    :return:
        A generator of ("https://example.com", Exception()) tuples
    """

    # Entries are recorded as failures occur while fetching sources
    return self.failed_sources.items()
def get_broken_packages(self):
    """
    List of package names for packages that are missing information

    :return:
        A generator of ("Package Name", Exception()) tuples
    """

    return self.broken_packages.items()
def fetch(self):
    """
    Retrieves and loads the JSON for other methods to use

    :raises:
        ProviderException: when an error occurs trying to open a file
        DownloaderException: when an error occurs trying to open a URL
    """

    if self.repo_info is not None:
        return

    self.repo_info = self.fetch_location(self.repo)

    if 'includes' not in self.repo_info:
        return

    # Allow repositories to include other repositories. Determine a
    # relative base so "./" and "../" includes resolve against the
    # location of this repository file.
    if re.match(r'https?://', self.repo, re.I):
        url_pieces = urlparse(self.repo)
        domain = url_pieces.scheme + '://' + url_pieces.netloc
        path = '/' if url_pieces.path == '' else url_pieces.path
        if path[-1] != '/':
            path = os.path.dirname(path)
        relative_base = domain + path
    else:
        relative_base = os.path.dirname(self.repo) + '/'

    includes = self.repo_info.get('includes', [])
    del self.repo_info['includes']
    for include in includes:
        if re.match(r'^\./|\.\./', include):
            include = os.path.normpath(relative_base + include)
        include_info = self.fetch_location(include)
        included_packages = include_info.get('packages', [])
        self.repo_info['packages'].extend(included_packages)
def fetch_location(self, location):
    """
    Fetches the contents of a URL or file path

    :param location:
        The URL or file path

    :raises:
        ProviderException: when an error occurs trying to open a file
        DownloaderException: when an error occurs trying to open a URL

    :return:
        A dict of the parsed JSON
    """

    # Bug fix: decide URL-vs-file based on the location being fetched,
    # not self.repo. The original tested self.repo, so an included
    # location of the other kind (e.g. a URL include from a file-based
    # repository) was handled with the wrong code path.
    if re.match(r'https?://', location, re.I):
        with downloader(location, self.settings) as manager:
            json_string = manager.fetch(location, 'Error downloading repository.')

    # Anything that is not a URL is expected to be a filesystem path
    else:
        if not os.path.exists(location):
            raise ProviderException(u'Error, file %s does not exist' % location)

        if self.settings.get('debug'):
            console_write(u'Loading %s as a repository' % location, True)

        # We open as binary so we get bytes like the DownloadManager
        with open(location, 'rb') as f:
            json_string = f.read()

    try:
        return json.loads(json_string.decode('utf-8'))
    except ValueError:
        raise ProviderException(u'Error parsing JSON from repository %s.' % location)
def get_packages(self, invalid_sources=None):
"""
Provides access to the packages in this repository
:param invalid_sources:
A list of URLs that are permissible to fetch data from
:raises:
ProviderException: when an error occurs trying to open a file
DownloaderException: when there is an issue download package info
ClientException: when there is an issue parsing package info
:return:
A generator of
(
'Package Name',
{
'name': name,
'description': description,
'author': author,
'homepage': homepage,
'last_modified': last modified date,
'download': {
'url': url,
'date': date,
'version': version
},
'previous_names': [old_name, ...],
'labels': [label, ...],
'sources': [url, ...],
'readme': url,
'issues': url,
'donate': url,
'buy': url
}
)
tuples
"""
if 'get_packages' in self.cache:
for key, value in self.cache['get_packages'].items():
yield (key, value)
return
if invalid_sources != None and self.repo in invalid_sources:
raise StopIteration()
try:
self.fetch()
except (DownloaderException, ProviderException) as e:
self.failed_sources[self.repo] = e
self.cache['get_packages'] = {}
return
def fail(message):
exception = ProviderException(message)
self.failed_sources[self.repo] = exception
self.cache['get_packages'] = {}
return
schema_error = u'Repository %s does not appear to be a valid repository file because ' % self.repo
if 'schema_version' not in self.repo_info:
error_string = u'%s the "schema_version" JSON key is missing.' % schema_error
fail(error_string)
return
try:
self.schema_version = float(self.repo_info.get('schema_version'))
except (ValueError):
error_string = u'%s the "schema_version" is not a valid number.' % schema_error
fail(error_string)
return
if self.schema_version not in [1.0, 1.1, 1.2, 2.0]:
error_string = u'%s the "schema_version" is not recognized. Must be one of: 1.0, 1.1, 1.2 or 2.0.' % schema_error
fail(error_string)
return
if 'packages' not in self.repo_info:
error_string = u'%s the "packages" JSON key is missing.' % schema_error
fail(error_string)
return
github_client = GitHubClient(self.settings)
bitbucket_client = BitBucketClient(self.settings)
# Backfill the "previous_names" keys for old schemas
previous_names = {}
if self.schema_version < 2.0:
renamed = self.get_renamed_packages()
for old_name in renamed:
new_name = renamed[old_name]
if new_name not in previous_names:
previous_names[new_name] = []
previous_names[new_name].append(old_name)
output = {}
for package in self.repo_info['packages']:
info = {
'sources': [self.repo]
}
for field in ['name', 'description', 'author', 'last_modified', 'previous_names',
'labels', 'homepage', 'readme', 'issues', 'donate', 'buy']:
if package.get(field):
info[field] = package.get(field)
# Schema version 2.0 allows for grabbing details about a pacakge, or its
# download from "details" urls. See the GitHubClient and BitBucketClient
# classes for valid URLs.
if self.schema_version >= 2.0:
details = package.get('details')
releases = package.get('releases')
# Try to grab package-level details from GitHub or BitBucket
if details:
if invalid_sources != None and details in invalid_sources:
continue
info['sources'].append(details)
try:
github_repo_info = github_client.repo_info(details)
bitbucket_repo_info = bitbucket_client.repo_info(details)
# When grabbing details, prefer explicit field values over the values
# from the GitHub or BitBucket API
if github_repo_info:
info = dict(chain(github_repo_info.items(), info.items()))
elif bitbucket_repo_info:
info = dict(chain(bitbucket_repo_info.items(), info.items()))
else:
raise ProviderException(u'Invalid "details" value "%s" for one of the packages in the repository %s.' % (details, self.repo))
except (DownloaderException, ClientException, ProviderException) as e:
if 'name' in info:
self.broken_packages[info['name']] = e
self.failed_sources[details] = e
continue
# If no releases info was specified, also grab the download info from GH or BB
if not releases and details:
releases = [{'details': details}]
if not releases:
e = ProviderException(u'No "releases" value for one of the packages in the repository %s.' % self.repo)
if 'name' in info:
self.broken_packages[info['name']] = e
else:
self.failed_sources[self.repo] = e
continue
# This allows developers to specify a GH or BB location to get releases from,
# especially tags URLs (https://github.com/user/repo/tags or
# https://bitbucket.org/user/repo#tags)
info['releases'] = []
for release in releases:
download_details = None
download_info = {}
# Make sure that explicit fields are copied over
for field in ['platforms', 'sublime_text', 'version', 'url', 'date']:
if field in release:
download_info[field] = release[field]
if 'details' in release:
download_details = release['details']
try:
github_download = github_client.download_info(download_details)
bitbucket_download = bitbucket_client.download_info(download_details)
# Overlay the explicit field values over values fetched from the APIs
if github_download:
download_info = dict(chain(github_download.items(), download_info.items()))
# No matching tags
elif github_download == False:
download_info = {}
elif bitbucket_download:
download_info = dict(chain(bitbucket_download.items(), download_info.items()))
# No matching tags
elif bitbucket_download == False:
download_info = {}
else:
raise ProviderException(u'Invalid "details" value "%s" under the "releases" key for the package "%s" in the repository %s.' % (download_details, info['name'], self.repo))
except (DownloaderException, ClientException, ProviderException) as e:
if 'name' in info:
self.broken_packages[info['name']] = e
self.failed_sources[download_details] = e
continue
if download_info:
info['releases'].append(download_info)
info = self.select_release(info)
# Schema version 1.0, 1.1 and 1.2 just require that all values be
# explicitly specified in the package JSON
else:
info['platforms'] = package.get('platforms')
info = self.select_platform(info)
if not info:
self.unavailable_packages.append(package['name'])
continue
if 'download' not in info and 'releases' not in info:
self.broken_packages[info['name']] = ProviderException(u'No "releases" key for the package "%s" in the repository %s.' % (info['name'], self.repo))
continue
for field in ['previous_names', 'labels']:
if field not in info:
info[field] = []
for field in ['readme', 'issues', 'donate', 'buy']:
if field not in info:
info[field] = None
if 'homepage' not in info:
info['homepage'] = self.repo
if 'download' in info:
# Rewrites the legacy "zipball" URLs to the new "zip" format
info['download']['url'] = re.sub(
'^(https://nodeload.github.com/[^/]+/[^/]+/)zipball(/.*)$',
'\\1zip\\2', info['download']['url'])
# Rewrites the legacy "nodeload" URLs to the new "codeload" subdomain
info['download']['url'] = info['download']['url'].replace(
'nodeload.github.com', 'codeload.github.com')
# Extract the date from the download
if 'last_modified' not in info:
info['last_modified'] = info['download']['date']
elif 'releases' in info and 'last_modified' not in info:
# Extract a date from the newest download
date = '1970-01-01 00:00:00'
for release in info['releases']:
if 'date' in release and release['date'] > date:
date = release['date']
info['last_modified'] = date
if info['name'] in previous_names:
info['previous_names'].extend(previous_names[info['name']])
output[info['name']] = info
yield (info['name'], info)
self.cache['get_packages'] = output
def get_renamed_packages(self):
""":return: A dict of the packages that have been renamed"""
if self.schema_version < 2.0:
return self.repo_info.get('renamed_packages', {})
output = {}
for package in self.repo_info['packages']:
if 'previous_names' not in package:
continue
previous_names = package['previous_names']
if not isinstance(previous_names, list):
previous_names = [previous_names]
for previous_name in previous_names:
output[previous_name] = package['name']
return output
def get_unavailable_packages(self):
"""
Provides a list of packages that are unavailable for the current
platform/architecture that Sublime Text is running on.
This list will be empty unless get_packages() is called first.
:return: A list of package names
"""
return self.unavailable_packages