desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
'Define name for referencing matching tokens as a nested attribute of the returned parse results. NOTE: this returns a *copy* of the original C{ParserElement} object; this is so that the client can define a basic element, such as an integer, and reference it in multiple places with different names. You can also set res...
def setResultsName(self, name, listAllMatches=False):
newself = self.copy() if name.endswith('*'): name = name[:(-1)] listAllMatches = True newself.resultsName = name newself.modalResults = (not listAllMatches) return newself
'Method to invoke the Python pdb debugger when this element is about to be parsed. Set C{breakFlag} to True to enable, False to disable.'
def setBreak(self, breakFlag=True):
if breakFlag: _parseMethod = self._parse def breaker(instring, loc, doActions=True, callPreParse=True): import pdb pdb.set_trace() return _parseMethod(instring, loc, doActions, callPreParse) breaker._originalParseMethod = _parseMethod self._parse =...
'Define action to perform when successfully matching parse element definition. Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: - s = the original string being parsed (see note below) - loc = the location of the matching substr...
def setParseAction(self, *fns, **kwargs):
self.parseAction = list(map(_trim_arity, list(fns))) self.callDuringTry = kwargs.get('callDuringTry', False) return self
'Add parse action to expression\'s list of parse actions. See L{I{setParseAction}<setParseAction>}.'
def addParseAction(self, *fns, **kwargs):
self.parseAction += list(map(_trim_arity, list(fns))) self.callDuringTry = (self.callDuringTry or kwargs.get('callDuringTry', False)) return self
'Add a boolean predicate function to expression\'s list of parse actions. See L{I{setParseAction}<setParseAction>}. Optional keyword argument C{message} can be used to define a custom message to be used in the raised exception.'
def addCondition(self, *fns, **kwargs):
msg = (kwargs.get('message') or 'failed user-defined condition') for fn in fns: def pa(s, l, t): if (not bool(_trim_arity(fn)(s, l, t))): raise ParseException(s, l, msg) return t self.parseAction.append(pa) self.callDuringTry = (self.callDuringTr...
'Define action to perform if parsing fails at this expression. Fail acton fn is a callable function that takes the arguments C{fn(s,loc,expr,err)} where: - s = string being parsed - loc = location where expression match was attempted and failed - expr = the parse expression that failed - err = the exception thrown The ...
def setFailAction(self, fn):
self.failAction = fn return self
'Enables "packrat" parsing, which adds memoizing to the parsing logic. Repeated parse attempts at the same string location (which happens often in many complex grammars) can immediately return a cached value, instead of re-executing parsing/validating code. Memoizing is done of both valid results and parsing exception...
@staticmethod def enablePackrat():
if (not ParserElement._packratEnabled): ParserElement._packratEnabled = True ParserElement._parse = ParserElement._parseCache
'Execute the parse expression with the given string. This is the main interface to the client code, once the complete expression has been built. If you want the grammar to require that the entire input string be successfully parsed, then set C{parseAll} to True (equivalent to ending the grammar with C{L{StringEnd()}})....
def parseString(self, instring, parseAll=False):
ParserElement.resetCache() if (not self.streamlined): self.streamline() for e in self.ignoreExprs: e.streamline() if (not self.keepTabs): instring = instring.expandtabs() try: (loc, tokens) = self._parse(instring, 0) if parseAll: loc = self.prePars...
'Scan the input string for expression matches. Each match will return the matching tokens, start location, and end location. May be called with optional C{maxMatches} argument, to clip scanning after \'n\' matches are found. If C{overlap} is specified, then overlapping matches will be reported. Note that the start a...
def scanString(self, instring, maxMatches=_MAX_INT, overlap=False):
if (not self.streamlined): self.streamline() for e in self.ignoreExprs: e.streamline() if (not self.keepTabs): instring = _ustr(instring).expandtabs() instrlen = len(instring) loc = 0 preparseFn = self.preParse parseFn = self._parse ParserElement.resetCache() ...
'Extension to C{L{scanString}}, to modify matching text with modified tokens that may be returned from a parse action. To use C{transformString}, define a grammar and attach a parse action to it that modifies the returned token list. Invoking C{transformString()} on a target string will then scan for matches, and repl...
def transformString(self, instring):
out = [] lastE = 0 self.keepTabs = True try: for (t, s, e) in self.scanString(instring): out.append(instring[lastE:s]) if t: if isinstance(t, ParseResults): out += t.asList() elif isinstance(t, list): ...
'Another extension to C{L{scanString}}, simplifying the access to the tokens found to match the given parse expression. May be called with optional C{maxMatches} argument, to clip searching after \'n\' matches are found.'
def searchString(self, instring, maxMatches=_MAX_INT):
try: return ParseResults([t for (t, s, e) in self.scanString(instring, maxMatches)]) except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise else: raise exc
'Implementation of + operator - returns C{L{And}}'
def __add__(self, other):
if isinstance(other, basestring): other = ParserElement.literalStringClass(other) if (not isinstance(other, ParserElement)): warnings.warn(('Cannot combine element of type %s with ParserElement' % type(other)), SyntaxWarning, stacklevel=2) return None return And(...
'Implementation of + operator when left operand is not a C{L{ParserElement}}'
def __radd__(self, other):
if isinstance(other, basestring): other = ParserElement.literalStringClass(other) if (not isinstance(other, ParserElement)): warnings.warn(('Cannot combine element of type %s with ParserElement' % type(other)), SyntaxWarning, stacklevel=2) return None return (oth...
'Implementation of - operator, returns C{L{And}} with error stop'
def __sub__(self, other):
if isinstance(other, basestring): other = ParserElement.literalStringClass(other) if (not isinstance(other, ParserElement)): warnings.warn(('Cannot combine element of type %s with ParserElement' % type(other)), SyntaxWarning, stacklevel=2) return None return And(...
'Implementation of - operator when left operand is not a C{L{ParserElement}}'
def __rsub__(self, other):
if isinstance(other, basestring): other = ParserElement.literalStringClass(other) if (not isinstance(other, ParserElement)): warnings.warn(('Cannot combine element of type %s with ParserElement' % type(other)), SyntaxWarning, stacklevel=2) return None return (oth...
'Implementation of * operator, allows use of C{expr * 3} in place of C{expr + expr + expr}. Expressions may also me multiplied by a 2-integer tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples may also include C{None} as in: - C{expr*(n,None)} or C{expr*(n,)} is equivalent to C{expr*n + L{ZeroO...
def __mul__(self, other):
if isinstance(other, int): (minElements, optElements) = (other, 0) elif isinstance(other, tuple): other = (other + (None, None))[:2] if (other[0] is None): other = (0, other[1]) if (isinstance(other[0], int) and (other[1] is None)): if (other[0] == 0): ...
'Implementation of | operator - returns C{L{MatchFirst}}'
def __or__(self, other):
if isinstance(other, basestring): other = ParserElement.literalStringClass(other) if (not isinstance(other, ParserElement)): warnings.warn(('Cannot combine element of type %s with ParserElement' % type(other)), SyntaxWarning, stacklevel=2) return None return Matc...
'Implementation of | operator when left operand is not a C{L{ParserElement}}'
def __ror__(self, other):
if isinstance(other, basestring): other = ParserElement.literalStringClass(other) if (not isinstance(other, ParserElement)): warnings.warn(('Cannot combine element of type %s with ParserElement' % type(other)), SyntaxWarning, stacklevel=2) return None return (oth...
'Implementation of ^ operator - returns C{L{Or}}'
def __xor__(self, other):
if isinstance(other, basestring): other = ParserElement.literalStringClass(other) if (not isinstance(other, ParserElement)): warnings.warn(('Cannot combine element of type %s with ParserElement' % type(other)), SyntaxWarning, stacklevel=2) return None return Or([...
'Implementation of ^ operator when left operand is not a C{L{ParserElement}}'
def __rxor__(self, other):
if isinstance(other, basestring): other = ParserElement.literalStringClass(other) if (not isinstance(other, ParserElement)): warnings.warn(('Cannot combine element of type %s with ParserElement' % type(other)), SyntaxWarning, stacklevel=2) return None return (oth...
'Implementation of & operator - returns C{L{Each}}'
def __and__(self, other):
if isinstance(other, basestring): other = ParserElement.literalStringClass(other) if (not isinstance(other, ParserElement)): warnings.warn(('Cannot combine element of type %s with ParserElement' % type(other)), SyntaxWarning, stacklevel=2) return None return Each...
'Implementation of & operator when left operand is not a C{L{ParserElement}}'
def __rand__(self, other):
if isinstance(other, basestring): other = ParserElement.literalStringClass(other) if (not isinstance(other, ParserElement)): warnings.warn(('Cannot combine element of type %s with ParserElement' % type(other)), SyntaxWarning, stacklevel=2) return None return (oth...
'Implementation of ~ operator - returns C{L{NotAny}}'
def __invert__(self):
return NotAny(self)
'Shortcut for C{L{setResultsName}}, with C{listAllMatches=default}:: userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") could be written as:: userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") If C{name} is given with a trailing C{\'*\'} character, then C{listAllMatches...
def __call__(self, name=None):
if (name is not None): return self.setResultsName(name) else: return self.copy()
'Suppresses the output of this C{ParserElement}; useful to keep punctuation from cluttering up returned output.'
def suppress(self):
return Suppress(self)
'Disables the skipping of whitespace before matching the characters in the C{ParserElement}\'s defined pattern. This is normally only used internally by the pyparsing module, but may be needed in some whitespace-sensitive grammars.'
def leaveWhitespace(self):
self.skipWhitespace = False return self
'Overrides the default whitespace chars'
def setWhitespaceChars(self, chars):
self.skipWhitespace = True self.whiteChars = chars self.copyDefaultWhiteChars = False return self
'Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string. Must be called before C{parseString} when the input grammar contains elements that match C{<TAB>} characters.'
def parseWithTabs(self):
self.keepTabs = True return self
'Define expression to be ignored (e.g., comments) while doing pattern matching; may be called repeatedly, to define multiple comment or other ignorable patterns.'
def ignore(self, other):
if isinstance(other, basestring): other = Suppress(other) if isinstance(other, Suppress): if (other not in self.ignoreExprs): self.ignoreExprs.append(other) else: self.ignoreExprs.append(Suppress(other.copy())) return self
'Enable display of debugging messages while doing pattern matching.'
def setDebugActions(self, startAction, successAction, exceptionAction):
self.debugActions = ((startAction or _defaultStartDebugAction), (successAction or _defaultSuccessDebugAction), (exceptionAction or _defaultExceptionDebugAction)) self.debug = True return self
'Enable display of debugging messages while doing pattern matching. Set C{flag} to True to enable, False to disable.'
def setDebug(self, flag=True):
if flag: self.setDebugActions(_defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction) else: self.debug = False return self
'Check defined expressions for valid structure, check for infinite recursive definitions.'
def validate(self, validateTrace=[]):
self.checkRecursion([])
'Execute the parse expression on the given file or filename. If a filename is specified (instead of a file object), the entire file is opened, read, and closed before parsing.'
def parseFile(self, file_or_filename, parseAll=False):
try: file_contents = file_or_filename.read() except AttributeError: f = open(file_or_filename, 'r') file_contents = f.read() f.close() try: return self.parseString(file_contents, parseAll) except ParseBaseException as exc: if ParserElement.verbose_stacktra...
'Execute the parse expression on a series of test strings, showing each test, the parsed results or where the parse failed. Quick and easy way to run a parse expression against a list of sample strings. Parameters: - tests - a list of separate test strings, or a multiline string of test strings - parseAll - (default=Fa...
def runTests(self, tests, parseAll=False):
if isinstance(tests, basestring): tests = map(str.strip, tests.splitlines()) for t in tests: out = [t] try: out.append(self.parseString(t, parseAll=parseAll).dump()) except ParseException as pe: if ('\n' in t): out.append(line(pe.loc, t)) ...
'Overrides the default Keyword chars'
@staticmethod def setDefaultKeywordChars(chars):
Keyword.DEFAULT_KEYWORD_CHARS = chars
'The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags.'
def __init__(self, pattern, flags=0):
super(Regex, self).__init__() if isinstance(pattern, basestring): if (not pattern): warnings.warn('null string passed to Regex; use Empty() instead', SyntaxWarning, stacklevel=2) self.pattern = pattern self.flags = flags try: self.re =...
'Defined with the following parameters: - quoteChar - string of one or more characters defining the quote delimiting string - escChar - character to escape quotes, typically backslash (default=None) - escQuote - special quote sequence to escape an embedded quote string (such as SQL\'s "" to escape an embedded ") (defau...
def __init__(self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None):
super(QuotedString, self).__init__() quoteChar = quoteChar.strip() if (not quoteChar): warnings.warn('quoteChar cannot be the empty string', SyntaxWarning, stacklevel=2) raise SyntaxError() if (endQuoteChar is None): endQuoteChar = quoteChar else: endQu...
'Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on all contained expressions.'
def leaveWhitespace(self):
self.skipWhitespace = False self.exprs = [e.copy() for e in self.exprs] for e in self.exprs: e.leaveWhitespace() return self
'Resolve strings to objects using standard import and attribute syntax.'
def resolve(self, s):
name = s.split('.') used = name.pop(0) try: found = self.importer(used) for frag in name: used += ('.' + frag) try: found = getattr(found, frag) except AttributeError: self.importer(used) found = getattr(foun...
'Default converter for the ext:// protocol.'
def ext_convert(self, value):
return self.resolve(value)
'Default converter for the cfg:// protocol.'
def cfg_convert(self, value):
rest = value m = self.WORD_PATTERN.match(rest) if (m is None): raise ValueError(('Unable to convert %r' % value)) else: rest = rest[m.end():] d = self.config[m.groups()[0]] while rest: m = self.DOT_PATTERN.match(rest) if m: ...
'Convert values to an appropriate type. dicts, lists and tuples are replaced by their converting alternatives. Strings are checked to see if they have a conversion format and are converted if they do.'
def convert(self, value):
if ((not isinstance(value, ConvertingDict)) and isinstance(value, dict)): value = ConvertingDict(value) value.configurator = self elif ((not isinstance(value, ConvertingList)) and isinstance(value, list)): value = ConvertingList(value) value.configurator = self elif ((not isi...
'Configure an object with a user-supplied factory.'
def configure_custom(self, config):
c = config.pop('()') if ((not hasattr(c, '__call__')) and hasattr(types, 'ClassType') and (type(c) != types.ClassType)): c = self.resolve(c) props = config.pop('.', None) kwargs = dict(((k, config[k]) for k in config if valid_ident(k))) result = c(**kwargs) if props: for (name, v...
'Utility function which converts lists to tuples.'
def as_tuple(self, value):
if isinstance(value, list): value = tuple(value) return value
'Do the configuration.'
def configure(self):
config = self.config if ('version' not in config): raise ValueError("dictionary doesn't specify a version") if (config['version'] != 1): raise ValueError(('Unsupported version: %s' % config['version'])) incremental = config.pop('incremental', False) EMPTY_DICT = {} ...
'Configure a formatter from a dictionary.'
def configure_formatter(self, config):
if ('()' in config): factory = config['()'] try: result = self.configure_custom(config) except TypeError as te: if ("'format'" not in str(te)): raise config['fmt'] = config.pop('format') config['()'] = factory result...
'Configure a filter from a dictionary.'
def configure_filter(self, config):
if ('()' in config): result = self.configure_custom(config) else: name = config.get('name', '') result = logging.Filter(name) return result
'Add filters to a filterer from a list of names.'
def add_filters(self, filterer, filters):
for f in filters: try: filterer.addFilter(self.config['filters'][f]) except StandardError as e: raise ValueError(('Unable to add filter %r: %s' % (f, e)))
'Configure a handler from a dictionary.'
def configure_handler(self, config):
formatter = config.pop('formatter', None) if formatter: try: formatter = self.config['formatters'][formatter] except StandardError as e: raise ValueError(('Unable to set formatter %r: %s' % (formatter, e))) level = config.pop('level', None) filters ...
'Add handlers to a logger from a list of names.'
def add_handlers(self, logger, handlers):
for h in handlers: try: logger.addHandler(self.config['handlers'][h]) except StandardError as e: raise ValueError(('Unable to add handler %r: %s' % (h, e)))
'Perform configuration which is common to root and non-root loggers.'
def common_logger_config(self, logger, config, incremental=False):
level = config.get('level', None) if (level is not None): logger.setLevel(_checkLevel(level)) if (not incremental): for h in logger.handlers[:]: logger.removeHandler(h) handlers = config.get('handlers', None) if handlers: self.add_handlers(logger, hand...
'Configure a non-root logger from a dictionary.'
def configure_logger(self, name, config, incremental=False):
logger = logging.getLogger(name) self.common_logger_config(logger, config, incremental) propagate = config.get('propagate', None) if (propagate is not None): logger.propagate = propagate
'Configure a root logger from a dictionary.'
def configure_root(self, config, incremental=False):
root = logging.getLogger() self.common_logger_config(root, config, incremental)
'Create a wheel cache. :param cache_dir: The root of the cache. :param format_control: A pip.index.FormatControl object to limit binaries being read from the cache.'
def __init__(self, cache_dir, format_control):
self._cache_dir = (expanduser(cache_dir) if cache_dir else None) self._format_control = format_control
':raises InvalidWheelFilename: when the filename is invalid for a wheel'
def __init__(self, filename):
wheel_info = self.wheel_file_re.match(filename) if (not wheel_info): raise InvalidWheelFilename(('%s is not a valid wheel filename.' % filename)) self.filename = filename self.name = wheel_info.group('name').replace('_', '-') self.version = wheel_info.group('ver').replace('...
'Return the lowest index that one of the wheel\'s file_tag combinations achieves in the supported_tags list e.g. if there are 8 supported tags, and one of the file tags is first in the list, then return 0. Returns None is the wheel is not supported.'
def support_index_min(self, tags=None):
if (tags is None): tags = pep425tags.supported_tags indexes = [tags.index(c) for c in self.file_tags if (c in tags)] return (min(indexes) if indexes else None)
'Is this wheel supported on this system?'
def supported(self, tags=None):
if (tags is None): tags = pep425tags.supported_tags return bool(set(tags).intersection(self.file_tags))
'Build one wheel. :return: The filename of the built wheel, or None if the build failed.'
def _build_one(self, req, output_dir, python_tag=None):
tempd = tempfile.mkdtemp('pip-wheel-') try: if self.__build_one(req, tempd, python_tag=python_tag): try: wheel_name = os.listdir(tempd)[0] wheel_path = os.path.join(output_dir, wheel_name) shutil.move(os.path.join(tempd, wheel_name), wheel_path...
'Build wheels. :param unpack: If True, replace the sdist we built from with the newly built wheel, in preparation for installation. :return: True if all the wheels built correctly.'
def build(self, autobuilding=False):
assert (self._wheel_dir or (autobuilding and self._cache_root)) self.requirement_set.prepare_files(self.finder) reqset = self.requirement_set.requirements.values() buildset = [] for req in reqset: if req.constraint: continue if req.is_wheel: if (not autobuildi...
'Return True if the given path is one we are permitted to remove/modify, False otherwise.'
def _permitted(self, path):
return is_local(path)
'Compact a path set to contain the minimal number of paths necessary to contain all paths in the set. If /a/path/ and /a/path/to/a/file.txt are both in the set, leave only the shorter path.'
def compact(self, paths):
short_paths = set() for path in sorted(paths, key=len): if (not any([(path.startswith(shortpath) and (path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)) for shortpath in short_paths])): short_paths.add(path) return short_paths
'Remove paths in ``self.paths`` with confirmation (unless ``auto_confirm`` is True).'
def remove(self, auto_confirm=False):
if (not self.paths): logger.info("Can't uninstall '%s'. No files were found to uninstall.", self.dist.project_name) return logger.info('Uninstalling %s-%s:', self.dist.project_name, self.dist.version) with indent_log(): paths = sorted(self.compact(self.path...
'Rollback the changes previously made by remove().'
def rollback(self):
if (self.save_dir is None): logger.error("Can't roll back %s; was not uninstalled", self.dist.project_name) return False logger.info('Rolling back uninstall of %s', self.dist.project_name) for path in self._moved_paths: tmp_path = self._stash(path) ...
'Remove temporary save dir: rollback will no longer be possible.'
def commit(self):
if (self.save_dir is not None): rmtree(self.save_dir) self.save_dir = None self._moved_paths = []
'Return a setuptools Dist object.'
def dist(self, finder):
raise NotImplementedError(self.dist)
'Ensure that we can get a Dist for this requirement.'
def prep_for_dist(self):
raise NotImplementedError(self.dist)
'Create a RequirementSet. :param wheel_download_dir: Where still-packed .whl files should be written to. If None they are written to the download_dir parameter. Separate to download_dir to permit only keeping wheel archives for pip wheel. :param download_dir: Where still packed archives should be written to. If None th...
def __init__(self, build_dir, src_dir, download_dir, upgrade=False, ignore_installed=False, as_egg=False, target_dir=None, ignore_dependencies=False, force_reinstall=False, use_user_site=False, session=None, pycompile=True, isolated=False, wheel_download_dir=None, wheel_cache=None, require_hashes=False):
if (session is None): raise TypeError("RequirementSet() missing 1 required keyword argument: 'session'") self.build_dir = build_dir self.src_dir = src_dir self.download_dir = download_dir self.upgrade = upgrade self.ignore_installed = ignore_installed self.force_rei...
'Add install_req as a requirement to install. :param parent_req_name: The name of the requirement that needed this added. The name is used because when multiple unnamed requirements resolve to the same name, we could otherwise end up with dependency links that point outside the Requirements set. parent_req must already...
def add_requirement(self, install_req, parent_req_name=None):
name = install_req.name if (not install_req.match_markers()): logger.warning("Ignoring %s: markers %r don't match your environment", install_req.name, install_req.markers) return [] install_req.as_egg = self.as_egg install_req.use_user_site = self.use_user_site i...
'Prepare process. Create temp directories, download and/or unpack files.'
def prepare_files(self, finder):
if self.wheel_download_dir: ensure_dir(self.wheel_download_dir) root_reqs = (self.unnamed_requirements + self.requirements.values()) require_hashes = (self.require_hashes or any((req.has_hash_options for req in root_reqs))) if (require_hashes and self.as_egg): raise InstallationError('--...
'Check if req_to_install should be skipped. This will check if the req is installed, and whether we should upgrade or reinstall it, taking into account all the relevant user options. After calling this req_to_install will only have satisfied_by set to None if the req_to_install is to be upgraded/reinstalled etc. Any ot...
def _check_skip_installed(self, req_to_install, finder):
req_to_install.check_if_exists() if req_to_install.satisfied_by: skip_reason = 'satisfied (use --upgrade to upgrade)' if self.upgrade: best_installed = False if (not (self.force_reinstall or req_to_install.link)): try: finde...
'Prepare a single requirements file. :return: A list of additional InstallRequirements to also install.'
def _prepare_file(self, finder, req_to_install, require_hashes=False, ignore_dependencies=False):
if (req_to_install.constraint or req_to_install.prepared): return [] req_to_install.prepared = True if req_to_install.editable: logger.info('Obtaining %s', req_to_install) else: assert (req_to_install.satisfied_by is None) if (not self.ignore_installed): sk...
'Clean up files, remove builds.'
def cleanup_files(self):
logger.debug('Cleaning up...') with indent_log(): for req in self.reqs_to_cleanup: req.remove_temporary_source()
'Create the installation order. The installation order is topological - requirements are installed before the requiring thing. We break cycles at an arbitrary point, and make no other guarantees.'
def _to_install(self):
order = [] ordered_reqs = set() def schedule(req): if (req.satisfied_by or (req in ordered_reqs)): return if req.constraint: return ordered_reqs.add(req) for dep in self._dependencies[req]: schedule(dep) order.append(req) for in...
'Install everything in this set (after having downloaded and unpacked the packages)'
def install(self, install_options, global_options=(), *args, **kwargs):
to_install = self._to_install() if to_install: logger.info('Installing collected packages: %s', ', '.join([req.name for req in to_install])) with indent_log(): for requirement in to_install: if requirement.conflicts_with: logger.info('Found existing...
'Creates an InstallRequirement from a name, which might be a requirement, directory containing \'setup.py\', filename, or URL.'
@classmethod def from_line(cls, name, comes_from=None, isolated=False, options=None, wheel_cache=None, constraint=False):
from pip.index import Link if is_url(name): marker_sep = '; ' else: marker_sep = ';' if (marker_sep in name): (name, markers) = name.split(marker_sep, 1) markers = markers.strip() if (not markers): markers = None else: markers = None ...
'Ensure that if a link can be found for this, that it is found. Note that self.link may still be None - if Upgrade is False and the requirement is already installed. If require_hashes is True, don\'t use the wheel cache, because cached wheels, always built locally, have different hashes than the files downloaded from t...
def populate_link(self, finder, upgrade, require_hashes):
if (self.link is None): self.link = finder.find_requirement(self, upgrade) if ((self._wheel_cache is not None) and (not require_hashes)): old_link = self.link self.link = self._wheel_cache.cached_wheel(self.link, self.name) if (old_link != self.link): logger.debug('Us...
'Return whether I am pinned to an exact version. For example, some-package==1.2 is pinned; some-package>1.2 is not.'
@property def is_pinned(self):
specifiers = self.specifier return ((len(specifiers) == 1) and (next(iter(specifiers)).operator in ('==', '===')))
'Move self._temp_build_dir to self._ideal_build_dir/self.req.name For some requirements (e.g. a path to a directory), the name of the package is not available until we run egg_info, so the build_location will return a temporary directory and store the _ideal_build_dir. This is only called by self.egg_info_path to fix t...
def _correct_build_location(self):
if (self.source_dir is not None): return assert (self.req is not None) assert self._temp_build_dir assert self._ideal_build_dir old_location = self._temp_build_dir self._temp_build_dir = None new_location = self.build_location(self._ideal_build_dir) if os.path.exists(new_location...
'Uninstall the distribution currently satisfying this requirement. Prompts before removing or modifying files unless ``auto_confirm`` is True. Refuses to delete or modify files outside of ``sys.prefix`` - thus uninstallation within a virtual environment can only modify that virtual environment, even if the virtualenv i...
def uninstall(self, auto_confirm=False):
if (not self.check_if_exists()): raise UninstallationError(('Cannot uninstall requirement %s, not installed' % (self.name,))) dist = (self.satisfied_by or self.conflicts_with) dist_path = normalize_path(dist.location) if (not dist_is_local(dist)): logger.info('Not unins...
'Ensure that a source_dir is set. This will create a temporary build dir if the name of the requirement isn\'t known yet. :param parent_dir: The ideal pip parent_dir for the source_dir. Generally src_dir for editables and build_dir for sdists. :return: self.source_dir'
def ensure_has_source_dir(self, parent_dir):
if (self.source_dir is None): self.source_dir = self.build_location(parent_dir) return self.source_dir
'Remove the source files from this requirement, if they are marked for deletion'
def remove_temporary_source(self):
if (self.source_dir and os.path.exists(os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME))): logger.debug('Removing source in %s', self.source_dir) rmtree(self.source_dir) self.source_dir = None if (self._temp_build_dir and os.path.exists(self._temp_build_dir)): rmtre...
'Find an installed distribution that satisfies or conflicts with this requirement, and set self.satisfied_by or self.conflicts_with appropriately.'
def check_if_exists(self):
if (self.req is None): return False try: self.satisfied_by = pkg_resources.get_distribution(self.req) except pkg_resources.DistributionNotFound: return False except pkg_resources.VersionConflict: existing_dist = pkg_resources.get_distribution(self.req.project_name) ...
'Return a pkg_resources.Distribution built from self.egg_info_path'
def get_dist(self):
egg_info = self.egg_info_path('').rstrip('/') base_dir = os.path.dirname(egg_info) metadata = pkg_resources.PathMetadata(base_dir, egg_info) dist_name = os.path.splitext(os.path.basename(egg_info))[0] return pkg_resources.Distribution(os.path.dirname(egg_info), project_name=dist_name, metadata=metad...
'Return whether any known-good hashes are specified as options. These activate --require-hashes mode; hashes specified as part of a URL do not.'
@property def has_hash_options(self):
return bool(self.options.get('hashes', {}))
def hashes(self, trust_internet=True):
    """Return a hash-comparer that considers my option- and URL-based
    hashes to be known-good.

    Hashes in URLs--ones embedded in the requirements file, not ones
    downloaded from an index server--are almost peers with ones from
    flags. They satisfy --require-hashes (whether it was implicitly or
    explicitly activated) but do not activate it.

    :param trust_internet: when False, ignore ``self.link`` and use
        only ``self.original_link`` as a hash source.
    """
    # Shallow-copy so the option dict itself is not mutated below.
    allowed = dict(self.options.get('hashes', {}))
    link = self.link if trust_internet else self.original_link
    # A hash embedded in the URL fragment also counts as known-good.
    if link and link.hash:
        allowed.setdefault(link.hash_name, []).append(link.hash)
    return Hashes(allowed)
# Formats an optparse Option as e.g. "-f, --format <format>": short
# opt first, long opt next, separator inserted only when both exist,
# and the metavar appended when the option takes a value.
# NOTE(review): collapsed, truncated body kept byte-identical.
'Return a comma-separated list of option strings and metavars. :param option: tuple of (short opt, long opt), e.g: (\'-f\', \'--format\') :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar :param optsep: separator'
def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
    opts = [] if option._short_opts: opts.append(option._short_opts[0]) if option._long_opts: opts.append(option._long_opts[0]) if (len(opts) > 1): opts.insert(1, optsep) if option.takes_value(): metavar = (option.metavar or option.dest.lower()) opts.append((mvarf...
def format_usage(self, usage):
    """Ensure there is only one newline between usage and the first
    heading if there is no description.
    """
    # Dedent the raw usage text, then re-indent it uniformly before
    # wrapping it in the "Usage:" banner.
    indented = self.indent_lines(textwrap.dedent(usage), ' ')
    return '\nUsage: %s\n' % indented
def insert_option_group(self, idx, *args, **kwargs):
    """Insert an OptionGroup at a given position.

    ``add_option_group`` appends the new group to ``option_groups``;
    this moves it from the end to index ``idx`` and returns it.
    """
    new_group = self.add_option_group(*args, **kwargs)
    groups = self.option_groups
    groups.pop()
    groups.insert(idx, new_group)
    return new_group
@property
def option_list_all(self):
    """Get a list of all options, including those in option groups."""
    # Flatten every group's options after the parser's own list;
    # returns a fresh list each call.
    grouped = [opt for group in self.option_groups for opt in group.option_list]
    return list(self.option_list) + grouped
# Merges the 'global' and per-command config-file sections, then
# (unless running isolated) PIP_* environment variables, into the
# given defaults; keys go through normalize_keys() to "--long-option"
# form. The per-key handling loop is truncated in this extraction.
# NOTE(review): collapsed, truncated body kept byte-identical.
'Updates the given defaults with values from the config files and the environ. Does a little special handling for certain types of options (lists).'
def _update_defaults(self, defaults):
    config = {} for section in ('global', self.name): config.update(self.normalize_keys(self.get_config_section(section))) if (not self.isolated): config.update(self.normalize_keys(self.get_environ_vars())) self.values = optparse.Values(self.defaults) late_eval = set() for (key, val)...
def normalize_keys(self, items):
    """Return a config dictionary with normalized keys regardless of
    whether the keys were specified in environment variables or in
    config files.

    :param items: iterable of (key, value) pairs
    """
    result = {}
    for raw_key, value in items:
        # Config/environment names use underscores; option strings
        # use dashes.
        dashed = raw_key.replace('_', '-')
        # Only long-form options are supported, so force the
        # "--" prefix when it is missing.
        if not dashed.startswith('--'):
            dashed = '--%s' % dashed
        result[dashed] = value
    return result
def get_config_section(self, name):
    """Get a section of a configuration.

    Returns the section's (key, value) pairs, or an empty list when
    the section does not exist.
    """
    if not self.config.has_section(name):
        return []
    return self.config.items(name)
def get_environ_vars(self):
    """Returns a generator with all environmental vars with prefix PIP_.

    Yields (name, value) pairs with the prefix stripped and the name
    lowercased.
    """
    for env_name, env_value in os.environ.items():
        # _environ_prefix_re is the module-level PIP_ prefix pattern;
        # skip anything that does not match it.
        if not _environ_prefix_re.search(env_name):
            continue
        stripped = _environ_prefix_re.sub('', env_name).lower()
        yield stripped, env_value
# optparse hook: when process_default_values is set, recomputes the
# defaults via _update_defaults() so config files / environment can
# override them after parser construction; string defaults are then
# re-processed per option (continuation truncated in this extraction).
# NOTE(review): collapsed, truncated body kept byte-identical.
'Overridding to make updating the defaults after instantiation of the option parser possible, _update_defaults() does the dirty work.'
def get_default_values(self):
    if (not self.process_default_values): return optparse.Values(self.defaults) defaults = self._update_defaults(self.defaults.copy()) for option in self._get_all_options(): default = defaults.get(option.dest) if isinstance(default, string_types): opt_str = option.get_opt_str...
def format(self, record):
    """Calls the standard formatter, but will indent all of the log
    messages by our current indentation level.
    """
    rendered = logging.Formatter.format(self, record)
    # splitlines(True) keeps line endings, so joining reproduces the
    # original text with each line prefixed by the current indent.
    return ''.join(
        (' ' * get_indentation()) + line
        for line in rendered.splitlines(True)
    )
def __init__(self, *args, **kwargs):
    """Save the original SIGINT handler for later."""
    super(InterruptibleMixin, self).__init__(*args, **kwargs)
    previous = signal(SIGINT, self.handle_sigint)
    # signal() can return None (e.g. handler installed from outside
    # Python); fall back to the default int handler so finish()
    # always has a callable to restore.
    self.original_handler = default_int_handler if previous is None else previous
# Runs the superclass teardown first, then reinstalls the SIGINT
# handler saved by __init__; the order matters, so the code is left
# byte-identical (collapsed one-line extraction).
'Restore the original SIGINT handler after finishing. This should happen regardless of whether the progress display finishes normally, or gets interrupted.'
def finish(self):
    super(InterruptibleMixin, self).finish() signal(SIGINT, self.original_handler)
# SIGINT handler installed by __init__: finish the display (which
# restores the original handler) and then delegate to that original
# handler so the usual interrupt behavior still occurs; order is
# significant, so the code is left byte-identical.
'Call self.finish() before delegating to the original SIGINT handler. This handler should only be in place while the progress display is active.'
def handle_sigint(self, signum, frame):
    self.finish() self.original_handler(signum, frame)
def __init__(self, hashes=None):
    """:param hashes: A dict of algorithm names pointing to lists of
        allowed hex digests
    """
    if hashes is None:
        hashes = {}
    # NOTE(review): the caller's dict is stored as-is (not copied), so
    # later mutation by the caller is visible here — matches original.
    self._allowed = hashes
# Streams the chunks through one hashlib object per allowed algorithm;
# an algorithm name hashlib does not recognize raises
# InstallationError. The digest-comparison tail is truncated in this
# extraction.
# NOTE(review): collapsed, truncated body kept byte-identical.
'Check good hashes against ones built from iterable of chunks of data. Raise HashMismatch if none match.'
def check_against_chunks(self, chunks):
    gots = {} for hash_name in iterkeys(self._allowed): try: gots[hash_name] = hashlib.new(hash_name) except (ValueError, TypeError): raise InstallationError(('Unknown hash name: %s' % hash_name)) for chunk in chunks: for hash in itervalues(gots): ...