signature
stringlengths
8
3.44k
body
stringlengths
0
1.41M
docstring
stringlengths
1
122k
id
stringlengths
5
17
def between(self, time_):
hour = int(time_[<NUM_LIT:0>:<NUM_LIT:2>])<EOL>minute = int(time_[<NUM_LIT:3>:<NUM_LIT:5>])<EOL>return not (<EOL>hour < self.h1 or hour > self.h2 or<EOL>(hour == self.h1 and minute < self.m1) or<EOL>(hour == self.h2 and minute > self.m2)<EOL>)<EOL>
Compare if the parameter HH:MM is in the time range.
f9085:c0:m1
def has_host_match(log_data, hosts):
hostname = getattr(log_data, '<STR_LIT:host>', None)<EOL>if hostname and hostname not in host_cache:<EOL><INDENT>for host_pattern in hosts:<EOL><INDENT>if host_pattern.search(hostname) is not None:<EOL><INDENT>host_cache.add(hostname)<EOL>return True<EOL><DEDENT><DEDENT>else:<EOL><INDENT>return False<EOL><DEDENT><DEDEN...
Match the data with a list of hostname patterns. If the log line data doesn't include host information considers the line as matched.
f9088:m4
def create_matcher(dispatcher, parsers, apptags, matcher='<STR_LIT>', hosts=tuple(), time_range=None,<EOL>time_period=(None, None), patterns=tuple(), invert=False, count=False,<EOL>files_with_match=None, max_count=<NUM_LIT:0>, only_matching=False, quiet=False,<EOL>thread=False, name_cache=None):
parsers = CycleParsers(parsers)<EOL>max_matches = <NUM_LIT:1> if quiet else max_count<EOL>use_app_rules = matcher != '<STR_LIT>'<EOL>select_unparsed = matcher == '<STR_LIT>'<EOL>register_log_lines = not (quiet or count or files_with_match is not None)<EOL>start_dt, end_dt = get_mktime_period(time_period)<EOL>pattern_se...
Create a matcher engine. :return: A matcher function.
f9088:m5
def __init__(self, name, pattern, args, filter_keys=None):
try:<EOL><INDENT>if not pattern:<EOL><INDENT>raise LogRaptorConfigError('<STR_LIT>' % name)<EOL><DEDENT>self.regexp = re.compile(pattern)<EOL><DEDENT>except RegexpCompileError:<EOL><INDENT>raise LogRaptorConfigError("<STR_LIT>" % name)<EOL><DEDENT>self.name = name<EOL>self.args = args<EOL>self.filter_keys = filter_keys...
Initialize AppRule. :param name: the configuration option name :param pattern: the option value that represents the search pattern :param filter_keys: the filtering keys dictionary if the rule is a filter
f9089:c0:m0
def add_result(self, values):
idx = [values['<STR_LIT:host>']]<EOL>for gid in self.key_gids[<NUM_LIT:1>:]:<EOL><INDENT>idx.append(values[gid])<EOL><DEDENT>idx = tuple(idx)<EOL>try:<EOL><INDENT>self.results[idx] += <NUM_LIT:1><EOL><DEDENT>except KeyError:<EOL><INDENT>self.results[idx] = <NUM_LIT:1><EOL><DEDENT>self._last_idx = idx<EOL>
Add a tuple or increment the value of an existing one in the rule results dictionary.
f9089:c0:m2
def increase_last(self, k):
idx = self._last_idx<EOL>if idx is not None:<EOL><INDENT>self.results[idx] += k<EOL><DEDENT>
Increase the last result by k.
f9089:c0:m3
def total_events(self, cond, valfld=None):
results = self.results<EOL>if cond == "<STR_LIT:*>" and valfld is None:<EOL><INDENT>return sum(results.values())<EOL><DEDENT>val = self.key_gids.index(valfld) if valfld is not None else None<EOL>if cond == "<STR_LIT:*>":<EOL><INDENT>tot = <NUM_LIT:0><EOL>for key in results:<EOL><INDENT>tot += results[key] * int(key[val...
Return total number of events in the rule'result set. A condition could be provided to select the events to count. If value field (valfld) is passed the function compute the sum taking the product of each value with correspondent event counter.
f9089:c0:m4
def top_events(self, num, valfld, usemax, gid):
def classify():<EOL><INDENT>if value is None:<EOL><INDENT>return<EOL><DEDENT>for j in range(num):<EOL><INDENT>if top[j] is None:<EOL><INDENT>top[j] = [tot, [value]]<EOL>break<EOL><DEDENT>elif tot == top[j][<NUM_LIT:0>]:<EOL><INDENT>top[j][<NUM_LIT:1>].append(value)<EOL>break<EOL><DEDENT>elif tot > top[j][<NUM_LIT:0>]:<...
Return a list with the top NUM list of events. Each list element contain a value, indicating the number of events, and a list of matching gid values (usernames, email addresses, clients). Instead of calculating the top sum of occurrences a value field should be provided to compute the max of a numeric value field or th...
f9089:c0:m5
def list_events(self, cond, cols, fields):
def insert_row():<EOL><INDENT>"""<STR_LIT>"""<EOL>row = list(row_template)<EOL>j = <NUM_LIT:0><EOL>for n in range(cols):<EOL><INDENT>if row[n] is None:<EOL><INDENT>if j == keylen:<EOL><INDENT>row[n] = tabvalues<EOL><DEDENT>else:<EOL><INDENT>row[n] = tabkey[j]<EOL><DEDENT>j += <NUM_LIT:1><EOL><DEDENT><DEDENT>reslist.app...
Return the list of events, with a specific order and filtered by a condition. An element of the list is a tuple with three component. The first is the main attribute (first field). The second the second field/label, usually a string that identify the service. The third is a dictionary with a key-tuple composed by all o...
f9089:c0:m6
def __init__(self, name, cfgfile, args, logdir, fields, name_cache=None, report=None):
logger.debug('<STR_LIT>', name)<EOL>self.name = name <EOL>self.cfgfile = cfgfile <EOL>self.args = args<EOL>self.logdir = logdir<EOL>self.fields = fields<EOL>self.name_cache = name_cache<EOL>self._report = report<EOL>self._thread = args.thread<EOL>self.matches = <NUM_LIT:0> <EOL>self.unparsed ...
:param name: application name :param cfgfile: application config file :param args: cli arguments :param logdir: Log directory :param fields: Configured fields :param name_cache: Optional name cache (--ip-lookup/--uid-lookup/--anonymize options) :param report: Optional report (--report option)
f9089:c1:m0
def parse_rules(self):
<EOL>try:<EOL><INDENT>rule_options = self.config.items('<STR_LIT>')<EOL><DEDENT>except configparser.NoSectionError:<EOL><INDENT>raise LogRaptorConfigError("<STR_LIT>" % self.name)<EOL><DEDENT>rules = []<EOL>for option, value in rule_options:<EOL><INDENT>pattern = value.replace('<STR_LIT:\n>', '<STR_LIT>') <EOL>if not ...
Add a set of rules to the app, dividing between filter and other rule set
f9089:c1:m4
def increase_last(self, k):
rule = self._last_rule<EOL>if rule is not None:<EOL><INDENT>rule.increase_last(k)<EOL><DEDENT>
Increase the counter of the last matched rule by k.
f9089:c1:m5
def match_rules(self, log_data):
for rule in self.rules:<EOL><INDENT>match = rule.regexp.search(log_data.message)<EOL>if match is not None:<EOL><INDENT>gids = rule.regexp.groupindex<EOL>self._last_rule = rule<EOL>if self.name_cache is not None:<EOL><INDENT>values = self.name_cache.match_to_dict(match, rule.key_gids)<EOL>values['<STR_LIT:host>'] = self...
Process a log line data message with app's pattern rules. Return a tuple with this data: Element #0 (app_matched): True if a rule match, False otherwise; Element #1 (has_full_match): True if a rule match and is a filter or the app has not filters; False if a rule match but is not a filter; None...
f9089:c1:m6
def send_report(self, report_parts):
logger.info('<STR_LIT>')<EOL>report_parts = sorted(<EOL>filter(lambda x: x.fmt in self.formats, report_parts),<EOL>key=lambda x: self.formats.index(x.fmt)<EOL>)<EOL>fmtname = '<STR_LIT>' if len(report_parts) > <NUM_LIT:1> else '<STR_LIT>'<EOL>root_part = MIMEMultipart('<STR_LIT>')<EOL>root_part.preamble = '<STR_LIT>'<E...
Publish by sending the report by e-mail
f9091:c4:m6
def prune_old(self):
path = self.pubdir<EOL>dirmask = self.dirmask<EOL>expire = self.expire<EOL>expire_limit = int(time.time()) - (<NUM_LIT> * expire)<EOL>logger.info('<STR_LIT>', expire)<EOL>if not os.path.isdir(path):<EOL><INDENT>logger.warning('<STR_LIT>', path)<EOL>return<EOL><DEDENT>for entry in os.listdir(path):<EOL><INDENT>logger.de...
Removes the directories that are older than a certain date.
f9091:c5:m2
def send_report(self, report_parts):
logger.info('<STR_LIT>')<EOL>report_parts = sorted(<EOL>filter(lambda x: x.fmt in self.formats, report_parts),<EOL>key=lambda x: self.formats.index(x.fmt)<EOL>)<EOL>workdir = os.path.join(self.pubdir, self.dirname)<EOL>if not os.path.isdir(workdir):<EOL><INDENT>try: <EOL><INDENT>os.makedirs(workdir)<EOL><DEDENT>except ...
Publish the report parts to local files. Each report part is a text with a title and specific extension. For html and plaintext sending the report part is unique, for csv send also the stats and unparsed string are plain text and report items are csv texts.
f9091:c5:m7
def __eq__(self, repitem):
if self.function != '<STR_LIT>' or repitem.function != '<STR_LIT>':<EOL><INDENT>return False<EOL><DEDENT>if self.title != repitem.title:<EOL><INDENT>return False<EOL><DEDENT>head1 = re.split('<STR_LIT>', self.headers)<EOL>head2 = re.split('<STR_LIT>', repitem.headers)<EOL>if len(head1) != len(head2):<EOL><INDENT>return...
Compare two 'table' report items. When True the report items results are mergeable.
f9092:c0:m7
def make_text(self, width):
def mformat(reslist):<EOL><INDENT>_text = "<STR_LIT>"<EOL>_buffer = reslist[<NUM_LIT:0>]<EOL>for j in range(<NUM_LIT:1>, len(reslist)):<EOL><INDENT>if (_buffer == "<STR_LIT>") or (len(_buffer) + len(reslist[j])) <= (width - len(filling)):<EOL><INDENT>if reslist[j][<NUM_LIT:0>] == '<STR_LIT:[>' and reslist[j][-<NUM_LIT:...
Make the text representation of a report data element.
f9092:c0:m8
def make_html(self):
html = None<EOL>if self.function == '<STR_LIT>':<EOL><INDENT>html = u'<STR_LIT>''<STR_LIT>''<STR_LIT>'.format(htmlsafe(self.title.strip()), self.color)<EOL>for res in self.results:<EOL><INDENT>html = u'<STR_LIT>''<STR_LIT>'.format(html, res[<NUM_LIT:0>], res[<NUM_LIT:1>])<EOL><DEDENT><DEDENT>elif self.function == '<STR...
Make the text representation of a report element as html.
f9092:c0:m9
def make_csv(self):
import csv<EOL>try:<EOL><INDENT>from StringIO import StringIO <EOL><DEDENT>except ImportError:<EOL><INDENT>from io import StringIO<EOL><DEDENT>out = StringIO()<EOL>writer = csv.writer(out, delimiter='<STR_LIT:|>', lineterminator='<STR_LIT:\n>', quoting=csv.QUOTE_MINIMAL)<EOL>if self.function == '<STR_LIT>':<EOL><INDEN...
Get the text representation of a report element as csv.
f9092:c0:m10
def make(self, apps):
for (appname, app) in sorted(apps.items(), key=lambda x: (x[<NUM_LIT:1>].priority, x[<NUM_LIT:0>])):<EOL><INDENT>logger.info('<STR_LIT>', appname)<EOL>for report_data in app.report_data:<EOL><INDENT>if report_data.subreport != self.name:<EOL><INDENT>continue<EOL><DEDENT>if report_data.function == '<STR_LIT>':<EOL><INDE...
Make subreport items from results.
f9092:c1:m3
def make_format(self, fmt, width):
if not self.report_data:<EOL><INDENT>return<EOL><DEDENT>for data_item in self.report_data:<EOL><INDENT>if data_item.results:<EOL><INDENT>if fmt is None or fmt == '<STR_LIT:text>':<EOL><INDENT>data_item.make_text(width)<EOL><DEDENT>elif fmt == '<STR_LIT:html>':<EOL><INDENT>data_item.make_html()<EOL><DEDENT>elif fmt == '...
Make subreport text in a specified format
f9092:c1:m4
def compact_tables(self):
items_to_del = set()<EOL>for i in range(len(self.report_data)):<EOL><INDENT>if i in items_to_del:<EOL><INDENT>continue<EOL><DEDENT>if self.report_data[i].function[<NUM_LIT:0>:<NUM_LIT:5>] == '<STR_LIT>':<EOL><INDENT>for j in range(i+<NUM_LIT:1>, len(self.report_data)):<EOL><INDENT>if self.report_data[j].function[<NUM_L...
Compact report items of type "table" with same results type. Report items of type "tables" in the same subreport is merged into one. The data are ordered by 1st column.
f9092:c1:m5
def make(self, apps):
for subreport in self.subreports:<EOL><INDENT>logger.debug('<STR_LIT>'.format(subreport.name))<EOL>subreport.make(apps)<EOL><DEDENT>for subreport in self.subreports:<EOL><INDENT>subreport.compact_tables()<EOL><DEDENT>
Create the report from application results
f9092:c2:m1
def get_report_parts(self, apps, formats):
for fmt in formats:<EOL><INDENT>width = <NUM_LIT:100> if fmt is not None else tui.get_terminal_size()[<NUM_LIT:0>]<EOL>for sr in self.subreports:<EOL><INDENT>sr.make_format(fmt, width)<EOL><DEDENT><DEDENT>logger.debug('<STR_LIT>')<EOL>value_mapping = {<EOL>'<STR_LIT:title>': self.title,<EOL>'<STR_LIT>': '<STR_LIT:U+002...
Make report item texts in a specified format.
f9092:c2:m3
def is_empty(self):
return not any(self.subreports)<EOL>
A report is empty when it hasn't subreports or when all subreports are empty.
f9092:c2:m4
def set_stats(self, run_stats):
self.stats = run_stats.copy()<EOL>self.stats['<STR_LIT>'] = '<STR_LIT:U+002CU+0020>'.join(self.stats['<STR_LIT>'])<EOL>self.stats['<STR_LIT>'] = len(run_stats['<STR_LIT>'])<EOL>self.stats['<STR_LIT>'] = '<STR_LIT:U+002CU+0020>'.join(self.stats['<STR_LIT>'])<EOL>
Set run statistics for the report.
f9092:c2:m5
def make_html_page(self, valumap):
logger.info('<STR_LIT>', self.html_template)<EOL>fh = open(self.html_template)<EOL>template = fh.read()<EOL>fh.close()<EOL>parts = []<EOL>for sr in self.subreports:<EOL><INDENT>report_data = [item.html for item in sr.report_data if item.html]<EOL>if report_data:<EOL><INDENT>parts.append('<STR_LIT>'.format(sr.title, sr....
Builds the report as html page, using the template page from file.
f9092:c2:m6
def make_text_page(self, valumap):
logger.info('<STR_LIT>', self.text_template)<EOL>fh = open(self.text_template)<EOL>template = fh.read()<EOL>fh.close()<EOL>parts = []<EOL>for sr in self.subreports:<EOL><INDENT>report_data = [item.text for item in sr.report_data if item.text]<EOL>if report_data:<EOL><INDENT>parts.append('<STR_LIT>'.format(sr.title, '<S...
Builds the report as text page, using the template page from file.
f9092:c2:m7
def make_csv_tables(self):
logger.info('<STR_LIT>')<EOL>report_parts = []<EOL>for sr in self.subreports:<EOL><INDENT>for data_item in sr.report_data:<EOL><INDENT>report_parts.append(TextPart(fmt='<STR_LIT>', text=data_item.csv, ext='<STR_LIT>'))<EOL><DEDENT><DEDENT>return report_parts<EOL>
Builds the report as a list of csv tables with titles.
f9092:c2:m8
def _read_apps(self):
apps = {}<EOL>for cfgfile in glob.iglob(os.path.join(self.confdir, '<STR_LIT>')):<EOL><INDENT>name = os.path.basename(cfgfile)[<NUM_LIT:0>:-<NUM_LIT:5>]<EOL>try:<EOL><INDENT>app = AppLogParser(name, cfgfile, self.args, self.logdir,<EOL>self.fields, self.name_cache, self.report)<EOL><DEDENT>except (LogRaptorOptionError,...
Read the configuration of applications returning a dictionary :return: A dictionary with application names as keys and configuration \ object as values.
f9093:c0:m1
@property<EOL><INDENT>def filters(self):<DEDENT>
return self.args.filters<EOL>
Log processor filters.
f9093:c0:m2
def set_logger(self):
<EOL>effective_level = max(logging.DEBUG, logging.CRITICAL - self.args.loglevel * <NUM_LIT:10>)<EOL>logger.setLevel(effective_level)<EOL>if not logger.handlers:<EOL><INDENT>if sys.stdout.isatty():<EOL><INDENT>handler = logging.StreamHandler()<EOL><DEDENT>else:<EOL><INDENT>try:<EOL><INDENT>handler = logging.FileHandler(...
Setup lograptor logger with an handler and a formatter. The logging level is defined by a [0..4] range, where an higher value means a more verbose logging. The loglevel value is mapped to correspondent logging module's value: LOG_CRIT=0 (syslog.h value is 2) ==> logging.CRITICAL LOG_ERR=1 (syslog.h value is 3) ==> log...
f9093:c0:m9
@protected_property<EOL><INDENT>def patterns(self):<DEDENT>
<EOL>if not self.args.patterns and not self.args.pattern_files:<EOL><INDENT>try:<EOL><INDENT>self.args.patterns.append(self.args.files.pop(<NUM_LIT:0>))<EOL><DEDENT>except IndexError:<EOL><INDENT>raise LogRaptorArgumentError('<STR_LIT>', '<STR_LIT>')<EOL><DEDENT><DEDENT>patterns = set()<EOL>if self.args.pattern_files:<...
A tuple with re.RegexObject objects created from regex pattern arguments.
f9093:c0:m11
@protected_property<EOL><INDENT>def files(self):<DEDENT>
<EOL>if not self.args.files and self.recursive:<EOL><INDENT>return ['<STR_LIT:.>']<EOL><DEDENT>else:<EOL><INDENT>return self.args.files<EOL><DEDENT>
A list of input sources. Each item can be a file path, a glob path or URL.
f9093:c0:m12
@protected_property<EOL><INDENT>def matcher(self):<DEDENT>
if self.args.matcher is None:<EOL><INDENT>return '<STR_LIT>'<EOL><DEDENT>elif self.args.matcher.startswith('<STR_LIT:->'):<EOL><INDENT>matcher = self.args.matcher.strip('<STR_LIT:->').replace('<STR_LIT:->', '<STR_LIT:_>')<EOL><DEDENT>else:<EOL><INDENT>matcher = self.args.matcher<EOL><DEDENT>if matcher not in ['<STR_LIT...
Matcher engine: ruled, unruled, unparsed.
f9093:c0:m14
@property<EOL><INDENT>def time_range(self):<DEDENT>
return self.args.time_range<EOL>
Selected time range for log matching. If `None` then match always (equivalent to 0:00-23:59).
f9093:c0:m16
@protected_property<EOL><INDENT>def time_period(self):<DEDENT>
if self.args.time_period is None:<EOL><INDENT>if self.args.files or is_pipe(STDIN_FILENO) or is_redirected(STDIN_FILENO):<EOL><INDENT>time_period = (None, None)<EOL><DEDENT>else:<EOL><INDENT>diff = <NUM_LIT> <EOL>time_period = get_datetime_interval(int(time.time()), diff, <NUM_LIT>)<EOL><DEDENT><DEDENT>else:<EOL><INDE...
Time period that is determined from the arguments --date and --last. It's a 2-tuple with (<start datetime>, <end_datetime>) items. An item is `None` if there isn't a limit.
f9093:c0:m17
@protected_property<EOL><INDENT>def apps(self):<DEDENT>
logger.debug("<STR_LIT>")<EOL>enabled = None<EOL>apps = self.args.apps or self._config_apps.keys()<EOL>unknown = set(apps) - set(self._config_apps.keys())<EOL>if unknown:<EOL><INDENT>raise LogRaptorArgumentError("<STR_LIT>", "<STR_LIT>" % list(unknown))<EOL><DEDENT>if apps or enabled is None:<EOL><INDENT>return {k: v f...
Dictionary with loaded applications.
f9093:c0:m21
@protected_property<EOL><INDENT>def apptags(self):<DEDENT>
logger.debug("<STR_LIT>")<EOL>apps = self._apps.keys()<EOL>unknown = set(apps)<EOL>unknown.difference_update(self._config_apps.keys())<EOL>if unknown:<EOL><INDENT>raise ValueError("<STR_LIT>" % list(unknown))<EOL><DEDENT>apps = [v for v in self._config_apps.values() if v.name in apps]<EOL>tagmap = {}<EOL>for app in sor...
Map from log app-name to an application.
f9093:c0:m22
@protected_property<EOL><INDENT>def channels(self):<DEDENT>
try:<EOL><INDENT>return self._channels<EOL><DEDENT>except AttributeError:<EOL><INDENT>logger.debug("<STR_LIT>")<EOL><DEDENT>channels = self.args.channels<EOL>config_channels = [sec.rpartition('<STR_LIT:_>')[<NUM_LIT:0>] for sec in self.config.sections(suffix='<STR_LIT>')]<EOL>unknown = set(channels) - set(config_channe...
Output channels
f9093:c0:m24
def __call__(self, dispatcher=None, parsers=None):
if dispatcher is None:<EOL><INDENT>dispatcher = self.create_dispatcher()<EOL><DEDENT>matcher_engine = self.create_matcher(dispatcher, parsers=parsers)<EOL>dispatcher.open()<EOL>display_progress_bar = sys.stdout.isatty() and all(c.name != '<STR_LIT>' for c in dispatcher.channels)<EOL>logger.info("<STR_LIT>")<EOL>files =...
Log processing main routine. Iterate over the log files calling the processing internal routine for each file.
f9093:c0:m26
def create_dispatcher(self):
before_context = max(self.args.before_context, self.args.context)<EOL>after_context = max(self.args.after_context, self.args.context)<EOL>if self.args.files_with_match is not None or self.args.count or self.args.only_matching or self.args.quiet:<EOL><INDENT>return UnbufferedDispatcher(self._channels)<EOL><DEDENT>elif b...
Return a dispatcher for configured channels.
f9093:c0:m27
def get_config(self):
<EOL>channels = [sect.rsplit('<STR_LIT:_>')[<NUM_LIT:0>] for sect in self.config.sections(suffix='<STR_LIT>')]<EOL>channels.sort()<EOL>disabled_apps = [app for app in self._config_apps.keys() if app not in self._apps]<EOL>return u'<STR_LIT>'.join([<EOL>u"<STR_LIT>" % __package__,<EOL>u"<STR_LIT>" % self.config.cfgfile,...
Return a formatted text with main configuration parameters.
f9093:c0:m29
def get_run_summary(self, run_stats):
run_stats = run_stats.copy()<EOL>run_stats['<STR_LIT>'] = len(run_stats['<STR_LIT>'])<EOL>summary = [<EOL>u'<STR_LIT>' % __package__,<EOL>u'<STR_LIT>',<EOL>u'<STR_LIT>',<EOL>u'<STR_LIT>',<EOL>]<EOL>if any([app.matches or app.unparsed for app in self.apps.values()]):<EOL><INDENT>if self.matcher == '<STR_LIT>':<EOL><INDE...
Produce a text summary from run statistics. :param run_stats: A dictionary containing run stats :return: Formatted multiline string
f9093:c0:m30
def do_chunked_gzip(infh, outfh, filename):
import gzip<EOL>gzfh = gzip.GzipFile('<STR_LIT>', mode='<STR_LIT:wb>', fileobj=outfh)<EOL>if infh.closed:<EOL><INDENT>infh = open(infh.name, '<STR_LIT:r>')<EOL><DEDENT>else:<EOL><INDENT>infh.seek(<NUM_LIT:0>)<EOL><DEDENT>readsize = <NUM_LIT:0><EOL>sys.stdout.write('<STR_LIT>'.format(filename))<EOL>if os.stat(infh.name)...
A memory-friendly way of compressing the data.
f9094:m0
def mail_message(smtp_server, message, from_address, rcpt_addresses):
if smtp_server[<NUM_LIT:0>] == '<STR_LIT:/>':<EOL><INDENT>p = os.popen(smtp_server, '<STR_LIT:w>')<EOL>p.write(message)<EOL>p.close()<EOL><DEDENT>else:<EOL><INDENT>import smtplib<EOL>server = smtplib.SMTP(smtp_server)<EOL>server.sendmail(from_address, rcpt_addresses, message)<EOL>server.quit()<EOL><DEDENT>
Send mail using smtp.
f9094:m1
def get_value_unit(value, unit, prefix):
prefixes = ('<STR_LIT>', '<STR_LIT>', '<STR_LIT:M>', '<STR_LIT>', '<STR_LIT:T>')<EOL>if len(unit):<EOL><INDENT>if unit[:<NUM_LIT:1>] in prefixes:<EOL><INDENT>valprefix = unit[<NUM_LIT:0>] <EOL>unit = unit[<NUM_LIT:1>:]<EOL><DEDENT>else:<EOL><INDENT>valprefix = '<STR_LIT>'<EOL><DEDENT><DEDENT>else:<EOL><INDENT>valprefix...
Return a human-readable value with unit specification. Try to transform the unit prefix to the one passed as parameter. When transform to higher prefix apply nearest integer round.
f9094:m2
def htmlsafe(unsafe):
unsafe = unsafe.replace('<STR_LIT:&>', '<STR_LIT>')<EOL>unsafe = unsafe.replace('<STR_LIT:<>', '<STR_LIT>')<EOL>unsafe = unsafe.replace('<STR_LIT:>>', '<STR_LIT>')<EOL>return unsafe<EOL>
Escapes all x(ht)ml control characters.
f9094:m3
def get_fmt_results(results, limit=<NUM_LIT:5>, sep='<STR_LIT>', fmt=None):
result_list = []<EOL>for key in sorted(results, key=lambda x: results[x], reverse=True):<EOL><INDENT>if len(result_list) >= limit and results[key] <= <NUM_LIT:1>:<EOL><INDENT>break<EOL><DEDENT>if fmt is not None:<EOL><INDENT>fmtkey = []<EOL>for i in range(len(key)):<EOL><INDENT>if i % <NUM_LIT:2> == <NUM_LIT:1>:<EOL><I...
Return a list of formatted strings representation on a result dictionary. The elements of the key are divided by a separator string. The result is appended after the key between parentheses. Apply a format transformation to odd elements of the key if a fmt parameter is passed.
f9094:m4
def safe_expand(template, mapping):
for _ in range(len(mapping) + <NUM_LIT:1>):<EOL><INDENT>_template = template<EOL>template = string.Template(template).safe_substitute(mapping)<EOL>if template == _template:<EOL><INDENT>return template<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>
Safe string template expansion. Raises an error if the provided substitution mapping has circularities.
f9094:m7
def protected_property(func):
if func.__name__.startswith('<STR_LIT:_>'):<EOL><INDENT>raise ValueError("<STR_LIT>" % func)<EOL><DEDENT>@property<EOL>@wraps(func)<EOL>def proxy_wrapper(self):<EOL><INDENT>try:<EOL><INDENT>return getattr(self, '<STR_LIT>' % func.__name__)<EOL><DEDENT>except AttributeError:<EOL><INDENT>pass<EOL><DEDENT>return func(self...
Class method decorator that creates a property that returns the protected attribute or the value returned by the wrapped method, if the protected attribute is not defined.
f9094:m11
def open_resource(source):
try:<EOL><INDENT>return open(source, mode='<STR_LIT:rb>')<EOL><DEDENT>except (IOError, OSError) as err:<EOL><INDENT>try:<EOL><INDENT>resource = urlopen(source)<EOL><DEDENT>except ValueError:<EOL><INDENT>pass<EOL><DEDENT>else:<EOL><INDENT>resource.name = resource.url<EOL>if hasattr(resource, '<STR_LIT>'):<EOL><INDENT>re...
Opens a resource in binary reading mode. Wraps the resource with a context manager when it doesn't have one. :param source: a filepath or an URL.
f9094:m14
def get_terminal_size():
import platform<EOL>current_os = platform.system()<EOL>tuple_xy = None<EOL>if current_os == '<STR_LIT>':<EOL><INDENT>tuple_xy = get_windows_terminal_size()<EOL>if tuple_xy is None:<EOL><INDENT>tuple_xy = get_unix_tput_terminal_size() <EOL><DEDENT><DEDENT>elif current_os == '<STR_LIT>' or current_os == '<STR_LIT>' or c...
Get the terminal size in width and height. Works on Linux, Mac OS X, Windows, Cygwin (Windows). :return: Returns a 2-tuple with width and height.
f9095:m0
def get_windows_terminal_size():
from ctypes import windll, create_string_buffer<EOL>handle = windll.kernel32.GetStdHandle(-<NUM_LIT:12>)<EOL>try:<EOL><INDENT>csbi = create_string_buffer(<NUM_LIT>)<EOL>res = windll.kernel32.GetConsoleScreenBufferInfo(handle, csbi)<EOL><DEDENT>except (IOError, OSError):<EOL><INDENT>return None<EOL><DEDENT>if res:<EOL><...
Get the terminal size of a Windows OS terminal.
f9095:m1
def get_unix_tput_terminal_size():
import subprocess<EOL>try:<EOL><INDENT>proc = subprocess.Popen(["<STR_LIT>", "<STR_LIT>"], stdin=subprocess.PIPE, stdout=subprocess.PIPE)<EOL>output = proc.communicate(input=None)<EOL>cols = int(output[<NUM_LIT:0>])<EOL>proc = subprocess.Popen(["<STR_LIT>", "<STR_LIT>"], stdin=subprocess.PIPE, stdout=subprocess.PIPE)<E...
Get the terminal size of a UNIX terminal using the tput UNIX command. Ref: http://stackoverflow.com/questions/263890/how-do-i-find-the-width-height-of-a-terminal-window
f9095:m2
def get_unix_ioctl_terminal_size():
def ioctl_gwinsz(fd):<EOL><INDENT>try:<EOL><INDENT>import fcntl<EOL>import termios<EOL>import struct<EOL>return struct.unpack('<STR_LIT>', fcntl.ioctl(fd, termios.TIOCGWINSZ, '<STR_LIT>'))<EOL><DEDENT>except (IOError, OSError):<EOL><INDENT>return None<EOL><DEDENT><DEDENT>cr = ioctl_gwinsz(<NUM_LIT:0>) or ioctl_gwinsz(<...
Get the terminal size of a UNIX terminal using the ioctl UNIX command.
f9095:m3
def create_argument_parser():
parser = argparse.ArgumentParser(prog='<STR_LIT>', description=__description__, add_help=False)<EOL>parser.usage = """<STR_LIT>"""<EOL>group = parser.add_argument_group("<STR_LIT>")<EOL>group.add_argument(<EOL>"<STR_LIT>", dest="<STR_LIT>", action='<STR_LIT>', default=None, metavar="<STR_LIT>",<EOL>help="<STR_LIT>"<EOL...
Command line options and arguments parsing. This function return a list of options and the list of arguments (pattern, filenames).
f9096:m5
def has_void_args(argv):
n_args = len(argv)<EOL>return n_args == <NUM_LIT:1> or n_args == <NUM_LIT:2> and argv[<NUM_LIT:1>].startswith('<STR_LIT>') or n_args == <NUM_LIT:3> and argv[<NUM_LIT:1>] == '<STR_LIT>'<EOL>
Check if the command line has no arguments or only the --conf optional argument.
f9096:m6
def lograptor(files, patterns=None, matcher='<STR_LIT>', cfgfiles=None, apps=None, hosts=None,<EOL>filters=None, time_period=None, time_range=None, case=False, invert=False,<EOL>word=False, files_with_match=None, count=False, quiet=False, max_count=<NUM_LIT:0>,<EOL>only_matching=False, line_number=False, with_filename=...
cli_parser = create_argument_parser()<EOL>args = cli_parser.parse_args()<EOL>args.files = files<EOL>args.matcher = matcher<EOL>args.cfgfiles = cfgfiles<EOL>args.time_period = time_period<EOL>args.time_range = time_range<EOL>args.case = case<EOL>args.invert = invert<EOL>args.word = word<EOL>args.files_with_match = files...
Run lograptor with arguments. Experimental feature to use the log processor into generic Python scripts. This part is still under development, do not use. :param files: Input files. Each argument can be a file path or a glob pathname. :param patterns: Regex patterns, select the log line if at least one pattern matches...
f9096:m7
def _make_spec_file(self):
spec_file = setuptools.command.bdist_rpm.bdist_rpm._make_spec_file(self)<EOL>spec_file.append('<STR_LIT>')<EOL>spec_file.append('<STR_LIT>')<EOL>spec_file.append('<STR_LIT>')<EOL>return spec_file<EOL>
Customize spec file inserting %config section
f9104:c1:m0
def fake_create_redis_pool(fake_pool):
async def create_redis_pool(*args, **kwargs):<EOL><INDENT>return fake_pool<EOL><DEDENT>return create_redis_pool<EOL>
Original Redis pool have magick method __await__ to create exclusive connection. CoroutineMock sees this method and thinks that Redis pool instance is awaitable and tries to await it. To avoit this behavior we are using this constructor with Mock.side_effect instead of Mock.return_value.
f9108:m0
@lock_timeout.validator<EOL><INDENT>def _validate_lock_timeout(self, attribute, value):<DEDENT>
if value <= <NUM_LIT:0>:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>
Validate if lock_timeout is greater than 0
f9109:c0:m1
@drift.validator<EOL><INDENT>def _validate_drift(self, attribute, value):<DEDENT>
if value <= <NUM_LIT:0>:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>
Validate if drift is greater than 0
f9109:c0:m2
@retry_count.validator<EOL><INDENT>def _validate_retry_count(self, attribute, value):<DEDENT>
if value < <NUM_LIT:1>:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>
Validate if retry_count is greater or equal 1
f9109:c0:m3
@retry_delay_min.validator<EOL><INDENT>@retry_delay_max.validator<EOL>def _validate_retry_delay(self, attribute, value):<DEDENT>
if value <= <NUM_LIT:0>:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>
Validate if retry_delay_min and retry_delay_max is greater than 0
f9109:c0:m4
async def lock(self, resource):
lock_identifier = str(uuid.uuid4())<EOL>error = RuntimeError('<STR_LIT>')<EOL>try:<EOL><INDENT>for n in range(self.retry_count):<EOL><INDENT>self.log.debug('<STR_LIT>',<EOL>resource, n + <NUM_LIT:1>, self.retry_count)<EOL>if n != <NUM_LIT:0>:<EOL><INDENT>delay = random.uniform(self.retry_delay_min,<EOL>self.retry_delay...
Tries to acquire de lock. If the lock is correctly acquired, the valid property of the returned lock is True. In case of fault the LockError exception will be raised :param resource: The string identifier of the resource to lock :return: :class:`aioredlock.Lock` :raises: LockError in case of fault
f9109:c0:m6
async def extend(self, lock):
self.log.debug('<STR_LIT>', lock.resource)<EOL>if not lock.valid:<EOL><INDENT>raise RuntimeError('<STR_LIT>')<EOL><DEDENT>await self.redis.set_lock(lock.resource, lock.id)<EOL>
Tries to reset the lock's lifetime to lock_timeout In case of fault the LockError exception will be raised :param lock: :class:`aioredlock.Lock` :raises: RuntimeError if lock is not valid :raises: LockError in case of fault
f9109:c0:m7
async def unlock(self, lock):
self.log.debug('<STR_LIT>', lock.resource)<EOL>await self.redis.unset_lock(lock.resource, lock.id)<EOL>lock.valid = False<EOL>
Release the lock and sets it's validity to False if lock successfuly released. In case of fault the LockError exception will be raised :param lock: :class:`aioredlock.Lock` :raises: LockError in case of fault
f9109:c0:m8
async def is_locked(self, resource_or_lock):
if isinstance(resource_or_lock, Lock):<EOL><INDENT>resource = resource_or_lock.resource<EOL><DEDENT>elif isinstance(resource_or_lock, str):<EOL><INDENT>resource = resource_or_lock<EOL><DEDENT>else:<EOL><INDENT>raise TypeError(<EOL>'<STR_LIT>'<EOL>'<STR_LIT>', type(resource_or_lock)<EOL>)<EOL><DEDENT>return await self.r...
Checks if the resource or the lock is locked by any redlock instance. :param resource_or_lock: resource name or aioredlock.Lock instance :returns: True if locked else False
f9109:c0:m9
async def destroy(self):
self.log.debug('<STR_LIT>', repr(self))<EOL>await self.redis.clear_connections()<EOL>
Clear all the redis connections
f9109:c0:m10
def __init__(self, connection):
self.connection = connection<EOL>self._pool = None<EOL>self._lock = asyncio.Lock()<EOL>self.set_lock_script = re.sub(r'<STR_LIT>', '<STR_LIT>', self.SET_LOCK_SCRIPT, flags=re.M).strip()<EOL>self.unset_lock_script = re.sub(r'<STR_LIT>', '<STR_LIT>', self.UNSET_LOCK_SCRIPT, flags=re.M).strip()<EOL>
Redis instance constructor Constructor takes single argument - a redis host address The address can be one of the following: * a dict - {'host': 'localhost', 'port': 6379, 'db': 0, 'password': 'pass'} all keys except host and port will be passed as kwargs to the aioredis.create_redis_pool(); * a R...
f9113:c0:m0
@staticmethod<EOL><INDENT>async def _create_redis_pool(*args, **kwargs):<DEDENT>
if StrictVersion(aioredis.__version__) >= StrictVersion('<STR_LIT>'): <EOL><INDENT>return await aioredis.create_redis_pool(*args, **kwargs)<EOL><DEDENT>else: <EOL><INDENT>return await aioredis.create_pool(*args, **kwargs)<EOL><DEDENT>
Adapter to support both aioredis-0.3.0 and aioredis-1.0.0 For aioredis-1.0.0 and later calls: aioredis.create_redis_pool(*args, **kwargs) For aioredis-0.3.0 calls: aioredis.create_pool(*args, **kwargs)
f9113:c0:m3
async def connect(self):
if isinstance(self.connection, dict):<EOL><INDENT>kwargs = self.connection.copy()<EOL>address = (<EOL>kwargs.pop('<STR_LIT:host>', '<STR_LIT:localhost>'),<EOL>kwargs.pop('<STR_LIT:port>', <NUM_LIT>)<EOL>)<EOL>redis_kwargs = kwargs<EOL><DEDENT>elif isinstance(self.connection, aioredis.Redis):<EOL><INDENT>self._pool = se...
Get a connection for the self instance
f9113:c0:m4
async def close(self):
if self._pool is not None and not isinstance(self.connection, aioredis.Redis):<EOL><INDENT>self._pool.close()<EOL>await self._pool.wait_closed()<EOL><DEDENT>self._pool = None<EOL>
Closes connection and resets pool
f9113:c0:m5
async def set_lock(self, resource, lock_identifier, lock_timeout):
lock_timeout_ms = int(lock_timeout * <NUM_LIT:1000>)<EOL>try:<EOL><INDENT>with await self.connect() as redis:<EOL><INDENT>await redis.eval(<EOL>self.set_lock_script,<EOL>keys=[resource],<EOL>args=[lock_identifier, lock_timeout_ms]<EOL>)<EOL><DEDENT><DEDENT>except aioredis.errors.ReplyError as exc: <EOL><INDENT>self.lo...
Lock this instance and set lock expiration time to lock_timeout :param resource: redis key to set :param lock_identifier: unique id of the lock :param lock_timeout: timeout for lock in seconds :raises: LockError if lock is not acquired
f9113:c0:m6
async def unset_lock(self, resource, lock_identifier):
try:<EOL><INDENT>with await self.connect() as redis:<EOL><INDENT>await redis.eval(<EOL>self.unset_lock_script,<EOL>keys=[resource],<EOL>args=[lock_identifier]<EOL>)<EOL><DEDENT><DEDENT>except aioredis.errors.ReplyError as exc: <EOL><INDENT>self.log.debug('<STR_LIT>',<EOL>resource, repr(self))<EOL>raise LockError('<STR...
Unlock this instance :param resource: redis key to set :param lock_identifier: unique id of the lock :raises: LockError if the lock resource acquired with different lock_identifier
f9113:c0:m7
async def is_locked(self, resource):
with await self.connect() as redis:<EOL><INDENT>lock_identifier = await redis.get(resource)<EOL><DEDENT>if lock_identifier:<EOL><INDENT>return True<EOL><DEDENT>else:<EOL><INDENT>return False<EOL><DEDENT>
Checks if the resource is locked by any redlock instance. :param resource: The resource string name to check :returns: True if locked else False
f9113:c0:m8
async def set_lock(self, resource, lock_identifier):
start_time = time.time()<EOL>lock_timeout = self.lock_timeout<EOL>successes = await asyncio.gather(*[<EOL>i.set_lock(resource, lock_identifier, lock_timeout) for<EOL>i in self.instances<EOL>], return_exceptions=True)<EOL>successful_sets = sum(s is None for s in successes)<EOL>elapsed_time = time.time() - start_time<EOL...
Tries to set the lock to all the redis instances :param resource: The resource string name to lock :param lock_identifier: The id of the lock. A unique string :return float: The elapsed time that took to lock the instances in seconds :raises: LockError if the lock has not been set to at least (N/2 + 1) instanc...
f9113:c1:m2
async def unset_lock(self, resource, lock_identifier):
start_time = time.time()<EOL>successes = await asyncio.gather(*[<EOL>i.unset_lock(resource, lock_identifier) for<EOL>i in self.instances<EOL>], return_exceptions=True)<EOL>successful_remvoes = sum(s is None for s in successes)<EOL>elapsed_time = time.time() - start_time<EOL>unlocked = True if successful_remvoes >= int(...
Tries to unset the lock to all the redis instances :param resource: The resource string name to lock :param lock_identifier: The id of the lock. A unique string :return float: The elapsed time that took to lock the instances in seconds :raises: LockError if the lock has no matching identifier in more than (N/2 -...
f9113:c1:m3
async def is_locked(self, resource):
successes = await asyncio.gather(*[<EOL>i.is_locked(resource) for<EOL>i in self.instances<EOL>], return_exceptions=True)<EOL>successful_sets = sum(s is True for s in successes)<EOL>locked = True if successful_sets >= int(len(self.instances) / <NUM_LIT:2>) + <NUM_LIT:1> else False<EOL>return locked<EOL>
Checks if the resource is locked by any redlock instance. :param resource: The resource string name to lock :returns: True if locked else False
f9113:c1:m4
def _setup_branchy_tags(self):
<EOL>with open('<STR_LIT>', '<STR_LIT:a>') as f:<EOL><INDENT>f.write('<STR_LIT>')<EOL><DEDENT>self.mgr._invoke('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')<EOL>self.mgr._invoke('<STR_LIT>', '<STR_LIT:1.0>')<EOL>self.mgr._invoke('<STR_LIT>', '<STR_LIT:1>')<EOL>with open('<STR_LIT>', '<STR_LIT:a>') as f:<EOL><INDENT>f.write('<...
Create two heads, one which has a 1.0 tag and a different one which has a 1.1 tag.
f9117:c1:m8
def file_finder(dirname="<STR_LIT:.>"):
import distutils.log<EOL>dirname = dirname or '<STR_LIT:.>'<EOL>try:<EOL><INDENT>valid_mgrs = managers.RepoManager.get_valid_managers(dirname)<EOL>valid_mgrs = managers.RepoManager.existing_only(valid_mgrs)<EOL>for mgr in valid_mgrs:<EOL><INDENT>try:<EOL><INDENT>return mgr.find_all_files()<EOL><DEDENT>except Exception:...
Find the files in ``dirname`` under Mercurial version control according to the setuptools spec (see http://peak.telecommunity.com/DevCenter/setuptools#adding-support-for-other-revision-control-systems ).
f9123:m0
def patch_egg_info(force_hg_version=False):
from setuptools.command.egg_info import egg_info<EOL>from pkg_resources import safe_version<EOL>import functools<EOL>orig_ver = egg_info.tagged_version<EOL>@functools.wraps(orig_ver)<EOL>def tagged_version(self):<EOL><INDENT>vcs_param = (<EOL>getattr(self.distribution, '<STR_LIT>', False)<EOL>or getattr(self.distributi...
A hack to replace egg_info.tagged_version with a wrapped version that will use the mercurial version if indicated. `force_hg_version` is used for hgtools itself.
f9123:m1
def version_calc(dist, attr, value):
expected_attrs = '<STR_LIT>', '<STR_LIT>'<EOL>if not value or attr not in expected_attrs:<EOL><INDENT>return<EOL><DEDENT>options = value if isinstance(value, dict) else {}<EOL>dist.metadata.version = calculate_version(options)<EOL>patch_egg_info()<EOL>
Handler for parameter to setup(use_vcs_version=value) attr should be 'use_vcs_version' (also allows use_hg_version for compatibility). bool(value) should be true to invoke this plugin. value may optionally be a dict and supply options to the plugin.
f9123:m4
def find(pred, items):
for i, item in enumerate(items):<EOL><INDENT>if pred(item):<EOL><INDENT>return i<EOL><DEDENT><DEDENT>
Find the index of the first element in items for which pred returns True >>> find(lambda x: x > 3, range(100)) 4 >>> find(lambda x: x < -3, range(100)) is None True
f9124:m0
def rfind(pred, items):
return -find(pred, reversed(items)) - <NUM_LIT:1><EOL>
Find the index of the last element in items for which pred returns True. Returns a negative number useful for indexing from the end of a list or tuple. >>> rfind(lambda x: x > 3, [5,4,3,2,1]) -4
f9124:m1
def reset_less_significant(self, significant_version):
def nonzero(x):<EOL><INDENT>return x != <NUM_LIT:0><EOL><DEDENT>version_len = <NUM_LIT:3> <EOL>significant_pos = rfind(nonzero, significant_version.version)<EOL>significant_pos = version_len + significant_pos + <NUM_LIT:1><EOL>self.version = (<EOL>self.version[:significant_pos]<EOL>+ (<NUM_LIT:0>,) * (version_len - si...
Reset to zero all version info less significant than the indicated version. >>> ver = SummableVersion('3.1.2') >>> ver.reset_less_significant(SummableVersion('0.1')) >>> str(ver) '3.1'
f9124:c0:m1
def as_number(self):
def combine(subver, ver):<EOL><INDENT>return subver / <NUM_LIT:10> + ver<EOL><DEDENT>return reduce(combine, reversed(self.version))<EOL>
>>> round(SummableVersion('1.9.3').as_number(), 12) 1.93
f9124:c0:m2
def get_strict_versions(self):
return self.__versions_from_tags(<EOL>tag.tag for tag in self.get_repo_tags()<EOL>)<EOL>
Return all version tags that can be represented by a StrictVersion.
f9124:c1:m2
def get_tagged_version(self):
tags = list(self.get_tags())<EOL>if '<STR_LIT>' in tags and not self.is_modified():<EOL><INDENT>tags = self.get_parent_tags('<STR_LIT>')<EOL><DEDENT>versions = self.__versions_from_tags(tags)<EOL>return self.__best_version(versions)<EOL>
Get the version of the local working set as a StrictVersion or None if no viable tag exists. If the local working set is itself the tagged commit and the tip and there are no local modifications, use the tag on the parent changeset.
f9124:c1:m3
def get_current_version(self, increment=None):
ver = (<EOL>self.get_tagged_version()<EOL>or str(self.get_next_version(increment)) + '<STR_LIT>'<EOL>)<EOL>return str(ver)<EOL>
Return as a string the version of the current state of the repository -- a tagged version, if present, or the next version based on prior tagged releases.
f9124:c1:m5
def get_next_version(self, increment=None):
increment = increment or self.increment<EOL>return self.infer_next_version(self.get_latest_version(), increment)<EOL>
Return the next version based on prior tagged releases.
f9124:c1:m6
@staticmethod<EOL><INDENT>def infer_next_version(last_version, increment):<DEDENT>
if last_version is None:<EOL><INDENT>return increment<EOL><DEDENT>last_version = SummableVersion(str(last_version))<EOL>if last_version.prerelease:<EOL><INDENT>last_version.prerelease = None<EOL>return str(last_version)<EOL><DEDENT>increment = SummableVersion(increment)<EOL>sum = last_version + increment<EOL>sum.reset_...
Given a simple application version (as a StrictVersion), and an increment (1.0, 0.1, or 0.0.1), guess the next version. Set up a shorthand for examples >>> def VM_infer(*params): ... return str(VersionManagement.infer_next_version(*params)) >>> VM_infer('3.2', '0.0.1') '3.2.1' >>> VM_infer(StrictVersion('3.2'), ...
f9124:c1:m7
def _invoke(self, *params):
cmd = [self.exe, '<STR_LIT>', self.location] + list(params)<EOL>with reentry.in_process_context(cmd) as result:<EOL><INDENT>sys.modules['<STR_LIT>'].run()<EOL><DEDENT>stdout = result.stdio.stdout.getvalue()<EOL>stderr = result.stdio.stderr.getvalue()<EOL>if not result.returncode == <NUM_LIT:0>:<EOL><INDENT>raise Runtim...
Run the self.exe command in-process with the supplied params.
f9125:c0:m0
def _invoke(self, *params):
cmd = [self.exe] + list(params)<EOL>proc = subprocess.Popen(<EOL>cmd, stdout=subprocess.PIPE,<EOL>stderr=subprocess.PIPE, cwd=self.location, env=self.env)<EOL>stdout, stderr = proc.communicate()<EOL>if not proc.returncode == <NUM_LIT:0>:<EOL><INDENT>raise RuntimeError(stderr.strip() or stdout.strip())<EOL><DEDENT>retur...
Invoke self.exe as a subprocess
f9127:c0:m0
@property<EOL><INDENT>def env(self):<DEDENT>
env = os.environ.copy()<EOL>env.pop('<STR_LIT>', None)<EOL>return env<EOL>
Return an environment safe for calling an `hg` subprocess. Removes MACOSX_DEPLOYMENT_TARGET from the env, as if there's a mismatch between the local Python environment and the environment in which `hg` is installed, it will cause an exception. See https://bitbucket.org/jaraco/hgtools/issue/7 for details.
f9127:c1:m0
def is_valid(self):
return True<EOL>
Return True if this is a valid manager for this location.
f9128:c0:m1
@classmethod<EOL><INDENT>def get_valid_managers(cls, location):<DEDENT>
def by_priority_attr(c):<EOL><INDENT>return getattr(c, '<STR_LIT>', <NUM_LIT:0>)<EOL><DEDENT>classes = sorted(<EOL>iter_subclasses(cls), key=by_priority_attr,<EOL>reverse=True)<EOL>all_managers = (c(location) for c in classes)<EOL>return (mgr for mgr in all_managers if mgr.is_valid())<EOL>
Get the valid RepoManagers for this location.
f9128:c0:m3
@staticmethod<EOL><INDENT>def existing_only(managers):<DEDENT>
return (mgr for mgr in managers if mgr.find_root())<EOL>
Return only those managers that refer to an existing repo
f9128:c0:m5