| signature | body | docstring | id |
|---|---|---|---|
def collapse_indents(indentation): | change_in_level = ind_change(indentation)<EOL>if change_in_level == <NUM_LIT:0>:<EOL><INDENT>indents = "<STR_LIT>"<EOL><DEDENT>elif change_in_level < <NUM_LIT:0>:<EOL><INDENT>indents = closeindent * (-change_in_level)<EOL><DEDENT>else:<EOL><INDENT>indents = openindent * change_in_level<EOL><DEDENT>return indentation.re... | Removes all openindent-closeindent pairs. | f11256:m34 |
def transform(grammar, text): | results = []<EOL>intervals = []<EOL>for result, start, stop in all_matches(grammar, text):<EOL><INDENT>if result is not ignore_transform:<EOL><INDENT>internal_assert(isinstance(result, str), "<STR_LIT>", result)<EOL>if start == <NUM_LIT:0> and stop == len(text):<EOL><INDENT>return result<EOL><DEDENT>results.append(resu... | Transform text by replacing matches to grammar. | f11256:m35 |
def disable_inside(item, *elems, **kwargs): | _invert = kwargs.get("<STR_LIT>", False)<EOL>internal_assert(set(kwargs.keys()) <= set(("<STR_LIT>",)), "<STR_LIT>")<EOL>level = [<NUM_LIT:0>] <EOL>@contextmanager<EOL>def manage_item(self, instring, loc):<EOL><INDENT>level[<NUM_LIT:0>] += <NUM_LIT:1><EOL>try:<EOL><INDENT>yield<EOL><DEDENT>finally:<EOL><INDENT>level[<... | Prevent elems from matching inside of item. Returns (item with elem disabled, *new versions of elems). | f11256:m36 |
def disable_outside(item, *elems): | for wrapped in disable_inside(item, *elems, **{"<STR_LIT>": True}):<EOL><INDENT>yield wrapped<EOL><DEDENT> | Prevent elems from matching outside of item. Returns (item with elem disabled, *new versions of elems). | f11256:m37 |
def __new__(cls, action, original, loc, tokens, greedy=False, ignore_no_tokens=False, ignore_one_token=False): | if ignore_no_tokens and len(tokens) == <NUM_LIT:0>:<EOL><INDENT>return []<EOL><DEDENT>elif ignore_one_token and len(tokens) == <NUM_LIT:1>:<EOL><INDENT>return tokens[<NUM_LIT:0>] <EOL><DEDENT>else:<EOL><INDENT>self = super(ComputationNode, cls).__new__(cls)<EOL>self.action, self.loc, self.tokens = action, loc, tokens<... | Create a ComputationNode to return from a parse action. If greedy, then never defer the action until later. If ignore_no_tokens, then don't call the action if there are no tokens. If ignore_one_token, then don't call the action if there is only one token. | f11256:c0:m0 |
@property<EOL><INDENT>def original(self):<DEDENT> | return self.list_of_originals[self.index_of_original]<EOL> | Get the original from the originals memo. | f11256:c0:m1 |
@property<EOL><INDENT>def name(self):<DEDENT> | name = getattr(self.action, "<STR_LIT>", None)<EOL>return name if name is not None else ascii(self.action)<EOL> | Get the name of the action. | f11256:c0:m2 |
def evaluate(self): | if DEVELOP:<EOL><INDENT>internal_assert(not self.been_called, "<STR_LIT>" + self.name + "<STR_LIT>", self.tokens)<EOL>self.been_called = True<EOL><DEDENT>evaluated_toks = evaluate_tokens(self.tokens)<EOL>if logger.tracing: <EOL><INDENT>logger.log_trace(self.name, self.original, self.loc, evaluated_toks, self.tokens)<E... | Get the result of evaluating the computation graph at this node. | f11256:c0:m3 |
def __repr__(self): | inner_repr = "<STR_LIT:\n>".join("<STR_LIT:\t>" + line for line in repr(self.tokens).splitlines())<EOL>return self.name + "<STR_LIT>" + inner_repr + "<STR_LIT>"<EOL> | Get a representation of the entire computation graph below this node. | f11256:c0:m4 |
def _combine(self, original, loc, tokens): | combined_tokens = super(CombineNode, self).postParse(original, loc, tokens)<EOL>internal_assert(len(combined_tokens) == <NUM_LIT:1>, "<STR_LIT>", combined_tokens)<EOL>return combined_tokens[<NUM_LIT:0>]<EOL> | Implement the parse action for Combine. | f11256:c1:m0 |
def postParse(self, original, loc, tokens): | return ComputationNode(self._combine, original, loc, tokens, ignore_no_tokens=True, ignore_one_token=True)<EOL> | Create a ComputationNode for Combine. | f11256:c1:m1 |
def parseImpl(self, instring, loc, *args, **kwargs): | with self.wrapper(self, instring, loc):<EOL><INDENT>return super(Wrap, self).parseImpl(instring, loc, *args, **kwargs)<EOL><DEDENT> | Wrapper around ParseElementEnhance.parseImpl. | f11256:c2:m1 |
def split_function_call(tokens, loc): | pos_args = []<EOL>star_args = []<EOL>kwd_args = []<EOL>dubstar_args = []<EOL>for arg in tokens:<EOL><INDENT>argstr = "<STR_LIT>".join(arg)<EOL>if len(arg) == <NUM_LIT:1>:<EOL><INDENT>if star_args or kwd_args or dubstar_args:<EOL><INDENT>raise CoconutDeferredSyntaxError("<STR_LIT>", loc)<EOL><DEDENT>pos_args.append(args... | Split into positional arguments and keyword arguments. | f11257:m0 |
def attrgetter_atom_split(tokens): | if len(tokens) == <NUM_LIT:1>: <EOL><INDENT>return tokens[<NUM_LIT:0>], None<EOL><DEDENT>elif len(tokens) >= <NUM_LIT:2> and tokens[<NUM_LIT:1>] == "<STR_LIT:(>": <EOL><INDENT>if len(tokens) == <NUM_LIT:2>: <EOL><INDENT>return tokens[<NUM_LIT:0>], "<STR_LIT>"<EOL><DEDENT>elif len(tokens) == <NUM_LIT:3>: <EOL><INDEN... | Split attrgetter_atom_tokens into (attr_or_method_name, method_args_or_none_if_attr). | f11257:m1 |
def pipe_item_split(tokens, loc): | <EOL>if isinstance(tokens, list) or "<STR_LIT>" in tokens:<EOL><INDENT>internal_assert(len(tokens) == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>return "<STR_LIT>", (tokens[<NUM_LIT:0>],)<EOL><DEDENT>elif "<STR_LIT>" in tokens:<EOL><INDENT>func, args = tokens<EOL>pos_args, star_args, kwd_args, dubstar_args = split_function_... | Process a pipe item, which could be a partial, an attribute access, a method call, or an expression. Return (type, split) where split is (expr,) for expression, (func, pos_args, kwd_args) for partial, (name, args) for attr/method, and (op, args) for itemgetter. | f11257:m2 |
def infix_error(tokens): | raise CoconutInternalException("<STR_LIT>", tokens)<EOL> | Raise inner infix error. | f11257:m3 |
def get_infix_items(tokens, callback=infix_error): | internal_assert(len(tokens) >= <NUM_LIT:3>, "<STR_LIT>", tokens)<EOL>(arg1, func, arg2), tokens = tokens[:<NUM_LIT:3>], tokens[<NUM_LIT:3>:]<EOL>args = list(arg1) + list(arg2)<EOL>while tokens:<EOL><INDENT>args = [callback([args, func, []])]<EOL>(func, newarg), tokens = tokens[:<NUM_LIT:2>], tokens[<NUM_LIT:2>:]<EOL>ar... | Perform infix token processing. Takes a callback (which takes infix tokens and returns a string) to handle inner infix calls. | f11257:m4 |
def comp_pipe_info(op): | if op == "<STR_LIT>":<EOL><INDENT>return "<STR_LIT>", False<EOL><DEDENT>elif op == "<STR_LIT>":<EOL><INDENT>return "<STR_LIT>", False<EOL><DEDENT>elif op == "<STR_LIT>":<EOL><INDENT>return "<STR_LIT>", True<EOL><DEDENT>elif op == "<STR_LIT>":<EOL><INDENT>return "<STR_LIT>", True<EOL><DEDENT>else:<EOL><INDENT>raise Coco... | Returns (direction, star) where direction is 'forwards' or 'backwards'. | f11257:m5 |
def add_paren_handle(tokens): | internal_assert(len(tokens) == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>return "<STR_LIT:(>" + tokens[<NUM_LIT:0>] + "<STR_LIT:)>"<EOL> | Add parentheses. | f11257:m6 |
def function_call_handle(loc, tokens): | return "<STR_LIT:(>" + join_args(*split_function_call(tokens, loc)) + "<STR_LIT:)>"<EOL> | Enforce properly ordered function parameters. | f11257:m7 |
def item_handle(loc, tokens): | out = tokens.pop(<NUM_LIT:0>)<EOL>for i, trailer in enumerate(tokens):<EOL><INDENT>if isinstance(trailer, str):<EOL><INDENT>out += trailer<EOL><DEDENT>elif len(trailer) == <NUM_LIT:1>:<EOL><INDENT>if trailer[<NUM_LIT:0>] == "<STR_LIT>":<EOL><INDENT>out = "<STR_LIT>" + out + "<STR_LIT:)>"<EOL><DEDENT>elif trailer[<NUM_L... | Process trailers. | f11257:m8 |
def pipe_handle(loc, tokens, **kwargs): | internal_assert(set(kwargs) <= set(("<STR_LIT>",)), "<STR_LIT>", kwargs)<EOL>top = kwargs.get("<STR_LIT>", True)<EOL>if len(tokens) == <NUM_LIT:1>:<EOL><INDENT>item = tokens.pop()<EOL>if not top: <EOL><INDENT>return item<EOL><DEDENT>name, split_item = pipe_item_split(item, loc)<EOL>if name == "<STR_LIT>":<EOL><INDENT>... | Process pipe calls. | f11257:m9 |
def comp_pipe_handle(loc, tokens): | internal_assert(len(tokens) >= <NUM_LIT:3> and len(tokens) % <NUM_LIT:2> == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>funcs = [tokens[<NUM_LIT:0>]]<EOL>stars = []<EOL>direction = None<EOL>for i in range(<NUM_LIT:1>, len(tokens), <NUM_LIT:2>):<EOL><INDENT>op, fn = tokens[i], tokens[i + <NUM_LIT:1>]<EOL>new_direction, star =... | Process pipe function composition. | f11257:m10 |
def none_coalesce_handle(tokens): | if len(tokens) == <NUM_LIT:1>:<EOL><INDENT>return tokens[<NUM_LIT:0>]<EOL><DEDENT>elif tokens[<NUM_LIT:0>].isalnum():<EOL><INDENT>return "<STR_LIT>".format(<EOL>a=tokens[<NUM_LIT:0>],<EOL>b=none_coalesce_handle(tokens[<NUM_LIT:1>:]),<EOL>)<EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT>".format(<EOL>x=none_coalesce_var... | Process the None-coalescing operator. | f11257:m11 |
def attrgetter_atom_handle(loc, tokens): | name, args = attrgetter_atom_split(tokens)<EOL>if args is None:<EOL><INDENT>return '<STR_LIT>' + name + '<STR_LIT>'<EOL><DEDENT>elif "<STR_LIT:.>" in name:<EOL><INDENT>raise CoconutDeferredSyntaxError("<STR_LIT>", loc)<EOL><DEDENT>elif args == "<STR_LIT>":<EOL><INDENT>return '<STR_LIT>' + tokens[<NUM_LIT:0>] + '<STR_LI... | Process attrgetter literals. | f11257:m12 |
def lazy_list_handle(tokens): | if len(tokens) == <NUM_LIT:0>:<EOL><INDENT>return "<STR_LIT>"<EOL><DEDENT>else:<EOL><INDENT>return (<EOL>"<STR_LIT>" % (func_var, func_var)<EOL>+ "<STR_LIT>" + "<STR_LIT>".join(tokens) + ("<STR_LIT:U+002C>" if len(tokens) == <NUM_LIT:1> else "<STR_LIT>") + "<STR_LIT>"<EOL>)<EOL><DEDENT> | Process lazy lists. | f11257:m13 |
def chain_handle(tokens): | if len(tokens) == <NUM_LIT:1>:<EOL><INDENT>return tokens[<NUM_LIT:0>]<EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT>" + lazy_list_handle(tokens) + "<STR_LIT:)>"<EOL><DEDENT> | Process chain calls. | f11257:m14 |
def infix_handle(tokens): | func, args = get_infix_items(tokens, callback=infix_handle)<EOL>return "<STR_LIT:(>" + func + "<STR_LIT>" + "<STR_LIT:U+002CU+0020>".join(args) + "<STR_LIT:)>"<EOL> | Process infix calls. | f11257:m15 |
def op_funcdef_handle(tokens): | func, base_args = get_infix_items(tokens)<EOL>args = []<EOL>for arg in base_args[:-<NUM_LIT:1>]:<EOL><INDENT>rstrip_arg = arg.rstrip()<EOL>if not rstrip_arg.endswith(unwrapper):<EOL><INDENT>if not rstrip_arg.endswith("<STR_LIT:U+002C>"):<EOL><INDENT>arg += "<STR_LIT:U+002CU+0020>"<EOL><DEDENT>elif arg.endswith("<STR_LI... | Process infix defs. | f11257:m16 |
def lambdef_handle(tokens): | if len(tokens) == <NUM_LIT:0>:<EOL><INDENT>return "<STR_LIT>"<EOL><DEDENT>elif len(tokens) == <NUM_LIT:1>:<EOL><INDENT>return "<STR_LIT>" + tokens[<NUM_LIT:0>] + "<STR_LIT::>"<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT> | Process lambda calls. | f11257:m17 |
def typedef_callable_handle(tokens): | if len(tokens) == <NUM_LIT:1>:<EOL><INDENT>return '<STR_LIT>' + tokens[<NUM_LIT:0>] + '<STR_LIT:]>'<EOL><DEDENT>elif len(tokens) == <NUM_LIT:2>:<EOL><INDENT>return '<STR_LIT>' + tokens[<NUM_LIT:0>] + '<STR_LIT>' + tokens[<NUM_LIT:1>] + '<STR_LIT:]>'<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT... | Process -> to Callable inside type annotations. | f11257:m18 |
def make_suite_handle(tokens): | internal_assert(len(tokens) == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>return "<STR_LIT:\n>" + openindent + tokens[<NUM_LIT:0>] + closeindent<EOL> | Make simple statements into suites. | f11257:m19 |
def implicit_return_handle(tokens): | internal_assert(len(tokens) == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>return "<STR_LIT>" + tokens[<NUM_LIT:0>]<EOL> | Add an implicit return. | f11257:m20 |
def math_funcdef_handle(tokens): | internal_assert(len(tokens) == <NUM_LIT:2>, "<STR_LIT>", tokens)<EOL>return tokens[<NUM_LIT:0>] + ("<STR_LIT>" if tokens[<NUM_LIT:1>].startswith("<STR_LIT:\n>") else "<STR_LIT:U+0020>") + tokens[<NUM_LIT:1>]<EOL> | Process assignment function definition. | f11257:m21 |
def decorator_handle(tokens): | defs = []<EOL>decorates = []<EOL>for i, tok in enumerate(tokens):<EOL><INDENT>if "<STR_LIT>" in tok and len(tok) == <NUM_LIT:1>:<EOL><INDENT>decorates.append("<STR_LIT:@>" + tok[<NUM_LIT:0>])<EOL><DEDENT>elif "<STR_LIT:test>" in tok and len(tok) == <NUM_LIT:1>:<EOL><INDENT>varname = decorator_var + "<STR_LIT:_>" + str(... | Process decorators. | f11257:m22 |
def match_handle(loc, tokens): | if len(tokens) == <NUM_LIT:4>:<EOL><INDENT>matches, match_type, item, stmts = tokens<EOL>cond = None<EOL><DEDENT>elif len(tokens) == <NUM_LIT:5>:<EOL><INDENT>matches, match_type, item, cond, stmts = tokens<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>if match_type == "<S... | Process match blocks. | f11257:m23 |
def except_handle(tokens): | if len(tokens) == <NUM_LIT:1>:<EOL><INDENT>errs, asname = tokens[<NUM_LIT:0>], None<EOL><DEDENT>elif len(tokens) == <NUM_LIT:2>:<EOL><INDENT>errs, asname = tokens<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>out = "<STR_LIT>"<EOL>if "<STR_LIT:list>" in tokens:<EOL><INDEN... | Process except statements. | f11257:m24 |
def subscriptgroup_handle(tokens): | internal_assert(<NUM_LIT:0> < len(tokens) <= <NUM_LIT:3>, "<STR_LIT>", tokens)<EOL>args = []<EOL>for arg in tokens:<EOL><INDENT>if not arg:<EOL><INDENT>arg = "<STR_LIT:None>"<EOL><DEDENT>args.append(arg)<EOL><DEDENT>if len(args) == <NUM_LIT:1>:<EOL><INDENT>return args[<NUM_LIT:0>]<EOL><DEDENT>else:<EOL><INDENT>return "... | Process subscriptgroups. | f11257:m25 |
def itemgetter_handle(tokens): | internal_assert(len(tokens) == <NUM_LIT:2>, "<STR_LIT>", tokens)<EOL>op, args = tokens<EOL>if op == "<STR_LIT:[>":<EOL><INDENT>return "<STR_LIT>" + args + "<STR_LIT:)>"<EOL><DEDENT>elif op == "<STR_LIT>":<EOL><INDENT>return "<STR_LIT>" + args + "<STR_LIT:)>"<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException(... | Process implicit itemgetter partials. | f11257:m26 |
def class_suite_handle(tokens): | internal_assert(len(tokens) == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>return "<STR_LIT>" + tokens[<NUM_LIT:0>]<EOL> | Process implicit pass in class suite. | f11257:m27 |
def namelist_handle(tokens): | if len(tokens) == <NUM_LIT:1>:<EOL><INDENT>return tokens[<NUM_LIT:0>]<EOL><DEDENT>elif len(tokens) == <NUM_LIT:2>:<EOL><INDENT>return tokens[<NUM_LIT:0>] + "<STR_LIT:\n>" + tokens[<NUM_LIT:0>] + "<STR_LIT>" + tokens[<NUM_LIT:1>]<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDE... | Process inline nonlocal and global statements. | f11257:m28 |
def compose_item_handle(tokens): | if len(tokens) < <NUM_LIT:1>:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>elif len(tokens) == <NUM_LIT:1>:<EOL><INDENT>return tokens[<NUM_LIT:0>]<EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT>" + "<STR_LIT:U+002CU+0020>".join(reversed(tokens)) + "<STR_LIT:)>"<EOL><DEDENT> | Process function composition. | f11257:m29 |
def tco_return_handle(tokens): | internal_assert(len(tokens) == <NUM_LIT:2>, "<STR_LIT>", tokens)<EOL>if tokens[<NUM_LIT:1>].startswith("<STR_LIT>"):<EOL><INDENT>return "<STR_LIT>" + tokens[<NUM_LIT:0>] + "<STR_LIT:)>" + tokens[<NUM_LIT:1>][<NUM_LIT:2>:] <EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT>" + tokens[<NUM_LIT:0>] + "<STR_LIT:U+002CU+0020>... | Process tail-call-optimizable return statements. | f11257:m30 |
def split_func_name_args_params_handle(tokens): | internal_assert(len(tokens) == <NUM_LIT:2>, "<STR_LIT>", tokens)<EOL>func_name = tokens[<NUM_LIT:0>]<EOL>func_args = []<EOL>func_params = []<EOL>for arg in tokens[<NUM_LIT:1>]:<EOL><INDENT>if len(arg) > <NUM_LIT:1> and arg[<NUM_LIT:0>] in ("<STR_LIT:*>", "<STR_LIT>"):<EOL><INDENT>func_args.append(arg[<NUM_LIT:1>])<EOL>... | Process splitting a function into name, params, and args. | f11257:m31 |
def join_match_funcdef(tokens): | if len(tokens) == <NUM_LIT:2>:<EOL><INDENT>(func, insert_after_docstring), body = tokens<EOL>docstring = None<EOL><DEDENT>elif len(tokens) == <NUM_LIT:3>:<EOL><INDENT>(func, insert_after_docstring), docstring, body = tokens<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>in... | Join the pieces of a pattern-matching function together. | f11257:m32 |
def where_stmt_handle(tokens): | internal_assert(len(tokens) == <NUM_LIT:2>, "<STR_LIT>", tokens)<EOL>base_stmt, assignment_stmts = tokens<EOL>stmts = list(assignment_stmts) + [base_stmt]<EOL>return "<STR_LIT:\n>".join(stmts) + "<STR_LIT:\n>"<EOL> | Process a where statement. | f11257:m33 |
def set_grammar_names(): | for varname, val in vars(Grammar).items():<EOL><INDENT>if isinstance(val, ParserElement):<EOL><INDENT>setattr(Grammar, varname, val.setName(varname))<EOL><DEDENT><DEDENT> | Set names of grammar elements to their variable names. | f11257:m34 |
def gethash(compiled): | lines = compiled.splitlines()<EOL>if len(lines) < <NUM_LIT:3> or not lines[<NUM_LIT:2>].startswith(hash_prefix):<EOL><INDENT>return None<EOL><DEDENT>else:<EOL><INDENT>return lines[<NUM_LIT:2>][len(hash_prefix):]<EOL><DEDENT> | Retrieve a hash from a header. | f11258:m0 |
def minify(compiled): | compiled = compiled.strip()<EOL>if compiled:<EOL><INDENT>out = []<EOL>for line in compiled.splitlines():<EOL><INDENT>line = line.split("<STR_LIT:#>", <NUM_LIT:1>)[<NUM_LIT:0>].rstrip()<EOL>if line:<EOL><INDENT>ind = <NUM_LIT:0><EOL>while line.startswith("<STR_LIT:U+0020>"):<EOL><INDENT>line = line[<NUM_LIT:1>:]<EOL>ind... | Perform basic minifications. Fails on non-tabideal indentation or a string with a #. | f11258:m1 |
def get_template(template): | with open(os.path.join(template_dir, template) + template_ext, "<STR_LIT:r>") as template_file:<EOL><INDENT>return template_file.read()<EOL><DEDENT> | Read the given template file. | f11258:m2 |
def one_num_ver(target): | return target[:<NUM_LIT:1>]<EOL> | Return the first number of the target version, if it has one. | f11258:m3 |
def section(name): | line = "<STR_LIT>" + name + "<STR_LIT>"<EOL>return line + "<STR_LIT:->" * (justify_len - len(line)) + "<STR_LIT>"<EOL> | Generate a section break. | f11258:m4 |
def process_header_args(which, target, use_hash, no_tco, strict): | target_startswith = one_num_ver(target)<EOL>target_info = get_target_info(target)<EOL>try_backport_lru_cache = r'''<STR_LIT>'''try:<EOL>import trollius as asyncio<EOL>except ImportError:<EOL>class you_need_to_install_trollius: pass<EOL>asyncio = you_need_to_install_trollius()<EOL>format_dict = dict(<EOL>comment=comment(),<... | Create the dictionary passed to str.format in the header, target_startswith, and target_info. | f11258:m5 |
def getheader(which, target="<STR_LIT>", use_hash=None, no_tco=False, strict=False): | internal_assert(which in allowed_headers, "<STR_LIT>", which)<EOL>if which == "<STR_LIT:none>":<EOL><INDENT>return "<STR_LIT>"<EOL><DEDENT>format_dict, target_startswith, target_info = process_header_args(which, target, use_hash, no_tco, strict)<EOL>if which == "<STR_LIT>" or which == "<STR_LIT>":<EOL><INDENT>header = ... | Generate the specified header. | f11258:m6 |
def __getattr__(self, attr): | return "<STR_LIT>"<EOL> | Return an empty string for all comment attributes. | f11258:c0:m0 |
def get_match_names(match): | names = []<EOL>if "<STR_LIT>" in match:<EOL><INDENT>(match,) = match<EOL>names += get_match_names(match)<EOL><DEDENT>elif "<STR_LIT>" in match:<EOL><INDENT>(setvar,) = match<EOL>if setvar != wildcard:<EOL><INDENT>names.append(setvar)<EOL><DEDENT><DEDENT>elif "<STR_LIT>" in match:<EOL><INDENT>match, trailers = match[<NU... | Gets keyword names for the given match. | f11260:m0 |
def __init__(self, loc, check_var, checkdefs=None, names=None, var_index=<NUM_LIT:0>): | self.loc = loc<EOL>self.check_var = check_var<EOL>self.position = <NUM_LIT:0><EOL>self.checkdefs = []<EOL>if checkdefs is None:<EOL><INDENT>self.increment()<EOL><DEDENT>else:<EOL><INDENT>for checks, defs in checkdefs:<EOL><INDENT>self.checkdefs.append((checks[:], defs[:]))<EOL><DEDENT>self.set_position(-<NUM_LIT:1>)<EO... | Creates the matcher. | f11260:c0:m0 |
def duplicate(self): | other = Matcher(self.loc, self.check_var, self.checkdefs, self.names, self.var_index)<EOL>other.insert_check(<NUM_LIT:0>, "<STR_LIT>" + self.check_var)<EOL>self.others.append(other)<EOL>return other<EOL> | Duplicates the matcher to others. | f11260:c0:m1 |
def add_guard(self, cond): | self.guards.append(cond)<EOL> | Adds cond as a guard. | f11260:c0:m2 |
def get_checks(self, position=None): | if position is None:<EOL><INDENT>position = self.position<EOL><DEDENT>return self.checkdefs[position][<NUM_LIT:0>]<EOL> | Gets the checks at the position. | f11260:c0:m3 |
def set_checks(self, checks, position=None): | if position is None:<EOL><INDENT>position = self.position<EOL><DEDENT>self.checkdefs[position][<NUM_LIT:0>] = checks<EOL> | Sets the checks at the position. | f11260:c0:m4 |
def get_defs(self, position=None): | if position is None:<EOL><INDENT>position = self.position<EOL><DEDENT>return self.checkdefs[position][<NUM_LIT:1>]<EOL> | Gets the defs at the position. | f11260:c0:m5 |
def set_defs(self, defs, position=None): | if position is None:<EOL><INDENT>position = self.position<EOL><DEDENT>self.checkdefs[position][<NUM_LIT:1>] = defs<EOL> | Sets the defs at the position. | f11260:c0:m6 |
def add_check(self, check_item): | self.checks.append(check_item)<EOL>for other in self.others:<EOL><INDENT>other.add_check(check_item)<EOL><DEDENT> | Adds a check universally. | f11260:c0:m7 |
def add_def(self, def_item): | self.defs.append(def_item)<EOL>for other in self.others:<EOL><INDENT>other.add_def(def_item)<EOL><DEDENT> | Adds a def universally. | f11260:c0:m8 |
def insert_check(self, index, check_item): | self.checks.insert(index, check_item)<EOL>for other in self.others:<EOL><INDENT>other.insert_check(index, check_item)<EOL><DEDENT> | Inserts a check universally. | f11260:c0:m9 |
def insert_def(self, index, def_item): | self.defs.insert(index, def_item)<EOL>for other in self.others:<EOL><INDENT>other.insert_def(index, def_item)<EOL><DEDENT> | Inserts a def universally. | f11260:c0:m10 |
def set_position(self, position): | if position < <NUM_LIT:0>:<EOL><INDENT>position += len(self.checkdefs)<EOL><DEDENT>while position >= len(self.checkdefs):<EOL><INDENT>self.checkdefs.append(([], []))<EOL><DEDENT>self.position = position<EOL> | Sets the if-statement position. | f11260:c0:m11 |
def increment(self, by=<NUM_LIT:1>): | self.set_position(self.position + by)<EOL> | Advances the if-statement position. | f11260:c0:m12 |
def decrement(self, by=<NUM_LIT:1>): | self.set_position(self.position - by)<EOL> | Decrements the if-statement position. | f11260:c0:m13 |
@contextmanager<EOL><INDENT>def down_a_level(self, by=<NUM_LIT:1>):<DEDENT> | self.increment(by)<EOL>try:<EOL><INDENT>yield<EOL><DEDENT>finally:<EOL><INDENT>self.decrement(by)<EOL><DEDENT> | Increment then decrement. | f11260:c0:m14 |
@contextmanager<EOL><INDENT>def only_self(self):<DEDENT> | others, self.others = self.others, []<EOL>try:<EOL><INDENT>yield<EOL><DEDENT>finally:<EOL><INDENT>self.others = others + self.others<EOL><DEDENT> | Only match in self not others. | f11260:c0:m15 |
def get_temp_var(self): | tempvar = match_temp_var + "<STR_LIT:_>" + str(self.var_index)<EOL>self.var_index += <NUM_LIT:1><EOL>return tempvar<EOL> | Gets the next match_temp_var. | f11260:c0:m16 |
def match_all_in(self, matches, item): | for i, match in enumerate(matches):<EOL><INDENT>self.match(match, item + "<STR_LIT:[>" + str(i) + "<STR_LIT:]>")<EOL><DEDENT> | Matches all matches to elements of item. | f11260:c0:m17 |
def check_len_in(self, min_len, max_len, item): | if max_len is None:<EOL><INDENT>if min_len:<EOL><INDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>" + str(min_len))<EOL><DEDENT><DEDENT>elif min_len == max_len:<EOL><INDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>" + str(min_len))<EOL><DEDENT>elif not min_len:<EOL><INDENT>self.add_check("<STR_LIT>" + item + "... | Checks that the length of item is in range(min_len, max_len+1). | f11260:c0:m18 |
def match_function(self, args, kwargs, match_args=(), star_arg=None, kwd_args=(), dubstar_arg=None): | self.match_in_args_kwargs(match_args, args, kwargs, allow_star_args=star_arg is not None)<EOL>if star_arg is not None:<EOL><INDENT>self.match(star_arg, args + "<STR_LIT:[>" + str(len(match_args)) + "<STR_LIT>")<EOL><DEDENT>self.match_in_kwargs(kwd_args, kwargs)<EOL>with self.down_a_level():<EOL><INDENT>if dubstar_arg i... | Matches a pattern-matching function. | f11260:c0:m19 |
def match_in_args_kwargs(self, match_args, args, kwargs, allow_star_args=False): | req_len = <NUM_LIT:0><EOL>arg_checks = {}<EOL>to_match = [] <EOL>for i, arg in enumerate(match_args):<EOL><INDENT>if isinstance(arg, tuple):<EOL><INDENT>(match, default) = arg<EOL><DEDENT>else:<EOL><INDENT>match, default = arg, None<EOL><DEDENT>names = get_match_names(match)<EOL>if default is None:<EOL><INDENT>if not ... | Matches against args or kwargs. | f11260:c0:m20 |
def match_in_kwargs(self, match_args, kwargs): | for match, default in match_args:<EOL><INDENT>names = get_match_names(match)<EOL>if names:<EOL><INDENT>tempvar = self.get_temp_var()<EOL>self.add_def(<EOL>tempvar + "<STR_LIT>"<EOL>+ "<STR_LIT>".join(<EOL>kwargs + '<STR_LIT>' + name + '<STR_LIT>' + name + '<STR_LIT>' + kwargs + "<STR_LIT>"<EOL>for name in names<EOL>)<E... | Matches against kwargs. | f11260:c0:m21 |
def match_dict(self, tokens, item): | if len(tokens) == <NUM_LIT:1>:<EOL><INDENT>matches, rest = tokens[<NUM_LIT:0>], None<EOL><DEDENT>else:<EOL><INDENT>matches, rest = tokens<EOL><DEDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>")<EOL>if rest is None:<EOL><INDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>" + str(len(matches)))<EOL><DEDENT>if matc... | Matches a dictionary. | f11260:c0:m22 |
def assign_to_series(self, name, series_type, item): | if series_type == "<STR_LIT:(>":<EOL><INDENT>self.add_def(name + "<STR_LIT>" + item + "<STR_LIT:)>")<EOL><DEDENT>elif series_type == "<STR_LIT:[>":<EOL><INDENT>self.add_def(name + "<STR_LIT>" + item + "<STR_LIT:)>")<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", series_type)<EOL><DEDENT> | Assign name to item converted to the given series_type. | f11260:c0:m23 |
def match_sequence(self, tokens, item): | tail = None<EOL>if len(tokens) == <NUM_LIT:2>:<EOL><INDENT>series_type, matches = tokens<EOL><DEDENT>else:<EOL><INDENT>series_type, matches, tail = tokens<EOL><DEDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>")<EOL>if tail is None:<EOL><INDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>" + str(len(matches)))<EO... | Matches a sequence. | f11260:c0:m24 |
def match_iterator(self, tokens, item): | tail = None<EOL>if len(tokens) == <NUM_LIT:2>:<EOL><INDENT>_, matches = tokens<EOL><DEDENT>else:<EOL><INDENT>_, matches, tail = tokens<EOL><DEDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>")<EOL>if tail is None:<EOL><INDENT>itervar = self.get_temp_var()<EOL>self.add_def(itervar + "<STR_LIT>" + item + "<STR_LIT:)>"... | Matches a lazy list or a chain. | f11260:c0:m25 |
def match_star(self, tokens, item): | head_matches, last_matches = None, None<EOL>if len(tokens) == <NUM_LIT:1>:<EOL><INDENT>middle = tokens[<NUM_LIT:0>]<EOL><DEDENT>elif len(tokens) == <NUM_LIT:2>:<EOL><INDENT>if isinstance(tokens[<NUM_LIT:0>], str):<EOL><INDENT>middle, last_matches = tokens<EOL><DEDENT>else:<EOL><INDENT>head_matches, middle = tokens<EOL>... | Matches starred assignment. | f11260:c0:m26 |
def match_rsequence(self, tokens, item): | front, series_type, matches = tokens<EOL>self.add_check("<STR_LIT>" + item + "<STR_LIT>")<EOL>self.add_check("<STR_LIT>" + item + "<STR_LIT>" + str(len(matches)))<EOL>if front != wildcard:<EOL><INDENT>if len(matches):<EOL><INDENT>splice = "<STR_LIT>" + str(-len(matches)) + "<STR_LIT:]>"<EOL><DEDENT>else:<EOL><INDENT>sp... | Matches a reverse sequence. | f11260:c0:m27 |
def match_msequence(self, tokens, item): | series_type, head_matches, middle, _, last_matches = tokens<EOL>self.add_check("<STR_LIT>" + item + "<STR_LIT>")<EOL>self.add_check("<STR_LIT>" + item + "<STR_LIT>" + str(len(head_matches) + len(last_matches)))<EOL>if middle != wildcard:<EOL><INDENT>if len(head_matches) and len(last_matches):<EOL><INDENT>splice = "<STR... | Matches a middle sequence. | f11260:c0:m28 |
def match_string(self, tokens, item): | prefix, name = tokens<EOL>return self.match_mstring((prefix, name, None), item, use_bytes=prefix.startswith("<STR_LIT:b>"))<EOL> | Match prefix string. | f11260:c0:m29 |
def match_rstring(self, tokens, item): | name, suffix = tokens<EOL>return self.match_mstring((None, name, suffix), item, use_bytes=suffix.startswith("<STR_LIT:b>"))<EOL> | Match suffix string. | f11260:c0:m30 |
def match_mstring(self, tokens, item, use_bytes=None): | prefix, name, suffix = tokens<EOL>if use_bytes is None:<EOL><INDENT>if prefix.startswith("<STR_LIT:b>") or suffix.startswith("<STR_LIT:b>"):<EOL><INDENT>if prefix.startswith("<STR_LIT:b>") and suffix.startswith("<STR_LIT:b>"):<EOL><INDENT>use_bytes = True<EOL><DEDENT>else:<EOL><INDENT>raise CoconutDeferredSyntaxError("... | Match prefix and suffix string. | f11260:c0:m31 |
def match_const(self, tokens, item): | match, = tokens<EOL>if match in const_vars:<EOL><INDENT>self.add_check(item + "<STR_LIT>" + match)<EOL><DEDENT>else:<EOL><INDENT>self.add_check(item + "<STR_LIT>" + match)<EOL><DEDENT> | Matches a constant. | f11260:c0:m32 |
def match_var(self, tokens, item): | setvar, = tokens<EOL>if setvar != wildcard:<EOL><INDENT>if setvar in self.names:<EOL><INDENT>self.add_check(self.names[setvar] + "<STR_LIT>" + item)<EOL><DEDENT>else:<EOL><INDENT>self.add_def(setvar + "<STR_LIT>" + item)<EOL>self.names[setvar] = item<EOL><DEDENT><DEDENT> | Matches a variable. | f11260:c0:m33 |
def match_set(self, tokens, item): | match, = tokens<EOL>self.add_check("<STR_LIT>" + item + "<STR_LIT>")<EOL>self.add_check("<STR_LIT>" + item + "<STR_LIT>" + str(len(match)))<EOL>for const in match:<EOL><INDENT>self.add_check(const + "<STR_LIT>" + item)<EOL><DEDENT> | Matches a set. | f11260:c0:m34 |
def match_data(self, tokens, item): | if len(tokens) == <NUM_LIT:2>:<EOL><INDENT>data_type, matches = tokens<EOL>star_match = None<EOL><DEDENT>elif len(tokens) == <NUM_LIT:3>:<EOL><INDENT>data_type, matches, star_match = tokens<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>self.add_check("<STR_LIT>" + item + ... | Matches a data type. | f11260:c0:m35 |
def match_paren(self, tokens, item): | match, = tokens<EOL>return self.match(match, item)<EOL> | Matches a paren. | f11260:c0:m36 |
def match_trailer(self, tokens, item): | internal_assert(len(tokens) > <NUM_LIT:1> and len(tokens) % <NUM_LIT:2> == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>match, trailers = tokens[<NUM_LIT:0>], tokens[<NUM_LIT:1>:]<EOL>for i in range(<NUM_LIT:0>, len(trailers), <NUM_LIT:2>):<EOL><INDENT>op, arg = trailers[i], trailers[i + <NUM_LIT:1>]<EOL>if op == "<STR_LIT>":... | Matches typedefs and as patterns. | f11260:c0:m37 |
def match_and(self, tokens, item): | for match in tokens:<EOL><INDENT>self.match(match, item)<EOL><DEDENT> | Matches and. | f11260:c0:m38 |
def match_or(self, tokens, item): | for x in range(<NUM_LIT:1>, len(tokens)):<EOL><INDENT>self.duplicate().match(tokens[x], item)<EOL><DEDENT>with self.only_self():<EOL><INDENT>self.match(tokens[<NUM_LIT:0>], item)<EOL><DEDENT> | Matches or. | f11260:c0:m39 |
def match(self, tokens, item): | for flag, get_handler in self.matchers.items():<EOL><INDENT>if flag in tokens:<EOL><INDENT>return get_handler(self)(tokens, item)<EOL><DEDENT><DEDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL> | Performs pattern-matching processing. | f11260:c0:m40 |
def out(self): | out = "<STR_LIT>"<EOL>if self.use_sentinel:<EOL><INDENT>out += sentinel_var + "<STR_LIT>"<EOL><DEDENT>closes = <NUM_LIT:0><EOL>for checks, defs in self.checkdefs:<EOL><INDENT>if checks:<EOL><INDENT>out += "<STR_LIT>" + paren_join(checks, "<STR_LIT>") + "<STR_LIT>" + openindent<EOL>closes += <NUM_LIT:1><EOL><DEDENT>if d... | Return pattern-matching code. | f11260:c0:m41 |
def build(self, stmts=None, set_check_var=True, invert=False): | out = "<STR_LIT>"<EOL>if set_check_var:<EOL><INDENT>out += self.check_var + "<STR_LIT>"<EOL><DEDENT>out += self.out()<EOL>if stmts is not None:<EOL><INDENT>out += "<STR_LIT>" + ("<STR_LIT>" if invert else "<STR_LIT>") + self.check_var + "<STR_LIT::>" + "<STR_LIT:\n>" + openindent + "<STR_LIT>".join(stmts) + closeindent... | Construct code for performing the match then executing stmts. | f11260:c0:m42 |
def add_coconut_to_path(): | try:<EOL><INDENT>import coconut <EOL><DEDENT>except ImportError:<EOL><INDENT>sys.path.insert(<NUM_LIT:0>, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))<EOL><DEDENT> | Adds coconut to sys.path if it isn't there already. | f11262:m0 |
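
The body cells above flatten each function with `<EOL>`, `<INDENT>`, and `<DEDENT>` markers and anonymize literals as `<STR_LIT:…>`/`<NUM_LIT:…>` placeholders, with characters in string placeholders encoded as `U+XXXX` codepoints (e.g. `<STR_LIT:U+002C>` for a comma). Below is a minimal decoder sketch assuming those conventions and a 4-space indent; `decode_body`, `_unescape`, and `TAB` are illustrative names, not part of any dataset tooling, and bare `<STR_LIT>`/`<NUM_LIT>` placeholders are left in place since their original values are unrecoverable.

```python
import re

TAB = "    "  # assumed 4-space indent (the "tabideal" mentioned above)

def _unescape(text):
    """Turn U+XXXX codepoint escapes back into characters."""
    return re.sub(r"U\+([0-9A-Fa-f]{4,6})",
                  lambda m: chr(int(m.group(1), 16)), text)

def decode_body(body):
    """Rebuild approximate indented source from a flattened body cell."""
    depth, out = 0, []
    for line in body.split("<EOL>"):
        # <INDENT>/<DEDENT> markers prefix the line whose level they change.
        while line.startswith(("<INDENT>", "<DEDENT>")):
            if line.startswith("<INDENT>"):
                depth, line = depth + 1, line[len("<INDENT>"):]
            else:
                depth, line = max(0, depth - 1), line[len("<DEDENT>"):]
        # Restore placeholders whose value survives; bare <STR_LIT> and
        # <NUM_LIT> stubs carry no value and pass through unchanged.
        line = re.sub(r"<NUM_LIT:([^>]*)>", r"\1", line)
        line = re.sub(r"<STR_LIT:([^>]*)>",
                      lambda m: repr(_unescape(m.group(1))), line)
        line = line.strip()
        if line:
            out.append(TAB * depth + line)
    return "\n".join(out)

print(decode_body(
    "if x == <NUM_LIT:0>:<EOL><INDENT>sep = <STR_LIT:U+002C><EOL>"
    "<DEDENT>return sep<EOL>"
))
# if x == 0:
#     sep = ','
# return sep
```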
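
The id column appears to follow an `f<file>[:c<class>]:m<method>` scheme (e.g. `f11256:m34` for a module-level function, `f11260:c0:m33` for a method of class 0). A hypothetical helper to split it, assuming that inferred scheme holds across the dataset:

```python
def parse_id(sample_id):
    """Split an id like 'f11260:c0:m33' into (file, class_or_None, method).

    The f/c/m prefixes are inferred from the rows above; treat the
    scheme as an assumption, not a documented format.
    """
    parts = sample_id.split(":")
    file_id = int(parts[0].lstrip("f"))
    if len(parts) == 3:  # method defined inside a class
        return file_id, int(parts[1].lstrip("c")), int(parts[2].lstrip("m"))
    return file_id, None, int(parts[1].lstrip("m"))

assert parse_id("f11256:m34") == (11256, None, 34)
assert parse_id("f11260:c0:m33") == (11260, 0, 33)
```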