Dataset columns (string lengths):
signature: 8 to 3.44k
body: 0 to 1.41M
docstring: 1 to 122k
id: 5 to 17
def walk(self, into_past=<NUM_LIT:0>, into_future=<NUM_LIT:0>):
walked_range = []<EOL>for shift in range(-into_past, into_future):<EOL><INDENT>kwargs = dict(drip_model=self.drip_model,<EOL>name=self.name,<EOL>now_shift_kwargs={'<STR_LIT>': shift})<EOL>walked_range.append(self.__class__(**kwargs))<EOL><DEDENT>return walked_range<EOL>
Walk over a date range and create new instances of self with new ranges.
f11053:c1:m3
def apply_queryset_rules(self, qs):
clauses = {<EOL>'<STR_LIT>': [],<EOL>'<STR_LIT>': []}<EOL>for rule in self.drip_model.queryset_rules.all():<EOL><INDENT>clause = clauses.get(rule.method_type, clauses['<STR_LIT>'])<EOL>kwargs = rule.filter_kwargs(qs, now=self.now)<EOL>clause.append(Q(**kwargs))<EOL>qs = rule.apply_any_annotation(qs)<EOL><DEDENT>if clau...
First collect all filter/exclude kwargs and apply any annotations. Then apply all filters at once, and all excludes at once.
f11053:c1:m4
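The collect-then-apply pattern described in the docstring can be shown as a short sketch. Only django.db.models.Q is assumed from Django; the rule objects and their method_type / filter_kwargs attributes are hypothetical stand-ins for drip_model.queryset_rules, and annotations are omitted.

from django.db.models import Q

def apply_rules(qs, rules, now=None):
    # First collect Q objects per method type...
    clauses = {'filter': [], 'exclude': []}
    for rule in rules:
        clause = clauses.get(rule.method_type, clauses['filter'])
        clause.append(Q(**rule.filter_kwargs(qs, now=now)))
    # ...then apply all excludes at once and all filters at once.
    if clauses['exclude']:
        qs = qs.exclude(*clauses['exclude'])
    return qs.filter(*clauses['filter'])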
def run(self):
if not self.drip_model.enabled:<EOL><INDENT>return None<EOL><DEDENT>self.prune()<EOL>count = self.send()<EOL>return count<EOL>
Get the queryset, prune sent people, and send it.
f11053:c1:m6
def prune(self):
target_user_ids = self.get_queryset().values_list('<STR_LIT:id>', flat=True)<EOL>exclude_user_ids = SentDrip.objects.filter(date__lt=conditional_now(),<EOL>drip=self.drip_model,<EOL>user__id__in=target_user_ids).values_list('<STR_LIT>', flat=True)<EOL>self._queryset = self.get_queryset().exclude(id__in=exclude_user_ids...
Exclude all Users who already have a SentDrip.
f11053:c1:m7
def send(self):
if not self.from_email:<EOL><INDENT>self.from_email = getattr(settings, '<STR_LIT>', settings.DEFAULT_FROM_EMAIL)<EOL><DEDENT>MessageClass = message_class_for(self.drip_model.message_class)<EOL>count = <NUM_LIT:0><EOL>for user in self.get_queryset():<EOL><INDENT>message_instance = MessageClass(self, user)<EOL>try:<EOL>...
Send the message to each user in the queryset. Create a SentDrip for each user that gets a message. Returns the count of created SentDrips.
f11053:c1:m8
def queryset(self):
User = get_user_model()<EOL>return User.objects<EOL>
Returns a queryset of auth.User who meet the criteria of the drip. Alternatively, you could create Drips on the fly using a queryset builder from the admin interface...
f11053:c1:m9
def get_fields(Model, <EOL>parent_field="<STR_LIT>",<EOL>model_stack=None,<EOL>stack_limit=<NUM_LIT:2>,<EOL>excludes=['<STR_LIT>', '<STR_LIT>', '<STR_LIT>']):
out_fields = []<EOL>if model_stack is None:<EOL><INDENT>model_stack = []<EOL><DEDENT>if isinstance(Model, basestring):<EOL><INDENT>app_label, model_name = Model.split('<STR_LIT:.>')<EOL>Model = models.get_model(app_label, model_name)<EOL><DEDENT>fields = Model._meta.fields + Model._meta.many_to_many + Model._meta.get_a...
Given a Model, return a list of lists of strings with important stuff: ... ['test_user__user__customuser', 'customuser', 'User', 'RelatedObject'] ['test_user__unique_id', 'unique_id', 'TestUser', 'CharField'] ['test_user__confirmed', 'confirmed', 'TestUser', 'BooleanField'] ...
f11059:m0
def give_model_field(full_field, Model):
field_data = get_fields(Model, '<STR_LIT>', [])<EOL>for full_key, name, _Model, _ModelField in field_data:<EOL><INDENT>if full_key == full_field:<EOL><INDENT>return full_key, name, _Model, _ModelField<EOL><DEDENT><DEDENT>raise Exception('<STR_LIT>'.format(full_field, Model.__name__))<EOL>
Given a field_name and Model: "test_user__unique_id", <AchievedGoal> Returns "test_user__unique_id", "id", <Model>, <ModelField>
f11059:m1
def setUp(self):
self.User = get_user_model()<EOL>start = timezone.now() - timedelta(hours=<NUM_LIT:2>)<EOL>num_string = ['<STR_LIT>','<STR_LIT>','<STR_LIT>','<STR_LIT>','<STR_LIT>','<STR_LIT>','<STR_LIT>','<STR_LIT>','<STR_LIT>','<STR_LIT>']<EOL>for i, name in enumerate(num_string):<EOL><INDENT>user = self.User.objects.create(username...
Creates 20 users, half of which buy 25 credits a day while the other half buy none.
f11061:c1:m0
def get_version(package):
init_py = open(os.path.join(package, '<STR_LIT>')).read()<EOL>return re.search("<STR_LIT>", init_py, re.MULTILINE).group(<NUM_LIT:1>)<EOL>
Return package version as listed in `__version__` in `__init__.py`.
f11063:m0
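The filename and regex above are masked; a common form of this idiom, offered as an assumed reconstruction rather than the original literals, is:

import os
import re

def get_version(package):
    # Read the package's __init__.py and extract the __version__ string.
    with open(os.path.join(package, '__init__.py')) as f:
        init_py = f.read()
    return re.search("__version__ = ['\"]([^'\"]+)['\"]",
                     init_py, re.MULTILINE).group(1)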
def get_packages(package):
return [dirpath<EOL>for dirpath, dirnames, filenames in os.walk(package)<EOL>if os.path.exists(os.path.join(dirpath, '<STR_LIT>'))]<EOL>
Return root package and all sub-packages.
f11063:m1
def get_package_data(package):
walk = [(dirpath.replace(package + os.sep, '<STR_LIT>', <NUM_LIT:1>), filenames)<EOL>for dirpath, dirnames, filenames in os.walk(package)<EOL>if not os.path.exists(os.path.join(dirpath, '<STR_LIT>'))]<EOL>filepaths = []<EOL>for base, filenames in walk:<EOL><INDENT>filepaths.extend([os.path.join(base, filename)<EOL>for ...
Return all files under the root package that are not in a package themselves.
f11063:m2
def run_capture(out = []):
return lambda command, *args, **kwargs: out.append(command.strip())<EOL>
Helper for retrieving commands issued via env.run.
f11070:m0
def empty_copy():
source_path = os.path.join(env.current_release, "<STR_LIT:src>")<EOL>env.run("<STR_LIT>" % source_path)<EOL>env.run("<STR_LIT>" % source_path)<EOL>
A stub copy method that does nothing more than create a .txt file.
f11070:m2
@task<EOL>def backup_db(release=None, limit=<NUM_LIT:5>):
assert "<STR_LIT>" in env, "<STR_LIT>"<EOL>assert "<STR_LIT>" in env, "<STR_LIT>"<EOL>assert "<STR_LIT>" in env, "<STR_LIT>"<EOL>if not release:<EOL><INDENT>release = paths.get_current_release_name()<EOL><DEDENT>max_versions = limit+<NUM_LIT:1><EOL>if not release:<EOL><INDENT>logger.info("<STR_LIT>")<EOL>return<EOL><DE...
Back up the database and associate it with the current release.
f11074:m1
@task<EOL>def restore_db(release=None):
if not release:<EOL><INDENT>release = paths.get_current_release_name()<EOL><DEDENT>if not release:<EOL><INDENT>raise Exception("<STR_LIT>" % release)<EOL><DEDENT>backup_file = "<STR_LIT>" % release<EOL>backup_path = paths.get_backup_path(backup_file)<EOL>if not env.exists(backup_path):<EOL><INDENT>raise Exception("<STR...
Restores a backup for the given release; uses the current release by default.
f11074:m2
@task<EOL>def sync_local_to_remote(force="<STR_LIT>"):
_check_requirements()<EOL>if force != "<STR_LIT:yes>":<EOL><INDENT>message = "<STR_LIT>""<STR_LIT>" % (env.psql_db, env.local_psql_db)<EOL>answer = prompt(message, "<STR_LIT:y>")<EOL>if answer != "<STR_LIT:y>":<EOL><INDENT>logger.info("<STR_LIT>")<EOL>return<EOL><DEDENT><DEDENT>init_tasks() <EOL>local_file = "<STR_LIT...
Sync your local postgres database with the remote one. Example: fabrik prod sync_local_to_remote:force=yes
f11074:m3
@task<EOL>def sync_remote_to_local(force="<STR_LIT>"):
_check_requirements()<EOL>if force != "<STR_LIT:yes>":<EOL><INDENT>message = "<STR_LIT>""<STR_LIT>" % (env.local_psql_db, env.psql_db)<EOL>answer = prompt(message, "<STR_LIT:y>")<EOL>if answer != "<STR_LIT:y>":<EOL><INDENT>logger.info("<STR_LIT>")<EOL>return<EOL><DEDENT><DEDENT>init_tasks() <EOL>remote_file = "<STR_LI...
Sync your remote postgres database with the local one. Example: fabrik prod sync_remote_to_local
f11074:m4
@task<EOL>def sync_remote_to_local(force="<STR_LIT>"):
assert "<STR_LIT>" in env, "<STR_LIT>"<EOL>if force != "<STR_LIT:yes>":<EOL><INDENT>message = "<STR_LIT>""<STR_LIT>"<EOL>answer = prompt(message, "<STR_LIT:y>")<EOL>if answer != "<STR_LIT:y>":<EOL><INDENT>logger.info("<STR_LIT>")<EOL>return<EOL><DEDENT><DEDENT>init_tasks() <EOL>remote_file = "<STR_LIT>" % int(time.tim...
Replace your remote db with your local one. Example: sync_remote_to_local:force=yes
f11080:m0
@task<EOL>def backup_db(release=None, limit=<NUM_LIT:5>):
assert "<STR_LIT>" in env, "<STR_LIT>"<EOL>assert "<STR_LIT>" in env, "<STR_LIT>"<EOL>assert "<STR_LIT>" in env, "<STR_LIT>"<EOL>assert "<STR_LIT>" in env, "<STR_LIT>"<EOL>if not release:<EOL><INDENT>release = paths.get_current_release_name()<EOL><DEDENT>max_versions = limit+<NUM_LIT:1><EOL>if not release:<EOL><INDENT>...
Back up the database and associate it with the current release.
f11082:m0
@task<EOL>def restore_db(release=None):
assert "<STR_LIT>" in env, "<STR_LIT>"<EOL>assert "<STR_LIT>" in env, "<STR_LIT>"<EOL>assert "<STR_LIT>" in env, "<STR_LIT>"<EOL>assert "<STR_LIT>" in env, "<STR_LIT>"<EOL>if not release:<EOL><INDENT>release = paths.get_current_release_name()<EOL><DEDENT>if not release:<EOL><INDENT>raise Exception("<STR_LIT>" % release...
Restores a backup for the given release; uses the current release by default.
f11082:m1
def hook(name=None, priority=-<NUM_LIT:1>):
def _hook(hook_func):<EOL><INDENT>return register_hook(name, hook_func=hook_func, priority=priority)<EOL><DEDENT>return _hook<EOL>
Decorator that registers the decorated function as a hook with the given name and priority.
f11084:m0
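The body is a parametrized decorator that defers to register_hook. A hypothetical usage (the hook name and task body are invented for illustration):

@hook("before_deploy", priority=10)
def install_requirements():
    # Registered under "before_deploy"; runs when that hook is fired.
    env.run("pip install -r requirements.txt")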
def run_task(task):
if has_task(task):<EOL><INDENT>execute(task)<EOL><DEDENT>
A helper for running a fabric task, silently skipping it if it does not exist.
f11088:m0
def has_task(task):
return crawl(task, state.commands) is not None<EOL>
Checks whether a fabric task exists.
f11088:m1
def apply_settings():
prompts = {}<EOL>if "<STR_LIT>" in env:<EOL><INDENT>prompts["<STR_LIT>"] = env.git_passphrase<EOL><DEDENT>return settings(prompts=prompts)<EOL>
Applies additional settings before the clone takes place.
f11104:m1
@runs_once<EOL>def init_tasks():
<EOL>if "<STR_LIT>" not in env:<EOL><INDENT>env.exists = exists<EOL><DEDENT>if "<STR_LIT>" not in env:<EOL><INDENT>env.run = run<EOL><DEDENT>if "<STR_LIT>" not in env:<EOL><INDENT>env.cd = cd<EOL><DEDENT>if "<STR_LIT>" not in env:<EOL><INDENT>env.max_releases = <NUM_LIT:5><EOL><DEDENT>if "<STR_LIT>" in env:<EOL><INDENT...
Performs basic setup before any of the tasks are run. All tasks need to run this before continuing. It only fires once.
f11105:m1
@task<EOL>def setup():
init_tasks()<EOL>run_hook("<STR_LIT>")<EOL>env.run("<STR_LIT>" % (paths.get_shared_path()))<EOL>env.run("<STR_LIT>" % (paths.get_shared_path()))<EOL>env.run("<STR_LIT>" % (paths.get_backup_path()))<EOL>env.run("<STR_LIT>" % (paths.get_backup_path()))<EOL>env.run("<STR_LIT>" % (paths.get_upload_path()))<EOL>env.run("<ST...
Creates the shared and upload directories, then fires the setup hook for recipes.
f11105:m2
@task<EOL>def deploy():
init_tasks()<EOL>if not has_hook("<STR_LIT>"):<EOL><INDENT>return report("<STR_LIT>")<EOL><DEDENT>if not env.exists(paths.get_shared_path()):<EOL><INDENT>return report("<STR_LIT>")<EOL><DEDENT>run_hook("<STR_LIT>")<EOL>release_name = int(time.time()*<NUM_LIT:1000>)<EOL>release_path = paths.get_releases_path(release_nam...
Performs a deploy by invoking copy, then generating the next release name and invoking the necessary hooks.
f11105:m3
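A rough sketch of the timestamped-release naming used by the deploy task; the directory layout is an assumption and the helpers from fabrik's paths module are replaced with plain strings.

import time

releases_path = "/srv/app/releases"          # assumed layout
release_name = int(time.time() * 1000)       # millisecond timestamp, as in the body
release_path = "{}/{}".format(releases_path, release_name)
# After the copy hook has populated release_path, deploy points the
# "current" symlink at it (roughly: ln -sfn <release_path> /srv/app/current).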
@task<EOL>def rollback():
init_tasks()<EOL>run_hook("<STR_LIT>")<EOL>current_release = paths.get_current_release_path()<EOL>if current_release:<EOL><INDENT>env.run("<STR_LIT>" % current_release)<EOL><DEDENT>old_release = paths.get_current_release_name()<EOL>if old_release:<EOL><INDENT>paths.symlink(paths.get_source_path(old_release),<EOL>paths....
Rolls back to the previous release.
f11105:m4
@task<EOL>def cleanup_releases(limit=<NUM_LIT:5>):
init_tasks()<EOL>max_versions = limit + <NUM_LIT:1><EOL>env.run("<STR_LIT>" % (<EOL>paths.get_releases_path(),<EOL>max_versions)<EOL>)<EOL>
Removes older releases.
f11105:m5
@task<EOL>def debug():
from fabric.network import ssh<EOL>init_tasks()<EOL>ssh.util.log_to_file("<STR_LIT>", <NUM_LIT:10>)<EOL>
Outputs debug information; needs to run before the task. Example: fab prod debug deploy.
f11105:m6
def validate(yaml, raise_exc=True):
data = read_yaml(yaml)<EOL>validator = get_validator()<EOL>errors = list(validator.iter_errors(data))<EOL>if errors and raise_exc:<EOL><INDENT>raise ValidationErrors(errors)<EOL><DEDENT>return errors<EOL>
Validate the given YAML document and return a list of errors. :param yaml: YAML data (either a string, a stream, or pre-parsed Python dict/list) :type yaml: list|dict|str|file :param raise_exc: Whether to raise a meta-exception containing all discovered errors after validation. :type raise_exc: bool :return: A list of...
f11135:m2
@classmethod<EOL><INDENT>def parse(cls, data):<DEDENT>
parsers = {<EOL>'<STR_LIT>': ([], Step.parse),<EOL>'<STR_LIT>': ([], Endpoint.parse),<EOL>}<EOL>for datum in data:<EOL><INDENT>assert isinstance(datum, dict)<EOL>for type, (items, parse) in parsers.items():<EOL><INDENT>if type in datum:<EOL><INDENT>items.append(parse(datum[type]))<EOL>break<EOL><DEDENT><DEDENT>else:<EO...
Parse a Config structure out of a Python dict (that's likely deserialized from YAML). :param data: Config-y dict :type data: dict :return: Config object :rtype: valohai_yaml.objs.Config
f11136:c0:m1
def get_step_by(self, **kwargs):
if not kwargs:<EOL><INDENT>return None<EOL><DEDENT>for index, step in enumerate(self.steps.values()):<EOL><INDENT>extended_step = dict(step.serialize(), index=index)<EOL>if all(item in extended_step.items() for item in kwargs.items()):<EOL><INDENT>return step<EOL><DEDENT><DEDENT>return None<EOL>
Get the first step that matches all the passed named arguments. Has special argument index not present in the real step. Usage: config.get_step_by(name='not found') config.get_step_by(index=0) config.get_step_by(name="greeting", command='echo HELLO MORDOR') :param kwargs: :return: Step object or None :rt...
f11136:c0:m4
def build_parameters(self):
param_bits = []<EOL>for name in self.parameters:<EOL><INDENT>param_bits.extend(self.build_parameter_by_name(name) or [])<EOL><DEDENT>return param_bits<EOL>
Build the CLI command line from the parameter values. :return: list of CLI strings -- not escaped! :rtype: list[str]
f11138:c0:m1
def get_data(self):
data = vars(self).copy()<EOL>data.pop('<STR_LIT>', None)<EOL>return data<EOL>
Get data for serialization.
f11140:c0:m0
def get_parameter_defaults(self, include_flags=True):
return {<EOL>name: parameter.default<EOL>for (name, parameter)<EOL>in self.parameters.items()<EOL>if parameter.default is not None and (include_flags or parameter.type != '<STR_LIT>')<EOL>}<EOL>
Get a dict mapping parameter names to their defaults (if set). :rtype: dict[str, object]
f11144:c0:m3
def build_command(self, parameter_values, command=None):
command = (command or self.command)<EOL>values = dict(self.get_parameter_defaults(include_flags=False), **parameter_values)<EOL>parameter_map = ParameterMap(parameters=self.parameters, values=values)<EOL>return build_command(command, parameter_map)<EOL>
Build the command for this step using the given parameter values. Even if the original configuration only declared a single `command`, this function will return a list of shell commands. It is the caller's responsibility to concatenate them, likely using the semicolon or double ampersands. It is also possible to ove...
f11144:c0:m5
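As the docstring stresses, the result is always a list of shell commands that the caller joins. A hypothetical usage (the parameter name is invented; `step` is assumed to be a parsed Step instance):

commands = step.build_command({'learning-rate': 0.01})
# Concatenate however the execution environment expects, e.g.:
shell_line = ' && '.join(commands)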
def validate(self, value):
errors = []<EOL>value = self._validate_type(value, errors)<EOL>self._validate_value(value, errors)<EOL>if errors:<EOL><INDENT>raise ValidationErrors(errors)<EOL><DEDENT>return value<EOL>
Validate (and possibly typecast) the given parameter value value. :param value: Parameter value :return: Typecast parameter value :raises ValidationErrors: if there were validation errors
f11147:c0:m4
def format_cli(self, value):
if value is None or (self.type == '<STR_LIT>' and not value):<EOL><INDENT>return None<EOL><DEDENT>pass_as_bits = text_type(self.pass_as or self.default_pass_as).split()<EOL>env = dict(name=self.name, value=value, v=value)<EOL>return [bit.format(**env) for bit in pass_as_bits]<EOL>
Build a single parameter argument. :return: list of CLI strings -- not escaped. If the parameter should not be expressed, returns None. :rtype: list[str]|None
f11147:c0:m6
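The pass_as interpolation can be shown standalone; the template string and parameter name below are assumptions, mirroring only the str.format call in the body.

pass_as = '--{name}={value}'                     # assumed template
env = dict(name='learning-rate', value=0.01, v=0.01)
bits = [bit.format(**env) for bit in pass_as.split()]
# bits == ['--learning-rate=0.01']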
def style(text, fg=None, bg=None, bold=None, dim=None, underline=None, <EOL>blink=None, reverse=None, reset=True):
bits = []<EOL>if fg:<EOL><INDENT>try:<EOL><INDENT>bits.append('<STR_LIT>' % (_ansi_colors.index(fg) + <NUM_LIT:30>))<EOL><DEDENT>except ValueError:<EOL><INDENT>raise TypeError('<STR_LIT>' % fg)<EOL><DEDENT><DEDENT>if bg:<EOL><INDENT>try:<EOL><INDENT>bits.append('<STR_LIT>' % (_ansi_colors.index(bg) + <NUM_LIT>))<EOL><D...
Styles a text with ANSI styles and returns the new string.
f11150:m0
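A minimal, self-contained sketch of the ANSI styling performed above; the color table and SGR codes follow the standard ANSI conventions rather than the masked literals.

_ansi_colors = ('black', 'red', 'green', 'yellow',
                'blue', 'magenta', 'cyan', 'white')

def style(text, fg=None, bg=None, bold=None, reset=True):
    bits = []
    if fg:
        bits.append('\x1b[%dm' % (_ansi_colors.index(fg) + 30))  # foreground: 30-37
    if bg:
        bits.append('\x1b[%dm' % (_ansi_colors.index(bg) + 40))  # background: 40-47
    if bold is not None:
        bits.append('\x1b[%dm' % (1 if bold else 22))
    bits.append(text)
    if reset:
        bits.append('\x1b[0m')                                   # reset attributes
    return ''.join(bits)

print(style('error', fg='red', bold=True))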
def listify(value):
if value is None:<EOL><INDENT>return []<EOL><DEDENT>if isinstance(value, (list, tuple)):<EOL><INDENT>return list(value)<EOL><DEDENT>return [value]<EOL>
Wrap the given value into a list, with the below provisions: * If the value is a list or a tuple, it's coerced into a new list. * If the value is None, an empty list is returned. * Otherwise, a single-element list is returned, containing the value. :param value: A value. :return: a list! :rtype: list
f11152:m1
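The docstring's provisions map directly onto a few doctest-style calls:

>>> listify(None)
[]
>>> listify((1, 2))
[1, 2]
>>> listify(['a'])
['a']
>>> listify('x')
['x']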
def lint_file(file_path):
with open(file_path, '<STR_LIT:r>') as yaml:<EOL><INDENT>try:<EOL><INDENT>return lint(yaml)<EOL><DEDENT>except Exception as e:<EOL><INDENT>lr = LintResult()<EOL>lr.add_error('<STR_LIT>' % e, exception=e)<EOL>return lr<EOL><DEDENT><DEDENT>
Validate & lint `file_path` and return a LintResult. :param file_path: YAML filename :type file_path: str :return: LintResult object
f11153:m0
def build_command(command, parameter_map):
if isinstance(parameter_map, list): <EOL><INDENT>parameter_map = LegacyParameterMap(parameter_map)<EOL><DEDENT>out_commands = []<EOL>for command in listify(command):<EOL><INDENT>if interpolable_re.search(command):<EOL><INDENT>try:<EOL><INDENT>command = interpolable_re.sub(<EOL>lambda match: _replace_interpolation(para...
Build command line(s) using the given parameter map. Even if passed a single `command`, this function will return a list of shell commands. It is the caller's responsibility to concatenate them, likely using the semicolon or double ampersands. :param command: The command to interpolate params into. :type command...
f11154:m2
def parse(yaml, validate=True):
data = read_yaml(yaml)<EOL>if validate: <EOL><INDENT>from .validation import validate<EOL>validate(data, raise_exc=True)<EOL><DEDENT>return Config.parse(data)<EOL>
Parse the given YAML data into a `Config` object, optionally validating it first. :param yaml: YAML data (either a string, a stream, or pre-parsed Python dict/list) :type yaml: list|dict|str|file :param validate: Whether to validate the data before attempting to parse it. :type validate: bool :return: Config object :r...
f11156:m0
def read_config(config, prefix):
<EOL>suffixes = ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>')<EOL>config_server, config_user, config_password, config_token, config_secret, config_consumer, config_cert = [<EOL>config.get('<STR_LIT>'.format(prefix, suffix)) for suffix in suffixes<EOL>]<EOL>result = dict(opt...
Return a jira.client.JIRA.__init__() compatible dictionary from data in the Flask config. Generate a dictionary compatible with jira.client.JIRA.__init__() keyword arguments from data in the Flask application's configuration values relevant to JIRA. If both basic and OAuth settings are specified, OAuth aut...
f11157:m0
def __init__(self, app=None, config_prefix=None):
self.original_kill_session = self.kill_session<EOL>self.kill_session = self._fake_kill_session<EOL>if app is not None:<EOL><INDENT>self.init_app(app, config_prefix)<EOL><DEDENT>
If app argument provided then initialize JIRA using application config values. If no app argument provided you should do initialization later with init_app method. Keyword arguments: app -- Flask application instance. config_prefix -- Prefix used in config key names in the Flask app's ...
f11157:c1:m0
def _fake_kill_session(self):
return self<EOL>
Does nothing. Used to temporarily override self.kill_session() in self.__init__(). JIRA calls self.kill_session() even when no session was created.
f11157:c1:m1
def init_app(self, app, config_prefix=None):
<EOL>self.kill_session = self.original_kill_session<EOL>config_prefix = (config_prefix or '<STR_LIT>').rstrip('<STR_LIT:_>').upper()<EOL>if not hasattr(app, '<STR_LIT>'):<EOL><INDENT>app.extensions = dict()<EOL><DEDENT>if config_prefix.lower() in app.extensions:<EOL><INDENT>raise ValueError('<STR_LIT>'.format(config_pr...
Actual method to read JIRA settings from app configuration and initialize the JIRA instance. Positional arguments: app -- Flask application instance. Keyword arguments: config_prefix -- Prefix used in config key names in the Flask app's configuration. Useful for applications which ...
f11157:c1:m2
def get_metadata(main_file):
with open(os.path.join(HERE, '<STR_LIT>'), encoding='<STR_LIT:utf-8>') as f:<EOL><INDENT>long_description = f.read()<EOL><DEDENT>with open(os.path.join(HERE, main_file), encoding='<STR_LIT:utf-8>') as f:<EOL><INDENT>lines = [l.strip() for l in f if l.startswith('<STR_LIT>')]<EOL><DEDENT>metadata = ast.literal_eval("<ST...
Get metadata about the package/module. Positional arguments: main_file -- python file path within `HERE` which has __author__ and the others defined as global variables. Returns: Dictionary to be passed into setuptools.setup().
f11161:m0
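The ast.literal_eval trick in the body can be sketched without the file handling; the sample source and variable names are invented for illustration.

import ast

source = "__author__ = 'Jane Doe'\n__version__ = '1.0.0'\n"
lines = [l.strip() for l in source.splitlines() if l.startswith('__')]
# Build a dict literal from the "__name__ = value" pairs and parse it safely.
metadata = ast.literal_eval(
    '{' + ', '.join("'{}': {}".format(*l.split(' = ', 1)) for l in lines) + '}')
# metadata == {'__author__': 'Jane Doe', '__version__': '1.0.0'}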
def __call__(self, *args, **kwargs):
return self.trigger(*args, **kwargs)<EOL>
Execute all event handlers using obj.trigger() or just obj().
f11163:c0:m1
@property<EOL><INDENT>def handlers(self):<DEDENT>
if not hasattr(self, '<STR_LIT>'): <EOL><INDENT>self._handlers = set()<EOL><DEDENT>return self._handlers<EOL>
Return all event handlers.
f11163:c0:m2
def on(self, handler):
if not hasattr(handler, '<STR_LIT>'):<EOL><INDENT>raise TypeError('<STR_LIT>')<EOL><DEDENT>self.handlers.add(handler)<EOL>
Attach a handler (any Python callable) for the event.
f11163:c0:m3
def off(self, handler):
self.handlers.remove(handler)<EOL>
Detach a handler from the event.
f11163:c0:m4
def trigger(self, *args, **kwargs):
for h in self.handlers:<EOL><INDENT>h(*args, **kwargs)<EOL><DEDENT>
Execute the handlers with a message, if any.
f11163:c0:m5
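Putting the event-object pieces above together gives a small self-contained sketch; attribute names such as _handlers are assumptions based on the masked bodies.

class Event(object):
    @property
    def handlers(self):
        # Lazily create the handler set on first access.
        if not hasattr(self, '_handlers'):
            self._handlers = set()
        return self._handlers

    def on(self, handler):
        if not callable(handler):
            raise TypeError('handler must be callable')
        self.handlers.add(handler)

    def off(self, handler):
        self.handlers.remove(handler)

    def trigger(self, *args, **kwargs):
        for h in self.handlers:
            h(*args, **kwargs)

    __call__ = trigger            # obj(...) is the same as obj.trigger(...)

clicked = Event()
clicked.on(lambda name: print('hello', name))
clicked('world')                  # prints "hello world"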
@property<EOL><INDENT>def events(self):<DEDENT>
if not hasattr(self, '<STR_LIT>'):<EOL><INDENT>self._events = {}<EOL><DEDENT>return self._events<EOL>
Return all events of the observable.
f11163:c1:m0
def on(self, event, handler=None):
if isinstance(event, str) and '<STR_LIT:U+0020>' in event: <EOL><INDENT>self.on(event.split('<STR_LIT:U+0020>'), handler)<EOL><DEDENT>elif isinstance(event, list): <EOL><INDENT>for each in event:<EOL><INDENT>self.on(each, handler)<EOL><DEDENT><DEDENT>elif isinstance(event, dict): <EOL><INDENT>for key, value in event...
Create, add or update an event with a handler or more attached.
f11163:c1:m1
def off(self, event, handler=None):
if handler:<EOL><INDENT>self.events[event].off(handler)<EOL><DEDENT>else:<EOL><INDENT>del self.events[event]<EOL>delattr(self, event)<EOL><DEDENT>
Remove an event or a handler from it.
f11163:c1:m2
def trigger(self, *args, **kargs):
event = args[<NUM_LIT:0>]<EOL>if isinstance(event, str) and '<STR_LIT:U+0020>' in event:<EOL><INDENT>event = event.split('<STR_LIT:U+0020>') <EOL><DEDENT>if isinstance(event, list): <EOL><INDENT>for each in event:<EOL><INDENT>self.events[each].trigger(*args[<NUM_LIT:1>:], **kargs)<EOL><DEDENT><DEDENT>else:<EOL><INDEN...
Execute all event handlers with optional arguments for the observable.
f11163:c1:m3
def get_handler(progname, fmt=None, datefmt=None, project_id=None,<EOL>credentials=None, debug_thread_worker=False, **_):
builder = CloudLoggingHandlerBuilder(<EOL>progname, fmt=fmt, datefmt=datefmt, project_id=project_id,<EOL>credentials=credentials, debug_thread_worker=debug_thread_worker)<EOL>return builder.get_handler()<EOL>
Helper function to create a Stackdriver handler. See `ulogger.stackdriver.CloudLoggingHandlerBuilder` for arguments and supported keyword arguments. Returns: (obj): Instance of `google.cloud.logging.handlers. CloudLoggingHandler`
f11172:m0
def _get_metadata(self, data_type, key, timeout=<NUM_LIT:5>):
endpoint_url = self.METADATA_ENDPOINT.format(<EOL>data_type=data_type, key=key)<EOL>try:<EOL><INDENT>rsp = requests.get(<EOL>endpoint_url,<EOL>headers={'<STR_LIT>': '<STR_LIT>'},<EOL>timeout=timeout)<EOL>rsp.raise_for_status()<EOL><DEDENT>except requests.exceptions.RequestException as e:<EOL><INDENT>raise exceptions.Go...
Get host instance metadata (only works on GCP hosts). More details about instance metadata: https://cloud.google.com/compute/docs/storing-retrieving-metadata Args: data_type (str): Type of metadata to fetch. Eg. project, instance key (str): Key of metada...
f11172:c0:m1
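The lookup relies on the GCE metadata server; the endpoint URL and header below are the documented GCE conventions, not values recovered from the masked literals.

import requests

def get_gce_metadata(data_type, key, timeout=5):
    # Only works on a GCE host; the metadata server is unreachable elsewhere.
    url = ('http://metadata.google.internal/computeMetadata/v1/'
           '{}/{}'.format(data_type, key))
    rsp = requests.get(url, headers={'Metadata-Flavor': 'Google'}, timeout=timeout)
    rsp.raise_for_status()
    return rsp.text

# e.g. get_gce_metadata('instance', 'zone') or get_gce_metadata('project', 'project-id')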
def _create_gcl_resource(self):
return gcl_resource.Resource('<STR_LIT>', {<EOL>'<STR_LIT>': self.project_id,<EOL>'<STR_LIT>': self.instance_id,<EOL>'<STR_LIT>': self.zone<EOL>})<EOL>
Create a configured Resource object. The logging.resource.Resource object enables GCL to filter and bucket incoming logs according to which resource (host) they're coming from. Returns: (obj): Instance of `google.cloud.logging.resource.Resource`
f11172:c0:m2
def get_formatter(self):
if not self.fmt:<EOL><INDENT>self.fmt = ('<STR_LIT>'<EOL>'<STR_LIT>').format(<EOL>host=self.hostname, progname=self.progname)<EOL><DEDENT>if not self.datefmt:<EOL><INDENT>self.datefmt = '<STR_LIT>'<EOL><DEDENT>return logging.Formatter(fmt=self.fmt, datefmt=self.datefmt)<EOL>
Create a fully configured `logging.Formatter` Example of formatted log message: 2017-08-27T20:19:24.424 cpm-example-gew1 progname (23123): hello Returns: (obj): Instance of `logging.Formatter`
f11172:c0:m3
def _set_worker_thread_level(self):
bthread_logger = logging.getLogger(<EOL>'<STR_LIT>')<EOL>if self.debug_thread_worker:<EOL><INDENT>bthread_logger.setLevel(logging.DEBUG)<EOL><DEDENT>else:<EOL><INDENT>bthread_logger.setLevel(logging.INFO)<EOL><DEDENT>
Sets the logging level of the background logging thread to DEBUG or INFO.
f11172:c0:m4
def get_handler(self):
gcl_client = gcl_logging.Client(<EOL>project=self.project_id, credentials=self.credentials)<EOL>handler = gcl_handlers.CloudLoggingHandler(<EOL>gcl_client,<EOL>resource=self.resource,<EOL>labels={<EOL>'<STR_LIT>': self.instance_id,<EOL>'<STR_LIT>': self.project_id,<EOL>'<STR_LIT>': self.zone,<EOL>'<STR_LIT>': self.host...
Create a fully configured CloudLoggingHandler. Returns: (obj): Instance of `google.cloud.logging.handlers. CloudLoggingHandler`
f11172:c0:m5
def get_handler(progname, address=None, proto=None, facility=None,<EOL>fmt=None, datefmt=None, **_):
builder = SyslogHandlerBuilder(<EOL>progname, address=address, proto=proto, facility=facility,<EOL>fmt=fmt, datefmt=datefmt)<EOL>return builder.get_handler()<EOL>
Helper function to create a Syslog handler. See `ulogger.syslog.SyslogHandlerBuilder` for arguments and supported keyword arguments. Returns: (obj): Instance of `logging.SysLogHandler`
f11173:m0
def _setup_default_handler(progname, fmt=None, datefmt=None, **_):
handler = logging.StreamHandler()<EOL>if not fmt:<EOL><INDENT>fmt_prefix = '<STR_LIT>'<EOL>fmt_suffix = '<STR_LIT>' + '<STR_LIT>'<EOL>fmt = fmt_prefix + progname + fmt_suffix<EOL><DEDENT>if not datefmt:<EOL><INDENT>datefmt = '<STR_LIT>'<EOL><DEDENT>formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)<EOL>handler.set...
Create a Stream handler (default handler). Args: progname (str): Name of program. fmt (:obj:`str`, optional): Desired log format if different than the default; uses the same formatting string options supported in the stdlib's `logging` module. datefmt (:obj:`str`, op...
f11175:m0
def setup_logging(progname, level, handlers, **kwargs):
for h in handlers:<EOL><INDENT>if h == '<STR_LIT>':<EOL><INDENT>handler = _setup_default_handler(progname, **kwargs)<EOL><DEDENT>else:<EOL><INDENT>handler_module_path = '<STR_LIT>'.format(h)<EOL>try:<EOL><INDENT>handler_module = import_module(<EOL>handler_module_path, package='<STR_LIT>')<EOL><DEDENT>except ImportError...
Setup logging to stdout (stream), syslog, or stackdriver. Attaches handler(s) and sets log level to the root logger. Example usage: import logging from ulogger import setup_logging setup_logging('my_awesome_program', 'INFO', ['stream']) logging.info('ohai') Args: ...
f11175:m1
def read(*filenames, **kwargs):
encoding = kwargs.get('<STR_LIT>', '<STR_LIT:utf-8>')<EOL>sep = kwargs.get('<STR_LIT>', '<STR_LIT:\n>')<EOL>buf = []<EOL>for fl in filenames:<EOL><INDENT>with codecs.open(os.path.join(HERE, fl), '<STR_LIT:rb>', encoding) as f:<EOL><INDENT>buf.append(f.read())<EOL><DEDENT><DEDENT>return sep.join(buf)<EOL>
Build an absolute path from ``*filenames``, and return contents of resulting file. Defaults to UTF-8 encoding.
f11177:m0
def find_meta(meta):
re_str = r"<STR_LIT>".format(meta=meta)<EOL>meta_match = re.search(re_str, META_FILE, re.M)<EOL>if meta_match:<EOL><INDENT>return meta_match.group(<NUM_LIT:1>)<EOL><DEDENT>raise RuntimeError('<STR_LIT>'.format(meta=meta))<EOL>
Extract __*meta*__ from META_FILE.
f11177:m1
def reformat_pattern(pattern, compile=False):
<EOL>rex_pattern = re.sub(r'<STR_LIT>', '<STR_LIT>', pattern)<EOL>rex_pattern = re.sub(r'<STR_LIT>', '<STR_LIT>', rex_pattern)<EOL>rex_pattern = re.sub(r'<STR_LIT>', '<STR_LIT>', rex_pattern)<EOL>rex_pattern = re.sub(r'<STR_LIT>', '<STR_LIT>', rex_pattern)<EOL>rex_pattern = re.sub(r'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>rex...
Apply the filters on the user pattern to generate a new regular expression pattern. A user-provided variable should start with a letter, can be alphanumeric, and can contain underscores.
f11180:m0
def match_string(pattern, search_string):
rexobj = REX(pattern, None)<EOL>rexpatstr = reformat_pattern(pattern)<EOL>rexpat = re.compile(rexpatstr)<EOL>rexobj.rex_patternstr = rexpatstr<EOL>rexobj.rex_pattern = rexpat<EOL>line_count = <NUM_LIT:1><EOL>for line in search_string.splitlines():<EOL><INDENT>line = line.strip()<EOL>mobj = rexpat.match(line)<EOL>if mob...
Match a pattern in a string.
f11180:m1
def populate_resobj(rexobj, mobj, loc):
resobj = REXResult(mobj, loc)<EOL>rexobj.matches.append(resobj)<EOL>rexobj.res_count += <NUM_LIT:1><EOL>
Populate the result object and append it to the rexobj results.
f11180:m2
def match_file(pattern, filename):
<EOL>if pattern is None:<EOL><INDENT>return None<EOL><DEDENT>if os.stat(filename).st_size == <NUM_LIT:0>:<EOL><INDENT>return None<EOL><DEDENT>rexobj = REX(pattern, filename)<EOL>rexpatstr = reformat_pattern(pattern)<EOL>rexpat = re.compile(rexpatstr)<EOL>rexobj.rex_patternstr = rexpatstr<EOL>rexobj.rex_pattern = rexpat...
The function will match a pattern in a file and return a rex object, which will have all the matches found in the file.
f11180:m3
def parse_lrvalue_string(search_string,<EOL>delimiter="<STR_LIT::>"):
mac_search_pattern = r"<STR_LIT>" % delimiter<EOL>search_pattern = r"<STR_LIT>" % delimiter<EOL>rexdict = {}<EOL>for line in search_string.splitlines():<EOL><INDENT>line = line.strip()<EOL>mobj = re.match(mac_search_pattern, line)<EOL>if mobj:<EOL><INDENT>key = mobj.group(<NUM_LIT:1>).lower()<EOL>key = "<STR_LIT:_>".jo...
The function takes a multi-line output/string with the format "name/descr : value" and converts it to a dictionary of key/value pairs, where the key is built from the name/descr part and the value is the value part. e.g. "Serial Number: FCH1724V1GT" will be translated to dict['serial_number'] = "FCH1724V1GT"
f11180:m4
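A minimal sketch of the name/value parsing described above; the regular expression is an assumption (the originals are masked) and the MAC-address special case is omitted.

import re

def parse_lrvalue_string(text, delimiter=':'):
    out = {}
    pattern = r'\s*(.+?)\s*%s\s*(.*)' % re.escape(delimiter)
    for line in text.splitlines():
        mobj = re.match(pattern, line.strip())
        if mobj:
            key = '_'.join(mobj.group(1).lower().split())
            out[key] = mobj.group(2)
    return out

parse_lrvalue_string("Serial Number: FCH1724V1GT")
# -> {'serial_number': 'FCH1724V1GT'}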
def parse_multi_lrvalue_string(search_string, split_string,<EOL>delimiter="<STR_LIT::>"):
dictlist = []<EOL>for out in search_string.split(split_string):<EOL><INDENT>tdict = parse_lrvalue_string(split_string + out,<EOL>delimiter=delimiter)<EOL>dictlist.append(tdict)<EOL><DEDENT>return dictlist<EOL>
The function is an extension of the parse_lrvalue_string() API. The function takes a multi-line output/string of the format "Category: xyz name: foo id: bar Category: abc name: foox id: barx : " It splits the output based on the splitstring passed as argument (eg "Category"), and converts the individual li...
f11180:m5
def parse_tabular_string(search_string,<EOL>header_keys,<EOL>delimiter=None,<EOL>merge_list=None):
first_line = True<EOL>parsed_results = []<EOL>for line in search_string.splitlines():<EOL><INDENT>if first_line:<EOL><INDENT>first_line = False<EOL><DEDENT>else:<EOL><INDENT>result = {}<EOL>row = line.split()<EOL>if merge_list:<EOL><INDENT>for mergeset in merge_list:<EOL><INDENT>fidx = mergeset[<NUM_LIT:0>]<EOL>lidx = ...
Given a string in a tabular format, parse it and return a dictionary @args: search_string: This is a string in tabular format (e.g.: output of df command) header_keys: This is a list of strings for the headers. delimiter(optional): Default is None, which translates to spaces merge_list(optional): In...
f11180:m6
def dump_rexobj_results(rexobj, options=None):
print(("<STR_LIT:->" * <NUM_LIT>))<EOL>print(("<STR_LIT>", rexobj.res_count))<EOL>matches = rexobj.matches<EOL>for match in matches:<EOL><INDENT>print(("<STR_LIT>", match.loc, "<STR_LIT>"))<EOL>for key in list(match.named_groups.keys()):<EOL><INDENT>print(("<STR_LIT>" %<EOL>(key, match.named_groups[key])))<EOL><DEDENT>...
Print all the results.
f11180:m7
def get_match_value(rexobj, key, index=<NUM_LIT:0>):
if rexobj is None:<EOL><INDENT>return None<EOL><DEDENT>if rexobj.res_count == <NUM_LIT:0>:<EOL><INDENT>return None<EOL><DEDENT>try:<EOL><INDENT>return rexobj.matches[index].named_groups[key]<EOL><DEDENT>except IndexError:<EOL><INDENT>return None<EOL><DEDENT>except KeyError:<EOL><INDENT>return None<EOL><DEDENT>
Return the matched value for the given key from a specific match in the results.
f11180:m8
def __init__(self, pattern, filename=None):
self.user_pattern = pattern<EOL>self.search_file = filename<EOL>self.rex_patternstr = None<EOL>self.rex_pattern = None<EOL>self.matches = []<EOL>self.res_count = <NUM_LIT:0><EOL>
Initialization.
f11180:c0:m0
def __init__(self, reobj, loc):
self.reobj = reobj<EOL>self.loc = loc<EOL>self.named_groups = reobj.groupdict()<EOL>
Initialization.
f11180:c1:m0
def diff(before, after, check_modified=False):
<EOL>if len(before) == <NUM_LIT:0>:<EOL><INDENT>return [<EOL>{'<STR_LIT:state>': '<STR_LIT>', '<STR_LIT:value>': v}<EOL>for v in after<EOL>]<EOL><DEDENT>elif len(after) == <NUM_LIT:0>:<EOL><INDENT>return [<EOL>{'<STR_LIT:state>': '<STR_LIT>', '<STR_LIT:value>': v}<EOL>for v in before<EOL>]<EOL><DEDENT>grid = create_gri...
Diff two sequences of comparable objects. The result of this function is a list of dictionaries containing values in ``before`` or ``after`` with a ``state`` of either 'unchanged', 'added', 'deleted', or 'modified'. >>> import pprint >>> result = diff(['a', 'b', 'c'], ['b', 'c', 'd']) >>> ppri...
f11193:m0
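The doctest in the docstring is cut off; as a hedged illustration (exact ordering may differ), a call like the following yields state/value dicts:

diff(['a', 'b', 'c'], ['b', 'c', 'd'])
# Roughly:
# [{'state': 'deleted',   'value': 'a'},
#  {'state': 'unchanged', 'value': 'b'},
#  {'state': 'unchanged', 'value': 'c'},
#  {'state': 'added',     'value': 'd'}]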
def notebook_diff(nb1, nb2, check_modified=True):
nb1_cells = nb1['<STR_LIT>'][<NUM_LIT:0>]['<STR_LIT>']<EOL>nb2_cells = nb2['<STR_LIT>'][<NUM_LIT:0>]['<STR_LIT>']<EOL>diffed_nb = cells_diff(nb1_cells, nb2_cells, check_modified=check_modified)<EOL>line_diffs = diff_modified_items(diffed_nb)<EOL>cell_list = list()<EOL>for i, item in enumerate(diffed_nb):<EOL><INDENT>ce...
Unify two notebooks into a single notebook with diff metadata. The result of this function is a valid notebook that can be loaded by the IPython Notebook front-end. This function adds additional cell metadata that the front-end Javascript uses to render the diffs. Parameters ---------- nb1 : d...
f11194:m0
def diff_result_to_cell(item):
state = item['<STR_LIT:state>']<EOL>if state == '<STR_LIT>':<EOL><INDENT>new_cell = item['<STR_LIT>'].data<EOL>old_cell = item['<STR_LIT>'].data<EOL>new_cell['<STR_LIT>']['<STR_LIT:state>'] = state<EOL>new_cell['<STR_LIT>']['<STR_LIT>'] = old_cell<EOL>cell = new_cell<EOL><DEDENT>else:<EOL><INDENT>cell = item['<STR_LIT:...
diff.diff returns a dictionary with all the information we need, but we want to extract the cell and change its metadata.
f11194:m2
def cells_diff(before_cells, after_cells, check_modified=False):
before_comps = [<EOL>CellComparator(cell, check_modified=check_modified)<EOL>for cell in before_cells<EOL>]<EOL>after_comps = [<EOL>CellComparator(cell, check_modified=check_modified)<EOL>for cell in after_cells<EOL>]<EOL>diff_result = diff(<EOL>before_comps,<EOL>after_comps,<EOL>check_modified=check_modified<EOL>)<EOL...
Diff two arrays of cells.
f11194:m3
def words_diff(before_words, after_words):
before_comps = before_words.split()<EOL>after_comps = after_words.split()<EOL>diff_result = diff(<EOL>before_comps,<EOL>after_comps<EOL>)<EOL>return diff_result<EOL>
Diff the words in two strings. This is intended for use in diffing prose and other forms of text where line breaks have little semantic value. Parameters ---------- before_words : str A string to be used as the baseline version. after_words : str A string to be compared against...
f11194:m4
def lines_diff(before_lines, after_lines, check_modified=False):
before_comps = [<EOL>LineComparator(line, check_modified=check_modified)<EOL>for line in before_lines<EOL>]<EOL>after_comps = [<EOL>LineComparator(line, check_modified=check_modified)<EOL>for line in after_lines<EOL>]<EOL>diff_result = diff(<EOL>before_comps,<EOL>after_comps,<EOL>check_modified=check_modified<EOL>)<EOL...
Diff the lines in two strings. Parameters ---------- before_lines : iterable Iterable containing lines used as the baseline version. after_lines : iterable Iterable containing lines to be compared against the baseline. Returns ------- diff_result : A list of dictionaries co...
f11194:m5
def merge(local, base, remote, check_modified=False):
base_local = diff.diff(base, local, check_modified=check_modified)<EOL>base_remote = diff.diff(base, remote, check_modified=check_modified)<EOL>merge = diff.diff(base_local, base_remote)<EOL>return merge<EOL>
Generate unmerged series of changes (including conflicts). By diffing the two diffs, we find *changes* that are on the local branch, the remote branch, or both. We arbitrarily choose the "local" branch to be the "before" and the "remote" branch to be the "after" in the diff algorithm. Therefore: ...
f11195:m0
def notebook_merge(local, base, remote, check_modified=False):
local_cells = get_cells(local)<EOL>base_cells = get_cells(base)<EOL>remote_cells = get_cells(remote)<EOL>rows = []<EOL>current_row = []<EOL>empty_cell = lambda: {<EOL>'<STR_LIT>': '<STR_LIT:code>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': [],<EOL>'<STR_LIT>': <NUM_LIT:1>,<EOL>'<STR_LIT:text>': ['<STR_LIT>'],<EOL>...
Unify three notebooks into a single notebook with merge metadata. The result of this function is a valid notebook that can be loaded by the IPython Notebook front-end. This function adds additional cell metadata that the front-end Javascript uses to render the merge. Parameters ---------- loca...
f11195:m1
def parse(self, json_data):
data = current.read(json_data, '<STR_LIT>')<EOL>json_data.close()<EOL>return data<EOL>
Parse a notebook .ipynb file. Parameters ---------- json_data : file A file handle for an .ipynb file. Returns ------- nb : An IPython Notebook data structure.
f11218:c0:m0
def __nonzero__(self):
return self.truth<EOL>
For evaluating the object as a boolean.
f11219:c0:m1
def equal(self, line1, line2):
eqLine = line1 == line2<EOL>if eqLine:<EOL><INDENT>return BooleanPlus(True, False)<EOL><DEDENT>else:<EOL><INDENT>unchanged_count = self.count_similar_words(line1, line2)<EOL>similarity_percent = (<EOL>(<NUM_LIT> * unchanged_count) /<EOL>(len(line1.split()) + len(line2.split()))<EOL>)<EOL>if similarity_percent >= <NUM_L...
Return True if exactly equal or if equal but modified; otherwise return False. Return type: BooleanPlus.
f11219:c1:m2
def compare_cells(self, cell1, cell2):
eqlanguage = cell1["<STR_LIT>"] == cell2["<STR_LIT>"]<EOL>eqinput = cell1["<STR_LIT:input>"] == cell2["<STR_LIT:input>"]<EOL>eqoutputs = self.equaloutputs(cell1["<STR_LIT>"], cell2["<STR_LIT>"])<EOL>if eqlanguage and eqinput and eqoutputs:<EOL><INDENT>return BooleanPlus(True, False)<EOL><DEDENT>elif not self.check_modi...
Return True if exactly equal or if equal but modified; otherwise return False. Return type: BooleanPlus.
f11219:c2:m5
def load_keys():
consumer_key = os.environ.get('<STR_LIT>')<EOL>consumer_secret = os.environ.get('<STR_LIT>')<EOL>access_token = os.environ.get('<STR_LIT>')<EOL>access_token_secret = os.environ.get('<STR_LIT>')<EOL>return consumer_key, consumer_secret, access_token, access_token_secret<EOL>
Loads Twitter keys. Returns: tuple: consumer_key, consumer_secret, access_token, access_token_secret
f11229:m0
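A sketch of the environment-variable loading; the variable names are conventional guesses, since the actual names are masked above.

import os

def load_keys():
    # Names below are assumptions, not the masked originals.
    return (os.environ.get('TWITTER_CONSUMER_KEY'),
            os.environ.get('TWITTER_CONSUMER_SECRET'),
            os.environ.get('TWITTER_ACCESS_TOKEN'),
            os.environ.get('TWITTER_ACCESS_TOKEN_SECRET'))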
def search(self, q):
results = self._api.search(q=q)<EOL>return results<EOL>
Search tweets by keyword. Args: q: keyword Returns: list: tweet list
f11229:c0:m1
def search_by_user(self, screen_name, count=<NUM_LIT:100>):
results = self._api.user_timeline(screen_name=screen_name, count=count)<EOL>return results<EOL>
Search tweets by user. Args: screen_name: screen name count: the number of tweets Returns: list: tweet list
f11229:c0:m2
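The wrapper's _api calls (search(q=...), user_timeline(screen_name=..., count=...)) match tweepy's API surface; hypothetical wiring under that assumption:

import tweepy

consumer_key, consumer_secret, access_token, access_token_secret = load_keys()
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)

# Equivalent to search_by_user('twitter', count=100) in the class above.
tweets = api.user_timeline(screen_name='twitter', count=100)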
def __init__(self, app):
prefix = "<STR_LIT>".format(app.config.get('<STR_LIT>'))<EOL>super(self.__class__, self).__init__(<EOL>current_cache, prefix=prefix, timeout=None,<EOL>ignore_memcache_errors=True<EOL>)<EOL>
Initialize `BytecodeCache`.
f11241:c0:m0
def _callback_factory(callback_imp):
if callback_imp is None:<EOL><INDENT>try:<EOL><INDENT>pkg_resources.get_distribution('<STR_LIT>')<EOL>from flask_login import current_user<EOL>return lambda: current_user.is_authenticated<EOL><DEDENT>except pkg_resources.DistributionNotFound:<EOL><INDENT>return lambda: False<EOL><DEDENT><DEDENT>elif isinstance(callback...
Factory for creating an is-authenticated callback.
f11242:m0
def __init__(self, app=None):
if app:<EOL><INDENT>self.init_app(app)<EOL><DEDENT>
Extension initialization.
f11242:c0:m0
def init_app(self, app):
self.init_config(app)<EOL>self.cache = Cache(app)<EOL>self.is_authenticated_callback = _callback_factory(<EOL>app.config['<STR_LIT>'])<EOL>app.extensions['<STR_LIT>'] = self<EOL>
Flask application initialization.
f11242:c0:m1