Search is not available for this dataset
text (string, lengths 75–104k characters) |
|---|
def variations(word):
"""Create variations of the word based on letter combinations like oo,
sh, etc."""
if len(word) == 1:
return [[word[0]]]
elif word == 'aa':
return [['A']]
elif word == 'ee':
return [['i']]
elif word == 'ei':
return [['ei']]
elif word in ['oo... |
def f2p_word(word, max_word_size=15, cutoff=3):
"""Convert a single word from Finglish to Persian.
max_word_size: Maximum size of the words to consider. Words larger
than this will be kept unchanged.
cutoff: The cut-off point. For each word, there could be many
possibilities. By default 3 of these... |
def f2p_list(phrase, max_word_size=15, cutoff=3):
"""Convert a phrase from Finglish to Persian.
phrase: The phrase to convert.
max_word_size: Maximum size of the words to consider. Words larger
than this will be kept unchanged.
cutoff: The cut-off point. For each word, there could be many
pos... |
def f2p(phrase, max_word_size=15, cutoff=3):
    """Convert a Finglish phrase to the most probable Persian phrase."""
    converted = f2p_list(phrase, max_word_size, cutoff)
    # each entry is a ranked candidate list; take the top candidate's text
    best_words = [candidates[0][0] for candidates in converted]
    return ' '.join(best_words)
def distribution_version(name):
    """Try to get the version string of the named distribution.

    :param name: the distribution (package) name to look up
    :returns: the version string, or None when the distribution
        is not installed
    """
    # NOTE: pkg_resources is deprecated in favor of importlib.metadata,
    # but is kept for compatibility with the file's existing usage.
    from pkg_resources import get_distribution, DistributionNotFound
    try:
        dist = get_distribution(name)
    except DistributionNotFound:
        # explicit None instead of falling off the end of the function
        return None
    return dist.version
def initpkg(pkgname, exportdefs, attr=None, eager=False):
""" initialize given package from the export definitions. """
attr = attr or {}
oldmod = sys.modules.get(pkgname)
d = {}
f = getattr(oldmod, '__file__', None)
if f:
f = _py_abspath(f)
d['__file__'] = f
if hasattr(oldmod, '... |
def importobj(modpath, attrname):
"""imports a module, then resolves the attrname on it"""
module = __import__(modpath, None, None, ['__doc__'])
if not attrname:
return module
retval = module
names = attrname.split(".")
for x in names:
retval = getattr(retval, x)
return retv... |
def __makeattr(self, name):
"""lazily compute value for name or raise AttributeError if unknown."""
# print "makeattr", self.__name__, name
target = None
if '__onfirstaccess__' in self.__map__:
target = self.__map__.pop('__onfirstaccess__')
importobj(*target)()
... |
def _request(self, http_method, relative_url='', **kwargs):
"""Does actual HTTP request using requests library."""
# It could be possible to call api.resource.get('/index')
# but it would be non-intuitive that the path would resolve
# to root of domain
relative_url = self._remove... |
def _new_url(self, relative_url):
    """Create new Url which points to new url."""
    # resolve the relative path against this resource's base URL,
    # carrying the default keyword arguments over to the new Url
    absolute = urljoin(self._base_url, relative_url)
    return Url(absolute, **self._default_kwargs)
def roles(self):
    """Return the groups created by this client as a list of dicts.

    Queries AuthGroup for documents whose creator is ``self.client``,
    fetching only the ``role`` field, and decodes the queryset's JSON.
    """
    result = AuthGroup.objects(creator=self.client).only('role')
    return json.loads(result.to_json())
def get_permissions(self, role):
"""gets permissions of role"""
target_role = AuthGroup.objects(role=role, creator=self.client).first()
if not target_role:
return '[]'
targets = AuthPermission.objects(groups=target_role, creator=self.client).only('name')
return json.l... |
def get_user_permissions(self, user):
"""get permissions of a user"""
memberShipRecords = AuthMembership.objects(creator=self.client, user=user).only('groups')
results = []
for each in memberShipRecords:
for group in each.groups:
targetPermissionRecords = Auth... |
def get_user_roles(self, user):
"""get permissions of a user"""
memberShipRecords = AuthMembership.objects(creator=self.client, user=user).only('groups')
results = []
for each in memberShipRecords:
for group in each.groups:
results.append({'role':group.role})
... |
def get_role_members(self, role):
    """Return the membership records for the given role.

    :param role: the role name to look up
    :returns: list of dicts (decoded JSON), each fetched with only the
        ``user`` field populated
    """
    targetRoleDb = AuthGroup.objects(creator=self.client, role=role)
    members = AuthMembership.objects(groups__in=targetRoleDb).only('user')
    return json.loads(members.to_json())
def which_roles_can(self, name):
    """Return the roles holding the permission ``name``.

    :param name: permission name, e.g. "SendMail"
    :returns: list of dicts like ``{'role': <role name>}``; an empty
        list when no such permission exists for this client
    """
    record = AuthPermission.objects(creator=self.client, name=name).first()
    # .first() returns None when nothing matches; the original then
    # raised AttributeError on ``None.groups``
    if not record:
        return []
    return [{'role': group.role} for group in record.groups]
def which_users_can(self, name):
    """Return, per role holding permission ``name``, that role's members."""
    roles = self.which_roles_can(name)
    members = []
    for entry in roles:
        members.append(self.get_role_members(entry.get('role')))
    return members
def get_role(self, role):
    """Return the AuthGroup object for ``role``, or None when absent."""
    # return the first match directly instead of shadowing the parameter
    return AuthGroup.objects(role=role, creator=self.client).first()
def add_role(self, role, description=None):
    """ Creates a new group; returns True on success, False on duplicate """
    # NOTE(review): ``description`` is accepted but never used — confirm
    # whether it should be stored on the AuthGroup document.
    new_group = AuthGroup(role=role, creator=self.client)
    try:
        new_group.save()
        return True
    except NotUniqueError:
        # role name already exists for this creator
        return False
def del_role(self, role):
    """Delete the group named ``role``; True on success, False if absent."""
    group = AuthGroup.objects(role=role, creator=self.client).first()
    # guard clause: nothing to delete
    if not group:
        return False
    group.delete()
    return True
def add_membership(self, user, role):
""" make user a member of a group """
targetGroup = AuthGroup.objects(role=role, creator=self.client).first()
if not targetGroup:
return False
target = AuthMembership.objects(user=user, creator=self.client).first()
if not target:... |
def del_membership(self, user, role):
""" dismember user from a group """
if not self.has_membership(user, role):
return True
targetRecord = AuthMembership.objects(creator=self.client, user=user).first()
if not targetRecord:
return True
for group in targe... |
def has_membership(self, user, role):
    """Return True when ``user`` belongs to the group named ``role``."""
    record = AuthMembership.objects(creator=self.client, user=user).first()
    if not record:
        return False
    # short-circuit scan instead of materializing the role-name list
    return any(group.role == role for group in record.groups)
def add_permission(self, role, name):
""" authorize a group for something """
if self.has_permission(role, name):
return True
targetGroup = AuthGroup.objects(role=role, creator=self.client).first()
if not targetGroup:
return False
# Create or update
... |
def del_permission(self, role, name):
""" revoke authorization of a group """
if not self.has_permission(role, name):
return True
targetGroup = AuthGroup.objects(role=role, creator=self.client).first()
target = AuthPermission.objects(groups=targetGroup, name=name, creator=sel... |
def user_has_permission(self, user, name):
""" verify user has permission """
targetRecord = AuthMembership.objects(creator=self.client, user=user).first()
if not targetRecord:
return False
for group in targetRecord.groups:
if self.has_permission(group.role, name)... |
def bump_version(version, bump='patch'):
"""patch: patch, minor, major"""
try:
parts = map(int, version.split('.'))
except ValueError:
fail('Current version is not numeric')
if bump == 'patch':
parts[2] += 1
elif bump == 'minor':
parts[1] += 1
parts[2] = 0
... |
def handler(event):
    """Signal decorator to allow use of callback functions as class decorators."""
    def decorator(callback):
        def apply(cls):
            # wire the callback to the signal with the class as sender
            event.connect(callback, sender=cls)
            return cls
        # expose ``callback.apply`` so the callback itself can be used
        # as a class decorator
        callback.apply = apply
        return callback
    return decorator
def stringify(req, resp):
    """
    Serialize ``resp.body`` to a JSON string when it is a dict.

    This is the latest after hook. On serialization failure the
    response status is set to HTTP 500 instead of propagating.
    """
    if isinstance(resp.body, dict):
        try:
            resp.body = json.dumps(resp.body)
        except (TypeError, ValueError):
            # json.dumps raises TypeError/ValueError on unserializable
            # content; the original ``except(nameError)`` referenced an
            # undefined name and itself raised NameError.
            resp.status = falcon.HTTP_500
def process_response(self, req, resp, resource):
"""Post-processing of the response (after routing).
Args:
req: Request object.
resp: Response object.
resource: Resource object to which the request was
routed. May be None if no route was found
... |
def run(command=None, *arguments):
"""
Run the given command.
Parameters:
:param command: A string describing a command.
:param arguments: A list of strings describing arguments to the command.
"""
if command is None:
sys.exit('django-shortcuts: No argument was supplied, please spe... |
def instantiate(self, scope, args, interp):
"""Create a ParamList instance for actual interpretation
:args: TODO
:returns: A ParamList object
"""
param_instances = []
BYREF = "byref"
# TODO are default values for function parameters allowed in 010?
for... |
def get_experiments(base, load=False):
''' get_experiments will return loaded json for all valid experiments from an experiment folder
:param base: full path to the base folder with experiments inside
:param load: if True, returns a list of loaded config.json objects. If False (default) returns the paths to... |
def load_experiments(folders):
'''load_experiments
a wrapper for load_experiment to read multiple experiments
:param experiment_folders: a list of experiment folders to load, full paths
'''
experiments = []
if isinstance(folders,str):
folders = [experiment_folders]
for folder in fold... |
def load_experiment(folder, return_path=False):
'''load_experiment:
reads in the config.json for a folder, returns None if not found.
:param folder: full path to experiment folder
:param return_path: if True, don't load the config.json, but return it
'''
fullpath = os.path.abspath(folder)
co... |
def get_selection(available, selection, base='/scif/apps'):
'''we compare the basename (the exp_id) of the selection and available,
regardless of parent directories'''
if isinstance(selection, str):
selection = selection.split(',')
available = [os.path.basename(x) for x in available]
s... |
def make_lookup(experiment_list, key='exp_id'):
'''make_lookup returns dict object to quickly look up query experiment on exp_id
:param experiment_list: a list of query (dict objects)
:param key_field: the key in the dictionary to base the lookup key (str)
:returns lookup: dict (json) with key as "key_f... |
def validate(folder=None, cleanup=False):
'''validate
:param folder: full path to experiment folder with config.json. If path begins
with https, we assume to be starting from a repository.
'''
from expfactory.validator import ExperimentValidator
cli = ExperimentValidator()
ret... |
def get_library(lookup=True, key='exp_id'):
''' return the raw library, without parsing'''
library = None
response = requests.get(EXPFACTORY_LIBRARY)
if response.status_code == 200:
library = response.json()
if lookup is True:
return make_lookup(library,key=key)
return li... |
def int3(params, ctxt, scope, stream, coord, interp):
"""Define the ``Int3()`` function in the interpreter. Calling
``Int3()`` will drop the user into an interactive debugger.
"""
if interp._no_debug:
return
if interp._int3:
interp.debugger = PfpDbg(interp)
interp.debugger.c... |
def initdb(self):
'''initdb will check for writability of the data folder, meaning
that it is bound to the local machine. If the folder isn't bound,
expfactory runs in demo mode (not saving data)
'''
self.database = EXPFACTORY_DATABASE
bot.info("DATABASE: %s" %self... |
def setup(self):
''' obtain database and filesystem preferences from defaults,
and compare with selection in container.
'''
self.selection = EXPFACTORY_EXPERIMENTS
self.ordered = len(EXPFACTORY_EXPERIMENTS) > 0
self.data_base = EXPFACTORY_DATA
self.study_id =... |
def get_next(self, session):
'''return the name of the next experiment, depending on the user's
choice to randomize. We don't remove any experiments here, that is
done on finish, in the case the user doesn't submit data (and
thus finish). A return of None means the user has comp... |
def finish_experiment(self, session, exp_id):
'''remove an experiment from the list after completion.
'''
self.logger.debug('Finishing %s' %exp_id)
experiments = session.get('experiments', [])
experiments = [x for x in experiments if x != exp_id]
session['experiments'] = ... |
def find_subdirectories(basepath):
    '''
    Return the names of all directories (at any depth) under ``basepath``.

    Names are de-duplicated: a directory name appearing under several
    parents is reported once. Order follows os.walk traversal.
    '''
    directories = []
    seen = set()
    for _root, dirnames, _filenames in os.walk(basepath):
        for name in dirnames:
            # set membership is O(1); the original scanned the growing
            # result list for every candidate (quadratic overall)
            if name not in seen:
                seen.add(name)
                directories.append(name)
    return directories
def find_directories(root,fullpath=True):
'''
Return directories at one level specified by user
(not recursive)
'''
directories = []
for item in os.listdir(root):
# Don't include hidden directories
if not re.match("^[.]",item):
if os.path.isdir(os.path.join(root, item... |
def copy_directory(src, dest, force=False):
''' Copy an entire directory recursively
'''
if os.path.exists(dest) and force is True:
shutil.rmtree(dest)
try:
shutil.copytree(src, dest)
except OSError as e:
# If the error was caused because the source wasn't a directory
... |
def clone(url, tmpdir=None):
'''clone a repository from Github'''
if tmpdir is None:
tmpdir = tempfile.mkdtemp()
name = os.path.basename(url).replace('.git', '')
dest = '%s/%s' %(tmpdir,name)
return_code = os.system('git clone %s %s' %(url,dest))
if return_code == 0:
return dest
... |
def run_command(cmd):
'''run_command uses subprocess to send a command to the terminal.
:param cmd: the command to send, should be a list for subprocess
'''
output = Popen(cmd,stderr=STDOUT,stdout=PIPE)
t = output.communicate()[0],output.returncode
output = {'message':t[0],
'return... |
def get_template(name, base=None):
'''read in and return a template file
'''
# If the file doesn't exist, assume relative to base
template_file = name
if not os.path.exists(template_file):
if base is None:
base = get_templatedir()
template_file = "%s/%s" %(base, name)
... |
def sub_template(template, template_tag, substitution):
    '''Return ``template`` with every occurrence of ``template_tag``
    replaced by ``substitution``.
    '''
    return template.replace(template_tag, substitution)
def get_post_fields(request):
    '''Collect the POST form fields of ``request`` into a plain dict.'''
    return {field: value for field, value in request.form.items()}
def getenv(variable_key, default=None, required=False, silent=True):
'''getenv will attempt to get an environment variable. If the variable
is not found, None is returned.
:param variable_key: the variable name
:param required: exit with error if not found
:param silent: Do not print debugging infor... |
def parse(
data = None,
template = None,
data_file = None,
template_file = None,
interp = None,
debug = False,
predefines = True,
int3 = True,
keep_successful = False,
printf ... |
def Checksum(params, ctxt, scope, stream, coord):
"""
Runs a simple checksum on a file and returns the result as a int64. The
algorithm can be one of the following constants:
CHECKSUM_BYTE - Treats the file as a set of unsigned bytes
CHECKSUM_SHORT_LE - Treats the file as a set of unsigned little-e... |
def FindAll(params, ctxt, scope, stream, coord, interp):
"""
This function converts the argument data into a set of hex bytes
and then searches the current file for all occurrences of those
bytes. data may be any of the basic types or an array of one of
the types. If data is an array of signed bytes... |
def FindFirst(params, ctxt, scope, stream, coord, interp):
"""
This function is identical to the FindAll function except that the
return value is the position of the first occurrence of the target
found. A negative number is returned if the value could not be found.
"""
global FIND_MATCHES_ITER
... |
def FindNext(params, ctxt, scope, stream, coord):
"""
This function returns the position of the next occurrence of the
target value specified with the FindFirst function. If dir is 1, the
find direction is down. If dir is 0, the find direction is up. The
return value is the address of the found data... |
def generate_subid(self, token=None):
'''assumes a flat (file system) database, organized by experiment id, and
subject id, with data (json) organized by subject identifier
'''
# Not headless auto-increments
if not token:
token = str(uuid.uuid4())
# Headless doesn't use any folder_... |
def list_users(self):
    '''List users, each associated with a filesystem folder under
    the database directory.
    '''
    user_folders = sorted(glob('%s/*' % self.database))
    return [self.print_user(folder) for folder in user_folders]
def print_user(self, user):
'''print a filesystem database user. A "database" folder that might end with
the participant status (e.g. _finished) is extracted to print in format
[folder] [identifier][studyid]
/scif/data/expfactory/xxxx-xxxx xxxx-xxxx[studyid]
... |
def generate_user(self, subid=None):
'''generate a new user on the filesystem, still session based so we
create a new identifier. This function is called from the users new
entrypoint, and it assumes we want a user generated with a token.
since we don't have a database proper, we write the fol... |
def finish_user(self, subid, ext='finished'):
'''finish user will append "finished" (or other) to the data folder when
the user has completed (or been revoked from) the battery.
For headless, this means that the session is ended and the token
will not work again to rewrite the result. If the ... |
def restart_user(self, subid):
'''restart user will remove any "finished" or "revoked" extensions from
the user folder to restart the session. This command always comes from
the client users function, so we know subid does not start with the
study identifer first
'''
if os.path.exists(s... |
def validate_token(self, token):
'''retrieve a subject based on a token. Valid means we return a participant
invalid means we return None
'''
# A token that is finished or revoked is not valid
subid = None
if not token.endswith(('finished','revoked')):
subid = self.generate_subid(toke... |
def refresh_token(self, subid):
'''refresh or generate a new token for a user. If the user is finished,
this will also make the folder available again for using.'''
if os.path.exists(self.data_base): # /scif/data
data_base = "%s/%s" %(self.data_base, subid)
if os.path.exists(data_base)... |
def save_data(self, session, exp_id, content):
'''save data will obtain the current subid from the session, and save it
depending on the database type. Currently we just support flat files'''
subid = session.get('subid')
# We only attempt save if there is a subject id, set at start
data_file = ... |
def init_db(self):
'''init_db for the filesystem ensures that the base folder (named
according to the studyid) exists.
'''
self.session = None
if not os.path.exists(self.data_base):
mkdir_p(self.data_base)
self.database = "%s/%s" %(self.data_base, self.study_id)
if not os.path.... |
def native(name, ret, interp=None, send_interp=False):
"""Used as a decorator to add the decorated function to the
pfp interpreter so that it can be used from within scripts.
:param str name: The name of the function as it will be exposed in template scripts.
:param pfp.fields.Field ret: The return typ... |
def do_peek(self, args):
"""Peek at the next 16 bytes in the stream::
Example:
The peek command will display the next 16 hex bytes in the input
stream::
pfp> peek
89 50 4e 47 0d 0a 1a 0a 00 00 00 0d 49 48 44 52 .PNG........IHDR
"""
... |
def do_next(self, args):
    """Execute the next statement, stepping over function calls."""
    interp = self._interp
    self._do_print_from_last_cmd = True
    interp.step_over()
    return True
def do_step(self, args):
    """Execute the next statement, descending INTO function calls."""
    interp = self._interp
    self._do_print_from_last_cmd = True
    interp.step_into()
    return True
def do_continue(self, args):
    """Resume interpretation until the next break."""
    interp = self._interp
    self._do_print_from_last_cmd = True
    interp.cont()
    return True
def do_eval(self, args):
"""Eval the user-supplied statement. Note that you can do anything with
this command that you can do in a template.
The resulting value of your statement will be displayed.
"""
try:
res = self._interp.eval(args)
if res is not None... |
def do_show(self, args):
"""Show the current structure of __root (no args),
or show the result of the expression (something that can be eval'd).
"""
args = args.strip()
to_show = self._interp._root
if args != "":
try:
to_show = self._interp.ev... |
def do_quit(self, args):
    """Quit the debugger by clearing the interpreter's break mode."""
    interp = self._interp
    interp.set_break(interp.BREAK_NONE)
    return True
def validate(self, url):
''' takes in a Github repository for validation of preview and
runtime (and possibly tests passing?
'''
# Preview must provide the live URL of the repository
if not url.startswith('http') or not 'github' in url:
bot.error('Test of previe... |
def mask_dict_password(dictionary, secret='***'):
    """Return a copy of ``dictionary`` with password-like values masked.

    Any key containing the substring 'password' has its value replaced
    by ``secret``; all other entries are copied unchanged. The input
    dictionary is not modified.
    """
    return {
        key: secret if 'password' in key else value
        for key, value in dictionary.items()
    }
def save():
'''save is a view to save data. We might want to adjust this to allow for
updating saved data, but given single file is just one post for now
'''
if request.method == 'POST':
exp_id = session.get('exp_id')
app.logger.debug('Saving data for %s' %exp_id)
fields = ge... |
def main(args,parser,subparser=None):
'''this is the main entrypoint for a container based web server, with
most of the variables coming from the environment. See the Dockerfile
template for how this function is executed.
'''
# First priority to args.base
base = args.base
if base is ... |
def save_data(self,session, exp_id, content):
'''save data will obtain the current subid from the session, and save it
depending on the database type.'''
from expfactory.database.models import (
Participant,
Result
)
subid = session.get('subid')
bot.info('Saving data for subi... |
def init_db(self):
'''initialize the database, with the default database path or custom of
the format sqlite:////scif/data/expfactory.db
'''
# Database Setup, use default if uri not provided
if self.database == 'sqlite':
db_path = os.path.join(EXPFACTORY_DATA, '%s.db' % EXPFACTORY_SUBID... |
def reserve_bits(self, num_bits, stream):
"""Used to "reserve" ``num_bits`` amount of bits in order to keep track
of consecutive bitfields (or are the called bitfield groups?).
E.g. ::
struct {
char a:8, b:8;
char c:4, d:4, e:8;
}
... |
def read_bits(self, stream, num_bits, padded, left_right, endian):
"""Return ``num_bits`` bits, taking into account endianness and
left-right bit directions
"""
if self._cls_bits is None and padded:
raw_bits = stream.read_bits(self.cls.width*8)
self._cls_bits = s... |
def write_bits(self, stream, raw_bits, padded, left_right, endian):
"""Write the bits. Once the size of the written bits is equal
to the number of the reserved bits, flush it to the stream
"""
if padded:
if left_right:
self._write_bits += raw_bits
... |
def _pfp__snapshot(self, recurse=True):
    """Save off the current value of the field.

    :param recurse: accepted for interface symmetry with container
        fields; unused here since this field has no children.
    """
    # only fields that actually store a value carry _pfp__value; stash
    # it so a later restore-snapshot call can roll the field back
    if hasattr(self, "_pfp__value"):
        self._pfp__snapshot_value = self._pfp__value
def _pfp__process_metadata(self):
"""Process the metadata once the entire struct has been
declared.
"""
if self._pfp__metadata_processor is None:
return
metadata_info = self._pfp__metadata_processor()
if isinstance(metadata_info, list):
for metada... |
def _pfp__watch(self, watcher):
"""Add the watcher to the list of fields that
are watching this field
"""
if self._pfp__parent is not None and isinstance(self._pfp__parent, Union):
self._pfp__parent._pfp__watch(watcher)
else:
self._pfp__watchers.append(wat... |
def _pfp__set_watch(self, watch_fields, update_func, *func_call_info):
"""Subscribe to update events on each field in ``watch_fields``, using
``update_func`` to update self's value when ``watch_field``
changes"""
self._pfp__watch_fields = watch_fields
for watch_field in watch_fi... |
def _pfp__set_packer(self, pack_type, packer=None, pack=None, unpack=None, func_call_info=None):
"""Set the packer/pack/unpack functions for this field, as
well as the pack type.
:pack_type: The data type of the packed data
:packer: A function that can handle packing and unpacking. Firs... |
def _pfp__pack_data(self):
"""Pack the nested field
"""
if self._pfp__pack_type is None:
return
tmp_stream = six.BytesIO()
self._._pfp__build(bitwrap.BitwrappedStream(tmp_stream))
raw_data = tmp_stream.getvalue()
unpack_func = self._pfp__packer
... |
def _pfp__unpack_data(self, raw_data):
"""Means that the field has already been parsed normally,
and that it now needs to be unpacked.
:raw_data: A string of the data that the field consumed while parsing
"""
if self._pfp__pack_type is None:
return
if self._p... |
def _pfp__handle_updated(self, watched_field):
"""Handle the watched field that was updated
"""
self._pfp__no_notify = True
# nested data has been changed, so rebuild the
# nested data to update the field
# TODO a global setting to determine this behavior?
# coul... |
def _pfp__width(self):
    """Return the width of the field (sizeof) in bytes.

    Builds the field into an in-memory bit-wrapped stream and measures
    how many bytes were produced.
    """
    raw_output = six.BytesIO()
    output = bitwrap.BitwrappedStream(raw_output)
    self._pfp__build(output)
    # flush so any pending bits are written before measuring
    output.flush()
    return len(raw_output.getvalue())
def _pfp__set_value(self, new_val):
"""Set the new value if type checking is passes, potentially
(TODO? reevaluate this) casting the value to something else
:new_val: The new value
:returns: TODO
"""
if self._pfp__frozen:
raise errors.UnmodifiableConst()
... |
def _pfp__snapshot(self, recurse=True):
    """Save off the current value of the field.

    :param recurse: when True, also snapshot every child field.
    """
    super(Struct, self)._pfp__snapshot(recurse=recurse)
    if recurse:
        for child in self._pfp__children:
            child._pfp__snapshot(recurse=recurse)
def _pfp__restore_snapshot(self, recurse=True):
    """Restore the snapshotted value without triggering any events.

    :param recurse: when True, also restore every child field.
    """
    super(Struct, self)._pfp__restore_snapshot(recurse=recurse)
    if recurse:
        for child in self._pfp__children:
            child._pfp__restore_snapshot(recurse=recurse)
def _pfp__set_value(self, value):
"""Initialize the struct. Value should be an array of
fields, one each for each struct member.
:value: An array of fields to initialize the struct with
:returns: None
"""
if self._pfp__frozen:
raise errors.UnmodifiableConst()... |
def _pfp__add_child(self, name, child, stream=None, overwrite=False):
"""Add a child to the Struct field. If multiple consecutive fields are
added with the same name, an implicit array will be created to store
all fields of that name.
:param str name: The name of the child
:para... |
def _pfp__handle_non_consecutive_duplicate(self, name, child, insert=True):
"""This new child, and potentially one already existing child, need to
have a numeric suffix appended to their name.
An entry will be made for this name in ``self._pfp__name_collisions`` to keep
track of... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.