code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def t_ARTICLEHEADER(self, token):
# \xef\xbc\x9a is the "fullwidth colon" used in Japanese for instance
ur'\#\#\s+<article-(?P<number>[A-Z0-9]+)><(?P<newtag>[a-zA-Z0-9-]+)><(?P<oldtag>[a-zA-Z0-9-]+)>[ ]*(?P<name>[^\<]+?)(?P<sep>:\s|\xef\xbc\x9a)(?P<title>[^<\n]+)\n'
number = token.lexer.lexmatch... | ur'\#\#\s+<article-(?P<number>[A-Z0-9]+)><(?P<newtag>[a-zA-Z0-9-]+)><(?P<oldtag>[a-zA-Z0-9-]+)>[ ]*(?P<name>[^\<]+?)(?P<sep>:\s|\xef\xbc\x9a)(?P<title>[^<\n]+)\n |
def _dispatch(self, textgroup, directory):
""" Sparql dispatcher do not need to dispatch works, as the link is DB stored through Textgroup
:param textgroup: A Textgroup object
:param directory: The path in which we found the textgroup
:return:
"""
self.dispatcher.dispatc... | Sparql dispatcher do not need to dispatch works, as the link is DB stored through Textgroup
:param textgroup: A Textgroup object
:param directory: The path in which we found the textgroup
:return: |
def get_page(self, target_url):
"""
Retrieve a specific page of SyncListInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of SyncListInstance
:rtype: twilio.rest.sync.v1.... | Retrieve a specific page of SyncListInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of SyncListInstance
:rtype: twilio.rest.sync.v1.service.sync_list.SyncListPage |
def connections(self):
"""
Gets the Connections API client.
Returns:
Connections:
"""
if not self.__connections:
self.__connections = Connections(
self.__connection)
return self.__connections | Gets the Connections API client.
Returns:
Connections: |
def find_py_files(srctree, ignore=None):
"""Return all the python files in a source tree
Ignores any path that contains the ignore string
This is not used by other class methods, but is
designed to be used in code that uses this class.
"""
if not os.path.isdir(srctree)... | Return all the python files in a source tree
Ignores any path that contains the ignore string
This is not used by other class methods, but is
designed to be used in code that uses this class. |
def auto_newline(buffer):
r"""
Insert \n at the cursor position. Also add necessary padding.
"""
insert_text = buffer.insert_text
if buffer.document.current_line_after_cursor:
# When we are in the middle of a line. Always insert a newline.
insert_text('\n')
else:
# Go to... | r"""
Insert \n at the cursor position. Also add necessary padding. |
def save_weights_from_checkpoint(input_checkpoint, output_path, conv_var_names=None, conv_transpose_var_names=None):
"""Save the weights of the trainable variables given a checkpoint, each one in a different file in output_path."""
check_input_checkpoint(input_checkpoint)
with tf.Session() as sess:
... | Save the weights of the trainable variables given a checkpoint, each one in a different file in output_path. |
def compile(self, source, dest, is_two_file=True, post=None, lang=None):
"""Compile the docstring into HTML and save as dest."""
makedirs(os.path.dirname(dest))
with io.open(dest, "w+", encoding="utf8") as out_file:
with io.open(source, "r", encoding="utf8") as in_file:
... | Compile the docstring into HTML and save as dest. |
def create_cookie(host, path, secure, expires, name, value):
"""Shortcut function to create a cookie
"""
return http.cookiejar.Cookie(0, name, value, None, False, host, host.startswith('.'), host.startswith('.'), path,
True, secure, expires, False, None, None, {}) | Shortcut function to create a cookie |
def ellipticity(self):
'''
Most meaningful for bond critical points,
can be physically interpreted as e.g. degree
of pi-bonding in organic molecules. Consult
literature for more information.
:return:
'''
eig = np.linalg.eig(self.field_hessian)
eig.... | Most meaningful for bond critical points,
can be physically interpreted as e.g. degree
of pi-bonding in organic molecules. Consult
literature for more information.
:return: |
def getParameters(self, emailAddress):
"""
Return a C{list} of one L{LiveForm} parameter for editing an
L{EmailAddress}.
@type emailAddress: L{EmailAddress} or C{NoneType}
@param emailAddress: If not C{None}, an existing contact item from
which to get the email addre... | Return a C{list} of one L{LiveForm} parameter for editing an
L{EmailAddress}.
@type emailAddress: L{EmailAddress} or C{NoneType}
@param emailAddress: If not C{None}, an existing contact item from
which to get the email address default value.
@rtype: C{list}
@return:... |
def remove_service(self, service):
"""Removes the service passed in from the services offered by the
current Template. If the Analysis Service passed in is not assigned to
this Analysis Template, returns False.
:param service: the service to be removed from this AR Template
:type... | Removes the service passed in from the services offered by the
current Template. If the Analysis Service passed in is not assigned to
this Analysis Template, returns False.
:param service: the service to be removed from this AR Template
:type service: AnalysisService
:return: Tru... |
def _get_on_trixel_sources_from_database_query(
self):
"""*generate the mysql query before executing it*
"""
self.log.debug(
'completed the ````_get_on_trixel_sources_from_database_query`` method')
tableName = self.tableName
raCol = self.raCol
dec... | *generate the mysql query before executing it* |
def purge_bucket(context, provider, **kwargs):
"""Delete objects in bucket."""
session = get_session(provider.region)
if kwargs.get('bucket_name'):
bucket_name = kwargs['bucket_name']
else:
if kwargs.get('bucket_output_lookup'):
value = kwargs['bucket_output_lookup']
... | Delete objects in bucket. |
def code_to_session(self, js_code):
"""
登录凭证校验。通过 wx.login() 接口获得临时登录凭证 code 后传到开发者服务器调用此接口完成登录流程。更多使用方法详见 小程序登录
详情请参考
https://developers.weixin.qq.com/miniprogram/dev/api/code2Session.html
:param js_code:
:return:
"""
return self._get(
'sns/j... | 登录凭证校验。通过 wx.login() 接口获得临时登录凭证 code 后传到开发者服务器调用此接口完成登录流程。更多使用方法详见 小程序登录
详情请参考
https://developers.weixin.qq.com/miniprogram/dev/api/code2Session.html
:param js_code:
:return: |
def sample(self, bqm, chain_strength=1.0, chain_break_fraction=True, **parameters):
"""Sample from the provided binary quadratic model.
Also set parameters for handling a chain, the set of vertices in a target graph that
represents a source-graph vertex; when a D-Wave system is the sampler, it ... | Sample from the provided binary quadratic model.
Also set parameters for handling a chain, the set of vertices in a target graph that
represents a source-graph vertex; when a D-Wave system is the sampler, it is a set
of qubits that together represent a variable of the binary quadratic model bei... |
def register(self, model, index_cls=AlgoliaIndex, auto_indexing=None):
"""
Registers the given model with Algolia engine.
If the given model is already registered with Algolia engine, a
RegistrationError will be raised.
"""
# Check for existing registration.
if s... | Registers the given model with Algolia engine.
If the given model is already registered with Algolia engine, a
RegistrationError will be raised. |
def get_homepath(self, ignore_session=False, force_cookieless=False):
"""
:param ignore_session: Ignore the cookieless session_id that should be put in the URL
:param force_cookieless: Force the cookieless session; the link will include the session_creator if needed.
"""
if not i... | :param ignore_session: Ignore the cookieless session_id that should be put in the URL
:param force_cookieless: Force the cookieless session; the link will include the session_creator if needed. |
def hilite(s, ok=True, bold=False):
"""Return an highlighted version of 'string'."""
if not term_supports_colors():
return s
attr = []
if ok is None: # no color
pass
elif ok: # green
attr.append('32')
else: # red
attr.append('31')
if bold:
attr.ap... | Return an highlighted version of 'string'. |
def overwrite(self, bs, pos=None):
"""Overwrite with bs at bit position pos.
bs -- The bitstring to overwrite with.
pos -- The bit position to begin overwriting from.
Raises ValueError if pos < 0 or pos + bs.len > self.len
"""
bs = Bits(bs)
if not bs.len:
... | Overwrite with bs at bit position pos.
bs -- The bitstring to overwrite with.
pos -- The bit position to begin overwriting from.
Raises ValueError if pos < 0 or pos + bs.len > self.len |
def managed(name,
source=None,
source_hash='',
source_hash_name=None,
keep_source=True,
user=None,
group=None,
mode=None,
attrs=None,
template=None,
makedirs=False,
dir_mode=None,
... | r'''
Manage a given file, this function allows for a file to be downloaded from
the salt master and potentially run through a templating system.
name
The location of the file to manage, as an absolute path.
source
The source file to download to the minion, this source file can be
... |
def size(default_chunk_size, response_time_max, response_time_actual):
"""Determines the chunk size based on response times."""
if response_time_actual == 0:
response_time_actual = 1
scale = 1 / (response_time_actual / response_time_max)
size = int(default_chunk_size * scale)
return min(max(... | Determines the chunk size based on response times. |
def name(random=random, *args, **kwargs):
"""
Return someone's name
>>> mock_random.seed(0)
>>> name(random=mock_random)
'carl poopbritches'
>>> mock_random.seed(7)
>>> name(random=mock_random, capitalize=True)
'Duke Testy Wonderful'
"""
if random.choice([True, True, True, Fals... | Return someone's name
>>> mock_random.seed(0)
>>> name(random=mock_random)
'carl poopbritches'
>>> mock_random.seed(7)
>>> name(random=mock_random, capitalize=True)
'Duke Testy Wonderful' |
def fit(self, validation_data=None, **kwargs):
"""
Args:
validation_data (DataFlow or InputSource): to be used for inference.
The inference callback is added as the first in the callback list.
If you need to use it in a different order, please write it in the ... | Args:
validation_data (DataFlow or InputSource): to be used for inference.
The inference callback is added as the first in the callback list.
If you need to use it in a different order, please write it in the callback list manually.
kwargs: same arguments as :meth... |
def babel_extract(config, input, output, target, keywords):
"""
Babel, Extracts and updates all messages marked for translation
"""
click.echo(
click.style(
"Starting Extractions config:{0} input:{1} output:{2} keywords:{3}".format(
config, input, output, keywords... | Babel, Extracts and updates all messages marked for translation |
def multiply(dists):
'''
multiplies a list of Distribution objects
'''
if not all([isinstance(k, Distribution) for k in dists]):
raise NotImplementedError("Can only multiply Distribution objects")
n_delta = np.sum([k.is_delta for k in dists])
min_width = np.... | multiplies a list of Distribution objects |
def elevations(self):
"""Retrieves elevations/offsets from the output response
Returns:
elevations/offsets (namedtuple): A named tuple of list of
elevations/offsets
"""
resources = self.get_resource()
elevations = namedtuple('elevations_data', 'elevations... | Retrieves elevations/offsets from the output response
Returns:
elevations/offsets (namedtuple): A named tuple of list of
elevations/offsets |
def download_needed(self, response, outfile, quiet=True):
""" determine if a download is needed based on timestamp. Return True
if needed (remote is newer) or False if local is newest.
Parameters
==========
response: the response from the API
outfile:... | determine if a download is needed based on timestamp. Return True
if needed (remote is newer) or False if local is newest.
Parameters
==========
response: the response from the API
outfile: the output file to write to
quiet: suppress verbose outpu... |
def add_arguments(self, parser):
"""Adds the arguments for the firmware command.
Args:
self (FirmwareCommand): the ``FirmwareCommand`` instance
parser (argparse.ArgumentParser): parser to add the commands to
Returns:
``None``
"""
group = parser.add... | Adds the arguments for the firmware command.
Args:
self (FirmwareCommand): the ``FirmwareCommand`` instance
parser (argparse.ArgumentParser): parser to add the commands to
Returns:
``None`` |
def stop_instance(self, instance_id):
"""Stops the instance gracefully.
:param str instance_id: instance identifier
:return: None
"""
self._restore_from_storage(instance_id)
if self._start_failed:
raise Exception('stop_instance for node %s: failing due to'
... | Stops the instance gracefully.
:param str instance_id: instance identifier
:return: None |
def _update(self, rules: list):
"""
Updates the given rules and stores
them on the router.
"""
self._rules = rules
to_store = '\n'.join(
rule.config_string
for rule in rules
)
sftp_connection = self._sftp_connection
with sft... | Updates the given rules and stores
them on the router. |
def convert(self, request, response, data):
"""
Performs the desired Conversion.
:param request: The webob Request object describing the
request.
:param response: The webob Response object describing the
response.
:param data: The... | Performs the desired Conversion.
:param request: The webob Request object describing the
request.
:param response: The webob Response object describing the
response.
:param data: The data dictionary returned by the prepare()
... |
def resize_thumbnail(image, size, resample=Image.LANCZOS):
"""
Resize image according to size.
image: a Pillow image instance
size: a list of two integers [width, height]
"""
img_format = image.format
img = image.copy()
img.thumbnail((size[0], size[1]), resample)
img.form... | Resize image according to size.
image: a Pillow image instance
size: a list of two integers [width, height] |
def direct_horizontal_irradiance(self):
"""Returns the direct irradiance on a horizontal surface at each timestep.
Note that this is different from the direct_normal_irradiance needed
to construct a Wea, which is NORMAL and not HORIZONTAL."""
analysis_period = AnalysisPeriod(timestep=se... | Returns the direct irradiance on a horizontal surface at each timestep.
Note that this is different from the direct_normal_irradiance needed
to construct a Wea, which is NORMAL and not HORIZONTAL. |
def get_variance(seq):
"""
Batch variance calculation.
"""
m = get_mean(seq)
return sum((v-m)**2 for v in seq)/float(len(seq)) | Batch variance calculation. |
def roc_auc_xlim(x_bla, y_bla, xlim=0.1):
"""
Computes the ROC Area Under Curve until a certain FPR value.
Parameters
----------
fg_vals : array_like
list of values for positive set
bg_vals : array_like
list of values for negative set
xlim : float, optional
FPR val... | Computes the ROC Area Under Curve until a certain FPR value.
Parameters
----------
fg_vals : array_like
list of values for positive set
bg_vals : array_like
list of values for negative set
xlim : float, optional
FPR value
Returns
-------
score : float
... |
def bag(directory, mets_basename, dest, identifier, in_place, manifestation_depth, mets, base_version_checksum, tag_file, skip_zip, processes):
"""
Bag workspace as OCRD-ZIP at DEST
"""
resolver = Resolver()
workspace = Workspace(resolver, directory=directory, mets_basename=mets_basename)
worksp... | Bag workspace as OCRD-ZIP at DEST |
def find_expectations(self,
expectation_type=None,
column=None,
expectation_kwargs=None,
discard_result_format_kwargs=True,
discard_include_configs_kwargs=True,
dis... | Find matching expectations within _expectation_config.
Args:
expectation_type=None : The name of the expectation type to be matched.
column=None : The name of the column to be matched.
expectation_kwargs=None : A dictionary... |
def _reset_suffix_links(self):
'''
Reset all suffix links in all nodes in this trie.
'''
self._suffix_links_set = False
for current, _parent in self.dfs():
current.suffix = None
current.dict_suffix = None
current.longest_prefix = None | Reset all suffix links in all nodes in this trie. |
def load(self, profile_args):
"""Load provided CLI Args.
Args:
args (dict): Dictionary of args in key/value format.
"""
for key, value in profile_args.items():
self.add(key, value) | Load provided CLI Args.
Args:
args (dict): Dictionary of args in key/value format. |
def next_task(self, item, raise_exceptions=None, **kwargs):
"""Deserializes all transactions for this batch and
archives the file.
"""
filename = os.path.basename(item)
batch = self.get_batch(filename)
tx_deserializer = self.tx_deserializer_cls(
allow_self=sel... | Deserializes all transactions for this batch and
archives the file. |
def as_languages(self):
"""
Get the Language objects associated with this queryset of CultureCodes as a list.
The Language objects will have country and culturecode set.
:return:
"""
langs = []
for culture_code in self.select_related('language', 'country').all():
... | Get the Language objects associated with this queryset of CultureCodes as a list.
The Language objects will have country and culturecode set.
:return: |
def get_prev_step(self, step=None):
"""
Returns the previous step before the given `step`. If there are no
steps available, None will be returned. If the `step` argument is
None, the current step will be determined automatically.
"""
if step is None:
step = se... | Returns the previous step before the given `step`. If there are no
steps available, None will be returned. If the `step` argument is
None, the current step will be determined automatically. |
def variablename(var):
"""
Returns the string of a variable name.
"""
s=[tpl[0] for tpl in itertools.ifilter(lambda x: var is x[1], globals().items())]
s=s[0].upper()
return s | Returns the string of a variable name. |
def competition_download_leaderboard(self, id, **kwargs): # noqa: E501
"""Download competition leaderboard # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.competition_download_leade... | Download competition leaderboard # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.competition_download_leaderboard(id, async_req=True)
>>> result = thread.get()
:param async_... |
def havdalah(self):
"""Return the time for havdalah, or None if not applicable.
If havdalah_offset is 0, uses the time for three_stars. Otherwise,
adds the offset to the time of sunset and uses that.
If it's currently a multi-day YomTov, and the end of the stretch is
after today... | Return the time for havdalah, or None if not applicable.
If havdalah_offset is 0, uses the time for three_stars. Otherwise,
adds the offset to the time of sunset and uses that.
If it's currently a multi-day YomTov, and the end of the stretch is
after today, the havdalah value is defined... |
def extend_substation(grid, critical_stations, grid_level):
"""
Reinforce MV or LV substation by exchanging the existing trafo and
installing a parallel one if necessary.
First, all available transformers in a `critical_stations` are extended to
maximum power. If this does not solve all present iss... | Reinforce MV or LV substation by exchanging the existing trafo and
installing a parallel one if necessary.
First, all available transformers in a `critical_stations` are extended to
maximum power. If this does not solve all present issues, additional
transformers are build.
Parameters
--------... |
def add_macd(self,fast_period=12,slow_period=26,signal_period=9,column=None,
name='',str=None,**kwargs):
"""
Add Moving Average Convergence Divergence (MACD) study to QuantFigure.studies
Parameters:
fast_period : int
MACD Fast Period
slow_period : int
MACD Slow Period
signal_period : int
... | Add Moving Average Convergence Divergence (MACD) study to QuantFigure.studies
Parameters:
fast_period : int
MACD Fast Period
slow_period : int
MACD Slow Period
signal_period : int
MACD Signal Period
column :string
Defines the data column name that contains the
data over which the stu... |
def _load_manifest_interpret_source(manifest, source, username=None, password=None, verify_certificate=True, do_inherit=True):
""" Interpret the <source>, and load the results into <manifest> """
try:
if isinstance(source, string_types):
if source.startswith("http"):
# if man... | Interpret the <source>, and load the results into <manifest> |
def parse_signature_type_comment(type_comment):
"""Parse the fugly signature type comment into AST nodes.
Caveats: ASTifying **kwargs is impossible with the current grammar so we
hack it into unary subtraction (to differentiate from Starred in vararg).
For example from:
"(str, int, *int, **Any) ->... | Parse the fugly signature type comment into AST nodes.
Caveats: ASTifying **kwargs is impossible with the current grammar so we
hack it into unary subtraction (to differentiate from Starred in vararg).
For example from:
"(str, int, *int, **Any) -> 'SomeReturnType'"
To:
([ast3.Name, ast.Name, ... |
def RegisterMessageHandler(self, handler, lease_time, limit=1000):
"""Leases a number of message handler requests up to the indicated limit."""
self.UnregisterMessageHandler()
self.handler_stop = False
self.handler_thread = threading.Thread(
name="message_handler",
target=self._MessageH... | Leases a number of message handler requests up to the indicated limit. |
def _complete_statement(self, line: str) -> Statement:
"""Keep accepting lines of input until the command is complete.
There is some pretty hacky code here to handle some quirks of
self.pseudo_raw_input(). It returns a literal 'eof' if the input
pipe runs out. We can't refactor it becau... | Keep accepting lines of input until the command is complete.
There is some pretty hacky code here to handle some quirks of
self.pseudo_raw_input(). It returns a literal 'eof' if the input
pipe runs out. We can't refactor it because we need to retain
backwards compatibility with the stan... |
def aging_csv(request):
"""This view generates a csv output file of all animal data for use in aging analysis.
The view writes to a csv table the animal, strain, genotype, age (in days), and cause of death."""
animal_list = Animal.objects.all()
response = HttpResponse(content_type='text/csv')
respons... | This view generates a csv output file of all animal data for use in aging analysis.
The view writes to a csv table the animal, strain, genotype, age (in days), and cause of death. |
def create_graph_from_data(self, data, **kwargs):
"""Apply causal discovery on observational data using CCDr.
Args:
data (pandas.DataFrame): DataFrame containing the data
Returns:
networkx.DiGraph: Solution given by the CCDR algorithm.
"""
# Building set... | Apply causal discovery on observational data using CCDr.
Args:
data (pandas.DataFrame): DataFrame containing the data
Returns:
networkx.DiGraph: Solution given by the CCDR algorithm. |
def deploy_to(self, displays=None, exclude=[], lock=[]):
"""
Deploys page to listed display (specify with display). If display is None,
deploy to all display. Can specify exclude for which display to exclude.
This overwrites the first argument.
"""
if displays is None:
... | Deploys page to listed display (specify with display). If display is None,
deploy to all display. Can specify exclude for which display to exclude.
This overwrites the first argument. |
def standard_parser(cls, verbose=True,
interactive=False,
no_interactive=False,
simulate=False,
quiet=False,
overwrite=False):
"""
Create a standard ``OptionParser`` instance.
... | Create a standard ``OptionParser`` instance.
Typically used like::
class MyCommand(Command):
parser = Command.standard_parser()
Subclasses may redefine ``standard_parser``, so use the
nearest superclass's class method. |
def main():
"""
main method
"""
# initialize parser
usage = "usage: %prog [-u USER] [-p PASSWORD] [-t TITLE] [-s selection] url"
parser = OptionParser(usage, version="%prog "+instapaperlib.__version__)
parser.add_option("-u", "--user", action="store", dest="user",
m... | main method |
def open(self, user=None, repo=None):
'''Open the URL of a repository in the user's browser'''
webbrowser.open(self.format_path(repo, namespace=user, rw=False)) | Open the URL of a repository in the user's browser |
def parse(self, ioc_obj):
"""
parses an ioc to populate self.iocs and self.ioc_name
:param ioc_obj:
:return:
"""
if ioc_obj is None:
return
iocid = ioc_obj.iocid
try:
sd = ioc_obj.metadata.xpath('.//short_description/text()')[0]
... | parses an ioc to populate self.iocs and self.ioc_name
:param ioc_obj:
:return: |
def default_cx(self):
"""
Native width of this image, calculated from its width in pixels and
horizontal dots per inch (dpi).
"""
px_width = self.image.px_width
horz_dpi = self.image.horz_dpi
width_in_inches = px_width / horz_dpi
return Inches(width_in_inc... | Native width of this image, calculated from its width in pixels and
horizontal dots per inch (dpi). |
def options(self):
""" Returns the options specified as argument to this command.
"""
if self._options is None:
self._options = Option.View(self)
return self._options | Returns the options specified as argument to this command. |
def addAttachment(self, filepath):
"""Upload attachment to a workitem
:param filepath: the attachment file path
:return: the :class:`rtcclient.models.Attachment` object
:rtype: rtcclient.models.Attachment
"""
proj_id = self.contextId
fa = self.rtc_obj.getFiledA... | Upload attachment to a workitem
:param filepath: the attachment file path
:return: the :class:`rtcclient.models.Attachment` object
:rtype: rtcclient.models.Attachment |
def onset_detect(y=None, sr=22050, onset_envelope=None, hop_length=512,
backtrack=False, energy=None,
units='frames', **kwargs):
"""Basic onset detector. Locate note onset events by picking peaks in an
onset strength envelope.
The `peak_pick` parameters were chosen by lar... | Basic onset detector. Locate note onset events by picking peaks in an
onset strength envelope.
The `peak_pick` parameters were chosen by large-scale hyper-parameter
optimization over the dataset provided by [1]_.
.. [1] https://github.com/CPJKU/onset_db
Parameters
----------
y ... |
def download(self, url, destination_path):
"""Download url to given path.
Returns Promise -> sha256 of downloaded file.
Args:
url: address of resource to download.
destination_path: `str`, path to directory where to download the resource.
Returns:
Promise obj -> (`str`, int): (downl... | Download url to given path.
Returns Promise -> sha256 of downloaded file.
Args:
url: address of resource to download.
destination_path: `str`, path to directory where to download the resource.
Returns:
Promise obj -> (`str`, int): (downloaded object checksum, size in bytes). |
def get_cli_static_event_returns(
self,
jid,
minions,
timeout=None,
tgt='*',
tgt_type='glob',
verbose=False,
show_timeout=False,
show_jid=False):
'''
Get the returns for the command line interface... | Get the returns for the command line interface via the event system |
def _handle_wrong_field(cls, field_name, field_type):
"""Raise an exception whenever an invalid attribute with
the given name was attempted to be set to or retrieved from
this model class.
Assumes that the given field is invalid, without making any checks.
Also adds an entry to... | Raise an exception whenever an invalid attribute with
the given name was attempted to be set to or retrieved from
this model class.
Assumes that the given field is invalid, without making any checks.
Also adds an entry to the logs. |
def _apply_section(self, section, hosts):
"""
Recursively find all the hosts that belong in or under a section and
add the section's group name and variables to every host.
"""
# Add the current group name to each host that this section covers.
if section['name'] is not N... | Recursively find all the hosts that belong in or under a section and
add the section's group name and variables to every host. |
def _unsorted_set(df, label, **kwargs):
"""
Returns a set as inp string with unsorted option.
"""
out = "*NSET, NSET={0}, UNSORTED\n".format(label)
labels = df.index.values
return out + argiope.utils.list_to_string(labels, **kwargs) | Returns a set as inp string with unsorted option. |
def _get_delta(self, now, then):
"""
Internal helper which will return a ``datetime.timedelta``
representing the time between ``now`` and ``then``. Assumes
``now`` is a ``datetime.date`` or ``datetime.datetime`` later
than ``then``.
If ``now`` and ``then`` are not of the... | Internal helper which will return a ``datetime.timedelta``
representing the time between ``now`` and ``then``. Assumes
``now`` is a ``datetime.date`` or ``datetime.datetime`` later
than ``then``.
If ``now`` and ``then`` are not of the same type due to one of
them being a ``datet... |
def sff(args):
"""
%prog sff sffiles
Convert reads formatted as 454 SFF file, and convert to CA frg file.
Turn --nodedup on if another deduplication mechanism is used (e.g.
CD-HIT-454). See assembly.sff.deduplicate().
"""
p = OptionParser(sff.__doc__)
p.add_option("--prefix", dest="pref... | %prog sff sffiles
Convert reads formatted as 454 SFF file, and convert to CA frg file.
Turn --nodedup on if another deduplication mechanism is used (e.g.
CD-HIT-454). See assembly.sff.deduplicate(). |
def read_meminfo():
"""
Returns system memory usage information.
:returns: The system memory usage.
:rtype: dict
"""
data = {}
with open("/proc/meminfo", "rb") as meminfo_file:
for row in meminfo_file:
fields = row.split()
# Example content:
# Mem... | Returns system memory usage information.
:returns: The system memory usage.
:rtype: dict |
def register(self, event_type: Union[Type, _ellipsis], callback: Callable[[], Any]):
"""
Register a callback to be applied to an event at time of publishing.
Primarily to be used by subsystems.
The callback will receive the event. Your code should modify the event
in place. It ... | Register a callback to be applied to an event at time of publishing.
Primarily to be used by subsystems.
The callback will receive the event. Your code should modify the event
in place. It does not need to return it.
:param event_type: The class of an event.
:param callback: A... |
def print_invalid_chars(invalid_chars, vargs):
"""
Print Unicode characterss that are not IPA valid,
if requested by the user.
:param list invalid_chars: a list (possibly empty) of invalid Unicode characters
:param dict vargs: the command line parameters
"""
if len(invalid_chars) > 0:
... | Print Unicode characterss that are not IPA valid,
if requested by the user.
:param list invalid_chars: a list (possibly empty) of invalid Unicode characters
:param dict vargs: the command line parameters |
def drain(iterable):
"""
Helper method that empties an iterable as it is iterated over.
Works for:
* ``dict``
* ``collections.deque``
* ``list``
* ``set``
"""
if getattr(iterable, "popleft", False):
def next_item(coll):
return coll.popleft()
elif getattr(ite... | Helper method that empties an iterable as it is iterated over.
Works for:
* ``dict``
* ``collections.deque``
* ``list``
* ``set`` |
def submit_msql_object_query(object_query, client=None):
"""Submit `object_query` to MemberSuite, returning
.models.MemberSuiteObjects.
So this is a converter from MSQL to .models.MemberSuiteObjects.
Returns query results as a list of MemberSuiteObjects.
"""
client = client or get_new_client(... | Submit `object_query` to MemberSuite, returning
.models.MemberSuiteObjects.
So this is a converter from MSQL to .models.MemberSuiteObjects.
Returns query results as a list of MemberSuiteObjects. |
def push_intent(self, intent):
"""Registers or updates an intent and returns the intent_json with an ID"""
if intent.id:
print('Updating {} intent'.format(intent.name))
self.update(intent)
else:
print('Registering {} intent'.format(intent.name))
in... | Registers or updates an intent and returns the intent_json with an ID |
def bookmark(ctx):
"""Bookmark build job.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\b
```bash
$ polyaxon build bookmark
```
\b
```bash
$ polyaxon build -b 2 bookmark
```
"""
user, project_name, _build = get_build_or_local(ctx.obj.get('project'),... | Bookmark build job.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\b
```bash
$ polyaxon build bookmark
```
\b
```bash
$ polyaxon build -b 2 bookmark
``` |
def _parse_interfaces(interface_files=None):
'''
Parse /etc/network/interfaces and return current configured interfaces
'''
if interface_files is None:
interface_files = []
# Add this later.
if os.path.exists(_DEB_NETWORK_DIR):
interface_files += ['{0}/{1}'.format(_DE... | Parse /etc/network/interfaces and return current configured interfaces |
def blast_pdb(seq, outfile='', outdir='', evalue=0.0001, seq_ident_cutoff=0.0, link=False, force_rerun=False):
"""Returns a list of BLAST hits of a sequence to available structures in the PDB.
Args:
seq (str): Your sequence, in string format
outfile (str): Name of output file
outdir (st... | Returns a list of BLAST hits of a sequence to available structures in the PDB.
Args:
seq (str): Your sequence, in string format
outfile (str): Name of output file
outdir (str, optional): Path to output directory. Default is the current directory.
evalue (float, optional): Cutoff for... |
def where_before_entry(query, ref):
    """ Generate a where clause for prior entries

    Narrows `query` to entries that sort strictly before `ref`: an earlier
    local_date, or the same local_date with a smaller id (id acts as the
    tie-breaker for entries on the same day).

    query -- the entry query to filter (presumably a Pony ORM query — verify)
    ref -- The entry of reference
    """
    # NOTE(review): orm.select() decompiles/parses this generator expression
    # itself, so its exact textual form matters — do not refactor the
    # condition into a helper predicate.
    return orm.select(
        e for e in query
        if e.local_date < ref.local_date or
        (e.local_date == ref.local_date and e.id < ref.id)
    )
ref -- The entry of reference |
def has_friends(self, flt=FriendFilter.ALL):
    """Tell whether the user has any friends matching the given filter.

    :param int flt: Filter value from FriendFilter; flags can be combined
        with `|`.
    :rtype: bool
    """
    # Delegate straight to the underlying interface for this user.
    interface = self._iface
    return interface.get_has_friend(self.user_id, flt)
:param int flt: Filter value from FriendFilter. Filters can be combined with `|`.
:rtype: bool |
def run_step(context):
"""Run shell command without shell interpolation.
Context is a dictionary or dictionary-like.
Context must contain the following keys:
cmd: <<cmd string>> (command + args to execute.)
OR, as a dict
cmd:
run: str. mandatory. <<cmd string>> command + args to execu... | Run shell command without shell interpolation.
Context is a dictionary or dictionary-like.
Context must contain the following keys:
cmd: <<cmd string>> (command + args to execute.)
OR, as a dict
cmd:
run: str. mandatory. <<cmd string>> command + args to execute.
save: bool. defaul... |
def put(self, deviceId, measurementId):
"""
Schedules a new measurement at the specified time.
:param deviceId: the device to measure.
:param measurementId: the name of the measurement.
:return: 200 if it was scheduled, 400 if the device is busy, 500 if the device is bad.
... | Schedules a new measurement at the specified time.
:param deviceId: the device to measure.
:param measurementId: the name of the measurement.
:return: 200 if it was scheduled, 400 if the device is busy, 500 if the device is bad. |
def atlasdb_num_peers( con=None, path=None ):
"""
How many peers are there in the db?
"""
with AtlasDBOpen(con=con, path=path) as dbcon:
sql = "SELECT MAX(peer_index) FROM peers;"
args = ()
cur = dbcon.cursor()
res = atlasdb_query_execute( cur, sql, args )
ret ... | How many peers are there in the db? |
def object(self, infotype, key):
"""
get the key's info stats
:param name: str the name of the redis key
:param subcommand: REFCOUNT | ENCODING | IDLETIME
:return: Future()
"""
with self.pipe as pipe:
return pipe.object(infotype, self.redis_key(ke... | get the key's info stats
:param name: str the name of the redis key
:param subcommand: REFCOUNT | ENCODING | IDLETIME
:return: Future() |
def compute_residuals(self):
    """Compute residuals and stopping thresholds.

    Returns a ``(residual, tolerance)`` pair. When the 'AutoStop' option is
    enabled, the tolerance is derived from ``tau0`` and the iteration
    counter ``k`` instead of the fixed 'RelStopTol' option.
    """
    residual = self.rsdl()
    # Read both options in the same order as before; the fixed tolerance
    # is the fallback when automatic stopping is disabled.
    fixed_tol = self.opt['RelStopTol']
    auto_enabled = self.opt['AutoStop', 'Enabled']
    tolerance = self.tau0 / (1. + self.k) if auto_enabled else fixed_tol
    return residual, tolerance
def lane_stats_table(self):
""" Return a table with overview stats for each bcl2fastq lane for a single flow cell """
headers = OrderedDict()
headers['total_yield'] = {
'title': '{} Total Yield'.format(config.base_count_prefix),
'description': 'Number of bases ({})'.forma... | Return a table with overview stats for each bcl2fastq lane for a single flow cell |
def airwires(board, showgui=0):
'search for airwires in eagle board'
board = Path(board).expand().abspath()
file_out = tempfile.NamedTemporaryFile(suffix='.txt', delete=0)
file_out.close()
ulp = ulp_templ.replace('FILE_NAME', file_out.name)
file_ulp = tempfile.NamedTemporaryFile(suffix='.ulp'... | search for airwires in eagle board |
def ARC4_encrypt(key, data, skip=0):
"""Encrypt data @data with key @key, skipping @skip first bytes of the
keystream"""
algorithm = algorithms.ARC4(key)
cipher = Cipher(algorithm, mode=None, backend=default_backend())
encryptor = cipher.encryptor()
if skip:
encryptor.update(b"\x00" * s... | Encrypt data @data with key @key, skipping @skip first bytes of the
keystream |
def send_element(self, element):
"""
Send an element via the transport.
"""
with self.lock:
if self._eof or self._socket is None or not self._serializer:
logger.debug("Dropping element: {0}".format(
element_to_un... | Send an element via the transport. |
def cowsay_output(message):
""" Invoke a shell command to print cowsay output. Primary replacement for
os.system calls.
"""
command = 'cowsay "%s"' % message
ret = subprocess.Popen(
command, shell=True, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=... | Invoke a shell command to print cowsay output. Primary replacement for
os.system calls. |
def update_from_dict(self, data_dict):
"""
:param data_dict: Dictionary to be mapped into object attributes
:type data_dict: dict
:return:
"""
for k, v in data_dict.items():
setattr(self, k, v)
if "item_queue_id" in data_dict:
self.id = da... | :param data_dict: Dictionary to be mapped into object attributes
:type data_dict: dict
:return: |
def modify(self, max_time_out_of_sync=None, name=None,
hourly_snap_replication_policy=None,
daily_snap_replication_policy=None,
src_spa_interface=None, src_spb_interface=None,
dst_spa_interface=None, dst_spb_interface=None):
"""
Modifies proper... | Modifies properties of a replication session.
:param max_time_out_of_sync: same as the one in `create` method.
:param name: same as the one in `create` method.
:param hourly_snap_replication_policy: same as the one in `create`
method.
:param daily_snap_replication_policy: sa... |
def path_expand(text):
""" returns a string with expanded variable.
:param text: the path to be expanded, which can include ~ and environment $ variables
:param text: string
"""
result = os.path.expandvars(os.path.expanduser(text))
# template = Template(text)
# result = template.substitut... | returns a string with expanded variable.
:param text: the path to be expanded, which can include ~ and environment $ variables
:param text: string |
def predict(self, nSteps):
"""
This function gives the future predictions for <nSteps> timesteps starting
from the current TM state. The TM is returned to its original state at the
end before returning.
1. We save the TM state.
2. Loop for nSteps
a. Turn-on with lateral support from the... | This function gives the future predictions for <nSteps> timesteps starting
from the current TM state. The TM is returned to its original state at the
end before returning.
1. We save the TM state.
2. Loop for nSteps
a. Turn-on with lateral support from the current active cells
b. Set the... |
def et_node_to_string(et_node, default=''):
"""Simple method to get stripped text from node or ``default`` string if None is given.
:param et_node: Element or None
:param default: string returned if None is given, default ``''``
:type et_node: xml.etree.ElementTree.Element, None
:type default: str
... | Simple method to get stripped text from node or ``default`` string if None is given.
:param et_node: Element or None
:param default: string returned if None is given, default ``''``
:type et_node: xml.etree.ElementTree.Element, None
:type default: str
:return: text from node or default
:rtype: ... |
def import_end_event_to_graph(diagram_graph, process_id, process_attributes, element):
"""
Adds to graph the new element that represents BPMN end event.
End event inherits sequence of eventDefinitionRef from Event type.
Separate methods for each event type are required since each of them... | Adds to graph the new element that represents BPMN end event.
End event inherits sequence of eventDefinitionRef from Event type.
Separate methods for each event type are required since each of them has different variants
(Message, Error, Signal etc.).
:param diagram_graph: NetworkX grap... |
def check(self):
"""
Checks to see if Spark worker and HDFS datanode are still running.
"""
status = _checkContainerStatus(self.sparkContainerID,
self.hdfsContainerID,
sparkNoun='worker',
... | Checks to see if Spark worker and HDFS datanode are still running. |
def create_fc_template(self, out_path, out_name):
"""creates a featureclass template on local disk"""
fields = self.fields
objectIdField = self.objectIdField
geomType = self.geometryType
wkid = self.parentLayer.spatialReference['wkid']
return create_feature_class(out_path... | creates a featureclass template on local disk |
def suggest(alias, max=3, cutoff=0.5):
    """Suggest up to `max` known aliases that closely resemble `alias`.

    Candidates come from the module-level `matchers` mapping; similarity is
    judged by difflib's ratio against `cutoff`.
    """
    known = matchers.keys()
    return get_close_matches(alias, known, n=max, cutoff=cutoff)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.