code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def run(self, **kwargs):
"""
Does the magic!
"""
logger.info('UpdateLocationsIfNecessaryTask was called')
# read last ip count
try:
with open(app_settings.IP_ASSEMBLER_IP_CHANGED_FILE, 'r') as f:
content_list = f.readlines()
if... | Does the magic! |
def info_post_request(self, node, info):
"""Run when a request to create an info is complete."""
for agent in node.neighbors():
node.transmit(what=info, to_whom=agent) | Run when a request to create an info is complete. |
def _parse_args():
"""Parse and return command line arguments."""
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=_CliFormatter)
parser.add_argument('-v', '--verbose', action='store_true',
help='Enable verbose output.')
fb_group = parser.ad... | Parse and return command line arguments. |
def collect_segment_partitions(self):
"""Return a dict of segments partitions, keyed on the name of the parent partition
"""
from collections import defaultdict
# Group the segments by their parent partition name, which is the
# same name, but without the segment.
partit... | Return a dict of segments partitions, keyed on the name of the parent partition |
def submit(self, spec):
"""Submit a new skein application.
Parameters
----------
spec : ApplicationSpec, str, or dict
A description of the application to run. Can be an
``ApplicationSpec`` object, a path to a yaml/json file, or a
dictionary descriptio... | Submit a new skein application.
Parameters
----------
spec : ApplicationSpec, str, or dict
A description of the application to run. Can be an
``ApplicationSpec`` object, a path to a yaml/json file, or a
dictionary description of an application specification.
... |
def find_lexer_class_for_filename(_fn, code=None):
"""Get a lexer for a filename.
If multiple lexers match the filename pattern, use ``analyse_text()`` to
figure out which one is more appropriate.
Returns None if not found.
"""
matches = []
fn = basename(_fn)
for modname, name, _, file... | Get a lexer for a filename.
If multiple lexers match the filename pattern, use ``analyse_text()`` to
figure out which one is more appropriate.
Returns None if not found. |
def create_object_if_not_exists(self, alias, name=None, *args, **kwargs):
"""Constructs the type with the given alias using the given args and kwargs.
NB: aliases may be the alias' object type itself if that type is known.
:API: public
:param alias: Either the type alias or the type itself.
:type... | Constructs the type with the given alias using the given args and kwargs.
NB: aliases may be the alias' object type itself if that type is known.
:API: public
:param alias: Either the type alias or the type itself.
:type alias: string|type
:param *args: These pass through to the underlying callab... |
def _send_msg(self, header, payload):
"""send message to server"""
if self.verbose:
print('->', repr(header))
print('..', repr(payload))
assert header.payload == len(payload)
try:
sent = self.socket.send(header + payload)
except IOError as err... | send message to server |
def get_dev_details(ip_address, auth, url):
"""Takes string input of IP address to issue RESTUL call to HP IMC\n
:param ip_address: string object of dotted decimal notation of IPv4 address
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC ... | Takes string input of IP address to issue RESTUL call to HP IMC\n
:param ip_address: string object of dotted decimal notation of IPv4 address
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.a... |
def delete_user(self, username, params=None):
"""
`<https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-delete-user.html>`_
:arg username: username
:arg refresh: If `true` (the default) then refresh the affected shards
to make this operation visible ... | `<https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-delete-user.html>`_
:arg username: username
:arg refresh: If `true` (the default) then refresh the affected shards
to make this operation visible to search, if `wait_for` then wait
for a refresh to ma... |
def parse(self, paramfile):
""" Read parameter file and set parameter values.
File should have python-like syntax. Full file name needed.
"""
with open(paramfile, 'r') as f:
for line in f.readlines():
line_clean = line.rstrip('\n').split('#')[0] # trim out ... | Read parameter file and set parameter values.
File should have python-like syntax. Full file name needed. |
def write(url, content, **args):
"""Put an object into a ftp URL."""
relay = urlparse.urlparse(args.pop('relay', 'lmtp://localhot'))
try:
smtplib_SMTPS = functools.partial(smtplib.SMTP_SSL,
keyfile=args.pop('keyfile', None),
... | Put an object into a ftp URL. |
def tap_and_hold(self, xcoord, ycoord):
"""
Touch down at given coordinates.
:Args:
- xcoord: X Coordinate to touch down.
- ycoord: Y Coordinate to touch down.
"""
self._actions.append(lambda: self._driver.execute(
Command.TOUCH_DOWN, {
... | Touch down at given coordinates.
:Args:
- xcoord: X Coordinate to touch down.
- ycoord: Y Coordinate to touch down. |
def reddening(self,extval):
"""Compute the reddening for the given extinction.
.. math::
A(V) = R(V) \\; \\times \\; E(B-V)
\\textnormal{THRU} = 10^{-0.4 \\; A(V)}
.. note::
``self.litref`` is passed into ``ans.citation``.
Parameters
----... | Compute the reddening for the given extinction.
.. math::
A(V) = R(V) \\; \\times \\; E(B-V)
\\textnormal{THRU} = 10^{-0.4 \\; A(V)}
.. note::
``self.litref`` is passed into ``ans.citation``.
Parameters
----------
extval : float
... |
def _generate_author_query(self, author_name):
"""Generates a query handling specifically authors.
Notes:
The match query is generic enough to return many results. Then, using the filter clause we truncate these
so that we imitate legacy's behaviour on returning more "exact" res... | Generates a query handling specifically authors.
Notes:
The match query is generic enough to return many results. Then, using the filter clause we truncate these
so that we imitate legacy's behaviour on returning more "exact" results. E.g. Searching for `Smith, John`
shouldn... |
def bitop_or(self, dest, key, *keys):
"""Perform bitwise OR operations between strings."""
return self.execute(b'BITOP', b'OR', dest, key, *keys) | Perform bitwise OR operations between strings. |
def remove_sample(self, md5):
"""Delete a specific sample"""
# Grab the sample
record = self.database[self.sample_collection].find_one({'md5': md5})
if not record:
return
# Delete it
print 'Deleting sample: %s (%.2f MB)...' % (record['md5'], record['length']... | Delete a specific sample |
def deleteMapTable(self, name, session):
"""
Remove duplicate map table if it exists
"""
duplicate_map_tables = session.query(MapTable).filter(MapTable.mapTableFile == self).filter(MapTable.name == name).all()
for duplicate_map_table in duplicate_map_tables:
if dupli... | Remove duplicate map table if it exists |
def expect_keyword(parser, value):
# type: (Parser, str) -> Token
"""If the next token is a keyword with the given value, return that
token after advancing the parser. Otherwise, do not change the parser
state and return False."""
token = parser.token
if token.kind == TokenKind.NAME and token.va... | If the next token is a keyword with the given value, return that
token after advancing the parser. Otherwise, do not change the parser
state and return False. |
def update_service_definitions(self, service_definitions):
"""UpdateServiceDefinitions.
[Preview API]
:param :class:`<VssJsonCollectionWrapper> <azure.devops.v5_0.location.models.VssJsonCollectionWrapper>` service_definitions:
"""
content = self._serialize.body(service_definition... | UpdateServiceDefinitions.
[Preview API]
:param :class:`<VssJsonCollectionWrapper> <azure.devops.v5_0.location.models.VssJsonCollectionWrapper>` service_definitions: |
def has_nvme_ssd(system_obj):
"""Gets if the system has any drive as NVMe SSD drive
:param system_obj: The HPESystem object.
:returns True if system has SSD drives and protocol is NVMe.
"""
storage_value = False
storage_resource = _get_attribute_value_of(system_obj, 'storages')
if storage_r... | Gets if the system has any drive as NVMe SSD drive
:param system_obj: The HPESystem object.
:returns True if system has SSD drives and protocol is NVMe. |
def _helper(result,
graph,
number_edges_remaining: int,
node_blacklist: Set[BaseEntity],
invert_degrees: Optional[bool] = None,
):
"""Help build a random graph.
:type result: networkx.Graph
:type graph: networkx.Graph
"""
original_node_cou... | Help build a random graph.
:type result: networkx.Graph
:type graph: networkx.Graph |
def search(self, args):
"""
Executes a search
flickr:(credsfile),search,(arg1)=(val1),(arg2)=(val2)...
"""
kwargs = {}
for a in args:
k, v = a.split('=')
kwargs[k] = v
return self._paged_api_call(self.flickr.photos_search, kwargs) | Executes a search
flickr:(credsfile),search,(arg1)=(val1),(arg2)=(val2)... |
def process_csxml_file(self, filename, interval=None, lazy=False):
"""Processes a filehandle to MedScan csxml input into INDRA
statements.
The CSXML format consists of a top-level `<batch>` root element
containing a series of `<doc>` (document) elements, in turn containing
`<sec... | Processes a filehandle to MedScan csxml input into INDRA
statements.
The CSXML format consists of a top-level `<batch>` root element
containing a series of `<doc>` (document) elements, in turn containing
`<sec>` (section) elements, and in turn containing `<sent>` (sentence)
elem... |
def report_error(self, line_number, offset, text, check):
"""
Report an error, according to options.
"""
if options.quiet == 1 and not self.file_errors:
message(self.filename)
self.file_errors += 1
code = text[:4]
options.counters[code] = options.count... | Report an error, according to options. |
def _prep_ssh(
self,
tgt,
fun,
arg=(),
timeout=None,
tgt_type='glob',
kwarg=None,
**kwargs):
'''
Prepare the arguments
'''
opts = copy.deepcopy(self.opts)
opts.update(kwargs)
i... | Prepare the arguments |
def gradient(self):
r"""Gradient of the KL functional.
The gradient of `KullbackLeibler` with ``prior`` :math:`g` is given
as
.. math::
\nabla F(x) = 1 - \frac{g}{x}.
The gradient is not defined in points where one or more components
are non-positive.
... | r"""Gradient of the KL functional.
The gradient of `KullbackLeibler` with ``prior`` :math:`g` is given
as
.. math::
\nabla F(x) = 1 - \frac{g}{x}.
The gradient is not defined in points where one or more components
are non-positive. |
def __software_to_pkg_id(self, publisher, name, is_component, is_32bit):
'''
Determine the Package ID of a software/component using the
software/component ``publisher``, ``name``, whether its a software or a
component, and if its 32bit or 64bit archiecture.
Args:
pub... | Determine the Package ID of a software/component using the
software/component ``publisher``, ``name``, whether its a software or a
component, and if its 32bit or 64bit archiecture.
Args:
publisher (str): Publisher of the software/component.
name (str): Name of the softwa... |
def remove_range(self, start, end):
'''Remove a range by score.
'''
return self._sl.remove_range(
start, end, callback=lambda sc, value: self._dict.pop(value)) | Remove a range by score. |
def disassemble(qobj):
"""Dissasemble a qobj and return the circuits, run_config, and user header
Args:
qobj (Qobj): The input qobj object to dissasemble
Returns:
circuits (list): A list of quantum circuits
run_config (dict): The dist of the run config
user_qobj_header (dict... | Dissasemble a qobj and return the circuits, run_config, and user header
Args:
qobj (Qobj): The input qobj object to dissasemble
Returns:
circuits (list): A list of quantum circuits
run_config (dict): The dist of the run config
user_qobj_header (dict): The dict of any user header... |
def t_INDENTIFIER(t):
r'(\$?[_a-zA-Z][_a-zA-Z0-9]*)|(__[A-Z_]+__)'
if t.value in reserved:
t.type = t.value.upper()
if t.value in reservedMap:
t.value = reservedMap[t.value]
elif t.value in strStatment:
t.type = 'STATEMENT'
return t | r'(\$?[_a-zA-Z][_a-zA-Z0-9]*)|(__[A-Z_]+__) |
def updateAnomalyLikelihoods(anomalyScores,
params,
verbosity=0):
"""
Compute updated probabilities for anomalyScores using the given params.
:param anomalyScores: a list of records. Each record is a list with the
following three e... | Compute updated probabilities for anomalyScores using the given params.
:param anomalyScores: a list of records. Each record is a list with the
following three elements: [timestamp, value, score]
Example::
[datetime.datetime(2013, 8, 10, 2... |
def select_many(self, *args):
'''
Select several instances from the instance pool. Query operators such as
where_eq(), order_by() or filter functions may be passed as optional
arguments.
'''
s = apply_query_operators(self.storage, args)
if isinstance(s, QuerySet):... | Select several instances from the instance pool. Query operators such as
where_eq(), order_by() or filter functions may be passed as optional
arguments. |
def generate_kmers(seq, k=4):
"""Return a generator of all the unique substrings (k-mer or q-gram strings) within a sequence/string
Not effiicent for large k and long strings.
Doesn't form substrings that are shorter than k, only exactly k-mers
Used for algorithms like UniqTag for genome unique identi... | Return a generator of all the unique substrings (k-mer or q-gram strings) within a sequence/string
Not effiicent for large k and long strings.
Doesn't form substrings that are shorter than k, only exactly k-mers
Used for algorithms like UniqTag for genome unique identifier locality sensitive hashing.
... |
def get_locations(self, url):
"""Get valid location header values from responses.
:param url: a URL address. If a HEAD request sent to it
fails because the address has invalid schema, times out
or there is a connection error, the generator yields nothing.
:returns: valid redirec... | Get valid location header values from responses.
:param url: a URL address. If a HEAD request sent to it
fails because the address has invalid schema, times out
or there is a connection error, the generator yields nothing.
:returns: valid redirection addresses. If a request for
... |
def std(self):
"""Standard deviation
Note that is by default normalizd by n - 1
# TODO, what does pandas do for multiple grouping columns?
# Currently we are just going to use one grouping column
"""
std_expr = grizzly_impl.groupby_std(
[self.column],
... | Standard deviation
Note that is by default normalizd by n - 1
# TODO, what does pandas do for multiple grouping columns?
# Currently we are just going to use one grouping column |
def evaluate_and_log_bleu(estimator, bleu_writer, bleu_source, bleu_ref):
"""Calculate and record the BLEU score."""
subtokenizer = tokenizer.Subtokenizer(
os.path.join(FLAGS.data_dir, FLAGS.vocab_file))
uncased_score, cased_score = translate_and_compute_bleu(
estimator, subtokenizer, bleu_source, bl... | Calculate and record the BLEU score. |
def add_project_name_or_id_arg(arg_parser, required=True, help_text_suffix="manage"):
"""
Adds project name or project id argument. These two are mutually exclusive.
:param arg_parser:
:param required:
:param help_text:
:return:
"""
project_name_or_id = arg_parser.add_mutually_exclusive_... | Adds project name or project id argument. These two are mutually exclusive.
:param arg_parser:
:param required:
:param help_text:
:return: |
def _parse_jetconfig(self):
"""
Undocumented cross-compatability functionality with jetconfig
(https://github.com/shakefu/jetconfig) that is very sloppy.
"""
conf = env('JETCONFIG_ETCD', None)
if not conf:
return
import urlparse
auth = None... | Undocumented cross-compatability functionality with jetconfig
(https://github.com/shakefu/jetconfig) that is very sloppy. |
def clear_samples(self):
"""Clears the chain and blobs from memory.
"""
# store the iteration that the clear is occuring on
self._lastclear = self.niterations
self._itercounter = 0
# now clear the chain
self._sampler.reset() | Clears the chain and blobs from memory. |
def jitter_run(res, rstate=None, approx=False):
"""
Probes **statistical uncertainties** on a nested sampling run by
explicitly generating a *realization* of the prior volume associated
with each sample (dead point). Companion function to :meth:`resample_run`
and :meth:`simulate_run`.
Parameter... | Probes **statistical uncertainties** on a nested sampling run by
explicitly generating a *realization* of the prior volume associated
with each sample (dead point). Companion function to :meth:`resample_run`
and :meth:`simulate_run`.
Parameters
----------
res : :class:`~dynesty.results.Results`... |
def create_vpc(self):
"""Create a virtual private cloud on Amazon's Web services configured
for deploying JupyterHubs.
"""
self.create_stack(
self.vpc_name,
'amazon-eks-vpc.yaml',
parameters=define_parameters(
VpcBlock="10.42.0.0/16",
... | Create a virtual private cloud on Amazon's Web services configured
for deploying JupyterHubs. |
def resizeEvent(self, event):
"""Emit custom signal when the window is re-sized.
:param event: The re-sized event.
:type event: QResizeEvent
"""
self.resized.emit()
return super(MetadataConverterDialog, self).resizeEvent(event) | Emit custom signal when the window is re-sized.
:param event: The re-sized event.
:type event: QResizeEvent |
def _tp__get_typed_properties(self):
"""Return a tuple of typed attrs that can be used for comparisons.
Raises:
NotImplementedError: Raised if this class was mixed into a class
that was not created by _AnnotatedObjectMeta.
"""
try:
return tuple(ge... | Return a tuple of typed attrs that can be used for comparisons.
Raises:
NotImplementedError: Raised if this class was mixed into a class
that was not created by _AnnotatedObjectMeta. |
def save(evt, designer):
"Basic save functionality: just replaces the gui code"
# ask the user if we should save the changes:
ok = gui.confirm("Save the changes?", "GUI2PY Designer",
cancel=True, default=True)
if ok:
wx_obj = evt.GetEventObject()
w = wx_obj.obj
... | Basic save functionality: just replaces the gui code |
def get_items(self, container_id, scope=None, item_path=None, metadata=None, format=None, download_file_name=None, include_download_tickets=None, is_shallow=None):
"""GetItems.
[Preview API]
:param long container_id:
:param str scope:
:param str item_path:
:param bool met... | GetItems.
[Preview API]
:param long container_id:
:param str scope:
:param str item_path:
:param bool metadata:
:param str format:
:param str download_file_name:
:param bool include_download_tickets:
:param bool is_shallow:
:rtype: [FileCon... |
def pip_command_output(pip_args):
"""
Get output (as a string) from pip command
:param pip_args: list o pip switches to pass
:return: string with results
"""
import sys
import pip
from io import StringIO
# as pip will write to stdout we use some nasty hacks
# to substitute system... | Get output (as a string) from pip command
:param pip_args: list o pip switches to pass
:return: string with results |
def strip_praw_submission(cls, sub):
"""
Parse through a submission and return a dict with data ready to be
displayed through the terminal.
Definitions:
permalink - URL to the reddit page with submission comments.
url_full - URL that the submission points to.
... | Parse through a submission and return a dict with data ready to be
displayed through the terminal.
Definitions:
permalink - URL to the reddit page with submission comments.
url_full - URL that the submission points to.
url - URL that will be displayed on the subreddi... |
def replace(self, year=None, week=None):
"""Return a Week with either the year or week attribute value replaced"""
return self.__class__(self.year if year is None else year,
self.week if week is None else week) | Return a Week with either the year or week attribute value replaced |
def shadow_calc(data):
"""计算上下影线
Arguments:
data {DataStruct.slice} -- 输入的是一个行情切片
Returns:
up_shadow {float} -- 上影线
down_shdow {float} -- 下影线
entity {float} -- 实体部分
date {str} -- 时间
code {str} -- 代码
"""
up_shadow = abs(data.high - (max(data.open, da... | 计算上下影线
Arguments:
data {DataStruct.slice} -- 输入的是一个行情切片
Returns:
up_shadow {float} -- 上影线
down_shdow {float} -- 下影线
entity {float} -- 实体部分
date {str} -- 时间
code {str} -- 代码 |
def not_(self, value, name=''):
"""
Bitwise integer complement:
name = ~value
"""
if isinstance(value.type, types.VectorType):
rhs = values.Constant(value.type, (-1,) * value.type.count)
else:
rhs = values.Constant(value.type, -1)
retur... | Bitwise integer complement:
name = ~value |
def _load(self, url, verbose):
"""
Execute a request against the Salesking API to fetch the items
:param url: url to fetch
:return response
:raises SaleskingException with the corresponding http errors
"""
msg = u"_load url: %s" % url
self._last_query_str ... | Execute a request against the Salesking API to fetch the items
:param url: url to fetch
:return response
:raises SaleskingException with the corresponding http errors |
def value_ranges(self, value_ranges):
'''Set the types, min/max values for tunable parameters
Args:
value_ranges (list): each element defines a tunable variable in
the form "(type ('int' or 'float'), (min_val, max_val))";
initial, random values for each bee w... | Set the types, min/max values for tunable parameters
Args:
value_ranges (list): each element defines a tunable variable in
the form "(type ('int' or 'float'), (min_val, max_val))";
initial, random values for each bee will between "min_val" and
"max_va... |
def stretch_cv(x,sr,sc,interpolation=cv2.INTER_AREA):
""" Stretches image x horizontally by sr+1, and vertically by sc+1 while retaining the original image size and proportion. """
if sr==0 and sc==0: return x
r,c,*_ = x.shape
x = cv2.resize(x, None, fx=sr+1, fy=sc+1, interpolation=interpolation)
nr... | Stretches image x horizontally by sr+1, and vertically by sc+1 while retaining the original image size and proportion. |
def setConnStringForWindows():
""" Set Conn String for Windiws
Windows has a different way of forking processes, which causes the
@worker_process_init.connect signal not to work in "CeleryDbConnInit"
"""
global _dbConnectString
from peek_platform.file_config.PeekFileConfigABC import PeekFileC... | Set Conn String for Windiws
Windows has a different way of forking processes, which causes the
@worker_process_init.connect signal not to work in "CeleryDbConnInit" |
def list_jobs(self, argument_filters=None):
'''
a method to list jobs in the scheduler
:param argument_filters: list of query criteria dictionaries for class argument keys
:return: list of jobs (which satisfy the filters)
NOTE: query criteria architecture
... | a method to list jobs in the scheduler
:param argument_filters: list of query criteria dictionaries for class argument keys
:return: list of jobs (which satisfy the filters)
NOTE: query criteria architecture
each item in the argument filters list must be a dictionary
... |
def recover_devices(cls):
"""Track devices.
Creates global dict to track device names across driver invocations
and populates based on current devices configured on the system.
"""
if "_devices" in globals():
return
global _devices
confs_dir = os.pa... | Track devices.
Creates global dict to track device names across driver invocations
and populates based on current devices configured on the system. |
def get_profile(A):
"Fail-soft profile getter; if no profile is present assume none and quietly ignore."
try:
with open(os.path.expanduser(A.profile)) as I:
profile = json.load(I)
return profile
except:
return {} | Fail-soft profile getter; if no profile is present assume none and quietly ignore. |
def badge_label(self, badge):
'''Display the badge label for a given kind'''
kind = badge.kind if isinstance(badge, Badge) else badge
return self.__badges__[kind] | Display the badge label for a given kind |
def _upload(auth_http, project_id, bucket_name, file_path, object_name, acl):
"""Uploads a file to Google Cloud Storage.
Args:
auth_http: An authorized httplib2.Http instance.
project_id: The project to upload to.
bucket_name: The bucket to upload to.
file_path: Path to the file... | Uploads a file to Google Cloud Storage.
Args:
auth_http: An authorized httplib2.Http instance.
project_id: The project to upload to.
bucket_name: The bucket to upload to.
file_path: Path to the file to upload.
object_name: The name within the bucket to upload to.
acl... |
def expand_tpm(tpm):
"""Broadcast a state-by-node TPM so that singleton dimensions are expanded
over the full network.
"""
unconstrained = np.ones([2] * (tpm.ndim - 1) + [tpm.shape[-1]])
return tpm * unconstrained | Broadcast a state-by-node TPM so that singleton dimensions are expanded
over the full network. |
def _finalize_step(self):
"""Finalize simulation step after all agents have acted for the current
step.
"""
t = time.time()
if self._callback is not None:
self._callback(self.age)
t2 = time.time()
self._step_processing_time += t2 - t
self._log(... | Finalize simulation step after all agents have acted for the current
step. |
def apply_gemm(scope, input_name, output_name, container, operator_name=None, alpha=1.0, beta=1.0,
transA=0, transB=0):
"""
Applies operator `gemm <https://github.com/onnx/onnx/blob/master/docs/Operators.md#gemm>`.
"""
name = _create_name_or_use_existing_one(scope, 'Gemm', operator_name)
... | Applies operator `gemm <https://github.com/onnx/onnx/blob/master/docs/Operators.md#gemm>`. |
def terminate_process(self, idf):
""" Terminate a process by id """
try:
p = self.q.pop(idf)
p.terminate()
return p
except:
return None | Terminate a process by id |
def qsnorm(p):
"""
rational approximation for x where q(x)=d, q being the cumulative
normal distribution function. taken from Abramowitz & Stegun p. 933
|error(x)| < 4.5*10**-4
"""
d = p
if d < 0. or d > 1.:
print('d not in (1,1) ')
sys.exit()
x = 0.
if (d - 0.5) > 0:... | rational approximation for x where q(x)=d, q being the cumulative
normal distribution function. taken from Abramowitz & Stegun p. 933
|error(x)| < 4.5*10**-4 |
def CountFlowOutputPluginLogEntries(self,
client_id,
flow_id,
output_plugin_id,
with_type=None):
"""Returns number of flow output plugin log entries of a given flow... | Returns number of flow output plugin log entries of a given flow. |
def _is_valid_duration(self, inpt, metadata):
"""Checks if input is a valid Duration"""
# NEED TO ADD CHECKS FOR OTHER METADATA, LIKE MINIMUM, MAXIMUM, ETC.
from dlkit.abstract_osid.calendaring.primitives import Duration as abc_duration
if isinstance(inpt, abc_duration):
retu... | Checks if input is a valid Duration |
def md5(self, raw_output=False):
"""
Calculates the md5 hash of a given string
:example 'cfcd208495d565ef66e7dff9f98764da'
"""
res = hashlib.md5(str(self.generator.random.random()).encode('utf-8'))
if raw_output:
return res.digest()
return res.hexdiges... | Calculates the md5 hash of a given string
:example 'cfcd208495d565ef66e7dff9f98764da' |
def get_mcu_definition(self, project_file):
""" Parse project file to get mcu definition """
project_file = join(getcwd(), project_file)
uvproj_dic = xmltodict.parse(file(project_file), dict_constructor=dict)
# Generic Target, should get from Target class !
mcu = MCU_TEMPLATE
... | Parse project file to get mcu definition |
def get_all_chats(self):
"""
Fetches all chats
:return: List of chats
:rtype: list[Chat]
"""
chats = self.wapi_functions.getAllChats()
if chats:
return [factory_chat(chat, self) for chat in chats]
else:
return [] | Fetches all chats
:return: List of chats
:rtype: list[Chat] |
def notices(self):
"""pops and returns all notices
http://initd.org/psycopg/docs/connection.html#connection.notices
"""
return [self._db.notices.pop()[8:].strip() for x in range(len(self._db.notices))] | pops and returns all notices
http://initd.org/psycopg/docs/connection.html#connection.notices |
def get_config_directory():
"""Return the directory the config file is located in.
This enables us to use relative paths in config values.
"""
# avoid circular import
from .commands.stacker import Stacker
command = Stacker()
namespace = command.parse_args()
return os.path.dirname(names... | Return the directory the config file is located in.
This enables us to use relative paths in config values. |
def simple_generate_batch(cls, create, size, **kwargs):
"""Generate a batch of instances.
These instances will be either 'built' or 'created'.
Args:
size (int): the number of instances to generate
create (bool): whether to 'build' or 'create' the instances.
Ret... | Generate a batch of instances.
These instances will be either 'built' or 'created'.
Args:
size (int): the number of instances to generate
create (bool): whether to 'build' or 'create' the instances.
Returns:
object list: the generated instances |
def Nu_Mokry(Re, Pr, rho_w=None, rho_b=None):
r'''Calculates internal convection Nusselt number for turbulent vertical
upward flow in a pipe under supercritical conditions according to [1]_,
and reviewed in [2]_.
.. math::
Nu_b = 0.0061 Re_b^{0.904} \bar{Pr}_b^{0.684}
\left(\fra... | r'''Calculates internal convection Nusselt number for turbulent vertical
upward flow in a pipe under supercritical conditions according to [1]_,
and reviewed in [2]_.
.. math::
Nu_b = 0.0061 Re_b^{0.904} \bar{Pr}_b^{0.684}
\left(\frac{\rho_w}{\rho_b}\right)^{0.564}
... |
def sample(self, nsims=1000):
""" Samples from the posterior predictive distribution
Parameters
----------
nsims : int (default : 1000)
How many draws from the posterior predictive distribution
Returns
----------
- np.ndarray of draws from the data
... | Samples from the posterior predictive distribution
Parameters
----------
nsims : int (default : 1000)
How many draws from the posterior predictive distribution
Returns
----------
- np.ndarray of draws from the data |
def ASRS(self, params):
"""
ASRS [Ra,] Ra, Rc
ASRS [Ra,] Rb, #imm5_counting
Arithmetic shift right Rb by Rc or imm5_counting and store the result in Ra
imm5 counting is [1, 32]
In the register shift, the first two operands must be the same register
Ra, Rb, and Rc... | ASRS [Ra,] Ra, Rc
ASRS [Ra,] Rb, #imm5_counting
Arithmetic shift right Rb by Rc or imm5_counting and store the result in Ra
imm5 counting is [1, 32]
In the register shift, the first two operands must be the same register
Ra, Rb, and Rc must be low registers
If Ra is omit... |
def compile(self):
"""
Compile this expression into an ODPS SQL
:return: compiled DAG
:rtype: str
"""
from ..engines import get_default_engine
engine = get_default_engine(self)
return engine.compile(self) | Compile this expression into an ODPS SQL
:return: compiled DAG
:rtype: str |
def how_long(length=4, choices=len(words), speed=1000 * 1000 * 1000 * 1000,
optimism=2):
"""
How long might it take to guess a password?
@param length: the number of words that we're going to choose.
@type length: L{int}
@param choice: the number of words we might choose between.
... | How long might it take to guess a password?
@param length: the number of words that we're going to choose.
@type length: L{int}
@param choice: the number of words we might choose between.
@type choice: L{int}
@param speed: the speed of our hypothetical password guesser, in guesses
per sec... |
def get_as_string(self, key):
"""
Converts map element into a string or returns "" if conversion is not possible.
:param key: an index of element to get.
:return: string value ot the element or "" if conversion is not supported.
"""
value = self.get(key)
return ... | Converts map element into a string or returns "" if conversion is not possible.
:param key: an index of element to get.
:return: string value ot the element or "" if conversion is not supported. |
def missing_particle(separation=0.0, radius=RADIUS, SNR=20):
""" create a two particle state and compare it to featuring using a single particle guess """
# create a base image of one particle
s = init.create_two_particle_state(imsize=6*radius+4, axis='x', sigma=1.0/SNR,
delta=separation, radius... | create a two particle state and compare it to featuring using a single particle guess |
def get_group_member_profile(self, group_id, user_id, timeout=None):
"""Call get group member profile API.
https://devdocs.line.me/en/#get-group-room-member-profile
Gets the user profile of a member of a group that
the bot is in. This can be the user ID of a user who has
not ad... | Call get group member profile API.
https://devdocs.line.me/en/#get-group-room-member-profile
Gets the user profile of a member of a group that
the bot is in. This can be the user ID of a user who has
not added the bot as a friend or has blocked the bot.
:param str group_id: Gr... |
def locate(self, pattern):
'''Find sequences matching a pattern. For a circular sequence, the
search extends over the origin.
:param pattern: str or NucleicAcidSequence for which to find matches.
:type pattern: str or coral.DNA
:returns: A list of top and bottom strand indices o... | Find sequences matching a pattern. For a circular sequence, the
search extends over the origin.
:param pattern: str or NucleicAcidSequence for which to find matches.
:type pattern: str or coral.DNA
:returns: A list of top and bottom strand indices of matches.
:rtype: list of lis... |
def _hmmalign(self, input_path, directions, pipeline,
forward_reads_output_path, reverse_reads_output_path):
'''
Align reads to the aln_hmm. Receives unaligned sequences and
aligns them.
Parameters
----------
input_path : str
Filename of una... | Align reads to the aln_hmm. Receives unaligned sequences and
aligns them.
Parameters
----------
input_path : str
Filename of unaligned hits to be aligned
directions : dict
dictionary containing read names as keys, and complement
as the entry (... |
def get_stp_mst_detail_output_msti_instance_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_stp_mst_detail = ET.Element("get_stp_mst_detail")
config = get_stp_mst_detail
output = ET.SubElement(get_stp_mst_detail, "output")
msti = E... | Auto Generated Code |
def get_bounds(self, bin_num):
"""Get the bonds of a bin, given its index `bin_num`.
:returns: a `Bounds` namedtuple with properties min and max
respectively.
"""
min_value = pow(2.0, float(bin_num) / 2.0) * self.min_value
max_value = pow(2.0, float(bin_num + 1.0) / ... | Get the bonds of a bin, given its index `bin_num`.
:returns: a `Bounds` namedtuple with properties min and max
respectively. |
def sphgen(self, force_rerun=False):
"""Create sphere representation (sph file) of receptor from the surface representation
Args:
force_rerun (bool): If method should be rerun even if output file exists
"""
log.debug('{}: running sphere generation...'.format(self.id))
... | Create sphere representation (sph file) of receptor from the surface representation
Args:
force_rerun (bool): If method should be rerun even if output file exists |
def generate_config_parser(config, include_all=False):
"""
Generates a config parser from a configuration dictionary.
The dictionary contains the merged informations of the schema and,
optionally, of a source configuration file. Values of the source
configuration file will be stored in the *value* ... | Generates a config parser from a configuration dictionary.
The dictionary contains the merged informations of the schema and,
optionally, of a source configuration file. Values of the source
configuration file will be stored in the *value* field of an option. |
def filter_tess_lcdict(lcdict,
filterqualityflags=True,
nanfilter='sap,pdc,time',
timestoignore=None,
quiet=False):
'''This filters the provided TESS `lcdict`, removing nans and bad
observations.
By default, this fu... | This filters the provided TESS `lcdict`, removing nans and bad
observations.
By default, this function removes points in the TESS LC that have ANY
quality flags set.
Parameters
----------
lcdict : lcdict
An `lcdict` produced by `consolidate_tess_fitslc` or
`read_tess_fitslc`.
... |
def status_pipeline(conf, args):
"""Stop a pipeline."""
host = conf.config['instances'][args.host_instance]
url = api.build_pipeline_url(build_instance_url(host))
auth = tuple([conf.creds['instances'][args.host_instance]['user'], conf.creds['instances'][args.host_instance]['pass']])
verify_ssl = hos... | Stop a pipeline. |
async def set_lock(self, resource, lock_identifier):
"""
Tries to set the lock to all the redis instances
:param resource: The resource string name to lock
:param lock_identifier: The id of the lock. A unique string
:return float: The elapsed time that took to lock the instances... | Tries to set the lock to all the redis instances
:param resource: The resource string name to lock
:param lock_identifier: The id of the lock. A unique string
:return float: The elapsed time that took to lock the instances
in seconds
:raises: LockError if the lock has not be... |
def requires_authentication(func):
"""
Function decorator that throws an exception if the user
is not authenticated, and executes the function normally
if the user is authenticated.
"""
def _auth(self, *args, **kwargs):
if not self._authenticated:
... | Function decorator that throws an exception if the user
is not authenticated, and executes the function normally
if the user is authenticated. |
def linear_connection(plist, lane):
"""Connects a linear list of processes into a list of dictionaries
Parameters
----------
plist : list
List with process names. This list should contain at least two entries.
lane : int
Corresponding lane of the processes
Returns
-------
... | Connects a linear list of processes into a list of dictionaries
Parameters
----------
plist : list
List with process names. This list should contain at least two entries.
lane : int
Corresponding lane of the processes
Returns
-------
res : list
List of dictionaries ... |
def open_url(url, httpuser=None, httppassword=None, method=None):
"""
Open a URL using an opener that will simulate a browser user-agent
url: The URL
httpuser, httppassword: HTTP authentication credentials (either both or
neither must be provided)
method: The HTTP method
Caller is reponsi... | Open a URL using an opener that will simulate a browser user-agent
url: The URL
httpuser, httppassword: HTTP authentication credentials (either both or
neither must be provided)
method: The HTTP method
Caller is reponsible for calling close() on the returned object |
def update_field(self, elements):
"""
Update the field with a list of provided values but only if the values
are different. Return a boolean indicating whether a change was made
indicating whether `save` should be called. If the field is currently
set to any or none, then no comp... | Update the field with a list of provided values but only if the values
are different. Return a boolean indicating whether a change was made
indicating whether `save` should be called. If the field is currently
set to any or none, then no comparison is made and field is updated.
... |
def dispatch(self, frame):
'''
Override the default dispatch since we don't need the rest of
the stack.
'''
if frame.type() == HeartbeatFrame.type():
self.send_heartbeat()
elif frame.type() == MethodFrame.type():
if frame.class_id == 10:
... | Override the default dispatch since we don't need the rest of
the stack. |
def infer_batch(self, dataloader):
"""
Description : inference for LipNet
"""
sum_losses = 0
len_losses = 0
for input_data, input_label in dataloader:
data = gluon.utils.split_and_load(input_data, self.ctx, even_split=False)
label = gluon.utils.spl... | Description : inference for LipNet |
def get_js(self):
""" Fetches and returns javascript file path or contents, depending if
we want a standalone presentation or not.
"""
js_file = os.path.join(self.theme_dir, 'js', 'slides.js')
if not os.path.exists(js_file):
js_file = os.path.join(THEMES_DIR, 'de... | Fetches and returns javascript file path or contents, depending if
we want a standalone presentation or not. |
def getnamedargs(*args, **kwargs):
    """Merge dict positional arguments with keyword arguments.

    Lets callers pass a dict and named args together, so
    ``getnamedargs({'a': 5, 'b': 3}, c=8)`` yields
    ``dict(a=5, b=3, c=8)``. Non-dict positional arguments are
    skipped; keyword arguments win on key collisions.
    """
    merged = {}
    for positional in args:
        # Only mapping-like positionals contribute; anything else is ignored.
        if not isinstance(positional, dict):
            continue
        merged.update(positional)
    # Applied last so explicit keyword args override dict-supplied values.
    merged.update(kwargs)
    return merged
so you can pass ({'a':5, 'b':3}, c=8) and get
dict(a=5, b=3, c=8) |
def EndEdit(self, row, col, grid, oldVal=None):
"""
End editing the cell. This function must check if the current
value of the editing control is valid and different from the
original value (available as oldval in its string form.) If
it has not changed then simply return None,... | End editing the cell. This function must check if the current
value of the editing control is valid and different from the
original value (available as oldval in its string form.) If
it has not changed then simply return None, otherwise return
the value in its string form.
*Mus... |
def copy_(name,
source,
force=False,
makedirs=False,
preserve=False,
user=None,
group=None,
mode=None,
subdir=False,
**kwargs):
'''
If the file defined by the ``source`` option exists on the minion, copy it
to the name... | If the file defined by the ``source`` option exists on the minion, copy it
to the named path. The file will not be overwritten if it already exists,
unless the ``force`` option is set to ``True``.
.. note::
This state only copies files from one location on a minion to another
location on th... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.