code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def probability_gt(self, x):
    """
    Return the probability of a random variable being greater than the
    given value.

    :param x: value the random variable is compared against
    :return: P(X > x) as a float, or ``None`` when the mean is unknown.
    """
    if self.mean is None:
        # Explicit ``return None`` keeps the two return paths consistent
        # (the other branch returns a value).
        return None
    # normdist presumably yields the CDF value P(X <= x) — the complement
    # is P(X > x). TODO confirm against normdist's definition.
    p = normdist(x=x, mu=self.mean, sigma=self.standard_deviation)
    return 1 - p
def _recv(self):
"""Take all available bytes from socket, return list of any responses from parser"""
recvd = []
self._lock.acquire()
if not self._can_send_recv():
log.warning('%s cannot recv: socket not connected', self)
self._lock.release()
return ()... | Take all available bytes from socket, return list of any responses from parser |
def cmp_pkgrevno(package, revno, pkgcache=None):
"""Compare supplied revno with the revno of the installed package.
* 1 => Installed revno is greater than supplied arg
* 0 => Installed revno is the same as supplied arg
* -1 => Installed revno is less than supplied arg
This function imports YumBa... | Compare supplied revno with the revno of the installed package.
* 1 => Installed revno is greater than supplied arg
* 0 => Installed revno is the same as supplied arg
* -1 => Installed revno is less than supplied arg
This function imports YumBase function if the pkgcache argument
is None. |
def _restore(self, builder):
    """
    The restore extension.

    Includes soft-deleted rows in the query, then nulls out the
    deleted-at column so those models become active again.

    :param builder: The query builder
    :type builder: orator.orm.builder.Builder
    """
    builder.with_trashed()
    deleted_at_column = builder.get_model().get_deleted_at_column()
    return builder.update({deleted_at_column: None})
def parse_coach_bsites_inf(infile):
"""Parse the Bsites.inf output file of COACH and return a list of rank-ordered binding site predictions
Bsites.inf contains the summary of COACH clustering results after all other prediction algorithms have finished
For each site (cluster), there are three lines:
... | Parse the Bsites.inf output file of COACH and return a list of rank-ordered binding site predictions
Bsites.inf contains the summary of COACH clustering results after all other prediction algorithms have finished
For each site (cluster), there are three lines:
- Line 1: site number, c-score of coa... |
def get_link(self, rel):
    """
    Return link for specified resource

    :param rel: name of the link to look up
    :raises ResourceNotFound: when no link with that name exists here
    """
    links = self.links
    if rel not in links:
        raise ResourceNotFound('Resource requested: %r is not available '
                               'on this element.' % rel)
    return links[rel]
def multiply(self, a, b):
"""
:type A: List[List[int]]
:type B: List[List[int]]
:rtype: List[List[int]]
"""
if a is None or b is None: return None
m, n, l = len(a), len(b[0]), len(b[0])
if len(b) != n:
raise Exception("A's column number must be equal to B's row number.")
c = ... | :type A: List[List[int]]
:type B: List[List[int]]
:rtype: List[List[int]] |
async def generate_waifu_insult(self, avatar):
"""Generate a waifu insult image.
This function is a coroutine.
Parameters:
avatar: str - http/s url pointing to an image, has to have proper headers and be a direct link to an image
Return Type: image data"""
if not i... | Generate a waifu insult image.
This function is a coroutine.
Parameters:
avatar: str - http/s url pointing to an image, has to have proper headers and be a direct link to an image
Return Type: image data |
def folderitem(self, obj, item, index):
"""Service triggered each time an item is iterated in folderitems.
The use of this service prevents the extra-loops in child objects.
:obj: the instance of the class to be foldered
:item: dict containing the properties of the object to be used by... | Service triggered each time an item is iterated in folderitems.
The use of this service prevents the extra-loops in child objects.
:obj: the instance of the class to be foldered
:item: dict containing the properties of the object to be used by
the template
:index: current i... |
def make_slice_key(cls, start_string, size_string):
"""
Converts the given start and size query parts to a slice key.
:return: slice key
:rtype: slice
"""
try:
start = int(start_string)
except ValueError:
raise ValueError('Query parameter ... | Converts the given start and size query parts to a slice key.
:return: slice key
:rtype: slice |
def ack(self, msg):
    """Process the message and determine what to do with it.

    Logs the received body; tells stomper that no response message
    needs to be sent back.
    """
    body = msg['body']
    self.log.info("receiverId <%s> Received: <%s> " % (self.receiverId, body))
    return stomper.NO_REPONSE_NEEDED
def rec_update(self, other, **third):
"""Recursively update the dictionary with the contents of other and
third like dict.update() does - but don't overwrite sub-dictionaries.
Example:
>>> d = RecursiveDictionary({'foo': {'bar': 42}})
>>> d.rec_update({'foo': {'baz': 36}})
... | Recursively update the dictionary with the contents of other and
third like dict.update() does - but don't overwrite sub-dictionaries.
Example:
>>> d = RecursiveDictionary({'foo': {'bar': 42}})
>>> d.rec_update({'foo': {'baz': 36}})
>>> d
{'foo': {'baz': 36, 'bar': 42}} |
def get_content(self, key):
"""
Gets given content from the cache.
Usage::
>>> cache = Cache()
>>> cache.add_content(John="Doe", Luke="Skywalker")
True
>>> cache.get_content("Luke")
'Skywalker'
:param key: Content to retrieve... | Gets given content from the cache.
Usage::
>>> cache = Cache()
>>> cache.add_content(John="Doe", Luke="Skywalker")
True
>>> cache.get_content("Luke")
'Skywalker'
:param key: Content to retrieve.
:type key: object
:return: Con... |
def ParseFileSystemsStruct(struct_class, fs_count, data):
"""Take the struct type and parse it into a list of structs."""
results = []
cstr = lambda x: x.split(b"\x00", 1)[0]
for count in range(0, fs_count):
struct_size = struct_class.GetSize()
s_data = data[count * struct_size:(count + 1) * struct_size... | Take the struct type and parse it into a list of structs. |
def delmod_cli(argv, alter_logger=True):
"""Command-line access to ``delmod`` functionality.
The ``delmod`` task deletes "on-the-fly" model information from a
Measurement Set. It is so easy to implement that a standalone
function is essentially unnecessary. Just write::
from pwkit.environments.c... | Command-line access to ``delmod`` functionality.
The ``delmod`` task deletes "on-the-fly" model information from a
Measurement Set. It is so easy to implement that a standalone
function is essentially unnecessary. Just write::
from pwkit.environments.casa import util
cb = util.tools.calibrater... |
def finalize_sv(samples, config):
"""Combine results from multiple sv callers into a single ordered 'sv' key.
"""
by_bam = collections.OrderedDict()
for x in samples:
batch = dd.get_batch(x) or [dd.get_sample_name(x)]
try:
by_bam[x["align_bam"], tuple(batch)].append(x)
... | Combine results from multiple sv callers into a single ordered 'sv' key. |
def _preprocess_data(self, X, Y=None, idxs=None, train=False):
"""
Preprocess the data:
1. Convert sparse matrix to dense matrix.
2. Select subset of the input if idxs exists.
:param X: The input data of the model.
:type X: pair with candidates and corresponding features... | Preprocess the data:
1. Convert sparse matrix to dense matrix.
2. Select subset of the input if idxs exists.
:param X: The input data of the model.
:type X: pair with candidates and corresponding features
:param Y: The labels of input data.
:type Y: list or numpy.array
... |
def get_other_keys(self, key, including_current=False):
""" Returns list of other keys that are mapped to the same value as specified key.
@param key - key for which other keys should be returned.
@param including_current if set to True - key will also appear on this list."""
... | Returns list of other keys that are mapped to the same value as specified key.
@param key - key for which other keys should be returned.
@param including_current if set to True - key will also appear on this list. |
def before(self, value):
"""
Sets the operator type to Query.Op.Before and sets the value to
the amount that this query should be lower than. This is functionally
the same as doing the lessThan operation, but is useful for visual
queries for things like dates.
... | Sets the operator type to Query.Op.Before and sets the value to
the amount that this query should be lower than. This is functionally
the same as doing the lessThan operation, but is useful for visual
queries for things like dates.
:param value | <variant>
... |
def kfolds(n, k, sz, p_testset=None, seed=7238):
"""
return train, valid [,test]
testset if p_testset
:param n:
:param k:
:param sz:
:param p_testset:
:param seed:
:return:
"""
trains, tests = split_rand(sz, p_testset, seed)
ntrain = len(trains)
# np.random.seed(se... | return train, valid [,test]
testset if p_testset
:param n:
:param k:
:param sz:
:param p_testset:
:param seed:
:return: |
def _encode_value(self, value):
""" Encodes the value such that it can be stored into MongoDB.
Any primitive types are stored directly into MongoDB, while non-primitive types
are pickled and stored as GridFS objects. The id pointing to a GridFS object
replaces the original value.
... | Encodes the value such that it can be stored into MongoDB.
Any primitive types are stored directly into MongoDB, while non-primitive types
are pickled and stored as GridFS objects. The id pointing to a GridFS object
replaces the original value.
Args:
value (object): The obj... |
def _callFunc(session, funcName, password, args):
"""Call custom cjdns admin function"""
txid = _randomString()
sock = session.socket
sock.send(bytearray('d1:q6:cookie4:txid10:%se' % txid, 'utf-8'))
msg = _getMessage(session, txid)
cookie = msg['cookie']
txid = _randomString()
tohash = ... | Call custom cjdns admin function |
def _run(self):
"""Run the worker function with some custom exception handling."""
try:
# Run the worker
self.worker()
except SystemExit as ex:
# sys.exit() was called
if isinstance(ex.code, int):
if ex.code is not None and ex.code ... | Run the worker function with some custom exception handling. |
def stitch(network, donor, P_network, P_donor, method='nearest',
len_max=sp.inf, len_min=0, label_suffix=''):
r'''
Stitches a second a network to the current network.
Parameters
----------
networK : OpenPNM Network Object
The Network to which to donor Network will be attached
... | r'''
Stitches a second a network to the current network.
Parameters
----------
networK : OpenPNM Network Object
The Network to which to donor Network will be attached
donor : OpenPNM Network Object
The Network to stitch on to the current Network
P_network : array_like
... |
def children_sum(self, children, node):
    """Calculate children's total sum.

    :param children: iterable of child values to aggregate
    :param node: context object forwarded to ``self.value`` per child
    :return: sum of the per-child values (0 for an empty iterable)
    """
    # Generator expression: no throwaway list is materialized for sum().
    return sum(self.value(child, node) for child in children)
async def base_combine(source, switch=False, ordered=False, task_limit=None):
"""Base operator for managing an asynchronous sequence of sequences.
The sequences are awaited concurrently, although it's possible to limit
the amount of running sequences using the `task_limit` argument.
The ``switch`` arg... | Base operator for managing an asynchronous sequence of sequences.
The sequences are awaited concurrently, although it's possible to limit
the amount of running sequences using the `task_limit` argument.
The ``switch`` argument enables the switch mecanism, which cause the
previous subsequence to be dis... |
def view_list(self):
    '''return a list of polygon indexes lists for the waypoints'''
    seen = set()
    polygons = []
    # Keep requesting index groups until every waypoint has been covered;
    # view_indexes is expected to add to ``seen`` on each call.
    while len(seen) != self.count():
        indexes = self.view_indexes(seen)
        if len(indexes):
            polygons.append(indexes)
    return polygons
def cli(ctx, path, renku_home, use_external_storage):
    """Check common Renku commands used in various situations."""
    # NOTE(review): ctx looks like a click context — the configured client
    # is attached to it for subcommands; confirm against the CLI framework.
    client = LocalClient(
        path=path,
        renku_home=renku_home,
        use_external_storage=use_external_storage,
    )
    ctx.obj = client
def parse_cookie(cookie: str) -> Dict[str, str]:
"""Parse a ``Cookie`` HTTP header into a dict of name/value pairs.
This function attempts to mimic browser cookie parsing behavior;
it specifically does not follow any of the cookie-related RFCs
(because browsers don't either).
The algorithm used is... | Parse a ``Cookie`` HTTP header into a dict of name/value pairs.
This function attempts to mimic browser cookie parsing behavior;
it specifically does not follow any of the cookie-related RFCs
(because browsers don't either).
The algorithm used is identical to that used by Django version 1.9.10.
.... |
def _footer(trigger, data, content):
"""
footer of the note
:param trigger: trigger object
:param data: data to be used
:param content: add the footer of the note to the content
:return: content string
"""
# footer of the note
footer = EvernoteMgr.... | footer of the note
:param trigger: trigger object
:param data: data to be used
:param content: add the footer of the note to the content
:return: content string |
def network_lpf(network, snapshots=None, skip_pre=False):
"""
Linear power flow for generic network.
Parameters
----------
snapshots : list-like|single snapshot
A subset or an elements of network.snapshots on which to run
the power flow, defaults to network.snapshots
skip_pre: b... | Linear power flow for generic network.
Parameters
----------
snapshots : list-like|single snapshot
A subset or an elements of network.snapshots on which to run
the power flow, defaults to network.snapshots
skip_pre: bool, default False
Skip the preliminary steps of computing top... |
def sequence_molecular_weight(seq):
"""Returns the molecular weight of the polypeptide sequence.
Notes
-----
Units = Daltons
Parameters
----------
seq : str
Sequence of amino acids.
"""
if 'X' in seq:
warnings.warn(_nc_warning_str, NoncanonicalWarning)
return su... | Returns the molecular weight of the polypeptide sequence.
Notes
-----
Units = Daltons
Parameters
----------
seq : str
Sequence of amino acids. |
def overlapped_convolution(bin_template, bin_image,
tollerance=0.5, splits=(4, 2)):
"""
As each of these images are hold only binary values, and RFFT2 works on
float64 greyscale values, we can make the convolution more efficient by
breaking the image up into :splits: sect... | As each of these images are hold only binary values, and RFFT2 works on
float64 greyscale values, we can make the convolution more efficient by
breaking the image up into :splits: sectons. Each one of these sections
then has its greyscale value adjusted and then stacked.
We then apply the convolut... |
def sort_descendants(self, attr="name"):
"""
This function sort the branches of a given tree by
considerening node names. After the tree is sorted, nodes are
labeled using ascendent numbers. This can be used to ensure
that nodes in a tree with the same node names are always
... | This function sort the branches of a given tree by
considerening node names. After the tree is sorted, nodes are
labeled using ascendent numbers. This can be used to ensure
that nodes in a tree with the same node names are always
labeled in the same way. Note that if duplicated names ar... |
def find_project_dir():
"""Runs up the stack to find the location of manage.py
which will be considered a project base path.
:rtype: str|unicode
"""
frame = inspect.currentframe()
while True:
frame = frame.f_back
fname = frame.f_globals['__file__']
if os.path.basename(... | Runs up the stack to find the location of manage.py
which will be considered a project base path.
:rtype: str|unicode |
def _str_to_windows(self, input_str, window_length, curse_forward):
"""
Divide an input string to a list of substrings based on window_length and curse_forward values
:param input_str: str
:param window_length: int
:param curse_forward: int
:return: list [str]
"""... | Divide an input string to a list of substrings based on window_length and curse_forward values
:param input_str: str
:param window_length: int
:param curse_forward: int
:return: list [str] |
def parse_cfgstr_name_options(cfgstr):
r"""
Args:
cfgstr (str):
Returns:
tuple: (cfgname, cfgopt_strs, subx)
CommandLine:
python -m utool.util_gridsearch --test-parse_cfgstr_name_options
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_gridsearch import * ... | r"""
Args:
cfgstr (str):
Returns:
tuple: (cfgname, cfgopt_strs, subx)
CommandLine:
python -m utool.util_gridsearch --test-parse_cfgstr_name_options
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_gridsearch import * # NOQA
>>> import utool as ut
... |
def i18n_system_locale():
"""
Return the system locale
:return: the system locale (as a string)
"""
log.debug('i18n_system_locale() called')
lc, encoding = locale.getlocale()
log.debug('locale.getlocale() = (lc="{lc}", encoding="{encoding}).'.format(lc=lc, encoding=encoding))
if lc is No... | Return the system locale
:return: the system locale (as a string) |
def makeMarkovApproxToNormalByMonteCarlo(x_grid,mu,sigma,N_draws = 10000):
'''
Creates an approximation to a normal distribution with mean mu and standard
deviation sigma, by Monte Carlo.
Returns a stochastic vector called p_vec, corresponding
to values in x_grid. If a RV is distributed x~N(mu,sigm... | Creates an approximation to a normal distribution with mean mu and standard
deviation sigma, by Monte Carlo.
Returns a stochastic vector called p_vec, corresponding
to values in x_grid. If a RV is distributed x~N(mu,sigma), then the expectation
of a continuous function f() is E[f(x)] = numpy.dot(p_vec,... |
def _fill_table_entry(self, row, col):
"""""
Fill an entry of the observation table.
Args:
row (str): The row of the observation table
col (str): The column of the observation table
Returns:
None
"""
prefix = self._membership_query(row)... | Fill an entry of the observation table.
Args:
row (str): The row of the observation table
col (str): The column of the observation table
Returns:
None |
def translify(in_string, strict=True):
"""
Translify russian text
@param in_string: input string
@type in_string: C{unicode}
@param strict: raise error if transliteration is incomplete.
(True by default)
@type strict: C{bool}
@return: transliterated string
@rtype: C{str}
... | Translify russian text
@param in_string: input string
@type in_string: C{unicode}
@param strict: raise error if transliteration is incomplete.
(True by default)
@type strict: C{bool}
@return: transliterated string
@rtype: C{str}
@raise ValueError: when string doesn't transliterat... |
def set(self, key, val):
"""
Sets a header field with the given value, removing
previous values.
Usage::
headers = HTTPHeaderDict(foo='bar')
headers.set('Foo', 'baz')
headers['foo']
> 'baz'
"""
key_lower = key.lower()
... | Sets a header field with the given value, removing
previous values.
Usage::
headers = HTTPHeaderDict(foo='bar')
headers.set('Foo', 'baz')
headers['foo']
> 'baz' |
def get_widget_label_for(self, fieldname, default=None):
    """Lookup the widget of the field and return the label.

    :param fieldname: name of the field whose widget is looked up
    :param default: value returned when the field has no widget
    """
    widget = self.get_widget_for(fieldname)
    return default if widget is None else widget.label
def relateObjectLocs(obj, entities, selectF):
"""calculate the minimum distance to reach any iterable of entities with a loc"""
#if obj in entities: return 0 # is already one of the entities
try: obj = obj.loc # get object's location, if it has one
except AttributeError: pass # assum... | calculate the minimum distance to reach any iterable of entities with a loc |
def Brokaw(T, ys, mus, MWs, molecular_diameters, Stockmayers):
r'''Calculates viscosity of a gas mixture according to
mixing rules in [1]_.
.. math::
\eta_{mix} = \sum_{i=1}^n \frac{y_i \eta_i}{\sum_{j=1}^n y_j \phi_{ij}}
\phi_{ij} = \left( \frac{\eta_i}{\eta_j} \right)^{0.5} S_{ij} A_{ij}... | r'''Calculates viscosity of a gas mixture according to
mixing rules in [1]_.
.. math::
\eta_{mix} = \sum_{i=1}^n \frac{y_i \eta_i}{\sum_{j=1}^n y_j \phi_{ij}}
\phi_{ij} = \left( \frac{\eta_i}{\eta_j} \right)^{0.5} S_{ij} A_{ij}
A_{ij} = m_{ij} M_{ij}^{-0.5} \left[1 +
\frac{M_{... |
def _shrink(v, gamma):
"""Soft-shrinkage of an array with parameter gamma.
Parameters
----------
v : array
Array containing the values to be applied to the shrinkage operator
gamma : float
Shrinkage parameter.
Returns
-------
v... | Soft-shrinkage of an array with parameter gamma.
Parameters
----------
v : array
Array containing the values to be applied to the shrinkage operator
gamma : float
Shrinkage parameter.
Returns
-------
v : array
The same inpu... |
def list_logs(self):
'''return a list of logs. We return any file that ends in .log
'''
results = []
for image in self._bucket.list_blobs():
if image.name.endswith('log'):
results.append(image)
if len(results) == 0:
bot.info("No containers found, based on extension .log"... | return a list of logs. We return any file that ends in .log |
def get_merge_requests(self):
"http://doc.gitlab.com/ce/api/merge_requests.html"
g = self.gitlab
merges = self.get(g['url'] + "/projects/" +
g['repo'] + "/merge_requests",
{'private_token': g['token'],
'state': 'all'}... | http://doc.gitlab.com/ce/api/merge_requests.html |
def _get_biallelic_variant(self, variant, info, _check_alleles=True):
"""Creates a bi-allelic variant."""
info = info.iloc[0, :]
assert not info.multiallelic
# Seeking and parsing the file
self._impute2_file.seek(info.seek)
genotypes = self._parse_impute2_line(self._impu... | Creates a bi-allelic variant. |
def model_returns_t_alpha_beta(data, bmark, samples=2000, progressbar=True):
"""
Run Bayesian alpha-beta-model with T distributed returns.
This model estimates intercept (alpha) and slope (beta) of two
return sets. Usually, these will be algorithm returns and
benchmark returns (e.g. S&P500). The da... | Run Bayesian alpha-beta-model with T distributed returns.
This model estimates intercept (alpha) and slope (beta) of two
return sets. Usually, these will be algorithm returns and
benchmark returns (e.g. S&P500). The data is assumed to be T
distributed and thus is robust to outliers and takes tail event... |
def next_event_indexer(all_dates,
data_query_cutoff,
all_sids,
event_dates,
event_timestamps,
event_sids):
"""
Construct an index array that, when applied to an array of values, produces
a 2D a... | Construct an index array that, when applied to an array of values, produces
a 2D array containing the values associated with the next event for each
sid at each moment in time.
Locations where no next event was known will be filled with -1.
Parameters
----------
all_dates : ndarray[datetime64[... |
def phaseshift_isc(data, pairwise=False, summary_statistic='median',
n_shifts=1000, tolerate_nans=True, random_state=None):
"""Phase randomization for one-sample ISC test
For each voxel or ROI, compute the actual ISC and p-values
from a null distribution of ISCs where response time seri... | Phase randomization for one-sample ISC test
For each voxel or ROI, compute the actual ISC and p-values
from a null distribution of ISCs where response time series
are phase randomized prior to computing ISC. If pairwise,
apply phase randomization to each subject and compute pairwise
ISCs. If leave-... |
def _set_ipv6_gateway_address(self, v, load=False):
"""
Setter method for ipv6_gateway_address, mapped from YANG variable /interface_vlan/interface/ve/ipv6/ipv6_anycast_gateway/ipv6_gateway_address (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_ipv6_gateway_addre... | Setter method for ipv6_gateway_address, mapped from YANG variable /interface_vlan/interface/ve/ipv6/ipv6_anycast_gateway/ipv6_gateway_address (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_ipv6_gateway_address is considered as a private
method. Backends looking to po... |
def close(self, signalnum=None, frame=None):
self._running = False
"""Closes all currently open Tail objects"""
self._log_debug("Closing all tail objects")
self._active = False
for fid in self._tails:
self._tails[fid].close()
for n in range(0,self._number_of_c... | Closes all currently open Tail objects |
def ListRecursivelyViaWalking(top):
"""Walks a directory tree, yielding (dir_path, file_paths) tuples.
For each of `top` and its subdirectories, yields a tuple containing the path
to the directory and the path to each of the contained files. Note that
unlike os.Walk()/tf.io.gfile.walk()/ListRecursivelyViaGlob... | Walks a directory tree, yielding (dir_path, file_paths) tuples.
For each of `top` and its subdirectories, yields a tuple containing the path
to the directory and the path to each of the contained files. Note that
unlike os.Walk()/tf.io.gfile.walk()/ListRecursivelyViaGlobbing, this does not
list subdirectories... |
def is_valid_ipv6(ip_str):
    """
    Check the validity of an IPv6 address

    :param ip_str: textual address to validate
    :return: True when the OS accepts it as an IPv6 address, else False
    """
    try:
        socket.inet_pton(socket.AF_INET6, ip_str)
        return True
    except socket.error:
        return False
def install(self):
    """Confirm add-on install.

    Clicks the primary button while switched into the browser-chrome
    context.
    """
    selenium = self.selenium
    with selenium.context(selenium.CONTEXT_CHROME):
        button = self.find_primary_button()
        button.click()
def parse_hstring(hs):
"""
Parse a single item from the telescope server into name, value, comment.
"""
# split the string on = and /, also stripping whitespace and annoying quotes
name, value, comment = yield_three(
[val.strip().strip("'") for val in filter(None, re.split("[=/]+", hs))]
... | Parse a single item from the telescope server into name, value, comment. |
def updateAltHistory(self):
'''Updates the altitude history plot.'''
self.altHist.append(self.relAlt)
self.timeHist.append(self.relAltTime)
# Delete entries older than x seconds
histLim = 10
currentTime = time.time()
point = 0
for i in range(0,len... | Updates the altitude history plot. |
def make_plus_fields(obj):
    """
    Add a '+' to the key of non-standard fields.

    Looks up the standard-field table for this object's ``_type``
    (empty when the type is unknown) and dispatches to the recursive
    ``_make_plus_helper``.
    """
    known_fields = standard_fields.get(obj['_type'], {})
    return _make_plus_helper(obj, known_fields)
def build_fred(self):
'''Build a flat recurrent encoder-decoder dialogue model'''
encoder = Encoder(data=self.dataset, config=self.model_config)
decoder = Decoder(data=self.dataset, config=self.model_config, encoder=encoder)
return EncoderDecoder(config=self.model_config, encod... | Build a flat recurrent encoder-decoder dialogue model |
def GetNetworkAddressWithTime(self):
"""
Get a network address object.
Returns:
NetworkAddressWithTime: if we have a connection to a node.
None: otherwise.
"""
if self.port is not None and self.host is not None and self.Version is not None:
re... | Get a network address object.
Returns:
NetworkAddressWithTime: if we have a connection to a node.
None: otherwise. |
def infer(self, sensations, stats=None, objname=None):
"""
Attempt to recognize the object given a list of sensations.
You may use :meth:`getCurrentClassification` to extract the current object
classification from the network
:param sensations: Array of sensations, where each sensation is composed ... | Attempt to recognize the object given a list of sensations.
You may use :meth:`getCurrentClassification` to extract the current object
classification from the network
:param sensations: Array of sensations, where each sensation is composed of
displacement vector and feature SDR for e... |
def get_channel_id(self):
"""Fetches id
:return: id of youtube channel
"""
soup = BeautifulSoup(
self.get_channel_page(), "lxml"
) # parser for source page
channel_id = soup.find_all(
"span",
{
"class": "channel-header... | Fetches id
:return: id of youtube channel |
def _render_extended_error_message_list(self, extended_error):
"""Parse the ExtendedError object and retruns the message.
Build a list of decoded messages from the extended_error using the
message registries. An ExtendedError JSON object is a response from
the with its own schema. This... | Parse the ExtendedError object and retruns the message.
Build a list of decoded messages from the extended_error using the
message registries. An ExtendedError JSON object is a response from
the with its own schema. This function knows how to parse the
ExtendedError object and, using a... |
def __print_step_by_console(self, step):
    """
    print the step by console if the show variable is enabled

    :param step: step text
    """
    # Emit each line of the (possibly multi-line) step separately.
    for s in step.split(u'\n'):
        self.logger.by_console(u'    %s' % repr(s).replace("u'", "").replace("'", ""))
def hexdump(logger, s, width=16, skip=True, hexii=False, begin=0, highlight=None):
r"""
Return a hexdump-dump of a string.
Arguments:
logger(FastLogger): Logger object
s(str): The data to hexdump.
width(int): The number of characters per line
skip(bool): Set to True, if repe... | r"""
Return a hexdump-dump of a string.
Arguments:
logger(FastLogger): Logger object
s(str): The data to hexdump.
width(int): The number of characters per line
skip(bool): Set to True, if repeated lines should be replaced by a "*"
hexii(bool): Set to True, if a hexii-dum... |
def set_dm(self, num):
"""
Make GUI changes based on data model num.
Get info from WD in appropriate format.
"""
#enable or disable self.btn1a
if self.data_model_num == 3:
self.btn1a.Enable()
else:
self.btn1a.Disable()
#
# s... | Make GUI changes based on data model num.
Get info from WD in appropriate format. |
def resubmit(self, indices_or_msg_ids=None, subheader=None, block=None):
"""Resubmit one or more tasks.
in-flight tasks may not be resubmitted.
Parameters
----------
indices_or_msg_ids : integer history index, str msg_id, or list of either
The indices or msg_ids of... | Resubmit one or more tasks.
in-flight tasks may not be resubmitted.
Parameters
----------
indices_or_msg_ids : integer history index, str msg_id, or list of either
The indices or msg_ids of indices to be retrieved
block : bool
Whether to wait for the r... |
def connect_async(self, connection_id, connection_string, callback):
"""Asynchronously connect to a device
Args:
connection_id (int): A unique identifier that will refer to this connection
connection_string (string): A DeviceAdapter specific string that can be used to connect to... | Asynchronously connect to a device
Args:
connection_id (int): A unique identifier that will refer to this connection
connection_string (string): A DeviceAdapter specific string that can be used to connect to
a device using this DeviceAdapter.
callback (callab... |
def strlen(self, name):
    """
    Return the number of bytes stored in the value of the key

    :param name: str the name of the redis key
    :return: Future()
    """
    with self.pipe as pipe:
        full_key = self.redis_key(name)
        return pipe.strlen(full_key)
def _uneven_transform_deriv_shape(systematic_utilities,
alt_IDs,
rows_to_alts,
shape_params,
output_array=None,
*args, **kwargs):
"""
Paramete... | Parameters
----------
systematic_utilities : 1D ndarray.
All elements should be ints, floats, or longs. Should contain the
systematic utilities of each observation per available alternative.
Note that this vector is formed by the dot product of the design matrix
with the vector o... |
def mfpt(T, target, origin=None, tau=1, mu=None):
r"""Mean first passage times (from a set of starting states - optional)
to a set of target states.
Parameters
----------
T : ndarray or scipy.sparse matrix, shape=(n,n)
Transition matrix.
target : int or list of int
Target states... | r"""Mean first passage times (from a set of starting states - optional)
to a set of target states.
Parameters
----------
T : ndarray or scipy.sparse matrix, shape=(n,n)
Transition matrix.
target : int or list of int
Target states for mfpt calculation.
origin : int or list of int... |
def destroy_venv(env_path, venvscache=None):
    """Destroy a venv: delete it from disk and drop it from the cache.

    :param env_path: filesystem path of the virtualenv to remove
    :param venvscache: optional cache object tracking venvs
    """
    logger.debug("Destroying virtualenv at: %s", env_path)
    # Best-effort removal of the whole venv directory tree.
    shutil.rmtree(env_path, ignore_errors=True)
    if venvscache is not None:
        # Forget the venv so the cache no longer hands it out.
        venvscache.remove(env_path)
def create(self, validated_data):
""" This is a standard method called indirectly by calling
'save' on the serializer.
This method expects the 'parent_field' and 'parent_instance' to
be included in the Serializer context.
"""
if self.context.get('parent_field') \
... | This is a standard method called indirectly by calling
'save' on the serializer.
This method expects the 'parent_field' and 'parent_instance' to
be included in the Serializer context. |
def _encrypt(self):
"""Use your key thing to encrypt things."""
from M2Crypto import BIO, SMIME, X509
# Iterate through the fields and pull out the ones that have a value.
plaintext = 'cert_id=%s\n' % self.cert_id
for name, field in self.fields.items():
value = None
... | Use your key thing to encrypt things. |
def from_boto_instance(cls, instance):
"""
Loads a ``HostEntry`` from a boto instance.
:param instance: A boto instance object.
:type instance: :py:class:`boto.ec2.instanceInstance`
:rtype: :py:class:`HostEntry`
"""
return cls(
name=instance.tags.get... | Loads a ``HostEntry`` from a boto instance.
:param instance: A boto instance object.
:type instance: :py:class:`boto.ec2.instanceInstance`
:rtype: :py:class:`HostEntry` |
def clear_decimal_value(self, label):
    """Delete the decimal value recorded under ``label``.

    :param label: key identifying the decimal value to remove
    :raise NotFound: if no decimal value is stored for ``label``
    """
    decimal_values = self.my_osid_object_form._my_map['decimalValues']
    if label not in decimal_values:
        raise NotFound()
    del decimal_values[label]
def setup(self, data_manager):
"""
Hook to setup this service with a specific DataManager.
Will recursively setup sub-services.
"""
self._data_manager = data_manager
if self._data_manager:
self._dal = self._data_manager.get_dal()
else:
sel... | Hook to setup this service with a specific DataManager.
Will recursively setup sub-services. |
def _has_actions(self, event):
"""Check if a notification type has any enabled actions."""
event_actions = self._aconfig.get(event)
return event_actions is None or bool(event_actions) | Check if a notification type has any enabled actions. |
def find_any_reports(self, usage_page = 0, usage_id = 0):
"""Find any report type referencing HID usage control/data item.
Results are returned in a dictionary mapping report_type to usage
lists.
"""
items = [
(HidP_Input, self.find_input_reports(usage_page, ... | Find any report type referencing HID usage control/data item.
Results are returned in a dictionary mapping report_type to usage
lists. |
def errReceived(self, data):
"""
:api:`twisted.internet.protocol.ProcessProtocol <ProcessProtocol>` API
"""
if self.stderr:
self.stderr.write(data)
if self.kill_on_stderr:
self.transport.loseConnection()
raise RuntimeError(
"R... | :api:`twisted.internet.protocol.ProcessProtocol <ProcessProtocol>` API |
def rgb_to_xy(self, red, green, blue):
    """Convert an RGB triple to approximate CIE 1931 x/y coordinates.

    :param red: red channel value (integer)
    :param green: green channel value (integer)
    :param blue: blue channel value (integer)
    :return: tuple of (x, y) chromaticity coordinates
    """
    # The color helper performs the actual RGB -> xy gamut conversion.
    xy_point = self.color.get_xy_point_from_rgb(red, green, blue)
    return xy_point.x, xy_point.y
x and y coordinates. |
def is_tensor_final(self, tensor_name):
"""Whether a tensor is a final output of the computation.
Args:
tensor_name: a string, name of a tensor in the graph.
Returns:
a boolean indicating whether the tensor was a final output.
"""
tensor = self._name_to_tensor(tensor_name)
return t... | Whether a tensor is a final output of the computation.
Args:
tensor_name: a string, name of a tensor in the graph.
Returns:
a boolean indicating whether the tensor was a final output. |
def pct_decode(s):
"""
Return the percent-decoded version of string s.
>>> pct_decode('%43%6F%75%63%6F%75%2C%20%6A%65%20%73%75%69%73%20%63%6F%6E%76%69%76%69%61%6C')
'Coucou, je suis convivial'
>>> pct_decode('')
''
>>> pct_decode('%2525')
'%25'
"""
if s is None:
return N... | Return the percent-decoded version of string s.
>>> pct_decode('%43%6F%75%63%6F%75%2C%20%6A%65%20%73%75%69%73%20%63%6F%6E%76%69%76%69%61%6C')
'Coucou, je suis convivial'
>>> pct_decode('')
''
>>> pct_decode('%2525')
'%25' |
def rulefor(self, addr):
    """Fetch the target object stored for *addr* in the dependency graph.

    :param addr: address to resolve via the rule's own addressing scheme
    :return: the ``target_obj`` attached to the matching graph node
    """
    node_key = self.rule.makeaddress(addr)
    node_data = self.rule.subgraph.node[node_key]
    return node_data['target_obj']
def _init_datastores():
""" Initialize all datastores. """
global _DATASTORES
array = settings.DATASTORES
for config in array:
cls = _lookup(config['ENGINE'])
ds = _get_datastore(cls, DataStore, config)
_DATASTORES.append(ds)
legacy_settings = getattr(settings, 'MACHINE_CATEG... | Initialize all datastores. |
def clip_by_global_norm_per_ctx(self, max_norm=1.0, param_names=None):
"""Clips gradient norm.
The norm is computed over all gradients together, as if they were
concatenated into a single vector. Gradients are modified in-place.
The method is first used in
`[ICML2013] On the ... | Clips gradient norm.
The norm is computed over all gradients together, as if they were
concatenated into a single vector. Gradients are modified in-place.
The method is first used in
`[ICML2013] On the difficulty of training recurrent neural networks`
Note that the gradients... |
def check_format(self, sm_format):
"""
Return ``True`` if the given sync map format is allowed,
and ``False`` otherwise.
:param sm_format: the sync map format to be checked
:type sm_format: Unicode string
:rtype: bool
"""
if sm_format not in SyncMapForma... | Return ``True`` if the given sync map format is allowed,
and ``False`` otherwise.
:param sm_format: the sync map format to be checked
:type sm_format: Unicode string
:rtype: bool |
def format_price(price, currency='$'):
"""
Format the price to have the appropriate currency and digits..
:param price: The price amount.
:param currency: The currency for the price.
:return: A formatted price string, i.e. '$10', '$10.52'.
"""
if int(price) == price:
return '{}{}'.f... | Format the price to have the appropriate currency and digits..
:param price: The price amount.
:param currency: The currency for the price.
:return: A formatted price string, i.e. '$10', '$10.52'. |
def canonical_chrom_sorted(in_chroms):
"""
Sort a list of chromosomes in the order 1..22, X, Y, M/MT
:param list in_chroms: Input chromosomes
:return: Sorted chromosomes
:rtype: list[str]
"""
if len(in_chroms) == 0:
return []
chr_prefix = False
mt = False
if in_chroms[0]... | Sort a list of chromosomes in the order 1..22, X, Y, M/MT
:param list in_chroms: Input chromosomes
:return: Sorted chromosomes
:rtype: list[str] |
def _context_menu_make(self, pos):
    """Build the shell widget's context menu.

    Starts from IPython's standard context menu and lets the attached
    ipyclient append its own actions before the menu is shown.
    """
    base_menu = super(ShellWidget, self)._context_menu_make(pos)
    return self.ipyclient.add_actions_to_context_menu(base_menu)
def ID_colored_tube(color):
"""Look up the inner diameter of Ismatec 3-stop tubing given its color code.
:param color: Color of the 3-stop tubing
:type color: string
:returns: Inner diameter of the 3-stop tubing (mm)
:rtype: float
:Examples:
>>> from aguaclara.research.peristaltic_pump i... | Look up the inner diameter of Ismatec 3-stop tubing given its color code.
:param color: Color of the 3-stop tubing
:type color: string
:returns: Inner diameter of the 3-stop tubing (mm)
:rtype: float
:Examples:
>>> from aguaclara.research.peristaltic_pump import ID_colored_tube
>>> from ... |
async def set_topic_channel(self, channel):
"""Set the topic channel for this server"""
data = datatools.get_data()
data["discord"]["servers"][self.server_id][_data.modulename]["topic_id"] = channel.id
datatools.write_data(data)
self.topicchannel = channel
await self.set... | Set the topic channel for this server |
def import_locations(self, data):
"""Parse `GNU miscfiles`_ cities data files.
``import_locations()`` returns a list containing :class:`City` objects.
It expects data files in the same format that `GNU miscfiles`_
provides, that is::
ID : 1
Type ... | Parse `GNU miscfiles`_ cities data files.
``import_locations()`` returns a list containing :class:`City` objects.
It expects data files in the same format that `GNU miscfiles`_
provides, that is::
ID : 1
Type : City
Population : 210700
... |
def section(title, bar=OVERLINE, strm=sys.stdout):
    """Print a centered, emphasized section heading followed by a rule line.

    :param title: heading text, centered to the terminal width
    :param bar: character(s) repeated to draw the rule under the heading
    :param strm: NOTE(review): accepted but never used -- output always goes
        through ``printy``; confirm whether it should be honored.
    """
    term_width = utils.term.width
    heading = bold(title.center(term_width))
    # Repeat the bar past the width, then clip so multi-char bars still fit.
    rule_line = bold((bar * term_width)[:term_width])
    printy(heading)
    printy(rule_line)
def GetFileObject(self, data_stream_name=''):
"""Retrieves the file-like object.
Args:
data_stream_name (Optional[str]): name of the data stream, where an empty
string represents the default data stream.
Returns:
FileIO: a file-like object or None if not available.
"""
if dat... | Retrieves the file-like object.
Args:
data_stream_name (Optional[str]): name of the data stream, where an empty
string represents the default data stream.
Returns:
FileIO: a file-like object or None if not available. |
def build_signature(self, user_api_key, user_secret, request):
"""Return the signature for the request."""
path = request.get_full_path()
sent_signature = request.META.get(
self.header_canonical('Authorization'))
signature_headers = self.get_headers_from_signature(sent_signat... | Return the signature for the request. |
def join(input_files, output_file):
'''
Join geojsons into one. The spatial reference system of the output file is the same
as the one of the last file in the list.
Args:
input_files (list): List of file name strings.
output_file (str): Output file name.
'''
# get feature c... | Join geojsons into one. The spatial reference system of the output file is the same
as the one of the last file in the list.
Args:
input_files (list): List of file name strings.
output_file (str): Output file name. |
def process(self, salt_data, token, opts):
'''
Process events and publish data
'''
log.debug('In process %s', threading.current_thread())
log.debug(salt_data['tag'])
log.debug(salt_data)
parts = salt_data['tag'].split('/')
if len(parts) < 2:
r... | Process events and publish data |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.