code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def forgot_password():
"""View function that handles a forgotten password request."""
form_class = _security.forgot_password_form
if request.is_json:
form = form_class(MultiDict(request.get_json()))
else:
form = form_class()
if form.validate_on_submit():
send_reset_passwor... | View function that handles a forgotten password request. |
def get_method_documentation(method):
"""
This function uses "inspect" to retrieve information about a method.
Also, if you place comment on the method, method can be docummented with "reStructured Text".
:param method: method to describe
:returns:
{
'name' : <stri... | This function uses "inspect" to retrieve information about a method.
Also, if you place comment on the method, method can be docummented with "reStructured Text".
:param method: method to describe
:returns:
{
'name' : <string> - name of the method,
'friendly_na... |
def add_state_editor(self, state_m):
"""Triggered whenever a state is selected.
:param state_m: The selected state model.
"""
state_identifier = self.get_state_identifier(state_m)
if state_identifier in self.closed_tabs:
state_editor_ctrl = self.closed_tabs[state_id... | Triggered whenever a state is selected.
:param state_m: The selected state model. |
def register_converter(operator_name, conversion_function, overwrite=False):
'''
:param operator_name: A unique operator ID. It is usually a string but you can use a type as well
:param conversion_function: A callable object
:param overwrite: By default, we raise an exception if the caller of this funct... | :param operator_name: A unique operator ID. It is usually a string but you can use a type as well
:param conversion_function: A callable object
:param overwrite: By default, we raise an exception if the caller of this function is trying to assign an existing
key (i.e., operator_name) a new value (i.e., conv... |
def get_all_apps():
"""Get a list of all applications in Spinnaker.
Returns:
requests.models.Response: Response from Gate containing list of all apps.
"""
LOG.info('Retreiving list of all Spinnaker applications')
url = '{}/applications'.format(API_URL)
response = requests.get(url, veri... | Get a list of all applications in Spinnaker.
Returns:
requests.models.Response: Response from Gate containing list of all apps. |
def filterAcceptsRow(self, row, parentindex):
"""Return True, if the filter accepts the given row of the parent
:param row: the row to filter
:type row: :class:`int`
:param parentindex: the parent index
:type parentindex: :class:`QtCore.QModelIndex`
:returns: True, if th... | Return True, if the filter accepts the given row of the parent
:param row: the row to filter
:type row: :class:`int`
:param parentindex: the parent index
:type parentindex: :class:`QtCore.QModelIndex`
:returns: True, if the filter accepts the row
:rtype: :class:`bool`
... |
def getmlsthelper(referencefilepath, start, organism, update):
"""Prepares to run the getmlst.py script provided in SRST2"""
from accessoryFunctions.accessoryFunctions import GenObject
# Initialise a set to for the organism(s) for which new alleles and profiles are desired
organismset = set()
# Allo... | Prepares to run the getmlst.py script provided in SRST2 |
def neighsol(addr, src, iface, timeout=1, chainCC=0):
"""Sends and receive an ICMPv6 Neighbor Solicitation message
This function sends an ICMPv6 Neighbor Solicitation message
to get the MAC address of the neighbor with specified IPv6 address address.
'src' address is used as source of the message. Mes... | Sends and receive an ICMPv6 Neighbor Solicitation message
This function sends an ICMPv6 Neighbor Solicitation message
to get the MAC address of the neighbor with specified IPv6 address address.
'src' address is used as source of the message. Message is sent on iface.
By default, timeout waiting for an... |
def load_yaml_config(self, conf):
"""Load a YAML configuration file and recursively update the overall configuration."""
with open(conf) as fd:
self.config = recursive_dict_update(self.config, yaml.load(fd, Loader=UnsafeLoader)) | Load a YAML configuration file and recursively update the overall configuration. |
def list_policies(self, filters=None):
"""Retrieve installed trap, drop and bypass policies.
:param filters: retrieve only matching policies (optional)
:type filters: dict
:return: list of installed trap, drop and bypass policies
:rtype: list
"""
_, policy_list =... | Retrieve installed trap, drop and bypass policies.
:param filters: retrieve only matching policies (optional)
:type filters: dict
:return: list of installed trap, drop and bypass policies
:rtype: list |
def add_extension_attribute(self, ext_name, key, value):
"""
Banana banana
"""
attributes = self.extension_attributes.pop(ext_name, {})
attributes[key] = value
self.extension_attributes[ext_name] = attributes | Banana banana |
def watcher(self) -> Watcher:
"""
Gives an access to action's watcher.
:return: Action's watcher instance.
"""
if not hasattr(self, "_watcher"):
self._watcher = Watcher()
return self._watcher | Gives an access to action's watcher.
:return: Action's watcher instance. |
def GetResources(filename, types=None, names=None, languages=None):
"""
Get resources from dll/exe file.
types = a list of resource types to search for (None = all)
names = a list of resource names to search for (None = all)
languages = a list of resource languages to search for (None = all)
... | Get resources from dll/exe file.
types = a list of resource types to search for (None = all)
names = a list of resource names to search for (None = all)
languages = a list of resource languages to search for (None = all)
Return a dict of the form {type_: {name: {language: data}}} which
might a... |
def gen_anytext(*args):
"""
Convenience function to create bag of words for anytext property
"""
bag = []
for term in args:
if term is not None:
if isinstance(term, list):
for term2 in term:
if term2 is not None:
bag.a... | Convenience function to create bag of words for anytext property |
def write(self, values):
"""
Write values to the targeted documents
Values need to be a dict as : {document_id: value}
"""
# Insert only for docs targeted by the target
filtered = {_id: value for _id, value in values.items() if _id in self._document_ids}
if not f... | Write values to the targeted documents
Values need to be a dict as : {document_id: value} |
def _parse_values(self):
""" Get values
"""
data = []
if self.has_tabs:
def _parse_tab_text(tab):
# Annoying html in tabs
if tab.select_one(".visible_normal"):
return tab.select_one(".visible_normal").text
el... | Get values |
def complete_modules(text):
'''complete mavproxy module names'''
import MAVProxy.modules, pkgutil
modlist = [x[1] for x in pkgutil.iter_modules(MAVProxy.modules.__path__)]
ret = []
loaded = set(complete_loadedmodules(''))
for m in modlist:
if not m.startswith("mavproxy_"):
co... | complete mavproxy module names |
def create_sequence_sites(chain, seq_site_length):
"""Create sequence sites using sequence ids.
:param dict chain: Chain object that contains chemical shift values and assignment information.
:param int seq_site_length: Length of a single sequence site.
:return: List of sequence sites.
... | Create sequence sites using sequence ids.
:param dict chain: Chain object that contains chemical shift values and assignment information.
:param int seq_site_length: Length of a single sequence site.
:return: List of sequence sites.
:rtype: :py:class:`list` |
def from_dict(input_dict, data=None):
"""
Instantiate an object of a derived class using the information
in input_dict (built by the to_dict method of the derived class).
More specifically, after reading the derived class from input_dict,
it calls the method _build_from_input_dic... | Instantiate an object of a derived class using the information
in input_dict (built by the to_dict method of the derived class).
More specifically, after reading the derived class from input_dict,
it calls the method _build_from_input_dict of the derived class.
Note: This method should n... |
def sha256_fingerprint_from_raw_ssh_pub_key(raw_key):
"""Encode a raw SSH key (string of bytes, as from
`str(paramiko.AgentKey)`) to a fingerprint in the SHA256 form:
SHA256:j2WoSeOWhFy69BQ39fuafFAySp9qCZTSCEyT2vRKcL+s
"""
digest = hashlib.sha256(raw_key).digest()
h = base64.b64encode(digest... | Encode a raw SSH key (string of bytes, as from
`str(paramiko.AgentKey)`) to a fingerprint in the SHA256 form:
SHA256:j2WoSeOWhFy69BQ39fuafFAySp9qCZTSCEyT2vRKcL+s |
def to_json(model, sort=False, **kwargs):
"""
Return the model as a JSON document.
``kwargs`` are passed on to ``json.dumps``.
Parameters
----------
model : cobra.Model
The cobra model to represent.
sort : bool, optional
Whether to sort the metabolites, reactions, and genes... | Return the model as a JSON document.
``kwargs`` are passed on to ``json.dumps``.
Parameters
----------
model : cobra.Model
The cobra model to represent.
sort : bool, optional
Whether to sort the metabolites, reactions, and genes or maintain the
order defined in the model.
... |
def update(self, index, id, doc_type="_doc", body=None, params=None):
"""
Update a document based on a script or partial data provided.
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update.html>`_
:arg index: The name of the index
:arg id: Document ID
... | Update a document based on a script or partial data provided.
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update.html>`_
:arg index: The name of the index
:arg id: Document ID
:arg body: The request definition using either `script` or partial `doc`
:arg... |
def get_django_user(self, username, password=None):
"""
Get the Django user with the given username, or create one if it
doesn't already exist. If `password` is given, then set the user's
password to that (regardless of whether the user was created or not).
"""
try:
... | Get the Django user with the given username, or create one if it
doesn't already exist. If `password` is given, then set the user's
password to that (regardless of whether the user was created or not). |
def increase_volume(percentage):
'''Increase the volume.
Increase the volume by a given percentage.
Args:
percentage (int): The percentage (as an integer between 0 and 100) to increase the volume by.
Raises:
ValueError: if the percentage is >100 or <0.
'''
if percentage > 100 or percentage < 0:
raise ... | Increase the volume.
Increase the volume by a given percentage.
Args:
percentage (int): The percentage (as an integer between 0 and 100) to increase the volume by.
Raises:
ValueError: if the percentage is >100 or <0. |
def __init_configsvrs(self, params):
"""create and start config servers"""
self._configsvrs = []
for cfg in params:
# Remove flags that turn on auth.
cfg = self._strip_auth(cfg)
server_id = cfg.pop('server_id', None)
version = cfg.pop('version', se... | create and start config servers |
def _file_write(path, content):
'''
Write content to a file
'''
with salt.utils.files.fopen(path, 'w+') as fp_:
fp_.write(salt.utils.stringutils.to_str(content))
fp_.close() | Write content to a file |
def pager_fatality_rates():
"""USGS Pager fatality estimation model.
Fatality rate(MMI) = cum. standard normal dist(1/BETA * ln(MMI/THETA)).
Reference:
Jaiswal, K. S., Wald, D. J., and Hearne, M. (2009a).
Estimating casualties for large worldwide earthquakes using an empirical
approach. U.S. ... | USGS Pager fatality estimation model.
Fatality rate(MMI) = cum. standard normal dist(1/BETA * ln(MMI/THETA)).
Reference:
Jaiswal, K. S., Wald, D. J., and Hearne, M. (2009a).
Estimating casualties for large worldwide earthquakes using an empirical
approach. U.S. Geological Survey Open-File Report ... |
def _find_players(self, year):
"""
Find all player IDs for the requested team.
For the requested team and year (if applicable), pull the roster table
and parse the player ID for all players on the roster and create an
instance of the Player class for the player. All player insta... | Find all player IDs for the requested team.
For the requested team and year (if applicable), pull the roster table
and parse the player ID for all players on the roster and create an
instance of the Player class for the player. All player instances are
added to the 'players' property to... |
def tent_map(x, steps, mu=2):
"""
Generates a time series of the tent map.
Characteristics and Background:
The name of the tent map is derived from the fact that the plot of x_i vs
x_i+1 looks like a tent. For mu > 1 one application of the mapping function
can be viewed as stretching the surface on w... | Generates a time series of the tent map.
Characteristics and Background:
The name of the tent map is derived from the fact that the plot of x_i vs
x_i+1 looks like a tent. For mu > 1 one application of the mapping function
can be viewed as stretching the surface on which the value is located and
then... |
def nexec(statement, globals=None, locals=None, **kwargs):
"""Execute *statement* using *globals* and *locals* dictionaries as
*global* and *local* namespace. *statement* is transformed using
:class:`.NapiTransformer`."""
try:
import __builtin__ as builtins
except ImportError:
impo... | Execute *statement* using *globals* and *locals* dictionaries as
*global* and *local* namespace. *statement* is transformed using
:class:`.NapiTransformer`. |
def _draw_frame(self, framedata):
"""Reads, processes and draws the frames.
If needed for color maps, conversions to gray scale are performed. In
case the images are no color images and no custom color maps are
defined, the colormap `gray` is applied.
This function is called by... | Reads, processes and draws the frames.
If needed for color maps, conversions to gray scale are performed. In
case the images are no color images and no custom color maps are
defined, the colormap `gray` is applied.
This function is called by TimedAnimation.
Args:
f... |
def match_any_learning_objective(self, match):
"""Matches an item with any objective.
arg: match (boolean): ``true`` to match items with any
learning objective, ``false`` to match items with no
learning objectives
*compliance: mandatory -- This method must be ... | Matches an item with any objective.
arg: match (boolean): ``true`` to match items with any
learning objective, ``false`` to match items with no
learning objectives
*compliance: mandatory -- This method must be implemented.* |
def set_poll_func(self, func, func_err_handler=None):
'''Can be used to integrate pulse client into existing eventloop.
Function will be passed a list of pollfd structs and timeout value (seconds, float),
which it is responsible to use and modify (set poll flags) accordingly,
returning int value >= 0 with ... | Can be used to integrate pulse client into existing eventloop.
Function will be passed a list of pollfd structs and timeout value (seconds, float),
which it is responsible to use and modify (set poll flags) accordingly,
returning int value >= 0 with number of fds that had any new events within timeout.
fu... |
def eeg_microstates_relabel(method, results, microstates_labels, reverse_microstates=None):
"""
Relabel the microstates.
"""
microstates = list(method['microstates'])
for index, microstate in enumerate(method['microstates']):
if microstate in list(reverse_microstates.keys()):
... | Relabel the microstates. |
def _(s: Influence) -> bool:
""" Check if an Influence statement is grounded """
return is_grounded(s.subj) and is_grounded(s.obj) | Check if an Influence statement is grounded |
def channel(self):
"""If no channel exists, a new one is requested."""
if not self._channel:
self._channel_ref = weakref.ref(self.connection.get_channel())
return self._channel | If no channel exists, a new one is requested. |
def _reduce(self):
"""Perform a greedy reduction of token stream.
If a reducer method matches, it will be executed, then the
:meth:`reduce` method will be called recursively to search
for any more possible reductions.
"""
for reduction, methname in self.reducers:
... | Perform a greedy reduction of token stream.
If a reducer method matches, it will be executed, then the
:meth:`reduce` method will be called recursively to search
for any more possible reductions. |
def check(ctx):
""" Check built package is valid.
"""
check_command = f"twine check {ctx.directory!s}/dist/*"
report.info(ctx, "package.check", "checking package")
ctx.run(check_command) | Check built package is valid. |
def get_collections(self, data):
"""Return serialized list of collection objects on data that user has `view` permission on."""
collections = self._filter_queryset('view_collection', data.collection_set.all())
from .collection import CollectionSerializer
class CollectionWithoutDataSeri... | Return serialized list of collection objects on data that user has `view` permission on. |
def get_network_by_id(self, network_id: int) -> Network:
"""Get a network from the database by its identifier."""
return self.session.query(Network).get(network_id) | Get a network from the database by its identifier. |
def set_active_vectors(self, name, preference='cell'):
"""Finds the vectors by name and appropriately sets it as active"""
_, field = get_scalar(self, name, preference=preference, info=True)
if field == POINT_DATA_FIELD:
self.GetPointData().SetActiveVectors(name)
elif field =... | Finds the vectors by name and appropriately sets it as active |
def run(self):
"""Run.
:raises BuildFailed: extension build failed and need to skip cython part.
"""
try:
build_ext.build_ext.run(self)
# Copy __init__.py back to repair package.
build_dir = os.path.abspath(self.build_lib)
root_dir = os.p... | Run.
:raises BuildFailed: extension build failed and need to skip cython part. |
def iam(self):
"""Generate iam details."""
iam = {
'group': self.format['iam_group'].format(**self.data),
'lambda_role': self.format['iam_lambda_role'].format(**self.data),
'policy': self.format['iam_policy'].format(**self.data),
'profile': self.format['ia... | Generate iam details. |
def update_loadbalancer(self, lbaas_loadbalancer, body=None):
"""Updates a load balancer."""
return self.put(self.lbaas_loadbalancer_path % (lbaas_loadbalancer),
body=body) | Updates a load balancer. |
def get_default_config_help(self):
"""
Return help text for collector configuration.
"""
config_help = super(MemoryLxcCollector, self).get_default_config_help()
config_help.update({
"sys_path": "Defaults to '/sys/fs/cgroup/lxc'",
})
return config_help | Return help text for collector configuration. |
def from_json(self, resource_root, data):
"""
Parses the given JSON value into an appropriate python object.
This means:
- a datetime.datetime if 'atype' is datetime.datetime
- a converted config dictionary or config list if 'atype' is ApiConfig
- if the attr is an API list, an ApiList with ... | Parses the given JSON value into an appropriate python object.
This means:
- a datetime.datetime if 'atype' is datetime.datetime
- a converted config dictionary or config list if 'atype' is ApiConfig
- if the attr is an API list, an ApiList with instances of 'atype'
- an instance of 'atype' if ... |
def getKeywordsForText(self, retina_name, body, ):
"""Get a list of keywords from the text
Args:
retina_name, str: The retina name (required)
body, str: The text to be evaluated (required)
Returns: Array[str]
"""
resourcePath = '/text/keywords'
... | Get a list of keywords from the text
Args:
retina_name, str: The retina name (required)
body, str: The text to be evaluated (required)
Returns: Array[str] |
def pack_image(filename, max_size, form_field='image'):
"""Pack an image from file into multipart-formdata post body"""
try:
if os.path.getsize(filename) > (max_size * 1024):
raise IdeaScalyError('File is too big, must be less than %skb.' % max_size)
except os.error a... | Pack an image from file into multipart-formdata post body |
def _convert_types(schema, col_type_dict, row):
"""
Takes a value from MySQLdb, and converts it to a value that's safe for
JSON/Google cloud storage/BigQuery. Dates are converted to UTC seconds.
Decimals are converted to floats. Binary type fields are encoded with base64,
as impo... | Takes a value from MySQLdb, and converts it to a value that's safe for
JSON/Google cloud storage/BigQuery. Dates are converted to UTC seconds.
Decimals are converted to floats. Binary type fields are encoded with base64,
as imported BYTES data must be base64-encoded according to Bigquery SQL
... |
def plot_signal(signal, sig_len, n_sig, fs, time_units, sig_style, axes):
"Plot signal channels"
# Extend signal style if necesary
if len(sig_style) == 1:
sig_style = n_sig * sig_style
# Figure out time indices
if time_units == 'samples':
t = np.linspace(0, sig_len-1, sig_len)
... | Plot signal channels |
def is_scalar(value):
"""Test if the given value is a scalar.
This function also works with memory mapped array values, in contrast to the numpy is_scalar method.
Args:
value: the value to test for being a scalar value
Returns:
boolean: if the given value is a scalar or not
"""
... | Test if the given value is a scalar.
This function also works with memory mapped array values, in contrast to the numpy is_scalar method.
Args:
value: the value to test for being a scalar value
Returns:
boolean: if the given value is a scalar or not |
def noisy_operation(self, operation: 'cirq.Operation') -> 'cirq.OP_TREE':
"""Adds noise to an individual operation.
Args:
operation: The operation to make noisy.
Returns:
An OP_TREE corresponding to the noisy operations implementing the
noisy version of the ... | Adds noise to an individual operation.
Args:
operation: The operation to make noisy.
Returns:
An OP_TREE corresponding to the noisy operations implementing the
noisy version of the given operation. |
def do_check_artifact_cache(self, vts, post_process_cached_vts=None):
"""Checks the artifact cache for the specified list of VersionedTargetSets.
Returns a tuple (cached, uncached, uncached_causes) of VersionedTargets that were
satisfied/unsatisfied from the cache.
"""
if not vts:
return [], ... | Checks the artifact cache for the specified list of VersionedTargetSets.
Returns a tuple (cached, uncached, uncached_causes) of VersionedTargets that were
satisfied/unsatisfied from the cache. |
def is_dict_like(obj):
"""
Check if the object is dict-like.
Parameters
----------
obj : The object to check
Returns
-------
is_dict_like : bool
Whether `obj` has dict-like properties.
Examples
--------
>>> is_dict_like({1: 2})
True
>>> is_dict_like([1, 2, ... | Check if the object is dict-like.
Parameters
----------
obj : The object to check
Returns
-------
is_dict_like : bool
Whether `obj` has dict-like properties.
Examples
--------
>>> is_dict_like({1: 2})
True
>>> is_dict_like([1, 2, 3])
False
>>> is_dict_like(... |
def set_fixed_image(self, image):
"""
Set Fixed ANTsImage for metric
"""
if not isinstance(image, iio.ANTsImage):
raise ValueError('image must be ANTsImage type')
if image.dimension != self.dimension:
raise ValueError('image dim (%i) does not match metric... | Set Fixed ANTsImage for metric |
def _translate_key(key):
"""
if key id deprecated and a replacement key defined, will return the
replacement key, otherwise returns `key` as - is
"""
d = _get_deprecated_option(key)
if d:
return d.rkey or key
else:
return key | if key id deprecated and a replacement key defined, will return the
replacement key, otherwise returns `key` as - is |
def _generate_processed_key_name(process_to, upload_name):
"""Returns a key name to use after processing based on timestamp and
upload key name."""
timestamp = datetime.now().strftime('%Y%m%d%H%M%S%f')
name, extension = os.path.splitext(upload_name)
digest = md5(''.join([timestam... | Returns a key name to use after processing based on timestamp and
upload key name. |
def get_relative_from_paths(self, filepath, paths):
"""
Find the relative filepath from the most relevant multiple paths.
This is somewhat like a ``os.path.relpath(path[, start])`` but where
``start`` is a list. The most relevant item from ``paths`` will be used
to apply the rel... | Find the relative filepath from the most relevant multiple paths.
This is somewhat like a ``os.path.relpath(path[, start])`` but where
``start`` is a list. The most relevant item from ``paths`` will be used
to apply the relative transform.
Args:
filepath (str): Path to tran... |
def find_dangerous_changes(
old_schema: GraphQLSchema, new_schema: GraphQLSchema
) -> List[DangerousChange]:
"""Find dangerous changes.
Given two schemas, returns a list containing descriptions of all the types of
potentially dangerous changes covered by the other functions down below.
"""
retu... | Find dangerous changes.
Given two schemas, returns a list containing descriptions of all the types of
potentially dangerous changes covered by the other functions down below. |
def desc(self) -> str:
"""A helper property to describe a token as a string for debugging"""
kind, value = self.kind.value, self.value
return f"{kind} {value!r}" if value else kind | A helper property to describe a token as a string for debugging |
def _get_content_type(self, filename):
""" gets the content type of a file """
mntype = mimetypes.guess_type(filename)[0]
filename, fileExtension = os.path.splitext(filename)
if mntype is None and\
fileExtension.lower() == ".csv":
mntype = "text/csv"
elif ... | gets the content type of a file |
def set(self, project, date, data, data_ts):
"""
Set the cache data for a specified project for the specified date.
:param project: project name to set data for
:type project: str
:param date: date to set data for
:type date: datetime.datetime
:param data: data t... | Set the cache data for a specified project for the specified date.
:param project: project name to set data for
:type project: str
:param date: date to set data for
:type date: datetime.datetime
:param data: data to cache
:type data: dict
:param data_ts: maximum ... |
def _button_plus_clicked(self, n):
"""
Create a new colorpoint.
"""
self._button_save.setEnabled(True)
self.insert_colorpoint(self._colorpoint_list[n][0],
self._colorpoint_list[n][1],
self._colorpoint_list[n][2])
... | Create a new colorpoint. |
def Unlock(fd, path):
"""Release the lock on the file.
Args:
fd: int, the file descriptor of the file to unlock.
path: string, the name of the file to lock.
Raises:
IOError, raised from flock while attempting to release a file lock.
"""
try:
fcntl.flock(fd, fcntl.LOCK_UN | fcntl.LOCK_NB)
e... | Release the lock on the file.
Args:
fd: int, the file descriptor of the file to unlock.
path: string, the name of the file to lock.
Raises:
IOError, raised from flock while attempting to release a file lock. |
def _set_linkinfo_domain_reachable(self, v, load=False):
"""
Setter method for linkinfo_domain_reachable, mapped from YANG variable /brocade_fabric_service_rpc/show_linkinfo/output/show_link_info/linkinfo_domain_reachable (linkinfo-domain-reachable-type)
If this variable is read-only (config: false) in the
... | Setter method for linkinfo_domain_reachable, mapped from YANG variable /brocade_fabric_service_rpc/show_linkinfo/output/show_link_info/linkinfo_domain_reachable (linkinfo-domain-reachable-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_linkinfo_domain_reachable is consider... |
def linear_exprs(A, x, b=None, rref=False, Matrix=None):
""" Returns Ax - b
Parameters
----------
A : matrix_like of numbers
Of shape (len(b), len(x)).
x : iterable of symbols
b : array_like of numbers (default: None)
When ``None``, assume zeros of length ``len(x)``.
Matrix ... | Returns Ax - b
Parameters
----------
A : matrix_like of numbers
Of shape (len(b), len(x)).
x : iterable of symbols
b : array_like of numbers (default: None)
When ``None``, assume zeros of length ``len(x)``.
Matrix : class
When ``rref == True``: A matrix class which suppo... |
def decompressBuffer(buffer):
"complements the compressBuffer function in CacheClient"
zbuf = cStringIO.StringIO(buffer)
zfile = gzip.GzipFile(fileobj=zbuf)
deflated = zfile.read()
zfile.close()
return deflated | complements the compressBuffer function in CacheClient |
def login(self, username, json_document):
"""Send user identity information to the identity manager.
Raise a ServerError if an error occurs in the request process.
@param username The logged in user.
@param json_document The JSON payload for login.
"""
url = '{}u/{}'.fo... | Send user identity information to the identity manager.
Raise a ServerError if an error occurs in the request process.
@param username The logged in user.
@param json_document The JSON payload for login. |
def extract_largest(self, inplace=False):
"""
Extract largest connected set in mesh.
Can be used to reduce residues obtained when generating an isosurface.
Works only if residues are not connected (share at least one point with)
the main component of the image.
Paramete... | Extract largest connected set in mesh.
Can be used to reduce residues obtained when generating an isosurface.
Works only if residues are not connected (share at least one point with)
the main component of the image.
Parameters
----------
inplace : bool, optional
... |
def to_image_list(tensors, size_divisible=0):
"""
tensors can be an ImageList, a torch.Tensor or
an iterable of Tensors. It can't be a numpy array.
When tensors is an iterable of Tensors, it pads
the Tensors with zeros so that they have the same
shape
"""
if isinstance(tensors, torch.Ten... | tensors can be an ImageList, a torch.Tensor or
an iterable of Tensors. It can't be a numpy array.
When tensors is an iterable of Tensors, it pads
the Tensors with zeros so that they have the same
shape |
def getSolutionIter(self):
"""
Return an iterator to the solutions of the problem
Example:
>>> problem = Problem()
>>> list(problem.getSolutionIter()) == []
True
>>> problem.addVariables(["a"], [42])
>>> iter = problem.getSolutionIter()
>>> next(... | Return an iterator to the solutions of the problem
Example:
>>> problem = Problem()
>>> list(problem.getSolutionIter()) == []
True
>>> problem.addVariables(["a"], [42])
>>> iter = problem.getSolutionIter()
>>> next(iter)
{'a': 42}
>>> next(iter)
... |
def find_multiplex_by_name(self, multiplex_name: str) -> Multiplex:
"""
Find and return a multiplex in the influence graph with the given name.
Raise an AttributeError if there is no multiplex in the graph with the given name.
"""
for multiplex in self.multiplexes:
i... | Find and return a multiplex in the influence graph with the given name.
Raise an AttributeError if there is no multiplex in the graph with the given name. |
def get_suppliers_per_page(self, per_page=1000, page=1, params=None):
"""
Get suppliers per page
:param per_page: How many objects per page. Default: 1000
:param page: Which page. Default: 1
:param params: Search parameters. Default: {}
:return: list
"""
... | Get suppliers per page
:param per_page: How many objects per page. Default: 1000
:param page: Which page. Default: 1
:param params: Search parameters. Default: {}
:return: list |
def key_exists(self, section, key):
"""
Checks if given key exists.
:param section: Current section to check key in.
:type section: unicode
:param key: Current key to check.
:type key: unicode
:return: Key existence.
:rtype: bool
"""
LOGG... | Checks if given key exists.
:param section: Current section to check key in.
:type section: unicode
:param key: Current key to check.
:type key: unicode
:return: Key existence.
:rtype: bool |
def _decode_embedded_dict(src):
'''
Convert enbedded bytes to strings if possible.
Dict helper.
'''
output = {}
for key, val in six.iteritems(src):
if isinstance(val, dict):
val = _decode_embedded_dict(val)
elif isinstance(val, list):
val = _decode_embedde... | Convert enbedded bytes to strings if possible.
Dict helper. |
def _filter_headers(self):
"""
Add headers designed for filtering messages based on objects.
Returns:
dict: Filter-related headers to be combined with the existing headers
"""
headers = {}
for user in self.usernames:
headers["fedora_messaging_user... | Add headers designed for filtering messages based on objects.
Returns:
dict: Filter-related headers to be combined with the existing headers |
def import_app_module(app_name, module_name):
"""Returns a module from a given app by its name.
:param str app_name:
:param str module_name:
:rtype: module or None
"""
name_split = app_name.split('.')
if name_split[-1][0].isupper(): # Seems that we have app config class path here.
... | Returns a module from a given app by its name.
:param str app_name:
:param str module_name:
:rtype: module or None |
def dig(host):
    '''
    Performs a DNS lookup with dig

    CLI Example:

    .. code-block:: bash

        salt '*' network.dig archlinux.org
    '''
    # Sanitize the hostname before interpolating it into the shell command.
    safe_host = salt.utils.network.sanitize_host(host)
    return __salt__['cmd.run']('dig {0}'.format(safe_host))
CLI Example:
.. code-block:: bash
salt '*' network.dig archlinux.org |
def create_filter(self):
    """Get an instance of filter services facade."""
    # The facade reuses this client's connection credentials.
    return Filter(self.networkapi_url, self.user, self.password, self.user_ldap)
def _check_connections(self):
"""Checks if all configured redis servers are reachable"""
for server in self._servers:
if self._is_reachable(server):
server['down_until'] = 0
else:
server['down_until'] = time.time() + 5 | Checks if all configured redis servers are reachable |
def clean(self):
    """
    Empties the cache.

    Clears the lookup table and removes every entry from the
    usage-recency structure.
    """
    self._table.clear()
    # Bug fix: calling remove() on a container while iterating it skips
    # every other element, leaving half the entries behind. Snapshot the
    # items first, then remove each one from the live structure (remove()
    # is kept in case _usage_recency is a custom list-like type without
    # clear()).
    for item in list(self._usage_recency):
        self._usage_recency.remove(item)
def fit(self, X, y, **fit_params):
"""Find the best parameters for a particular model.
Parameters
----------
X, y : array-like
**fit_params
Additional partial fit keyword arguments for the estimator.
"""
return default_client().sync(self._fit, X, y, *... | Find the best parameters for a particular model.
Parameters
----------
X, y : array-like
**fit_params
Additional partial fit keyword arguments for the estimator. |
def json_data(self):
"""Returns data as JSON
Returns:
json_data (str): JSON representation of data, as created in make_data
"""
def stringify_keys(d):
if not isinstance(d, dict):
return d
return dict((str(k), stringify_keys(v)) f... | Returns data as JSON
Returns:
json_data (str): JSON representation of data, as created in make_data |
def fetch_assets(self):
""" download bootstrap assets to control host.
If present on the control host they will be uploaded to the target host during bootstrapping.
"""
# allow overwrites from the commandline
packages = set(
env.instance.config.get('bootstrap-packages... | download bootstrap assets to control host.
If present on the control host they will be uploaded to the target host during bootstrapping. |
def _serialize_value_for_xml(self, value):
"""See base class."""
if value is not None:
value_serialized = self.serializer.serialize(value)
else:
value_serialized = ''
return value_serialized | See base class. |
def applyIndex(self, lst, right):
"""Apply a list to something else."""
if len(right) != 1:
raise exceptions.EvaluationError('%r can only be applied to one argument, got %r' % (self.left, self.right))
right = right[0]
if isinstance(right, int):
return lst[right]
raise exceptions.Evalua... | Apply a list to something else. |
def indication(self, pdu):
    """Requests are queued for delivery."""
    if _debug:
        TCPServer._debug("indication %r", pdu)
    # Append the PDU payload to the pending outbound request buffer.
    self.request += pdu.pduData
def get_partition_dciId(self, org_name, part_name, part_info=None):
"""get DCI ID for the partition.
:param org_name: name of organization
:param part_name: name of partition
"""
if part_info is None:
part_info = self._get_partition(org_name, part_name)
L... | get DCI ID for the partition.
:param org_name: name of organization
:param part_name: name of partition |
def load_vectors(self, vectors, **kwargs):
"""
Arguments:
vectors: one of or a list containing instantiations of the
GloVe, CharNGram, or Vectors classes. Alternatively, one
of or a list of available pretrained vectors:
charngram.100d
... | Arguments:
vectors: one of or a list containing instantiations of the
GloVe, CharNGram, or Vectors classes. Alternatively, one
of or a list of available pretrained vectors:
charngram.100d
fasttext.en.300d
fasttext.simple.300d
... |
def detach(self, ids=None, touch=True):
"""
Detach models from the relationship.
"""
if isinstance(ids, orator.orm.model.Model):
ids = ids.get_key()
if ids is None:
ids = []
query = self._new_pivot_query()
if not isinstance(ids, list):
... | Detach models from the relationship. |
def tValueForPoint(self, point):
"""
get a t value for a given point
required:
the point must be a point on the curve.
in an overlap case the point will be an intersection point which is always a point on the curve
"""
if self.segmentType == "curve":
... | get a t values for a given point
required:
the point must be a point on the curve.
in an overlap case the point will be an intersection point which is always a point on the curve
def delete_messages(self, messages):
    """
    Delete existing messages.

    http://dev.wheniwork.com/#delete-existing-message
    """
    # The API takes a single comma-separated "ids" query parameter.
    ids_query = urlencode([('ids', ",".join(messages))])
    return self._delete_resource("/2/messages/?%s" % ids_query)
http://dev.wheniwork.com/#delete-existing-message |
def getTypeName(data_type_oid, type_modifier):
"""Returns the base type name according to data_type_oid and type_modifier"""
if data_type_oid == VerticaType.BOOL:
return "Boolean"
elif data_type_oid == VerticaType.INT8:
return "Integer"
elif data_type_oid == VerticaType.FLOAT8:
... | Returns the base type name according to data_type_oid and type_modifier |
def parse(self, args=None):
'''Parse a list of arguments, returning a dict.
Flags are only boolean if they are not followed by a non-flag argument.
All positional arguments not associable with a flag will be added to the return dictionary's `['_']` field.
'''
opts = dict()
... | Parse a list of arguments, returning a dict.
Flags are only boolean if they are not followed by a non-flag argument.
All positional arguments not associable with a flag will be added to the return dictionary's `['_']` field. |
def readline(self):
    """Get the next line including the newline or '' on EOF."""
    self.lineno += 1
    # Pushed-back lines take priority over fresh input.
    if not self._buffer:
        return self.input.readline()
    return self._buffer.pop()
def max_intensity(item_a, time_a, item_b, time_b, max_value):
"""
RMS difference in maximum intensity
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer be... | RMS difference in maximum intensity
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling va... |
def parser(self):
"""Returns the appropriate parser to use for adding arguments to your command."""
if self._command_parser is None:
parents = []
if self.need_verbose:
parents.append(_verbose_parser)
if self.need_settings:
parents.appen... | Returns the appropriate parser to use for adding arguments to your command. |
def read(self, filename):
"""
Read a tribe of templates from a tar formatted file.
:type filename: str
:param filename: File to read templates from.
.. rubric:: Example
>>> tribe = Tribe(templates=[Template(name='c', st=read())])
>>> tribe.write('test_tribe')
... | Read a tribe of templates from a tar formatted file.
:type filename: str
:param filename: File to read templates from.
.. rubric:: Example
>>> tribe = Tribe(templates=[Template(name='c', st=read())])
>>> tribe.write('test_tribe')
Tribe of 1 templates
>>> tribe_... |
def build(path, query=None, fragment=''):
"""
Generates a URL based on the inputted path and given query options and
fragment. The query should be a dictionary of terms that will be
generated into the URL, while the fragment is the anchor point within the
target path that will be navigated to. If ... | Generates a URL based on the inputted path and given query options and
fragment. The query should be a dictionary of terms that will be
generated into the URL, while the fragment is the anchor point within the
target path that will be navigated to. If there are any wildcards within
the path that are f... |
def unpack(self, buff, offset=0):
"""Unpack a binary struct into this object's attributes.
Return the values instead of the lib's basic types.
After unpacking, the absence of a `tpid` value causes the assignment
of None to the field values to indicate that there is no VLAN
inf... | Unpack a binary struct into this object's attributes.
Return the values instead of the lib's basic types.
After unpacking, the absence of a `tpid` value causes the assignment
of None to the field values to indicate that there is no VLAN
information.
Args:
buff (by... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.