code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def Run(self, unused_arg):
"""This kills us with no cleanups."""
logging.debug("Disabling service")
msg = "Service disabled."
if hasattr(sys, "frozen"):
grr_binary = os.path.abspath(sys.executable)
elif __file__:
grr_binary = os.path.abspath(__file__)
try:
os.remove(grr_binar... | This kills us with no cleanups. |
def from_taxtable(cls, taxtable_fp):
"""
Generate a node from an open handle to a taxtable, as generated by
``taxit taxtable``
"""
r = csv.reader(taxtable_fp)
headers = next(r)
rows = (collections.OrderedDict(list(zip(headers, i))) for i in r)
row = next(... | Generate a node from an open handle to a taxtable, as generated by
``taxit taxtable`` |
def vi_return_param(self, index):
""" Wrapper function for selecting appropriate latent variable for variational inference
Parameters
----------
index : int
0 or 1 depending on which latent variable
Returns
----------
The appropriate indexed paramete... | Wrapper function for selecting appropriate latent variable for variational inference
Parameters
----------
index : int
0 or 1 depending on which latent variable
Returns
----------
The appropriate indexed parameter |
def decode_call(self, call):
"""
Replace callable tokens with callable names.
:param call: Encoded callable name
:type call: string
:rtype: string
"""
# Callable name is None when callable is part of exclude list
if call is None:
return Non... | Replace callable tokens with callable names.
:param call: Encoded callable name
:type call: string
:rtype: string |
def parse_number_factory(alg, sep, pre_sep):
"""
Create a function that will format a number into a tuple.
Parameters
----------
alg : ns enum
Indicate how to format the *bytes*.
sep : str
The string character to be inserted before the number
in the returned tuple.
p... | Create a function that will format a number into a tuple.
Parameters
----------
alg : ns enum
Indicate how to format the *bytes*.
sep : str
The string character to be inserted before the number
in the returned tuple.
pre_sep : str
In the event that *alg* contains ``U... |
def add_association_to_graph(self):
"""
Overrides Association by including bnode support
The reified relationship between a genotype (or any genotype part)
and a phenotype is decorated with some provenance information.
This makes the assumption that
both the genotype an... | Overrides Association by including bnode support
The reified relationship between a genotype (or any genotype part)
and a phenotype is decorated with some provenance information.
This makes the assumption that
both the genotype and phenotype are classes.
currently hardcoded to... |
def tau_reduction(ms, rate, n_per_decade):
"""Reduce the number of taus to maximum of n per decade (Helper function)
takes in a tau list and reduces the number of taus to a maximum amount per
decade. This is only useful if more than the "decade" and "octave" but
less than the "all" taus are wanted. E.g... | Reduce the number of taus to maximum of n per decade (Helper function)
takes in a tau list and reduces the number of taus to a maximum amount per
decade. This is only useful if more than the "decade" and "octave" but
less than the "all" taus are wanted. E.g. to show certain features of
the data one mig... |
def p_reset(self, program):
    """
    reset : RESET primary
    """
    # NOTE: the docstring above is the yacc grammar rule (PLY reads it at
    # runtime) — it must not be edited or reworded.
    target = program[2]
    program[0] = node.Reset([target])
    self.verify_reg(target, 'qreg')
def _update_card_file_location(self, card_name, new_directory):
"""
Moves card to new gssha working directory
"""
with tmp_chdir(self.gssha_directory):
file_card = self.project_manager.getCard(card_name)
if file_card:
if file_card.value:
... | Moves card to new gssha working directory |
def multiple_packaged_versions(package_name):
""" Look through built package directory and see if there are multiple versions there """
dist_files = os.listdir('dist')
versions = set()
for filename in dist_files:
version = funcy.re_find(r'{}-(.+).tar.gz'.format(package_name), filename)
i... | Look through built package directory and see if there are multiple versions there |
def write_metadata(self, key, values):
"""
write out a meta data array to the key as a fixed-format Series
Parameters
----------
key : string
values : ndarray
"""
values = Series(values)
self.parent.put(self._get_metadata_path(key), values, forma... | write out a meta data array to the key as a fixed-format Series
Parameters
----------
key : string
values : ndarray |
def create_tables(database):
'''Create all tables in the given database'''
logging.getLogger(__name__).debug("Creating missing database tables")
database.connect()
database.create_tables([User,
Group,
UserToGroup,
GroupT... | Create all tables in the given database |
def _run_up(self, path, migration_file, batch, pretend=False):
"""
Run "up" a migration instance.
:type migration_file: str
:type batch: int
:type pretend: bool
"""
migration = self._resolve(path, migration_file)
if pretend:
return self._pr... | Run "up" a migration instance.
:type migration_file: str
:type batch: int
:type pretend: bool |
def solar(filename_solar, solar_factor):
'''
read solar abundances from filename_solar.
Parameters
----------
filename_solar : string
The file name.
solar_factor : float
The correction factor to apply, in case filename_solar is not
solar, but some file used to get initia... | read solar abundances from filename_solar.
Parameters
----------
filename_solar : string
The file name.
solar_factor : float
The correction factor to apply, in case filename_solar is not
solar, but some file used to get initial abundances at
metallicity lower than solar.... |
def show_status(self):
"""
dumps the status of the agent
"""
txt = 'Agent Status:\n'
print(txt)
txt += "start_x = " + str(self.start_x) + "\n"
txt += "start_y = " + str(self.start_y) + "\n"
txt += "target_x = " + str(self.target_x) + "\n"
txt += ... | dumps the status of the agent |
def bar_amplitude(self):
    """Return the bar amplitude, defined as (high - low) / low."""
    span = self.high - self.low
    amplitude = span / self.low
    amplitude.name = 'bar_amplitude'
    return amplitude
def distance(self, there):
"""
Calculate the distance from this location to there.
Parameters
----------
there : Location
Returns
-------
distance_in_m : float
"""
return haversine_distance((self.latitude, self.longitude),
... | Calculate the distance from this location to there.
Parameters
----------
there : Location
Returns
-------
distance_in_m : float |
def login(self, username, password, load=True):
"""
Set the authentication data in the object, and if load is True
(default is True) it also retrieve the ip list and the vm list
in order to build the internal objects list.
@param (str) username: username of the cloud
@par... | Set the authentication data in the object, and if load is True
(default is True) it also retrieve the ip list and the vm list
in order to build the internal objects list.
@param (str) username: username of the cloud
@param (str) password: password of the cloud
@param (bool) load:... |
def drawDisplay(self, painter, option, rect, text):
"""
Overloads the drawDisplay method to render HTML if the rich text \
information is set to true.
:param painter | <QtGui.QPainter>
option | <QtGui.QStyleOptionItem>
rect ... | Overloads the drawDisplay method to render HTML if the rich text \
information is set to true.
:param painter | <QtGui.QPainter>
option | <QtGui.QStyleOptionItem>
rect | <QtCore.QRect>
text | <str> |
def _http_response(self, url, method, data=None, content_type=None,
schema=None, **kwargs):
"""url -> full target url
method -> method from requests
data -> request body
kwargs -> url formatting args
"""
if schema is None:
sche... | url -> full target url
method -> method from requests
data -> request body
kwargs -> url formatting args |
def truncate_volume(self, volume, size):
"""Truncate a volume to a new, smaller size.
:param volume: Name of the volume to truncate.
:type volume: str
:param size: Size in bytes, or string representing the size of the
volume to be created.
:type size: int or... | Truncate a volume to a new, smaller size.
:param volume: Name of the volume to truncate.
:type volume: str
:param size: Size in bytes, or string representing the size of the
volume to be created.
:type size: int or str
:returns: A dictionary mapping "name" ... |
def absorb(self, other):
"""
For attributes of others that value is not None, assign it to self.
**中文文档**
将另一个文档中的数据更新到本条文档。当且仅当数据值不为None时。
"""
if not isinstance(other, self.__class__):
raise TypeError("`other` has to be a instance of %s!" %
... | For attributes of others that value is not None, assign it to self.
**中文文档**
将另一个文档中的数据更新到本条文档。当且仅当数据值不为None时。 |
def _populate_input_for_name_id(self, config, record, context, data):
"""
Use a record found in LDAP to populate input for
NameID generation.
"""
user_id = ""
user_id_from_attrs = config['user_id_from_attrs']
for attr in user_id_from_attrs:
if attr in ... | Use a record found in LDAP to populate input for
NameID generation. |
def slang_date(self, locale="en"):
""""Returns human slang representation of date.
Keyword Arguments:
locale -- locale to translate to, e.g. 'fr' for french.
(default: 'en' - English)
"""
dt = pendulum.instance(self.datetime())
try:
... | Returns human slang representation of date.
Keyword Arguments:
locale -- locale to translate to, e.g. 'fr' for french.
(default: 'en' - English) |
def dump(self, obj):
    """
    Dumps the given object in the Java serialization format.

    Resets the per-call serialization state (reference table, current
    object, output buffer), writes the stream header followed by the
    serialized object, and returns the accumulated bytes.

    :param obj: the object to serialize
    :return: ``bytes`` containing the serialized stream
    """
    self.references = []           # back-reference table, cleared per call
    self.object_obj = obj          # object currently being serialized
    self.object_stream = BytesIO() # output buffer the writers append to
    self._writeStreamHeader()      # stream header (presumably magic/version — see _writeStreamHeader)
    self.writeObject(obj)
    return self.object_stream.getvalue()
def get_object(self, object_ids):
"""Get the value or values in the object store associated with the IDs.
Return the values from the local object store for object_ids. This will
block until all the values for object_ids have been written to the
local object store.
Args:
... | Get the value or values in the object store associated with the IDs.
Return the values from the local object store for object_ids. This will
block until all the values for object_ids have been written to the
local object store.
Args:
object_ids (List[object_id.ObjectID]): A... |
def register_listener(self, address, func):
"""Adds a listener to messages received on a specific address
If some KNX messages will be received from the KNX bus, this listener
will be called func(address, data).
There can be multiple listeners for a given address
"""
try... | Adds a listener to messages received on a specific address
If some KNX messages will be received from the KNX bus, this listener
will be called func(address, data).
There can be multiple listeners for a given address |
def share_vm_image(self, vm_image_name, permission):
'''
Share an already replicated OS image. This operation is only for
publishers. You have to be registered as image publisher with Windows
Azure to be able to call this.
vm_image_name:
The name of the virtual machi... | Share an already replicated OS image. This operation is only for
publishers. You have to be registered as image publisher with Windows
Azure to be able to call this.
vm_image_name:
The name of the virtual machine image to share
permission:
The sharing permission:... |
def get_rank_value(cls, name):
"""Returns the integer constant value for the given rank name.
:param string rank: the string rank name (E.g., 'HARDCODED').
:returns: the integer constant value of the rank.
:rtype: int
"""
if name in cls._RANK_NAMES.values():
return getattr(cls, name, None... | Returns the integer constant value for the given rank name.
:param string rank: the string rank name (E.g., 'HARDCODED').
:returns: the integer constant value of the rank.
:rtype: int |
def datum_to_value(self, instance, datum):
"""Convert a given MAAS-side datum to a Python-side value.
:param instance: The `Object` instance on which this field is
currently operating. This method should treat it as read-only, for
example to perform validation with regards to ot... | Convert a given MAAS-side datum to a Python-side value.
:param instance: The `Object` instance on which this field is
currently operating. This method should treat it as read-only, for
example to perform validation with regards to other fields.
:param datum: The MAAS-side datum ... |
def from_zenity_tuple_str(zenity_tuple_str: str):
"""
Parser for Zenity output, which outputs a named tuple-like string: "rgb(R, G, B)", where R, G, B are base10
integers.
@param zenity_tuple_str: tuple-like string: "rgb(r, g, b), where r, g, b are base10 integers.
@return: Colou... | Parser for Zenity output, which outputs a named tuple-like string: "rgb(R, G, B)", where R, G, B are base10
integers.
@param zenity_tuple_str: tuple-like string: "rgb(r, g, b), where r, g, b are base10 integers.
@return: ColourData instance
@rtype: ColourData |
def get_config(self, name, default=_MISSING):
"""Get a configuration setting from this DeviceAdapter.
See :meth:`AbstractDeviceAdapter.get_config`.
"""
val = self._config.get(name, default)
if val is _MISSING:
raise ArgumentError("DeviceAdapter config {} did not exi... | Get a configuration setting from this DeviceAdapter.
See :meth:`AbstractDeviceAdapter.get_config`. |
def _fetchBlock(self):
""" internal use only.
get a block of rows from the server and put in standby block.
future enhancements:
(1) locks for multithreaded access (protect from multiple calls)
(2) allow for prefetch by use of separate thread
"""
# make sure that ano... | internal use only.
get a block of rows from the server and put in standby block.
future enhancements:
(1) locks for multithreaded access (protect from multiple calls)
(2) allow for prefetch by use of separate thread |
def protect(self, password=None, read_protect=False, protect_from=0):
"""Set password protection or permanent lock bits.
If the *password* argument is None, all memory pages will be
protected by setting the relevant lock bits (note that lock
bits can not be reset). If valid NDEF managem... | Set password protection or permanent lock bits.
If the *password* argument is None, all memory pages will be
protected by setting the relevant lock bits (note that lock
bits can not be reset). If valid NDEF management data is
found, protect() also sets the NDEF write flag to read-only.
... |
def createNetwork(dataSource):
"""Creates and returns a new Network with a sensor region reading data from
'dataSource'. There are two hierarchical levels, each with one SP and one TM.
@param dataSource - A RecordStream containing the input data
@returns a Network ready to run
"""
network = Network()
# C... | Creates and returns a new Network with a sensor region reading data from
'dataSource'. There are two hierarchical levels, each with one SP and one TM.
@param dataSource - A RecordStream containing the input data
@returns a Network ready to run |
def init_app(self, app, sessionstore=None, register_blueprint=False):
"""Flask application initialization.
:param app: The Flask application.
:param sessionstore: store for sessions. Passed to
``flask-kvsession``. If ``None`` then Redis is configured.
(Default: ``None``)... | Flask application initialization.
:param app: The Flask application.
:param sessionstore: store for sessions. Passed to
``flask-kvsession``. If ``None`` then Redis is configured.
(Default: ``None``)
:param register_blueprint: If ``True``, the application registers the
... |
def _build_youtube_dl_coprocessor(cls, session: AppSession, proxy_port: int):
'''Build youtube-dl coprocessor.'''
# Test early for executable
wpull.processor.coprocessor.youtubedl.get_version(session.args.youtube_dl_exe)
coprocessor = session.factory.new(
'YoutubeDlCoproces... | Build youtube-dl coprocessor. |
def dedents(self, s, stacklevel=3):
"""
Dedent a string and substitute with the :attr:`params` attribute
Parameters
----------
s: str
string to dedent and insert the sections of the :attr:`params`
attribute
stacklevel: int
The stacklev... | Dedent a string and substitute with the :attr:`params` attribute
Parameters
----------
s: str
string to dedent and insert the sections of the :attr:`params`
attribute
stacklevel: int
The stacklevel for the warning raised in :func:`safe_module` when
... |
def remove_event(self, name=None, time=None, chan=None):
    """Action: remove single event."""
    criteria = dict(name=name, time=time, chan=chan)
    self.annot.remove_event(**criteria)
    self.update_annotations()
def _permute(self, ordering: np.ndarray) -> None:
"""
Permute all the attributes in the collection
Remarks:
This permutes the order of the values for each attribute in the file
"""
for key in self.keys():
self[key] = self[key][ordering] | Permute all the attributes in the collection
Remarks:
This permutes the order of the values for each attribute in the file |
def set_volume(self, volume=50):
"""
allows to change the volume
:param int volume: volume to be set for the current device
[0..100] (default: 50)
"""
assert(volume in range(101))
log.debug("setting volume...")
cmd, url = DEVICE_URLS[... | allows to change the volume
:param int volume: volume to be set for the current device
[0..100] (default: 50) |
def main():
"""The entry point for the HNV Agent."""
neutron_config.register_agent_state_opts_helper(CONF)
common_config.init(sys.argv[1:])
neutron_config.setup_logging()
hnv_agent = HNVAgent()
# Start everything.
LOG.info("Agent initialized successfully, now running... ")
hnv_agent.da... | The entry point for the HNV Agent. |
def _parse_response(self, resp):
    """Gets the authentication information from the returned JSON.

    Extends the base-class parsing by also capturing the Rackspace
    default region from the user section of the response, when present.

    :param resp: parsed JSON body of the auth response; must contain
        ``resp["access"]["user"]``.
    """
    super(RaxIdentity, self)._parse_response(resp)
    user = resp["access"]["user"]
    # Rackspace-specific extension attribute; a missing or falsy value
    # leaves self._default_region untouched.
    defreg = user.get("RAX-AUTH:defaultRegion")
    if defreg:
        self._default_region = defreg
def prepareToSolve(self):
'''
Perform preparatory work before calculating the unconstrained consumption
function.
Parameters
----------
none
Returns
-------
none
'''
self.setAndUpdateValues(self.solution_next,self.IncomeDstn,self.... | Perform preparatory work before calculating the unconstrained consumption
function.
Parameters
----------
none
Returns
-------
none |
def make_grid(self):
"""
return grid
"""
changes = None
# if there is a MagicDataFrame, extract data from it
if isinstance(self.magic_dataframe, cb.MagicDataFrame):
# get columns and reorder slightly
col_labels = list(self.magic_dataframe.df.column... | return grid |
def __json(self):
"""
Using the exclude lists, convert fields to a string.
"""
if self.exclude_list is None:
self.exclude_list = []
fields = {}
for key, item in vars(self).items():
if hasattr(self, '_sa_instance_state'):
# ... | Using the exclude lists, convert fields to a string. |
def add_mark_at(string, index, mark):
"""
Add mark to the index-th character of the given string. Return the new string after applying change.
Notice: index > 0
"""
if index == -1:
return string
# Python can handle the case which index is out of range of given string
return string[:i... | Add mark to the index-th character of the given string. Return the new string after applying change.
Notice: index > 0 |
def forward(self,
layer_input: torch.Tensor,
layer_output: torch.Tensor,
layer_index: int = None,
total_layers: int = None) -> torch.Tensor:
# pylint: disable=arguments-differ
"""
Apply dropout to this layer, for this whole mini-bat... | Apply dropout to this layer, for this whole mini-batch.
dropout_prob = layer_index / total_layers * undecayed_dropout_prob if layer_idx and
total_layers is specified, else it will use the undecayed_dropout_prob directly.
Parameters
----------
layer_input ``torch.FloatTensor`` re... |
def disable(gandi, resource, backend, port, probe):
""" Disable a backend or a probe on a webaccelerator """
result = []
if backend:
backends = backend
for backend in backends:
if 'port' not in backend:
if not port:
backend['port'] = click.prom... | Disable a backend or a probe on a webaccelerator |
def pack_nibbles(nibbles):
"""pack nibbles to binary
:param nibbles: a nibbles sequence. may have a terminator
"""
if nibbles[-1] == NIBBLE_TERMINATOR:
flags = 2
nibbles = nibbles[:-1]
else:
flags = 0
oddlen = len(nibbles) % 2
flags |= oddlen # set lowest bit if ... | pack nibbles to binary
:param nibbles: a nibbles sequence. may have a terminator |
def add_scope(self, scope_type, scope_name, scope_start, is_method=False):
"""we identified a scope and add it to positions."""
if self._curr is not None:
self._curr['end'] = scope_start - 1 # close last scope
self._curr = {
'type': scope_type, 'name': scope_name,
... | we identified a scope and add it to positions. |
def search(cls, session, queries, out_type):
"""Search for a record given a domain.
Args:
session (requests.sessions.Session): Authenticated session.
queries (helpscout.models.Domain or iter): The queries for the
domain. If a ``Domain`` object is provided, it wil... | Search for a record given a domain.
Args:
session (requests.sessions.Session): Authenticated session.
queries (helpscout.models.Domain or iter): The queries for the
domain. If a ``Domain`` object is provided, it will simply be
returned. Otherwise, a ``Dom... |
def update_function(old, new):
    """Upgrade the code object of a function.

    Copies each attribute named in ``func_attrs`` from *new* onto *old*;
    attributes that are missing or read-only are skipped on purpose
    (best-effort hot-reload semantics).
    """
    for attr in func_attrs:
        try:
            setattr(old, attr, getattr(new, attr))
        except (AttributeError, TypeError):
            pass
def options(cls, obj, options=None, **kwargs):
"""
Context-manager for temporarily setting options on an object
(if options is None, no options will be set) . Once the
context manager exits, both the object and the Store will be
left in exactly the same state they were in before ... | Context-manager for temporarily setting options on an object
(if options is None, no options will be set) . Once the
context manager exits, both the object and the Store will be
left in exactly the same state they were in before the context
manager was used.
See holoviews.core.o... |
def ResolveForCreate(self, document):
"""Resolves the collection for creating the document based on the partition key.
:param dict document:
The document to be created.
:return:
Collection Self link or Name based link which should handle the Create operation.
... | Resolves the collection for creating the document based on the partition key.
:param dict document:
The document to be created.
:return:
Collection Self link or Name based link which should handle the Create operation.
:rtype:
str |
def clear_scroll(self,
scroll_id = None,
body = '',
params = {},
callback = None,
**kwargs
):
"""
Clear the scroll request created by specifying the scroll parameter to
search.
`<http://www.elasticsearch.org/gui... | Clear the scroll request created by specifying the scroll parameter to
search.
`<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-scroll.html>`_
:arg scroll_id: The scroll ID or a list of scroll IDs
:arg body: A comma-separated list of scroll IDs to cl... |
def hasDependencyRecursively(self, name, target=None, test_dependencies=False):
''' Check if this module, or any of its dependencies, have a
dependencies with the specified name in their dependencies, or in
their targetDependencies corresponding to the specified target.
Note... | Check if this module, or any of its dependencies, have a
dependencies with the specified name in their dependencies, or in
their targetDependencies corresponding to the specified target.
Note that if recursive dependencies are not installed, this test
may return a false-... |
def doc_stream(path):
    """
    Generator over the documents in a file, one document per line.

    Blank (whitespace-only) lines are skipped; line endings on the
    yielded lines are preserved.
    """
    with open(path, 'r') as handle:
        yield from (line for line in handle if line.strip())
def create_module(name, path):
"""
Returns module created *on the fly*. Returned module would have name same
as given ``name`` and would contain code read from file at the given
``path`` (it may also be a zip or package containing *__main__* module).
"""
module = imp.new_module(name)
module.... | Returns module created *on the fly*. Returned module would have name same
as given ``name`` and would contain code read from file at the given
``path`` (it may also be a zip or package containing *__main__* module). |
def single(self):
""" Obtain the next and only remaining record from this result.
A warning is generated if more than one record is available but
the first of these is still returned.
:returns: the next :class:`.Record` or :const:`None` if none remain
:warns: if more than one r... | Obtain the next and only remaining record from this result.
A warning is generated if more than one record is available but
the first of these is still returned.
:returns: the next :class:`.Record` or :const:`None` if none remain
:warns: if more than one record is available |
def fromFile(cls, filename):
"""
Load a suffix array instance from filename, a file created by
toFile.
Accept any filename following the _open conventions.
"""
self = cls.__new__(cls) # new instance which does not call __init__
start = _time()
savedData... | Load a suffix array instance from filename, a file created by
toFile.
Accept any filename following the _open conventions. |
def format_interface_name(intf_type, port, ch_grp=0):
"""Method to format interface name given type, port.
Given interface type, port, and channel-group, this
method formats an interface name. If channel-group is
non-zero, then port-channel is configured.
:param intf_type: Such as 'ethernet' or '... | Method to format interface name given type, port.
Given interface type, port, and channel-group, this
method formats an interface name. If channel-group is
non-zero, then port-channel is configured.
:param intf_type: Such as 'ethernet' or 'port-channel'
:param port: unique identification -- 1/32 ... |
def as_completed(fs, timeout=None):
"""An iterator over the given futures that yields each as it completes.
Args:
fs: The sequence of Futures (possibly created by different Executors) to
iterate over.
timeout: The maximum number of seconds to wait. If None, then there
is... | An iterator over the given futures that yields each as it completes.
Args:
fs: The sequence of Futures (possibly created by different Executors) to
iterate over.
timeout: The maximum number of seconds to wait. If None, then there
is no limit on the wait time.
Returns:
... |
def build_docs(directory):
    """Builds sphinx docs from a given directory.

    Runs ``make html`` with *directory* as the working directory and
    waits for the build to finish.

    :param directory: path to the docs folder containing the Makefile.
    :raises subprocess.CalledProcessError: if ``make html`` exits
        non-zero (the original version silently ignored build failures).
    """
    # Kept for backward compatibility: callers may rely on the process
    # cwd having changed (otherwise redundant with cwd= below).
    os.chdir(directory)
    # run() replaces Popen+communicate(); check=True surfaces failures
    # instead of discarding the exit status.
    subprocess.run(["make", "html"], cwd=directory, check=True)
def past_date(start='-30d'):
"""
Returns a ``date`` object in the past between 1 day ago and the
specified ``start``. ``start`` can be a string, another date, or a
timedelta. If it's a string, it must start with `-`, followed by and integer
and a unit, Eg: ``'-30d'``. Defaults to `'-30d'`
Valid... | Returns a ``date`` object in the past between 1 day ago and the
specified ``start``. ``start`` can be a string, another date, or a
timedelta. If it's a string, it must start with `-`, followed by and integer
and a unit, Eg: ``'-30d'``. Defaults to `'-30d'`
Valid units are:
* ``'years'``, ``'y'``
... |
def handle_cannot(reference_answers: List[str]):
"""
Process a list of reference answers.
If equal or more than half of the reference answers are "CANNOTANSWER", take it as gold.
Otherwise, return answers that are not "CANNOTANSWER".
"""
num_cannot = 0
num_spans = 0
for ref in reference_... | Process a list of reference answers.
If equal or more than half of the reference answers are "CANNOTANSWER", take it as gold.
Otherwise, return answers that are not "CANNOTANSWER". |
def read(self, offset, length, min_length=0, unbuffered=False, wait=True,
send=True):
"""
Reads from an opened file or pipe
Supports out of band send function, call this function with send=False
to return a tuple of (SMB2ReadRequest, receive_func) instead of
sending... | Reads from an opened file or pipe
Supports out of band send function, call this function with send=False
to return a tuple of (SMB2ReadRequest, receive_func) instead of
sending the the request and waiting for the response. The receive_func
can be used to get the response from the server... |
def download_url(url, destination):
"""
Download an external URL to the destination
"""
from settings import VALID_IMAGE_EXTENSIONS
base_name, ext = os.path.splitext(url)
ext = ext.lstrip('.')
if ext not in VALID_IMAGE_EXTENSIONS:
raise Exception("Invalid image extension")
base... | Download an external URL to the destination |
def qtaax(mt, x, t, q, m=1):
    """Temporary annuity-due with payments in geometric progression
    at rate ``q`` (original note: "geometrica").

    The geometric growth is absorbed into an adjusted interest rate
    ``j = (i - q) / (1 + q)``, and the level annuity ``taax`` is
    evaluated on that adjusted basis; the final term is the usual
    m-thly payment correction scaled by ``1 - nEx``.

    :param mt: actuarial table object; assumed to expose ``i`` (interest
        rate) and ``nt`` (underlying table) — TODO confirm against Actuarial
    :param x: age of the annuitant
    :param t: term of the annuity
    :param q: geometric growth rate of the payments
    :param m: payments per year (default 1; m=1 makes the correction term 0)
    """
    q = float(q)
    j = (mt.i - q) / (1 + q)
    mtj = Actuarial(nt=mt.nt, i=j)
    return taax(mtj, x, t) - ((float(m - 1) / float(m * 2)) * (1 - nEx(mt, x, t)))
def property(func):
"""Wrap a function as a property.
This differs from attribute by identifying properties explicitly listed
in the class definition rather than named attributes defined on instances
of a class at init time.
"""
attr = abc.abstractmethod(func)
attr.__iproperty__ = True
... | Wrap a function as a property.
This differs from attribute by identifying properties explicitly listed
in the class definition rather than named attributes defined on instances
of a class at init time. |
def alias_tags(tags_list, alias_map):
"""
update tags to new values
Args:
tags_list (list):
alias_map (list): list of 2-tuples with regex, value
Returns:
list: updated tags
CommandLine:
python -m utool.util_tags alias_tags --show
Example:
>>> # DISABLE... | update tags to new values
Args:
tags_list (list):
alias_map (list): list of 2-tuples with regex, value
Returns:
list: updated tags
CommandLine:
python -m utool.util_tags alias_tags --show
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_tags import *... |
def read_git_branch():
"""Obtain the current branch name from the Git repository. If on Travis CI,
use the ``TRAVIS_BRANCH`` environment variable.
"""
if os.getenv('TRAVIS'):
return os.getenv('TRAVIS_BRANCH')
else:
try:
repo = git.repo.base.Repo(search_parent_directories=... | Obtain the current branch name from the Git repository. If on Travis CI,
use the ``TRAVIS_BRANCH`` environment variable. |
def load_config():
'''try loading config file from a default directory'''
cfg_path = '/usr/local/etc/freelan'
cfg_file = 'freelan.cfg'
if not os.path.isdir(cfg_path):
print("Can not find default freelan config directory.")
return
cfg_file_path = os.path.join(cfg_path,cfg_file)
... | try loading config file from a default directory |
def start_of_chunk(prev_tag, tag, prev_type, type_):
"""Checks if a chunk started between the previous and current word.
Args:
prev_tag: previous chunk tag.
tag: current chunk tag.
prev_type: previous type.
type_: current type.
Returns:
chunk_start: boolean.
"""... | Checks if a chunk started between the previous and current word.
Args:
prev_tag: previous chunk tag.
tag: current chunk tag.
prev_type: previous type.
type_: current type.
Returns:
chunk_start: boolean. |
def get_float(prompt=None):
"""
Read a line of text from standard input and return the equivalent float
as precisely as possible; if text does not represent a double, user is
prompted to retry. If line can't be read, return None.
"""
while True:
s = get_string(prompt)
if s is Non... | Read a line of text from standard input and return the equivalent float
as precisely as possible; if text does not represent a double, user is
prompted to retry. If line can't be read, return None. |
def reset(self):
    """
    Indicates the start of a new sequence. Clears any predictions and
    makes sure synapses don't grow to the currently active cells in the
    next time step.
    """
    self.activeCells, self.winnerCells = [], []
    self.activeSegments, self.matchingSegments = [], []
def MessageSetItemSizer(field_number):
"""Returns a sizer for extensions of MessageSet.
The message set message looks like this:
message MessageSet {
repeated group Item = 1 {
required int32 type_id = 2;
required string message = 3;
}
}
"""
static_size = (_TagSize(1) * 2 + _... | Returns a sizer for extensions of MessageSet.
The message set message looks like this:
message MessageSet {
repeated group Item = 1 {
required int32 type_id = 2;
required string message = 3;
}
} |
def _maybe_cast_to_float64(da):
"""Cast DataArrays to np.float64 if they are of type np.float32.
Parameters
----------
da : xr.DataArray
Input DataArray
Returns
-------
DataArray
"""
if da.dtype == np.float32:
logging.warning('Datapoints were stored using the np.flo... | Cast DataArrays to np.float64 if they are of type np.float32.
Parameters
----------
da : xr.DataArray
Input DataArray
Returns
-------
DataArray |
def save_profile(self, **params):
"""Save the given parameters into profile settings.
Parameters
----------
params : dict
Keywords and values to be saved.
"""
image = self.get_image()
if (image is None):
return
profile = image.ge... | Save the given parameters into profile settings.
Parameters
----------
params : dict
Keywords and values to be saved. |
def _count_elements(mapping, iterable):
'Tally elements from the iterable.'
mapping_get = mapping.get
for elem in iterable:
mapping[elem] = mapping_get(elem, 0) + 1 | Tally elements from the iterable. |
def get_acl(self, key_name='', headers=None, version_id=None):
"""returns a bucket's acl. We include a version_id argument
to support a polymorphic interface for callers, however,
version_id is not relevant for Google Cloud Storage buckets
and is therefore ignored here."""
... | returns a bucket's acl. We include a version_id argument
to support a polymorphic interface for callers, however,
version_id is not relevant for Google Cloud Storage buckets
and is therefore ignored here. |
def waitget(self,key, maxwaittime = 60 ):
""" Wait (poll) for a key to get a value
Will wait for `maxwaittime` seconds before raising a KeyError.
The call exits normally if the `key` field in db gets a value
within the timeout period.
Use this for synchronizing different proces... | Wait (poll) for a key to get a value
Will wait for `maxwaittime` seconds before raising a KeyError.
The call exits normally if the `key` field in db gets a value
within the timeout period.
Use this for synchronizing different processes or for ensuring
that an unfortunately time... |
def from_xdr_object(cls, asset_xdr_object):
"""Create a :class:`Asset` from an XDR Asset object.
:param asset_xdr_object: The XDR Asset object.
:return: A new :class:`Asset` object from the given XDR Asset object.
"""
if asset_xdr_object.type == Xdr.const.ASSET_TYPE_NATIVE:
... | Create a :class:`Asset` from an XDR Asset object.
:param asset_xdr_object: The XDR Asset object.
:return: A new :class:`Asset` object from the given XDR Asset object. |
def _create_namespace(namespace, apiserver_url):
''' create namespace on the defined k8s cluster '''
# Prepare URL
url = "{0}/api/v1/namespaces".format(apiserver_url)
# Prepare data
data = {
"kind": "Namespace",
"apiVersion": "v1",
"metadata": {
"name": namespace,... | create namespace on the defined k8s cluster |
def pretty_unicode(string):
"""
Make sure string is unicode, try to decode with utf8, or unicode escaped string if failed.
"""
if isinstance(string, six.text_type):
return string
try:
return string.decode("utf8")
except UnicodeDecodeError:
return string.decode('Latin-1').... | Make sure string is unicode, try to decode with utf8, or unicode escaped string if failed. |
def _num_values(self, zVar, varNum):
'''
Determines the number of values in a record.
Set zVar=True if this is a zvariable.
'''
values = 1
if (zVar == True):
numDims = self.zvarsinfo[varNum][2]
dimSizes = self.zvarsinfo[varNum][3]
dimVa... | Determines the number of values in a record.
Set zVar=True if this is a zvariable. |
def eval_basis(self, x, regularize=True):
"""
basis_mat = C.eval_basis(x)
Evaluates self's basis functions on x and returns them stacked
in a matrix. basis_mat[i,j] gives basis function i evaluated at
x[j,:].
"""
if regularize:
x = regularize_array(x)... | basis_mat = C.eval_basis(x)
Evaluates self's basis functions on x and returns them stacked
in a matrix. basis_mat[i,j] gives basis function i evaluated at
x[j,:]. |
def setup_package():
"""Package setup"""
setup(
name='pyviews',
version=_get_version(),
description='Base package for xml views',
long_description=_get_long_description(),
long_description_content_type='text/markdown',
url='https://github.com/eumis/pyviews',
... | Package setup |
def sr1(x, promisc=None, filter=None, iface=None, nofilter=0, *args, **kargs):
"""Send packets at layer 3 and return only the first answer"""
s = conf.L3socket(promisc=promisc, filter=filter,
nofilter=nofilter, iface=iface)
ans, _ = sndrcv(s, x, *args, **kargs)
s.close()
if len... | Send packets at layer 3 and return only the first answer |
def apply(self, config, raise_on_unknown_key=True):
# type: (Dict[str, Any], bool) -> None
"""Apply additional configuration from a dictionary
This will look for dictionary items that exist in the base_config any apply their values on the current
configuration object
"""
... | Apply additional configuration from a dictionary
This will look for dictionary items that exist in the base_config any apply their values on the current
configuration object |
def edgepaths(self):
"""
Returns the fixed EdgePaths or computes direct connections
between supplied nodes.
"""
edgepaths = super(TriMesh, self).edgepaths
edgepaths.crs = self.crs
return edgepaths | Returns the fixed EdgePaths or computes direct connections
between supplied nodes. |
def make_repr(*args, **kwargs):
"""Returns __repr__ method which returns ASCII
representaion of the object with given fields.
Without arguments, ``make_repr`` generates a method
which outputs all object's non-protected (non-undercored)
arguments which are not callables.
Accepts ``*args``, whic... | Returns __repr__ method which returns ASCII
representaion of the object with given fields.
Without arguments, ``make_repr`` generates a method
which outputs all object's non-protected (non-undercored)
arguments which are not callables.
Accepts ``*args``, which should be a names of object's
att... |
def _filter_valid_arguments(self, arguments, binary="mongod",
config=False):
"""
Return a list of accepted arguments.
Check which arguments in list are accepted by the specified binary
(mongod, mongos). If an argument does not start with '-' but its
... | Return a list of accepted arguments.
Check which arguments in list are accepted by the specified binary
(mongod, mongos). If an argument does not start with '-' but its
preceding argument was accepted, then it is accepted as well. Example
['--slowms', '1000'] both arguments would be acc... |
def _notify_new_tick_event(self, tick):
"""tick推送"""
if tick.time is '':
return
event = Event(type_=EVENT_TINY_TICK)
event.dict_['data'] = tick
self._event_engine.put(event) | tick推送 |
def _flush_data(self):
"""
If this relation's local unit data has been modified, publish it on the
relation. This should be automatically called.
"""
if self._data and self._data.modified:
hookenv.relation_set(self.relation_id, dict(self.to_publish.data)) | If this relation's local unit data has been modified, publish it on the
relation. This should be automatically called. |
def accept(self):
'''
Accepts all children fields, collects resulting values into dict and
passes that dict to converter.
Returns result of converter as separate value in parent `python_data`
'''
result = dict(self.python_data)
for field in self.fields:
... | Accepts all children fields, collects resulting values into dict and
passes that dict to converter.
Returns result of converter as separate value in parent `python_data` |
def edit_wiki_page(self, subreddit, page, content, reason=''):
"""Create or edit a wiki page with title `page` for `subreddit`.
:returns: The json response from the server.
"""
data = {'content': content,
'page': page,
'r': six.text_type(subreddit),
... | Create or edit a wiki page with title `page` for `subreddit`.
:returns: The json response from the server. |
def get_span_column_count(span):
"""
Find the length of a colspan.
Parameters
----------
span : list of lists of int
The [row, column] pairs that make up the span
Returns
-------
columns : int
The number of columns included in the span
Example
-------
Consi... | Find the length of a colspan.
Parameters
----------
span : list of lists of int
The [row, column] pairs that make up the span
Returns
-------
columns : int
The number of columns included in the span
Example
-------
Consider this table::
+------+-----------... |
def verify(expr, params=None):
"""
Determine if expression can be successfully translated to execute on
MapD
"""
try:
compile(expr, params=params)
return True
except com.TranslationError:
return False | Determine if expression can be successfully translated to execute on
MapD |
def flavor_list_paged(request, is_public=True, get_extras=False, marker=None,
paginate=False, sort_key="name", sort_dir="desc",
reversed_order=False):
"""Get the list of available instance sizes (flavors)."""
has_more_data = False
has_prev_data = False
if pag... | Get the list of available instance sizes (flavors). |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.