content stringlengths 35 762k | sha1 stringlengths 40 40 | id int64 0 3.66M |
|---|---|---|
def get_functional_groups(alkoxy_mol):
"""
given a molecule object `alkoxy_mol`. This method returns
a dictionary of groups used in the Vereecken SAR with the
key being the group and the value being the number of occurances
it has.
"""
#print 'getting groups from {}'.format(alkoxy_mol.toSMIL... | 9c0280bb09e6ef606aac2a14fe2826c0a9feb06d | 3,252 |
def rough(material, coverage, scale, det, e0=20.0, withPoisson=True, nTraj=defaultNumTraj, dose=defaultDose, sf=True, bf=True, xtraParams=defaultXtraParams):
"""rough(material, coverage, scale, det, [e0=20.0], [withPoisson=True], [nTraj=defaultNumTraj], [dose = 120.0], [sf=True], [bf=True], [xtraParams={}])
Mon... | 0aa6a21a2cdae22bf9f56cd6babfa9c3402ce465 | 3,253 |
def jsonify(comment_lower: str) -> str:
    """pyNastran: SPOINT={'id':10, 'xyz':[10.,10.,10.]}"""
    # Take the text after the first '=' (second split field) and strip
    # trailing whitespace.
    rhs = comment_lower.split('=')[1].rstrip()
    # Normalize to JSON: single -> double quotes, then make sure every
    # closing brace is preceded by exactly one trailing comma.
    quoted = rhs.replace("'", '"')
    with_trailing_comma = quoted.replace('}', ',}')
    return with_trailing_comma.replace(',,}', ',}')
from catalyst.engines.torch import (
DataParallelEngine,
DeviceEngine,
DistributedDataParallelEngine,
)
from catalyst.engines.amp import (
AMPEngine,
DataParallelAMPEngine,
DistributedDataParallelAMPEngine,
)
from catalyst.engines.apex import (... | 6d29e0c1938c5889b6e4a7fa972945065bc2cf3a | 3,256 |
import shutil
def disk_usage(pathname):
    """Return disk usage statistics for the given path.

    Thin wrapper around :func:`shutil.disk_usage`.

    :param pathname: filesystem path to inspect
    :return: named tuple with attributes ``total``, ``used`` and ``free``,
        all expressed in bytes, e.g.
        ``usage(total=118013599744, used=63686647808, free=48352747520)``
    """
    return shutil.disk_usage(pathname)
from typing import Tuple
def create_new_deployment(
runner: Runner, deployment_arg: str, expose: PortMapping,
add_custom_nameserver: bool
) -> Tuple[str, str]:
"""
Create a new Deployment, return its name and Kubernetes label.
"""
span = runner.span()
run_id = runner.session_id
runner.... | d15e9e1ec9d09669b8becd4e169049d5a1e836ab | 3,259 |
import logging
def score_latency(
references, reference_wavs, partial_translations, target_language="en-US"
):
"""Measures the "final" translation lag after all corrections have been made."""
logger = logging.getLogger("evaluation")
tokenizer = get_tokenizer(target_language)
min_len = min(len(par... | 9d31e029247e44448103d99760019f0dffa1cf44 | 3,260 |
def shapelet_with_w_term(
coords, frequency, coeffs, beta, delta_lm, lm, dtype=np.complex128
):
"""
shapelet: outputs visibilities corresponding to that of a shapelet
Inputs:
coords: coordinates in (u,v) space with shape (nrow, 3)
frequency: frequency values with shape (nchan,)
c... | f6c9f9011306cc2de5054e015857b3b47c7e6cd9 | 3,261 |
from typing import Counter
def _entropy_counter2(arr):
"""
calculate the base 2 entropy of the distribution given in `arr` using a
`Counter` and the `values` method (for python3)
"""
arr_len = len(arr)
if arr_len == 0:
return 0
log_arr_len = np.log2(len(arr))
return -sum(val * ... | 1f72c7a7e5db56aa9a0e5c3811cf28c600420949 | 3,263 |
def get_changes_between_models(model1, model2, excludes=None):
"""
Return a dict of differences between two model instances
"""
if excludes is None:
excludes = []
changes = {}
for field in model1._meta.fields:
if (isinstance(field, (fields.AutoField,
... | 1f62afdc7818574553fa7a53eb05e766c2805edd | 3,265 |
def get_intersect(x1, y1, x2, y2):
"""
Returns the point of intersection of the lines or None if lines are parallel
Ex. p1=(x1,x2)... line_intersection((p1,p2), (p3,p4))
a1: [x, y] a point on the first line
a2: [x, y] another point on the first line
b1: [x, y] a point on the second line
b2: ... | 8e9ed2f2351b41658400badc7339eedc9791db8a | 3,266 |
def removeDuplicateColumns(df):
"""
Removes columns that have a duplicate name.
:return pd.DataFrame:
"""
duplicates = getDuplicates(df.columns)
done = False
idx = 0
df_result = df.copy()
additions_dict = {}
while not done:
if idx >= len(df_result.columns):
done = True
break
colu... | dc46580d221b8e4279ba73e8d97eee079e65309c | 3,267 |
def conv_block(data, name, channels,
kernel_size=(3, 3), strides=(1, 1), padding=(1, 1),
epsilon=1e-5):
"""Helper function to construct conv-bn-relu"""
# convolution + bn + relu
conv = sym.conv2d(data=data, channels=channels,
kernel_size=kernel_size, strid... | 90464c208c12a6e9907f5a206ddd324fd92638ff | 3,268 |
import pickle
import torchvision
import torch
def utzappos_tensor_dset(img_size, observed, binarized, drop_infreq,
cache_fn, *dset_args, transform=None, **dset_kwargs):
"""
Convert folder dataset to tensor dataset.
"""
cache_fn = UTZapposIDImageFolder.get_cache_name(cache_fn, ... | 8008f8d19453884106832746a4cefb55c9813c45 | 3,270 |
def compare_versions(aStr, bStr):
    """
    Compare two Debian version strings.

    Assumes Debian version format:
        [epoch:]upstream_version[-debian_revision]

    :param aStr: first version string
    :param bStr: second version string
    :return:
        -1 : a < b
         0 : a == b
         1 : a > b
    """
    # Compare using the Version class (defined elsewhere in this module).
    # Python 3 removed the builtin cmp(); emulate it with the standard
    # (a > b) - (a < b) idiom so this works on both 2 and 3.
    a = Version(aStr)
    b = Version(bStr)
    return (a > b) - (a < b)
import numbers
def unscale_parameter(value: numbers.Number,
petab_scale: str) -> numbers.Number:
"""Bring parameter from scale to linear scale.
:param value:
Value to scale
:param petab_scale:
Target scale of ``value``
:return:
``value`` on linear scale
... | f04156220e8a39c31473507a60fee3d5185bda0c | 3,273 |
def perturb(sentence, bertmodel, num):
"""Generate a list of similar sentences by BERT
Arguments:
sentence: Sentence which needs to be perturbed
bertModel: MLM model being used (BERT here)
num: Number of perturbations required for a word in a sentence
"""
# Tokenize the sentence
tokens = tokenizer.tokenize(s... | 598ed7e37185de6bf2a977c226bb58677684772d | 3,274 |
import logging
def discovery_dispatch(task: TaskRequest) -> TaskResponse:
"""Runs appropriate discovery function based on protocol
Args:
task (TaskRequest): namedtuple
Returns:
TaskResponse[str, dict[str, str|int|bool|list]]
"""
task = TaskRequest(*task)
proto = constant.Proto... | 3fe6394cf81fdb3e25343df27479f4b4ab3033fa | 3,275 |
def get_free_times(busy_times, begin_date, end_date):
"""
Gets a list of free times calculated from a list of busy times.
:param busy_times: is the list of busy times in ascending order.
:param begin_date: is the start of the selected time interval.
:param end_date: is the end of the selected time i... | 95f33c22e28e9ed7bc299ac966767a2292cf6d7b | 3,276 |
from datetime import datetime
import pytz
def upstream_has_data(valid):
    """Does data exist upstream to even attempt a download?

    :param valid: timezone-aware datetime of the product being requested
    :return: True if the product time is within the window upstream keeps
    """
    # Local import: only ``datetime`` (the class) is imported at module
    # scope; timedelta/timezone are not.
    from datetime import timedelta, timezone

    # BUG FIX: the original called ``datetime.datetime.utcnow()`` while
    # ``datetime`` was already the class (``from datetime import datetime``),
    # which raises AttributeError.  Use an aware "now" directly; stdlib
    # timezone.utc replaces the pytz.utc dependency with equal semantics.
    utcnow = datetime.now(timezone.utc)
    # NCEP should have at least 24 hours of data
    return (utcnow - timedelta(hours=24)) < valid
def encode_array(x, base=2, **kwds):
"""Encode array of integer-symbols.
Parameters
----------
x : (N, k) array_like
Array of integer symbols.
base : int
Encoding base.
**kwds :
Keyword arguments passed to :py:func:`numpy.ravel`.
Returns
-------
int
... | b16546350638967dd60812b98295ffc4c95abd4d | 3,278 |
import itertools
def str_for_model(model: Model, formatting: str = "plain", include_params: bool = True) -> str:
"""Make a human-readable string representation of Model, listing all random variables
and their distributions, optionally including parameter values."""
all_rv = itertools.chain(model.unobserve... | 89711e4fd12572339a501698c39fc8b81deca8a3 | 3,279 |
from typing import Callable
from typing import Optional
from typing import Union
def get_device(
raw_data: dict, control_data: dict, request: Callable
) -> Optional[
Union[
HomeSeerDimmableDevice,
HomeSeerFanDevice,
HomeSeerLockableDevice,
HomeSeerStatusDevice,
HomeSeer... | 616c16e749fef7dc45539a7eb8bdbc9f11d3edd1 | 3,280 |
def wait():
    """
    Gets the New Block work unit to send to clients.

    Blocks until the module-level ``_event`` object yields a value, then
    returns it.  NOTE(review): ``_event`` is defined elsewhere in the
    module — presumably a queue-like object whose ``get()`` blocks;
    confirm its semantics there.
    """
    return _event.get()
from typing import Optional
def process(msd_id: str, counter: AtomicCounter) -> Optional[dict]:
"""
Processes the given MSD id and increments the counter. The
method will find and return the artist.
:param msd_id: the MSD id to process
:param counter: the counter to increment
:return: the dictionary cont... | 6bd93bf72a7ecfa6ddb41557b0550a629b9612f4 | 3,282 |
def choose_run(D, var2align, run):
"""Get input for the alignment.
Do it by indicating a run to align to.
Args:
D (pd.DataFrame): DataFrame containing columns 'id', 'run', and ...
var2align (str): Name of the column to align.
run (whatever): The run to align to.
Returns:
... | 54fc84e61b3874219d473659c85bd369b367a05d | 3,283 |
def stack(tensor_list, axis=0):
"""
This function is the same as torch.stack but handles both
numpy.ndarray and torch.Tensor
:param tensor_list:
:param axis:
:return:
"""
if isinstance(tensor_list[0], th.Tensor):
return th.stack(tensor_list, axis)
else:
return np.stac... | 9d8e5d8fbd9f89acb40ada362d0ae8d4913df939 | 3,285 |
def alias(alias):
    """Select a single alias.

    :param alias: the alias name to select
    :return: a one-entry dict wrapping the alias under the ``'alias'`` key
    """
    return dict(alias=alias)
def model_creator(config):
"""Constructor function for the model(s) to be optimized.
You will also need to provide a custom training
function to specify the optimization procedure for multiple models.
Args:
config (dict): Configuration dictionary passed into ``PyTorchTrainer``.
Returns:
... | 81909a284bddd83a62c8c9adacfbe75cf46650bd | 3,287 |
import numbers
def ensure_r_vector(x):
"""Ensures that the input is rendered as a vector in R.
It is way more complicated to define an array in R than in Python because an array
in R cannot end with an comma.
Examples
--------
>>> ensure_r_vector("string")
"c('string')"
>>> ensure_r_... | 14fdeb6bf73244c69d9a6ef89ba93b33aa4a66d8 | 3,288 |
from typing import Optional
def open_and_prepare_avatar(image_bytes: Optional[bytes]) -> Optional[Image.Image]:
"""Opens the image as bytes if they exist, otherwise opens the 404 error image. then circular crops and resizes it"""
if image_bytes is not None:
try:
with Image.open(BytesIO(ima... | f5b4543f64b15180deed3cb8e672a3e1b96956f7 | 3,289 |
def is_GammaH(x):
"""
Return True if x is a congruence subgroup of type GammaH.
EXAMPLES::
sage: from sage.modular.arithgroup.all import is_GammaH
sage: is_GammaH(GammaH(13, [2]))
True
sage: is_GammaH(Gamma0(6))
True
sage: is_GammaH(Gamma1(6))
True
... | 9cfba55901a45d4482b6926673bfb87fabc88030 | 3,290 |
def _run_with_interpreter_if_needed(fuzzer_path, args, max_time):
"""Execute the fuzzer script with an interpreter, or invoke it directly."""
interpreter = shell.get_interpreter(fuzzer_path)
if interpreter:
executable = interpreter
args.insert(0, fuzzer_path)
else:
executable = fuzzer_path
runner... | 3739db213571ed00c5e026f9a768ca610e0ac318 | 3,291 |
def cost_logistic(p, x, y):
"""
Sum of absolute deviations of obs and logistic function L/(1+exp(-k(x-x0)))
Parameters
----------
p : iterable of floats
parameters (`len(p)=3`)
`p[0]` L - Maximum of logistic function
`p[1]` k - Steepness of logistic function
... | 32b89ef7d33d49b7af63c8d11afffeb641b12de1 | 3,293 |
from datetime import datetime
def estimate_dt(time_array):
"""Automatically estimate timestep in a time_array
Args:
time_array ([list]): List or dataframe with time entries
Returns:
dt ([datetime.timedelta]): Timestep in dt.timedelta format
"""
if len(time_array) < 2:
# ... | 6e6b8dcd4d2d85b4bfb97137294774bb4bcc2673 | 3,294 |
import uu
def gen_uuid() -> str:
    """
    Generate a random UUID (version 4).

    :return: the UUID as a 32-character lowercase hex string
    """
    # BUG FIX: the module imports ``uu`` (the uuencode codec module),
    # which has no ``uuid4`` — the original raised AttributeError.
    # The intended module is ``uuid``; import it locally here.
    import uuid

    return uuid.uuid4().hex
import inspect
def make_signature(arg_names, member=False):
"""Make Signature object from argument name iterable or str."""
kind = inspect.Parameter.POSITIONAL_OR_KEYWORD
if isinstance(arg_names, str):
arg_names = map(str.strip, arg_name_list.split(','))
if member and arg_names and arg_na... | 2730e50ea68e6fe2942c629caa3b3119aea9a325 | 3,296 |
def set_trace_platform(*args):
    """
    set_trace_platform(platform)
    Set platform name of current trace.
    @param platform (C++: const char *)
    """
    # SWIG-generated pass-through: forwards *args unchanged to the native
    # IDA debugger extension module (_ida_dbg).  No validation here.
    return _ida_dbg.set_trace_platform(*args)
def leapfrog2(init, tspan, a, beta, omega, h):
    """
    Integrate the damped oscillator with damping factor a using single step
    Leapfrog for separable Hamiltonians.

    :param init: initial state passed through to the integrator
    :param tspan: integration time span
    :param a: damping factor in the force term ``-x - a*p + f(t)``
    :param beta: forcing parameter (passed to ``forcing``)
    :param omega: forcing frequency (passed to ``forcing``)
    :param h: integrator step size

    NOTE(review): ``forcing`` and ``sym`` are defined elsewhere in this
    module — ``forcing(beta, omega)`` presumably returns the driving
    function f(t); confirm signatures there.
    """
    f = forcing(beta, omega)
    return sym.leapfrog(init, tspan, h, lambda x, p, t: -x-a*p+f(t))
def get_paths(config, action, dir_name):
"""
Returns 'from' and 'to' paths.
@param config: wrapsync configuration
@param action: 'push'/'pull'
@param dir_name: name of the directory to append to paths from the config
@return: dictionary containing 'from' and 'to' paths
"""
path_from = ''... | f03ee64a76bafcf832f8dddcdcb4f16c28529c5c | 3,299 |
def to_dense(arr):
"""
Convert a sparse array to a dense numpy array. If the
array is already a numpy array, just return it. If the
array passed in is a list, then we recursively apply this
method to its elements.
Parameters
-----------
arr : :obj:`numpy.ndarray`, :obj:`scipy.sparse... | 1fa2ccdd184aa4155cfd121310d67e9e73ffff17 | 3,300 |
def output_results(results, way):
    """Helper method with most of the logic.

    :param results: sequence of coin flips, 0 for heads and 1 for tails
    :param way: callable that counts the tails in ``results``
    :return: comma-separated flip names plus a "<heads> Heads; <tails> Tails" summary line
    """
    num_tails = way(results)
    num_heads = len(results) - num_tails
    names = ("Heads", "Tails")
    listing = ", ".join(names[flip] for flip in results)
    return f"{listing}\n{num_heads} Heads; {num_tails} Tails"
def guess_init(model, focal_length, j2d, init_pose):
"""Initialize the camera translation via triangle similarity, by using the torso
joints .
:param model: SMPL model
:param focal_length: camera focal length (kept fixed)
:param j2d: 14x2 array of CNN joints
:param init_pose: 72D vector o... | ce1ca89bc60500cc59441c97cf9d71ef3d9b528b | 3,302 |
def TCnCom_Dump(*args):
    """
    Dump(TCnComV const & CnComV, TStr Desc=TStr())
    Parameters:
        CnComV: TCnComV const &
        Desc: TStr const &
    TCnCom_Dump(TCnComV const & CnComV)
    Parameters:
        CnComV: TCnComV const &
    """
    # SWIG-generated overload dispatcher: forwards *args unchanged to the
    # native SNAP extension module, which selects the overload.
    return _snap.TCnCom_Dump(*args)
import functools
def get_reparametrize_functions(
params, constraints, scaling_factor=None, scaling_offset=None
):
"""Construct functions to map between internal and external parameters.
All required information is partialed into the functions.
Args:
params (pandas.DataFrame): See :ref:`para... | a0d8f283bf44f66fb098c499a6b610174078b980 | 3,305 |
def gaussNewton(P, model, target, targetLandmarks, sourceLandmarkInds, NN, jacobi = True, calcId = True):
"""
Energy function to be minimized for fitting.
"""
# Shape eigenvector coefficients
idCoef = P[: model.idEval.size]
expCoef = P[model.idEval.size: model.idEval.size + model.expEval.size]
... | 2b54080bf9f76a8a16e26c10f6209f55bcb0c57f | 3,306 |
from re import T
def arange(start, stop=None, step=1, dtype='int32'):
"""Creates a 1-D tensor containing a sequence of integers.
The function arguments use the same convention as
Theano's arange: if only one argument is provided,
it is in fact the "stop" argument.
"""
return T.arange(start, st... | 72f505d7f1928d4e35a7e183a30bdc8cddf2edd7 | 3,307 |
def create_attachable_access_entity_profile(infra, entity_profile, **args):
"""Create an attached entity profile. This provides a template to deploy hypervisor policies on a large set of leaf ports. This also provides the association of a Virtual Machine Management (VMM) domain and the physical network infrastructu... | 96c711b8c5de52ca44483edcb478a829986e901a | 3,308 |
from typing import Tuple
from typing import List
import csv
def tensor_projection_reader(
embedding_file_path: str,
label_file_path: str
) -> Tuple[np.ndarray, List[List[str]]]:
"""
Reads the embedding and labels stored at the given paths and returns an np.ndarray and list of labels
:para... | 7e8cc804181ead221a283b4d8aa95a9e9b7d00ef | 3,309 |
def xml_to_dict(xmlobj, saveroot=True):
"""Parse the xml into a dictionary of attributes.
Args:
xmlobj: An ElementTree element or an xml string.
saveroot: Keep the xml element names (ugly format)
Returns:
An ElementDict object or ElementList for multiple objects
"""
if isins... | 85428aaefc1f48881891ddd910daef1cc4f1547e | 3,310 |
import torch
import numpy
def conve_interaction(
h: torch.FloatTensor,
r: torch.FloatTensor,
t: torch.FloatTensor,
t_bias: torch.FloatTensor,
input_channels: int,
embedding_height: int,
embedding_width: int,
hr2d: nn.Module,
hr1d: nn.Module,
) -> torch.FloatTensor:
"""Evaluate ... | fadf03905ed5c822df0fe099cb439f481073d202 | 3,311 |
def index():
    """Show Homepage.

    Renders the ``index.html`` template.  NOTE(review): ``render_template``
    is not imported in this view of the file — presumably Flask's; confirm
    at the top of the module.
    """
    return render_template("index.html")
def with_input_dtype(policy, dtype):
"""Copies "infer" `policy`, adding `dtype` to it.
Policy must be "infer" or "infer_float32_vars" (i.e., has no compute dtype).
Returns a new policy with compute dtype `dtype`. The returned policy's
variable dtype is also `dtype` if `policy` is "infer", and is `float32` if
... | 32815d4499b57ed8623a55414ef7b6115c450726 | 3,314 |
import io
import warnings
def decode_object_based(effects):
"""
Reads and decodes info about object-based layer effects.
"""
fp = io.BytesIO(effects)
version, descriptor_version = read_fmt("II", fp)
try:
descriptor = decode_descriptor(None, fp)
except UnknownOSType as e:
w... | 6471f6f9987b1817f223fe02a5ba5923ddf8c0c8 | 3,315 |
def example_add(x: int, y: int) -> int:
    """
    Return the sum of ``x`` and ``y``.

    :param x: first addend
    :param y: second addend
    :return: ``x + y``
    """
    # The original docstring was a "..." placeholder; documented and a
    # return annotation added (interface otherwise unchanged).
    return x + y
from typing import Optional
def inverse(text: str, reset_style: Optional[bool] = True) -> str:
"""Returns text inverse-colored.
Args:
reset_style: Boolean that determines whether a reset character should
be appended to the end of the string.
"""
return set_mode("inverse", False) ... | 4d8aceada756386348b68c13dabe4948b15986c3 | 3,317 |
def make():
    """ hook function for entrypoints

    :return: the ``LocalFileSystem`` class itself (not an instance);
        presumably instantiated by the entrypoint consumer — confirm
        against the entrypoint declaration.
    """
    return LocalFileSystem
def configure():
"""read configuration from command line options and config file values"""
opts = parse_options()
defaults = dict(v.split('=') for v in opts.S or [])
with open(opts.config_file) as config:
targets = read_config(config, defaults, opts.ignore_colon)
if opts.T:
return {o... | 09c85e8fce3947ee54c1524545e14fe25a4d054e | 3,319 |
def proper_loadmat(file_path):
    """Loads using scipy.io.loadmat, and cleans some of the metadata.

    :param file_path: path to the ``.mat`` file
    :return: dict of variables with MATLAB bookkeeping keys (``__header__``
        etc.) removed and each value squeezed and converted to a plain list
    """
    raw = loadmat(file_path)
    # Keys starting with "__" are MATLAB file metadata, not user variables.
    return {
        key: value.squeeze().tolist()
        for key, value in raw.items()
        if not key.startswith("__")
    }
from typing import List
def _get_time_total(responses: List[DsResponse]) -> List[str]:
"""Get formated total time metrics."""
metric_settings = {
"name": "time_total",
"type": "untyped",
"help": "Returns the total time in seconds (time taken to request, render and download).",
... | 641bff0a75d1f61afa7ad1d9e9058faee58c18b8 | 3,321 |
async def list_sessions(
cache: Redis = Depends(depends_redis),
) -> ListSessionsResponse:
"""Get all session keys"""
keylist = []
for key in await cache.keys(pattern=f"{IDPREFIX}*"):
if not isinstance(key, bytes):
raise TypeError(
"Found a key that is not stored as b... | 7fce8610a5c53317636da7e5408a582c10faff3c | 3,322 |
def square(x, out=None, where=True, **kwargs):
"""
Return the element-wise square of the input.
Args:
x (numpoly.ndpoly):
Input data.
out (Optional[numpy.ndarray]):
A location into which the result is stored. If provided, it must
have a shape that the inp... | a59297f913433ec870a9eb7d8be5eea21a78cc41 | 3,323 |
def evaluate_tuple(columns,mapper,condition):
    """Evaluate ``condition`` against ``columns`` and ``mapper``.

    A tuple condition is interpreted as ``(func, arg1, arg2)`` and invoked
    as ``func(columns, mapper, arg1, arg2)``; any other condition is
    treated as a plain callable taking ``(columns, mapper)``.
    """
    if not isinstance(condition, tuple):
        return condition(columns, mapper)
    func = condition[0]
    return func(columns, mapper, condition[1], condition[2])
import scipy
def imread(path, is_grayscale=True):
"""
Read image using its path.
Default value is gray-scale, and image is read by YCbCr format as the paper said.
"""
if is_grayscale:
return scipy.misc.imread(path, flatten=True, mode='YCbCr').astype(np.float)
else:
return scipy... | b32e918583c7d4a3bc3e38994bc4aef7dfdc5206 | 3,325 |
def get_priority_text(priority):
"""
Returns operation priority name by numeric value.
:param int priority: Priority numeric value.
:return: Operation priority name.
:rtype: str | None
"""
if priority == NSOperationQueuePriorityVeryLow:
return "VeryLow"
elif priority == NSOperat... | 02986079f164672d58d7d5476e82463e1343ba9d | 3,326 |
import posixpath
def get_experiment_tag_for_image(image_specs, tag_by_experiment=True):
"""Returns the registry with the experiment tag for given image."""
tag = posixpath.join(experiment_utils.get_base_docker_tag(),
image_specs['tag'])
if tag_by_experiment:
tag += ':' + e... | f45898d1f9adb74ca1133be05ab60da5de9df9e6 | 3,327 |
def call_pager():
    """
    Convenient wrapper to call Pager class.

    :return: a new ``_Pager`` instance (``_Pager`` is defined elsewhere
        in this module).
    """
    return _Pager()
def sign_in(request, party_id, party_guest_id):
"""
Sign guest into party.
"""
if request.method != "POST":
return HttpResponse("Endpoint supports POST method only.", status=405)
try:
party = Party.objects.get(pk=party_id)
party_guest = PartyGuest.objects.get(pk=party_guest_... | 2672344a92fb0d029946bf30d1a0a89d33a24a0f | 3,329 |
from datetime import datetime
def mcoolqc_status(connection, **kwargs):
"""Searches for annotated bam files that do not have a qc object
Keyword arguments:
lab_title -- limit search with a lab i.e. Bing+Ren, UCSD
start_date -- limit search to files generated since a date formatted YYYY-MM-DD
run_t... | 44273aa0f7441775258e0b390059cfe9778747e2 | 3,330 |
def isValidListOrRulename(word: str) -> bool:
    """test if there are no accented characters in a listname or rulename
    so ascii letters, digits, - and _ are allowed
    """
    # reValidName is a module-level compiled regex (defined elsewhere);
    # match() anchors at the start of the string, and bool() collapses
    # the Match-or-None result to True/False.
    return bool(reValidName.match(word))
def glyph_has_ink(font: TTFont, name: Text) -> bool:
"""Checks if specified glyph has any ink.
That is, that it has at least one defined contour associated.
Composites are considered to have ink if any of their components have ink.
Args:
font: the font
glyph_name: The name of the ... | 6450e2ec2ed7158f901c7e50999245042d880dce | 3,332 |
async def async_setup_entry(hass, entry, async_add_entities):
"""
Set up n3rgy data sensor
:param hass: hass object
:param entry: config entry
:return: none
"""
# in-line function
async def async_update_data():
"""
Fetch data from n3rgy API
This is the place to pr... | e2dd956428eb377c56d104e49889760f6ba9b653 | 3,333 |
def main():
    """Process command line arguments and run the script.

    :return: whatever ``BrPredMetric.Run`` produces
    """
    return BrPredMetric().Run()
def step(init_distr,D):
    """Advance ``init_distr`` one step by mapping each value through ``D``.

    Each value in ``init_distr`` is replaced in place with
    ``D[current_value]()`` — the current value selects a zero-argument
    callable from ``D``, which is then called to produce the new value.

    :param init_distr: dict whose values are the current states (mutated!)
    :param D: dict mapping a state value to a zero-argument callable
    :return: the same (mutated) ``init_distr`` dict
    """
    # Iterate the dict directly instead of .keys(); only values are
    # rebound, so mutating during iteration is safe here.
    for key in init_distr:
        init_distr[key] = D[init_distr[key]]()
    return init_distr
def from_json(data: JsonDict) -> AttributeType:
"""Make an attribute type from JSON data (deserialize)
Args:
data: JSON data from Tamr server
"""
base_type = data.get("baseType")
if base_type is None:
logger.error(f"JSON data: {repr(data)}")
raise ValueError("Missing require... | eba662ed1c1c3f32a5b65908fae68d7dd41f89e3 | 3,336 |
def ftduino_find_by_name(name):
"""
Returns the path of the ftDuino with the specified `name`.
:param name: Name of the ftDuino.
:return: The path of the ftDuino or ``None`` if the ftDuino was not found.
"""
for path, device_name in ftduino_iter():
if device_name == name:
re... | 8a03d0b84dc9180fb2885d46fc8f1755cd2c6eed | 3,337 |
import numbers
def spectral_entropy (Sxx, fn, flim=None, display=False) :
"""
Compute different entropies based on the average spectrum, its variance,
and its maxima [1]_ [2]_
Parameters
----------
Sxx : ndarray of floats
Spectrogram (2d).
It is recommended to work w... | 533b388781e158b558ee38645271194adb414729 | 3,338 |
def _percentages(self):
"""
An extension method for Counter that
returns a dict mapping the keys of the Counter to their percentages.
:param self: Counter
:return: a dict mapping the keys of the Counter to their percentages
"""
# type: () -> dict[any, float]
length = float(sum(count for ... | 752781a9697113ebf3297050649a7f4ba1580b97 | 3,339 |
def find_best_word_n(draw, nb_letters, path):
    """Return the rows of the lexicon whose words can be built from ``draw``.

    :param draw: the letters drawn
    :param nb_letters: word length used to select which lexicon to load
    :param path: location of the lexicon data
    """
    lexicon = get_lexicon(path, nb_letters)
    playable = [is_word_in_draw(draw, word) for word in lexicon["draw"]]
    return lexicon.loc[playable]
import json
def feature_reader(path):
"""
Reading the feature matrix stored as JSON from the disk.
:param path: Path to the JSON file.
:return out_features: Dict with index and value tensor.
"""
features = json.load(open(path))
features = {int(k): [int(val) for val in v] for k, v in featur... | 959e37ae5a3b0b482d67e5e917211e2131b3c643 | 3,341 |
def locate_all_occurrence(l, e):
    """
    Return indices of all element occurrences in given list.

    :param l: given list
    :type l: list
    :param e: element to locate
    :return: indices of all occurrences
    :rtype: list
    """
    indices = []
    for position, item in enumerate(l):
        if item == e:
            indices.append(position)
    return indices
def UVectorFromAngles(reflection):
"""
Calculate the B&L U vector from bisecting geometry
angles
"""
u = np.zeros((3,), dtype='float64')
# The tricky bit is set again: Busing & Levy's omega is 0 in
# bisecting position. This is why we have to correct for
# stt/2 here
om = np.deg2rad... | fe282e8ac67e5fafb34c63e1745cb9b262602a7a | 3,343 |
import numbers
def to_pillow_image(img_array, image_size=None):
"""Convert an image represented as a numpy array back into a
Pillow Image object."""
if isinstance(image_size, (numbers.Integral, np.integer)):
image_size = (image_size, image_size)
img_array = skimage.img_as_ubyte(img_array)
... | 435bfe79afc59f1cbdd250ca9e1558de8921f7b6 | 3,344 |
from typing import Iterator
def seq_to_sentence(seq: Iterator[int], vocab: Vocab, ignore: Iterator[int]) -> str:
"""Convert a sequence of integers to a string of (space-separated) words according to a vocabulary.
:param seq: Iterator[int]
A sequence of integers (tokens) to be converted.
:param vo... | 2138bd3454c61b7e2a6e3dad25876fdcc4cabe4e | 3,346 |
from skimage.exposure import histogram, match_histograms
import gc
def estimate_exposures(imgs, exif_exp, metadata, method, noise_floor=16, percentile=10,
invert_gamma=False, cam=None, outlier='cerman'):
"""
Exposure times may be inaccurate. Estimate the correct values by fitting a linear system.
:imgs: ... | db80a45dc30cea86a71688a56447ef0166bb49b2 | 3,347 |
def default_reverse(*args, **kwargs):
"""
Acts just like django.core.urlresolvers.reverse() except that if the
resolver raises a NoReverseMatch exception, then a default value will be
returned instead. If no default value is provided, then the exception will
be raised as normal.
NOTE: Any excep... | cadf9452c309adb4f2a865a3ea97ee2aca5b1acc | 3,348 |
def get_company_periods_up_to(period):
    """ Get all periods for a company leading up to the given period, including the given period

    :param period: a Period model instance; its ``company`` and ``end``
        fields drive the filter.
    :return: Django QuerySet of the company's periods with ``end`` on or
        before ``period.end``.
    NOTE(review): the ``company=company`` filter is redundant with
    ``company.period_set`` (already company-scoped) — harmless, but could
    be dropped.
    """
    company = period.company
    return (company.period_set
            .filter(company=company, end__lte=period.end))
from typing import List
def format_count(
label: str, counts: List[int], color: str, dashed: bool = False
) -> dict:
"""Format a line dataset for chart.js"""
ret = {
"label": label,
"data": counts,
"borderColor": color,
"borderWidth": 2,
"fill": False,
}
if ... | 40f5aee7ad5d66f57737345b7d82e45a97cf6633 | 3,350 |
def detect_ripples(eeg):
"""Detect sharp wave ripples (SWRs) from single channel eeg (AnalogSignalArray).
"""
# Maggie defines ripples by doing:
# (1) filter 150-250
# (2) hilbert envelope
# (3) smooth with Gaussian (4 ms SD)
# (4) 3.5 SD above the mean for 15 ms
# (5) full ripple ... | c92190ee6c31e6c1805841258224fa2aa7d4a749 | 3,351 |
def configured_hosts(hass):
"""Return a set of the configured hosts."""
"""For future to use with discovery!"""
out = {}
for entry in hass.config_entries.async_entries(DOMAIN):
out[entry.data[CONF_ADDRESS]] = {
UUID: entry.data[UUID],
CONF_ADDRESS: entry.data[CONF_ADDRESS... | 04d24a8011a706d618699528129ba394ec54a590 | 3,353 |
def generate_keys(directory: str, pwd: bytes = None) -> (ec.EllipticCurvePrivateKey, ec.EllipticCurvePublicKey):
"""
Generate the public and private keys
Generated keys have a default name, you should rename them
This can be done with os.rename()
:param directory: folder where the keys are made
... | 28821be8d081e8c8369b889e3ce1a18336ab3c9f | 3,355 |
def get_storage_config_by_data_type(result_table_id):
    """
    Fetch the storage configuration list for a result table (raw data only).

    :param result_table_id: the result-table id (rt_id)
    :return: Django QuerySet of matching ``DataStorageConfig`` rows; the
        ``data_type`` filter is hard-coded to ``"raw_data"``.
    """
    return DataStorageConfig.objects.filter(result_table_id=result_table_id, data_type="raw_data")
from typing import List
def separate_args(args: List[str]) -> (List[str], List[str]):
"""Separate args into preparser args and primary parser args.
Args:
args: Raw command line arguments.
Returns:
A tuple of lists (preparser_args, mainparser_args).
"""
preparser_args = []
if a... | 49829516f6982d041386d95c20b0028034e066a9 | 3,358 |
def lookup_alive_tags_shallow(repository_id, start_pagination_id=None, limit=None):
""" Returns a list of the tags alive in the specified repository. Note that the tags returned
*only* contain their ID and name. Also note that the Tags are returned ordered by ID.
"""
query = (Tag
.select(Tag.id, ... | a0970da049fb2fa7cd3cc69c459fb7917d8185c8 | 3,359 |
def getLesson(request):
"""
Get the JSON representation for a lesson.
"""
print("getLesson called...")
lesson_id = None
if 'lesson_id' in request.matchdict:
lesson_id = request.matchdict['lesson_id']
if lesson_id is None:
# This should return an appropriate error about not ... | d721ab060462368d9ce3af071faa7e0751b34984 | 3,360 |
def make_daysetting_from_data(data):
""" Constructs a new setting from a given dataset. This method will automatically
instantiate a new class matching the type of the given dataset. It will fill
all values provided by the dataset and then return the created instance """
factory = {
"color": Col... | d3f78fe67441e555d5b525ce1ca6cb334769942a | 3,362 |
from typing import Optional
def read_report(file) -> Optional[Report]:
"""
Reads the report meta-data section of the file.
:param file: The file being read from.
:return: The report section of the file.
"""
# Use a peeker so we don't read beyond the end of the header section
pe... | 557402ee57675fcc11a0a05da02d554c1b2f13db | 3,363 |
def get_valid_segment(text):
""" Returns None or the valid Loki-formatted urn segment for the given input string. """
if text == '':
return None
else:
# Return the converted text value with invalid characters removed.
valid_chars = ['.', '_', '-']
new_text = ''
for c... | 423c1764b590df635b0794bfe52a0a8479d53fbf | 3,364 |
def mparse(filename, staticObstacleList=list(), **kwargs):
"""
Parses a map file into a list of obstacles
@param filename The file name of the map file
@return A list of obstacles
"""
polyList = kwargs.get("nodes", list())
obstacleList = list()
try:
if filename is not None:
... | ea62ff3e4f42ad9150be248c5a13d3c367f668b2 | 3,366 |
def sort_f_df(f_df):
"""Sorts f_df by s_idx first then by l_idx.
E.g. for scenario 0, see all decision alternatives in order,
then scenario 1, scenario 2, etc.
Parameters
----------
f_df : pandas.DataFrame
A dataframe of performance values, `f`, with indexes for the
scenario, `... | ec82966a7a2fb417312198afe42109ed5883d31d | 3,368 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.