content stringlengths 35 762k | sha1 stringlengths 40 40 | id int64 0 3.66M |
|---|---|---|
def _has_access_to_course(user, access_level, course_key):
"""
Returns True if the given user has access_level (= staff or
instructor) access to the course with the given course_key.
This ensures the user is authenticated and checks if global staff or has
staff / instructor access.
access_level... | c6e09f58a849e9ffc0335441144c9246326f2d3c | 3,647,500 |
import tempfile
import os
import tarfile
import pathlib
def download_and_mosaic_through_ftps(file_list, tmp_path, cds_url, cds_path,
cds_sso, cds_pw, bbox, crs, geoTransform):
""" Download Copernicus DEM tiles and create mosaic according to satellite
imagery tiling scheme
... | dbe1a34f2ac866dfcf9147228b04937bc8bbb4db | 3,647,501 |
def chi2(observed, expected):
"""
Return the chi2 sum of the provided observed and expected values.
:param observed: list of floats.
:param expected: list of floats.
:return: chi2 (float).
"""
if 0 in expected:
return 0.0
return sum((_o - _e) ** 2 / _e ** 2 for _o, _e in zip(o... | 6050e98a823671de4a518d584a6e39bc519fa610 | 3,647,502 |
import math
def range_bearing(p1: LatLon, p2: LatLon, R: float = NM) -> tuple[float, Angle]:
"""Rhumb-line course from :py:data:`p1` to :py:data:`p2`.
See :ref:`calc.range_bearing`.
This is the equirectangular approximation.
Without even the minimal corrections for non-spherical Earth.
:param p1... | 68860efbea6d8f1b36ff9e7b91a2a3779a57e611 | 3,647,503 |
import json
import logging
def cf_model_to_life(first_best, update_prod=False, pr_cache=False):
"""
We simulate the response of several variables to a shock to z and x.
We fixed the cross-section distribution of (X,Z) and set rho to rho_start
We apply a permanent shock to either X or Z, and fix the em... | 131fd2a0edb202adacafd9a6416fecb7a1f77dc7 | 3,647,504 |
def kde_interpolation(poi, bw='scott', grid=None, resolution=1, area=None, return_contour_geojson=False):
"""Applies kernel density estimation to a set points-of-interest
measuring the density estimation on a grid of places (arbitrary points
regularly spaced).
Parameters
----------
poi : GeoDat... | f0473e459e42075a3ad4070325aecb229b6b2d89 | 3,647,505 |
def nums2tcrs(nums):
"""Converts a list containing lists of numbers to amino acid sequences. Each number is considered to be an index of the alphabet."""
tcrs_letter=[]
n=len(nums)
for i in range(n):
num=nums[i]
tcr=''
for j in range(len(num)):
tcr+=alphabet[num[j]]
... | 3f366e0bd593b799c7e88c84d583e7c6aeee066f | 3,647,506 |
def extract_columns(data):
    """Extract column names for use as ``DictWriter()`` fieldnames.

    :param data: non-empty sequence of mappings (e.g. a list of dict rows);
        only the first row is inspected.
    :return: list of the first row's keys, in iteration order.
    """
    # Iterating a mapping yields its keys; list() replaces the manual
    # append loop of the original.
    return list(data[0])
import numpy
def cylindric_grid(dr, dz, origin_z=None, layer=False, material="dfalt"):
"""
Generate a cylindric mesh as a radial XZ structured grid.
Parameters
----------
dr : array_like
Grid spacing along X axis.
dz : array_like
Grid spacing along Z axis.
origin_z : scala... | bf710bc212068ec76eb19edce3e8493689535697 | 3,647,508 |
import urllib
def get_clip_preview_feedback(program, event, classifier, start_time, audio_track, reviewer):
"""
Gets the feedback provided by a user for a Segment's clip
Returns:
Feedback if present. Empty Dictionary of no feedback exists.
"""
event = urllib.parse.unquote(event)
prog... | 578952869606951057b8b8797698c320a02d1d00 | 3,647,509 |
import ast
import numpy
def interp(specStr, t):
"""Return the current value of t using linear interpolation.
<specStr> is a string containing a list of pairs e.g. '[[0,20],[30,65],[60,50],[90,75]]'
The first element of each pair is DAYS. The second is a NUMBER.
<t> is time in seconds"""
... | bc60affe122f2d17044e01a01509231e71eda47d | 3,647,510 |
from bs4 import BeautifulSoup
def time_is(location):
"""
Retrieves the time in a location by parsing the time element in the html from Time.is .
:param location: str location of the place you want to find time (works for small towns as well).
:return: time str or None on failure.
"""
if Beauti... | e8f6675199f070fcad7eead98187683b48417757 | 3,647,511 |
import logging
def _generate_template_context(arguments: PackagingResourceArguments,
manifest: OdahuProjectManifest,
output_folder: str) -> DockerTemplateContext:
"""
Generate Docker packager context for templates
"""
logging.info('Building... | e973a44949d6d2df8bfcbf0be42b8214d1c95352 | 3,647,512 |
def get_records(fname):
"""
Read the records of an IRAF database file into a python list
Parameters
----------
fname : str
name of an IRAF database file
Returns
-------
A list of records
"""
f = open(fname)
dtb = f.read()
f.close()
recs = dtb.split('b... | a1eb4500afcd1379db1efe8674c1ff256f2861b5 | 3,647,513 |
from typing import List
def get_all_clips_matching_filter(fid: int) -> List[Clip]:
"""
gets all te clips that is part of the project and matches the filter
:param fid: The filter the clips should match
:return: A list of all clips that is part of the project and matches the filter
"""
filter ... | eb69bf40ad397e970d85b425d4c2c0b25ee345fc | 3,647,514 |
def get_gushim():
"""
get gush_id metadata
"""
detailed = request.args.get('detailed', '') == 'true'
gushim = helpers._get_gushim(fields={'gush_id': True, 'last_checked_at': True, '_id': False})
if detailed:
# Flatten list of gushim into a dict
g_flat = dict((g['gush_id'], {"gush... | 93a941090f515bb726e305856ec6e0ea644b5a34 | 3,647,515 |
def dump_source(buf, id):
"""Dump BASIC source."""
if id == ID_SP5030:
line_end_code = 0x0d
src_end_code = 0x0000
kind = "SP-5030"
elif id == ID_SBASIC:
line_end_code = 0x00
src_end_code = 0x0000
kind = "S-BASIC"
elif id == ID_HUBASIC:
line_end_co... | 598fe1d9dd4be6f1c651be4f81bc9f8290496c3a | 3,647,516 |
def dense_layers(sequences, training, regularizer, initializer,
num_layers=3, activation=tf.nn.relu):
"""
Create a chain of dense (fully-connected) neural network layers.
Args:
sequences (tf.Tensor): Input sequences.
training (bool): Whether the mode is training or not.
... | 72cebd7eb6487555c3efe8e6c14954dc2886e0c3 | 3,647,517 |
def apply_cst(im, cst):
""" Applies CST matrix to image.
Args:
im: input ndarray image ((height * width) x channel).
cst: a 3x3 CST matrix.
Returns:
transformed image.
"""
result = im
for c in range(3):
result[:, :, c] = (cst[c, 0] * im[:, :, 0] + cst[c, 1] * im[:, :, 1] +
... | 7c63d07413bad5fcebf2dfe5f83f205d16280957 | 3,647,518 |
from typing import Tuple
import torch
def show_binary_classification_accuracy(best_m: nn.Module, local_loader: data_utils.DataLoader, chatty = False) -> Tuple:
"""
Given the model and dataloader, calculate the classification accuracy.
Returns true_positives, true_negatives, false_positives, false_negative... | 7743c51a8f64c46c625ccc3b8737b9553f79334f | 3,647,519 |
import argparse
def validate_accelerator_count(accel: Accelerator, count: int) -> int:
"""Raises an error if the count isn't valid for the supplied accelerator, else
returns the count.
"""
is_gpu = accel in GPU
ucase = accelerator_name(is_gpu)
valid_counts = accelerator_counts(accel)
if not _AccelCount... | b3f422710827eaa5cc95e3f896aefed353f8de1d | 3,647,520 |
from typing import Tuple
import os
from typing import Dict
from typing import Any
import torch
def objective(trial: optuna.trial.Trial, log_dir: str, device, backbone) -> Tuple[float, int, float]:
"""Optuna objective.
Args:
trial
Returns:
float: score1(e.g. accuracy)
int: score2(e.... | 59a8273634eec116d7ff16c0ac6b7dfdcbe24cf8 | 3,647,521 |
def block_device_mapping_destroy(context, bdm_id):
    """Destroy the block device mapping with the given ID.

    Thin delegate to the active database backend (``IMPL``).

    :param context: request context, passed through to the backend
    :param bdm_id: identifier of the block device mapping to destroy
    :return: whatever the backend's destroy implementation returns
    """
    return IMPL.block_device_mapping_destroy(context, bdm_id)
def feedback(olsys,H=1):
"""Calculate the closed-loop transfer function
olsys
cltf = --------------
1+H*olsys
where olsys is the transfer function of the open loop
system (Gc*Gp) and H is the transfer function in the feedback
loop (H=1 for unity feedback)."""
... | ca78d05196068746a225038c0f401faad24c5f65 | 3,647,523 |
from typing import List
def get_sigma_grid(
init_sigma: float = 1.0, factor: int = 2, n_grid_points: int = 20
) -> List[float]:
"""Get a standard parameter grid for the cross validation strategy.
Parameters
----------
init_sigma : float, default=1.0
The initial sigma to use to populat... | 33e49127bb2e116b8c209446ad1f614c44e5e128 | 3,647,524 |
def parse_csv(value_column):
    """Parse one CSV record into (features, classes).

    :param value_column: string tensor holding a CSV record.
    :return: tuple of (feature dict keyed by ALL_COLUMNS minus the label,
        int32 class tensor shifted down by one).
    """
    parsed = tf.decode_csv(value_column, record_defaults=DEFAULTS)
    feature_dict = dict(zip(ALL_COLUMNS, parsed))
    raw_label = feature_dict.pop(LABEL_COLUMN)
    # Shift labels down by one (presumably 1-based in the file -> 0-based ids).
    return feature_dict, tf.cast(raw_label, tf.int32) - 1
def on_over_limit():
    """Rate-limit handler: return a JSON error payload asking the user to slow down."""
    message = _('Whoa, calm down and wait a bit before posting again.')
    return jsonify(status='error', error=[message])
def comment(strng, indent=''):
    """Return *strng* with every line commented out.

    Each line is prefixed with ``indent + '# '``; original line endings
    are preserved (``splitlines(True)`` keeps them).
    """
    prefix = indent + '# '
    return ''.join(prefix + line for line in strng.splitlines(True))
def square(number):
    """Return the number of grains on the given chessboard square.

    Grains double on each square, so square ``n`` holds ``2**(n-1)``.

    :param number: square index; must be in 1..64
    :return: grains on that square
    :raises ValueError: if ``number`` is outside 1..64
    """
    # The original also tested `not number`, which was dead code: 0 is
    # already caught by `number <= 0`, and None fails the comparison first.
    if number <= 0 or number > 64:
        raise ValueError(ERROR)
    return 2 ** (number - 1)
import os
from datetime import datetime
def get_doc(name=None, filename=None, url=None, parsed=True, start=0, end=None,
localfile=None, params=None, cookies=None, **kwargs):
"""
Retrieve an IDE file from either a file or URL.
Note: `name`, `filename`, and `url` are mutually exclusive argument... | 30b713188fbcc6b8a23193f05114b1b692a2869d | 3,647,529 |
def health_check() -> ControllerResponse:
"""
Retrieve the current health of service integrations.
Returns
-------
dict
Response content.
int
HTTP status code.
dict
Response headers.
"""
status = {}
for name, obj in _getServices():
logger.info('Ge... | 1915deb5283aac2c0ced935c66dbd3d1f5564e33 | 3,647,530 |
def GeoSim(hss_0, pow_law_exp, lat1, lon1, lat2, lon2):
""" In order to make the Similarity adimensional I have to add a scale to the game.
This scale is hss, i.e. the scale after which the similairty is damped by a factor 2.
:param pow_law_exp: is the exponent of the power law
"""
# @TODO: measure... | dc133428d29b03dd1a6d78565350de6765d2197c | 3,647,531 |
from openpype.scripts import publish_filesequence
def _get_script():
"""Get path to the image sequence script"""
try:
except Exception:
raise RuntimeError("Expected module 'publish_deadline'"
"to be available")
module_path = publish_filesequence.__file__
if modu... | 8efa4f24ed070b859a8e406275feb1c989d6fb6c | 3,647,532 |
def residual_unit(data, nchw_inshape, num_filter, stride, dim_match, name, bottle_neck=True,
workspace=256, memonger=False, conv_layout='NCHW', batchnorm_layout='NCHW',
verbose=False, cudnn_bn_off=False, bn_eps=2e-5, bn_mom=0.9, conv_algo=-1,
fuse_bn_relu=False, fus... | a67edaf2a40a75619b389a6de8e8d20397b4df20 | 3,647,533 |
def dwa_control(x, config, goal, ob):
    """Dynamic Window Approach control.

    Computes the admissible dynamic window for the current state, then
    picks the best control input and its predicted trajectory.

    :param x: current robot state
    :param config: DWA configuration parameters
    :param goal: goal position
    :param ob: obstacle positions
    :return: tuple (control input, predicted trajectory)
    """
    window = calc_dynamic_window(x, config)
    best_u, best_trajectory = calc_final_input(x, window, config, goal, ob)
    return best_u, best_trajectory
import os
def GetCoverageDirectory(fuzzer):
"""Get a coverage report directory for a fuzzer
Args:
fuzzer: The fuzzer to get the coverage report directory for.
Returns:
The location of the coverage report directory for the |fuzzer|.
"""
relative_path = os.path.join(COVERAGE_REPORT_DIRECTORY_NAME, f... | 098b237718389e3296186c177f4ac84c80ebc299 | 3,647,535 |
def decompress_bytes(inp_bytes: bytes, verbose=False) -> bytearray:
"""
Main function to decompress input bytes by extracting the Huffman map
and using the map to replace the encoded sequences with the original
characters.
:param inp_bytes: Input data to be compressed
:param verbose: set to Tru... | 9d3287ff1e481f04edcbe9eb8e06989d5ac83bd6 | 3,647,536 |
def filter_nans(data,
threshold = 3,
threshold_type = "data"):
"""
=================================================================================================
filter_nans(data, threshold, threshold_type)
This function is meant to filter out the nan values from... | fe84ae2d638102e05db68f0c0062ee036be1a63b | 3,647,537 |
def edit_seq2seq_config(config, frameworks=FULL_FRAMEWORKS, no_attn=False):
"""Rotate frameworks and optionally remove attention."""
configs = []
for fw in frameworks:
c = deepcopy(config)
c['backend'] = fw
configs.append(c)
if not no_attn:
new_configs = []
# Run ... | bca93003cf67cc1c0ec14ba1dfa83664b10191fb | 3,647,538 |
from typing import Optional
def get_bioportal_prefix(prefix: str) -> Optional[str]:
    """Get the Bioportal prefix if available.

    :param prefix: canonical prefix to look up
    :return: the mapped Bioportal prefix, or None when no mapping exists
        (per the ``Optional`` return annotation)
    """
    return _get_mapped_prefix(prefix, "bioportal")
def _obtain_rapt(request, access_token, requested_scopes):
"""Given an http request method and reauth access token, get rapt token.
Args:
request (google.auth.transport.Request): A callable used to make
HTTP requests.
access_token (str): reauth access token
requested_scopes ... | 8c430df5c4198af8d044bd3151cdb7af605c14b1 | 3,647,540 |
def argunique(items, key=None):
"""
Returns indices corresponding to the first instance of each unique item.
Args:
items (Sequence[VT]): indexable collection of items
key (Callable[[VT], Any], default=None): custom normalization function.
If specified returns items where ``key(... | fd7af970578aac1a13a3123f13aac9daef1a4b7a | 3,647,541 |
def promote_cvals(*vals):
"""
Promote Python values into the most general dshape containing
all of them. Only defined over simple CType instances.
>>> promote_vals(1,2.)
dshape("float64")
>>> promote_vals(1,2,3j)
dshape("complex128")
"""
promoted = np.result_type(*vals)
datasha... | 3a928ca061bdc8fedf1cb6e125994c4b7167e0c7 | 3,647,542 |
def load_directory_metadata(directory_path, return_copy=True):
"""
Get stored metadata for files in path. This currently only stores bookmarks.
If no metadata is available, return an empty dictionary.
This is a hidden file in the directory which stores metadata for all files
in the directory, as w... | 4033c1fae5c5330ef1254a13c97f33af43e39984 | 3,647,543 |
def _traverse_tree_and_group_all_objects_by_oclass(root_obj, result=None):
"""Traverses the tree once and groups all objects by oclass
:param root_obj: The root object where to start the traversion
:type root_obj: CUDS
:param result: The current results of the recursion, defaults to None
:type resu... | 3ae139313ea7b5e92f0d9231a4e64efc87acc5ac | 3,647,544 |
def check_measurement(m_info, filters):
"""
Determine whether a given measurement should be included based on the
filters.
Inputs:
m_info - A dictionary containing the configuration parameters for an
individual measurement.
filters - A dictionary containing a set of configur... | 374be08c315a63d09faadc9c963a49a89b04b3ed | 3,647,545 |
def audiosegment2wav(data: AudioSegment):
    """Convert a pydub.AudioSegment into a float waveform array.

    Samples are divided by the int16 maximum, so for 16-bit audio the
    output values lie roughly in [-1.0, 1.0].

    :param data: the audio segment to convert
    :return: numpy array of scaled samples
    """
    raw_samples = data.get_array_of_samples()
    return np.array(raw_samples) / _int16_max
def embargo(cand_times, test_times, embargo_table):
"""
"Embargo" observations from the training set.
Args:
cand_times(Series): times of candidates to be the "embargoed set"
index: t0(start time)
value: t1(end time)
test_times(Series): times of the test set
... | 6fb97816c32fc73661905af27613bef0c6ac0726 | 3,647,547 |
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the WiZ Light platform from config_flow."""
# Assign configuration variables.
wiz_data = hass.data[DOMAIN][entry.entry_id]
wizbulb = WizBulbEntity(wiz_data.bulb, entry.data.get(CONF_NAME), wiz_data.scenes)
# Add devices with def... | c65665220f81a5c918cf8eac7839159b4296a968 | 3,647,548 |
import os
import pandas
def handle_uploaded_file(file, filename):
"""
Обработка файла csv спарсенного с online.edu.ru
"""
if not os.path.exists('upload/'):
os.mkdir('upload/')
path = 'upload/' + filename
with open(path, 'wb+') as destination:
for chunk in file.chunks():
... | 2b24081bf7b4d42c60ff17f500c7da0d81e11ceb | 3,647,549 |
async def check_account():
"""
A check that checks if the user has an account and if not creates one for them.
"""
async def check(ctx) -> bool:
conn = get_db()
cur = conn.cursor()
cur.execute("SELECT * FROM economy WHERE user_id = ?", (ctx.author.id,))
if cur.fetchone() ... | 205e39405eb52b57f743dfabca11c04cf11f0f34 | 3,647,550 |
def mtf_image_transformer_base_cifar():
"""Data parallel CIFAR parameters."""
hparams = mtf_image_transformer_base()
hparams.mesh_shape = "batch:8"
hparams.layout = "batch:batch"
hparams.learning_rate_decay_steps = 13600 # one epoch
hparams.batch_size = 32
hparams.num_heads = 4
hparams.num_decoder_laye... | 0c70aac1ffe03eea62d581a6a4ab6b84495af079 | 3,647,551 |
import pandas as pd
import os
def edc_t(path):
"""EPICA Dome C Ice Core 800KYr Temperature Estimates
Temperature record, using Deuterium as a proxy, from the EPICA (European
Project for Ice Coring in Antarctica) Dome C ice core covering 0 to 800
kyr BP.
A data frame with 5788 observations on the following... | ac92d5b39bfa99b9fdea62e4684868c5593d52bf | 3,647,552 |
def init_group_prams(net):
"""Initialize group_prams."""
decayed_params = []
no_decayed_params = []
for param in net.trainable_params():
if 'beta' not in param.name and 'gamma' not in param.name and 'bias' not in param.name:
decayed_params.append(param)
else:
no_d... | be078603c4ae42163f66668dcc16a0a77d899805 | 3,647,553 |
def nni_differences_parameters(nni=None, rpeaks=None):
"""Computes basic statistical parameters from a series of successive NN interval differences (mean, min, max, standard deviation).
Parameters
----------
nni : array
NN intervals in [ms] or [s].
rpeaks : array
R-peak times in [ms] or [s].
Returns (biospp... | aadea3b440fe4ac3c06cbd88cde69e11566e861f | 3,647,554 |
def contextualize_model(model, cell_line, genes):
"""Contextualize model at the level of a PySB model."""
# Here we just make a PysbAssembler to be able
# to apply set_context on the model being passed in
model.name = cell_line
cell_line_ccle = cell_line + '_SKIN'
pa = PysbAssembler()
pa.mod... | 7f0018b0e1308a354529893fcd8ac54bb9fa7642 | 3,647,555 |
def _quaternionInverse(quat):
""" Inverses a list of quaternions
"""
quat_ = np.empty((quat.shape[0],4))
# For every quaternion
for i in range(quat.shape[0]):
mag = quat[i,0]**2 + quat[i,1]**2 + quat[i,2]**2 + quat[i,3]**2
quat_[i,0] = -quat[i,0]/mag
quat_[i,1] = -quat[i,1]... | a70868d3b38fe087c83a52c1a7cabc32f05310dc | 3,647,556 |
from typing import Union
def load_dataset(files: list[str]) -> Union[list[int], list[list[list[int]]]]:
"""load the images and labels of the test dataset
Args:
files (list[str]): list of files path for images and label dataset
Returns:
Union[list[int], list[list[list[int]]]]: list of labels and li... | e9635b8b9a4f92d96df8e0dea97a569a1b49b02d | 3,647,557 |
def get_minion_node_ips(boot_conf, hb_conf):
    """
    Returns a list of IPs for all minion nodes.

    (The original docstring said "master nodes", but the code queries the
    'minions' node group.)

    :param boot_conf: the snaps-boot configuration dict
    :param hb_conf: the adrenaline configuration dict
    :return: a list of IP addresses
    """
    return __get_node_ips(boot_conf, hb_conf, 'minions')
def _normalize_sql(sql, maxlen=150):
"""Collapse whitespace and middle-truncate if needed."""
out = ' '.join(sql.split())
if len(out) > maxlen:
i = int(maxlen / 2 - 4)
out = (out[0:i] +
' . . . ' +
out[-i:None])
return out | f85efb0c367b448d2e363d9c1f8bf62a2bdb600e | 3,647,559 |
from typing import Dict
def utt_non_punct_dialog(dialog: Dict):
    """
    Used by: book_skill

    Trims the dialog to its last N turns, removes clarification turns,
    and wraps the result in the annotator request format.
    """
    trimmed = utils.get_last_n_turns(dialog)
    cleaned = utils.remove_clarification_turns_from_dialog(trimmed)
    return [{"dialogs": [cleaned]}]
def griddata_easy(xx, yy, data, xi=None, yi=None, dx=None, dy=None, nx=10, ny=10, method='nearest', fill_value=None):
"""
Generate a girdded data from scattered data z=f(x, y)
... Wrapper of scipy.interplate.riddata
Parameters
----------
xx: nd array-like
x-coordinate of scattered data
... | 77c5c92e5176c62252f7c6814e3483d8a1323925 | 3,647,561 |
def emit_cover(ctx, go_toolchain,
source = None,
mode = None,
importpath = ""):
"""See go/toolchains.rst#cover for full documentation."""
if source == None: fail("source is a required parameter")
if mode == None: fail("mode is a required parameter")
if not importpat... | d390f534e723a893ca5e8b23a90ae4008abf79fe | 3,647,562 |
def shortdate(date=None):
    """Turn a (timestamp, tzoff) tuple into an ISO 8601 date (YYYY-MM-DD).

    :param date: (unixtime, tz-offset) tuple, or None to let ``datestr``
        apply its default ("now").
    """
    return datestr(date, format='%Y-%m-%d')
from datetime import datetime
import time
import numpy
def default_fram( object_to_serialize):
"""
Python json api custom serializer function for FRAM Warehouse API
per:'Specializing JSON object encoding', https://simplejson.readthedocs.org
>>> import simplejson as json
>>> json.dumps({'With... | bb345b01b7ba86e2e47515addda854d16983f036 | 3,647,564 |
import random
def read_random_stack_for_multiple_pickles(all_spectra, all_sequence, num_spectra, stack_size):
"""TODO(nh2tran): docstring."""
print("read_random_stack_for_multiple_pickles()")
random_idx = random.sample(xrange(num_spectra[-1]), min(stack_size, num_spectra[-1]))
random_locations = []
f_idx ... | 36a0baf3d111f262ebb4845379747e054c5e728f | 3,647,565 |
import logging
def perfect_pattern(dict_class_counts, distinct_classes, pattern, supporting_items, results_dir):
"""
Performs checking whether the pattern is perfect and a common class can be found directly without constructing the
relative class hierarchy.
:param dict_class_counts: the count of each... | cbedf060049466d46689af49cb31be95dd2ecc3b | 3,647,566 |
import timeit
def _benchmark_grep(filename, pattern):
"""Benchmarks grep.
Args:
- filename: The name of the file to be searched.
- pattern: The pattern we are searching for in the file.
"""
time_taken = timeit(setup=BENCHMARK_SETUP, number=SINGLE_STRING_TESTS,
stmt='subprocess.cal... | f1d3a4b9f6d5f7867f49a6eb3bdc6236111d5277 | 3,647,567 |
import pathlib
def inotify_test(
test_paths: dict[str, pathlib.Path], tmp_path: pathlib.Path
) -> InotifyTest:
"""Generate a pre-configured test instance of `inotify_simple.INotify`.
Parameters
----------
test_paths: dict[str, pathlib.Path]
The test fixture that generates test files based... | e64975dc2765e3c887194cbf88a0f47ef3d5311e | 3,647,568 |
def set_system_bios( context, settings, system_id = None ):
"""
Finds a system matching the given ID and sets the BIOS settings
Args:
context: The Redfish client object with an open session
settings: The settings to apply to the system
system_id: The system to locate; if None, perfo... | 68ceeb63ec74f3459f8cfea1eb6eb9d668bff15e | 3,647,569 |
def create() -> UserSecurityModel:
    """
    Creates and returns a new, default-constructed UserSecurityModel instance.
    """
    return UserSecurityModel()
def clDice(v_p, v_l):
"""[this function computes the cldice metric]
Args:
v_p ([bool]): [predicted image]
v_l ([bool]): [ground truth image]
Returns:
[float]: [cldice metric]
"""
if len(v_p.shape)==2:
tprec = cl_score(v_p,skeletonize(v_l))
tsens = cl_score(v... | f8a6947ca1487878e9e33c5c7aed3604565801e3 | 3,647,571 |
import re
def validate_regex(regex_str):
"""
Checks if a given string is valid regex
:param str regex_str: a suspicios string that may or may not be valid regex
:rtype: bool
:return: True if valid regex was give, False in case of TypeError or re.error
"""
# another of those super basic fu... | 97c6e2338eb67c2d4be74e3a18a4393a1eb36242 | 3,647,572 |
import json
def load_stats_from_file(date):
"""
Load stats data from a stat file.
Params:
date -- a `datetime` instance.
"""
file_path = _build_stats_file_path(date)
if not isfile(file_path):
raise IOError # This will be FileNotFoundError in Python3.
with open(file_path, 'r... | b2bb85f6a492ca26441271222f10373e200497e1 | 3,647,573 |
def null_gt_null(left, right):
""":yaql:operator >
Returns false. This function is called when left and right are null.
:signature: left > right
:arg left: left operand
:argType left: null
:arg right: right operand
:argType right: null
:returnType: boolean
.. code:
yaql> ... | f99a985ae1b0e678afb315ed441d33064dd281b0 | 3,647,574 |
def read_header(file):
""" Read the information in an OpenFOAM file header.
Parameters
----------
file : str
Name (path) of OpenFOAM file.
Returns
-------
info : dictionary
The information in the file header.
"""
with open(file, 'r') as f:
content = f.read()... | 91446555ed31953ea4290e76db51872eb1ef3ae9 | 3,647,575 |
def point_from_b58(b):
    """Decode a base58 "x,y" coordinate pair into an ECC point on CURVE.

    :param b: string of two base58-encoded integers separated by a comma
    :return: the corresponding ``ECC.EccPoint``
    """
    x_part, y_part = b.split(",")
    return ECC.EccPoint(
        x=int_from_b58(x_part), y=int_from_b58(y_part), curve=CURVE
    )
def _cp_embeds_into(cp1, cp2):
"""Check that any state in ComplexPattern2 is matched in ComplexPattern1.
"""
# Check that any state in cp2 is matched in cp1
# If the thing we're matching to is just a monomer pattern, that makes
# things easier--we just need to find the corresponding monomer pattern
... | 67e410eb3ba1131f144829b724ad7099807d4e4e | 3,647,577 |
def get_tags_for_message(khoros_object, msg_id):
"""This function retrieves the tags for a given message.
.. versionadded:: 2.8.0
:param khoros_object: The core :py:class:`khoros.Khoros` object
:type khoros_object: class[khoros.Khoros]
:param msg_id: The Message ID for the message from which to re... | 563df4344f9291d9114450a994145610ef79ae8f | 3,647,578 |
def _build_hierarchical_histogram_computation(
lower_bound: float, upper_bound: float, num_bins: int,
aggregation_factory: factory.UnweightedAggregationFactory):
"""Utility function creating tff computation given the parameters and factory.
Args:
lower_bound: A `float` specifying the lower bound of the... | 38d5c711bcd6d6cd8965f7e8e85b0933363a2a7b | 3,647,579 |
import inspect
def check_endpoint(func):
"""Check available endpoint."""
@wraps(func)
def wrapper(*args, **kwargs):
sig = inspect.signature(func)
args_value = sig.bind(*args, **kwargs)
endpoint = args_value.arguments["endpoint"]
if endpoint not in AVAILABLE_ENDPOINTS:
... | 1e833dc8c3d43b6c09bd2b3bc89846ce29952cbd | 3,647,580 |
def read_sql_one(id):
"""
This function responds to a request for api/reviews/{id}
with one matching review from reviews
:param id: id of the review
:return: review matching the id
"""
response = Response.query.filter_by(id=id).one_or_none()
if response is not None:
# serial... | d54abca40fb6d44adf0988bc44484da3af3efb22 | 3,647,581 |
from typing import Tuple
def ds_to_numpy(ds: Dataset) -> Tuple[np.ndarray, np.ndarray]:
"""Transform torch dataset to numpy arrays
Parameters
----------
ds : Dataset
COVID dataset
Returns
-------
Tuple[np.ndarray, np.ndarray]
Flattened images + labels
"""
imgs = ... | 218eaf582b36a562920bc2e8808b3524a900b8ef | 3,647,582 |
import base64
def _b64(b):
"""Helper function base64 encode for jose spec."""
return base64.urlsafe_b64encode(b).decode('utf8').replace("=", "") | 4777d4f47de2c72b8dd95b765fc54d1abc6763f0 | 3,647,583 |
def load_from_arff(filename, label_count, label_location="end",
input_feature_type='float', encode_nominal=True, load_sparse=False,
return_attribute_definitions=False):
"""Method for loading ARFF files as numpy array
Parameters
----------
filename : str
path... | d203b6360d3212e7e6a37f0ff434e17dfacfe6a0 | 3,647,584 |
def gapfill_to_ensemble(model, iterations=1, universal=None, lower_bound=0.05,
penalties=None, exchange_reactions=False,
demand_reactions=False, integer_threshold=1e-6):
"""
Performs gapfilling on model, pulling reactions from universal.
Any existing constraints on base_mod... | 1e5b2c6e413afc1b745867f931d4fbc7c33babcc | 3,647,585 |
import torch
def reparameterize(mu, logvar, n_samples=1):
"""Reparameterization trick.
Args:
mu (torch.Tensor): Mean.
logvar (torch.Tensor): Logarithm of variation.
n_samples (int): The number of samples.
Returns:
torch.Tensor: Samples drawn from the given Gaussian distri... | 726473147ee28f470ad7d543e2b36bc512ffd0ae | 3,647,586 |
def rotationMatrixFromNormals(v0,v1,tol=1e-20):
"""
Performs the minimum number of rotations to define a rotation from the direction indicated by the vector n0 to the direction indicated by n1.
The axis of rotation is n0 x n1
https://en.wikipedia.org/wiki/Rodrigues%27_rotation_formula
... | 946110994a3567871df4b60a3c6814f9ab092ad1 | 3,647,587 |
def P_to_array(P: NestedDicts) -> np.array:
""" Converts a transition matrix in nested dictionary format to a numpy array.
P is usually given as starting state -> action -> ending state w/ data, we reorder this to
action -> starting state -> ending state -> transition probability.
"""
# Action, Sta... | 3a107b3cff6b46b8afc93705bebef84bcbcad6ca | 3,647,588 |
def get_available_smc_versions():
    """
    Return list of available SMC versions. SMC versioning is done by
    d70/smc:v6.1.2. Version returned is after the colon.

    NOTE(review): the comprehension actually returns the full repo tags
    (e.g. 'd70/smc:v6.1.2'), not only the part after the colon -- confirm
    whether callers strip the prefix themselves.
    """
    return [repotag for image in get_images(filter='d70/smc')
            for repotag in image.get('RepoTags')]
def boolean_fn2(a, b, c):
    """Return the truth value of (a ∧ b) ∨ (¬a ∧ ¬b); `c` is unused.

    Mirrors the original's short-circuit semantics, so for truthy
    non-bool inputs the operand itself may be returned.
    """
    both_true = a and b
    both_false = (not a) and (not b)
    return both_true or both_false
from utils.ica_base import get_configuration
def get_ica_gds_configuration() -> libgds.Configuration:
    """
    Get the configuration object for ICA GDS.

    (Original docstring said "ica wes" -- likely a copy/paste slip, since
    the function name and return type both refer to GDS.)

    :return: a ``libgds.Configuration`` built by the shared
        ``get_configuration`` helper
    """
    return get_configuration(libgds.Configuration)
import torch
import time
def retry_load_images(image_paths, retry=10, backend="pytorch"):
"""
This function is to load images with support of retrying for failed load.
Args:
image_paths (list): paths of images needed to be loaded.
retry (int, optional): maximum time of loading retrying. D... | 5a34ababc157548c6d9f673c3ff0934df9eccb3d | 3,647,592 |
def b2p(exts):
"""Convert two points of a polygon into its bounding box.
(Rectangular polygon parallel with axes.)
"""
p0x = exts[0][0]
p0y = exts[0][1]
p0 = str(p0x) + ' ' + str(p0y) + ' ' + '0.0'
p1x = exts[0][2]
p1y = exts[0][3]
p1 = str(p1x) + ' ' + str(p1y) + ' ' + '0.0'
pb... | 11a51cffb8143b01b60904bef4c92e6f7335dc1d | 3,647,593 |
def unix_to_human_time(utime, alt_format=0):
"""convert Unix time to Human readable time"""
try:
fraction = utime - int(utime)
except OverflowError as err:
t = 'Unix time %s too long to convert, substituting 0' % utime
# TODO log this time issue
print('NEED TO LOG THIS TIME I... | 0f296ec2f394568fb973ce8371cc262c2e21ffc8 | 3,647,594 |
import re
def read_conf_file_interface(config_name):
"""
Get interface settings.
@param config_name: Name of WG interface
@type config_name: str
@return: Dictionary with interface settings
@rtype: dict
"""
conf_location = WG_CONF_PATH + "/" + config_name + ".conf"
with open(conf_l... | 7f51585d05472fa7fbc26e89b150e540f7013be1 | 3,647,595 |
import re
def book_transformer(query_input, book_dict_input):
"""grabs the book and casts it to a list"""
sample_version = versions_dict.versions_dict()
query_input[1] = query_input[1].replace('[', '').replace(']', '').lstrip().rstrip().upper()
for i in list(book_dict_input.keys()):
result = r... | 259e5520aa762749169b0d529c8f1e8836815a16 | 3,647,596 |
import json
def custom_response(message, status, mimetype):
    """Build a Response carrying a JSON error payload.

    :param message: human-readable error message
    :param status: HTTP status code (also echoed in the body as status_code)
    :param mimetype: MIME type for the response
    :return: the constructed Response
    """
    body = json.dumps({"message": message, "status_code": status})
    return Response(body, status=status, mimetype=mimetype)
def embed_network(input_net, layers, reuse_variables=False):
"""Convolutional embedding."""
n_layers = int(len(layers)/3)
tf.logging.info('Number of layers: %d' % n_layers)
# set normalization and activation functions
normalizer_fn = None
activation_fn = tf.nn.softplus
tf.logging.info('Softplus activati... | d525dbf59ce860af6e0bc0de6c21fa55454c3f55 | 3,647,598 |
def sample_variance(sample1, sample2):
    """
    Pooled variance of two samples: (var1 + var2) / (n1 + n2 - 2).
    After learn.co.
    """
    size1 = len(sample1)
    size2 = len(sample2)
    combined = variance(sample1) + variance(sample2)
    return combined / (size1 + size2 - 2)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.