content stringlengths 35 762k | sha1 stringlengths 40 40 | id int64 0 3.66M |
|---|---|---|
def flanking_regions_fasta_deletion(genome, dataframe, flanking_region_size):
"""
Makes batch processing possible, pulls down small region
of genome for which to design primers around.
This is based on the chromosome and position of input file.
Each Fasta record will contain:
>Sample_Gene_chr:... | a20da206630d1f2fb002c5ca63eab9f240b1f1d5 | 12,351 |
import functools
def numpy_episodes(
train_dir, test_dir, shape, loader, preprocess_fn=None, scan_every=10,
num_chunks=None, **kwargs):
"""Read sequences stored as compressed Numpy files as a TensorFlow dataset.
Args:
train_dir: Directory containing NPZ files of the training dataset.
test... | fd9c727c64bdd725ef1615754d12b93f21568c2f | 12,352 |
def fft_convolve(ts, query):
"""
Computes the sliding dot product for query over the time series using
the quicker FFT convolution approach.
Parameters
----------
ts : array_like
The time series.
query : array_like
The query.
Returns
-------
array_like - The sli... | 7e1fec2a3b30770909d7c185bbc0b4885cb7eb22 | 12,353 |
from typing import List
from typing import Optional
def _add_merge_gvcfs_job(
b: hb.Batch,
gvcfs: List[hb.ResourceGroup],
output_gvcf_path: Optional[str],
sample_name: str,
) -> Job:
"""
Combine by-interval GVCFs into a single sample GVCF file
"""
job_name = f'Merge {len(gvcfs)} GVCFs... | d89fd051cd20bef7263b600ce3513ba858acbadd | 12,354 |
def register_permission(name, codename, ctypes=None):
"""Registers a permission to the framework. Returns the permission if the
registration was successfully, otherwise False.
**Parameters:**
name
The unique name of the permission. This is displayed to the customer.
codename
The u... | f09766685ac4690bd72739450977646d521a21d0 | 12,355 |
def calculate_outliers(tile_urls, num_outliers, cache, nprocs):
"""
Fetch tiles and calculate the outlier tiles per layer.
The number of outliers is per layer - the largest N.
Cache, if true, uses a local disk cache for the tiles. This can be very
useful if re-running percentile calculations.
... | 6e72820de2f954a9e349aa40d165817b3ab7c012 | 12,356 |
import random
def load_trigger_dataset(
fname,
templatizer,
limit=None,
train=False,
preprocessor_key=None,
priming_dataset=None,
max_priming_examples=64,
):
"""
Loads a MLM classification dataset.
Parameters
==========
fname : str
The filename.
templatizer... | 6ed4970dd0031bd33cf19414f439c69e5d5a079a | 12,357 |
def pmu2bids(physio_files, verbose=False):
"""
Function to read a list of Siemens PMU physio files and
save them as a BIDS physiological recording.
Parameters
----------
physio_files : list of str
list of paths to files with a Siemens PMU recording
verbose : bool
verbose fla... | 41e607c80955689e5a189652ba445bf0014a3893 | 12,358 |
def add_chain(length):
"""Adds a chain to the network so that"""
chained_works = []
chain = utils.generate_chain(length)
for i in range(len(chain)-1):
agent_id = get_random_agent().properties(ns.KEY_AGENT_ID).value().next()
work_id = g.create_work().properties(ns.KEY_WORK_ID).value().nex... | 80a176fb34460404c847f00dbeab963f1a0be71e | 12,359 |
def convert_graph_to_db_format(input_graph: nx.Graph, with_weights=False, cast_to_directed=False):
"""Converts a given graph into a DB format, which consists of two or three lists
1. **Index list:** a list where the i-th position contains the index of the beginning of the list of adjacent nodes (in the se... | 3f538f697df16b13aeb513dd60831a1252fffb6c | 12,361 |
def auxiliary_subfields():
    """Factory associated with AuxSubfieldsPoroelasticity.

    :returns: a freshly constructed AuxSubfieldsPoroelasticity instance.
    """
    subfields = AuxSubfieldsPoroelasticity()
    return subfields
def cassandra_get_unit_data():
"""
Basing function to obtain units from db and return as dict
:return: dictionary of units
"""
kpi_dict = {}
cassandra_cluster = Cluster()
session = cassandra_cluster.connect('pb2')
query = session.prepare('SELECT * FROM kpi_units')
query_data = sessio... | ab24e4e09f648a74cd16a140279da54aab3d4096 | 12,363 |
def read_cfg_float(cfgp, section, key, default):
"""
Read float from a config file
Args:
cfgp: Config parser
section: [section] of the config file
key: Key to be read
default: Value if couldn't be read
Returns: Resulting float
"""
if cfgp.has_option(section, key... | 0ed341c2d1436e3378e4e126735ac7306973ca8c | 12,364 |
def random(website):
    """Fetch a random set of cookies for the given site.

    :param website: name of the site to query, e.g. ``weibo``
    :return: a randomly chosen cookies entry for that site
    """
    conn = get_conn()
    # Cookie generators are exposed as attributes named "<site>_cookies".
    generator = getattr(conn, website + '_cookies')
    return generator.random()
import requests
def get_pid(referral_data):
""" Example getting PID using the same token used to query AD
NOTE! to get PID the referral information must exist in the BETA(UAT) instance of TOMS
"""
referral_uid = referral_data['referral_uid']
url = "https://api.beta.genomics.nhs.uk/reidentific... | 8e5e43c1a2c85826e03f0fd090fc235b0320aed7 | 12,366 |
from typing import Union
from pathlib import Path
from typing import Tuple
from typing import List
from datetime import datetime
def open_events(
fname: Union[Path, str], leap_sec: float, get_frame_rate: bool = False
) -> Tuple[
List[float], List[float], List[float], List[datetime], Union[List[float], None]
]... | 973b835b1df2aafba1a535b378434b6a532584d0 | 12,367 |
def intdags_permutations(draw, min_size: int = 1, max_size: int = 10):
    """Produce several instances of one and the same DAG.

    The instances are permutations of each other and are not necessarily
    topologically sorted.
    """
    dag = draw(intdags())
    return draw(lists(permutations(dag),
                      min_size=min_size,
                      max_size=max_size))
def getConfiguredGraphClass(doer):
"""
In this class method, we must return a configured graph class
"""
# if options.bReified:
# DU_GRAPH = Graph_MultiSinglePageXml_Segmenter_Separator_DOM
if options.bSeparator:
DU_GRAPH = ConjugateSegmenterGraph_MultiSinglePageXml_Separator
els... | 3089572eb1aa4e7db505b5211d156d3e044aaed5 | 12,369 |
def _seed(x, deg=5, seeds=None):
"""Seed the greedy algorithm with (deg+1) evenly spaced indices"""
if seeds is None:
f = lambda m, n: [ii*n//m + n//(2*m) for ii in range(m)]
indices = np.sort(np.hstack([[0, len(x)-1], f(deg-1, len(x))]))
else:
indices = seeds
errors = []
return indices, errors | 7a5ff1e2e27b812f17196fbec1d7c6a2c867207c | 12,371 |
def get_ref(cube):
"""Gets the 8 reflection symmetries of a nd numpy array"""
L = []
L.append(cube[:,:,:])
L.append(cube[:,:,::-1])
L.append(cube[:,::-1,:])
L.append(cube[::-1,:,:])
L.append(cube[:,::-1,::-1])
L.append(cube[::-1,:,::-1])
L.append(cube[::-1,::-1,:])
L.append(cube[... | 683ef2c7c0a312e4cf891f191452f9c29f6bc1fd | 12,372 |
from typing import Collection
from typing import Tuple
from typing import Optional
from typing import Mapping
def get_relation_functionality(
mapped_triples: Collection[Tuple[int, int, int]],
add_labels: bool = True,
label_to_id: Optional[Mapping[str, int]] = None,
) -> pd.DataFrame:
"""Calculate rela... | 1e6aa6d9e61ebd788d8c1726ca8a75d551b654b8 | 12,373 |
import json
def df_to_vega_lite(df, path=None):
"""
Export a pandas.DataFrame to a vega-lite data JSON.
Params
------
df : pandas.DataFrame
dataframe to convert to JSON
path : None or str
if None, return the JSON str. Else write JSON to the file specified by
path.
... | 5cf5cf834d4113c05c4cc8b99aaa2a94e0a7b746 | 12,374 |
def _is_json_mimetype(mimetype):
"""Returns 'True' if a given mimetype implies JSON data."""
return any(
[
mimetype == "application/json",
mimetype.startswith("application/") and mimetype.endswith("+json"),
]
) | 9c2580ff4a783d9f79d6f6cac41befb516c52e9f | 12,375 |
from datetime import datetime
def make_request(action, data, token):
    """Build a request dict from the given arguments, stamped with now().

    :param action: name of the action being requested
    :param data: payload to send
    :param token: auth token to attach
    :return: dict with ``action``, ``time``, ``data`` and ``token`` keys
    """
    request = {
        'action': action,
        'time': datetime.now().timestamp(),
        'data': data,
        'token': token,
    }
    return request
def get_stats_historical_prices(timestamp, horizon):
"""
We assume here that the price is a random variable following a normal
distribution. We compute the mean and covariance of the price distribution.
"""
hist_prices_df = pd.read_csv(HISTORICAL_PRICES_CSV)
hist_prices_df["timestamp"] = pd.to_d... | bc6fdcbcb54f156d880ba2504a0ca0d50f889786 | 12,377 |
def _unflattify(values, shape):
"""
Unflattifies parameter values.
:param values: The flattened array of values that are to be unflattified
:type values: torch.Tensor
:param shape: The shape of the parameter prior
:type shape: torch.Size
:rtype: torch.Tensor
"""
if len(shape) < 1 or... | e885517419eb48fd1a4ebdf14a8fa3b19f3c5444 | 12,378 |
def theme_cmd(data, buffer, args):
"""Callback for /theme command."""
if args == '':
weechat.command('', '/help ' + SCRIPT_COMMAND)
return weechat.WEECHAT_RC_OK
argv = args.strip().split(' ', 1)
if len(argv) == 0:
return weechat.WEECHAT_RC_OK
if argv[0] in ('install',):
... | f361a56392320efac4bd1e4101b002c1e42d4b89 | 12,379 |
def get_unique_chemical_names(reagents):
"""Get the unique chemical species names in a list of reagents.
The concentrations of these species define the vector space in which we sample possible experiments
:param reagents: a list of perovskitereagent objects
:return: a list of the unique chemical names... | ae5d6b3bdd8e03c47b9c19c900760c8c2b83d0a0 | 12,380 |
def get_sorted_keys(dict_to_sort):
"""Gets the keys from a dict and sorts them in ascending order.
Assumes keys are of the form Ni, where N is a letter and i is an integer.
Args:
dict_to_sort (dict): dict whose keys need sorting
Returns:
list: list of sorted keys from dict_to_sort
... | 9614dee83723e21248381c61a60e92e78c121216 | 12,381 |
def model_3d(psrs, psd='powerlaw', noisedict=None, components=30,
gamma_common=None, upper_limit=False, bayesephem=False,
wideband=False):
"""
Reads in list of enterprise Pulsar instance and returns a PTA
instantiated with model 3D from the analysis paper:
per pulsar:
... | 37abad1016fadd82bcff1a55e9835db28a5c4eb8 | 12,382 |
def max_votes(x):
"""
Return the maximum occurrence of predicted class.
Notes
-----
If number of class 0 prediction is equal to number of class 1 predictions, NO_VOTE will be returned.
E.g.
Num_preds_0 = 25,
Num_preds_1 = 25,
Num_preds_NO_VOTE = 0,
... | 2eadafdaf9e9b4584cd81685a5c1b77a090e4f1c | 12,383 |
def misclassification_error(y_true: np.ndarray, y_pred: np.ndarray, normalize: bool = True) -> float:
"""
Calculate misclassification loss
Parameters
----------
y_true: ndarray of shape (n_samples, )
True response values
y_pred: ndarray of shape (n_samples, )
Predicted response ... | 676657fa4da7b4734077ba3a19878d8890f44815 | 12,384 |
from scipy.stats import uniform
def dunif(x, minimum=0, maximum=1):
    """
    Calculates the point estimate of the uniform distribution
    """
    # scipy parameterizes the uniform by loc (left edge) and scale (width).
    width = maximum - minimum
    return uniform.pdf(x=x, loc=minimum, scale=width)
def _generate_upsert_sql(mon_loc):
"""
Generate SQL to insert/update.
"""
mon_loc_db = [(k, _manipulate_values(v, k in TIME_COLUMNS)) for k, v in mon_loc.items()]
all_columns = ','.join(col for (col, _) in mon_loc_db)
all_values = ','.join(value for (_, value) in mon_loc_db)
update_query = '... | 7cbfdc1dd8709a354e4e246324042c8cf02a703b | 12,386 |
def dict2obj(d):
"""Given a dictionary, return an object with the keys mapped to attributes
and the values mapped to attribute values. This is recursive, so nested
dictionaries are nested objects."""
top = type('dict2obj', (object,), d)
seqs = tuple, list, set, frozenset
for k, v in d.items():
... | ccfa713dc130024427872eb6f2017a0383e3bc01 | 12,388 |
def customized_algorithm_plot(experiment_name='finite_simple_sanity', data_path=_DEFAULT_DATA_PATH):
"""Simple plot of average instantaneous regret by agent, per timestep.
Args:
experiment_name: string = name of experiment config.
data_path: string = where to look for the files.
Returns:
p: ggplot p... | bd046c14de1598672391bbcb134dfe8bcff0b558 | 12,389 |
def _get_log_time_scale(units):
"""Retrieves the ``log10()`` of the scale factor for a given time unit.
Args:
units (str): String specifying the units
(one of ``'fs'``, ``'ps'``, ``'ns'``, ``'us'``, ``'ms'``, ``'sec'``).
Returns:
The ``log10()`` of the scale factor for the time... | 2371aab923aacce9159bce6ea1470ed49ef2c72f | 12,390 |
def resolvermatch(request):
    """Add the name of the currently resolved pattern to the RequestContext"""
    match = resolve(request.path)
    # Fall back to an empty context when nothing resolved.
    return {'resolved': match} if match else {}
def selection_sort(arr: list) -> list:
"""
Main sorting function. Using "find_smallest" function as part
of the algorythm.
:param arr: list to sort
:return: sorted list
"""
new_arr = []
for index in range(len(arr)):
smallest = find_smallest(arr)
new_arr.append(arr.pop(sma... | e618c5469ce77d830255dc16806f9499bed7ca9a | 12,392 |
def get_primary_monitor():
    """Return the primary monitor.

    Wrapper for:
        GLFWmonitor* glfwGetPrimaryMonitor(void);
    """
    monitor = _glfw.glfwGetPrimaryMonitor()
    return monitor
def query_people_and_institutions(rc, names):
"""Get the people and institutions names."""
people, institutions = [], []
for person_name in names:
person_found = fuzzy_retrieval(all_docs_from_collection(
rc.client, "people"),
["name", "aka", "_id"],
person_name, c... | fd98a7557e2ee07b67ca8eddaf76c28b7b99033a | 12,394 |
from typing import Union
from typing import Tuple
def add_device(overlay_id) -> Union[str, Tuple[str, int]]:
"""
Add device to an overlay.
"""
manager = get_manager()
api_key = header_api_key(request)
if not manager.api_key_is_valid(api_key):
return jsonify(error="Not authorized"), 403... | b9652b8d99672d0219df4821decebded458719bd | 12,395 |
from math import sin, cos
def pvtol(t, x, u, params={}):
"""Reduced planar vertical takeoff and landing dynamics"""
m = params.get('m', 4.) # kg, system mass
J = params.get('J', 0.0475) # kg m^2, system inertia
r = params.get('r', 0.25) # m, thrust offset
g = params.get('g', 9.8) # m/... | ff3357e6e1fc1b6f878d9f16b14eba0b687642cd | 12,396 |
from typing import List
from typing import Any
from typing import Callable
def route(
path: str, methods: List[str], **kwargs: Any
) -> Callable[[AnyCallable], AnyCallable]:
"""General purpose route definition. Requires you to pass an array of HTTP methods like GET, POST, PUT, etc.
The remaining kwargs a... | 9e499d59b48a3562f46bdcbde76d87ceb199691e | 12,397 |
import wx
def canHaveGui():
"""Return ``True`` if a display is available, ``False`` otherwise. """
# We cache this because calling the
# IsDisplayAvailable function will cause the
# application to steal focus under OSX!
try:
return wx.App.IsDisplayAvailable()
except ImportError:
... | 9a9af0f46ca22faeb5f76e350d1c831bcba95343 | 12,398 |
def syntactic_analysis(input_fd):
"""
Realiza análisis léxico-gráfico y sintáctico de un programa Tiger.
@type input_fd: C{file}
@param input_fd: Descriptor de fichero del programa Tiger al cual se le debe
realizar el análisis sintáctico.
@rtype: C{LanguageNode}
@return: Como ... | 0d0481c8ac84ac1de1ff3f756f20f33bdc8a18e0 | 12,399 |
def create_fixxation_map(eye_x, eye_y, fixxation_classifier):
"""
:param eye_x: an indexable datastructure with the x eye coordinates
:param eye_y: an indexable datastructure with the y eye coordinates
:param fixxation_classifier: a list with values which indicate ... | bccf37777eb4d74fcb48a8316fc3d2695a209371 | 12,400 |
from re import T
from typing import Any
def with_metadata(obj: T, key: str, value: Any) -> T:
"""
Adds meta-data to an object.
:param obj: The object to add meta-data to.
:param key: The key to store the meta-data under.
:param value: The meta-data value to store.
:return: ob... | 566f9a2c1d083bbe44b86f0a8716e5bb44892b13 | 12,401 |
import hashlib
def checksum(uploaded_file: 'SimpleUploadedFile', **options):
"""
Function to calculate checksum for file,
can be used to verify downloaded file integrity
"""
hash_type = options['type']
if hash_type == ChecksumType.MD5:
hasher = hashlib.md5()
elif hash_type == Che... | 766a288a09791242029669a63734143cf8e2c007 | 12,402 |
import types
from typing import Optional
from typing import Tuple
def preceding_words(document: Document, position: types.Position) -> Optional[Tuple[str, str]]:
"""
Get the word under the cursor returning the start and end positions.
"""
lines = document.lines
if position.line >= len(lines):
... | 9d1078084045ac468639a903c74dd24e45ed1087 | 12,404 |
def check_gpu(gpu, *args):
"""Move data in *args to GPU?
gpu: options.gpu (None, or 0, 1, .. gpu index)
"""
if gpu == None:
if isinstance(args[0], dict):
d = args[0]
#print(d.keys())
var_dict = {}
for key in d:
var_dict[key] = ... | e4849a0a99dd6ca7baeacadc130e46006dd23c3a | 12,405 |
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up the SleepIQ config entry."""
conf = entry.data
email = conf[CONF_USERNAME]
password = conf[CONF_PASSWORD]
client_session = async_get_clientsession(hass)
gateway = AsyncSleepIQ(client_session=client_session)... | e4a4765113c7bc1e3c50290c72f3ca8196ba2bf2 | 12,406 |
def expansion(svsal,temp,pres,salt=None,dliq=None,dvap=None,
chkvals=False,chktol=_CHKTOL,salt0=None,dliq0=None,dvap0=None,
chkbnd=False,useext=False,mathargs=None):
"""Calculate seawater-vapour thermal expansion coefficient.
Calculate the thermal expansion coefficient of a seawater-vapour
parc... | 78c47eabf1d8e96c655652c3c8847b391264b05b | 12,407 |
import yaml
def yaml_to_dict(yaml_str=None, str_or_buffer=None):
"""
Load YAML from a string, file, or buffer (an object with a .read method).
Parameters are mutually exclusive.
Parameters
----------
yaml_str : str, optional
A string of YAML.
str_or_buffer : str or file like, opti... | 37aefe8e5b1bcc734626cbf7177e3b3dffda2416 | 12,408 |
from typing import Dict
from typing import Any
from typing import Tuple
def verify_block_arguments(
net_part: str,
block: Dict[str, Any],
num_block: int,
) -> Tuple[int, int]:
"""Verify block arguments are valid.
Args:
net_part: Network part, either 'encoder' or 'decoder'.
block: ... | cead023afcd72d1104e02b2d67406b9c47102589 | 12,409 |
from pathlib import Path
def ap_per_class(tp, conf, pred_cls, target_cls, plot=False, save_dir='.', names=()):
""" Compute the average precision, given the recall and precision curves.
Source: https://github.com/rafaelpadilla/Object-Detection-Metrics.
# Arguments
tp: True positives (nparray, nx1 ... | 9a41478f8b85b7d43ceeaaaf6425ece67672fc64 | 12,410 |
from typing import Optional
def frame_aligned_point_error(
pred_frames: r3.Rigids,
target_frames: r3.Rigids,
frames_mask: paddle.Tensor,
pred_positions: r3.Vecs,
target_positions: r3.Vecs,
positions_mask: paddle.Tensor,
length_scale: float,
l1_clamp_distance: Optional[float]... | fe66fea6d3d6ca418b64a2d18bdc75a6e10d6707 | 12,411 |
def remove_app_restriction_request(machine_id, comment):
"""Enable execution of any application on the machine.
Args:
machine_id (str): Machine ID
comment (str): Comment to associate with the action
Notes:
Machine action is a collection of actions you can apply on the machine, for ... | f4dd44cbef6194b9fcc301fb19bb5c3ba77ad269 | 12,412 |
import torch
def fix_bond_lengths(
dist_mat: torch.Tensor,
bond_lengths: torch.Tensor,
delim: int = None,
delim_value: float = ARBITRARILY_LARGE_VALUE) -> torch.Tensor:
"""
Replace one-offset diagonal entries with ideal bond lengths
"""
mat_len = dist_mat.shape[1]
b... | 1112ad7019c1cb82360ad6e784f7f8262dc7b4a0 | 12,413 |
def CommandToString(command):
    """Returns quoted command that can be run in bash shell."""
    quoted_parts = [cmd_helper.SingleQuote(part) for part in command]
    return ' '.join(quoted_parts)
import re
def version(output):
"""
`git --version` > git version 1.8.1.1
"""
output = output.rstrip()
words = re.split('\s+', output, 3)
if not words or words[0] != 'git' or words[1] != 'version':
raise WrongOutputError()
version = words[2]
parts = version.split('.')
try:
... | 21a16245cf7729b56588016f358667b210113eec | 12,416 |
def set_up_s3_encryption_configuration(kms_arn=None):
"""
Use the default SSE-S3 configuration for the journal export if a KMS key ARN was not given.
:type kms_arn: str
:param kms_arn: The Amazon Resource Name to encrypt.
:rtype: dict
:return: The encryption configuration for JournalS3Export.
... | dd8663c17e040423a08c772fd9ca64d25abd2850 | 12,417 |
import click
import json
def search(dataset, node, aoi, start_date, end_date, lng, lat, dist, lower_left, upper_right, where, geojson, extended, api_key):
"""
Search for images.
"""
node = get_node(dataset, node)
if aoi == "-":
src = click.open_file('-')
if not src.isatty():
... | 309a98cf3cfc81f12631bbc15ee0325d16385338 | 12,418 |
from typing import Callable
def _make_rnn_cell(spec: RNNSpec) -> Callable[[], tf.nn.rnn_cell.RNNCell]:
    """Return the graph template for creating RNN cells."""
    cell_factory = RNN_CELL_TYPES[spec.cell_type]
    return cell_factory(spec.size)
def run_baselines(env, seed, log_dir):
"""Create baselines model and training.
Replace the ppo and its training with the algorithm you want to run.
Args:
env (gym.Env): Environment of the task.
seed (int): Random seed for the trial.
log_dir (str): Log dir path.
Returns:
... | 2a020c5efe548d3722155569fbe69cd836efeebd | 12,420 |
def count_transitions(hypno):
"""
return the count for all possible transitions
"""
possible_transitions = [(0,1), (0,2), (0,4), # W -> S1, S2, REM
(1,2), (1,0), (1,3), # S1 -> W, S2, REM
(2,0), (2,1), (2,3), (2,4), # S2 -> W, S1, SWS, REM
... | 4a0dc835c2e72bf46ad8d3ebe33256f32ce2ede9 | 12,422 |
def mu_ref_normal_sampler_tridiag(loc=0.0, scale=1.0, beta=2, size=10,
random_state=None):
"""Implementation of the tridiagonal model to sample from
.. math::
\\Delta(x_{1}, \\dots, x_{N})^{\\beta}
\\prod_{n=1}^{N} \\exp(-\\frac{(x_i-\\mu)^2}{2\\sigma^2} ) dx_... | 75e7d46ec4816bbfa46443537f66cd27043b212d | 12,423 |
def get_pokemon(name:str) -> dict:
"""
Busca el pokémon dado su nombre en la base de datos y crea un diccionario con su información básica.
Paramétros:
name(str): Nombre del pokémon a buscar
Retorna:
Diccionario con la información básica del pokémon y sus evoluciones.
"""
try:
... | fa19704b2dfb6d2223a73264df6b5dc9e866fb8e | 12,425 |
def create_cluster(module, switch, name, node1, node2):
"""
Method to create a cluster between two switches.
:param module: The Ansible module to fetch input parameters.
:param switch: Name of the local switch.
:param name: The name of the cluster to create.
:param node1: First node of the clust... | a7f0a415d019b7fa3622d18da396879df566b365 | 12,426 |
from typing import List
import random
def random_terminals_for_primitive(
    primitive_set: dict, primitive: Primitive
) -> List[Terminal]:
    """ Return a list with a random Terminal for each required input to Primitive. """
    terminals = []
    for term_type in primitive.input:
        terminals.append(random.choice(primitive_set[term_type]))
    return terminals
def where_from_pos(text, pos):
    """
    Format a textual representation of the given position in the text.
    """
    line = line_from_pos(text, pos)
    col = col_from_pos(text, pos)
    return "%d:%d" % (line, col)
def generateHuffmanCodes (huffsize):
""" Calculate the huffman code of each length. """
huffcode = []
k = 0
code = 0
# Magic
for i in range (len (huffsize)):
si = huffsize[i]
for k in range (si):
huffcode.append ((i + 1, code))
code += 1
code <<=... | 60d5a2bd5524627dd5cc624dbb6b0ea09b8032d4 | 12,430 |
def one_hot_df(df, cat_col_list):
"""
Make one hot encoding on categoric columns.
Returns a dataframe for the categoric columns provided.
-------------------------
inputs
- df: original input DataFrame
- cat_col_list: list of categorical columns to encode.
outputs
... | d47978a551edbc11f93f9a2e87dbe1598e39161b | 12,431 |
from typing import List
from typing import Optional
import select
from typing import Dict
async def load_users_by_id(user_ids: List[int]) -> List[Optional[User]]:
"""
Batch-loads users by their IDs.
"""
query = select(User).filter(User.id.in_(user_ids))
async with get_session() as session:
... | ac9d0a16a40d478ed7fec590bf591aa0124270d9 | 12,432 |
def create_timetravel_model(for_model):
"""
Returns the newly created timetravel model class for the
model given.
"""
if for_model._meta.proxy:
_tt_model = for_model._meta.concrete_model._tt_model
for_model._tt_model = _tt_model
for_model._meta._tt_model = _tt_model
r... | 6a2557f3737ce014e14ba9dd36cd7a6d9c8c78b7 | 12,433 |
def public_route_server_has_read(server_id, user_id=None):
"""
check if current user has read access to the given server
"""
user = user_id and User.query.get_or_404(user_id) or current_user
server = DockerServer.query.get_or_404(server_id)
if server.has_group_read(user):
return Respons... | b9f812feac7c7e951f8c37178fd1dc2913601631 | 12,434 |
def isValidPublicAddress(address: str) -> bool:
"""Check if address is a valid NEO address"""
valid = False
if len(address) == 34 and address[0] == 'A':
try:
base58.b58decode_check(address.encode())
valid = True
except ValueError:
# checksum mismatch
... | a99f08c289f9d3136adf7e17697645131e785ecb | 12,435 |
def cost_to_go_np(cost_seq, gamma_seq):
"""
Calculate (discounted) cost to go for given cost sequence
"""
# if np.any(gamma_seq == 0):
# return cost_seq
cost_seq = gamma_seq * cost_seq # discounted reward sequence
cost_seq = np.cumsum(cost_seq[:, ::-1], axis=-1)[:, ::-1] # cost to ... | bea4de4cb32c3a346ebe8ea532c2c94589893e65 | 12,436 |
import re
def parse_args():
"""
Parses the command line arguments.
"""
# Override epilog formatting
OptionParser.format_epilog = lambda self, formatter: self.epilog
parser = OptionParser(usage="usage: %prog -f secret.txt | --file secret.txt | --folder allmysecrets", epilog=EXAMPLES)
parser.add_option("-p", "-... | 8f696bb8b419269766bceadb42b36f2a3e052e5b | 12,437 |
def x_to_ggsg(seq):
"""replace Xs with a Serine-Glycine linker (GGSG pattern)
seq and return value are strings
"""
if "X" not in seq:
return seq
replacement = []
ggsg = _ggsg_generator()
for aa in seq:
if aa != "X":
replacement.append(aa)
# restart li... | 53885ca76484f25a04ffc4220af0d7b0e56defd4 | 12,438 |
from typing import OrderedDict
def gini_pairwise(idadf, target=None, features=None, ignore_indexer=True):
"""
Compute the conditional gini coefficients between a set of features and a
set of target in an IdaDataFrame.
Parameters
----------
idadf : IdaDataFrame
target : str or l... | aa886c8d44e54597e86f0736ea383671bda2e13f | 12,439 |
def init_isolated_80():
    """Constant: init Isolated 80.

    Original Eqn: 0
    Units: person
    Limits: (None, None)
    Type: constant
    """
    return 0
def xcom_api_setup():
    """Instantiate and return the XCom API client."""
    return XComApi(API_CLIENT)
def zeros_tensor(*args, **kwargs):
"""Construct a tensor of a given shape with every entry equal to zero."""
labels = kwargs.pop("labels", [])
dtype = kwargs.pop("dtype", np.float)
base_label = kwargs.pop("base_label", "i")
return Tensor(np.zeros(*args, dtype=dtype), labels=labels,
... | 3baba23ba763afb51c715a85aa6f84c8c2d99c43 | 12,442 |
from typing import Tuple
def reg_split_from(
splitted_mappings: np.ndarray,
splitted_sizes: np.ndarray,
splitted_weights: np.ndarray,
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
"""
When creating the regularization matrix of a source pixelization, this function assumes each source pixel has be... | 545f0bd7345a8ab908d2338eaa7cb4c3562f4234 | 12,443 |
def get_initiator_IP(json_isessions):
    """
    pull the IP from the host session
    """
    # NOTE(review): writes into the module-level `session_array` mapping --
    # confirm that accumulating across calls is intended.
    print("-" * 20 + " get_initiator started")
    for sess in json_isessions['sessions']:
        session_array[sess['initiatorIP']] = sess['initiatorName']
    return session_array
def get_data_for_recent_jobs(recency_msec=DEFAULT_RECENCY_MSEC):
"""Get a list containing data about recent jobs.
This list is arranged in descending order based on the time the job
was enqueued. At most NUM_JOBS_IN_DASHBOARD_LIMIT job descriptions are
returned.
Args:
- recency_secs: the thres... | 032f27b55c70947a44cd6ed244291118e3660f77 | 12,445 |
def construct_outgoing_multicast_answers(answers: _AnswerWithAdditionalsType) -> DNSOutgoing:
    """Add answers and additionals to a DNSOutgoing."""
    outgoing = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA, multicast=True)
    _add_answers_additionals(outgoing, answers)
    return outgoing
import struct
def load_analog_binary_v1(filename):
"""Load analog traces stored in the binary format by Logic 1.2.0+
The format is documented at
https://support.saleae.com/faq/technical-faq/data-export-format-analog-binary
Returns (data, period) where data is a numpy array of 32-bit floats
of sh... | 5fcb97c4da367a8abeb12d7dc2852dbb7412956d | 12,447 |
import click
def setup_phantomjs():
"""Create and return a PhantomJS browser object."""
try:
# Setup capabilities for the PhantomJS browser
phantomjs_capabilities = DesiredCapabilities.PHANTOMJS
# Some basic creds to use against an HTTP Basic Auth prompt
phantomjs_capabilities[... | 5a8e536850e2a3c39adaf3228fc1a1f7ad4694dd | 12,448 |
def normal_pdf(x, mu, cov, log=True):
"""
Calculate the probability density of Gaussian (Normal) distribution.
Parameters
----------
x : float, 1-D array_like (K, ), or 2-D array_like (K, N)
The variable for calculating the probability density.
mu : float or 1-D array_like, (K, )
... | 4cdb573e1283a5740cb8d5b518b69c02bc013fe6 | 12,449 |
import sqlite3
from datetime import datetime
def get_quiz(id, user):
"""Get Quiz"""
conn = sqlite3.connect(DBNAME)
cursor = conn.cursor()
if user == 'admin' or user == 'fabioja':
cursor.execute(
"SELECT id, release, expire, problem, tests, results, diagnosis, numb from QUIZ where i... | 7e517e2ca84ebd320883950d4c3d6e572f82c226 | 12,450 |
def filesystem_entry(filesystem):
"""
Filesystem tag {% filesystem_entry filesystem %} is used to display a single
filesystem.
Arguments
---------
filesystem: filesystem object
Returns
-------
A context which maps the filesystem object to filesystem.
"""
return {'fi... | 3afbd0b8ee9e72ab8841ca5c5517396650d2a898 | 12,451 |
def haversine(lat1, lon1, lat2, lon2, units='miles'):
"""
Calculates arc length distance between two lat_lon points (must be in radians)
lat2 & and lon2 can be numpy arrays
units can be 'miles' or 'km' (kilometers)
"""
earth_radius = {'miles': 3959., 'km': 6371.}
a = np.square(np.s... | cadfa496f39e0a02115140d827bebfa6ff96a2dd | 12,452 |
from typing import Optional
def OptionalDateField(description='', validators=None):
    """A custom field that makes the DateField optional.

    :param description: label/description forwarded to ``DateField``.
    :param validators: optional list of extra validators; the ``Optional()``
        validator is always appended to a copy of it.
    :return: the configured ``DateField``.
    """
    # Copy the list: the original used a mutable default argument and
    # appended to it, so Optional() validators accumulated across calls
    # (and any caller-supplied list was mutated in place).
    validators = list(validators) if validators else []
    validators.append(Optional())
    field = DateField(description, validators)
    return field
def init_brats_metrics():
    """Initialize dict for BraTS Dice metrics.

    Returns a mapping from region name to a dict with the label ids that
    make up the region plus zeroed ``tp``/``tot`` counters:
        ET (enhancing tumor), TC (tumor core), WT (whole tumor).
    """
    metrics = {
        'ET': {'labels': [3]},
        'TC': {'labels': [1, 3]},
        'WT': {'labels': [1, 2, 3]},
    }
    # Iterate values() directly -- keys were unused in the original items() loop.
    for value in metrics.values():
        value.update({'tp': 0, 'tot': 0})
    return metrics
def add_rse(rse, issuer, vo='def', deterministic=True, volatile=False, city=None, region_code=None,
country_name=None, continent=None, time_zone=None, ISP=None,
staging_area=False, rse_type=None, latitude=None, longitude=None, ASN=None,
availability=None):
"""
Creates a new R... | 3b41e227ea64c5f03d80ae8734c29b24f9c3bed9 | 12,455 |
from typing import Dict
from typing import Tuple
from typing import List
def multi_graph_partition(costs: Dict, probs: Dict, p_t: np.ndarray,
idx2nodes: Dict, ot_hyperpara: Dict,
weights: Dict = None,
predefine_barycenter: bool = False) -> ... | a3743cd9cc9e7f9a10eb84992fb74e7fe57f5792 | 12,456 |
def TDataStd_BooleanArray_Set(*args):
"""
* Finds or creates an attribute with the array.
:param label:
:type label: TDF_Label &
:param lower:
:type lower: int
:param upper:
:type upper: int
:rtype: Handle_TDataStd_BooleanArray
"""
return _TDataStd.TDataStd_BooleanArray_Set(*ar... | c458a1182474432d2df049ae3126a6b6b2b49a8e | 12,457 |
def py_list_to_tcl_list(py_list):
""" Convert Python list to Tcl list using Tcl interpreter.
:param py_list: Python list.
:type py_list: list
:return: string representing the Tcl string equivalent to the Python list.
"""
py_list_str = [str(s) for s in py_list]
return tcl_str(tcl_interp_g.e... | 7f42044b8a0b28089abf453e7a1b65d5cb1fb399 | 12,458 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.