content stringlengths 35 762k | sha1 stringlengths 40 40 | id int64 0 3.66M |
|---|---|---|
def analyze(model, Y, print_to_console=True):
"""
Perform variance-based sensitivty analysis for each process.
Parameters
----------
model : object
The model defined in the sammpy
Y : numpy.array
A NumPy array containing the model outputs
print_to_console : bool
Prin... | 119c00becb1c3b507e35cbcecd98762fcb924521 | 11,767 |
import copy
def GetMorganFingerprint(mol, atomId=-1, radius=2, fpType='bv', nBits=2048, useFeatures=False,
**kwargs):
"""
Calculates the Morgan fingerprint with the environments of atomId removed.
Parameters:
mol -- the molecule of interest
radius -- the maximum radius
... | 9fd8077c4f35c83e8996a53981f99baa0e4510a6 | 11,768 |
import math
def _rgb2lab(rgb):
"""Convert an RGB integer to Lab tuple"""
def xyzHelper(value):
"""Helper function for XYZ colourspace conversion"""
c = value / 255
if c > 0.0445:
c = (c + 0.055) / 1.055
c = math.pow(c, 2.4)
else:
c /= 12.92
... | a663370e3908daa9ba795bb0dc2ecb945653221e | 11,769 |
import torch
def biband_mask(n: int, kernel_size: int, device: torch.device, v=-1e9):
"""compute mask for local attention with kernel size.
Args:
n (torch.Tensor): the input length.
kernel_size (int): The local attention kernel size.
device (torch.device): transformer mask to the devi... | ab3a5f25f9fe0f83579d0492caa2913a13daa2d7 | 11,771 |
def containsIfElse(node):
""" Checks whether the given node contains another if-else-statement """
if node.type == "if" and hasattr(node, "elsePart"):
return True
for child in node:
if child is None:
pass
# Blocks reset this if-else problem so we ignore them
# ... | 255f58fdf4abe69f10e9b433562ade12cb0bc215 | 11,772 |
def get_gitlab_scripts(data):
"""GitLab is nice, as far as I can tell its files have a
flat hierarchy with many small job entities"""
def flatten_nested_string_lists(data):
"""helper function"""
if isinstance(data, str):
return data
elif isinstance(data, list):
... | ad73c1ea6d4edcbce51eea18de317d7ab2d5e536 | 11,774 |
import new
def method(cls):
    """Return a decorator that attaches the decorated function to *cls* as a method.

    The decorated name is rebound to ``None`` in the defining scope (the
    original behavior), so the function is reachable only via the class.

    Args:
        cls: the class to extend.

    Returns:
        A decorator taking the function to install.
    """
    def _wrap(f):
        # Use setattr: assigning into ``cls.__dict__`` directly raises
        # TypeError because a class __dict__ is a read-only mappingproxy.
        # Plain functions are descriptors, so no instancemethod wrapper
        # (the removed Python 2 ``new`` module) is needed.
        setattr(cls, f.__name__, f)
        return None
    return _wrap
def maps_from_echse(conf):
"""Produces time series of rainfall maps from ECHSE input data and catchment shapefiles.
"""
# Read sub-catchment rainfall from file
fromfile = np.loadtxt(conf["f_data"], dtype="string", delimiter="\t")
if len(fromfile)==2:
rowix = 1
elif len(fromfile)>2:
... | 31e09c5bed2f7fe3e0d750a59137c05ef987dc2e | 11,776 |
def utility_assn(tfr_dfs):
"""Harvest a Utility-Date-State Association Table."""
# These aren't really "data" tables, and should not be searched for associations
non_data_dfs = [
"balancing_authority_eia861",
"service_territory_eia861",
]
# The dataframes from which to compile BA-Uti... | 6b0357f1d7024bcfddac6981d968e67e5dbeba51 | 11,777 |
def is_smtp_enabled(backend=None):
    """
    Return True when the mail backend in use is SMTP based.

    If *backend* is None the currently configured backend is looked up.
    """
    effective = get_mail_backend() if backend is None else backend
    return effective not in settings.SENTRY_SMTP_DISABLED_BACKENDS
import requests
def get_presentation_requests_received(tenant: str, state: str = ''):
"""
state: must be in ['propsal-sent', 'proposal-received', 'request-sent', 'request-received', 'presentation-sent', 'presentation-received', 'done', 'abondoned']
"""
possible_states = ['', 'propsal-sent', 'proposal-... | 1157712b8e4df1b269892a2d3ec15dae366d8d71 | 11,779 |
def generate_round():
    """
    Build one game round.

    Returns:
        question: the question text shown to the user
        answer: the correct answer for that question
    """
    total_num, random_num = generate_numbers()
    return " ".join(total_num), str(random_num)
from datetime import datetime
def get_basic_activity():
"""
A basic set of activity records for a 'Cohort 1' and CoreParticipant participant.
"""
return [
{'timestamp': datetime(2018, 3, 6, 0, 0), 'group': 'Profile', 'group_id': 1,
'event': p_event.EHRFirstReceived},
{'timesta... | 4ee13cf35326d6c09fb4174f0e4217b17a34a545 | 11,781 |
def bad_multi_examples_per_input_estimator_misaligned_input_refs(
export_path, eval_export_path):
"""Like the above (good) estimator, but the input_refs is misaligned."""
estimator = tf.estimator.Estimator(model_fn=_model_fn)
estimator.train(input_fn=_train_input_fn, steps=1)
return util.export_model_and_e... | c08fac8d0ae8679db56128dc8d4a36a5492a6737 | 11,782 |
def caption_example(image):
"""Convert image caption data into an Example proto.
Args:
image: A ImageMetadata instance.
Returns:
example: An Example proto with serialized tensor data.
"""
# Collect image object information from metadata.
image_features, positions = read_object(image.objects, image... | f989774a0d3321717cbb09f6342a6c86f5433c54 | 11,785 |
def GetAttributeTableByFid(fileshp, layername=0, fid=0):
"""
GetAttributeTableByFid
"""
res = {}
dataset = ogr.OpenShared(fileshp)
if dataset:
layer = dataset.GetLayer(layername)
feature = layer.GetFeature(fid)
geom = feature.GetGeometryRef()
res["geometry"] = geo... | 42b845ae5b1a3c9976262cc37f5854b80aa7b290 | 11,786 |
def get_root_folder_id(db, tree_identifier, linked_to, link_id):
"""Get id of the root folder for given data category and profile or user group
Args:
db (object): The db object
tree_identifier (str): The identifier of the tree
linked_to (str): ['profile'|'group']
link_id (int): ... | 7378ec4852d90913282109dcce5d8168613c835e | 11,787 |
def str_cell(cell):
"""Get a nice string of given Cell statistics."""
result = f"-----Cell ({cell.x}, {cell.y})-----\n"
result += f"sugar: {cell.sugar}\n"
result += f"max sugar: {cell.capacity}\n"
result += f"height/level: {cell.level}\n"
result += f"Occupied by Agent {cell.agent.id if cell.agen... | d62801290321d5d2b8404dbe6243f2f0ae03ecef | 11,788 |
def get_idx_pair(mu):
    """Return [first, last] indices of the nonzero entries of ``mu``."""
    nonzero = np.nonzero(mu)[0]
    return [nonzero[0], nonzero[-1]]
def zeeman_transitions(ju, jl, type):
""" Find possible mu and ml for valid ju and jl for a given transistion
polarization
Parameters:
ju (scalar): Upper level J
jl (scalar): Lower level J
type (string): "Pi", "S+", or "S-" for relevant polarization type
Returns:
tu... | 446d9683da6cc027003b2ec755d8828ccb01db5d | 11,790 |
def get_reachable_nodes(node):
    """
    Collect every node of the tree rooted at *node*, root included,
    via an explicit depth-first traversal.
    """
    visited = []
    pending = [node]
    while pending:
        current = pending.pop()
        visited.append(current)
        pending.extend(current.get_children())
    return visited
def imagetransformer_sep_channels_16l_16h_imgnet_lrg_loc():
  """Separate rgb embeddings, 16 layers, local attention."""
  hparams = imagetransformer_sep_channels_12l_16h_imagenet_large()
  # Independent overrides on top of the 12-layer base config.
  hparams.block_length = 256
  hparams.batch_size = 1
  hparams.local_attention = True
  hparams.num_hidden_layers = 16
  return hparams
def torsion_coordinate_names(zma):
""" z-matrix torsional coordinate names
(currently assumes torsional coordinates generated through x2z)
"""
name_dct = standard_names(zma)
inv_name_dct = dict(map(reversed, name_dct.items()))
geo = automol.geom.without_dummy_atoms(geometry(zma))
tors_names... | a7820e1619d4a73260ec4d9255b78cdec2263a55 | 11,793 |
from typing import List
from typing import Dict
def extract_other_creditors_d(
page: pdfplumber.pdf.Page, markers: List[Dict], creditors: List
) -> None:
"""Crop and extract address, key and acct # from the PDf
:param page: PDF page
:param markers: The top and bottom markers
:return: Address, key... | cb66185c68c7ab3febeee611e4384b839b42417e | 11,794 |
from typing import List
from pathlib import Path
def get_dicdirs(mecab_config: str = "mecab-config") -> List[Path]:
"""Get MeCab dictionary directories.
Parameters
----------
mecab_config : str
Executable path of mecab-config, by default "mecab-config".
Returns
-------
List[Path]... | 26d7969c072a9aa0668db31c296ee930b567049f | 11,795 |
def new_instance(settings):
"""
MAKE A PYTHON INSTANCE
`settings` HAS ALL THE `kwargs`, PLUS `class` ATTRIBUTE TO INDICATE THE CLASS TO CREATE
"""
settings = set_default({}, settings)
if not settings["class"]:
Log.error("Expecting 'class' attribute with fully qualified class name")
... | bf32bd41105052816a9a54efb71143f2a250502f | 11,796 |
def get_type(k):
"""Takes a dict. Returns undefined if not keyed, otherwise returns the key type."""
try:
v = {
'score': '#text',
'applicant': 'str',
'applicant_sort': 'str',
'author': 'str',
'author_sort': 'str',
'brief': 'boo... | fec3b7e04531dd202c46366f096f687160c68320 | 11,798 |
def al(p):
"""
Given a quaternion p, return the 4x4 matrix A_L(p)
which when multiplied with a column vector q gives
the quaternion product pq.
Parameters
----------
p : numpy.ndarray
4 elements, represents quaternion
Returns
-------
numpy.ndarray
4x4 matrix des... | 1e4803bffd75fb841b723d504261c51019d5d45e | 11,799 |
from datetime import datetime
def build_data_table(row, fields_to_try):
"""
Create HTML table for one row of data
If no fields are valid, returns empty string
"""
th_class = 'attribute_heading'
td_class = 'attribute_value'
field_names = pd.read_csv('data/fiel... | 812a7acbe33296fc30aef4b27e427c63c6fc63bb | 11,800 |
def time_entry_reader(date, configuration):
"""Read the entries and return a list of entries that are apart of the date provided."""
parser = YAML(typ='rt')
date = date.date()
try:
with open(configuration['filename'], 'r') as data_file:
time_entries = parser.load(data_file).get('rec... | 5e01246d3fae1d8eaf53cbf1dec40f488ddfd0d4 | 11,801 |
import asyncio
async def test_send_write(event_loop):
"""Check feed-receive scenarios used in the library."""
STREAM_ID = 'whatever'
DATA = b'data'
def make_writer():
queue = asyncio.Queue()
async def writer(id, data):
assert id == STREAM_ID
await queue.put(da... | 669d25646aecd891547c3cbc40f7215e1c32c08b | 11,802 |
def width_series(value_series, outer_average_width=5, max_value=None, method='linear'):
"""
:param value_series: the pd.Series that contain the values
:param outer_average_width: the average width of the width series to return
:param max_value: value to use as the maximum when normalizing the series (to... | 0efefbe0d1e7024293e0f6a8a39b7fca2f5cf41b | 11,803 |
def unroll_upper_triangular(matrix):
"""Converts square matrix to vector by unrolling upper triangle."""
rows, cols = matrix.shape
assert rows == cols, "Not a square matrix."
row_idx, col_idx = np.triu_indices(rows, 1)
unrolled = []
for i, j in zip(row_idx, col_idx):
unrolled.append(mat... | b62725a178d569e2812ad48c826b8a7a864c04b6 | 11,804 |
from typing import Sequence
from typing import Any
from typing import Optional
from typing import Tuple
def fill_tuples(
tuples: Sequence[Any],
length: Optional[int] = None,
repeat: bool = False,
fill_method: str = 'bfill',
) -> Sequence[Tuple]:
"""Fill tuples so they are all the same length.
... | 80766f17b78a3fba0dc49ef95131564ce7b1e563 | 11,805 |
def intersect(connection, items, ttl=30, execute=True):
    """Set intersection via Redis SINTERSTORE.

    NOTE(review): the original docstring said "union" (并集), but
    ``'sinterstore'`` computes an intersection; documented as such here.

    Args:
        connection: Redis connection/pipeline handed through to ``_set_common``.
        items: the set keys to intersect.
        ttl: expiry in seconds, presumably applied to the destination key by
            ``_set_common`` -- TODO confirm.
        execute: whether to run immediately; semantics defined by ``_set_common``.
    """
    return _set_common(connection, 'sinterstore', items, ttl, execute)
def batch_decode(raw_logits, use_random, decode_times):
"""
tbd
"""
size = (raw_logits.shape[1] + 7) // 8
logit_lists = []
for i in range(0, raw_logits.shape[1], size):
if i + size < raw_logits.shape[1]:
logit_lists.append(raw_logits[:, i: i + size, :])
else:
... | fcde630681d4455e717b7b3b19b098b72fb8a64c | 11,807 |
import pathlib
import pkg_resources
import yaml
def _load_schemata(obj_type: str) -> dict:
"""Load the schemata from the package, returning merged results of
other schema files if referenced in the file loaded.
:raises: FileNotFoundError
"""
schema_path = pathlib.Path(pkg_resources.resource_file... | a737420b85bd78cf2210c8d12794eaaa4eb4ee90 | 11,808 |
def waitfor(msg, status = '', spinner = None, log_level = log_levels.INFO):
"""waitfor(msg, status = '', spinner = None) -> waiter
Starts a new progress indicator which includes a spinner
if :data:`pwnlib.term.term_mode` is enabled. By default it
outputs to loglevel :data:`pwnlib.log_levels.INFO`.
... | 6dc229cff86ecdbdccbda71239eafdc878c4520e | 11,809 |
def f(i):
    """Shift *i* up by two.

    Args:
        i (int): value to shift.

    Returns:
        int: ``i`` plus two.
    """
    shifted = i + 2
    return shifted
def show_forecast(cmp_df, num_predictions, num_values, title):
"""Visualize the forecast."""
def create_go(name, column, num, **kwargs):
points = cmp_df.tail(num)
args = dict(name=name, x=points.index, y=points[column], mode='lines')
args.update(kwargs)
return go.Scatter(**a... | 669300c4c57890d76153fe1a419037eada2fcbe6 | 11,811 |
def get_concepts_from_kmeans(tfidf, kmeans):
"""Get kmeans cluster centers in term space.
Parameters
----------
tfidf : TfidfVectorizer
Fitted vectorizer with learned term vocabulary.
kmeans : KMeans
KMeans fitted to document-term matrix returned by tfidf.
Returns
-... | 69734b194c5d71c8e93347845f83264c832820d6 | 11,813 |
def streams_to_dataframe(streams, imcs=None, imts=None, event=None):
"""Extract peak ground motions from list of processed StationStream objects.
Note: The PGM columns underneath each channel will be variable
depending on the units of the Stream being passed in (velocity
sensors can only generate PGV) ... | 8be5968ee513da80910df227156e2ceb02624941 | 11,814 |
def timestamp_diff(time_point_unit: TimePointUnit, time_point1, time_point2) -> Expression:
"""
Returns the (signed) number of :class:`~pyflink.table.expression.TimePointUnit` between
time_point1 and time_point2.
For example,
`timestamp_diff(TimePointUnit.DAY, lit("2016-06-15").to_date, lit("2016-0... | 711c41adf3472b2dd0ada51160aefca432ed2bc6 | 11,815 |
import tqdm
def plot_solar_twins_results(star_postfix=''):
"""Plot results for 17 pairs with q-coefficients for solar twins"""
def format_pair_label(pair_label):
"""Format a pair label for printing with MNRAS ion format.
Parameters
----------
pair_label : str
A p... | c4bd9120891d435dd631394b7b67255bd75fc8d2 | 11,816 |
def equalize(pil_img: Image.Image, level: float):
    """Histogram-equalize an image.

    .. seealso:: :func:`PIL.ImageOps.equalize`.

    Args:
        pil_img (Image.Image): The image to equalize.
        level (float): Ignored; accepted only so all augmentations share
            one signature.
    """
    del level  # intentionally unused
    return ImageOps.equalize(pil_img)
def lightfm_trainer(
train: np.ndarray, loss: str, n_components: int, lam: float
) -> None:
"""Train lightfm models."""
# detect and init the TPU
tpu = tf.distribute.cluster_resolver.TPUClusterResolver.connect()
# instantiate a distribution strategy
tpu_strategy = tf.distribute.experimental.TPU... | 6776b2fbcb3039cc1efb95cec0b7562b959df0fd | 11,818 |
def get_wspd_ts(path, storm, res, shpmask):
"""
Extracts the U and V component and returns the wind speed timeseries of storm_dict
Arguments:
path (str): Path containing data to load
storm (str): Name of storm
res (str): Resolution of data
Returns:
Pandas dataframe with... | 414d39105a14a5ac2d335a3146a4e6d462f9760a | 11,819 |
def _read_output_file(path):
"""Read Stan csv file to ndarray."""
comments = []
data = []
columns = None
with open(path, "rb") as f_obj:
# read header
for line in f_obj:
if line.startswith(b"#"):
comments.append(line.strip().decode("utf-8"))
... | 62b312db851386900cae1643d3eb75896f45cde1 | 11,820 |
def _transform_data(raw_df, cols_config):
"""
Applies required transformations to the raw dataframe
:returns : Trasformed dataframe ready to be exported/loaded
"""
# Perform column and dtype checks
if check_columns(raw_df, cols_config):
df = raw_df
else:
logger.warning("Inco... | 3874ce8bc38d0b75f037538919b1c649d8a6b8b9 | 11,821 |
import math
def mean_and_std(values):
    """Return the mean and sample standard deviation of *values*.

    Uses the (n - 1) denominator, i.e. Bessel's correction; *values*
    must therefore contain at least two elements.
    """
    n = len(values)
    mean = sum(values) / n
    sq_sum = sum((v - mean) ** 2 for v in values)
    std = math.sqrt((1.0 / (n - 1)) * sq_sum)
    return mean, std
from typing import Union
from typing import List
def blacken(
color: Color, amount: FloatOrFloatIterable
) -> Union[Color, List[Color]]:
"""
Return a color or colors amount fraction or fractions of the way from
`color` to `black`.
:param color: The existing color.
:param amount: The propo... | 26c74556b8d73692ec4afbb763221c508c6a941b | 11,824 |
def llf_gradient_sigma_neq_gamma(history, sum_less_equal=True):
"""
Calculate the gradient of the log-likelihood function symbolically.
Parameters
----------
sum_less_equal : bool, default: True
This arg is passed to :meth:`self.llf_sigma_eq_gamma`.
Returns
-------
gradient : s... | b3efce2413b5f88e4c7b76117f4f668a5f386b30 | 11,825 |
def selection_support_df(df, combinations, min_support):
"""
selection combinations with support
Parameters
----------
df : pandas.DataFrame
data to be selected.
for example :
= | banana | mango | apple |
| 1 | 1 | 1 |
... | 4decb66dfe913a62e0b3b67d9a61a6941ec6ff76 | 11,826 |
from trie import TrieTree
def empty_trie_tree():
    """Fixture yielding a freshly constructed, empty trie tree."""
    tree = TrieTree()
    return tree
def bind_context_to_node(context, node):
"""Give a context a boundnode
to retrieve the correct function name or attribute value
with from further inference.
Do not use an existing context since the boundnode could then
be incorrectly propagated higher up in the call stack.
:param context: Cont... | 92ce7a9d155e621e54ad90f5aefb49bda4ea60df | 11,828 |
def get_weight_matrix(file_handle):
"""
Read each line in file_handle and return the weight matrix as a dict,
in which each key is the original node name, and each value is a nested
dict, whose keys are gene systematic names, and values are weights.
"""
weight_matrix = dict()
for line_num, ... | 08773c5ff852814855e4a042bb79acc82d09b067 | 11,829 |
def get_uptime():
"""
Get uptime
"""
try:
with open('/proc/uptime', 'r') as f:
uptime_seconds = float(f.readline().split()[0])
uptime_time = str(timedelta(seconds=uptime_seconds))
data = uptime_time.split('.', 1)[0]
except Exception as err:
data =... | fc783a24b7239c43b69c44ea30b62465a775761d | 11,830 |
import json
def measure_list_for_upcoming_elections_retrieve_api_view(request): # measureListForUpcomingElectionsRetrieve
"""
Ask for all measures for the elections in google_civic_election_id_list
:param request:
:return:
"""
status = ""
google_civic_election_id_list = request.GET.getlis... | 449bcde309c6c224521fcf5f0acc6de427d30f55 | 11,831 |
from fedelemflowlist.globals import flow_list_fields
def get_required_flowlist_fields():
"""
Gets required field names for Flow List.
:return:list of required fields
"""
required_fields = []
for k, v in flow_list_fields.items():
if v[1]['required']:
required_fields.append(... | c2581cde45e9aad0c09620f98557a777a5d89bdb | 11,833 |
def sparse_table_function(*, index, data) -> callable:
"""
The very simplest Python-ish "sparse matrix", and plenty fast on modern hardware, for the
size of tables this module will probably ever see, is an ordinary Python dictionary from
<row,column> tuples to significant table entries. There are better ways if you... | a1c3f11f5fd9c2ba4d048a69271db48bc61b26df | 11,834 |
import asyncio
async def _get_db_connection() -> asyncpg.Connection:
"""
Initialise database connection.
On failure, retry multiple times. When the DB starts in parallel with the app (with Compose),
it may not yet be ready to take connections.
"""
log.info("Creating DB connection")
n_atte... | 9f2e83b4b98f0d292b352682bf380ff4921e5fba | 11,835 |
def get_view_content(view):
    """Return the full text of *view* as a string."""
    def read_all():
        # Region spanning the whole buffer; must run on Sublime's main thread.
        return view.substr(sublime.Region(0, view.size()))
    return utils.execute_in_sublime_main_thread(read_all)
def signed_byte8(x: IntVar) -> Int8:
    """Implementation for `SBYTE8`.

    Delegates to ``signed_byte_n`` with a fixed width of 8, presumably
    interpreting *x* as an 8-bit signed value -- TODO confirm against
    ``signed_byte_n``'s contract.
    """
    return signed_byte_n(x, 8)
import numpy
import itertools
def join(zma1, zma2, join_key_mat, join_name_mat, join_val_dct):
""" join two z-matrices together
"""
syms1 = symbols(zma1)
syms2 = symbols(zma2)
natms1 = count(zma1)
natms2 = count(zma2)
key_mat1 = numpy.array(key_matrix(zma1))
key_mat2 = numpy.array(key_... | de55377d436ce50d8c60c97992e940e53a7c9ecc | 11,838 |
def multi_to_weighted(G: nx.MultiDiGraph):
"""
Converts a multidigraph into a weighted digraph.
"""
nG = nx.DiGraph(G)
# nG.add_nodes_from(G.nodes)
nG.name = G.name + "_weighted_nomulti"
edge_weights = {(u, v): 0 for u, v, k in G.edges}
for u, v, key in G.edges:
edge_weights[(u, ... | 0dd14a02c923c8c238c82399f51701639dc82756 | 11,839 |
def RetentionInDaysMatch(days):
"""Test whether the string matches retention in days pattern.
Args:
days: string to match for retention specified in days format.
Returns:
Returns a match object if the string matches the retention in days
pattern. The match object will contain a 'number' group for th... | 0b2cded5d01bcb294df1fab956dbe54c9c5e03ae | 11,840 |
from typing import Tuple
import re
def _extract_result_details(pipx_output: str) -> Tuple[str, str, str]:
""" Extracts name and version from pipx's stdout """
match = re.search(r'installed package(.*),(.*)\n.*\n.*?-(.*)', pipx_output)
if match:
package, python_version, plugin_name = map(str.strip,... | ae7a588bbb60b47aa889a4dcb7421a55b55b8e2f | 11,841 |
def get_pwr_SXT(sxt_los, plasma, emiss, num_pts=100, labels=labels_full):
"""
"""
pwr_int = {}
for ll in labels:
# Get the appropriate database label
filt = ll.split()[1]
pix_los = sxt_los[ll]
# Get the spatial points along the line of sight
num_pixels = ... | ba1a1831f5fd2ee18ce95214b696d37c2e33b456 | 11,842 |
import requests
import json
def get_mstp_port(auth):
"""
Function to get list of mstp port status
:param auth: AOSSAuth class object returned by pyarubaoss.auth
:return list of mstp port status
:rtype dict
"""
url_mstp_port = "http://" + auth.ipaddr + "/rest/"+auth.version+"/mstp/port"
... | aa74f2d9c5b04f7744009c2862b0f1bcff57a6dc | 11,843 |
from typing import Union
from typing import Callable
def touch(v: Union[Callable, str], default=None):
"""
Touch a function or an expression `v`, see if it causes exception.
If not, output the result, otherwise, output `default`.
Note:
Use `default = pycamia.functions.identity_function`... | 90b5395eb68daadb06b1bb29a52a2ca11f34353d | 11,846 |
def q_mult(q1, q2):
    """Hamilton product of two quaternions given as (w, x, y, z) tuples."""
    aw, ax, ay, az = q1
    bw, bx, by, bz = q2
    return (
        aw * bw - ax * bx - ay * by - az * bz,
        aw * bx + ax * bw + ay * bz - az * by,
        aw * by + ay * bw + az * bx - ax * bz,
        aw * bz + az * bw + ax * by - ay * bx,
    )
import math
def round_repeats(repeats, global_params):
"""Calculate module's repeat number of a block based on depth multiplier.
Use depth_coefficient of global_params.
Args:
repeats (int): num_repeat to be calculated.
global_params (namedtuple): Global params of the model.
Returns:... | 9a26e19663c7ecf4b6f746b1900a9afe46311770 | 11,848 |
def estimate_label_width(labels):
"""
Given a list of labels, estimate the width in pixels
and return in a format accepted by CSS.
Necessarily an approximation, since the font is unknown
and is usually proportionally spaced.
"""
max_length = max([len(l) for l in labels])
return "{0}px".f... | 1e22ad939973373a669841dd5cc318d6927249ca | 11,849 |
import six
def ssh_encrypt_text(ssh_public_key, text):
"""Encrypt text with an ssh public key.
If text is a Unicode string, encode it to UTF-8.
"""
if isinstance(text, six.text_type):
text = text.encode('utf-8')
try:
pub_bytes = ssh_public_key.encode('utf-8')
pub_key = ser... | 2a5bfc62e08475dcd7f33ba25cf3fa76c43988a2 | 11,852 |
def naildown_entity(entity_class, entity_dict, entity, state, module, check_missing=None):
""" Ensure that a given entity has a certain state """
changed, changed_entity = False, entity
if state == 'present_with_defaults':
if entity is None:
changed, changed_entity = create_entity(entity... | 3c5b7e8b026d4ea8444625fa7a01b43567973138 | 11,854 |
def get_all_admins():
    """
    Return a queryset of every user who is both an admin and active.
    """
    return User.objects.filter(is_active=True, is_admin=True)
def htx_numpy(h, x):
""" Convolution of reversed h with each line of u. Numpy implementation.
Parameters
----------
h : array, shape (n_time_hrf), HRF
x : array, shape (n_samples, n_time), neural activity signals
Return
------
h_conv_x : array, shape (n_samples, n_time_valid), convolve... | 306608179eb52f4f70e0f03da75283404201a044 | 11,856 |
import torch
def get_mask_results(probs, boxes, im_w, im_h, pixil_score_th=0.25):
"""
Args:
probs (Tensor)
boxes (ImageContainer)
Returns:
rles (list[string])
mask_pixel_scores (Tensor)
"""
device = probs.device
N, _, H, W = probs.shape
num_chunks = N if d... | a3c9823f8578e63f7a39fe25791c1b0369640f26 | 11,857 |
from typing import Dict
from typing import Any
def openapi() -> Dict[str, Any]:
"""
>>> client = app.test_client()
>>> response = client.get("/openapi.json")
>>> response.get_json()['openapi']
'3.0.0'
>>> response.get_json()['info']['title']
'Chapter 13. Example 2'
"""
# See domino... | 1671022e42c6bd8cc75aa66c3259f5094fb05696 | 11,858 |
def koliko_izdelkov_v_skladiscu():
    """
    Return the number of distinct products in the warehouse.

    Counts rows of ``izdelki`` whose quantity is non-null -- presumably
    "currently stocked"; confirm against the schema.

    >>> koliko_izdelkov_v_skladiscu()
    18
    """
    poizvedba = """
        SELECT COUNT(*)
        FROM izdelki
        WHERE kolicina IS NOT null
    """
    # Single-row, single-column result; unpack the lone count.
    st, = conn.execute(poizvedba).fetchone()
    return st
import math
import random
def create_spline(curve_data, s_type='NURBS', len_nodes=100, spline_id=0, splines_count=1, bud_position=None):
"""
Create a spline of given type with n nodes to form a path made of sin and cos
"""
spline = curve_data.splines.new(type=s_type)
# Regular spline points n... | cce26da44f9da60638b3e46b89cf87c49ad5c3d6 | 11,860 |
def get_composed_jumps(jumps, levels, win, verbose=0):
"""
Take the output of get_jumps (from landmarks)
Compose the jumps, return them as an array of array.
If intermediate=True, we return the jumps for intermediary levels,
not just the requested one.
We use a temporary sqlite3 connection to wo... | 3305d2efed23eed269b3483a9619e50ad39826de | 11,862 |
import itertools
def calculate_agreement_stv(agreement_dictionary, turker_accuracies):
"""
Inter agreement with most accurate chair vote
Args:
agreement_dictionary: holding sentence annotation records - 9 from non-experts and 1 expert
sentence -> list of annotations (size settings.RESPON... | 3253505366edffea1cc7c1302b082dbd85668ad2 | 11,863 |
def count_num_peps(filename):
    """
    Return how many peptide sequences (">" header lines) a FASTA file holds.
    """
    with open(filename) as handle:
        return sum(1 for line in handle if line.startswith(">"))
import re
def get_variables(examples):
    """Split each code string into its alphanumeric 'words' (variables).

    A variable is any maximal run of word characters; non-word characters
    act as separators and the words are re-joined with single spaces.
    """
    joined = []
    for text in examples["text"]:
        joined.append(" ".join(re.split(r"\W+", text)))
    return {"variables": joined}
def _stored_data_paths(wf, name, serializer):
"""Return list of paths created when storing data"""
metadata = wf.datafile(".{}.alfred-workflow".format(name))
datapath = wf.datafile(name + "." + serializer)
return [metadata, datapath] | 5f01d804db9f1848cc13e701a56e51c06dccdb31 | 11,866 |
def ascii_to_walls(char_matrix):
"""
A parser to build a gridworld from a text file.
Each grid has ONE start and goal location.
A reward of +1 is positioned at the goal location.
:param char_matrix: Matrix of characters.
:param p_success: Probability that the action is successful.
:param see... | 9f6520625623bd446923e374a1a5a557038dfd48 | 11,867 |
def mock_sd(nresp=1):
"""Fake Stackdriver Monitoring API response for the ListTimeSeries endpoint.
Args:
nresp (int): Number of responses to add to response.
Returns:
ChannelStub: Mocked gRPC channel stub.
"""
timeserie = load_fixture('time_series_proto.json')
response = {'next... | cbc5659c02a73048f0263803562a130ac475bcb2 | 11,868 |
def cohesion_separation(chroms, doc):
    """Score clusters by combining cohesion and separation into one measure."""
    coh_score = cohesion(chroms, doc)
    sep_score = separation(chroms, doc)
    return (1 + sigmoid(coh_score)) ** sep_score
import re
def get_number_location(
input : str,
):
# endregion get_number_location header
# region get_number_location docs
"""
get the string indices of all numbers that occur on the string
format example: [ ( 0, 1 ), ( 4, 6 ), ( 9, 9 ) ]
both begin and end are inclusive, in contrast with t... | de035f640dd33dc96b4072bdc925efc649285121 | 11,871 |
def update_object(obj, new_values):
"""update an object attributes from a supplied dictionary"""
# avoiding obj.__dict__.update(new_values) as it will set a new attribute if it doesn't exist
for k, v in new_values.items():
if hasattr(obj, k):
try:
setattr(obj, k, v)
... | 5e916b16301c6e733b2d98b32c175bb202529503 | 11,872 |
def subpathNeedsRefresh(modTimes, ufoPath, *subPath):
"""
Determine if a file needs to be refreshed.
Returns True if the file's latest modification time is different
from its previous modification time.
"""
previous = modTimes.get(subPath[-1])
if previous is None:
return True
lat... | 046c37ca801a74bb83bb45c1b1d0510e15cba6c4 | 11,873 |
def resxy_(x: float, y: float, /) -> Resolution:
    """Build a :class:`Resolution` from coordinates supplied in X,Y order."""
    return Resolution(y=y, x=x)
def group_toggle_modules(request, group):
"""Enable or disable modules.
"""
if request.method != 'POST':
raise Http404
referer = request.META.get('HTTP_REFERER', None)
next = SITE_ROOT if referer is None else referer
username = request.user.username
group_wiki = request.POST.ge... | 4844d8203bd757802e38bff6ac20f45ade07f21d | 11,875 |
def bilinear_sampler(imgs, coords):
"""
Construct a new image by bilinear sampling from the input image.
Args:
imgs: source image to be sampled from [batch, height_s, width_s, channels]
coords: coordinates of source pixels to sample from [batch, height_t,
Returns:
A new sampled im... | 4138a515f0f4f25b569aae1c28d18de897c63a24 | 11,876 |
import types
def wrap_array(typingctx, data_ptr, shape_tup):
"""create an array from data_ptr with shape_tup as shape
"""
assert isinstance(data_ptr, types.CPointer), "invalid data pointer"
assert (isinstance(shape_tup, types.UniTuple)
and shape_tup.dtype == np.intp), "invalid shape tuple"... | 03fc3c995ae459e644d88baab2ca766ff528ba8d | 11,877 |
from geometric.internal import Angle, Dihedral, Distance, OutOfPlane
from geometric.internal import PrimitiveInternalCoordinates as GeometricPRIC
from geometric.internal import (
RotationA,
RotationB,
RotationC,
TranslationX,
TranslationY,
TranslationZ,
)
from geometr... | f04988255698e43e0febebbf0fa6b4d67625f86f | 11,880 |
def api_get_script(request):
"""POST - Frida Get Script."""
if not request.POST.getlist('scripts[]'):
return make_api_response(
{'error': 'Missing Parameters'}, 422)
resp = tests_frida.get_script(request, True)
if resp['status'] == 'ok':
return make_api_response(resp, 200)
... | f221543d648901c38620bd84d8c6d55a3c8545e0 | 11,881 |
import re
def is_valid_slug(slug):
    """Return True iff *slug* is non-empty and contains only lowercase
    letters, digits, and hyphens.

    Uses ``re.fullmatch`` instead of ``match`` with ``$``: the ``$`` anchor
    also matches just before a trailing newline, so the old pattern
    accepted slugs ending in a newline. Also returns an actual bool
    rather than a Match-object-or-None.
    """
    return re.fullmatch(r"[a-z0-9-]+", slug) is not None
import time
import torch
def KMeans_GPU(x, K=10, Niter=10, verbose=True):
"""Implements Lloyd's algorithm for the Euclidean metric."""
start = time.time()
N, D = x.shape # Number of samples, dimension of the ambient space
c = x[:K, :].clone() # Simplistic initialization for the centroids
x_i ... | 675632335520477cdcd283b5e12b46912c26b323 | 11,883 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.