content stringlengths 35 762k | sha1 stringlengths 40 40 | id int64 0 3.66M |
|---|---|---|
def parse_log(log_file):
"""
Parses a log file into a list of lists containing the messages logged
:param log_file: path-like: Path to the log file
:return: list of lists containing messages in the log file
"""
parsed_logs = [[] for i in range(5)]
with open(log_file, 'r') as f:
for l... | 065618c66470a8c538cbe9346ba66949819672b9 | 6,862 |
def generate_experiment_fn(train_files,
eval_files,
num_epochs=None,
train_batch_size=40,
eval_batch_size=40,
embedding_size=8,
first_layer_size=100,
... | a7af08f955d1d93c1c9735b08b5e18b2fd9e405a | 6,863 |
def get_inclination_and_azimuth_from_locations(self, locations):
"""
self must to point to Main_InputWindow
"""
"""
Return "Inc" and "Azi" array objects in reference units.
"""
Inc = []
Azi = []
for MD in locations:
tangentVector = get_ASCT_from_MD(self, MD)
verticalVector = np.array([0.0,0.0,1.0,0.0])
i... | 2761137c670d3ad90c40d0689db062baf743d7a5 | 6,864 |
def _ensure_package(base, *parts):
"""Ensure that all the components of a module directory path exist, and
contain a file __init__.py."""
bits = []
for bit in parts[:-1]:
bits.append(bit)
base.ensure(*(bits + ['__init__.py']))
return base.ensure(*parts) | fc9bb95445cc1b0e8ec819dfafdaff7d5afbf372 | 6,865 |
def make_cat_matrix(n_rows: int, n_cats: int) -> tm.CategoricalMatrix:
    """Make categorical matrix for benchmarks.

    Draws ``n_rows`` random category codes from ``0..n_cats-1`` and wraps
    them in a ``tm.CategoricalMatrix``.
    """
    codes = np.random.choice(np.arange(n_cats, dtype=int), n_rows)
    return tm.CategoricalMatrix(codes)
def add_stabilizer_nodes(boundaries_raw, electrodes, nr_nodes_between):
"""
Segmentation of nodes:
we have the existing nodes
N.F is the ratio of required nodes and existing nodes
first, add N nodes to each segment
then, add one more node to the F first segments
* assume ord... | fe8ff9618ee34cb9caedd828a880af05a1c964f0 | 6,867 |
def read_data(creds):
"""Read court tracking data in and drop duplicate case numbers"""
# try:
df = gsheet.read_data(gsheet.open_sheet(gsheet.init_sheets(creds),"01_Community_lawyer_test_out_final","Frontend"))
# df.drop_duplicates("Case Number",inplace=True) #Do we want to drop duplicates???
retu... | 95bb588305c230c2f3aaa306e367da2602788f67 | 6,868 |
def _build_indie_lyrics(
root: str, num_workers: int = 8, max_size: int = 200000
) -> DocumentArray:
"""
Builds the indie lyrics dataset. Download the CSV files from:
https://www.kaggle.com/datasets/neisse/scrapped-lyrics-from-6-genres
:param root: the dataset root folder.
:param num_workers: th... | 9eaaf9742c587a649d036e5a7da30dc5ca37db79 | 6,869 |
def getHostname(request):
"""
Utility method for getting hostname of client.
"""
if request.getClientIP() in LOOPBACK_ADDRESSES and has_headers(request, X_FORWARDED_FOR):
# nginx typically returns ip addresses
addr = get_headers(request, X_FORWARDED_FOR)
if isIPAddress(addr):
... | 41ab9ed3a01d1e1bc53565115a8336a5eac741b3 | 6,870 |
def CollapseSolutionPosition(x,x0):
"""
Calculate a free-fall collapse solution
x - position to calculate time at in cm
x0 - initial position in cm
Sam Geen, March 2018
"""
X = x/x0
t = (np.arccos(np.sqrt(X)) + np.sqrt(X * (1.0-X))) * x0**1.5 / np.sqrt(2.0*units.G*gravity.centralmass)
... | 3d0aaeef997a688b72df38ea2188ea34d62c1d55 | 6,871 |
from scipy import signal
from nsdata import bfixpix
def scaleSpectralSky_cor(subframe, badpixelmask=None, maxshift=20, fitwidth=2, pord=1, nmed=3, dispaxis=0, spatial_index=None, refpix=None, tord=2):
"""
Use cross-correlation to subtract tilted sky backgrounds.
subframe : NumPy array
data subframe... | 50ee28ff81c4e981dca47e67ed525b7d9a421288 | 6,872 |
def login():
"""
Implements the login feature for the app.
Errors are shown if incorrect details are used. If the user tried
to access a page requiring login without being authenticated,
they are redirected there after sign in.
"""
if current_user.is_authenticated:
return redirect(u... | 43c60504648aa4e93e24150b1aceb98293a4064d | 6,873 |
def _get_plot_axes(grid):
"""Find which axes are being plotted.
Parameters
----------
grid : Grid
Returns
-------
tuple
"""
plot_axes = [0, 1, 2]
if np.unique(grid.nodes[:, 0]).size == 1:
plot_axes.remove(0)
if np.unique(grid.nodes[:, 1]).size == 1:
plot_ax... | 3112ba7d954c7b39bec035e31b5281919dc78244 | 6,874 |
def _read_uint(addr):
    """Read an unsigned int from the inferior's memory at *addr*.

    :param addr: integer address to dereference
    :return: the value via ``_cast_uint``, or None if the address cannot
        be read (e.g. unmapped memory while probing for KASLR).
    """
    try:
        # parse_and_eval is the call that raises gdb.MemoryError for an
        # unreadable address, so it must live inside the try block
        # (the original dereferenced it before entering the handler).
        value = gdb.parse_and_eval("*(unsigned int*)0x%x" % addr)
        if value is not None:
            return _cast_uint(value)
    except gdb.MemoryError:
        pass
    print("Can't read 0x%x to lookup KASLR uint value" % addr)
    return None
def line_at_infinity(n):
    """Return the line at infinity.

    It consists of exactly the points at infinity, so this simply
    delegates to ``points_at_infinity``.
    """
    return points_at_infinity(n)
def check_section(config:Namespace, name:str) -> Namespace:
"""Check that a section with the specified name is present."""
section = config._get(name)
if section is None:
raise ConfigurationError(f"Section {name} not found in configuration")
if not isinstance(section, Namespace):
raise ... | 09a315a77bd25a3a78b8e80592a32c8709aa511f | 6,878 |
import math
def ceil(a):
    """The ceil function.
    Args:
        a (Union[:class:`~taichi.lang.expr.Expr`, :class:`~taichi.lang.matrix.Matrix`]): A number or a matrix.
    Returns:
        The least integer greater than or equal to `a`.
    """
    # NOTE(review): presumably _unary_operation picks _ti_core.expr_ceil for
    # Taichi expressions and math.ceil for plain Python values — confirm
    # against _unary_operation's definition.
    return _unary_operation(_ti_core.expr_ceil, math.ceil, a)
from typing import Any
from typing import Optional
def Body(
default: Any = Undefined,
*,
default_factory: Optional[NoArgAnyCallable] = None,
alias: str = None,
title: str = None,
description: str = None,
const: bool = None,
gt: float = None,
ge: float = None,
lt: float = None,... | efc636d1b0e42736cecb04857afa67f636fd0bb6 | 6,880 |
def warp(img, pers_margin=425, margin_bottom=50, margin_top=450, margin_sides=150, reverse=False):
"""
This function warps an image. For the transformation a src polygon and a destination
polygon are used. The source polygon is calculated by the image shape and the margins
given. The destination polygon... | 06c4b08e43a3efcfaf3a44bd58727c6b0db833da | 6,881 |
def get_all_doorstations(hass):
    """Get all doorstations.

    Walks every config entry stored under this integration's domain and
    collects the entries that carry a doorstation.
    """
    doorstations = []
    for entry in hass.data[DOMAIN].values():
        if DOOR_STATION in entry:
            doorstations.append(entry[DOOR_STATION])
    return doorstations
import requests
import json
def get_weather() -> dict:
"""Makes an api request for the weather api
country code queries the specific country
city name queries the specific city within that country
units determines the type of numerical data returned (centigrade or Fahrenheit)
:return: the respons... | 023253ec2466182515a345d2bca1f10adf7b67ab | 6,883 |
def _create_off_value():
    """create off value

    Returns a scalar float32 Tensor holding 0.0, used as the constant
    'off' value.
    """
    return Tensor(0.0, mstype.float32)
from datetime import datetime
def is_datetime(value):
    """
    Check if an object is a datetime.datetime instance
    :param value: object to test
    :return: True if *value* is a datetime, else False
    """
    # `datetime` is the class itself (imported via `from datetime import
    # datetime`), so the original `isinstance(value, datetime.datetime)`
    # raised AttributeError on every call instead of returning a bool.
    return isinstance(value, datetime)
import math
def sigmoid(z):
    """Logistic sigmoid: 1 / (1 + exp(-z)).

    The original computed 1 / (1 + exp(z)) — i.e. sigmoid(-z), a
    *decreasing* function — contradicting its own docstring. The overflow
    guard is kept but mirrored to match the corrected sign.

    :param z: input value
    :return: sigmoid(z) in (0, 1], clamped to 0 for very negative z
    """
    if z < -100:
        # exp(-z) would overflow for very negative z; the limit is 0.
        return 0
    return 1.0 / (1.0 + math.exp(-z))
async def get_company_sumary(symbol: str, db: Session = Depends(get_db)):
"""
This method receibe a symbol, if does not exits in our database
go to extract data, save it on our database and retunr the
stored data
"""
company_solver = CompanySolver(company_symbol=symbol)
_ = company_solver.g... | 9cd4a5e6dfe4f308f564d956280cb6cd522c6296 | 6,887 |
def get_attn_pad_mask(seq_q, seq_k):
"""
由于各句子长度不一样,故需要通过PAD将所有句子填充到指定长度;
故用于填充的PAD在句子中无任何含义,无需注意力关注;
注意力掩码函数,可用于屏蔽单词位置为PAD的位置,将注意力放在其他单词上。
:param seq_q: [batch_size, seq_len]
:param seq_k: [batch_size, seq_len]
"""
batch_size, len_q = seq_q.size()
_, len_k = seq_k.size()
pad_... | 522fc244c02ec767b80da2f0c9b5cf6720e931c0 | 6,889 |
def convert_str_to_float(string):
    """Convert str to float
    To handle the edge case
    Args:
        string (str): string
    Returns:
        f (float): float value, or NaN when the input cannot be parsed
    """
    try:
        f = float(string)
    except (TypeError, ValueError):
        # Only parse failures map to NaN; other exceptions should surface.
        # float("nan") is the same value the original produced via np.nan,
        # without depending on an unimported `np`.
        f = float("nan")
    return f
def node_tree(node: str):
"""Format printing for locate"""
str2list = list(node.replace(' ', ''))
count = 0
for i, e in enumerate(str2list):
if e == '(':
count += 1
str2list[i] = '(\n{}'.format('| ' * count)
elif e == ')':
count -= 1
str2... | 010805499cb6e886ec8811949a1d1d013db1d15f | 6,891 |
def process_data(data):
"""
:param datas:
:param args:
:return:
"""
# copy of the origin question_toks
for d in datas:
if 'origin_question_toks' not in d:
d['origin_question_toks'] = d['question_toks']
for entry in datas:
entry['question_toks'] = symbol_filt... | 3e2ab0daa83e48abc121b72cbf1970c8b5fabe87 | 6,892 |
import copy
def repeated_parity_data_binning(shots, nr_of_meas:int):
"""
Used for data binning of the repeated parity check experiment.
Assumes the data qubit is alternatively prepared in 0 and 1.
Args:
shots (1D array) : array containing all measured values of 1 qubit
nr_of_meas (int... | 3cd724579738f5ccf4bd664cf1b023d1c7c08f27 | 6,894 |
def get_user_activities(user_id, timestamp_start, timestamp_end):
""" Returns the activities for a user, between two times"""
activities = Activity.query \
.filter(Activity.user_id == user_id) \
.filter(Activity.timestamp_end >= timestamp_start) \
.filter(Activity.timestamp_start <= tim... | 0b58c1e6a430e0179d34b0ee6d8fdb70f6b102c1 | 6,895 |
def _find_matches(ref, pred):
""" find potential matches between objects in the reference and
predicted images. These need to have at least 1 pixel of overlap.
"""
matches = {}
for label in ref.labels:
mask = ref.labeled == label
matches[label] = [m for m in np.unique(pred.labeled[ma... | 82ea5c5a0c73996187d7f5409745b947b7e17960 | 6,896 |
def _process(config: ConfigType, should_make_dir: bool) -> ConfigType:
"""Process the config
Args:
config (ConfigType): Config object
should_make_dir (bool): Should make dir for saving logs, models etc
Returns:
[ConfigType]: Processed config
"""
config = _process_general_c... | 3bf2cc4eff379fcfe8f7d58332ae33658e7e5540 | 6,897 |
def calendar_heatmap_echarts(data_frame: pd.DataFrame, date_field: str = None, value_field: str = None,
title: str = "",
width: str = "100%", height: str = "300px") -> Echarts:
"""
日历热度图,显示日期热度
:param data_frame:
:param date_field: 日期列
:param... | e92a41dcb533f5fdb0fba91bb1f80b0199d1523e | 6,898 |
from typing import Union
import torch
def adj_to_edge_indices(adj: Union[torch.Tensor, np.ndarray]) -> Union[torch.Tensor, np.ndarray]:
"""
Args:
adj: a (N, N) adjacency matrix, where N is the number of nodes
Returns:
A (2, E) array, edge_idxs, where E is the number of edges,
... | b84d978e7ea6b24cf9b4e8aaa074581d4516435d | 6,899 |
def create_export_settings_window():
"""
This function contains all the logic of the export settings window and will run the window by it's own.
:return: None
"""
window = sg.Window("Export Settings", generate_export_settings_layout(), modal=True, finalize=True,
keep_on_top=... | 9552cfb269cb3e67cf3332783b9a43a674bc9e3d | 6,900 |
def get_vertex_list(session, node_id, part_info):
"""Wrapper for HAPI_GetVertexList
Args:
session (int): The session of Houdini you are interacting with.
node_id (int): The node to get.
part_info (PartInfo): Part info of querying
Returns:
np.ndarray: Array of vertices
"... | dd5a37e248347dc9e9b5f8fba07d202008626ea5 | 6,901 |
def lamb1(u,alpha=.5):
"""Approximate the Lambert W function.
Approximate the Lambert W function from its upper and lower bounds.
The parameter alpha (between 0 and 1) determines how close the
approximation is to the lower bound instead of the upper bound.
:arg float u: Modified argument o... | 1d769ccb74334eef55aa1bc0697328b34ba067bc | 6,902 |
def loglikelihood(time_steps: list) -> float:
"""Calculate the log-likelihood of the time steps from the estimation
Parameters
----------
time_steps : list
estimation time steps
Returns
-------
float
log-likelihood
"""
loglikelihood = 0
for time_step in time_st... | 6761ced2947d9ac382d53eef390bd827ceb51203 | 6,903 |
def get_r0_rm_rp(s, i_delta):
""" compute 3 points r0, r_minus and r_plus to determine apsis
compute these at s.i-i_delta and s.i-2*i_delta
"""
xp = s.Xlast[:, s.i % s.save_last]
x0 = s.Xlast[:, (s.i - i_delta) % s.save_last]
xm = s.Xlast[:, (s.i - 2 * i_delta) % s.save_last]
rp = norm... | 83595b9b15eb9c9373aa4e8f75d2ffc39c8ba248 | 6,904 |
def build_rfb_lite(base, feature_layer, mbox, num_classes):
"""Receptive Field Block Net for Accurate and Fast Object Detection for embeded system
See: https://arxiv.org/pdf/1711.07767.pdf for more details.
"""
base_, extras_, norm_, head_ = add_extras(base(), feature_layer, mbox, num_classes, version='... | c8b1810d088f816d4e3be587cb1085bacde08076 | 6,906 |
def bfunsmat(u, p, U):
"""Computes a matrix of the form :math:`B_{ij}`, where
:math:`i=0\\ldots p` and for each :math:`j` th column the
row :math:`i` of the matrix corresponds to the value of
:math:`(\\mathrm{span}(u_j)-p+i)` th bspline basis function at
:math:`u_j`.
Parameters:
u (np.a... | 6dc260a165c5ae25ac9914ff0b96c1fd8f05b93c | 6,907 |
def getFourgram(words, join_string):
"""
Input: a list of words, e.g., ['I', 'am', 'Denny', 'boy']
Output: a list of trigram, e.g., ['I_am_Denny_boy']
I use _ as join_string for this example.
"""
assert type(words) == list
L = len(words)
if L > 3:
lst = []
for... | 17717bb608a7ef5eff1ac9e1f49d2606b7113360 | 6,908 |
import math
def get_age_carbon_14_dating(carbon_14_ratio):
"""Returns the estimated age of the sample in year.
carbon_14_ratio: the percent (0 < percent < 1) of carbon-14
in the sample conpared to the amount in living
tissue (unitless). """
if isinstance(carbon_14_ratio, str):
raise Type... | 8b0ab86e3c45a97065fefb6c4f02ab87c3e82d23 | 6,909 |
def get_input_definition() -> InputDefinition:
"""
Query ReconAll's input file definition (*t1_files*) to check for existing
runs.
Returns
-------
InputDefinition
ReconAll's *t1_files* input definition
"""
node = get_node()
return node.analysis_version.input_definitions.get(... | 1575bc2521b6f041c4151be6405ac1d458333d62 | 6,910 |
def create_ou_process(action_spec, ou_stddev, ou_damping):
"""Create nested zero-mean Ornstein-Uhlenbeck processes.
The temporal update equation is:
.. code-block:: python
x_next = (1 - damping) * x + N(0, std_dev)
Note: if ``action_spec`` is nested, the returned nested OUProcess will not be... | 292b235863e57b49e531e5e5b091f55688357122 | 6,911 |
def clean_data(df):
    """
    Remove duplicate rows from a dataframe.

    parameters:
        df(Dataframe): data frame
    """
    deduplicated = df.drop_duplicates()
    return deduplicated
def IssueFactory(data, journal_id, issue_order):
"""
Realiza o registro fascículo utilizando o opac schema.
Esta função pode lançar a exceção `models.Journal.DoesNotExist`.
"""
mongo_connect()
metadata = data["metadata"]
issue = models.Issue()
issue._id = issue.iid = data.get("id")
... | 49ef57cb1c628c05e30a35e10680d34140066182 | 6,913 |
def _is_permission_in_db(permission_name: str):
"""To check whether the given permission is in the DB
Parameters
----------
permission_name: str
A permission name we use internally.
E.g., hazard, hazard:hazard, project...
"""
return bool(
models.Auth0Permission.query.fil... | 6e0e672d5c73e0740b695f29d3459a3b80c86831 | 6,914 |
from typing import Optional
def get_dataset(dataset_id: Optional[str] = None,
location: Optional[str] = None,
project: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetDatasetResult:
"""
Gets any metadata associated with a datase... | 985a7e9b7b124c0dba37455426889683e5769aaf | 6,916 |
def is_email_available() -> bool:
    """
    Returns whether email services are available on this instance (i.e. settings are in place).

    Uses the truthiness of ``settings.EMAIL_HOST``: an unset or empty host
    means email cannot be sent.
    """
    return bool(settings.EMAIL_HOST)
def sql2label(sql, num_cols):
"""encode sql"""
# because of classification task, label is from 0
# so sel_num and cond_num should -1,and label should +1 in prediction phrase
cond_conn_op_label = sql.cond_conn_op
sel_num_label = sql.sel_num - 1
# the new dataset has cond_num = 0, do not -1
c... | b25c819e4645c07216970877ac95d20b0f8baab6 | 6,918 |
import time
def retrieveToken(verbose: bool = False, save: bool = False, **kwargs)->str:
"""
LEGACY retrieve token directly following the importConfigFile or Configure method.
"""
token_with_expiry = token_provider.get_token_and_expiry_for_config(config.config_object,**kwargs)
token = token_with_e... | b419934bf2725b46d23abc506c5b5a2828de1d0c | 6,919 |
def format_str_for_write(input_str: str) -> bytes:
    """Format a string for writing to SteamVR's stream.

    Ensures the payload is newline-terminated before UTF-8 encoding it.
    An empty string encodes to empty bytes (no newline is added).

    :param input_str: text to send
    :return: UTF-8 encoded, newline-terminated bytes
    """
    if not input_str:
        return b""
    if not input_str.endswith("\n"):
        input_str += "\n"
    return input_str.encode("utf-8")
def reverse_result(func):
"""The recursive function `get_path` returns results in order reversed
from desired. This decorator just reverses those results before returning
them to caller.
"""
@wraps(func)
def inner(*args, **kwargs):
result = func(*args, **kwargs)
if result is not ... | c13d28550e77a8fba149c50673252012c712961f | 6,921 |
def convert_from_opencorpora_tag(to_ud, tag: str, text: str):
"""
Конвертировать теги их формата OpenCorpora в Universal Dependencies
:param to_ud: конвертер.
:param tag: тег в OpenCorpora.
:param text: токен.
:return: тег в UD.
"""
ud_tag = to_ud(str(tag), text)
pos = ud_tag.sp... | 0e650cc4976d408ed88ef9280fe3a74261353561 | 6,922 |
import struct
def reg_to_float(reg):
    """Reinterpret a 32-bit register value as an IEEE-754 float.

    :param reg: unsigned 32-bit integer register contents
    :return: the float whose big-endian bit pattern equals *reg*
    """
    packed = struct.pack(">L", reg)
    (result,) = struct.unpack(">f", packed)
    return result
def wvelocity(grid, u, v, zeta=0):
"""
Compute "true" vertical velocity
Parameters
----------
grid : seapy.model.grid,
The grid to use for the calculations
u : ndarray,
The u-field in time
v : ndarray,
The v-field in time
zeta : ndarray, optional,
The zeta-field ... | 452e84b334b42b9099ed888319a3cc88e7191e9b | 6,924 |
def _as_nested_lists(vertices):
""" Convert a nested structure such as an ndarray into a list of lists. """
out = []
for part in vertices:
if hasattr(part[0], "__iter__"):
verts = _as_nested_lists(part)
out.append(verts)
else:
out.append(list(part))
re... | c69bd2084aa8e76a53adf3e25286a8dd7ae23176 | 6,925 |
def markdown(code: str) -> str:
    """Convert markdown to HTML using markdown2.

    :param code: markdown source text
    :return: rendered HTML, produced with the module-level
        ``markdown_extensions`` enabled as extras
    """
    return markdown2.markdown(code, extras=markdown_extensions)
import json
import urllib
async def post_notification(request):
"""
Create a new notification to run a specific plugin
:Example:
curl -X POST http://localhost:8081/fledge/notification -d '{"name": "Test Notification", "description":"Test Notification", "rule": "threshold", "channel": "email"... | bdc85dd3d93f51352776a3e63b34a18961014058 | 6,927 |
def test_send_file_to_router(monkeypatch, capsys):
"""
.
"""
# pylint: disable=unused-argument
@counter_wrapper
def get_commands(*args, **kwargs):
"""
.
"""
return "commands"
@counter_wrapper
def add_log(log: Log, cursor=None):
"""
.
... | 739e9d2dbb9adc40b386566b4e73dae98381ed4c | 6,928 |
def smiles2mol(smiles):
"""Convert SMILES string into rdkit.Chem.rdchem.Mol.
Args:
smiles: str, a SMILES string.
Returns:
mol: rdkit.Chem.rdchem.Mol
"""
smiles = canonicalize(smiles)
mol = Chem.MolFromSmiles(smiles)
if mol is None:
return None
Chem.Kekulize(mol)
... | 56a8e0b28f98b1dd920cf03977eb6086a134fd8f | 6,929 |
def build_term_map(deg, blocklen):
"""
Builds term map (degree, index) -> term
:param deg:
:param blocklen:
:return:
"""
term_map = [[0] * comb(blocklen, x, True) for x in range(deg + 1)]
for dg in range(1, deg + 1):
for idx, x in enumerate(term_generator(dg, blocklen - 1)):
... | 3e70cb38314189ff33da3eeb43ca0c68d13904cd | 6,931 |
def gen_sets():
    """
    Names of all available problem generators.

    Note: returns a dict keys *view* over the registry (not a list), so it
    reflects later registrations; wrap in ``list()`` if a snapshot is needed.
    """
    return registered_gens.keys()
def is_valid_currency(currency_: str) -> bool:
    """
    Check whether the given currency code is usable.

    :param currency_: currency code
    :return: True when *currency_* has an entry in both FROM_CNY and TO_CNY
    """
    return all(currency_ in table for table in (FROM_CNY, TO_CNY))
from typing import List
from typing import Tuple
def load_gene_prefixes() -> List[Tuple[str, str, str]]:
"""Returns FamPlex gene prefixes as a list of rows
Returns
-------
list
List of lists corresponding to rows in gene_prefixes.csv. Each row has
three columns [Pattern, Category, Not... | 9fc450636a4b517a79350b9b6131dccfe860c58e | 6,934 |
def uri2dict(uri):
"""Take a license uri and convert it into a dictionary of values."""
if uri.startswith(LICENSES_BASE) and uri.endswith('/'):
base = LICENSES_BASE
license_info = {}
raw_info = uri[len(base):]
raw_info = raw_info.rstrip('/')
info_list = raw_info.split('... | 1f2ccdc52b1dc3424b7554857a87f85a02ea1dbd | 6,936 |
import re
def test_clean_str(text, language='english'):
"""
Method to pre-process an text for training word embeddings.
This is post by Sebastian Ruder: https://s3.amazonaws.com/aylien-main/data/multilingual-embeddings/preprocess.py
and is used at this paper: https://arxiv.org/pdf/1609.02745.pdf
"... | 683f6d27e7486990d0b2a11dd5aeb78f2c1bab07 | 6,937 |
def calc_iou(boxes1, boxes2, scope='iou'):
"""calculate ious
Args:
boxes1: 5-D tensor [BATCH_SIZE, CELL_SIZE, CELL_SIZE, BOXES_PER_CELL, 4] ====> (x_center, y_center, w, h)
boxes2: 5-D tensor [BATCH_SIZE, CELL_SIZE, CELL_SIZE, BOXES_PER_CELL, 4] ===> (x_center, y_center, w, h)
Return:
iou... | e5714cf74be851b6b6003458c44e3308308907a3 | 6,939 |
def not_before(cert):
    """
    Return the naive datetime of the certificate's 'not_before' field.

    This field denotes the first point in time at which the given
    certificate is valid.
    :param cert: certificate object exposing ``not_valid_before``
    :return: Datetime
    """
    validity_start = cert.not_valid_before
    return validity_start
def get_data_from_dict_for_2pttype(type1,type2,datadict):
"""
Given strings identifying the type of 2pt data in a fits file
and a dictionary of 2pt data (i.e. the blinding factors),
returns the data from the dictionary matching those types.
"""
#spectra type codes in fits file, under hdutable... | d8656e6274dd8fb4001d477572220f2c51c08e01 | 6,941 |
def simple_unweighted_distance(g, source, return_as_dicts=True):
"""Returns the unweighted shortest path length between nodes and source."""
dist_dict = nx.shortest_path_length(g, source)
if return_as_dicts:
return dist_dict
else:
return np.fromiter((dist_dict[ni] for ni in g), dtype=in... | d82742ac88f26db8296dec9d28794d3e6d60eec7 | 6,942 |
def A070939(i: int = 0) -> int:
    """Length of binary representation of n."""
    # bin() prefixes "0b" (after any sign), so strip two characters;
    # this matches len(f"{i:b}") for negative inputs as well.
    return len(bin(i)) - 2
import time
def feed_pump(pin: int, water_supply_time: int=FEED_PUMP_DEFAULT_TIME) -> bool:
"""
feed water
Parameters
----------
pin : int
target gpio (BCM)
water_supply_time : int
water feeding time
Returns
-------
bool
Was water feeding successful ?
... | c45b1775991a4914116468961ae979dae71f6caf | 6,944 |
def app_nav(context):
"""Renders the main nav, topnav on desktop, sidenav on mobile"""
url_name = get_url_name(context)
namespace = get_namespace(context)
cache_id = "{}:{}x".format(context['request'].user.username, context.request.path)
cache_key = make_template_fragment_key('app_nav', [cache_id])... | 8e9cc5428b9af22bad13c6454f462d585a04c005 | 6,945 |
def centre_to_zeroes(cartesian_point, centre_point):
"""Converts centre-based coordinates to be in relation to the (0,0) point.
PIL likes to do things based on (0,0), and in this project I'd like to keep
the origin at the centre point.
Parameters
----------
cartesian_point : (numeric)
... | f0ddd632650127e3bb1ed766191950ccf7f06d87 | 6,946 |
def get_all_stack_names(cf_client=boto3.client("cloudformation")):
"""
Get all stack names
Args:
cf_client: boto3 CF client
Returns: list of StackName
"""
LOGGER.info("Attempting to retrieve stack information")
response = cf_client.describe_stacks()
LOGGER.info("Retrieved stack... | 47a36e15651495cc0b5c80e642bb5154640d6b7d | 6,947 |
import calendar
def match_date(date, date_pattern):
"""
Match a specific date, a four-tuple with no special values, with a date
pattern, four-tuple possibly having special values.
"""
# unpack the date and pattern
year, month, day, day_of_week = date
year_p, month_p, day_p, day_of_week_p =... | d794cf211589840697007ecec7cd9e3ba0655b0f | 6,948 |
def get_heating_features(df, fine_grained_HP_types=False):
"""Get heating type category based on HEATING_TYPE category.
heating_system: heat pump, boiler, community scheme etc.
heating_source: oil, gas, LPC, electric.
Parameters
----------
df : pandas.DataFrame
Dataframe that is updated... | 5707975a63aca4778e8dbdd70670e317c777c998 | 6,949 |
def integrate_eom(initial_conditions, t_span, design_params, SRM1, SRM2):
"""Numerically integrates the zero gravity equations of motion.
Args:
initial_conditions (np.array()): Array of initial conditions. Typically set
to an array of zeros.
t_span (np.array()): Time vector (s) over whi... | 07574c775268798371425b837b20706ac9af5f52 | 6,950 |
def activation_sparse(net, transformer, images_files):
"""
Activation bottom/top blob sparse analyze
Args:
net: the instance of Caffe inference
transformer:
images_files: sparse dataset
Returns:
none
"""
print("\nAnalyze the sparse info of the Activation:")... | da138764d002e84bdee306e15b6c8524b223bcbc | 6,951 |
def cfg_load(filename):
    """Load a config yaml file.

    :param filename: path to the YAML file (anything ``OmegaConf.load`` accepts)
    :return: the parsed configuration converted to a Namespace
    """
    return omegaconf2namespace(OmegaConf.load(filename))
def char_to_num(x: str) -> int:
    """Converts a character string to a number (bijective base-26, A=1)

    e.g. "A" -> 1, "Z" -> 26, "AA" -> 27. The empty string maps to 0.
    :param x: Character string
    :type x: str
    :return: Corresponding number
    :rtype: int
    """
    total = 0
    # Reverse once up front; the original rebuilt x[::-1] on every
    # iteration, making the loop O(n^2) in the string length.
    for power, ch in enumerate(reversed(x)):
        total += (ord(ch) - 64) * (26 ** power)
    return total
from typing import Callable
import time
def time_it(f: Callable):
"""
Timer decorator: shows how long execution of function took.
:param f: function to measure
:return: /
"""
def timed(*args, **kwargs):
t1 = time.time()
res = f(*args, **kwargs)
t2 = time.time()
... | bc7321721afe9dc9b4a2861b2c849e6a5d2c309a | 6,954 |
def has_prefix(sub_s, dictionary):
"""
:param sub_s: (str) A substring that is constructed by neighboring letters on a 4x4 square grid
:return: (bool) If there is any words with prefix stored in sub_s
"""
s = ''
for letter in sub_s:
s += letter
for words in dictionary:
if words.startswith(s):
return True
... | b45f3bf7ed699bc215d1670f35ebc0f15b7ec0ff | 6,955 |
def tf_center_crop(images, sides):
    """Crops central region

    Cuts a box of height ``sides[0]`` and width ``sides[1]`` from the
    centre of a batch of images.
    """
    shape = tf.shape(images)
    offset_height = (shape[1] - sides[0]) // 2
    offset_width = (shape[2] - sides[1]) // 2
    return tf.image.crop_to_bounding_box(
        images, offset_height, offset_width, sides[0], sides[1]
    )
from datetime import datetime
import pytz
def convert_timezone(time_in: datetime.datetime) -> datetime.datetime:
"""
用来将系统自动生成的datetime格式的utc时区时间转化为本地时间
:param time_in: datetime.datetime格式的utc时间
:return:输出仍旧是datetime.datetime格式,但已经转换为本地时间
"""
time_utc = time_in.replace(tzinfo=pytz.timezone("UT... | 3843aa62a5ff29fd629776e69c52cd95c51fac5d | 6,958 |
import six
def classifier_fn_from_tfhub(output_fields, inception_model, return_tensor=False):
"""Returns a function that can be as a classifier function.
Copied from tfgan but avoid loading the model each time calling _classifier_fn
Args:
output_fields: A string, list, or `None`. If present, assume ... | e7f54a4c46519465460cc0e97b0f6f12f91a98d4 | 6,962 |
import json
def get_rate_limit(client):
"""
Get the Github API rate limit current state for the used token
"""
query = '''query {
rateLimit {
limit
remaining
resetAt
}
}'''
response = client.execute(query)
json_response = json.loads(respo... | ec5f853014f25c841e71047da62ca41907b02e13 | 6,963 |
import functools
import pprint
def pret(f):
"""
Decorator which prints the result returned by `f`.
>>> @pret
... def f(x, y): return {'sum': x + y, 'prod': x * y}
>>> res = f(2, 3)
==> @pret(f) -- {'prod': 6, 'sum': 5}
"""
@functools.wraps(f)
def g(*args, **kwargs):
... | fedb8cf19913042d0defef676db6b22715e8c572 | 6,964 |
def parse_arguments() -> tuple[str, str, bool]:
"""Return the command line arguments."""
current_version = get_version()
description = f"Release Quality-time. Current version is {current_version}."
epilog = """preconditions for release:
- the current folder is the release folder
- the current branch... | 7b58b2b3c99a4297bb12b714b289336cdbc75a5e | 6,965 |
def can_hold_bags(rule: str, bag_rules: dict) -> dict:
    """
    Returns a dict of all bags that can be held by given bag color
    :param rule: Color of a given bag
    :param bag_rules: Dictionary of rules
    :type rule: str
    :type bag_rules: dict
    :return: the rules entry for that color (KeyError if the color is unknown)
    """
    held_bags = bag_rules[rule]
    return held_bags
def fix_levers_on_same_level(same_level, above_level):
"""
Input: 3D numpy array with malmo_object_to_index mapping
Returns:
3D numpy array where 3 channels represent
object index, color index, state index
for minigrid
"""
lever_idx = malmo_object_to_index['lever']
con... | d1727e188f9a5935a660d806f69f9b472db94217 | 6,970 |
def iv_plot(df, var_name=None, suffix='_dev'):
"""Returns an IV plot for a specified variable"""
p_suffix = suffix.replace('_','').upper()
sub_df = df if var_name is None else df.loc[df.var_name==var_name, ['var_cuts_string'+suffix, 'ln_odds'+suffix, 'resp_rate'+suffix, 'iv'+suffix]]
sub_df['resp_rate_t... | dd35329b5b91a19babdfa943c2f7688bb013c680 | 6,971 |
from py._path.local import LocalPath
def is_alive(pid):
    """Return whether a process is running with the given PID.

    Implemented by testing for the ``/proc/<pid>`` directory, so this only
    works on systems with procfs (Linux).

    :param pid: process id to test
    :return: True if a process with that pid exists
    """
    # stdlib pathlib replaces the third-party py._path dependency;
    # the check is byte-for-byte the same filesystem probe.
    from pathlib import Path
    return Path('/proc', str(pid)).is_dir()
from pyspark import SparkContext
from typing import Callable
import functools
from typing import Any
def inheritable_thread_target(f: Callable) -> Callable:
"""
Return thread target wrapper which is recommended to be used in PySpark when the
pinned thread mode is enabled. The wrapper function, before call... | 02d2e58449c736bf8ef19354bfd8f7a21066615b | 6,973 |
from typing import OrderedDict
def join_label_groups(grouped_issues, grouped_prs, issue_label_groups,
pr_label_groups):
"""Combine issue and PR groups in to one dictionary.
PR-only groups are added after all issue groups. Any groups that are
shared between issues and PRs are added a... | b51a70a60bde3580326816eaf0d3b76cb51062ac | 6,975 |
def healpix_ijs_neighbours(istar, jstar, nside):
"""Gets the healpix i, jstar neighbours for a single healpix pixel.
Parameters
----------
istar : array
Healpix integer i star index.
jstar : array
Healpix integer i star index.
nside : int
Healpix nside.
Returns
... | 48cae5cd13101529c7d03f9c08ed0f2c2d77a7b8 | 6,976 |
def create_parser(config: YAMLConfig) -> ArgumentParser:
"""
Automatically creates a parser from all of the values specified in a config
file. Will use the dot syntax for nested dictionaries.
Parameters
----------
config: YAMLConfig
Config object
Returns
-------
ArgumentPar... | 8fcf886448061b7f520d133bbf9bb66047e9f516 | 6,978 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.