content stringlengths 35 762k | sha1 stringlengths 40 40 | id int64 0 3.66M |
|---|---|---|
def simplex_edge_tensors(dimensions, # type: int
centers_in, # type: List[List[int]]
centers_out, # type: List[List[int]]
surrounds_in, # type: List[List[int]]
surrounds_out, # type: List[List[int]]
... | fb1fdf0a46939db10770984b28dc4f33cb42d0b9 | 3,645,300 |
def hashtoaddress(PARAMETER):
    """
    Converts a 160-bit hash to an address.
    [PARAMETER] is required and should be an address hash.
    """
    # Build the block-explorer URL, then fetch and return the raw response body.
    url = blockexplorer('hashtoaddress') + '/' + str(PARAMETER)
    response = urllib2.urlopen(url)
    return response.read()
import types
import builtins
import ast
def get_all_objects(line: str, frame: types.FrameType) -> ObjectsInfo:
"""Given a (partial) line of code and a frame,
obtains a dict containing all the relevant information about objects
found on that line so that they can be formatted as part of the
answer to "... | 65f2d8e756da32d883c07456feb1d088aa5f4efa | 3,645,302 |
def magenta(msg):
    """Return colorized <msg> in magenta"""
    colorized = __fore(msg, 'magenta')
    return colorized
def request_latest_news():
    """
    Query the most recent item of the database and convert it to a string.
    :return: A String with the last item of the database
    """
    latest = News.query.order_by(News.id.desc()).first()
    # Render according to the content type of the incoming request.
    return format_latest_article(latest, request.content_type)
def is_vulgar(words, sentence):
    """Checks if a given line has any of the bad words from the bad words list.

    Returns 1 when at least one word of *words* occurs as a substring of
    *sentence*, otherwise 0 (int flags kept for caller compatibility).
    """
    return 1 if any(bad in sentence for bad in words) else 0
def edges_cross(graph, nodes1, nodes2):
"""
Finds edges between two sets of disjoint nodes.
Running time is O(len(nodes1) * len(nodes2))
Args:
graph (nx.Graph): an undirected graph
nodes1 (set): set of nodes disjoint from `nodes2`
nodes2 (set): set of nodes disjoint from `nodes1... | 96c3b2d2de97547cb16d9f2e0071bb093e815d28 | 3,645,306 |
def basket_view(func):
""" Returns rendered page for basket """
@jinja2_view('basket.html', template_lookup=[TEMPLATES_DIR])
def _basket_view_call(*args, **kwargs):
func(*args, **kwargs)
return {'col_mapping': COLUMN_MAPPING, 'product_list': _format_products_for_web(get_basket_products())}
... | c818d1bd77fe100df857d746109f20caebd8581f | 3,645,307 |
def py2to3(target_path,
interpreter_command_name="python",
is_transform=False,
is_del_bak=False,
is_html_diff=False,
is_check_requirements=False):
"""
The main entrance of the 2to3 function provides a series of parameter entrances.
The main functions ar... | 8581beacd7daa174309da99c6857acec841345bf | 3,645,308 |
import os
def _norm_path(path):
    """
    Decorator intended to normalize the output of a path-retrieval function.
    Useful for fixing the slash/backslash Windows cases.
    """
    def wrapper(*args):
        raw = path(*args)
        return os.path.normpath(raw)
    return wrapper
import re
def _get_hash_aliases(name):
"""
internal helper used by :func:`lookup_hash` --
normalize arbitrary hash name to hashlib format.
if name not recognized, returns dummy record and issues a warning.
:arg name:
unnormalized name
:returns:
tuple with 2+ elements: ``(hash... | 537c30fee93c465a768e80dd6fc8314555b65df5 | 3,645,310 |
def dirac_2d_v_and_h(direction, G_row, vec_len_row, num_vec_row,
G_col, vec_len_col, num_vec_col,
a, K, noise_level, max_ini, stop_cri):
"""
used to run the reconstructions along horizontal and vertical directions in parallel.
"""
if direction == 0: # row recon... | e68945c68cb80ef001e027c30651d1f3a38369e4 | 3,645,311 |
import importlib
from typing import Tuple
def Matrix(*args, **kwargs):
"""*Funktion zur Erzeugung von Matrizen mit beliebiger Dimension"""
h = kwargs.get("h")
if h in (1, 2, 3):
matrix_hilfe(h)
return
elif isinstance(h, (Integer, int)):
matrix... | f9bae41e6ce6f6b3c144d8844317ae7b2272bb91 | 3,645,312 |
def afw_word_acceptance(afw: dict, word: list) -> bool:
""" Checks if a **word** is accepted by input AFW, returning
True/False.
The word w is accepted by a AFW if exists at least an
accepting run on w. A run for AFWs is a tree and
an alternating automaton can have multiple runs on a given
inpu... | 52ff4c5fa2c8d2c8af667ee9c03e587b2c4ac10b | 3,645,313 |
from operator import and_
def get_following():
"""
endpoint: /release/following
method: GET
param:
"[header: Authorization] Token": str - Token received from firebase
response_type: array
response:
id: 1
created: 123456789
vol: 1
chapter: 1
title: Chapter ti... | 90999ec6a4e14bf3c3633ef38f0e020cca62623b | 3,645,314 |
import re
def matchNoSpaces(value):
    """Match strings with no spaces.

    :param value: string to test
    :return: True when *value* contains no whitespace characters, else False.
    """
    # r"\s" (raw string) avoids the invalid-escape-sequence DeprecationWarning
    # raised by the original bare "\s" literal; `not` collapses the original
    # explicit True/False branches into one expression.
    return not re.search(r'\s', value)
import re
def collect_inline_comments(list_of_strings,begin_token=None,end_token=None):
"""Reads a list of strings and returns all of the inline comments in a list.
Output form is ['comment',line_number,string_location] returns None if there are none or tokens are set to None"""
if begin_token in [None] ... | 8ff2dfa055b2f2a3ef72842518b2fb87bcb62c1e | 3,645,316 |
import os
from pathlib import Path
import pickle
def get_df_ads():
"""
"""
#| - get_df_ads
# #####################################################
# import pickle; import os
path_i = os.path.join(
os.environ["PROJ_irox_oer"],
"dft_workflow/job_analysis/collect_collate_dft_data... | f2253eff4dc74f55c3b256513fe6821b55015567 | 3,645,317 |
def cli_list(apic, args):
    """Implement CLI command `list`.

    Prints one instance name per line. Returns 0 when instances were
    printed; implicitly returns None when there are none (original
    behavior preserved).
    """
    # pylint: disable=unused-argument
    instances = apic.get_instances()
    if instances:
        # Reuse the already-fetched list instead of calling the API a
        # second time (the original issued two identical requests).
        print('\n'.join(instances))
        return 0
def is_symmetric(a: np.array):
    """
    Check whether the matrix is symmetric
    :param a: square numpy array to test
    :return: True-like when |a - a.T| is elementwise within tolerance
    """
    tolerance = 1e-10
    deviation = np.abs(a - a.T)
    return np.all(deviation <= tolerance)
def get_number(line, position):
"""Searches for the end of a number.
Args:
line (str): The line in which the number was found.
position (int): The starting position of the number.
Returns:
str: The number found.
int: The position after the number found.
"""
word = ... | df41a1b53953b912e5ce5d6d9b3d69c4133460f1 | 3,645,320 |
from typing import TextIO
import yaml
def load(f: TextIO) -> Config:
    """Load a configuration from a file-like object f"""
    cfg = yaml.safe_load(f)
    diag = cfg["diag_table"]
    # A plain dict means the diag table was not yet parsed into its object form.
    if isinstance(diag, dict):
        cfg["diag_table"] = DiagTable.from_dict(diag)
    return cfg
def levelize_smooth_or_improve_candidates(to_levelize, max_levels):
"""Turn parameter in to a list per level.
Helper function to preprocess the smooth and improve_candidates
parameters passed to smoothed_aggregation_solver and rootnode_solver.
Parameters
----------
to_levelize : {string, tuple... | 8b302b8cae04adae010607c394c2e5059aa46eeb | 3,645,322 |
def get_max_num_context_features(model_config):
"""Returns maximum number of context features from a given config.
Args:
model_config: A model config file.
Returns:
An integer specifying the max number of context features if the model
config contains context_config, None otherwise
"""
meta_ar... | 1df5d220e30cfa5b440c0063149e2ebaf896352a | 3,645,323 |
import hashlib
def hashname(name, secsalt):
    """Obtain a sha256 hash from a name.

    The salt is appended to the name before hashing; the hex digest
    string is returned.
    """
    salted = (name + secsalt).encode("utf-8")
    return hashlib.sha256(salted).hexdigest()
def company_detail(request, stock_quote: int) -> HttpResponse:
""" Return a view to Company details """
try:
company = Company.objects.get(quote=str(stock_quote))
# TODO(me): Implement company detail view logic
except Company.DoesNotExist:
raise Http404("Company with releated quote d... | 95e542e7386361709fedc98375bdbc9c5dd8780b | 3,645,325 |
def parse_encoding_header(header):
"""
Break up the `HTTP_ACCEPT_ENCODING` header into a dict of the form,
{'encoding-name':qvalue}.
"""
encodings = {'identity':1.0}
for encoding in header.split(","):
if(encoding.find(";") > -1):
encoding, qvalue = encoding.split(";")
... | 0d423ad51ff14589b5858681cf32a0f318e6dbfa | 3,645,326 |
def opf_consfcn(x, om, Ybus, Yf, Yt, ppopt, il=None, *args):
"""Evaluates nonlinear constraints and their Jacobian for OPF.
Constraint evaluation function for AC optimal power flow, suitable
for use with L{pips}. Computes constraint vectors and their gradients.
@param x: optimization vector
@param... | f90083088e6de9668ed44cdc950aa81bf96e2450 | 3,645,327 |
def iou3d_kernel(gt_boxes, pred_boxes):
"""
Core iou3d computation (with cuda)
Args:
gt_boxes: [N, 7] (x, y, z, w, l, h, rot) in Lidar coordinates
pred_boxes: [M, 7]
Returns:
iou3d: [N, M]
"""
intersection_2d = rotate_iou_gpu_eval(gt_boxes[:, [0, 1, 3, 4, 6]], pred_boxe... | 368f457b7afe6e5653839d130b6d6b8a6ce1ab7c | 3,645,328 |
def get_final_metrics(raw_metrics, summarized=False):
"""
Calculates final metrics from all categories.
:param summarized: True if the result should contain only final metrics (precision recall, f1 and f0.5)
False if the result should contain all the per category metrics too.
:param raw_metrics: A d... | 4782342efe12765a4de7d4eb9ed2b458f7d56686 | 3,645,329 |
import pprint
from re import IGNORECASE
def get_health_feed():
"""
Parse BBC news health feed and remove articles not related to COVID-19.
"""
feed = parse("http://feeds.bbci.co.uk/news/health/rss.xml")
# log parsed feed for debugging purposes
logger.debug(pprint(feed.entries))
logger.debu... | 858eba4afd9ae6b47d865c0f7ba1b31d2d0f69a5 | 3,645,330 |
def get_data_meta_path(either_file_path: str) -> tuple:
"""get either a meta o rr binary file path and return both as a tuple
Arguments:
either_file_path {str} -- path of a meta/binary file
Returns:
[type] -- (binary_path, meta_path)
"""
file_stripped = '.'.join(either_file_path.... | 0456186cd99d5899e2433ac9e44ba0424077bcc0 | 3,645,331 |
import click
def group(name):
    """
    Create a Click group with the default click context and the class for Click's
    ``didyoumean`` suggestions, without having to repeat them for every group.

    :param name: name of the Click group to create
    :return: a ``click.group`` decorator preconfigured with
        ``CLICK_CONTEXT_SETTINGS`` and the ``AliasedGroup`` class
    """
    return click.group(
        name=name,
        context_settings=CLICK_CONTEXT_SETTINGS,
        cls=AliasedGroup)
def get_files(target_files, config):
    """Retrieve files associated with the potential inputs.

    :param target_files: mapping whose keys are candidate file names
    :param config: configuration handed to the file finder
    :return: list of remote file names that could be resolved
    """
    out = []
    find_fn = _find_file(config)
    # Iterate the mapping directly: dicts iterate over their keys, so the
    # original `.keys()` call was redundant.
    for fname in target_files:
        remote_fname = find_fn(fname)
        if remote_fname:
            out.append(remote_fname)
    return out
def cmd(func, *args, **kwargs):
    """Freeze *func* together with its arguments.

    The returned callable ignores whatever arguments it is invoked with
    and always performs the frozen call.
    """
    def _frozen(*_ignored, **_ignored_kw):
        return func(*args, **kwargs)
    return _frozen
def flow_accumulation(receiver_nodes, baselevel_nodes, node_cell_area=1.0,
runoff_rate=1.0, boundary_nodes=None):
"""Calculate drainage area and (steady) discharge.
Calculates and returns the drainage area and (steady) discharge at each
node, along with a downstream-to-upstream ordere... | e3a7801ed4639ad8168491c4a1689c37adfe930f | 3,645,335 |
def extract_ids(response_content):
"""Given a result's content of a research, returns a list of all ids. This method is meant to work with PubMed"""
ids = str(response_content).split("<Id>")
ids_str = "".join(ids)
ids = ids_str.split("</Id>")
ids.remove(ids[0])
ids.remove(ids[len(ids) - 1])
... | 69ad17a9a6bc3b56a11dceafb802fbf7eb1eac66 | 3,645,336 |
def gatorosc(candles: np.ndarray, sequential=False) -> GATOR:
"""
Gator Oscillator by Bill M. Williams
:param candles: np.ndarray
:param sequential: bool - default=False
:return: float | np.ndarray
"""
if not sequential and len(candles) > 240:
candles = candles[-240:]
jaw = sh... | 2890fa42836ea020ebb54427f7b3c8a773cf13c5 | 3,645,337 |
def program_item(prog_hash):
"""
GET,DELETE /programs/<prog_hash>: query programs
:prog_hash: program checksum/identifier
:returns: flask response
"""
if request.method == 'GET':
with client.client_access() as c:
prog = c.user_programs.get(prog_hash)
return respond_js... | 7a27d4083facc02e71e08a9bffda217fadc5a22e | 3,645,338 |
import json
import logging
def lambda_handler(event, context):
"""
Federate Token Exchange Lambda Function
"""
if not "body" in event:
return helper.build_response(
{"message": "You do not have permission to access this resource."}, 403
)
input_json = dict()
input... | 16456ebb905cdb2b1782a1017928574e4c90b9cd | 3,645,339 |
from typing import List
def find_domain_field(fields: List[str]):
    """Find and return domain field value.

    :param fields: tokenized log fields
    :return: the token immediately following the first ``"query:"`` marker,
        or None when the marker is absent.
    :raises IndexError: if ``"query:"`` is the last token (same behavior as
        the original manual-counter implementation).
    """
    # enumerate() replaces the original hand-maintained index counter.
    for index, field in enumerate(fields):
        if field == "query:":
            return fields[index + 1]
    return None
def placeValueOf(num: int, place: int) -> int:
    """
    Get the value on the place specified.
    :param num: The num
    :param place: The place. 1 for unit place, 10 for tens place, 100 for hundreds place.
    :return: The value digit.
    """
    # Shift the requested place into the units position, then read it off.
    shifted = num // place
    return lastDigitOf(shifted)
def prepare_polygon_coords_for_bokeh(countries):
"""Prepares the country polygons for plotting with Bokeh.
To plot series of polygons, Bokeh needs two lists of lists (one for x coordinates, and another
for y coordinates). Each element in the outer list represents a single polygon, and each
elemen... | 1d325e895cf8efdcaf69ae1ebcb369216e3378de | 3,645,342 |
def get_incident_ids_as_options(incidents):
"""
Collect the campaign incidents ids form the context and return them as options for MultiSelect field
:type incidents: ``list``
:param incidents: the campaign incidents to collect ids from
:rtype: ``dict``
:return: dict with th... | ea44808dfa7b5cb6aa43951062bf3a2401f0c588 | 3,645,343 |
from typing import List
import glob
import csv
def get_result(dir_path: str) -> List[float]:
"""試合のログ(csv)から勝敗データを抽出する
Args:
file_path (str): 抽出したい試合のログが格納されているパス
Returns:
List[float]: 勝率データ
"""
files = glob.glob(dir_path + "*.csv")
result = []
for file in files:
... | 52f6e1d5e432ec1d56524654cba2ddae9c60426c | 3,645,344 |
def get_internal_energies(
compounds: dict, qrrho: bool = True, temperature: float = 298.15
):
"""Obtain internal energies for compounds at a given temperature.
Parameters
----------
compounds : dict-like
A descriptor of the compounds.
Mostly likely, this comes from a parsed input f... | 788955bed2cc643b5440168c5debde41986df570 | 3,645,345 |
from typing import Tuple
from typing import Dict
from typing import Iterable
from typing import List
from typing import Any
import os
import json
def load_folder(folder: str) -> Tuple[Dict[str, Iterable[List[str]]], Dict[str, Any]]:
"""
Loads data from the folder output using neurips_crawler
output/data_... | 088279b5f3d66436e23ca6b2663cf6a79fdaa7c4 | 3,645,346 |
def get_local_info(hass):
    """Get HA's local location config.

    Returns (latitude, longitude, timezone-name, elevation) read from
    ``hass.config``; the timezone is coerced to ``str``.
    """
    cfg = hass.config
    return (
        cfg.latitude,
        cfg.longitude,
        str(cfg.time_zone),
        cfg.elevation,
    )
def if_present_phrase(src_str_tokens, phrase_str_tokens):
"""
:param src_str_tokens: a list of strings (words) of source text
:param phrase_str_tokens: a list of strings (words) of a phrase
:return:
"""
match_pos_idx = -1
for src_start_idx in range(len(src_str_tokens) - len(phrase_str_token... | 37297c78bb26c7cda28010e1f7567a19e2f875ee | 3,645,348 |
def fit_2D_xanes_non_iter(img_xanes, eng, spectrum_ref, error_thresh=0.1):
"""
Solve equation of Ax=b, where:
Inputs:
----------
A: reference spectrum (2-colume array: xray_energy vs. absorption_spectrum)
X: fitted coefficient of each ref spectrum
b: experimental 2D XANES data
Outputs... | 2146223aae8bf5ac13f658134a09c5682219777d | 3,645,349 |
def get_cmap(n_fg):
"""Generate a color map for visualizing foreground objects
Args:
n_fg (int): Number of foreground objects
Returns:
cmaps (numpy.ndarray): Colormap
"""
cmap = cm.get_cmap('Set1')
cmaps = []
for i in range(n_fg):
cmaps.append(np.asarray(cmap(i))[:3])
cmaps = n... | 010df9e117d724de398eeb919417a71795aad460 | 3,645,350 |
def GetBasinOutlines(DataDirectory, basins_fname):
"""
This function takes in the raster of basins and gets a dict of basin polygons,
where the key is the basin key and the value is a shapely polygon of the basin.
IMPORTANT: In this case the "basin key" is usually the junction number:
this function will use the ... | 0731451ff765318d63f36950be88dd5c73504bf0 | 3,645,351 |
def detect_park(frame, hsv):
"""
Expects: HSV image of any shape + current frame
Returns: TBD
"""
#hsv = cv2.cvtColor(frame, cfg.COLOUR_CONVERT) # convert to HSV CS
# filter
mask = cv2.inRange(hsv, lower_green_park, upper_green_park)
# operations
mask = cv2.morphologyEx(mask, cv2... | 5cd63590741ac005e7b05090ae77bca6623cf420 | 3,645,352 |
def normalize(mx):
"""Row-normalize sparse matrix"""
mx = np.array(mx)
rowsum = mx.sum(axis=1)
r_inv = np.power(rowsum, -1.0).flatten() #use -1.0 as asym matrix
r_inv[np.isinf(r_inv)] = 0.
r_mat_inv = np.diag(r_inv)
a = np.dot(r_mat_inv, mx)
#a = np.dot(a, r_mat_inv) #skip for asym matri... | 6351bc777731eed2119e59ee411d7338e55d2ced | 3,645,353 |
def th_allclose(x, y):
    """
    Determine whether two torch tensors have same values
    Mimics np.allclose
    """
    total_abs_diff = th.abs(x - y).sum()
    return total_abs_diff < 1e-5
import h5py
def _check_h5_installed(strict=True):
"""Aux function."""
try:
return h5py
except ImportError:
if strict is True:
raise RuntimeError('For this functionality to work, the h5py '
'library is required.')
else:
return F... | 732300ff4171366c8a3328669068120e21411890 | 3,645,355 |
def calc_c_o(row):
"""
C or O excess
if (C/O>1):
excess = log10 [(YC/YH) - (YO/YH)] + 12
if C/O<1:
excess = log10 [(YO/YH) - (YC/YH)] + 12
where YC = X(C12)/12 + X(C13)/13
YO = X(O16)/16 + X(O17)/17 + X(O18)/18
YH = XH/1.00794
"""
yh = row['H'] / 1.00794
... | 16677f983e17465a509f2b27ec1866d3e56f00da | 3,645,356 |
import json
def create_job_from_file(job_file):
"""Creates a job from a JSON job specification.
:param job_file: Path to job file.
:type job_file: str
:returns: Job object of specified type.
"""
logger.info("Creating Job from {}.".format(job_file))
with open(job_file) as f:
params... | 3e1e2eaa1892dafc310fcb48abd096a59cb9b5a0 | 3,645,357 |
def compile_insert_unless_conflict(
stmt: irast.InsertStmt,
typ: s_objtypes.ObjectType,
*, ctx: context.ContextLevel,
) -> irast.OnConflictClause:
"""Compile an UNLESS CONFLICT clause with no ON
This requires synthesizing a conditional based on all the exclusive
constraints on the object.
"... | feaa0f0ea54ee51d78fe3b95c3ef20e6ea6bb4e2 | 3,645,358 |
import io
def plot_to_image(figure):
"""
Converts the matplotlib plot specified by "figure" to a PNG image and
returns it. The supplied figure is closed and inaccessible after this call.
"""
# Save the plot to a PNG in memory
buf = io.BytesIO()
figure.savefig(buf, format="png")
buf.see... | 14b9f223372f05f32fc096a7dafcbce273b33d0d | 3,645,359 |
def sent2vec(model, words):
"""文本转换成向量
Arguments:
model {[type]} -- Doc2Vec 模型
words {[type]} -- 分词后的文本
Returns:
[type] -- 向量数组
"""
vect_list = []
for w in words:
try:
vect_list.append(model.wv[w])
except:
continue
vect_list = ... | 06569e2bdb13d31b1218ab9a3070affe626fd915 | 3,645,360 |
import requests
def postXML(server: HikVisionServer, path, xmldata=None):
"""
This returns the response of the DVR to the following POST request
Parameters:
server (HikvisionServer): The basic info about the DVR
path (str): The ISAPI path that will be executed
xmldata (str): This ... | a5566e03b13b0938e84928dc09b6509e2dfd8a12 | 3,645,361 |
import requests
def get_government_trading(gov_type: str, ticker: str = "") -> pd.DataFrame:
"""Returns the most recent transactions by members of government
Parameters
----------
gov_type: str
Type of government data between:
'congress', 'senate', 'house', 'contracts', 'quarter-contr... | ba3599d22825cd4a3ed3cb71f384561627067b71 | 3,645,362 |
def pf_mobility(phi, gamma):
    """Phase field mobility function (currently constant in phi)."""
    # Earlier degenerate-mobility variants, kept for reference:
    #   return gamma * (phi**2 - 1.)**2
    #   func = 1. - phi**2
    #   return 0.75 * gamma * 0.5 * (1. + df.sign(func)) * func
    return gamma
def print_df_stats(df: pd.DataFrame, df_train: pd.DataFrame, df_val: pd.DataFrame, df_test: pd.DataFrame, label_encoder, prediction):
"""
Print some statistics of the splitted dataset.
"""
try:
labels = list(label_encoder.classes_)
except AttributeError:
labels = []
headers = ["I... | bb52799de86b069b4c480fd94c2eaf501617284f | 3,645,364 |
import os
import sys
import tempfile
import shutil
import logging
import subprocess
import csv
import json
def hivtrace(id,
input,
reference,
ambiguities,
threshold,
min_overlap,
compare_to_lanl,
fraction,
strip_dr... | d49bb2b67783e35e305cf59ed7ad923831e0d1d8 | 3,645,365 |
def parse_author_mail(author):
    """Split a string like ``author <author-mail>`` into (author, mail).

    Falls back to ``(author, None)`` when the pattern does not match.
    """
    match = author_mail_re.search(author)
    if match:
        return match.group(1), match.group(2)
    return author, None
import attr
def tag(name, content='', nonclosing=False, **attrs):
"""
Wraps content in a HTML tag with optional attributes. This function
provides a Pythonic interface for writing HTML tags with a few bells and
whistles.
The basic usage looks like this::
>>> tag('p', 'content', _class="n... | acf4575a2c95e105ddf4231c74116d4470cf87eb | 3,645,367 |
import random
import os
import json
def _reconcile_phenotype(meth, fba_model_id, phenotype_id, out_model_id):
"""Run Gapfilling on an FBA Model [16]
:param fba_model_id: an FBA model id [16.1]
:type fba_model_id: kbtypes.KBaseFBA.FBAModel
:ui_name fba_model_id: FBA Model ID
:param phenotype_id: a... | 438ad093ebac8fc09372dda36c6ac3187981524d | 3,645,368 |
def label_global_entities(ax, cmesh, edim, color='b', fontsize=10):
"""
Label mesh topology entities using global ids.
"""
coors = cmesh.get_centroids(edim)
coors = _to2d(coors)
dim = cmesh.dim
ax = _get_axes(ax, dim)
for ii, cc in enumerate(coors):
ax.text(*cc.T, s=ii, color=c... | a3e96c090b6f439bcf5991e2df306f5305758cef | 3,645,369 |
from datetime import datetime
def build_filename():
"""Build out the filename based on current UTC time."""
now = datetime.datetime.utcnow()
fname = now.strftime('rib.%Y%m%d.%H00.bz2')
hour = int(now.strftime('%H'))
if not hour % 2 == 0:
if len(str(hour)) == 1:
hour = "0%d" % (... | 0f68b09410bf1d749bf3492e974be315d2fcaa0d | 3,645,370 |
import torch
def sample_sequence(model, length, context=None, temperature=1.0, top_k=10, sample=True,
device='cuda', use_constrained_decoding=False, constrained_decoding_threshold=0.3,
person_to_category_to_salient_ngram_embed=(), word_embeds=(), tokenizer=None):
"""
:param model:
:param length:
:param... | 9d65d5b67163e4794628d5f508517e22bbada02c | 3,645,371 |
def main(debug=False, args=None):
"""Start the app. We will see if we need this anyway."""
log.info('>>>>> Starting development server at http://{}/api/ <<<<<'.format(
flask_app.config['SERVER_NAME']))
# flask_app.run(debug=settings.FLASK_DEBUG)
# flask_app.run(debug=config_json["FLASK_DEBUG"])
... | 3c7c3221b32871e5783bc4b421c9cbdd1d6108a1 | 3,645,372 |
from ostap.math.models import tf1 as _tf1
def tf1 ( fun , **kwargs ) :
"""Convert function object to TF1
"""
return _tf1 ( fun , **kwargs ) | b8af1dd2f7332a9b4585a4d59e8f5299f95b730c | 3,645,373 |
def normalize_requires(filename, **kwargs):
"""Return the contents of filename, with all [Require]s split out and ordered at the top.
Preserve any leading whitespace/comments.
"""
if filename[-2:] != '.v': filename += '.v'
kwargs = fill_kwargs(kwargs)
lib = lib_of_filename(filename, **kwargs)
all_i... | 8973207559289308f98e7c3217a4b825eeb22c91 | 3,645,374 |
import warnings
import sys
def deprecate_module_with_proxy(module_name, module_dict, deprecated_attributes=None):
"""
Usage:
deprecate_module_with_proxy(__name__, locals()) # at bottom of module
"""
def _ModuleProxy(module, depr):
"""Return a wrapped object that warns about deprecated... | 3647770021a790a6ce1f04c6ef56967f23f03569 | 3,645,375 |
import json
def index():
"""
Returns:
render_template (flask method): contains data required to render visualizations
"""
graphs = []
# extract data needed for visuals
# TODO: Below is an example - modify to extract data for your own visuals
genre_counts = df.groupby('genre')... | e3c0aa931635eaf9626e43f9493542100a60b768 | 3,645,376 |
import numpy
import random
def uniform_dec(num):
"""
Declination distribution: uniform in sin(dec), which leads to a uniform distribution across all declinations.
Parameters
----------
num : int
The number of random declinations to produce.
"""
return (numpy.pi / 2.) - numpy.arccos... | bc8724e5aa2e65e87f253d271e3130b9379d5cb5 | 3,645,377 |
import os
import logging
def run_vcfeval(job, context, sample, vcf_tbi_id_pair, vcfeval_baseline_id, vcfeval_baseline_tbi_id,
fasta_path, fasta_id, bed_id, out_name = None, score_field=None):
"""
Run RTG vcf_eval to compare VCFs.
Return a results dict like:
{
"f1": f1 score... | 029ff152a276e325e34a1522b6aee87ff1ddecd4 | 3,645,378 |
def helicsInputGetBytes(ipt: HelicsInput) -> bytes:
"""
Get the raw data for the latest value of a subscription.
**Parameters**
- **`ipt`** - The input to get the data for.
**Returns**: Raw string data.
"""
if HELICS_VERSION == 2:
f = loadSym("helicsInputGetRawValue")
else:
... | e7d14623490aa77e800d7f1b10c1313a1f1fbf8f | 3,645,379 |
def named_char_class(char_class, min_count=0):
"""Return a predefined character class.
The result of this function can be passed to :func:`generate_password` as
one of the character classes to use in generating a password.
:param char_class: Any of the character classes named in
... | 53f1b580eba6d5ef5ea38bd04606a9fbca2cb864 | 3,645,380 |
import argparse
def parse_args():
"""Parse commandline arguments."""
parser = argparse.ArgumentParser()
parser.add_argument('--minSdkVersion', default='', dest='min_sdk_version',
help='specify minSdkVersion used by the build system')
parser.add_argument('--targetSdkVersion', default='',... | 4ccbb4fa225abbe4eaa249a2dbc537d338559b62 | 3,645,381 |
from typing import Sequence
import torch
def make_grid(spatial_dim: Sequence[int]) -> torch.Tensor:
"""Make the grid of coordinates for the Fourier neural operator input.
Args:
spatial_dim: A sequence of spatial deimensions `(height, width)`.
Returns:
A torch.Tensor with the grid of coor... | bf9c858eb068e3f20db8e736883e8b1e74155763 | 3,645,382 |
import logging
def _default_handlers(stream, logging_level, include_time):
"""Return a list of the default logging handlers to use.
Args:
stream: See the configure_logging() docstring.
include_time: See the configure_logging() docstring.
"""
# Create the filter.
def should_log(record)... | 5aacd076f80b2a7e1649dc1806feba0df883ccfa | 3,645,383 |
import os
def find_spec2d_from_spec1d(spec1d_files):
"""
Find the spec2d files corresponding to the given list of spec1d files.
This looks for the spec2d files in the same directory as the spec1d files.
It will exit with an error if a spec2d file cannot be found.
Args:
spec1d_files (list of ... | 7cad3cbe3679a89e8eac19d14cde9aa499a9c67c | 3,645,384 |
import pkg_resources
from pyscaffold.utils import check_setuptools_version
from pyscaffold.contrib.setuptools_scm import get_version
from pyscaffold.contrib.setuptools_scm.hacks import parse_pkginfo
from pyscaffold.contrib.setuptools_scm.git import parse as parse_git
from pyscaffold.integration import local_version2str... | 33aee76a46493cbe735a64281b5d6641ab433746 | 3,645,385 |
def datedif(ctx, start_date, end_date, unit):
"""
Calculates the number of days, months, or years between two dates.
"""
start_date = conversions.to_date(start_date, ctx)
end_date = conversions.to_date(end_date, ctx)
unit = conversions.to_string(unit, ctx).lower()
if start_date > end_date:
... | 4056af5cbf2f5ff0159a6514e8ee3d09d9f4051d | 3,645,386 |
def tan(data):
    """Compute elementwise tan of data.

    Parameters
    ----------
    data : relay.Expr
        The input data

    Returns
    -------
    result : relay.Expr
        The computed result.
    """
    # Delegate to the operator constructor exposed by the `_make` module.
    return _make.tan(data)
def get_battery_data(battery, user=None, start = None, end = None):
""" Returns a DataFrame with battery data for a user.
Parameters
----------
battery: DataFrame with battery data
user: string, optional
start: datetime, optional
end: datetime, optional
"""
assert isinstance(battery,... | d45e40e89195d099b1c7a02fc033cd665b3b72f6 | 3,645,388 |
import re
import sys
def fix_brushes(brushes, thresh, vmf_in, snaplo, snaphi):
"""
Find and fix brushes with floating point plane coordinates.
Returns a tuple containing the total number of brushes whose coordinates
were rounded, a list of tuples which pairs suspicious brush IDs with the
greatest... | de257913badb6de2ae782aa3b93e8cb3897fb669 | 3,645,389 |
import logging
import traceback
def mutate():
"""
Handles the '/mutate' path and accepts CREATE and UPDATE requests.
Sends its response back, which either denies or allows the request.
"""
try:
logging.debug(request.json)
admission_request = AdmissionRequest(request.json)
r... | 9b3a649eafe228127dfc72ddf8fb346248ccba86 | 3,645,390 |
from typing import List
def generate_options_for_resource_group(control_value=None, **kwargs) -> List:
"""Dynamically generate options for resource group form field based on the user's selection for Environment."""
if control_value is None:
return []
# Get the environment
env = Environment.ob... | 8271d6bf113f18890862835dfd5d0882a7b7490f | 3,645,391 |
def plot_map(fvcom, tide_db_path, threshold=np.inf, legend=False, **kwargs):
"""
Plot the tide gauges which fall within the model domain (in space and time) defined by the given FileReader object.
Parameters
----------
fvcom : PyFVCOM.read.FileReader
FVCOM model data as a FileReader object.... | c73069c67ecda4429c86b6f887cc5fd5a109b10b | 3,645,392 |
from operator import or_
def get_element_block(
xml_string: str,
first_name: str,
second_name: str = None,
include_initial: bool = True,
include_final: bool = True
) -> str:
"""
warning: use great caution if attempting to apply this function,
or anything like it, to tags that that may ... | 426142b5f1e96dc038640305eb918d065c9bdf20 | 3,645,393 |
def eval_eu_loss(ambiguity_values, dfs_ambiguity):
"""Calculate the expected utility loss that results from a setting that
incorporates different levels of ambiguity.
Args:
ambiguity_values (dict): Dictionary with various levels of ambiguity
to be implemented (key = name of scenario).
... | 00b658640b91de4dd48e99eac6437bebafb8e9b1 | 3,645,394 |
def reset(ip: str = None, username: str = None) -> int:
"""
Reset records that match IP or username, and return the count of removed attempts.
This utility method is meant to be used from the CLI or via Python API.
"""
attempts = AccessAttempt.objects.all()
if ip:
attempts = attempts.... | 3e404ef4b32cc0e183e676e7d07137780beaf3f7 | 3,645,395 |
def try_patch_column(meta_column: MetaColumn) -> bool:
"""Try to patch the meta column from request.json.
Generator assignment must be checked for errors.
Disallow column type change when a generator is assigned and when the column
is imported. An error is raised in that case.
"""
if 'col_type... | 0feb5598853b8a5b1cd060bd806f2fcc6afd69f6 | 3,645,396 |
import btrfsutil
import os
def get_subvs(parent):
    """List the entries of *parent* that are btrfs subvolumes.

    :param parent: directory to scan
    :return: list of entry names (relative to *parent*, as returned by
        ``os.listdir``) that are btrfs subvolumes
    """
    # Bug fix: is_subvolume() was called with the bare entry name, which
    # resolved relative to the process CWD rather than *parent*. Join the
    # candidate with *parent* for the check; the returned names stay
    # relative, matching the original return format.
    return [name for name in os.listdir(parent)
            if btrfsutil.is_subvolume(os.path.join(parent, name))]
def readout(x, mask, aggr='add'):
    """
    Pool per-node features into one vector per graph via `aggregate`.

    Args:
        x: (B, N_max, F) node feature tensor.
        mask: (B, N_max) validity mask for padded nodes.
        aggr: aggregation mode forwarded to `aggregate` (default 'add').
    Returns:
        (B, F)
    """
    # Reduce over the node dimension (dim=1) without keeping it.
    return aggregate(x=x, dim=1, aggr=aggr, mask=mask, keepdim=False)
import sys
import six
def debug_ssh(function):
"""Decorator to generate extra debug info in case off SSH failure"""
def wrapper(self, *args, **kwargs):
try:
return function(self, *args, **kwargs)
except tempest.lib.exceptions.SSHTimeout:
try:
original_ex... | bfd3caf911e5d7ecfac9111825ef594413e6bb5f | 3,645,399 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.