content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def apply_fixes(args, tmpdir):
"""Calls clang-apply-fixes on a given directory."""
invocation = [args.clang_apply_replacements_binary]
if args.format:
invocation.append('-format')
if args.style:
invocation.append('-style=' + args.style)
invocation.append(tmpdir)
subprocess.call(i... | 5,351,900 |
def __get_ll_type__(ll_type):
    """
    Look up the definition entry for *ll_type* in ``__LL_TYPES__``.

    Returns the matching entry, or ``None`` when the value is unknown.
    """
    # NOTE(review): assumes each __LL_TYPES__ entry stores the lltype
    # value at index 1 -- confirm against the table definition.
    matches = [entry for entry in __LL_TYPES__ if entry[1] == ll_type]
    assert len(matches) < 2, 'Duplicate linklayer types.'
    return matches[0] if matches else None
def is_cloaked(path, names):
""" Return True if this is likely to be a cloaked encrypted post """
fname = unicoder(os.path.split(path)[1]).lower()
fname = os.path.splitext(fname)[0]
for name in names:
name = os.path.split(name.lower())[1]
name, ext = os.path.splitext(unicoder(name))
... | 5,351,902 |
def dump_recarray(filename, recarray):
    """
    Dump a recarray to an ESV file.

    :param filename: destination path for the ESV output
    :param recarray: record array to serialise
    """
    esv = ESV.from_recarray(recarray)
    esv.dump_file(filename)
def warm_restart(ctx, redis_unix_socket_path):
"""warm_restart-related configuration tasks"""
# Note: redis_unix_socket_path is a path string, and the ground truth is now from database_config.json.
# We only use it as a bool indicator on either unix_socket_path or tcp port
use_unix_socket_path = bool(re... | 5,351,904 |
def parse_email_body(email_body, client_path):
"""
Parses email body to extract magnet link. Once the link is found,
the torrent client is launched and begins downloading the torrent.
After the download completes, a confirmation SMS message is sent.
:param str email_body: body of the email
:par... | 5,351,905 |
def slice_node(node, split):
"""Splits a node up into two sides.
For text nodes, this will return two text nodes.
For text elements, this will return two of the source nodes with children
distributed on either side. Children that live on the split will be
split further.
Parameters
-------... | 5,351,906 |
def main(args):
"""
Use FindRouteLeaks and print leaks detected on stdout or save to file.
"""
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("pfx_file",
help="full path of file with prefixes data - "
"from prepare_dat... | 5,351,907 |
async def geo(ctx, *, ip):
"""looks up an ip address"""
#above is the description for the command
#runs the command
try:
#gets ip address
ip_address = socket.gethostbyname(ip)
#sends the info about the ip
await ctx.send(await lookup_ip(ip_address))
#message if there is socket error aka if there is no su... | 5,351,908 |
def cascade_visibility_down(element, visibility_mode):
"""Sets visibility for all descendents of an element. (cascades down)."""
# Does nothing to given element.
# Find all related objects, and set them all to the appropriate visibility mode.
links = [rel.get_accessor_name() for rel in element._met... | 5,351,909 |
def k_radius(x,centroids):
"""
Maximal distance between centroids and corresponding samples in partition
"""
labels = partition_labels(x,centroids)
radii = []
for idx in range(centroids.shape[0]):
mask = labels == idx
radii.append(
np.max(
np.linalg.n... | 5,351,910 |
def build_feature_df(data, default=True, custom_features={}):
"""
Computes the feature matrix for the dataset of components.
Args:
data (dataset): A mapping of {ic_id: IC}. Compatible with the dataset representaion produced by load_dataset().
default (bool, optional): Determines wether to c... | 5,351,911 |
def update_uid_digests_cache(uid, digest):
"""
Updates uid_digest cache, also updates rd_digest and rd_digest_dict cache also.
"""
debug = False
try:
if debug:
print '\n debug -- Entered update_uid_digests_cache...'
dump_dict(digest, debug)
# Get the cache; I... | 5,351,912 |
def decode_complex(data, complex_names=(None, None)):
""" Decodes possibly complex data read from an HDF5 file.
Decodes possibly complex datasets read from an HDF5 file. HDF5
doesn't have a native complex type, so they are stored as
H5T_COMPOUND types with fields such as 'r' and 'i' for the real and
... | 5,351,913 |
def CLYH(
directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
load_edge_weights = True, auto_enable_tradeoffs = True,
sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-05-29", **kwargs
) -> Graph:
"""Re... | 5,351,914 |
def mock_exception_run_publish_pricing_update(
        **kwargs):
    """mock_exception_run_publish_pricing_update

    Test stub that always fails so callers can exercise their
    error-handling paths.

    :param kwargs: keyword args dict (ignored)
    """
    message = 'test throwing mock_exception_run_publish_pricing_update'
    raise Exception(message)
def tif_to_array(
filename,
image_descriptions=False,
verbose=False,
):
"""Load a tif into memory and return it as a numpy array.
This is primarily a tool we use to interact with ImageJ, so that's
the only case it's really been debugged for. I bet somebody made
nice python bindings for ... | 5,351,916 |
def lineParPlot(parDict, FigAx=None, **kwargs):
"""
Plot the results of lineParameters().
Parameters
----------
parDict : dict
The relevant parameters:
xPerc : tuple, (xPerc1, xPerc2)
Left and right x-axis values of the line profile at perc% of the peak flux.
Xc ... | 5,351,917 |
def prepare_data_arrays(tr_df, te_df, target):
"""
tr_df: train dataset made by "prepare_dataset" function
te_df: test dataset made by "prepare_dataset" function
target: name of target y
return: (numpy array of train dataset),
(numpy array of test dataset: y will be filled with NaN),
... | 5,351,918 |
def read_input(files):
    """Print all FILES file names."""
    # Echo each name on its own line via click's stdout wrapper.
    for name in files:
        click.echo(name)
def revoke_task(task):
    """Recursively revoke *task* and all of its children.

    Children are revoked first (depth-first), then the task itself.
    Revocation failures are deliberately ignored: the task may already
    have completed or been revoked.
    """
    if task.children:
        for child in task.children:
            revoke_task(child)
    try:
        # terminate=True also stops a task that is already executing.
        task.revoke(terminate=True)
    except Exception:
        # Best-effort only; never catch system-exiting exceptions
        # (the original bare `except:` swallowed KeyboardInterrupt too).
        pass
def download_git_repo(repo: str):
"""
Download remote git repo
"""
local_filename = repo.split('/')[-1]
class CloneProgress(RemoteProgress):
def update(self, op_code, cur_count, max_count=None, message=''):
if message:
print(message)
td = tempfile.mkdtemp()
... | 5,351,921 |
def _logMe():
"""Add log entries to test the handler."""
logger.info("Solange Norwegen nicht untergeht,")
logger.warning("gilt hier warten, und weiter rufen:")
logger.debug("LAND IN SICHT!")
logger.error("Du kannst nicht bleiben, kannst nicht gehen,")
logger.critical("dich nicht ertragen, gerade... | 5,351,922 |
def export_model(model, clf_path = '../models/disaster_response_clf.pkl'):
"""
Function: save model as pickle file
Args:
model (GridSearch obj): trained and tuned classifier model
clf_path (str): path of pickle file destination
Return:
None
"""
with open(clf_path, 'wb') a... | 5,351,923 |
def unique_bytestring_gen():
"""Generates unique sequences of bytes.
"""
characters = (b"abcdefghijklmnopqrstuvwxyz"
b"0123456789")
characters = [characters[i:i + 1] for i in irange(len(characters))]
rng = random.Random()
while True:
letters = [rng.choice(characters) fo... | 5,351,924 |
def move_vvol_shadow_vm_from_aggr_to_aggr(ds_info, aggr1, aggr2, lun, vm):
"""
suggest a move of a vm from one na aggr to another and adjust aggr and ds usage values accordingly
"""
# IMPORTANT: we can only keep track about the state for the aggr and the ds
# and not for the fvol as we do... | 5,351,925 |
def plot(model_set, actual_mdot=True, qnuc=0.0, verbose=True, ls='-', offset=True,
bprops=('rate', 'fluence', 'peak'), display=True, grid_version=0):
"""Plot predefined set of mesa model comparisons
model_set : int
ID for set of models (defined below)
"""
mesa_info = get_mesa_set(model... | 5,351,926 |
def test_cliargs_0(tmp_path_):
"""Test default parameters."""
parser = mtscomp_parser()
args = ['somefile']
pargs, config = _args_to_config(parser, args)
assert config.algorithm == 'zlib'
assert config.check_after_compress
assert config.check_after_decompress
assert config.do_time_diff
... | 5,351,927 |
def main():
"""Main documentation builder script."""
parser = ArgumentParser(
description="build GGRC documentation",
)
parser.add_argument(
'-c', '--clean',
action='store_true',
default=False,
help='clean cache before build',
dest='clean',
)
parser.add_argument(
'-... | 5,351,928 |
def preprocessing_fn(batch):
"""
Standardize, then normalize sound clips
"""
processed_batch = []
for clip in batch:
signal = clip.astype(np.float64)
# Signal normalization
signal = signal / np.max(np.abs(signal))
# get pseudorandom chunk of fixed length (from SincNe... | 5,351,929 |
def Py_Main(space, argc, argv):
"""The main program for the standard interpreter. This is made available for
programs which embed Python. The argc and argv parameters should be
prepared exactly as those which are passed to a C program's main()
function (converted to wchar_t according to the user's loc... | 5,351,930 |
def generate_random_bond_list(atom_count, bond_count, seed=0):
"""
Generate a random :class:`BondList`.
"""
np.random.seed(seed)
# Create random bonds between atoms of
# a potential atom array of length ATOM_COUNT
bonds = np.random.randint(atom_count, size=(bond_count, 3))
# Clip bond ty... | 5,351,931 |
def open_cosmos_files():
"""
This function opens files related to the COSMOS field.
Returns:
A lot of stuff. Check the code to see what it returns
"""
COSMOS_mastertable = pd.read_csv('data/zfire/zfire_cosmos_master_table_dr1.1.csv',index_col='Nameobj')
ZF_cat = ascii.read('d... | 5,351,932 |
def convert_image_points_to_points(image_positions, distances):
"""Convert image points to 3d points.
Returns:
positions
"""
hypotenuse_small = numpy.sqrt(
image_positions[:, 0]**2 +
image_positions[:, 1]**2 + 1.0)
ratio = distances / hypotenuse_small
n = image_positions... | 5,351,933 |
def W(i, j):
"""The Wilson functions.
:func:`W` corresponds to formula (2) on page 16 in `the technical paper`_
defined as:
.. math::
W(t, u_j)= \\
e^{-UFR\cdot (t+u_j)}\cdot \\
\left\{ \\
\\alpha\cdot\min(t, u_j) \\
-0.5\cdot e^{-\\... | 5,351,934 |
def set_boot_flag(dev_path, use_mbr=False):
    """Set modern or legacy boot flag."""
    # MBR disks take the plain 'boot' flag; GPT uses 'legacy_boot'.
    flag = 'boot' if use_mbr else 'legacy_boot'
    run_program(['sudo', 'parted', dev_path, 'set', '1', flag, 'on'])
def get_id(ctx):
    """
    Print the METS unique identifier, if the workspace defines one.
    """
    workspace = Workspace(
        ctx.resolver,
        directory=ctx.directory,
        mets_basename=basename(ctx.mets_url),
    )
    mets_id = workspace.mets.unique_identifier
    if mets_id:
        print(mets_id)
def reddening_fm(wave, ebv=None, a_v=None, r_v=3.1, model='f99'):
"""Determines a Fitzpatrick & Massa reddening curve.
Parameters
----------
wave: ~numpy.ndarray
wavelength in Angstroms
ebv: float
E(B-V) differential extinction; specify either this or a_v.
a_v: float
A(V... | 5,351,937 |
def cross_entropy(Y, P):
    """A function that takes as input two lists Y, P,
    and returns the float corresponding to their cross-entropy.

    Y holds binary labels (0 or 1); P holds the predicted
    probabilities for class 1.  Computes

        -sum(y * ln(p) + (1 - y) * ln(1 - p))

    The original body was missing entirely (it returned None);
    this implements the behavior the docstring describes.

    :raises ValueError: if any probability is 0 or 1 where the
        corresponding logarithm is undefined (math domain error).
    """
    from math import log  # local import keeps module-level deps unchanged
    return float(-sum(y * log(p) + (1 - y) * log(1 - p)
                      for y, p in zip(Y, P)))
def test_cummax_multi_dims(data_type):
"""
Feature: Op Cummax
Description: test Cummax operator with multiple dimension.
Expectation: the result match expectation.
"""
op = "Cummax"
axis = 1
x = [[[6, 11, 4, 9, 15], [1, 2, 14, 13, 15], [15, 10, 6, 13, 6], [9, 4, 11, 10, 11]],
[[... | 5,351,939 |
def render_web_page(file_directory, ihl_config_file_path, current_date):
"""
Writing to html files for each IHL.
Check all the IHL's names and the filepath
Load config file from ihlconfig.json which contains details of the IHLs.
:param file_directory:
:param ihl_config_file_path:
:param current_date: datetime.da... | 5,351,940 |
def list_registered_stateful_ops_without_inputs():
"""Returns set of registered stateful ops that do not expect inputs.
This list is used to identify the ops to be included in the state-graph and
that are subsequently fed into the apply-graphs.
Returns:
A set of strings.
"""
return set([
name
... | 5,351,941 |
def load_frame_from_video(path: str, frame_index: int) -> np.ndarray:
    """load a full trajectory video file and return a single frame from it"""
    # Load the whole clip, then index out the requested frame.
    return load_video(path)[frame_index]
def get_settings_patterns(project_id: int) -> Dict[str, str]:
    """Returning project patterns settings"""
    settings = ProjectSettings.objects.get(project_id=project_id)
    patterns: Dict[str, str] = {}
    # Map each configured pattern string to its regex.
    for entry in settings.trackPatterns:
        patterns[entry['pattern']] = entry['regex']
    return patterns
def video_to_array(filepath):
"""Process the video into an array."""
cap = cv2.VideoCapture(filepath)
num_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
channel = 3
frame_buffer = np.empty((num_frames, height... | 5,351,944 |
def test__Ticker_history(requests_mock):
"""Ticker instance history."""
COLUMNS = ["Open", "High", "Low", "Close", "Volume"]
tid = "_yf_history_IBM"
resp, data, params = fetchTestData(yf.endpoints.responses.bundle.responses, tid)
# params = {'period': 'max', 'auto_adjust': True, 'back_adjust': False... | 5,351,945 |
async def test_10_request(requests_mock: Mock) -> None:
"""Test `async request()`."""
result = {"result": "the result"}
rpc = RestClient("http://test", "passkey", timeout=0.1)
def response(req: PreparedRequest, ctx: object) -> bytes: # pylint: disable=W0613
assert req.body is not None
... | 5,351,946 |
def ncvue(ncfile='', miss=np.nan):
"""
The main function to start the data frame GUI.
Parameters
----------
ncfile : str, optional
Name of netcdf file (default: '').
miss : float, optional
Add value to list of missing values: _FillValue, missing_value,
and the standard n... | 5,351,947 |
def entmax15(X, axis=-1, k=None):
"""1.5-entmax: normalizing sparse transform (a la softmax).
Solves the optimization problem:
max_p <x, p> - H_1.5(p) s.t. p >= 0, sum(p) == 1.
where H_1.5(p) is the Tsallis alpha-entropy with alpha=1.5.
Parameters
----------
X : paddle.Tensor
... | 5,351,948 |
def aprint(artname, number=1):
"""
Print 1-line art.
:param artname: artname
:type artname : str
:param number: number of repeats
:type number: int
:return: None
"""
try:
if artname == "UnicodeEncodeError":
raise UnicodeEncodeError(
'test', u"", 4... | 5,351,949 |
def test_cholesky_inverse():
"""Checks that our Cholesky inverse matches `torch.cholesky_inverse()`."""
torch.autograd.set_detect_anomaly(True)
batch_dims = (5,)
matrix_dim = 3
L = fannypack.utils.tril_from_vector(
torch.randn(
batch_dims + (fannypack.utils.tril_count_from_matrix... | 5,351,950 |
def build(target, method: list, dataset_name, limit: int, number_of_topics):
"""
Build page.
:param target: Target file
:param method: List of methods to use.
:param limit: Limit processing into N candidates.
"""
click.echo("Loading dataset ... ", nl=False)
dataset = importlib.import_... | 5,351,951 |
def get_dependency_graph(node, targets=None):
"""Returns the dependent nodes and the edges for the passed in node.
:param str node: The node to get dependencies for.
:param list targets: A list with the modules that are used as targets.
:return: The dependency graph info.
:rtype: GraphInfo
"""... | 5,351,952 |
def is_submodule_repo(p: Path) -> bool:
    """Return True when *p* is a gitlink file referencing ``.git/modules``.

    A submodule checkout has a plain-text ``.git`` *file* (not a
    directory) whose ``gitdir:`` entry points into the superproject's
    ``.git/modules`` tree.
    """
    return p.is_file() and '.git/modules' in p.read_text()
def shift_contig(df2, remove):
"""
The function append shifted fragment from
sort_cluster_seq function.
Parameters
----------
df2 : pandas DataFrame
DataFrame NRPS cluster fragment.
remove : list
List of cluster fragment, which should removed.
Returns
-------
df... | 5,351,954 |
def repos(repo_mapping = {}):
"""Adds external repositories/archives needed by eventuals (phase 1).
Args:
repo_mapping: passed through to all other functions that expect/use
repo_mapping, e.g., 'git_repository'
"""
stout_atomic_backoff_repos(
external = False,
repo_m... | 5,351,955 |
def main():
    """Join the functions."""
    # Pipeline: identify the input, unpack it, then normalise the format.
    input_identification()
    zip_extract()
    file_format()
def get_color_cycle(n=None):
"""Return the matplotlib color cycle.
:param Optional[int] n:
if given, return a list with exactly n elements formed by repeating
the color cycle as necessary.
Usage::
blue, green, red = get_color_cycle(3)
"""
import matplotlib as mpl
cyc... | 5,351,957 |
def test_database_connection(test_data: dict):
    """
    Test database connection using the database connection string.

    :param test_data: Database test data.
    :type test_data: dict
    """
    # Delegates the actual connectivity check to the shared helper,
    # which is expected to raise/fail on a bad connection.
    check_connection(test_data)
def _bocs_consistency_mapping(x):
"""
This is for the comparison with BOCS implementation
:param x:
:return:
"""
horizontal_ind = [0, 2, 4, 7, 9, 11, 14, 16, 18, 21, 22, 23]
vertical_ind = sorted([elm for elm in range(24) if elm not in horizontal_ind])
return x[horizontal_ind].reshape((I... | 5,351,959 |
def get_document(name, key):
    """Get document from Database"""
    # Build the collection via the factory, then fetch the keyed document.
    factory = Constructor()
    collection = factory.factory(kind='Collection', name=name)
    return Document(collection).get_document(key)
def numero_3():
    """numero_3"""
    # check50 harness: feed two numbers, expect exactly "0" and exit 0.
    run = check50.run("python3 numeros_introescos.py")
    run = run.stdin("999\n1000", prompt=False)
    run.stdout("0", regex=False).exit(0)
def predict() -> str:
"""predict the movie genres based on the request data"""
cur = db_connection.cursor()
try:
input_params = __process_input(request.data)
input_vec = vectorizer.transform(input_params)
prediction = classifier.predict(input_vec)
predictions = binarizer.inve... | 5,351,962 |
def SignificanceWeights(serializer, decay):
"""Multiplies a binary mask with a symbol significance mask."""
def significance_weights(mask):
# (repr,) -> (batch, length, repr)
# significance = [0, 1, 2]
significance = serializer.significance_map
assert significance.shape[0] == mask.shape[2]
# sig... | 5,351,963 |
def lint(ctx):
"""Validate the code style (e.g. undefined names)"""
try:
importlib.import_module("flake8")
except ImportError:
sys.exit("You need to ``pip install flake8`` to lint")
# We use flake8 with minimal settings
# http://pep8.readthedocs.io/en/latest/intro.html#error-codes
... | 5,351,964 |
def check_fun_inter_allocation(fun_inter, data, **kwargs):
"""Check allocation rules for fun_inter then returns objects if check"""
out = None
check_allocation_fun_inter = get_allocation_object(data, kwargs['xml_fun_inter_list'])
if check_allocation_fun_inter is None:
check_fe = check_fun_elem_d... | 5,351,965 |
def kernel_bw_lookup(
compute_device: str,
compute_kernel: str,
caching_ratio: Optional[float] = None,
) -> Optional[float]:
"""
Calculates the device bandwidth based on given compute device, compute kernel, and
caching ratio.
Args:
compute_kernel (str): compute kernel.
comp... | 5,351,966 |
def spkltc(targ, et, ref, abcorr, stobs):
"""
Return the state (position and velocity) of a target body
relative to an observer, optionally corrected for light time,
expressed relative to an inertial reference frame.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkltc_c.html
:param ... | 5,351,967 |
async def test_availability_without_topic(hass, mqtt_mock):
    """Test availability when no availability topic is defined."""
    await help_test_availability_without_topic(
        hass,
        mqtt_mock,
        binary_sensor.DOMAIN,
        DEFAULT_CONFIG,
    )
def test_mime_db_conversion(testname):
    """Test if the convert process of mime-db structure
    to new one is performed correctly.
    """
    source_db, expected = CONVERSION_TESTCASES[testname]
    converted = convert_mime_db(source_db)
    assert converted == expected
def create_container(
container_image: str,
name: str = None,
volumes: t.List[str] = None,
) -> str:
"""Create a new working container from provided container image.
Args:
container_image (str): The container image to start from.
name (str, optional): The container name.
vol... | 5,351,970 |
def single_data_path(client, node_id):
"""
In order for a shrink to work, it should be on a single filesystem, as
shards cannot span filesystems. Return `True` if the node has a single
filesystem, and `False` otherwise.
:arg client: An :class:`elasticsearch.Elasticsearch` client object
:rtype:... | 5,351,971 |
def main():
"""Make a jazz noise here"""
args = get_args()
items = args.items
sortList = args.sorted
i = len(items) - 1
if sortList:
items.sort()
if len(items) == 1:
print("You are bringing {}.".format(items[0]))
elif len(items) == 2:
print("You are bringing {}... | 5,351,972 |
def sortorder(obj):
"""
Trys to smartly determine the sort order for this object ``obj``
"""
if hasattr(obj, 'last'):
return obj.last.timestamp()
if isinstance(obj, str):
# First assume pure numeric
try:
return float(obj)
except ValueError:
pa... | 5,351,973 |
def get_more_details_of_post(post_url: str) -> json:
"""
:param post_url: the url of an imgur post
:return: Details like Virality-score, username etc in JSON format
"""
details = {}
try:
request = HTMLSession().get(post_url)
# some times, request isn't properly made, hence call ... | 5,351,974 |
def save_totals(
year_range,
totals,
entry_list,
count,
category_list,
category_txt,
category_txt2,
filename
):
"""
Write out a bunch of report data to a spreadsheet report.
Report will be a 2D matrix:
- X-Axis = school year
- Y-Axis = 'Category' (FIPS Code, D... | 5,351,975 |
def subkey_public_pair_chain_code_pair(public_pair, chain_code_bytes, i):
"""
Yield info for a child node for this node.
public_pair:
base public pair
chain_code:
base chain code
i:
the index for this node.
Returns a pair (new_public_pair, new_chain_code)
"""
i_... | 5,351,976 |
def get_registry_description(metaprefix: str) -> Optional[str]:
"""Get the description for the registry, if available.
:param metaprefix: The metaprefix of the registry
:return: The description for the registry, if available, otherwise ``None``.
>>> get_registry_description('prefixcommons')
'A reg... | 5,351,977 |
def evaluate(model, valid_exe, valid_ds, valid_prog, dev_count, metric):
"""evaluate """
acc_loss = 0
acc_top1 = 0
cc = 0
for feed_dict in tqdm.tqdm(
multi_device(valid_ds.generator(), dev_count), desc='evaluating'):
if dev_count > 1:
loss, top1 = valid_exe.run(
... | 5,351,978 |
def _get_rank(player):
"""Get the rank of a player"""
cursor = _DB.cursor()
try:
cursor.execute("SELECT score FROM scores WHERE player = ?", (player.lower(),))
rows = cursor.fetchall()
if not rows:
return 0
ps = rows[0][0]
cursor.execute("SELECT count(*) F... | 5,351,979 |
def ensure_branch(c, branch, repo, remote=None, fork=None, base=None):
"""
:param fork: used to validate existing remote branch is correct
using None will assume that there is no fork and branch is on 'remote'
"""
ensure_cloned_repo(c, repo)
if remote is not None:
ensure_remote(c, ... | 5,351,980 |
def getDataFromFileList(filedir):
"""
Reads all data from each file to one big data set ordered as:
[[info],[[residue],[data]]]
"""
data = []
filelist =os.listdir(filedir)
print("Loading data from data dir\n")
if len(filelist)>0:
print("DataFiles included:\n ----------------------------------")
else:
pri... | 5,351,981 |
def spec(func):
    """return a string with Python function specification"""
    rendered = pydoc.plain(pydoc.render_doc(func))
    # Line 0 is the "Python Library Documentation" banner, line 1 is
    # blank, line 2 carries the signature itself.
    return rendered.splitlines()[2]
def brm_weights(P, r, Γ, X):
"""Bellman-residual minimization fixed-point weights.
TODO: Need to actually go through the details to make sure this is right
"""
assert linalg.is_stochastic(P)
assert X.ndim == 2
assert len(X) == len(P)
ns = len(P)
Γ = as_diag(Γ, ns)
Λ = as_diag(Λ, ns)... | 5,351,983 |
def file_age(file_name):
    """
    Returns the age of a file in seconds from now. -1 if the file does not exist.

    :param file_name: file name
    .. versionadded:: 9.3.1
    """
    # EAFP: stat the file directly instead of exists()+getmtime(),
    # which was racy -- the file could vanish between the two calls.
    try:
        mtime = os.path.getmtime(file_name)
    except OSError:
        return -1
    return time.time() - mtime
def test_stage_exceptions(t, l, i, b, se):
"""
***Purpose***: Test if correct exceptions are raised when attributes are
assigned unacceptable values.
"""
s = Stage()
data_type = [t, l, i, b, se]
for data in data_type:
print 'Using: %s, %s' % (data, type(data))
if not isi... | 5,351,985 |
def IsVirus(mi, log):
"""Test: a virus is any message with an attached executable
I've also noticed the viruses come in as wav and midi attachements
so I trigger on those as well.
This is a very paranoid detector, since someone might send me a
binary for valid reasons. I white-list everyone who's... | 5,351,986 |
def action_reaction():
"""Run experiments with and without the action-reaction assumption."""
folder = 'action_reaction/'
env = HardSpheres(num_obj=15, width=250)
for asymmetric in (True, False):
exp = FullArch(env, steps=3000, lr=0.1, asymmetric=asymmetric)
losses, coll_losses = exp.ru... | 5,351,987 |
def to_curl(request, compressed=False, verify=True):
"""
Returns string with curl command by provided request object
Parameters
----------
compressed : bool
If `True` then `--compressed` argument will be added to result
"""
parts = [
('curl', None),
('-X', request.me... | 5,351,988 |
def prox_trace_indicator(a, lamda):
    """Time-varying latent variable graphical lasso prox.

    Soft-thresholds the eigenvalues of the symmetric matrix *a* at
    *lamda* (clipped at zero) and reassembles the matrix.
    """
    eigvals, eigvecs = np.linalg.eigh(a)
    shrunk = np.maximum(eigvals - lamda, 0)
    return np.linalg.multi_dot((eigvecs, np.diag(shrunk), eigvecs.T))
def get_gamma_non_jitted(esys):
"""Get log gamma
Returns
-------
float[:]
"""
if isinstance(esys.species[0].logc, float):
v = np.empty(len(esys.species))
else:
v = np.empty(len(esys.species), dtype=object)
for i, sp in enumerate(esys.species):
v[i] = 10.0 ** (sp.... | 5,351,990 |
def active_matrices_from_extrinsic_euler_angles(
basis1, basis2, basis3, e, out=None):
"""Compute active rotation matrices from extrinsic Euler angles.
Parameters
----------
basis1 : int
Basis vector of first rotation. 0 corresponds to x axis, 1 to y axis,
and 2 to z axis.
... | 5,351,991 |
def save(image, parent=None):
""" Save an image with appropriate dialogs (file selector)
Return the chosen save path or None
parent : parent wx Window for dialogs
"""
dialog = ImageFileDialog(parent, style=wx.FD_SAVE|wx.FD_OVERWRITE_PROMPT)
if dialog.ShowModal() !... | 5,351,992 |
def runQuery(scenarioID):
"""
Run a query that aquires the data from the lrs for one specific dialoguetrainer scenario
\n
:param scenarioID: The id of the scenario to request the data from \t
:type scenarioID: int \n
:returns: The data for that scenario or error \t
:rtype: [Dict<string, mixe... | 5,351,993 |
def is_spaceafter_yes(line):
    """
    SpaceAfter="Yes" extracted from line.

    Returns False when the MISC column is '_' (no features);
    raises ValueError when no SpaceAfter feature is found.
    """
    # NOTE(review): assumes line[-1] is the MISC column of a CoNLL-U
    # style token row -- confirm against the caller's row layout.
    if line[-1] == "_":
        return False
    for feature in line[MISC].split("|"):
        key, value = feature.split("=")
        if key == "SpaceAfter":
            return value == "Yes"
    raise ValueError
def linear_scheduler(optimizer, warmup_steps, training_steps, last_epoch=-1):
"""linear_scheduler with warmup from huggingface"""
def lr_lambda(current_step):
if current_step < warmup_steps:
return float(current_step) / float(max(1, warmup_steps))
return max(
0.0,
... | 5,351,995 |
def MajorityVoteN(qubits,
nrounds,
prep=[],
meas_delay=1e-6,
add_cals=False,
calRepeats=2):
"""
Majority vote across multiple measurement results (same or different qubits)
Parameters
----------
qubits : Chann... | 5,351,996 |
def numerox_example():
"""
Example of how to prepare a submission for the Numerai tournament.
It uses Numerox which you can install with: pip install numerox
For more information see: https://github.com/kwgoodman/numerox
"""
# download dataset from numerai, save it and then load it
data = n... | 5,351,997 |
def getRatios(vect1, vect2):
"""Assumes: vect1 and vect2 are equal length lists of numbers
Returns: a list containing the meaningful values of
vect1[i]/vect2[i]"""
ratios = []
for index in range(len(vect1)):
try:
ratios.append(vect1[index]/vect2[index])
excep... | 5,351,998 |
def sampleset():
    """Return list with 50 positive and 10 negative samples"""
    positives = [(0, idx) for idx in range(50)]
    negatives = [(1, idx) for idx in range(10)]
    return positives + negatives
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.