| content (string, 35–762k chars) | sha1 (string, 40 chars) | id (int64, 0–3.66M) |
|---|---|---|
def plot_coarray(array, ax=None, show_location_errors=False):
"""Visualizes the difference coarray of the input array.
Args:
array (~doatools.model.arrays.ArrayDesign): A sensor array.
ax (~matplotlib.axes.Axes): Matplotlib axes used for the plot. If not
specified, a new figure will... | e4a0d1fe4ab48b5050c55d44bd4ca4342cc9f9a9 | 13,843 |
def pad_sequence(sequences, batch_first=False, padding_value=0.0):
"""Pad a list of variable-length Variables.
This method stacks a list of variable-length :obj:`nnabla.Variable` s with the padding_value.
:math:`T_i` is the length of the :math:`i`-th Variable in the sequences.
:math:`B` is the batch s... | 449c7681d39edc0494269aefd488aa44548a68df | 13,845 |
import urllib
import yaml
import requests
def _fetch_global_config(config_url, github_release_url, gh_token):
"""
Fetch the index_runner_spec configuration file from the Github release
using either the direct URL to the file or by querying the repo's release
info using the GITHUB API.
"""
if c... | c436bfb7692ce0d100367691588d511ed95bce99 | 13,846 |
def parse_color(c, desc):
    """Accept a value as a color and return it unchanged.

    NOTE(review): no actual validation is performed here and *desc* is
    unused — confirm whether checking was intended.
    """
    return c
def __virtual__():
    """Load this module as 'gnocchiv1' only when its requirements are met."""
    if not REQUIREMENTS_MET:
        # Returning (False, reason) tells the loader why the module is skipped.
        return False, ("The gnocchiv1 execution module cannot be loaded: "
                       "os_client_config or keystoneauth are unavailable.")
    return 'gnocchiv1'
def setup():
"""Start headless Chrome in docker container."""
options = webdriver.ChromeOptions()
options.add_argument('--no-sandbox')
options.add_argument('--headless')
options.add_argument('--disable-gpu')
driver = webdriver.Chrome(options=options)
driver.implicitly_wait(5)
return driv... | 79c135732b39513f270ac0f670ffddc89b576f75 | 13,850 |
import json
def lambdaResponse(statusCode,
body,
headers={},
isBase64Encoded=False):
"""
A utility to wrap the lambda function call returns with the right status code,
body, and switches.
"""
# Make sure the body is a json object
if not... | 0159ba871c38ce550752d47ffea536c33a5d6b3e | 13,851 |
def singleton(class_):
    """Class decorator that makes *class_* behave as a singleton.

    :param class_: the class to wrap
    :return: a factory that always yields the same shared instance
    """
    cache = {}

    def _get_instance(*args, **kwargs):
        # Build the instance lazily on first call, then reuse it forever.
        try:
            return cache[class_]
        except KeyError:
            cache[class_] = class_(*args, **kwargs)
            return cache[class_]

    return _get_instance
from typing import Optional
from typing import Union
from typing import List
from datetime import datetime
def is_datetime(
value: Scalar, formats: Optional[Union[str, List[str]]] = None,
typecast: Optional[bool] = True
) -> bool:
"""Test if a given string value can be converted into a datetime object for... | 642fbe509c7b13a905dc4c65b43dcec20f36fb7e | 13,853 |
def sortkey(d):
    """Return the underscore-separated pieces of *d*, reversed, as a tuple."""
    return tuple(reversed(d.split("_")))
def resolve_stream_name(streams, stream_name):
"""Returns the real stream name of a synonym."""
if stream_name in STREAM_SYNONYMS and stream_name in streams:
for name, stream in streams.items():
if stream is streams[stream_name] and name not in STREAM_SYNONYMS:
return name
... | 48fe2f5eca72b30bd669477807c9b7476eb4ef18 | 13,855 |
def get_split_cifar100_tasks(num_tasks, batch_size,run,paradigm,dataset):
"""
Returns data loaders for all tasks of split CIFAR-100
:param num_tasks:
:param batch_size:
:return:
datasets = {}
# convention: tasks starts from 1 not 0 !
# task_id = 1 (i.e., first task) => start_class = 0, end_class = 4
cifar_... | 003c74a55a4e9a1f645a6bc930abf65342abd0fc | 13,856 |
def is_point_in_triangle(pt, v1, v2, v3):
"""Returns True if the 2D point pt is within the triangle defined by v1-3.
https://www.gamedev.net/forums/topic/295943-is-this-a-better-point-in-triangle-test-2d/
"""
b1 = sign(pt, v1, v2) < 0.0
b2 = sign(pt, v2, v3) < 0.0
b3 = sign(pt, v3, v1) < 0.0
... | 2ff58dfb4efe939513cc901772aa744296ebb960 | 13,857 |
def precomputed_aug_experiment(
clf,
auged_featurized_x_train,
auged_featurized_y_train,
auged_featurized_x_train_to_source_idxs,
auged_featurized_x_test,
auged_featurized_y_test,
auged_featurized_x_test_to_source_idxs,
aug_iter,
train_idxs_scores,... | 50f03f08c7ce0777658ca3f84691b940f190e4cd | 13,858 |
def get_yahoo_data(symbol, start_date, end_date):
"""Returns pricing data for a YAHOO stock symbol.
Parameters
----------
symbol : str
Symbol of the stock in the Yahoo. You can refer to this link:
https://www.nasdaq.com/market-activity/stocks/screener?exchange=nasdaq.
start_date : s... | adc2a6186d96c76a75a62391c7f8d7534836f5bd | 13,859 |
def first_n(m: dict, n: int):
    """Return a new dict holding the first *n* items of *m* (insertion order)."""
    leading_keys = list(m)[:n]
    return {key: m[key] for key in leading_keys}
from typing import List
from typing import Dict
from typing import Union
def listdictnp_combine(
lst: List,
method: str = "concatenate",
axis: int = 0,
keep_nested: bool = False,
allow_error: bool = False,
) -> Dict[str, Union[np.ndarray, List]]:
"""Concatenate or stack a list of dictionaries ... | b4527342c8a3b90c797e7ef88326c97b4933d1b0 | 13,861 |
def find_pure_symbol(symbols, clauses):
"""Find a symbol and its value if it appears only as a positive literal
(or only as a negative) in clauses.
>>> find_pure_symbol([A, B, C], [A|~B,~B|~C,C|A])
(A, True)
"""
for s in symbols:
found_pos, found_neg = False, False
for c in claus... | 657c011fb0ee865252e7deed4672cde08c6db2e9 | 13,862 |
def cross_entropy_emphasized_loss(labels,
predictions,
corrupted_inds,
axis=0,
alpha=0.3,
beta=0.7,
regularizer=None... | e4ebb4e3198dea085789c81388522130ed867e3f | 13,863 |
def get_process_list(node: Node):
"""Analyse the process description and return the Actinia process chain and the name of the processing result
:param node: The process node
:return: (output_objects, actinia_process_list)
"""
input_objects, process_list = check_node_parents(node=node)
output_o... | 00f5e6c767975def09fbea800a8b74cfcd12f935 | 13,864 |
def _validate_image_formation(the_sicd):
"""
Validate the image formation.
Parameters
----------
the_sicd : sarpy.io.complex.sicd_elements.SICD.SICDType
Returns
-------
bool
"""
if the_sicd.ImageFormation is None:
the_sicd.log_validity_error(
'ImageFormatio... | b68c9a767e2499b8149389e2e207a0f05d20bf44 | 13,865 |
def handle_closet(player, level, reward_list):
"""
Handle a closet
:param player: The player object for the player
:param level: The level that the player is on
:return reward: The reward given to the player
"""
# Print the dialogue for the closet
print "You found a closet. It appears t... | ab170cb556fd688edeac80eac9bb7577df771a33 | 13,866 |
def module_path_to_test_path(module):
    """Derive the conventional test filename for a module locator."""
    name = module_path_to_name(module)
    return "test_{}.py".format(name)
def inspect_bom(filename):
    """Look for a byte-order mark at the start of *filename*.

    Returns the encoding implied by the BOM, or None when the file cannot
    be read or carries no recognizable BOM.
    """
    result = None
    try:
        handle = open(filename, "rb")
        try:
            # A BOM is at most 4 bytes long (UTF-32 variants).
            result = has_bom(handle.read(4))
        finally:
            handle.close()
    except Exception:  # pragma: no cover
        # Deliberate best-effort: an unreadable file simply has no BOM.
        pass
    return result
from packaging.specifiers import SpecifierSet
def parse_requirement(text):
"""
Parse a requirement such as 'foo>=1.0'.
Returns a (name, specifier) named tuple.
"""
match = REQUIREMENT_RE.match(text)
if not match:
raise ValueError("Invalid requirement: %s" % text)
name = match.gro... | 95dab6f3dd6784bf73233e80cfb946f904984a1d | 13,869 |
def H_split(k, N, eps):
    """Return the entropy of splitting N items at position k in a binary
    search whose two regions overlap as specified by eps."""
    left_term = (k / N) * (np.log(k) + H_epsilon(k, eps))
    right_term = ((N - k) / N) * (np.log(N - k) + H_epsilon(N - k, eps))
    return left_term + right_term
def date_features(inputs, features_slice, columns_index) -> tf.Tensor:
"""Return an input and output date tensors from the features tensor."""
date = features(inputs, features_slice, columns_index)
date = tf.cast(date, tf.int32)
date = tf.strings.as_string(date)
return tf.strings.reduce_join(date, ... | 3362019b24a6f3104d858d2ddf17f0fae4060d7b | 13,871 |
import pickle
def save_calib(filename, calib_params):
""" Saves calibration parameters as '.pkl' file.
Parameters
----------
filename : str
Path to save file, must be '.pkl' extension
calib_params : dict
Calibration parameters to save
Returns
-------
saved : bool
Saved successfully.
"""
i... | 6735c8a6e96158b9fc580b6e61609b5ae7733fe0 | 13,872 |
def context_to_dict(context):
    """Flatten a django template context into a single plain dict.

    Later layers override keys from earlier ones, matching context lookup.
    """
    merged = {}
    for layer in context:
        merged.update(layer)
    return merged
def returnItemsWithMinSupport(itemSet, transactionList, minSupport, freqSet):
"""calculates the support for items in the itemSet and returns a subset
of the itemSet each of whose elements satisfies the minimum support"""
_itemSet = set()
localSet = defaultdict(int)
for item in it... | e1290778548f198f87fc210c8a78bbfadaf0de9f | 13,874 |
def create_P(P_δ, P_ζ, P_ι):
"""
Combine `P_δ`, `P_ζ` and `P_ι` into a single matrix.
Parameters
----------
P_δ : ndarray(float, ndim=1)
Probability distribution over the values of δ.
P_ζ : ndarray(float, ndim=2)
Markov transition matrix for ζ.
P_ι : ndarray(float, ndim=1)... | 0afdef50c50563421bb7c6f3f928fa6b3e5f4733 | 13,875 |
import attrs
def sel_nearest(
dset,
lons,
lats,
tolerance=2.0,
unique=False,
exact=False,
dset_lons=None,
dset_lats=None,
):
"""Select sites from nearest distance.
Args:
dset (Dataset): Stations SpecDataset to select from.
lons (array): Longitude of sites to in... | ebf22cdeb30215a76312f2cdd8223a2d24bf6af6 | 13,876 |
import datasets
def evaluate(dataset, predictions, gts, output_folder):
"""evaluate dataset using different methods based on dataset type.
Args:
dataset: Dataset object
predictions(dict): each item in the list represents the
prediction results for one image.
gt(dict): Groun... | 85c0232c53de091f2293d042b944fe8768a9ac91 | 13,877 |
from vlescrapertools import getAuthedSession
def html_xml_save(
s=None, possible_sc_link=None, table="htmlxml", course_presentation=None
):
"""Save the HTML and XML for a VLE page page."""
if not possible_sc_link:
# should really raise error here
print("need a link")
if not s:
... | 37bb86769c86d851e3fec8dabc17534dfdecde60 | 13,878 |
def htmr(t,axis="z"):
"""
Calculate the homogeneous transformation matrix of a rotation
respect to x,y or z axis.
"""
from sympy import sin,cos,tan
if axis in ("z","Z",3):
M = Matrix([[cos(t),-sin(t),0,0],
[sin(t),cos(t),0,0],
[0,0,1,0],
... | b3941680f22b2eb48da15b2bb1a6e39c05e3b5c3 | 13,880 |
def vt(n, gm, gsd, dmin=None, dmax=10.):
"""Evaluate the total volume of the particles between two diameters.
The CDF of the lognormal distribution is calculated using equation 8.12
from Seinfeld and Pandis.
Mathematically, it is represented as:
.. math::
V_t=\\frac{π}{6}∫_{-∞}^{∞}D_p^3n... | ba407dc86bbf3201bd597f729f2397ef9428e72b | 13,881 |
def core_value_encode(origin):
"""
转换utf-8编码为社会主义核心价值观编码
:param origin:
:return:
"""
hex_str = str2hex(origin)
twelve = hex2twelve(hex_str)
core_value_iter = twelve_2_core_value(twelve)
return ''.join(core_value_iter) | 7b81540f7e7184ec60fb6820e3548201d67eec29 | 13,882 |
def user_query_ahjs_is_ahj_official_of(self, request, queryset):
"""
Admin action for the User model. Redirects the admin to
a change list of AHJs the selected users are AHJ officials of.
"""
model_name = 'ahj'
field_key_pairs = [field_key_pair('AHJPK', 'AHJPK')]
queryset = AHJUserMaintains.... | 4d97f25f2647a92a9690bf3360bd3fd63b03d631 | 13,883 |
def get_cache_node_count(
cluster_id: str, configuration: Configuration = None, secrets: Secrets = None
) -> int:
"""Returns the number of cache nodes associated to the cluster
:param cluster_id: str: the name of the cache cluster
:param configuration: Configuration
:param secrets: Secrets
:ex... | e4a4b3cd6d0bf7416ffe5a3d86725a614ad1c41c | 13,884 |
import string
def top_sentences(query, sentences, idfs, n):
"""
Given a `query` (a set of words), `sentences` (a dictionary mapping
sentences to a list of their words), and `idfs` (a dictionary mapping words
to their IDF values), return a list of the `n` top sentences that match
the query, ranked ... | 5533b96848baea5afa614e691d2d2ae07c4a16a9 | 13,885 |
def load_distribution(label):
"""Load sample distributions as described by Seinfeld+Pandis Table 8.3.
There are currently 7 options including: Urban, Marine, Rural, Remote
continental, Free troposphere, Polar, and Desert.
Parameters
----------
label : {'Urban' | 'Marine' | 'Rural' | 'Remote C... | 3dfd2fea5c165c331255e3b350e1f92a37919726 | 13,886 |
from typing import List
def split_4d_itk(img_itk: sitk.Image) -> List[sitk.Image]:
"""
Helper function to split 4d itk images into multiple 3 images
Args:
img_itk: 4D input image
Returns:
List[sitk.Image]: 3d output images
"""
img_npy = sitk.GetArrayFromImage(img_itk)
spa... | 21ad4f6c0cbdb05cf6f67469e3d32e732d1500ee | 13,887 |
from bs4 import BeautifulSoup
def parse_results(html, keyword):
"""[summary]
Arguments:
html {str} -- google search engine html response
keyword {str} -- search term
Returns:
pandas.DataFrame -- Dataframe with the following columns ['keyword', 'rank', 'title', 'link', 'domain']
... | 4c89e919b3f3285565efe5bdf5c4ec5b87664c79 | 13,888 |
def maybe_iter_configs_with_path(x, with_params=False):
"""
Like x.maybe_iter_configs_with_path(), but returns [(x, [{}])] or [(x, {}, [{}])] if x is just a config object and not a Tuner object.
"""
if is_tuner(x):
return x.iter_configs_with_path(with_params=with_params)
else:
if wi... | 947a62067f3eacb4d5c8ba419d8018ad2ab3320c | 13,889 |
import typing
def median(vals: typing.List[float]) -> float:
    """Calculate the median value of `vals`.

    For an odd number of values this is the middle element of the sorted
    list; for an even number it is the mean of the two middle elements.
    (The previous implementation indexed `int(len/2) - 1`, which returned
    the wrong element for odd-length input, e.g. median([1, 2, 3]) == 1.)

    Arguments:
        vals {typing.List[float]} -- non-empty list of values

    Returns:
        float -- median value

    Raises:
        ValueError -- if `vals` is empty
    """
    if not vals:
        raise ValueError("median() arg is an empty list")
    ordered = sorted(vals)
    mid = len(ordered) // 2
    if len(ordered) % 2:
        # Odd count: the single middle element is the median.
        return ordered[mid]
    # Even count: average the two middle elements.
    return (ordered[mid - 1] + ordered[mid]) / 2
def melody_mapper(notes):
"""
Makes a map of a melody to be played
each item in the list 'notes' should be formatted using these chars:
duration - length in seconds the sound will be played
note - the note to play
sleep - time in seconds to pause
(note, duration)
... | cf4c8f7864e91e771d3a70bfc4d8a7f4edb38967 | 13,891 |
def sample_bounding_box_scale_balanced_black(landmarks):
"""
Samples a bounding box for cropping so that the distribution of scales in the training data is uniform.
"""
bb_min = 0.9
bb_old = image.get_bounding_box(landmarks)
bb_old_shape = np.array((bb_old[2] - bb_old[0], bb_old[3] - bb_old[1])... | 789cbe92803b77614ab8a018434745b2d9bba3a4 | 13,892 |
def get_trainable_layers(layers):
"""Returns a list of layers that have weights."""
layers = []
# Loop through all layers
for l in layers:
# If layer is a wrapper, find inner trainable layer
l = find_trainable_layer(l)
# Include layer if it has weights
if l.get_weights():... | 2d3f00cb061a6c2ee7081468be564f0b8621441d | 13,894 |
def outcome_from_application_return_code(return_code: int) -> outcome.Outcome:
"""Create either an :class:`outcome.Value` in the case of a 0 `return_code` or an
:class:`outcome.Error` with a :class:`ReturnCodeError` otherwise.
Args:
return_code: The return code to be processed.
Returns:
... | c5b786906e0f3fd99ed6660c55213b18139003c0 | 13,895 |
import re
def group_by_scale(labels):
""" Utility that groups attribute labels by time scale """
groups = defaultdict(list)
# Extract scales from labels (assumes that the scale is given by the last numeral in a label)
for s in labels:
m = re.findall("\d+", s)
if m:
groups[m... | 661ea03f8d463b1e0d5746df60e9e2cb969737ab | 13,896 |
def FontMapper_GetEncodingDescription(*args, **kwargs):
    """FontMapper_GetEncodingDescription(int encoding) -> String"""
    # SWIG-generated module-level wrapper: forwards directly to the native
    # implementation in the _gdi_ extension module. Do not edit by hand.
    return _gdi_.FontMapper_GetEncodingDescription(*args, **kwargs)
from typing import Counter
def create_merged_ngram_dictionaries(indices, n):
"""Generate a single dictionary for the full batch.
Args:
indices: List of lists of indices.
n: Degree of n-grams.
Returns:
Dictionary of hashed(n-gram tuples) to counts in the batch of indices.
"""
ngram_dicts = []... | bd313ea7eab835102e94f6c7d66fec8882531385 | 13,898 |
import base64
def compute_hash_base64(*fields):
    """Hash *fields* and return the digest as a base64-encoded str."""
    digest = compute_hash(*fields)
    encoded = base64.b64encode(digest)
    return encoded.decode()
def triplet_margin_loss(
anchor,
positive,
negative,
margin=0.1,
p=2,
use_cosine=False,
swap=False,
eps=1e-6,
scope='',
reduction=tf.losses.Reduction.SUM
):
"""
Computes the triplet margin loss
Args:
anchor: The tensor containing the anchor embeddings
... | 55e85a9ae98ab57458ae1a61a1dbd445deddd7cb | 13,900 |
def f_raw(x, a, b):
    """Evaluate the linear model a*x + b.

    Performs no checks on the validity of the parameters.
    :return: the model value at x
    """
    return b + a * x
from pyapprox.cython.barycentric_interpolation import \
def compute_barycentric_weights_1d(samples, interval_length=None,
return_sequence=False,
normalize_weights=False):
"""
Return barycentric weights for a sequence of samples. e.g. of seq... | 1711328af31b756c040455e0b03363def08e6504 | 13,902 |
import collections
def _generate_conversions():
"""
Generate conversions for unit systems.
"""
# conversions to inches
to_inch = {'microinches': 1.0 / 1000.0,
'mils': 1.0 / 1000.0,
'inches': 1.00,
'feet': 12.0,
'yards': 36.0,
... | 8fa4f625e693fe352b2bba0082d0b18c46f5bec1 | 13,903 |
def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf):
"""
Rather than attempting to merge files that were modified on both
branches, it marks them as unresolved. The resolve command must be
used to resolve these conflicts."""
return 1 | 278bb52f96e1a82ce9966626be08bc6fdd0df65d | 13,904 |
from typing import Pattern
from typing import Optional
from typing import Callable
from typing import Union
import logging
def parser(
text: str,
*,
field: str,
pattern: Pattern[str],
type_converter: Optional[Callable] = None,
clean_up: Optional[Callable] = None,
limit_size: Optional[int] ... | 0b44fecf252399b3109efedffe0f561809982ea6 | 13,905 |
def colorize(text='', opts=(), **kwargs):
"""
Return your text, enclosed in ANSI graphics codes.
Depends on the keyword arguments 'fg' and 'bg', and the contents of
the opts tuple/list.
Return the RESET code if no parameters are given.
Valid colors:
'black', 'red', 'green', 'yellow', ... | 02ad24710413770cebdaa4265a1d40c69212ecc8 | 13,907 |
from re import T
def get_prediction(img_path, threshold):
"""
get_prediction
parameters:
- img_path - path of the input image
- threshold - threshold value for prediction score
method:
- Image is obtained from the image path
- the image is converted to image tensor ... | d6df91fb464b072b06ef759ad53aa00fb7d624ec | 13,908 |
def make_fixed_size(protein, shape_schema, msa_cluster_size, extra_msa_size,
num_res, num_templates=0):
"""Guess at the MSA and sequence dimensions to make fixed size."""
pad_size_map = {
NUM_RES: num_res,
NUM_MSA_SEQ: msa_cluster_size,
NUM_EXTRA_SEQ: extra_msa_size,... | f74306815dd7cd5291305c7b5c67cae4625c4d38 | 13,909 |
def plot_skymap_tract(skyMap, tract=0, title=None, ax=None):
"""
Plot a tract from a skyMap.
Parameters
----------
skyMap: lsst.skyMap.SkyMap
The SkyMap object containing the tract and patch information.
tract: int [0]
The tract id of the desired tract to plot.
title: st... | a8f1b25d8afedfbb0ed643b7954e615932031419 | 13,910 |
import json
def label(vertex):
    """Render a dot-format ``[label=...]`` attribute for a graph vertex."""
    text = "{} {}\n{}".format(vertex.name, vertex.state or '', vertex.traceback or '')
    # json.dumps gives safe quoting; \l makes dot left-align each line.
    escaped = json.dumps(text).replace("\\n", r"\l")
    return "[label={}]".format(escaped)
import six
import hashlib
def compute_hashes_from_fileobj(fileobj, chunk_size=1024 * 1024):
"""Compute the linear and tree hash from a fileobj.
This function will compute the linear/tree hash of a fileobj
in a single pass through the fileobj.
:param fileobj: A file like object.
:param chunk_siz... | 8c6aed21ae59ecb3e5449ee0856be1d032108aa6 | 13,912 |
def imshow(axim, img, amp_range=None, extent=None,\
interpolation='nearest', aspect='auto', origin='upper',\
orientation='horizontal', cmap='jet') :
"""
extent - list of four image physical limits for labeling,
cmap: 'gray_r'
#axim.cla()
"""
imsh = axim.imshow(img, interpol... | 3483690b01c5d182877c3bf944fa5409d4cb9e69 | 13,913 |
def get_total():
"""
Return the rounded total as properly rounded string.
Credits:
https://github.com/dbrgn/coverage-badge
"""
cov = coverage.Coverage()
cov.load()
total = cov.report(file=Devnull())
class Precision(coverage.results.Numbers):
"""
A class for usin... | 9df511f0d895721061642c2fb88268490e27cc0b | 13,914 |
def _infer_subscript_list(context, index):
"""
Handles slices in subscript nodes.
"""
if index == ':':
# Like array[:]
return ValueSet([iterable.Slice(context, None, None, None)])
elif index.type == 'subscript' and not index.children[0] == '.':
# subscript basically implies ... | bde1de5e7604d51e6c85e429ceb2102d79e91ca6 | 13,915 |
def count_by_guess(dictionary, correctly=False):
"""
Count the number of correctly/incorrectly guessed images for a dataset
:param dictionary:
:param correctly:
:return:
"""
guessed = 0
for response in dictionary:
guessed = guessed + count_by_guess_user(response, correctly)
... | d1328a63d3029707131f1932be1535dabb62ab66 | 13,916 |
def get_game_by_index(statscursor, table, index):
    """Fetch the single row of *table* whose ``num`` column equals *index*."""
    # Table names cannot be bound parameters, so the name is interpolated;
    # the index value itself is passed as a named parameter.
    statscursor.execute(
        "SELECT * FROM " + table + " WHERE num=:num", {'num': index})
    return statscursor.fetchone()
def create_queue():
"""Creates the SQS queue and returns the queue url and metadata"""
conn = boto3.client('sqs', region_name=CONFIG['region'])
queue_metadata = conn.create_queue(QueueName=QUEUE_NAME, Attributes={'VisibilityTimeout':'3600'})
if 'queue_tags' in CONFIG:
conn.tag_queue(QueueUrl=qu... | ae61c542182bc1238b76bf94991e50809bace595 | 13,918 |
def db_describe(table, **args):
"""Return the list of columns for a database table
(interface to `db.describe -c`). Example:
>>> run_command('g.copy', vector='firestations,myfirestations')
0
>>> db_describe('myfirestations') # doctest: +ELLIPSIS
{'nrows': 71, 'cols': [['cat', 'INTEGER', '20'], ... | 6265a2f6dcc26fcd1fcebb5ead23abfb37cfa179 | 13,919 |
def objective_func(x, cs_objects, cs_data):
"""
Define the objective function
:param x: 1D array containing the voltages to be set
:param args: tuple containing all extra parameters needed
:return: average count rate for 100 shots
"""
x = np.around(x,2)
try:
flag_range = 0
... | 677b6455b0db177a3a4f716ced3dd309c711cf74 | 13,920 |
def getHPELTraceLogAttribute(nodename, servername, attributename):
""" This function returns an attribute of the HPEL Trace Log for the specified server.
Function parameters:
nodename - the name of the node on which the server to be configured resides.
servername - the name of the server w... | 8003066ec41ee07dab311690d0687d7f79e6952a | 13,921 |
def dispersionTable(adata):
"""
Parameters
----------
adata
Returns
-------
"""
if adata.uns["ispFitInfo"]["blind"] is None:
raise ("Error: no dispersion model found. Please call estimateDispersions() before calling this function")
disp_df = pd.DataFrame({"gene_id": adata... | 7f7b4c122ffc42402248ec55155c774c77fbad51 | 13,922 |
def L10_indicator(row):
    """Map an L10 value onto one of five qualitative indicator bands."""
    if row < 40:
        return "Excellent"
    if row < 50:
        return "Good"
    if row < 61:
        return "Fair"
    # Above 85 is hazardous; everything in (61, 85] is merely poor.
    return "Poor" if row <= 85 else "Hazard"
def create_ip_record(
heartbeat_df: pd.DataFrame, az_net_df: pd.DataFrame = None
) -> IpAddress:
"""
Generate ip_entity record for provided IP value.
Parameters
----------
heartbeat_df : pd.DataFrame
A dataframe of heartbeat data for the host
az_net_df : pd.DataFrame
Option ... | 63deb15081f933b0a445d22eed25646782af4221 | 13,924 |
import re
def extract_version(version_file_name):
"""Extracts the version from a python file.
The statement setting the __version__ variable must not be indented. Comments after that
statement are allowed.
"""
regex = re.compile(r"^__version__\s*=\s*['\"]([^'\"]*)['\"]\s*(#.*)?$")
with open(v... | 1cc70ba4bf69656bb8d210a49c236e38eba59513 | 13,925 |
def powerlaw_loglike(data, theta):
"""Return the natural logarithm of the likelihood P(data | theta) for our
model of the ice flow.
data is expected to be a tuple of numpy arrays = (x, y, sigma)
theta is expected to be an array of parameters = (intercept, slope)
"""
x, y, sigma = data
n = ... | 98650e66d2a16762b2534be9083b6b92e0d9e9fd | 13,926 |
def get_conv(dim=3):
    """Return the convolution layer class for the given spatial dimensionality.

    Only 2D and 3D convolutions are supported.
    """
    if dim == 2:
        return nn.Conv2d
    if dim == 3:
        return nn.Conv3d
    raise ValueError('dim has to be 2 or 3')
async def create_mute_role(bot, ctx):
"""Create the mute role for a guild"""
perms = discord.Permissions(
send_messages=False, read_messages=True)
mute_role = await ctx.guild.create_role(
name='Muted', permissions=perms,
reason='Could not find a muted role in the process of muting or... | 9128de3a7f4f841e47531699a878a1c18d8be9d5 | 13,930 |
import json
import uuid
def build_request_data(useralias,
req_node):
"""build_request_data
:param useralias: user alias for directory name
:param req_node: simulated request node
"""
if "file" not in req_node:
return None
use_uniques = req_node["unique_names"]... | 938c79c290e1e4c086e6d48f71cbd0b965d36b36 | 13,931 |
def _get_stmt_lists(self):
"""
Returns a tuple of the statement lists contained in this `ast.stmt`
node. This method should only be called by an `ast.stmt` node.
"""
if self.is_simple():
return ()
elif self.is_body():
return (self.body,)
elif self.is_body_orelse():
r... | 0ec85481bc4261ae77ced0ae32c72081ef80c651 | 13,932 |
def get_article(name):
    """Fetch an Article by name, returning None when it does not exist.

    A None name short-circuits to None without touching the database.
    """
    if name is None:
        return None
    try:
        return Article.objects.get(name=name)
    except Article.DoesNotExist:
        return None
def abbreviateLab(lab):
    """Abbreviate a lab name so near-duplicates compare equal.

    Lab names are long and often differ only by punctuation or typos.
    NOTE(review): relies on module-level regexes (apostropheSRe,
    firstLetterRe, spacePunctRe) defined elsewhere in this file.
    """
    abbrev = apostropheSRe.sub('', lab)
    abbrev = firstLetterRe.sub(r'\1', abbrev, count=0)
    abbrev = spacePunctRe.sub('', abbrev, count=0)
    return abbrev
def backproject(depth, intrinsics, instance_mask):
""" Back-projection, use opencv camera coordinate frame.
"""
cam_fx = intrinsics[0, 0]
cam_fy = intrinsics[1, 1]
cam_cx = intrinsics[0, 2]
cam_cy = intrinsics[1, 2]
non_zero_mask = (depth > 0)
final_instance_mask = np.logical_and(insta... | 9828197b646342ec76cc21b1083540d0fe62978f | 13,936 |
def if_any(
_data,
*args,
_names=None,
_context=None,
**kwargs,
):
"""Apply the same predicate function to a selection of columns and combine
the results True if any element is True.
See Also:
[`across()`](datar.dplyr.across.across)
"""
if not args:
args = (None,... | 41bf4a14cc8b16845f7d0dd8138871a7ccfad66f | 13,937 |
def Gaussian(y, model, yerr):
"""Returns the loglikelihood for a Gaussian distribution.
In this calculation, it is assumed that the parameters
are true, and the loglikelihood that the data is drawn from
the distribution established by the parameters is calculated
Parameters
----------
model... | d9eaa41b95006a9d17907582b804a4921f672141 | 13,940 |
def clean_us_demographics(us_demographics_spark, spark_session):
"""
Clean data from us_demographics
Args:
us_demographics (object): Pyspark dataframe object
spark_session (object): Pyspark session
Returns:
(object): Pyspark dataframe with cleaned data
"""
s... | dcf812bf64a2f6c3b908d895488e1a57e1729301 | 13,941 |
from datetime import datetime
def parse_date(date=None):
"""
Parse a string in YYYY-MM-DD format into a datetime.date object.
Throws ValueError if input is invalid
:param date: string in YYYY-MM-DD format giving a date
:return: a datetime.date object corresponding to the date given
"""
if... | a4c6cef85dabd445dd308fdd5f2c20a38accd6de | 13,942 |
def status():
    """Handle an incoming status forwarded by ForwardServerProvider."""
    payload = jsonex_loads(request.get_data())
    result = g.provider._receive_status(payload['status'])
    return {'status': result}
def get_feature_extractor_info():
    """Return a (pretrained feature extractor, best input image size) pair."""
    extractor = get_pretrained_feature_extractor()
    return extractor, K_MODEL_IMAGE_SIZE
import networkx
def nx_find_connected_limited(graph, start_set, end_set, max_depth=3):
"""Return the neurons in end_set reachable from start_set with limited depth."""
reverse_graph = graph.reverse()
reachable = []
for e in end_set:
preorder_nodes = list(
(
network... | 4322f4231be73b575d05442f09608c71c3b9f605 | 13,945 |
def hexbyte_2integer_normalizer(first_int_byte, second_int_btye):
"""Function to normalize integer bytes to a single byte
Transform two integer bytes to their hex byte values and normalize
their values to a single integer
Parameters
__________
first_int_byte, second_int_byte : int
inte... | a3bbe75014b6e08607314b615440039bab245f04 | 13,946 |
def make_window(signal, sample_spacing, which=None, alpha=4):
"""Generate a window function to be used in PSD analysis.
Parameters
----------
signal : `numpy.ndarray`
signal or phase data
sample_spacing : `float`
spacing of samples in the input data
which : `str,` {'welch', 'han... | 5ef18c990225b6610ee10c848ab4ee0b2ce0fc9b | 13,950 |
from typing import Dict
from typing import Union
def set_units(
df: pd.DataFrame, units: Dict[str, Union[pint.Unit, str]]
) -> pd.DataFrame:
"""Make dataframe unit-aware. If dataframe is already unit-aware, convert to specified
units. If not, assume values are in specified unit.
Parameters
------... | 8a0cf821e3e0d1ba7b1b8c3dbdddb5f517ea0acb | 13,951 |
def address_repr(buf, reverse: bool = True, delimit: str = "") -> str:
    """Hexlify *buf* into an uppercase string, optionally byte-reversed.

    Each byte is rendered as two hex digits; *delimit* is inserted
    between bytes.
    """
    if reverse:
        indices = range(len(buf) - 1, -1, -1)
    else:
        indices = range(len(buf))
    parts = ["%02X" % buf[i] for i in indices]
    return delimit.join(parts)
import re
def _highlight(line1, line2):
"""Returns the sections that should be bolded in the given lines.
Returns:
two tuples. Each tuple indicates the start and end of the section
of the line that should be bolded for line1 and line2 respectively.
"""
start1 = start2 = 0
match = re.search(r'\S', ... | d9bf7667e24d21e6f91b656af0697765c2b74f55 | 13,953 |
def get_detected_objects_new(df, siglim=5, Terr_lim=3, Toffset=2000):
"""
Get a dataframe with only the detected objects.
:param df: A DataFrame such as one output by get_ccf_summary with N > 1
:param siglim: The minimum significance to count as detected
:param Terr_lim: The maximum number of standa... | 7662086053c093b9eb19ffe7c56f5cf7914b1ab8 | 13,955 |
def cmp(a, b):
"""
Python 3 does not have a cmp function, this will do the cmp.
:param a: first object to check
:param b: second object to check
:return:
"""
# convert to lower case for string comparison.
if a is None:
return -1
if type(a) is str and type(b) is str:
a... | c82837a0d8887f55fdd1175b5d828742529b3e37 | 13,956 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.