content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def reference_info(ref_img=None):
"""Check if reference image exists and provide information to users."""
if ref_img:
if not os.path.isfile(ref_img):
raise ValueError("""Reference image doesn't exist.
Check --reference_image.""")
else:
logger.debug("Using refe... | 5,354,300 |
def has_no_duplicates(input_):
    """Return True when every element of *input_* occurs exactly once.

    For example:
        ['aa', 'bb', 'cc'] is valid.
        ['aa', 'bb', 'aa'] is not valid: 'aa' appears more than once.
    """
    # A set collapses duplicates, so the lengths differ iff any repeat exists.
    unique_items = set(input_)
    return len(input_) == len(unique_items)
def test_create_existing(mp_tmpdir):
"""Run mapchete create and execute."""
temp_mapchete = os.path.join(mp_tmpdir, "temp.mapchete")
temp_process = os.path.join(mp_tmpdir, "temp.py")
out_format = "GTiff"
# create files from template
args = [
"create",
temp_mapchete,
temp_... | 5,354,302 |
def process_mean_plots(E, modelconfig, datakey, orientation, mean_name):
"""Generates latitudal / lontitudal / monthly mean plots.
Seasonal values are extracted for lat/lon plots. """
experiment = 'SouthernHemisphere'
plot_dir = E.get_plot_dir()
verbosity = E.get_verbosity()
areas = []
plot_... | 5,354,303 |
def collate_ribocounts_in_df(srp_df, region_type):
"""Collect all ribocount files inside a directory and write them as a dataframe"""
srp = srp_df.study_accession.tolist()[0]
srp_assembly_grouped = srp_df.groupby("assembly")
for assembly, srp_assembly_df in srp_assembly_grouped:
srp_path = srp_a... | 5,354,304 |
def uplab_to_renotation_specification(spec, lab):
"""Convert a color in the normalized UP LAB space to its equivalent Munsell color.
Parameters
----------
lab : np.ndarray of shape (3,) and dtype float
The `l', `a-star` and `b-star` values for the color, with `l` in the domain [0, 1],
and `... | 5,354,305 |
def test_set_convert2tfrecord(input_seq, output_tfrec, kmer, vocab, seq_type):
"""Converts reads to tfrecord, and saves to output file.
Args:
input_seq: string, path to the input fasta or fastq file.
output_tfrec: string, path to the output tfrecord file.
kmer: int, size of k for reads s... | 5,354,306 |
def inv_solve(ridges, XTX, Xtrain=None, Ytrain=None, Xtest=None, Ytest=None,
XTY=None, weights=None, predictions=None, performance=None,
verbose=False, metric='r'):
"""solve ridge problem using cho inversion
Parameters
----------
- [see solve_ridge() and inv_factorize()]
... | 5,354,307 |
def string_type_check(valid_strings, case_sensitive = True, metavar = None):
""" Creates an argparse type for a list of strings.
The passed argument is declared valid if it is a valid string which exists
in the passed list valid_strings. If case_sensitive is False, all input
strings and strings in val... | 5,354,308 |
def _getBestSize(value):
"""
Give a size in bytes, convert it into a nice, human-readable value
with units.
"""
if value >= 1024.0**4:
value = value / 1024.0**4
unit = 'TB'
elif value >= 1024.0**3:
value = value / 1024.0**3
unit = 'GB'
elif value >= 1024... | 5,354,309 |
def _tpl(repository_ctx, tpl, substitutions = {}, out = None):
    """Generate a build file for bazel based upon the `tpl` template.

    The template is read from //llvm:<tpl>.tpl; when `out` is not given
    the output file is named after the template itself.
    """
    destination = out
    if not destination:
        destination = tpl
    repository_ctx.template(
        destination,
        Label("//llvm:%s.tpl" % tpl),
        substitutions,
    )
def initialize():
    """Configure Plone instance using RelStorage."""
    env = Environment()
    env.setup()
async def subscribe(ctx, *subreddit):
"""
This command will 'subscribe' to a reddit and will make posts from it.
Usage: r/sub <subreddit>
Ex. r/sub news funny husky
Permissions required: Administrator
:param ctx:
:param subreddit:
:return:
"""
sid = ctx.message.server.id
subs... | 5,354,312 |
def write_nc_uniform(topography, fobj, format='NETCDF3_64BIT_OFFSET'):
"""
Write topography into a NetCDF file.
Parameters
----------
topography : :obj:`SurfaceTopography`
The topography to write to disk.
fobj : str or stream
Name of the NetCDF file or file stream
format : s... | 5,354,313 |
def enable_oeenclave_debug(oe_enclave_addr):
"""For a given OE enclave, load its symbol and enable debug flag for all its TCS"""
enclave = oe_debug_enclave_t(oe_enclave_addr)
# Check if magic matches
if not enclave.is_valid():
return False
# No version specific checks.
# The contract w... | 5,354,314 |
def loadxrmcresult_xmimsim(xmimsimpath, outradix="out", convoluted=False):
"""XRMC result based on input files converted from XMIMSIM"""
xrmcoutpath = os.path.join(xmimsimpath, "xrmc", "output")
if convoluted:
suffix = "_convoluted"
else:
suffix = "_lines"
return loadxrmcresult(xrmco... | 5,354,315 |
def create_line_segments(df, x="lon", y="lat", epsg=4269):
"""Creates a GeodataFrame of line segments from the
shapes dataframe (CRS is NAD83)
Params:
df (DataFrame): pandas DataFrame
x, y (str, optional) Default values x="lon", y="lat",
column names fo... | 5,354,316 |
def population_correlation(data_matrix, x_index, y_index):
"""
data_matrix is a numpy multi-dimensional array (matrix)
x_index and y_index are the index for the first and second variables respectively
it returns the correlation between two variables in a data_matrix
"""
transposed_data = data_ma... | 5,354,317 |
def upload(workspace: str, table: str) -> Any:
"""
Store a nested_json tree into the database in coordinated node and edge tables.
`workspace` - the target workspace.
`table` - the target table.
`data` - the nested_json data, passed in the request body.
"""
# Set up the parameters.
data... | 5,354,318 |
def validate_google_login(email):
"""
Validate a login completed via Google, returning the user id on success.
An ``ODPIdentityError`` is raised if the login cannot be permitted for any reason.
:param email: the Google email address
:raises ODPUserNotFound: if there is no user account for the give... | 5,354,319 |
def add_months(start_date, months, date_format=DATE_FORMAT):
    """Return *start_date* shifted forward by the given number of months,
    formatted as a string.

    Example: 31/1/2020 + 1 month = 29/2/2020 (one business month).
    """
    # relativedelta clamps to the last valid day of the target month
    shifted = start_date + relativedelta(months=+months)
    return shifted.strftime(date_format)
def inVolts(mv):
    """Convert millivolts to volts... you know, to keep the API
    consistent."""
    volts = mv / 1000.0
    return volts
def test_repository_get_changes_in_a_commit(git_repository):
"""Test getting changes in a commit with multiple change types."""
commit = git_repository.get_commit(revision="8853e0c")
changes = {c.a_path: c for c in commit.get_changes()}
assert "M" == changes["A"].change_type
assert "A" == changes[... | 5,354,322 |
def extract_hit(
hit: Mapping[str, Any],
includes: Tuple[str] = (ID_FIELD,),
source: str = '_source'
) -> Mapping[str, Any]:
"""
Extract a document from a single search result hit.
:param hit: the search hit document
:param includes: the metadata keys to include in the return do... | 5,354,323 |
def ChangeLookAndFeel(index):
"""
:param index:
"""
# global LOOK_AND_FEEL_TABLE
if sys.platform == 'darwin':
print('*** Changing look and feel is not supported on Mac platform ***')
return
# look and feel table
try:
colors = LOOK_AND_FEEL_TABLE[in... | 5,354,324 |
def test_schema_component_equality_operators():
"""Test the usage of == for Column, Index and MultiIndex."""
column = Column(Int, Check(lambda s: s >= 0))
index = Index(Int, [Check(lambda x: 1 <= x <= 11, element_wise=True)])
multi_index = MultiIndex(
indexes=[
Index(Int,
... | 5,354,325 |
def deserialize_wrapper(func, data):
"""
Convert generic productmd exceptions into validation errors.
"""
try:
func(data)
except KeyError as e:
raise serializers.ValidationError(
{'detail': 'Error parsing productmd metadata.',
'reason': 'Missing key %s' % e.m... | 5,354,326 |
def test_invalid_response_check(check, instance, aggregator):
"""
Testing invalid fargate metadata payload.
"""
with mock.patch('datadog_checks.ecs_fargate.ecs_fargate.requests.get', return_value=MockResponse("{}", 200)):
check.check(instance)
aggregator.assert_service_check("fargate_check"... | 5,354,327 |
def generateUserIDToken(id):
    """Generates a unique user id token.

    Mixes the caller-visible ip (module global), a millisecond timestamp,
    a random nonce and the given id, then returns the MD5 hex digest.
    """
    timestamp = int(time.time() * 1000)
    nonce = int(random.random() * 100000000000000000)
    payload = "%s %s %s %s" % (ip, timestamp, nonce, id)
    return md5(payload.encode('utf-8')).hexdigest()
def grad_simplex_monomial_basis(dims, n):
"""Return the gradients of the functions returned by
:func:`simplex_monomial_basis`.
:returns: a :class:`tuple` of functions, each of which
accepts arrays of shape *(dims, npts)*
and returns a :class:`tuple` of length *dims* containing
the ... | 5,354,329 |
def test_claims_check():
"""
arg={'required': True, 'id_token': ['auth_time']}
"""
_info = setup_conv()
conv = _info['conv']
# Need IdToken
conv.events.store(EV_PROTOCOL_RESPONSE, ACCESS_TOKEN_RESPONSE_1)
chk = ClaimsCheck()
kwargs = {'required': True, 'id_token': ['auth_time']}
... | 5,354,330 |
def get_collection() -> Collection:
    """Return the module-level collection used to store models."""
    return _COLLECTION
def webp_convert(ifile, ofile, m=6, q=90):
    """WebP: convert *ifile* to WebP format, writing the result to *ofile*.

    m is the cwebp compression method (0-6) and q the quality (0-100);
    conversion failures are logged rather than raised.
    """
    command = [bpath.wppath, '-m', str(m), '-q', str(q), ifile, '-o', ofile]
    try:
        subprocess.check_output(command, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as err:
        logexc(err, "cwebp conversion failed")
def ask_for_flasherhwver():
"""
Ask for the flasher version, either 1 or 2 right now...
"""
#if FLASHER_SKIP_ON_VALID_DETECTION and FLASHER_VERSION != 1:
# return FLASHER_VERSION
FLASHER_VERSION = 1
flash_version = FLASHER_VERSION
if FLASHER_VERSION is None:
while True... | 5,354,333 |
def real_to_complex_channels(x, separate_real_imag=False):
""" Inverse of complex_as_real_channels: C*2 real channels (or 2*C if separate_real_imag) to C complex channels. """
if separate_real_imag:
channel_shape = (2, -1)
permute = (0, 2, 3, 4, 1)
else:
channel_shape = (-1, 2)
... | 5,354,334 |
def create_coffee_machine() -> CoffeeMachine:
    """Build a fully stocked CoffeeMachine instance for testing."""
    machine = CoffeeMachine()
    # Top up every consumable so tests start from a known-full state.
    machine.refill_water()
    machine.refill_milk()
    machine.refill_coffee_beans()
    return machine
def validate_mqtt_vacuum(value):
    """Validate MQTT vacuum schema."""
    # Dispatch to the legacy or state platform schema based on the
    # configured schema key; unknown keys raise KeyError as before.
    schema_by_kind = {LEGACY: PLATFORM_SCHEMA_LEGACY, STATE: PLATFORM_SCHEMA_STATE}
    selected_schema = schema_by_kind[value[CONF_SCHEMA]]
    return selected_schema(value)
def detect(iring, mode, axis=None, *args, **kwargs):
"""Apply square-law detection to create polarization products.
Args:
iring (Ring or Block): Input data source.
mode (string):
``'scalar': x -> real x.x*``
``'jones': x,y -> complex x.x* + 1j*y.y*, x.y*``
... | 5,354,337 |
def url_download_interactive(url, output_file, title='', chunk_size=102400):
"""
Interactively downloads a given file url to a given output file.
:type url: string
:param url: URL for the file to be download
:type output_file: string
:param output_file: file name or absolute path on which to sa... | 5,354,338 |
def DiffedUpdateItem(
Table: TableResource, Key: ItemKey, before: InputItem, after: InputItem, **kwargs
) -> InputItem:
"""Safe top-level diff update that requires only 'before' and 'after' dicts.
By calling this you are trusting that we will make a choice about
whether or not you actually have an upda... | 5,354,339 |
def is_watchdog_supported():
    """Return ``True`` if watchdog is available.

    Uses :func:`importlib.util.find_spec` so availability can be probed
    without actually importing (and executing) the watchdog package.
    """
    import importlib.util

    return importlib.util.find_spec("watchdog") is not None
def test_concat_incompatible_cols(test_pd_df):
"""Check that calling concat on a single-item list returns identical object"""
df1 = IamDataFrame(test_pd_df)
test_pd_df["extra_col"] = "foo"
df2 = IamDataFrame(test_pd_df)
match = "Items have incompatible timeseries data dimensions"
with pytest.ra... | 5,354,341 |
def set_bit(arg1, x, bit, y):
    """
    set_bit(Int_ctx arg1, Int_net x, unsigned int bit, Int_net y) -> Int_net

    Parameters
    ----------
    arg1: Int_ctx
    x: Int_net
    bit: unsigned int
    y: Int_net
    """
    # Thin pass-through to the native extension module; this looks like an
    # auto-generated (e.g. SWIG-style) binding, so keep the signature in
    # sync with the underlying C API rather than adding logic here.
    return _api.set_bit(arg1, x, bit, y)
def get_time_format(format='medium', locale=LC_TIME):
"""Return the time formatting patterns used by the locale for the specified
format.
>>> get_time_format(locale='en_US')
<DateTimePattern u'h:mm:ss a'>
>>> get_time_format('full', locale='de_DE')
<DateTimePattern u'HH:mm:ss zzzz'>
:param... | 5,354,343 |
def test_list_posts(mock_client):
"""list_posts should return a generator of posts"""
client = api.Api(UserFactory.create())
posts = client.list_posts("channel", DEFAULT_LISTING_PARAMS)
assert posts == mock_client.subreddit.return_value.hot.return_value
mock_client.subreddit.return_value.hot.assert_... | 5,354,344 |
def resource_type_service(resource_type):
    """Gets the service name from a resource type.

    :exc:`ValueError` is raised if the resource type is invalid, see
    :func:`parse_resource_type`.

    >>> resource_type_service('AWS::ECS::Instance')
    'ECS'
    """
    # parse_resource_type yields the components of "Provider::Service::Kind";
    # the service name sits at position 1.
    components = parse_resource_type(resource_type)
    return components[1]
def loads(ss):
""" loads(ss)
Load a struct from the given string.
Parameters
----------
ss : (Unicode) string
A serialized struct (obtained using ssdf.saves()).
"""
# Check
if not isinstance(ss, basestring):
raise ValueError('ssdf.loads() expec... | 5,354,346 |
def guess_mime_type(file_object: IO) -> str:
    """Guess the MIME type of *file_object* from its filename extension.

    Falls back to ``application/octet-stream`` when the extension is
    not recognised.
    """
    guessed, _encoding = mimetypes.guess_type(file_object.name)
    return guessed or "application/octet-stream"
def postcount_test(metadict_friends):
"""Среднее число постов по выборке, чтобы выделить активных/неактивных неймфагов."""
all_postcount = 0
for namefag in metadict_friends.keys():
name_number = namefag[0]
name_postcount = cursor.execute("SELECT postcount FROM namefags WHERE number=?"\
... | 5,354,348 |
def cli(ctx, debug):
"""
This is a tool to generate an excel file based on a provided source excel and transformation mapping
"""
log_format = '%(asctime)s|%(levelname)s|%(name)s|(%(funcName)s):-%(message)s'
logging.basicConfig(level=logging.DEBUG if debug else logging.INFO, stream=sys.stdout, forma... | 5,354,349 |
def extract_stack_name(fields):
"""_extract_stack_name(self, fields: list[str]) -> str
Extract a stack name from the fields
Examples:
ffffffff818244f2 [unknown] ([kernel.kallsyms]) -> [kernel.kallsyms]
1094d __GI___libc_recvmsg (/lib/x86_64-linux-gnu/libpthread-2.23.so) -> __GI__libc_recvms... | 5,354,350 |
def parse_args(argv):
"""Parse any command line arguments."""
# Set the default logging level to DEBUG
# log_level = logging.INFO
log_level = logging.DEBUG
# This is the dictionary of arguments.
arg_dict = {'start_date': DEFAULT_START_DATE,
'end_date': DEFAULT_END_DATE,
... | 5,354,351 |
def parse(data: Data, config: RawConfigParser):
"""Parses all subjects documents.
:param data: data object
:type data: Data
:param config: config from config file
:type config: RawConfigParser
"""
while True:
subject = data.get_not_parsed()
if subject is None: # break if no... | 5,354,352 |
def get_pafy_stream_obj(url,format=None,only_video=False):
"""This function return stream object from pafy
Arguments:
url {string} -- The url of the video from youtube
Returns:
Stream_Obj -- This is a object of Stream class from pafy
"""
try:
obj = pafy.new(url)
... | 5,354,353 |
def walk_storage(path, topdown=True, onerror=None, followlinks=False,
storage=default_storage):
"""
Generate the file names in a stored directory tree by walking the tree
top-down.
For each directory in the tree rooted at the directory top (including top
itself), it yields a 3-tupl... | 5,354,354 |
def check_reynolds_number(Re):
"""Reynolds number must be between 38e3 and 4e6
Parameters
----------
Re : float
Reynolds number
Raises
------
ValueError
If the value of the Reynolds number is outside the defined layers.
"""
if not (Re_list[0] <= Re <= Re_... | 5,354,355 |
def create_fixtures(model_names, excludes=[], from_file=False):
"""Create json fixtures
Parameters:
model_names (list of str): names of models to create fixtures. If empty, create all.
excludes (list of str): names of models to exclude
from_file (boolean): True - create from xlsx file, ... | 5,354,356 |
def valid_shape(shape):
    """
    @returns: True if given shape is a valid tetris shape
    """
    # Membership in SHAPES plus the single-character requirement
    # (the length check guards against multi-character matches).
    known = shape in SHAPES
    return known and len(shape) == 1
def get_device(device_id):
"""
@api {get} /devices/:device_id Get Unique Device
@apiVersion 1.0.0
@apiName GetDevice
@apiGroup Device
@apiSuccess {Boolean} success Request status
@apiSuccess {Object} message Respond payload
@apiSuccess {Object} message.device Device object
"""
... | 5,354,358 |
def read_imgs(filename, num_images):
"""读入图片数据
:param filename:
:param num_images:
:return:
"""
with gzip.open(filename) as bytestream:
bytestream.read(16)
buf = bytestream.read(
28 * 28 * num_images * 1)
data = np.frombuffer(buf, dtype=np.uint8)
data... | 5,354,359 |
def test_toAssembly():
"""For testing toAssembly function"""
assert toAssembly(000) == 'HLT'
assert toAssembly(101) == 'ADD 1'
assert toAssembly(202) == 'SUB 2'
assert toAssembly(303) == 'STA 3'
assert toAssembly(404) == 'LDA 4'
assert toAssembly(505) == 'BRA 5'
assert toAssembly(606) ==... | 5,354,360 |
def all_faces(coord, connect):
""" Gets vertices of all faces of the mesh.
Args:
coord (:obj:`numpy.array`): Coordinates of the element.
connect (:obj:`numpy.array`): Element connectivity.
Returns:
Corresponding nodes.
"""
nodes_per_face = np.array([connect[:, [1,2,3... | 5,354,361 |
def _resource_path_dev(relative_path):
"""
:return: Package relative path to resource
"""
base_path = os.path.dirname(os.path.abspath(__file__))
return os.path.join(base_path, relative_path) | 5,354,362 |
def test_default_transfomer_visits_tokens():
    """Ensure a default ``SemselParser`` uses the appropriate token-visiting
    tree transformer."""
    default_parser = SemselParser()
    transformer = default_parser.transformer
    assert isinstance(transformer, SemselTransformer)
    assert transformer.__visit_tokens__
def edit_assignment(request_ctx, course_id, id, assignment_name=None, assignment_position=None, assignment_submission_types=None, assignment_allowed_extensions=None, assignment_turnitin_enabled=None, assignment_turnitin_settings=None, assignment_peer_reviews=None, assignment_automatic_peer_reviews=None, assignment_noti... | 5,354,364 |
def alpha_097(code, end_date=None, fq="pre"):
    """Alpha factor 097 (from the alpha-191 factor set).

    Formula:
        STD(VOLUME, 10)

    Inputs:
        code: stock pool (securities to evaluate)
        end_date: query date
        fq: price adjustment mode (default "pre" -- presumably
            pre-adjusted prices; confirm against JQData docs)

    Outputs:
        the factor values
    """
    end_date = to_date_str(end_date)
    # Resolve this function's own name ("alpha_097") so the backend knows
    # which of the 191 factors to compute.
    func_name = sys._getframe().f_code.co_name
    # NOTE: **locals() deliberately forwards exactly code, end_date, fq and
    # func_name -- do not introduce extra local variables in this function.
    return JQDataClient.instance().get_alpha_191(**locals())
def weights_init(layer):
    """
    weights initialization

    Linear and BatchNorm1d layers get weights drawn from N(0.0, 0.02)
    and biases set to zero; all other layer types are left untouched.

    Args :
        --layer: one layer instance
    """
    if isinstance(layer, (t.nn.Linear, t.nn.BatchNorm1d)):
        # Layers built with affine=False have no weight tensor.
        if layer.weight is not None:
            t.nn.init.normal_(layer.weight, 0.0, 0.02)  # we use 0.02 as initial value
        # Layers built with bias=False / affine=False have no bias tensor;
        # the unguarded original crashed on them.
        if layer.bias is not None:
            t.nn.init.constant_(layer.bias, 0.0)
def filter(p):
    """Translate a sequence of vocabulary indices into the corresponding
    words.

    Stops at the first stop tag; padding tags are skipped.
    """
    words = []
    for token in p:
        if token == stop_tag:
            break
        if token == padding_tag:
            continue
        words.append(index_word[token])
    return words
def multimodal(seed: tp.Optional[int] = None, para: bool = False) -> tp.Iterator[Experiment]:
"""Experiment on multimodal functions, namely hm, rastrigin, griewank, rosenbrock, ackley, lunacek,
deceptivemultimodal.
0 or 5 dummy variable per real variable.
Base dimension 3 or 25.
Budget in 3000, 1000... | 5,354,368 |
def assert_almost_equal(actual: numpy.ndarray, desired: List[Union[complex, int]]):
    """
    Signature stub -- appears to record how this API is called downstream
    (body is intentionally empty).

    usage.scipy: 6
    """
    ...
def get_sequin_annots(sequin_path, ref_contigs, quiet=False):
"""
Load all genes in the Sequin table as SeqRecords, fetching their sequence data from the reference.
ref_contigs is a dictionary of ref contig sequences created with BioPython's SeqIO.to_dict().
For documentation on the Sequin table fo... | 5,354,370 |
def transformer_decoder_layer(dec_input,
enc_output,
slf_attn_bias,
dec_enc_attn_bias,
n_head,
d_key,
d_value,
... | 5,354,371 |
def replace_word_choice(sentence: str, old_word: str, new_word: str) -> str:
"""Replace a word in the string with another word.
:param sentence: str - a sentence to replace words in.
:param old_word: str - word to replace
:param new_word: str - replacement word
:return: str - input sentence with ne... | 5,354,372 |
def append_slash(url):
"""Make sure we append a slash at the end of the URL otherwise we
have issues with urljoin Example:
>>> urlparse.urljoin('http://www.example.com/api/v3', 'user/1/')
'http://www.example.com/api/user/1/'
"""
if url and not url.endswith('/'):
url = '{0}/'.format(url)
... | 5,354,373 |
def find_cards(thresh_image):
"""Finds all card-sized contours in a thresholded camera image.
Returns the number of cards, and a list of card contours sorted
from largest to smallest."""
# Find contours and sort their indices by contour size
dummy, cnts, hier = cv2.findContours(thresh_image, cv... | 5,354,374 |
def test_version():
    """Check that PyProject and __version__ are equivalent."""
    pyproject_text = Path('pyproject.toml').read_text()
    poetry_version = tomli.loads(pyproject_text)['tool']['poetry']['version']
    assert poetry_version == __version__
def plot_substructure_PMF(path, temps_to_plot, legend = True, legend_loc=None, upper_cutoff=25, integrate_out = [],states_to_plot = 'All' , linewidth = 1.3, alpha = 0.4,legend_fontsize = 40, ax = None, y_space=0.5, labelsize = 35,fontsize=30, label_fontsize=20, markersize = 120, temp_norm = 1):
"""
path indicat... | 5,354,376 |
def _count_partial_errors(client: GoogleAdsClient,
conversion_upload_response) -> int:
"""Counts the partial errors in the GAds response.
Args:
client: A GoogleAdsClient instance
conversion_upload_response: Google Upload Conversion service response.
Returns:
An intege... | 5,354,377 |
def upgrade():
    """Upgrade database schema and/or data, creating a new revision."""
    # Grant the new roles on every scoping object type.
    _add_roles_for_objects(SCOPING_OBJECTS, NEW_ROLES)
    # Propagate the newly added roles through the access-control hierarchy;
    # with_update=True presumably re-applies propagation to existing
    # entries as well -- confirm against acr_propagation.
    acr_propagation.propagate_roles(
        acr_constants.GGRC_NEW_ROLES_PROPAGATION,
        with_update=True
    )
def cumulative_similarity(atoms, representations,
threshold=0.98):
"""
"""
u_representations = [representations[0]]
s_idxs = [0]
for i, representation in enumerate(representations[1:]):
i += 1
similar = merge_asymmetric_similarity(atoms,
[representation],
... | 5,354,379 |
def generate_job_performance_data(
job: models.StoredJob,
si: storage.StorageInterface,
types: List[models.JobDataTypeEnum],
performance_granularity: Optional[models.PerformanceGranularityEnum],
) -> Generator[pd.DataFrame, None, None]:
"""Generator to fetch job performance data at the inverter leve... | 5,354,380 |
def get_project_by_id(project_id):
    """
    Retrieve a project by its Id. Returns None if no project is found.
    """
    try:
        project = Project.objects.get(pk=project_id)
    except Project.DoesNotExist:
        # Missing rows are an expected outcome, not an error.
        return None
    return project
def create_colorbar(
labels: pd.DataFrame,
tree: CassiopeiaTree,
colormap: Dict[str, Tuple[int, int, int]],
dataset_name: str,
output_directory: str = ".tmp/",
create_legend: bool = False,
) -> str:
"""Creates a colorbar file for the iTOL batch uploader
Creates a colorbar file for iTOL ... | 5,354,382 |
def one_c(rand_gen):
"""
KS Test
:param rand_gen:
:return:
"""
# Now need to do the ks test
# This calculates the value for KS at given points
def ks_test(z):
if z == 0:
return 1
elif z < 1.18: # Numerically optimal cutoff
block = ((np.exp((-1. *... | 5,354,383 |
def compare_command_xml(wanted, command, **kwargs):
    """Create a Broadworks XML command fragment from the arguments

    :param wanted: the expected XML fragment
    :param command: name of the Broadworks command to instantiate
    :param kwargs: parameters forwarded to the command constructor
    """
    cmd = api.get_command_object(command, **kwargs)
    # Compare the command's XML serialisation against the expected
    # fragment (check_command_xml presumably asserts on mismatch).
    check_command_xml(wanted, cmd)
def divide_into_sentences(
text: str, num_of_senteces: int, is_reversed: bool = False, offset: int = 0
) -> str:
"""
This function divides the text into sentences and returns either the first X sentences or the last X sentences.
"""
tokens_sent = nltk.sent_tokenize(text)
# fix uncorrect dialog ... | 5,354,385 |
def cat_files(files, output):
    """Concatenate the contents of several files into one output stream.

    Args:
        files: A list of filenames
        output: A file-like object in which all the data should be copied.
    """
    for path in files:
        with open(path, 'r') as source:
            shutil.copyfileobj(source, output)
def play(env):
"""
run this function in order to create a window and be able to play
this environment.
env: CarRacing env
"""
from pyglet.window import key
discretize = env.discretize_actions
if discretize == None:
a = np.array( [0.0, 0.0, 0.0] )
else:
a =... | 5,354,387 |
def intersection(bbox1: BoundingBox,
bbox2: BoundingBox) -> BoundingBox:
"""
Calculate the intersection of two bounding boxes.
"""
assert bbox1.x_min <= bbox1.x_max
assert bbox1.y_min <= bbox1.y_max
assert bbox2.x_min <= bbox2.x_max
assert bbox2.y_min <= bbox2.y_max
... | 5,354,388 |
def maybe_load_checkpoint(train_loop_rngs: jnp.ndarray,
save_checkpoint_path: str,
init_optimizer: flax.optim.Optimizer,
init_params: Params,
init_fixed_model_states: Optional[Params],
defau... | 5,354,389 |
def transfer_meta_data(path_in, path_out):
"""Read input meta data and write it to the configuration file"""
ds = qpformat.load_data(path=path_in)
cfg = config.ConfigFile(path_out)
sec = cfg["meta"]
for key in sorted(META_MAPPER):
dskey, mult = META_MAPPER[key]
if (key not in sec or ... | 5,354,390 |
def svg_to_clipboard(string):
""" Copy a SVG document to the clipboard.
Parameters
----------
string : basestring
A Python string containing a SVG document.
"""
if isinstance(string, unicode_type):
string = string.encode('utf-8')
mime_data = QtCore.QMimeData()
mime_data... | 5,354,391 |
def test_if_tech_defined(enduse_fueltypes_techs):
"""Test if a technology has been configured,
i.e. a fuel share has been assgined to one of the
fueltpyes in `fuel_shares`.
Arguments
---------
enduse_fueltypes_techs : dict
Configured technologies and fuel shares of an enduse
Return... | 5,354,392 |
def _fanTriangles(vertices, faces=None):
"""Create triangles by fanning out from vertices. Returns a
generator for vertex triplets. If faces is None, assume that
vertices are planar and indicate a polygon; otherwise, use the
face indices given in faces."""
vertices = np.asarray(vertices);
if fa... | 5,354,393 |
def segment_rings(region, seeds, neighbor_lists, step=1, background_value=-1,
verbose=False):
"""
Iteratively segment a region of surface mesh as concentric segments.
Parameters
----------
region : list of integers
indices of region vertices to segment (such as a fold)
... | 5,354,394 |
def equal_spacing(L,w,justify="right"):
"""Print a single string with the elements of the list spaced out"""
s = ""
if justify == "right" or justify == "r":
for i in L:
s += f"{i:>{w}}"
elif justify == "left" or justify == "l":
for i in L:
s += f"{i:<{w}}"
eli... | 5,354,395 |
def classification_result(y, y_pred):
    """
    Split sample indices by prediction outcome.

    :param y: true labels
    :param y_pred: predicted labels (must have the same length as y)
    :return: (correct, wrong) -- two lists of indices where the
        prediction matched / did not match the true label
    """
    assert len(y) == len(y_pred)
    correct = []
    wrong = []
    for idx, (truth, predicted) in enumerate(zip(y, y_pred)):
        target = correct if truth == predicted else wrong
        target.append(idx)
    return correct, wrong
def flatgrad(loss, var_list, clip_norm=None):
"""Calculate the gradient and flatten it.
Parameters
----------
loss : float
the loss value
var_list : list of tf.Tensor
the variables
clip_norm : float
clip the gradients (disabled if None)
Returns
-------
list ... | 5,354,397 |
async def get_image_from_message(
ctx,
url=None,
*,
return_type="image_RGBA",
search_last_messages=True,
accept_emojis=True,
accept_templates=True,
):
"""Get an image from a discord Context or check on images among the 100
last messages sent in the channel. Return bytes or PIL.Image ... | 5,354,398 |
def test_ifThen():
"""
This function allows to execute a callable on an object only if it
has a valid value. ifThen(value,callable) will return callable(value)
only if value is not in falsables.
It is a List-like method, it can be combined with fandango.excepts.trial
"""
#assert fanda... | 5,354,399 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.