content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def test_choice_with_distribution():
"""
Make sure that choice_with_distributions basically doesn't crash, and has
the correct type of return value. However, do not test the distribution of
return values unless we are certain of the value.
The goal with this test is to NOT allow randomness to infl... | 5,354,500 |
async def sleepybot(time):
"""For .sleep command, let the userbot snooze for a few second."""
counter = int(time.pattern_match.group(1))
await time.edit("**Estou de mau humor e cochilando...**")
if BOTLOG:
str_counter = time_formatter(counter)
await time.client.send_message(
... | 5,354,501 |
def root_mean_square_ffinalise(out, sub_samples=None):
"""Divide the weighted sum by the sum of weights and take the square
root.
Also mask out any values derived from a too-small sample size.
:Parameters:
out: 3-`tuple` of `numpy.ndarray`
An output from `root_mean_square_fpartial... | 5,354,502 |
def md5sum_fileobj(f, start = 0, end = None):
    """Accepts a file object and returns the md5sum.

    Parameters
    ----------
    f : file object
        Binary file object to hash.
    start : int, optional
        Byte offset at which hashing starts (default 0).
    end : int or None, optional
        Byte offset at which hashing stops; None presumably reads to EOF
        (depends on ``file_reader`` -- confirm).

    Returns
    -------
    str
        Hex digest of the MD5 hash over the bytes yielded by ``file_reader``.
    """
    m = hashlib.md5()
    for block in file_reader(f, start, end):
        # BUGFIX: the old check was ``block != ""``, which is always True for
        # bytes on Python 3, so the empty-read guard never fired. A plain
        # truthiness test works for both bytes and str.
        assert block, "Got an empty read"
        m.update(block)
    return m.hexdigest()
def get_localization_scores(predicted_start: int, predicted_end: int, true_start: int, true_end: int):
"""
exp(-abs(t_pred_start-t_start)/(t_end-t_start))
exp(-abs(t_pred_end-t_end)/(t_end-t_start))
:param predicted_start:
:param predicted_end:
:param true_start:
:param true_end:
"""
... | 5,354,504 |
def urlopen(url, data=Nic, proxies=Nic):
"""urlopen(url [, data]) -> open file-like object"""
global _urlopener
jeżeli proxies jest nie Nic:
opener = urllib.request.FancyURLopener(proxies=proxies)
albo_inaczej nie _urlopener:
przy support.check_warnings(
('FancyURLopener ... | 5,354,505 |
def json_compatible_key(key: str) -> str:
    """Normalize *key* as defined in :pep:`566#json-compatible-metadata`."""
    lowered = key.lower()
    return lowered.replace("-", "_")
def get_transitions(jira_host, username, password, issue_id):
"""
Returns transitions of the issue.
jira_host -- JIRA host to contact
username -- JIRA username with administrative permissions.
password -- password of the username.
issue_id -- id of the issue which transitions should be returned... | 5,354,507 |
def _scale_func(k):
"""
Return a lambda function that scales its input by k
Parameters
----------
k : float
The scaling factor of the returned lambda function
Returns
-------
Lambda function
"""
return lambda y_values_input: k * y_values_input | 5,354,508 |
def focal_agents(dest, weight, source, fail=False):
"""
dest: point property set (determines property return type)
weight: field property (weight/mask)
source: point property (values to gather from)
"""
# hack rename...
source_point = dest
source_field = weight
dest_prop = sourc... | 5,354,509 |
def erosion_dependent(input_tensor: torch.Tensor,
structuring_element: torch.Tensor,
origin: Optional[Union[tuple, List[int]]] = None,
border_value: Union[int, float, str] = 'geodesic'):
""" This type of erosion is needed when you want a structuring ... | 5,354,510 |
def bin_barcodes(barcodes, binsize=1000):
"""Binning barcodes into chunks
Parameters
----------
barcodes : iterable
Iterable of barcodes
binsize : int
Size of bin for grouping barcodes
Returns
-------
yields list of barcode (1 bin)
"""
binsize = int(float(bin... | 5,354,511 |
def test_apply_cli_subset_none():
"""Ensure subset none works for apply CLI"""
test_config = ApplicationConfiguration(
application_name="test_application",
internals=Internals(),
post_processor=None,
subcommands=[
SubCommand(name="list", description="list"),
... | 5,354,512 |
def flash_regions(device, region_map):
"""divide the named memory into sized memory regions"""
regions = []
for x in region_map:
if len(x) == 2:
# no meta information: set it all to None
(name, region_sizes) = x
meta = (None,) * len(region_sizes)
elif len(x) == 3:
# provided meta i... | 5,354,513 |
def post_to_connection(Data=None, ConnectionId=None):
"""
Sends the provided data to the specified connection.
See also: AWS API Documentation
Exceptions
:example: response = client.post_to_connection(
Data=b'bytes'|file,
ConnectionId='string'
)
:type Data... | 5,354,514 |
def _load_explorer_data(multiprocess=False):
"""
Load in all available corpora and make their initial tables
This is run when the app starts up
"""
corpora = dict()
tables = dict()
for corpus in Corpus.objects.all():
if corpus.disabled:
print(f"Skipping corpus because it... | 5,354,515 |
async def __write_html(path, file_content):
    """
    Decode a base64 encoded string containing the md-formatted post
    content and write its html5 conversion to disk at *path*.
    """
    decoded = b64decode(file_content)
    html = convert_text(decoded, "html5", format="md")
    with open(path, "w") as out_file:
        out_file.write(html)
def test_engine_default_base_content_path_can_be_overridden():
    """If content_path is presented when the engine is initialized it can
    overwrite the default content_path."""
    custom_path = 'override_the_content_path'
    engine = Engine(content_path=custom_path)
    assert engine.base_content_path == custom_path
def compute_CD_projected_psth(units, time_period=None):
"""
Routine for Coding Direction computation on all the units in the specified unit_keys
Coding Direction is calculated in the specified time_period
:param: unit_keys - list of unit_keys
:return: coding direction unit-vector,
contr... | 5,354,518 |
def transform_count(in_gen, title=None):
    """
    Pass datamaps through unchanged while counting them; once the input
    is exhausted, print the final count (prefixed with *title* if given).
    """
    total = 0
    for datamap in in_gen:
        total += 1
        yield datamap
    if title is None:
        print(f"count: {total}")
    else:
        print(f"{title} count: {total}")
def _tvos_extension_impl(ctx):
"""Implementation of the `tvos_extension` Skylark rule."""
binary_artifact = binary_support.get_binary_provider(
ctx.attr.deps, apple_common.AppleExecutableBinary).binary
deps_objc_provider = binary_support.get_binary_provider(
ctx.attr.deps, apple_common.AppleExecutable... | 5,354,520 |
def simplify_graph(G):
"""remove the scores, so the cycle_exits() function can work"""
graph = copy.deepcopy(G)
simplified = dict((k, graph[k][0]) for k in graph)
# add dummy edges,so the cycle_exists() function works
for source in simplified.keys():
for target in simplified[source]:
... | 5,354,521 |
def Run_INCR(num_vertices, edge_density, algorithm_name, k, init_tree=None):
"""
Initialize and run the MVA algorithm
"""
edges_bound = int(edge_density * (num_vertices * (num_vertices - 1) / 2))
k = max(1, k * edges_bound)
runner = runner_factory(num_vertices, algorithm_name, None, edges_b... | 5,354,522 |
def create_nrrd_from_dicoms(image, patient_id):
"""
Reads a folder that contains multiple DICOM files and
converts the input into a single nrrd file using a command line
app from MITK or MITK Phenotyping.
Input:
* path to one dicom (other are automatically found.)
* Patient I... | 5,354,523 |
def _two_point_interp(times, altitudes, horizon=0*u.deg):
"""
Do linear interpolation between two ``altitudes`` at
two ``times`` to determine the time where the altitude
goes through zero.
Parameters
----------
times : `~astropy.time.Time`
Two times for linear interpolation between
... | 5,354,524 |
def setup_sample_data(no_of_records):
"""Generate the given number of sample data with 'id', 'name', and 'dt'"""
rows_in_database = [{'id': counter, 'name': get_random_string(string.ascii_lowercase, 20), 'dt': '2017-05-03'}
for counter in range(0, no_of_records)]
return rows_in_datab... | 5,354,525 |
def generate_csv_string(csv_data):
""" Turn 2d string array into a string representing a csv file """
output_buffer = StringIO()
writer = csv.writer(output_buffer)
csv_data = equalize_array(csv_data)
csv_data = utf_8_encode_array(csv_data)
for row in csv_data:
writer.writerow(row)
body = output_buf... | 5,354,526 |
def finish_current_molecule(molecule_name, path_save_mol2, temp_file_name_full):
"""
Last procedures for current molecule
Example:
>>> finish_current_molecule(molecule_name, path_save_mol2, temp_file_name_full)
@param molecule_name: main name of molecule
@type molecule_name: string
@param... | 5,354,527 |
def user_stats_birth(df):
"""Displays statistics of analysis based on the birth years of bikeshare users."""
# Display earliest, most recent, and most common year of birth
birth_year = df['Birth Year']
# the most common birth year
most_common_year = birth_year.value_counts().idxmax()
print("The... | 5,354,528 |
def create_parser() -> ArgumentParser:
"""
Constructs the MFA argument parser
Returns
-------
ArgumentParser
MFA argument parser
"""
GLOBAL_CONFIG = load_global_config()
def add_global_options(subparser: argparse.ArgumentParser, textgrid_output: bool = False):
"""
... | 5,354,529 |
def build_params_comments(python_code, keyword, info):
"""Builds comments for parameters"""
for arg, arg_info in zip(info.get('expected_url_params').keys(), info.get('expected_url_params').values()):
python_code += '\n' + 2*TAB_BASE*SPACE + ':param ' + score_to_underscore(arg) + ': '
python_cod... | 5,354,530 |
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Netatmo component."""
hass.data[DOMAIN] = {}
hass.data[DOMAIN][DATA_PERSONS] = {}
if DOMAIN not in config:
return True
config_flow.NetatmoFlowHandler.async_register_implementation(
hass,
config_entry_oa... | 5,354,531 |
async def upload_artifact(req):
"""
Upload artifact created during sample creation using the Jobs API.
"""
db = req.app["db"]
pg = req.app["pg"]
sample_id = req.match_info["sample_id"]
artifact_type = req.query.get("type")
if not await db.samples.find_one(sample_id):
raise Not... | 5,354,532 |
def hotkey(x: int, y: int) -> bool:
    """Try to copy by dragging over the string, and then use hotkey.

    Moves the cursor near (x, y), drags 70 px right while the mouse button
    is held to select the text, copies it with Ctrl+C, and reports whether
    the copy succeeded.

    :param x: screen x-coordinate of the start of the string
    :param y: screen y-coordinate of the string
    :return: result of ``check_copied()`` -- True if the clipboard now
        holds the expected content
    """
    # +15 px offset presumably skips past the leading edge of the string
    # -- TODO confirm against the caller; duration 0 means an instant move.
    gui.moveTo(x + 15, y, 0)
    gui.mouseDown()
    gui.move(70, 0)  # drag right with the button held to select the text
    gui.hotkey("ctrl", "c")
    gui.mouseUp()
    return check_copied()
def db_handler(args):
"""db_handler."""
if args.type == 'create':
create_db()
if args.type == 'status':
current_rev = db_revision.current_db_revision()
print('current_rev', current_rev)
if args.type == 'upgrade':
upgrade_db()
if args.type == 'revision':
db... | 5,354,534 |
def test_format_checks_warning():
    """Test that unregistered checks raise a warning when formatting checks."""
    # "my_check" was never registered, so _format_checks should emit a
    # UserWarning; None stands in for the (irrelevant) check payload.
    with pytest.warns(UserWarning):
        io._format_checks({"my_check": None})
def get_tv_imdbid_by_id( tv_id, verify = True ):
"""
Returns the IMDb_ ID for a TV show.
:param int tv_id: the TMDB_ series ID for the TV show.
:param bool verify: optional argument, whether to verify SSL connections. Default is ``True``.
:returns: the IMDB_ ID for that TV show. Otherwise retur... | 5,354,536 |
def validate(test_case, **__) -> TestCaseResult:
    """
    Default function to validate test cases.
    Note that the first argument should be a positional argument.

    Always raises NotImplementedError: concrete validators must be
    registered per test-case type.
    """
    message = f"Missing test case validation implementation for {type(test_case)}."
    raise NotImplementedError(message)
def test_reading_cosmos_catalog():
    """Returns the cosmos catalog loaded from its known file paths."""
    return CosmosCatalog.from_file(COSMOS_CATALOG_PATHS)
def write_positions_as_pdbs(i, j, phase, state, annealing_steps, parent_dir, topology_pkl, direction='forward', output_pdb_filename=None, selection_string='resname MOL'):
"""
extract the positions files for an array of annealing steps and write the ligand positions to a pdb;
this is primarily used to extrac... | 5,354,539 |
def is_terminal(p):
    """
    Check if a given packet is a terminal element.

    :param p: element to check
    :type p: object
    :return: If ``p`` is a terminal element (an instance of ``_TerminalPacket``)
    :rtype: bool
    """
    return isinstance(p, _TerminalPacket)
def vgg11_bn(pretrained=False, **kwargs):
"""VGG 11-layer model (configuration "A") with batch normalization
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
if pretrained:
kwargs['init_weights'] = False
model = VGG(make_layers(cfg['A'], batch_norm=True)... | 5,354,541 |
def trf_input_method(config, patient_id="", key_namespace="", **_):
"""Streamlit GUI method to facilitate TRF data provision.
Notes
-----
TRF files themselves have no innate patient alignment. An option
for TRF collection is to use the CLI tool
``pymedphys trf orchestrate``. This connects to th... | 5,354,542 |
def plotter(fdict):
""" Go """
ctx = get_autoplot_context(fdict, get_description())
station = ctx['station']
network = ctx['network']
year = ctx['year']
season = ctx['season']
nt = NetworkTable(network)
table = "alldata_%s" % (station[:2],)
pgconn = get_dbconn('coop')
# Have to... | 5,354,543 |
def rlist(sub_command, params, query):
"""
Reading list for your daily life
yoda rlist [OPTIONS] SUBCOMMAND [QUERY]
ACTION:
view [--params="tags"] [query]: view your reading list
params: reading list parameter to be filtered (defaults to tags)
... | 5,354,544 |
def coverage(c, report="term", opts="", codecov=False):
"""
Run pytest in coverage mode. See `invocations.pytest.coverage` for details.
"""
# Use our own test() instead of theirs.
# Also add integration test so this always hits both.
# (Not regression, since that's "weird" / doesn't really hit a... | 5,354,545 |
def test_cataloging_admin_can_register_permission_from_collection_view(user, collection, superuser,
testapp):
"""Register new permission from collection view as cataloging admin."""
PermissionFactory(user=user, collection=collection, catalog... | 5,354,546 |
def connectCells(self):
"""
Function for/to <short description of `netpyne.network.conn.connectCells`>
Parameters
----------
self : <type>
<Short description of self>
**Default:** *required*
"""
from .. import sim
# Instantiate network connections based on the conne... | 5,354,547 |
def convert_numpy_str_to_uint16(data):
""" Converts a numpy.unicode\_ to UTF-16 in numpy.uint16 form.
Convert a ``numpy.unicode_`` or an array of them (they are UTF-32
strings) to UTF-16 in the equivalent array of ``numpy.uint16``. The
conversion will throw an exception if any characters cannot be
... | 5,354,548 |
def _get_process_num_examples(builder, split, process_batch_size, process_index,
process_count, drop_remainder):
"""Returns the number of examples in a given process's split."""
process_split = _get_process_split(
split,
process_index=process_index,
process_count=proc... | 5,354,549 |
def export_python_function(earth_model):
"""
Exports model as a pure python function, with no numpy/scipy/sklearn dependencies.
:param earth_model: Trained pyearth model
:return: A function that accepts an iterator over examples, and returns an iterator over transformed examples
"""
i = 0
ac... | 5,354,550 |
def thermostat_get_zone_information(
address: Address, zone: int, info: int, topic=pub.AUTO_TOPIC
):
"""Create a THERMOSTAT_GET_ZONE_INFORMATION command.
zone: (int) 0 to 31
info: (int)
0 = Temperature
1 = Setpoint
2 = Deadband
3 = Humidity
"""
zone = zone & 0x0... | 5,354,551 |
def ehi(data, thr_95, axis=0, keepdims=False):
"""
Calculate Excessive Heat Index (EHI).
Parameters
----------
data: list/array
1D/2D array of daily temperature timeseries
thr_95: float
95th percentile daily mean value from climatology
axis: int
The axis along which ... | 5,354,552 |
def make_transpose_tests(options):
"""Make a set of tests to do transpose."""
# TODO(nupurgarg): Add test for uint8.
test_parameters = [{
"dtype": [tf.int32, tf.int64, tf.float32],
"input_shape": [[2, 2, 3]],
"perm": [[0, 1, 2], [0, 2, 1]],
"constant_perm": [True, False],
}, {
"dt... | 5,354,553 |
def get_disable_migration_module():
    """Return a mapping-like object that disables migrations for every app.

    The object claims to contain every app label and maps each one to
    ``None``, which tells Django to treat the app as having no migrations.
    """
    class _NoMigrations:
        # Pretend every app label is present ...
        def __contains__(self, app_label):
            return True

        # ... and map each of them to "no migration module".
        def __getitem__(self, app_label):
            return None

    return _NoMigrations()
async def test_view_empty_namespace(client, sensor_entities):
"""Test prometheus metrics view."""
body = await generate_latest_metrics(client)
assert "# HELP python_info Python platform information" in body
assert (
"# HELP python_gc_objects_collected_total "
"Objects collected during g... | 5,354,555 |
def weather():
"""The weather route of My Weather API."""
# Load URL and KEY args of Current Weather API of OpenWeatherMap
api_url = app.config.get("API_URL")
api_key = app.config.get("API_KEY")
validators.check_emptiness('API_URL', api_url)
validators.check_emptiness('API_KEY', api_key)
# ... | 5,354,556 |
def __sbox_bytes(data, sbox):
    """S-Box substitution of a list of bytes.

    :param data: iterable of int byte values to substitute
    :param sbox: S-box lookup table used by ``__sbox_single_byte``
    :return: list of substituted byte values, same length and order as *data*
    """
    return [__sbox_single_byte(byte, sbox) for byte in data]
def load_config_with_kwargs(cls, kwargs):
"""Takes a marshmallow class and dict of parameter values and appropriately instantiantes the schema."""
assert_is_a_marshmallow_class(cls)
schema = cls.Schema()
fields = schema.fields.keys()
return load_config(cls, **{k: v for k, v in kwargs.items() if k in... | 5,354,558 |
def test_pop_the_cap_reform():
"""
Test eliminating the maximum taxable earnings (MTE)
used in the calculation of the OASDI payroll tax.
"""
# create Policy parameters object
ppo = Policy()
assert ppo.current_year == Policy.JSON_START_YEAR
# confirm that MTE has current-law values in 201... | 5,354,559 |
def ConfigureInstanceTemplate(args, kube_client, project_id, network_resource,
workload_namespace, workload_name,
workload_manifest, membership_manifest,
asm_revision, mesh_config):
"""Configure the provided instance template ar... | 5,354,560 |
def SetRandomSeed(seed):
    """Set the global random seed.

    Stores *seed* under ``'random_seed'`` in the module-level ``option``
    dictionary.

    Parameters
    ----------
    seed : int
        The seed to use.

    Returns
    -------
    None
    """
    global option
    option.update(random_seed=seed)
def p_skip_base(p):
    """
    skip_base : skip_operator
              | skip_keyword
              | skip_constant
              | ID
    """
    # NOTE: the docstring above is a PLY (yacc) grammar rule that is parsed
    # at runtime -- it is part of the program, not documentation.
    # Propagate the single matched symbol's value as this rule's result.
    p[0] = p[1]
def main(argv=None):
    """Main program which parses args and runs.

    Args:
        argv: List of command line arguments, if None uses sys.argv.
    """
    args = sys.argv[1:] if argv is None else argv
    opts = parse_args(args)
    Main(opts.project_configs, opts.program_config, opts.output)
def cli(**cli_kwargs):
"""Rasterize a slide into smaller tiles
Tiles are saved in the whole-slide tiles binary format (tiles.pil), and the corresponding manifest/header file (tiles.csv) is also generated
Neccessary data for the manifest file are:
address, x_coord, y_coord, full_resolution_tile_si... | 5,354,564 |
def test_display_failed():
"""Verify failed devices are showing"""
cmd_list = [NETMIKO_GREP] + ['interface', 'all']
(output, std_err) = subprocess_handler(cmd_list)
assert "Failed devices" in output
failed_devices = output.split("Failed devices:")[1]
failed_devices = failed_devices.strip().split... | 5,354,565 |
def split_rows(sentences, column_names):
"""
Creates a list of sentence where each sentence is a list of lines
Each line is a dictionary of columns
:param sentences:
:param column_names:
:return:
"""
new_sentences = []
root_values = ['0', 'ROOT', 'ROOT', 'ROOT', 'ROOT', 'ROOT', '0', ... | 5,354,566 |
def compute_pw_sparse_out_of_memory2(tr,
row_size = 500,
pm_processes = 2,
pm_pbar = True,
max_distance = 50,
reassemble = T... | 5,354,567 |
def score_models(X_train = None, y_train = None, X_val = None, y_val = None, y_base = None, includeBase = False, model = None):
"""Score Models and return results as a dataframe
Parameters
----------
X_train : Numpy Array
X_train data
y_train : Numpy Array
Train target
X_va... | 5,354,568 |
def _rollup_date(dts, interval=None):
"""format date/time string based on interval spec'd for summation
For Daily, it returns just the date. No time or timezeone.
For Hourly, it returns an ISO-8061 datetime range. This provides previously
missing clarity around whether the rainfall amount shown was fo... | 5,354,569 |
def offset_compensation(time_signal):
    """Remove DC offset from *time_signal* with a first-order IIR high-pass
    filter (y[n] = x[n] - x[n-1] + 0.999 * y[n-1]).
    """
    numerator = [1., -1]
    denominator = [1., -0.999]
    return lfilter(numerator, denominator, time_signal)
def process_dir(thisdir):
"""Process /thisdir/ recursively"""
res = []
shellparams = {'stdin':subprocess.PIPE,'stdout':sys.stdout,'shell':True}
command = [utils.assimp_bin_path,"testbatchload"]
for f in os.listdir(thisdir):
if os.path.splitext(f)[-1] in settings.exclude_extensions:
... | 5,354,571 |
def _alias(default: Callable) -> Callable[[T], T]:
"""
Decorator which re-assigns a function `_f` to point to `default` instead.
Since global function calls in Python are somewhat expensive, this is
mainly done to reduce a bit of overhead involved in the functions calls.
For example, consider the b... | 5,354,572 |
def test_image(filename):
    """
    Return the absolute path to image file having *filename* in test_files
    directory.

    :param filename: bare file name of the test image
    :return: absolute path string
    """
    # absjoin/thisdir are defined elsewhere in the module; presumably join
    # relative to this file's directory -- confirm.
    return absjoin(thisdir, 'test_files', filename)
def menu(
ticker: str,
start: str,
interval: str,
stock: pd.DataFrame,
):
"""Sector and Industry Analysis Menu"""
sia_controller = SectorIndustryAnalysisController(ticker, start, interval, stock)
sia_controller.call_help(None)
while True:
# Get input command from user
if... | 5,354,574 |
def fname_template(orun, detname, ofname, nevts, tsec=None, tnsec=None):
"""Replaces parts of the file name specified as
#src, #exp, #run, #evts, #type, #date, #time, #fid, #sec, #nsec
with actual values
"""
template = replace(ofname, '#src', detname)
template = replace(template, '#exp',... | 5,354,575 |
def opt_checked(method):
    """Like `@checked`, but it is legal to not specify the value. In this case,
    the special `Unset` value is passed to the validation function. Storing
    `Unset` causes the key to not be emitted during serialization."""
    # The trailing True flag marks this Checked descriptor as optional.
    return Checked(method.__name__, method.__doc__, method, True)
def _metadata(case_study):
"""Collect metadata in a dictionnary."""
return {
'creation_date': datetime.strftime(datetime.now(), '%c'),
'imagery': case_study.imagery,
'latitude': case_study.lat,
'longitude': case_study.lon,
'area_of_interest': case_study.aoi_latlon.wkt,
... | 5,354,577 |
def CoP_constraints_ds(
m,
foot_angles,
next_support_foot_pos,
stateX,
stateY,
N=16,
dt=0.1,
h=1.0,
g=9.81,
tPf=8,
):
"""
INPUTS
m (int): remaining time steps in current foot step;
foot_angles ([N, 1] vector): containing the orientations in radians
of the foot... | 5,354,578 |
def HARRIS(img_path):
"""
extract HARR features
:param img_path:
:return:
:Version:1.0
"""
img = io.imread(img_path)
img = skimage.color.rgb2gray(img)
img = (img - np.mean(img)) / np.std(img)
feature = corner_harris(img, method='k', k=0.05, eps=1e-06, sigma=1)
return feature... | 5,354,579 |
def upgrade():
"""
Change upload_area primary key to be integer sequence, and update any foreign keys that reference it.
"""
# Upload Area
op.execute("ALTER TABLE file DROP CONSTRAINT file_upload_area;")
op.execute("ALTER TABLE upload_area DROP CONSTRAINT upload_area_pkey;")
op.execute("ALT... | 5,354,580 |
def test_admin_noauth_fail(fn, args):
    """
    Verify that an admin-only call fails when invoked without authentication.

    :param fn: admin-only callable under test (presumably supplied by
        parametrization -- confirm)
    :param args: positional arguments passed through to ``fn``
    """
    with pytest.raises(AuthorizationError):
        fn(*args)
def load_object(f_name, directory=None):
"""Load a custom object, from a pickle file.
Parameters
----------
f_name : str
File name of the object to be loaded.
directory : str or SCDB, optional
Folder or database object specifying the save location.
Returns
-------
objec... | 5,354,582 |
def http_req(blink, url='http://example.com', data=None, headers=None,
reqtype='get', stream=False, json_resp=True, is_retry=False):
"""
Perform server requests and check if reauthorization neccessary.
:param blink: Blink instance
:param url: URL to perform request
:param data: Data to... | 5,354,583 |
def register_encryptor(method: Union[FactorEncryptMethod, str], encryptor: Encryptor) -> None:
    """
    Register *encryptor* under *method* in the global encryptor registry
    on startup, so later lookups by method resolve to this encryptor.
    """
    encryptor_registry.register(method, encryptor)
def make_dataset(path, seq_length, mem_length, local_rank, lazy=False, xl_style=False,
shuffle=True, split=None, tokenizer=None, tokenizer_type='CharacterLevelTokenizer',
tokenizer_model_path=None, vocab_size=None, model_type='bpe', pad_token=0, character_converage=1.0,
... | 5,354,585 |
def fix_reference_name(name, blacklist=None):
"""Return a syntax-valid Python reference name from an arbitrary name"""
name = "".join(re.split(r'[^0-9a-zA-Z_]', name))
while name and not re.match(r'([a-zA-Z]+[0-9a-zA-Z_]*)$', name):
if not re.match(r'[a-zA-Z]', name[0]):
name = name[1:]
... | 5,354,586 |
def handle_args():
"""Handles arguments both in the command line and in IDLE.
Output:
Tuple, consisting of:
- string (input filename or stdin)
- string (output filename or stdout)
- integer (number of CPUs)
"""
version_num = "0.0.2"
# Tries to execute the script with command lin... | 5,354,587 |
def load_scripts(reload_scripts=False, refresh_scripts=False):
"""
Load scripts and run each modules register function.
:arg reload_scripts: Causes all scripts to have their unregister method
called before loading.
:type reload_scripts: bool
:arg refresh_scripts: only load scripts which are ... | 5,354,588 |
def plot_repeat_transaction_over_time(data, median, output_folder, time_label):
"""Creates and saves an image containing the plot the transactions over
time.
Args:
data: Pandas DataFrame containing the data to plot.
median: Median line that shows split between calibration and holdout
... | 5,354,589 |
def get_available_gates() -> tuple[str, ...]:
    """
    Return available gates.

    Snapshots ``hybridq.gate.gate._available_gates`` into a tuple.
    """
    # NOTE(review): function-local import, presumably to avoid a circular
    # import at module load time -- confirm.
    from hybridq.gate.gate import _available_gates
    return tuple(_available_gates)
def main():
    """
    Entry point
    Collect all reviews from the file system (FS) &
    Dump it into JSON representation back to the FS
    Returns:
        int: The status code
    """
    return Collector().collect()
def __parse_tokens(sentence: spacy.tokens.Doc) -> ParsedUniversalDependencies:
"""Parses parts of speech from the provided tokens."""
#tokenize
# remove the stopwards, convert to lowercase
#bi/n-grams
adj = __get_word_by_ud_pos(sentence, "ADJ")
adp = __get_word_by_ud_pos(sentence, "ADP")
adv... | 5,354,592 |
def main():
"""
In this main function, we connect to the database, and we create position table and intern table
and after that we create new position and new interns and insert the data into the position/intern
table
"""
database = r"interns.db"
sql_drop_positions_table="""
... | 5,354,593 |
def parse_date(month: int, day: int) -> Tuple[int, int, int]:
"""Parse a date given month and day only and convert to
a tuple.
Args:
month (int): 1-index month value (e.g. 1 for January)
day (int): a day of the month
Returns:
Tuple[int, int, int]: (year, month, day)
"""
... | 5,354,594 |
def usgs_perlite_parse(*, df_list, source, year, **_):
"""
Combine, parse, and format the provided dataframes
:param df_list: list of dataframes to concat and format
:param source: source
:param year: year
:return: df, parsed and partially formatted to flowbyactivity
specifications
"... | 5,354,595 |
def downgrade():
    """Remove unique key constraint to the UUID column."""
    # Drops the db_dblog_uuid_key UNIQUE constraint from the db_dblog table.
    op.drop_constraint('db_dblog_uuid_key', 'db_dblog')
def calculate_frechet_distance(mu1, sigma1, mu2, sigma2, eps=1e-6):
# token from https://github.com/bioinf-jku/TTUR/blob/master/fid.py
"""Numpy implementation of the Frechet Distance.
The Frechet distance between two multivariate Gaussians X_1 ~ N(mu_1, C_1)
and X_2 ~ N(mu_2, C_2) is
d^2 = ... | 5,354,597 |
def _svdvals_eig(x):  # pragma: no cover
    """SVD-decomposition via eigen, but return singular values only.

    Forms whichever Gram matrix is smaller so the symmetric eigenproblem
    is solved at the cheaper dimension; singular values are the square
    roots of its eigenvalues.
    """
    rows, cols = x.shape
    if rows > cols:
        gram = dag(x) @ x
    else:
        gram = x @ dag(x)
    eigenvalues = np.linalg.eigvalsh(gram)
    return eigenvalues**0.5
def get_3d_object_section(target_object):
"""Returns 3D section includes given object like stl.
"""
target_object = target_object.flatten()
x_min = min(target_object[0::3])
x_max = max(target_object[0::3])
y_min = min(target_object[1::3])
y_max = max(target_object[1::3])
z_min = min(tar... | 5,354,599 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.