| content (string, length 22–815k) | id (int64, 0–4.91M) |
|---|---|
def CreateMnemonicsC(mnemonicsIds):
""" Create the opcodes arrays for C header files. """
opsEnum = "typedef enum {\n\tI_UNDEFINED = 0, "
pos = 0
l2 = sorted(mnemonicsIds.keys())
for i in l2:
s = "I_%s = %d" % (i.replace(" ", "_").replace(",", ""), mnemonicsIds[i])
if i != l2[-1]:
s += ","
pos +=... | 5,351,300 |
def create_windows(c_main, origin, J=None, I=None, depth=None, width=None):
"""
Create windows based on contour and windowing parameters. The first
window (at arc length = 0) is placed at the spline origin.
Note: to define the windows, this function uses pseudo-radial and
pseudo-angular coordinates... | 5,351,301 |
def extractsms(htmlsms) :
"""
extractsms -- extract SMS messages from BeautifulSoup tree of Google Voice SMS HTML.
Output is a list of dictionaries, one per message.
"""
msgitems = [] # accum message items here
# Extract all conversations by searching for a DIV with an ID at top leve... | 5,351,302 |
def load_ch_wubi_dict(dict_path=e2p.E2P_CH_WUBI_PATH):
"""Load Chinese to Wubi Dictionary.
Parameters
---------
dict_path : str
the absolute path to chinese2wubi dictionary.
In default, it's E2P_CH_WUBI_PATH.
Returns
-------
dict : Dictionary
a mapping between Chine... | 5,351,303 |
def middle_flow(middle_inputs: Tensor) -> Tensor:
"""
Middle flow
Implements the second of the three broad parts of the model
:param middle_inputs: middle_inputs: Tensor output generate by the Entry Flow,
having shape [*, new_rows, new_cols, 728]
:return: Out... | 5,351,304 |
async def test_button_failure(
hass: HomeAssistant,
load_int: ConfigEntry,
monkeypatch: MonkeyPatch,
get_data: SensiboData,
) -> None:
"""Test the Sensibo button fails."""
state_button = hass.states.get("button.hallway_reset_filter")
with patch(
"homeassistant.components.sensibo.ut... | 5,351,305 |
def feature_extraction(sample_index, labels, baf, lrr, rawcopy_pred, data_shape, margin=10000, pad_val=-2):
"""
Extract features at sample index
:param sample_index: sample index
:param labels: break point labels
:param baf: b-allele frequency values
:param lrr: ... | 5,351,306 |
def get_filename(file_fullpath):
    """Extract the bare filename (no directory, no extension) from a path.

    :param file_fullpath: full path string using '/' as the separator
    :return: the last path component, truncated at its first '.'
    """
    # Everything after the final '/' ...
    basename = file_fullpath.rsplit("/", 1)[-1]
    # ... then drop everything from the first '.' onward.
    return basename.split(".", 1)[0]
def test_colocalization(col, row, alt):
"""
Test colocalization function using rpc
"""
data_folder = data_path()
id_scene = "P1BP--2018122638935449CP"
file_dimap = os.path.join(data_folder, f"rpc/PHRDIMAP_{id_scene}.XML")
fctrat = RPC.from_dimap_v1(file_dimap)
row_coloc, col_coloc, _ =... | 5,351,308 |
def create_and_assign_household(humans_with_same_house, housetype, conf, city, allocated_humans):
"""
Creates a residence and allocates humans in `humans_with_same_house` to the same.
Args:
humans_with_same_house (list): a list of `Human` objects which are to be allocated to the same residence of t... | 5,351,309 |
def Rdf2Marc(**kwargs):
"""Runs rdf2marc on a BF Instance URL"""
task_instance = kwargs["task_instance"]
instance_uri = task_instance.xcom_pull(task_ids="sqs-sensor")
instance_path = urlparse(instance_uri).path
instance_id = path.split(instance_path)[-1]
sinopia_env = kwargs.get("sinopia_env",... | 5,351,310 |
def make_positions(tensor, padding_idx):
"""Replace non-padding symbols with their position numbers.
Position numbers begin at padding_idx+1. Padding symbols are ignored.
"""
# The series of casts and type-conversions here are carefully
# balanced to both work with ONNX export and XLA. In particula... | 5,351,311 |
def init():
"""Top level command handler."""
@click.command()
@click.option('--policy-servers', type=cli.LIST,
required=True,
help='Warpgate policy servers')
@click.option('--service-principal', type=str,
default='host',
help='Warp... | 5,351,312 |
def create_color_visualizer(renderer, file_name, scalar_range):
"""Create color visualizer"""
# Initialize variables
reader = vtkStructuredGridReader()
mapper = vtkDataSetMapper()
actor = vtkActor()
# Set reader
reader.SetFileName(file_name)
# Set lookup table
lookup_table = creat... | 5,351,313 |
def load(file):
"""unpickle an object from a file"""
pik = Unpickler(file)
pik._main = _main_module
obj = pik.load()
if type(obj).__module__ == getattr(_main_module, '__name__', '__main__'):
# point obj class to main
try: obj.__class__ = getattr(pik._main, type(obj).__name__)
... | 5,351,314 |
def verify_password(password: str, salt: str, key: str) -> bool:
"""
Verify the given password against the given salt and key.
:param password: The password to check.
:param salt: The salt to use. Should be encoded in ascii.
:param key: The key to use. Should be encoded in ascii.
:returns: True... | 5,351,315 |
def _post_patch_ecr(version, repo, account, region,
filepath='dockerfiles'):
"""
routine to build the docker image and push it to ECR
"""
if not os.path.isdir(repo + '/' + filepath): return
print('Processing docker image...')
account_region = account + '.dkr.ecr.' + regi... | 5,351,316 |
def setup(app):
"""Set up the Sphinx extension."""
app.add_config_value(
name="doctr_versions_menu_conf", default={}, rebuild="html",
)
app.connect('builder-inited', ext.add_versions_menu_js_file)
app.connect('build-finished', ext.cleanup)
return {
"version": __version__,
... | 5,351,317 |
def head(file):
"""Returns the first/head line of the file"""
first = ''
if os.path.isfile(file):
with open_file_read(file) as f_in:
try:
first = f_in.readline().rstrip()
except UnicodeDecodeError:
pass
return first
else:
... | 5,351,318 |
def get_help_recursive(group, ctx, commands):
"""
Returns help for arbitrarily nested subcommands of the given click.Group.
"""
try:
command_name = commands.pop(0)
group = group.get_command(ctx, command_name)
if not group:
raise click.ClickException('Invalid command: ... | 5,351,319 |
def clump_tracker(fprefix, param=None, directory=None, nsmooth=32, verbose=True):
"""
Finds and tracks clumps over a simulation with multiple time steps and
calculates various physical properties of the clumps.
Runs all the steps necessary to find/track clumps, these are:
get_fnames
pF... | 5,351,320 |
def OptimizeGraph(config_proto,
metagraph,
verbose=True,
graph_id=b'graph_to_optimize',
cluster=None,
strip_default_attributes=False):
"""Optimize the provided metagraph.
For best results, the signature_def field in `metagrap... | 5,351,321 |
def files_by_date():
    """Deprecated endpoint: permanently redirect to OONI Explorer search.
    ---
    responses:
      '301':
        description: Permanent redirect to https://explorer.ooni.org/search
    """
    return redirect("https://explorer.ooni.org/search", 301) | 5,351,322 |
def parse_imei(msg):
    """Parse an IMEI (in BCD format) into ASCII format.

    Skips the first octet of *msg*; each remaining octet contributes two
    digits, low nibble first.
    """
    digits = []
    for octet in msg[1:]:
        value = ord(octet)
        digits.append(imei_parse_nibble(value & 0x0f))
        digits.append(imei_parse_nibble(value >> 4))
    return ''.join(digits)
def get_blender_frame_time(skeleton, frame_id, rate, time_scale, actor_id):
"""Goes from multi-actor integer frame_id to modded blender float time."""
# stays within video frame limits
frame_id2 = skeleton.mod_frame_id(frame_id=frame_id) # type: int
time_ = skeleton.get_time(frame_id)
if actor_id >... | 5,351,324 |
def parse_metar(metar_text, year, month, station_metadata=station_info):
"""Parse a METAR report in text form into a list of named tuples.
Parameters
----------
metar_text : str
The METAR report
station_metadata : dict
Mapping of station identifiers to station metadata
year : in... | 5,351,325 |
def upload_ignition_files_to_s3(local_folder, s3_bucket, session: SessionProxy):
"""
Push Ignition files up to S3
:param session: Boto SessionProxy
:param local_folder: The folder to upload
:param s3_bucket: Name of the S3 Bucket
:return None:
"""
files_to_upload = ['auth/kubeconfig', '... | 5,351,326 |
def update_facemap_material(self, context):
    """ Assign the updated material to all faces belonging to active facemap

    Property-update callback: delegates the work to
    set_material_for_active_facemap() and returns None, as expected of an
    update function.
    """
    set_material_for_active_facemap(self.material, context)
    return None | 5,351,327 |
def calculate_accuracy(y_true, y_pred):
"""Calculates the accuracy of the model.
Arguments:
y_true {numpy.array} -- the true labels corresponding to each input
y_pred {numpy.array} -- the model's predictions
Returns:
accuracy {str} -- the accuracy of the model (%)
... | 5,351,328 |
def resolve_diff_args(args):
"""Resolve ambiguity of path vs base/remote for git:
Cases:
- No args: Use defaults
- One arg: Either base or path, check with is_gitref.
- Two args or more: Check if first two are base/remote by is_gitref
"""
base = args.base
remote = args.remote
pat... | 5,351,329 |
def get_coco_metrics_from_gt_and_det(groundtruth_dict, detection_boxes_list, category=''):
"""
Get COCO metrics given dictionary of groundtruth dictionary and the list of
detections.
"""
coco_wrapped_groundtruth = coco_tools.COCOWrapper(groundtruth_dict)
coco_wrapped_detections = coco_wrapped_gr... | 5,351,330 |
def test_notify_emby_plugin_notify(mock_post, mock_get, mock_logout,
mock_login, mock_sessions):
"""
API: NotifyEmby.notify()
"""
# Disable Throttling to speed testing
plugins.NotifyBase.request_rate_per_sec = 0
req = requests.Request()
req.status_code = ... | 5,351,331 |
def createDataset(dataPath,dStr,sigScale=1):
"""
dStr from ["20K", "1M", "10M"]
"""
print("Loading D1B dataset...")
ft1_d = loadD1B(dataPath,dStr,w=40)
if dStr=="20K":
ft1_d = ft1_d[:10000,:]
print("Running PCA on D1B")
pcaD1B = PCA(n_components=ft1_d.shape[1],random_state... | 5,351,332 |
def generate_interblock_leader():
    """Return the two-byte leader (0x55 0x55) emitted between normal blocks."""
    return bytes([0x55, 0x55])
def discover_handlers(entrypoint_group_name="databroker.handlers", skip_failures=True):
"""
Discover handlers via entrypoints.
Parameters
----------
entrypoint_group_name: str
Default is 'databroker.handlers', the "official" databroker entrypoint
for handlers.
skip_failures: boo... | 5,351,334 |
def create_highway_layer(highway_type,
num_layer,
unit_dim,
window_size,
activation,
dropout,
num_gpus,
default_gpu_id,
... | 5,351,335 |
def compute_metrics(y_true, y_predicted, y_prob = None):
"""compute metrics for the prredicted labels against ground truth
@args:
y_true: the ground truth label
y_predicted: the predicted label
y_predicted_prob: probability of the predicted label
@returns:
... | 5,351,336 |
def session_store(decoy: Decoy) -> SessionStore:
    """Get a mock SessionStore interface.

    Test fixture: returns a decoy-generated mock conforming to the
    SessionStore class.
    """
    return decoy.mock(cls=SessionStore) | 5,351,337 |
def test_point_geometry_pass(pt_geo_plot, pd_gdf):
    """Check that the point geometry test recognizes correct points."""
    # Should not raise: the plotted points match the expected GeoDataFrame.
    pt_geo_plot.assert_points(points_expected=pd_gdf)
    # Close all figures so matplotlib state does not leak between tests.
    plt.close("all") | 5,351,338 |
def get_json_signed(asn_metadata):
"""
Given an ASN.1 object conforming to the new ASN.1 metadata definitions
derived from Snapshot*.asn1, return a Python dictionary containing the same
information, conformant to TUF's standard data specification for Snapshot
metadata (tuf.formats.SNAPSHOT_SCHEMA).
TUF inte... | 5,351,339 |
def extract_values(obj: Dict[str, Any], key: str, val: Any) -> List[Dict[str, Any]]:
"""
Pull all values of specified key from nested JSON.
Args:
obj (dict): Dictionary to be searched
key (str): tuple of key and value.
value (any): value, which can be any type
Returns:
... | 5,351,340 |
def NameSignals(locals):
"""Search locals and name any signal by its key.
N.B. This intended to be called by client code to name signals in the local
scope of a function during module elaboration.
"""
for name in locals:
if issubclass(type(locals[name]), SignalFrontend):
locals... | 5,351,341 |
def createMeshPatches(ax, mesh, rasterized=False, verbose=True):
"""Utility function to create 2d mesh patches within a given ax."""
if not mesh:
pg.error("drawMeshBoundaries(ax, mesh): invalid mesh:", mesh)
return
if mesh.nodeCount() < 2:
pg.error("drawMeshBoundaries(ax, mesh): to ... | 5,351,342 |
def get_url_name(url_):
    """Derive a short name from a URL.

    Takes the last path segment, strips everything from the first '.'
    onward, and keeps at most the final 15 characters.
    """
    last_segment = url_.rsplit('/', 1)[-1]
    stem = last_segment.split('.', 1)[0]
    return stem[-15:]
def check_uuid_in_db(uuid_to_validate, uuid_type):
"""
A helper function to validate whether a UUID exists within our db.
"""
uuid_in_db = None
if uuid_type.name == "SESSION":
uuid_in_db = Sessions.query.filter_by(session_uuid=uuid_to_validate).first()
elif uuid_type.name == "QUIZ":
... | 5,351,344 |
def main(database, symbol, mode):
"""
:param database:
:param symbol:
:param mode:
:return:
"""
symbol = symbol.upper()
data = {
'financial_performance': {
'balance_sheets': {
'yearly': [
]},
'cash_flows': {
... | 5,351,345 |
def delete(uuid):
""" Deletes stored entities and time them.
Args:
uuid: A str, unique identifier, a part of the keynames of entities.
Returns:
A tuple of two lists. A list of float times to delete
all entities, and a list of errors. A zero value signifies
a failure.
"""
timings = []
error... | 5,351,346 |
def remove_prefix(string, prefix):
    """Strip *prefix* from the start of *string* if present.

    Returns the remainder of *string* when it begins with *prefix*;
    otherwise returns *string* unmodified.
    """
    return string[len(prefix):] if string.startswith(prefix) else string
def action_probs_to_action(probs):
""" Takes output of controller and converts to action in format [0,0,0,0] """
forward = probs[:, 0:2]; camera=probs[:, 2:5]; jump=probs[:,5:7];
action = [torch.distributions.Categorical(p).sample().detach().item() for p in [forward,camera,jump]]
action.append(0) # not... | 5,351,348 |
def solve_circuit(netlist):
"""
Generate and solve the Modified Nodal Analysis (MNA) equations for the circuit.
The MNA equations are a linear system Ax = z.
See http://lpsa.swarthmore.edu/Systems/Electrical/mna/MNA3.html
Args:
netlist (pandas.DataFrame):
A netlist of circuit el... | 5,351,349 |
def search_for_subject(subject: Synset, num_urls: int, subscription_key: str, custom_config: str,
host: str, path: str) -> Tuple[List[Tuple[str, str, str]], str, str]:
"""Perform the search phase for one particular subject."""
query = get_search_query(subject)
logger.info(f"Subject ... | 5,351,350 |
def get_kwargs(class_name: str) -> Kwargs:
    """Return the kwargs configured for the field class *class_name*.

    Looks the class up in the COMMON_KWARGS setting and falls back to the
    default kwargs when no class-specific entry exists.
    """
    fallback = get_default_kwargs()
    per_class = get_setting("COMMON_KWARGS", {})
    return per_class.get(class_name, fallback)
def minute_info(x):
    """Return the minute component of timestamp *x* as a fraction of an hour.

    E.g. 30 minutes past the hour -> 0.5.
    """
    return x.minute / 60
def remove_html_tags(text):
    """Strip HTML tags from *text* and replace non-breaking spaces with plain spaces."""
    stripped = BeautifulSoup(text, 'html.parser').get_text()
    return stripped.replace(u'\xa0', ' ')
def patchy(target, source=None):
""" If source is not supplied, auto updates cannot be applied """
if isinstance(target, str):
target = resolve(target)
if isinstance(source, str):
source = resolve(source)
if isinstance(target, ModuleType):
return PatchModule(target, source)
e... | 5,351,354 |
def tests_fastapi(session: nox.sessions.Session, fastapi):
    """ Test against a specific FastAPI version

    Nox session: re-runs the main `tests` session with the `fastapi`
    dependency pinned to the given version.
    """
    tests(session, overrides={'fastapi': fastapi}) | 5,351,355 |
def _handle_special_addresses(lion):
"""
When there are special address codes/names, ensure that there is a duplicate
row with the special name and code as the primary.
Note: Only for special address type 'P' - addressable place names
"""
special = lion[
(lion['special_address_type'].i... | 5,351,356 |
def get_local():
    """Construct a local population.

    Builds a simple CosmicPopulation of SIZE sources, observes it with a
    'perfect' Survey, and returns the surveyed population's `s_peak`
    values.
    """
    pop = CosmicPopulation.simple(SIZE, generate=True)
    survey = Survey('perfect')
    surv_pop = SurveyPopulation(pop, survey)
    return surv_pop.frbs.s_peak | 5,351,357 |
def get_session():
    """Return a thread-local SQLAlchemy session.

    Wraps a sessionmaker (bound to the engine from get_engine(), with
    autoflush disabled) in a scoped_session so each thread gets its own
    session.
    """
    return scoped_session(
        sessionmaker(bind=get_engine(), autoflush=False)) | 5,351,358 |
def xform(q=1,a=1,bb=1,bbi=1,cp=1,cpc=1,dph=1,eu=1,m="[float, float, float, float, float, float, float, float, float, float, float, float, float, float, float, float]",os=1,piv="[linear, linear, linear]",p=1,puv=1,rfl=1,rab=1,rao=1,rax=1,ray=1,raz=1,rft="float",r=1,ra="[angle, angle, angle]",roo="string",rp="[linear, l... | 5,351,359 |
def mode_ratios(ratios):
    """Box type mode ratios.

    Plots a kernel-density estimate of *ratios*, titles the figure
    'Boxes Type Mode Ratios', and displays it via plt.show().
    """
    ratios.plot(kind='kde')
    plt.title('Boxes Type Mode Ratios')
    plt.show() | 5,351,360 |
def calculate_y_pos(x, centre=80):
    """Calculate the y-coordinate on a parabolic curve, given x.

    The parabola has its vertex at (centre, sun_radius) and opens upward
    with leading coefficient 1/centre.

    :param x: x-coordinate to evaluate
    :param centre: x-coordinate of the parabola's vertex. Defaults to 80.
        (Bug fix: the original body unconditionally overwrote this
        parameter with 80, so the argument was ignored.)
    :return: the corresponding y-coordinate, truncated to an int
    """
    y = 1 / centre * (x - centre) ** 2 + sun_radius
    return int(y)
def extract_flowlines(gdb_path, target_crs, extra_flowline_cols=[]):
"""
Extracts flowlines data from NHDPlusHR data product.
Extract flowlines from NHDPlusHR data product, joins to VAA table,
and filters out coastlines.
Extracts joins between flowlines, and filters out coastlines.
Parameters
... | 5,351,362 |
def exists(awesome_title):
"""Check the awesome repository is cached
Args:
awesome_title: Awesome repository title
Returns:
True if exists, False otherwise
"""
awesome_cache_directory = os.path.join(CACHE_DIRECTORY, awesome_title)
awesome_cached_readme = os.path.join(awesome_ca... | 5,351,363 |
def split_str_to_list(input_str, split_char=","):
"""Split a string into a list of elements.
Args:
input_str (str): The string to split
split_char (str, optional): The character to split the string by. Defaults
to ",".
Returns:
(list): The string split into a list
"... | 5,351,364 |
def generate_password(length):
    """
    Create a random password for the user.

    Args:
        length - the user's preferred length for the password
    Return:
        A random password of the requested length, as produced by
        Password.generate_pass().
    """
    return Password.generate_pass(length) | 5,351,365 |
def test_missing_input_type():
"""Test that text input type is returned if field data don't have 'type' key.
1. Create a field parser for a dictionary without input type.
2. Parse input type.
3. Check that text input type is returned.
"""
actual_input_type = FieldParser(data={}).parse_input_typ... | 5,351,366 |
def export_csv(obj, file_name, point_type='evalpts', **kwargs):
""" Exports control points or evaluated points as a CSV file.
:param obj: a curve or a surface object
:type obj: abstract.Curve, abstract.Surface
:param file_name: output file name
:type file_name: str
:param point_type: ``ctrlpts`... | 5,351,367 |
async def test_error(hass):
"""Test entity is created."""
system = get_system()
system.errors = [
Error("device_name", "title", "F152", "description", datetime.now())
]
assert await setup_vaillant(hass, system=system)
assert "binary_sensor.vaillant_error_f152" in hass.states.async_entity... | 5,351,368 |
def formatRFC822Headers(headers):
""" Convert the key-value pairs in 'headers' to valid RFC822-style
headers, including adding leading whitespace to elements which
contain newlines in order to preserve continuation-line semantics.
"""
munged = []
linesplit = re.compile(r'[\n\r]+?')
... | 5,351,369 |
def first_fixation_duration(trial: Trial, region_number: int) -> RegionMeasure:
"""
The duration of the first fixation in a region during first pass reading
(i.e., before the reader fixates areas beyond the region).
If this region is skipped during first pass, this measure is None.
::
fp_f... | 5,351,370 |
def find_in_path(input_data, path):
    """Collect the truthy values located at *path* within *input_data*.

    :param input_data: dict or list to search
    :param path: dot-separated path of the values, example: b.*.name
    :result: list of found data
    """
    segments = path.split('.')
    matches = find(input_data, segments)
    values = []
    for _key, value in matches:
        if value:
            values.append(value)
    return values
def collect_static_files(site_name=None):
"""
Collects django static files to where nginx can find them
"""
if not site_name:
site_name = env.host
source_folder = get_source_folder(env.user, site_name)
with prefix('source {}/.env/bin/activate'.format(source_folder)):
run('cd {} ... | 5,351,372 |
def post_test_check(duthost, up_bgp_neighbors):
"""Post-checks the status of critical processes and state of BGP sessions.
Args:
duthost: Host DUT.
skip_containers: A list contains the container names which should be skipped.
Return:
This function will return True if all critical p... | 5,351,373 |
def batch_iter(data, batch_size):
    """Yield shuffled minibatches of the (src, dst, eid) arrays.

    A random permutation of the edge indices is drawn once; successive
    slices of size *batch_size* are yielded (the last batch may be
    smaller).
    """
    src, dst, eid = data
    order = np.arange(len(eid))
    np.random.shuffle(order)
    for offset in range(0, len(src), batch_size):
        chosen = order[offset:offset + batch_size]
        yield src[chosen], dst[chosen], eid[chosen]
def fill_with_mode(filename, column):
"""
Fill the missing values(NaN) in a column with the mode of that column
Args:
filename: Name of the CSV file.
column: Name of the column to fill
Returns:
df: Pandas DataFrame object.
(Representing entire data and where 'column' does... | 5,351,375 |
def successorrevs(unfi, rev):
    """Yield revision numbers for the obsolescence successors of *rev*.

    *unfi* must be an unfiltered repository.
    """
    assert unfi.filtername is None
    node_to_rev = unfi.changelog.index.get_rev
    start_node = unfi[rev].node()
    for successor_node in obsutil.allsuccessors(unfi.obsstore, [start_node]):
        successor_rev = node_to_rev(successor_node)
        if successor_rev is not None:
            yield successor_rev
def add_workshift_context(request):
""" Add workshift variables to all dictionaries passed to templates. """
if not request.user.is_authenticated():
return {}
if Semester.objects.count() < 1:
return {"WORKSHIFT_ENABLED": False}
# Current semester is for navbar notifications
try:
... | 5,351,377 |
def get_model_spec(
model_zoo,
model_def,
model_params,
dataset_fn,
loss,
optimizer,
eval_metrics_fn,
prediction_outputs_processor,
):
"""Get the model spec items in a tuple.
The model spec tuple contains the following items in order:
* The model object instantiated with pa... | 5,351,378 |
def clean_meta(unclean_list):
"""
cleans raw_vcf_header_list for downstream processing
:return:
"""
clean_list = []
for i in unclean_list:
if "=<" in i:
i = i.rstrip(">")
i = i.replace("##", "")
ii = i.split("=<", 1)
else:
i = i.rep... | 5,351,379 |
def test_dependencies_detection_recursive_different_steps(dummy_nb_config):
"""Test dependencies are detected even with a chain of functions calls."""
pipeline = Pipeline(dummy_nb_config)
_source = ['''
x = 5
def foo():
print(x)
''']
pipeline.add_step(Step(name="step1", source=_source))
_source... | 5,351,380 |
def generate_result_table(models, data_info): # per idx (gene/transcript)
"""
Generate a table containing learned model parameters and statistic tests.
Parameters
----------
models
Learned models for individual genomic positions of a gene.
group_labels
Labels of samples.
da... | 5,351,381 |
def test_parsers_gelfparser_parse_partially_invalid_file(caplog):
"""Tests the GELFParser with a file containing invalid JSON strings."""
with StringIO() as file:
file.writelines(
[
# This is invalid gelf but we assume it's valid in our case
'{"short_message"... | 5,351,382 |
def get_href_kind(href, domain):
    """Return kind of href: 'internal' or 'external' relative to *domain*."""
    return 'internal' if is_internal_href(href, domain) else 'external'
def main(infile, make_plot, split=False):
"""Read the input file and average the profiles within it."""
print('Reading file "{}"'.format(infile))
raw_data, raw_matrix, average_data, var_data = average_profiles(infile)
print('Data sets: {}'.format(len(raw_matrix)))
print('Variables in sets:')
for... | 5,351,384 |
def get_input_file(req_id, file_number):
"""
Returns an uploaded input file, 404 if not yet uploaded.
:param req_id: The id of the conversion.
:param file_number: File number.
:return: File as text.
"""
cr = db.retrieve(req_id)
if cr is None:
return jsonify({
'status'... | 5,351,385 |
def check_mark(value):
    """Render an HTML check-mark entry for the flags in tables.

    Returns the check-mark entity when *value* equals 1, otherwise an
    empty string.
    """
    if value == 1:
        return format_html('&#10004;')
    return ''
def test_sleep(n):
"""Used only for testing -- example method with argument. """
logger = LMLogger.get_logger()
logger.info("Starting test_sleep({}) in pid {}".format(n, os.getpid()))
try:
job = get_current_job()
job.meta['sample'] = 'test_sleep metadata'
job.meta['pid'] = int(o... | 5,351,387 |
def military_to_english_time(time, fmt="{0}:{1:02d}{2}"):
""" assumes 08:33:55 and 22:33:42 type times
will return 8:33am and 10:33pm
(not we floor the minutes)
"""
ret_val = time
try:
h, m = split_time(time)
ampm = "am"
if h >= 12:
ampm = "pm"
... | 5,351,388 |
def test_inheritance_branch_override(root, branch, node_class):
    """Branches defined on the subclass take precedence over the baseclass.
    This precedence matches the MRO used for diamond inheritance.
    """
    # The branch's 'local' entry must be built from the subclass's node type.
    assert isinstance(root[branch]['local'], node_class) | 5,351,389 |
def parse_ssh_config(text):
"""
Parse an ssh-config output into a Python dict.
Because Windows doesn't have grep, lol.
"""
try:
lines = text.split('\n')
lists = [l.split(' ') for l in lines]
lists = [filter(None, l) for l in lists]
tuples = [(l[0], ''.join(l[1:]).st... | 5,351,390 |
def visit_downloadlink_node_rst(self, node):
"""
Converts node *downloadlink* into :epkg:`rst`.
"""
logger = logging.getLogger("downloadlink")
logger.info("[downloadlink] RST '{0}'".format(str(node)))
if node['format']:
self.add_text(":downloadlink:`{0} <{1}::{2}>`".format(
... | 5,351,391 |
def snake_case(string: str) -> str:
    """Convert upper camelcase to snake case.

    Inserts an underscore before every ASCII uppercase letter (except at
    position 0), then lowercases the whole result.
    """
    pieces = []
    for index, char in enumerate(string):
        if index and "A" <= char <= "Z":
            pieces.append("_")
        pieces.append(char)
    return "".join(pieces).lower()
def positive_int(s: str) -> int:
    """Positive integer validator for `argparse.ArgumentParser`.

    Note: zero is accepted — the check rejects only negative values.
    """
    value = int(s)
    if value >= 0:
        return value
    raise argparse.ArgumentTypeError("A positive number is required")
def create_tempdir(suffix='', prefix='tmp', directory=None, delete=True):
"""Create a tempdir and return the path.
This function registers the new temporary directory
for deletion with the atexit module.
"""
tempd = tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=directory)
if delete:
... | 5,351,394 |
def code(
    type_: ObjectTypes = typer.Option(
        ..., '--type', help='Generate filter, model, and object files for the provided type.'
    ),
):
    """Generate Args.

    Typer command: normalizes the requested type name to snake case and
    generates its filter, model, and object files.
    """
    type_ = utils.snake_string(type_.value)
    gen_filter(type_)
    gen_model(type_)
    gen_object(type_) | 5,351,395 |
async def get_transactor_key(request):
"""Get transactor key out of request."""
id_dict = deserialize_api_key(
request.app.config.SECRET_KEY, extract_request_token(request)
)
next_id = id_dict.get("id")
auth_data = await get_auth_by_next_id(next_id)
encrypted_private_key = auth_data.get... | 5,351,396 |
def _get_plot_aeff_exact_to_ground_energy(parsed_ncsd_out_files):
"""Returns a list of plots in the form
(xdata, ydata, const_list, const_dict),
where A=Aeff is xdata, and ground energy is ydata
"""
a_aeff_to_ground_state_energy = get_a_aeff_to_ground_state_energy_map(
parsed_ncsd_ou... | 5,351,397 |
def plot(config):
"""Plot train and test accuracy."""
optimizer_cls = config["optimizer_cls"]
problem_cls = config["problem_cls"]
num_epochs = config["num_epochs"]
summary = load_summary(problem_cls, optimizer_cls)
epochs = list(range(num_epochs + 1))
train_acc_percent = [100 * val for va... | 5,351,398 |
def get_twitter_auth():
"""Setup Twitter connection
return: API object"""
parameters = set_parameters.take_auth_data()
twitter_access_token = parameters['twitter_access_token']
twitter_secret_token = parameters['twitter_secret_token']
twitter_api_key = parameters['twitter_api_key']
twitter... | 5,351,399 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.