content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def gather_audio_video_eavesdropping(x) :
"""
@param x : a Analysis instance
@rtype : a list strings for the concerned category, for exemple [ 'This application makes phone calls', "This application sends an SMS message 'Premium SMS' to the '12345' phone number" ]
"""
result = []
... | 5,352,300 |
def vdw_radius_single(element):
"""
Get the Van-der-Waals radius of an atom from the given element. [1]_
Parameters
----------
element : str
The chemical element of the atoms.
Returns
-------
The Van-der-Waals radius of the atom.
If the radius is unknown for the element... | 5,352,301 |
def evaluate_ins_to_proto(ins: typing.EvaluateIns) -> ServerMessage.EvaluateIns:
"""Serialize flower.EvaluateIns to ProtoBuf message."""
parameters_proto = parameters_to_proto(ins.parameters)
config_msg = metrics_to_proto(ins.config)
return ServerMessage.EvaluateIns(parameters=parameters_proto, config=c... | 5,352,302 |
def avatar_synth_df(dir, batch_size, num_threads):
"""
Get data for training and evaluating the AvatarSynthModel.
:param dir: The data directory.
:param batch_size: The minibatch size.
:param num_threads: The number of threads to read and process data.
:return: A dataflow for parameter to bitm... | 5,352,303 |
def compile(file, cfile=None, dfile=None, doraise=False):
    """No-op stand-in: IronPython cannot compile source to .pyc.

    All arguments are accepted purely for API compatibility with the
    CPython ``py_compile.compile`` signature and are ignored.
    """
    return None
def _append_char_coded_text(elem, s, char_code_pat):
"""Append s to C{elem} with text in coded with character style codes converted to span elements.
@param elem: element corresponding to an MDF field. This is modified by the function.
It may already have 'span' subelements corresponding to cha... | 5,352,305 |
def assign_material(obj, materialname):
"""This function assigns a material to an objects mesh.
:param obj: The object to assign the material to.
:type obj: bpy.types.Object
:param materialname: The materials name.
:type materialname: str
"""
if materialname not in bpy.data.materials:
... | 5,352,306 |
def _check_stack_axis(axis, dims, default='unnamed'):
""" check or get new axis name when stacking array or datasets
(just to have that in one place)
"""
if axis is None:
axis = default
if axis in dims:
i = 1
while default+"_{}".format(i) in dims:
... | 5,352,307 |
def eval_push_time_ratios(problem_size: int = 3000) -> Optional[TimeRatioType]:
"""
Function that calculates the execution time ratios, for the different time complexities.
Here, a process pool is created in order to speed up the process of generating
the lists of time ratios, for each time complexity.... | 5,352,308 |
def validate_pbi_sprint(sprint, snapshot_date):
""" Validate sprint in a pbi, try to create it if possible """
if sprint is None:
raise ValidationError('Sprint cannot be null')
from ..models import Sprint
sprt = Sprint.objects.get(id=sprint.id)
if sprt is not None:
# si la date d... | 5,352,309 |
def add_sim(Θ, f, Θs, G, D, Θ_s, G_s, D_s, sim_time_s, **state):
"""Add a simulation to the known simulations by performing the simulation.
"""
t0 = time.time()
g_s, d_s = gwed(Θ, f=f, **state)
Θs.append(Θ)
G.append(g_s)
D.append(d_s)
Θ_s.append(Θ)
G_s.append(g_s)
D_s.append(d_s)... | 5,352,310 |
def organize_photos(path: str):
"""
organize_photos
:param path:
:return:
"""
for file in os.listdir(path):
if os.path.isfile(os.path.join(path, file)):
move_photo(os.path.join(path, file))
elif os.path.isdir(os.path.join(path, file)):
organize_photos(os.path.join(path, file))
else:
print("**** " +... | 5,352,311 |
def _load_data():
"""
Internal function to get the data to plot.
"""
# Load homicides
homicides = gv_data.PoliceHomicides.get()
# Calculate concentrated disadvantage
sub_data = []
for cls in [
"PublicAssistance",
"FemaleHouseholders",
"PercentInPoverty",
... | 5,352,312 |
def test_auto_add_dataloader_idx(tmpdir, add_dataloader_idx):
"""test that auto_add_dataloader_idx argument works."""
class TestModel(BoringModel):
def val_dataloader(self):
dl = super().val_dataloader()
return [dl, dl]
def validation_step(self, *args, **kwargs):
... | 5,352,313 |
def move(x_pos, y_pos):
    """Return the G-CODE describing motion to x_pos, y_pos."""
    # G1: linear move at the configured feedrate; M400: wait for completion.
    lines = [
        f"G1X{x_pos}Y{y_pos}F{FEEDRATE};",
        "M400;",
    ]
    return "\n".join(lines) + "\n"
def load_scrub_optional_upload(storage_folder: str, filename: str) -> str:
"""Loads a option file that was previously saved in the storage folder.
:param storage_folder: A string representing the path of the storage
folder.
:param filename: A string representing the name of the file that is being
... | 5,352,315 |
def Editor_NewExistingLevels_Works():
"""
Summary: Perform the below operations on Editor
1) Launch & Close editor
2) Create new level
3) Saving and loading levels
4) Level edits persist after saving
5) Export Level
6) Can switch to play mode (ctrl+g) and e... | 5,352,316 |
def test_kb_wrap_exceptions(
version_id: str, kbpk_len: int, key_len: int, error: str
) -> None:
"""Test wrap exceptions"""
with pytest.raises(tr31.KeyBlockError) as e:
kb = tr31.KeyBlock(b"E" * kbpk_len)
kb.header._version_id = version_id
_ = kb.wrap(b"F" * key_len)
assert e.val... | 5,352,317 |
def test_backlinks(fixture, chain_id):
"""
NOTE: these links all use `parameters` and not `requestBody` or
`x-apigraph-requestBodyParameters`
"""
doc_uri = fixture_uri(fixture)
apigraph = APIGraph(doc_uri)
assert apigraph.docs.keys() == {doc_uri}
expected_nodes = [
NodeKey(doc_... | 5,352,318 |
def visualize_result(
experiment_name,
X_test, Y_test, Y_hat, parameters,
losses=None, save_dir="results"
):
"""
结果可视化
"""
# 没有保存目录时创建
now = datetime.now().strftime("%Y%m%d%H%M%S")
save_dir += "_" + experiment_name + os.sep + now
if not os.path.exists(save_dir):
... | 5,352,319 |
def compute_error_decrease(fun, VX, EToV) -> Dict[int, float]:
"""
Computes estimate of possible error decrease for each element in mesh.
:param fun: Function float -> float
:param VX: dict from point id to its position on x axis.
:param EToV: dict from element id to a tuple of its boundary points.... | 5,352,320 |
def file_name_to_title_name(file_name):
"""
#Arguments
check_mk_url (str): URL to Check_Mk web application, check file names and print for each file in the directory in the correct format
#Examples
file_name_to_title_name('activate_mode')
output = 'Activate Mode: activate_mode.md'
"""
... | 5,352,321 |
def save_binary_mask_triple(
rgb_img: np.ndarray, label_img: np.ndarray, save_fpath: str, save_to_disk: bool = False
) -> np.ndarray:
"""Currently mask img background is light-blue. Instead, could set it to white. np.array([255,255,255])
Args:
rgb_img:
label_img:
save_fpath
... | 5,352,322 |
def disc_train_step(
input_images, avg_input, real_images, input_condns, real_condns, epoch,
):
"""
Discriminator training step. Args:
input_images: tf tensor of training images for template branch.
avg_input: tf tensor of linear average repeated 'batch_size' times.
input_images: tf ... | 5,352,323 |
def get_random_fortune(fortune_file):
"""
Get a random fortune from the specified file. Barfs if the corresponding
`.dat` file isn't present.
:Parameters:
fortune_file : str
path to file containing fortune cookies
:rtype: str
:return: the random fortune
"""
fortune... | 5,352,324 |
def output_data(
files : List[pathlib.Path],
parser : Callable[[List[str]], List[Dict[str, DataValue]]]
) -> Optional[OutputData]:
"""Parses output datapoints from a list of output files.
Args:
files: A list of data output files to parse
parser: A function that turns a list of data lines into a... | 5,352,325 |
def test_side_view(capsys):
"""Supplying two outfile should print out the two outputs side-by-side."""
outfiles = [os.path.join(fixtures_dir, 'one.out'),
os.path.join(fixtures_dir, 'two.in')]
expected_file = os.path.join(fixtures_dir, 'side_view_expected.geomconv')
geomconv.main(outfiles... | 5,352,326 |
async def delete_item(item_id: int, db: Session = Depends(get_db)):
"""
Delete the Item with the given ID provided by User stored in database
"""
db_item = ItemRepo.fetch_by_id(db, item_id)
if db_item is None:
raise HTTPException(status_code=404, detail="Item not found with the given ID")
... | 5,352,327 |
def get_clusters(data,
model = None,
num_clusters = 4,
ignore_features = None,
normalize = True,
transformation = False,
pca = False,
pca_components = 0.99,
ignore_low_variance=Fa... | 5,352,328 |
def process_messages(deck, messages, encrypt_or_decrypt):
"""(list of int, list of str, str) -> list of str
Return the messages encrypted or decrypted using the specified deck.
The parameter encrypt_or_decrypt will be ENCRYPT to encrpyt the message,
and DECRYPT to decrypt the message
>>>deck = [1,... | 5,352,329 |
def print_output(r_lst):
    """
    Print the lines of a command's output, one per line.

    :param r_lst: iterable of output lines to print
    :return: None
    """
    # The original used the Python 2 "print line" statement, which is a
    # SyntaxError on Python 3; the function-call form works on both.
    for line in r_lst:
        print(line)
def write_bom_seeed(output_file_slug, components):
"""Write the BOM according to the Seeed Studio Fusion PCBA template available at:
https://statics3.seeedstudio.com/assets/file/fusion/bom_template_2016-08-18.csv
```
Part/Designator,Manufacture Part Number/Seeed SKU,Quantity
C1,RHA,1
"D1,D2",CC... | 5,352,331 |
def alarm(context):
    """
    Handle sending the alarm (reminder) message for the job attached
    to this callback context.
    """
    current_job = context.job
    # job.context holds the chat id the reminder was scheduled for
    context.bot.send_message(current_job.context, text=LEMBRETE)
def clip(wavelength, spectra, threshold, substitute=None):
""" Removes or substitutes values above the given threshold.
Args:
wavelength <numpy.ndarray>: Vector of wavelengths.
spectra <numpy.ndarray>: NIRS data matrix.
threshold <float>: threshold value for rejection
substitute... | 5,352,333 |
def PrintBlockAnalysis(e: edid.Edid, desc, mode: Mode, raw_mode, start, prefix=None):
"""Print and interpret a single 18-byte descriptor's information.
Called up to 4 times in a base EDID.
Uses descriptor module to determine descriptor type.
Args:
e: The full EDID being parsed.
desc: The d... | 5,352,334 |
def duplicate12(modeladmin, request, queryset):
""" Duplicate 12 action.
Duplicates each item in the queryset to the next week.
Continues duplicating until 12 duplicates are created.
Skips to the next week if a similar object already exists.
"""
for i in queryset:
count = 0
... | 5,352,335 |
def findMachines(fqpn):
"""
Recursively yield L{MethodicalMachine}s and their FQPNs in and
under the a Python object specified by an FQPN.
The discovery heuristic considers L{MethodicalMachine} instances
that are module-level attributes or class-level attributes
accessible from module scope. M... | 5,352,336 |
def asynchronous_prod_milp_constraint_rule(backend_model, loc_tech, timestep):
"""
BigM limit set on `carrier_prod`, forcing it to either be zero or non-zero,
depending on whether `prod` is zero or one, respectively.
.. container:: scrolling-wrapper
.. math::
\\boldsymbol{carrier_p... | 5,352,337 |
def user_cilogon_certificates_directory_path(instance):
"""
Return full path to filename based on User UUID value
:param instance:
:param filename:
:return:
"""
# file will be uploaded to MEDIA_ROOT/cilogon_certificates/user_<uuid>/<filename>
return os.path.join(MEDIA_ROOT, 'cilogon_cert... | 5,352,338 |
def drawCurveArc(self): #---- only for ELLIPSE -------------------------------------------------------------
"""Given a dxf ELLIPSE object return a blender_curve.
"""
center = self.loc
radius = self.radius
start = self.start_angle
end = self.end_angle
if start > end:
start = start - 360.0
startmatrix = Math... | 5,352,339 |
def read_pickle(filename, protocol=-1, **kwargs):
    """
    Read a grid saved in PICKLE format into a GridData object.

    :param filename: full path to the file to load
    :type filename: str
    :param protocol: unused; kept for interface compatibility
    :param kwargs: unused; kept for interface compatibility
    :rtype: ~uquake.core.data.grid.Grid
    """
    import pickle
    # Use a context manager so the file handle is always closed;
    # the original passed open(...) directly and leaked the handle.
    with open(filename, 'rb') as f:
        return pickle.load(f)
def test_parameter_check(params, error, err_msg):
    """Check that invalid constructor params make ``fit`` raise the expected error."""
    estimator = BOSS(**params)
    expected_pattern = re.escape(err_msg)
    with pytest.raises(error, match=expected_pattern):
        estimator.fit(X, y)
def linked_ims(im_list, pix_per_um, shape=(2,2),
x_range=None, y_range=None, scale_fig=1, scale_height=1.4,
brightness=1, palette='Turbo256', cmap_range='from zero',
show_fig=True, title_list=[], t_fs=24, ax_fs=16, tk_fs=12, cb_fs=14):
"""
Shows multiple f... | 5,352,342 |
def prepend_zeros_to_lists(ls):
"""
Takes a list of lists and appends 0s to the beggining of each sub_list
until they are all the same length. Used for sign-extending binary numbers.
"""
longest = max([len(l) for l in ls])
for i in range(len(ls)):
while len(ls[i]) < longest:
... | 5,352,343 |
def utility_format_obj_input():
    """Pass a bad input object (a bare int) to format_obj_input."""
    cuboid_one = magpy.magnet.Cuboid((1, 2, 3), (1, 2, 3))
    cuboid_two = magpy.magnet.Cuboid((1, 2, 3), (1, 2, 3))
    # 333 is not a magpy object, so this input list is invalid
    format_obj_input([cuboid_one, cuboid_two, 333])
def read_qmcpack_hamiltonian(filename):
"""Read Hamiltonian from QMCPACK format.
Parameters
----------
filename : string
QMPACK Hamiltonian file.
Returns
-------
hamil : dict
Data read from file.
"""
try:
hc, chol, enuc, nmo, nelec, nmok, qkk2 = (
... | 5,352,345 |
def run_rnn(file):
# define model params
"""
Run the process to train/test a recurrent neural network using LSTM using a given dataset file.
:param string file: Location of CSV-formatted dataset file
:return: Model with expected (test) targets and associated scores
:rtype: object, dataframe, ob... | 5,352,346 |
def lab_results(request, format=None):
"""Get lab results data."""
if request.method == 'GET':
limit = request.query_params.get("limit", 1000)
if limit:
limit = int(limit)
order_by = request.query_params.get("order_by", "")
# TODO: Get any filters from dict(request.q... | 5,352,347 |
def test_J4(i):
    """Test a property of J from result 2 of the paper (uniform distribution on i outcomes)."""
    uniform = SD([1 / i] * i)
    expected = (i - 1) * (np.log2(i) - np.log2(i - 1))
    assert J(uniform) == pytest.approx(expected)
def load(file, file_format=None, **kwargs):
"""Load data from json, yaml, or pickle files.
This method provides a unified api for loading data from serialized files.
Args:
file (str or file-like object): Filename or a file-like object.
file_format (str, optional): If not specified, the fil... | 5,352,349 |
def get_criteo(root):
"""Download the Criteo data if it doesn't exist."""
url = 'https://s3-eu-west-1.amazonaws.com/kaggle-display-advertising-challenge-dataset/dac.tar.gz'
raw_folder = os.path.join(root, 'criteo', 'raw')
processed_folder = os.path.join(root, 'criteo', 'processed')
makedir_exist_o... | 5,352,350 |
def StepToGeom_MakeAxis2Placement_Convert(*args):
    """
    Convert a STEP Axis2Placement3d entity into an OCC Geom_Axis2Placement.

    SWIG-generated wrapper; forwards directly to the underlying C++ binding.

    :param SA: the STEP placement to convert
    :type SA: Handle_StepGeom_Axis2Placement3d &
    :param CA: receives the converted geometric placement
    :type CA: Handle_Geom_Axis2Placement &
    :rtype: bool
    """
    return _StepToGeom.StepToGeom_MakeAxis2Placement_Convert(*args)
def test_best_site(txt, expected_coords, expected_count):
"""
Test against examples in the brief
"""
asteroids = solution1.read_array(io.StringIO(txt))
predicted_coords, predicted_count = solution1.calculate_best_site(asteroids)
assert predicted_count == expected_count
assert (predicted_c... | 5,352,352 |
def results(year: hug.types.text, firstName: hug.types.text, lastName: hug.types.text):
"""Returns the results for a given candidate for a given year"""
engine = create_engine(
'postgresql://%s:%s@%s/%s' %(user,pwd,ip,user),
client_encoding='utf8',echo=False)
conn = engine.connect()
... | 5,352,353 |
async def get_user_from_event(event):
""" Get the user from argument or replied message. """
args = event.pattern_match.group(1).split(':', 1)
extra = None
if event.reply_to_msg_id and not len(args) == 2:
previous_message = await event.get_reply_message()
user_obj = await event.client.ge... | 5,352,354 |
def stat_scores_multiple_classes(
pred: torch.Tensor,
target: torch.Tensor,
num_classes: Optional[int] = None,
argmax_dim: int = 1,
reduction: str = 'none',
) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]:
"""
.. deprecated::
Use :func:`torchmetrics.f... | 5,352,355 |
def learn_skill(entity: EntityID, skill_name: str):
"""
Add the skill name to the entity's knowledge component.
"""
if not entity_has_component(entity, Knowledge):
add_component(entity, Knowledge([]))
knowledge = get_entitys_component(entity, Knowledge)
if knowledge:
skill_class ... | 5,352,356 |
def build_resnet(
repetitions=(2, 2, 2, 2),
include_top=True,
input_tensor=None,
input_shape=None,
classes=1000,
block_type='usual',
class_detector_top=False):
"""
TODO
"""
# Determine proper input shape
input_shape = _obtain_input_shape(input_shape,
... | 5,352,357 |
def find_python_str():
    """Yield normalized paths of python executables found on PATH."""
    search_dirs = os.environ["PATH"].split(os.pathsep)
    # On Windows look for the literal "python.exe"; elsewhere match
    # "python" followed by exactly one character (e.g. "python3").
    executable_pattern = "python.exe" if os.name == "nt" else "python?"
    for search_dir in search_dirs:
        for match in glob.glob("%s/%s" % (search_dir, executable_pattern)):
            yield os.path.normpath(match)
def dispatch_tensorflowjs_to_keras_h5_conversion(config_json_path, h5_path):
"""Converts a Keras Model from tensorflowjs format to H5.
Args:
config_json_path: Path to the JSON file that includes the model's
topology and weights manifest, in tensorflowjs format.
h5_path: Path for the to-be-created Ker... | 5,352,359 |
def correct_doi(file_name: str):
"""Attempt extract a DOI from a filename which contains a DOI."""
if file_name.startswith("acs.jced") or file_name.startswith("je"):
doi = f"10.1021/{file_name}"
elif file_name.startswith("j.jct"):
doi = f"10.1016/{file_name}"
elif file_name.startswith("... | 5,352,360 |
def test_one_supplier_one_lot(mock_data_client):
"""Test a single client in a single lot."""
mock_data_client.get_framework.return_value = {
'frameworks': {'lots': [{'slug': 'saas'}]}
}
mock_data_client.find_framework_suppliers.return_value = {
'supplierFrameworks': [
{'suppl... | 5,352,361 |
def getTextFromFile(filename):
    """
    Return the full text of a chapter file.

    :param filename: name of the file inside CHAPTERDIR
    :return: the file contents as a string
    """
    filepath = os.path.join(CHAPTERDIR, filename)
    # Context manager guarantees the handle is closed; the original
    # called open(...).read() and leaked the file object.
    with open(filepath) as chapter_file:
        return chapter_file.read()
def decode_base64(data):
"""Decode base64, padding being optional.
:param data: Base64 data as an ASCII byte string
:returns: The decoded byte string.
"""
if sys.version_info.major > 2:
data = bytes(data, 'utf-8')
missing_padding = len(data) % 4
if missing_padding != 0:
dat... | 5,352,363 |
def version():
"""Display full version information."""
# Print out the current version of Tower CLI.
click.echo('Tower CLI %s' % __version__)
# Print out the current API version of the current code base.
click.echo('API %s' % CUR_API_VERSION)
# Attempt to connect to the Ansible Tower server.
... | 5,352,364 |
def create_multi_dataset_generic_benchmark(
train_datasets: Sequence[SupportedDataset],
test_datasets: Sequence[SupportedDataset],
*,
other_streams_datasets: Dict[str, Sequence[SupportedDataset]] = None,
complete_test_set_only: bool = False,
train_transform=None, train_ta... | 5,352,365 |
def check_triangle_inequality(method, h1, h2, h3):
""" Classic test for a metric: dist(a,b) < dist(a,b) + dist(a,c)"""
d12 = method(h1, h2)
d23 = method(h2, h3)
d13 = method(h1, h3)
d13_plus_d23 = np.round(d13 + d23, decimals=10)
d12_ = np.round(d12, decimals=10)
assert d12_ <= d13_plus_d2... | 5,352,366 |
def convert_to_xml_string(string):
"""
For input strings with escaped tags and special characters
issue a set of conversion functions to prepare it prior
to adding it to an article object
"""
string = entity_to_unicode(string)
string = decode_brackets(string)
string = eautils.replace_tag... | 5,352,367 |
def read_shear_catalog_type(stage):
"""
Determine the type of shear catalog a stage is using as input.
Returns a string, e.g. metacal, lensfit.
Also sets shear_catalog_type in the stage's configuration
so that it is available later and is saved in output.
"""
with stage.open_input('shear_cat... | 5,352,368 |
def inverse_fft_iterative(
poly: Sequence, has_imaginary: bool = False, imag_threshold: float = 1e-14
) -> List:
"""Perform inverse iterative discrete fast Fourier transform (DFT) of a polynomial with a degree that is `2^t-1`, t being a positive integer (ie `len(poly)` should be an exact power of 2).
I... | 5,352,369 |
def admin_uri():
    """
    Helper function to get the admin url quickly.

    :returns: admin url, redirect or print friendly
    :rtype: string
    """
    admin_slug = app.global_content['options']['admin-url'].value
    return '/' + admin_slug
def rlencode(x, check = True, dropna = False):
"""
Run length encoding.
Based on http://stackoverflow.com/a/32681075, which is based on the rle
function from R.
See https://gist.github.com/nvictus/66627b580c13068589957d6ab0919e66
Parameters
----------
x : 1D array_like
Input array to encode
dropna: bool, op... | 5,352,371 |
def config_load(config_path):
    """Load a json config from a file.

    :param config_path: path to the JSON configuration file
    :return: the parsed configuration, as returned by ``files.json_load``
    """
    return files.json_load(config_path)
def tz2utc(date, tz):
"""Offset between local time and UTC.
Parameters
----------
date : various
The local time, in any format acceptable to `date2time`.
tz : string
date will be processed via `pytz`.
Returns
-------
offset : datetime.timedelta
The UTC offset.
""... | 5,352,373 |
def replace_from_execution_report(replace_id, execution_report):
"""Create OrderCancelReplaceRequest from given execution report
For more info about OrderCancelReplaceRequest look at https://support.xena.exchange/support/solutions/articles/44000222082-ws-trading-api#order_cancel_replace_request
"""
cmd... | 5,352,374 |
def test_simple_method_ptr(tmp_path, template_path):
"""Write out a very simple top level class with a method.
Args:
tmp_path ([type]): [description]
"""
classes = [
class_info(
"xAOD.Jets",
"xAOD::Jets",
[
method_info(
... | 5,352,375 |
def convert_unit(value, factor, offset):
    """Return the value converted linearly: ``value * factor + offset`` (decimal arithmetic)."""
    scaled = num2decimal(value) * num2decimal(factor)
    return scaled + num2decimal(offset)
def label_panels(axes, labels=None, **kwargs):
    """Label the 1-D array of axes, using *labels* if given, else uppercase Latin letters."""
    letter_sequence = labels or string.ascii_uppercase
    # zip stops at the shorter sequence, so extra axes are left unlabeled
    for panel_axis, panel_letter in zip(axes, letter_sequence):
        _label_panel(panel_axis, panel_letter, **kwargs)
def DefinePanelZoneNodes(MasterNodeID: int, MidPanelZoneWidth, MidPanelZoneHeight):
"""
Function that defines the remaining 10 nodes of a panel zone given the dimensions and the master node (top center one).
ID convention for the panel zone: \n
PZNodeID: 12 nodes: top right 1xy (master), 1xy1 top rig... | 5,352,378 |
def single_chromosome_graph_scatter(
df,
chromosome,
chosen_template,
marker_width,
colors,
font_size,
xaxis_gridlines,
yaxis_gridlines,
font_family,
samples,
):
""" Filter out current chromosome and set x- and y-max"""
curr_chrom_data = df[df["Chromosome"] == chromosome]... | 5,352,379 |
def _findall_rmaps_using_reference(filename, observatory="hst"):
    """Return the basename of all reference mappings which mention `filename`.

    :param filename: reference file basename to search for
    :param observatory: observatory name the mappings belong to (default "hst")
    """
    return uses_files([filename], observatory, "rmap")
def batchGD_bp(X, y, d=3, nH=10, c=3, lr=0.8, T=100, eps=0.0):
"""
BP算法, 每轮迭代使用全部样本
:param X: 训练样本的特征矩阵
:param y: 训练样本的标签向量
:param d: 训练样本的特征维数
:param nH: 隐层的节点数
:param c: 类别数
:param lr: 学习率
:param T: 停机条件1(最大迭代轮数)
:param eps: 停机条件2(相邻两次迭代loss之差的最大允许值), 设为0.0表示不使用这个条件
:return... | 5,352,381 |
def delete_host_by_id(host_id):
    """
    Delete the host with the given database id.

    This is intended for use in the adcm_delete_host ansible plugin only.
    """
    target_host = Host.obj.get(id=host_id)
    delete_host(target_host)
def run_ase_opt(
atoms: Atoms,
fmax: float = 0.01,
max_steps: int = 100,
optimizer: str = "FIRE",
opt_kwargs: Dict[str, Any] = None,
scratch_dir: str = SETTINGS.SCRATCH_DIR,
gzip: bool = SETTINGS.GZIP_FILES,
copy_files: List[str] = None,
) -> trajectory:
"""
Run an ASE-based opti... | 5,352,383 |
def __slicer(my_str, sub):
"""
Remove everything in a string before a specified substring is found.
Throw exception if substring is not found in string
https://stackoverflow.com/questions/33141595/how-can-i-remove-everything-in-a-string-until-a-characters-are-seen-in-python
Args:
m... | 5,352,384 |
def test_delete_no_oid():
"""Test DELETE api without index, it should return response as "None"."""
config = CORTXS3Config()
response = CORTXS3ObjectApi(config).delete(None, "test_layot_id2", "test_pvid_str")
if (response is not None):
assert response[0] is False
assert response[1] is No... | 5,352,385 |
def rank_genes_groups_heatmap(
adata: AnnData,
groups: Union[str, Sequence[str]] = None,
n_genes: int = 10,
groupby: Optional[str] = None,
key: str = None,
show: Optional[bool] = None,
save: Optional[bool] = None,
**kwds,
):
"""\
Plot ranking of genes using heatmap plot (see :fun... | 5,352,386 |
def load_nii(src_path, as_array=False, as_numpy=False):
"""
Load a brain from a nifti file
:param str src_path: The path to the nifty file on the filesystem
:param bool as_array: Whether to convert the brain to a numpy array of
keep it as nifty object
:param bool as_numpy: Whether to conver... | 5,352,387 |
def case34_3ph():
"""
Create the IEEE 34 bus from IEEE PES Test Feeders:
"https://site.ieee.org/pes-testfeeders/resources/”.
OUTPUT:
**net** - The pandapower format network.
"""
net = pp.create_empty_network()
# Linedata
# CF-300
line_data = {'c_nf_per_km': 3.82... | 5,352,388 |
def create_sales_invoice(order_dict, order, site_id_order,
msgprint_log, changes):
"""
Create a Sales Invoice from the eBay order.
"""
updated_db = False
# Don't create SINV from incomplete order
if (order['OrderStatus'] != 'Completed'
or order['CheckoutStat... | 5,352,389 |
def calc_ext_str_features(id2bedrow_dic, chr_len_dic,
out_str, args,
check_seqs_dic=False,
stats_dic=None,
tr_regions=False,
tr_seqs_dic=False):
"""
Calculate structure features (str... | 5,352,390 |
def test_filtrate_is_a_callable():
    """Verify that the module-level 'filtrate' object can be called like a function."""
    assert callable(filtrate)
def generate_daily_stats():
"""
Generates dummy daily stats for one year
"""
times = [1577836800 + (i * 86400) for i in range(0,366)]
stats_arr = [[]]
for time in times:
vals = [uniform(0,100) for i in range(843)]
stats_arr[0].append({
'min': np.min(vals),
... | 5,352,392 |
def dump_file(filepath, filename, Variable):
    """
    Pickle ``Variable`` to ``<filepath>/<filename>.pickle``.

    @Params:
        filepath: directory in which to write the dump file
        filename: base name (without extension) of the dump file
        Variable: object to pickle into the file
    @Returns:
        None
    """
    import os
    # os.path.join is portable and avoids doubled separators, unlike
    # the original manual "filepath + '/' + filename" concatenation.
    target = os.path.join(filepath, filename + '.pickle')
    with open(target, 'wb') as handle:
        pickle.dump(Variable, handle, protocol=pickle.HIGHEST_PROTOCOL)
def create_mask(imsize: tuple, bbox: tuple) -> Image:
    """
    Build a single-channel ("L" mode) mask: black background with the
    given bounding box filled white (255).

    Args:
        imsize: (w, h)
        bbox: (x0, y0, x1, y1)
    """
    canvas = Image.new("L", imsize)
    painter = ImageDraw.Draw(canvas)
    painter.rectangle(bbox, fill=255)
    return canvas
def send_template_email(
recipient_list: list,
subject: str,
template: str,
template_context: dict,
from_email: str = None,
plain_context: dict = None,
html_context: dict = None,
language: str = 'nl',
) -> None:
"""
Light wrapper for Django's send_... | 5,352,395 |
def phone_number_validator(value, region=settings.KOMPASSI_PHONENUMBERS_DEFAULT_REGION):
"""
Validate the phone number using Google's phonenumbers library.
"""
exc = _('Invalid phone number.')
try:
phone_number = phonenumbers.parse(value, region)
except phonenumbers.NumberParseException... | 5,352,396 |
def request_change_template_picture(update, context):
"""
Args:
update (telegram.Update)
context (telegram.ext.CallbackContext)
"""
reg_user = get_reg_user(update.effective_user, update.effective_chat)
markup = ReplyKeyboardMarkup(
[
[CANCEL_MARKUP]
], res... | 5,352,397 |
def build_resolved_spec(api, spec_lookup, cache, force_build, spec, version,
ecosystem_hash):
"""Builds a resolved spec at a specific version, then uploads it.
Args:
* api - The ThirdPartyPackagesNGApi's `self.m` module collection.
* spec_lookup ((package_name, platform) -> Resolved... | 5,352,398 |
def test_resource_delta(loop):
"""Test if the controller correctly calculates the delta between
``last_applied_manifest`` and ``last_observed_manifest``
State (0):
The application possesses a last_applied_manifest which specifies a Deployment,
a Service and a ConfigMap. The application has... | 5,352,399 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.