| content (string, length 22–815k) | id (int64, 0–4.91M) |
|---|---|
def _to_original(sequence, result):
""" Cast result into the same type
>>> _to_original([], ())
[]
>>> _to_original((), [])
()
"""
if isinstance(sequence, tuple):
return tuple(result)
if isinstance(sequence, list):
return list(result)
return result | 5,353,700 |
def generate_oauth_service():
"""Prepare the OAuth2Service that is used to make requests later."""
return OAuth2Service(
client_id=os.environ.get('UBER_CLIENT_ID'),
client_secret=os.environ.get('UBER_CLIENT_SECRET'),
name=config.get('name'),
authorize_url=config.get('authorize_ur... | 5,353,701 |
def is_relative_path(value):
    """Return True when ``value`` is a relative filesystem path.

    Strings carrying an explicit http/https/file URL scheme are never
    considered relative paths, and neither are absolute paths.
    """
    has_url_scheme = urlparse(value).scheme in ('http', 'https', 'file')
    return not (has_url_scheme or os.path.isabs(value))
def add_flops_counter_variable_or_reset(module: torch.nn.Module) -> None:
    """Attach (or reset to zero) a ``__flops__`` counter on ``module``.

    Modules that ``is_supported_instance`` rejects are left untouched.

    :param module: module to (re)initialise the FLOPs counter on.
    """
    if not is_supported_instance(module):
        return
    module.__flops__ = 0
def get_B_R(Rdot):
    """Return B_R, which here is simply the rotation derivative ``Rdot``.

    NOTE(review): the previous docstring said "Get B_R from Q, Qdot" but
    the only argument is ``Rdot`` — presumably B_R equals Rdot directly;
    confirm against the derivation this code implements.
    """
    return Rdot
def configure_nltk():
"""Downloads any required NLTK data if not already downloaded."""
nltk_resources_folder = get_nltk_resources_folder()
nltk.data.path.append(nltk_resources_folder)
try:
nltk.data.find("tokenizers/punkt")
except:
logger.warning("NLTK punkt tokenizer was not found... | 5,353,705 |
def run_coroutine_with_span(span, coro, *args, **kwargs):
"""Wrap the execution of a Tornado coroutine func in a tracing span.
This makes the span available through the get_current_span() function.
:param span: The tracing span to expose.
:param coro: Co-routine to execute in the scope of tracing span... | 5,353,706 |
def set_database_slide_metadata(database,table):
"""this will iterate and update various project related attributes that may not be set on initial parse
such as stain type, tissue_type , etc... """
## update stain_Type first
sql_lookup = "select * from `"+ database + "`.`dzi_pyramid_info` where stain_type is NULL "... | 5,353,707 |
def run_pkg_tests(m, env_prefix):
"""
Run the tests defined in the recipe of a package in the given
environment.
"""
tmpdir = tempfile.mkdtemp()
try:
test_files = conda_build_test.create_test_files(m, tmpdir)
py_files, pl_files, shell_files = test_files
if not (py_files ... | 5,353,708 |
def calc_bin_centre(bin_edges):
    """Return the centre of each histogram bin, given the bin edges.

    For N+1 edges this yields N centres: left edge plus half the width.
    """
    half_widths = np.diff(bin_edges) / 2
    return bin_edges[:-1] + half_widths
def to_matrix(dG, tG, d_mat, t_mat, label_mat, bridges):
"""
Parameters:
tG: target graph
dG: drug graph
d_mat: drug feature matrix
t_mat: target feature matrix
label_mat: label matrix
bridges: known links between drugs and targets
Ret... | 5,353,710 |
def ensureList(obj):
    """Ensure ``obj`` is a list, wrapping or converting as needed."""
    if isinstance(obj, list):
        # Already a list: hand back the original object unchanged.
        return obj
    if hasattr(obj, '__iter__'):
        # Any other iterable is materialised into a new list.
        # NOTE(review): in Python 3 strings are iterable, so a str is split
        # into characters here — confirm that is intended by callers.
        return list(obj)
    return [obj]
def gelu_impl(x):
    """OpenAI's tanh-based GELU approximation."""
    # 0.7978845608028654 is sqrt(2/pi), precomputed as a literal.
    sqrt_2_over_pi = 0.7978845608028654
    inner = sqrt_2_over_pi * x * (1.0 + 0.044715 * x * x)
    return 0.5 * x * (1.0 + torch.tanh(inner))
async def test_query_inst_chaincodes(org1_gw):
    """Verify that querying instantiated chaincodes returns a non-empty set."""
    result = await org1_gw.query_instantiated_chaincodes()
    assert result.chaincodes
def get_shape(kind='line', x=None, y=None, x0=None, y0=None, x1=None, y1=None, span=0, color='red', dash='solid',
width=1,
fillcolor=None, fill=False, opacity=1, xref='x', yref='y'):
"""
Returns a plotly shape
Parameters:
-----------
kind : string
Shape k... | 5,353,714 |
def get_weights(weights_dic, model, epoch):
"""
callback returns matrix with n arrays where n is the number of features and each
array has m elements where m is the number of neurons
records weights for each layer for each epoch and stores in provided dictionary dictionary
weights_dic: dictionary to... | 5,353,715 |
def clean_row(elements: List[Tag]) -> List[Tag]:
"""
Clean MathML row, removing children that should not be considered tokens or child symbols.
One example of cleaning that should take place here is removing 'd' and 'δ' signs that are
used as derivatives, instead of as identifiers.
"""
# Remove... | 5,353,716 |
def models(estimators, cv_search, transform_search):
"""
Grid search prediction workflows. Used by bll6_models, test_models, and product_models.
Args:
estimators: collection of steps, each of which constructs an estimator
cv_search: dictionary of arguments to LeadCrossValidate to search over... | 5,353,717 |
def apply_sync_sensors(sensors_changes: DefaultDict[str, Set[str]]) -> None:
"""
:param sensors_changes:
:return:
"""
from noc.inv.models.object import Object
from noc.inv.models.sensor import sync_object
query = Q()
if "inv.ObjectModel" in sensors_changes:
query |= Q(model__in... | 5,353,718 |
def cross(args: Namespace) -> None:
"""Corss task. Corss validate model on given dataset."""
records, n_state = _get_records(args)
result = cross_validate(n_state, records, args.k)
print(f"Mean Variance: {result.var:.3f}")
print(f"Mean StdDev: {result.std:.3f}")
print(f"Mean Error: {result.... | 5,353,719 |
def xml_timestamp(location='Europe/Prague'):
    """Create an ISO-8601 timestamp carrying time-zone information.

    Args:
        location (str): tz-database location name

    Returns:
        str: ISO-formatted timestamp for the current time in that zone
    """
    zone = pytz.timezone(location)
    now = datetime.datetime.now(zone)
    return now.isoformat()
def create_frame_drop_test_launch(filename,camera0,camera1,display=True):
"""
Creates launch file for the blob_finder based frame drop test.
"""
template_name = 'frame_drop_test_launch.xml'
machine_file = mct_utilities.file_tools.machine_launch_file
image_corr_topic_list = mct_introspection.fin... | 5,353,721 |
def transform_postorder(comp, func):
"""Traverses `comp` recursively postorder and replaces its constituents.
For each element of `comp` viewed as an expression tree, the transformation
`func` is applied first to building blocks it is parameterized by, then the
element itself. The transformation `func` should ... | 5,353,722 |
def bytes_to_unicode_records(byte_string, delimiter, encoding):
""" Convert a byte string to a tuple containing an array of unicode
records and any remainder to be used as a prefix next time. """
string = byte_string.decode(encoding)
records = string.split(delimiter)
return (records[:-1], records[-1... | 5,353,723 |
def administrator():
    """Build and return a fresh :class:`t_system.administration.Administrator`."""
    return Administrator()
def ocr(path, lang='eng'):
"""Optical Character Recognition function.
Parameters
----------
path : str
Image path.
lang : str, optional
Decoding language. Default english.
Returns
-------
"""
image = Image.open(path)
vectorized_image = numpy.asarray(image).ast... | 5,353,725 |
def get_rpd_vars():
"""Read RPD variables set by calling and parsing output from init
"""
cmd = get_init_call()
cmd = ' '.join(cmd) + ' && set | grep "^RPD_"'
try:
res = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError:
logg... | 5,353,726 |
def which(filename):
"""
Emulates the UNIX `which` command in Python.
Raises an IOError if no result is found.
"""
locations = os.environ.get("PATH").split(os.pathsep)
candidates = []
for location in locations:
candidate = os.path.join(location, filename)
if os.path.isfile(c... | 5,353,727 |
def _GetAllHypervisorParameters(cluster, instances):
"""Compute the set of all hypervisor parameters.
@type cluster: L{objects.Cluster}
@param cluster: the cluster object
@param instances: list of L{objects.Instance}
@param instances: additional instances from which to obtain parameters
@rtype: list of (or... | 5,353,728 |
def is_solution(system, point):
"""
Checks whether the point is the solution for a given constraints system.
"""
a = np.array(system)
# get the left part
left = a[:, :-1] * point
left = sum(left.T)
# get the right part
right = (-1) * a[:, -1]
return np.all(left <= r... | 5,353,729 |
def git_config_bool(option: str) -> bool:
    """Read ``option`` from git config and interpret it as a boolean.

    Anything other than the literal string "true" (including a missing
    value) yields False.
    """
    value = git_config(option)
    return value == "true"
def read_configs(paths):
"""
Read yaml files and merged dict.
"""
eths = dict()
vlans = dict()
bonds = dict()
for path in paths:
cfg = read_config(path)
ifaces = cfg.get("network", dict())
if "ethernets" in ifaces:
eths.update(ifaces["ethernets"])
... | 5,353,731 |
def get_client():
""" generates API client with personalized API key """
with open("api_key.json") as json_file:
apikey_data = json.load(json_file)
api_key = apikey_data['perspective_key']
# Generates API client object dynamically based on service name and version.
perspective = discovery.build('commentan... | 5,353,732 |
def tables(
path,
fts4,
fts5,
counts,
nl,
arrays,
csv,
no_headers,
table,
fmt,
json_cols,
columns,
schema,
views=False,
):
"""List the tables in the database"""
db = sqlite_utils.Database(path)
headers = ["view" if views else "table"]
if counts:
... | 5,353,733 |
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
settings['route_patterns'] = {
'villages': '/geography.cfm',
'parameters': '/thesaurus.cfm',
'sources': '/bibliography.cfm',
'languages': '/languages.cfm',
'florafauna': '/f... | 5,353,734 |
def overrides(pattern, norminput):
"""Split a date subfield into beginning date and ending date. Needed for fields with
multiple hyphens.
Args:
pattern: date pattern
norminput: normalized date string
Returns:
start date portion of pattern
start date portion of norminput
... | 5,353,735 |
def defaultPolynomialLoad():
    """
    pytest fixture building a default polynomial static-load object.

    :return: PolynomialStaticLoad initialized with default values
    """
    # NOTE(review): the docstring used to say PolynomialLoad while the class
    # constructed is PolynomialStaticLoad — presumably the same type renamed;
    # confirm against the library's API.
    return PolynomialStaticLoad()
def calc_pi(iteration_count, cores_usage):
"""
We calculate pi using Ulam's Monte Carlo method. See the module
documentation. The calculated value of pi is returned.
We use a process pool to offer the option of spreading the
calculation across more then one core.
iteration_count is... | 5,353,737 |
def send_postatus_errors():
"""Looks at postatus file and sends an email with errors"""
# Gah! Don't do this on stage!
if settings.STAGE:
return
def new_section(line):
return (line.startswith('dennis ')
or line.startswith('Totals')
or line.startswith('BU... | 5,353,738 |
def pair_range_from_to(x): # cpdef pair_range(np.ndarray[long,ndim=1] x):
"""
Returns a list of half-cycle-amplitudes
x: Peak-Trough sequence (integer list of local minima and maxima)
This routine is implemented according to
"Recommended Practices for Wind Turbine Testing - 3. Fatigue Loads", 2. ... | 5,353,739 |
def serialize_action(
action: RetroReaction, molecule_store: MoleculeSerializer
) -> StrDict:
"""
Serialize a retrosynthesis action
:param action: the (re)action to serialize
:param molecule_store: the molecule serialization object
:return: the action as a dictionary
"""
dict_ = action.... | 5,353,740 |
def lint(where, strict):
    """Run every configured linter and exit with the number of failures.

    Both linters always run; a failure in one does not short-circuit the
    other. The process exit code is the count of linters that failed.
    """
    failures = []
    # Order matters only for output; Python lint runs first as before.
    for checker in (_lint_py, _eslint):
        try:
            checker(where, strict)
        except CalledProcessError as err:
            failures.append(err)
    exit(len(failures))
def convert(obj, totype, debug=False, **kwargs):
"""Converto object obj to type totype.
The converter is chosen from gna.converters dictionary based on the type(obj) or one of it's base classes.
:obj: object to convert
:totype: the target type
Order:
1. Set type to type(obj).
2. Try to... | 5,353,742 |
def test_get_ctf_category():
    """Ensure get_ctf_category extracts the expected category list."""
    expected = ["binary_exploitation"]
    assert get_ctf_category(rawsec_json) == expected
def complement(s):
    """
    Return the complement of sequence ``s``.

    Uses the module-level ``__complementTranslation`` table.
    NOTE(review): ``string.translate(s, table)`` is the Python 2 API; on
    Python 3 this would need ``s.translate(table)`` — confirm the target
    interpreter version before modernising.
    """
    return string.translate(s, __complementTranslation)
def get_state_transitions(actions):
"""
get the next state
@param actions:
@return: tuple (current_state, action, nextstate)
"""
state_transition_pairs = []
for action in actions:
current_state = action[0]
id = action[1][0]
next_path = action[1][1]
next_state ... | 5,353,745 |
def add_label(
img: Image,
text: str,
x: int,
y: int,
ttf: str,
fontsize: float,
rgb: Tuple[int, int, int],
hjust: float = 0,
vjust: float = 0) -> None:
"""
Adds a label to an image.
"""
draw = ImageDraw.Draw(img)
font = get_fon... | 5,353,746 |
def test_list_unsigned_short_max_length_nistxml_sv_iv_list_unsigned_short_max_length_1_1(mode, save_output, output_format):
"""
Type list/unsignedShort is restricted by facet maxLength with value 5.
"""
assert_bindings(
schema="nistData/list/unsignedShort/Schema+Instance/NISTSchema-SV-IV-list-un... | 5,353,747 |
def parse_pascal_string(characterset, data):
"""
Read a Pascal string from a byte array using the given character set.
:param characterset: Character set to use to decode the string
:param data: binary data
:return: tuple containing string and number of bytes consumed
"""
string_size_format,... | 5,353,748 |
def setup(bot: Neorg) -> None:
    """Extension entry point: register the BotControl cog on the bot."""
    cog = BotControl(bot)
    bot.add_cog(cog)
def add_single_expense_cli():
    """CLI wrapper: record a single expense against the dev database."""
    add_single_expense(dev_db)
def percent_cb(name, complete, total):
""" Callback for updating target progress """
Logger.debug('{}: {} transferred out of {}'.format(name,
sizeof_fmt(complete),
sizeof_fmt(total)))
progress.updat... | 5,353,751 |
def test_index_availability(client):
    """The index page must be present and respond with HTTP 200."""
    assert client.get('/').status_code == 200
def folder(initial=None, title='Select Folder'):
"""Request to select an existing folder or to create a new folder.
Parameters
----------
initial : :class:`str`, optional
The initial directory to start in.
title : :class:`str`, optional
The text to display in the title bar of the di... | 5,353,753 |
def build_clusters(
metadata: pd.DataFrame,
ipm_regions: Sequence[str],
min_capacity: float = None,
max_clusters: int = None,
) -> pd.DataFrame:
"""Build resource clusters."""
if max_clusters is None:
max_clusters = np.inf
if max_clusters < 1:
raise ValueError("Max number of ... | 5,353,754 |
def plot_lift_cruise_network(results, line_color = 'bo-',line_color2 = 'r^-', save_figure = False, save_filename = "Lift_Cruise_Network", file_type = ".png"):
"""This plots the electronic and propulsor performance of a vehicle with a lift cruise network
Assumptions:
None
Source:
None
Inputs:
... | 5,353,755 |
def add_asset(zs_code, code, name, category):
"""添加资产品种到数据库"""
_, created = Asset.get_or_create(
zs_code=zs_code,
code=code,
name=name,
category=category,
)
if created:
LOGGER.info('created asset in database successfully')
else:
LOGGER.warning('asset i... | 5,353,756 |
def tqdm_hook(t: tqdm) -> Any:
"""Progressbar to visualisation downloading progress."""
last_b = [0]
def update_to(b: int = 1, bsize: int = 1, t_size: Optional[int] = None) -> None:
if t_size is not None:
t.total = t_size
t.update((b - last_b[0]) * bsize)
last_b[0] =... | 5,353,757 |
def fix_mapping_versions():
"""
This function monkey patches the mapping version information in arcpy to support the currently installed version,
along with past versions if they are not included (arcpy 10.5 does not have 10.4 supported version, but the
support is there under the hood).
"""
# g... | 5,353,758 |
def check_regular_timestamps(
time_series: TimeSeries, time_tolerance_decimals: int = 9, gb_severity_threshold: float = 1.0
):
"""If the TimeSeries uses timestamps, check if they are regular (i.e., they have a constant rate)."""
if (
time_series.timestamps is not None
and len(time_series.tim... | 5,353,759 |
def test_device_bypass(monkeypatch):
"""Test setting the bypass status of a device."""
_was_called = False
def _call_bypass(url, body, **kwargs):
nonlocal _was_called
assert url == "/appservices/v6/orgs/Z100/device_actions"
assert body == {"action_type": "BYPASS", "device_id": [6023... | 5,353,760 |
def test_run3(client):
"""
Tests that a docker executor raises RuntimeError if no
docker_host and docker_port available in execution
"""
with client.application.app_context():
task, job, execution = JobExecutionFixture.new_defaults()
_, pool_mock, _ = PoolFixture.new_defaults(r"test... | 5,353,761 |
def import_config_module( cfg_file ):
""" Returns valid imported config module.
"""
cfg_file = re.sub( r'\.py$', '', cfg_file )
cfg_file = re.sub( r'-', '_', cfg_file )
mod_name = 'config.' + cfg_file
cfg_mod = importlib.import_module( mod_name )
if not hasattr( cfg_mod, 'pre_start_config' ... | 5,353,762 |
def process_batches(args, batches):
"""Runs a set of batches, and merges the resulting output files if more
than one batch is included.
"""
nbatches = min(args.nbatches, len(batches))
pool = multiprocessing.Pool(nbatches, init_worker_thread)
try:
batches = pool.imap(run_batch, batches, ... | 5,353,763 |
def test_vcfield_pk_lookups_work(modelname, name, number, parent_int,
parent_str, expected_pk, testmodels,
make_instance, noise_data):
"""
When a VirtualCompField is a PK on a model, lookups that use `pk`
as a field (instead of the field name... | 5,353,764 |
def tripledes_cbc_pkcs5_decrypt(key, data, iv):
"""
Decrypts 3DES ciphertext in CBC mode using either the 2 or 3 key variant
(16 or 24 byte long key) and PKCS#5 padding.
:param key:
The encryption key - a byte string 16 or 24 bytes long (2 or 3 key mode)
:param data:
The ciphertext... | 5,353,765 |
def kruskal_chi2_test(data=None, alpha=0.05, precision=4):
"""
col = 要比較的 target
row = data for each target
"""
if type(data) == pd.DataFrame:
data = data.copy().to_numpy()
alldata = np.concatenate(data.copy())
else:
alldata = np.concatenate(data.copy())
k = data.sha... | 5,353,766 |
def add_new_action(action, object_types, preferred, analyst):
"""
Add a new action to CRITs.
:param action: The action to add to CRITs.
:type action: str
:param object_types: The TLOs this is for.
:type object_types: list
:param preferred: The TLOs this is preferred for.
:type preferred... | 5,353,767 |
def plot_history(H, epochs, output_path):
"""
Utility function for plotting model history using matplotlib.
This method was developed for use in class and adjusted for this project.
"""
plt.style.use("fivethirtyeight")
plt.figure()
plt.plot(np.arange(0, epochs), H.history["loss"], label="t... | 5,353,768 |
def dissociate_scaling_group(scaling_group, domain):
"""
Dissociate a domain from a scaling_group.
\b
SCALING_GROUP: The name of a scaling group.
DOMAIN: The name of a domain.
"""
with Session() as session:
try:
data = session.ScalingGroup.dissociate_domain(scaling_group... | 5,353,769 |
def test_PipeJsonRpcSendAsync_5():
"""
Specia test case.
Two messages: the first message times out, the second message is send before the response
from the first message is received. Verify that the result returned in response to the
second message is received. (We discard the result of the message ... | 5,353,770 |
def RACEDataset(race_type):
"""
Loads a RACE dataset given the type (see the RACEType enum).
Any error during reading will generate an exception.
Returns a Pandas DataFrame with 5 columns:
* 'article': string
* 'question': string
* 'answers': list[string], length = 4
* 'correct': oneof(... | 5,353,771 |
def tpc(fastas, **kw):
"""
Function to generate tpc encoding for protein sequences
:param fastas:
:param kw:
:return:
"""
AA = kw['order'] if kw['order'] is not None else 'ACDEFGHIKLMNPQRSTVWY'
encodings = []
triPeptides = [aa1 + aa2 + aa3 for aa1 in AA for aa2 in AA for aa3 in AA]
... | 5,353,772 |
def cli():
"""cli entry point"""
parser = argparse.ArgumentParser("Welcome to SigSticker, providing all of your sticker needs")
parser.add_argument(
"-p",
"--pack",
help="Pass in a pack url inline",
nargs="+",
action="append",
)
args = parser.parse_args()
# Get the packs
packs = sum(args.pack, [])
if ... | 5,353,773 |
def _uninstall_flocker_centos7():
"""
Return an ``Effect`` for uninstalling the Flocker package from a CentOS 7
machine.
"""
return sequence([
run_from_args([
b"yum", b"erase", b"-y", b"clusterhq-python-flocker",
]),
run_from_args([
b"yum", b"erase", b... | 5,353,774 |
def set_se_loop(context: X12ParserContext, segment_data: Dict) -> None:
"""
Sets the transaction set footer loop.
:param context: The X12Parsing context which contains the current loop and transaction record.
:param segment_data: The current segment's data
"""
context.set_loop_context(
... | 5,353,775 |
def get_memory_banks_per_run(coreAssignment, cgroups):
"""Get an assignment of memory banks to runs that fits to the given coreAssignment,
i.e., no run is allowed to use memory that is not local (on the same NUMA node)
to one of its CPU cores."""
try:
# read list of available memory banks
... | 5,353,776 |
def authenticate(username, password):
"""Authenticate with the API and get a token."""
API_AUTH = "https://api2.xlink.cn/v2/user_auth"
auth_data = {'corp_id': "1007d2ad150c4000", 'email': username,
'password': password}
r = requests.post(API_AUTH, json=auth_data, timeout=API_TIMEOUT)
... | 5,353,777 |
def filter_hashtags_users(DATAPATH, th, city):
"""
cleans target_hashtags by removing hashtags that are used by less than 2 users
replaces hahstags by ht_id and saves to idhashtags.csv
creates entropy for each ht_id and saves to hashtag_id_entropies.csv
prints std output
:param DATAPATH:
:pa... | 5,353,778 |
def plate_96_bind(form_info):
"""Add the plate container info to the samples in the form.
Arguments:
form_info (api_types.CustomForm): the dataclass holding info on the
form.
"""
smp_locations = {}
for sample in form_info.samples:
sample.con = api_types.Container()
... | 5,353,779 |
def cluster_set_state(connection: 'Connection', state: int, query_id=None) -> 'APIResult':
"""
Set cluster state.
:param connection: Connection to use,
:param state: State to set,
:param query_id: (optional) a value generated by client and returned as-is
in response.query_id. When the paramete... | 5,353,780 |
def print_signatures(client: zeep.Client, out):
"""Print a short summary of each operation signature offered by client."""
# From: https://stackoverflow.com/questions/50089400/introspecting-a-wsdl-with-python-zeep
for service in client.wsdl.services.values():
out.write(f"service: {service.name}\n")
... | 5,353,781 |
def get_all(data, path):
    """Return a list of every value in ``data`` matching JsonPath ``path``.

    :param data: document to search
    :param path: JsonPath expression understood by ``iterate``
    :return: list of all matches (empty when nothing matches)
    """
    # list(...) replaces the pass-through comprehension: same result,
    # clearer intent, and iterates at C speed.
    return list(iterate(data, path))
def extract_information_from_blomap(oneLetterCodes):
"""
extracts isoelectric point (iep) and
hydrophobicity from blomap for each aminoacid
Parameters
----------
oneLetterCodes : list of Strings/Chars
contains oneLetterCode for each aminoacid
Returns
-------
float, float
... | 5,353,783 |
def split_train_val_test_detection_data(xml_dir):
"""
prepare train/val/test dataset for detection
:param xml_dir:
:return:
"""
filenames = [_.replace('.xml', '') for _ in os.listdir(xml_dir)]
random.shuffle(filenames)
TEST_RATIO = 0.2
train = filenames[0:int(len(filenames) * (1 - ... | 5,353,784 |
def partitions(n):
"""
Return a sequence of lists
Each element is a list of integers which sum to n -
a partition n.
The elements of each partition are in descending order
and the sequence of partitions is in descending lex order.
>>> list(partitions(4))
[[3,... | 5,353,785 |
def create_request_element(channel_id, file_info, data_id, annotation):
"""
create dataset item from datalake file
:param channel_id:
:param file_id:
:param file_info:
:param label_metadata_key:
:return:
"""
data_uri = 'datalake://{}/{}'.format(channel_id, file_info.file_id)
da... | 5,353,786 |
def _categorise(obj, _regex_adapter=RegexAdapter):
"""
Check type of the object
"""
if obj is Absent:
return Category.ABSENT
obj_t = type(obj)
if issubclass(obj_t, NATIVE_TYPES):
return Category.VALUE
elif callable(obj):
return Category.CALLABLE
elif _regex_adap... | 5,353,787 |
def test_emcee_opitmizer_can_restart(datasets_db):
"""A restart trace can be passed to the Emcee optimizer """
dbf = Database.from_string(CU_MG_TDB, fmt='tdb')
datasets_db.insert(CU_MG_DATASET_ZPF_WORKING)
param = 'VV0001'
opt = EmceeOptimizer(dbf)
restart_tr = -4*np.ones((2, 10, 1)) # 2 chains... | 5,353,788 |
def remove_key(d, key):
"""Safely remove the `key` from the dictionary.
Safely remove the `key` from the dictionary `d` by first
making a copy of dictionary. Return the new dictionary together
with the value stored for the `key`.
Parameters
----------
d : dict
The dictionary from w... | 5,353,789 |
def test_clear_child_protocol_sets_raise_exc(httpcs_svc_instance, video_format, rewrite_type):
"""Test: Clear protocolSets with exception in Httpcs Service Instance object
Steps:
1. Clear protocolSets, do not set either published or source protocol
2. Check raised exception
Result:
OK: excepti... | 5,353,790 |
def graph(x, y, pl, opacity, lbf, title=None, x_title=None, y_title=None, xlim=None, ylim=None):
"""
Creates and displays a matplotlib scatter plot of y vs. x
Parameters
----------
x: iterable, x-coordinates for values to be plotted
y: iterable, y-coordinates for values to be plotted
pl: re... | 5,353,791 |
def tasks_file_to_task_descriptors(tasks, retries, input_file_param_util,
output_file_param_util):
"""Parses task parameters from a TSV.
Args:
tasks: Dict containing the path to a TSV file and task numbers to run
variables, input, and output parameters as column headings.... | 5,353,792 |
def array2raster(newRasterfn, dataset, array, dtype):
"""
Function from https://gist.github.com/jkatagi/a1207eee32463efd06fb57676dcf86c8
Save GeoTiff file from numpy.array
input:
newRasterfn: save file name
dataset : original tif file
array : numpy.array
dtype: Byte or Fl... | 5,353,793 |
def load_model(filename, folder=None):
"""
Load a model from a file.
:param filename: name of the file to load the model from
:param folder: name of the subdirectory folder. If given, the model will be loaded from the subdirectory.
:return: model from the file
"""
if folder is not None:
... | 5,353,794 |
def spreadplayers(self: Client, x: RelativeFloat, y: RelativeFloat,
                  spread_distance: float, max_range: float,
                  victim: str) -> str:
    """Issue the ``spreadplayers`` command and return the server response."""
    command_args = ('spreadplayers', x, y, spread_distance,
                    max_range, victim)
    return self.run(*command_args)
def get_version():
    """Return the serialization version as a single integer."""
    SERIALIZATION_VERSION = 2
    return SERIALIZATION_VERSION
def format_result(func):
"""包装结果格式返回给调用者"""
@wraps(func)
def wrapper(*args, **kwargs):
ret = {}
try:
data = func(*args, **kwargs)
if type(data) is Response:
return data
ret['data'] = data
ret['success'] = True
ret['m... | 5,353,797 |
def writeData(filename, data):
"""
MBARBIER: Taken/adapted from https://github.com/ChristophKirst/ClearMap/blob/master/ClearMap/IO/TIF.py
Write image data to tif file
Arguments:
filename (str): file name
data (array): image data
Returns:
str: tif file name
... | 5,353,798 |
def test_activate_heater(sht4x, power, duration):
"""
Test if the command is accepted by the device and returns the proper
result.
"""
temperature, humidity = sht4x.activate_heater(power, duration)
assert type(temperature) is Sht4xTemperature
assert type(temperature.ticks) is int
assert ... | 5,353,799 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.