content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def generic_validator(check, error_message):
"""
Validator factory
>>> v = generic_validator(is_int, "invalid int")
>>> v(6)
6
>>> v("g")
Traceback (most recent call last):
...
ValidationError: [u'invalid int']
"""
# Validator closure
def inner_validator(value, *args, **... | 5,351,700 |
def get_molec_shape(mol, conf, confId, vdwScale=1.0,
boxMargin=2.0, spacing=0.2):
"""
Get the shape of a conformer of a molecule as a grid
representation.
"""
box = Chem.ComputeConfBox(conf)
sideLen = (box[1].x-box[0].x + 2*boxMargin,
box[1].y-box[0].y + 2*box... | 5,351,701 |
def autoencoder(dimensions=[784, 512, 256, 64]):
"""Build a deep denoising autoencoder w/ tied weights.
Parameters
----------
dimensions : list, optional
The number of neurons for each layer of the autoencoder.
Returns
-------
x : Tensor
Input placeholder to the network
... | 5,351,702 |
def _asymptotic_expansion_of_normalized_black_call(h, t):
"""
Asymptotic expansion of
b = Φ(h+t)·exp(x/2) - Φ(h-t)·exp(-x/2)
with
h = x/s and t = s/2
which makes
b = Φ(h+t)·exp(h·t) - Φ(h-t)·exp(-h·t)
exp(-(h²+t²)/2)
... | 5,351,703 |
def date_read(date_string, *, convert_to_current_timezone: bool = False):
    """Parse *date_string* into a date, optionally shifting to the current timezone."""
    return date_parse(
        date_string,
        convert_to_current_timezone=convert_to_current_timezone,
    )
def encode_name(name):
    """
    URL-encode an entity title for use in URLs.

    The name is first encoded to UTF-8 bytes and then percent-encoded
    with no characters treated as safe.
    """
    utf8_name = name.encode('utf-8')
    return urllib.quote(utf8_name, safe='')
def cmp_str(element1, element2):
    """
    Compare two values numerically when both parse as integers.

    Falls back to a plain string comparison when either value is not a
    valid integer literal.
    """
    try:
        int1, int2 = int(element1), int(element2)
    except ValueError:
        return cmp(element1, element2)
    return cmp(int1, int2)
def parse_page_file(page_raw: str, type: str, file_name: str) -> Page:
    """
    Build a Page object from the raw text of a page file.

    Args:
        page_raw: raw file contents to parse.
        type: page type assigned verbatim to the resulting Page.
        file_name: file name from which the page id is extracted.

    Returns:
        Page: populated with id, type, title and parsed fields.
    """
    page_id = extract_page_id(file_name)
    # parse_md yields the page title plus the remaining structured fields.
    title, fields = parse_md(page_raw)
    return Page(
        id=page_id,
        type=type,
        title=title,
        fields=fields,
    )
def fetch(name):
"""
Fetches an appropriate model to perform the prediction.
:param name: model's name
:return: a trained model
"""
K.clear_session()
try:
full_weights_path = path.join(path_prefix, *load_weights()[name])
if name == 'svm':
return SVMModel(joblib.... | 5,351,708 |
def post(text, appid=2, touser=None, toparty=None):
"""
party
"""
#print '=========',type(text)
if type(text) is unicode:
text = text.encode('utf8')
if not touser:
touser = []
if not toparty:
toparty = ['2']
url = 'https://qyapi.weixin.qq.com/cgi-bin/message/send?... | 5,351,709 |
def download_image_from_annotation(
api_key: str,
api_url: str,
annotation_path: Path,
images_path: str,
annotation_format: str,
use_folders: bool,
video_frames: bool,
):
"""Helper function: dispatcher of functions to download an image given an annotation
Parameters
----------
... | 5,351,710 |
def hex_encrypt(msg):
"""Hex encrypts a message.
:param bytes msg: string message to be encrypted.
:return: string for encrypted version of msg in hex.
:rtype: bytes
"""
if not cipher:
return msg
if not isinstance(msg, six.binary_type):
raise ValueError('only bytes can be en... | 5,351,711 |
def test_TestPermsWorldReadableDir(tmp_path):
"""Test for TestPermissionsWorldReadableDir."""
# setup test case
test_path = tmp_path / 'testdir'
test_path.mkdir()
test_path.chmod(0o755)
# run test case
test = fs_lint.TestPermissionsWorldReadableDir()
assert test(test_path, test_path.lst... | 5,351,712 |
def http_request(source_id, endpoint_id, args, kwargs, # pylint: disable=too-many-arguments
service_addr, auth=None):
"""Call http endpoint"""
headers = {"content-type": "application/json"}
if auth is not None:
headers["Authorization"] = basic_auth_header(auth)
payload = _creat... | 5,351,713 |
async def filter_by_game_stats(opsdroid, string, room, action):
"""Match incoming messages against the current games stats."""
if room not in STAT_REGEXES.keys():
gamestats = await get_stat_names(opsdroid, room)
if not gamestats:
return []
STAT_REGEXES[room] = {"set": regex.c... | 5,351,714 |
def password_provider():
    """
    Provide the parameter sets for the full password check.

    Returns:
        list: one single-element tuple per check index 0-4.
    """
    return list(zip(range(5)))
def make_combiparameter(*args, **kwargs):
"""
Make a combined qcodes parameter.
Args:
*args : list of gates or parameters
(e.g. make_combiparameter("A1", "A3", station.gates.B1 ))
"""
station = qc.Station.default
parameters = []
for i in args:
if type(i) == str:
... | 5,351,716 |
def sort_data(items):
"""
"""
for i in range(len(items)):
for j in range(len(items)-1-i):
if items[j] > items[j+1]:
items[j], items[j+1] = items[j+1], items[j]
print items | 5,351,717 |
def main():
""" Input Image File / Url """
os.system('clear')
print logo
print '\x1b[1;0mAuthor :', author
print '\x1b[1;0mGithub :', github
img = raw_input('\n\x1b[1;0m[\x1b[1;32m+\x1b[1;0m] Image File / Url : \x1b[1;32m')
if 'https' in img or 'http' in img:
try:
image =... | 5,351,718 |
def get_delta_fmt(delta):
"""arbitrary colour formatting of rank delta
more red for bigger losses, more green for bigger gains
"""
col = (0, 0, 0, 255)
n = abs(delta)
s = delta
if delta < 0:
sat = min(n/200 + 0.2, 1)
r, g, b = hsv_to_rgb(0, sat, 1)
col = (r, g, b, 1)
... | 5,351,719 |
def main(args=None):
    """Entry point: build the workflow and exit with a status code.

    Exits with code 0 on success (assuming create_workflow returns 0),
    or 1 when workflow creation raises.
    """
    warnings.showwarning = _warn_redirect
    try:
        retcode = create_workflow(args)
    except Exception as e:
        # Surface the failure instead of silently exiting non-zero;
        # sys.stderr.write works under both Python 2 and 3.
        sys.stderr.write('Workflow creation failed: {0}\n'.format(e))
        retcode = 1
    sys.exit(retcode)
def api_wait():
    """
    Block for 3 seconds so the API's per-3-second rate-limit window can
    refresh before the next request is sent.
    """
    # gevent.sleep yields to the event loop, so other greenlets keep
    # running while this one waits out the rate-limit window.
    gevent.sleep(3)
def displayFrames(frames):
"""Displays the supplied list of frames
@type frames: list<Frame>
@param frames: List of frames to display"""
framesFormat = "%-35s %-11s %-15s %-13s %-12s %-9s %5s %7s %5s"
header = framesFormat % (
"Frame", "Status", "Host", "Start", "End", "Runtime", "Mem", "Re... | 5,351,722 |
def calculate_wtv(sample_values, epoch_time_interval=WTV_EPOCH_TIME, relative_to_time=None):
"""
Calculate the Wear-Time Validation (30-minute epochs) for a given sample ndarray [[time_seconds, accel_x, accel_y, accel_z]].
Based on the method by van Hees et al in PLos ONE 2011 6(7),
"Estimation of D... | 5,351,723 |
def function(default=None):
"""Docstring comes first.
Possibly many lines.
"""
# FIXME: Some comment about why this function is crap but still in production.
import inner_imports
if inner_imports.are_evil():
# Explains why we have this if.
# In great detail indeed.
x = ... | 5,351,724 |
def fit_pk_parms_1d(p0, x, f, pktype='pvoigt'):
"""
Performs least squares fit to find parameters for 1d analytic functions fit
to diffraction data
Required Arguments:
p0 -- (m) ndarray containing initial guesses for parameters
for the input peaktype
x -- (n) ndarray of coordinate... | 5,351,725 |
def is_pipe_registered(
pipe : Union['meerschaum.Pipe', 'meerschaum.Pipe.MetaPipe'],
pipes : dict,
debug : bool = False
):
"""
Check if a Pipe or MetaPipe is inside the pipes dictionary.
"""
from meerschaum.utils.debug import dprint
ck, mk, lk = pipe.connector_keys, pipe.... | 5,351,726 |
def truncate(s, length=255, killwords=True, end='...'):
"""
Wrapper for jinja's truncate that checks if the object has a
__truncate__ attribute first.
Altering the jinja2 default of killwords=False because of
https://bugzilla.mozilla.org/show_bug.cgi?id=624642, which could occur
elsewhere.
... | 5,351,727 |
def tract_segmentation_single_example_lap (kdt_T_A, prototypes_T_A,sid, num_NN,T_A ):
""" step 1: tract segmentation from a single example using Jonker-Volgenant algorithm (LAPJV)
"""
E_t_filename= 'data/example/'+ str(sid) +'_'+str(tract_name)+'.trk'
print("Loa... | 5,351,728 |
def output_screening_results(parent_aln_obj, aln_obj_lst, style):
"""
Function outputs results according to user input style from the -s flag
Currently there are 3 versions.
1) changes output alignment sequence to lower case, then overwrites to
upper case if there is a match
2) alignment seq... | 5,351,729 |
def url(method):
"""对于每一个URL的请求访问装饰器,在出错时返回对应的信息"""
@wraps(method)
def error_handler(*args, **kwargs):
try:
return success(method(*args, **kwargs))
except RequestError as r:
current_app.logger.exception(r)
# 返回对应异常类的字符串文档
return failed(reason... | 5,351,730 |
def is_abbreviation(sentence):
"""
Evaluate a word to be an abbreviation if the immediate word before the
period contains a capital letter and not a single word sentence.
"""
sentence_split = sentence.split(" ")
if len(sentence_split) == 1:
return False
elif len(sentence_split[-1]) <... | 5,351,731 |
async def test_crudrouter_get_one_404(test_app):
    """Verify select_or_404 yields an HTTP 404 when no record is found."""
    client = AsyncClient(app=test_app[1], base_url="http://test")
    async with client:
        response = await client.get("/model/test")
        assert response.status_code == 404
def update_sitedown(cur, site, status):
"""
Update whether the site is down
:param cur: database cursor
:param site: named tuple containing site data
:param status: string 'true' if site is down or 'false' if site is up
"""
cur.execute('''UPDATE watchlist SET sitedown=? WHERE username=? AND... | 5,351,733 |
def generate_self_signed(domain):
    """Create a self-signed SSL key/certificate pair named after *domain*."""
    openssl_cmd = (
        'openssl req -x509 -nodes -days 365 -newkey rsa:2048'
        ' -keyout {domain}.key -out {domain}.crt'
    )
    run(openssl_cmd.format(domain=domain))
def compose_redis_key(vim_name, identifier, identifier_type="vdu"):
"""Compose the key for redis given vim name and vdu uuid
Args:
vim_name (str): The VIM name
identifier (str): The VDU or VNF uuid (NFVI based)
identifier_type (str): the identifier type. Default type is vdu. Also vnf is... | 5,351,735 |
def stack_to_hdf5(stack_path, write_path, dims, dtype):
    """
    Load a raw binary stack from disk and save it as an HDF5 file.

    Args:
        stack_path: path to the raw binary file containing the stack.
        write_path: destination path for the HDF5 output.
        dims: shape to which the flat on-disk data is reshaped.
        dtype: numpy dtype of the raw on-disk values.
    """
    stack = np.fromfile(stack_path, dtype=dtype).reshape(dims)
    # The stack is stored under the '/default' dataset key.
    writeHDF5(write_path, '/default', stack)
def calGridID(locs, id, SPLIT = 0.0005):
"""
根据城市网格编号还原经纬度信息
:param locs:
:param id:
:param SPLIT=0.05:
"""
centerincrement = SPLIT/2.0
LNGNUM = int((locs['east'] - locs['west']) / SPLIT + 1)
latind = int(id / LNGNUM)
lngind = id - latind * LNGNUM
lat = (locs['south'] + latind * SPLIT)
lng = (locs['... | 5,351,737 |
def linear(x, *p):
    """Evaluate the linear function ``p[0] * x + p[1]``.

    Arguments:
        x: independent variable (scalar or array-like supporting * and +).
        *p: parameters; ``p[0]`` is the slope, ``p[1]`` the intercept.

    Returns:
        The value of the line at ``x``.
    """
    return p[0] * x + p[1]
def convert_price_text(t):
"""
convert "$175/month' to 175
:param t:
:return: price, unit (i.e. 175, 'month')
"""
tok = t.split('$')[1]
if '/' in tok:
price, unit = tok.split('/')
else:
price = tok
unit = None
return float(price.strip().strip('$').replace(',... | 5,351,739 |
async def async_setup(hass, config):
"""Set up the WWLLN component."""
if DOMAIN not in config:
return True
conf = config[DOMAIN]
latitude = conf.get(CONF_LATITUDE, hass.config.latitude)
longitude = conf.get(CONF_LONGITUDE, hass.config.longitude)
identifier = '{0}, {1}'.format(latitud... | 5,351,740 |
def __are_nearly_overlapped(
plane_predicted: NDArray[Any, np.int32],
plane_gt: NDArray[Any, np.int32],
required_overlap: np.float64,
) -> (bool, bool):
"""
Calculate if planes are overlapped enough (required_overlap %) to be used for PP-PR metric
:param required_overlap: overlap threshold which... | 5,351,741 |
def _get_prob_k_given_L(B, N=None):
"""
Helper function.
"""
if N is None:
N = int(B[0, 1])
return B / N | 5,351,742 |
def base64_encode(s):
    """Base64-encode *s*, accepting either str or bytes.

    The base64 API only accepts bytes, so text input is first encoded
    as ASCII (unencodable characters are replaced).
    """
    if isinstance(s, bytes):
        raw = s
    else:
        raw = s.encode('ascii', 'replace')
    return encodebytes(raw).decode('ascii')
def _unpack_tableswitch(bc, offset):
"""
function for unpacking the tableswitch op arguments
"""
jump = (offset % 4)
if jump:
offset += (4 - jump)
(default, low, high), offset = _unpack(_struct_iii, bc, offset)
joffs = list()
for _index in xrange((high - low) + 1):
j, ... | 5,351,744 |
def test_immi1():
    """
    Check that i_mmi recovers full redundancy on the redundant distribution.
    """
    dist = bivariates['redundant']
    redundancy = i_mmi(dist, ((0,), (1,)), (2,))
    assert redundancy == pytest.approx(1)
def force_orders(self, **kwargs):
"""User's Force Orders (USER_DATA)
GET /fapi/v1/forceOrders
https://binance-docs.github.io/apidocs/futures/en/#user-39-s-force-orders-user_data
Keyword Args:
symbol (str, optional)
autoCloseType (str, optional): "LIQUIDATION" for liquidation orders, ... | 5,351,746 |
def main():
    """Entry point: report the color option parsed from the command line."""
    parsed = get_args()
    print('color =', parsed.color)
def prosp_power_analysis_norm(d, sigma, pow_lev, alpha, direction):
"""
This function conducts pre-testing power analysis and
calculates the minimally required sample size for a normal sample.
@param d: difference between the mean differences under H1 and H0
@param sigma: standard deviation
... | 5,351,748 |
def decision_tree_construction(examples, target_attribute, attributes, depth):
"""
:param examples: The data we will use to train the tree(x)
:param target_attribute: The label we want to classify(y)
:param attributes: The number(index) of the labels/attributes of the data-set
:return: The tr... | 5,351,749 |
def mass_centered(geo):
    """Return *geo* translated so that its center of mass sits at the origin."""
    com = center_of_mass(geo)
    geo = translate(geo, numpy.negative(com))
    return geo
def generate(env):
"""Called when the tool is loaded into the environment at startup of script"""
assert(exists(env))
MkdocsCommon.setup_opts_combiner(env)
mkdocs_scanner = env.Scanner(
MkdocsCommon.MkdocsScanner,
'MkdocsScanner',
)
bld = Builder(
action = __Mkdo... | 5,351,751 |
def test_newlines_group():
    """Newlines should be converted within every group of lines."""
    source = 'apple\npear\norange\n\nruby\npython\nerlang'
    expected = 'apple \npear \norange\n\nruby \npython \nerlang'
    assert_equal(gfm(source), expected)
def binaryMatrix(l, value=PAD_token):
"""
:param l:
:param value:
:return: seq: [3,4,5,0,0]
m: [[1],[1],[1],[0],[0]]
"""
m = []
for i, seq in enumerate(l):
m.append([])
for token in seq:
if token == PAD_token:
m[i].append(0)
... | 5,351,753 |
def show_download(dl_config):
"""
Do the download thing you know
"""
i = 0
config_specs = config.get_specs(dl_config)
url = dl_config[DOWNLOAD]['url']
main_html = urllib.request.urlopen(
url, timeout=int(
dl_config[DOWNLOAD][TIMEOUT])).read()
main_page = Beautifu... | 5,351,754 |
def clear_config():
    """Restore pn.config styling/asset options to their pristine defaults."""
    pn.config.css_files = []
    pn.config.js_files = {}
    pn.config.raw_css = []
def realTimeIdentification(face_recognizer, subjects):
"""实时识别"""
print("进行实时预测")
face_recognizer.read(r'./models/train.yml')
cap = cv2.VideoCapture(0)
# 视频保存 保存的文件的路径 fourcc:指定编码器 fps:要保存的视频的帧率 frameSize:要保存的文件的画面尺寸 isColor:指示是黑白画面还是彩色的画面
fourcc = cv2.VideoWriter_fourcc('I', '4', '2', '0'... | 5,351,756 |
def plot_history(model_wrapper, job_config, save_dir=None):
"""Evaluates training result.
Args:
figsize: tuple
Defines plot size.
"""
logger.info("Plotting training history")
plot_config = job_config.apply.cfg_history
train_history = model_wrapper._train_his... | 5,351,757 |
def load_map(mappath):
""" Attempt to load map with known loaders
"""
data = None
shirtloader = lambda path: fio.load_map(path)[0][0:3]
maploaders = [load_pfire_map, shirtloader]
for loader in maploaders:
try:
data = loader(mappath)
except (ValueError, OSError):
... | 5,351,758 |
def client_thread(client_url, i):
"""Basic request-reply client using REQ socket"""
context = zmq.Context()
socket = context.socket(zmq.REQ)
identity = "Client-%d" % (i)
socket.setsockopt(zmq.IDENTITY, identity) #Set client identity. Makes tracing easier
socket.connect(client_url)
... | 5,351,759 |
def ifttt_account_options_topup_source():
    """Build the option values used for selecting a topup source account."""
    # Internal accounts only, excluding the flag's first behaviour.
    return ifttt_account_options(False, "Internal")
def announce_user_details_updated(
    event: UserDetailsUpdated, webhook: OutgoingWebhook
) -> None:
    """Send a webhook announcement that a user's details were updated."""
    message = user.assemble_text_for_user_details_updated(event)
    call_webhook(webhook, message)
def strip_classes(soup:BeautifulSoup, *args:str):
"""
Strip class from given tags in a BeautifulSoup object.
Args:
soup (BeautifulSoup): soup to clean
args ([str]): A list of tags to be unclassed
Returns:
soup (BeautifulSoup)
Modules:
bs4 (BeautifulSoup)
"""
... | 5,351,762 |
def take_measurement(n_grid: np.int, n_rays: np.int, r_theta: np.float64) -> (
np.ndarray, np.ndarray, np.ndarray, np.ndarray):
"""
Take a measurement with the tomograph from direction r_theta.
Arguments:
n_grid: number of cells of grid in each direction
n_rays: number of parallel rays
r_theta... | 5,351,763 |
def chrom_karyo_sort(chroms):
"""
:param chroms:
:return:
"""
ordered = []
unordered = []
for cname, size in chroms:
try:
ord = int(cname.lower().strip('chr'))
ordered.append((cname, size, ord * 10))
except ValueError:
ord = check_special_c... | 5,351,764 |
def merge(A, lo, mid, hi, aux):
"""Merge two (consecutive) runs together."""
aux[lo:hi+1] = A[lo:hi+1]
left = lo
right = mid + 1
for i in range(lo, hi+1):
if left > mid:
A[i] = aux[right]
right += 1
elif right > hi:
A[i] = aux[left]
le... | 5,351,765 |
def teardown_function(function):
""" teardown any state that was previously setup with a setup_function
call.
"""
if (resource('ska_mid/tm_subarray_node/1').get('State') == "ON"):
if (resource('ska_mid/tm_subarray_node/1').get('obsState') == "IDLE"):
LOGGER.info("tearing down compose... | 5,351,766 |
def test_wrap_predict_method():
"""Check wrap_predict_method output with default inputs."""
from sasctl.utils.pymas.core import wrap_predict_method
target = """
def predict(a, b):
"Output: c, msg"
result = None
msg = None
try:
global _compile_error
if _compile_error is not N... | 5,351,767 |
def calibrate_stereo(observations_left: List, observations_right: List, detector: FiducialCalibrationDetector,
num_radial: int = 4, tangential: bool = False, zero_skew: bool = True) -> (StereoParameters, List):
"""
Calibrates a stereo camera using a Brown camera model
:param observatio... | 5,351,768 |
def register_producer_class(cls: Type[C]) -> Type[C]:
"""Registers the producer class and returns it unmodified."""
if not cls.TYPES:
raise ProducerInterfaceError(
f"Invalid producer. When defining producer, make sure to specify at least 1 type in the TYPES class variable."
)
fo... | 5,351,769 |
def get_file_list(var, obsname, start_date, end_date):
"""
Get a list of data set files that covers the time period defined by
start_date and end_date provided in the function call.
Parameters
----------
var: str
Input variable, e.g. 'tas'
obsname: str
Name of dataset to use... | 5,351,770 |
def main():
"""
Main function for data retrieval and loading. See argparse message for usage.
"""
logger.info("Starting SureChEMBL update process")
# Parse core command line arguments
parser = argparse.ArgumentParser(description='Load data into the SureChEMBL database')
parser.add_argument... | 5,351,771 |
def test_facet_size_default():
    """An empty query should fall back to the default facet size of 50."""
    default_size = search_query.facet_size({})
    assert default_size == 50
def structures_at_boundaries(gdf, datamodel, areas, structures, tolerance, distance):
"""
Check if there are structures near area (typically water-level areas) boundaries.
Parameters
----------
gdf : ExtendedGeoDataframe
ExtendedGeoDataFrame, HyDAMO hydroobject layer
datamodel : HyDAMO
... | 5,351,773 |
def create_checkpoint_structure() -> None:
"""
Create a checkpoint structure in the log folder.
* train: Folder for train split tensorboard logs.
* dev: Folder for dev split tensorboard logs.
* test: Folder for test split tensorboard logs.
* checkpoints: Folder for the babilim, pytorch or t... | 5,351,774 |
def above_cutoff(gene_freq_tup_list: List[Tuple[Union[str, tuple], Tuple[str, str]]], cutoff: int) -> List[str]:
"""Return the genes/edges that are are in at least the given cutoff's networks
Parameters
----------
gene_freq_tup_list : List[Tuple[Union[str, tuple], Tuple[str, str]]]
list of (com... | 5,351,775 |
def normalizeWindows(X):
"""
Do point centering and sphere normalizing to each window
to control for linear drift and global amplitude
Parameters
----------
X: ndarray(N, Win)
An array of N sliding windows
Returns
XRet: ndarray(N, Win)
An array in which the mean of each r... | 5,351,776 |
def cost_matrix_slow(x, y):
"""
Input: x is a Nxd matrix
y is an optional Mxd matirx
Output: dist is a NxM matrix where dist[i,j] is the square norm between x[i,:] and y[j,:]
if y is not given then use 'y=x'.
i.e. dist[i,j] = ||x[i,:]-y[j,:]||^2
"""
x_norm = (x ** 2).sum(1... | 5,351,777 |
def get_dists(ts1_sax, ts2_sax, lookup_table):
"""
Compute distance between each symbol of two words (series) using a lookup table
ts1_sax and ts2_sax are two sax representations (strings) built under the same conditions
"""
# Verify integrity
if ts1_sax.shape[0] != ts2_sax.shape[0]:
... | 5,351,778 |
def get_dom_coords(string, dom):
"""Get Coordinates of a DOM specified by the string and dom number.
Parameters
----------
string : int
String number (between 1 and 86)
dom : int
DOM number (between 1 and 60)
Returns
-------
tuple(float, float, float)
The x, y, ... | 5,351,779 |
def relevance_ka(x):
"""
based on code from https://www.kaggle.com/aleksandradeis/regression-addressing-extreme-rare-cases
see paper: https://www.researchgate.net/publication/220699419_Utility-Based_Regression
use the sigmoid function to create the relevance function, so that relevance function
has ... | 5,351,780 |
def now(mydateformat='%Y%m%dT%H%M%S'):
""" Return current datetime as string.
Just a shorthand to abbreviate the common task to obtain the current
datetime as a string, e.g. for result versioning.
Args:
mydateformat: optional format string (default: '%Y%m%dT%H%M%S')
Returns:
... | 5,351,781 |
def getOnePackageInfo(pkgpath):
"""Gets receipt info for a single bundle-style package"""
pkginfo = {}
plist = getBundleInfo(pkgpath)
if plist:
pkginfo['filename'] = os.path.basename(pkgpath)
try:
if 'CFBundleIdentifier' in plist:
pkginfo['packageid'] = plist[... | 5,351,782 |
def cmp_point_identities(a, b):
"""
Given point identities a, b (may be string, number, date, etc),
collation algorithm compares:
(a) strings case-insensitively
(b) dates and datetimes compared by normalizing date->datetime.
(c) all other types use __cmp__(self, other) defaults from typ... | 5,351,783 |
def shortest_complement(t, m, l):
"""
Given a primitive slope t and the holonomies of the current
meridian and longitude, returns a shortest complementary slope s
so that s.t = +1.
"""
c, d = t # second slope
_, a, b = xgcd(d, c) # first slope
b = -b
assert a*d - b*c == 1
return ... | 5,351,784 |
def run_pii(text, lang):
"""
Runs the given set of regexes on the data "lines" and pulls out the
tagged items.
The lines structure stores the language type(s). This can be used for
language-specific regexes, although we're dropping that for now and using
only "default"/non-language-specific regexes.
"""
... | 5,351,785 |
def get_ref_cat(butler, visit, center_radec, radius=2.1):
"""
Get the reference catalog for the desired visit for the requested
sky location and sky cone radius.
"""
ref_cats = RefCat(butler)
try:
band = list(butler.subset('src', visit=visit))[0].dataId['filter']
except dp.butlerExce... | 5,351,786 |
def replace_dict(d, **kwargs):
    """
    Return a new dict: a copy of *d* with the keyword values replacing
    any matching keys. The input dict is left untouched.
    """
    updated = d.copy()
    updated.update(kwargs)
    return updated
def handle_pdb(sig, frame):  # pylint: disable=unused-argument
    """Signal handler that drops into a pdb debugging session.

    Args:
        sig: signal number delivered by the OS (unused).
        frame: stack frame that was executing when the signal arrived.
    """
    # Start an interactive pdb session at the interrupted frame.
    pdb.Pdb().set_trace(frame)
def optimizer(settings_filepath):
"""
Performs Gaussian-process optimization to maximise agreement
between SOLPS and the given experimental data.
:param settings_filepath: The path to the settings file.
:return:
"""
# check the validity of the input file and return its contents
setting... | 5,351,789 |
def pageHeader(
headline="",
tagline=""):
"""
*Generate a pageHeader - TBS style*
**Key Arguments:**
- ``headline`` -- the headline text
- ``tagline`` -- the tagline text for below the headline
**Return:**
- ``pageHeader`` -- the pageHeader
"""
pageHeade... | 5,351,790 |
def get_counter_merge_suggestion(merge_suggestion_tokens):
"""Return opposite of merge suggestion
Args:
merge_suggestion_tokens (list): tokens in merge suggestion
Returns:
str: opposite of merge suggestion
"""
counter_merge_suggestion = ' '.join(merge_suggestion_tokens)
if merg... | 5,351,791 |
def parse_rfc3339_utc_string(rfc3339_utc_string):
"""Converts a datestamp from RFC3339 UTC to a datetime.
Args:
rfc3339_utc_string: a datetime string in RFC3339 UTC "Zulu" format
Returns:
A datetime.
"""
# The timestamp from the Google Operations are all in RFC3339 format, but
# they are sometime... | 5,351,792 |
def response(code, body='', etag=None, last_modified=None, expires=None, **kw):
"""Helper to build an HTTP response.
Parameters:
code
: An integer status code.
body
: The response body. See `Response.__init__` for details.
etag
: A value for the ETag header. Double quotes will be... | 5,351,793 |
def oauth2callback():
"""
The 'flow' has this one place to call back to. We'll enter here
more than once as steps in the flow are completed, and need to keep
track of how far we've gotten. The first time we'll do the first
step, the second time we'll skip the first step and do the second,
and so on.
"""
... | 5,351,794 |
def parse_tweet(raw_tweet, source, now=None):
"""
Parses a single raw tweet line from a twtxt file
and returns a :class:`Tweet` object.
:param str raw_tweet: a single raw tweet line
:param Source source: the source of the given tweet
:param Datetime now: the current datetime... | 5,351,795 |
def cmd_ssh(argv, args):
"""
Usage:
localstack ssh [options]
Commands:
ssh Obtain a shell in the running LocalStack container
Options:
"""
args.update(docopt(cmd_ssh.__doc__.strip(), argv=argv))
if not docker_container_running(MAIN_CONTAINER_NAME):
raise Exception('Expected 1... | 5,351,796 |
def scopes(request, coalition_id):
"""
Update coalition required scopes with a specific set of scopes
"""
scopes = []
for key in request.POST:
if key in ESI_SCOPES:
scopes.append(key)
url = f"{GLOBAL_URL}/{coalition_id}"
headers = global_headers(request, {"Content-type... | 5,351,797 |
def test_opening_hour_close():
"""
The POI should already be closed since it's 21h30 UTC while
the POI closes at 22h00 in UTC+3.
"""
oh_block = get_moscow_oh("Mo-Su 10:00-22:00")
assert oh_block.status == "closed"
assert oh_block.next_transition_datetime == "2018-06-15T10:00:00+03:00"
a... | 5,351,798 |
def recursive_dictionary_cleanup(dictionary):
"""Recursively enrich the dictionary and replace object links with names etc.
These patterns are replaced:
[phobostype, bpyobj] -> {'object': bpyobj, 'name': getObjectName(bpyobj, phobostype)}
Args:
dictionary(dict): dictionary to enrich
... | 5,351,799 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.