content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def arraySeries(dates, values, observedproperty, unit):
"""
Display time series in tabular format.
arguments:
>>> dates:
list of 'datetime.datetime' objects containing entries of
time series date and time column.
>>> values:
list of float objects c... | 6,800 |
def download(auth, url, headers, output_path, size, overwrite,
f_name=None,
ext=None,
block_size=4096,
callback=None):
"""
Call GET for a file stream.
:Args:
- auth (:class:`.Credentials`): The session credentials object.
- url (str): The ... | 6,801 |
def to_int(matrix):
    """Cast the first three channel values of every matrix cell to int.

    Walks each cell of ``matrix`` (dimensions obtained through the
    ``rows``/``cols`` helpers) and converts its first three components
    in place, then returns the same (mutated) matrix.
    """
    for r in range(rows(matrix)):
        for c in range(cols(matrix)):
            cell = matrix[r][c]
            for channel in range(3):
                cell[channel] = int(cell[channel])
    return matrix
def create_tastypie_resource(class_inst):
    """Instantiate a tastypie resource for the given model class.

    Usage: url(r'^api/', include(create_tastypie_resource(UfsObjFileMapping).urls)),
    Access url: api/ufs_obj_file_mapping/?format=json
    :param class_inst: model class to expose through the API
    :return: a fresh instance of the generated tastypie resource class
    """
    resource_cls = create_tastypie_resource_class(class_inst)
    return resource_cls()
def make_collector(entries):
    """Build a callback that records openLCA location data into ``entries``.

    The returned function accepts a location object and appends a
    three-element list of its code, name, and reference id.
    """
    def collect(location):
        entries.append([location.getCode(),
                        location.getName(),
                        location.getRefId()])
    return collect
def apiname(funcname):
""" Define what name the API uses, the short or the gl version.
"""
if funcname.startswith('gl'):
return funcname
else:
if funcname.startswith('_'):
return '_gl' + funcname[1].upper() + funcname[2:]
else:
return 'gl' + funcname[0].up... | 6,805 |
def add_numeric_gene_pos(gene_info):
"""
Add numeric gene (start) genomic position to a gene_info dataframe
"""
gene_chr_numeric = gene_info['chr']
gene_chr_numeric = ['23' if x == 'X' else x for x in gene_chr_numeric]
gene_chr_numeric = ['24' if x == 'Y' else x for x in gene_chr_numeric]
ge... | 6,806 |
def read_list_from_file(filename: str) -> set:
"""Build a set from a simple multiline text file.
Args:
filename: name of the text file
Returns:
a set of the unique lines from the file
"""
filepath = pathlib.Path(__file__).parent.joinpath(filename)
lines = filepath.read_text().s... | 6,807 |
def test_machine_status(
requests_mock: Mocker,
mock_hqs_api_handler: HoneywellQAPI,
) -> None:
"""Test that we can retrieve the machine state via Honeywell endpoint."""
machine_name = "HQS-LT-S1-APIVAL"
mock_machine_state = "online"
mock_url = f"https://qapi.honeywell.com/v1/machine/{machine... | 6,808 |
def process_poc_output(poc_list, target, verbose, quiet):
"""Write the finalized netblocks to a CSV file.
Args:
poc_list: A list of point of contact information from ARIN.
target: The company the PoC information was gathered for.
verbose: A boolean that indicates whether verbose s... | 6,809 |
def parseAnswerA(answer, index, data):
"""
parseAnswerA(data): Grab our IP address from an answer to an A query
"""
retval = {}
text = (str(answer[0]) + "." + str(answer[1])
+ "." + str(answer[2]) + "." + str(answer[3]))
retval["ip"] = text
#
# TODO: There may be pointers even for A responses. Will have ... | 6,810 |
def calc_initial_conditions(state):
    """Compute initial conditions (dynamic enthalpy, etc.) for ``state``.

    Validates that no negative salinity is present before delegating to
    the kernel; raises RuntimeError otherwise.
    """
    variables = state.variables
    if npx.any(variables.salt < 0.0):
        raise RuntimeError("encountered negative salinity")
    variables.update(calc_initial_conditions_kernel(state))
def qlCleanCache(cloth):
    """Clean layback cache for given cloth. Accepts qlCloth object"""
    # Select the cloth first: the MEL command operates on the selection.
    cmds.select(cloth)
    mel.eval('qlClearCache()')
def test_toggle_off_show_all_files(editorstack, outlineexplorer, test_files):
"""
Test that toggling off the option to show all files in the Outline Explorer
hide all root file items but the one corresponding to the currently
selected Editor and assert that the remaning root file item is
expanded co... | 6,813 |
def _get_files(data_path, modality, img_or_label):
"""Gets files for the specified data type and dataset split.
Args:
data: String, desired data ('image' or 'label').
dataset_split: String, dataset split ('train', 'val', 'test')
Returns:
A list of sorted file names or None when getting label for
... | 6,814 |
def _parse_args() -> argparse.Namespace:
"""Registers the script's arguments on an argument parser."""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--source-root',
type=Path,
required=True,
help='Prefix t... | 6,815 |
def stderr(string):
    """
    Print the given ``string`` to stderr.

    This is equivalent to the Python 2 statement
    ``print >> sys.stderr, string``.
    """
    # The original used the Python 2 print-chevron syntax, which is a
    # SyntaxError on Python 3 (other blocks in this file use f-strings,
    # i.e. Python 3); print(..., file=...) is the portable form.
    print(string, file=sys.stderr)
def qlist(q):
"""Convenience function that converts asyncio.Queues into lists.
This is inefficient and should not be used in real code.
"""
l = []
# get the messages out
while not q.empty():
l.append(q.get_nowait())
# now put the messages back (since we popped them out)
for i in... | 6,817 |
def is_stdin(name):
    """Return True when ``name`` is one of the recognised stdin aliases."""
    is_standard_input = name in STDINS
    return is_standard_input
def filter_marker_y_padding(markers_y_indexes, padding_y_top, padding_y_bottom):
"""
Filter the markers indexes for padding space in the top and bottom of answer sheet
:param markers_y_indexes:
:param padding_y_top:
:param padding_y_bottom:
:return:
"""
return markers_y_indexes[(markers... | 6,819 |
def budget_italy(path):
"""Budget Shares for Italian Households
a cross-section from 1973 to 1992
*number of observations* : 1729
*observation* : households
*country* : Italy
A dataframe containing :
wfood
food share
whouse
housing and fuels share
wmisc
miscellaneous share
... | 6,820 |
def hr_admin(request):
""" Views for HR2 Admin page """
template = 'hr2Module/hradmin.html'
# searched employee
query = request.GET.get('search')
if(request.method == "GET"):
if(query != None):
emp = ExtraInfo.objects.filter(
Q(user__first_name__icontains=query)... | 6,821 |
def create_aws_clients(region='us-east-1'):
"""Creates an S3, IAM, and Redshift client to interact with.
Parameters
----------
region : str
The aws region to create each client (default 'us-east-1').
Returns
-------
ec3
A boto3 ec2 resource.
s3
A boto3 s3 resour... | 6,822 |
def apply_function(f, *args, **kwargs):
""" Apply a function or staticmethod/classmethod to the given arguments.
"""
if callable(f):
return f(*args, **kwargs)
elif len(args) and hasattr(f, '__get__'):
# support staticmethod/classmethod
return f.__get__(None, args[0])(*args, **kwa... | 6,823 |
def test_precursormz_match_tolerance2_array_ppm():
"""Test with array and tolerance=2 and type=ppm."""
spectrum_1 = Spectrum(mz=numpy.array([], dtype="float"),
intensities=numpy.array([], dtype="float"),
metadata={"precursor_mz": 100.0})
spectrum_2 = Spec... | 6,824 |
def join_data(ycom_county, census, land_area_data):
    """Combine the YCOM, census, and land-area datasets into one dataframe.

    Adds a log10 population-density column to ``census`` (mutating it in
    place, as the original did) and then concatenates ``ycom_county`` and
    ``census`` column-wise.
    """
    pop_density = census['TotalPop'] / land_area_data['LND110200D']
    census['LogPopDensity'] = np.log10(pop_density)
    return pd.concat([ycom_county, census], axis=1)
def _createController(config):
"""
Creates the appropriate (hypervisor) controller based on the
given configuration.
This is the place where to perform particular initialization tasks for
the particular hypervisor controller implementations.
@param config: an instance of L{ConfigParser}
... | 6,826 |
def main():
"""
Paths from phone and bounder for Skyfall data set
"""
is_export_bounder_csv: bool = False # If true, save only episode start to end as csv
# Concentrate on single station
phone_id = "1637610021"
# Load for all stations
loc_fields = ['station_id',
'loc... | 6,827 |
def path_graph():
    """Return a frozen directed path graph on three nodes, named "path"."""
    graph = nx.path_graph(3, create_using=nx.DiGraph)
    graph.graph["name"] = "path"
    nx.freeze(graph)
    return graph
def dataset_to_cfl(dir_out, file_name, suffix="", file_png=None, verbose=False):
"""
Convert ISMRMRD to CFL files in specified directory.
Parameters
----------
dir_out : str
Output directory to write CFL files
file_name : str
Name of ISMRMRD file
suffix : str, optional
... | 6,829 |
def dropStudentsWithEvents(df, events,
saveDroppedAs=None,
studentId='BookletNumber',
eventId='Label',
verbose=True):
"""
Drop students with certain events.
It finds students with the events, and use... | 6,830 |
def get_html_app_files_dirs(output_file):
"""
Return a tuple of (parent_dir, dir_name) directory named after the
`output_file` file object file_base_name (stripped from extension) and a
`_files` suffix Return empty strings if output is to stdout.
"""
if is_stdout(output_file):
return '',... | 6,831 |
def create_slides(user, node, slideshow_data):
""" Generate SlideshowSlides from data """
""" Returns a collection of SlideshowSlide objects """
slides = []
with transaction.atomic():
for slide in slideshow_data:
slide_obj = SlideshowSlide(
contentnode=node,
... | 6,832 |
def is_valid(sequence):
"""
A string is not valid if the knight moves onto a blank square
and the string cannot contain more than two vowels.
"""
if any(letter == "_" for letter in sequence):
return False
# Check for vowels
# Strings shorter than 3 letters are always ok, as they
... | 6,833 |
def batch_dl1_to_dl2(
dict_paths,
config_file,
jobid_from_training,
batch_config,
logs,
):
"""
Function to batch the dl1_to_dl2 stage once the lstchain train_pipe batched jobs have finished.
Parameters
----------
dict_paths : dict
Core dictionary with {stage: PATHS} info... | 6,834 |
def runCommands(cmds, localTempDir, inPipes=None, outPipes=None, errPipes=None):
""" Run commands from CMDS list.
"""
if inPipes is None:
inPipes = [None] * len(cmds)
if outPipes is None:
outPipes = [None] * len(cmds)
if errPipes is None:
errPipes = [None] * len(cmds)
for i, c in enumerate(cmds,... | 6,835 |
def podmanOcpRegistryLogin(ctx):
""" Log into the default registry of an OpenShift cluster """
ocLogin(ctx, ctx.cr.ocp.user)
cmd = 'podman login'
cmd += ' --tls-verify=false'
cmd += ' -u $(oc whoami) -p $(oc whoami --show-token)'
cmd += f' default-route-openshift-image-registry.apps.{ctx.cf.oc... | 6,836 |
def browse():
"""
A browser for the bibmanager database.
"""
# Content of the text buffer:
bibs = bm.load()
keys = [bib.key for bib in bibs]
compact_text = "\n".join(keys)
expanded_text = "\n\n".join(bib.content for bib in bibs)
# A list object, since I want this to be a global varia... | 6,837 |
def read_lines_from_input(file):
"""
Reads the provided file line by line to provide a list representation of the contained names.
:param file: A text file containing one name per line. If it's None, the input is read from the standard input.
:return: A list of the names contained in the provided text f... | 6,838 |
def diag(input_, k=0):
    """Wrapper of `numpy.diag`.

    NOTE(review): currently unimplemented -- the body is a stub (`pass`),
    so callers receive None. Confirm whether an implementation is
    provided elsewhere before relying on this.

    Parameters
    ----------
    input_ : DTensor
        Input dense tensor.
    k : int, optional
        Offset to main diagonal, by default 0
    """
    pass
def is_dataproc_VM():
"""Check if this installation is being executed on a Google Compute Engine dataproc VM"""
try:
dataproc_metadata = urllib.request.urlopen("http://metadata.google.internal/0.1/meta-data/attributes/dataproc-bucket").read()
if dataproc_metadata.decode("UTF-8").startswith("data... | 6,840 |
def restore_tmux(tmux_id):
"""
retore tmux sessions by given backuped Tmux id
- check if there is tmux running and with same session name
- handle windows, panes ..
"""
#validate given tmux_id
LOG.info('loading backuped tmux sessions')
jsonfile = os.path.join(config.BACKUP_PATH,tmux_id... | 6,841 |
def lookup_all(base):
"""Looks up a subclass of a base class from the registry.
Looks up a subclass of a base class with name provided from the
registry. Returns a list of registered subclass if found, None otherwise.
Args:
base: The base class of the subclass to be found.
Returns:
A list of subcla... | 6,842 |
def makepyfile(testdir):
"""Fixture for making python files with single function and docstring."""
def make(*args, **kwargs):
func_name = kwargs.pop('func_name', 'f')
# content in args and kwargs is treated as docstring
wrap = partial(_wrap_docstring_in_func, func_name)
args = ma... | 6,843 |
def _clean_environment(env_dir):
"""Remove problem elements in environmental directories.
- Get rid of old history comment lines that cause parsing failures:
https://github.com/bcbio/bcbio-nextgen/issues/2431
"""
history_file = os.path.join(env_dir, "conda-meta", "history")
if os.path.exists(... | 6,844 |
def test_good_input3():
    """Runs on good input and checks the expected translation output."""
    expected = '-P-RPE-R---P--T-E'
    run(rna, 'codons.dna', expected)
def createTextWatermark(msg, size, loc, fontcolor='white', fontpath='arial.ttf', fontsize=18):
"""Creates a watermark image of the given text.
Puts it at the given location in an RGBA image of the given size.
Location should be a 2-tuple denoting the center location of the text."""
from PIL import Image... | 6,846 |
def generate_bookmarks(inputfile, sections, outputfile):
"""Operate on INPUTFILE, optionally filtering for WORKSPACES."""
with open(inputfile, 'rb') as fh:
rawdata = fh.read()
if inputfile.endswith('.json'):
data = json.loads(rawdata)
elif inputfile.endswith('.txt'):
data = yaml... | 6,847 |
def _discover_on_demand():
"""
Attempts to discover operator modules, if not already discovered
"""
global _DISCOVERED
if not _DISCOVERED:
_DISCOVERED = True
discover_local_chemistry_operators()
discover_preferences_chemistry_operators()
if logger.isEnabledFor(loggin... | 6,848 |
def get_server_pull_config(config:dict):
"""
takes a config dictionary and returns the variables related to server deployment (pull from intersections).
If there is any error in the configuration, returns a quadruple of -1 with a console output of the exception
"""
try:
server = config["Data... | 6,849 |
def buff_push(item: BufferItem):
    """Queue a BufferItem and flush the buffer once it becomes full.

    Returns the flush result when the queue has filled up, otherwise
    None.
    """
    q.put(item)
    make_dependencies(item)
    if not q.full():
        return None
    return buff_empty_partial(q.maxsize - 1)
def installDirectory():
"""
Return the software installation directory, by looking at location of this
method.
"""
#path = os.path.abspath(os.path.join(os.path.realpath(__file__), os.pardir))
path = os.path.abspath(os.path.realpath(__file__))
path = os.path.abspath(os.path.join(path, '../..... | 6,851 |
def test_post_requests_fails_with_invalid_invalid_target_date_in_body(
invalid_request_body_with_invalid_date, client, request_headers
):
"""
Tests that response shows failure when request body has invalid
target date.
Args:
invalid_request_body_with_invalid_string_length (dict): a request ... | 6,852 |
def take(count: int) -> Callable[[Observable], Observable]:
"""Returns a specified number of contiguous elements from the start
of an observable sequence.
.. marble::
:alt: take
-----1--2--3--4----|
[ take(2) ]
-----1--2-|
Example:
>>> op = take(5)
... | 6,853 |
def stock(self):
    """Market search google"""
    # Everything after the last "for" in the query is the search term.
    term = self.query.split("for")[-1]
    target_url = "https://google.com/search?q=" + term
    browser = webbrowser.get()
    browser.open(target_url)
    speak("Here is what I found for " + term + " on google")
def test_getitem(seas_metadict):
    """
    Exercise `MetaDict.__getitem__(...)`: hits, misses, and KeyError.
    """
    assert 'Europe' == seas_metadict['Norwegian']
    assert 'arctic' != seas_metadict['BALTIC']
    with pytest.raises(KeyError):
        seas_metadict['key-not-exists']
def _Net_batch(self, blobs):
"""
Batch blob lists according to net's batch size.
Take
blobs: Keys blob names and values are lists of blobs (of any length).
Naturally, all the lists should have the same length.
Give (yield)
batch: {blob name: list of blobs} dict for a single... | 6,856 |
def format_fields_for_join(
fields: List[Union[Field, DrivingKeyField]],
table_1_alias: str,
table_2_alias: str,
) -> List[str]:
"""Get formatted list of field names for SQL JOIN condition.
Args:
fields: Fields to be formatted.
table_1_alias: Alias that should be used in the field o... | 6,857 |
def get_payment_balance(currency):
"""
Returns available balance for selected currency
This method requires authorization.
"""
result = get_data("/payment/balances", ("currency", currency))
payment_balance = namedtuple("Payment_balance", get_namedtuple(result[0]))
return [payment_balance(... | 6,858 |
def build_scheduler(optimizer, config):
"""
"""
scheduler = None
config = config.__dict__
sch_type = config.pop('type')
if sch_type == 'LambdaLR':
burn_in, steps = config['burn_in'], config['steps']
# Learning rate setup
def burnin_schedule(i):
if i < burn_in... | 6,859 |
def calculate_z_caller(r, p, inv_t, actual):
    """
    parseAnswerA(data): Grab our IP address -- sic; actually: calls the
    CUDA function and fills out the array.

    Each thread computes one element: the 1-D grid index selects the
    slot of ``actual`` to fill with the result of
    ``formal_integral_cuda.calculate_z_cuda(r, p, inv_t)``.
    NOTE(review): there is no bounds check of the thread index against
    len(actual) -- presumably the launch grid exactly covers the array;
    confirm at the call site.
    """
    x = cuda.grid(1)
    actual[x] = formal_integral_cuda.calculate_z_cuda(r, p, inv_t)
def rpca_alm(X, lmbda=None, tol=1e-7, max_iters=1000, verbose=True,
inexact=True):
"""
Augmented Lagrange Multiplier
"""
if lmbda is None:
lmbda = 1.0 / np.sqrt(X.shape[0])
Y = np.sign(X)
norm_two = svd(Y, 1)[1]
norm_inf = np.abs(Y).max() / lmbda
dual_norm = np.max(... | 6,861 |
def dwt_embed(wmImage, hostImage, alpha, beta):
"""Embeds a watermark image into a host image, using the First Level
Discrete Wavelet Transform and Alpha Blending.\n
The formula used for the alpha blending is:
resultLL = alpha * hostLL + beta * watermarkLL
Arguments:
wmImage (NumPy arr... | 6,862 |
def process_file(file_path: str):
    """Read a file and print its C++ tokenization with group nesting."""
    banner = '#' * 80
    print()
    print(banner)
    print('Finding nested C++ tokens in file', file_path, flush=True)
    source = CppSource(file_path=file_path)
    dump_grouped_tokens(source.grouped_cpp_tokens)
def make_file_iterator(filename):
    """Return an iterator over the lines of the named file's contents."""
    with open(filename) as handle:
        text = handle.read()
    return iter(text.splitlines())
def pull_partner_statistics(partner_id):
"""
This method pulls partner statistics.
Parameters:
- partner_id : the partner ID
Returns:
None
"""
signature = "pull_partner_statistics(partner_id)"
logginghelper.method_enter(logger, signature, partner_id)
# Send request to H... | 6,865 |
def build_summary(resource, children, attribute, summarizer, keep_details=False):
"""
Update the `resource` Resource with a summary of itself and its `children`
Resources and this for the `attribute` key (such as copyrights, etc).
- `attribute` is the name of the attribute ('copyrights', 'holders' etc... | 6,866 |
def test_write_process_button(handler):
""" Test the _write_process_button method. """
# patch the view context
handler.view_ctx = Mock(**{'format_url.return_value': 'an url'})
# patch the meld elements
cell_elt = Mock(attrib={'class': ''})
tr_elt = Mock(**{'findmeld.return_value': cell_elt})
... | 6,867 |
def little_endian_bytes_to_int(little_endian_byte_seq):
"""Converts a pair of bytes into an integer.
The `little_endian_byte_seq` input must be a 2 bytes sequence defined
according to the little-endian notation (i.e. the less significant byte
first).
For instance, if the `little_endian_byte_seq` i... | 6,868 |
def higher_follower_count(A, B):
    """Compare the 'follower_count' key of two dictionaries.

    Returns "A" when A's count is greater than or equal to B's,
    otherwise "B" (ties go to "A").
    """
    return "A" if A['follower_count'] >= B['follower_count'] else "B"
def _element_or_none(germanium, selector, point):
"""
Function to check if the given selector is only a regular
element without offset clicking. If that is the case, then we
enable the double hovering in the mouse actions, to solve a
host of issues with hovering and scrolling, such as elements
a... | 6,870 |
def ENDLEMuEpP_TransferMatrix( style, tempInfo, crossSection, productFrame, angularData, EMuEpPData, multiplicity, comment = None ) :
"""This is LLNL I = 1, 3 type data."""
logFile = tempInfo['logFile']
workDir = tempInfo['workDir']
s = versionStr + '\n'
s += "Process: 'Double differential EMuEpP... | 6,871 |
def _fit_gaussian(f, grid, image_spot, p0, lower_bound=None, upper_bound=None):
"""Fit a gaussian function to a 3-d or 2-d image.
# TODO add equations and algorithm
Parameters
----------
f : func
A 3-d or 2-d gaussian function with some parameters fixed.
grid : np.ndarray, np.float
... | 6,872 |
def createfourierdesignmatrix_chromatic(toas, freqs, nmodes=30, Tspan=None,
logf=False, fmin=None, fmax=None,
idx=4):
"""
Construct Scattering-variation fourier design matrix.
:param toas: vector of time series in seconds
... | 6,873 |
def get_word_data(char_data):
"""
获取分词的结果
:param char_data:
:return:
"""
seq_data = [''.join(l) for l in char_data]
word_data = []
# stop_words = [line.strip() for line in open(stop_word_file, 'r', encoding='utf-8')]
for seq in seq_data:
seq_cut = jieba.cut(seq, cut_all=False... | 6,874 |
def make_non_absolute(path):
    """
    Make a path non-absolute (so it can be joined to a base directory).

    Strips any drive component, then skips leading characters until the
    remainder is no longer an absolute path.
    @param path: The file path
    """
    _, remainder = os.path.splitdrive(path)
    skip = 0
    while os.path.isabs(remainder[skip:]):
        skip += 1
    return remainder[skip:]
def assert_allclose(actual: numpy.float64, desired: int):
    """
    API-usage stub (body intentionally empty: ``...``).

    Observed call counts per downstream library:
    usage.matplotlib: 1
    usage.networkx: 4
    usage.scipy: 20
    usage.skimage: 1
    usage.sklearn: 1
    usage.statsmodels: 31
    """
    ...
def test_bbox(tiler):
"""Bounding boxes of tiles."""
assert tiler.bbox(1, 5, 3) == (-15028131.257091932, -10018754.17139462, -10018754.171394622, -5009377.085697312)
assert tiler.bbox(77, 93, 8) == (-7983694.730330089, 5322463.153553393, -7827151.696402049, 5479006.187481433)
assert tiler.bbox(27685,... | 6,877 |
def get_output_stream(items: List[Dict[str, Any]]) -> List[OutputObject]:
"""Convert a list of items in an output stream into a list of output
objects. The element in list items are expected to be in default
serialization format for output objects.
Paramaters
----------
items: list(dict)
... | 6,878 |
def lvnf_stats(**kwargs):
    """Create a new LVNF-stats module and register it with the runtime."""
    worker = RUNTIME.components[LVNFStatsWorker.__module__]
    return worker.add_module(**kwargs)
def parse_float(string):
    """
    Find the first float-looking substring without casting it.

    :param string: text to scan
    :return: the first "digits.digits" substring, or None when absent
    """
    match = re.search(r'\d+\.\d+', string)
    return match.group(0) if match else None
def _flush_temp_file():
"""
Clear directory where blob files are downloaded to if exists.
"""
global BLOB_DOWNLOAD_PATH
try:
dirPath = os.path.join(os.getcwd(), BLOB_DOWNLOAD_PATH)
print("PTH", os.getcwd())
print("DIR", dirPath)
if os.path.isdir(dirPath):
... | 6,881 |
def objectproxy_realaddress(obj):
    """
    Obtain a real address as an integer from an objectproxy.
    """
    # Unwrap the PyROOT ObjectProxy to the raw C++ object pointer via
    # ROOT's TPython bridge.
    voidp = QROOT.TPython.ObjectProxy_AsVoidPtr(obj)
    # Presumably `C` is the ctypes module (imported elsewhere -- confirm):
    # viewing the pointer as a c_char buffer and taking addressof yields
    # the numeric address.
    return C.addressof(C.c_char.from_buffer(voidp))
def CollateRevisionHistory(builds, repo):
"""Sorts builds and revisions in repository order.
Args:
builds: a dict of the form:
```
builds := {
master: {
builder: [Build, ...],
...,
},
...
}
```
repo (GitWrapper): repository in which the revision occurs.
... | 6,883 |
def delta_shear(observed_gal, psf_deconvolve, psf_reconvolve, delta_g1, delta_g2):
"""
Takes in an observed galaxy object, two PSFs for metacal (deconvolving
and re-convolving), and the amount by which to shift g1 and g2, and returns
a tuple of tuples of modified galaxy objects.
((g1plus, g1minus), (g2plus, g2minu... | 6,884 |
def estimateModifiedPiSquared(n):
"""
Estimates that value of Pi^2 through a formula involving partial sums.
n is the number of terms to be summed; the larger the more accurate the
estimation of Pi^2 tends to be (but not always).
The modification relative to estimatePiSquared() is that the n terms a... | 6,885 |
def pattern_match(template, image, upsampling=16, metric=cv2.TM_CCOEFF_NORMED, error_check=False):
"""
Call an arbitrary pattern matcher using a subpixel approach where the template and image
are upsampled using a third order polynomial.
Parameters
----------
template : ndarray
T... | 6,886 |
def test_run_high_cardinality_forever(high_cardinality_instance):
"""
This test is a utility and is useful in situations where you want to connect to the database instance
and have queries executing against it. Note, you must kill the test execution to stop this test.
In order to run this test, you mus... | 6,887 |
def main(argv=[__name__]):
"""Raspi_x10 command line interface.
"""
try:
try:
devices_file, rules_file, special_days_file = argv[1:]
except ValueError:
raise Usage('Wrong number of arguments')
sched = Schedule()
try:
sched.load_conf(devices... | 6,888 |
def HexaMeshIndexCoord2VoxelValue(nodes, elements, dim, elementValues):
"""
Convert hexamesh (bricks) in index coordinates to volume in voxels with value of voxels assigned according to elementValues.
dim: dimension of volume in x, y and z in voxels (tuple)
elementValues: len(elements) == len(eleme... | 6,889 |
def main():
    """Create the database schema and commit an (empty) initial data set."""
    Base.metadata.create_all(engine)
    Session = sessionmaker(bind=engine)
    session = Session()
    # add_all([]) is a no-op kept for parity with the original behavior.
    session.add_all([])
    session.commit()
def protocol_0101(abf):
    """0112 0101 tau -10pA"""
    # Only pyabf.ABF instances are supported by the overlay helper.
    assert isinstance(abf, pyabf.ABF)
    generic_overlay_average(abf, baselineSec1=0, baselineSec2=0.1)
    return
def hamiltonian(latt: Lattice, eps: (float, np.ndarray) = 0.,
t: (float, np.ndarray) = 1.0,
dense: bool = True) -> (csr_matrix, np.ndarray):
"""Computes the Hamiltonian-matrix of a tight-binding model.
Parameters
----------
latt : Lattice
The lattice the tight-bi... | 6,892 |
def zip_dir(name, srcs, zipname, **kwargs):
"""Zips up an entire directory or Fileset.
Args:
name: The name of the target
srcs: A single-item list with a directory or fileset
zipname: The name of the output zip file
**kwargs: Further generic arguments to pass to genrule, e.g. visibility... | 6,893 |
def pad_in(string: str, space: int) -> str:
    """
    Left-pad ``string`` with ``space`` leading spaces.

    >>> pad_in('abc', 0)
    'abc'
    >>> pad_in('abc', 2)
    '  abc'
    """
    # " " * space is the idiomatic way to build the padding; the previous
    # "".join([" "] * space) allocated a throwaway list for the same result.
    return " " * space + string
def maybe_download_and_extract_tar_gz(root, file_name, data_url):
"""Downloads file from given URL and extracts if compressed as tar.gz
Args:
root (str): The root directory
file_name (str): File name to download to
data_url (str): Url of data
"""
if not os.path.exists(root):
... | 6,895 |
def get_notifies(request):
"""页面展示全部通知"""
user = request.siteuser
if not user:
return HttpResponseRedirect(reverse('siteuser_login'))
notifies = Notify.objects.filter(user=user).select_related('sender').order_by('-notify_at')
# TODO 分页
ctx = get_notify_context(request)
ctx['notifies... | 6,896 |
def set_module_repos(module, path):
    """
    Sets the repository path for a specific module.

    Raises VersionsException when ``path`` does not exist.
    """
    if not os.path.exists(path):
        raise VersionsException("Cannot set the repos path to a non existent directory.")
    Settings.module_repos[module] = path
def declare_eq_branch_power_ptdf_approx(model, index_set, PTDF, rel_ptdf_tol=None, abs_ptdf_tol=None):
"""
Create the equality constraints or expressions for power (from PTDF
approximation) in the branch
"""
m = model
con_set = decl.declare_set("_con_eq_branch_power_ptdf_approx_set", model, i... | 6,898 |
def get_existing_rule(text):
    """
    Return the matched rule if the text is an existing rule matched exactly
    (a single match whose matcher is MATCH_HASH); return None otherwise.

    NOTE(review): an earlier docstring promised False for the no-match
    case, but the code falls through and returns None (still falsy).
    """
    matches = get_license_matches(query_string=text)
    if len(matches) == 1:
        match = matches[0]
        if match.matcher == MATCH_HASH:
            return match.rule
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.