content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def recognize_emotion(name, mode, dataset):
"""
The main program for building the system. And we support following kinds of model:
1. Convolutional Neural Network (CNN)
2. Support Vector Machine (SVM)
3. Adaboost
4. Multilayer Perceptron (MLP)
Args:
... | 5,352,000 |
def check_main_depencies():
""" Check if dependencies listed in TOOLS_NEEDED are installed """
print("# Checking dependencies")
for tool in TOOLS_NEEDED:
print("[+] Checking %s... " % tool, end='')
if which(tool) is not None:
print("ok!")
else:
print("missing!... | 5,352,001 |
def feedback(request):
    """
    Render the feedback page, where a visitor can submit feedback to
    help improve the website further.
    """
    template = "groundfloor/common/feedback.html"
    return render(request, template, context=None)
def AddCreateArgsToParser(parser):
"""Add flags for creating a node template to the argument parser."""
parser.add_argument(
'--description',
help='An optional description of this resource.')
parser.add_argument(
'--node-affinity-labels',
metavar='KEY=VALUE',
type=arg_parsers.ArgDict... | 5,352,003 |
def fill_nodata_image(dataset: xr.Dataset) -> Tuple[np.ndarray, np.ndarray]:
"""
Interpolate no data values in image. If no mask was given, create all valid masks
:param dataset: Dataset image
:type dataset: xarray.Dataset containing :
- im : 2D (row, col) xarray.DataArray
... | 5,352,004 |
def find(value, a_list):
"""
TestCase for find
>>> find(26, [12,14])
True
>>> find(40, [14, 15, 16, 4, 6, 5])
False
>>> find(1, [1])
False
>>> find(1, [])
False
>>> find(4, [2, 3, 2])
True
"""
# 现将列表变为<value, index>字典
if a_list is None or len(a_list) < 2:
... | 5,352,005 |
def insert_from_query(
conn: connection,
relation: LoadableRelation,
table_name: Optional[TableName] = None,
columns: Optional[Sequence[str]] = None,
query_stmt: Optional[str] = None,
dry_run=False,
) -> None:
"""
Load data into table from its query (aka materializing a view).
The t... | 5,352,006 |
def transform_bbox(
bbox, source_epsg_code, target_epsg_code, all_coords=False
):
"""
Transform bbox from source_epsg_code to target_epsg_code,
if necessary
:returns np.array of shape 4 which represent the two coordinates:
left, bottom and right, top.
When `all_coords` is set to... | 5,352,007 |
def distance_metric(vector1, vector2):
""" Returns a score value using Jaccard distance
Args:
vector1 (np.array): first vector with minHash values
vector2 (np.array): second vector with minHash values
Returns:
float: Jaccard similarity
"""
return distance.pdist(np.array([ve... | 5,352,008 |
def update_calendar(request):
"""
to update an entry to the academic calendar to be updated.
@param:
request - contains metadata about the requested page.
@variables:
from_date - The starting date for the academic calendar event.
to_date - The ending date for the academic calde... | 5,352,009 |
def get_section(entry: LogEntry) -> str:
    """returns the section of the request (/twiki/bin/edit/Main -> /twiki)"""
    parts = entry.request.split('/')
    return '/'.join(parts[:2])
def reverse_lookup(d, v):
"""
Reverse lookup all corresponding keys of a given value.
Return a lisy containing all the keys.
Raise and exception if the list is empty.
"""
l = []
for k in d:
if d[k] == v:
l.append(k)
if l == []:
raise ValueError
else:
... | 5,352,011 |
def compile_channels_table(*, channels_meta, sources, detectors, wavelengths):
"""Compiles a NIRSChannelsTable given the details about the channels, sources,
detectors, and wavelengths.
"""
table = NIRSChannelsTable()
for channel_id, channel in channels_meta.items():
source_label = sources.l... | 5,352,012 |
def cpe2pkg_tool():
    """
    Return the path to the bundled ``cpe2pkg.jar`` tool.

    :return: absolute path to ``cpe2pkg.jar`` as a string
    :raises RuntimeError: if the jar has not been built yet
    """
    # Renamed from `bin` to avoid shadowing the builtin.
    jar_path = Path(__file__).parent.parent / Path('tools/bin/cpe2pkg.jar')
    if jar_path.exists():
        return str(jar_path)
    # Misplaced backtick fixed: the command is `make build-cpe2pkg`, run once.
    raise RuntimeError('`cpe2pkg.jar` is not available, please run `make build-cpe2pkg` once.')
def put_block_public_access_configuration(BlockPublicAccessConfiguration=None):
"""
Creates or updates an Amazon EMR block public access configuration for your AWS account in the current Region. For more information see Configure Block Public Access for Amazon EMR in the Amazon EMR Management Guide .
See al... | 5,352,014 |
def spawn_thread(func, *args, **kwds):
    """
    Create, start, and return a daemonic thread running *func*.

    :param func: callable to run in the new thread
    :param args: positional arguments forwarded to *func*
    :param kwds: keyword arguments forwarded to *func*
    :return: the started ``threading.Thread`` instance
    """
    thr = threading.Thread(target=func, args=args, kwargs=kwds)
    # ``Thread.setDaemon`` is deprecated since Python 3.10; assign the
    # ``daemon`` attribute instead (must happen before start()).
    thr.daemon = True
    thr.start()
    return thr
def decision(question):
"""Asks user for a question returning True/False answed"""
if sys.version_info[0] < 3:
if raw_input("\n%s [Y/n] " % question) in ["", "y", "Y"]:
return True
else:
if input("\n%s [Y/n] " % question) in ["", "y", "Y"]:
return True
return Fal... | 5,352,016 |
def _fail(msg):
    """Output failure message when auto configuration fails."""
    # Typo fixed in the user-facing message: "Confiugation" -> "Configuration".
    fail("%s %s\n" % (red("ROS Configuration Error:"), msg))
def get_thread_replies(parent_id):
"""
Get all replies to a thread
If the thread does not exist, return an empty list
:param parent_id: Thread ID
:return: replies to thread
"""
assert type(parent_id) is uuid.UUID, """parent_id is not correct type"""
reply_query = Query()
results = db... | 5,352,018 |
def compute_acc(pred, labels):
    """
    Fraction of rows in ``pred`` whose argmax matches the corresponding label.
    """
    predicted_classes = torch.argmax(pred, dim=1)
    n_correct = (predicted_classes == labels).float().sum()
    return n_correct / len(pred)
async def read_update_status() -> str:
    """Read update status.

    Returns the cached status when one is present, otherwise the
    default ``"ready_to_update"``.
    """
    if await cache.exists(Config.update_status_id()):
        return await cache.get(Config.update_status_id())
    return "ready_to_update"
def _login_and_select_first_active_device(api):
"""Login Erie Connect and select first active device"""
# These do i/o
_LOGGER.debug(f'{DOMAIN}: erie_connect.login()')
api.login()
_LOGGER.debug(f'{DOMAIN}: erie_connect.select_first_active_device()')
api.select_first_active_device()
if (
... | 5,352,021 |
def stopall(qthread: Thread, target: str) -> None:
    """Stops child processes and queue thread.

    :param qthread: queue-worker thread to join once the children are stopped
    :param target: identifier passed through to ``stop_processes``
    """
    stop_processes(target)
    # Grace period so child processes can exit before joining the queue thread.
    sleep(5)
    qthread.join() | 5,352,022 |
def PCopy (inFA, err):
"""
Make copy an GPUFArray
returns copy
* inFA = input Python GPUFArray
* err = Python Obit Error/message stack
"""
################################################################
# Checks
if not PIsA(inFA):
print("Actually ",inFA.__class_... | 5,352,023 |
def _get_statuses(policy_type_id, policy_instance_id):
"""
shared helper to get statuses for an instance
"""
_instance_is_valid(policy_type_id, policy_instance_id)
prefixes_for_handler = "{0}{1}.{2}.".format(HANDLER_PREFIX, policy_type_id, policy_instance_id)
return list(SDL.find_and_get(A1NS, p... | 5,352,024 |
def _interval_example(avg_price_with_interval):
"""# Plot the data ordered by the numerical axis"""
ch = chartify.Chart(blank_labels=True, x_axis_type='categorical')
ch.set_title("Interval plots")
ch.set_subtitle(
"Represent variation. Optional `middle_column` to mark a middle point."
)
... | 5,352,025 |
def phase_lines(graph):
""" Determines the phase lines of a graph.
:param graph: Graph
:return: dictionary with node id : phase in cut.
"""
if has_cycles(graph):
raise ValueError("a cyclic graph will not have phaselines.")
phases = {n: 0 for n in graph.nodes()}
q = graph.nodes(in_deg... | 5,352,026 |
def creds() -> Account:
"""Load or obtain credentials for user."""
credentials = "8da780f3-5ea0-4d97-ab13-9e7976370624"
protocol = MSGraphProtocol(timezone="Europe/Stockholm")
scopes = protocol.get_scopes_for(SCOPES)
token_backend = FileSystemTokenBackend(
token_path=os.path.dirname(__file_... | 5,352,027 |
def b64decode_str(b64string):
    """
    Decode a base64 ASCII string into a UTF-8 string.
    """
    decoded = base64.b64decode(b64string).decode("UTF-8")
    logger.debug("Decoded %s as %s", b64string, decoded)
    return decoded
def BCELossConfig(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = """Creates a criterion that measures the Binary Cross E... | 5,352,029 |
def gen_cities_avg(climate, multi_cities, years):
"""
Compute the average annual temperature over multiple cities.
Args:
climate: instance of Climate
multi_cities: the names of cities we want to average over (list of str)
years: the range of years of the yearly averaged temperature ... | 5,352,030 |
def tangentVectorsOnSphere( points, northPole = np.array([0.0,0.0,1.0]) ):
"""
Acquire a basis for the tangent space at given points on the surface of the unit sphere.
:param points: N x 3 array of N points at which to acquire basis of tangent space.
:param northPole: 3 array of point correspondin... | 5,352,031 |
def _colorvar_patch_destroy(fn):
"""Internal function.\n
Deletes the traces if any when widget is destroy."""
def _patch(self):
"""Interanl function."""
if self._tclCommands is not None:
# Deletes the widget from the _all_traces_colorvar
# and deletes the traces too.... | 5,352,032 |
def menu(screen):
"""prints the menu on the main screen and intiates all exeuctions
Args:
screen (screen object): object to refrence screen
"""
global viewMode
global dayDelta
global todayDate
global calName
while True:
screen.print_at(
"Day(0) | Week (1) | A... | 5,352,033 |
def dispatch_files(dispatch_dir='dump'):
"""
Moves audio files from 'dump' directory to proper audio sub-folders.
---
IN
dispatch_dir: directory from which to start if not 'dump'
NO OUT
"""
root = '/Users/dluther/ds/metis/metisgh/projects/05-kojak/audio/'
for file in os.lis... | 5,352,034 |
def distros_for_filename(filename, metadata=None):
    """Yield possible egg or source distribution objects based on a filename"""
    location = normalize_path(filename)
    basename = os.path.basename(filename)
    return distros_for_location(location, basename, metadata)
def tsne(
features: np.ndarray,
labels: np.ndarray,
fp: FreePlot,
index: Union[Tuple[int], str] = (0, 0),
fontsize: Union[int, str] = 'large',
annotate: bool = False,
style: Union[str, Iterable[str]] = 'bright',
**kwargs: "other kwargs of fp.scatterplot"
) -> None:
"""
Args:
... | 5,352,036 |
def control(_):
    """
    Control scenario for testing: shares no overhead with the other
    scenario functions, so the body intentionally does nothing.

    :param _: placeholder for the int input (ignored)
    :return: None
    """
    return None
def pres_from_hybrid(psfc, hya, hyb, p0=100000.):
    """Return the pressure field on hybrid-sigma coordinates.

    Assumes the standard formula p = a(k)*p0 + b(k)*ps.

    :param psfc: surface pressure field
    :param hya: hybrid "a" coefficient(s)
    :param hyb: hybrid "b" coefficient(s)
    :param p0: reference pressure, defaults to 100000
    """
    reference_term = hya * p0
    surface_term = hyb * psfc
    return reference_term + surface_term
def run():
"""
Warning system
1) Obtain a list of stations where the current relative level is already high
(Threshold set at 1)
2) For these stations, query the past 2 days data and find their fitting curves respectively
3) From the fitting curves, predict the water level in the next hour
... | 5,352,039 |
def url_exist(file_url):
""" Check if an url exist
Parameters
----------
file_url : string
url of www location
Returns
-------
verdict : dtype=boolean
verdict if present
"""
try:
urllib.request.urlopen(file_url).code == 200
return True
exce... | 5,352,040 |
def chi_square(observed, expected):
"""
Compute the chi square test
"""
from scipy import stats
# glen cowan pp61
temp = []
for (n, nu) in zip(observed, expected):
if nu != 0:
temp += [((n - nu) ** 2) / nu]
# compute p value
mychi = sum(temp)
p = stats.chi2.s... | 5,352,041 |
def fix_deform_for_children(pmx: pmxstruct.Pmx, me: int, already_visited=None) -> int:
"""
Recursively ensure everything that inherits from the specified bone will deform after it.
Only cares about parent and partial-inherit, doesnt try to understand IK groups.
Return the number of bones that were changed.
:param ... | 5,352,042 |
def theta_8(p, q, h, phi, a, b):
    """Lower limit of integration for the case rho > a, rho > b."""
    # arctan of the ratio r_8/h gives the angular integration bound.
    return np.arctan(r_8(p, q, phi, a, b) / h)
def test_db(app):
    """
    Setup database, this only gets executed once per module.

    Drops any existing tables, creates a fresh schema, yields the
    database handle to the tests, then tears everything down again.

    :param app: Pytest fixture
    :return: SQLAlchemy database session
    """
    # Start from a clean slate in case a previous run left tables behind.
    _db.drop_all()
    _db.create_all()
    # Tests in the module run while the fixture is suspended here.
    yield _db
    # Teardown: release the session and remove the schema.
    _db.session.remove()
    _db.drop_all() | 5,352,044 |
def run(config):
    """start http exporter server

    Starts the Prometheus HTTP endpoint, registers the vCenter collector,
    then blocks forever (the server itself runs in a background thread).

    :param config: mapping with optional "port" (default 9108) and
        "collector" entries
    """
    start_http_server(int(config.get("port", 9108)))
    REGISTRY.register(VcenterCollector(config.get("collector")))
    # Keep the main thread alive; scrapes are served by the server thread.
    while True:
        time.sleep(1) | 5,352,045 |
def test_add_se_beta() -> None:
"""
Test arguments for beta column.
@return: None
"""
se_beta = str(uuid.uuid4())
arguments = parse_harness([FLAG_SE_BETA, se_beta], add_se_beta_value_flag)
assert arguments.se_beta == se_beta
assert parse_harness([], add_se_beta_value_flag).se_beta == OU... | 5,352,046 |
def collect_genewise(fst_file, file_name, gene_names, gene_to_fst):
"""take in the file name, opens it.
populates a dictionary to [gene] = fst
file_name = defaultdict(str)
FBgn0031208 500000 16 0.002 21.0 1:2=0.05752690
"""
file_name = file_name.split("_gene")[0]
f_in = open(fst_file,... | 5,352,047 |
def bbox_overlaps_batch(anchors, gt_boxes):
"""
:param anchors: (N, 4) ndarray of float
:param gt_boxes: (b, K, 5) ndarray of float
:return: (N, K) ndarray of overlap between boxes and query_boxes
"""
batch_size = gt_boxes.size(0)
if anchors.dim() == 2:
N = anchors.size(0)
... | 5,352,048 |
def extract_curve_and_test(curve_names: str, name: str) -> Tuple[str, Callable[[Any], bool]]:
"""Return a curve and a test to apply for which of it's components to twist."""
twist_match = re.match(rf"(?P<curve>[{curve_names}])_(?P<n>-?\d+)$", name)
twist_index_match = re.match(rf"(?P<curve>[{curve_names}])... | 5,352,049 |
def test_filt_tx_from_proto():
""" Tests FilteredTX.from_proto """
proto = FilteredTXProto(
txid='09876',
type=HeaderType.MESSAGE,
tx_validation_code=TxValidationCode.VALID,
transaction_actions=FilteredTransactionActions()
)
print(proto.transaction_actions)
assert Fil... | 5,352,050 |
def test_you_can_build_a_time_series() -> None:
"""
This is a Hidden Markov Model -
see for example http://mlg.eng.cam.ac.uk/zoubin/papers/ijprai.pdf
... --> X[t-1] --> X[t] --> ...
| |
Y[t-1] Y[t]
"""
x_label = "x"
y_label = "y"
num_i... | 5,352,051 |
def psf_gaussian(psf_shape, psf_waist, psf_physical_size=1, psf_nphoton=2):
"""Return 3D gaussian approximation of PSF."""
def f(index):
s = psf_shape[index] // 2 * psf_physical_size
c = numpy.linspace(-s, s, psf_shape[index])
c *= c
c *= -2.0 / (psf_waist[index] * psf_waist[ind... | 5,352,052 |
def get_employee_record(id_num):
    """Gets an employee's details if record exists.

    Looks the employee up in the module-level ``names`` and ``cities``
    mappings; both must contain *id_num* for the lookup to succeed.

    Arguments:
    id_num -- ID of employee record to fetch
    """
    # PEP 8 idiom: "x not in y" rather than "not x in y".
    if id_num not in names or id_num not in cities:
        return 'Error viewing record'
    return f'{id_num} {names[id_num]} {cities[id_num]}'
def test_permissions_actions(api):
"""
Получение списка действий по контроллеру
:param api:
:return:
"""
data = api.call(
'PermissionAction', 'read',
filter=[{
'property': '_',
'operator': '_',
'value': 'UserRole'
}],
_web_sessi... | 5,352,054 |
def send_exploit():
"""
Sends a request with the payload for a remote buffer overflow
"""
try:
with so.socket(so.AF_INET, so.SOCK_STREAM) as s:
s.settimeout(5)
print_info('Connecting to {}'.format(target))
connect = s.connect_ex((target, port))
# Stop if connection cannot be established
if connect ... | 5,352,055 |
def list_aliases():
"""
Gets the list of aliases for the current account. An account has at most one alias.
:return: The list of aliases for the account.
"""
try:
response = iam.meta.client.list_account_aliases()
aliases = response['AccountAliases']
if len(aliases) > 0:
... | 5,352,056 |
def MapToSingleIncrease(val):
    """
    Map 5-minute-step values (5, 10, 15, 20, ...) onto a sequential
    scale (1, 2, 3, 4, ...) as some downstream tools require.
    """
    sequential = val / 5
    return sequential
def get_columns_for_table(instance, db, table):
""" Get a list of columns in a table
Args:
instance - a hostAddr object
db - a string which contains a name of a db
table - the name of the table to fetch columns
Returns
A list of columns
"""
conn = connect_mysql(instance)
cursor... | 5,352,058 |
def registra_aluno(nome, ano_entrada, ano_nascimento, **misc):
    """Build a student registration record.

    The three required fields are stored under fixed keys; any extra
    keyword arguments are merged into the record as additional fields.
    """
    return {
        'nome': nome,
        'ano_entrada': ano_entrada,
        'ano_nascimento': ano_nascimento,
        **misc,
    }
def define_output_ports(docstring, short_description_word_count=4):
"""
Turn the 'Returns' fields into VisTrails output ports
Parameters
----------
docstring : NumpyDocString #List of strings?
The scraped docstring from the function being autowrapped into
vistrails
Returns
... | 5,352,060 |
def kill(pidfile, logger, signum=signal.SIGTERM):
"""Sends `signum` to the pid specified by `pidfile`.
Logs messages to `logger`. Returns True if the process is not running,
or signal was sent successfully. Returns False if the process for the
pidfile was running and there was an error sending the si... | 5,352,061 |
def create_small_table(small_dict):
"""
Create a small table using the keys of small_dict as headers. This is only
suitable for small dictionaries.
Args:
small_dict (dict): a result dictionary of only a few items.
Returns:
str: the table as a string.
"""
keys, valu... | 5,352,062 |
def get_normalized_list_for_every_month(variable_r, list_of_ranges_r, tags_r):
"""
:param variable_r: big list with all the data [sizes][months]
:param list_of_ranges_r: sorted list of range (sizes...Enormous, etc.)
:return: normalized list for each month (numbers are percentage respect to the total by... | 5,352,063 |
def mark_as_possible_cluster_member(g, possible_cluster_member, cluster, confidence, system, uri_ref=None):
"""
Mark an entity or event as a possible member of a cluster.
:param rdflib.graph.Graph g: The underlying RDF model
:param rdflib.term.URIRef possible_cluster_member: The entity or event to mark... | 5,352,064 |
def test_thing_action_run(consumed_exposed_pair):
"""Actions can be invoked on ConsumedThings using the map-like interface."""
consumed_thing = consumed_exposed_pair.pop("consumed_thing")
exposed_thing = consumed_exposed_pair.pop("exposed_thing")
@tornado.gen.coroutine
def test_coroutine():
... | 5,352,065 |
def create_xlsx_for_all_recordings(root_directory: str = ".") -> None:
"""Traverses subdirectories to analyze multiple recordings.
Assumes that any folder with an H5 file in it has only H5 files from a single recording.
For simple usage, navigate to the root folder you want to analyze, and run:
``pyth... | 5,352,066 |
def twodcontourplot(tadata_nm, tadata_timedelay, tadata_z_corr):
"""
make contour plot
Args:
tadata_nm: wavelength array
tadata_timedelay: time delay array
tadata_z_corr: matrix of z values
"""
timedelayi, nmi = np.meshgrid(tadata_timedelay, tadata_nm)
# find the maxi... | 5,352,067 |
def make_example_dags(module_path):
    """Load and return the DAGs defined in the given module, for tests."""
    return DagBag(module_path).dags
def test_ict2():
"""Is the ict calculated correctly?"""
model_embedding = [{0: 1}]
real_embedding = [{1: 1}]
context = np.array([[0, 4], [500, 4]])
# calculate Euclidean distance matrix
distance_matrix = calc_euclidean(context)
# calc d for embeddings
vocab_len = len(context)
d_mod... | 5,352,069 |
def texLatticeDeformContext(q=1,e=1,ev="float",ex=1,ch=1,i1="string",i2="string",i3="string",lc="uint",lr="uint",n="string",smm=1,spm=1,ubr=1):
"""
http://help.autodesk.com/cloudhelp/2019/ENU/Maya-Tech-Docs/CommandsPython/texLatticeDeformContext.html
-----------------------------------------
texLatticeDeformCon... | 5,352,070 |
def count_good_deals(df):
"""
7. Считает число прибыльных сделок
:param df: - датафрейм с колонкой '<DEAL_RESULT>'
:return: - число прибыльных сделок
"""
# http://stackoverflow.com/questions/27140860/count-occurrences-of-number-by-column-in-pandas-data-frame?rq=1
return (df['<DEAL_RESULT... | 5,352,071 |
def match_lines_by_hausdorff(target_features, match_features, distance_tolerance,
azimuth_tolerance=None, length_tolerance=0, match_features_sindex=None, match_fields=False,
match_stats=False, field_suffixes=('', '_match'), match_strings=None, constrain_target_features=False,
target_features_sindex=None,... | 5,352,072 |
def git_rmtree(path: os.PathLike) -> None:
"""Remove the given recursively.
:note: we use shutil rmtree but adjust its behaviour to see whether files that
couldn't be deleted are read-only. Windows will not remove them in that case"""
def onerror(func: Callable, path: os.PathLike, _) -> None:
... | 5,352,073 |
def get_num_streams():
"""Force an offset so high that the payload is small and quick.
In it, there will be a total number to base our reverse search from"""
result = get_streams()
logger.debug(result)
if "error" in result:
raise Exception("error in request: " + str(result))
total = int... | 5,352,074 |
def compute_corr_active(params):
""" Compute correlation only for active positions, i.e. where
at least one of the two signal tracks is non-zero
:param params:
:return:
"""
with pd.HDFStore(params['inputfilea'], 'r') as hdf:
load_group = os.path.join(params['inputgroupa'], params['chrom'... | 5,352,075 |
def test_same_seed():
"""
Test same output for random points method with same seed
"""
data, cluster_borders, _ = gen_acceptable_data()
k = 2
model = kmeans_py.kmeans(data=data, K=k)
model.initialize_centers(method='rp', seed=1234)
model2 = kmeans_py.kmeans(data=data, K=k)
model2... | 5,352,076 |
def test_image_lat_profile():
"""Tests GLAT profile with image of 1s of known size and shape."""
image = SkyImage.empty_like(FermiGalacticCenter.counts(), fill=1.)
coordinates = image.coordinates()
l = coordinates.data.lon
b = coordinates.data.lat
lons, lats = l.degree, b.degree
counts = Sk... | 5,352,077 |
def create_anchors_3d_stride(grid_size,
voxel_size=[0.16, 0.16, 0.5],
coordinates_offsets=[0, -19.84, -2.5],
dtype=np.float32):
"""
Args:
feature_size: list [D, H, W](zyx)
sizes: [N, 3] list of list or array,... | 5,352,078 |
def combined_roidb(imdb_names):
"""
Combine multiple roidbs
"""
def get_roidb(imdb_name):
imdb = get_imdb(imdb_name)
print('Loaded dataset `{:s}` for training'.format(imdb.name))
imdb.set_proposal_method("gt")
print('Set proposal method: {:s}'.format("gt"))
roidb... | 5,352,079 |
def render_locations_profile(list_id, item_id, resource, rfields, record):
"""
Custom dataList item renderer for Locations on the Profile Page
- UNUSED
@param list_id: the HTML ID of the list
@param item_id: the HTML ID of the item
@param resource: the S3Resource to render
... | 5,352,080 |
def enable_plugins():
    """Expose the ``larch`` package under the ``larch_plugins`` alias.

    Registers the alias in ``sys.modules`` on first use and returns
    the aliased module.
    """
    if 'larch_plugins' in sys.modules:
        return sys.modules['larch_plugins']
    import larch
    sys.modules['larch_plugins'] = larch
    return larch
def exception_log_and_respond(exception, logger, message, status_code):
"""Log an error and send jsonified respond."""
logger.error(message, exc_info=True)
return make_response(
message,
status_code,
dict(exception_type=type(exception).__name__, exception_message=str(exception)),
... | 5,352,082 |
def parse_cdhit_clusters(cluster_file):
"""
Parses cdhit output into three collections in a named tuple:
clusters: list of lists of gene ids.
reps: list of representative gene for each cluster
lookup: dict mapping from gene names to cluster index
In this setup, cluster ids are the positio... | 5,352,083 |
def evaluate(model: nn.Module, dataloader: DataLoader) -> Scores:
"""
Evaluate a model without gradient calculation
:param model: instance of a model
:param dataloader: dataloader to evaluate the model on
:return: tuple of (accuracy, loss) values
"""
score = 0
loss = 0
loss_func = n... | 5,352,084 |
def remote_judge_get_problem_info(problem_id: str, contest_id: int = -1, contest_problem_id: int = -1):
"""
{
"code":0,
"data":{
"isContest":"是否在比赛中",
"problemData":{
"title":"题目名",
"content":"题目内容",
"background":"题... | 5,352,085 |
def project_directory(packaged_scene, package_root, source_scene):
"""
Project directory for packaged scene
Args:
packaged_scene (str): Packaged scene path
package_root (str): Package root path
source_scene (str): Source scene path
Returns:
str
"""
raise NotImpl... | 5,352,086 |
def can_write(obj, user):
    """
    Takes an article (or a model related to an article) and reports
    whether *user* may edit it, delegating to the object's own
    ``can_write`` method.
    """
    permitted = obj.can_write(user)
    return permitted
def process_image(msg_cont):
"""
Processes the message container, loading the image from the message and forwarding the predictions.
:param msg_cont: the message container to process
:type msg_cont: MessageContainer
"""
config = msg_cont.params.config
try:
start_time = datetime.now... | 5,352,088 |
def create_external_question(url: str, height: int) -> str:
"""Create XML for an MTurk ExternalQuestion."""
return unparse({
'ExternalQuestion': {
'@xmlns': 'http://mechanicalturk.amazonaws.com/AWSMechanicalTurkDataSchemas/2006-07-14/ExternalQuestion.xsd',
'ExternalURL': url,
... | 5,352,089 |
def credentials_batch_account_key_secret_id(config):
# type: (dict) -> str
"""Get Batch account key KeyVault Secret Id
:param dict config: configuration object
:rtype: str
:return: keyvault secret id
"""
try:
secid = config[
'credentials']['batch']['account_key_keyvault_s... | 5,352,090 |
def is_C2D(lname):
    """
    Check whether a layer name matches a Conv2D layer pattern.

    :param lname: layer name string
    :return: True if *lname* starts with a known Conv2D pattern
    """
    import re
    pattns = ['Conv2D']
    # any() short-circuits over a generator; no need to build a list of bools.
    return any(re.match(t, lname) is not None for t in pattns)
def add_help_attribute(functions: Dict[str, Callable[..., Any]]) -> None:
"""Given a dict whose content is of the form
{function_name_string: function_obj}
it adds customs `help` and `__rich__repr` attributes for all such
function objects.
"""
for name in functions:
if name not in short... | 5,352,092 |
def p_caseWhenStmt(t):
    # PLY parser rule. NOTE: the docstring below IS the grammar production —
    # PLY reads it via __doc__, so it must not be edited as documentation.
    """caseWhenStmt : R_WHEN expresion R_THEN plInstructions"""
    # t[2] is the WHEN expression, t[4] the THEN instruction list; the
    # R_WHEN token's lineno/lexpos are kept for error reporting.
    t[0] = CaseWhen(t[2], t[4],t.slice[1].lineno,t.slice[1].lexpos)
    # Record the matched production for the grammar report.
    repGrammar.append(t.slice) | 5,352,093 |
def encounter_media(instance, filename):
    """Return an upload file path for an encounter media attachment.

    Persists the related encounter first when it has no primary key yet,
    so that a stable ``source_id`` is available for the path.
    """
    encounter = instance.encounter
    if not encounter.id:
        encounter.save()
    return 'encounter/{0}/{1}'.format(encounter.source_id, filename)
def findFonts(pattern, lazy=True):
"""Answers a list of Font instances where the pattern fits the font path.
If pattern is a list, all parts should have a match.
# TODO: make case insensitive
"""
"""
>>> findFonts('Roboto-Thi')
[<Font Roboto-Thin>, <Font Roboto-ThinItalic>]
>>> # Selec... | 5,352,095 |
def time_range_cutter_at_time(local,time_range,time_cut=(0,0,0)):
""" Given a range, return a list of DateTimes that match the time_cut
between start and end.
:param local: if False [default] use UTC datetime. If True use localtz
:param time_range: the TimeRange object
:param time_c... | 5,352,096 |
def _apply_nat_dns_host_resolver():
"""
This will make the Dusty VM always use the host's DNS resolver for lookups.
It solves an issue we were seeing where the VM's resolving settings would get
out of date when a laptop was moved between routers with different settings,
resulting in DNS lookup failu... | 5,352,097 |
def _darknet_conv(
x: np.ndarray, filters: int, size: int, strides: int = 1, batch_norm: bool = True
) -> tf.Tensor:
"""create 1 layer with [padding], conv2d, [bn and relu]"""
if strides == 1:
padding = "same"
else:
x = ZeroPadding2D(((1, 0), (1, 0)))(x) # top left half-padding
... | 5,352,098 |
def _build_topic_to_consumer_topic_state_map(watermarks):
"""Builds a topic_to_consumer_topic_state_map from a kafka
get_topics_watermarks response"""
return {
topic: ConsumerTopicState({
partition: int((marks.highmark + marks.lowmark) / 2)
for partition, marks in watermarks_... | 5,352,099 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.