content stringlengths 5 1.05M |
|---|
import sys
import cv2
import numpy as np
import gpyocr
plateCascade = cv2.CascadeClassifier("/home/pi/haarcascade_eu_plate_number.xml")
color = (255,120,255)
arg = float(sys.argv[1])
bbox_x = float(sys.argv[2])
bbox_y = float(sys.argv[3])
bbox_w = float(sys.argv[4])
bbox_h = float(sys.argv[5])
outText = ""
def getTex... |
import flask
from flask import request, Response
app = flask.Flask(__name__)
app.config["DEBUG"] = True
@app.route('/service/sub')
def sub():
qp = request.args
va = qp.get('va')
vb = qp.get('vb')
result = float(va) - float(vb)
print("VA: " + str(va) + " VB: " + str(vb) + " == " + str(result))
... |
"""
coding: utf-8
Created on 11/11/2020
@author: github.com/edrmonteiro
From: Hackerrank challenges
Language: Python
Title: Minimum Swaps 2
You are given an unordered array consisting of consecutive integers [1, 2, 3, ..., n] without any duplicates. You are allowed to swap any two elements. You need to find the mini... |
# ##############################################################################
# This file is part of df_websockets #
# #
# Copyright (C) 2020 Matthieu Gallet <[email protected]> ... |
# 987. 二叉树的垂序遍历
#
# 20200917
# huao
from typing import List
from functools import cmp_to_key
def cmp(x, y):
xx = x[1][0]
xy = x[1][1]
yx = y[1][0]
yy = y[1][1]
xnode = x[0].val
ynode = y[0].val
if xx < yx:
return -1
elif xx > yx:
return 1
else:
if xy > yy:... |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.optim as optim
import numpy as np
import time
import os
from six.moves import cPickle
import torch.backends.cudnn as cudnn
im... |
"""Main module."""
from enum import Enum, auto
from .parser import parse
# def parse(filepath: str):
# with open('tests/erds/one.er') as f:
# lxer.input(f.read())
# for tok in parser:
# print(tok)
class Option(Enum):
LABEL = 'label'
BG_COLOR = 'bgcolor'
COLOR = 'color'... |
#lucas numbers
def lucas():
first= 0
second = 1
terms = int(input("Number of terms?"))
n1, n2 = 2, 1
counter = 0
if terms <= 0:
print("Please enter a positive integer")
elif terms == 1:
print(n1)
elif terms == 2:
print(n1)
print(n2)
... |
#
# Copyright (c) 2021 Incisive Technology Ltd
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, pu... |
# coding: utf8
from .base_view import BaseView
class DummyView(BaseView):
    """No-op view that forwards construction straight to BaseView.

    Bug fix: the original called ``super(DummyView).__init__(config, **kwargs)``.
    The one-argument form creates an *unbound* super object, so
    ``BaseView.__init__`` was never invoked and ``config``/``kwargs`` were
    silently dropped. The two-argument form dispatches correctly.
    """

    def __init__(self, config, **kwargs):
        # Two-argument super actually reaches BaseView.__init__.
        super(DummyView, self).__init__(config, **kwargs)
|
#! /usr/bin/env python
#
# ReplayGain Album Analysis using gstreamer rganalysis element
# Copyright (C) 2005,2007,2009 Michael Urman
# 2012 Nick Boultbee
# 2013 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# ... |
# Copyright 2018 Hewlett Packard Enterprise Development LP
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2... |
values = {
"I": 1,
"V": 5,
"X": 10,
"L": 50,
"C": 100,
"D": 500,
"M": 1000,
}
class RightToLeftSolution:
def romanToInt(self, str):
total = values.get(str[-1])
for i in (range(len(str) - 1)):
if values[str[i]] < values[str[i + 1]]:
total -= va... |
#part1
# Change to use relplot() instead of scatterplot()
sns.relplot(x="absences", y="G3",
data=student_data,kind="scatter")
# Show plot
plt.show()
#part2
# Change to make subplots based on study time
sns.relplot(x="absences", y="G3",
data=student_data,
kind="sca... |
import torch
from torch import nn
import torch.nn.functional as F
from torch.nn.modules.loss import _Loss
from data.batcher import decode_tensor
from utils import remove_sep, mean
from metric.metric import Metric
class Loss(nn.CrossEntropyLoss):
def __init__(self, padding_idx=0):
super(Loss, self).__init... |
from unittest import TestCase
from models.user import UserModel
class UserTest(TestCase):
def test_create_user(self):
# Setup
# Exercise
user = UserModel('test', 'abcd')
# Verify
self.assertEqual('test', user.username)
self.assertEqual('abcd', user.... |
from SpatialCluster.methods.DMoN_core import convert_scipy_sparse_to_sparse_tensor, build_dmon, normalize_graph
from SpatialCluster.utils.get_areas import get_areas
from SpatialCluster.preprocess import adjacencyMatrix
from SpatialCluster.utils.data_format import data_format, position_data_format
# # # # # # # # # # #... |
#!/usr/bin/python
#
# =================================================================
# README
# =================================================================
#
#
# Author:
# Suraj singh bisht
# [email protected]
# www.bitforestinfo.com
#
# ----------------------... |
import urllib.parse
import requests
def shorten_url(url):
    """Shorten *url* via the is.gd API and return the short URL string.

    The target URL is percent-encoded before being embedded in the query
    string. Raises ``requests.HTTPError`` when is.gd answers with an error
    status, and ``requests.Timeout`` if the service does not respond within
    10 seconds (the original request had no timeout and could hang forever).
    """
    r = requests.get(
        "http://is.gd/create.php?format=simple&url={}".format(urllib.parse.quote(url)),
        timeout=10,  # never block indefinitely on a remote service
    )
    r.raise_for_status()
    return r.text
|
from django.db import models
from django.contrib.auth.models import User
from papel.models import Papel
from compra.models import Compra
# Create your models here.
class Negociacoes(models.Model):
usuario = models.ForeignKey(User, on_delete=models.CASCADE)
papel = models.ForeignKey(Papel, on_delete=models.CA... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# src/routes.py
# Developed in 2019 by Travis Kessler <[email protected]>
#
# Contains website routing information
#
# 3rd party imports
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField, SelectField, BooleanField
# CombustDB web ... |
"""Various helper scripts for serialization."""
import re
def serialize_enum(enum_object):
    """Serialize an enum (or any iterable of named members) to a dict.

    :param enum_object: iterable whose elements expose ``name`` and ``value``.
    :return: dict mapping each member's name to its value.
    """
    return {member.name: member.value for member in enum_object}
def clean_input_string(line: str) -> str:
return re.sub('[!@#$&?%*+:;,/]', '', lin... |
from threepy.core import *
from threepy.geometry import *
from threepy.material import *
class AxesHelper(Mesh):
def __init__(self, axisLength=1, lineWidth=4):
vertexPositionData = [[0, 0, 0], [axisLength, 0, 0], [0, 0, 0],
[0, axisLength, 0], [0, 0, 0], [0, 0, axisLength]]... |
#!/usr/bin/env python3
"""RNA-Seq SE(dUTP)/PE(dUTP), ChIP-Seq SE/PE script produces jobs for four workflows"""
import os
import datetime
import sys
from json import dumps, loads
from collections import OrderedDict
import logging
import decimal
from .utils import biowardrobe_settings, remove_not_set_inputs
from .const... |
import sys
sys.path.append('../../')
from cortstim.edp.loaders.dataset.clinical.excel_meta import ExcelReader
def load_clinical_df(excelfilepath):
    """Load the clinical dataframe stored in an Excel metadata file.

    :param excelfilepath: path to the formatted Excel metadata file.
    :return: the clinical dataframe exposed by the reader's ieeg dataset.
    """
    reader = ExcelReader(filepath=excelfilepath)
    # NOTE(review): the path is passed both to the constructor and to
    # read_formatted_df — mirrors the original call sequence; confirm the
    # ExcelReader API actually needs it twice.
    reader.read_formatted_df(excelfilepath)
    return reader.ieegdf.clindf
|
#!python3
""" Main Meshtastic
"""
import argparse
import platform
import logging
import os
import sys
import time
import yaml
from pubsub import pub
import pyqrcode
import pkg_resources
import meshtastic.util
import meshtastic.test
from meshtastic import remote_hardware
from meshtastic.ble_interface import BLEInterfac... |
from __future__ import annotations
from typing import List, Dict, Union, Generator, ItemsView, Optional, \
Tuple, Any, no_type_check
from base64 import b64encode, b64decode
from datetime import date, datetime
from . import APTypes
from .namespace import *
def dateobj(source: str) -> Union[date, datetime]:
'''Par... |
"""Testing data splitter."""
import os
import tempfile
import unittest
from typing import Tuple
import pandas as pd
from pytoda.data_splitter import csv_data_splitter
from pytoda.tests.utils import TestFileContent
from pytoda.types import Files
class TestDataSplitter(unittest.TestCase):
"""Testing csv data spli... |
# see http://influxdb.com/docs/v0.8/api/reading_and_writing_data.html
import tornado.ioloop
import tornado.web
import logging
import json
logger = logging.getLogger('docker')
class BaseHandler(tornado.web.RequestHandler):
def _handle_request_exception(self, e):
logger.error(e)
self.write_error(st... |
import contextlib
import copy
import csv
import json
import logging
import os
import re
from pathlib import Path
import click
from fuzzywuzzy import process
from utils import make_mongodb
log_format = logging.Formatter(
"[%(asctime)s][%(filename)s:%(lineno)4s - %(funcName)10s()] %(message)s"
)
handler = logging.... |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Mon Apr 1 18:02:06 2019
@author: jiahuei
Adapted from `https://github.com/ruotianluo/self-critical.pytorch/blob/master/scripts/prepro_ngrams.py`
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_funct... |
#!/usr/bin/env python
# python parser module for pre-mir and mature miRNAs, guided by mirbase.org GFF3
# version 0.0.9 (1-6-2014)
# Usage MirParser.py <1:index source> <2:extraction directive> <3:output pre-mir> <4: output mature miRs> <5:mirbase GFF3>
# <6:pathToLatticeDataframe or "dummy_datafram... |
import numpy as np
from gym.envs.robotics.rotations import *
import cv2
# -------------------- Generic ----------------------------
def get_intrinsics(fovy, img_width, img_height):
# fovy = self.sim.model.cam_fovy[cam_no]
aspect = float(img_width) / img_height
fovx = 2 * np.arctan(np.tan(np.deg2rad(fovy) ... |
#!/usr/bin/python3
# Python script to convert RGB code in format NN,NN,NN to Hex
from math import floor
rgbSource = input("What is the RGB code to convert (format as nn,nn,nn)")
rgb = rgbSource.split(",")
red = int(rgb[0])
green = int(rgb[1])
blue = int(rgb[2])
charOne = floor(red / 16)
charTwo = red % 16
charThree... |
import concurrent
from threading import Lock
from typing import List
from base_automation import BaseAutomation
from lib.annoucer.announcement import Announcement
from lib.annoucer.announcer_config import AnnouncerConfig
from lib.annoucer.media_manager import MediaManager
from lib.annoucer.player import Player
from li... |
#convert string to hex
def toHex(s):
    """Return *s* as a lowercase hex string, two hex digits per character.

    Fixes: the original used ``reduce``, which is a builtin only on Python 2
    (NameError on Python 3 unless imported from functools), and hand-rolled
    the zero padding. ``str.join`` over a ``{:02x}`` format does both
    portably and without the quadratic accumulator.
    """
    return ''.join('{:02x}'.format(ord(ch)) for ch in s)
#convert hex repr to string
def toStr(s):
return s and chr(atoi(s[:2], base=16)) + toStr... |
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from recipe_engine.recipe_api import Property
from recipe_engine.types import freeze
DEPS = [
'build/gitiles',
'build/url',
'depot_tools/bot_update',
... |
from netapp.netapp_object import NetAppObject
class CoreSegmentInfo(NetAppObject):
"""
Core Segment Info
When returned as part of the output, all elements of this typedef
are reported, unless limited by a set of desired attributes
specified by the caller.
<p>
When used as input to specify d... |
"""
__M4IfClauseComplete_MDL.py_____________________________________________________
Automatically generated AToM3 Model File (Do not modify directly)
Author: gehan
Modified: Wed Oct 23 17:09:15 2013
________________________________________________________________________________
"""
from stickylink import *
from widt... |
import os
import sys
import argparse
import datetime
import time
import json
from common import ColonyClient, LoggerService
def parse_user_input():
parser = argparse.ArgumentParser(prog='Colony Sandbox Start')
parser.add_argument("sandbox_id", type=str, help="The name of sandbox")
parser.add_argument("time... |
import os
import sys
import json
projectfile = ".project"
def update_key_value(filepath, **kwargs):
    """Merge *kwargs* into the JSON object stored at *filepath*.

    Reads the file, updates the parsed mapping with the given keyword
    arguments (existing keys are overwritten), and writes the result back
    to the same path.
    """
    with open(filepath, "r") as fh:
        contents = json.load(fh)
    contents.update(kwargs)
    with open(filepath, "w") as fh:
        json.dump(contents, fh)
def get_key_value(filepath, key)... |
#Written for Python3
#This code is responsible for connecting, fetching and committing data
#to the mariaDB.
import mariadb
import spooler
import configReader
config = configReader.parseFile("db")
err = None
def isError():
    """Return the module-level ``err`` (None when no DB error was recorded).

    Fix: the original declared ``global err``, which is only required for
    *assignment*; a plain read already resolves to the module global.
    """
    return err
try:
conn = mariadb.connect(
host=config["dbHost"],
port=int... |
from qtpy import QtWidgets
from stream_viewer.widgets.interface import IControlPanel
class VisbrainControlPanel(IControlPanel):
"""
A panel of configuration widgets for configuring a visbrain mesh plot.
This widget assumes the renderer is an instance of CortexVB (maybe other VisBrain too).
"""
def... |
# schemas.py
#
# Starling Bank schema definitions
from datetime import datetime
from typing import List, Optional
from pydantic import BaseModel
# = ACCOUNTS ==========================================================================================================
class StarlingAccountSchema(BaseModel):
"""Rep... |
#!/usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
##################################################################
# Documentation
"""Package containing a collection of custom Keras layers.
Attributes:
.. moduleauthor:: Wladimir Sidorenko (Uladzimir Sidarenka)
"""
######################################... |
from user.recommendations.constraints.base_constraint import BaseConstraint
class Vegetarian(BaseConstraint):
threshold = 0.9
def is_user_vegetarian(self):
if self.classification['vegetarian'] is None:
return False
return self.classification['vegetarian'] > self.threshold and sel... |
import numpy as np
import pandas as pd
from scipy.stats import variation
def row_corr(A,B):
#number of columns in A or B
N = B.shape[1]
# Store row-wise sums of A and B, as they would be used at few places
sA = A.sum(1)
sB = B.sum(1)
# Compute the four terms in pcc matrix-wise
p1 = N*np.e... |
import json
import requests
import pickle as pkl
def save(object, filename):
    """Pickle *object* to ``pickles/<filename>``.

    Fix: the original passed ``open(filename, "wb")`` directly to
    ``pkl.dump`` and never closed the handle (resource leak; on some
    platforms the data may not be flushed promptly). The parameter name
    ``object`` shadows the builtin but is kept for caller compatibility.
    """
    filename = "pickles/" + filename
    with open(filename, "wb") as fh:
        pkl.dump(object, fh)
def load(filename, req=None, try_load=True):
filename = "pickles/" + filename
if not req and not try_load:
return {}
if not req and try_lo... |
from trasto.infrastructure.awsmultiprocess.comando_repository import (
COMANDOS_QUEUE_NAME, ComandoRepository)
from trasto.infrastructure.memory.repositories import Idefier
from trasto.model.commands import (Comando, ComandoNuevaAccion,
ComandoNuevaTarea)
from trasto.model.entiti... |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
import re
import random
import numpy as np
import h5py
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(BASE_DIR)
# def getDataFiles(list_filename):
# return [line.rstrip() for line in open(list_filename)]
# def loadDataFil... |
import types
import pytest
from stp_core.loop.eventually import eventually
from plenum.common.constants import DOMAIN_LEDGER_ID
from plenum.common.util import updateNamedTuple
from plenum.test.helper import sdk_send_random_requests, \
sdk_send_random_and_check
from plenum.test.test_node import getNonPrimaryReplic... |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-31 16:21
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('genevieve_client', '0008_auto_20160628_1453'),
]
operations = [
migrations.RenameFie... |
__version__ = '0.0.3a3'
|
'''Visualization functions'''
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.patheffects as path_effects
import matplotlib.patches
import matplotlib.collections
from matplotlib import cm
import numpy as np
from .globalVariables import *
from . import (visib... |
# -*- coding: utf-8 -*-
"""
test_extract
========
Test extraction of chemical schematic diagrams
"""
import unittest
import os
import chemschematicresolver as csr
tests_dir = os.path.dirname(os.path.abspath(__file__))
img_dir = os.path.join(tests_dir, 'data')
# train_markush_small_dir = os.path.join(train_dir, 'tr... |
"""Tests for :func:`nilearn.plotting.plot_roi`."""
import pytest
import numpy as np
import matplotlib.pyplot as plt
from nibabel import Nifti1Image
from nilearn.plotting import plot_roi
from nilearn.image.resampling import coord_transform
from .testing_utils import MNI_AFFINE
def demo_plot_roi(**kwargs):
"""Demo... |
# Generated by Django 3.1.7 on 2021-02-26 15:37
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Individual',
fields=[
... |
from ._resource import Resource
from ._resource_provider import ResourceProvider
from ._swagger_module import SwaggerModule, DataPlaneModule, MgmtPlaneModule
from ._swagger_specs import SwaggerSpecs
from ._swagger_loader import SwaggerLoader
|
import os
import torch as t
from utils.config import opt
from data.dataset import Dataset, TestDataset
from torch.utils import data as data_
from tqdm import tqdm
from model import FasterRCNNVGG16
from trainer import FasterRCNNTrainer
from data.util import read_image
from utils.vis_tool import vis_bbox
from utils impo... |
x=int(input())
v=0
if x>=1 and x<=100:
for y in range(0,x):
a=input().split(' ')
z=[]
v+=1
for i in a:
if (int(i))<=1000:
try:
int(i)
z.append(int(i))
except:
continue... |
import pyDes
import base64
from secret import security
key = security.key
iv = security.iv
def encrypt(data):
    """DES-CBC encrypt *data* and return the ciphertext base64-encoded.

    Uses the module-level ``key``/``iv`` from the secret store with
    PKCS#5 padding, exactly as the companion ``decrypt`` expects.
    """
    cipher = pyDes.des(key, pyDes.CBC, iv, pad=None, padmode=pyDes.PAD_PKCS5)
    ciphertext = cipher.encrypt(data)
    return base64.b64encode(ciphertext)
def decrypt(data):
k = pyDes.des(key, pyDes.CBC, iv, pad=None, padmode=pyDes.PAD_PKCS... |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: object_detection/protos/multiscale_anchor_generator.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.p... |
##
#
# Logistic regression
#
# Y_{i} | \beta \sim \textrm{Bin}\left(n_{i},e^{x_{i}^{T}\beta}/(1+e^{x_{i}^{T}\beta})\right)
# \beta \sim N\left(\beta_{0},\Sigma_{0}\right)
#
##
import sys
import numpy as np
########################################################################################
## Handle batch job ar... |
#!/usr/bin/env python3
from .ast import Ast
from .error_logger import ErrorLogger
from .token_types import Token, TokenType
class ParseError(RuntimeError):
pass
class Parser:
def __init__(self, tokens):
self.current = 0
self.tokens = tokens
def peek(self) -> Token:
return self.... |
# coding=utf-8
import chatcommunicate
import chatcommands
from globalvars import GlobalVars
from datahandling import _remove_pickle
import collections
import io
import os
import os.path
import pytest
import threading
import time
import yaml
from fake import Fake
from unittest.mock import Mock, patch
def test_valida... |
"""
A Python implementation of the blob detector algorithm described in
Shyam Madhusudhana, Anita Murray, and Christine Erbe. (2020). "Automatic
detectors for low-frequency vocalizations of Omura’s whales, Balaenoptera
omurai: A performance comparison." The Journal of the Acoustical Society
of America. 147(4). ... |
# -*- coding: utf-8 -*-
"""
@author: J. Massey
@description: Script to check convergence and call kill if need be
@contact: [email protected]
"""
# Imports
import numpy as np
import postproc.io as io
import postproc.frequency_spectra
import postproc.visualise.plotter
import subprocess
import sys
from pathlib import ... |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-29 09:56
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('base', '0006_auto_20170828_1247'),
]
operations = [
migrations.AddField(
... |
import os, glob
import requests
from io import BytesIO
from shutil import copyfile, copytree
from zipfile import ZipFile
import param
from .element import (_Element, Feature, Tiles, # noqa (API import)
WMTS, LineContours, FilledContours, Text, Image,
Points, Path, Polyg... |
from modeling import *
d=0
n_train=1
#seed=5646
std=.3
n_shots = 8192
n_swap = 1
balanced = True
n = 200
test_size = .1
X, y = load_data(n=n, std=std)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=123, test_size=test_size)
Y_vector_train = label_to_array(y_train)
Y_vector_test = label_to_a... |
import sys
import os
import re
import curses
from atig.migration import Migration, MigrationCollection
class Atig():
def __init__(self):
self.valid_migration = re.compile(r'[0-9a-f]{12}\_.*\.py')
self.migrations = None
self.migration_collection = None
self.debug_message = 'Welcome ... |
"""Async API
This module contains the API for parallelism in TorchScript, notably:
* torch.jit.fork
* torch.jit.wait
This is not intended to be imported directly; please use the exposed
functionalities in `torch.jit`.
"""
import torch
from torch.utils import set_module
from torch.jit._builtins import _regist... |
# Build/install the npcomp-torch package.
# This uses PyTorch's setuptools support and requires an existing installation
# of npcomp-core in order to access its headers/libraries.
from pathlib import Path
from setuptools import find_packages, setup, Extension
from torch.utils import cpp_extension
try:
from npcomp ... |
# -*- coding: utf-8 -*-
"""Main module."""
import numpy as np
import scipy.stats as ss
import matplotlib.pyplot as plt
import argparse
def getinput():
input = argparse.ArgumentParser()
input.add_argument('--initial_position', type = float, default = 0, help = 'Initial position of the particle, default = 0' )... |
# -*- coding: utf-8 -*-
# @Time : 2020-06-03 11:34
# @Author : yingyuankai
# @Email : [email protected]
# @File : electra.py
import tensorflow as tf
from aispace.utils.hparams import Hparams
from aispace.layers.encoders.transformer import Transformer
from aispace.layers.pretrained.bert import BertPooler... |
from kubernetes import client
from kubernetes.watch import Watch
from loguru import logger
from .consts import CONTAINER_NAME, DEPLOYMENT_PREFIX, NAMESPACE
def create_deployment(v1, image, num_replicas):
container = client.V1Container(name=CONTAINER_NAME, image=image)
container_spec = client.V1PodSpec(contai... |
#!/usr/bin/env python3
import sys
from phockup import main
from src.printer import Printer
def _run():
    """Run phockup with the CLI arguments, exiting cleanly on Ctrl+C."""
    try:
        main(sys.argv[1:])
    except KeyboardInterrupt:
        # Graceful interrupt: print a farewell line and report success.
        Printer().empty().line('Exiting...')
        sys.exit(0)


if __name__ == '__main__':
    _run()
|
"""Command generator for running a script against a Redshift cluster.
Contains the method to compile the Redshift specific script execution command
based on generic arguments (sql script, output destination) and Redshift
specific arguments (flag values).
"""
__author__ = '[email protected]'
from absl import flags
fla... |
# Class definition of planet
# Should include the keplerian elements:
# - Semi-major axis (a) in m, km, or AU
# - Eccentricity (e) no unit: 0 -> circle
# - Inclination (i) in degrees °
# - Longitude of ascending node (o) in degrees °
# - Argument of periapsis (w) in degrees °
# - True anomaly at t0 (v0) in degrees
#
# ... |
# Copyright 2016 Twitter. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agree... |
# Generated by Django 3.1.2 on 2020-11-16 12:47
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('feedback', '0001_initial'),
('users', '0001_initial'),
]
operations = [
migrati... |
from clases.Edificio_LA import Edificio_LA
from clases.Edificio_NY import Edificio_NY
from clases.Empleado_LA import Empleados_LA
from clases.Edificio_NY import Empleados_NY
from clases.LosAngeles import LosAngeles
from clases.NewYork import NewYork
from clases.inmortal import Yin
from clases.inmortal import Yang
from ... |
#!/usr/bin/env python
import sys
import math, pdb, random
import list_funcs
import array
#import pysam
class BED_Track:
"""A BED track, following the format defined in the UCSC browser website."""
def __init__(self, trackInput, peaksTrack=False, refGenomeFilename=None):
# Each chromosome of BED items... |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------... |
import random
import uuid
from math import gcd
import numpy as np
from ._population import Population
from pychemia import Composition, Structure, pcm_log
from pychemia.analysis import StructureAnalysis, StructureChanger, StructureMatch
from pychemia.analysis.splitting import SplitMatch
from pychemia.utils.mathematics ... |
#!/usr/bin/python
""" PN DCI """
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansi... |
0:'manicTimeDur'
1:'alpiskidur'
2:'climbdur'
3:'downskidur'
4:'mtbikedur'
5:'roadbikedur'
6:'swimdur'
7:'viadur'
8:'walkdur'
9:'nbkeys'
10:'nbclicks'
11:'totcal'
12:'totsteps'
13:'alpiskical'
14:'alpiskisteps'
15:'climbcal'
16:'climbsteps'
17:'downskical'
18:'downskisteps'
19:'mtbikecal'
20:'mtbikesteps'
21:'roadbikeca... |
import copy
from distutils.version import LooseVersion
import pickle
import numpy as np
from numpy.linalg import LinAlgError
from numpy.testing import (
assert_allclose,
assert_array_almost_equal,
assert_equal,
assert_no_warnings,
assert_raises,
assert_warns,
suppress_warnings,
)
import pyt... |
from torch.utils.data import random_split, DataLoader
from pytorch_lightning.core.datamodule import LightningDataModule
from tests.base.datasets import TrialMNIST
class TrialMNISTDataModule(LightningDataModule):
def __init__(self, data_dir: str = './'):
super().__init__()
self.data_dir = data_dir... |
from django.shortcuts import render
from django.http import HttpResponse
from .forms import TestForm
import tweepy
consumer_key = '1ajVPYtNS09p27YPq8SLo54y7'
consumer_secret = 'DP7VOUXFd7sLWRWsJnhMIthxHac4MgCETUA2KSvxzVXAOfw4xl'
access_token = '142843393-tITe8g8Z3DsqZKXFoqRHGXjJwvwui1GG0pRYrLRk'
access_token_secret =... |
#!/usr/bin/env python
__doc__ = """
Script resizes all images in the folder. Parameters which can be specified by the user.
scale (default = 0.5) <= how much we downscale images e.g 0.5 makes images of 1/4 or area of orig.
modify (default = True) <= are images changed in place (origs are overwritten)
path (default = '.... |
# Generated by Django 3.1 on 2020-08-30 02:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('somms', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='somm',
name='updated',
field=... |
import json
from django.dispatch import receiver
from django.http import HttpRequest, HttpResponse
from django.template.loader import get_template
from django.urls import resolve
from django.utils.translation import gettext_lazy as _
from pretix.base.middleware import _merge_csp, _parse_csp, _render_csp
from pretix.ba... |
try :
from tools.assistant import ask_question
from tools.AI.data import data , youtube , wiki , google , youtube_play , goto_keys
from tools.AI.data import install_keys , calc_keys , should_not_learn , version_keys
from tools.wiki_search import wiki_search
from settings.logs import *
from to... |
# usr/bin/env python3
import pandas as pd
import sys
import matplotlib.pyplot as plt
def readExcel(url):
data= pd.ExcelFile(url)
print(data.sheet_names)
data_gender= data.parse('Paciente',skiprows=0)#skipfooter=0,names=[)
data_gender.set_index('idPais',inplace=True)
data_gender.plot()
plt.show... |
from datetime import date
import pandas as pd
import lxml
from nsepy import get_history
print('Enter symbol---')
cols=['Date','Open','High','Low','Close','Volume']
df_new=pd.DataFrame(columns=cols)
#sym=input()
print(df_new)
stock = input("Enter stock name(ex:GOOGL, AAPL): ")
df = get_history(symbol=stock,
... |
# -*- coding: utf-8 -*-
"""
fix_self_assert - lib2to3 fix for replacing assertXXX() method calls
by their larky assertpy (assertion library for larky equivalent).
"""
#
# Mostly inspired by Hartmut Goebel <[email protected]>
# and the amazing project of unittest2pytest.
#
# Obligatory license...
# unittest2p... |
from python_structure.data_structures.linked_list.node import Node
class LinkedList:
def __init__(self):
self.head = None
def append_node(self, data):
"""
Add a Node to the Linked List
:param data:
:return:
"""
if not self.head:
self.head = ... |
from ..factory import Type
class publicMessageLink(Type):
    """API type describing a public link to a message.

    NOTE(review): appears auto-generated; attributes are presumably filled
    in by the deserializer in ``..factory`` — confirm against the factory
    module. The trailing ``# type:`` comments record the wire types.
    """
    link = None # type: "string"
    html = None # type: "string"
|
'''OpenGL extension SGIX.blend_alpha_minmax
Automatically generated by the get_gl_extensions script, do not edit!
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions
from OpenGL.GL import glget
import ctypes
EXTENSION_NAME = 'GL_SGIX_blend_alpha_minmax'
_DEPRECATED = False
GL_ALP... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.