| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
tmenjo/cinder-2015.1.0
|
cinder/volume/manager.py
|
Python
|
apache-2.0
| 115,641
| 0
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Volume manager manages creating, attaching, detaching, and persistent storage.
Persistent storage volumes keep their state independent of instances. You can
attach to an instance, terminate the instance, spawn a new instance (even
one from a different image) and re-attach the volume with the same data
intact.
**Related Flags**
:volume_topic: What :mod:`rpc` topic to listen to (default: `cinder-volume`).
:volume_manager: The module name of a class derived from
:class:`manager.Manager` (default:
:class:`cinder.volume.manager.Manager`).
:volume_driver: Used by :class:`Manager`. Defaults to
:class:`cinder.volume.drivers.lvm.LVMISCSIDriver`.
:volume_group: Name of the group that will contain exported volumes (default:
`cinder-volumes`)
:num_shell_tries: Number of times to attempt to run commands (default: 3)
"""
import time
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging as messaging
from oslo_serialization import jsonutils
from oslo_utils import excutils
from oslo_utils import importutils
from oslo_utils import timeutils
from oslo_utils import uuidutils
from osprofiler import profiler
import six
from taskflow import exceptions as tfe
from cinder import compute
from cinder import context
from cinder import exception
from cinder import flow_utils
from cinder.i18n import _, _LE, _LI, _LW
from cinder.image import glance
from cinder import manager
from cinder.openstack.common import periodic_task
from cinder import quota
from cinder import utils
from cinder.volume import configuration as config
from cinder.volume.flows.manager import create_volume
from cinder.volume.flows.manager import manage_existing
from cinder.volume import rpcapi as volume_rpcapi
from cinder.volume import utils as vol_utils
from cinder.volume import volume_types
from eventlet import greenpool
LOG = logging.getLogger(__name__)
QUOTAS = quota.QUOTAS
CGQUOTAS = quota.CGQUOTAS
VALID_REMOVE_VOL_FROM_CG_STATUS = ('available', 'in-use',)
VALID_CREATE_CG_SRC_SNAP_STATUS = ('available',)
volume_manager_opts = [
cfg.StrOpt('volume_driver',
default='cinder.volume.drivers.lvm.LVMISCSIDriver',
help='Driver to use for volume creation'),
cfg.IntOpt('migration_create_volume_timeout_secs',
default=300,
help='Timeout for creating the volume to migrate to '
'when performing volume migration (seconds)'),
cfg.BoolOpt('volume_service_inithost_offload',
default=False,
help='Offload pending volume delete during '
'volume service startup'),
cfg.StrOpt('zoning_mode',
default='none',
help='FC Zoning mode configured'),
cfg.StrOpt('extra_capabilities',
default='{}',
help='User defined capabilities, a JSON formatted string '
'specifying key/value pairs. The key/value pairs can '
'be used by the CapabilitiesFilter to select between '
'backends when requests specify volume types. For '
'example, specifying a service level or the geographical '
'location of a backend, then creating a volume type to '
'allow the user to select by these different '
'properties.'),
]
CONF = cfg.CONF
CONF.register_opts(volume_manager_opts)
MAPPING = {
'cinder.volume.drivers.huawei.huawei_hvs.HuaweiHVSISCSIDriver':
'cinder.volume.drivers.huawei.huawei_18000.Huawei18000ISCSIDriver',
'cinder.volume.drivers.huawei.huawei_hvs.HuaweiHVSFCDriver':
'cinder.volume.drivers.huawei.huawei_18000.Huawei18000FCDriver',
'cinder.volume.drivers.fujitsu_eternus_dx_fc.FJDXFCDriver':
'cinder.volume.drivers.fujitsu.eternus_dx_fc.FJDXFCDriver',
'cinder.volume.drivers.fujitsu_eternus_dx_iscsi.FJDXISCSIDriver':
'cinder.volume.drivers.fujitsu.eternus_dx_iscsi.FJDXISCSIDriver', }
def locked_volume_operation(f):
"""Lock decorator for volume operations.
Takes a named lock prior to executing the operation. The lock is named with
the operation executed and the id of the volume. This lock can then be used
by other operations to avoid operation conflicts on shared volumes.
Example use:
If a volume operation uses this decorator, it will block until the named
lock is free. This is used to protect concurrent operations on the same
volume e.g. delete VolA while create volume VolB from VolA is in progress.
"""
def lvo_inner1(inst, context, volume_id, **kwargs):
@utils.synchronized("%s-%s" % (volume_id, f.__name__), external=True)
def lvo_inner2(*_args, **_kwargs):
return f(*_args, **_kwargs)
return lvo_inner2(inst, context, volume_id, **kwargs)
return lvo_inner1
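# Hedged usage sketch (not part of the original file): a manager method guarded
# by the decorator above. The lock name becomes "<volume_id>-delete_volume", so
# a concurrent create-from-VolA blocks a delete of VolA. The method body is a
# placeholder.
#
#   @locked_volume_operation
#   def delete_volume(self, context, volume_id, **kwargs):
#       ...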
def locked_detach_operation(f):
"""Lock decorator for volume detach operations.
Takes a named lock prior to executing the detach call. The lock is named
with the operation executed and the id of the volume. This lock can then
be used by other operations to avoid operation conflicts on shared volumes.
This locking mechanism is only for detach calls. We can't use the
locked_volume_operation, because detach requires an additional
attachment_id in the parameter list.
"""
def ldo_inner1(inst, context, volume_id, attachment_id=None, **kwargs):
@utils.synchronized("%s-%s" % (volume_id, f.__name__), external=True)
def ldo_inner2(*_args, **_kwargs):
return f(*_args, **_kwargs)
return ldo_inner2(inst, context, volume_id, attachment_id, **kwargs)
return ldo_inner1
def locked_snapshot_operation(f):
"""Lock decorator for snapshot operations.
Takes a named lock prior to executing the operation. The lock is named with
the operation executed and the id of the snapshot. This lock can then be
used by other operations to avoid operation conflicts on shared snapshots.
Example use:
If a snapshot operation uses this decorator, it will block until the named
lock is free. This is used to protect concurrent operations on the same
snapshot e.g. delete SnapA while create volume VolA from SnapA is in
progress.
"""
def lso_inner1(inst, context, snapshot, **kwargs):
@utils.synchronized("%s-%s" % (snapshot.id, f.__name__), external=True)
def lso_inner2(*_args, **_kwargs):
return f(*_args, **_kwargs)
return lso_inner2(inst, context, snapshot, **kwargs)
return lso_inner1
class VolumeManager(manager.SchedulerDependentManager):
"""Manages attachable block storage devices."""
RPC_API_VERSION = '1.23'
target = messaging.Target(version=RPC_API_VERSION)
def __init__(self, volume_driver=None, service_name=None,
*args, **kwargs):
"""Load the driver from the one specified in args, or from flags."""
# update_service_capabilities needs service_name to be volume
super(VolumeManager, self).__init__(service_name='volume',
*args, **kwargs)
self.configuration = config.Configuration(volume_manager_opts,
config_group=service_name)
self._tp = greenpool.GreenPool()
self.stats = {}
if
|
joeyginorio/Action-Understanding-with-Rational-Rules
|
model_src/grid_world.py
|
Python
|
mit
| 9,591
| 0.033886
|
# Joey Velez-Ginorio
# Gridworld Implementation
# ---------------------------------
from mdp import MDP
from grid import Grid
from scipy.stats import uniform
from scipy.stats import beta
from scipy.stats import expon
import numpy as np
import random
import pyprind
import matplotlib.pyplot as plt
class GridWorld(MDP):
"""
Defines a gridworld environment to be solved by an MDP!
"""
def __init__(self, grid, goalVals, discount=.99, tau=.01, epsilon=.001):
MDP.__init__(self, discount=discount, tau=tau, epsilon=epsilon)
self.goalVals = goalVals
self.grid = grid
self.setGridWorld()
self.valueIteration()
self.extractPolicy()
def isTerminal(self, state):
"""
Specifies terminal conditions for gridworld.
"""
return True if tuple(self.scalarToCoord(state)) in self.grid.objects.values() else False
def isObstacle(self, sCoord):
"""
Checks if a state is a wall or obstacle.
"""
if tuple(sCoord) in self.grid.walls:
return True
if sCoord[0] > (self.grid.row - 1) or sCoord[0] < 0:
return True
if sCoord[1] > (self.grid.col - 1) or sCoord[1] < 0:
return True
return False
def takeAction(self, sCoord, action):
"""
Receives an action value, performs associated movement.
"""
if action == 0:
return self.up(sCoord)
if action == 1:
return self.down(sCoord)
if action == 2:
return self.left(sCoord)
if action == 3:
return self.right(sCoord)
if action == 4:
return sCoord
if action == 5:
return self.upleft(sCoord)
if action == 6:
return self.upright(sCoord)
if action == 7:
return self.downleft(sCoord)
if action == 8:
return self.downright(sCoord)
def up(self, sCoord):
"""
Move agent up, uses state coordinate.
"""
newCoord = np.copy(sCoord)
newCoord[0] -= 1
# Check if action takes you to a wall/obstacle
if not self.isObstacle(newCoord):
return newCoord
# You hit a wall, return original coord
else:
return sCoord
def upright(self, sCoord):
"""
Move agent up and right, uses state coordinate.
"""
newCoord = np.copy(sCoord)
newCoord[0] -= 1
newCoord[1] += 1
# Check if action takes you to a wall/obstacle
if not self.isObstacle(newCoord):
return newCoord
# You hit a wall, return original coord
else:
return sCoord
def upleft(self, sCoord):
"""
Move agent up and left, uses state coordinate.
"""
newCoord = np.copy(sCoord)
newCoord[0] -= 1
newCoord[1] -= 1
# Check if action takes you to a wall/obstacle
if not self.isObstacle(newCoord):
return newCoord
# You hit a wall, return original coord
else:
return sCoord
def down(self, sCoord):
"""
Move agent down, uses state coordinate.
"""
newCoord = np.copy(sCoord)
newCoord[0] += 1
# Check if action takes you to a wall/obstacle
if not self.isObstacle(newCoord):
return newCoord
# You hit a wall, return original coord
else:
return sCoord
def downleft(self, sCoord):
"""
Move agent down and left, uses state coordinate.
"""
newCoord = np.copy(sCoord)
newCoord[0] += 1
newCoord[1] -= 1
# Check if action takes you to a wall/obstacle
if not self.isObstacle(newCoord):
return newCoord
# You hit a wall, return original coord
else:
return sCoord
def downright(self, sCoord):
"""
Move agent down and right, uses state coordinate.
"""
newCoord = np.copy(sCoord)
newCoord[0] += 1
newCoord[1] += 1
# Check if action takes you to a wall/obstacle
if not self.isObstacle(newCoord):
return newCoord
# You hit a wall, return original coord
else:
return sCoord
def left(self, sCoord):
"""
Move agent left, uses state coordinate.
"""
newCoord = np.copy(sCoord)
newCoord[1] -= 1
# Check if action takes you to a wall/obstacle
if not self.isObstacle(newCoord):
return newCoord
# You hit a wall, return original coord
else:
return sCoord
def right(self, sCoord):
"""
Move agent right, uses state coordinate.
"""
newCoord = np.copy(sCoord)
newCoord[1] += 1
# Check if action takes you to a wall/obstacle
if not self.isObstacle(newCoord):
return newCoord
# You hit a wall, return original coord
else:
return sCoord
def coordToScalar(self, sCoord):
"""
Convert state coordinates to corresponding scalar state value.
"""
return sCoord[0]*(self.grid.col) + sCoord[1]
def scalarToCoord(self, scalar):
"""
Convert scalar state value into coordinates.
"""
return np.array([scalar / self.grid.col, scalar % self.grid.col])
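# Worked example (added for clarity, not in the original source): on a grid
# with self.grid.col == 4, coordToScalar([1, 2]) gives 1*4 + 2 == 6, and
# scalarToCoord(6) gives [6 // 4, 6 % 4] == [1, 2], so the two mappings are
# inverses of each other (integer division under Python 2).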
def getPossibleActions(self, sCoord):
"""
Will return a list of all possible actions from a current state.
"""
possibleActions = list()
if self.up(sCoord) is not sCoord:
possibleActions.append(0)
if self.down(sCoord) is not sCoord:
possibleActions.append(1)
if self.left(sCoord) is not sCoord:
possibleActions.append(2)
if self.right(sCoord) is not sCoord:
possibleActions.append(3)
if self.upleft(sCoord) is not sCoord:
possibleActions.append(5)
if self.upright(sCoord) is not sCoord:
possibleActions.append(6)
if self.downleft(sCoord) is not sCoord:
possibleActions.append(7)
if self.downright(sCoord) is not sCoord:
possibleActions.append(8)
return possibleActions
def setGridWorld(self):
"""
Initializes states, actions, rewards, transition matrix.
"""
# Possible coordinate positions + Death State
self.s = np.arange(self.grid.row*self.grid.col + 1)
# 9 Actions {Up, Down, Left, Right, Stay, and the four diagonals}
self.a = np.arange(9)
# Reward Zones
self.r = np.zeros(len(self.s))
for i in range(len(self.grid.objects)):
self.r[self.coordToScalar(self.grid.objects.values()[i])] = self.goalVals[i]
self.r_sa = np.zeros([len(self.s),len(self.a)])
for i in range(len(self.s)):
for j in range(len(self.a)):
if j <= 4:
self.r_sa[i][j] = self.r[self.coordToScalar(self.takeAction(self.scalarToCoord(i),j))]-1.0
else:
self.r_sa[i][j] = self.r[self.coordToScalar(self.takeAction(self.scalarToCoord(i),j))]-np.sqrt(2)
self.r = self.r_sa
# Transition Matrix
self.t = np.zeros([len(self.s),len(self.a),len(self.s)])
for state in range(len(self.s)):
possibleActions = self.getPossibleActions(self.scalarToCoord(state))
if self.isTerminal(state):
for i in range(len(self.a)):
if i == 4:
self.t[state][4][state]=1.0
else:
self.t[state][i][len(self.s)-1] = 1.0
continue
for action in self.a:
# Up
if action == 0:
currentState = self.scalarToCoord(state)
nextState = self.takeAction(currentState, 0)
self.t[state][action][self.coordToScalar(nextState)] = 1.0
if action == 1:
currentState = self.scalarToCoord(state)
nextState = self.takeAction(currentState, 1)
self.t[state][action][self.coordToScalar(nextState)] = 1.0
if action == 2:
currentState = self.scalarToCoord(state)
nextState = self.takeAction(currentState, 2)
self.t[state][action][self.coordToScalar(nextState)] = 1.0
if action == 3:
currentState = self.scalarToCoord(state)
nextState = self.takeAction(currentState, 3)
self.t[state][action][self.coordToScalar(nextState)] = 1.0
if action == 4:
self.t[state][action][state] = 1.0
if action == 5:
currentState = self.scalarToCoord(state)
nextState = self.takeAction(currentState, 5)
self.t[state][action][self.coordToScalar(nextState)] = 1.0
if action == 6:
currentState = self.scalarToCoord(state)
nextState = self.takeAction(currentState, 6)
self.t[state][action][self.coordToScalar(nextState)] = 1.0
if action == 7:
currentState = self.scalarToCoord(state)
nextState = self.takeAction(currentState, 7)
self.t[state][action][self.coordToScalar(nextState)] = 1.0
if action == 8:
currentState = self.scalarToCoord(state)
nextState = self.takeAction(currentState, 8)
self.t[state][action][self.coordToScalar(nextState)] = 1.0
def simulate(self, state):
"""
Runs the solver for the MDP, conducts value iteration, extracts policy,
then runs simulation of problem.
NOTE: Be sure to run value iteration (solve
|
justincely/classwork
|
UMD/AST615/HW6_2/plot_orbit.py
|
Python
|
bsd-3-clause
| 1,806
| 0.035991
|
import pylab
import string
import matplotlib
matplotlib.rcParams['figure.subplot.hspace']=.45
matplotlib.rcParams['figure.subplot.wspace']=.3
labels=('Step=1','Step=.5','Step=.25','Step=.01')
steps=(1,.5,.25,.01)
pylab.figure(figsize=(8.5,11))
for i,intxt in enumerate(('O_RK1.txt','O_RK_5.txt','O_RK_25.txt','O_RK_1.txt')):
infile=open(intxt,'r')
t=[]
|
xs=[]
ys=[]
Es=[]
for line in infile.readlines():
line=string.split(line)
t.append(float(line[0]))
xs.append(float(line[1]))
ys.append(float(line[2]))
Es.append(float(line[5]))
pylab.subplot(4,2,2*i+1)
pylab.plot(xs,ys,'-',lw=2)
pylab.ylim(-1,1)
pylab.xlim(-1,1)
pylab.xlabel('X')
pylab.ylabel('Y')
pylab.title('Step=%f'%(steps[i]))
pylab.subplot(4,2,2*i+2)
pylab.plot(t,Es,'-',lw=1)
pylab.xlim(0,100)
pylab.xlabel('Time')
pylab.ylabel('Energy')
pylab.suptitle('RK4 Orbit Integration')
pylab.savefig('RK4_orbit_int.pdf')
pylab.close()
pylab.figure(figsize=(8.5,11))
for i,intxt in enumerate(('O_LF1.txt','O_LF_5.txt','O_LF_25.txt','O_LF_1.txt')):
infile=open(intxt,'r')
t=[]
xs=[]
ys=[]
Es=[]
for line in infile.readlines():
line=string.split(line)
t.append(float(line[0]))
xs.append(float(line[1]))
ys.append(float(line[2]))
Es.append(float(line[5]))
pylab.subplot(4,2,2*i+1)
pylab.plot(xs,ys,'-',lw=2)
pylab.ylim(-1,1)
pylab.xlim(-1,1)
pylab.xlabel('X')
pylab.ylabel('Y')
pylab.title('Step=%f'%(steps[i]))
pylab.subplot(4,2,2*i+2)
pylab.plot(t,Es,'-',lw=1)
pylab.xlim(0,100)
pylab.xlabel('Time')
pylab.ylabel('Energy')
pylab.suptitle('Leapfrog Orbit integration')
pylab.savefig('Leapfrog_orbit_int.pdf')
pylab.close()
|
datawire/ambassador
|
python/tests/kat/t_ingress.py
|
Python
|
apache-2.0
| 16,179
| 0.00309
|
import os
import sys
import json
import pytest
import subprocess
import time
from kat.harness import Query, is_ingress_class_compatible
from abstract_tests import AmbassadorTest, HTTP, ServiceType
from kat.utils import namespace_manifest
from tests.utils import KUBESTATUS_PATH
from ambassador.utils import parse_bool
class IngressStatusTest1(AmbassadorTest):
status_update = {
"loadBalancer": {
"ingress": [{
"ip": "42.42.42.42"
}]
}
}
def init(self):
self.target = HTTP()
def manifests(self) -> str:
return """
---
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
annotations:
kubernetes.io/ingress.class: ambassador
getambassador.io/ambassador-id: {self.ambassador_id}
name: {self.name.k8s}
spec:
rules:
- http:
paths:
- backend:
serviceName: {self.target.path.k8s}
servicePort: 80
path: /{self.name}/
""" + super().manifests()
def queries(self):
if sys.platform != 'darwin':
text = json.dumps(self.status_update)
update_cmd = [KUBESTATUS_PATH, 'Service', '-n', 'default', '-f', f'metadata.name={self.name.k8s}', '-u', '/dev/fd/0']
subprocess.run(update_cmd, input=text.encode('utf-8'), timeout=10)
# If you run these tests individually, the time between running kubestatus
# and the ingress resource actually getting updated is longer than the
# time spent waiting for resources to be ready, so this test will fail (most of the time)
time.sleep(1)
yield Query(self.url(self.name + "/"))
yield Query(self.url(f'need-normalization/../{self.name}/'))
def check(self):
if not parse_bool(os.environ.get("AMBASSADOR_PYTEST_INGRESS_TEST", "false")):
pytest.xfail('AMBASSADOR_PYTEST_INGRESS_TEST not set, xfailing...')
if sys.platform == 'darwin':
pytest.xfail('not supported on Darwin')
for r in self.results:
if r.backend:
assert r.backend.name == self.target.path.k8s, (r.backend.name, self.target.path.k8s)
assert r.backend.request.headers['x-envoy-original-path'][0] == f'/{self.name}/'
# check for Ingress IP here
ingress_cmd = ["kubectl", "get", "-n", "default", "-o", "json", "ingress", self.path.k8s]
ingress_run = subprocess.Popen(ingress_cmd, stdout=subprocess.PIPE)
ingress_out, _ = ingress_run.communicate()
ingress_json = json.loads(ingress_out)
assert ingress_json['status'] == self.status_update, f"Expected Ingress status to be {self.status_update}, got {ingress_json['status']} instead"
class IngressStatusTest2(AmbassadorTest):
status_update = {
"loadBalancer": {
"ingress": [{
"ip": "84.84.84.84"
}]
}
}
def init(self):
self.target = HTTP()
def manifests(self) -> str:
return """
---
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
annotations:
kubernetes.io/ingress.class: ambassador
getambassador.io/ambassador-id: {self.ambassador_id}
name: {self.name.k8s}
spec:
rules:
- http:
paths:
- backend:
serviceName: {self.target.path.k8s}
servicePort: 80
path: /{self.name}/
""" + super().manifests()
def queries(self):
if sys.platform != 'darwin':
text = json.dumps(self.status_update)
update_cmd = [KUBESTATUS_PATH, 'Service', '-n', 'default', '-f', f'metadata.name={self.name.k8s}', '-u', '/dev/fd/0']
subprocess.run(update_cmd, input=text.encode('utf-8'), timeout=10)
# If you run these tests individually, the time between running kubestatus
# and the ingress resource actually getting updated is longer than the
# time spent waiting for resources to be ready, so this test will fail (most of the time)
time.sleep(1)
yield Query(self.url(self.name + "/"))
yield Query(self.url(f'need-normalization/../{self.name}/'))
def check(self):
if not parse_bool(os.environ.get("AMBASSADOR_PYTEST_INGRESS_TEST", "false")):
pytest.xfail('AMBASSADOR_PYTEST_INGRESS_TEST not set, xfailing...')
if sys.platform == 'darwin':
pytest.xfail('not supported on Darwin')
for r in self.results:
if r.backend:
assert r.backend.name == self.target.path.k8s, (r.backend.name, self.target.path.k8s)
assert r.backend.request.headers['x-envoy-original-path'][0] == f'/{self.name}/'
# check for Ingress IP here
ingress_cmd = ["kubectl", "get", "-n", "default", "-o", "json", "ingress", self.path.k8s]
ingress_run = subprocess.Popen(ingress_cmd, stdout=subprocess.PIPE)
ingress_out, _ = ingress_run.communicate()
ingress_json = json.loads(ingress_out)
assert ingress_json['status'] == self.status_update, f"Expected Ingress status to be {self.status_update}, got {ingress_json['status']} instead"
class IngressStatusTestAcrossNamespaces(AmbassadorTest):
status_update = {
"loadBalancer": {
"ingress": [{
"ip": "168.168.168.168"
}]
}
}
def init(self):
self.target = HTTP(namespace="alt-namespace")
def manifests(self) -> str:
return namespace_manifest("alt-namespace") + """
---
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
annotations:
kubernetes.io/ingress.class: ambassador
getambassador.io/ambassador-id: {self.ambassador_id}
name: {self.name.k8s}
namespace: alt-namespace
spec:
rules:
- http:
paths:
- backend:
serviceName: {self.target.path.k8s}
servicePort: 80
path: /{self.name}/
""" + super().manifests()
def queries(self):
if sys.platform != 'darwin':
text = json.dumps(self.status_update)
update_cmd = [KUBESTATUS_PATH, 'Service', '-n', 'default', '-f', f'metadata.name={self.name.k8s}', '-u', '/dev/fd/0']
subprocess.run(update_cmd, input=text.encode('utf-8'), timeout=10)
# If you run these tests individually, the time between running kubestatus
# and the ingress resource actually getting updated is longer than the
# time spent waiting for resources to be ready, so this test will fail (most of the time)
time.sleep(1)
yield Query(self.url(self.name + "/"))
yield Query(self.url(f'need-normalization/../{self.name}/'))
def check(self):
if not parse_bool(os.environ.get("AMBASSADOR_PYTEST_INGRESS_TEST", "false")):
pytest.xfail('AMBASSADOR_PYTEST_INGRESS_TEST not set, xfailing...')
if sys.platform == 'darwin':
pytest.xfail('not supported on Darwin')
for r in self.results:
if r.backend:
assert r.backend.name == self.target.path.k8s, (r.backend.name, self.target.path.k8s)
assert r.backend.request.headers['x-envoy-original-path'][0] == f'/{self.name}/'
# check for Ingress IP here
ingress_cmd = ["kubectl", "get", "-o", "json", "ingress", self.path.k8s, "-n", "alt-namespace"]
ingress_run = subprocess.Popen(ingress_cmd, stdout=subprocess.PIPE)
ingress_out, _ = ingress_run.communicate()
ingress_json = json.loads(ingress_out)
assert ingress_json['status'] == self.status_update, f"Expected Ingress status to be {self.status_update}, got {ingress_json['status']} instead"
class IngressStatusTestWithAnnotations(AmbassadorTest):
status_update = {
"loadBalancer": {
"ingress": [{
"ip": "200.200.200.200"
}]
}
}
def init(self):
self.target = HTTP()
def manifests(self) -> str:
return """
---
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
annotations:
getambassador.io/config: |
---
apiVersion: ambassador/v1
kind: Mapping
name: {self.name}-nested
prefix: /{s
|
quattor/aquilon
|
tests/broker/test_add_alias.py
|
Python
|
apache-2.0
| 18,764
| 0.001439
|
#!/usr/bin/env python
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2011,2012,2013,2014,2015,2016,2017 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing the add/show alias command."""
import unittest
if __name__ == '__main__':
from broker import utils
utils.import_depends()
from broker.brokertest import TestBrokerCommand
from eventstest import EventsTestMixin
class TestAddAlias(EventsTestMixin, TestBrokerCommand):
def test_100_add_alias2host(self):
self.event_add_dns(
fqdn='alias2host.aqd-unittest.ms.com',
dns_environment='internal',
dns_records=[
{
'target': 'arecord13.aqd-unittest.ms.com',
'targetEnvironmentName': 'internal',
'rrtype': 'CNAME'
},
],
)
cmd = ['add', 'alias', '--fqdn', 'alias2host.aqd-unittest.ms.com',
'--target', 'arecord13.aqd-unittest.ms.com']
self.noouttest(cmd)
self.events_verify()
def test_105_add_aliasduplicate(self):
cmd = ['add', 'alias', '--fqdn', 'alias2host.aqd-unittest.ms.com',
'--target', 'arecord13.aqd-unittest.ms.com']
out = self.badrequesttest(cmd)
self.matchoutput(out, "Alias alias2host.aqd-unittest.ms.com "
"already exists.", cmd)
def test_110_mscom_alias(self):
self.event_add_dns(
fqdn='alias.ms.com',
dns_environment='internal',
dns_records=[
{
'target': 'arecord13.aqd-unittest.ms.com',
'targetEnvironmentName': 'internal',
'rrtype': 'CNAME'
},
],
)
cmd = ['add', 'alias', '--fqdn', 'alias.ms.com',
'--target', 'arecord13.aqd-unittest.ms.com',
'--comments', 'Some alias comments']
self.dsdb_expect("add_host_alias "
"-host_name arecord13.aqd-unittest.ms.com "
"-alias_name alias.ms.com "
"-comments Some alias comments")
self.noouttest(cmd)
self.dsdb_verify()
self.events_verify()
def test_120_conflict_a_record(self):
cmd = ['add', 'alias', '--fqdn', 'arecord14.aqd-unittest.ms.com',
'--target', 'arecord13.aqd-unittest.ms.com']
out = self.badrequesttest(cmd)
self.matchoutput(out, "DNS Record arecord14.aqd-unittest.ms.com "
"already exists.", cmd)
def test_130_conflict_reserver_name(self):
cmd = ['add', 'alias', '--fqdn', 'nyaqd1.ms.com',
'--target', 'arecord13.aqd-unittest.ms.com']
out = self.badrequesttest(cmd)
self.matchoutput(out, "Reserved Name nyaqd1.ms.com already exists.", cmd)
def test_140_restricted_domain(self):
cmd = ["add", "alias", "--fqdn", "foo.restrict.aqd-unittest.ms.com",
"--target", "arecord13.aqd-unittest.ms.com"]
out = self.badrequesttest(cmd)
self.matchoutput(out,
"DNS Domain restrict.aqd-unittest.ms.com is "
"restricted, aliases are not allowed.",
|
cmd)
def test_150_add_alias2diff_environment(self):
self.event_add_dns(
fqdn='alias2host.aqd-unittest-ut-env.ms.com',
dns_environment='ut-env',
dns_records=[
{
'target': 'arecord13.aqd-unittest.ms.com',
'targetEnvironmentName': 'internal',
'rrtype': 'CNAME'
},
],
)
cmd = ['add', 'alias', '--fqdn', 'alias2host.aqd-unittest-ut-env.ms.com',
'--dns_environment', 'ut-env',
'--target', 'arecord13.aqd-unittest.ms.com',
'--target_environment', 'internal']
self.noouttest(cmd)
self.events_verify()
def test_155_add_alias2explicit_target_environment(self):
cmd = ['add', 'alias', '--fqdn', 'alias2alias.aqd-unittest-ut-env.ms.com',
'--dns_environment', 'ut-env',
'--target', 'alias2host.aqd-unittest-ut-env.ms.com',
'--target_environment', 'ut-env']
self.noouttest(cmd)
def test_160_add_alias_with_fqdn_in_diff_environment(self):
cmd = ['add', 'alias', '--fqdn', 'alias13.aqd-unittest.ms.com',
'--dns_environment', 'ut-env',
'--target', 'arecord13.aqd-unittest.ms.com',
'--target_environment', 'internal']
self.noouttest(cmd)
def test_200_autocreate_target(self):
cmd = ["add", "alias", "--fqdn", "restrict1.aqd-unittest.ms.com",
"--target", "target.restrict.aqd-unittest.ms.com"]
out = self.statustest(cmd)
self.matchoutput(out,
"WARNING: Will create a reference to "
"target.restrict.aqd-unittest.ms.com, but ",
cmd)
def test_201_verify_autocreate(self):
cmd = ["search", "dns", "--fullinfo",
"--fqdn", "target.restrict.aqd-unittest.ms.com"]
out = self.commandtest(cmd)
self.matchoutput(out,
"Reserved Name: target.restrict.aqd-unittest.ms.com",
cmd)
def test_201_verify_noprimary(self):
cmd = ["search", "dns", "--noprimary_name",
"--record_type", "reserved_name"]
out = self.commandtest(cmd)
self.matchoutput(out, "target.restrict.aqd-unittest.ms.com", cmd)
def test_210_autocreate_second_alias(self):
cmd = ["add", "alias", "--fqdn", "restrict2.aqd-unittest.ms.com",
"--target", "target.restrict.aqd-unittest.ms.com"]
self.noouttest(cmd)
def test_220_restricted_alias_no_dsdb(self):
cmd = ["add", "alias", "--fqdn", "restrict.ms.com",
"--target", "no-dsdb.restrict.aqd-unittest.ms.com"]
out = self.statustest(cmd)
self.matchoutput(out,
"WARNING: Will create a reference to "
"no-dsdb.restrict.aqd-unittest.ms.com, but ",
cmd)
self.dsdb_verify(empty=True)
def test_400_verify_alias2host(self):
cmd = "show alias --fqdn alias2host.aqd-unittest.ms.com"
out = self.commandtest(cmd.split(" "))
self.matchoutput(out, "Alias: alias2host.aqd-unittest.ms.com", cmd)
self.matchoutput(out, "Target: arecord13.aqd-unittest.ms.com", cmd)
self.matchoutput(out, "DNS Environment: internal", cmd)
def test_405_verify_host_shows_alias(self):
cmd = "show address --fqdn arecord13.aqd-unittest.ms.com"
out = self.commandtest(cmd.split(" "))
self.matchoutput(out, "Aliases: alias.ms.com, "
"alias13.aqd-unittest.ms.com [environment: ut-env], "
"alias2alias.aqd-unittest-ut-env.ms.com [environment: ut-env], "
"alias2host.aqd-unittest-ut-env.ms.com [environment: ut-env], "
"alias2host.aqd-unittest.ms.com", cmd)
def test_410_verify_mscom_alias(self):
cmd = "show alias --fqdn alias.ms.com"
out = self.commandtest(cmd.split(" "))
self.matchoutput(out, "Alias: alias.ms.com", cmd)
self.matchoutput(out, "Target: arecord13.aqd-unittest.ms.com", cmd)
self.matchoutput(out, "DNS Environment: internal", cmd)
self.matchoutput(out, "
|
dbbhattacharya/kitsune
|
kitsune/sumo/tests/test_googleanalytics.py
|
Python
|
bsd-3-clause
| 15,153
| 0
|
from datetime import date
from mock import patch
from nose.tools import eq_
from kitsune.sumo import googleanalytics
from kitsune.sumo.tests import TestCase
from kitsune.wiki.tests import document, revision
class GoogleAnalyticsTests(TestCase):
"""Tests for the Google Analytics API helper."""
@patch.object(googleanalytics, '_build_request')
def test_visitors(self, _build_request):
"""Test googleanalytics.visitors()."""
execute = _build_request.return_value.get.return_value.execute
execute.return_value = VISITORS_RESPONSE
visits = googleanalytics.visitors(
date(2013, 01, 16), date(2013, 01, 16))
eq_(1, len(visits))
eq_(382719, visits['2013-01-16'])
@patch.object(googleanalytics, '_build_request')
def test_visitors_by_locale(self, _build_request):
"""Test googleanalytics.visits_by_locale()."""
|
execute = _build_request.return_value.get.return_value.execute
execute.return_value = VISITORS_BY_LOCALE_RESPONSE
visits = googleanalytics.visitors_by_locale(
date(2013, 01, 16), date(2013, 01, 16))
eq_(50, len(visits))
eq_(221447, visits['en-US'])
eq_(24432, visits['es'])
@patch.object(googleanalytics, '_build_request')
def test_pageviews_by_document(self, _build_request):
"""Test googleanalytics.pageviews_by_document()."""
execute = _build_request.return_value.get.return_value.execute
execute.return_value = PAGEVIEWS_BY_DOCUMENT_RESPONSE
# Add some documents that match the response data.
documents = []
for i in range(1, 6):
documents.append(revision(
document=document(slug='doc-%s' % i, save=True),
is_approved=True,
save=True).document)
pageviews = googleanalytics.pageviews_by_document(
date(2013, 01, 16), date(2013, 01, 16))
eq_(5, len(pageviews))
eq_(1, pageviews[documents[0].pk])
eq_(2, pageviews[documents[1].pk])
eq_(10, pageviews[documents[2].pk])
eq_(39, pageviews[documents[3].pk])
eq_(46, pageviews[documents[4].pk])
@patch.object(googleanalytics, '_build_request')
def test_pageviews_by_question(self, _build_request):
"""Test googleanalytics.pageviews_by_question()."""
execute = _build_request.return_value.get.return_value.execute
execute.return_value = PAGEVIEWS_BY_QUESTION_RESPONSE
pageviews = googleanalytics.pageviews_by_question(
date(2013, 01, 16), date(2013, 01, 16))
eq_(3, len(pageviews))
eq_(3, pageviews[1])
eq_(2, pageviews[2])
eq_(11, pageviews[3])
@patch.object(googleanalytics, '_build_request')
def test_search_ctr(self, _build_request):
"""Test googleanalytics.search_ctr()."""
execute = _build_request.return_value.get.return_value.execute
execute.return_value = SEARCH_CTR_RESPONSE
ctr = googleanalytics.search_ctr(
date(2013, 6, 6), date(2013, 6, 6))
eq_(1, len(ctr))
eq_(74.88925980111263, ctr['2013-06-06'])
VISITORS_RESPONSE = {
u'kind': u'analytics#gaData',
u'rows': [[u'382719']], # <~ The number we are looking for.
u'containsSampledData': False,
u'profileInfo': {
u'webPropertyId': u'UA-1234567890',
u'internalWebPropertyId': u'1234567890',
u'tableId': u'ga:1234567890',
u'profileId': u'1234567890',
u'profileName': u'support.mozilla.org - Production Only',
u'accountId': u'1234567890'},
u'itemsPerPage': 1000,
u'totalsForAllResults': {
u'ga:visitors': u'382719'},
u'columnHeaders': [
{u'dataType': u'INTEGER',
u'columnType': u'METRIC',
u'name': u'ga:visitors'}],
u'query': {
u'max-results': 1000,
u'dimensions': u'',
u'start-date': u'2013-01-16',
u'start-index': 1,
u'ids': u'ga:1234567890',
u'metrics': [u'ga:visitors'],
u'end-date': u'2013-01-16'
},
u'totalResults': 1,
u'id': ('https://www.googleapis.com/analytics/v3/data/ga'
'?ids=ga:1234567890&metrics=ga:visitors&start-date=2013-01-16'
'&end-date=2013-01-16'),
u'selfLink': ('https://www.googleapis.com/analytics/v3/data/ga'
'?ids=ga:1234567890&metrics=ga:visitors'
'&start-date=2013-01-16&end-date=2013-01-16')
}
VISITORS_BY_LOCALE_RESPONSE = {
u'kind': u'analytics#gaData',
u'rows': [
[u'/1/', u'16'],
[u'/ach/', u'24'],
[u'/ak/', u'32'],
[u'/ar/', u'3362'],
[u'/as/', u'10'],
[u'/ast/', u'6'],
[u'/be/', u'13'],
[u'/bg/', u'989'],
[u'/bn-BD/', u'21'],
[u'/bn-IN/', u'3'],
[u'/bs/', u'73'],
[u'/ca/', u'432'],
[u'/cs/', u'3308'],
[u'/da/', u'947'],
[u'/de/', u'37313'],
[u'/el/', u'1720'],
[u'/en-US/', u'221447'],
[u'/eo/', u'12'],
[u'/es/', u'24432'],
[u'/et/', u'226'],
[u'/eu/', u'122'],
[u'/fa/', u'356'],
[u'/favicon.ico', u'4'],
[u'/ff/', u'6'],
[u'/fi/', u'2318'],
[u'/fr/', u'24922'],
[u'/fur/', u'5'],
[u'/fy-NL/', u'2'],
[u'/ga-IE/', u'7'],
[u'/gd/', u'7'],
[u'/gl/', u'43'],
[u'/gu-IN/', u'3'],
[u'/he/', u'202'],
[u'/hi-IN/', u'21'],
[u'/hr/', u'677'],
[u'/hu/', u'2873'],
[u'/hy-AM/', u'14'],
[u'/id/', u'3390'],
[u'/ilo/', u'5'],
[u'/is/', u'39'],
[u'/it/', u'9986'],
[u'/ja/', u'15508'],
[u'/kk/', u'9'],
[u'/km/', u'8'],
[u'/kn/', u'7'],
[u'/ko/', u'858'],
[u'/lt/', u'536'],
[u'/mai/', u'12'],
[u'/mk/', u'58'],
[u'/ml/', u'10'],
[u'/mn/', u'42'],
[u'/mr/', u'10'],
[u'/ms/', u'14'],
[u'/my/', u'413'],
[u'/nb-NO/', u'714'],
[u'/ne-NP/', u'7'],
[u'/nl/', u'4970'],
[u'/no/', u'135'],
[u'/pa-IN/', u'10'],
[u'/pl/', u'9701'],
[u'/pt-BR/', u'12299'],
[u'/pt-PT/', u'1332'],
[u'/rm/', u'8'],
[u'/ro/', u'1221'],
[u'/ru/', u'26194'],
[u'/rw/', u'5'],
[u'/si/', u'21'],
[u'/sk/', u'875'],
[u'/sl/', u'530'],
[u'/son/', u'1'],
[u'/sq/', u'27'],
[u'/sr-Cyrl/', u'256'],
[u'/sv/', u'1488'],
[u'/ta-LK/', u'13'],
[u'/ta/', u'13'],
[u'/te/', u'6'],
[u'/th/', u'2936'],
[u'/tr/', u'3470'],
[u'/uk/', u'434'],
[u'/vi/', u'4880'],
[u'/zh-CN/', u'5640'],
[u'/zh-TW/', u'3508']
],
u'containsSampledData': False,
u'profileInfo': {
u'webPropertyId': u'UA-1234567890',
u'internalWebPropertyId': u'1234567890',
u'tableId': u'ga:1234567890',
u'profileId': u'1234567890',
u'profileName': u'support.mozilla.org - Production Only',
u'accountId': u'1234567890'
},
u'itemsPerPage': 1000,
u'totalsForAllResults': {
u'ga:visitors': u'437598'},
u'columnHeaders': [
{u'dataType': u'STRING',
u'columnType': u'DIMENSION',
u'name': u'ga:pagePathLevel1'},
{u'dataType': u'INTEGER',
u'columnType': u'METRIC',
u'name': u'ga:visitors'}
],
u'query': {
u'max-results': 1000,
u'dimensions': u'ga:pagePathLevel1',
u'start-date': u'2013-01-16',
u'start-index': 1,
u'ids': u'ga:1234567890',
u'metrics': [u'ga:visitors'],
u'end-date': u'2013-01-16'
},
u'totalResults': 83,
u'id': ('https://www.googleapis.com/analytics/v3/data/ga'
'?ids=ga:1234567890&dimensions=ga:pagePathLevel1'
'&metrics=ga:visitors&start-date=2013-01-16&end-date=2013-01-16'),
u'selfLink': ('https://www.googleapis.com/analytics/v3/data/ga'
'?ids=ga:1234567890&dimensions=ga:pagePathLevel1'
'&metrics=ga:visitors&start-d
|
huffpostdata/python-pollster
|
setup.py
|
Python
|
bsd-2-clause
| 756
| 0.003968
|
# coding: utf-8
import sys
from setuptools import setup, find_packages
NAME = "pollster"
VERSION = "2.0.2"
# To install the library, run the following
#
# python setup.py install
#
# prerequisite: setuptools
# http://pypi.python.org/pypi/setuptools
REQUIRES = ["urllib3 >= 1.15", "six >= 1.10", "certifi", "python-dateutil", "pandas >= 0.19.1"]
setup(
name=NAME,
version=VERSION,
description="Pollster API",
author_email="Adam Hooper <[email protected]>",
url="https://github.com/huffpostdata/python-pollster",
keywords=["Pollster API"],
install_requires=REQUIRES,
packages=find_packages(),
include_package_data=True,
long_description="""Download election-related polling data from Pollster."""
)
|
IntegralDefense/ptauto
|
bin/pt_query.py
|
Python
|
apache-2.0
| 19,839
| 0.00005
|
#!/usr/bin/env python3
import argparse
import datetime
import getpass
import json
import logging
import logging.config
import os
import re
import sys
import tabulate
import uuid
from critsapi.critsapi import CRITsAPI
from critsapi.critsdbapi import CRITsDBAPI
from lib.pt.common.config import Config
from lib.pt.common.constants import PT_HOME
from lib.pt.core.database import Database
from lib.pt.ptapi import PTAPI
from lib.crits.vocabulary.indicators import IndicatorTypes as it
from operator import itemgetter
from configparser import ConfigParser
log = logging.getLogger()
VERSION = "0.1337"
# Check configuration directory
local_config_dir = os.path.join(PT_HOME, 'etc', 'local')
if not os.path.exists(local_config_dir):
os.makedirs(local_config_dir)
sys.exit('No etc/local/ directory. See README to create.')
config = Config()
# Check local data directory
if config.core.cache_enabled:
if not os.path.exists(config.core.cache_dir):
log.info('Creating Cache directory in '
'{}'.format(config.core.cache_dir))
os.makedirs(config.core.cache_dir)
# Initialize logging
log_path = os.path.join(PT_HOME, 'etc', 'local', 'logging.ini')
try:
logging.config.fileConfig(log_path)
except Exception as e:
sys.exit('unable to load logging configuration file {}: '
'{}'.format(log_path, str(e)))
pt = PTAPI(username=config.core.pt_username, apikey=config.core.pt_apikey)
pt.set_proxy(http=config.proxy.http, https=config.proxy.https)
argparser = argparse.ArgumentParser()
argparser.add_argument('QUERY', action='store', help='A value to send as a'
' query to PT. Email, phone, name, etc.')
argparser.add_argument('--dev', dest='dev', action='store_true', default=False)
argparser.add_argument('--crits', dest='crits', action='store_true',
default=False, help='Write the results to CRITs with'
' appropriate relationships.')
argparser.add_argument('--test', dest='test', action='store_true',
default=False, help='Run with test data. (Save PT '
'queries)')
argparser.add_argument('-f', dest='force', action='store_true', default=False,
help='Force a new API query (do not used cached '
'results.')
argparser.add_argument('-t', action='append', dest='tags', default=[],
help='Bucket list tags for crits. Multiple -t options '
'are allowed.')
# Add our mutually exclusive items
meg = argparser.add_mutually_exclusive_group()
meg.add_argument('-n', dest='name', action='store_true', default=False,
help='The query is a name and pt_query will not try to '
'determine the type automatically.')
meg.add_argument('-a', dest='address', action='store_true', default=False,
help='The query is an address and pt_query will not '
'try to determine the type automatically.')
args = argparser.parse_args()
# Patterns for determining which type of lookup to do
# Some items cannot be differentiated via regex (name vs address), so we use
# a flag to specify these
# Load patterns for regexes
pattern_config = ConfigParser()
patterns = {}
with open(os.path.join(PT_HOME, 'etc', 'patterns.ini')) as fp:
pattern_config.readfp(fp)
email_address_pattern = re.compile(pattern_config.get('email', 'pattern'))
phone_pattern = re.compile(pattern_config.get('phone', 'pattern'))
domain_pattern = re.compile(pattern_config.get('domain', 'pattern'))
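# Hypothetical sketch of etc/patterns.ini (the real file is not shown here);
# it only needs the three sections read above, each with a 'pattern' option.
# The regexes below are placeholders, not the project's actual patterns:
#
#   [email]
#   pattern = [\w.+-]+@[\w-]+\.[\w.-]+
#   [phone]
#   pattern = \+?\d[\d .()-]{6,}
#   [domain]
#   pattern = ([\w-]+\.)+[a-zA-Z]{2,}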
database = None
if config.core.cache_enabled:
database = Database()
if args.crits:
HOME = os.path.expanduser("~")
if not os.path.exists(os.path.join(HOME, '.crits_api')):
print('''Please create a file with the following contents:
[crits]
user = lolnate
[keys]
prod_api_key = keyhere
dev_api_key = keyhere
''')
raise SystemExit('~/.crits_api was not found or was not accessible.')
crits_config = ConfigParser()
crits_config.read(os.path.join(HOME, '.crits_api'))
if crits_config.has_option("keys", "prod"):
crits_api_prod = crits_config.get("keys", "prod")
if crits_config.has_option("keys", "dev"):
crits_api_dev = crits_config.get("keys", "dev")
if crits_config.has_option("crits", "user"):
crits_username = crits_config.get("crits", "user")
if args.dev:
crits_url = config.crits.crits_dev_api_url
crits_api_key = crits_api_dev
if len(crits_api_key) != 40:
print("Dev API key in ~/.crits_api is the wrong length! Must be 40\
characters.")
else:
crits_url = config.crits.crits_prod_api_url
crits_api_key = crits_api_prod
if len(crits_api_key) != 40:
print("Prod API key in ~/.crits_api is the wrong length! Must be 40\
characters.")
crits_proxy = {
'http': config.crits.crits_proxy_url,
'https': config.crits.crits_proxy_url,
}
|
# Build our mongo connection
if args.dev:
crits_mongo = CRITsDBAPI(mongo_uri=config.crits.mongo_uri_dev,
db_name=config.crits.database)
else:
crits_mongo = CRITsDBAPI(mongo_uri=config.crits.mongo_uri,
db_name=config.crits.database)
crits_mongo.connect()
# Connect to the CRITs API
crits = CRITsAPI(
api_url=crits_url,
api_key=crits_api_key,
username=crits_username,
proxies=crits_proxy,
verify=config.crits.crits_verify
)
query = args.QUERY.rstrip()
# Get the user launching all this
user = getpass.getuser()
# Used to store the type of indicator in CRITs for the query object.
crits_indicator_type = ''
# Used to store the cache file location
cache_file = None
if database and not args.force and config.core.cache_enabled:
cache_file = database.get_cache_file(query)
if cache_file:
log.info('Using cache file for query {}'.format(query))
with open(cache_file) as fp:
results = json.loads(fp.read())
bucket_list = ['whois', 'pt:query']
for t in args.tags:
bucket_list.append(t)
if args.name or args.address:
if args.name:
field_str = 'name'
if args.address:
field_str = 'address'
if args.test:
results = pt.get_test_results(field=field_str)
else:
results = pt.whois_search(query=query, field=field_str)
if database and not cache_file and config.core.cache_enabled:
filepath = os.path.join(config.core.cache_dir, str(uuid.uuid4()))
log.debug('Filepath is {}'.format(filepath))
database.add_results_to_cache(query, user, results, filepath)
base_reference = 'https://www.passivetotal.org/search/whois/'\
'{}'.format(field_str)
# Use our config defined indicator type of whois email objects
if args.name:
crits_indicator_type = it.WHOIS_NAME
if args.address:
crits_indicator_type = it.WHOIS_ADDR1
bucket_list.append('registrant')
elif re.match(email_address_pattern, query):
if args.test:
results = pt.get_test_results(field='email')
else:
results = pt.whois_search(query=query, field='email')
# Now add the results to the db if we have it
if database and not cache_file and config.core.cache_enabled:
filepath = os.path.join(config.core.cache_dir, str(uuid.uuid4()))
log.debug('Filepath is {}'.format(filepath))
database.add_results_to_cache(query, user, results, filepath)
base_reference = 'https://www.passivetotal.org/search/whois/email'
# Use our config defined indicator type of whois email objects
crits_indicator_type = it.WHOIS_REGISTRANT_EMAIL_ADDRESS
bucket_list.append('registrant')
elif re.match(phone_pattern, query):
if args.test:
results = pt.get_test_results(field='phone')
else:
results = pt.whois_search(query=query, field='phone')
# Now add the results to the db if we have it
if database and not cache_file and config.core.cache_enabled:
filep
|
mvaled/sentry
|
tests/sentry/api/endpoints/test_monitor_checkins.py
|
Python
|
bsd-3-clause
| 5,664
| 0.001589
|
from __future__ import absolute_import, print_function
from datetime import timedelta
from django.utils import timezone
from freezegun import freeze_time
from sentry.models import CheckInStatus, Monitor, MonitorCheckIn, MonitorStatus, MonitorType
from sentry.testutils import APITestCase
@freeze_time("2019-01-01")
class CreateMonitorCheckInTest(APITestCase):
def test_passing(self):
user = self.create_user()
org = self.create_organization(owner=user)
team = self.create_team(organization=org, members=[user])
project = self.create_project(teams=[team])
monitor = Monitor.objects.create(
organization_id=org.id,
project_id=project.id,
next_checkin=timezone.now() - timedelta(minutes=1),
type=MonitorType.CRON_JOB,
config={"schedule": "* * * * *"},
)
self.login_as(user=user)
with self.feature({"organizations:monitors": True}):
resp = self.client.post(
"/api/0/monitors/{}/checkins/".format(monitor.guid), data={"status": "ok"}
)
assert resp.status_code == 201, resp.content
checkin = MonitorCheckIn.objects.get(guid=resp.data["id"])
assert checkin.status == CheckInStatus.OK
monitor = Monitor.objects.get(id=monitor.id)
assert monitor.status == MonitorStatus.OK
assert monitor.last_checkin == checkin.date_added
assert monitor.next_checkin == monitor.get_next_scheduled_checkin(checkin.date_added)
def test_failing(self):
user = self.create_user()
org = self.create_organization(owner=user)
team = self.create_team(organization=org, members=[user])
project = self.create_project(teams=[team])
monitor = Monitor.objects.create(
organization_id=org.id,
project_id=project.id,
next_checkin=timezone.now() - timedelta(minutes=1),
type=MonitorType.CRON_JOB,
config={"schedule": "* * * * *"},
)
self.login_as(user=user)
with self.feature({"organizations:monitors": True}):
resp = self.client.post(
"/api/0/monitors/{}/checkins/".format(monitor.guid), data={"status": "error"}
)
assert resp.status_code == 201, resp.content
checkin = MonitorCheckIn.objects.get(guid=resp.data["id"])
assert checkin.status == CheckInStatus.ERROR
monitor = Monitor.objects.get(id=monitor.id)
assert monitor.status == MonitorStatus.ERROR
assert monitor.last_checkin == checkin.date_added
assert monitor.next_checkin == monitor.get_next_scheduled_checkin(checkin.date_added)
def test_disabled(self):
user = self.create_user()
org = self.create_organization(owner=user)
team = self.create_team(organization=org, members=[user])
project = self.create_project(teams=[team])
monitor = Monitor.objects.create(
organization_id=org.id,
project_id=project.id,
next_checkin=timezone.now() - timedelta(minutes=1),
type=MonitorType.CRON_JOB,
status=MonitorStatus.DISABLED,
config={"schedule": "* * * * *"},
)
self.login_as(user=user)
with self.feature({"organizations:monitors": True}):
resp = self.client.post(
"/api/0/monitors/{}/checkins/".format(monitor.guid), data={"status": "error"}
)
assert resp.status_code == 201, resp.content
checkin = MonitorCheckIn.objects.get(guid=resp.data["id"])
assert checkin.status == CheckInStatus.ERROR
monitor = Monitor.objects.get(id=monitor.id)
assert monitor.status == MonitorStatus.DISABLED
assert monitor.last_checkin == checkin.date_added
assert monitor.next_checkin == monitor.get_next_scheduled_checkin(checkin.date_added)
def test_pending_deletion(self):
user = self.create_user()
org = self.create_organization(owner=user)
team = self.create_team(organization=org, members=[user])
project = self.create_project(teams=[team])
monitor = Monitor.objects.create(
organization_id=org.id,
project_id=project.id,
next_checkin=timezone.now() - timedelta(minutes=1),
type=MonitorType.CRON_JOB,
status=MonitorStatus.PENDING_DELETION,
config={"schedule": "* * * * *"},
)
self.login_as(user=user)
with self.feature({"organizations:monitors": True}):
resp = self.client.post(
"/api/0/monitors/{}/checkins/".format(monitor.guid), data={"status": "error"}
)
assert resp.status_code == 404, resp.content
def test_deletion_in_progress(self):
user = self.create_user()
org = self.create_organization(owner=user)
team = self.create_team(organization=org, members=[user])
project = self.create_project(teams=[team])
monitor = Monitor.objects.create(
organization_id=org.id,
project_id=project.id,
next_checkin=timezone.now() - timedelta(minutes=1),
type=MonitorType.CRON_JOB,
status=MonitorStatus.DELETION_IN_PROGRESS,
config={"schedule": "* * * * *"},
)
self.login_as(user=user)
with self.feature({"organizations:monitors": True}):
resp = self.client.post(
"/api/0/monitors/{}/checkins/".format(monitor.guid), data={"status": "error"}
)
assert resp.status_code == 404, resp.content
|
akx/license-grep
|
license_grep/models.py
|
Python
|
mit
| 1,190
| 0.002521
|
from dataclasses import asdict, dataclass
from typing import List, Optional
from license_grep.licenses import UnknownLicense, canonicalize_licenses
from license_grep.utils import unique_in_order
@dataclass
class PackageInfo:
name: str
version: str
type: str
raw_licenses: Optional[List[str]]
location: str
context: Optional[str]
@property
def licenses(self):
for license, canonicalized_license in canonicalize_licenses(self.raw_licenses):
yield canonicalized_license
@property
def licenses_string(self):
|
return ", ".join(
unique_in_order(str(license or "<UNKNOWN>") for license in self.licenses)
)
@property
def spec(self):
return f"{self.name}@{self.version}"
@property
def full_spec(self):
return f"{self.type}:{self.name}@{self.version}"
def as_json_dict(self):
return {
**asdict(self),
"licenses": list(
unique_in_order(
f"?{l}" if isinstance(l, UnknownLicense) else l
for l in self.licenses
)
),
"spec": self.spec,
}
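# Hedged usage sketch (not part of the original module): constructing a
# PackageInfo and reading its derived specs; the package shown is made up.
#
#   pkg = PackageInfo(name="left-pad", version="1.3.0", type="npm",
#                     raw_licenses=["MIT"], location="node_modules/left-pad",
#                     context=None)
#   pkg.spec       # -> "left-pad@1.3.0"
#   pkg.full_spec  # -> "npm:left-pad@1.3.0"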
|
M4sse/chromium.src
|
native_client_sdk/src/test_all.py
|
Python
|
bsd-3-clause
| 3,095
| 0.006462
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top level script for running all python unittests in the NaCl SDK.
"""
from __future__ import print_function
import argparse
import os
import subprocess
import sys
import unittest
# add tools folder to sys.path
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
TOOLS_DIR = os.path.join(SCRIPT_DIR, 'tools')
BUILD_TOOLS_DIR = os.path.join(SCRIPT_DIR, 'build_tools')
sys.path.append(TOOLS_DIR)
sys.path.append(os.path.join(TOOLS_DIR, 'tests'))
sys.path.append(os.path.join(TOOLS_DIR, 'lib', 'tests'))
sys.path.append(BUILD_TOOLS_DIR)
sys.path.append(os.path.join(BUILD_TOOLS_DIR, 'tests'))
import build_paths
PKG_VER_DIR = os.path.join(build_paths.NACL_DIR, 'build', 'package_version')
TAR_DIR = os.path.join(build_paths.NACL_DIR, 'toolchain', '.tars')
PKG_VER = os.path.join(PKG_VER_DIR, 'package_version.py')
EXTRACT_PACKAGES = ['nacl_x86_glibc']
TOOLCHAIN_OUT = os.path.join(build_paths.OUT_DIR, 'sdk_tests', 'toolchain')
# List of modules containing unittests. The goal is to keep the total
# runtime of these tests under 2 seconds. Any slower tests should go
# in TEST_MODULES_BIG.
TEST_MODULES = [
'build_artifacts_test',
'build_version_test',
'create_html_test',
'create_nmf_test',
'easy_template_test',
'elf_test',
'fix_deps_test',
'getos_test',
'get_shared_deps_test',
'httpd_test',
'nacl_config_test',
'oshelpers_test',
'parse_dsc_test',
'quote_test',
'sdktools_config_test',
'sel_ldr_test',
'update_nacl_manifest_test',
'verify_filelist_test',
'verify_ppapi_test',
]
# Slower tests. For example the 'sdktools' are mostly slower system tests
# that longer to run. If --quick is passed then we don't run these.
TEST_MODULES_BIG = [
'sdktools_commands_test',
'sdktools_test',
]
def ExtractToolchains():
cmd = [sys.executable, PKG_VER,
'--packages', ','.join(EXTRACT_PACKAGES),
'--tar-dir', TAR_DIR,
'--dest-dir', TOOLCHAIN_OUT,
'extract']
subprocess.check_call(cmd)
def main(args):
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('--quick', action='store_true')
options = parser.parse_args(args)
# Some of the unit tests use parts of toolchains. Extract to TOOLCHAIN_OUT.
print('Extracting toolchains...')
ExtractToolchains()
suite = unittest.TestSuite()
modules = TEST_MODULES
if not options.quick:
modules += TEST_MODULES_BIG
for module_name in modules:
module = __import__(module_name)
suite.addTests(unittest.defaultTestLoader.loadTestsFromModule(module))
if options.verbose:
verbosity = 2
else:
verbosity = 1
print('Running unittests...')
result = unittest.TextTestRunner(verbosity=verbosity).run(suite)
return int(not result.wasSuccessful())
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
ncclient/ncclient
|
test/unit/devices/test_alu.py
|
Python
|
apache-2.0
| 1,917
| 0.003652
|
import unittest
from ncclient.devices.alu import *
from ncclient.xml_ import *
import re
xml = """<rpc-reply xmlns:junos="http://xml.alu.net/alu/12.1x46/alu">
<routing-engin>
<name>reX</name>
<commit-success/>
<!-- This is a comment -->
</routing-engin>
<ok/>
</rpc-reply>"""
class TestAluDevice(unittest.TestCase):
def setUp(self):
self.obj = AluDeviceHandler({'name': 'alu'})
def test_remove_namespaces(self):
xmlObj = to_ele(xml)
expected = re.sub(r'<rpc-reply xmlns:junos="http://xml.alu.net/alu/12.1x46/alu">',
|
r'<?xml version="1.0" encoding="UTF-8"?><rpc-reply>', xml)
self.assertEqual(expected, to_xml(remove_namespaces(xmlObj)))
def test_get_capabilities(self):
expected = ["urn:ietf:params:netconf:base:1.0", ]
self.assertListEqual(expected, self.obj.get_capabilities())
def test_get_xml_base_namespace_dict(self):
expected = {None: BASE_NS_1_0}
self.assertDictEqual(expected, self.obj.get_xml_base_namespace_dict())
|
def test_get_xml_extra_prefix_kwargs(self):
expected = dict()
expected["nsmap"] = self.obj.get_xml_base_namespace_dict()
self.assertDictEqual(expected, self.obj.get_xml_extra_prefix_kwargs())
def test_add_additional_operations(self):
expected=dict()
expected["get_configuration"] = GetConfiguration
expected["show_cli"] = ShowCLI
expected["load_configuration"] = LoadConfiguration
self.assertDictEqual(expected, self.obj.add_additional_operations())
def test_transform_reply(self):
expected = re.sub(r'<rpc-reply xmlns:junos="http://xml.alu.net/alu/12.1x46/alu">',
r'<?xml version="1.0" encoding="UTF-8"?><rpc-reply>', xml)
actual = self.obj.transform_reply()
xmlObj = to_ele(xml)
self.assertEqual(expected, to_xml(actual(xmlObj)))
|
SahilTikale/haas
|
hil/cli/network.py
|
Python
|
apache-2.0
| 2,277
| 0
|
"""Commands related to networks are in this module"""
import click
import sys
from hil.cli.client_setup import client
@click.group()
def network():
"""Commands related to network"""
@network.command(name='create', short_help='Create a new network')
@click.argument('network')
@click.argument('owner')
@click.option('--access', help='Projects that can access this network. '
'Defaults to the owner of the network')
@click.option('--net-id',
help='Network ID for network. Only admins can specify this.')
def network_create(network, owner, access, net_id):
"""Create a link-layer <network>. See docs/networks.md for details"""
if net_id is None:
net_id = ''
if access is None:
access = owner
client.network.create(network, owner, access, net_id)
@network.command(name='delete')
@click.argument('network')
def network_delete(network):
"""Delete a network"""
client.network.delete(network)
@network.command(name='show')
@click.argument('network')
def network_show(network):
"""Display information about network"""
q = client.network.show(network)
for item in q.items():
sys.stdout.write("%s\t : %s\n" % (item[0], item[1]))
@network.command(name='list')
def network_list():
"""List all networks"""
q = client.network.list()
for item in q.items():
sys.stdout.write('%s \t : %s\n' % (item[0], item[1]))
|
@network.command('list-attachments')
@click.argument('network')
@click.option('--project', help='Name of project.')
def list_network_attachments(network, project):
"""Lists all the attachments from <project> for <network>
If <project> is `None`, lists all attachments for <network>
"""
print client.network.list_network_attachments(network, project)
@network.command(name='grant-access')
@click.argument('network')
@click.argument('project')
def network_grant_project_access(project, network):
"""Add <project> to <network> access"""
client.network.grant_access(project, network)
@network.command(name='revoke-access')
@click.argument('network')
@click.argument('project')
def network_revoke_project_access(project, network):
"""Remove <project> from <network> access"""
client.network.revoke_access(project, network)
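# --- Editor's illustration, not part of hil: one way to exercise the click
# group above is click's test runner. The network name below is made up, and a
# real test would stub out client.network.* instead of talking to a server.
if __name__ == '__main__':
    from click.testing import CliRunner
    runner = CliRunner()
    result = runner.invoke(network, ['show', 'net0'])
    print(result.exit_code, result.output)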
|
elopezga/ErrorRate
|
ivi/lecroy/lecroyWR64XIA.py
|
Python
|
mit
| 1,644
| 0.001825
|
"""
Python Interchangeable Virtual Instrument Library
Copyright (c) 2012 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .lecroyWRXIA import *
class lecroyWR64XIA(lecroyWRXIA):
"Lecroy WaveRunner 64Xi-A IVI oscilloscope driver"
def __init__(self, *args, **kwargs):
        self.__dict__.setdefault('_instrument_id', 'WaveRunner 64Xi-A')
        super(lecroyWR64XIA, self).__init__(*args, **kwargs)
self._analog_channel_count = 4
self._digital_channel_count = 0
self._channel_count = self._analog_channel_count + self._digital_channel_count
self._bandwidth = 600e6
self._init_channels()
|
HBPNeurorobotics/nest-simulator
|
testsuite/manualtests/test_pp_psc_delta_stdp.py
|
Python
|
gpl-2.0
| 2,827
| 0
|
# -*- coding: utf-8 -*-
#
# test_pp_psc_delta_stdp.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
#
# Moritz Deger, [email protected], Aug 14, 2015
#
#
# Python script to reproduce failure of pp_psc_delta to show spike timing
# dependent plasticity (STDP), as opposed to iaf_psc_delta.
# The problem is probably related to the setting of 'archiver_length'
# (printed at the end of the script)
import nest
import nest.raster_plot
import numpy as np
import pylab
Dt = 1.
nsteps = 100
w_0 = 100.
nest.ResetKernel()
nrn_pre = nest.Create('parrot_neuron')
nrn_post1 = nest.Create('iaf_psc_delta')
nrn_post2 = nest.Create('pp_psc_delta')
nest.Connect(nrn_pre, nrn_post1 + nrn_post2,
syn_spec={'model': 'stdp_synapse', 'weight': w_0})
conn1 = nest.GetConnections(nrn_pre, nrn_post1)
conn2 = nest.GetConnections(nrn_pre, nrn_post2)
sg_pre = nest.Create('spike_generator')
nest.SetStatus(sg_pre, {'spike_times': np.arange(Dt, nsteps * Dt, 10. * Dt)})
nest.Connect(sg_pre, nrn_pre)
mm = nest.Create('multimeter')
nest.SetStatus(mm, {'record_from': ['V_m']})
nest.Connect(mm, nrn_post1 + nrn_post2)
sd = nest.Create('spike_detector')
nest.Connect(nrn_pre + nrn_post1 + nrn_post2, sd)
t = []
w1 = []
w2 = []
t.append(0.)
w1.append(nest.GetStatus(conn1, keys=['weight'])[0][0])
w2.append(nest.GetStatus(conn2, keys=['weight'])[0][0])
for i in xrange(nsteps):
nest.Simulate(Dt)
t.append(i * Dt)
w1.append(nest.GetStatus(conn1, keys=['weight'])[0][0])
w2.append(nest.GetStatus(conn2, keys=['weight'])[0][0])
pylab.figure(1)
pylab.plot(t, w1, 'g', label='iaf_psc_delta, ' + str(nrn_post1[0]))
pylab.plot(t, w2, 'r', label='pp_psc_delta, ' + str(nrn_post2[0]))
pylab.xlabel('time [ms]')
pylab.ylabel('weight [mV]')
pylab.legend(loc='best')
ylims = pylab.ylim()
pylab.ylim(ylims[0] - 5, ylims[1] + 5)
# pylab.savefig('test_pp_psc_delta_stdp_fig1.png')
nest.raster_plot.from_device(sd)
ylims = pylab.ylim()
pylab.ylim(ylims[0] - .5, ylims[1] + .5)
pylab.show()
# pylab.savefig('test_pp_psc_delta_stdp_fig2.png')
print 'Archiver lengths shall be equal:'
for nrn in [nrn_post1, nrn_post2]:
print nest.GetStatus(nrn, keys=['model', 'archiver_length'])[0]
|
jjgomera/pychemqt
|
lib/EoS/cubic.py
|
Python
|
gpl-3.0
| 18,485
| 0.000759
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
r"""Pychemqt, Chemical Engineering Process simulator
Copyright (C) 2009-2017, Juan José Gómez Romera <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Library with the implementation of a generic cubic equation of state with the
form
.. math::
P = \frac{RT}{V-b}-\frac{\alpha(T)}{V^2+\delta V+\epsilon}
Expressed as a cubic polynomial in the compressibility factor, easy to solve
.. math::
Z^3 + \left(\delta'-B'-1\right)Z^2 +
\left(a'+\epsilon'-\delta'\left(b'+1\right)\right)Z -
\left(\epsilon'\left(b'+1\right)+a'b'\right) = 0
using the adimensional parameters
.. math::
\begin{array}[t]{l}
a' = \frac{aP}{RT}\\
b' = \frac{bP}{RT}\\
\delta' = \frac{\delta P}{RT}\\
\epsilon' = \frac{\epsilon P}{RT}\\
\end{array}
Each cubic EoS implemented here is a specific form of this general
expression, changing the values of δ, ε and the expression of α(T)
Each equation is specially suitable for different compounds, for example, the
Schmidt-Wenzel (SW) equation (1980) and the Adachi-Lu-Sugie (ALS) equation
(1983) are good for methane to n-decane. The Yu-Lu (YL) equation (1987) was
designed for asymmetric nonpolar mixtures, but not for polar substances. The
Iwai-Margerum-Lu (IML) equation (1987) was developed for polar substances, but is
not suitable for nonpolar substances with large molecular weight.
"""
from math import log, exp
from scipy.constants import R
from PyQt5.QtWidgets import QApplication
from lib import unidades
from lib.eos import EoS
from lib.physics import R_atml, cubicCardano
from lib.bip import Kij, Mixing_Rule
from lib.utilities import refDoc
# TODO: Añadir parámetros, archivo /media/datos/Biblioteca/archivos/alfas.pdf
# self.Mathias = 0
# self.Adachi = [0, 0]
# self.Andoulakis = [0, 0, 0]
__doi__ = {
1:
{"autor": "Poling, B.E, Prausnitz, J.M, O'Connell, J.P",
"title": "The Properties of Gases and Liquids 5th Edition",
"ref": "McGraw-Hill, New York, 2001",
"doi": ""},
2:
{"autor": "Ahmed, T.",
"title": "Equations of State and PVT Analysis: Applications for"
"Improved Reservoir Modeling, 2nd Edition",
"ref": "Gulf Professional Publishing, 2016, ISBN 9780128015704,",
"doi": "10.1016/B978-0-12-801570-4.00002-7"},
3:
{"autor": "Bell, I.H., Jäger, A.",
"title": "Helmholtz Energy Transformations of Common Cubic Equations "
"of State for Use with Pure Fluids and Mixtures",
"ref": "J. Res. of NIST 121 (2016) 236-263",
"doi": "10.6028/jres.121.011"},
4:
{"autor": "",
"title": "",
"ref": "",
"doi": ""},
}
alfa = (QApplication.translate("pychemqt", "Original"),
"Boston-Mathias",
"Twu",
"Doridon")
@refDoc(__doi__, [3])
def CubicHelmholtz(tau, delta, **kw):
r"""Residual contribution to the free Helmholtz energy from a generic cubic
equation of state with the form:
.. math::
P = \frac{RT}{V-b}-\frac{\alpha(T)}{\left(v+\Delta_1b\right)
\left(v+\Delta_2b\right)}
    From this formulation it's possible to calculate the Helmholtz free energy
with the equation:
.. math::
\alpha^r = \phi^{(-)}-\frac{\tau\alpha}{RT_c}\phi^{(+)}
Parameters
----------
tau : float
Inverse reduced temperature, Tc/T [-]
delta : float
Reduced density, rho/rhoc [-]
kw : list
        Additional parameters specific to the cubic equation of state
The parameters include: rhoc, Tc, b, alfa, Delta1, Delta2
Returns
-------
    prop : dictionary with residual adimensional Helmholtz energy and derivatives
fir [-]
firt: [∂fir/∂τ]δ,x [-]
fird: [∂fir/∂δ]τ,x [-]
        firtt: [∂²fir/∂τ²]δ,x [-]
        firdt: [∂²fir/∂τ∂δ]x [-]
firdd: [∂²fir/∂δ²]τ,x [-]
"""
b = kw["b"]
a = kw["a"]
dat = kw["dat"]
datt = kw["datt"]
dattt = kw["dattt"]
Delta1 = kw["Delta1"]
Delta2 = kw["Delta2"]
R = kw["R"]
    # These parameters are necessary only for multicomponent mixtures to
# calculate fugacity coefficient
bi = kw.get("bi", None)
daxi = kw.get("daxi", None)
rhoc = kw.get("rhoc", 1)
Tc = kw.get("Tc", 1)
phi1 = -log(1-b*delta*rhoc)
if Delta1 == Delta2:
        # Special case using l'Hôpital's rule
phi2 = rhoc*delta
else:
phi2 = log((Delta1*b*rhoc*delta+1)/(Delta2*b*rhoc*delta+1)) / \
b/(Delta1-Delta2)
phi1d = b*rhoc/(1-b*delta*rhoc)
phi1dd = b**2*rhoc**2/(1-b*delta*rhoc)**2
phi1ddd = 2*b**3*rhoc**3/(1-b*delta*rhoc)**3
PI12 = (1+Delta1*b*rhoc*delta) * (1+Delta2*b*rhoc*delta)
PI12d = b*rhoc * (2*Delta1*Delta2*b*delta*rhoc + Delta1 + Delta2)
PI12dd = 2*Delta1*Delta2*b**2*rhoc**2
phi2d = rhoc/PI12
phi2dd = -rhoc*PI12d/PI12**2
phi2ddd = rhoc*(-PI12*PI12dd+2*PI12d**2)/PI12**3
fir = phi1 - tau*a/R/Tc*phi2
fird = phi1d - tau*a/R/Tc*phi2d
firdd = phi1dd - tau*a/R/Tc*phi2dd
firddd = phi1ddd - tau*a/R/Tc*phi2ddd
# Eq 32
dtat = tau*dat + a
dtatt = tau*datt + 2*dat
dtattt = tau*dattt + 3*datt
firt = -dtat/R/Tc * phi2
firtt = -dtatt/R/Tc * phi2
firttt = -dtattt/R/Tc * phi2
firdt = -dtat/R/Tc * phi2d
firddt = -dtat/R/Tc * phi2dd
firdtt = -dtatt/R/Tc * phi2d
prop = {}
prop["fir"] = fir
prop["fird"] = fird
prop["firt"] = firt
prop["firdd"] = firdd
prop["firdt"] = firdt
prop["firtt"] = firtt
prop["firddd"] = firddd
prop["firddt"] = firddt
prop["firdtt"] = firdtt
prop["firttt"] = firttt
prop["B"] = 0
prop["C"] = 0
prop["D"] = 0
if bi:
# Composition derivatives for fugacity coefficient calculation
c = 1/b
dbxi = bi # Eq 132
A = log((delta*rhoc*b*Delta1+1)/(delta*rhoc*b*Delta2+1)) # Eq 103
dAxi = [delta*rhoc*db*(Delta1-Delta2)/PI12 for db in dbxi] # Eq 104
dcxi = [-db/b**2 for db in dbxi] # Eq 107
phi1xi = [delta*rhoc*db/(1-delta*rhoc*b) for db in dbxi] # Eq 80
# Eq 111
phi2xi = [(A*dc + c*dA)/(Delta1-Delta2) for dc, dA in zip(dcxi, dAxi)]
dtaxi = [tau*da for da in daxi]
# Eq 77
phirxi = []
for dt, p1x, p2x in zip(dtaxi, phi1xi, phi2xi):
phirxi.append(p1x - 1/R/Tc*(dt*phi2 + tau*a*p2x))
prop["firxi"] = phirxi
return prop
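# --- Editor's illustration, not part of pychemqt: calling CubicHelmholtz with
# made-up numbers, only to show the expected keyword arguments. Real values of
# a, b, dat, Delta1, Delta2, ... come from the EoS subclasses defined below.
if __name__ == "__main__":
    demo = CubicHelmholtz(
        tau=1.2, delta=0.8,
        b=2.7e-5, a=0.45, dat=-0.1, datt=0.05, dattt=-0.02,
        Delta1=1 + 2**0.5, Delta2=1 - 2**0.5,
        R=8.314, rhoc=5000, Tc=300)
    print(demo["fir"], demo["fird"], demo["firt"])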
@refDoc(__doi__, [1, 2])
class Cubic(EoS):
r"""Class to implement the common functionality of cubic equation of state
This class implement a general cubic equation of state in the form:
.. math::
P = \frac{RT}{V-b}-\frac{\alpha(T)}{V^2+\delta V+\epsilon}
.. math::
P = \frac{RT}{V-b}-\frac{\alpha(T)}{\left(V+\delta_1b\right)
\left(V+\delta_2b\right)}
.. math::
\delta_1 = -\frac{\sqrt{\delta^2-4\epsilon}-\delta}{2b}
.. math::
\delta_2 = -\frac{\sqrt{\delta^2-4\epsilon}+\delta}{2b}
"""
def __init__(self, T, P, mezcla, **kwargs):
EoS.__init__(self, T, P, mezcla, **kwargs)
if "R" in kwargs:
self.R = kwargs["R"]
else:
self.R = R
self._cubicDefinition(T)
if self.mezcla.Tc < T:
self.x = 1
self.xi = self.zi
self.yi = self.zi
self.Zg = self._Z(self.zi, T, P)[-1]
self.Zl = None
else:
self.x
|
talumbau/webapp-public
|
webapp/apps/taxbrain/migrations/0003_taxsaveinputs_parameters.py
|
Python
|
mit
| 448
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('taxbrain', '0002_taxoutput_tax_result'),
]
operations = [
migrations.AddField(
model_name='taxsaveinputs',
name='parameters',
field=models.TextField(default=None),
            preserve_default=True,
),
]
|
JackDanger/sentry
|
src/sentry/api/endpoints/project_filters.py
|
Python
|
bsd-3-clause
| 923
| 0.001083
|
from __future__ import absolute_import
from rest_framework.response import Response
from sentry import filters
from sentry.api.bases.project import ProjectEndpoint
class ProjectFiltersEndpoint(ProjectEndpoint):
def get(self, request, project):
"""
List a project's filters
Retrieve a list of filters for a given project.
{method} {path}
"""
results = []
for f_cls in filters.all():
            filter = f_cls(project)
results.append({
'id': filter.id,
# 'active' will be either a boolean or list for the legacy browser filters
# all other filters will be boolean
'active': filter.is_enabled(),
'description': filter.description,
'name': filter.name,
})
        results.sort(key=lambda x: x['name'])
return Response(results)
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-compute/azure/mgmt/compute/v2017_12_01/models/virtual_machine_update.py
|
Python
|
mit
| 6,810
| 0.001028
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .update_resource import UpdateResource
class VirtualMachineUpdate(UpdateResource):
"""Describes a Virtual Machine.
Variables are only populated by the server, and will be ignored when
sending a request.
:param tags: Resource tags
:type tags: dict[str, str]
:param plan: Specifies information about the marketplace image used to
create the virtual machine. This element is only used for marketplace
images. Before you can use a marketplace image from an API, you must
enable the image for programmatic use. In the Azure portal, find the
marketplace image that you want to use and then click **Want to deploy
programmatically, Get Started ->**. Enter any required information and
then click **Save**.
:type plan: ~azure.mgmt.compute.v2017_12_01.models.Plan
:param hardware_profile: Specifies the hardware settings for the virtual
machine.
:type hardware_profile:
~azure.mgmt.compute.v2017_12_01.models.HardwareProfile
:param storage_profile: Specifies the storage settings for the virtual
machine disks.
:type storage_profile:
~azure.mgmt.compute.v2017_12_01.models.StorageProfile
:param os_profile: Specifies the operating system settings for the virtual
machine.
:type os_profile: ~azure.mgmt.compute.v2017_12_01.models.OSProfile
:param network_profile: Specifies the network interfaces of the virtual
machine.
:type network_profile:
~azure.mgmt.compute.v2017_12_01.models.NetworkProfile
:param diagnostics_profile: Specifies the boot diagnostic settings state.
<br><br>Minimum api-version: 2015-06-15.
:type diagnostics_profile:
~azure.mgmt.compute.v2017_12_01.models.DiagnosticsProfile
:param availability_set: Specifies information about the availability set
that the virtual machine should be assigned to. Virtual machines specified
in the same availability set are allocated to different nodes to maximize
availability. For more information about availability sets, see [Manage
the availability of virtual
machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-manage-availability?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
<br><br> For more information on Azure planned maintainance, see [Planned
maintenance for virtual machines in
Azure](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-planned-maintenance?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json)
<br><br> Currently, a VM can only be added to availability set at creation
time. An existing VM cannot be added to an availability set.
:type availability_set: ~azure.mgmt.compute.v2017_12_01.models.SubResource
:ivar provisioning_state: The provisioning state, which only appears in
the response.
:vartype provisioning_state: str
:ivar instance_view: The virtual machine instance view.
:vartype instance_view:
~azure.mgmt.compute.v2017_12_01.models.VirtualMachineInstanceView
:param license_type: Specifies that the image or disk that is being used
was licensed on-premises. This element is only used for images that
contain the Windows Server operating system. <br><br> Possible values are:
<br><br> Windows_Client <br><br> Windows_Server <br><br> If this element
is included in a request for an update, the value must match the initial
value. This value cannot be updated. <br><br> For more information, see
[Azure Hybrid Use Benefit for Windows
Server](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-hybrid-use-benefit-licensing?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json)
<br><br> Minimum api-version: 2015-06-15
:type license_type: str
:ivar vm_id: Specifies the VM unique ID which is a 128-bits identifier
that is encoded and stored in all Azure IaaS VMs SMBIOS and can be read
using platform BIOS commands.
:vartype vm_id: str
:param identity: The identity of the virtual machine, if configured.
:type identity:
~azure.mgmt.compute.v2017_12_01.models.VirtualMachineIdentity
:param zones: The virtual machine zones.
:type zones: list[str]
"""
_validation = {
'provisioning_state': {'readonly': True},
'instance_view': {'readonly': True},
'vm_id': {'readonly': True},
}
_attribute_map = {
'tags': {'key': 'tags', 'type': '{str}'},
'plan': {'key': 'plan', 'type': 'Plan'},
'hardware_profile': {'key': 'properties.hardwareProfile', 'type': 'HardwareProfile'},
'storage_profile': {'key': 'properties.storageProfile', 'type': 'StorageProfile'},
'os_profile': {'key': 'properties.osProfile', 'type': 'OSProfile'},
'network_profile': {'key': 'properties.networkProfile', 'type': 'NetworkProfile'},
'diagnostics_profile': {'key': 'properties.diagnosticsProfile', 'type': 'DiagnosticsProfile'},
'availability_set': {'key': 'properties.availabilitySet', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'instance_view': {'key': 'properties.instanceView', 'type': 'VirtualMachineInstanceView'},
'license_type': {'key': 'properties.licenseType', 'type': 'str'},
'vm_id': {'key': 'properties.vmId', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'VirtualMachineIdentity'},
        'zones': {'key': 'zones', 'type': '[str]'},
}
def __init__(self, **kwargs):
super(VirtualMachineUpdate, self).__init__(**kwargs)
self.plan = kwargs.get('plan', None)
self.hardware_profile = kwargs.get('hardware_profile', None)
self.storage_profile = kwargs.get('storage_profile', None)
self.os_profile = kwargs.get('os_profile', None)
self.network_profile = kwargs.get('network_profile', None)
self.diagnostics_profile = kwargs.get('diagnostics_profile', None)
self.availability_set = kwargs.get('availability_set', None)
self.provisioning_state = None
self.instance_view = None
self.license_type = kwargs.get('license_type', None)
self.vm_id = None
self.identity = kwargs.get('identity', None)
self.zones = kwargs.get('zones', None)
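# --- Editor's illustration, not part of the generated SDK: building an update
# payload that only touches the license type. The value is made up; tag
# handling is assumed to live in the UpdateResource base class.
if __name__ == '__main__':
    update = VirtualMachineUpdate(license_type='Windows_Server')
    print(update.license_type)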
|
bdell/pyPWA
|
pythonPWA/dataTypes/resonance.py
|
Python
|
mit
| 464
| 0.032328
|
class resonance():
"""
This class represents a resonance.
"""
def __init__(self,cR=1.0,wR=[],w0=1.,r0=.5,phase=0.):
self.wR=wR
self.cR=cR
self.w0=w0
self.r0=r0
        self.phase=phase
def toString(self):
"""
        Returns a string of the resonance data members delimited by newlines.
"""
        return "\n".join(["wR="+str(self.wR),"cR="+str(self.cR),"w0="+str(self.w0),"r0="+str(self.r0)])
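# --- Editor's illustration, not part of pythonPWA: constructing a resonance
# with arbitrary values and printing its string form.
if __name__ == "__main__":
    res = resonance(cR=0.8, wR=[1.0, 1.1], w0=1.2, r0=0.3, phase=0.0)
    print(res.toString())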
|
raymonwu/Managing_Your_Biological_Data_with_Python_3
|
07-tabular_data/7.4.3_convert_table.py
|
Python
|
mit
| 1,222
| 0
|
'''
Convert a table from a nested list to a nested dictionary and back.
-----------------------------------------------------------
(c) 2013 Allegra Via and Kristian Rother
Licensed under the conditions of the Python License
This code appears in section 7.4.3 of the book
"Managing Biological Data with Python".
-----------------------------------------------------------
'''
table = [
['protein', 'ext1', 'ext2', 'ext3'],
[0.16, 0.038, 0.044, 0.040],
[0.33, 0.089, 0.095, 0.091],
[0.66, 0.184, 0.191, 0.191],
[1.00, 0.280, 0.292, 0.283],
[1.32, 0.365, 0.367, 0.365],
[1.66, 0.441, 0.443, 0.444]
]
# convert nested list to nested dict
nested_dict = {}
n = 0
key = table[0]
# To include the header, run the for loop over
# all table elements (including the first one)
for row in table[1:]:
n = n + 1
entry = {key[0]: row[0], key[1]: row[1], key[2]: row[2],
key[3]: row[3]}
nested_dict['row'+str(n)] = entry
# Test
# print(table[1:])
print(nested_dict)
nested_list = []
for entry in nested_dict:
key = nested_dict[entry]
nested_list.append([key['protein'], key['ext1'], key['ext2'],
key['ext3']])
print(nested_list)
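# Editor's note: the same list -> dict conversion can be written more compactly
# with zip() and a dict comprehension; this is equivalent to the loop above and
# is shown only for comparison.
header, rows = table[0], table[1:]
nested_dict2 = {'row' + str(i + 1): dict(zip(header, row))
                for i, row in enumerate(rows)}
assert nested_dict2 == nested_dict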
|
klahnakoski/ActiveData
|
vendor/pyLibrary/aws/s3.py
|
Python
|
mpl-2.0
| 18,596
| 0.001559
|
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Contact: Kyle Lahnakoski ([email protected])
#
from __future__ import absolute_import, division, unicode_literals
import gzip
import zipfile
import boto
from boto.s3.connection import Location
from bs4 import BeautifulSoup
import mo_files
from mo_dots import Data, Null, coalesce, unwrap, to_data, is_many, list_to_data
from mo_files import mimetype
from mo_files.url import value2url_param
from mo_future import StringIO, is_binary, text
from mo_http import http
from mo_http.big_data import (
LazyLines,
MAX_STRING_SIZE,
ibytes2ilines,
safe_size,
scompressed2ibytes,
)
from mo_kwargs import override
from mo_logs import Except, Log
from mo_testing.fuzzytestcase import assertAlmostEqual
from mo_times.dates import Date
from mo_times.timer import Timer
from pyLibrary import convert
VERIFY_UPLOAD = True
DEBUG = False
TOO_MANY_KEYS = 1000 * 1000 * 1000
READ_ERROR = "S3 read error"
MAX_FILE_SIZE = 100 * 1024 * 1024
VALID_KEY = r"\d+([.:]\d+)*"
KEY_IS_WRONG_FORMAT = "key {{key}} in bucket {{bucket}} is of the wrong format"
class File(object):
def __init__(self, bucket, key):
self.bucket = bucket
self.key = key
def read(self):
return self.bucket.read(self.key)
def read_lines(self):
return self.bucket.read_lines(self.key)
def write(self, value):
self.bucket.write(self.key, value)
def write_lines(self, lines):
self.bucket.write_lines(self.key, lines)
@property
def meta(self):
return self.bucket.meta(self.key)
def delete(self):
return self.bucket.delete_key(self.key)
class Connection(object):
@override
def __init__(
self,
aws_access_key_id=None, # CREDENTIAL
aws_secret_access_key=None, # CREDENTIAL
region=None, # NAME OF AWS REGION, REQUIRED FOR SOME BUCKETS
kwargs=None,
):
self.settings = kwargs
try:
if not kwargs.region:
self.connection = boto.connect_s3(
aws_access_key_id=unwrap(self.settings.aws_access_key_id),
aws_secret_access_key=unwrap(self.settings.aws_secret_access_key),
)
else:
self.connection = boto.s3.connect_to_region(
self.settings.region,
aws_access_key_id=unwrap(self.settings.aws_access_key_id),
aws_secret_access_key=unwrap(self.settings.aws_secret_access_key),
)
except Exception as e:
Log.error("Problem connecting to S3", e)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if self.connection:
self.connection.close()
def get_bucket(self, name):
output = SkeletonBucket()
output.bucket = self.connection.get_bucket(name, validate=False)
return output
class Bucket(object):
"""
STORE JSON, OR CR-DELIMITED LIST OF JSON, IN S3
THIS CLASS MANAGES THE ".json" EXTENSION, AND ".gz"
(ZIP/UNZIP) SHOULD THE FILE BE BIG ENOUGH TO
JUSTIFY IT
ALL KEYS ARE DIGITS, SEPARATED BY DOT (.) COLON (:)
"""
@override
def __init__(
self,
bucket, # NAME OF THE BUCKET
aws_access_key_id=None, # CREDENTIAL
aws_secret_access_key=None, # CREDENTIAL
region=None, # NAME OF AWS REGION, REQUIRED FOR SOME BUCKETS
public=False,
debug=False,
kwargs=None,
):
self.settings = kwargs
self.connection = None
self.bucket = None
self.key_format = _scrub_key(kwargs.key_format)
try:
self.connection = Connection(kwargs).connection
self.bucket = self.connection.get_bucket(
self.settings.bucket, validate=False
)
except Exception as e:
Log.error(
"Problem connecting to {{bucket}}", bucket=self.settings.bucket, cause=e
)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
        if self.connection:
self.connection.close()
def __getattr__(self, item):
return getattr(self.bucket, item)
def get_key(self, key, must_exist=True):
if must_exist:
meta = self.get_meta(key)
if not meta:
Log.error(
"Key {{key}} does not exist in bucket {{bucket}}",
key=key,
bucket=self.bucket.name,
)
key = strip_extension(meta.key)
        return File(self, key)
def delete_key(self, key):
# self._verify_key_format(key) DO NOT VERIFY, DELETE BAD KEYS ANYWAY!!
try:
meta = self.get_meta(key, conforming=False)
if meta == None:
return
self.bucket.delete_key(meta.key)
except Exception as e:
self.get_meta(key, conforming=False)
raise e
def delete_keys(self, keys):
self.bucket.delete_keys([str(k) for k in keys])
def get_meta(self, key, conforming=True):
"""
RETURN METADATA ON FILE IN BUCKET
:param key: KEY, OR PREFIX OF KEY
:param conforming: TEST IF THE KEY CONFORMS TO REQUIRED PATTERN
:return: METADATA, IF UNIQUE, ELSE ERROR
"""
try:
metas = list(self.bucket.list(prefix=str(key)))
metas = list_to_data([m for m in metas if text(m.name).find(".json") != -1])
perfect = Null
favorite = Null
too_many = False
error = None
for m in metas:
try:
simple = strip_extension(m.key)
if conforming:
self._verify_key_format(simple)
if simple == key:
perfect = m
too_many = False
if simple.startswith(key + ".") or simple.startswith(key + ":"):
if favorite and not perfect:
too_many = True
favorite = m
except Exception as e:
error = e
if too_many:
Log.error(
"multiple keys in {{bucket}} with prefix={{prefix|quote}}: {{list}}",
bucket=self.name,
prefix=key,
list=[k.name for k in metas],
)
if not perfect and error:
Log.error("Problem with key request", error)
return coalesce(perfect, favorite)
except Exception as e:
Log.error(
READ_ERROR + " can not read {{key}} from {{bucket}}",
key=key,
bucket=self.bucket.name,
cause=e,
)
def keys(self, prefix=None, delimiter=None):
"""
:param prefix: NOT A STRING PREFIX, RATHER PATH ID PREFIX (MUST MATCH TO NEXT "." OR ":")
:param delimiter: TO GET Prefix OBJECTS, RATHER THAN WHOLE KEYS
:return: SET OF KEYS IN BUCKET, OR
"""
if delimiter:
# WE REALLY DO NOT GET KEYS, BUT RATHER Prefix OBJECTS
# AT LEAST THEY ARE UNIQUE
candidates = [
k.name.rstrip(delimiter)
for k in self.bucket.list(prefix=str(prefix), delimiter=str(delimiter))
]
else:
candidates = [
strip_extension(k.key) for k in self.bucket.list(prefix=str(prefix))
]
if prefix == None:
return set(c for c in candidates if c != "0.json")
else:
return set(
k
for k in candidates
if k == prefix
or k.startswith(prefix + ".")
or k.startswith(prefix + ":")
)
def metas(se
|
mscuthbert/abjad
|
abjad/tools/pitchtools/test/test_pitchtools_PitchClass_is_pitch_class_number.py
|
Python
|
gpl-3.0
| 727
| 0.001376
|
# -*- encoding: utf-8 -*-
from abjad import *
def test_pitchtools_PitchClass_is_pitch_class_number_01():
assert pitchtools.PitchClass.is_pitch_class_number(0)
    assert pitchtools.PitchClass.is_pitch_class_number(0.5)
assert pitchtools.PitchClass.is_pitch_class_number(11)
assert pitchtools.PitchClass.is_pitch_class_number(11.5)
def test_pitchtools_PitchClass_is_pitch_class_number_02():
assert not pitchtools.PitchClass.is_pitch_class_number(-1)
assert not pitchtools.PitchClass.is_pitch_class_number(-0.5)
assert not pitchtools.PitchClass.is_pitch_class_number(12)
assert not pitchtools.PitchClass.is_pitch_class_number(99)
    assert not pitchtools.PitchClass.is_pitch_class_number('foo')
|
i5o/openshot-sugar
|
openshot/openshot/blender/scripts/neon_curves.py
|
Python
|
gpl-3.0
| 5,877
| 0.020759
|
# OpenShot Video Editor is a program that creates, modifies, and edits video files.
# Copyright (C) 2009 Jonathan Thomas
#
# This file is part of OpenShot Video Editor (http://launchpad.net/openshot/).
#
# OpenShot Video Editor is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenShot Video Editor is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OpenShot Video Editor. If not, see <http://www.gnu.org/licenses/>.
# Import Blender's python API. This only works when the script is being
# run from the context of Blender. Blender contains its own version of Python
# with this library pre-installed.
import bpy
# Load a font
def load_font(font_path):
""" Load a new TTF font into Blender, and return the font object """
# get the original list of fonts (before we add a new one)
original_fonts = bpy.data.fonts.keys()
# load new font
bpy.ops.font.open(filepath=font_path)
# get the new list of fonts (after we added a new one)
for font_name in bpy.data.fonts.keys():
if font_name not in original_fonts:
return bpy.data.fonts[font_name]
# no new font was added
return None
# Debug Info:
# ./blender -b test.blend -P demo.py
# -b = background mode
# -P = run a Python script within the context of the project file
# Init all of the variables needed by this script. Because Blender executes
# this script, OpenShot will inject a dictionary of the required parameters
# before this script is executed.
params = {
'title' : 'Oh Yeah! OpenShot!',
'extrude' : 0.1,
'bevel_depth' : 0.02,
'spacemode' : 'CENTER',
'text_size' : 1.5,
'width' : 1.0,
'fontname' : 'Bfont',
'color' : [0.8,0.8,0.8],
'alpha' : 1.0,
'line1_color' : [0.8,0.8,0.8],
'line2_color' : [0.8,0.8,0.8],
'line3_color' : [0.8,0.8,0.8],
'line4_color' : [0.8,0.8,0.8],
'output_path' : '/tmp/',
'fps' : 24,
'quality' : 90,
'file_format' : 'PNG',
'color_mode' : 'RGBA',
'horizon_color' : [0.57, 0.57, 0.57],
'resolution_x' : 1920,
'resolution_y' : 1080,
'resolution_percentage' : 100,
'start_frame' : 20,
'end_frame' : 25,
'animation' : True,
}
#INJECT_PARAMS_HERE
# The remainder of this script will modify the current Blender .blend project
# file, and adjust the settings. The .blend file is specified in the XML file
# that defines this template in OpenShot.
#----------------------------------------------------------------------------
# Modify Text / Curve settings
#print (bpy.data.curves.keys())
text_object = bpy.data.curves["Text.001"]
text_object.extrude = params["extrude"]
text_object.bevel_depth = params["bevel_depth"]
text_object.body = params["title"]
text_object.align = params["spacemode"]
text_object.size = params["text_size"]
text_object.space_character = params["width"]
# Get font object
font = None
if params["fontname"] != "Bfont":
# Add font so it's available to Blender
font = load_font(params["fontname"])
else:
# Get default font
font = bpy.data.fonts["Bfont"]
text_object.font = font
# Change the material settings (color, alpha, etc...)
material_object = bpy.data.materials["Material.title"]
material_object.diffuse_color = params["diffuse_color"]
material_object.specular_color = params["specular_color"]
material_object.specular_intensity = params["specular_intensity"]
material_object.alpha = params["alpha"]
# Change line colors
material_object = bpy.data.materials["Material.line1"]
material_object.diffuse_color = params["line1_color"]
material_object = bpy.data.materials["Material.line2"]
material_object.diffuse_color = params["line2_color"]
material_object = bpy.data.materials["Material.line3"]
material_object.diffuse_color = params["line3_color"]
material_object = bpy.data.materials["Material.line4"]
material_object.diffuse_color = params["line4_color"]
# Set the render options. It is important that these are set
# to the same values as the current OpenShot project. These
# params are automatically set by OpenShot
bpy.context.scene.render.filepath = params["output_path"]
bpy.context.scene.render.fps = params["fps"]
#bpy.context.scene.render.quality = params["quality"]
try:
bpy.context.scene.render.file_format = params["file_format"]
bpy.context.scene.render.color_mode = params["color_mode"]
except:
bpy.context.scene.render.image_settings.file_format = params["file_format"]
bpy.context.scene.render.image_settings.color_mode = params["color_mode"]
#bpy.data.worlds[0].horizon_color = params["horizon_color"]
bpy.context.scene.render.resolution_x = params["resolution_x"]
bpy.context.scene.render.resolution_y = params["resolution_y"]
bpy.context.scene.render.resolution_percentage = params["resolution_percentage"]
bpy.context.scene.frame_start = params["start_frame"]
bpy.context.scene.frame_end = params["end_frame"]
# Animation Speed (use Blender's time remapping to slow or speed up animation)
animation_speed = int(params["animation_speed"]) # time remapping multiplier
new_length = int(params["end_frame"]) * animation_speed # new length (in frames)
bpy.context.scene.frame_end = new_length
bpy.context.scene.render.frame_map_old = 1
bpy.context.scene.render.frame_map_new = animation_speed
if params["start_frame"] == params["end_frame"]:
bpy.context.scene.frame_start = params["end_frame"]
bpy.context.scene.frame_end = params["end_frame"]
# Render the current animation to the params["output_path"] folder
bpy.ops.render.render(animation=params["animation"])
|
dotCID/Graduation
|
Robot code/Sensors/simpleCV_3.py
|
Python
|
gpl-2.0
| 4,363
| 0.013981
|
#!/usr/bin/python
'''
This script uses SimpleCV to grab an image from the camera and numpy to find an infrared LED and report its position relative to the camera view centre and whether it is inside the target area.
Attempted stabilisation of the output by tracking a circular object instead and altering exposure of the camera.
'''
# make it possible to import from parent directory:
import sys
sys.path.insert(0,'..')
## Change terminal window header for easier identification of contents
sys.stdout.write("\x1b]2;Sensors/simpleCV_3.py\x07")
import time, math, SimpleCV
import zmq, json
import subprocess as sp
from globalVars import CHANNEL_TARGETDATA
from globalVars import CAMERA_ID_NUMBER
printing = True
dpx = 0.0025 # approximate amount of degrees per pixel for Trust eLight
width = 1920
height = 1080
camera_id = 'video' + str(CAMERA_ID_NUMBER)
# To increase framerate, count the search() loops and render every n frames
renderFrame = 5
frame = 0
# Adjust camera settings from OS, since SimpleCV's commands don't do anything:
sp.call(["uvcdynctrl -d '"+camera_id+"' -s 'Exposure, Auto' 1"], shell = True) # Disable auto exposure
sp.call(["uvcdynctrl -d '"+camera_id+"' -s 'Exposure (Absolute)' 12"], shell = True) # Set absolute exposure
display = SimpleCV.Display()
cam = SimpleCV.Camera(CAMERA_ID_NUMBER, {"width":width,"height":height})
#target box for the marker
box_d = 20
yTgt = (height/2-box_d, height/2+box_d)
xTgt = (width/2-box_d, width/2+box_d)
box_clr = SimpleCV.Color.RED
centre = (height/2, width/2)
context = zmq.Context()
socket = context.socket(zmq.PUB)
socket.bind(CHANNEL_TARGETDATA)
def search():
global frame, renderFrame
img = cam.getImage()
    objective = img.colorDistance(color=(255,255,255)).invert()
seg_objective = objective.stretch(200,255)
blobs = seg_objective.findBlobs()
if blobs:
center_point = (blobs[-1].x, blobs[-1].y)
        if frame == renderFrame:
img.drawCircle((blobs[-1].x, blobs[-1].y), 10,SimpleCV.Color.YELLOW,3)
img.dl().rectangle2pts((xTgt[0], yTgt[0]), (xTgt[1],yTgt[1]), box_clr)
img.show()
frame = 0
frame +=1
return center_point
    if frame == renderFrame:
img.dl().rectangle2pts((xTgt[0], yTgt[0]), (xTgt[1],yTgt[1]), box_clr)
img.show()
frame = 0
frame +=1
return None
#get current time in milliseconds
millis = lambda: int(round(time.time() * 1000))
#############################################################
# RUNNING CODE BELOW #
#############################################################
tar_x = 0
tar_y = 0
deg_x = 0
deg_y = 0
last_tar = tar_x
found = False
findTime = 0
lastFound = findTime
lossReported = False
while display.isNotDone():
target = search()
if target is not None:
tar_x = target[0]-width/2
tar_y = target[1]-height/2
findTime = millis()
found = True
lossReported = False
else:
found = False
lastFound = findTime
# Angular difference between the box and the target
# Having the target within the box is acceptable
if abs(tar_x) > box_d:
deg_x = tar_x * dpx
else:
deg_x = 0
if abs(tar_y) > box_d:
deg_y = tar_y * dpx
else:
deg_y = 0
# If the target is in the box, indicate this with the box colour
    if deg_y == 0 and deg_x == 0 and found:
box_clr = SimpleCV.Color.GREEN
else:
box_clr = SimpleCV.Color.RED
#output the data
# not needed if there's no new data to report
if not lossReported:
message = {
't' : millis(),
'findTime': findTime,
'found' : found,
'tar_px' : {'x':tar_x, 'y':tar_y},
'tar_dg' : {'x':deg_x, 'y':deg_y}
}
        # wait briefly (100 ms) to make sure Scan picks up on the last hit
if not found and millis() - findTime < 100:
continue
socket.send_json(message)
print "Sent targetData: ",
print message
if lastFound == findTime:
lossReported = False
#spam to keep data flowing
|
zomux/nlpy
|
nlpy/util/feature_container.py
|
Python
|
gpl-3.0
| 1,660
| 0.001205
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 NLPY.ORG
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
import numpy as np
from line_iterator import LineIterator
class FeatureContainer(object):
def __init__(self, path=None, dtype="libsvm", feature_n=-1):
self.N = 0
self.data = np.zeros(0)
self.targets = np.zeros(0)
self.feature_n = feature_n
self.path = path
self.dtype = dtype
# if path:
# self.read(path, dtype)
def read(self):
"""
Read feature matrix from data
:param path: data path
:param type: libsvm (only)
"""
ys = []
xs = []
for line in LineIterator(self.path):
items = line.split(" ")
feature_map = {}
y = 0
for item in items:
if ":" in item:
feature_idx, value = item.split(":")
feature_map[int(feature_idx)] = float(value)
else:
                    y = int(item)
if self.feature_n == -1:
max_key = max(feature_map.keys()) if feature_map else 0
else:
max_key = self.feature_n
features = []
for fidx in range(1, max_key + 1):
if fidx in feature_map:
features.append(feature_map[fidx])
else:
features.append(0)
yield features, y
# xs.append(features)
# ys.append(y)
#
# self.data = np.array(xs)
# self.targets = np.array(ys)
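# --- Editor's illustration, not part of nlpy: reading a libsvm-style file.
# The path below is hypothetical; each line looks like "1 1:0.5 3:2.0".
if __name__ == "__main__":
    fc = FeatureContainer(path="train.libsvm", dtype="libsvm", feature_n=3)
    for features, label in fc.read():
        print(label, features)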
|
wdmchaft/taskcoach
|
tests/unittests/widgetTests/DragAndDropTest.py
|
Python
|
gpl-3.0
| 2,283
| 0.005694
|
'''
Task Coach - Your friendly task manager
Copyright (C) 2004-2010 Task Coach developers <[email protected]>
Task Coach is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Task Coach is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import test
from taskcoachlib.widgets import treectrl
class DummyEvent(object):
def __init__(self, item=None):
self.item = item
self.vetoed = self.allowed = False
def GetItem(self):
return self.item
def Veto(self):
self.vetoed = True
def Allow(self):
self.allowed = True
class TreeCtrlDragAndDropMixinTest(test.wxTestCase):
# pylint: disable-msg=E1101
def setUp(self):
self.treeCtrl = treectrl.HyperTreeList(self.frame)
self.treeCtrl.AddColumn('First')
self.rootItem = self.treeCtrl.AddRoot('root')
self.item = self.treeCtrl.AppendItem(self.rootItem, 'item')
def assertEventIsVetoed(self, event):
self.failUnless(event.vetoed)
self.failIf(event.allowed)
def assertEventIsAllowed(self, event):
self.failUnless(event.allowed)
self.failIf(event.vetoed)
def testEventIsVetoedWhenDragBeginsWithoutItem(self):
event = DummyEvent()
self.treeCtrl.OnBeginDrag(event)
self.assertEventIsVetoed(event)
def testEventIsAllowedWhenDragBeginsWithItem(self):
        event = DummyEvent(self.item)
self.treeCtrl.OnBeginDrag(event)
self.assertEventIsAllowed(event)
def testEventIsAllowedWhenDragBeginWithSelectedItem(self):
self.treeCtrl.SelectItem(self.item)
event = DummyEvent(self.item)
self.treeCtrl.OnBeginDrag(event)
self.assertEventIsAllowed(event)
|
kumar303/addons-server
|
src/olympia/zadmin/views.py
|
Python
|
bsd-3-clause
| 9,220
| 0
|
from django import http
from django.apps import apps
from django.conf import settings
from django.contrib import admin
from django.core.exceptions import PermissionDenied
from django.core.files.storage import default_storage as storage
from django.shortcuts import get_object_or_404, redirect
from django.views import debug
from django.views.decorators.cache import never_cache
import six
import olympia.core.logger
from olympia import amo
from olympia.activity.models import ActivityLog
from olympia.addons.decorators import addon_view_factory
from olympia.addons.indexers import get_mappings as get_addons_mappings
from olympia.addons.models import Addon
from olympia.amo import messages, search
from olympia.amo.decorators import (
json_view, permission_required, post_required)
from olympia.amo.mail import DevEmailBackend
from olympia.amo.utils import HttpResponseSendFile, render
from olympia.bandwagon.models import Collection
from olympia.files.models import File, FileUpload
from olympia.stats.search import get_mappings as get_stats_mappings
from olympia.versions.models import Version
from .decorators import admin_required
from .forms import (
AddonStatusForm, FeaturedCollectionFormSet, FileFormSet,
MonthlyPickFormSet)
log = olympia.core.logger.getLogger('z.zadmin')
@admin_required
def show_settings(request):
settings_dict = debug.get_safe_settings()
return render(request, 'zadmin/settings.html',
{'settings_dict': settings_dict, 'title': 'Settings!'})
@admin_required
def env(request):
env = {}
for k in request.META.keys():
env[k] = debug.cleanse_setting(k, request.META[k])
return render(request, 'zadmin/settings.html',
{'settings_dict': env, 'title': 'Env!'})
@admin.site.admin_view
def fix_disabled_file(request):
file_ = None
if request.method == 'POST' and 'file' in request.POST:
file_ = get_object_or_404(File, id=request.POST['file'])
if 'confirm' in request.POST:
file_.unhide_disabled_file()
messages.success(request, 'We have done a great thing.')
return redirect('zadmin.fix-disabled')
return render(request, 'zadmin/fix-disabled.html',
{'file': file_, 'file_id': request.POST.get('file', '')})
@admin_required
@json_view
def collections_json(request):
app = request.GET.get('app', '')
q = request.GET.get('q', '')
data = []
if not q:
return data
qs = Collection.objects.all()
try:
qs = qs.filter(pk=int(q))
except ValueError:
qs = qs.filter(slug__startswith=q)
try:
qs = qs.filter(application=int(app))
except ValueError:
pass
for c in qs[:7]:
data.append({'id': c.id,
'name': six.text_type(c.name),
'slug': six.text_type(c.slug),
'all_personas': c.all_personas,
'url': c.get_url_path()})
return data
@admin_required
@post_required
def featured_collection(request):
try:
pk = int(request.POST.get('collection', 0))
except ValueError:
pk = 0
c = get_object_or_404(Collection, pk=pk)
return render(request, 'zadmin/featured_collection.html',
dict(collection=c))
@admin_required
def features(request):
form = FeaturedCollectionFormSet(request.POST or None)
if request.method == 'POST' and form.is_valid():
form.save(commit=False)
for obj in form.deleted_objects:
obj.delete()
messages.success(request, 'Changes successfully saved.')
return redirect('zadmin.features')
return render(request, 'zadmin/features.html', dict(form=form))
@admin_required
def monthly_pick(request):
form = MonthlyPickFormSet(request.POST or None)
if request.method == 'POST' and form.is_valid():
form.save()
messages.success(request, 'Changes successfully saved.')
return redirect('zadmin.monthly_pick')
return render(request, 'zadmin/monthly_pick.html', dict(form=form))
@admin_required
def elastic(request):
INDEX = settings.ES_INDEXES['default']
    es = search.get_es()
    indexes = set(settings.ES_INDEXES.values())
es_mappings = {
'addons': get_addons_mappings(),
'addons_stats': get_stats_mappings(),
}
ctx = {
'index': INDEX,
'nodes': es.nodes.stats(),
'health': es.cluster.health(),
'state': es.cluster.state(),
'mappings': [(index, es_mappings.get(index, {})) for index in indexes],
}
return render(request, 'zadmin/elastic.html', ctx)
@admin.site.admin_view
def mail(request):
backend = DevEmailBackend()
if request.method == 'POST':
backend.clear()
return redirect('zadmin.mail')
return render(request, 'zadmin/mail.html', dict(mail=backend.view_all()))
@permission_required(amo.permissions.ANY_ADMIN)
def index(request):
log = ActivityLog.objects.admin_events()[:5]
return render(request, 'zadmin/index.html', {'log': log})
@admin_required
def addon_search(request):
ctx = {}
if 'q' in request.GET:
q = ctx['q'] = request.GET['q']
if q.isdigit():
qs = Addon.objects.filter(id=int(q))
else:
qs = Addon.search().query(name__text=q.lower())[:100]
if len(qs) == 1:
return redirect('zadmin.addon_manage', qs[0].id)
ctx['addons'] = qs
return render(request, 'zadmin/addon-search.html', ctx)
@never_cache
@json_view
def general_search(request, app_id, model_id):
if not admin.site.has_permission(request):
raise PermissionDenied
try:
model = apps.get_model(app_id, model_id)
except LookupError:
raise http.Http404
limit = 10
obj = admin.site._registry[model]
ChangeList = obj.get_changelist(request)
# This is a hideous api, but uses the builtin admin search_fields API.
# Expecting this to get replaced by ES so soon, that I'm not going to lose
# too much sleep about it.
args = [request, obj.model, [], [], [], [], obj.search_fields, [],
obj.list_max_show_all, limit, [], obj]
try:
# python3.2+ only
from inspect import signature
if 'sortable_by' in signature(ChangeList.__init__).parameters:
args.append('None') # sortable_by is a django2.1+ addition
except ImportError:
pass
cl = ChangeList(*args)
qs = cl.get_queryset(request)
# Override search_fields_response on the ModelAdmin object
# if you'd like to pass something else back to the front end.
lookup = getattr(obj, 'search_fields_response', None)
return [{'value': o.pk, 'label': getattr(o, lookup) if lookup else str(o)}
for o in qs[:limit]]
@admin_required
@addon_view_factory(qs=Addon.objects.all)
def addon_manage(request, addon):
form = AddonStatusForm(request.POST or None, instance=addon)
pager = amo.utils.paginate(
request, Version.unfiltered.filter(addon=addon), 30)
# A list coercion so this doesn't result in a subquery with a LIMIT which
# MySQL doesn't support (at this time).
versions = list(pager.object_list)
files = File.objects.filter(version__in=versions).select_related('version')
formset = FileFormSet(request.POST or None, queryset=files)
if form.is_valid() and formset.is_valid():
if 'status' in form.changed_data:
ActivityLog.create(amo.LOG.CHANGE_STATUS, addon,
form.cleaned_data['status'])
log.info('Addon "%s" status changed to: %s' % (
addon.slug, form.cleaned_data['status']))
form.save()
for form in formset:
if 'status' in form.changed_data:
log.info('Addon "%s" file (ID:%d) status changed to: %s' % (
addon.slug, form.instance.id, form.cleaned_data['status']))
form.save()
return redirect('zadmin.addon_manage', addon.slug)
# Build a map from file.id to form in formset for precise form display
form_map = dict((form.instance.id, form) for form in formset.forms)
#
|
ViaSat/luigi
|
luigi/format.py
|
Python
|
apache-2.0
| 14,652
| 0.001092
|
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import signal
import subprocess
import io
import os
import re
import locale
import tempfile
import warnings
from luigi import six
class FileWrapper(object):
"""
    Wrap `file` in a "real" object so stuff can be added to it after creation.
"""
def __init__(self, file_object):
self._subpipe = file_object
def __getattr__(self, name):
# forward calls to 'write', 'close' and other methods not defined below
return getattr(self._subpipe, name)
def __enter__(self, *args, **kwargs):
# instead of returning whatever is returned by __enter__ on the subpipe
# this returns self, so whatever custom injected methods are still available
# this might cause problems with custom file_objects, but seems to work
# fine with standard python `file` objects which is the only default use
return self
def __exit__(self, *args, **kwargs):
return self._subpipe.__exit__(*args, **kwargs)
def __iter__(self):
return iter(self._subpipe)
class InputPipeProcessWrapper(object):
def __init__(self, command, input_pipe=None):
"""
        Initializes an InputPipeProcessWrapper instance.
:param command: a subprocess.Popen instance with stdin=input_pipe and
stdout=subprocess.PIPE.
Alternatively, just its args argument as a convenience.
"""
self._command = command
self._input_pipe = input_pipe
self._original_input = True
if input_pipe is not None:
try:
input_pipe.fileno()
except AttributeError:
# subprocess require a fileno to work, if not present we copy to disk first
self._original_input = False
f = tempfile.NamedTemporaryFile('wb', prefix='luigi-process_tmp', delete=False)
self._tmp_file = f.name
f.write(input_pipe.read())
input_pipe.close()
f.close()
self._input_pipe = FileWrapper(io.BufferedReader(io.FileIO(self._tmp_file, 'r')))
self._process = command if isinstance(command, subprocess.Popen) else self.create_subprocess(command)
# we want to keep a circular reference to avoid garbage collection
# when the object is used in, e.g., pipe.read()
self._process._selfref = self
def create_subprocess(self, command):
"""
http://www.chiark.greenend.org.uk/ucgi/~cjwatson/blosxom/2009-07-02-python-sigpipe.html
"""
def subprocess_setup():
# Python installs a SIGPIPE handler by default. This is usually not what
            # non-Python subprocesses expect.
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
return subprocess.Popen(command,
stdin=self._input_pipe,
stdout=subprocess.PIPE,
preexec_fn=subprocess_setup,
close_fds=True)
def _finish(self):
# Need to close this before input_pipe to get all SIGPIPE messages correctly
self._process.stdout.close()
if not self._original_input and os.path.exists(self._tmp_file):
os.remove(self._tmp_file)
if self._input_pipe is not None:
self._input_pipe.close()
self._process.wait() # deadlock?
if self._process.returncode not in (0, 141, 128 - 141):
            # 141 == 128 + 13 == 128 + SIGPIPE - normally processes exit with 128 + {received SIG}
# 128 - 141 == -13 == -SIGPIPE, sometimes python receives -13 for some subprocesses
raise RuntimeError('Error reading from pipe. Subcommand exited with non-zero exit status %s.' % self._process.returncode)
def close(self):
self._finish()
def __del__(self):
self._finish()
def __enter__(self):
return self
def _abort(self):
"""
Call _finish, but eat the exception (if any).
"""
try:
self._finish()
except KeyboardInterrupt:
raise
except BaseException:
pass
def __exit__(self, type, value, traceback):
if type:
self._abort()
else:
self._finish()
def __getattr__(self, name):
if name == '_process':
raise AttributeError(name)
try:
return getattr(self._process.stdout, name)
except AttributeError:
return getattr(self._input_pipe, name)
def __iter__(self):
for line in self._process.stdout:
yield line
self._finish()
def readable(self):
return True
def writable(self):
return False
def seekable(self):
return False
class OutputPipeProcessWrapper(object):
WRITES_BEFORE_FLUSH = 10000
def __init__(self, command, output_pipe=None):
self.closed = False
self._command = command
self._output_pipe = output_pipe
self._process = subprocess.Popen(command,
stdin=subprocess.PIPE,
stdout=output_pipe,
close_fds=True)
self._flushcount = 0
def write(self, *args, **kwargs):
self._process.stdin.write(*args, **kwargs)
self._flushcount += 1
if self._flushcount == self.WRITES_BEFORE_FLUSH:
self._process.stdin.flush()
self._flushcount = 0
def writeLine(self, line):
assert '\n' not in line
self.write(line + '\n')
def _finish(self):
"""
Closes and waits for subprocess to exit.
"""
if self._process.returncode is None:
self._process.stdin.flush()
self._process.stdin.close()
self._process.wait()
self.closed = True
def __del__(self):
if not self.closed:
self.abort()
def __exit__(self, type, value, traceback):
if type is None:
self.close()
else:
self.abort()
def __enter__(self):
return self
def close(self):
self._finish()
if self._process.returncode == 0:
if self._output_pipe is not None:
self._output_pipe.close()
else:
raise RuntimeError('Error when executing command %s' % self._command)
def abort(self):
self._finish()
def __getattr__(self, name):
if name == '_process':
raise AttributeError(name)
try:
return getattr(self._process.stdin, name)
except AttributeError:
return getattr(self._output_pipe, name)
def readable(self):
return False
def writable(self):
return True
def seekable(self):
return False
class BaseWrapper(object):
def __init__(self, stream, *args, **kwargs):
self._stream = stream
try:
super(BaseWrapper, self).__init__(stream, *args, **kwargs)
except TypeError:
pass
def __getattr__(self, name):
if name == '_stream':
raise AttributeError(name)
return getattr(self._stream, name)
def __enter__(self):
self._stream.__enter__()
return self
def __exit__(self, *args):
self._stream.__exit__(*args)
def __iter__(self):
try:
for line in self._stream:
yield line
finally:
s
|
munhyunsu/Hobby
|
2018F_SCSCAlgorithm/week2/card_tests.py
|
Python
|
gpl-3.0
| 528
| 0
|
import unittest
from card import Card
class CardTest(unittest.TestCase):
def test_create(self):
suit = 'Hearts'
rank = 'Ace'
card1 = Card(suit, rank)
self.assertEqual((suit, rank), card1.get_value())
def test___eq__(self):
card1 = Card('Spades', 'Queen')
card2 = Card('Spades', 'Queen')
self.assertEqual(card1, card2)
        card3 = Card('Hearts', 'Queen')
self.assertNotEqual(card1, card3)
if __name__ == '__main__':
unittest.main(verbosity=2)
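# --- Editor's sketch: the card module itself is not shown in this corpus, so
# here is one minimal implementation of the interface these tests assume
# (named _SketchCard to avoid shadowing the real import above).
class _SketchCard(object):
    def __init__(self, suit, rank):
        self.suit = suit
        self.rank = rank

    def get_value(self):
        # The tests expect a (suit, rank) tuple.
        return (self.suit, self.rank)

    def __eq__(self, other):
        return isinstance(other, _SketchCard) and self.get_value() == other.get_value()

    def __ne__(self, other):
        return not self.__eq__(other)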
|
mandrive/FlaskTest
|
__init__.py
|
Python
|
mit
| 2,578
| 0.002327
|
import logging
from logging.handlers import RotatingFileHandler
from flask import Flask, render_template
from flask_login import LoginManager
from flask_restful import Api
from flask_wtf.csrf import CsrfProtect
from itsdangerous import URLSafeTimedSerializer
from sqlalchemy import create_engine
import AppConfig
from RestResources.Resources import PostsList, Posts
from services.Services import UserService
from views import Login, Common, Post, Admin
app = Flask(__name__)
CsrfProtect(app)
login_serializer = URLSafeTimedSerializer(AppConfig.APPSECRETKEY)
@app.errorhandler(404)
def not_found(error):
return render_template('404.html'), 404
# set the secret key. keep this really secret:
app.secret_key = AppConfig.APPSECRETKEY
def register_mods():
app.register_blueprint(Common.mod)
app.register_blueprint(Login.mod)
app.register_blueprint(Post.mod)
app.register_blueprint(Admin.mod)
def create_db_engine():
return create_engine(AppConfig.CONNECTIONSTRING, pool_recycle=3600, echo=True)
def build_db_engine():
AppConfig.DBENGINE = create_db_engine()
def init_login():
login_manager = LoginManager()
login_manager.init_app(app)
AppConfig.LOGINMANAGER = login_manager
# Create user loader function
@login_manager.user_loader
def load_user(user_id):
return UserService().getAll().filter_by(id=user_id).first()
@login_manager.token_loader
def get_user_token(token):
max_age = app.config["REMEMBER_COOKIE_DURATION"].total_seconds()
#Decrypt the Security Token, data = [username, hashpass]
data = login_serializer.loads(token, max_age=max_age)
userService = UserService()
#Find the User
user = userService.getById(data[0])
#Check Password and return user or None
if user and userService.validate(user.username, user.password):
return user
return None
def init_logger():
handler = RotatingFileHandler('FlaskTest.log', maxBytes=10000, backupCount=1)
handler.setLevel(logging.INFO)
app.logger.addHandler(handler)
def register_rest_api():
return Api(app)
def register_rest_resources():
api.add_resource(PostsList, '/api/posts')
api.add_resource(Posts, '/api/posts/<string:post_id>')
def set_app_configuration():
app.config['REMEMBER_COOKIE_DURATION'] = AppConfig.REMEMBER_COOKIE_DURATION
register_mods()
api = register_rest_api()
register_rest_resources()
build_db_engine()
init_login()
init_logger()
set_app_configuration()
app.run(AppConfig.APPHOST, AppConfig.APPPORT)
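# The module above relies on an AppConfig module that is not part of this
# file. The sketch below lists the attributes it is expected to provide,
# inferred purely from the references above; the values are placeholders,
# not the project's real configuration.
#
# AppConfig.py (hypothetical):
#     from datetime import timedelta
#     APPSECRETKEY = 'change-me'                      # sessions and the token serializer
#     CONNECTIONSTRING = 'sqlite:///flasktest.db'     # passed to sqlalchemy.create_engine
#     REMEMBER_COOKIE_DURATION = timedelta(days=14)   # read back via app.config
#     APPHOST = '127.0.0.1'
#     APPPORT = 5000
#     DBENGINE = None                                 # set by build_db_engine()
#     LOGINMANAGER = None                             # set by init_login()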
|
simpleenergy/bughouse-ranking
|
tests/bughouse/ratings/test_overall_overall_ratings.py
|
Python
|
mit
| 2,377
| 0
|
import pytest
from bughouse.models import (
BLACK,
WHITE,
OVERALL_OVERALL,
)
from bughouse.ratings.engines.overall import (
rate_teams,
rate_players,
)
def test_rate_single_game(factories, models, elo_settings):
game = factories.GameFactory()
r1, r2 = rate_teams(game)
assert r1.rating == 1006
assert r2.rating == 994
def test_rate_multiple_games(factories, models):
team_a = factories.TeamFactory()
team_b = factories.TeamFactory()
rate_teams(factories.GameFactory(winning_team=team_a, losing_team=team_b))
    rate_teams(factories.GameFactory(winning_team=team_a, losing_team=team_b))
assert team_a.get_latest_rating(OVERALL_OVERALL) == 1012
assert team_b.get_latest_rating(OVERALL_OVERALL) == 988
@pytest.mark.parametrize(
'losing_color',
(BLACK, WHITE),
)
def test_individual_ratings(factories, models, losing_color):
game = factories.GameFactory(losing_color=losing_color)
if game.losing_color == game.BLACK:
wtwr, wtbr, ltwr, ltbr = rate_players(game)
assert wtwr.player.get_latest_rating(OVERALL_OVERALL) == 1007
        assert wtbr.player.get_latest_rating(OVERALL_OVERALL) == 1006
assert ltwr.player.get_latest_rating(OVERALL_OVERALL) == 994
assert ltbr.player.get_latest_rating(OVERALL_OVERALL) == 993
else:
wtwr, wtbr, ltwr, ltbr = rate_players(game)
assert wtwr.player.get_latest_rating(OVERALL_OVERALL) == 1006
assert wtbr.player.get_latest_rating(OVERALL_OVERALL) == 1007
assert ltwr.player.get_latest_rating(OVERALL_OVERALL) == 993
assert ltbr.player.get_latest_rating(OVERALL_OVERALL) == 994
def test_ratings_computation_is_idempotent(factories, models):
"""
Ensure that going back and re-computing old game ratings is an idempotent
process.
"""
team_a = factories.TeamFactory()
team_b = factories.TeamFactory()
factories.GameFactory(winning_team=team_a, losing_team=team_b)
game_b = factories.GameFactory(winning_team=team_a, losing_team=team_b)
factories.GameFactory(winning_team=team_a, losing_team=team_b)
first_rating_initial = team_a.ratings.get(
game=game_b,
).rating
rate_teams(game_b)
first_rating_recomputed = team_a.ratings.get(
game=game_b,
).rating
assert first_rating_initial == first_rating_recomputed
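# The expected numbers above are consistent with a plain Elo update using a
# K-factor of 12 and ratings rounded to integers. The engine's real
# implementation is not shown in this file, so the following is only an
# illustrative sketch of that arithmetic, not the project's code.
def _elo_sketch(winner=1000.0, loser=1000.0, k=12.0):
    expected_win = 1.0 / (1.0 + 10 ** ((loser - winner) / 400.0))
    delta = k * (1.0 - expected_win)
    return int(round(winner + delta)), int(round(loser - delta))
# _elo_sketch()            -> (1006, 994), matching test_rate_single_game
# _elo_sketch(1006, 994)   -> (1012, 988), matching test_rate_multiple_games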
|
OCA/l10n-spain
|
l10n_es_aeat_mod123/models/__init__.py
|
Python
|
agpl-3.0
| 87
| 0
|
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from . import mod123
|
jrichte43/ProjectEuler
|
Problem-0283/solutions.py
|
Python
|
gpl-3.0
| 1,243
| 0.008045
|
__problem_title__ = "Integer sided triangles for which the area/perimeter ratio is integral"
__problem_url___ = "https://projecteuler.net/problem=283"
__problem_description__ = "Consider the triangle with sides 6, 8 and 10. It can be seen that the " \
"perimeter and the area are both equal to 24. So the area/perimeter " \
"ratio is equal to 1. Consider also the triangle with sides 13, 14 and " \
"15. The perimeter equals 42 while the area is equal to 84. So for " \
"this triangle the area/perimeter ratio is equal to 2. Find the sum of " \
"the perimeters of all integer sided triangles for which the " \
"area/perimeter ratios are equal to positive integers not exceeding " \
"1000."
import timeit
class Solution():
@staticmethod
def solution1():
pass
@staticmethod
def time_solutions():
setup = 'from __main__ import Solution'
print('Solution 1:', timeit.timeit('Solution.solution1()', setup=setup, number=1))
if __name__ == '__main__':
s = Solution()
print(s.solution1())
|
s.time_solutions()
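# solution1() above is still a stub. As a starting point, the brute force
# below (an illustrative sketch only, far too slow for the real ratio limit
# of 1000) sums the perimeters of integer-sided triangles whose
# area/perimeter ratio is an integer not exceeding max_ratio, using Heron's
# formula; max_side is an arbitrary search bound, not part of the problem.
def _brute_force_sketch(max_ratio=2, max_side=60):
    from math import sqrt
    total = 0
    for a in range(1, max_side + 1):
        for b in range(a, max_side + 1):
            for c in range(b, a + b):  # triangle inequality: c < a + b
                p = a + b + c
                s = p / 2.0
                area_sq = s * (s - a) * (s - b) * (s - c)
                if area_sq <= 0:
                    continue
                ratio = sqrt(area_sq) / p
                if abs(ratio - round(ratio)) < 1e-9 and 1 <= round(ratio) <= max_ratio:
                    total += p
    return total
# With max_ratio=1 this finds, among others, the 6-8-10 triangle from the
# problem statement (area == perimeter == 24).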
|
profxj/old_xastropy
|
xastropy/xguis/spec_guis.py
|
Python
|
bsd-3-clause
| 22,801
| 0.006316
|
"""
#;+
#; NAME:
#; spec_guis
#; Version 1.0
#;
#; PURPOSE:
#; Module for Spectroscopy Guis with QT
#; These call pieces from spec_widgets
#; 12-Dec-2014 by JXP
#;-
#;------------------------------------------------------------------------------
"""
from __future__ import print_function, absolute_import, division, unicode_literals
# Import libraries
import numpy as np
import os, sys
import matplotlib.pyplot as plt
import glob
from PyQt4 import QtGui
from PyQt4 import QtCore
from matplotlib import mpl
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar
# Matplotlib Figure object
from matplotlib.figure import Figure
from astropy.units import Quantity
from astropy import units as u
from linetools.lists.linelist import LineList
from xastropy.xutils import xdebug as xdb
from xastropy.xguis import spec_widgets as xspw
#class XSpecGui(QtGui.QMainWindow):
#class XAbsIDGui(QtGui.QMainWindow):
#class XVelPltGui(QtGui.QDialog):
# x_specplot replacement
class XSpecGui(QtGui.QMainWindow):
''' GUI to replace XIDL x_specplot
12-Dec-2014 by JXP v1.0
27-Mar-2015 by JXP v2.0 :: EW, column, better zooming + panning
'''
def __init__(self, spec, parent=None, zsys=None, norm=None):
QtGui.QMainWindow.__init__(self, parent)
'''
spec = Spectrum1D
'''
        mpl.rcParams['agg.path.chunksize'] = 20000 # Needed to avoid crash in large spectral files
# Build a widget combining several others
self.main_widget = QtGui.QWidget()
# Status bar
self.create_status_bar()
# Grab the pieces and tie together
self.pltline_widg = xspw.PlotLinesWidget(status=self.statusBar, init_z=zsys)
self.pltline_widg.setMaximumWidth(300)
# Hook the spec widget to Plot Line
self.spec_widg = xspw.ExamineSpecWidget(spec,status=self.statusBar,
llist=self.pltline_widg.llist,
zsys=zsys, norm=norm)
self.pltline_widg.spec_widg = self.spec_widg
self.spec_widg.canvas.mpl_connect('button_press_event', self.on_click)
extras = QtGui.QWidget()
extras.setMaximumWidth(130)
vbox = QtGui.QVBoxLayout()
qbtn = QtGui.QPushButton('Quit', self)
qbtn.clicked.connect(self.quit)
vbox.addWidget(self.pltline_widg)
vbox.addWidget(qbtn)
extras.setLayout(vbox)
hbox = QtGui.QHBoxLayout()
hbox.addWidget(self.spec_widg)
hbox.addWidget(extras)
self.main_widget.setLayout(hbox)
# Point MainWindow
self.setCentralWidget(self.main_widget)
def create_status_bar(self):
self.status_text = QtGui.QLabel("XSpec")
self.statusBar().addWidget(self.status_text, 1)
def on_click(self,event):
if event.button == 3: # Set redshift
if self.pltline_widg.llist['List'] is None:
return
self.select_line_widg = xspw.SelectLineWidget(
self.pltline_widg.llist[self.pltline_widg.llist['List']]._data)
self.select_line_widg.exec_()
line = self.select_line_widg.line
if line.strip() == 'None':
return
#
quant = line.split('::')[1].lstrip()
spltw = quant.split(' ')
wrest = Quantity(float(spltw[0]), unit=spltw[1])
z = event.xdata/wrest.value - 1.
self.pltline_widg.llist['z'] = z
self.statusBar().showMessage('z = {:f}'.format(z))
self.pltline_widg.zbox.setText('{:.5f}'.format(self.pltline_widg.llist['z']))
# Draw
self.spec_widg.on_draw()
# Quit
def quit(self):
self.close()
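# The right-click handler above sets the redshift from the position of the
# click: with wrest the rest wavelength of the chosen line and event.xdata
# the observed wavelength, z = lambda_obs / lambda_rest - 1.
# For example (illustrative numbers only), clicking at 5000 Angstrom on the
# Lyman-alpha line (1215.67 Angstrom rest) gives z = 5000/1215.67 - 1 ~ 3.11.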
# GUI for Identifying many (all) Abs Systems in a Spectrum
class XAbsIDGui(QtGui.QMainWindow):
''' GUI to analyze absorption systems in a spectrum
16-Dec-2014 by JXP
'''
def __init__(self, spec, parent=None, abssys_dir=None, absid_list=None, norm=True,
srch_id=True, id_dir='ID_LINES/', second_file=None):
QtGui.QMainWindow.__init__(self, parent)
'''
spec = Spectrum1D
second_file = Second spectrum file (e.g. COS + STIS)
'''
# Build a widget combining several others
self.main_widget = QtGui.QWidget()
# Status bar
self.create_status_bar()
|
# Initialize
if absid_list is None:
            # Automatically search for ID files
if srch_id:
absid_list = glob.glob(id_dir+'*id.fits')
else:
absid_list = []
# Grab the pieces and tie together
self.abssys_widg = xspw.AbsSysWidget(absid_list)
self.pltline_widg = xspw.PlotLinesWidget(status=self.statusBar)
self.spec_widg = xspw.ExamineSpecWidget(spec,status=self.statusBar,
llist=self.pltline_widg.llist, norm=norm,
second_file=second_file,
abs_sys=self.abssys_widg.abs_sys)
self.pltline_widg.spec_widg = self.spec_widg
# Connections
self.spec_widg.canvas.mpl_connect('button_press_event', self.on_click)
self.spec_widg.canvas.mpl_connect('key_press_event', self.on_key)
self.abssys_widg.refine_button.clicked.connect(self.refine_abssys)
# Layout
anly_widg = QtGui.QWidget()
anly_widg.setMaximumWidth(300)
anly_widg.setMinimumWidth(150)
vbox = QtGui.QVBoxLayout()
vbox.addWidget(self.pltline_widg)
vbox.addWidget(self.abssys_widg)
anly_widg.setLayout(vbox)
hbox = QtGui.QHBoxLayout()
hbox.addWidget(self.spec_widg)
hbox.addWidget(anly_widg)
self.main_widget.setLayout(hbox)
# Point MainWindow
self.setCentralWidget(self.main_widget)
def create_status_bar(self):
self.status_text = QtGui.QLabel("XAbsID")
self.statusBar().addWidget(self.status_text, 1)
def on_key(self,event):
if event.key == 'v': # Stack plot
if self.spec_widg.vplt_flg == 1:
self.abssys_widg.add_fil(self.spec_widg.outfil)
self.abssys_widg.reload()
# Update line list
idx = self.pltline_widg.lists.index(self.spec_widg.llist['List'])
self.pltline_widg.llist_widget.setCurrentRow(idx)
elif event.key == '?': # Check for a match with known systems
wv_chosen = event.xdata
# Load grb
llist = xspw.set_llist('grb.lst')
# Loop through systems
for iabs_sys in self.abssys_widg.all_abssys:
z = iabs_sys.zabs
wvobs = np.array((1+z) * llist['grb.lst']['wrest'])
mtwv = np.where( np.abs( wvobs-wv_chosen ) < 0.2 )[0]
for imt in mtwv:
#QtCore.pyqtRemoveInputHook()
#xdb.set_trace()
#QtCore.pyqtRestoreInputHook()
print('z={:g}, {:s}, f={:g}'.format(z,
llist['grb.lst']['name'][imt],
llist['grb.lst']['fval'][imt]))
if len(mtwv) == 0:
print('No match. wrest={:g} for z={:g}'.format(wv_chosen/(1+z), z))
def on_click(self,event):
if event.button == 3: # Set redshift
# Line list?
try:
self.pltline_widg.llist['List']
except KeyError:
print('Set a line list first!!')
return
#
if self.pltline_widg.llist[self.pltline_widg.llist['List']] == 'None':
return
self.select_line_widg = xspw.SelectLineWidget(
self.pltline_widg.llist[self.pltline_widg.llist['List']]._data)
self.select_line_widg.exec_()
line = self.select_line_widg.line
|
varses/awsch
|
examples/using_blocks/example1-simpleloop.py
|
Python
|
bsd-3-clause
| 1,304
| 0.003067
|
# -*- coding: utf-8 -*-
"""
example1-simpleloop
~~~~~~~~~~~~~~~~~~~
This example shows how to use the loop block backend and frontend.
:copyright: 2015 by Lantz Authors, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
# From lantz, you import a helper function.
from lantz.ui.app import start_gui_app
# and the loop block and its user interface
from lantz.ui.blocks import Loop, LoopUi
# the drivers you need (In this case just simulated dummy drivers).
from lantz.drivers.examples.dummydrivers import DummyOsci
# Drivers are instantiated in the usual way.
osci = DummyOsci('COM2')
# You create a function that will be called by the loop
# It requires three parameters
# counter - the iteration number
# iterations - total number of iterations
# overrun - a boolean indicating if the time required for the operation
# is longer than the interval.
def measure(counter, iterations, overrun):
print(counter, iterations, overrun)
    data = osci.measure()
print(data)
# You instantiate the loop
app = Loop()
# and assign the function to the body of the loop
app.body = measure
# Finally you start the program
start_gui_app(app, LoopUi)
# This contains a very complete GUI for a loop you can easily create a customized version!
|
bitmazk/cmsplugin-filer-image-translated
|
cmsplugin_filer_image_translated/migrations/0004_auto__add_imagetranslationtranslation__add_unique_imagetranslationtran.py
|
Python
|
mit
| 12,501
| 0.007919
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'ImageTranslationTranslation'
db.create_table('cmsplugin_filer_image_translated_imagetranslation_translation', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=256, blank=True)),
('description', self.gf('django.db.models.fields.TextField')(max_length=256, blank=True)),
('alt_text', self.gf('django.db.models.fields.CharField')(max_length=512, blank=True)),
('caption', self.gf('django.db.models.fields.CharField')(max_length=512, blank=True)),
('language_code', self.gf('django.db.models.fields.CharField')(max_length=15, db_index=True)),
('master', self.gf('django.db.models.fields.related.ForeignKey')(related_name='translations', null=True, to=orm['cmsplugin_filer_image_translated.ImageTranslation'])),
))
db.send_create_signal('cmsplugin_filer_image_translated', ['ImageTranslationTranslation'])
# Adding unique constraint on 'ImageTranslationTranslation', fields ['language_code', 'master']
db.create_unique('cmsplugin_filer_image_translated_imagetranslation_translation', ['language_code', 'master_id'])
# Adding model 'ImageTranslation'
db.create_table('cmsplugin_filer_image_translated_imagetranslation', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('image', self.gf('django.db.models.fields.related.OneToOneField')(related_name='translation', unique=True, to=orm['filer.Image'])),
))
db.send_create_signal('cmsplugin_filer_image_translated', ['ImageTranslation'])
def backwards(self, orm):
# Removing unique constraint on 'ImageTranslationTranslation', fields ['language_code', 'master']
db.delete_unique('cmsplugin_filer_image_translated_imagetranslation_translation', ['language_code', 'master_id'])
# Deleting model 'ImageTranslationTranslation'
db.delete_table('cmsplugin_filer_image_translated_imagetranslation_translation')
# Deleting model 'ImageTranslation'
db.delete_table('cmsplugin_filer_image_translated_imagetranslation')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cmsplugin_filer_image_translated.imagetranslation': {
'Meta': {'object_name': 'ImageTranslation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'translation'", 'unique': 'True', 'to': "orm['filer.Image']"})
},
'cmsplugin_filer_image_translated.imagetranslationrenamed': {
'Meta': {'object_name': 'ImageTranslationRenamed'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['filer.Image']"}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'trans_alt_text': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'trans_caption': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'trans_description': ('django.db.models.fields.TextField', [], {'max_length': '256', 'blank': 'True'}),
'trans_name': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'})
},
'cmsplugin_filer_image_translated.imagetranslationtranslation': {
'Meta': {'unique_together': "[('language_code', 'master')]", 'object_name': 'ImageTranslationTranslation', 'db_table': "'cmsplugin_filer_image_translated_imagetranslation_translation'"},
'alt_text': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'caption': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '256', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'translations'", 'null': 'True', 'to': "orm['cmsplugin_filer_image_translated.ImageTranslation']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'filer.file': {
'Meta': {'object_name': 'File'},
'_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'file': ('dj
|
matternet/ardupilot
|
libraries/AP_Terrain/tools/create_terrain.py
|
Python
|
gpl-3.0
| 11,310
| 0.004156
|
#!/usr/bin/env python
'''
create ardupilot terrain database files
'''
from MAVProxy.modules.mavproxy_map import srtm
import math, struct, os, sys
import crc16, time, struct
# MAVLink sends 4x4 grids
TERRAIN_GRID_MAVLINK_SIZE = 4
# a 2k grid_block on disk contains 8x7 of the mavlink grids. Each
# grid block overlaps by one with its neighbour. This ensures that
# the altitude at any point can be calculated from a single grid
# block
TERRAIN_GRID_BLOCK_MUL_X = 7
TERRAIN_GRID_BLOCK_MUL_Y = 8
# this is the spacing between 32x28 grid blocks, in grid_spacing units
TERRAIN_GRID_BLOCK_SPACING_X = ((TERRAIN_GRID_BLOCK_MUL_X-1)*TERRAIN_GRID_MAVLINK_SIZE)
TERRAIN_GRID_BLOCK_SPACING_Y = ((TERRAIN_GRID_BLOCK_MUL_Y-1)*TERRAIN_GRID_MAVLINK_SIZE)
# giving a total grid size of a disk grid_block of 32x28
TERRAIN_GRID_BLOCK_SIZE_X = (TERRAIN_GRID_MAVLINK_SIZE*TERRAIN_GRID_BLOCK_MUL_X)
TERRAIN_GRID_BLOCK_SIZE_Y = (TERRAIN_GRID_MAVLINK_SIZE*TERRAIN_GRID_BLOCK_MUL_Y)
# format of grid on disk
TERRAIN_GRID_FORMAT_VERSION = 1
IO_BLOCK_SIZE = 2048
GRID_SPACING = 100
def to_float32(f):
'''emulate single precision float'''
return struct.unpack('f', struct.pack('f',f))[0]
LOCATION_SCALING_FACTOR = to_float32(0.011131884502145034)
LOCATION_SCALING_FACTOR_INV = to_float32(89.83204953368922)
def longitude_scale(lat):
'''get longitude scale factor'''
scale = to_float32(math.cos(to_float32(math.radians(lat))))
return max(scale, 0.01)
def get_distance_NE_e7(lat1, lon1, lat2, lon2):
'''get distance tuple between two positions in 1e7 format'''
return ((lat2 - lat1) * LOCATION_SCALING_FACTOR, (lon2 - lon1) * LOCATION_SCALING_FACTOR * longitude_scale(lat1*1.0e-7))
def add_offset(lat_e7, lon_e7, ofs_north, ofs_east):
'''add offset in meters to a position'''
dlat = int(float(ofs_north) * LOCATION_SCALING_FACTOR_INV)
dlng = int((float(ofs_east) * LOCATION_SCALING_FACTOR_INV) / longitude_scale(lat_e7*1.0e-7))
return (int(lat_e7+dlat), int(lon_e7+dlng))
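# Quick sanity check of the scaling constants above (illustrative only): one
# degree of latitude is 1e7 units of 1e-7 degrees, and
# 1e7 * LOCATION_SCALING_FACTOR ~ 111,319 m, the usual length of a degree of
# latitude. Going the other way, add_offset(0, 0, 100, 0) shifts the latitude
# by about 100 * LOCATION_SCALING_FACTOR_INV ~ 8983 units of 1e-7 degrees,
# i.e. roughly 100 m north of the equator.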
def east_blocks(lat_e7, lon_e7):
'''work out how many blocks per stride on disk'''
lat2_e7 = lat_e7
lon2_e7 = lon_e7 + 10*1000*1000
# shift another two blocks east to ensure room is available
lat2_e7, lon2_e7 = add_offset(lat2_e7, lon2_e7, 0, 2*GRID_SPACING*TERRAIN_GRID_BLOCK_SIZE_Y)
offset = get_distance_NE_e7(lat_e7, lon_e7, lat2_e7, lon2_e7)
return int(offset[1] / (GRID_SPACING*TERRAIN_GRID_BLOCK_SPACING_Y))
def pos_from_file_offset(lat_degrees, lon_degrees, file_offset):
'''return a lat/lon in 1e7 format given a file offset'''
ref_lat = int(lat_degrees*10*1000*1000)
ref_lon = int(lon_degrees*10*1000*1000)
stride = east_blocks(ref_lat, ref_lon)
blocks = file_offset // IO_BLOCK_SIZE
grid_idx_x = blocks // stride
grid_idx_y = blocks % stride
idx_x = grid_idx_x * TERRAIN_GRID_BLOCK_SPACING_X
idx_y = grid_idx_y * TERRAIN_GRID_BLOCK_SPACING_Y
offset = (idx_x * GRID_SPACING, idx_y * GRID_SPACING)
(lat_e7, lon_e7) = add_offset(ref_lat, ref_lon, offset[0], offset[1])
offset = get_distance_NE_e7(ref_lat, ref_lon, lat_e7, lon_e7)
grid_idx_x = int(idx_x / TERRAIN_GRID_BLOCK_SPACING_X)
grid_idx_y = int(idx_y / TERRAIN_GRID_BLOCK_SPACING_Y)
(lat_e7, lon_e7) = add_offset(ref_lat, ref_lon,
grid_idx_x * TERRAIN_GRID_BLOCK_SPACING_X * float(GRID_SPACING),
grid_idx_y * TERRAIN_GRID_BLOCK_SPACING_Y * float(GRID_SPACING))
return (lat_e7, lon_e7)
class GridBlock(object):
def __init__(self, lat_int, lon_int, lat, lon):
'''
a grid block is a structure in a local file containing height
information. Each grid block is 2048 bytes in size, to keep file IO to
block oriented SD cards efficient
'''
# crc of whole block, taken with crc=0
self.crc = 0
# format version number
self.version = TERRAIN_GRID_FORMAT_VERSION
# grid spacing in meters
self.spacing = GRID_SPACING
# heights in meters over a 32*28 grid
self.height = []
for x in range(TERRAIN_GRID_BLOCK_SIZE_X):
self.height.append([0]*TERRAIN_GRID_BLOCK_SIZE_Y)
# bitmap of 4x4 grids filled in from GCS (56 bits are used)
self.bitmap = (1<<56)-1
lat_e7 = int(lat * 1.0e7)
lon_e7 = int(lon * 1.0e7)
# grids start on integer degrees. This makes storing terrain data on
# the SD card a bit easier. Note that this relies on the python floor
# behaviour with integer division
self.lat_degrees = lat_int
self.lon_degrees = lon_int
# create reference position for this rounded degree position
ref_lat = self.lat_degrees*10*1000*1000
ref_lon = self.lon_degrees*10*1000*1000
# find offset from reference
offset = get_distance_NE_e7(ref_lat, ref_lon, lat_e7, lon_e7)
offset = (round(offset[0]), round(offset[1]))
# get indices in terms of grid_spacing elements
idx_x = int(offset[0] / GRID_SPACING)
idx_y = int(offset[1] / GRID_SPACING)
# find indexes into 32*28 grids for this degree reference. Note
# the use of TERRAIN_GRID_BLOCK_SPACING_{X,Y} which gives a one square
# overlap between grids
self.grid_idx_x = idx_x // TERRAIN_GRID_BLOCK_SPACING_X
self.grid_idx_y = idx_y // TERRAIN_GRID_BLOCK_SPACING_Y
# calculate lat/lon of SW corner of 32*28 grid_block
(ref_lat, ref_lon) = add_offset(ref_lat, ref_lon,
self.grid_idx_x * TERRAIN_GRID_BLOCK_SPACING_X * float(GRID_SPACING),
                                        self.grid_idx_y * TERRAIN_GRID_BLOCK_SPACING_Y * float(GRID_SPACING))
self.lat = ref_lat
self.lon = ref_lon
def fill(self, gx, gy, altitude):
        '''fill a square'''
self.height[gx][gy] = int(altitude)
def blocknum(self):
'''find IO block number'''
stride = east_blocks(self.lat_degrees*1e7, self.lon_degrees*1e7)
return stride * self.grid_idx_x + self.grid_idx_y
class DataFile(object):
def __init__(self, lat, lon):
if lat < 0:
NS = 'S'
else:
NS = 'N'
if lon < 0:
EW = 'W'
else:
EW = 'E'
name = "terrain/%c%02u%c%03u.DAT" % (NS, min(abs(int(lat)), 99),
EW, min(abs(int(lon)), 999))
try:
os.mkdir("terrain")
except Exception:
pass
if not os.path.exists(name):
self.fh = open(name, 'w+b')
else:
self.fh = open(name, 'r+b')
def seek_offset(self, block):
'''seek to right offset'''
# work out how many longitude blocks there are at this latitude
file_offset = block.blocknum() * IO_BLOCK_SIZE
self.fh.seek(file_offset)
def pack(self, block):
'''pack into a block'''
buf = bytes()
buf += struct.pack("<QiiHHH", block.bitmap, block.lat, block.lon, block.crc, block.version, block.spacing)
for gx in range(TERRAIN_GRID_BLOCK_SIZE_X):
buf += struct.pack("<%uh" % TERRAIN_GRID_BLOCK_SIZE_Y, *block.height[gx])
buf += struct.pack("<HHhb", block.grid_idx_x, block.grid_idx_y, block.lon_degrees, block.lat_degrees)
return buf
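    # Layout sanity check (derived from the struct formats above): the header
    # "<QiiHHH" is 22 bytes, the 28 rows of 32 int16 heights are 1792 bytes,
    # and the "<HHhb" trailer is 7 bytes, 1821 bytes in total -- safely inside
    # the 2048-byte IO_BLOCK_SIZE each grid block is written into.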
def write(self, block):
'''write a grid block'''
self.seek_offset(block)
block.crc = 0
buf = self.pack(block)
block.crc = crc16.crc16xmodem(buf)
buf = self.pack(block)
self.fh.write(buf)
def check_filled(self, block):
'''read a grid block and check if already filled'''
self.seek_offset(block)
buf = self.fh.read(IO_BLOCK_SIZE)
if len(buf) != IO_BLOCK_SIZE:
return False
(bitmap, lat, lon, crc, version, spacing) = struct.unpack("<QiiHHH", buf[:22])
if (version != TERRAIN_GRID_FORMAT_VERSION or
abs(lat - block.lat)>2 or
abs(lon - block.lon)>2 or
spacing != GRID_SPACING or
bitma
|
sundrome21/FilterZZ
|
program.py
|
Python
|
mit
| 519
| 0.001927
|
import os
class Program:
socketColorBoTe = "255 255 255 255"
socketColorBa = "77 87 152 255"
progColorRareBoTe = "0 0 0 255"
    progColorRareBa = "240 220 180 255"
progColorElseBoTe = "77 87 152 255"
    progColorElseBa = "0 0 0 255"
    def createFile(self):
        # Expand '~' explicitly; os.path.exists and open do not resolve it themselves.
        dest_dir = os.path.expanduser('~/dest')
        filepath = os.path.join(dest_dir, "filterZZ.filter")
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)
        self.f = open(filepath, "w")
def addNewLine(self):
self.f.write("\n\n")
|
timberline-secondary/hackerspace
|
src/courses/migrations/0016_grades_initialdata.py
|
Python
|
gpl-3.0
| 963
| 0.001038
|
# Generated by Django 2.2.12 on 2020-05-09 06:28
from django.db import migrations
# Can't use fixtures because load_fixtures method is janky with django-tenant-schemas
def load_initial_data(apps, schema_editor):
Grade = apps.get_model('courses', 'Grade')
# add some initial data if none has been created yet
if not Grade.objects.exists():
Grade.objects.create(
name="8",
value=8
)
Grade.objects.create(
name="9",
value=9
)
        Grade.objects.create(
name="10",
value=10
)
Grade.objects.create(
name="11",
value=11
)
Grade.objects.create(
name="12",
value=12
)
class Migration(migrations.Migration):
dependencies = [
        ('courses', '0015_auto_20200508_1957'),
]
operations = [
migrations.RunPython(load_initial_data),
]
|
slackhq/python-slackclient
|
slack/web/async_slack_response.py
|
Python
|
mit
| 6,347
| 0.000788
|
"""A Python module for interacting and consuming responses from Slack."""
import logging
import slack.errors as e
from slack.web.internal_utils import _next_cursor_is_present
class AsyncSlackResponse:
"""An iterable container of response data.
Attributes:
data (dict): The json-encoded content of the response. Along
with the headers and status code information.
Methods:
validate: Check if the response from Slack was successful.
get: Retrieves any key from the response data.
next: Retrieves the next portion of results,
if 'next_cursor' is present.
Example:
```python
import os
import slack
client = slack.AsyncWebClient(token=os.environ['SLACK_API_TOKEN'])
response1 = await client.auth_revoke(test='true')
assert not response1['revoked']
response2 = await client.auth_test()
assert response2.get('ok', False)
users = []
async for page in await client.users_list(limit=2):
users = users + page['members']
```
Note:
Some responses return collections of information
like channel and user lists. If they do it's likely
that you'll only receive a portion of results. This
object allows you to iterate over the response which
makes subsequent API requests until your code hits
'break' or there are no more results to be found.
Any attributes or methods prefixed with _underscores are
        intended to be "private" internal use only. They may be changed or removed at anytime.
"""
def __init__(
self,
*,
client, # AsyncWebClient
http_verb: str,
api_url: str,
req_args: dict,
data: dict,
headers: dict,
status_code: int,
):
|
self.http_verb = http_verb
self.api_url = api_url
self.req_args = req_args
self.data = data
self.headers = headers
self.status_code = status_code
self._initial_data = data
self._iteration = None # for __iter__ & __next__
self._client = client
self._logger = logging.getLogger(__name__)
def __str__(self):
"""Return the Response data if object is converted to a string."""
if isinstance(self.data, bytes):
raise ValueError(
"As the response.data is binary data, this operation is unsupported"
)
return f"{self.data}"
def __contains__(self, key: str) -> bool:
return self.get(key) is not None
def __getitem__(self, key):
"""Retrieves any key from the data store.
Note:
This is implemented so users can reference the
SlackResponse object like a dictionary.
e.g. response["ok"]
Returns:
The value from data or None.
"""
if isinstance(self.data, bytes):
raise ValueError(
"As the response.data is binary data, this operation is unsupported"
)
if self.data is None:
raise ValueError(
"As the response.data is empty, this operation is unsupported"
)
return self.data.get(key, None)
def __aiter__(self):
"""Enables the ability to iterate over the response.
It's required async-for the iterator protocol.
Note:
This enables Slack cursor-based pagination.
Returns:
(AsyncSlackResponse) self
"""
self._iteration = 0
self.data = self._initial_data
return self
async def __anext__(self):
"""Retrieves the next portion of results, if 'next_cursor' is present.
Note:
Some responses return collections of information
like channel and user lists. If they do it's likely
that you'll only receive a portion of results. This
method allows you to iterate over the response until
your code hits 'break' or there are no more results
to be found.
Returns:
(AsyncSlackResponse) self
With the new response data now attached to this object.
Raises:
SlackApiError: If the request to the Slack API failed.
StopAsyncIteration: If 'next_cursor' is not present or empty.
"""
self._iteration += 1
if self._iteration == 1:
return self
if _next_cursor_is_present(self.data): # skipcq: PYL-R1705
params = self.req_args.get("params", {})
if params is None:
params = {}
params.update({"cursor": self.data["response_metadata"]["next_cursor"]})
self.req_args.update({"params": params})
response = await self._client._request( # skipcq: PYL-W0212
http_verb=self.http_verb,
api_url=self.api_url,
req_args=self.req_args,
)
self.data = response["data"]
self.headers = response["headers"]
self.status_code = response["status_code"]
return self.validate()
else:
raise StopAsyncIteration
def get(self, key, default=None):
"""Retrieves any key from the response data.
Note:
This is implemented so users can reference the
SlackResponse object like a dictionary.
e.g. response.get("ok", False)
Returns:
The value from data or the specified default.
"""
if isinstance(self.data, bytes):
raise ValueError(
"As the response.data is binary data, this operation is unsupported"
)
if self.data is None:
return None
return self.data.get(key, default)
def validate(self):
"""Check if the response from Slack was successful.
Returns:
(AsyncSlackResponse)
This method returns it's own object. e.g. 'self'
Raises:
SlackApiError: The request to the Slack API failed.
"""
if self.status_code == 200 and self.data and self.data.get("ok", False):
return self
msg = "The request to the Slack API failed."
raise e.SlackApiError(message=msg, response=self)
|
saltstack/salt
|
salt/states/ddns.py
|
Python
|
apache-2.0
| 4,297
| 0.000699
|
"""
Dynamic DNS updates
===================
Ensure a DNS record is present or absent utilizing RFC 2136
type dynamic updates.
:depends: - `dnspython <http://www.dnspython.org/>`_
.. note::
    The ``dnspython`` module is required when managing DDNS using a TSIG key.
If you are not using a TSIG key, DDNS is allowed by ACLs based on IP
address and the ``dnspython`` module is not required.
Example:
.. code-block:: yaml
webserver:
ddns.present:
- zone: example.com
- ttl: 60
- data: 111.222.333.444
- nameserver: 123.234.345.456
- keyfile: /srv/salt/dnspy_tsig_key.txt
"""
def __virtual__():
if "ddns.update" in __salt__:
return "ddns"
return (False, "ddns module could not be loaded")
def present(name, zone, ttl, data, rdtype="A", **kwargs):
"""
Ensures that the named DNS record is present with the given ttl.
name
The host portion of the DNS record, e.g., 'webserver'. Name and zone
are concatenated when the entry is created unless name includes a
trailing dot, so make sure that information is not duplicated in these
two arguments.
zone
The zone to check/update
ttl
TTL for the record
data
Data for the DNS record. E.g., the IP address for an A record.
rdtype
DNS resource type. Default 'A'.
``**kwargs``
Additional arguments the ddns.update function may need (e.g.
nameserver, keyfile, keyname). Note that the nsupdate key file can’t
be reused by this function, the keyfile and other arguments must
follow the `dnspython <http://www.dnspython.org/>`_ spec.
"""
ret = {"name": name, "changes": {}, "result": False, "comment": ""}
if __opts__["test"]:
ret["result"] = None
ret["comment"] = '{} record "{}" will be updated'.format(rdtype, name)
return ret
status = __salt__["ddns.update"](zone, name, ttl, rdtype, data, **kwargs)
if status is None:
ret["result"] = True
ret["comment"] = '{} record "{}" already present with ttl of {}'.format(
rdtype, name, ttl
)
elif status:
ret["result"] = True
ret["comment"] = 'Updated {} record for "{}"'.format(rdtype, name)
ret["changes"] = {
"name": name,
"zone": zone,
"ttl": ttl,
"rdtype": rdtype,
"data": data,
}
else:
ret["result"] = False
ret["comment"] = 'Failed to create or update {} record for "{}"'.format(
rdtype, name
)
return ret
def absent(name, zone, data=None, rdtype=None, **kwargs):
"""
Ensures that the named DNS record is absent.
name
The host portion of the DNS record, e.g., 'webserver'. Name and zone
are concatenated when the entry is created unless name includes a
trailing dot, so make sure that information is not duplicated in these
two arguments.
zone
The zone to check
data
Data for the DNS record. E.g., the IP address for an A record. If omitted,
all records matching name (and rdtype, if provided) will be purged.
rdtype
DNS resource type. If omitted, all types will be purged.
``**kwargs``
Additional arguments the ddns.update function may need (e.g.
nameserver, keyfile, keyname). Note that the nsupdate key file can’t
be reused by this function, the keyfile and other arguments must
follow the `dnspython <http://www.dnspython.org/>`_ spec.
"""
ret = {"name": name, "changes": {}, "result": False, "comment": ""}
if __opts__["test"]:
ret["result"] = None
ret["comment"] = '{} record "{}" will be deleted'.format(rdtype, name)
return ret
status = __salt__["ddns.delete"](zone, name, rdtype, data, **kwargs)
if status is None:
ret["result"] = True
ret["comment"] = "No matching DNS record(s) present"
elif status:
ret["result"] = True
ret["comment"] = "Deleted DNS record(s)"
ret["changes"] = {"Deleted": {"name": name, "zone": zone}}
else:
ret["result"] = False
ret["comment"] = "Failed to delete DNS record(s)"
return ret
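# An absent-state example analogous to the present-state example in the
# module docstring (hypothetical values, shown only for symmetry):
#
# .. code-block:: yaml
#
#     oldserver:
#       ddns.absent:
#         - zone: example.com
#         - rdtype: A
#         - nameserver: 123.234.345.456
#         - keyfile: /srv/salt/dnspy_tsig_key.txt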
|
rabipanda/tensorflow
|
tensorflow/python/debug/wrappers/local_cli_wrapper.py
|
Python
|
apache-2.0
| 25,959
| 0.003929
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Debugger Wrapper Session Consisting of a Local Curses-based CLI."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import shutil
import sys
import tempfile
# Google-internal import(s).
from tensorflow.python.debug.cli import analyzer_cli
from tensorflow.python.debug.cli import cli_shared
from tensorflow.python.debug.cli import command_parser
from tensorflow.python.debug.cli import debugger_cli_common
from tensorflow.python.debug.cli import profile_analyzer_cli
from tensorflow.python.debug.cli import stepper_cli
from tensorflow.python.debug.cli import ui_factory
from tensorflow.python.debug.lib import common
from tensorflow.python.debug.lib import debug_data
from tensorflow.python.debug.wrappers import framework
_DUMP_ROOT_PREFIX = "tfdbg_"
class LocalCLIDebugWrapperSession(framework.BaseDebugWrapperSession):
"""Concrete subclass of BaseDebugWrapperSession implementing a local CLI.
This class has all the methods that a `session.Session` object has, in order
to support debugging with minimal code changes. Invoking its `run()` method
will launch the command-line interface (CLI) of tfdbg.
"""
def __init__(self,
sess,
dump_root=None,
log_usage=True,
ui_type="curses",
thread_name_filter=None):
"""Constructor of LocalCLIDebugWrapperSession.
Args:
sess: The TensorFlow `Session` object being wrapped.
dump_root: (`str`) optional path to the dump root directory. Must be a
directory that does not exist or an empty directory. If the directory
does not exist, it will be created by the debugger core during debug
`run()` calls and removed afterwards. If `None`, the debug dumps will
be at tfdbg_<random_string> under the system temp directory.
log_usage: (`bool`) whether the usage of this class is to be logged.
ui_type: (`str`) requested UI type. Currently supported:
(curses | readline)
thread_name_filter: Regular-expression white list for thread name. See
the doc of `BaseDebugWrapperSession` for details.
Raises:
ValueError: If dump_root is an existing and non-empty directory or if
dump_root is a file.
"""
if log_usage:
pass # No logging for open-source.
framework.BaseDebugWrapperSession.__init__(
self, sess, thread_name_filter=thread_name_filter)
if not dump_root:
self._dump_root = tempfile.mktemp(prefix=_DUMP_ROOT_PREFIX)
else:
dump_root = os.path.expanduser(dump_root)
if os.path.isfile(dump_root):
raise ValueError("dump_root path points to a file: %s" % dump_root)
elif os.path.isdir(dump_root) and os.listdir(dump_root):
raise ValueError("dump_root path points to a non-empty directory: %s" %
dump_root)
self._dump_root = dump_root
self._initialize_argparsers()
# Registered tensor filters.
self._tensor_filters = {}
# Register frequently-used filter(s).
self.add_tensor_filter("has_inf_or_nan", debug_data.has_inf_or_nan)
# Below are the state variables of this wrapper object.
# _active_tensor_filter: what (if any) tensor filter is in effect. If such
# a filter is in effect, this object will call run() method of the
# underlying TensorFlow Session object until the filter passes. This is
# activated by the "-f" flag of the "run" command.
# _run_through_times: keeps track of how many times the wrapper needs to
# run through without stopping at the run-end CLI. It is activated by the
# "-t" option of the "run" command.
# _skip_debug: keeps track of whether the current run should be executed
# without debugging. It is activated by the "-n" option of the "run"
# command.
#
# _run_start_response: keeps track what OnRunStartResponse the wrapper
# should return at the next run-start callback. If this information is
# unavailable (i.e., is None), the run-start CLI will be launched to ask
# the user. This is the case, e.g., right before the first run starts.
self._active_tensor_filter = None
self._active_tensor_filter_run_start_response = None
self._run_through_times = 1
self._skip_debug = False
self._run_start_response = None
self._is_run_start = True
self._ui_type = ui_type
def _initialize_argparsers(self):
self._argparsers = {}
ap = argparse.ArgumentParser(
description="Run through, with or without debug tensor watching.",
usage=argparse.SUPPRESS)
ap.add_argument(
"-t",
"--times",
dest="times",
type=int,
default=1,
help="How many Session.run() calls to proceed with.")
ap.add_argument(
"-n",
"--no_debug",
dest="no_debug",
action="store_true",
help="Run through without debug tensor watching.")
ap.add_argument(
"-f",
"--till_filter_pass",
dest="till_filter_pass",
type=str,
default="",
help="Run until a tensor in the graph passes the specified filter.")
ap.add_argument(
"--node_name_filter",
dest="node_name_filter",
type=str,
default="",
help="Regular-expression filter for node names to be watched in the "
"run, e.g., loss, reshape.*")
ap.add_argument(
"--op_type_filter",
dest="op_type_filter",
type=str,
default="",
help="Regular-expression filter for op type to be watched in the run, "
"e.g., (MatMul|Add), Variable.*")
ap.add_argument(
"--tensor_dtype_filter",
dest="tensor_dtype_filter",
type=str,
default="",
help="Regular-expression filter for tensor dtype to be watched in the "
"run, e.g., (float32|float64), int.*")
ap.add_argument(
"-p",
"--profile",
dest="profile",
action="store_true",
help="Run and profile TensorFlow graph execution.")
self._argparsers["run"] = ap
ap = argparse.ArgumentParser(
description="Invoke stepper (cont, step, breakpoint, etc.)",
usage=argparse.SUPPRESS)
self._argparsers["invoke_stepper"] = ap
ap = argparse.ArgumentParser(
description="Display information about this Session.run() call.",
usage=argparse.SUPPRESS)
self._argparsers["run_info"] = ap
    self._argparsers["print_feed"] = command_parser.get_print_tensor_argparser(
"Print the value of a feed in feed_dict.")
def add_tensor_filter(self, filter_name, tensor_filter):
"""Add a tensor filter.
Args:
filter_name: (`str`) name of the filter.
tensor_filter: (`callable`) the filter callable. See the doc string of
`DebugDumpDir.find()` for more details about its signature.
"""
self._tensor_filters[filter_name] = tensor_filter
  def on_session_init(self, request):
"""Overrides on-session-init callback.
Args:
request: An instance of `OnSessionInitRequest`.
Returns:
An instance of `OnSessionInitResponse`.
"""
return framework.OnSessionInitResponse(
framework.OnSessionInitAction.PROCEED)
def on_run_start(self, request):
"""Overrides on-run-start callback.
Invoke the CLI to let user choose what action to take:
`run` / `invoke_stepper`.
Args:
|
kevgathuku/compshop
|
store/tests/test_forms.py
|
Python
|
bsd-3-clause
| 1,654
| 0.008464
|
from django.test import TestCase
from store.forms import ReviewForm
from store.models import Review
from .factories import *
class ReviewFormTest(TestCase):
def test_form_validation_for_blank_items(self):
p1 = ProductFactory.create()
form = ReviewForm(
data={'name':'', 'text': '', 'product':p1.id})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['text'],["Please fill in the review"])
self.assertEqual(form.errors['rating'],["Please leave a rating"])
def test_form_validation_for_invalid_review(self):
p1 = ProductFactory.create()
form = ReviewForm(
data={'name':'', 'text': '', 'rating': 0, 'product':p1.id})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['text'],["Please fill in the review"])
self.assertEqual(form.errors['rating'],["Please leave a valid rating"])
def test_form_validation_for_required_name_field(self):
p1 = ProductFactory.create()
form = ReviewForm(
data={'name':'', 'text': 'Hello', 'rating': 2, 'product':p1.id})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['name'],['Please fill in your name'])
def test_form_save_handles_saving_product_reviews(self):
|
prod = ProductFactory.create()
form = ReviewForm(
data={'name':'Kevin', 'text': 'Review', 'rating': 3, 'product':prod.id})
new_review = form.save()
self.assertEqual(new_review, Review.objects.first())
self.assertEqual(new_review.name, 'Kevin')
        self.assertEqual(new_review.product, prod)
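# ReviewForm itself lives in store/forms.py and is not shown here; the tests
# above imply a ModelForm roughly like the sketch below. Field names and
# error messages are inferred from the assertions, so treat this as
# illustrative rather than the project's actual definition.
#
#     class ReviewForm(forms.ModelForm):
#         class Meta:
#             model = Review
#             fields = ('name', 'text', 'rating', 'product')
#             error_messages = {
#                 'name': {'required': 'Please fill in your name'},
#                 'text': {'required': 'Please fill in the review'},
#                 'rating': {'required': 'Please leave a rating'},
#             }
#
# The rating=0 case ("Please leave a valid rating") suggests an additional
# minimum-value check on rating, e.g. a validator or a clean_rating method.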
|
veryberry/website-addons
|
website_crm_sales_team/__openerp__.py
|
Python
|
lgpl-3.0
| 361
| 0.024931
|
{
"name" : "Add
|
sales team to website leads (OBSOLETE)",
|
"version" : "0.1",
"author" : "IT-Projects LLC, Ivan Yelizariev",
'license': 'GPL-3',
"category" : "Website",
"website" : "https://yelizariev.github.io",
"depends" : ["website_crm"],
#"init_xml" : [],
#"update_xml" : [],
#"active": True,
"installable": True
}
|
jeonghoonkang/BerePi
|
apps/check/monitor.py
|
Python
|
bsd-2-clause
| 1,308
| 0.021407
|
#-*- coding: utf-8 -*-
# Author : Jeonghoonkang, github.com/jeonghoonkang
import platform
import sys
import os
import time
import traceback
import requests
import RPi.GPIO as GPIO
from socket import gethostname
hostname = gethostname()
SERVER_ADDR = '211.184.76.80'
GPIO.setwarnings(False)
GPIO.cleanup()
GPIO.setmode(GPIO.BCM)
GPIO.setup(19,GPIO.OUT) # for LED indicating
GPIO.setup(26, GPIO.OUT) # for LED indicating
def query_last_data_point(bridge_id):
url = 'http://%s/api/raw_bridge_last/?bridge_id=%d' % (SERVER_ADDR, bridge_id)
try:
ret = requests.get(url, timeout=10)
if ret.ok:
ctx = ret.json()
if ctx['code'] == 0:
                return ctx['result']['time'], ctx['result']['value']
except Exception:
#print Exception
pass
return None
bridge_id = int(hostname[5:10])
GPIO.output(26, True) # server connection is OK, showing through LED
while True:
    ret = None  # make sure ret is defined even if the request below fails
    try:
        ret = query_last_data_point(bridge_id)
    except Exception:
        pass
if ret is not None:
t, v = ret
if t > time.time() - 30:
dt = time.time() - t
GPIO.output(19, True)
GPIO.output(26, False)
else:
GPIO.output(19, True)
GPIO.output(26, True)
else:
GPIO.output(19, False)
GPIO.output(26, True)
time.sleep(5.0)
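# Summary of the LED logic above, as read from the code: GPIO 19 lights up
# whenever a data point could be fetched for the bridge, and GPIO 26 lights
# up while the connection is suspect -- both are on when the last sample is
# older than 30 seconds, only 19 is on when the data is fresh, and only 26
# is on when no data could be fetched at all.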
|
CLLKazan/iCQA
|
qa-engine/forum/templatetags/extra_tags.py
|
Python
|
gpl-3.0
| 7,925
| 0.007823
|
import time
import os
import posixpath
import datetime
import math
import re
import logging
from django import template
from django.utils.encoding import smart_unicode
from django.utils.safestring import mark_safe
from forum.models import Question, Answer, QuestionRevision, AnswerRevision, NodeRevision
from django.utils.translation import ugettext as _
from django.utils.translation import ungettext
from django.utils import simplejson
from forum import settings
from django.template.defaulttags import url as default_url
from forum import skins
from forum.utils import html
from extra_filters import decorated_int
from django.core.urlresolvers import reverse
register = template.Library()
GRAVATAR_TEMPLATE = ('<img class="gravatar" width="%(size)s" height="%(size)s" '
'src="http://www.gravatar.com/avatar/%(gravatar_hash)s'
'?s=%(size)s&d=%(default)s&r=%(rating)s" '
'alt="%(username)s\'s gravatar image" />')
@register.simple_tag
def gravatar(user, size):
try:
gravatar = user['gravatar']
username = user['username']
except (TypeError, AttributeError, KeyError):
gravatar = user.gravatar
username = user.username
return mark_safe(GRAVATAR_TEMPLATE % {
'size': size,
'gravatar_hash': gravatar,
'default': settings.GRAVATAR_DEFAULT_IMAGE,
'rating': settings.GRAVATAR_ALLOWED_RATING,
'username': template.defaultfilters.urlencode(username),
})
@register.simple_tag
def get_score_badge(user):
if user.is_suspended():
return _("(suspended)")
repstr = decorated_int(user.reputation, "")
BADGE_TEMPLATE = '<span class="score" title="%(reputation)s %(reputationword)s">%(repstr)s</span>'
if user.gold > 0 :
BADGE_TEMPLATE = '%s%s' % (BADGE_TEMPLATE, '<span title="%(gold)s %(badgesword)s">'
'<span class="badge1">●</span>'
'<span class="badgecount">%(gold)s</span>'
'</span>')
if user.silver > 0:
BADGE_TEMPLATE = '%s%s' % (BADGE_TEMPLATE, '<span title="%(silver)s %(badgesword)s">'
'<span class="silver">●</span>'
'<span class="badgecount">%(silver)s</span>'
'</span>')
if user.bronze > 0:
BADGE_TEMPLATE = '%s%s' % (BADGE_TEMPLATE, '<span title="%(bronze)s %(badgesword)s">'
'<span class="bronze">●</span>'
'<span class="badgecount">%(bronze)s</span>'
'</span>')
BADGE_TEMPLATE = smart_unicode(BADGE_TEMPLATE, encoding='utf-8', strings_only=False, errors='strict')
return mark_safe(BADGE_TEMPLATE % {
'reputation' : user.reputation,
'repstr': repstr,
'gold' : user.gold,
'silver' : user.silver,
'bronze' : user.bronze,
'badgesword' : _('badges'),
'reputationword' : _('reputation points'),
})
@register.simple_tag
def get_age(birthday):
current_time = datetime.datetime(*time.localtime()[0:6])
year = birthday.year
month = birthday.month
day = birthday.day
diff = current_time - datetime.datetime(year, month, day, 0, 0, 0)
return diff.days / 365
@register.simple_tag
def diff_date(date, limen=2):
if not date:
return _('unknown')
now = datetime.datetime.now()
diff = now - date
days = diff.days
hours = int(diff.seconds/3600)
minutes = int(diff.seconds/60)
if days > 2:
if date.year == now.year:
return date.strftime(_("%b %d at %H:%M").encode())
else:
return date.strftime(_("%b %d '%y at %H:%M").encode())
elif days == 2:
return _('2 days ago')
elif days == 1:
return _('yesterday')
elif minutes >= 60:
return ungettext('%(hr)d ' + _("hour ago"), '%(hr)d ' + _("hours ago"), hours) % {'hr':hours}
elif diff.seconds >= 60:
return ungettext('%(min)d ' + _("min ago"), '%(min)d ' + _("mins ago"), minutes) % {'min':minutes}
else:
return ungettext('%(sec)d ' + _("sec ago"), '%(sec)d ' + _("secs ago"), diff.seconds) % {'sec':diff.seconds}
@register.simple_tag
def media(url):
url = skins.find_media_source(url)
if url:
# Create the URL prefix.
url_prefix = settings.FORCE_SCRIPT_NAME + '/m/'
# Make sure any duplicate forward slashes are replaced with a single
# forward slash.
url_prefix = re.sub("/+", "/", url_prefix)
url = url_prefix + url
return url
|
class ItemSeparatorNode(template.Node):
def __init__(self, separator):
sep = separator.strip()
if sep[0] == sep[-1] and sep[0] in ('\'', '"'):
sep = sep[1:-1]
else:
raise template.TemplateSyntaxError('separator in joinitems tag must be quoted')
        self.content = sep
def render(self, context):
return self.content
class BlockMediaUrlNode(template.Node):
def __init__(self, nodelist):
self.items = nodelist
def render(self, context):
prefix = settings.APP_URL + 'm/'
url = ''
if self.items:
url += '/'
for item in self.items:
url += item.render(context)
url = skins.find_media_source(url)
url = prefix + url
out = url
return out.replace(' ', '')
@register.tag(name='blockmedia')
def blockmedia(parser, token):
try:
tagname = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError("blockmedia tag does not use arguments")
nodelist = []
while True:
nodelist.append(parser.parse(('endblockmedia')))
next = parser.next_token()
if next.contents == 'endblockmedia':
break
return BlockMediaUrlNode(nodelist)
@register.simple_tag
def fullmedia(url):
domain = settings.APP_BASE_URL
#protocol = getattr(settings, "PROTOCOL", "http")
path = media(url)
return "%s%s" % (domain, path)
class SimpleVarNode(template.Node):
def __init__(self, name, value):
self.name = name
self.value = template.Variable(value)
def render(self, context):
context[self.name] = self.value.resolve(context)
return ''
class BlockVarNode(template.Node):
def __init__(self, name, block):
self.name = name
self.block = block
def render(self, context):
source = self.block.render(context)
context[self.name] = source.strip()
return ''
@register.tag(name='var')
def do_var(parser, token):
tokens = token.split_contents()[1:]
if not len(tokens) or not re.match('^\w+$', tokens[0]):
raise template.TemplateSyntaxError("Expected variable name")
if len(tokens) == 1:
nodelist = parser.parse(('endvar',))
parser.delete_first_token()
return BlockVarNode(tokens[0], nodelist)
elif len(tokens) == 3:
return SimpleVarNode(tokens[0], tokens[2])
raise template.TemplateSyntaxError("Invalid number of arguments")
class DeclareNode(template.Node):
dec_re = re.compile('^\s*(\w+)\s*(:?=)\s*(.*)$')
def __init__(self, block):
self.block = block
def render(self, context):
source = self.block.render(context)
for line in source.splitlines():
m = self.dec_re.search(line)
if m:
clist = list(context)
clist.reverse()
d = {}
d['_'] = _
d['os'] = os
d['html'] = html
d['reverse'] = reverse
for c in clist:
d.update(c)
try:
context[m.group(1).strip()] = eval(m.group(3).strip(), d)
except Exception, e:
logging.error("Error in declare tag, when evaluating: %s" % m.group(3).strip())
raise
return ''
@register.tag(name='declare')
def do_declare(parser, token):
nodelist = parser.parse(('enddeclare',))
parser.delete_first_token()
return DeclareNode(nodelist)
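# Usage sketch for the custom tags above (template syntax, shown as comments
# because this module only defines the tags; variable names are made up):
#
#     {% var page_title = question.title %}        assigns a single expression
#     {% var greeting %}Hi {{ user.username }}{% endvar %}   captures rendered output
#     {% declare %}
#     total = question.score + answer.score
#     {% enddeclare %}
#
# Each declare line of the form "name = expression" is evaluated with eval()
# against the current template context (plus the _, os, html and reverse
# helpers injected above).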
|
EricIO/pasteit
|
pasteit/PasteSites.py
|
Python
|
gpl-2.0
| 3,878
| 0.005931
|
# Copyright (C) 2015 Eric Skoglund
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, see http://www.gnu.org/licenses/gpl-2.0.html
import requests
import sys
class NotSupported(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class PasteSite(object):
def __init__(self, url):
self.url = url
self.paste_url = None
self.data = None
@staticmethod
def siteFactory(site_name):
if site_name == 'slexy.org':
return Slexy()
        elif site_name == 'pastebin.mozilla.org':
return Mozilla()
else:
raise NotSupported("This site is not supported")
def parse(self, args):
""" Internal method used by the PasteSite class.
Returns a dictionary of the parsed input arguments.
Parses the arguments given at the command line.
Many pastebin like sites use different arguments
for the paste so this method should be implemented
        for each subclass of PasteSite.
See the slexy class for an example of how to implement
this method for subclasses.
"""
self.data = args
def paste(self):
"""Posts the data to the paste site.
This method tries to post the data to the paste site.
If the resulting request does not have a ok status the
program exits else we return the resulting paste url.
The method assumes that the data is in a dictionary.
"""
if self.data == None:
print('You can only paste after a parse')
sys.exit(-1)
res = requests.post(self.url, self.data)
if not res.ok:
print('Bad response {0} {1}'.format(res.reason, res.status_code))
sys.exit(-1)
self.paste_url = res.url
class Slexy(PasteSite):
def __init__(self):
super(Slexy, self).__init__('http://slexy.org/submit')
def parse(self, args):
form_data = {}
arg_translation = {'text' : 'raw_paste',
'language' : 'language',
'expiration' : 'expire',
'comment' : 'comment',
'description' : 'descr',
'visibility' : 'permissions',
'linum' : 'linenumbers',
'author' : 'author'}
for k,v in args.items():
if arg_translation.get(k):
form_data[arg_translation[k]] = v
form_data['submit'] = 'Submit Paste'
self.data = form_data
class Mozilla(PasteSite):
def __init__(self):
super(Mozilla, self).__init__('https://pastebin.mozilla.org')
def parse(self, args):
form_data = {}
arg_translation = {'text' : 'code2',
'expiration' : 'expiry',
'syntax_highlight' : 'format',
'author' : 'poster'}
for k,v in args.items():
if arg_translation.get(k):
form_data[arg_translation[k]] = v
form_data['paste'] = 'Send'
form_data['parent_pid'] = ''
self.data = form_data
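A short sketch of the intended call sequence for the classes above, assuming network access; the argument keys follow the translation tables in the parse() implementations, and the command-line code that normally builds this dict is not part of this file.

```python
site = PasteSite.siteFactory('slexy.org')
site.parse({'text': 'print("hello")', 'language': 'python', 'author': 'me'})
site.paste()           # performs the HTTP POST; exits on a non-ok response
print(site.paste_url)
```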
|
sadanandb/pmt
|
src/pyasm/prod/checkin/maya_checkin_test.py
|
Python
|
epl-1.0
| 1,263
| 0.008709
|
###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
import os,unittest
from pyasm.security import Batch
from pyasm.command import Command
from pyasm.prod.biz import Asset
from pyasm.prod.maya import *
from maya_checkin import *
class MayaCheckinTest(unittest.TestCase):
def setUp(my):
batch = Batch()
def test_all(my):
# create a scene that will be checked in
asset_code = "prp101"
sid = "12345"
# create an asset
mel('sphere -n sphere1')
mel('circle -n circle1')
mel('group -n |%s |circle1 |sphere1' % asset_code )
# convert node into a maya asset
node = MayaNode("|%s" % asset_code )
asset_node = MayaAssetNode.add_sid( node, sid )
# checkin the asset
checkin = MayaAssetNodeCheckin(asset_node)
Command.execute_cmd(checkin)
# create a file from this node
asset_node.export()
if __name__ == '__main__':
unittest.main()
|
sandeepdsouza93/TensorFlow-15712
|
tensorflow/contrib/learn/python/learn/learn_io/pandas_io.py
|
Python
|
apache-2.0
| 6,001
| 0.004999
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Methods to allow pandas.DataFrame."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.learn.python.learn.dataframe.queues import feeding_functions
try:
# pylint: disable=g-import-not-at-top
import pandas as pd
HAS_PANDAS = True
except ImportError:
HAS_PANDAS = False
PANDAS_DTYPES = {
'int8': 'int',
'int16': 'int',
'int32': 'int',
'int64': 'int',
'uint8': 'int',
'uint16': 'int',
'uint32': 'int',
'uint64': 'int',
'float16': 'float',
'float32': 'float',
'float64': 'float',
'bool': 'i'
}
def extract_pandas_data(data):
"""Extract data from pandas.DataFrame for predictors.
Given a DataFrame, will extract the values and cast them to float. The
DataFrame is expected to contain values of type int, float or bool.
Args:
data: `pandas.DataFrame` containing the data to be extracted.
Returns:
A numpy `ndarray` of the DataFrame's values as floats.
Raises:
ValueError: if data contains types other than int, float or bool.
"""
if not isinstance(data, pd.DataFrame):
return data
bad_data = [column for column in data
if data[column].dtype.name not in PANDAS_DTYPES]
if not bad_data:
return data.values.astype('float')
else:
    error_report = [("'" + str(column) + "' type='" +
data[column].dtype.name + "'") for column in bad_data]
    raise ValueError('Data types for extracting pandas data must be int, '
'float, or bool. Found: ' + ', '.join(error_report))
def extract_pandas_matrix(data):
"""Extracts numpy matrix from pandas DataFrame.
Args:
data: `pandas.DataFrame` containing the data to be extracted.
Returns:
A numpy `ndarray` of the DataFrame's values.
"""
if not isinstance(data, pd.DataFrame):
return data
return data.as_matrix()
def extract_pandas_labels(labels):
"""Extract data from pandas.DataFrame for labels.
Args:
labels: `pandas.DataFrame` or `pandas.Series` containing one column of
labels to be extracted.
Returns:
A numpy `ndarray` of labels from the DataFrame.
Raises:
ValueError: if more than one column is found or type is not int, float or
bool.
"""
if isinstance(labels,
pd.DataFrame): # pandas.Series also belongs to DataFrame
if len(labels.columns) > 1:
raise ValueError('Only one column for labels is allowed.')
bad_data = [column for column in labels
if labels[column].dtype.name not in PANDAS_DTYPES]
if not bad_data:
return labels.values
else:
error_report = ["'" + str(column) + "' type="
+ str(labels[column].dtype.name) for column in bad_data]
raise ValueError('Data types for extracting labels must be int, '
'float, or bool. Found: ' + ', '.join(error_report))
else:
return labels
def pandas_input_fn(x, y=None, batch_size=128, num_epochs=None, shuffle=True,
queue_capacity=1000, num_threads=1, target_column='target',
index_column='index'):
"""Returns input function that would feed pandas DataFrame into the model.
  Note: If y's index doesn't match x's index, an exception will be raised.
Args:
x: pandas `DataFrame` object.
y: pandas `Series` object.
batch_size: int, size of batches to return.
num_epochs: int, number of epochs to iterate over data. If `None` will
      run indefinitely.
    shuffle: bool, whether to shuffle the queue. Please make sure you don't shuffle at
prediction time.
queue_capacity: int, size of queue to accumulate.
num_threads: int, number of threads used for reading and enqueueing.
target_column: str, used to pack `y` into `x` DataFrame under this column.
    index_column: str, name of the feature returned with the index.
Returns:
Function, that has signature of ()->(dict of `features`, `target`)
Raises:
ValueError: if `target_column` column is already in `x` DataFrame.
"""
def input_fn():
"""Pandas input function."""
if y is not None:
if target_column in x:
raise ValueError('Found already column \'%s\' in x, please change '
'target_column to something else. Current columns '
'in x: %s', target_column, x.columns)
if not np.array_equal(x.index, y.index):
raise ValueError('Index for x and y are mismatch, this will lead '
'to missing values. Please make sure they match or '
'use .reset_index() method.\n'
'Index for x: %s\n'
'Index for y: %s\n', x.index, y.index)
x[target_column] = y
queue = feeding_functions.enqueue_data(
x, queue_capacity, shuffle=shuffle, num_threads=num_threads,
enqueue_size=batch_size, num_epochs=num_epochs)
if num_epochs is None:
features = queue.dequeue_many(batch_size)
else:
features = queue.dequeue_up_to(batch_size)
features = dict(zip([index_column] + list(x.columns), features))
if y is not None:
target = features.pop(target_column)
return features, target
return features
return input_fn
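A brief usage sketch for pandas_input_fn under the obvious assumptions (pandas available, y's index matching x's). The returned callable only builds queue-backed tensors; actually dequeuing values would additionally need a TF1 session with queue runners started, which is omitted here.

```python
import numpy as np
import pandas as pd

x = pd.DataFrame({'a': np.arange(10.0), 'b': np.arange(10.0) * 2})
y = pd.Series(np.arange(10.0) * 3)

input_fn = pandas_input_fn(x, y, batch_size=4, num_epochs=1, shuffle=False)
features, target = input_fn()   # graph construction only, no values yet
print(sorted(features))         # expected keys: ['a', 'b', 'index']
```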
|
aarestad/euler-solutions
|
euler_38.py
|
Python
|
gpl-2.0
| 471
| 0.029724
|
from euler_functions import is_pandigital_set, number_digits
for x in range(9123, 9876): # much smaller range: http://www.mathblog.dk/project-euler-38-pandigital-multiplying-fixed-number/
products = []
n = 1
num_digits_in_products = 0
while num_digits_in_products < 9:
products.append(x * n)
n += 1
num_digits_in_products = 0
for p in products:
num_digits_in_products += number_digits(p)
if is_pandigital_set(*products):
print products
break
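The two helpers imported at the top are defined in euler_functions, which is not part of this file; the stand-ins below only illustrate the behaviour the loop relies on and are assumptions, not the project's actual implementations.

```python
def number_digits(n):
    # count of decimal digits in n
    return len(str(n))

def is_pandigital_set(*nums):
    # True when the numbers together use the digits 1-9 exactly once
    digits = ''.join(str(n) for n in nums)
    return len(digits) == 9 and set(digits) == set('123456789')
```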
| |
mezz64/home-assistant
|
tests/components/yeelight/test_init.py
|
Python
|
apache-2.0
| 22,950
| 0.001438
|
"""Test Yeelight."""
import asyncio
from datetime import timedelta
from unittest.mock import AsyncMock, patch
import pytest
from yeelight import BulbException, BulbType
from yeelight.aio import KEY_CONNECTED
from homeassistant.components.yeelight.const import (
CONF_DETECTED_MODEL,
CONF_NIGHTLIGHT_SWITCH,
CONF_NIGHTLIGHT_SWITCH_TYPE,
DOMAIN,
NIGHTLIGHT_SWITCH_TYPE_LIGHT,
STATE_CHANGE_TIME,
)
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import (
CONF_DEVICES,
CONF_HOST,
CONF_ID,
CONF_NAME,
STATE_ON,
STATE_UNAVAILABLE,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util
from . import (
CONFIG_ENTRY_DATA,
ENTITY_AMBILIGHT,
ENTITY_BINARY_SENSOR,
ENTITY_BINARY_SENSOR_TEMPLATE,
ENTITY_LIGHT,
ENTITY_NIGHTLIGHT,
FAIL_TO_BIND_IP,
ID,
IP_ADDRESS,
MODEL,
MODULE,
SHORT_ID,
_mocked_bulb,
_patch_discovery,
_patch_discovery_interval,
_patch_discovery_timeout,
)
from tests.common import MockConfigEntry, async_fire_time_changed
async def test_ip_changes_fallback_discovery(hass: HomeAssistant):
"""Test Yeelight ip changes and we fallback to discovery."""
config_entry = MockConfigEntry(
domain=DOMAIN, data={CONF_ID: ID, CONF_HOST: "5.5.5.5"}, unique_id=ID
)
config_entry.add_to_hass(hass)
    mocked_fail_bulb = _mocked_bulb(cannot_connect=True)
mocked_fail_bulb.bulb_type = BulbType.WhiteTempMood
with patch(
f"{MODULE}.AsyncBulb", return_value=mocked_fail_bulb
), _patch_discovery():
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert config_entry.state is ConfigEntryState.SETUP_RETRY
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=2))
await hass.async_block_till_done()
# The discovery should update the ip address
assert config_entry.data[CONF_HOST] == IP_ADDRESS
assert config_entry.state is ConfigEntryState.SETUP_RETRY
mocked_bulb = _mocked_bulb()
with patch(f"{MODULE}.AsyncBulb", return_value=mocked_bulb), _patch_discovery():
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=10))
await hass.async_block_till_done()
assert config_entry.state is ConfigEntryState.LOADED
binary_sensor_entity_id = ENTITY_BINARY_SENSOR_TEMPLATE.format(
f"yeelight_color_{SHORT_ID}"
)
entity_registry = er.async_get(hass)
assert entity_registry.async_get(binary_sensor_entity_id) is not None
# Make sure we can still reload with the new ip right after we change it
with patch(f"{MODULE}.AsyncBulb", return_value=mocked_bulb), _patch_discovery():
await hass.config_entries.async_reload(config_entry.entry_id)
await hass.async_block_till_done()
assert config_entry.state is ConfigEntryState.LOADED
entity_registry = er.async_get(hass)
assert entity_registry.async_get(binary_sensor_entity_id) is not None
async def test_ip_changes_id_missing_cannot_fallback(hass: HomeAssistant):
"""Test Yeelight ip changes and we fallback to discovery."""
config_entry = MockConfigEntry(domain=DOMAIN, data={CONF_HOST: "5.5.5.5"})
config_entry.add_to_hass(hass)
mocked_bulb = _mocked_bulb(True)
mocked_bulb.bulb_type = BulbType.WhiteTempMood
mocked_bulb.async_listen = AsyncMock(side_effect=[BulbException, None, None, None])
with patch(f"{MODULE}.AsyncBulb", return_value=mocked_bulb):
assert not await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert config_entry.state is ConfigEntryState.SETUP_RETRY
async def test_setup_discovery(hass: HomeAssistant):
"""Test setting up Yeelight by discovery."""
config_entry = MockConfigEntry(
domain=DOMAIN, data={CONF_HOST: IP_ADDRESS, **CONFIG_ENTRY_DATA}
)
config_entry.add_to_hass(hass)
mocked_bulb = _mocked_bulb()
with _patch_discovery(), patch(f"{MODULE}.AsyncBulb", return_value=mocked_bulb):
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert hass.states.get(ENTITY_BINARY_SENSOR) is not None
assert hass.states.get(ENTITY_LIGHT) is not None
# Unload
assert await hass.config_entries.async_unload(config_entry.entry_id)
assert hass.states.get(ENTITY_BINARY_SENSOR).state == STATE_UNAVAILABLE
assert hass.states.get(ENTITY_LIGHT).state == STATE_UNAVAILABLE
# Remove
assert await hass.config_entries.async_remove(config_entry.entry_id)
await hass.async_block_till_done()
assert hass.states.get(ENTITY_BINARY_SENSOR) is None
assert hass.states.get(ENTITY_LIGHT) is None
_ADAPTERS_WITH_MANUAL_CONFIG = [
{
"auto": True,
"index": 2,
"default": False,
"enabled": True,
"ipv4": [{"address": "192.168.1.5", "network_prefix": 23}],
"ipv6": [],
"name": "eth1",
},
]
async def test_setup_discovery_with_manually_configured_network_adapter(
hass: HomeAssistant,
):
"""Test setting up Yeelight by discovery with a manually configured network adapter."""
config_entry = MockConfigEntry(
domain=DOMAIN, data={CONF_HOST: IP_ADDRESS, **CONFIG_ENTRY_DATA}
)
config_entry.add_to_hass(hass)
mocked_bulb = _mocked_bulb()
with _patch_discovery(), patch(
f"{MODULE}.AsyncBulb", return_value=mocked_bulb
), patch(
"homeassistant.components.zeroconf.network.async_get_adapters",
return_value=_ADAPTERS_WITH_MANUAL_CONFIG,
):
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert hass.states.get(ENTITY_BINARY_SENSOR) is not None
assert hass.states.get(ENTITY_LIGHT) is not None
# Unload
assert await hass.config_entries.async_unload(config_entry.entry_id)
assert hass.states.get(ENTITY_BINARY_SENSOR).state == STATE_UNAVAILABLE
assert hass.states.get(ENTITY_LIGHT).state == STATE_UNAVAILABLE
# Remove
assert await hass.config_entries.async_remove(config_entry.entry_id)
await hass.async_block_till_done()
assert hass.states.get(ENTITY_BINARY_SENSOR) is None
assert hass.states.get(ENTITY_LIGHT) is None
_ADAPTERS_WITH_MANUAL_CONFIG_ONE_FAILING = [
{
"auto": True,
"index": 1,
"default": False,
"enabled": True,
"ipv4": [{"address": FAIL_TO_BIND_IP, "network_prefix": 23}],
"ipv6": [],
"name": "eth0",
},
{
"auto": True,
"index": 2,
"default": False,
"enabled": True,
"ipv4": [{"address": "192.168.1.5", "network_prefix": 23}],
"ipv6": [],
"name": "eth1",
},
]
async def test_setup_discovery_with_manually_configured_network_adapter_one_fails(
hass: HomeAssistant, caplog
):
"""Test setting up Yeelight by discovery with a manually configured network adapter with one that fails to bind."""
config_entry = MockConfigEntry(
domain=DOMAIN, data={CONF_HOST: IP_ADDRESS, **CONFIG_ENTRY_DATA}
)
config_entry.add_to_hass(hass)
mocked_bulb = _mocked_bulb()
with _patch_discovery(), patch(
f"{MODULE}.AsyncBulb", return_value=mocked_bulb
), patch(
"homeassistant.components.zeroconf.network.async_get_adapters",
return_value=_ADAPTERS_WITH_MANUAL_CONFIG_ONE_FAILING,
):
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert hass.states.get(ENTITY_BINARY_SENSOR) is not None
assert hass.states.get(ENTITY_LIGHT) is not None
# Unload
assert await hass.config_entries.async_unload(config_entry.entry_id)
assert hass.states.get(ENTITY_BINARY_SENSOR).state == STATE_UNAVAILABLE
assert hass.states.get(ENTITY_LIGHT).state == STATE_UNAVAILABLE
# Remove
assert await hass.config_entries.a
|
nburn42/tensorflow
|
tensorflow/python/grappler/hierarchical_controller.py
|
Python
|
apache-2.0
| 43,598
| 0.005069
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""HierarchicalController Class.
The HierarchicalController encompasses the entire lifecycle of training the
device placement policy, including generating op embeddings, getting groups for
each op, placing those groups and running the predicted placements.
Different assignment models can inherit from this class.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import numpy as np
import six
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops as tf_ops
from tensorflow.python.grappler.controller import Controller
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.summary import summary
from tensorflow.python.training import adam
from tensorflow.python.training import gradient_descent
from tensorflow.python.training import learning_rate_decay
from tensorflow.python.training import training_util
class PlacerParams(object):
"""Class to
|
hold a set of placement parameters as name-value pairs.
A typical usage is as follows:
```python
# Create a PlacerParams object specifying names and values of the model
# parameters:
params = PlacerParams(hidden_size=128, decay_steps=50)
# The parameters are available as attributes of the PlacerParams object:
  params.hidden_size ==> 128
  params.decay_steps ==> 50
```
"""
def __init__(self, **kwargs):
"""Create an instance of `PlacerParams` from keyword arguments.
    The keyword arguments specify name-value pairs for the parameters.
The parameter types are inferred from the type of the values passed.
The parameter names are added as attributes of `PlacerParams` object,
and they can be accessed directly with the dot notation `params._name_`.
Example:
```python
# Define 1 parameter: 'hidden_size'
params = PlacerParams(hidden_size=128)
params.hidden_size ==> 128
```
Args:
**kwargs: Key-value pairs where the key is the parameter name and
the value is the value for the parameter.
"""
for name, value in six.iteritems(kwargs):
self.add_param(name, value)
def add_param(self, name, value):
"""Adds {name, value} pair to hyperparameters.
Args:
name: Name of the hyperparameter.
value: Value of the hyperparameter. Can be one of the following types:
int, float, string, int list, float list, or string list.
Raises:
ValueError: if one of the arguments is invalid.
"""
# Keys in kwargs are unique, but 'name' could be the name of a pre-existing
# attribute of this object. In that case we refuse to use it as a
# parameter name.
if getattr(self, name, None) is not None:
raise ValueError("Parameter name is reserved: %s" % name)
setattr(self, name, value)
def hierarchical_controller_hparams():
"""Hyperparameters for hierarchical planner."""
return PlacerParams(
hidden_size=512,
forget_bias_init=1.0,
temperature=1.0,
logits_std_noise=0.5,
stop_noise_step=750,
decay_steps=50,
max_num_outputs=5,
max_output_size=5,
tanh_constant=1.0,
adj_embed_dim=20,
grouping_hidden_size=64,
num_groups=None,
bi_lstm=True,
failing_signal=100,
stop_sampling=500,
start_with_failing_signal=True,
always_update_baseline=False,
bl_dec=0.9,
grad_bound=1.0,
lr=0.1,
lr_dec=0.95,
start_decay_step=400,
optimizer_type="adam",
stop_updating_after_steps=1000,
name="hierarchical_controller",
keep_prob=1.0,
reward_function="sqrt",
seed=1234,
# distributed training params
num_children=1)
class HierarchicalController(Controller):
"""HierarchicalController class."""
def __init__(self, hparams, item, cluster, controller_id=0):
"""HierarchicalController class initializer.
Args:
hparams: All hyper-parameters.
item: The metagraph to place.
cluster: The cluster of hardware devices to optimize for.
controller_id: the id of the controller in a multi-controller setup.
"""
super(HierarchicalController, self).__init__(item, cluster)
self.ctrl_id = controller_id
self.hparams = hparams
if self.hparams.num_groups is None:
self.num_groups = min(256, 20 * self.num_devices)
else:
self.num_groups = self.hparams.num_groups
# creates self.op_embeddings and self.type_dict
self.create_op_embeddings(verbose=False)
# TODO(azalia) clean up embedding/group_embedding_size names
self.group_emb_size = (
2 * self.num_groups + len(self.type_dict) +
self.hparams.max_num_outputs * self.hparams.max_output_size)
self.embedding_size = self.group_emb_size
self.initializer = init_ops.glorot_uniform_initializer(
seed=self.hparams.seed)
with variable_scope.variable_scope(
self.hparams.name,
initializer=self.initializer,
reuse=variable_scope.AUTO_REUSE):
# define parameters of feedforward
variable_scope.get_variable("w_grouping_ff", [
1 + self.hparams.max_num_outputs * self.hparams.max_output_size +
self.hparams.adj_embed_dim, self.hparams.grouping_hidden_size
])
variable_scope.get_variable(
"w_grouping_softmax",
[self.hparams.grouping_hidden_size, self.num_groups])
if self.hparams.bi_lstm:
variable_scope.get_variable("encoder_lstm_forward", [
self.embedding_size + self.hparams.hidden_size / 2,
2 * self.hparams.hidden_size
])
variable_scope.get_variable("encoder_lstm_backward", [
self.embedding_size + self.hparams.hidden_size / 2,
2 * self.hparams.hidden_size
])
variable_scope.get_variable(
"device_embeddings", [self.num_devices, self.hparams.hidden_size])
variable_scope.get_variable(
"decoder_lstm",
[2 * self.hparams.hidden_size, 4 * self.hparams.hidden_size])
variable_scope.get_variable(
"device_softmax", [2 * self.hparams.hidden_size, self.num_devices])
variable_scope.get_variable("device_go_embedding",
[1, self.hparams.hidden_size])
variable_scope.get_variable(
"encoder_forget_bias",
shape=1,
dtype=dtypes.float32,
initializer=init_ops.constant_initializer(
self.hparams.forget_bias_init))
variable_scope.get_variable(
"decoder_forget_bias",
shape=1,
dtype=dtypes.float32,
initializer=init_ops.constant_initializer(
self.hparams.forget_bias_init))
variable_scope.get_variable(
"attn_w_1", [self.hparams.hidden_size, self.hparams.hidden_size])
variable_scope.get_
|
osamak/medcloud-registration
|
register/temp/views.py
|
Python
|
agpl-3.0
| 9,644
| 0.002771
|
# -*- coding: utf-8 -*-
import json
import os
import random
import requests
import re
import subprocess
import string
from django import forms
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.core.mail import send_mail
from django.views.decorators.csrf import csrf_exempt
from django.core.exceptions import PermissionDenied, ObjectDoesNotExist
from constance import config
from register.models import Registration, Batch, colleges
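# The two e-mail templates below are in Arabic. Roughly: "Thank you for
# registering with the Medical Cloud" / "Here is your new login information
# for the Medical Cloud", each followed by the username, password and the
# cloud URL, and closing with "hoping you find it useful".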
new_message = u"""نشكرك على التسجيل في السحابة الطبية
اسم المستخدم: %s
كلمة السر: %s
رابط السحابة: https://ksauhs-med.com/
آملين أن تجد فيها ما يفيد!
"""
forgotten_message = u"""هذه معلوماتك الجديدة للدخول إلى السحابة الطبية:
اسم المستخدم: %s
كلمة السر: %s
رابط السحابة: https://ksauhs-med.com/
آملين أن تجد فيها ما يفيد!
"""
class RegistrationForm(forms.ModelForm):
college = forms.CharField(label=u'الكلية',
max_length=1,
widget=forms.Select(choices=colleges))
number = forms.IntegerField(label=u"الدفعة", widget=forms.Select(choices=[(i, i) for i in range(1, 17)]))
def clean(self):
cleaned_data = super(RegistrationForm, self).clean()
batch_msg = u"الدفعة التي اخترت غير موجودة."
if 'college' in cleaned_data and 'number' in cleaned_data:
try:
Batch.objects.get(
college=cleaned_data['college'],
number=int(cleaned_data['number']))
except Batch.DoesNotExist:
self._errors['college'] = self.error_class([batch_msg])
self._errors['number'] = self.error_class([batch_msg])
del cleaned_data['college']
del cleaned_data['number']
return cleaned_data
def save(self):
new_registration = super(RegistrationForm, self).save()
batch = Batch.objects.get(
college=self.cleaned_data['college'],
number=int(self.cleaned_data['number']),
)
new_registration.group = batch
new_registration.save()
return new_registration
class Meta:
        model = Registration
        fields = ['email', 'college', 'number', 'university_id']
widgets = {
            'university_id': forms.TextInput(),
}
class ResetPasswordForm(forms.Form):
email = forms.EmailField(label=u'بريدك الجامعي', max_length=100)
@csrf_exempt
def register(request):
if request.method == 'POST':
password = generate_password()
initial_registration = Registration(password=password)
form = RegistrationForm(request.POST,
instance=initial_registration)
if form.is_valid():
email = form.cleaned_data['email']
if not email.endswith('ksau-hs.edu.sa'):
context = {'form': form, 'error_message': 'university_email'}
elif Registration.objects.filter(email__iexact=email, is_successful=True):
context = {'form': form, 'error_message': u'already_registered'}
else:
user = email.split('@')[0].lower()
registration = form.save()
group = str(registration.group)
if createuser(user, password, group):
registration.is_successful = True
registration.save()
send_mail(u'حسابك على السحابة الطبية', new_message %
(user, password), '[email protected]',
[email], fail_silently=False)
return HttpResponseRedirect(reverse('register:thanks'))
else:
context = {'form': form, 'error_message': 'unknown'}
else:
context = {'form': form}
else:
form = RegistrationForm()
context = {'form': form}
return render(request, 'register/register.html', context)
@csrf_exempt
def forgotten(request):
if request.method == 'POST':
form = ResetPasswordForm(request.POST)
if form.is_valid():
email = form.cleaned_data['email']
if not email.endswith('ksau-hs.edu.sa'):
context = {'form': form, 'error_message': 'university_email'}
else:
try:
previous_registration = Registration.objects.get(email__iexact=email,
is_successful=True)
except ObjectDoesNotExist:
previous_registration = None
context = {'form': form, 'error_message': 'not_registered'}
if previous_registration:
new_password = generate_password()
user = previous_registration.email.split('@')[0]
if reset_password(user, new_password):
previous_registration.password = new_password
previous_registration.forgotten_password = True
previous_registration.save()
send_mail(u'حسابك على السحابة الطبية', forgotten_message %
(user, new_password), '[email protected]',
[email], fail_silently=False)
return HttpResponseRedirect(reverse('register:thanks'))
else:
context = {'form': form, 'error_message': 'unknown'}
else:
context = {'form': form}
else:
form = ResetPasswordForm()
context = {'form': form}
return render(request, 'register/reset.html', context)
def generate_password():
return ''.join(random.choice(string.ascii_uppercase) for i in range(6))
def login():
homepage_url = "https://www.ksauhs-med.com"
homepage = requests.get(homepage_url)
oc1d6beae686 = homepage.cookies['oc1d6beae686']
cookies = {'oc1d6beae686': oc1d6beae686}
login_requesttoken_regex = re.compile('data-requesttoken="(.+?)"', re.U)
login_requesttoken = re.findall(login_requesttoken_regex, homepage.content)[0]
login_data = {'user': config.OWNCLOUD_ADMIN_USERNAME,
'password': config.OWNCLOUD_ADMIN_PASSWORD,
'requesttoken': login_requesttoken,
'remember_login': '1',
'timezone-offset': 'Asia/Baghdad',
}
login_page = requests.post(homepage_url, data=login_data, cookies=cookies)
login_cookies = login_page.history[0].cookies
cookies = {#'oc_username': login_cookies['oc_username'],
#'oc_token': login_cookies['oc_token'],
#'oc_remember_login': login_cookies['oc_remember_login'],
'oc1d6beae686': login_cookies['oc1d6beae686'],
}
return cookies
def createuser(user, password, group):
os.environ['OC_PASS'] = password
command = "/usr/local/bin/php70 /home/medcloud/webapps/ownphp70/occ user:add {} --password-from-env -g {} -n".format(user, group)
output = subprocess.call(command, shell=True)
if output == 0:
return True
else:
return False
# createuser_url = "https://www.ksauhs-med.com/index.php/settings/users/users"
# user_url = "https://www.ksauhs-med.com/index.php/settings/users"
# login_cookies = login()
# user_page = requests.post(user_url, cookies=login_cookies)
# regex = re.findall("data-requesttoken=\"([^\"]+)\"", user_page.text)
# requesttoken = regex[0]
# user_data = {'username': user,
# 'password': password,
# 'groups[]': group}
# headers = {'requesttoken': requesttoken}
# createuser_page = requests.post(createuser_url, data=user_data, cookies=login_cookies, headers=headers)
# json_object = json.loads(createuser_page.text)
# if createuser_page.status_code == 201:
# return True
# else:
# print json_object # REMOVE
def reset_password(user, password):
os.environ['OC_PASS'] = password
command = "/usr/local/bin/php70 /home/medcloud/webapps/ownphp70/occ user:res
|
InspectorIncognito/visualization
|
AndroidRequests/migrations/0904_transformation_eventforbusv2_half_hour_period.py
|
Python
|
gpl-3.0
| 2,254
| 0.00976
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.utils import timezone
from django.db import models, migrations
def fill_tables(apps, schema_editor):
eventsforbusv2 = apps.get_model('AndroidRequests', 'EventForBusv2')
eventsforbusstop = apps.get_model('AndroidRequests', 'EventForBusStop')
hhperiods = apps.get_model('AndroidRequests', 'HalfHourPeriod')
for ev in eventsforbusv2.objects.all():
creationTime = timezone.localtime(ev.timeCreation).time().replace(microsecond=0)
hhperiod = hhperiods.objects.get(initial_time__lte = creationTime , end_time__gte = creationTime)
ev.halfHourPeriod = hhperiod
ev.save()
for ev in eventsforbusstop.objects.all():
creationTime = timezone.localtime(ev.timeCreation).time().replace(microsecond=0)
        hhperiod = hhperiods.objects.get(initial_time__lte = creationTime , end_time__gte = creationTime)
ev.halfHourPeriod = hhperiod
ev.save()
class Migration(migrations.Migration):
dependencies = [
('AndroidRequests', '0903_transformation_halfhourperiod'),
]
operations = [
migrations.AddField(
model_name='eventforbusv2',
name='halfHourPeriod',
            field=models.ForeignKey(verbose_name=b'Half Hour Period', to='AndroidRequests.HalfHourPeriod', null=True),
preserve_default=False,
),
migrations.AddField(
model_name='eventforbusstop',
name='halfHourPeriod',
field=models.ForeignKey(verbose_name=b'Half Hour Period', to='AndroidRequests.HalfHourPeriod', null=True),
preserve_default=False,
),
migrations.RunPython(fill_tables, reverse_code=migrations.RunPython.noop),
migrations.AlterField(
model_name='eventforbusv2',
name='halfHourPeriod',
field=models.ForeignKey(verbose_name=b'Half Hour Period', to='AndroidRequests.HalfHourPeriod', null=False),
),
migrations.AlterField(
model_name='eventforbusstop',
name='halfHourPeriod',
field=models.ForeignKey(verbose_name=b'Half Hour Period', to='AndroidRequests.HalfHourPeriod', null=False),
),
]
|
zooko/egtp
|
common/MojoErrors.py
|
Python
|
agpl-3.0
| 355
| 0.014085
|
import exceptions
# thrown by anything which doesn't like what was passed to it
class DataError(exceptions.StandardError):
pass
# thrown by MojoMessage
class MojoMessageError(DataError):
pass
# thrown by DataTypes
class BadFormatError(DataError):
pass
# thrown by things which do block reassembly
class ReassemblyError(IOError):
pass
|
babadoo/zml
|
setup.py
|
Python
|
bsd-3-clause
| 1,343
| 0.018615
|
from setuptools import setup
setup(
name = "zml",
packages = ["zml"],
version = "0.8.1",
description = "zero markup language",
author = "Christof Hagedorn",
author_email = "[email protected]",
url = "http://www.zml.org/",
download_url = "https://pypi.python.org/pypi/zml",
keywords = ["zml", "zero", "markup", "language", "template", "templating"],
install_requires = ['pyparsing', 'html5lib', 'pyyaml', 'asteval' ],
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Topic :: Internet",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Internet :: WWW/H
|
TTP :: WSGI",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description = """\
zml - zero markup language
-------------------------------------
Features
- zero markup templates
- clean syntax
- extensible
- components
- namespaces
- lean code
    This version requires Python 3 or later.
"""
)
|
jsirois/pants
|
src/python/pants/help/help_info_extracter.py
|
Python
|
apache-2.0
| 19,574
| 0.002963
|
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import dataclasses
import inspect
import json
from dataclasses import dataclass
from enum import Enum
from typing import Any, Callable, Dict, Generic, List, Optional, Tuple, Type, cast, get_type_hints
from pants.base import deprecated
from pants.engine.goal import GoalSubsystem
from pants.engine.target import (
AsyncFieldMixin,
BoolField,
DictStringToStringField,
DictStringToStringSequenceField,
Field,
FloatField,
IntField,
RegisteredTargetTypes,
ScalarField,
SequenceField,
StringField,
StringSequenceField,
Target,
)
from pants.engine.unions import UnionMembership
from pants.option.option_util import is_dict_option, is_list_option
from pants.option.options import Options
from pants.option.parser import OptionValueHistory, Parser
from pants.util.objects import get_docstring, get_docstring_summary, pretty_print_type_hint
from pants.util.strutil import first_paragraph
class HelpJSONEncoder(json.JSONEncoder):
"""Class for JSON-encoding help data (including option values).
Note that JSON-encoded data is not intended to be decoded back. It exists purely for terminal
and browser help display.
"""
def default(self, o):
if callable(o):
return o.__name__
if isinstance(o, type):
return type.__name__
if isinstance(o, Enum):
return o.value
return super().default(o)
def to_help_str(val) -> str:
if isinstance(val, (list, dict)):
return json.dumps(val, sort_keys=True, indent=2, cls=HelpJSONEncoder)
if isinstance(val, Enum):
return str(val.value)
else:
return str(val)
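A small illustration of what to_help_str produces for the three branches above (list/dict via the JSON encoder, Enum via its value, anything else via str); the Enum here is a made-up example, not one from the Pants codebase.

```python
from enum import Enum

class Color(Enum):
    RED = "red"

print(to_help_str(["b", "a"]))   # pretty-printed JSON list over two-space indents
print(to_help_str(Color.RED))    # -> red
print(to_help_str(42))           # -> 42
```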
@dataclass(frozen=True)
class OptionHelpInfo:
"""A container for help information for a single option.
display_args: Arg strings suitable for display in help text, including value examples
(e.g., [-f, --[no]-foo-bar, --baz=<metavar>].)
comma_separated_display_args: Display args as a comma-delimited string, used in
reference documentation.
scoped_cmd_line_args: The explicitly scoped raw flag names allowed anywhere on the cmd line,
(e.g., [--scope-baz, --no-scope-baz, --scope-qux])
unscoped_cmd_line_args: The unscoped raw flag names allowed on the cmd line in this option's
scope context (e.g., [--baz, --no-baz, --qux])
    env_var: The environment variable that sets the option.
config_key: The config key for this option (in the section named for its scope).
typ: The type of the option.
default: The value of this option if no flags are specified (derived from config and env vars).
help: The help message registered for this option.
deprecated_message: If deprecated: A message explaining that this option is deprecated at
removal_version.
removal_version: If deprecated: The version at which this option is to be removed.
removal_hint: If deprecated: The removal hint message registered for this option.
choices: If this option has a constrained set of choices, a tuple of the stringified choices.
"""
display_args: Tuple[str, ...]
comma_separated_display_args: str
scoped_cmd_line_args: Tuple[str, ...]
unscoped_cmd_line_args: Tuple[str, ...]
env_var: str
config_key: str
typ: Type
default: Any
help: str
deprecation_active: bool
deprecated_message: Optional[str]
removal_version: Optional[str]
removal_hint: Optional[str]
choices: Optional[Tuple[str, ...]]
comma_separated_choices: Optional[str]
value_history: Optional[OptionValueHistory]
@dataclass(frozen=True)
class OptionScopeHelpInfo:
"""A container for help information for a scope of options.
scope: The scope of the described options.
basic|advanced|deprecated: A list of OptionHelpInfo for the options in that group.
"""
scope: str
description: str
is_goal: bool # True iff the scope belongs to a GoalSubsystem.
basic: Tuple[OptionHelpInfo, ...]
advanced: Tuple[OptionHelpInfo, ...]
    deprecated: Tuple[OptionHelpInfo, ...]
def collect_unscoped_flags(self) -> List[str]:
flags: List[str] = []
        for options in (self.basic, self.advanced, self.deprecated):
for ohi in options:
flags.extend(ohi.unscoped_cmd_line_args)
return flags
def collect_scoped_flags(self) -> List[str]:
flags: List[str] = []
for options in (self.basic, self.advanced, self.deprecated):
for ohi in options:
flags.extend(ohi.scoped_cmd_line_args)
return flags
@dataclass(frozen=True)
class GoalHelpInfo:
"""A container for help information for a goal."""
name: str
description: str
is_implemented: bool # True iff all unions required by the goal are implemented.
consumed_scopes: Tuple[str, ...] # The scopes of subsystems consumed by this goal.
@dataclass(frozen=True)
class TargetFieldHelpInfo:
"""A container for help information for a field in a target type."""
alias: str
description: Optional[str]
type_hint: str
required: bool
default: Optional[str]
@classmethod
def create(cls, field: Type[Field]) -> TargetFieldHelpInfo:
description: Optional[str]
if hasattr(field, "help"):
description = field.help
else:
# NB: It is very common (and encouraged) to subclass Fields to give custom behavior, e.g.
# `PythonSources` subclassing `Sources`. Here, we set `fallback_to_ancestors=True` so that
# we can still generate meaningful documentation for all these custom fields without
# requiring the Field author to rewrite the docstring.
#
# However, if the original plugin author did not define docstring, then this means we
# would typically fall back to the docstring for `Field` or a template like `StringField`.
            # This is an awkward edge of our heuristic and it's not intentional since these core
# `Field` types have documentation oriented to the plugin author and not the end user
# filling in fields in a BUILD file.
description = get_docstring(
field,
flatten=True,
fallback_to_ancestors=True,
ignored_ancestors={
*Field.mro(),
AsyncFieldMixin,
BoolField,
DictStringToStringField,
DictStringToStringSequenceField,
FloatField,
Generic, # type: ignore[arg-type]
IntField,
ScalarField,
SequenceField,
StringField,
StringSequenceField,
},
)
raw_value_type = get_type_hints(field.compute_value)["raw_value"]
type_hint = pretty_print_type_hint(raw_value_type)
# Check if the field only allows for certain choices.
if issubclass(field, StringField) and field.valid_choices is not None:
valid_choices = sorted(
field.valid_choices
if isinstance(field.valid_choices, tuple)
else (choice.value for choice in field.valid_choices)
)
type_hint = " | ".join([*(repr(c) for c in valid_choices), "None"])
if field.required:
# We hackily remove `None` as a valid option for the field when it's required. This
# greatly simplifies Field definitions because it means that they don't need to
# override the type hints for `PrimitiveField.compute_value()` and
# `AsyncField.sanitize_raw_value()` to indicate that `None` is an invalid type.
type_hint = type_hint.replace(" | None", "")
return cls(
alias=field.alias,
description=des
|
xeechou/mkblogs
|
mkblogs/tests/build_tests.py
|
Python
|
bsd-2-clause
| 10,943
| 0.001828
|
#!/usr/bin/env python
# coding: utf-8
import os
import shutil
import tempfile
import unittest
from mkdocs import build, nav, config
from mkdocs.compat import zip
from mkdocs.exceptions import MarkdownNotFound
from mkdocs.tests.base import dedent
class BuildTests(unittest.TestCase):
def test_empty_document(self):
html, toc, meta = build.convert_markdown("")
self.assertEqual(html, '')
self.assertEqual(len(list(toc)), 0)
self.assertEqual(meta, {})
def test_convert_markdown(self):
"""
Ensure that basic Markdown -> HTML and TOC works.
"""
html, toc, meta = build.convert_markdown(dedent("""
page_title: custom title
# Heading 1
This is some text.
# Heading 2
And some more text.
"""))
expected_html = dedent("""
<h1 id="heading-1">Heading 1</h1>
<p>This is some text.</p>
<h1 id="heading-2">Heading 2</h1>
<p>And some more text.</p>
""")
expected_toc = dedent("""
Heading 1 - #heading-1
Heading 2 - #heading-2
""")
expected_meta = {'page_title': ['custom title']}
self.assertEqual(html.strip(), expected_html)
self.assertEqual(str(toc).strip(), expected_toc)
self.assertEqual(meta, expected_meta)
def test_convert_internal_link(self):
md_text = 'An [internal link](internal.md) to another document.'
expected = '<p>An <a href="internal/">internal link</a> to another document.</p>'
html, toc, meta = build.convert_markdown(md_text)
self.assertEqual(html.strip(), expected.strip())
def test_convert_multiple_internal_links(self):
md_text = '[First link](first.md) [second link](second.md).'
expected = '<p><a href="first/">First link</a> <a href="second/">second link</a>.</p>'
html, toc, meta = build.convert_markdown(md_text)
self.assertEqual(html.strip(), expected.strip())
def test_convert_internal_link_differing_directory(self):
md_text = 'An [internal link](../internal.md) to another document.'
expected = '<p>An <a href="../internal/">internal link</a> to another document.</p>'
html, toc, meta = build.convert_markdown(md_text)
self.assertEqual(html.strip(), expected.strip())
def test_convert_internal_link_with_anchor(self):
md_text = 'An [internal link](internal.md#section1.1) to another document.'
expected = '<p>An <a href="internal/#section1.1">internal link</a> to another document.</p>'
html, toc, meta = build.convert_markdown(md_text)
self.assertEqual(html.strip(), expected.strip())
def test_convert_internal_media(self):
"""Test relative image URL's are the same for different base_urls"""
pages = [
('index.md',),
('internal.md',),
('sub/internal.md')
]
site_navigation = nav.SiteNavigation(pages)
expected_results = (
'./img/initial-layout.png',
'../img/initial-layout.png',
'../img/initial-layout.png',
)
template = '<p><img alt="The initial MkDocs layout" src="%s" /></p>'
for (page, expected) in zip(site_navigation.walk_pages(), expected_results):
md_text = ''
html, _, _ = build.convert_markdown(md_text, site_navigation=site_navigation)
self.assertEqual(html, template % expected)
    def test_convert_internal_absolute_media(self):
        """Test absolute image URLs are correct for different base_urls"""
pages = [
('index.md',),
('internal.md',),
('sub/internal.md')
]
site_navigation = nav.SiteNavigation(pages)
expected_results = (
'./img/initial-layout.png',
'../img/initial-layout.png',
'../../img/initial-layout.png',
)
template = '<p><img alt="The initial MkDocs layout" src="%s" /></p>'
for (page, expected) in zip(site_navigation.walk_pages(), expected_results):
md_text = ''
html, _, _ = build.convert_markdown(md_text, site_navigation=site_navigation)
self.assertEqual(html, template % expected)
def test_dont_convert_code_block_urls(self):
pages = [
('index.md',),
('internal.md',),
('sub/internal.md')
]
site_navigation = nav.SiteNavigation(pages)
expected = dedent("""
<p>An HTML Anchor::</p>
<pre><code><a href="index.md">My example link</a>
</code></pre>
""")
for page in site_navigation.walk_pages():
markdown = 'An HTML Anchor::\n\n <a href="index.md">My example link</a>\n'
html, _, _ = build.convert_markdown(markdown, site_navigation=site_navigation)
self.assertEqual(dedent(html), expected)
def test_anchor_only_link(self):
pages = [
('index.md',),
('internal.md',),
('sub/internal.md')
]
site_navigation = nav.SiteNavigation(pages)
for page in site_navigation.walk_pages():
markdown = '[test](#test)'
html, _, _ = build.convert_markdown(markdown, site_navigation=site_navigation)
self.assertEqual(html, '<p><a href="#test">test</a></p>')
def test_ignore_external_link(self):
md_text = 'An [external link](http://example.com/external.md).'
        expected = '<p>An <a href="http://example.com/external.md">external link</a>.</p>'
html, toc, meta = build.convert_markdown(md_text)
self.assertEqual(html.strip(), expected.strip())
def test_not_use_directory_urls(self):
md_text = 'An [internal link](internal.md) to another document.'
        expected = '<p>An <a href="internal/index.html">internal link</a> to another document.</p>'
pages = [
('internal.md',)
]
site_navigation = nav.SiteNavigation(pages, use_directory_urls=False)
html, toc, meta = build.convert_markdown(md_text, site_navigation=site_navigation)
self.assertEqual(html.strip(), expected.strip())
def test_markdown_table_extension(self):
"""
Ensure that the table extension is supported.
"""
html, toc, meta = build.convert_markdown(dedent("""
First Header | Second Header
-------------- | --------------
Content Cell 1 | Content Cell 2
Content Cell 3 | Content Cell 4
"""))
expected_html = dedent("""
<table>
<thead>
<tr>
<th>First Header</th>
<th>Second Header</th>
</tr>
</thead>
<tbody>
<tr>
<td>Content Cell 1</td>
<td>Content Cell 2</td>
</tr>
<tr>
<td>Content Cell 3</td>
<td>Content Cell 4</td>
</tr>
</tbody>
</table>
""")
self.assertEqual(html.strip(), expected_html)
def test_markdown_fenced_code_extension(self):
"""
Ensure that the fenced code extension is supported.
"""
html, toc, meta = build.convert_markdown(dedent("""
```
print 'foo'
```
"""))
expected_html = dedent("""
<pre><code>print 'foo'\n</code></pre>
""")
self.assertEqual(html.strip(), expected_html)
def test_markdown_custom_extension(self):
"""
Check that an extension applies when requested in the arguments to
`convert_markdown`.
"""
md_input = "foo__bar__baz"
# Check that the plugin is not active when not requested.
expected_without_smartstrong = "<p>foo<strong>bar</strong>baz</p>"
html_base, _, _ = build.convert_markdown(md_input)
self.assertEqual(html_base.strip(), expected_without_smartstrong)
# Check that the plugin is active when requested.
exp
|
siosio/intellij-community
|
python/testData/intentions/PyInvertIfConditionIntentionTest/commentsIf.py
|
Python
|
apache-2.0
| 136
| 0.014706
|
def func():
value = "not
|
-none"
# Is none
    <caret>if value is None:
print("None")
else:
print("Not none")
|
sckott/pygbif
|
pygbif/utils/wkt_rewind.py
|
Python
|
mit
| 1,305
| 0.003065
|
from geojson_rewind import rewind
from geomet import wkt
import decimal
import statistics
def wkt_rewind(x, digits=None):
"""
reverse WKT winding order
:param x: [str] WKT string
:param digits: [int] number of digits after decimal to use for the return string.
by default, we use the mean number of digits in your string.
:return: a string
Usage::
from pygbif import wkt_rewind
x = 'POLYGON((144.6 13.2, 144.6 13.6, 144.9 13.6, 144.9 13.2, 144.6 13.2))'
wkt_rewind(x)
wkt_rewind(x, digits = 0)
wkt_rewind(x, digits = 3)
wkt_rewind(x, digits = 7)
"""
z = wkt.loads(x)
if digits is None:
coords = z["coordinates"]
nums = __flatten(coords)
        dec_n = [decimal.Decimal(str(w)).as_tuple().exponent for w in nums]
digits = abs(statistics.mean(dec_n))
else:
if not isinstance(digits, int):
raise TypeError("'digits' must be an int")
wound = rewind(z)
back_to_wkt = wkt.dumps(wound, decimals=digits)
return back_to_wkt
# from https://stackoverflow.com/a/12472564/1091766
def __flatten(S):
if S == []:
return S
if isinstance(S[0], list):
return __flatten(S[0]) + __flatten(S[1:])
return S[:1] + __flatten(S[1:])
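A concrete before/after sketch for wkt_rewind, assuming geojson_rewind and geomet are installed; the exact decimal formatting depends on the digits argument described in the docstring.

```python
x = 'POLYGON((144.6 13.2, 144.6 13.6, 144.9 13.6, 144.9 13.2, 144.6 13.2))'
print(wkt_rewind(x, digits=1))
# The exterior ring above winds clockwise, so the output reverses it to
# counter-clockwise, e.g. roughly:
# POLYGON ((144.6 13.2, 144.9 13.2, 144.9 13.6, 144.6 13.6, 144.6 13.2))
```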
|
infobloxopen/infoblox-netmri
|
infoblox_netmri/api/broker/v3_8_0/device_service_service_broker.py
|
Python
|
apache-2.0
| 49,305
| 0.002109
|
from ..broker import Broker
class DeviceServiceServiceBroker(Broker):
controller = "device_service_services"
def show(self, **kwargs):
"""Shows the details for the specified device service service.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param DeviceServiceServiceID: The internal NetMRI identifier of this usage relationship between service objects.
:type DeviceServiceServiceID: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of device service service methods. The listed methods will be called on each device service service returned and included in the output. Available methods are: parent_device_service, child_device_service, data_source, device.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: parent_device_service, child_device_service, data_source, device.
:type include: Array of String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return device_service_service: The device service service identified by the specified DeviceServiceServiceID.
:rtype device_service_service: DeviceServiceService
"""
return self.api_request(self._get_method_fullname("show"), kwargs)
def index(self, **kwargs):
"""Lists the available device service services. Any of the inputs listed may be be used to narrow the list; other inputs will be ignored. Of the various ways to query lists, using this method is most efficient.
**Inputs**
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the device to which belongs this services.
:type DeviceID: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceServiceServiceID: The internal NetMRI identifier of this usage relationship between service objects.
:type DeviceServiceServiceID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param timestamp: The data returned will represent the device service services as of this date and time. If omitted, the result will indicate the most recently collected data.
:type timestamp: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
             :param methods: A list of device service service methods. The listed methods will be called on each device service service returned and included in the output. Available methods are: parent_device_service, child_device_service, data_source, device.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
             :param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: parent_device_service, child_device_service, data_source, device.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` DeviceServiceServiceID
:param sort: The data field(s) to use for sorting the output. Default is DeviceServiceServiceID. Valid values are DeviceServiceServiceID, DeviceID, DataSourceID, ParentDeviceServiceID, ChildDeviceServiceID, SvsvFirstSeenTime, SvsvStartTime, SvsvEndTime, SvsvTimestamp, SvsvChangedCols, SvsvUsage, SvsvProvisionData.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each DeviceServiceService. Valid values are DeviceServiceServiceID, DeviceID, DataSourceID, ParentDeviceServiceID, ChildDeviceServiceID, SvsvFirstSeenTime, SvsvStartTime, SvsvEndTime, SvsvTimestamp, SvsvChangedCols, SvsvUsage, SvsvProvisionData. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return device_service_services: An array of the DeviceServiceService objects that match the specified input criteria.
:rtype device_service_services: Array of DeviceServiceService
"""
return self.api_list_request(self._get_method_fullname("index"), kwargs)
def search(self, **kwargs):
"""Lists the available device service services matching the input criteria. This method provides a more flexible search interface than the index method, but searching using this method is more
|
ColinIanKing/autotest
|
server/autoserv.py
|
Python
|
gpl-2.0
| 8,389
| 0.001788
|
"""
Library for autotest-remote usage.
"""
import sys, os, re, traceback, signal, time, logging, getpass
try:
import autotest.common as common
except ImportError:
import common
from autotest.client.shared.global_config import global_config
require_atfork = global_config.get_config_value(
'AUTOSERV', 'require_atfork_module', type=bool, default=True)
try:
import atfork
atfork.monkeypatch_os_fork_functions()
import atfork.stdlib_fixer
# Fix the Python standard library for threading+fork safety with its
# internal locks. http://code.google.com/p/python-atfork/
import warnings
warnings.filterwarnings('ignore', 'logging module already imported')
atfork.stdlib_fixer.fix_logging_module()
except ImportError, e:
from autotest.client.shared import global_config
if global_config.global_config.get_config_value(
'AUTOSERV', 'require_atfork_module', type=bool, default=False):
print >>sys.stderr, 'Please run utils/build_externals.py'
print e
sys.exit(1)
from autotest.server import server_logging_config
from autotest.server import server_job, autoserv_parser
from autotest.server import autotest_remote
from autotest.client.shared import pidfile, logging_manager
def run_autoserv(pid_file_manager, results, parser):
# send stdin to /dev/null
dev_null = os.open(os.devnull, os.O_RDONLY)
os.dup2(dev_null, sys.stdin.fileno())
os.close(dev_null)
# Create separate process group
os.setpgrp()
# Implement SIGTERM handler
def handle_sigterm(signum, frame):
if pid_file_manager:
pid_file_manager.close_file(1, signal.SIGTERM)
os.killpg(os.getpgrp(), signal.SIGKILL)
# Set signal handler
signal.signal(signal.SIGTERM, handle_sigterm)
# Ignore SIGTTOU's generated by output from forked children.
signal.signal(signal.SIGTTOU, signal.SIG_IGN)
# Server side tests that call shell scripts often depend on $USER being set
# but depending on how you launch your autotest scheduler it may not be set.
os.environ['USER'] = getpass.getuser()
if parser.options.machines:
machines = parser.options.machines.replace(',', ' ').strip().split()
else:
machines = []
machines_file = parser.options.machines_file
label = parser.options.label
group_name = parser.options.group_name
user = parser.options.user
client = parser.options.client
server = parser.options.server
install_before = parser.options.install_before
install_after = parser.options.install_after
verify = parser.options.verify
repair = parser.options.repair
cleanup = parser.options.cleanup
no_tee = parser.options.no_tee
parse_job = parser.options.parse_job
execution_tag = parser.options.execution_tag
if not execution_tag:
execution_tag = parse_job
host_protection = parser.options.host_protection
ssh_user = parser.options.ssh_user
ssh_port = parser.options.ssh_port
ssh_pass = parser.options.ssh_pass
collect_crashinfo = parser.options.collect_crashinfo
control_filename = parser.options.control_filename
# can't be both a client and a server side test
if client and server:
parser.parser.error("Can not specify a test as both server and client!")
if len(parser.args) < 1 and not (verify or repair or cleanup
or collect_crashinfo):
parser.parser.error("Missing argument: control file")
# We have a control file unless it's just a verify/repair/cleanup job
if len(parser.args) > 0:
control = parser.args[0]
else:
control = None
if machines_file:
machines = []
for m in open(machines_file, 'r').readlines():
# remove comments, spaces
m = re.sub('#.*', '', m).strip()
if m:
machines.append(m)
print "Read list of machines from file: %s" % machines_file
        print ','.join(machines)
if machines:
for machine in machines:
if not machine or re.search('\s', machine):
parser.parser.error("Invalid m
|
achine: %s" % str(machine))
machines = list(set(machines))
machines.sort()
if group_name and len(machines) < 2:
parser.parser.error("-G %r may only be supplied with more than one machine."
% group_name)
kwargs = {'group_name': group_name, 'tag': execution_tag}
if control_filename:
kwargs['control_filename'] = control_filename
job = server_job.server_job(control, parser.args[1:], results, label,
user, machines, client, parse_job,
ssh_user, ssh_port, ssh_pass, **kwargs)
job.logging.start_logging()
job.init_parser()
# perform checks
job.precheck()
# run the job
exit_code = 0
try:
try:
if repair:
job.repair(host_protection)
elif verify:
job.verify()
else:
job.run(cleanup, install_before, install_after,
only_collect_crashinfo=collect_crashinfo)
finally:
while job.hosts:
host = job.hosts.pop()
host.close()
except:
exit_code = 1
traceback.print_exc()
if pid_file_manager:
pid_file_manager.num_tests_failed = job.num_tests_failed
pid_file_manager.close_file(exit_code)
job.cleanup_parser()
sys.exit(exit_code)
def main():
# grab the parser
parser = autoserv_parser.autoserv_parser
parser.parse_args()
if len(sys.argv) == 1:
parser.parser.print_help()
sys.exit(1)
if parser.options.no_logging:
results = None
else:
output_dir = global_config.get_config_value('COMMON',
'test_output_dir',
default="")
results = parser.options.results
if not results:
results = 'results.' + time.strftime('%Y-%m-%d-%H.%M.%S')
if output_dir:
results = os.path.join(output_dir, results)
results = os.path.abspath(results)
resultdir_exists = False
for filename in ('control.srv', 'status.log', '.autoserv_execute'):
if os.path.exists(os.path.join(results, filename)):
resultdir_exists = True
if not parser.options.use_existing_results and resultdir_exists:
error = "Error: results directory already exists: %s\n" % results
sys.stderr.write(error)
sys.exit(1)
        # Now that we have verified that there's no leftover results dir from
        # previous jobs, let's create the result dir, since the logging system
        # needs to create the log file in there.
if not os.path.isdir(results):
os.makedirs(results)
logging_manager.configure_logging(
server_logging_config.ServerLoggingConfig(), results_dir=results,
use_console=not parser.options.no_tee,
verbose=parser.options.verbose,
no_console_prefix=parser.options.no_console_prefix)
if results:
logging.info("Results placed in %s" % results)
    # wait until now to perform this check, so it gets properly logged
if parser.options.use_existing_results and not resultdir_exists:
logging.error("No existing results directory found: %s", results)
sys.exit(1)
if parser.options.write_pidfile:
pid_file_manager = pidfile.PidFileManager(parser.options.pidfile_label,
results)
pid_file_manager.open_file()
else:
pid_file_manager = None
autotest_remote.BaseAutotest.set_install_in_tmpdir(
parser.options.install_in_tmpdir)
exit_code = 0
try:
try:
run_autoserv(pid_file_manager, results, parser)
except SystemExit, e:
exit_code = e.code
except:
traceback.print_exc()
# If we don't know what happ
|
automl/SpySMAC
|
cave/reader/configurator_run.py
|
Python
|
bsd-3-clause
| 16,612
| 0.003792
|
import copy
import logging
import os
import tempfile
from collections import OrderedDict
from contextlib import contextmanager
import numpy as np
from pimp.importance.importance import Importance
from smac.runhistory.runhistory import RunHistory, DataOrigin
from smac.utils.io.input_reader import InputReader
from smac.utils.validate import Validator
from smac import __version__ as smac_version
from cave.reader.smac2_reader import SMAC2Reader
from cave.reader.smac3_reader import SMAC3Reader
from cave.utils.helpers import scenario_sanity_check
from cave.utils.timing import timing
class ConfiguratorRun(object):
"""
ConfiguratorRuns load and maintain information about individual configurator
runs. There are different supported formats, like: BOHB, SMAC3, SMAC2 and CSV
This class is responsible for providing a scenario, a runhistory and a
trajectory and handling original/validated data appropriately.
To create a ConfiguratorRun from a folder, use Configurator.from_folder()
"""
def __init__(self,
scenario,
original_runhistory,
validated_runhistory,
trajectory,
options,
path_to_folder=None,
ta_exec_dir=None,
file_format=None,
validation_format=None,
reduced_to_budgets=None,
output_dir=None,
):
"""
Parameters
----------
scenario: Scenario
scenario
original_runhistory, validated_runhistory: RunHistory
            runhistories containing only the original evaluated data (during the optimization process) or the validated data
where points of interest are reevaluated after the optimization process
trajectory: List[dict]
a trajectory of the best performing configurations at each point in time
options: dict
options can define a number of custom settings
path_to_folder: str
path to the physical folder containing the data
ta_exec_dir: str
path to the target-algorithm-execution-directory. This is only important for SMAC-optimized data
file_format, validation_format: str
            will be autodetected at some point soon; until then, specify the file-format (SMAC2, SMAC3, BOHB, etc...)
reduced_to_budgets: List str int or float
budgets, with which this cr is associated
output_dir: str
where to save analysis-data for this cr
"""
self.logger = logging.getLogger("cave.ConfiguratorRun.{}".format(path_to_folder))
self.rng = np.random.RandomState(42)
self.options = options
self.path_to_folder = path_to_folder
self.reduced_to_budgets = [None] if reduced_to_budgets is None else reduced_to_budgets
self.scenario = scenario
self.original_runhistory = original_runhistory
self.validated_runhistory = validated_runhistory
self.trajectory = trajectory
self.ta_exec_dir = ta_exec_dir
self.file_format = file_format
self.validation_format = validation_format
if not output_dir:
self.logger.debug("New outputdir")
output_dir = tempfile.mkdtemp()
self.output_dir = os.path.join(output_dir, 'analysis_data', self.get_identifier())
os.makedirs(self.output_dir, exist_ok=True)
        self.default = self.scenario.cs.get_default_configuration()
self.incumbent = self.trajectory[-1]['incumbent'] if self.trajectory else None
self.feature_names = self._get_feature_names()
# Create combined runhistory to collect all "real" runs
self.combined_runhistory = RunHistory()
self.combined_runhistory.update(self.original_runhistory, origin=DataOrigin.INTERNAL)
if self.validated_runhistory is not None:
self.combined_runhistory.update(self.validated_runhistory, origin=DataOrigin.EXTERNAL_SAME_INSTANCES)
# Create runhistory with estimated runs (create Importance-object of pimp and use epm-model for validation)
self.epm_runhistory = RunHistory()
self.epm_runhistory.update(self.combined_runhistory)
# Initialize importance and validator
self._init_pimp_and_validator()
try:
self._validate_default_and_incumbents("epm", self.ta_exec_dir)
except KeyError as err:
self.logger.debug(err, exc_info=True)
msg = "Validation of default and incumbent failed. SMAC (v: {}) does not support validation of budgets+ins"\
"tances yet, if you use budgets but no instances ignore this warning.".format(str(smac_version))
if self.feature_names:
self.logger.warning(msg)
else:
self.logger.debug(msg)
# Set during execution, to share information between Analyzers
self.share_information = {'parameter_importance': OrderedDict(),
'feature_importance': OrderedDict(),
'evaluators': OrderedDict(),
'validator': None,
'hpbandster_result': None, # Only for file-format BOHB
}
def get_identifier(self):
return self.identify(self.path_to_folder, self.reduced_to_budgets)
@classmethod
def identify(cls, path, budget):
path = path if path is not None else "all_folders"
budget = str(budget) if budget is not None else "all_budgets"
res = "_".join([path, budget]).replace('/', '_')
if len(res) > len(str(hash(res))):
res = str(hash(res))
return res
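    # Illustrative example of the identifier scheme above (values assumed, not
    # taken from a real run): identify("run_1", [0.25]) joins the parts into
    # "run_1_[0.25]"; only if that joined string were longer than the string
    # form of its hash would it be replaced by str(hash(...)).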
def get_budgets(self):
return set([k.budget for k in self.original_runhistory.data.keys()])
@classmethod
def from_folder(cls,
folder: str,
ta_exec_dir: str,
options,
file_format: str='SMAC3',
validation_format: str='NONE',
output_dir=None,
):
"""Initialize scenario, runhistory and incumbent from folder
Parameters
----------
folder: string
output-dir of this configurator-run -> this is also the 'id' for a single run in parallel optimization
ta_exec_dir: string
if the execution directory for the SMAC-run differs from the cwd,
there might be problems loading instance-, feature- or PCS-files
in the scenario-object. since instance- and PCS-files are necessary,
specify the path to the execution-dir of SMAC here
file_format: string
from [SMAC2, SMAC3, BOHB, APT, CSV]
validation_format: string
from [SMAC2, SMAC3, APT, CSV, NONE], in which format to look for validated data
"""
logger = logging.getLogger("cave.ConfiguratorRun.{}".format(folder))
logger.debug("Loading from \'%s\' with ta_exec_dir \'%s\' with file-format '%s' and validation-format %s. ",
folder, ta_exec_dir, file_format, validation_format)
if file_format == 'BOHB' or file_format == "APT":
logger.debug("File format is BOHB or APT, assmuming data was converted to SMAC3-format using "
"HpBandSter2SMAC from cave.reader.converter.hpbandster2smac.")
validation_format = validation_format if validation_format != 'NONE' else None
# Read in data (scenario, runhistory & trajectory)
reader = cls.get_reader(file_format, folder, ta_exec_dir)
scenario = reader.get_scenario()
scenario_sanity_check(scenario, logger)
original_runhistory = reader.get_runhistory(scenario.cs)
validated_runhistory = None
if validation_format == "NONE" or validation_format is None:
validation_format = None
else:
logger.debug('Using format %s for validation', validation_format)
            vali_reader = cls.get_reader(validation_format, folder, ta_exec_dir)
|
django-stars/dash2011
|
presence/apps/workflow/admin.py
|
Python
|
bsd-3-clause
| 703
| 0
|
from django.contrib import admin
from workflow.models import State, StateLog, NextState, Project, Location
from workflow.activities import StateActivity
class NextStateInline(admin.StackedInline):
model = NextState
fk_name = 'current_state'
extra = 0
class StateAdmin(admin.ModelAdmin):
inlines = [NextStateInline, ]
list_display = ('name', 'is_work_state',)
class StateLogAdmin(admin.ModelAdmin):
readonly_fields = ['start', 'end', 'state', 'user']
list_display = ('user', 'state', 'project', 'location', 'start', 'end',)
admin.site.register(State, StateAdmin)
admin.site.register(StateLog, StateLogAdmin)
admin.site.register(Project)
admin.site.register(Location)
|
chuckeles/genetic-treasures
|
test_instruction_set.py
|
Python
|
mit
| 2,849
| 0.002808
|
import unittest
import instruction_set
class TestInstructionSet(unittest.TestCase):
def test_generate(self):
self.assertIsInstance(instruction_set.generate(), list)
self.assertEqual(len(instruction_set.generate()), 64)
self.assertEqual(len(instruction_set.generate(32)), 32)
inset = instruction_set.generate()
for instruction in inset:
self.assertGreaterEqual(instruction, 0)
self.assertLess(instruction, 256)
def test_crossover(self):
parent1 = instruction_set.generate()
parent2 = instruction_set.generate()
        children = instruction_set.crossover(parent1, parent2)
random_children = instruction_set.crossover(parent1, parent2, take_random=True)
self.assertIsInstance(children, tuple)
self.assertIsInstance(children[0], list)
self.assertIsInstance(children[1], list)
self.assertEqual(len(children[0]), len(parent1))
self.assertEqual(len(children[1]), len(parent1))
for i, _ in enumerate(parent1):
self.assertTrue(
(children[0][i] in parent1 and children[1][i] in parent2) or
(children[0][i] in parent2 and children[1][i] in parent1)
)
self.assertTrue(
(random_children[0][i] in parent1 and random_children[1][i] in parent2) or
(random_children[0][i] in parent2 and random_children[1][i] in parent1)
)
def test_mutate_bits(self):
inset = instruction_set.generate()
self.assertEqual(len(inset), len(instruction_set.mutate_bits(inset)))
self.assertEqual(inset, instruction_set.mutate_bits(inset, mutation_chance=0))
self.assertNotEqual(inset, instruction_set.mutate_bits(inset, mutation_chance=100))
for instruction in instruction_set.mutate_bits(inset):
self.assertGreaterEqual(instruction, 0)
self.assertLess(instruction, 256)
def test_mutate_bytes(self):
inset = instruction_set.generate()
self.assertEqual(len(inset), len(instruction_set.mutate_bytes(inset)))
self.assertEqual(inset, instruction_set.mutate_bytes(inset, mutation_chance=0))
self.assertNotEqual(inset, instruction_set.mutate_bytes(inset, mutation_chance=100))
for instruction in instruction_set.mutate_bytes(inset):
self.assertGreaterEqual(instruction, 0)
self.assertLess(instruction, 256)
def test_mutate_combined(self):
inset = instruction_set.generate()
self.assertEqual(len(inset), len(instruction_set.mutate_combined(inset)))
for instruction in instruction_set.mutate_combined(inset):
self.assertGreaterEqual(instruction, 0)
self.assertLess(instruction, 256)
if __name__ == '__main__':
unittest.main()
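# A minimal sketch of the generate() these tests exercise (hypothetical -- the
# real instruction_set module is not included in this dump). The tests only
# require a list of `length` random byte values in the range [0, 256):
#
#   import random
#
#   def generate(length=64):
#       return [random.randint(0, 255) for _ in range(length)]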
|
OliverCole/ZeroNet
|
src/Test/TestContent.py
|
Python
|
gpl-2.0
| 12,511
| 0.004876
|
import json
import time
from cStringIO import StringIO
import pytest
from Crypt import CryptBitcoin
from Content.ContentManager import VerifyError, SignError
from util.SafeRe import UnsafePatternError
@pytest.mark.usefixtures("resetSettings")
class TestContent:
privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"
def testInclude(self, site):
# Rules defined in parent content.json
rules = site.content_manager.getRules("data/test_include/content.json")
assert rules["signers"] == ["15ik6LeBWnACWfaika1xqGapRZ1zh3JpCo"] # Valid signer
assert rules["user_name"] == "test" # Extra data
assert rules["max_size"] == 20000 # Max size of files
assert not rules["includes_allowed"] # Don't allow more includes
assert rules["files_allowed"] == "data.json" # Allowed file pattern
# Valid signers for "data/test_include/content.json"
valid_signers = site.content_manager.getValidSigners("data/test_include/content.json")
assert "15ik6LeBWnACWfaika1xqGapRZ1zh3JpCo" in valid_signers # Extra valid signer defined in parent content.json
assert "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" in valid_signers # The site itself
assert len(valid_signers) == 2 # No more
# Valid signers for "data/users/content.json"
valid_signers = site.content_manager.getValidSigners("data/users/content.json")
assert "1LSxsKfC9S9TVXGGNSM3vPHjyW82jgCX5f" in valid_signers # Extra valid signer defined in parent content.json
assert "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" in valid_signers # The site itself
assert len(valid_signers) == 2
# Valid signers for root content.json
assert site.content_manager.getValidSigners("content.json") == ["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"]
def testInlcudeLimits(self, site):
# Data validation
data_dict = {
"files": {
"data.json": {
"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906",
"size": 505
}
},
"modified": time.time()
}
# Normal data
data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)}
data = StringIO(json.dumps(data_dict))
assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
# Reset
del data_dict["signs"]
# Too large
data_dict["files"]["data.json"]["size"] = 200000 # Emulate 2MB sized data.json
data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)}
data = StringIO(json.dumps(data_dict))
with pytest.raises(VerifyError) as err:
site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
assert "Include too large" in str(err)
# Reset
data_dict["files"]["data.json"]["size"] = 505
del data_dict["signs"]
# Not allowed file
data_dict["files"]["notallowed.exe"] = data_dict["files"]["data.json"]
data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)}
data = StringIO(json.dumps(data_dict))
with pytest.raises(VerifyError) as err:
site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
assert "File not allowed" in str(err)
# Reset
del data_dict["files"]["notallowed.exe"]
del data_dict["signs"]
# Should work again
data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)}
data = StringIO(json.dumps(data_dict))
assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
@pytest.mark.parametrize("inner_path", ["content.json", "data/test_include/content.json", "data/users/content.json"])
def testSign(self, site, inner_path):
# Bad privatekey
with pytest.raises(SignError) as err:
site.content_manager.sign(inner_path, privatekey="5aaa3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMnaa", filewrite=False)
assert "Private key invalid" in str(err)
# Good privatekey
content = site.content_manager.sign(inner_path, privatekey=self.privatekey, filewrite=False)
content_old = site.content_manager.contents[inner_path] # Content before the sign
assert not content_old == content # Timestamp changed
assert site.address in content["signs"] # Used the site's private key to sign
if inner_path == "content.json":
assert len(content["files"]) == 17
elif inner_path == "data/test-include/content.json":
assert len(content["files"]) == 1
elif inner_path == "data/users/content.json":
assert len(content["files"]) == 0
# Everything should be same as before except the modified timestamp and the signs
assert (
{key: val for key, val in content_old.items() if key not in ["modified", "signs", "sign", "zeronet_version"]}
==
{key: val for key, val in content.items() if key not in ["modified", "signs", "sign", "zeronet_version"]}
)
def testSignOptionalFiles(self, site):
for hash in list(site.content_manager.hashfield):
site.content_manager.hashfield.remove(hash)
assert len(site.content_manager.hashfield) == 0
site.content_manager.contents["content.json"]["optional"] = "((data/img/zero.*))"
content_optional = site.content_manager.sign(privatekey=self.privatekey, filewrite=False, remove_missing_optional=True)
del site.content_manager.contents["content.json"]["optional"]
content_nooptional = site.content_manager.sign(privatekey=self.privatekey, filewrite=False, remove_missing_optional=True)
assert len(content_nooptional.get("files_optional", {})) == 0 # No optional files if no pattern
assert len(content_optional["files_optional"]) > 0
assert len(site.content_manager.hashfield) == len(content_optional["files_optional"]) # Hashed optional files should be added to hashfield
assert len(content_nooptional["files"]) > len(content_optional["files"])
def testFileInfo(self, site):
assert "sha512" in site.content_manager.getFileInfo("index.html")
assert site.content_manager.getFileInfo("data/img/domain.png")["content_inner_path"
|
] == "content.json"
assert site
|
.content_manager.getFileInfo("data/users/hello.png")["content_inner_path"] == "data/users/content.json"
assert site.content_manager.getFileInfo("data/users/content.json")["content_inner_path"] == "data/users/content.json"
assert not site.content_manager.getFileInfo("notexist")
# Optional file
file_info_optional = site.content_manager.getFileInfo("data/optional.txt")
assert "sha512" in file_info_optional
assert file_info_optional["optional"] is True
# Not exists yet user content.json
assert "cert_signers" in site.content_manager.getFileInfo("data/users/unknown/content.json")
# Optional user file
file_info_optional = site.content_manager.getFileInfo("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert "sha512" in file_info_optional
assert file_info_optional["optional"] is True
def testVerify(self, site):
inner_path = "data/test_include/content.json"
data_dict = site.storage.loadJson(inner_path)
data = StringIO(json.dumps(data_dict))
# Re-sign
data_dict["signs"] = {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
}
assert site.content_manager.verifyFile(inner_path, data, ignore_same=False)
# Wrong address
data_dict["address"] = "Othersite"
del data_dict["signs"
|
studybuffalo/studybuffalo
|
study_buffalo/rdrhc_calendar/migrations/0014_auto_20171016_1922.py
|
Python
|
gpl-3.0
| 813
| 0
|
# pylint: disable=missing-module-docstring, missing-class-docstring
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('rdrhc_calendar', '0013_auto_20171016_1915'),
]
operations = [
migrations.RenameField(
model_name='shift',
old_name='user',
new_name='sb_user',
),
migrations.RenameField(
model_name='shiftcode',
            old_name='user',
new_name='sb_user',
),
migrations.AlterUniqueTogether(
name='shiftcode',
unique_together=set([('code', 'sb_user', 'role')]),
),
]
|
SDoc/py-sdoc
|
sdoc/sdoc1/data_type/StringDataType.py
|
Python
|
mit
| 3,015
| 0
|
from sdoc.sdoc1.data_type.DataType import DataType
class StringDataType(DataType):
"""
Class for string data types.
"""
# ------------------------------------------------------------------------------------------------------------------
def __init__(self, value: str):
"""
Object constructor.
:param str value: The value of this string constant.
"""
self._value: str = value
"""
The value of this constant integer.
"""
# ------------------------------------------------------------------------------------------------------------------
def debug(self, indent: int = 0) -> str:
"""
Returns a string for debugging.
:param int indent: Unused.
"""
return "'" + self._value + "'"
# ------------------------------------------------------------------------------------------------------------------
def dereference(self):
"""
Returns a clone of this string.
        :rtype: sdoc.sdoc1.data_type.StringDataType.StringDataType
"""
return StringDataType(self._value)
# ------------------------------------------------------------------------------------------------------------------
def get_value(self) -> str:
"""
Returns the underling value of this data type.
"""
return self._value
    # ------------------------------------------------------------------------------------------------------------------
def get_type_id(self) -> int:
"""
Returns the ID of this data type.
"""
return DataType.STRING
# ------------------------------------------------------------------------------------------------------------------
def is_constant(self) -> bool:
"""
Returns False always.
"""
return False
# ------------------------------------------------------------------------------------------------------------------
def is_defined(self) -> bool:
"""
Returns True always.
"""
return True
# ------------------------------------------------------------------------------------------------------------------
def is_scalar(self) -> bool:
"""
Returns True always.
"""
return True
# ------------------------------------------------------------------------------------------------------------------
def is_true(self) -> bool:
"""
Returns True if this string is not empty. Returns False otherwise.
"""
return self._value != ''
# ------------------------------------------------------------------------------------------------------------------
def __str__(self) -> str:
"""
Returns the string representation of the string constant.
"""
return self._value
# ----------------------------------------------------------------------------------------------------------------------
|
mitmproxy/mitmproxy
|
test/mitmproxy/addons/test_intercept.py
|
Python
|
mit
| 1,632
| 0
|
import pytest
from mitmproxy.addons import intercept
from mitmproxy import exceptions
from mitmproxy.test import taddons
from mitmproxy.test import tflow
@pytest.mark.asyncio
async def test_simple():
r = intercept.Intercept()
with taddons.context(r) as tctx:
assert not r.filt
tctx.configure(r, intercept="~q")
assert r.filt
assert tctx.options.intercept_active
with pytest.raises(exceptions.OptionsError):
tctx.configure(r, intercept="~~")
tctx.configure(r, intercept=None)
assert not r.filt
        assert not tctx.options.intercept_active
tctx.configure(r, intercept="~s")
f = tflow.tflow(resp=True)
await tctx.cycle(r, f)
assert f.intercepted
f = tflow.tflow(resp=False)
await tctx.cycle(r, f)
assert not f.intercepted
f = tflow.tflow(resp=True)
r.response(f)
assert f.intercepted
tctx.configure(r, intercept_active=False)
f = tflow.tflow(resp=True)
await tctx.cycle(r, f)
assert not f.intercepted
tctx.configure(r, intercept_active=True)
f = tflow.tflow(resp=True)
await tctx.cycle(r, f)
assert f.intercepted
@pytest.mark.asyncio
async def test_tcp():
r = intercept.Intercept()
with taddons.context(r) as tctx:
tctx.configure(r, intercept="~tcp")
f = tflow.ttcpflow()
await tctx.cycle(r, f)
assert f.intercepted
tctx.configure(r, intercept_active=False)
f = tflow.ttcpflow()
await tctx.cycle(r, f)
assert not f.intercepted
|
glemaitre/scikit-learn
|
examples/svm/plot_svm_nonlinear.py
|
Python
|
bsd-3-clause
| 1,136
| 0.002641
|
"""
==============
Non-linear SVM
==============
Perform binary classification using non-linear SVC
with RBF kernel. The target to predict is a XOR of the
inputs.
The color map illustrates the decision function learned by the SVC.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm
xx, yy = np.meshgrid(np.linspace(-3, 3, 500),
np.linspace(-3, 3, 500))
np.random.seed(0)
X = np.random.randn(300, 2)
Y = np.logical_xor(X[:, 0] > 0, X[:, 1] > 0)
# fit the model
clf = svm.NuSVC(gamma='auto')
clf.fit(X, Y)
# plot the decision function for each datapoint on the grid
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
plt.imshow(Z, interpolation='nearest',
extent=(xx.min(), xx.max(), yy.min(), yy.max()), aspect='auto',
origin='lower', cmap=plt.cm.PuOr_r)
contours = plt.contour(xx, yy, Z, levels=[0], linewidths=2,
linestyles='dashed')
plt.scatter(X[:, 0], X[:, 1], s=30, c=Y, cmap=plt.cm.Paired,
edgecolors='k')
plt.xticks(())
plt.yticks(())
plt.axis([-3, 3, -3, 3])
plt.show()
|
AnnalisaS/migration_geonode
|
geonode/layers/models.py
|
Python
|
gpl-3.0
| 29,589
| 0.004968
|
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from urlparse import urlparse
import httplib2
import urllib
import logging
from datetime import datetime
from lxml import etree
from django.conf import settings
from django.db import models
from django.db.models import signals
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext, ugettext_lazy as _
from django.core.urlresolvers import reverse
from geonode import GeoNodeException
from geonode.base.models import ResourceBase, ResourceBaseManager, Link, \
resourcebase_post_save, resourcebase_post_delete
from geonode.utils import _user, _password, get_wms
from geonode.utils import http_client
from geonode.geoserver.helpers import cascading_delete
from geonode.people.models import Profile
from geonode.security.enumerations import AUTHENTICATED_USERS, ANONYMOUS_USERS
from geonode.layers.ows import wcs_links, wfs_links, wms_links, \
wps_execute_layer_attribute_statistics
from geonode.layers.enumerations import LAYER_ATTRIBUTE_NUMERIC_DATA_TYPES
from geonode.utils import ogc_server_settings
from geoserver.catalog import Catalog, FailedRequestError
from agon_ratings.models import OverallRating
logger = logging.getLogger("geonode.layers.models")
class Style(models.Model):
"""Model for storing styles.
"""
name = models.CharField(_('style name'), max_length=255, unique=True)
sld_title = models.CharField(max_length=255, null=True, blank=True)
sld_body = models.TextField(_('sld text'), null=True, blank=True)
sld_version = models.CharField(_('sld version'), max_length=12, null=True, blank=True)
sld_url = models.CharField(_('sld url'), null = True, max_length=1000)
workspace = models.CharField(max_length=255, null=True, blank=True)
def __str__(self):
return "%s" % self.name.encode('utf-8')
class LayerManager(ResourceBaseManager):
def __init__(self):
models.Manager.__init__(self)
url = ogc_server_settings.rest
self.gs_catalog = Catalog(url, _user, _password)
def add_bbox_query(q, bbox):
'''modify the queryset q to limit to the provided bbox
bbox - 4 tuple of floats representing x0,x1,y0,y1
returns the modified query
'''
bbox = map(str, bbox) # 2.6 compat - float to decimal conversion
q = q.filter(bbox_x0__gte=bbox[0])
q = q.filter(bbox_x1__lte=bbox[1])
q = q.filter(bbox_y0__gte=bbox[2])
return q.filter(bbox_y1__lte=bbox[3])
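# Illustrative call (coordinate values assumed): restrict layers to a window,
# with the tuple ordered (x0, x1, y0, y1) as described in the docstring above:
#   visible = add_bbox_query(Layer.objects.all(), (-10.0, 10.0, -45.0, 45.0))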
class Layer(ResourceBase):
"""
Layer (inherits ResourceBase fields)
"""
# internal fields
objects = LayerManager()
workspace = models.CharField(max_length=128)
store = models.CharField(max_length=128)
storeType = models.CharField(max_length=128)
name = models.CharField(max_length=128)
typename = models.CharField(max_length=128, unique=True)
popular_count = models.IntegerField(default=0)
share_count = models.IntegerField(default=0)
default_style = models.ForeignKey(Style, related_name='layer_default_style', null=True, blank=True)
styles = models.ManyToManyField(Style, related_name='layer_styles')
def update_thumbnail(self, save=True):
try:
self.save_thumbnail(self._thumbnail_url(width=200, height=150), save)
except RuntimeError, e:
logger.warn('Could not create thumbnail for %s' % self, e)
def _render_thumbnail(self, spec):
resp, content = http_client.request(spec)
if 'ServiceException' in content or resp.status < 200 or resp.status > 299:
msg = 'Unable to obtain thumbnail: %s' % content
raise RuntimeError(msg)
return content
def _thumbnail_url(self, width=20, height=None):
""" Generate a URL representing thumbnail of the layer """
params = {
'layers': self.typename.encode('utf-8'),
'format': 'image/png8',
'width': width,
}
        if height is not None:
params['height'] = height
# Avoid using urllib.urlencode here because it breaks the url.
# commas and slashes in values get encoded and then cause trouble
# with the WMS parser.
p = "&".join("%s=%s"%item for item in params.items())
return ogc_server_settings.LOCATION + "wms/reflect?" + p
def verify(self):
"""Makes sure the state of the layer is consistent in GeoServer and Catalogue.
"""
# Check the layer is in the wms get capabilities record
# FIXME: Implement caching of capabilities record site wide
_local_wms = get_wms()
record = _local_wms.contents.get(self.typename)
if record is None:
msg = "WMS Record missing for layer [%s]" % self.typename.encode('utf-8')
raise GeoNodeException(msg)
@property
def display_type(self):
return ({
"dataStore" : "Vector Data",
"coverageStore": "Raster Data",
}).get(self.storeType, "Data")
@property
def store_type(self):
cat = Layer.objects.gs_catalog
res = cat.get_resource(self.name)
res.store.fetch()
return res.store.dom.find('type').text
@property
def service_type(self):
if self.storeType == 'coverageStore':
return "WCS"
if self.storeType == 'dataStore':
return "WFS"
def get_absolute_url(self):
return reverse('layer_detail', args=(self.typename,))
def attribute_config(self):
#Get custom attribute sort order and labels if any
cfg = {}
visible_attributes = self.attribute_set.visible()
if (visible_attributes.count() > 0):
cfg["getFeatureInfo"] = {
"fields": [l.attribute for l in visible_attributes],
"propertyNames": dict([(l.attribute,l.attribute_label) for l in visible_attributes])
}
return cfg
def __str__(self):
return "%s Layer" % self.typename.encode('utf-8')
class Meta:
# custom permissions,
# change and delete are standard in django
permissions = (('view_layer', 'Can view'),
('change_layer_permissions', "Can change permissions"), )
# Permission Level Constants
# LEVEL_NONE inherited
LEVEL_READ = 'layer_readonly'
LEVEL_WRITE = 'layer_readwrite'
LEVEL_ADMIN = 'layer_admin'
def set_default_permissions(self):
self.set_gen_level(ANONYMOUS_USERS, self.LEVEL_READ)
self.set_gen_level(AUTHENTICATED_USERS, self.LEVEL_READ)
# remove specific user permissions
current_perms = self.get_all_level_info()
for username in current_perms['users'].keys():
user = User.objects.get(username=username)
self.set_user_level(user, self.LEVEL_NONE)
# assign owner admin privileges
if self.owner:
self.set_user_level(self.owner, self.LEVEL_ADMIN)
def tiles_url(self):
return self.link_set.get(name='Tiles').url
def maps(self):
from geonode.maps.models import MapLayer
return MapLayer.objects.filter(name=self.typename)
@property
def class_name(self):
return self.__class__.__name__
class Layer_Styles(models.Model):
    layer = models.ForeignKey(Layer)
|
taurenk/Flask-Angular-TaskList
|
backend/app/user_api.py
|
Python
|
mit
| 1,159
| 0.001726
|
__author__ = 'tauren'
from flask import abort
from flask_restful import Resource
from flask.ext.restful import fields, marshal, reqparse
from flask_login import current_user
from models import User, db
user_fields = {
'username': fields.String,
    'id': fields.Integer,
'uri': fields.Url('user')
}
class UserApi(Resource):
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument('username', type=str, required=True,
help='No username provided', location='json')
self.reqparse.add_argument('password', type=str, required=True,
help='No password provided', location='json')
super(UserApi, self).__init__()
def post(self):
args = self.reqparse.parse_args()
new_user = User(args['username'], args['password'])
db.session.add(new_user)
db.session.commit()
return 201
def get(self):
user = User.query.filter_by(id=current_user.id).all()
if not user:
return abort(404)
return {'results': marshal(user, user_fields)}
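# Illustrative request/response shapes (route and ids are assumptions; only
# the field names come from the reqparse/marshal definitions above):
#   POST /users  {"username": "alice", "password": "secret"}        -> 201
#   GET  /users  -> {"results": [{"username": "alice", "id": 1, "uri": "..."}]}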
|
MansoorMajeed/encrypted-notes
|
app/forms.py
|
Python
|
gpl-2.0
| 138
| 0.014493
|
from flask.ext.wtf import Form
from wtforms import StringField, BooleanField, PasswordField, SelectField, DateTimeField, TextAreaField
|
bugobliterator/MAVProxy
|
MAVProxy/modules/mavproxy_map/mp_elevation.py
|
Python
|
gpl-3.0
| 3,785
| 0.004756
|
#!/usr/bin/python
'''
Wrapper for the SRTM module (srtm.py)
It will grab the altitude of a long,lat pair from the SRTM database
Created by Stephen Dade ([email protected])
'''
import os
import sys
import time
import numpy
from MAVProxy.modules.mavproxy_map import srtm
class ElevationModel():
'''Elevation Model. Only SRTM for now'''
def __init__(self, database='srtm', offline=0):
'''Use offline=1 to disable any downloading of tiles, regardless of whether the
tile exists'''
self.database = database
if self.database == 'srtm':
self.downloader = srtm.SRTMDownloader(offline=offline)
self.downloader.loadFileList()
self.tileDict = dict()
'''Use the Geoscience Australia database instead - watch for the correct database path'''
if self.database == 'geoscience':
from MAVProxy.modules.mavproxy_map import GAreader
self.mappy = GAreader.ERMap()
self.mappy.read_ermapper(os.path.join(os.environ['HOME'], './Documents/Elevation/Canberra/GSNSW_P756demg'))
def GetElevation(self, latitude, longitude, timeout=0):
'''Returns the altitude (m ASL) of a given lat/long pair, or None if unknown'''
if self.database == 'srtm':
TileID = (numpy.floor(latitude), numpy.floor(longitude))
if TileID in self.tileDict:
alt = self.tileDict[TileID].getAltitudeFromLatLon(latitude, longitude)
else:
tile = self.downloader.getTile(numpy.floor(latitude), numpy.floor(longitude))
if tile == 0:
if timeout > 0:
t0 = time.time()
while time.time() < t0+timeout and tile == 0:
tile = self.downloader.getTile(numpy.floor(latitude), numpy.floor(longitude))
if tile == 0:
time.sleep(0.1)
if tile == 0:
return None
self.tileDict[TileID] = tile
alt = tile.getAltitudeFromLatLon(latitude, longitude)
if self.database == 'geoscience':
alt = self.mappy.getAltitudeAtPoint(latitude, longitude)
return alt
if __name__ == "__main__":
from optparse import OptionParser
parser = OptionParser("mp_elevation.py [options]")
parser.add_option("--lat", type='float', default=-35.052544, help="start latitude")
parser.add_option("--lon", type='float', default=149.509165, help="start longitude")
parser.add_option("--database", type='string', default='srtm', help="elevation database")
(opts, args) = parser.parse_args()
EleModel = ElevationModel(opts.database)
lat = opts.lat
lon = opts.lon
'''Do a few lat/long pairs to demonstrate the caching
Note the +0.000001 to the time. On faster PCs, the two time periods
may in fact be equal, so we add a little extra time on the end to account for this'''
t0 = time.time()
alt = EleModel.GetElevation(lat, lon, timeout=10)
if alt is None:
print("Tile not available")
sys.exit(1)
t1 = time.time()+.000001
print("Altitude at (%.6f, %.6f) is %u m. Pulled at
|
%.1f FPS" % (lat, lon, alt, 1/(t1
|
-t0)))
lat = opts.lat+0.001
lon = opts.lon+0.001
t0 = time.time()
alt = EleModel.GetElevation(lat, lon, timeout=10)
t1 = time.time()+.000001
print("Altitude at (%.6f, %.6f) is %u m. Pulled at %.1f FPS" % (lat, lon, alt, 1/(t1-t0)))
lat = opts.lat-0.001
lon = opts.lon-0.001
t0 = time.time()
alt = EleModel.GetElevation(lat, lon, timeout=10)
t1 = time.time()+.000001
print("Altitude at (%.6f, %.6f) is %u m. Pulled at %.1f FPS" % (lat, lon, alt, 1/(t1-t0)))
|
osrg/bgperf
|
quagga.py
|
Python
|
apache-2.0
| 5,099
| 0.003334
|
# Copyright (C) 2016 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from base import *
class Quagga(Container):
CONTAINER_NAME = None
GUEST_DIR = '/root/config'
def __init__(self, host_dir, conf, image='bgperf/quagga'):
super(Quagga, self).__init__(self.CONTAINER_NAME, image, host_dir, self.GUEST_DIR, conf)
@classmethod
def build_image(cls, force=False, tag='bgperf/quagga', checkout='HEAD', nocache=False):
cls.dockerfile = '''
FROM ubuntu:latest
WORKDIR /root
RUN useradd -M quagga
RUN mkdir /var/log/quagga && chown quagga:quagga /var/log/quagga
RUN mkdir /var/run/quagga && chown quagga:quagga /var/run/quagga
RUN apt-get update && apt-get install -qy git autoconf libtool gawk make telnet libreadline6-dev
RUN git clone git://git.sv.gnu.org/quagga.git quagga
RUN cd quagga && git checkout {0} && ./bootstrap.sh && \
./configure --disable-doc --localstatedir=/var/run/quagga && make && make install
RUN ldconfig
'''.format(checkout)
super(Quagga, cls).build_image(force, tag, nocache)
class QuaggaTarget(Quagga, Target):
CONTAINER_NAME = 'bgperf_quagga_target'
CONFIG_FILE_NAME = 'bgpd.conf'
def write_config(self, scenario_global_conf):
config = """hostname bgpd
password zebra
router bgp {0}
bgp router-id {1}
""".format(self.conf['as'], self.conf['router-id'])
def gen_neighbor_config(n):
local_addr = n['local-address']
c = """neighbor {0} remote-as {1}
neighbor {0} advertisement-interval 1
neighbor {0} route-server-client
neighbor {0} timers 30 90
""".format(local_addr, n['as'])
if 'filter' in n:
for p in (n['filter']['in'] if 'in' in n['filter'] else []):
c += 'neighbor {0} route-map {1} export\n'.format(local_addr, p)
return c
        with open('{0}/{1}'.format(self.host_dir, self.CONFIG_FILE_NAME), 'w') as f:
f.write(config)
for n in list(flatten(t.get('neighbors', {}).values() for t in scenario_global_conf['testers'])) + [scenario_global_conf['monitor']]:
f.write(gen_neighbor_config(n))
if 'policy' in scenario_global_conf:
seq = 10
for k, v in scenario_global_conf['policy'].iteritems():
match_info = []
for i, match in enumerate(v['match']):
n = '{0}_match_{1}'.format(k, i)
if match['type'] == 'prefix':
f.write(''.join('ip prefix-list {0} deny {1}\n'.format(n, p) for p in match['value']))
f.write('ip prefix-list {0} permit any\n'.format(n))
elif match['type'] == 'as-path':
f.write(''.join('ip as-path access-list {0} deny _{1}_\n'.format(n, p) for p in match['value']))
f.write('ip as-path access-list {0} permit .*\n'.format(n))
elif match['type'] == 'community':
f.write(''.join('ip community-list standard {0} permit {1}\n'.format(n, p) for p in match['value']))
f.write('ip community-list standard {0} permit\n'.format(n))
elif match['type'] == 'ext-community':
f.write(''.join('ip extcommunity-list standard {0} permit {1} {2}\n'.format(n, *p.split(':', 1)) for p in match['value']))
f.write('ip extcommunity-list standard {0} permit\n'.format(n))
match_info.append((match['type'], n))
f.write('route-map {0} permit {1}\n'.format(k, seq))
for info in match_info:
if info[0] == 'prefix':
f.write('match ip address prefix-list {0}\n'.format(info[1]))
elif info[0] == 'as-path':
f.write('match as-path {0}\n'.format(info[1]))
elif info[0] == 'community':
f.write('match community {0}\n'.format(info[1]))
elif info[0] == 'ext-community':
f.write('match extcommunity {0}\n'.format(info[1]))
seq += 10
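    # For reference, gen_neighbor_config() above emits lines of this shape for
    # each neighbor (address and AS number are assumed example values):
    #   neighbor 10.0.0.2 remote-as 65002
    #   neighbor 10.0.0.2 advertisement-interval 1
    #   neighbor 10.0.0.2 route-server-client
    #   neighbor 10.0.0.2 timers 30 90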
def get_startup_cmd(self):
return '\n'.join(
['#!/bin/bash',
'ulimit -n 65536',
'bgpd -u root -f {guest_dir}/{config_file_name}']
).format(
guest_dir=self.guest_dir,
config_file_name=self.CONFIG_FILE_NAME)
|
digibyte/digibyte
|
test/lint/check-rpc-mappings.py
|
Python
|
mit
| 6,062
| 0.003299
|
#!/usr/bin/env python3
# Copyright (c) 2009-2019 The Bitcoin Core developers
# Copyright (c) 2014-2019 The DigiByte Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Check RPC argument consistency."""
from collections import defaultdict
import os
import re
import sys
# Source files (relative to root) to scan for dispatch tables
SOURCES = [
"src/rpc/server.cpp",
"src/rpc/blockchain.cpp",
"src/rpc/mining.cpp",
"src/rpc/misc.cpp",
"src/rpc/net.cpp",
"src/rpc/rawtransaction.cpp",
"src/wallet/rpcwallet.cpp",
]
# Source file (relative to root) containing conversion mapping
SOURCE_CLIENT = 'src/rpc/client.cpp'
# Argument names that should be ignored in consistency checks
IGNORE_DUMMY_ARGS = {'dummy', 'arg0', 'arg1', 'arg2', 'arg3', 'arg4', 'arg5', 'arg6', 'arg7', 'arg8', 'arg9'}
class RPCCommand:
def __init__(self, name, args):
self.name = name
self.args = args
class RPCArgument:
def __init__(self, names, idx):
self.names = names
self.idx = idx
self.convert = False
def parse_string(s):
assert s[0] == '"'
assert s[-1] == '"'
return s[1:-1]
def process_commands(fname):
"""Find and parse dispatch table in implementation file `fname`."""
cmds = []
in_rpcs = False
with open(fname, "r", encoding="utf8") as f:
for line in f:
line = line.rstrip()
if not in_rpcs:
if re.match("static const CRPCCommand .*\[\] =", line):
in_rpcs = True
else:
if line.startswith('};'):
in_rpcs = False
elif '{' in line and '"' in line:
m = re.search('{ *("[^"]*"), *("[^"]*"), *&([^,]*), *{([^}]*)} *},', line)
assert m, 'No match to table expression: %s' % line
name = parse_string(m.group(2))
args_str = m.group(4).strip()
if args_str:
args = [RPCArgument(parse_string(x.strip()).split('|'), idx) for idx, x in enumerate(args_str.split(','))]
else:
args = []
cmds.append(RPCCommand(name, args))
assert not in_rpcs and cmds, "Something went wrong with parsing the C++ file: update the regexps"
return cmds
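# For orientation, the dispatch-table regex above targets entries of this
# general shape (an assumed example, not copied from the scanned sources):
#   { "blockchain",  "getblockcount",  &getblockcount,  {} },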
def process_mapping(fname):
"""Find and parse conversion table in implementation file `fname`."""
cmds = []
in_rpcs = False
with open(fname, "r", encoding="utf8") as f:
for line in f:
line = line.rstrip()
if not in_rpcs:
if line == 'static const CRPCConvertParam vRPCConvertParams[] =':
in_rpcs = True
else:
if line.startswith('};'):
in_rpcs = False
elif '{' in line and '"' in line:
m = re.search('{ *("[^"]*"), *([0-9]+) *, *("[^"]*") *},',
|
line)
assert m, 'No match to table expression: %s' % line
name = parse_string(m.group(1))
idx = int(m.group(2))
argname = parse_string(m.group(3))
                    cmds.append((name, idx, argname))
assert not in_rpcs and cmds
return cmds
def main():
root = sys.argv[1]
# Get all commands from dispatch tables
cmds = []
for fname in SOURCES:
cmds += process_commands(os.path.join(root, fname))
cmds_by_name = {}
for cmd in cmds:
cmds_by_name[cmd.name] = cmd
# Get current convert mapping for client
client = SOURCE_CLIENT
mapping = set(process_mapping(os.path.join(root, client)))
print('* Checking consistency between dispatch tables and vRPCConvertParams')
# Check mapping consistency
errors = 0
for (cmdname, argidx, argname) in mapping:
try:
rargnames = cmds_by_name[cmdname].args[argidx].names
except IndexError:
print('ERROR: %s argument %i (named %s in vRPCConvertParams) is not defined in dispatch table' % (cmdname, argidx, argname))
errors += 1
continue
if argname not in rargnames:
print('ERROR: %s argument %i is named %s in vRPCConvertParams but %s in dispatch table' % (cmdname, argidx, argname, rargnames), file=sys.stderr)
errors += 1
# Check for conflicts in vRPCConvertParams conversion
# All aliases for an argument must either be present in the
# conversion table, or not. Anything in between means an oversight
# and some aliases won't work.
for cmd in cmds:
for arg in cmd.args:
convert = [((cmd.name, arg.idx, argname) in mapping) for argname in arg.names]
if any(convert) != all(convert):
print('ERROR: %s argument %s has conflicts in vRPCConvertParams conversion specifier %s' % (cmd.name, arg.names, convert))
errors += 1
arg.convert = all(convert)
# Check for conversion difference by argument name.
# It is preferable for API consistency that arguments with the same name
# have the same conversion, so bin by argument name.
all_methods_by_argname = defaultdict(list)
converts_by_argname = defaultdict(list)
for cmd in cmds:
for arg in cmd.args:
for argname in arg.names:
all_methods_by_argname[argname].append(cmd.name)
converts_by_argname[argname].append(arg.convert)
for argname, convert in converts_by_argname.items():
if all(convert) != any(convert):
if argname in IGNORE_DUMMY_ARGS:
# these are testing or dummy, don't warn for them
continue
print('WARNING: conversion mismatch for argument named %s (%s)' %
(argname, list(zip(all_methods_by_argname[argname], converts_by_argname[argname]))))
sys.exit(errors > 0)
if __name__ == '__main__':
main()
|
eliben/luz-cpu
|
luz_asm_sim/lib/asmlib/linker.py
|
Python
|
unlicense
| 17,370
| 0.001267
|
# Linker - the linker for assembled objects
#
# Input: one or more ObjectFile objects
# Output: an executable suitable for loading into the Luz
# simulator or CPU memory.
#
# Luz micro-controller assembler
# Eli Bendersky (C) 2008-2010
#
import pprint, os, sys, string
from collections import defaultdict
from ..commonlib.utils import (
word2bytes, bytes2word, extract_bitfield,
build_bitfield, num_fits_in_nbits)
from ..commonlib.luz_opcodes import *
from .asm_common_types import ImportType, RelocType
from .assembler import Assembler
class LinkerError(Exception): pass
class Linker(object):
""" Links together several object files, adding a startup
object, and produces a binary image of the linked
executable. This binary image, when loaded at the initial
offset address, is ready to be executed by the CPU.
A Linker is created with the following parameters:
initial_offset:
The initial offset in memory where the image will be
placed. This is important for resolving relocations
and imports.
mem_size:
The total memory size available for the executable.
This is used to initialize the stack pointer.
Calling the link() method results in the binary image as
a list of bytes.
"""
def __init__(self, initial_offset=0, mem_size=128*1024):
self.initial_offset = initial_offset
self.mem_size = mem_size
def link(self, object_files=[]):
""" Link the given objects. object_files is a list of
ObjectFile. The objects are linked with the special
startup object (see LINKER_STARTUP_CODE).
Note: object files may be modified as a result of this
call, to resolve import and relocations.
"""
# Throughout the linking code we refer to objects by their offset in the
# object_files list. This offset uniquely identifies an object.
self.object_files = object_files
startup_object = self._assemble_startup_code()
self.object_files.append(startup_object)
segment_map, total_size = self._compute_segment_map(
object_files=self.object_files,
offset=self.initial_offset)
exports = self._collect_exports(
object_files=self.object_files)
self._resolve_imports(
object_files=self.object_files,
exports=exports,
segment_map=segment_map)
self._resolve_relocations(
object_files=self.object_files,
segment_map=segment_map)
image = self._build_memory_image(
object_files=self.object_files,
segment_map=segment_map,
total_size=total_size)
return image
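    # Illustrative usage sketch, built only from names already used in this
    # module (offset and memory size are assumed values):
    #   asm = Assembler()
    #   objs = [asm.assemble(str=source) for source in sources]
    #   image = Linker(initial_offset=0x100000, mem_size=128 * 1024).link(objs)
    # 'image' is then a flat list of bytes ready to load at initial_offset.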
######################-- PRIVATE --#####################
    def _assemble_startup_code(self):
        sp_ptr = self.initial_offset + self.mem_size - 4
startup_code = LINKER_STARTUP_CODE.substitute(SP_POINTER=sp_ptr)
asm = Assembler()
startup_object = asm.assemble(str=startup_code)
return startup_object
def _compute_segment_map(self, object_files, offset=0):
""" Compute a segment memory map from the list of object
files and a given offset.
A "segment map" is a list of:
dict[segment] = address
The ith item holds such a dictionary for the ith
object file.
Each dictionary maps the segments found in this
object file into the addresses to which they are
placed in the memory layout created by the linker.
For example, if several objects have a 'text' segment,
this function collects all the 'text' segments into
a contiguous region. However, the 'text' segment of
each object will point to a different offset inside
this region (since they're placed one after another).
The 'offset' argument allows to shift the whole memory
map by some constant amount.
Linker-created segments like __startup and __heap are
treated specially.
Returns the pair segment_map, total_size
total_size is the total size of memory occupied by
all the objects.
"""
# Step 1: Compute the total sizes of all segments that
# exist in the object files
segment_size = defaultdict(int)
for obj in object_files:
for segment in obj.seg_data:
segment_size[segment] += len(obj.seg_data[segment])
# Step 2: Initialize the pointers that point to the start
# of each combined segment.
# Note: the order of allocation of segments (what comes
# after what) isn't really important and could be totally
# arbitrary. To make it more predictable, segments are
# allocated one after another sorted by name in increasing
# lexicographical order.
# The __startup segment is placed before all others (i.e.
# it's mapped at 'offset'), and the __heap segment is
# placed after all others.
segment_ptr = {}
ptr = offset
if '__startup' in segment_size:
segment_ptr['__startup'] = ptr
ptr += segment_size['__startup']
for segment in sorted(segment_size):
if segment not in ('__startup', '__heap'):
segment_ptr[segment] = ptr
ptr += segment_size[segment]
if '__heap' in segment_size:
segment_ptr['__heap'] = ptr
ptr += segment_size['__heap']
total_size = ptr - offset
# Step 3: Create the segment map. For each segment in each
# object, record the memory offset where it will be
# mapped.
segment_map = []
for obj in object_files:
obj_segment_map = {}
for segment in obj.seg_data:
obj_segment_map[segment] = segment_ptr[segment]
segment_ptr[segment] += len(obj.seg_data[segment])
segment_map.append(obj_segment_map)
return segment_map, total_size
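    # Worked example (sizes assumed): two objects that each contain only a
    # 'text' segment, of 8 and 12 bytes, linked with offset=0x100, yield
    #   segment_map == [{'text': 0x100}, {'text': 0x108}]  and  total_size == 20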
def _collect_exports(self, object_files):
""" Collects the exported symbols from all the objects.
Verifies that exported symbols are unique and
notifies of collisions.
The returned data structure is a dict mapping export
symbol names to a pair: (object_index, addr)
where object_index is the index in object_files of the
object that exports this symbol, and addr is
the address of the symbol (SegAddr) taken from the
export table of that object.
"""
exports = {}
for idx, obj in enumerate(object_files):
for export in obj.export_table:
sym_name = export.export_symbol
if sym_name in exports:
other_idx = exports[sym_name][0]
self._linker_error(
"Duplicated export symbol '%s' at objects [%s] and [%s]" % (
sym_name,
self._object_id(object_files[idx]),
self._object_id(object_files[other_idx])))
exports[sym_name] = (idx, export.addr)
return exports
def _resolve_relocations(self, object_files, segment_map):
""" Resolves the relocations in object files according to
their relocation tables and the updated segment_map
information.
"""
# Look at the relocation tables of all objects
#
for idx, obj in enumerate(object_files):
for reloc_seg, type, addr in obj.reloc_table:
# The requested relocation segment should exist
# in the segment map for this object.
#
if not reloc_seg in segment_map[idx]:
self._linker_error("Relocation entry in object [%t] refers to unknown segment %s" % (
self._object_id(obj
|
testing-cabal/extras
|
setup.py
|
Python
|
mit
| 1,687
| 0.000593
|
#!/usr/bin/env python
"""Distutils installer for extras."""
from setuptools import setup
import os.path
import extras
testtools_cmd = extras.try_import('testtools.TestCommand')
def get_version():
"""Return the version of extras that we are building."""
version = '.'.join(
str(component) for component in extras.__version__[0:3])
return version
def get_long_description():
readme_path = os.path.join(
os.path.dirname(__file__), 'README.rst')
return open(readme_path).read()
cmdclass = {}
if testtools_cmd is not None:
cmdclass['test'] = testtools_cmd
setup(name='extras',
author='Testing cabal',
author_email='[email protected]',
url='https://github.com/testing-cabal/extras',
      description=('Useful extra bits for Python - things that should be '
'in the standard library'),
long_description=get_long_description(),
version=get_version(),
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
],
packages=[
'extras',
'extras.tests',
],
cmdclass=cmdclass)
|
laudaa/bitcoin
|
test/functional/segwit.py
|
Python
|
mit
| 42,732
| 0.007208
|
#!/usr/bin/env python3
# Copyright (c) 2016-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the SegWit changeover logic."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.mininode import sha256, CTransaction, CTxIn, COutPoint, CTxOut, COIN, ToHex, FromHex
from test_framework.address import script_to_p2sh, key_to_p2pkh, key_to_p2sh_p2wpkh, key_to_p2wpkh, script_to_p2sh_p2wsh, script_to_p2wsh, program_to_witness
from test_framework.script import CScript, OP_HASH160, OP_CHECKSIG, OP_0, hash160, OP_EQUAL, OP_DUP, OP_EQUALVERIFY, OP_1, OP_2, OP_CHECKMULTISIG, OP_TRUE
from io import BytesIO
NODE_0 = 0
NODE_2 = 2
WIT_V0 = 0
WIT_V1 = 1
# Create a scriptPubKey corresponding to either a P2WPKH output for the
# given pubkey, or a P2WSH output of a 1-of-1 multisig for the given
# pubkey. Returns the hex encoding of the scriptPubKey.
def witness_script(use_p2wsh, pubkey):
    if not use_p2wsh:
# P2WPKH instead
pubkeyhash = hash160(hex_str_to_bytes(pubkey))
pkscript = CScript([OP_0, pubkeyhash])
else:
# 1-of-1 multisig
witness_program = CScript([OP_1, hex_str_to_bytes(pubkey), OP_1, OP_CHECKMULTISIG])
scripthash = sha256(witness_program)
pkscript = CScript([OP_0, scripthash])
return bytes_to_hex_str(pkscript)
# Return a transaction (in hex) that spends the given utxo to a segwit output,
# optionally wrapping the segwit output using P2SH.
def create_witness_tx(node, use_p2wsh, utxo, pubkey, encode_p2sh, amount):
if use_p2wsh:
program = CScript([OP_1, hex_str_to_bytes(pubkey), OP_1, OP_CHECKMULTISIG])
addr = script_to_p2sh_p2wsh(program) if encode_p2sh else script_to_p2wsh(program)
else:
addr = key_to_p2sh_p2wpkh(pubkey) if encode_p2sh else key_to_p2wpkh(pubkey)
if not encode_p2sh:
assert_equal(node.validateaddress(addr)['scriptPubKey'], witness_script(use_p2wsh, pubkey))
return node.createrawtransaction([utxo], {addr: amount})
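# Added usage sketch (not in the upstream test; the values are illustrative only):
# spending a ~50 BTC utxo from `node` to a P2SH-wrapped P2WPKH output for `pubkey`
# would look like
#   raw_hex = create_witness_tx(node, use_p2wsh=False, utxo=find_unspent(node, 50),
#                               pubkey=pubkey, encode_p2sh=True, amount=Decimal("49.999"))
# which returns the unsigned transaction as a hex string.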
# Create a transaction spending a given utxo to a segwit output corresponding
# to the given pubkey: use_p2wsh determines whether to use P2WPKH or P2WSH;
# encode_p2sh determines whether to wrap in P2SH.
# sign=True will have the given node sign the transaction.
# insert_redeem_script will be added to the scriptSig, if given.
def send_to_witness(use_p2wsh, node, utxo, pubkey, encode_p2sh, amount, sign=True, insert_redeem_script=""):
tx_to_witness = create_witness_tx(node, use_p2wsh, utxo, pubkey, encode_p2sh, amount)
if (sign):
signed = node.signrawtransaction(tx_to_witness)
        assert("errors" not in signed or len(signed["errors"]) == 0)
return node.sendrawtransaction(signed["hex"])
else:
if (insert_redeem_script):
tx = FromHex(CTransaction(), tx_to_witness)
tx.vin[0].scriptSig += CScript([hex_str_to_bytes(insert_redeem_script)])
tx_to_witness = ToHex(tx)
return node.sendrawtransaction(tx_to_witness)
def getutxo(txid):
utxo = {}
utxo["vout"] = 0
utxo["txid"] = txid
return utxo
def find_unspent(node, min_value):
for utxo in node.listunspent():
if utxo['amount'] >= min_value:
return utxo
class SegWitTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
# This test tests SegWit both pre and post-activation, so use the normal BIP9 activation.
self.extra_args = [["-rpcserialversion=0", "-vbparams=segwit:0:999999999999", "-addresstype=legacy"],
["-blockversion=4", "-promiscuousmempoolflags=517", "-prematurewitness", "-rpcserialversion=1", "-vbparams=segwit:0:999999999999", "-addresstype=legacy"],
["-blockversion=536870915", "-promiscuousmempoolflags=517", "-prematurewitness", "-vbparams=segwit:0:999999999999", "-addresstype=legacy"]]
def setup_network(self):
super().setup_network()
connect_nodes(self.nodes[0], 2)
self.sync_all()
def success_mine(self, node, txid, sign, redeem_script=""):
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
block = node.generate(1)
assert_equal(len(node.getblock(block[0])["tx"]), 2)
sync_blocks(self.nodes)
def skip_mine(self, node, txid, sign, redeem_script=""):
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
block = node.generate(1)
assert_equal(len(node.getblock(block[0])["tx"]), 1)
sync_blocks(self.nodes)
def fail_accept(self, node, error_msg, txid, sign, redeem_script=""):
assert_raises_rpc_error(-26, error_msg, send_to_witness, 1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
def fail_mine(self, node, txid, sign, redeem_script=""):
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
assert_raises_rpc_error(-1, "CreateNewBlock: TestBlockValidity failed", node.generate, 1)
sync_blocks(self.nodes)
def run_test(self):
self.nodes[0].generate(161) #block 161
self.log.info("Verify sigops are counted in GBT with pre-BIP141 rules before the fork")
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
tmpl = self.nodes[0].getblocktemplate({})
assert(tmpl['sizelimit'] == 1000000)
assert('weightlimit' not in tmpl)
assert(tmpl['sigoplimit'] == 20000)
assert(tmpl['transactions'][0]['hash'] == txid)
assert(tmpl['transactions'][0]['sigops'] == 2)
tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']})
assert(tmpl['sizelimit'] == 1000000)
assert('weightlimit' not in tmpl)
assert(tmpl['sigoplimit'] == 20000)
assert(tmpl['transactions'][0]['hash'] == txid)
assert(tmpl['transactions'][0]['sigops'] == 2)
self.nodes[0].generate(1) #block 162
balance_presetup = self.nodes[0].getbalance()
self.pubkey = []
p2sh_ids = [] # p2sh_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE embedded in p2sh
wit_ids = [] # wit_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE via bare witness
for i in range(3):
newaddress = self.nodes[i].getnewaddress()
self.pubkey.append(self.nodes[i].validateaddress(newaddress)["pubkey"])
multiaddress = self.nodes[i].addmultisigaddress(1, [self.pubkey[-1]])
multiscript = CScript([OP_1, hex_str_to_bytes(self.pubkey[-1]), OP_1, OP_CHECKMULTISIG])
p2sh_addr = self.nodes[i].addwitnessaddress(newaddress)
bip173_addr = self.nodes[i].addwitnessaddress(newaddress, False)
p2sh_ms_addr = self.nodes[i].addwitnessaddress(multiaddress)
bip173_ms_addr = self.nodes[i].addwitnessaddress(multiaddress, False)
assert_equal(p2sh_addr, key_to_p2sh_p2wpkh(self.pubkey[-1]))
assert_equal(bip173_addr, key_to_p2wpkh(self.pubkey[-1]))
assert_equal(p2sh_ms_addr, script_to_p2sh_p2wsh(multiscript))
assert_equal(bip173_ms_addr, script_to_p2wsh(multiscript))
p2sh_ids.append([])
wit_ids.append([])
for v in range(2):
p2sh_ids[i].append([])
wit_ids[i].append([])
for i in range(5):
for n in range(3):
for v in range(2):
wit_ids[n][v].append(send_to_witness(v, self.nodes[0], find_unspent(self.nodes[0], 50), self.pubkey[n], False, Decimal("49.999")))
p2sh_ids[n][v].append(send_to_witness(v, self.nodes[0], find_unspent(self.nodes[0], 50), self.pubkey[n], True, Decimal("49.999")))
self.nodes[0].generate(1) #block 163
sync_blocks(s
neonbadger/DestinationUnknown | yelp_api.py | Python | mit | 2,082 | 0.004803
"""Yelp API setup and random business selection function"""
import io
import json
import random
from yelp.client import Client
from yelp.oauth1_authenticator import Oauth1Authenticator
with io.open('config_yelp_secret.json') as cred:
creds = json.load(cred)
auth = Oauth1Authenticator(**creds)
yelp_client = Client(auth)
group_activity = ['arcades', 'amusementparks', 'lasertag', 'rock_climbing', 'gokarts',
'escapegames', 'mini_golf', 'trampoline', 'zoos', 'bowling', 'galleries']
fitness_activity = ['yoga', 'pilates', 'hiking', 'cyclingclasses']
relax_activity = ['spas', 'hair', 'skincare', 'othersalons', 'massage',
'outlet_stores', 'shoppingcenters', 'massage_therapy',
'acupuncture', 'ayurveda', 'chiropractors', 'venues', 'galleries',
'landmarks', 'gardens', 'museums', 'paintandsip', 'beaches']
night_activity = ['cabaret', 'movietheaters', 'musicvenues', 'opera', 'theater',
'cocktailbars', 'lounges', 'sportsbars', 'wine_bar',
'poolhalls', 'pianobars', 'karaoke', 'jazzandblues',
'danceclubs']
eat_activity = ['wineries', 'farmersmarket', 'cafes', 'bakeries', 'bubbletea', 'coffee',
'restaurants','beer_and_wine', 'icecream', 'gourmet', 'juicebars',
'asianfusion', 'japanese', 'seafood', 'breweries']
def yelp_random_pick(event, city):
"""Generate a top business pick for user."""
if event == 'food':
category_filter = random.choice(eat_activity)
elif event == 'friends':
category_filter = random.choice(group_activity)
elif event == 'relax':
category_filter = random.choice(relax_activity)
elif event == 'nightlife':
category_filter = random.choice(night_activity)
elif event == 'fitness':
category_filter = random.choice(fitness_activity)
params = {
'sort': 2,
'category_filter': category_filter
}
response = yelp_client.search(city, **params)
biz = response.businesses[0]
return biz
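# Added usage sketch (not part of the original module; the arguments are examples only):
#   biz = yelp_random_pick('food', 'San Francisco')
#   print(biz.name)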
tkingless/webtesting | venvs/dev/lib/python2.7/site-packages/selenium/webdriver/support/select.py | Python | mit | 9,249 | 0.003027
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException, UnexpectedTagNameException
class Select:
def __init__(self, webelement):
"""
Constructor. A check is made that the given element is, indeed, a SELECT tag. If it is not,
then an UnexpectedTagNameException is thrown.
:Args:
- webelement - element SELECT element to wrap
Example:
from selenium.webdriver.support.ui import Select \n
Select(driver.find_element_by_tag_name("select")).select_by_index(2)
"""
if webelement.tag_name.lower() != "select":
raise UnexpectedTagNameException(
"Select only works on <select> elements, not on <%s>" %
webelement.tag_name)
self._el = webelement
multi = self._el.get_attribute("multiple")
self.is_multiple = multi and multi != "false"
@property
def options(self):
"""Returns a list of all options belonging to this select tag"""
return self._el.find_elements(By.TAG_NAME, 'option')
@property
def all_selected_options(self):
"""Returns a list of all selected options belonging to this select tag"""
ret = []
for opt in self.options:
if opt.is_selected():
ret.append(opt)
return ret
@property
def first_selected_option(self):
"""The first selected option in this select tag (or the currently selected option in a
normal select)"""
for opt in self.options:
if opt.is_selected():
return opt
raise NoSuchElementException("No options are selected")
def select_by_value(self, value):
"""Select all options that have a value matching the argument. That is, when given "foo" this
would select an option like:
<option value="foo">Bar</option>
:Args:
- value - The value to match against
        throws NoSuchElementException If there is no option with specified value in SELECT
"""
css = "option[value =%s]" % self._escapeString(value)
opts = self._el.find_elements(By.CSS_SELECTOR, css)
matched = False
for opt in opts:
self._setSelected(opt)
if not self.is_multiple:
return
matched = True
if not matched:
raise NoSuchElementException("Cannot locate option with value: %s" % value)
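    # Added illustration (not part of the upstream file): for a page containing
    #   <select id="cars"><option value="volvo">Volvo</option></select>
    # Select(driver.find_element_by_id("cars")).select_by_value("volvo") builds the
    # CSS selector option[value ="volvo"] and selects the matching option.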
    def select_by_index(self, index):
        """Select the option at the given index. This is done by examining the "index" attribute of an
element, and not merely by counting.
:Args:
- index - The option at this index will be selected
        throws NoSuchElementException If there is no option with specified index in SELECT
"""
match = str(index)
for opt in self.options:
if opt.get_attribute("index") == match:
self._setSelected(opt)
return
raise NoSuchElementException("Could not locate element with index %d" % index)
def select_by_visible_text(self, text):
"""Select all options that display text matching the argument. That is, when given "Bar" this
would select an option like:
<option value="foo">Bar</option>
:Args:
- text - The visible text to match against
        throws NoSuchElementException If there is no option with specified text in SELECT
"""
xpath = ".//option[normalize-space(.) = %s]" % self._escapeString(text)
opts = self._el.find_elements(By.XPATH, xpath)
matched = False
for opt in opts:
self._setSelected(opt)
if not self.is_multiple:
return
matched = True
if len(opts) == 0 and " " in text:
subStringWithoutSpace = self._get_longest_token(text)
if subStringWithoutSpace == "":
candidates = self.options
else:
xpath = ".//option[contains(.,%s)]" % self._escapeString(subStringWithoutSpace)
candidates = self._el.find_elements(By.XPATH, xpath)
for candidate in candidates:
if text == candidate.text:
self._setSelected(candidate)
if not self.is_multiple:
return
matched = True
if not matched:
raise NoSuchElementException("Could not locate element with visible text: %s" % text)
def deselect_all(self):
"""Clear all selected entries. This is only valid when the SELECT supports multiple selections.
throws NotImplementedError If the SELECT does not support multiple selections
"""
if not self.is_multiple:
raise NotImplementedError("You may only deselect all options of a multi-select")
for opt in self.options:
self._unsetSelected(opt)
def deselect_by_value(self, value):
"""Deselect all options that have a value matching the argument. That is, when given "foo" this
would deselect an option like:
<option value="foo">Bar</option>
:Args:
- value - The value to match against
        throws NoSuchElementException If there is no option with specified value in SELECT
"""
if not self.is_multiple:
raise NotImplementedError("You may only deselect options of a multi-select")
matched = False
css = "option[value = %s]" % self._escapeString(value)
opts = self._el.find_elements(By.CSS_SELECTOR, css)
for opt in opts:
self._unsetSelected(opt)
matched = True
if not matched:
raise NoSuchElementException("Could not locate element with value: %s" % value)
    def deselect_by_index(self, index):
        """Deselect the option at the given index. This is done by examining the "index" attribute of an
element, and not merely by counting.
:Args:
- index - The option at this index will be deselected
        throws NoSuchElementException If there is no option with specified index in SELECT
"""
if not self.is_multiple:
raise NotImplementedError("You may only deselect options of a multi-select")
for opt in self.options:
if opt.get_attribute("index") == str(index):
self._unsetSelected(opt)
return
raise NoSuchElementException("Could not locate element with index %d" % index)
def deselect_by_visible_text(self, text):
"""Deselect all options that display text matching the argument. That is, when given "Bar" this
would deselect an option like:
<option value="foo">Bar</option>
:Args:
- text - The visible text to match against
"""
if not self.is_multiple:
raise NotImplementedError("You may only deselect options of a multi-select")
matched = False
xpath = ".//option[normalize-space(.) = %s]" % self._escapeString(text)
opts = self._el.find_elements(By.XPATH, xpath)
for opt in opts:
self._unsetSelected(opt)
matched = True
mick-d/nipype | nipype/interfaces/minc/tests/test_auto_Voliso.py | Python | bsd-3-clause | 1,379 | 0.023205
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from __future__ import unicode_literals
from ..minc import Voliso
def test_Voliso_inputs():
input_map = dict(args=dict(argstr='%s',
),
    avgstep=dict(argstr='--avgstep',
),
clobber=dict(argstr='--clobber',
usedefault=True,
),
environ=dict(nohash=True,
usedefault=True,
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
input_file=dict(argstr='%s',
mandatory=True,
position=-2,
),
maxstep=dict(argstr='--maxstep %s',
),
minstep=dict(argstr='--minstep %s',
),
output_file=dict(argstr='%s',
genfile=True,
hash_files=False,
    name_source=['input_file'],
name_template='%s_voliso.mnc',
position=-1,
),
terminal_output=dict(deprecated='1.0.0',
nohash=True,
),
verbose=dict(argstr='--verbose',
),
)
inputs = Voliso.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_Voliso_outputs():
output_map = dict(output_file=dict(),
)
outputs = Voliso.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
yephper/django | django/contrib/gis/db/models/functions.py | Python | bsd-3-clause | 16,825 | 0.002377
from decimal import Decimal
from django.contrib.gis.db.models.fields import GeometryField
from django.contrib.gis.db.models.sql import AreaField
from django.contrib.gis.measure import (
Area as AreaMeasure, Distance as DistanceMeasure,
)
from django.core.exceptions import FieldError
from django.db.models import FloatField, IntegerField, TextField
from django.db.models.expressions import Func, Value
from django.utils import six
NUMERIC_TYPES = six.integer_types + (float, Decimal)
class GeoFunc(Func):
function = None
output_field_class = None
geom_param_pos = 0
def __init__(self, *expressions, **extra):
if 'output_field' not in extra and self.output_field_class:
extra['output_field'] = self.output_field_class()
super(GeoFunc, self).__init__(*expressions, **extra)
@property
def name(self):
return self.__class__.__name__
@property
def srid(self):
expr = self.source_expressions[self.geom_param_pos]
if hasattr(expr, 'srid'):
return expr.srid
try:
return expr.field.srid
except (AttributeError, FieldError):
return None
def as_sql(self, compiler, connection):
if self.function is None:
self.function = connection.ops.spatial_function_name(self.name)
return super(GeoFunc, self).as_sql(compiler, connection)
def resolve_expression(self, *args, **kwargs):
res = super(GeoFunc, self).resolve_expression(*args, **kwargs)
base_srid = res.srid
if not base_srid:
raise TypeError("Geometry functions can only operate on geometric content.")
for pos, expr in enumerate(res.source_expressions[1:], start=1):
if isinstance(expr, GeomValue) and expr.srid != base_srid:
# Automatic SRID conversion so objects are comparable
res.source_expressions[pos] = Transform(expr, base_srid).resolve_expression(*args, **kwargs)
return res
def _handle_param(self, value, param_name='', check_types=None):
if not hasattr(value, 'resolve_expression'):
if check_types and not isinstance(value, check_types):
raise TypeError(
"The %s parameter has the wrong type: should be %s." % (
param_name, str(check_types))
)
return value
class GeomValue(Value):
geography = False
@property
def srid(self):
return self.value.srid
def as_sql(self, compiler, connection):
if self.geography:
self.value = connection.ops.Adapter(self.value, geography=self.geography)
else:
self.value = connection.ops.Adapter(self.value)
return super(GeomValue, self).as_sql(compiler, connection)
def as_mysql(self, compiler, connection):
return 'GeomFromText(%%s, %s)' % self.srid, [connection.ops.Adapter(self.value)]
def as_sqlite(self, compiler, connection):
return 'GeomFromText(%%s, %s)' % self.srid, [connection.ops.Adapter(self.value)]
def as_oracle(self, compiler, connection):
return 'SDO_GEOMETRY(%%s, %s)' % self.srid, [connection.ops.Adapter(self.value)]
class GeoFuncWithGeoParam(GeoFunc):
def __init__(self, expression, geom, *expressions, **extra):
if not hasattr(geom, 'srid') or not geom.srid:
raise ValueError("Please provide a geometry attribute with a defined SRID.")
super(GeoFuncWithGeoParam, self).__init__(expression, GeomValue(geom), *expressions, **extra)
class SQLiteDecimalToFloatMixin(object):
"""
By default, Decimal values are converted to str by the SQLite backend, which
is not acceptable by the GIS functions expecting numeric values.
"""
def as_sqlite(self, compiler, connection):
for expr in self.get_source_expressions():
if hasattr(expr, 'value') and isinstance(expr.value, Decimal):
expr.value = float(expr.value)
return super(SQLiteDecimalToFloatMixin, self).as_sql(compiler, connection)
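# Added note (illustrative, not from the original file): with this mixin, an expression
# holding Decimal('0.05') is rewritten to the float 0.05 before SQL compilation, so the
# SQLite/SpatiaLite GIS functions receive a numeric value instead of a string.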
class OracleToleranceMixin(object):
tolerance = 0.05
def as_oracle(self, compiler, connection):
tol = self.extra.get('tolerance', self.tolerance)
self.template = "%%(function)s(%%(expressions)s, %s)" % tol
return super(OracleToleranceMixin, self).as_sql(compiler, connection)
class Area(OracleToleranceMixin, GeoFunc):
output_field_class = AreaField
arity = 1
def as_sql(self, compiler, connection):
if connection.ops.geography:
self.output_field.area_att = 'sq_m'
else:
# Getting the area units of the geographic field.
source_fields = self.get_source_fields()
if len(source_fields):
source_field = source_fields[0]
if source_field.geodetic(connection):
# TODO: Do we want to support raw number areas for geodetic fields?
raise NotImplementedError('Area on geodetic coordinate systems not supported.')
units_name = source_field.units_name(connection)
if units_name:
self.output_field.area_att = AreaMeasure.unit_attname(units_name)
return super(Area, self).as_sql(compiler, connection)
def as_oracle(self, compiler, connection):
self.output_field = AreaField('sq_m') # Oracle returns area in units of meters.
return super(Area, self).as_oracle(compiler, connection)
class AsGeoJSON(GeoFunc):
output_field_class = TextField
def __init__(self, expression, bbox=False, crs=False, precision=8, **extra):
expressions = [expression]
if precision is not None:
expressions.append(self._handle_param(precision, 'precision', six.integer_types))
options = 0
if crs and bbox:
options = 3
elif bbox:
options = 1
elif crs:
options = 2
if options:
expressions.append(options)
super(AsGeoJSON, self).__init__(*expressions, **extra)
class AsGML(GeoFunc):
geom_param_pos = 1
output_field_class = TextField
def __init__(self, expression, version=2, precision=8, **extra):
expressions = [version, expression]
if precision is not None:
expressions.append(self._handle_param(precision, 'precision', six.integer_types))
super(AsGML, self).__init__(*expressions, **extra)
class AsKML(AsGML):
def as_sqlite(self, compiler, connection):
# No version parameter
self.source_expressions.pop(0)
return super(AsKML, self).as_sql(compiler, connection)
class AsSVG(GeoFunc):
output_field_class = TextField
def __init__(self, expression, relative=False, precision=8, **extra):
relative = relative if hasattr(relative, 'resolve_expression') else int(relative)
expressions = [
expression,
relative,
            self._handle_param(precision, 'precision', six.integer_types),
]
super(AsSVG, self).__init__(*expressions, **extra)
class BoundingCircle(GeoFunc):
def __init__(self, expression, num_seg=48, **extra):
super(BoundingCircle, self).__init__(*[expression, num_seg], **extra)
class Centroid(OracleToleranceMixin, GeoFunc):
arity = 1
class Difference(OracleToleranceMixin, GeoFuncWithGeoParam):
arity = 2
class DistanceResultMixin(object):
def source_is_geography(self):
return self.get_source_fields()[0].geography and self.srid == 4326
def convert_value(self, value, expression, connection, context):
if value is None:
return None
geo_field = GeometryField(srid=self.srid) # Fake field to get SRID info
if geo_field.geodetic(connection):
dist_att = 'm'
else:
units = geo_field.units_name(connection)
if units:
dist_a
bendudson/freegs | freegs/_aeqdsk.py | Python | lgpl-3.0 | 12,250 | 0.001551
"""
fields - Lists the variables stored in the file, a default value, and a description
"""
from . import _fileutils as fu
import warnings
# List of file data variables, default values, and documentation
# This is used in both reader and writer
fields = [
(
"tsaisq",
0.0,
"total chi2 from magnetic probes, flux loops, Rogowski and external coils",
),
("rcencm", 100.0, "major radius in cm for vacuum field BCENTR"),
    ("bcentr", 1.0, "vacuum toroidal magnetic field in Tesla at RCENCM"),
("pasmat", 1e6, "measured plasma toroidal current in Ampere"),
("cpasma", 1e6, "fitted plasma toroidal current in Ampere-turn"),
("rout", 100.0, "major radius of geometric center in cm"),
("zout", 0.0, "Z of geometric center in cm"),
("aout", 50.0, "plasma minor radius in cm"),
("eout", 1.0, "Plasma boundary elongation"),
("doutu", 1.0, "upper triangularity"),
("doutl", 1.0, "lower triangularity"),
("vout", 1000.0, "plasma volume in cm3"),
("rcurrt", 100.0, "major radius in cm of current centroid"),
("zcurrt", 0.0, "Z in cm at current centroid"),
("qsta", 5.0, "equivalent safety factor q*"),
("betat", 1.0, "toroidal beta in %"),
(
"betap",
1.0,
"poloidal beta with normalization average poloidal magnetic BPOLAV defined through Ampere's law",
),
(
"ali",
0.0,
"li with normalization average poloidal magnetic defined through Ampere's law",
),
("oleft", 10.0, "plasma inner gap in cm"),
("oright", 10.0, "plasma outer gap in cm"),
("otop", 10.0, "plasma top gap in cm"),
("obott", 10.0, "plasma bottom gap in cm"),
("qpsib", 5.0, "q at 95% of poloidal flux"),
("vertn", 1.0, "vacuum field (index? -- seems to be float) at current centroid"),
# fmt_1040 = r '^\s*' + 4 * r '([\s\-]\d+\.\d+[Ee][\+\-]\d\d)'
# read(neqdsk, 1040)(rco2v(k, jj), k = 1, mco2v)
(None, None, None), # New line
(
"rco2v",
lambda data: [0.0] * data["mco2v"],
"1D array : path length in cm of vertical CO2 density chord",
),
# read(neqdsk, 1040)(dco2v(jj, k), k = 1, mco2v)
(None, None, None), # New line
(
"dco2v",
lambda data: [0.0] * data["mco2v"],
"line average electron density in cm3 from vertical CO2 chord",
),
# read(neqdsk, 1040)(rco2r(k, jj), k = 1, mco2r)
(None, None, None), # New line
(
"rco2r",
lambda data: [0.0] * data["mco2r"],
"path length in cm of radial CO2 density chord",
),
# read(neqdsk, 1040)(dco2r(jj, k), k = 1, mco2r)
(None, None, None), # New line
(
"dco2r",
lambda data: [0.0] * data["mco2r"],
"line average electron density in cm3 from radial CO2 chord",
),
(None, None, None), # New line
("shearb", 0.0, ""),
(
"bpolav",
1.0,
"average poloidal magnetic field in Tesla defined through Ampere's law",
),
("s1", 0.0, "Shafranov boundary line integrals"),
("s2", 0.0, "Shafranov boundary line integrals"),
("s3", 0.0, "Shafranov boundary line integrals"),
("qout", 0.0, "q at plasma boundary"),
("olefs", 0.0, ""),
("orighs", 0.0, "outer gap of external second separatrix in cm"),
("otops", 0.0, "top gap of external second separatrix in cm"),
("sibdry", 1.0, ""),
("areao", 100.0, "cross sectional area in cm2"),
("wplasm", 0.0, ""),
("terror", 0.0, "equilibrium convergence error"),
("elongm", 0.0, "elongation at magnetic axis"),
("qqmagx", 0.0, "axial safety factor q(0)"),
("cdflux", 0.0, "computed diamagnetic flux in Volt-sec"),
("alpha", 0.0, "Shafranov boundary line integral parameter"),
("rttt", 0.0, "Shafranov boundary line integral parameter"),
("psiref", 1.0, "reference poloidal flux in VS/rad"),
(
"xndnt",
0.0,
"vertical stability parameter, vacuum field index normalized to critical index value",
),
("rseps1", 1.0, "major radius of x point in cm"),
("zseps1", -1.0, ""),
("rseps2", 1.0, "major radius of x point in cm"),
("zseps2", 1.0, ""),
("sepexp", 0.0, "separatrix radial expansion in cm"),
("obots", 0.0, "bottom gap of external second separatrix in cm"),
("btaxp", 1.0, "toroidal magnetic field at magnetic axis in Tesla"),
("btaxv", 1.0, "vacuum toroidal magnetic field at magnetic axis in Tesla"),
("aaq1", 100.0, "minor radius of q=1 surface in cm, 100 if not found"),
("aaq2", 100.0, "minor radius of q=2 surface in cm, 100 if not found"),
("aaq3", 100.0, "minor radius of q=3 surface in cm, 100 if not found"),
(
"seplim",
0.0,
"> 0 for minimum gap in cm in divertor configurations, < 0 absolute value for minimum distance to external separatrix in limiter configurations",
),
("rmagx", 100.0, "major radius in cm at magnetic axis"),
("zmagx", 0.0, ""),
("simagx", 0.0, "Poloidal flux at the magnetic axis"),
("taumhd", 0.0, "energy confinement time in ms"),
("betapd", 0.0, "diamagnetic poloidal b"),
("betatd", 0.0, "diamagnetic toroidal b in %"),
("wplasmd", 0.0, "diamagnetic plasma stored energy in Joule"),
("diamag", 0.0, "measured diamagnetic flux in Volt-sec"),
("vloopt", 0.0, "measured loop voltage in volt"),
("taudia", 0.0, "diamagnetic energy confinement time in ms"),
(
"qmerci",
0.0,
"Mercier stability criterion on axial q(0), q(0) > QMERCI for stability",
),
("tavem", 0.0, "average time in ms for magnetic and MSE data"),
# ishot > 91000
# The next section is dependent on the EFIT version
# New version of EFIT on 05/24/97 writes aeqdsk that includes
# data values for parameters nsilop,magpri,nfcoil and nesum.
(None, True, None), # New line
(
"nsilop",
lambda data: len(data.get("csilop", [])),
"Number of flux loop signals, len(csilop)",
),
(
"magpri",
lambda data: len(data.get("cmpr2", [])),
"Number of flux loop signals, len(cmpr2) (added to nsilop)",
),
(
"nfcoil",
lambda data: len(data.get("ccbrsp", [])),
"Number of calculated external coil currents, len(ccbrsp)",
),
(
"nesum",
lambda data: len(data.get("eccurt", [])),
"Number of measured E-coil currents",
),
(None, None, None), # New line
(
"csilop",
lambda data: [0.0] * data.get("nsilop", 0),
"computed flux loop signals in Weber",
),
("cmpr2", lambda data: [0.0] * data.get("magpri", 0), ""),
(
"ccbrsp",
lambda data: [0.0] * data.get("nfcoil", 0),
"computed external coil currents in Ampere",
),
(
"eccurt",
lambda data: [0.0] * data.get("nesum", 0),
"measured E-coil current in Ampere",
),
("pbinj", 0.0, "neutral beam injection power in Watts"),
("rvsin", 0.0, "major radius of vessel inner hit spot in cm"),
("zvsin", 0.0, "Z of vessel inner hit spot in cm"),
("rvsout", 0.0, "major radius of vessel outer hit spot in cm"),
("zvsout", 0.0, "Z of vessel outer hit spot in cm"),
("vsurfa", 0.0, "plasma surface loop voltage in volt, E EQDSK only"),
("wpdot", 0.0, "time derivative of plasma stored energy in Watt, E EQDSK only"),
("wbdot", 0.0, "time derivative of poloidal magnetic energy in Watt, E EQDSK only"),
("slantu", 0.0, ""),
("slantl", 0.0, ""),
("zuperts", 0.0, ""),
("chipre", 0.0, "total chi2 pressure"),
("cjor95", 0.0, ""),
("pp95", 0.0, "normalized P'(y) at 95% normalized poloidal flux"),
("ssep", 0.0, ""),
("yyy2", 0.0, "Shafranov Y2 current moment"),
("xnnc", 0.0, ""),
("cprof", 0.0, "current profile parametrization parameter"),
("oring", 0.0, "not used"),
(
"cjor0",
0.0,
"normalized flux surface average current density at 99% of normalized poloidal flux",
),
("fexpan", 0.0, "flux expansion at x point"),
("qqmin", 0.0, "minimum safety factor qmin"),
("chigamt", 0.0, "total chi2 MSE"),
("ssi01", 0.0, "magnetic sh
nicko96/Chrome-Infra | appengine/test_results/appengine_module/test_results/handlers/test/redirector_test.py | Python | bsd-3-clause | 1,228 | 0.008143
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from appengine_module.test_results.handlers import redirector
class RedirectorTest(unittest.TestCase):
def test_url_from_commit_positions(self):
def mock_load_url(url):
if url == 'https://cr-rev.appspot.com/_ah/api/crrev/v1/redirect/1':
git_sha = 'aaaaaaa'
else:
git_sha = 'bbbbbbb'
      return '''{
      "git_sha": "%s",
      "repo": "chromium/src",
      "redirect_url": "https://chromium.googlesource.com/chromium/src/+/%s",
      "project": "chromium",
      "redirect_type": "GIT_FROM_NUMBER",
      "repo_url": "https://chromium.googlesource.com/chromium/src/",
      "kind": "crrev#redirectItem",
      "etag": "\\\"vOastG91kaV9uxC3-P-4NolRM6s/U8-bHfeejPZOn0ELRGhed-nrIX4\\\""
}''' % (git_sha, git_sha)
old_load_url = redirector.load_url
try:
redirector.load_url = mock_load_url
expected = ('https://chromium.googlesource.com/chromium/src/+log/'
'aaaaaaa^..bbbbbbb?pretty=fuller')
self.assertEqual(redirector.url_from_commit_positions(1, 2), expected)
    finally:
redirector.load_url = old_load_url
roscopecoltran/scraper | .staging/meta-engines/xlinkBook/outline.py | Python | mit | 2,141 | 0.012611
#!/usr/bin/env python
# -*- coding: utf-8-*-
import getopt
import time
import re
import os,sys
reload(sys)
sys.setdefaultencoding('utf-8')
from pdfminer.pdfparser import PDFParser
from pdfminer.pdfdocument import PDFDocument
from bs4 import BeautifulSoup
import requests
import webbrowser
import subprocess
class Outline():
def getToc(self, pdfPath):
infile = open(pdfPath, 'rb')
parser = PDFParser(infile)
document = PDFDocument(parser)
toc = list()
for (level,title,dest,a,structelem) in document.get_outlines():
toc.append((level, title))
return toc
    def toOutline(self, source):
if source.endswith('.pdf') and source.startswith('http') == False:
items = ''
for item in self.getToc(source):
items += item[1] + '\n'
return items
elif source.startswith('http'):
#url = 'https://gsnedders.html5.org/outliner/process.py?url=' + source
#webbrowser.open(url)
r = requests.get('https://gsnedders.html5.org/outliner/process.py?url=' + source)
return r.text
#soup = BeautifulSoup(r.text)
#for li in soup.find_all('li'):
# print li.text.strip()
'''
r = requests.get(source)
#script = "var data = new Array();"
#for line in r.text:
# script += "data.push('" + line + "')"
script = ''
script += "var HTML5Outline = require('h5o');"
script += "var outline = HTML5Outline('<html></html>');"
output = subprocess.check_output('node -p "' + script + '"' , shell=True)
return output
'''
return ''
def main(argv):
source = ''
try:
opts, args = getopt.getopt(sys.argv[1:],'i:', ['input'])
except getopt.GetoptError, err:
print str(err)
sys.exit(2)
for o, a in opts:
if o in ('-i', '--input'):
source = a
outline = Outline()
print outline.toOutline(source)
if __name__ == '__main__':
main(sys.argv)
o-kei/design-computing-aij | ch5/curve.py | Python | mit | 983 | 0.001017
import numpy as np
import matplotlib.pyplot as plt
def bernstein(t, n, i):
cn = 1.0
ci = 1.0
cni = 1.0
for k in range(2, n, 1):
cn = cn * k
for k in range(1, i, 1):
if i == 1:
break
ci = ci * k
for k in range(1, n - i + 1, 1):
if n == i:
break
cni = cni * k
j = t**(i - 1) * (1 - t)**(n - i) * cn / (ci * cni)
return j
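# Added note (not in the original file): with n control points and 1-based index i,
# bernstein(t, n, i) evaluates the degree-(n-1) Bernstein basis polynomial
#     B_{i-1, n-1}(t) = C(n-1, i-1) * t**(i-1) * (1-t)**(n-i)
# using the factorials accumulated in cn, ci and cni above.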
def bezierplot(t, cp):
n = len(cp)
r = np.zeros([len(t), 2])
for k in range(len(t)):
        sum1 = 0.0
sum2 = 0.0
for i in range(1, n + 1, 1):
bt = bernstein(t[k], n, i)
sum1 += cp[i - 1, 0] * bt
sum2 += cp[i - 1, 1] * bt
r[k, :] = [sum1, sum2]
return np.array(r)
cp = np.array([[0, -2], [1, -3], [2, -2], [3, 2], [4, 2], [5, 0]])
t = np.arange(0, 1 + 0.01, 0.01)
p = bezierplot(t, cp)
plt.figure()
plt.plot(p[:, 0], p[:, 1])
plt.plot(cp[:, 0], cp[:, 1], ls=':', marker='o')
plt.show()
pygraz/old-flask-website | pygraz_website/tests/test_filters.py | Python | bsd-3-clause | 1,577 | 0.005707
from pygraz_website import filters
class TestFilters(object):
def test_url_detection(self):
"""
Test that urls are found correctly.
"""
no_urls_string = '''This is a test without any urls in it.'''
urls_string = '''This string has one link in it: http://pygraz.org . But it also has some text after it :D'''
assert filters.urlize(no_urls_string) == no_urls_string
        assert filters.urlize(urls_string) == '''This string has one link in it: <a href="http://pygraz.org">http://pygraz.org</a> . But it also has some text after it :D'''
assert filters.urlize(urls_string, True).matches == {'urls': set(['http://pygraz.org'])}
assert filters.urlize(None) == u''
        assert filters.urlize("'http://test.com'") == """'<a href="http://test.com">http://test.com</a>'"""
def test_namehandles(self):
"""
        Tests the discovery of linkable names.
"""
string_with_handles = 'Hallo @pygraz.'
assert filters.urlize(string_with_handles) == 'Hallo <a href="http://twitter.com/pygraz">@pygraz</a>.'
assert filters.urlize(string_with_handles, True).matches == {'handles': set(['pygraz'])}
def test_hashtags(self):
string_with_tags = 'This is a #test for #hashtags'
assert filters.urlize(string_with_tags) == 'This is a <a href="http://twitter.com/search?q=%23test">#test</a> for <a href="http://twitter.com/search?q=%23hashtags">#hashtags</a>'
assert filters.urlize(string_with_tags, True).matches == {'hashtags': set(['test', 'hashtags'])}
jhasse/sleeptimer | main.py | Python | gpl-3.0 | 8,446 | 0.004026
#!/usr/bin/env python3
import configparser, subprocess, platform
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, GLib, Gdk
class SleepTimer(Gtk.Builder):
def __init__(self):
super().__init__()
self.add_from_file("main.glade")
self.connect_signals(self)
self.spin_buttons = (
self.get_object("spinbutton_h"),
self.get_object("spinbutton_min"),
self.get_object("spinbutton_s"),
)
self.css_provider = Gtk.CssProvider()
self.get_object("togglebutton1").get_style_context().add_provider(
self.css_provider, Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION)
self.start_seconds_left = 0
self.config = configparser.ConfigParser()
self.config.read('settings.ini')
if 'default' in self.config.sections():
try:
self.spin_buttons[2].set_value(int(self.config['default']['seconds']))
self.get_object(self.config['default']['mode']).set_active(True)
if self.config['default']['mute'] == 'True':
self.get_object('checkbutton1').set_active(True)
except ValueError as err:
print(err)
except KeyError as err:
print('KeyError: {}'.format(err))
else:
self.config['default'] = {}
self.spin_buttons[0].set_value(1)
self.window = self.get_object("window1")
self.window.show_all()
def on_timer(self):
"""
        Decreases the remaining time by one second
"""
if not self.get_object("togglebutton1").get_active():
return False
seconds = self.spin_buttons[2].get_value_as_int()
if seconds == 0:
seconds = 60
minutes = self.spin_buttons[1].get_value_as_int()
if minutes == 0:
minutes = 60
hours = self.spin_buttons[0].get_value_as_int()
if hours == 0:
try:
if self.get_object("checkbutton1").get_active():
if platform.system() == "Windows":
subprocess.check_output("nircmd.exe mutesysvolume 1")
else:
subprocess.check_output("pactl set-sink-mute 0 1", shell=True)
verb = "hibernate"
if platform.system() == "Windows":
if self.get_object("standby").get_active():
verb = "standby"
elif self.get_object("shutdown").get_active():
verb = "exitwin poweroff"
subprocess.check_output("nircmd.exe " + verb)
else:
if self.get_object("standby").get_active():
verb = "suspend"
elif self.get_object("shutdown").get_active():
verb = "poweroff"
subprocess.check_output("systemctl " + verb + " -i", shell=True,
stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as err:
dialog = Gtk.MessageDialog(
parent=self.window, message_type=Gtk.MessageType.ERROR,
buttons=Gtk.ButtonsType.CLOSE,
text="`{}` failed with exit code {}".format(err.cmd, err.returncode))
dialog.format_secondary_text(err.stdout.decode('utf-8', 'ignore').strip())
dialog.run()
                dialog.destroy()
Gtk.main_quit()
return False
self.spin_buttons[0].set_value(hours - 1)
self.spin_buttons[1].set_value(minutes - 1)
self.spin_buttons[2].set_value(seconds - 1)
self.css_provider.load_from_data(".install-progress {{ background-size: {}%; }}".format(
int(self.get_seconds_left() * 100 / self.start_seconds_left)
).encode())
return True
def on_toggled(self, button):
"""
Start button toggled
"""
self.spin_buttons[2].set_sensitive(not button.get_active()) # seconds
context = button.get_style_context()
if button.get_active():
context.add_class("install-progress")
context.remove_class("suggested-action")
self.css_provider.load_from_data(b".install-progress { background-size: 100%; }")
self.start_seconds_left = self.get_seconds_left()
with open('settings.ini', 'w') as file:
self.config['default']['seconds'] = str(int(self.start_seconds_left))
self.config['default']['mode'] = 'standby'
if self.get_object('hibernate').get_active():
self.config['default']['mode'] = 'hibernate'
elif self.get_object('shutdown').get_active():
self.config['default']['mode'] = 'shutdown'
self.config['default']['mute'] = str(self.get_object("checkbutton1").get_active())
self.config.write(file)
self.previous_label = button.get_label()
button.set_label("_Stop")
else:
context.remove_class("install-progress")
context.add_class("suggested-action")
button.set_label(self.previous_label)
if button.get_active():
GLib.timeout_add(1000, self.on_timer)
def on_time_changed(self):
self.get_object("togglebutton1").set_sensitive(
self.spin_buttons[0].get_value() != 0 or
self.spin_buttons[1].get_value() != 0 or
self.spin_buttons[2].get_value() != 0
)
# If the user increases the time while it's running this could result in a negative
# percentage for the progress bar. Adjust the start time so that it never happens:
self.start_seconds_left = max(self.start_seconds_left, self.get_seconds_left())
def on_h_changed(self, spin_button):
self.on_time_changed()
def on_min_changed(self, spin_button):
"""
        When minutes drop below 0 decrease hours and when they get above 59 increase hours
"""
while spin_button.get_value() < 0:
if self.spin_buttons[0].get_value() == 0:
spin_button.set_value(0)
else:
spin_button.set_value(spin_button.get_value() + 60)
self.spin_buttons[0].set_value(self.spin_buttons[0].get_value() - 1)
while spin_button.get_value() > 59:
spin_button.set_value(spin_button.get_value() - 60)
self.spin_buttons[0].set_value(self.spin_buttons[0].get_value() + 1)
self.on_time_changed()
def on_s_changed(self, spin_button):
"""
        When seconds drop below 0 decrease minutes and when they get above 59 increase minutes
"""
while spin_button.get_value() < 0:
if self.spin_buttons[0].get_value() == 0 and self.spin_buttons[1].get_value() == 0:
spin_button.set_value(0)
else:
spin_button.set_value(spin_button.get_value() + 60)
self.spin_buttons[1].set_value(self.spin_buttons[1].get_value() - 1)
while spin_button.get_value() > 59:
spin_button.set_value(spin_button.get_value() - 60)
self.spin_buttons[1].set_value(self.spin_buttons[1].get_value() + 1)
self.on_time_changed()
def on_delete_window(self, *args):
Gtk.main_quit(*args)
def get_seconds_left(self):
return self.spin_buttons[0].get_value() * 3600 + self.spin_buttons[1].get_value() * 60 + \
self.spin_buttons[2].get_value()
style_provider = Gtk.CssProvider()
style_provider.load_from_data(b""".install-progress {
background-image: linear-gradient(to top, @theme_selected_bg_color 2px, alpha(@theme_selected_bg_color, 0) 2px);
background-repeat: no-repeat;
backg
imincik/gis-lab | utils/send-message.py | Python | gpl-3.0 | 1,440 | 0.008333
#!/usr/bin/env python
"""
Send message to '#gis.lab' IRC chat room.
Requires to run script 'utils/join-gislab-network.py' first to get connection
with server.
USAGE: send-message.py <message>
"""
import os, sys
import re
import socket
try:
message = sys.argv[1]
except IndexError:
print __doc__
sys.exit(0)
DIR=os.path.dirname(os.path.abspath(__file__))
def get_config(variable):
c = open(os.path.join(os.path.dirname(DIR), "config.cfg"), "ro")
for line in c:
if re.match("^" + variable, line):
value = line.split("=")[1].replace("'", "").replace('"', '')
c.close()
break
c = open(os.path.join(os.path.dirname(DIR), "config-user.cfg"), "ro")
for line in c:
if re.match("^" + variable, line):
value = line.split("=")[1].replace("'", "").replace('"', '')
c.close()
break
return value.strip()
GISLAB_NETWORK = get_config("GISLAB_NETWORK")
HOST="{0}.5".format(GISLAB_NETWORK)
PORT=6667
NICK=IDENT=os.environ['USER']
REALNAME="script"
CHANNEL="gis.lab"
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((HOST, PORT))
print s.recv(4096)
s.send("NICK %s\r\n" % NICK)
s.send("USER %s %s bla :%s\r\n" % (IDENT, HOST, REALNAME))
s.send("JOIN #%s\r\n" % CHANNEL)
s.send("PRIVMSG #%s :%s\r\n" % (CHANNEL, message))
s.send("QUIT: End of message.\r\n")
s.recv(4096)
s.close()
print "Done."
# vim: ts=8 sts=4 sw=4 et:
wxwilcke/MINOS | directives/pakbonLD_B3.py | Python | gpl-3.0 | 7,661 | 0.00496
#!/usr/bin/python3
import logging
from operator import itemgetter
from timeit import default_timer as timer
import rdflib
from .abstract_instruction_set import AbstractInstructionSet
from readers import rdf
from writers import rule_set, pickler
from samplers import by_definition as sampler
from algorithms.semantic_rule_learning import generate_semantic_association_rules,\
generate_semantic_item_sets,\
generate_common_behaviour_sets,\
support_of,\
confidence_of
class PakbonLD(AbstractInstructionSet):
def __init__(self, time=""):
self.time = time
self.logger = logging.getLogger(__name__)
def print_header(self):
header = "PAKBON: Context ('Sporen') with 12 attributes"
print(header)
print('-' * len(header))
def load_dataset(self, abox, tbox):
"""
# pakbonLD SPARQL endpoint
endpoint = "http://pakbon-ld.spider.d2s.labs.vu.nl/sparql/"
# query
query_string = "" "
prefix pbont: <http://pakbon-ld.spider.d2s.labs.vu.nl/ont/>
prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
SELECT DISTINCT ?s ?p ?o
WHERE {
?s a pbont:SIKB0102S_Vondstcontext;
?p ?o.
FILTER (?p != rdf:type)
} LIMIT 1000"" "
# perform query and return a KnowledgeGraph instance
kg_i = rdf.query(query_string, endpoint)
"""
# read graphs
kg_i = rdf.read(local_path=abox)
kg_s = rdf.read(local_path=tbox)
# sample by pattern
pattern = (None,
rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_grondspoortype"),
None)
# define context
# spoor with vulling
context = [rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_grondspoortype"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P53i_is_former_or_current_location_of"),
(rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P89_falls_within"),
rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_contexttype")),
(rdflib.URIRef("http://purl.org/crmeh#EHP3i"),
rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_kleur")),
(rdflib.URIRef("http://purl.org/crmeh#EHP3i"),
rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_textuur")),
(rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P53i_is_former_or_current_location_of"),
rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_structuurtype")),
(rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_diepte"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P40_observed_dimension"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P90_has_value")),
(rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_diepte"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P40_observed_dimension"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P91_has_unit")),
(rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P140i_was_attributed_by"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P141_assigned"),
rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_beginperiode")),
(rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P140i_was_attributed_by"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P141_assigned"),
rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_eindperiode")),
(rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P53i_is_former_or_current_location_of"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P140i_was_attributed_by"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P141_assigned"),
rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_beginperiode")),
(rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P53i_is_former_or_current_location_of"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P140i_was_attributed_by"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P141_assigned"),
rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_eindperiode"))]
kg_i_sampled = kg_i.sample(sampler, patterns=[pattern], context=context)
return (kg_i_sampled, kg_s)
def run_program(self, dataset, hyperparameters):
self.logger.info("Starting run\nParameters:\n{}".format(
"\n".join(["\t{}: {}".format(k,v) for k,v in hyperparameters.items()])))
kg_i, kg_s = dataset
# fit model
t0 = timer()
# generate semantic item sets from sampled graph
si_sets = generate_semantic_item_sets(kg_i)
# generate common behaviour sets
cbs_sets = generate_common_behaviour_sets(si_sets,
hyperparameters["similarity_threshold"],
hyperparameters["max_cbs_size"])
# generate semantic association rules
rules = generate_semantic_association_rules(kg_i,
kg_s,
cbs_sets,
hyperparameters["minimal_local_support"])
# calculate support and confidence, skip those not meeting minimum requirements
final_rule_set = []
for rule in rules:
support = support_of(kg_i, rule)
confidence = confidence_of(kg_i, rule)
if support >= hyperparameters["minimal_support"] and\
confidence >= hyperparameters["minimal_confidence"]:
final_rule_set.append((rule, support, confidence))
# sorting rules on both support and confidence
final_rule_set.sort(key=itemgetter(2, 1), reverse=True)
# time took
t1 = timer()
dt = t1 - t0
print(" Program completed in {:.3f} ms".format(dt))
print(" Found {} rules".format(len(final_rule_set)))
return final_rule_set
def write_to_file(self, path="./of/latest", output=[]):
overwrite = False
print(" Writing output to {}...".format(path))
rule_set.pretty_write(output, path, overwrite)
pickler.write(output, path+".pickle", overwrite)
def run(self, abox, tbox, output_path):
self.print_header()
print(" {}\n".format(self.time))
hyperparameters = {}
hyperparameters["similarity_threshold"] = .8
        hyperparameters["max_cbs_size"] = 4
hyperparameters["minimal_local_support"] = 0.0
hyperparameters["minimal_support"] = 0.0
hyperparameters["minimal_confidence"] = 0.0
print(" Importing Data Sets...")
dataset = self.load_dataset(abox, tbox)
print(" Initiated Pattern Learning...")
output = self.run_program(dataset, hyperparameters)
if len(output) > 0:
self.write_to_file(output_path, output)
TUDelftNAS/SDN-NaaSPlatform | NaaSPlatform/Load_Balancing_App.py | Python | gpl-3.0 | 62,701 | 0.003397
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# #Copyright (C) 2015, Delft University of Technology, Faculty of Electrical Engineering, Mathematics and Computer Science, Network Architectures and Services and TNO, ICT - Service Enabling and Management, Mani Prashanth Varma Manthena, Niels van Adrichem, Casper van den Broek and F. A. Kuipers
#
# This file is part of NaaSPlatform.
#
# NaaSPlatform is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# NaaSPlatform is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NaaSPlatform. If not, see <http://www.gnu.org/licenses/>.
# Network-as-a-Service (NaaS) platform's load balancing application
# Importing Python modules
import sys # Python module for system (i.e. interpreter) specific parameters and functions
import select # Python module for I/O completion waiting
import time # Python module to perform various time related functions
# Importing NaaS platform's main application for performing NaaS related operations and functions
from Main_App import *
# Importing NaaS platform's sFlow based edge flow monitoring application for monitoring and detecting large traffic flows at the network edge
from sFlow_Edge_Flow_Monitoring_App import *
# Importing NaaS platform's sFlow based Core interface monitoring application for monitoring and detecting high bandwidth interface utilizations and failures in the network core of Testbed network 1 (i.e. network (i.e. edge + core) with sFlow enabled open (i.e. OF/OVS) switches)
from sFlow_Core_Interface_Monitoring_App import *
# Importing NaaS platform's SNMP based Core interface monitoring application for monitoring and detecting high bandwidth interface utilizations and failures in the network core of Testbed network 2 (i.e. network core with legacy (i.e. vendor-specific) switches)
from SNMP_Core_Interface_Monitoring_App import *
# Importing NaaS platform's optimal path computation application for optimal path computations and selections
from Optimal_Path_Computation_App import *
class load_balance_testbed_network_1():
# Starting a load balancing application for a network (i.e. edge + core) of open (i.e. OF/OVS) switches
def __init__(self):
try:
odl_base = naas_arch().odl_base_url()
url_odl = odl_base['URL']
ip_odl = odl_base['Host IP']
odl_header = naas_arch().odl_api_header()
cred = naas_arch().odl_user_cred()
name = cred['User Name']
password = cred['Password']
edge_sflow_base = naas_arch().edge_sflow_base_url()
url_edge_sflow = edge_sflow_base['URL']
ip_edge_sflow = edge_sflow_base['Host IP']
core_sflow_base = naas_arch().core_sflow_base_url()
url_core_sflow = core_sflow_base['URL']
ip_core_sflow = core_sflow_base['Host IP']
sflow_header = naas_arch().sflow_api_header()
odl_switches = naas_arch().odl_switches()
odl_switches_ip = naas_arch().odl_switches_ip()
edge_sflow_agents = naas_arch().edge_sflow_agents()
core_sflow_agents = naas_arch().core_sflow_agents()
testbed_1_topo = naas_arch().testbed_1_topology()
testbed_1_lsps = naas_arch().testbed_1_path_bindings()
sflow_if_map = naas_arch().sflow_interface_mapping()
flow = odl_api_json_formats()
stat = odl_api_flow_stat()
odl = odl_api_calls()
sflow = sflow_api_calls()
print '\n\n\n'
edge_flow = network_edge_flow_monitoring()
print '\n\n\n'
core_mon = sflow_network_core_interface_monitoring()
while True:
print '\n\n\n\nEnter the above configured sFlow based edge flow monitoring application name...\n\n'
flow_name = raw_input('sFlow based Edge Flow Monitoring Application Name (Required): ')
url_sflow = url_edge_sflow
flow_def = sflow.sflow_flow_def(url_sflow, flow_name)
if flow_def == {}:
print '\n\nThere is no such sFlow based edge flow monitoring application that is currently running in the NaaS platform...\n\n'
print '\n\nRe-configure and Re-enter the sFlow based edge flow monitoring application name...\n\n'
else:
break
flow_keys = flow_def['keys']
keys = re.sub(r'\s', '', flow_keys).split(',')
source_key = keys[0]
destination_key = keys[1]
print '\n\n\n\nEnter the priority value for this load balancing application and its corresponding actions...\n\n'
priority_load_balance = raw_input('Load Balancing Priority Value (Default Value = 100): ')
if priority_load_balance == '':
priority_load_balance = '100'
print '\n\n\n\nEnter the load balancing query timeout/interval (i.e. in seconds)...\n\n'
timeout = raw_input('Load Balancing Query Timeout/Interval (Default Value: 20): ')
if timeout == '':
timeout = 10
timeout = int(timeout)
print '\n\nStarting the load balancing application...\n\n'
while True:
print '\n\nQuerying for network core interface monitoring triggered events...\n\n'
print '\n\nChecking for Interface/Link Failures in the Network Core..\n\n'
delete_links = {}
update_link_weights = {}
int_failures = {}
high_utis = {}
int_failures = core_mon.int_fail_events()
if int_failures != {}:
for key in int_failures:
agent_node = int_failures[key]['Agent']
agent_interface_id = int_failures[key]['Interface ID']
agent_interface = sflow_if_map[agent_interface_id]
if delete_links != {}:
m = 0
for key in delete_links:
if key == agent_node:
m += 1
if m != 0:
old_link_list = delete_links[agent_node]
old_link_list.append(agent_interface)
delete_links[agent_node] = old_link_list
else:
new_link_list = []
new_link_list.append(agent_interface)
delete_links[agent_node] = new_link_list
else:
new_link_list = []
new_link_list.append(agent_interface)
delete_links[agent_node] = new_link_list
paths = optimal_testbed_network_1().optimal_path(delete_links, update_link_weights)
if paths != {}:
all_paths_right = paths['All Paths Right']
all_paths_left = paths['All Paths Left']
shortest_path_right = paths['Shortest Path Right']
shortest_path_left = paths['Shortest Path Left']
no_path_labels = []
sh
simonolander/euler | euler-106.py | Python | mit | 450 | 0.002222
import itertools
import numpy
import math
def ncr(n, r):
f = math.factorial
return f(n) // f(r) // f(n-r)
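# Added note (not in the original file): ncr(n, r) computes the binomial coefficient
# C(n, r) = n! / (r! * (n-r)!); for example ncr(6, 2) == 15.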
def subset_pairs(s):
for a_size in range(1, len(s)):
for a in itertools.combinations(s, a_size):
remaining = s.difference(a)
for b_size in range(1, len(remaining) + 1):
for b in itertools.combinations(remaining, b_size):
yield a, b
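# Added illustration (not in the original file): subset_pairs yields ordered pairs of
# disjoint, non-empty subsets of s; for s = {1, 2} it yields ((1,), (2,)) and
# ((2,), (1,)), in some order.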
[11, 18, 19, 20, 22, 25]
pliniopereira/ccd10 | src/business/configuration/configProject.py | Python | gpl-3.0 | 2,363 | 0.003386
from PyQt5 import QtCore
from src.business.configuration.constants import project as p
from src.ui.commons.verification import cb
class ConfigProject:
def __init__(self):
self._settings = QtCore.QSettings(p.CONFIG_FILE, QtCore.QSettings.IniFormat)
def get_value(self, menu, value):
return self._settings.value(menu + '/' + value)
def set_site_settings(self, name, site_id, imager_id):
self._settings.beginGroup(p.SITE_TITLE)
self._settings.setValue(p.NAME, name)
self._settings.setValue(p.SITE_ID, site_id)
self._settings.setValue(p.IMAGER_ID, imager_id)
self._settings.endGroup()
def set_geographic_settings(self, lat, long, elev, press, temp):
self._settings.beginGroup(p.GEOGRAPHIC_TITLE)
self._settings.setValue(p.LATITUDE, lat)
self._settings.setValue(p.LONGITUDE, long)
self._settings.setValue(p.ELEVATION, elev)
self._settings.setValue(p.PRESSURE, press)
self._settings.setValue(p.TEMPERATURE, temp)
self._settings.endGroup()
    def set_moonsun_settings(self, solarelev, ignoreLunar, lunarph, lunarpos):
self._settings.beginGroup(p.SUN_MOON_TITLE)
self._settings.setValue(p.MAX_SOLAR_ELEVATION, solarelev)
self._settings.setVal
|
ue(p.IGNORE_LUNAR_POSITION, ignoreLunar)
self._settings.setValue(p.MAX_LUNAR_PHASE, lunarph)
self._settings.setValue(p.MAX_LUNAR_ELEVATION, lunarpos)
self._settings.endGroup()
def save_settings(self):
self._settings.sync()
def get_site_settings(self):
return self.get_value(p.SITE_TITLE, p.NAME),\
self.get_value(p.SITE_TITLE, p.SITE_ID),\
self.get_value(p.SITE_TITLE, p.IMAGER_ID)
def get_geographic_settings(self):
m = p.GEOGRAPHIC_TITLE
return self.get_value(m, p.LATITUDE),\
self.get_value(m, p.LONGITUDE),\
self.get_value(m, p.ELEVATION),\
self.get_value(m, p.PRESSURE),\
self.get_value(m, p.TEMPERATURE)
def get_moonsun_settings(self):
m = p.SUN_MOON_TITLE
return self.get_value(m, p.MAX_SOLAR_ELEVATION),\
cb(self.get_value(m, p.IGNORE_LUNAR_POSITION)),\
self.get_value(m, p.MAX_LUNAR_PHASE),\
self.get_value(m, p.MAX_LUNAR_ELEVATION)
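# Usage sketch (illustrative, not part of the original file). It assumes the
# constants module `p` supplies CONFIG_FILE and the key names used above; the
# site values below are made up.
if __name__ == '__main__':
    config = ConfigProject()
    config.set_site_settings('Example Site', 'SITE-01', 'IMAGER-01')
    config.save_settings()
    print(config.get_site_settings())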
|
ArcticWarriors/scouting-app
|
ScoutingWebsite/Scouting2017/model/get_stastics.py
|
Python
|
mit
| 4,360
| 0.004358
|
from Scouting2017.model.models2017 import ScoreResult
from django.db.models.aggregates import Avg
from django.db.models.expressions import Case, When
import json
import math
import collections
def get_statistics(regional_code, teams_at_competition, team=0):
    '''
    The get_statistics() function returns two collections of metrics.
    The first, stats, is a dictionary of overall averages for all score results, along with the standard deviations of those score results around the mean.
    The second, skills, is a list with one entry per team, containing the team's z-scores, its calculated fuel scores for autonomous, teleop, and overall, and its rope-climbing accuracy.
    '''
skills = []
competition_srs = ScoreResult.objects.filter(competition__code=regional_code)
competition_averages = competition_srs.aggregate(Avg('auto_gears'),
Avg('auto_fuel_high_score'),
Avg('tele_gears'),
Avg('tele_fuel_high_score'),
rope__avg=Avg(Case(When(rope=True, then=1), When(rope=False, then=0))))
rope_avg = competition_averages['rope__avg']
gear_avg = competition_averages['tele_gears__avg']
if competition_averages['auto_fuel_high_score__avg'] and competition_averages['tele_fuel_high_score__avg']:
fuel_avg = competition_averages['auto_fuel_high_score__avg'] + (competition_averages['tele_fuel_high_score__avg'] / 3)
else:
fuel_avg = 0
# This part of the function (above) obtains overall averages for all score results
gear_v2 = 0
fuel_v2 = 0
rope_v2 = 0
num_srs = 0
for sr in competition_srs:
if sr.rope:
sr_rope = 1 - rope_avg
        else:
            sr_rope = 0 - rope_avg
sr_gear = sr.tele_gears - gear_avg
sr_fuel = ((sr.auto_fuel_high_score) + (sr.tele_fuel_high_score / 3)) - fuel_avg
gear_v2 += sr_gear * sr_gear
fuel_v2 += sr_fuel * sr_fuel
rope_v2 += sr_rope * sr_rope
num_srs += 1
if num_srs == 0:
        gear_stdev = 0
fuel_stdev = 0
rope_stdev = 0
else:
gear_stdev = math.sqrt(gear_v2 / num_srs)
fuel_stdev = math.sqrt(fuel_v2 / num_srs)
rope_stdev = math.sqrt(rope_v2 / num_srs)
team_avgs = collections.defaultdict(int)
# This part of the function (above) obtains overall standard deviations for all score results
teams = team if bool(team) else teams_at_competition
for team in teams:
teams_srs = team.scoreresult_set.filter(competition__code=regional_code)
team_avgs = teams_srs.aggregate(Avg('tele_gears'),
Avg('tele_fuel_high_score'),
Avg('auto_fuel_high_score'),
team_rope__avg=Avg(Case(When(rope=True, then=1), When(rope=False, then=0))))
team.skills = {}
team.skills['fuel_z'] = 'NA'
team.skills['gear_z'] = 'NA'
team.skills['rope_z'] = 'NA'
team.skills['rope_pct'] = 'NA'
if len(teams_srs) != 0:
team.skills['fuel_score'] = ((team_avgs['auto_fuel_high_score__avg']) + (team_avgs['tele_fuel_high_score__avg'] / 3))
team.skills['gear_z'] = (team_avgs['tele_gears__avg'] - gear_avg) / gear_stdev if gear_stdev != 0 else 0
team.skills['fuel_z'] = (((team_avgs['auto_fuel_high_score__avg']) + (team_avgs['tele_fuel_high_score__avg'] / 3)) - fuel_avg) / fuel_stdev if fuel_stdev != 0 else 0
team.skills['rope_z'] = (team_avgs['team_rope__avg'] - rope_avg) / rope_stdev if rope_stdev != 0 else 0
team.skills['rope_pct'] = team_avgs['team_rope__avg'] * 100
skills.append({'team': team.teamNumber, 'skills': team.skills})
stats = {'gear_avg': gear_avg, 'rope_avg': rope_avg, 'fuel_avg': fuel_avg, 'fuel_hi_avg': team_avgs['tele_fuel_high_score__avg'],
'fuel_hi_auto_avg': team_avgs['auto_fuel_high_score__avg'], 'auto_gear_avg': competition_averages['auto_gears__avg'], 'gear_stdev': gear_stdev, 'rope_stdev': rope_stdev, 'fuel_stdev': fuel_stdev}
return (stats, json.dumps(skills))
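# Worked example (illustrative, not part of the original file) of the z-score
# arithmetic used above: with a competition gear average of 4.0 and a gear
# standard deviation of 1.5, a team averaging 6.1 gears per match gets
# gear_z = (6.1 - 4.0) / 1.5 = 1.4, i.e. about 1.4 standard deviations above
# the competition mean. The numbers are made up.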
|
miing/mci_migo
|
identityprovider/fields.py
|
Python
|
agpl-3.0
| 1,229
| 0
|
# Copyright 2010 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
import re
from django import forms
from django.utils.translation import ugettext as _
from identityprovider.widgets import CommaSeparatedWidget
class CommaSeparatedField(forms.MultipleChoiceField):
widget = CommaSeparatedWidget
def clean(self, value):
return ','.join(super(CommaSeparatedField, self).clean(value))
class OATHPasswordField(forms.CharField):
"""A string of between 6 or 8 digits."""
widget = forms.widgets.TextInput(attrs={
'autocomplete': 'off',
'autofocus': 'autofocus'
})
SIX = re.compile('[0-9]{6}$')
EIGHT = re.compile('[0-9]{8}$')
def clean(self, value):
"""Validate otp and detect type"""
|
# remove any whitespace from the string
if value:
value = value.strip().replace(' ', '')
value = super(OATHPasswordField, self).clean(value)
if self.SIX.match(value):
return value
        elif self.EIGHT.match(value):
return value
raise forms.ValidationError(
_('Please enter a 6-digit or 8-digit one-time password.'))
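# Usage sketch (illustrative, not part of the original file): inside a Django
# form, the field strips whitespace and accepts only 6- or 8-digit codes.
# The form and field names below are made up.
#
#     class TwoFactorForm(forms.Form):
#         oath_token = OATHPasswordField()
#
#     TwoFactorForm({'oath_token': '123 456'}).is_valid()   # True -> '123456'
#     TwoFactorForm({'oath_token': '12345'}).is_valid()     # False (5 digits)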
|
ganga-devs/ganga
|
ganga/GangaGUI/test/test_internal_templates_api.py
|
Python
|
gpl-3.0
| 3,358
| 0.001489
|
from GangaCore.testlib.GangaUnitTest import GangaUnitTest
from GangaGUI.api import internal
# ******************** Test Class ******************** #
# Templates API Tests
class TestGangaGUIInternalTemplatesAPI(GangaUnitTest):
# Setup
def setUp(self, extra_opts=[]):
super(TestGangaGUIInternalTemplatesAPI, self).setUp(extra_opts=[])
# App config and database creation
internal.config["TESTING"] = True
|
# Flask test client
self.app = internal.test_client()
# Templates API - GET Method
def test_GET_method_templates_list(self):
from GangaCore.GPI import templates, JobTemplate, GenericSplitter, Local
# Create 20 test templates
        for i in range(0, 20):
            t = JobTemplate()
t.name = f"Template Test {i}"
t.application.exe = "sleep"
t.splitter = GenericSplitter()
t.splitter.attribute = 'application.args'
t.splitter.values = [['3'] for _ in range(0, 3)]
t.backend = Local()
# GET request
res = self.app.get(f"/internal/templates")
self.assertTrue(res.status_code == 200)
self.assertTrue(len(res.json) == 20)
# Response data assertions
supported_attributes = ["id", "fqid", "name", "application", "backend", "comment", "backend.actualCE"]
for i in range(0, 20):
for attribute in supported_attributes:
self.assertTrue(attribute in res.json[i])
self.assertTrue(res.json[i]["name"] == f"Template Test {i}")
    # Templates API - DELETE Method, ID Out of Range
def test_DELETE_method_id_out_of_range(self):
res = self.app.delete(f"/internal/templates/1")
self.assertTrue(res.status_code == 400)
# Templates API - DELETE Method, ID is Negative
def test_DELETE_method_id_negative(self):
res = self.app.delete(f"/internal/templates/-1")
self.assertTrue(res.status_code == 404)
# Templates API - DELETE Method, ID is String
def test_DELETE_method_id_string(self):
res = self.app.delete(f"/internal/templates/test")
self.assertTrue(res.status_code == 404)
# Templates API - DELETE Method
def test_DELETE_method_templates_list(self):
from GangaCore.GPI import templates, JobTemplate, GenericSplitter, Local
# Clean template repository check
self.assertTrue(len(templates) == 0)
# Create 20 test templates
created_template_ids = []
for i in range(0, 20):
t = JobTemplate()
t.name = f"Template Test {i}"
created_template_ids.append(t.id)
self.assertTrue(len(templates) == 20)
self.assertTrue(len(created_template_ids) == 20)
# Delete one template every request and assert the deletion
for i in range(0,20):
self.assertTrue(created_template_ids[i] in templates.ids())
res = self.app.delete(f"/internal/templates/{created_template_ids[i]}")
self.assertTrue(res.status_code == 200)
self.assertTrue(len(templates) == (20-(i+1)))
self.assertTrue(created_template_ids[i] not in templates.ids())
# Tear down
def tearDown(self):
super(TestGangaGUIInternalTemplatesAPI, self).tearDown()
# ******************** EOF ******************** #
|
CamJam-EduKit/EduKit3
|
CamJam Edukit 3 - RPi.GPIO/Code/7-pwm.py
|
Python
|
mit
| 2,662
| 0
|
# CamJam EduKit 3 - Robotics
# Worksheet 7 - Controlling the motors with PWM
import RPi.GPIO as GPIO # Import the GPIO Library
import time # Import the Time library
# Set the GPIO modes
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
# Set variables for the GPIO motor pins
pinMotorAForwards = 10
pinMotorABackwards = 9
pinMotorBForwards = 8
pinMotorBBackwards = 7
# How many times to turn the pin on and off each second
Frequency = 20
# How long the pin stays on each cycle, as a percent (here, it's 30%)
DutyCycle = 30
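# For example (added for illustration): at Frequency = 20 Hz each PWM cycle
# lasts 1/20 = 0.05 s, so a DutyCycle of 30 keeps each motor pin high for
# roughly 0.05 * 0.30 = 0.015 s (15 ms) of every cycle.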
# Setting the duty cycle to 0 means the motors will not turn
Stop = 0
# Set the GPIO Pin mode to be Output
GPIO.setup(pinMotorAForwards, GPIO.OUT)
GPIO.setup(pinMotorABackwards, GPIO.OUT)
GPIO.setup(pinMotorBForwards, GPIO.OUT)
GPIO.setup(pinMotorBBackwards, GPIO.OUT)
# Set the GPIO to software PWM at 'Frequency' Hertz
pwmMotorAForwards = GPIO.PWM(pinMotorAForwards, Frequency)
pwmMotorABackwards = GPIO.PWM(pinMotorABackwards, Frequency)
pwmMotorBForwards = GPIO.PWM(pinMotorBForwards, Frequency)
pwmMotorBBackwards = GPIO.PWM(pinMotorBBackwards, Frequency)
# Start the software PWM with a duty cycle of 0 (i.e. not moving)
pwmMotorAForwards.start(Stop)
pwmMotorABackwards.start(Stop)
pwmMotorBForwards.start(Stop)
pwmMotorBBackwards.start(Stop)
# Turn all motors off
def stopmotors():
pwmMotorAForwards.ChangeDutyCycle(Stop)
pwmMotorABackwards.ChangeDutyCycle(Stop)
pwmMotorBForwards.ChangeDutyCycle(Stop)
pwmMotorBBackwards.ChangeDutyCycle(Stop)
# Turn both motors forwards
def forwards():
pwmMotorAForwards.ChangeDutyCycle(DutyCycle)
pwmMotorABackwards.ChangeDutyCycle(Stop)
pwmMotorBForwards.ChangeDutyCycle(DutyCycle)
pwmMotorBBackwards.ChangeDutyCycle(Stop)
# Turn both motors backwards
def backwards():
pwmMotorAForwards.ChangeDutyCycle(Stop)
pwmMotorABackwards.ChangeDutyCycle(DutyCycle)
pwmMotorBForwards.ChangeDutyCycle(Stop)
pwmMotorBBackwards.ChangeDutyCycle(DutyCycle)
# Turn left
def left():
pwmMotorAForwards.ChangeDutyCycle(Stop)
pwmMotorABackwards.ChangeDutyCycle(DutyCycle)
pwmMotorBForwards.ChangeDutyCycle(DutyCycle)
pwmMotorBBackwards.ChangeDutyCycle(Stop)
# Turn Right
def right():
pwmMotorAForwards.ChangeDutyCycle(DutyCycle)
    pwmMotorABackwards.ChangeDutyCycle(Stop)
pwmMotorBForwards.ChangeDutyCycle(Stop)
pwmMotorBBackwards.ChangeDutyCycle(DutyCycle)
# Your code to control the robot goes below this line
forwards()
time.sleep(1) # Pause for 1 second
left()
time.sleep(0.5) # Pause for half a second
forwards()
time.sleep(1)
right()
time.sleep(0.5)
backwards()
time.sleep(0.5)
stopmotors()
GPIO.cleanup()
|
thezakman/CTF-Scripts
|
Differ.py
|
Python
|
artistic-2.0
| 631
| 0.011094
|
#!/usr/bin/python
# Script to Check the difference in 2 files
# 1 February 2015
# https://github.com/thezakman
file1 = raw_input('[file1:] ')
modified = open(file1,"r").readlines()[0]
file2 = raw_input('[file2:] ')
pi = open(file2, "r").readlines()[0] # [:len(modified)]
resultado = "".join( x for x,y in zip(modified, pi) if x != y)
resultado2 = "".join( x for x,y in zip(pi, modified) if x != y)
print "[Differ:]
print '\n-------------------------------------'
print "[file1] -> [file2]", resultado
print '-------------------------------------'
print "[file2] -> [file1]", resultado2
|