code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1
value | license stringclasses 15
values | size int64 3 1.05M |
|---|---|---|---|---|---|
from __future__ import print_function
#import glob
import os
from collections import defaultdict
from six import iteritems
import numpy as np
#from numpy import where, unique, array, zeros, searchsorted, log10, array_equal
from pyNastran.bdf.bdf import BDF
from pyNastran.op2.op2 import OP2, read_op2, FatalError
from pyNastran.applications.aero_panel_buckling.run_patch_buckling_helper import (
load_sym_regions_map)
def load_regions(regions_filename):
    """
    Loads a regions.csv file

    Parameters
    ----------
    regions_filename : str
        path to the CSV file; the first line is a header and each data
        row is "pid, patch_id, eid1, eid2, ..."

    Returns
    -------
    regions_to_pid_map : dict[int, int]
        maps a patch id to its property id
    regions_to_eids_map : dict[int, List[int]]
        maps a patch id to its element ids
    """
    regions_to_pid_map = {}
    regions_to_eids_map = {}
    with open(regions_filename, 'r') as regions_file:
        # drop the header row
        data_lines = regions_file.readlines()[1:]
    for data_line in data_lines:
        fields = [int(field) for field in data_line.strip().split(',')]
        property_id = fields[0]
        patch_id = fields[1]
        regions_to_pid_map[patch_id] = property_id
        regions_to_eids_map[patch_id] = fields[2:]
    return regions_to_pid_map, regions_to_eids_map
def load_regions_and_create_eigenvalue_csv(bdf_model, op2_filenames,
                                           regions_filename, sym_regions_filename=None,
                                           eig_min=-1.0, eig_max=1.0, eig_default=3.0):
    """
    loads a BDF and a series of OP2 filenames and creates an eigenvalue buckling plot

    Parameters
    ----------
    bdf_model : BDF
        the loaded BDF model
    op2_filenames : List[str]
        the patch OP2 result files; named like "patch_<patch_id>.op2"
    regions_filename : str
        path to regions.txt file
    sym_regions_filename : str; default=None -> No symmetry
        path to sym_regions.txt file
    eig_min : float
        the required minimum eigenvalue
    eig_max : float
        the required maximum eigenvalue
    eig_default : float
        the default eigenvalue for cases that do not calculate eigenvectors
        because there were no eigenvalues in the range

    Returns
    -------
    min_eigenvalue_by_patch_id : dict
        key : patch_id : int
            the integer patch id
        value : eigenvalue or reserve factor
            the reserve factor eigenvalue for buckling
    eigenvalues : (n, ) float ndarray
        the minimum eigenvalues

    Creates
    -------
    eigenvalues_output.csv : file
        csv of log10(eigenvalue), eigenvalue, is_buckled
    """
    bdf_model.log.info('load_regions_and_create_eigenvalue_csv')
    assert isinstance(bdf_model, BDF), type(bdf_model)
    min_eigenvalue_by_patch_id = {}
    is_sym_regions = False
    if sym_regions_filename is not None:
        is_sym_regions = True
        region_to_symregion_map, symregion_to_region_map = load_sym_regions_map(
            sym_regions_filename)
    msg = ''
    assert len(op2_filenames) > 0, 'op2_filenames=%s' % op2_filenames
    print('eig_min=%s eig_max=%s' % (eig_min, eig_max))
    for op2_filename in op2_filenames:
        bdf_model.log.info('op2_filename = %r' % op2_filename)
        if not os.path.exists(op2_filename):
            print(op2_filename)
            continue
        # op2 filenames look like "patch_42.op2" -> patch_id = 42
        patch_id_str = op2_filename.split('_')[1].split('.')[0]
        patch_id = int(patch_id_str)
        sym_patch_id = None
        if is_sym_regions:
            if patch_id in symregion_to_region_map:
                sym_patch_id = symregion_to_region_map[patch_id]
            elif patch_id in region_to_symregion_map:
                sym_patch_id = region_to_symregion_map[patch_id]
            else:
                raise RuntimeError("can this happen???")
        try:
            model2 = read_op2(op2_filename, combine=True, log=None,
                              debug=False, mode='msc')
        except FatalError:
            print('fatal on %r' % op2_filename)
            msg += '%s\n' % op2_filename
            continue
        # list() so the first key can be taken by index under Python 3,
        # where dict.keys() returns a non-indexable view
        cases = list(model2.eigenvectors.keys())
        if len(cases) == 0:
            # no eigenvectors were computed for this patch; fall back on the
            # default eigenvalue
            min_eigenvalue_by_patch_id[patch_id] = eig_default
            # only record the mirrored patch when symmetry is active;
            # otherwise sym_patch_id is None and would pollute the dict
            if is_sym_regions:
                min_eigenvalue_by_patch_id[sym_patch_id] = eig_default
            continue
        isubcase = cases[0]
        eigenvector = model2.eigenvectors[isubcase]
        eigrs = np.array(eigenvector.eigrs)
        #----------------------------------
        # calculate what's basically a reserve factor (RF); margin = reserve_factor - 1
        # take the minimum of the "tension"/"compression" RFs, which are
        # compared to different allowables

        # lambda > 0 : compression buckling
        ipos = np.where(eigrs >= 0.0)[0]
        if len(ipos) == 0:
            pos_eigenvalue = eig_default  # TODO: no buckling eigenvalue...wat?
            pos_reserve_factor = eig_default
        else:
            pos_eigenvalue = eigrs[ipos].min()
            pos_reserve_factor = pos_eigenvalue / eig_max
        # lambda < 0 ("tension") handling is intentionally disabled; values
        # of 10. are large enough that they never drive the min() below
        neg_eigenvalue = 10.
        neg_reserve_factor = 10.
        bdf_model.log.info('Patch=%s compression (lambda > 0); lambda=%.3f RF=%.3f' % (
            patch_id, pos_eigenvalue, pos_reserve_factor))
        reserve_factor = min(neg_reserve_factor, pos_reserve_factor, eig_default)
        assert reserve_factor > 0.
        min_eigenvalue_by_patch_id[patch_id] = reserve_factor
        if is_sym_regions:
            min_eigenvalue_by_patch_id[sym_patch_id] = reserve_factor
    print(msg)
    bdf_model.log.info('finished parsing eigenvalues...')
    # list() for Python 3: np.unique cannot sort a dict_keys view directly
    all_eids = np.unique(list(bdf_model.elements.keys()))
    neids = len(all_eids)
    eigenvalues = np.zeros(neids, dtype='float32')
    with open(regions_filename, 'r') as regions_file:
        lines = regions_file.readlines()
    # skip the "# pid, ipanel, eids" header
    for iline, line in enumerate(lines[1:]):
        sline = line.strip().split(',')
        values = [int(val) for val in sline]
        pid = values[0]
        regions_patch_id = values[1]
        eids = values[2:]
        # map the patch's eids onto indices into the sorted all_eids array
        i = np.searchsorted(all_eids, eids)
        assert np.array_equal(all_eids[i], eids), 'iline=%s pid=%s patch_id=%s' % (
            iline, pid, regions_patch_id)
        if regions_patch_id not in min_eigenvalue_by_patch_id:
            bdf_model.log.info('missing pid=%s' % pid)
            continue
        eigenvalues[i] = min_eigenvalue_by_patch_id[regions_patch_id]
    eigenvalue_filename = 'eigenvalues_output.csv'
    with open(eigenvalue_filename, 'w') as eigenvalue_file:
        eigenvalue_file.write('# log(Eigenvalue), eigenvalue, is_buckled\n')
        for eig in eigenvalues:
            # clamp to avoid log10(0.) for elements without a computed RF
            eig = max(eig, 0.000001)
            is_buckled = 1.0 if eig < 1.0 else 0.0
            log10_eig = np.log10(eig)
            eigenvalue_file.write('%f, %f, %i\n' % (log10_eig, eig, is_buckled))
    return min_eigenvalue_by_patch_id, eigenvalues
def split_model_by_pid_panel(patch_filenames, workpath='results'):
    """
    creates a list of element ids for each patch and writes them to
    'regions.txt'

    Parameters
    ----------
    patch_filenames : List[str]
        paths to the patch BDF files; named like "patch_<ipanel>.bdf"
    workpath : str; default='results'
        the working directory (currently unused; retained for API
        compatibility)

    Returns
    -------
    regions_filename : str
        the path of the created 'regions.txt' file
    """
    # maps (property_id, panel_id) -> element ids
    pid_panel = defaultdict(list)
    for patch_filename in patch_filenames:
        basename = os.path.basename(patch_filename)
        try:
            sline = basename.split('.')[0].split('_')[1]
        except IndexError:
            # the filename does not follow the "patch_<ipanel>.bdf" convention
            print('patch_filename=%r' % patch_filename)
            print('basename=%r' % basename)
            raise
        ipanel = int(sline)
        bdf_model = BDF(debug=False)
        bdf_model.read_bdf(patch_filename, xref=False)
        for eid, elem in iteritems(bdf_model.elements):
            key = (elem.pid, ipanel)
            pid_panel[key].append(eid)
    regions_filename = 'regions.txt'
    nregions = 0
    with open(regions_filename, 'w') as regions_file:
        regions_file.write('# pid, ipanel, eids\n')
        for key, eidsi in iteritems(pid_panel):
            pid, ipanel = key
            # str([1, 2, 3])[1:-1] -> "1, 2, 3"
            out = str(eidsi)
            regions_file.write('%s, %s, ' % (pid, ipanel))
            regions_file.write('%s\n' % out[1:-1])
            nregions += 1
    assert nregions > 0, nregions
    return regions_filename
def main():
    """
    Runs the buckling post-processing on a hard-coded model; the card counts
    of that model are:

    key=AELINK value=8
    key=AELIST value=4
    key=AERO value=1
    key=AEROS value=1
    key=AESTAT value=11
    key=AESURF value=4
    key=CAERO1 value=6
    key=CBAR value=8
    key=CONM2 value=1
    key=CORD2R value=4
    key=CTRIA3 value=120338
    key=DMI value=2
    key=EIGRL value=1
    key=ENDDATA value=1
    key=GRID value=55541
    key=MAT1 value=2
    key=PAERO1 value=1
    key=PARAM value=2
    key=PBAR value=1
    key=PSHELL value=117
    key=RBE2 value=4
    key=SET1 value=6
    key=SPLINE1 value=6
    key=SUPORT value=1
    key=TRIM value=4
    """
    workpath = 'results'
    bdf_filename = 'model_144.bdf'
    op2_filenames = [os.path.join(workpath, op2_filename)
                     for op2_filename in ['patch_1.op2', 'patch_2.op2']]
    sym_regions_filename = 'sym_regions_map.csv'
    # load_regions_and_create_eigenvalue_csv asserts it receives a BDF model
    # instance (not a filename), so read the model first
    bdf_model = BDF(debug=False)
    bdf_model.read_bdf(bdf_filename)
    load_regions_and_create_eigenvalue_csv(bdf_model, op2_filenames,
                                           'regions.txt',
                                           sym_regions_filename=sym_regions_filename)
| saullocastro/pyNastran | pyNastran/applications/aero_panel_buckling/split_model.py | Python | lgpl-3.0 | 10,773 |
"""The PiStacking module defines the PiStackingType and
PiStackingInx for explicit hydrogens.
"""
import itertools as it
from collections import namedtuple
import numpy as np
import numpy.linalg as la
from scipy.spatial.distance import cdist
import mastic.config.interactions as masticinxconfig
from mastic.interactions.interactions import InteractionType, Interaction, InteractionError
class PiStackingType(InteractionType):
    """InteractionType for pi-stacking interactions between two aromatic
    ring features (parallel or perpendicular/T-stacked rings).
    """
    attributes = {}
    interaction_name = "PiStacking"
    feature_keys = masticinxconfig.PISTACKING_FEATURE_KEYS
    feature_classifiers = masticinxconfig.PISTACKING_FEATURES
    # a pi-stack is a pairwise, order-independent interaction
    degree = 2
    commutative = True
    interaction_param_keys = masticinxconfig.PISTACKING_PARAM_KEYS
    # parameters set from the config file
    centroid_max_distance = masticinxconfig.PISTACKING_CENTROID_DIST_MAX
    ring_normal_angle_deviation = masticinxconfig.PISTACKING_ANGLE_DEVIATION
    centroid_offset_max = masticinxconfig.PISTACKING_OFFSET_MAX
    heavy_atoms = masticinxconfig.PISTACKING_HEAVY_ATOMS_ELEMENT_SYMBOLS
    def __init__(self, pi_stacking_type_name,
                 feature_types=None,
                 association_type=None,
                 assoc_member_pair_idxs=None,
                 **pi_stacking_attrs):
        """Construct the interaction type; all work is delegated to
        InteractionType.__init__.
        """
        super().__init__(pi_stacking_type_name,
                         feature_types=feature_types,
                         association_type=association_type,
                         assoc_member_pair_idxs=assoc_member_pair_idxs,
                         **pi_stacking_attrs)
    @staticmethod
    def interaction_constructor(*params, **kwargs):
        """Factory hook the framework uses to build concrete interactions."""
        return PiStackingInx(*params, **kwargs)
    @classmethod
    def find_hits(cls, members,
                  interaction_classes=None,
                  return_feature_keys=False,
                  return_failed_hits=False):
        """Scan the members' feature pairs for pi-stacking hits; delegates
        to InteractionType.find_hits.
        """
        # TODO value checks
        # scan the pairs for hits and assign interaction classes if given
        return super().find_hits(members,
                                 interaction_classes=interaction_classes,
                                 return_feature_keys=return_feature_keys,
                                 return_failed_hits=return_failed_hits)
    @classmethod
    def check(cls, arom_a, arom_b):
        """Run all geometric pi-stacking tests on the two aromatic features.

        NOTE(review): the test_features_* methods access `.atoms` on their
        arguments, so feature objects are expected here, yet PiStackingInx
        passes the atom lists themselves -- confirm which is intended.
        """
        features = [arom_a, arom_b]
        feature_tests = [cls.test_features_centroid_distance,
                         cls.test_features_ring_normal_angle,
                         cls.test_features_centroid_offset,
                         cls.test_features_stacking_type]
        return super().check(features, feature_tests)
    @classmethod
    def check_centroid_distance(cls, distance):
        """Return True if the centroid-centroid distance does not exceed
        cls.centroid_max_distance (PISTACKING_CENTROID_DIST_MAX).
        """
        if distance <= cls.centroid_max_distance:
            return True
        else:
            return False
    @classmethod
    def check_ring_normal_angle(cls, angle):
        """Return True if the angle between the ring normals (in degrees) is
        within cls.ring_normal_angle_deviation of either 0 (parallel) or
        90 (perpendicular).
        """
        dev = cls.ring_normal_angle_deviation
        if 0 <= angle <= dev or 90 - dev <= angle <= 90 + dev:
            return True
        else:
            return False
    @classmethod
    def check_centroid_offset_distance(cls, distance):
        """Return True if the in-plane centroid offset does not exceed
        cls.centroid_offset_max (PISTACKING_OFFSET_MAX).
        """
        if distance <= cls.centroid_offset_max:
            return True
        else:
            return False
    @classmethod
    def check_stacking_type(cls, angle):
        """Classify the normal-normal angle (degrees) as 'parallel' (near 0),
        'perpendicular' (near 90), or None if neither.
        """
        dev = cls.ring_normal_angle_deviation
        if 0.0 <= angle <= dev:
            return 'parallel'
        elif 90 - dev <= angle <= 90 + dev:
            return 'perpendicular'
        else:
            return None
    @classmethod
    def test_features_centroid_distance(cls, arom_a, arom_b):
        """Measure the heavy-atom centroid distance of the two rings.

        Returns (passed : bool, centroid_distance : float).
        """
        arom_a_heavy_atom_coords = np.array([atom.coords for atom in arom_a.atoms if
                                             atom.atom_type.element in cls.heavy_atoms])
        arom_b_heavy_atom_coords = np.array([atom.coords for atom in arom_b.atoms if
                                             atom.atom_type.element in cls.heavy_atoms])
        centroid_distance = calc_centroid_distance(arom_a_heavy_atom_coords,
                                                   arom_b_heavy_atom_coords)
        # if this passes then move on
        if cls.check_centroid_distance(centroid_distance) is False:
            return False, centroid_distance
        else:
            return True, centroid_distance
    @classmethod
    def test_features_ring_normal_angle(cls, arom_a, arom_b):
        """Measure the angle between the two ring normals.

        Returns (passed : bool, ring_normal_angle : float, in degrees).
        """
        arom_a_heavy_atom_coords = np.array([atom.coords for atom in arom_a.atoms if
                                             atom.atom_type.element in cls.heavy_atoms])
        arom_b_heavy_atom_coords = np.array([atom.coords for atom in arom_b.atoms if
                                             atom.atom_type.element in cls.heavy_atoms])
        # calculate the normal angle
        ring_normal_angle = calc_arom_normal_angle(arom_a_heavy_atom_coords,
                                                   arom_b_heavy_atom_coords)
        if cls.check_ring_normal_angle(ring_normal_angle) is False:
            return False, ring_normal_angle
        else:
            return True, ring_normal_angle
    @classmethod
    def test_features_centroid_offset(cls, arom_a, arom_b):
        """Measure the in-plane offset between the two ring centroids.

        Returns (passed : bool, centroid_offset : float).
        """
        arom_a_heavy_atom_coords = np.array([atom.coords for atom in arom_a.atoms if
                                             atom.atom_type.element in cls.heavy_atoms])
        arom_b_heavy_atom_coords = np.array([atom.coords for atom in arom_b.atoms if
                                             atom.atom_type.element in cls.heavy_atoms])
        # calculate the centroid offset
        centroid_offset = calc_centroid_offset(arom_a_heavy_atom_coords,
                                               arom_b_heavy_atom_coords)
        if cls.check_centroid_offset_distance(centroid_offset) is False:
            return False, centroid_offset
        else:
            return True, centroid_offset
    @classmethod
    def test_features_stacking_type(cls, arom_a, arom_b):
        """Classify the stacking geometry from the ring-normal angle.

        Returns (passed : bool, stacking_type : str or None), where
        stacking_type is 'parallel' or 'perpendicular' on success.
        """
        arom_a_heavy_atom_coords = np.array([atom.coords for atom in arom_a.atoms if
                                             atom.atom_type.element in cls.heavy_atoms])
        arom_b_heavy_atom_coords = np.array([atom.coords for atom in arom_b.atoms if
                                             atom.atom_type.element in cls.heavy_atoms])
        # Determine whether the stacking is parallel of perpendicular, as
        # a string
        ring_normal_angle = calc_arom_normal_angle(arom_a_heavy_atom_coords,
                                                   arom_b_heavy_atom_coords)
        stacking_type = cls.check_stacking_type(ring_normal_angle)
        if stacking_type is None:
            return False, stacking_type
        else:
            return True, stacking_type
    @property
    def record(self):
        """Return a PiStackingTypeRecord describing this interaction class."""
        record_attr = {'interaction_class' : self.name}
        record_attr['interaction_type'] = self.interaction_name
        record_attr['association_type'] = self.association_type.name
        record_attr['assoc_member_pair_idxs'] = self.assoc_member_pair_idxs
        record_attr['arom_a_feature_type'] = self.feature_types[0]
        record_attr['arom_b_feature_type'] = self.feature_types[1]
        return PiStackingTypeRecord(**record_attr)
# PiStackingTypeRecord
# lightweight record (see PiStackingType.record) describing a pi-stacking
# interaction *class*: where it lives and which feature types participate
_pi_stacking_type_record_fields = ['interaction_class', 'interaction_type',
                                   'association_type', 'assoc_member_pair_idxs',
                                   'arom_a_feature_type', 'arom_b_feature_type']
PiStackingTypeRecord = namedtuple('PiStackingTypeRecord', _pi_stacking_type_record_fields)
def calc_centroid_offset(arom_a_coords, arom_b_coords):
    """Return the lateral offset between the two ring centers.

    The centroid-to-centroid vector is projected onto each ring's plane
    (by rejecting it from that ring's facing normal); the smaller of the
    two in-plane magnitudes is returned.
    """
    centroid_a, centroid_b = calc_centroids(arom_a_coords, arom_b_coords)
    facing_norm_a, facing_norm_b = calc_arom_facing_norms(arom_a_coords, arom_b_coords)
    # vectors between the centroids, one in each direction
    a_to_b = centroid_b - centroid_a
    b_to_a = centroid_a - centroid_b
    # rejection = the component of the centroid vector lying in the ring
    # plane defined by the facing normal
    in_plane_a = calc_vector_rejection(a_to_b, facing_norm_a)
    in_plane_b = calc_vector_rejection(b_to_a, facing_norm_b)
    # report the smaller of the two in-plane magnitudes
    return min(la.norm(in_plane_a), la.norm(in_plane_b))
def calc_vector_rejection(vec_a, vec_b):
    """Return the component of `vec_a` perpendicular to `vec_b`."""
    # a_2 = a - a_1, where a_1 is the projection of a onto b
    return vec_a - calc_vector_projection(vec_a, vec_b)
def calc_vector_projection(vec_a, vec_b):
    """Return the projection of `vec_a` onto `vec_b`."""
    # a_1 = (a . b_hat) * b_hat
    unit_b = vec_b / la.norm(vec_b)
    magnitude = np.dot(vec_a, unit_b)
    return np.dot(magnitude, unit_b)
def calc_arom_facing_norms(arom_a_coords, arom_b_coords):
    """Return each ring's normal vector, flipped so it points toward the
    other ring's centroid."""
    centroids = calc_centroids(arom_a_coords, arom_b_coords)
    norms = calc_arom_norms(arom_a_coords, arom_b_coords)
    face_norms = []
    for this_idx, ring_norm in enumerate(norms):
        # index of the other ring
        other_idx = (this_idx + 1) % 2
        candidates = (ring_norm, -1 * ring_norm)
        # choose the orientation whose tip lies closer to the other centroid
        dists = [cdist([candidate + centroids[this_idx]], [centroids[other_idx]])
                 for candidate in candidates]
        chosen = candidates[0] if dists[0] < dists[1] else candidates[1]
        face_norms.append(chosen)
    return face_norms
def calc_centroids(arom_a_coords, arom_b_coords):
    """Return (centroid_a, centroid_b) for the two rings.

    Each input is an (n_atoms, 3) coordinate array; the centroid is the
    per-axis mean of the ring's atom coordinates.
    """
    centroid_a = arom_a_coords.mean(axis=0)
    centroid_b = arom_b_coords.mean(axis=0)
    # (removed an unused local that packed the centroids into a list)
    return centroid_a, centroid_b
def calc_centroid_distance(arom_a_coords, arom_b_coords):
    """Return the Euclidean distance between the two ring centroids."""
    center_a, center_b = calc_centroids(arom_a_coords, arom_b_coords)
    # cdist returns a 1x1 matrix for two single points
    return cdist([center_a], [center_b])[0, 0]
def calc_arom_norms(arom_a_coords, arom_b_coords):
    """Return a (normal_a, normal_b) tuple of ring-plane normal vectors.

    Each normal is the cross product of two in-plane vectors running from
    the ring centroid to two of the ring's atoms. Only 5- and 6-membered
    rings are supported.

    Raises
    ------
    InteractionError
        if either ring does not have 5 or 6 atoms
    """
    centroids = calc_centroids(arom_a_coords, arom_b_coords)
    norms = []
    for centroid, ring_coords in zip(centroids, (arom_a_coords, arom_b_coords)):
        if len(ring_coords) not in (5, 6):
            raise InteractionError("aromatic rings without 5 or 6 atoms not supported")
        # two non-collinear vectors in the ring plane
        in_plane_0 = ring_coords[0] - centroid
        in_plane_1 = ring_coords[2] - centroid
        norms.append(np.cross(in_plane_0, in_plane_1))
    return tuple(norms)
def calc_arom_normal_angle(arom_a_coords, arom_b_coords):
    """Return the angle (degrees, in [0, 90]) between the two ring normals.

    One normal is flipped because the raw normals point in opposite
    directions; the smaller of the angle and its 180-degree complement is
    returned so the result is orientation independent.
    """
    # get the normal vectors
    arom_norm_a, arom_norm_b = calc_arom_norms(arom_a_coords, arom_b_coords)
    # flip one of them because it is opposite the other
    cos_angle = np.dot(arom_norm_a, -1 * arom_norm_b) / (
        la.norm(arom_norm_a) * la.norm(arom_norm_b))
    # clip guards against floating-point drift just outside arccos' [-1, 1]
    # domain (the old try/except RuntimeWarning referenced an undefined name
    # and left the angle unbound when it fired)
    ring_normal_angle = np.degrees(np.arccos(np.clip(cos_angle, -1.0, 1.0)))
    # if normals happen to be opposite directions correct and get
    # the angle that is non-negative and smallest
    alt_angle = 180 - ring_normal_angle
    ring_normal_angle = min(ring_normal_angle, alt_angle) if not\
        alt_angle < 0 else ring_normal_angle
    return ring_normal_angle
# @classmethod
# def check_T_distance(cls, distance):
# """For a float distance checks if it is less than the configuration
# file HBOND_DON_ANGLE_MIN value.
# """
# if distance > masticinxconfig.PISTACK_T_DIST:
# return True
# else:
# return False
# @classmethod
# def my_check_ring_normal_angle(cls, angle,
# dev=ring_normal_angle_deviation):
# """For a float distance checks if it is less than the configuration
# file HBOND_DON_ANGLE_MIN value.
# """
# if (angle > 180.0 - dev and angle < 180.0 + dev) or \
# (angle > 360.0 - dev and angle < 0.0 + dev):
# return 'parallel'
# elif (angle > 90.0 - dev and angle < 90.0 + dev):
# return 'perpendicular'
# else:
# return False
# @classmethod
# def my_check(cls, arom_a_atoms, arom_b_atoms):
# # parameter initialization for return
# centroid_distance = None
# ring_normal_angle = None
# T_distance = None
# proj_centroid_distance = None
# # coordinates for atoms of aromatic rings (heavy atoms only)
# arom_a_coords = np.array([atom.coords for atom in arom_a_atoms])
# arom_b_coords = np.array([atom.coords for atom in arom_b_atoms])
# arom_coords = [arom_a_coords, arom_b_coords]
# # 1) calculate the distance between centroids
# centroid_a = atom_a_coords.mean(axis=1)
# centroid_b = atom_b_coords.mean(axis=1)
# centroids = [centroid_a, centroid_b]
# centroid_distance = cdist(centroid_a, centroid_b)[0,0]
# # if this passes then move on
# if cls.check_centroid_distance(distance) is False:
# return (False, centroid_distance, ring_normal_angle,
# T_distance, proj_centroid_distance,)
# # 2) determine whether it is parallel or perpendicular stacking
# # 2.1) calculate the normal vectors of the rings by using
# # vectors from the centroid to 2 different points on the ring
# arom_plane_vectors = []
# arom_norms = []
# for i, atom_coords in enumerate(arom_coords):
# # choose the atoms
# a0 = atom_coords[0]
# if len(atom_coords) in [6,5]:
# a1 = 3
# else:
# raise InteractionError("aromatic rings without 5 or 6 atoms not supported")
# a0c = a0 - centroid[i]
# arom_plane_vectors.append(a0c)
# a1c = a1 - centroid
# norm = a0c.cross(a1c)
# arom_norms.append(norm)
# # 2.2) calculate the angle between the normal vectors
# try:
# ring_normal_angle = np.degrees(np.arccos(
# np.dot(arom_norms[0], arom_norms[1])/(la.norm(
# arom_norms[0]) * la.norm(arom_norms[1]))))
# except RuntimeWarning:
# print("v1: {0} \n"
# "v2: {1}".format(arom_norms[0], arom_norms[1]))
# # 2.3) check the ring normal angles, we expect a string if it
# # passed or False if it failed
# ring_normal_result = cls.check_ring_normal_angle(ring_normal_angle)
# if ring_normal_result is False:
# return (False, centroid_distance, ring_normal_angle,
# T_distance, proj_centroid_distance,)
# # 3) Project the closest carbon onto the other ring to see if
# # it is oriented correctly. A different atom will be use for each
# # 3.1) Get the atom to be projected for each condition.
# # 3.1.p) The stacking is parallel
# ref_arom_idx = None
# proj_arom_idx = None
# proj_atom_idx = None
# # there is an extra parameter that needs checked for the T
# T_distance = None
# elif result == 'parallel':
# # our choice doesn't matter here so arbitrarily choose
# # reference ring
# ref_arom_idx = 0
# proj_arom_idx = 1
# # we need to find which atom is closest to the centroid of
# # the reference
# proj_arom_coords = arom_coords[proj_arom_idx]
# ref_centroid = centroids[ref_arom_idx]
# proj_arom_ds = cdist(proj_arom_coords, [ref_centroid])
# proj_atom_idx = arom_a_ds.argmin()
# # 3.1.t) The stacking is perpendicular (T-stacking)
# elif result == 'perpendicular':
# # 3.1.t.1) find which ring bisects the other by getting the
# # distances from each atom of the rings to the centroid
# # the other ring
# arom_a_ds = cdist(arom_a_coords, [centroid_b])
# arom_b_ds = cdist(arom_b_coords, [centroid_a])
# arom_ds = [arom_a_ds, arom_b_ds]
# # take the minimum from each comparison
# arom_a_min_d = min(arom_a_ds)
# arom_b_min_d = min(arom_b_ds)
# # the one that is closer is the bisecting aromatic ring
# mins = np.array([arom_a_min_d, arom_b_min_d])
# T_distance = mins.min()
# # if this meets the distance requirement we save the index
# # of (0 or 1) based on input, or return a False tuple
# T_distance_result = cls.check_T_distance(T_distance)
# if T_distance_result is False:
# return (False, centroid_distance ring_normal_angle,
# T_distance, proj_centroid_distance,)
# elif T_distance_result is True:
# # set the appropriate reference etc.
# ref_arom_idx = mins.argmin()
# proj_arom_idx = mins.argmax()
# proj_atom_idx = arom_ds[mins.argmin()].argmin()
# else:
# raise InteractionError("unknown result from check_T_distance")
# else:
# raise InteractionError("unknown result from check_ring_normal_angle")
# # 3.2) project the point to the reference ring plane
# proj_point = arom_plane_vectors[ref_arom_idx] * \
# (arom_coords[proj_arom_idx][proj_atom_idx].dot(
# arom_plane_vectors[ref_arom_idx]))
# proj_centroid_distance = cdist([proj_point], [centroids[ref_arom_idx]])
# offset_result = cls.check_offset_distance(proj_centroid_distance)
# if offset_result is False:
# return (False, centroid_distance ring_normal_angle,
# T_distance, proj_centroid_distance,)
# elif offset_result is True:
# return (True, centroid_distance ring_normal_angle,
# T_distance, proj_centroid_distance,)
# else:
# raise InteractionError("unknown result from check_projection_centroid_distance")
class PiStackingInx(Interaction):
    """Concrete pi-stacking interaction between two aromatic ring features
    of PiStackingType.
    """
    interaction_type = PiStackingType
    def __init__(self, arom_a, arom_b,
                 check=True,
                 interaction_class=None,
                 **param_values):
        """Create the interaction, optionally validating the geometry first.

        arom_a, arom_b : the two aromatic ring feature objects
        check : when True, run PiStackingType.check and raise
            InteractionError if the geometry does not qualify
        """
        if check:
            # use the default settings for the interaction_type only
            # for implicit checks, the idea is that we want the user
            # to mutate the InteractionType to change the
            # classification criteria
            # NOTE(review): check() is handed the atom lists here, but the
            # test_features_* methods access `.atoms` on their arguments --
            # confirm whether the feature objects should be passed instead
            okay, param_values = self.interaction_type.check(arom_a.atoms,
                                                             arom_b.atoms,)
            if not okay:
                raise InteractionError
        # success, finish creating interaction
        atom_system = arom_a.system
        super().__init__(features=[arom_a, arom_b],
                         interaction_type=self.interaction_type,
                         system=atom_system,
                         **param_values)
        self._arom_a = arom_a
        self._arom_b = arom_b
    @property
    def arom_a(self):
        # first aromatic ring feature
        return self._arom_a
    @property
    def arom_b(self):
        # second aromatic ring feature
        return self._arom_b
    @property
    def record(self):
        """Return a PiStackingInxRecord with this interaction's parameters."""
        record_attr = {'interaction_class' : self.interaction_class.name}
        return PiStackingInxRecord(**record_attr, **self.interaction_params)
# PiStackingInxRecord
# concrete interaction record: the interaction class name plus one field
# per measured geometric parameter (see PiStackingType.interaction_param_keys)
_pi_stacking_inx_record_fields = ['interaction_class'] + \
                                 PiStackingType.interaction_param_keys
PiStackingInxRecord = namedtuple('PiStackingInxRecord', _pi_stacking_inx_record_fields)
#### parallel yaw calculations
# # There are 3 different parallel possibilities for 6 member rings:
# # - parallel stacked, yaw parallel
# # 77s from Bahrach
# # - parallel stacked, yaw perpendicular
# # 77s' from Bahrach
# # - parallel displaced, yaw parallel
# # 77pd from Bahrach
# # 3.p.1) calculate the angle betwee yaw vectors for each 6
# # member ring
# # calculate the projected vector from arom_b to arom_a plane
# arom_b_proj_vector = arom_vectors[0] * (arom_vectors[1].dot(arom_vectors[0]))
# # calculate the yaw angle
# yaw_angle = np.degrees(np.arccos(
# np.dot(arom_vectors[0], arom_b_proj_vector)/(la.norm(
# arom_vectors[0]) * la.norm(arom_b_proj_vector))))
# return (True, distance, ring_normal_angle, yaw_angle, )
# # yaw_result = cls.check_yaw(yaw_angle)
# # if yaw_result is False:
# # return (False, distance, ring_normal_angle, yaw_angle, )
# # else:
# # return (True, distance, ring_normal_angle, yaw_angle, )
# # 3.p.2) for either parallel or perpendicular yaw
# # elif yaw_result == 'parallel-stacked':
# # pass
# # elif yaw_result == 'parallel-displaced':
# # pass
# # elif yaw_result == 'perpendicular':
# # pass
# @classmethod
# def check_yaw(cls, angle):
# """For a float distance checks if it is less than the configuration
# file HBOND_DON_ANGLE_MIN value.
# """
# if angle > masticinxconfig.HBOND_DON_ANGLE_MIN:
# return True
# else:
# return False
| salotz/mast | mastic/interactions/pi_stacking.py | Python | mit | 22,652 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# $Id$
"""
CGI - TestBox Interaction (see testboxscript or the other party).
"""
__copyright__ = \
"""
Copyright (C) 2012-2014 Oracle Corporation
This file is part of VirtualBox Open Source Edition (OSE), as
available from http://www.virtualbox.org. This file is free software;
you can redistribute it and/or modify it under the terms of the GNU
General Public License (GPL) as published by the Free Software
Foundation, in version 2 as it comes in the "COPYING" file of the
VirtualBox OSE distribution. VirtualBox OSE is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
The contents of this file may alternatively be used under the terms
of the Common Development and Distribution License Version 1.0
(CDDL) only, as it comes in the "COPYING.CDDL" file of the
VirtualBox OSE distribution, in which case the provisions of the
CDDL are applicable instead of those of the GPL.
You may elect to license modified versions of this file under the
terms and conditions of either the GPL or the CDDL or both.
"""
__version__ = "$Revision$"
# Standard python imports.
import os
import sys
# Only the main script needs to modify the path.
g_ksValidationKitDir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))));
sys.path.append(g_ksValidationKitDir);
# Validation Kit imports.
from testmanager import config;
from testmanager.core.webservergluecgi import WebServerGlueCgi;
from testmanager.core.testboxcontroller import TestBoxController;
def main():
    """
    Main function a la C/C++. Returns exit code.
    """
    # fHtmlOutput=False: the testbox script protocol is plain text, not HTML
    oSrvGlue = WebServerGlueCgi(g_ksValidationKitDir, fHtmlOutput = False);
    oCtrl = TestBoxController(oSrvGlue);
    try:
        oCtrl.dispatchRequest()
        oSrvGlue.flush();
    except Exception as oXcpt:
        # report any dispatch failure as an error page; errorPage's return
        # value becomes our exit code
        return oSrvGlue.errorPage('Internal error: %s' % (str(oXcpt),),
                                  sys.exc_info(),
                                  config.g_ksTestBoxDispXpctLog);
    return 0;
if __name__ == '__main__':
sys.exit(main());
| svn2github/vbox | src/VBox/ValidationKit/testmanager/cgi/testboxdisp.py | Python | gpl-2.0 | 2,121 |
# Copyright 2014 eBay Inc.
#
# Author: Ron Rickard <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
import six
from oslo_log import log as logging
from designate import exceptions
from designate import utils
from designate.backend import base
LOG = logging.getLogger(__name__)
# default mDNS master port for AXFRs; not referenced in this module --
# TODO(review): confirm whether callers import it before removing
DEFAULT_MASTER_PORT = 5354
class Bind9Backend(base.Backend):
    """Designate backend driver that manages slave zones on a BIND9 server
    through `rndc addzone`/`rndc delzone` commands.
    """
    __plugin_name__ = 'bind9'
    __backend_status__ = 'integrated'
    def __init__(self, target):
        super(Bind9Backend, self).__init__(target)
        # DNS host/port used when notifying the server of zone changes
        self.host = self.options.get('host', '127.0.0.1')
        self.port = int(self.options.get('port', 53))
        # rndc control-channel connection settings
        self.rndc_host = self.options.get('rndc_host', '127.0.0.1')
        self.rndc_port = int(self.options.get('rndc_port', 953))
        self.rndc_config_file = self.options.get('rndc_config_file')
        self.rndc_key_file = self.options.get('rndc_key_file')
    def create_domain(self, context, domain):
        """Create a slave zone for `domain` on the BIND9 server and notify
        mDNS; an already-existing zone is not treated as an error."""
        LOG.debug('Create Domain')
        masters = []
        for master in self.masters:
            host = master['host']
            port = master['port']
            masters.append('%s port %s' % (host, port))
        # Ensure different MiniDNS instances are targeted for AXFRs
        random.shuffle(masters)
        rndc_op = [
            'addzone',
            '%s { type slave; masters { %s;}; file "slave.%s%s"; };' %
            (domain['name'].rstrip('.'), '; '.join(masters), domain['name'],
             domain['id']),
        ]
        try:
            self._execute_rndc(rndc_op)
        except exceptions.Backend as e:
            # If create fails because the domain exists, don't reraise
            if "already exists" not in six.text_type(e):
                raise
        self.mdns_api.notify_zone_changed(
            context, domain, self.host, self.port, self.timeout,
            self.retry_interval, self.max_retries, self.delay)
    def delete_domain(self, context, domain):
        """Delete the slave zone for `domain`; an already-missing zone is
        not treated as an error."""
        LOG.debug('Delete Domain')
        rndc_op = [
            'delzone',
            '%s' % domain['name'].rstrip('.'),
        ]
        try:
            self._execute_rndc(rndc_op)
        except exceptions.Backend as e:
            # If domain is already deleted, don't reraise
            if "not found" not in six.text_type(e):
                raise
    def _rndc_base(self):
        """Build the base rndc command line from the configured options."""
        rndc_call = [
            'rndc',
            '-s', self.rndc_host,
            '-p', str(self.rndc_port),
        ]
        if self.rndc_config_file:
            rndc_call.extend(
                ['-c', self.rndc_config_file])
        if self.rndc_key_file:
            rndc_call.extend(
                ['-k', self.rndc_key_file])
        return rndc_call
    def _execute_rndc(self, rndc_op):
        """Run a single rndc operation, wrapping any process failure in
        exceptions.Backend."""
        try:
            rndc_call = self._rndc_base()
            rndc_call.extend(rndc_op)
            LOG.debug('Executing RNDC call: %s' % " ".join(rndc_call))
            utils.execute(*rndc_call)
        except utils.processutils.ProcessExecutionError as e:
            LOG.debug('RNDC call failure: %s' % e)
            raise exceptions.Backend(e)
| tonyli71/designate | designate/backend/impl_bind9.py | Python | apache-2.0 | 3,615 |
# Copyright 2015 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
class FlaskResponse(object):
    """Adapter giving a Flask test-client response a requests-like

    interface: status_code/data/headers attributes plus a json() method.
    """

    def __init__(self, response):
        self.response = response
        # Mirror the commonly used response fields directly on the wrapper.
        for attribute in ('status_code', 'data', 'headers'):
            setattr(self, attribute, getattr(response, attribute))

    def json(self):
        """Decode the response body as JSON."""
        return json.loads(self.response.data)
class FlaskRequest(object):
    """Adapter exposing a Flask test client through a requests-like API.

    Each HTTP verb forwards to the underlying client and wraps the
    result in a FlaskResponse.
    """

    def __init__(self, flask_client):
        self.flask_client = flask_client

    def get(self, url, **kwargs):
        raw = self.flask_client.get(path=url, **kwargs)
        return FlaskResponse(raw)

    def post(self, url, **kwargs):
        raw = self.flask_client.post(path=url, **kwargs)
        return FlaskResponse(raw)

    def put(self, url, **kwargs):
        raw = self.flask_client.put(path=url, **kwargs)
        return FlaskResponse(raw)

    def delete(self, url, **kwargs):
        raw = self.flask_client.delete(path=url, **kwargs)
        return FlaskResponse(raw)
| lindycoder/netman | tests/adapters/flask_helper.py | Python | apache-2.0 | 1,418 |
import aiohttp
def example_processor():  # extends processor from gen_scraperV2
    """Return a no-op async route handler.

    Template for gen_scraperV2 processors: the returned coroutine
    function receives a scrape bundle and (here) does nothing with it.
    """
    async def route(scrape_bundle):
        return None

    return route
| puddl3glum/gen_scraper | example/example_processor.py | Python | mit | 158 |
from __future__ import print_function
# --- NodeBox canvas and font setup (runs once at import time) ---
size(800,700)
background(0.7)
speed( 2 )
import itertools
import pprint
# Set the font and create the text path.
fonts = fontnames()
# Characters cycled through by draw(); includes non-ASCII glyphs to
# exercise each font's coverage.
chars = (u"abcdefghijklmnopqrstuvwxyzäöüß"
         u"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZÄÖÜ"
         u"!§$%&/()=¡“¶¢[]|{}≠»„‰¸˝ˇÁÛØ∏°ÅÍ™ÏÌÓıˆflŒÆ‡ÙÇ◊‹›˘˛÷—")
# Font used for the on-screen caption; fall back to Helvetica if the
# preferred Chicago face is not installed.
announcefont = "Chicago" if "Chicago" in fonts else "Helvetica"
# Sentinel menu entry meaning "pick a random font on every frame".
randomfont = "--RANDOM FONT--"
fontMenu = [ randomfont ]
fontMenu.extend( fonts )
currentFont = randomfont
def selectFont( fontname ):
    """Menu callback: remember the font chosen in the "Font" menu."""
    global currentFont
    currentFont = fontname
# Register the font menu with the NodeBox variable panel.
var("Font", MENU, handler=selectFont, menuitems=fontMenu)
def setup():
    """NodeBox setup hook: clear the canvas to light gray."""
    background(0.7)
def box( b ):
    """Draw an unfilled outline rectangle for the bounds object *b*."""
    push()
    # fill()/stroke()/strokewidth() presumably return the previous
    # setting, which is restored after pop() below — confirm against
    # NodeBox docs.
    oldfill = fill( None )
    nofill()
    oldstrokewidth = strokewidth( 0.5 )
    oldstroke = stroke( 0 )
    x, y = b.origin.x, b.origin.y
    w, h = b.size.width, b.size.height
    rect( x,y,w,h)
    pop()
    # Restore the drawing state captured above.
    fill( oldfill )
    strokewidth( oldstrokewidth )
    stroke( oldstroke )
def label( p, s ):
    """Draw small text *s* slightly offset from point *p* (used for

    per-segment index labels)."""
    push()
    # Capture the previous font settings so they can be restored below.
    fs = fontsize(9)
    f = font( "Geneva" )
    fl = fill( 0 )
    x, y, = p
    text( s, x+4,y-4 )
    pop()
    fontsize(fs)
    font( f )
    fill( fl )
def marker(p, style):
    """Draw a point marker at *p*.

    style 0: hollow circle (on-curve point);
    style 1: smaller translucent red disc (off-curve control point).
    """
    push()
    oldfill = fill( None )
    nofill()
    r = 5
    if style == 1:
        # Control points get a smaller, red, semi-transparent disc.
        fill(1,0,0, 0.6)
        r = 3
    d = 2 * r
    oldstrokewidth = strokewidth( 0.5 )
    oldstroke = stroke( 0 )
    x, y, = p
    oval( x-r, y-r, d, d )
    pop()
    # Restore the drawing state captured above.
    fill( oldfill )
    strokewidth( oldstrokewidth )
    stroke( oldstroke )
# Frame counter used to cycle through the characters in `chars`.
i = 0
def draw():
    """NodeBox per-frame hook: render one glyph as an outlined path and

    annotate its on-curve points, control points and segment indices."""
    global i
    background( 0.7 )
    # Pick the font: either the menu selection or a random one per frame.
    if currentFont == randomfont:
        f = choice( fonts )
    else:
        f = currentFont
    char = chars[ i % len(chars) ]
    i += 1
    fontsize( 450 )
    tp = textpath(char, 200, 500, width=WIDTH, font=f)
    fill(0.85,0.85,0.85, 0.5)
    stroke(0)
    strokewidth(0.5)
    drawpath( tp.copy() )
    # Caption: the glyph and the font name it was rendered with.
    fontsize(30)
    font( announcefont )
    s = u"%s %s" % (char, f)
    fill(1)
    text(s, 10, 35, outline=False)
    print( f )
    # remember last point
    currentpoint = (0,0)
    # box around the char
    box( tp.bounds )
    idx = 0
    for segment in tp:
        p = (segment.x, segment.y)
        cmd = segment.cmd
        if cmd in (MOVETO, LINETO):
            # make a on-curve point
            marker(p,0)
        elif cmd == CURVETO:
            # make a on-curve point
            marker(p, 0)
            # make 2 of-curve points
            ctrl1 = (segment.ctrl1.x, segment.ctrl1.y)
            marker( ctrl1, 1)
            ctrl2 = (segment.ctrl2.x, segment.ctrl2.y)
            marker( ctrl2, 1)
            # Connect each control point to its on-curve anchor.
            line(currentpoint[0], currentpoint[1],ctrl1[0], ctrl1[1])
            line(p[0], p[1],ctrl2[0], ctrl2[1])
        else:
            pass
        label( p, str(idx) )
        currentpoint = p
        idx += 1
| karstenw/nodebox-pyobjc | examples/New Functions/Example var menu 1.py | Python | mit | 2,919 |
#############################################################################
# Copyright (c) 2010 by Casey Duncan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name(s) of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AS IS AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#############################################################################
from __future__ import division
from numpy import (array, zeros, argmin, arange, hypot, abs as npabs,
logical_and as land, logical_not as lnot)
import planar
from planar import Vec2
from planar.util import cached_property
class BoundingBox(object):
    """An axis-aligned immutable rectangular shape described
    by two points that define the minimum and maximum
    corners.

    :param points: Iterable containing one or more :class:`~planar.Vec2`
        objects.
    """

    def __init__(self, points):
        self._init_min_max(points)

    def _init_min_max(self, points):
        """Compute and store the min/max corner points from ``points``.

        NOTE: the numpy column reductions used here should be faster
        than an explicit Python min/max loop on large point sets.
        """
        xs, ys = array(points).T
        # "* 1.0" coerces integer coordinates to floats.
        min_x = xs.min() * 1.0
        min_y = ys.min() * 1.0
        max_x = xs.max() * 1.0
        max_y = ys.max() * 1.0
        self._min = planar.Vec2(min_x, min_y)
        self._max = planar.Vec2(max_x, max_y)
        self._edge_segments = None  # built lazily by edge_segments

    @property
    def bounding_box(self):
        """The bounding box for this shape. For a BoundingBox instance,
        this is always itself.
        """
        return self

    @property
    def min_point(self):
        """The minimum corner point for the shape. This is the corner
        with the smallest x and y value.
        """
        return self._min

    @property
    def max_point(self):
        """The maximum corner point for the shape. This is the corner
        with the largest x and y value.
        """
        return self._max

    @property
    def width(self):
        """The width of the box."""
        return self._max.x - self._min.x

    @property
    def height(self):
        """The height of the box."""
        return self._max.y - self._min.y

    @property
    def edge_segments(self):
        """The edges of the bounding box as LineSegments, computed once
        and cached."""
        if self._edge_segments is None:
            self._edge_segments = []
            verts = [self._min, (self._min.x, self._max.y),
                self._max, (self._max.x, self._min.y), self._min]
            for i in range(4):
                self._edge_segments.append(
                    planar.LineSegment(verts[i], verts[i + 1] - verts[i]))
        return self._edge_segments

    @cached_property
    def center(self):
        """The center point of the box."""
        return (self._min + self._max) / 2.0

    @cached_property
    def is_empty(self):
        """True if the box has zero area."""
        width, height = self._max - self._min
        return not width or not height

    @classmethod
    def from_points(cls, points):
        """Create a bounding box that encloses all of the specified points.
        """
        box = object.__new__(cls)
        box._init_min_max(points)
        return box

    @classmethod
    def from_shapes(cls, shapes):
        """Creating a bounding box that completely encloses all of the
        shapes provided.

        :raises ValueError: if no shapes are provided.
        """
        shapes = iter(shapes)
        try:
            # next() instead of .next() keeps this compatible with both
            # Python 2.6+ and Python 3.
            shape = next(shapes)
        except StopIteration:
            raise ValueError(
                "BoundingBox.from_shapes(): requires at least one shape")
        min_x, min_y = shape.bounding_box.min_point
        max_x, max_y = shape.bounding_box.max_point
        for shape in shapes:
            x, y = shape.bounding_box.min_point
            if x < min_x:
                min_x = x
            if y < min_y:
                min_y = y
            x, y = shape.bounding_box.max_point
            if x > max_x:
                max_x = x
            if y > max_y:
                max_y = y
        box = object.__new__(cls)
        box._min = planar.Vec2(min_x, min_y)
        box._max = planar.Vec2(max_x, max_y)
        # BUG FIX: this constructor bypasses _init_min_max, so the cache
        # slot must be initialized here or the edge_segments property
        # raises AttributeError.
        box._edge_segments = None
        return box

    @classmethod
    def from_center(cls, center, width, height):
        """Create a bounding box centered at a particular point.

        :param center: Center point
        :type center: :class:`~planar.Vec2`
        :param width: Box width.
        :type width: float
        :param height: Box height.
        :type height: float
        """
        cx, cy = center
        half_w = width * 0.5
        half_h = height * 0.5
        return cls.from_points([
            (cx - half_w, cy - half_h),
            (cx + half_w, cy + half_h),
            ])

    def inflate(self, amount):
        """Return a new box resized from this one. The new
        box has its size changed by the specified amount,
        but remains centered on the same point.

        :param amount: The quantity to add to the width and
            height of the box. A scalar value changes
            both the width and height equally. A vector
            will change the width and height independently.
            Negative values reduce the size accordingly.
        :type amount: float or :class:`~planar.Vec2`
        """
        try:
            dx, dy = amount
        except (TypeError, ValueError):
            dx = dy = amount * 1.0
        dv = planar.Vec2(dx, dy) / 2.0
        return self.from_points((self._min - dv, self._max + dv))

    def contains_point(self, point):
        """Return True if the box contains the specified point.

        Note the half-open tests: points on the min-x and max-y edges
        count as inside; points on the max-x and min-y edges do not.

        :param other: A point vector
        :type other: :class:`~planar.Vec2`
        :rtype: bool
        """
        x, y = point
        return (self._min.x <= x < self._max.x
            and self._min.y < y <= self._max.y)

    def contains_points(self, points):
        """Like contains_point but takes a list or array of points and
        returns a boolean array."""
        xs, ys = array(points).T
        return land(land(self._min.x <= xs, xs < self._max.x),
                    land(self._min.y < ys, ys <= self._max.y))

    def distance_to(self, point):
        """Return the distance between the given point and this box.

        Points inside the box have distance 0.
        """
        x, y = point
        lt_min_x = x < self._min.x
        le_max_x = x <= self._max.x
        lt_min_y = y < self._min.y
        le_max_y = y <= self._max.y
        # The point lies in one of the nine regions formed by extending
        # the box edges: corner regions use point-to-corner distance,
        # edge regions use the perpendicular axis distance, and the
        # middle region (inside the box) is zero.  Using the unpacked
        # x/y locals (rather than point.x/point.y) also makes the edge
        # cases work for plain tuples, matching the corner cases.
        if lt_min_x:
            if lt_min_y:
                return self._min.distance_to(point)
            elif le_max_y:
                return self._min.x - x
            else:
                return Vec2(self._min.x, self._max.y).distance_to(point)
        elif le_max_x:
            if lt_min_y:
                return self._min.y - y
            elif le_max_y:
                return 0
            else:
                return y - self._max.y
        else:
            if lt_min_y:
                return Vec2(self._max.x, self._min.y).distance_to(point)
            elif le_max_y:
                return x - self._max.x
            else:
                return self._max.distance_to(point)

    def distance_to_points(self, points):
        """Like distance_to but takes a list or array of points and
        returns an array of distances.

        For each axis, the signed distance to the nearer edge is
        selected, negative (inside) values are clamped to zero, and the
        two axis distances are combined with hypot.
        """
        xs, ys = array(points).T
        xds = array([self._min.x - xs, xs - self._max.x])
        yds = array([self._min.y - ys, ys - self._max.y])
        r = arange(len(xs))
        xds = xds[argmin(npabs(xds), axis=0), r]
        yds = yds[argmin(npabs(yds), axis=0), r]
        xds[xds < 0] = 0
        yds[yds < 0] = 0
        return hypot(xds, yds)

    def signed_distance_to_points(self, points):
        """Returns negative distance if point is inside"""
        #TODO implement signed_distance_to_points?
        raise NotImplementedError

    def project(self, point):
        """Project ``point`` onto this box (delegates to the polygon
        representation)."""
        #TODO implement box specific version
        return self.to_polygon().project(point)

    def project_points(self, points):
        """Like project but takes a list or array of points."""
        #TODO implement box specific version
        return self.to_polygon().project_points(points)

    def _distance_to_line_ray_or_segment(self, lros):
        """Shared helper: minimum distance from ``lros`` (a line, ray or
        segment) to any edge of this box."""
        min_dist = float('inf')
        # BUG FIX: edge_segments is a property, not a method; the
        # original called it, raising "'list' object is not callable".
        for edge in self.edge_segments:
            dist = lros.distance_to_segment(edge)
            if dist < min_dist:
                min_dist = dist
        return min_dist

    def distance_to_line(self, line):
        """Minimum distance from a line to this box's edges."""
        return self._distance_to_line_ray_or_segment(line)

    def distance_to_ray(self, ray):
        """Minimum distance from a ray to this box's edges."""
        return self._distance_to_line_ray_or_segment(ray)

    def distance_to_segment(self, segment):
        """Minimum distance from a line segment to this box's edges."""
        return self._distance_to_line_ray_or_segment(segment)

    def distance_to_box(self, box):
        """Distance between two boxes; 0 if they touch or overlap."""
        #TODO: optimize distance_to_box?
        xdelta = ydelta = 0
        if self._min.x > box._max.x:
            xdelta = self._min.x - box._max.x
        elif self._max.x < box._min.x:
            xdelta = box._min.x - self._max.x
        if self._min.y > box._max.y:
            ydelta = self._min.y - box._max.y
        elif self._max.y < box._min.y:
            ydelta = box._min.y - self._max.y
        return hypot(xdelta, ydelta)

    def distance_to_polygon(self, poly):
        """Distance to a polygon; delegates to the polygon's own
        box-distance implementation."""
        return poly.distance_to_box(self)

    def fit(self, shape):
        """Create a new shape by translating and scaling shape so that
        it fits in this bounding box. The shape is scaled evenly so that
        it retains the same aspect ratio.

        :param shape: A transformable shape with a bounding box.
        """
        if isinstance(shape, BoundingBox):
            scale = min(self.width / shape.width, self.height / shape.height)
            return shape.from_center(
                self.center, shape.width * scale, shape.height * scale)
        else:
            shape_bbox = shape.bounding_box
            offset = planar.Affine.translation(self.center - shape_bbox.center)
            scale = planar.Affine.scale(min(self.width / shape_bbox.width,
                self.height / shape_bbox.height))
            return shape * (offset * scale)

    def to_polygon(self):
        """Return a rectangular :class:`~planar.Polygon` object with the same
        vertices as the bounding box.

        :rtype: :class:`~planar.Polygon`
        """
        return planar.Polygon([
            self._min, (self._min.x, self._max.y),
            self._max, (self._max.x, self._min.y)],
            is_convex=True)

    def __eq__(self, other):
        return (self.__class__ is other.__class__
            and self.min_point == other.min_point
            and self.max_point == other.max_point)

    def __ne__(self, other):
        return not self.__eq__(other)

    def almost_equals(self, other):
        """Return True if this bounding box is approximately equal to another
        box, within precision limits.
        """
        return (self.__class__ is other.__class__
            and self.min_point.almost_equals(other.min_point)
            and self.max_point.almost_equals(other.max_point))

    def __repr__(self):
        """Precise string representation."""
        return "BoundingBox([(%r, %r), (%r, %r)])" % (
            self.min_point.x, self.min_point.y,
            self.max_point.x, self.max_point.y)

    __str__ = __repr__

    def __mul__(self, other):
        # Multiplying by a rectilinear transform yields another box;
        # any other transform falls back to the polygon representation.
        try:
            rectilinear = other.is_rectilinear
        except AttributeError:
            return NotImplemented
        if rectilinear:
            return self.from_points(
                [self._min * other, self._max * other])
        else:
            p = self.to_polygon()
            p *= other
            return p

    __rmul__ = __mul__
# vim: ai ts=4 sts=4 et sw=4 tw=78
| wrightjb/bolt-planar | box.py | Python | bsd-3-clause | 15,045 |
# gridpanel-5.py
from wax import *
class MainFrame(Frame):
    """Demo frame: a 3x3 GridPanel of buttons illustrating the expand,

    border and alignment options of AddComponent."""
    def Body(self):
        gp = GridPanel(self, rows=3, cols=3, hgap=0, vgap=0)
        # First column and first row: plain buttons that expand to fill
        # their cells.
        gp.AddComponent(0, 0, Button(gp, "(0, 0)"), expand=1)
        gp.AddComponent(1, 0, Button(gp, "(1, 0)"), expand=1)
        gp.AddComponent(2, 0, Button(gp, "(2, 0)"), expand=1)
        gp.AddComponent(0, 1, Button(gp, "(0, 1)"), expand=1)
        gp.AddComponent(0, 2, Button(gp, "(0, 2)"), expand=1)
        # Center cell: non-expanding button with a 10px border and 'th'
        # alignment (presumably top/horizontal-center — confirm against
        # the wax GridPanel docs).
        yellow_button = Button(gp, "(1, 1)", BackgroundColor='yellow')
        gp.AddComponent(1, 1, yellow_button, border=10, align='th', expand=0)
        gp.Pack()
        self.AddComponent(gp, expand='both')
        self.Pack()
        # set the size so the alignment of the yellow button shows, together
        # with the border
        self.Size = 300, 300
# Create the wax application around MainFrame and start its event loop.
app = Application(MainFrame)
app.Run()
| MSMBA/msmba-workflow | msmba-workflow/srclib/wax/examples/gridpanel-5.py | Python | gpl-2.0 | 859 |
# Copyright (C) 2008-2010 Adam Olsen
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
# The developers of the Exaile media player hereby grant permission
# for non-GPL compatible GStreamer and Exaile plugins to be used and
# distributed together with GStreamer and Exaile. This permission is
# above and beyond the permissions granted by the GPL license by which
# Exaile is covered. If you modify this code, you may extend this
# exception to your version of the code, but you are not obligated to
# do so. If you do not wish to do so, delete this exception statement
# from your version.
# Here's where it all begins.....
#
# Holds the main Exaile class, whose instantiation starts up the entiriety
# of Exaile and which also handles Exaile shutdown.
#
# Also takes care of parsing commandline options.
import os
import platform
import sys
import threading
from xl import logger_setup
from xl.nls import gettext as _
# Imported later to avoid PyGObject imports just for --help.
Gio = common = xdg = None
def _do_heavy_imports():
    """Perform the slow PyGObject-based imports on demand.

    Deferred so that e.g. ``exaile --help`` does not pay the GObject
    import cost.  gi.require_version() must be called for each
    introspected namespace before the corresponding gi.repository
    import, which is why the pins come first.
    """
    global Gio, common, xdg

    import gi
    gi.require_version('Gdk', '3.0')
    gi.require_version('Gtk', '3.0')
    gi.require_version('Gst', '1.0')
    gi.require_version('GIRepository', '2.0')

    from gi.repository import Gio
    from xl import common, xdg
# placeholder, - xl.version can be slow to import, which would slow down
# cli args. Thus we import __version__ later.
__version__ = None
logger = None
def create_argument_parser():
    """Create command-line argument parser for Exaile.

    Returns an :class:`argparse.ArgumentParser` with all of Exaile's
    playback, collection, playlist, track, volume and development
    options registered.  Option help strings are translated via the
    xl.nls gettext ``_``.
    """
    import argparse

    # argparse hard-codes "usage:" uncapitalized. We replace this with an
    # empty string and put "Usage:" in the actual usage string instead.
    class Formatter(argparse.HelpFormatter):
        def _format_usage(self, usage, actions, groups, prefix):
            # BUG FIX: super(self.__class__, ...) recurses infinitely if
            # this formatter is ever subclassed; name the class explicitly.
            return super(Formatter, self)._format_usage(usage, actions, groups, "")

    p = argparse.ArgumentParser(
        usage=_("Usage: exaile [OPTION...] [LOCATION...]"),
        description=_("Launch Exaile, optionally adding tracks specified by"
        " LOCATION to the active playlist."
        " If Exaile is already running, this attempts to use the existing"
        " instance instead of creating a new one."),
        add_help=False, formatter_class=Formatter)

    # Positional track locations (hidden from --help output).
    p.add_argument('locs', nargs='*', help=argparse.SUPPRESS)

    group = p.add_argument_group(_('Playback Options'))
    group.add_argument("-n", "--next", dest="Next", action="store_true",
        default=False, help=_("Play the next track"))
    group.add_argument("-p", "--prev", dest="Prev", action="store_true",
        default=False, help=_("Play the previous track"))
    group.add_argument("-s", "--stop", dest="Stop", action="store_true",
        default=False, help=_("Stop playback"))
    group.add_argument("-a", "--play", dest="Play", action="store_true",
        default=False, help=_("Play"))
    group.add_argument("-u", "--pause", dest="Pause", action="store_true",
        default=False, help=_("Pause"))
    group.add_argument("-t", "--play-pause", dest="PlayPause",
        action="store_true", default=False, help=_("Pause or resume playback"))
    group.add_argument("--stop-after-current", dest="StopAfterCurrent",
        action="store_true", default=False,
        help=_("Stop playback after current track"))

    group = p.add_argument_group(_('Collection Options'))
    group.add_argument("--add", dest="Add",
        # TRANSLATORS: Meta variable for --add and --export-playlist
        metavar=_("LOCATION"),
        help=_("Add tracks from LOCATION to the collection"))

    group = p.add_argument_group(_('Playlist Options'))
    group.add_argument("--export-playlist", dest="ExportPlaylist",
        # TRANSLATORS: Meta variable for --add and --export-playlist
        metavar=_("LOCATION"),
        help=_('Export the current playlist to LOCATION'))

    group = p.add_argument_group(_('Track Options'))
    group.add_argument("-q", "--query", dest="Query", action="store_true",
        default=False, help=_("Query player"))
    group.add_argument("--format-query", dest="FormatQuery",
        # TRANSLATORS: Meta variable for --format-query
        metavar=_('FORMAT'),
        help=_('Retrieve the current playback state and track information as FORMAT'))
    group.add_argument("--format-query-tags", dest="FormatQueryTags",
        # TRANSLATORS: Meta variable for --format-query-tags
        metavar=_('TAGS'),
        help=_('Tags to retrieve from the current track; use with --format-query'))
    group.add_argument("--gui-query", dest="GuiQuery", action="store_true",
        default=False, help=_("Show a popup with data of the current track"))
    group.add_argument("--get-title", dest="GetTitle", action="store_true",
        default=False, help=_("Print the title of current track"))
    group.add_argument("--get-album", dest="GetAlbum", action="store_true",
        default=False, help=_("Print the album of current track"))
    group.add_argument("--get-artist", dest="GetArtist", action="store_true",
        default=False, help=_("Print the artist of current track"))
    group.add_argument("--get-length", dest="GetLength", action="store_true",
        default=False, help=_("Print the length of current track"))
    # NOTE: "%" must be escaped as "%%" because argparse interprets "%"
    # in help strings as printf-style formatting.
    group.add_argument('--set-rating', dest="SetRating", type=int,
        # TRANSLATORS: Variable for command line options with arguments
        metavar=_('N'),
        help=_('Set rating for current track to N%').replace("%", "%%"))
    group.add_argument('--get-rating', dest='GetRating', action='store_true',
        default=False, help=_('Get rating for current track'))
    group.add_argument("--current-position", dest="CurrentPosition",
        action="store_true", default=False,
        help=_("Print the current playback position as time"))
    group.add_argument("--current-progress", dest="CurrentProgress",
        action="store_true", default=False,
        help=_("Print the current playback progress as percentage"))

    group = p.add_argument_group(_('Volume Options'))
    group.add_argument("-i", "--increase-vol", dest="IncreaseVolume", type=int,
        # TRANSLATORS: Meta variable for --increase-vol and--decrease-vol
        metavar=_("N"),
        help=_("Increase the volume by N%").replace("%", "%%"))
    group.add_argument("-l", "--decrease-vol", dest="DecreaseVolume", type=int,
        # TRANSLATORS: Meta variable for --increase-vol and--decrease-vol
        metavar=_("N"),
        help=_("Decrease the volume by N%").replace("%", "%%"))
    group.add_argument("-m", "--toggle-mute", dest="ToggleMute",
        action="store_true", default=False,
        help=_("Mute or unmute the volume"))
    group.add_argument("--get-volume", dest="GetVolume", action="store_true",
        default=False, help=_("Print the current volume percentage"))

    group = p.add_argument_group(_('Other Options'))
    group.add_argument("--new", dest="NewInstance", action="store_true",
        default=False, help=_("Start new instance"))
    group.add_argument("-h", "--help", action="help",
        help=_("Show this help message and exit"))
    group.add_argument("--version", dest="ShowVersion", action="store_true",
        help=_("Show program's version number and exit."))
    group.add_argument("--start-minimized", dest="StartMinimized",
        action="store_true", default=False,
        help=_("Start minimized (to tray, if possible)"))
    group.add_argument("--toggle-visible", dest="GuiToggleVisible",
        action="store_true", default=False,
        help=_("Toggle visibility of the GUI (if possible)"))
    group.add_argument("--safemode", dest="SafeMode", action="store_true",
        default=False, help=_("Start in safe mode - sometimes"
        " useful when you're running into problems"))
    group.add_argument("--force-import", dest="ForceImport",
        action="store_true", default=False, help=_("Force import of old data"
        " from version 0.2.x (overwrites current data)"))
    group.add_argument("--no-import", dest="NoImport",
        action="store_true", default=False, help=_("Do not import old data"
        " from version 0.2.x"))
    group.add_argument("--start-anyway", dest="StartAnyway",
        action="store_true", default=False, help=_("Make control options like"
        " --play start Exaile if it is not running"))

    group = p.add_argument_group(_('Development/Debug Options'))
    group.add_argument("--datadir", dest="UseDataDir",
        metavar=_('DIRECTORY'), help=_("Set data directory"))
    group.add_argument("--all-data-dir", dest="UseAllDataDir",
        metavar=_('DIRECTORY'), help=_("Set data and config directory"))
    group.add_argument("--modulefilter", dest="ModuleFilter",
        metavar=_('MODULE'), help=_('Limit log output to MODULE'))
    group.add_argument("--levelfilter", dest="LevelFilter",
        metavar=_('LEVEL'), help=_('Limit log output to LEVEL'),
        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'])
    group.add_argument("--debug", dest="Debug", action="store_true",
        default=False, help=_("Show debugging output"))
    group.add_argument("--eventdebug", dest="DebugEvent",
        action="store_true", default=False, help=_("Enable debugging of"
        " xl.event. Generates lots of output"))
    group.add_argument("--eventdebug-full", dest="DebugEventFull",
        action="store_true", default=False, help=_("Enable full debugging of"
        " xl.event. Generates LOTS of output"))
    group.add_argument("--threaddebug", dest="DebugThreads",
        action="store_true", default=False, help=_("Add thread name to logging"
        " messages."))
    group.add_argument("--eventfilter", dest="EventFilter", metavar=_('TYPE'),
        help=_("Limit xl.event debug to output of TYPE"))
    group.add_argument("--quiet", dest="Quiet", action="store_true",
        default=False, help=_("Reduce level of output"))
    # Internal flag set by the launcher; intentionally undocumented.
    group.add_argument('--startgui', dest='StartGui', action='store_true',
        default=False)
    group.add_argument('--no-dbus', dest='Dbus', action='store_false',
        default=True, help=_("Disable D-Bus support"))
    group.add_argument('--no-hal', dest='Hal', action='store_false',
        default=True, help=_("Disable HAL support."))

    return p
class Exaile(object):
    # Class-level handle to the singleton instance; assigned at the end
    # of __init() once loading has finished.
    _exaile = None

    def __get_player(self):
        # Deprecated accessor kept so old plugin code fails loudly with
        # a pointer to the replacement API.
        raise DeprecationWarning('Using exaile.player is deprecated: '
                'import xl.player.PLAYER instead.')

    def __get_queue(self):
        raise DeprecationWarning('Using exaile.queue is deprecated: '
                'import xl.player.QUEUE instead.')

    def __get_lyrics(self):
        raise DeprecationWarning('Using exaile.lyrics is deprecated: '
                'import xl.lyrics.MANAGER instead.')

    # Deprecated properties: accessing any of them raises the
    # DeprecationWarning above.
    player = property(__get_player)
    queue = property(__get_queue)
    lyrics = property(__get_lyrics)
    def __init__(self):
        """
        Initializes Exaile.

        Parses command-line options, sets up data directories and
        logging, optionally hands control to an already-running
        instance over D-Bus, delegates the heavy startup work to
        __init(), and finally runs the GUI main loop if requested.
        """
        self.quitting = False
        self.loading = True

        # NOTE: This automatically exits on --help.
        self.options = create_argument_parser().parse_args()

        if self.options.ShowVersion:
            self.version()
            return

        _do_heavy_imports()

        if self.options.UseDataDir:
            xdg.data_dirs.insert(1, self.options.UseDataDir)

        # this is useful on Win32, because you cannot set these directories
        # via environment variables
        if self.options.UseAllDataDir:
            xdg.data_home = self.options.UseAllDataDir
            xdg.data_dirs.insert(0, xdg.data_home)
            xdg.config_home = self.options.UseAllDataDir
            xdg.config_dirs.insert(0, xdg.config_home)
            xdg.cache_home = self.options.UseAllDataDir

        try:
            xdg._make_missing_dirs()
        except OSError as e:
            print >> sys.stderr, 'ERROR: Could not create configuration directories: %s' % e
            return

        # Make event debug imply debug
        if self.options.DebugEventFull:
            self.options.DebugEvent = True
        if self.options.DebugEvent:
            self.options.Debug = True

        try:
            logger_setup.start_logging(self.options.Debug,
                                       self.options.Quiet,
                                       self.options.DebugThreads,
                                       self.options.ModuleFilter,
                                       self.options.LevelFilter)
        except OSError as e:
            print >> sys.stderr, 'ERROR: could not setup logging: %s' % e
            return

        # Bind the module-level logger only after logging is configured.
        global logger
        import logging
        logger = logging.getLogger(__name__)

        try:
            # Late import ensures xl.event uses correct logger
            from xl import event

            if self.options.EventFilter:
                event.EVENT_MANAGER.logger_filter = self.options.EventFilter
                self.options.DebugEvent = True
            if self.options.DebugEvent:
                event.EVENT_MANAGER.use_logger = True
            if self.options.DebugEventFull:
                event.EVENT_MANAGER.use_verbose_logger = True

            # initial mainloop setup. The actual loop is started later,
            # if necessary
            self.mainloop_init()

            #initialize DbusManager
            if self.options.StartGui and self.options.Dbus:
                from xl import xldbus
                # check_exit may decide an existing instance should
                # handle this invocation instead of starting a new one.
                exit = xldbus.check_exit(self.options, self.options.locs)
                if exit == "exit":
                    sys.exit(0)
                elif exit == "command":
                    if not self.options.StartAnyway:
                        sys.exit(0)
                self.dbus = xldbus.DbusManager(self)

            # import version, see note above
            global __version__
            from xl.version import __version__

            #load the rest.
            self.__init()

            #handle delayed commands
            if self.options.StartGui and self.options.Dbus and \
                    self.options.StartAnyway and exit == "command":
                xldbus.run_commands(self.options, self.dbus)

            #connect dbus signals
            if self.options.StartGui and self.options.Dbus:
                self.dbus._connect_signals()

            # On SIGTERM, quit normally.
            import signal
            signal.signal(signal.SIGTERM, (lambda sig, stack: self.quit()))

            # run the GUIs mainloop, if needed
            if self.options.StartGui:
                import xlgui
                xlgui.mainloop()
        except KeyboardInterrupt:
            logger.exception("User exited program")
        except:
            logger.exception("Unhandled exception")
def __init(self):
"""
Initializes Exaile
"""
# pylint: disable-msg=W0201
logger.info("Loading Exaile %s on Python %s..." % (__version__, platform.python_version()))
logger.info("Loading settings...")
try:
from xl import settings
except common.VersionError:
logger.exception("Error loading settings")
sys.exit(1)
logger.debug("Settings loaded from %s" % settings.location)
# display locale information if available
try:
import locale
lc, enc = locale.getlocale()
if enc is not None:
logger.info("Using %s %s locale" % (lc, enc))
else:
logger.info("Using unknown locale")
except:
pass
splash = None
if self.options.StartGui:
from xl import settings
if settings.get_option('gui/use_splash', True):
from xlgui.widgets.info import Splash
splash = Splash()
splash.show()
firstrun = settings.get_option("general/first_run", True)
if not self.options.NoImport and \
(firstrun or self.options.ForceImport):
try:
sys.path.insert(0, xdg.get_data_path("migrations"))
import migration_200907100931 as migrator
del sys.path[0]
migrator.migrate(force=self.options.ForceImport)
del migrator
except:
logger.exception("Failed to migrate from 0.2.14")
# Migrate old rating options
from xl.migrations.settings import rating
rating.migrate()
# Migrate builtin OSD to plugin
from xl.migrations.settings import osd
osd.migrate()
# Migrate engines
from xl.migrations.settings import engine
engine.migrate()
# TODO: enable audio plugins separately from normal
# plugins? What about plugins that use the player?
# Gstreamer doesn't initialize itself automatically, and fails
# miserably when you try to inherit from something and GST hasn't
# been initialized yet. So this is here.
from gi.repository import Gst
Gst.init(None)
# Initialize plugin manager
from xl import plugins
self.plugins = plugins.PluginsManager(self)
if not self.options.SafeMode:
logger.info("Loading plugins...")
self.plugins.load_enabled()
else:
logger.info("Safe mode enabled, not loading plugins.")
# Initialize the collection
logger.info("Loading collection...")
from xl import collection
try:
self.collection = collection.Collection("Collection",
location=os.path.join(xdg.get_data_dir(), 'music.db'))
except common.VersionError:
logger.exception("VersionError loading collection")
sys.exit(1)
from xl import event
# Set up the player and playback queue
from xl import player
event.log_event("player_loaded", player.PLAYER, None)
# Initalize playlist manager
from xl import playlist
self.playlists = playlist.PlaylistManager()
self.smart_playlists = playlist.PlaylistManager('smart_playlists',
playlist.SmartPlaylist)
if firstrun:
self._add_default_playlists()
event.log_event("playlists_loaded", self, None)
# Initialize dynamic playlist support
from xl import dynamic
dynamic.MANAGER.collection = self.collection
        # Initialize device manager
logger.info("Loading devices...")
from xl import devices
self.devices = devices.DeviceManager()
event.log_event("device_manager_ready", self, None)
# Initialize dynamic device discovery interface
# -> if initialized and connected, then the object is not None
self.udisks2 = None
self.udisks = None
self.hal = None
if self.options.Hal:
from xl import hal
udisks2 = hal.UDisks2(self.devices)
if udisks2.connect():
self.udisks2 = udisks2
else:
udisks = hal.UDisks(self.devices)
if udisks.connect():
self.udisks = udisks
else:
self.hal = hal.HAL(self.devices)
self.hal.connect()
else:
self.hal = None
# Radio Manager
from xl import radio
self.stations = playlist.PlaylistManager('radio_stations')
self.radio = radio.RadioManager()
self.gui = None
# Setup GUI
if self.options.StartGui:
logger.info("Loading interface...")
import xlgui
self.gui = xlgui.Main(self)
self.gui.main.window.show_all()
event.log_event("gui_loaded", self, None)
if splash is not None:
splash.destroy()
restore = True
if self.gui:
# Find out if the user just passed in a list of songs
# TODO: find a better place to put this
# using arg[2:] because arg[1:] will include --startgui
args = [ Gio.File.new_for_path(arg).get_uri() for arg in self.options.locs ]
if len(args) > 0:
restore = False
self.gui.open_uri(args[0], play=True)
for arg in args[1:]:
self.gui.open_uri(arg)
# kick off autoscan of libraries
# -> don't do it in command line mode, since that isn't expected
self.gui.rescan_collection_with_progress(True)
if restore:
player.QUEUE._restore_player_state(
os.path.join(xdg.get_data_dir(), 'player.state'))
if firstrun:
settings.set_option("general/first_run", False)
self.loading = False
Exaile._exaile = self
event.log_event("exaile_loaded", self, None)
# pylint: enable-msg=W0201
def __show_splash(self):
"""
Displays the splash screen
"""
from xl import settings
if not settings.get_option('gui/use_splash', True):
return
from xlgui.widgets.info import Splash
splash = Splash()
splash.show()
def version(self):
from xl.version import __version__
print "Exaile", __version__
sys.exit(0)
def _add_default_playlists(self):
"""
Adds some default smart playlists to the playlist manager
"""
from xl import playlist
# entire playlist
entire_lib = playlist.SmartPlaylist(_("Entire Library"),
collection=self.collection)
self.smart_playlists.save_playlist(entire_lib, overwrite=True)
# random playlists
for count in (100, 300, 500):
pl = playlist.SmartPlaylist(_("Random %d") % count,
collection=self.collection)
pl.set_return_limit(count)
pl.set_random_sort(True)
self.smart_playlists.save_playlist(pl, overwrite=True)
# rating based playlists
for item in (3, 4):
pl = playlist.SmartPlaylist(_("Rating > %d") % item,
collection=self.collection)
pl.add_param('__rating', '>', item)
self.smart_playlists.save_playlist(pl, overwrite=True)
    def mainloop_init(self):
        """
        Set up the main loop machinery.

        Logs a warning for unsupported PyGObject versions, optionally
        installs the DBus GLib main loop, and -- when no GUI is requested --
        starts a daemon thread that pumps a private GLib main loop context
        so events are still dispatched in headless mode.
        """
        from gi.repository import GObject

        major, minor, patch = GObject.pygobject_version
        logger.info("Using PyGObject %d.%d.%d", major, minor, patch)

        # Minimum supported version is 3.10.2; older versions only warn.
        if major < 3 or \
           (major == 3 and minor < 10) or \
           (major == 3 and minor == 10 and patch < 2):
            # Probably should exit?
            logger.warning("Exaile requires PyGObject 3.10.2 or greater!")

        if self.options.Dbus:
            import dbus, dbus.mainloop.glib

            # Make the GLib main loop the default for all DBus connections
            # and enable thread support before any bus activity happens.
            dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
            dbus.mainloop.glib.threads_init()
            dbus.mainloop.glib.gthreads_init()

        if not self.options.StartGui:
            from gi.repository import GLib

            # Headless mode: no Gtk main loop will run, so iterate a GLib
            # context from a daemon thread instead (see __mainloop).
            loop = GLib.MainLoop()
            context = loop.get_context()
            t = threading.Thread(target=self.__mainloop, args=(context,))
            t.daemon = True
            t.start()
def __mainloop(self, context):
while 1:
try:
context.iteration(True)
except:
pass
    def get_version(self):
        """
        Returns the current version

        :returns: the module-level version string
        """
        return __version__
def get_user_agent_string(self, plugin_name=None):
'''
Returns an approrpiately formatted User-agent string for
web requests. When possible, plugins should use this to
format user agent strings.
Users can control this agent string by manually setting
general/user_agent and general/user_agent_w_plugin in settings.ini
:param plugin_name: the name of the plugin
'''
version = __version__
if '+' in version: # strip out revision identifier
version = version[:version.index('+')]
fmt = {
'version': version
}
if not hasattr(self, '_user_agent_no_plugin'):
from xl import settings
default_no_plugin = 'Exaile/%(version)s (+http://www.exaile.org)'
default_plugin = 'Exaile/%(version)s %(plugin_name)s/%(plugin_version)s (+http://www.exaile.org)'
self._user_agent_no_plugin = \
settings.get_option('general/user_agent', default_no_plugin)
self._user_agent_w_plugin = \
settings.get_option('general/user_agent_w_plugin', default_plugin)
if plugin_name is not None:
plugin_info = self.plugins.get_plugin_info(plugin_name)
fmt['plugin_name'] = plugin_info['Name'].replace(' ', '')
fmt['plugin_version'] = plugin_info['Version']
return self._user_agent_w_plugin % fmt
else:
return self._user_agent_no_plugin % fmt
    def quit(self, restart=False):
        """
        Exits Exaile normally. Takes care of saving
        preferences, databases, etc.

        :param restart: Whether to directly restart
        :type restart: bool
        """
        # Guard against re-entrant shutdown (e.g. repeated quit signals).
        if self.quitting:
            return
        self.quitting = True
        logger.info("Exaile is shutting down...")

        logger.info("Disabling plugins...")
        # Give every enabled plugin a chance to clean up; a broken plugin
        # must not abort the shutdown sequence, hence the blanket except.
        for k, plugin in self.plugins.enabled_plugins.iteritems():
            if hasattr(plugin, 'teardown'):
                try:
                    plugin.teardown(self)
                except:
                    pass

        from xl import event

        # this event should be used by modules that dont need
        # to be saved in any particular order. modules that might be
        # touched by events triggered here should be added statically
        # below.
        event.log_event("quit_application", self, None)

        logger.info("Saving state...")
        self.plugins.save_enabled()

        if self.gui:
            self.gui.quit()

        from xl import covers

        covers.MANAGER.save()
        self.collection.save_to_location()

        # Save order of custom playlists
        self.playlists.save_order()
        self.stations.save_order()

        # save player, queue
        from xl import player

        player.QUEUE._save_player_state(
            os.path.join(xdg.get_data_dir(), 'player.state') )
        player.QUEUE.save_to_location(
            os.path.join(xdg.get_data_dir(), 'queue.state') )
        player.PLAYER.stop()

        from xl import settings

        settings.MANAGER.save()

        if restart:
            logger.info("Restarting...")
            logger_setup.stop_logging()
            python = sys.executable
            if sys.platform == 'win32':
                # Python Win32 bug: it does not quote individual command line
                # arguments. Here we do it ourselves and pass the whole thing
                # as one string.
                # See https://bugs.python.org/issue436259 (closed wontfix).
                import subprocess

                cmd = [python] + sys.argv
                cmd = subprocess.list2cmdline(cmd)
                os.execl(python, cmd)
            else:
                # execl replaces the current process image; nothing below
                # runs in the restart case.
                os.execl(python, python, *sys.argv)

        logger.info("Bye!")
        logger_setup.stop_logging()
        sys.exit(0)
def exaile():
    """Return the global Exaile instance, raising if startup is unfinished."""
    instance = Exaile._exaile
    if not instance:
        raise AttributeError(_("Exaile is not yet finished loading"
                            ". Perhaps you should listen for the exaile_loaded"
                            " signal?"))
    return instance
# vim: et sts=4 sw=4
| virtuald/exaile | xl/main.py | Python | gpl-2.0 | 28,843 |
import numpy as np
from threeML.bayesian.sampler_base import MCMCSampler
from threeML.config.config import threeML_config
from threeML.parallel.parallel_client import ParallelClient
from astromodels import use_astromodels_memoization
# Optional dependency: the zeus-mcmc ensemble slice sampler. Record its
# availability so ZeusSampler can fail with a clear message later.
try:
    import zeus
except:
    has_zeus = False
else:
    has_zeus = True

try:
    # see if we have mpi and/or are using parallel
    from mpi4py import MPI

    if MPI.COMM_WORLD.Get_size() > 1:  # need parallel capabilities
        using_mpi = True

        comm = MPI.COMM_WORLD
        rank = comm.Get_rank()

        # Pool executor used to distribute posterior evaluations.
        from mpi4py.futures import MPIPoolExecutor
    else:
        using_mpi = False
except:
    # mpi4py missing or MPI unusable: run single-process.
    using_mpi = False
class ZeusSampler(MCMCSampler):
    """MCMC sampler backed by the zeus-mcmc ensemble slice sampler."""

    def __init__(self, likelihood_model=None, data_list=None, **kwargs):

        assert has_zeus, "You must install zeus-mcmc to use this sampler"

        super(ZeusSampler, self).__init__(likelihood_model, data_list, **kwargs)

    def setup(self, n_iterations, n_burn_in=None, n_walkers=20, seed=None):
        """
        Configure the run before calling :meth:`sample`.

        :param n_iterations: number of iterations to keep after burn-in
        :param n_burn_in: burn-in iterations; defaults to n_iterations / 4
        :param n_walkers: number of walkers in the ensemble
        :param seed: random seed (currently not forwarded to zeus; see the
            commented-out code in :meth:`sample`)
        """

        self._n_iterations = int(n_iterations)

        if n_burn_in is None:
            # Default burn-in is a quarter of the requested iterations.
            self._n_burn_in = int(np.floor(n_iterations / 4.0))

        else:

            self._n_burn_in = n_burn_in

        self._n_walkers = int(n_walkers)

        self._seed = seed

        self._is_setup = True

    def sample(self, quiet=False):
        """
        Run the sampler and store the raw samples, log-likelihood and
        log-probability values on self.

        :param quiet: if True, suppress progress output and the summary
        :returns: the samples dictionary
        """

        assert self._is_setup, "You forgot to setup the sampler!"

        loud = not quiet

        self._update_free_parameters()

        n_dim = len(list(self._free_parameters.keys()))

        # Get starting point

        p0 = self._get_starting_points(self._n_walkers)

        # Deactivate memoization in astromodels, which is useless in this case since we will never use twice the
        # same set of parameters
        with use_astromodels_memoization(False):

            if using_mpi:

                # MPI mode: distribute posterior evaluations over the pool;
                # the sampling itself runs inside the executor context.
                with MPIPoolExecutor() as executor:

                    sampler = zeus.sampler(
                        logprob_fn=self.get_posterior,
                        nwalkers=self._n_walkers,
                        ndim=n_dim,
                        pool=executor,
                    )

                    # if self._seed is not None:

                    #     sampler._random.seed(self._seed)

                    # Run the true sampling

                    _ = sampler.run(
                        p0, self._n_iterations + self._n_burn_in, progress=loud,
                    )

            elif threeML_config["parallel"]["use-parallel"]:

                # ipyparallel mode: use an engine view as the pool.
                c = ParallelClient()
                view = c[:]

                sampler = zeus.sampler(
                    logprob_fn=self.get_posterior,
                    nwalkers=self._n_walkers,
                    ndim=n_dim,
                    pool=view,
                )

            else:

                # Serial mode.
                sampler = zeus.sampler(
                    logprob_fn=self.get_posterior, nwalkers=self._n_walkers, ndim=n_dim
                )

            # If a seed is provided, set the random number seed

            # if self._seed is not None:

            #     sampler._random.seed(self._seed)

            # Sample the burn-in

            # Non-MPI pools run the sampling here (the MPI branch already ran).
            if not using_mpi:
                _ = sampler.run(p0, self._n_iterations + self._n_burn_in, progress=loud)

        self._sampler = sampler

        # Flattened chain with the burn-in discarded.
        self._raw_samples = sampler.flatten(discard=self._n_burn_in)

        # Compute the corresponding values of the likelihood

        # First we need the prior

        log_prior = np.array([self._log_prior(x) for x in self._raw_samples])

        self._log_probability_values = sampler.get_log_prob(flat=True, discard=self._n_burn_in)

        # np.array(
        #     [self.get_posterior(x) for x in self._raw_samples]
        # )

        # Now we get the log posterior and we remove the log prior

        self._log_like_values = self._log_probability_values - log_prior

        # we also want to store the log probability

        self._marginal_likelihood = None

        self._build_samples_dictionary()

        self._build_results()

        # Display results

        if loud:
            print(self._sampler.summary)

            self._results.display()

        return self.samples
| giacomov/3ML | threeML/bayesian/zeus_sampler.py | Python | bsd-3-clause | 4,215 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-11-25 04:08
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the many-to-many "favorite" relation between Currency and the
    # (swappable) user model, so users can mark currencies as favorites.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('currency', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='currency',
            name='favorite',
            field=models.ManyToManyField(related_name='favoritesCurrencies', to=settings.AUTH_USER_MODEL),
        ),
    ]
| sebastienbarbier/723e_server | seven23/models/currency/migrations/0002_currency_favorite.py | Python | mit | 599 |
# -*- coding: utf-8 -*-
num1 = 0x1010;
num2 = 11;
num3 = 10.25;
num4 = 1 + 1j;
num5 = "12.25";
num6 = "1245";
print cmp(num1,num2);
print str(num1);
print type(num1);
print float(num1);
print long(num1);
print complex(num2);
print "float('12.25') = %f" % (float(num5));
print long(num6);
| zhangjl/python-learn-notes | src/number.py | Python | gpl-2.0 | 295 |
# Unix SMB/CIFS implementation.
# Copyright (C) Michael Adam 2012
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import time
import ldb
from samba.tests.samba_tool.base import SambaToolCmdTest
from samba import (
nttime2unix,
dsdb
)
class GroupCmdTestCase(SambaToolCmdTest):
    """Tests for samba-tool group subcommands"""
    groups = []
    samdb = None

    def setUp(self):
        """Connect to the DC and pre-create four known test groups."""
        super(GroupCmdTestCase, self).setUp()
        self.samdb = self.getSamDB("-H", "ldap://%s" % os.environ["DC_SERVER"],
            "-U%s%%%s" % (os.environ["DC_USERNAME"], os.environ["DC_PASSWORD"]))
        self.groups = []
        self.groups.append(self._randomGroup({"name": "testgroup1"}))
        self.groups.append(self._randomGroup({"name": "testgroup2"}))
        self.groups.append(self._randomGroup({"name": "testgroup3"}))
        self.groups.append(self._randomGroup({"name": "testgroup4"}))

        # setup the 4 groups and ensure they are correct
        for group in self.groups:
            (result, out, err) = self._create_group(group)

            self.assertCmdSuccess(result)
            self.assertEquals(err, "", "There shouldn't be any error message")
            self.assertIn("Added group %s" % group["name"], out)

            found = self._find_group(group["name"])

            self.assertIsNotNone(found)
            self.assertEquals("%s" % found.get("name"), group["name"])
            self.assertEquals("%s" % found.get("description"), group["description"])

    def tearDown(self):
        """Delete any of our test groups still present on the DC."""
        super(GroupCmdTestCase, self).tearDown()
        # clean up all the left over groups, just in case
        for group in self.groups:
            if self._find_group(group["name"]):
                self.runsubcmd("group", "delete", group["name"])

    def test_newgroup(self):
        """This tests the "group add" and "group delete" commands"""
        # try to add all the groups again, this should fail
        for group in self.groups:
            (result, out, err) = self._create_group(group)
            self.assertCmdFail(result, "Succeeded to create existing group")
            self.assertIn("LDAP error 68 LDAP_ENTRY_ALREADY_EXISTS", err)

        # try to delete all the groups we just added
        for group in self.groups:
            (result, out, err) = self.runsubcmd("group", "delete", group["name"])
            self.assertCmdSuccess(result,
                "Failed to delete group '%s'" % group["name"])
            found = self._find_group(group["name"])
            self.assertIsNone(found,
                "Deleted group '%s' still exists" % group["name"])

        # test adding groups
        for group in self.groups:
            (result, out, err) = self.runsubcmd("group", "add", group["name"],
                "--description=%s" % group["description"],
                "-H", "ldap://%s" % os.environ["DC_SERVER"],
                "-U%s%%%s" % (os.environ["DC_USERNAME"],
                              os.environ["DC_PASSWORD"]))

            self.assertCmdSuccess(result)
            self.assertEquals(err,"","There shouldn't be any error message")
            self.assertIn("Added group %s" % group["name"], out)

            found = self._find_group(group["name"])

            self.assertEquals("%s" % found.get("samaccountname"),
                              "%s" % group["name"])

    def test_list(self):
        """`samba-tool group list` must report every group present in samdb."""
        (result, out, err) = self.runsubcmd("group", "list",
            "-H", "ldap://%s" % os.environ["DC_SERVER"],
            "-U%s%%%s" % (os.environ["DC_USERNAME"],
                          os.environ["DC_PASSWORD"]))
        self.assertCmdSuccess(result, "Error running list")

        search_filter = "(objectClass=group)"

        grouplist = self.samdb.search(base=self.samdb.domain_dn(),
                                      scope=ldb.SCOPE_SUBTREE,
                                      expression=search_filter,
                                      attrs=["samaccountname"])

        self.assertTrue(len(grouplist) > 0, "no groups found in samdb")

        # every group found by direct LDAP query must appear in the output
        for groupobj in grouplist:
            name = groupobj.get("samaccountname", idx=0)
            found = self.assertMatch(out, name,
                                     "group '%s' not found" % name)

    def test_listmembers(self):
        """Members listed for 'Domain Users' must match a direct LDAP query."""
        (result, out, err) = self.runsubcmd("group", "listmembers", "Domain Users",
            "-H", "ldap://%s" % os.environ["DC_SERVER"],
            "-U%s%%%s" % (os.environ["DC_USERNAME"],
                          os.environ["DC_PASSWORD"]))
        self.assertCmdSuccess(result, "Error running listmembers")

        # 513 is the well-known RID of Domain Users (primary group membership).
        search_filter = "(|(primaryGroupID=513)(memberOf=CN=Domain Users,CN=Users,%s))" % self.samdb.domain_dn()

        grouplist = self.samdb.search(base=self.samdb.domain_dn(),
                                      scope=ldb.SCOPE_SUBTREE,
                                      expression=search_filter,
                                      attrs=["samAccountName"])

        self.assertTrue(len(grouplist) > 0, "no groups found in samdb")

        for groupobj in grouplist:
            name = groupobj.get("samAccountName", idx=0)
            found = self.assertMatch(out, name, "group '%s' not found" % name)

    def _randomGroup(self, base={}):
        """create a group with random attribute values, you can specify base attributes"""
        # NOTE(review): mutable default argument; harmless here because
        # `base` is only read, never mutated -- but a None default would be
        # the safer idiom.
        group = {
            "name": self.randomName(),
            "description": self.randomName(count=100),
        }
        group.update(base)
        return group

    def _create_group(self, group):
        """Invoke `samba-tool group add` for the given group dict."""
        return self.runsubcmd("group", "add", group["name"],
                              "--description=%s" % group["description"],
                              "-H", "ldap://%s" % os.environ["DC_SERVER"],
                              "-U%s%%%s" % (os.environ["DC_USERNAME"],
                                            os.environ["DC_PASSWORD"]))

    def _find_group(self, name):
        """Search samdb for a group by sAMAccountName; return the entry or None."""
        search_filter = ("(&(sAMAccountName=%s)(objectCategory=%s,%s))" %
                         (ldb.binary_encode(name),
                          "CN=Group,CN=Schema,CN=Configuration",
                          self.samdb.domain_dn()))
        grouplist = self.samdb.search(base=self.samdb.domain_dn(),
                                      scope=ldb.SCOPE_SUBTREE,
                                      expression=search_filter,
                                      attrs=[])
        if grouplist:
            return grouplist[0]
        else:
            return None
| yasoob/PythonRSSReader | venv/lib/python2.7/dist-packages/samba/tests/samba_tool/group.py | Python | mit | 7,464 |
#!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: rds
version_added: "1.3"
short_description: create, delete, or modify Amazon rds instances, rds snapshots, and related facts
description:
- Creates, deletes, or modifies rds resources.
- When creating an instance it can be either a new instance or a read-only replica of an existing instance.
- This module has a dependency on python-boto >= 2.5 and will soon be deprecated.
- The 'promote' command requires boto >= 2.18.0. Certain features such as tags rely on boto.rds2 (boto >= 2.26.0).
- Please use boto3 based M(rds_instance) instead.
options:
command:
description:
- Specifies the action to take. The 'reboot' option is available starting at version 2.0
required: true
choices: [ 'create', 'replicate', 'delete', 'facts', 'modify' , 'promote', 'snapshot', 'reboot', 'restore' ]
instance_name:
description:
- Database instance identifier. Required except when using command=facts or command=delete on just a snapshot
source_instance:
description:
- Name of the database to replicate. Used only when command=replicate.
db_engine:
description:
- The type of database. Used only when command=create.
- mariadb was added in version 2.2
choices: ['mariadb', 'MySQL', 'oracle-se1', 'oracle-se2', 'oracle-se', 'oracle-ee',
'sqlserver-ee', 'sqlserver-se', 'sqlserver-ex', 'sqlserver-web', 'postgres', 'aurora']
size:
description:
- Size in gigabytes of the initial storage for the DB instance. Used only when command=create or command=modify.
instance_type:
description:
- The instance type of the database. Must be specified when command=create. Optional when command=replicate, command=modify or command=restore.
If not specified then the replica inherits the same instance type as the source instance.
username:
description:
- Master database username. Used only when command=create.
password:
description:
- Password for the master database username. Used only when command=create or command=modify.
region:
description:
- The AWS region to use. If not specified then the value of the EC2_REGION environment variable, if any, is used.
required: true
aliases: [ 'aws_region', 'ec2_region' ]
db_name:
description:
- Name of a database to create within the instance. If not specified then no database is created. Used only when command=create.
engine_version:
description:
- Version number of the database engine to use. Used only when command=create. If not specified then the current Amazon RDS default engine version is used
parameter_group:
description:
- Name of the DB parameter group to associate with this instance. If omitted then the RDS default DBParameterGroup will be used. Used only
when command=create or command=modify.
license_model:
description:
- The license model for this DB instance. Used only when command=create or command=restore.
choices: [ 'license-included', 'bring-your-own-license', 'general-public-license', 'postgresql-license' ]
multi_zone:
description:
- Specifies if this is a Multi-availability-zone deployment. Can not be used in conjunction with zone parameter. Used only when command=create or
command=modify.
type: bool
iops:
description:
- Specifies the number of IOPS for the instance. Used only when command=create or command=modify. Must be an integer greater than 1000.
security_groups:
description:
- Comma separated list of one or more security groups. Used only when command=create or command=modify.
vpc_security_groups:
description:
- Comma separated list of one or more vpc security group ids. Also requires `subnet` to be specified. Used only when command=create or command=modify.
port:
description:
- Port number that the DB instance uses for connections. Used only when command=create or command=replicate.
- Prior to 2.0 it always defaults to null and the API would use 3306, it had to be set to other DB default values when not using MySql.
Starting at 2.0 it automatically defaults to what is expected for each C(db_engine).
default: 3306 for mysql, 1521 for Oracle, 1433 for SQL Server, 5432 for PostgreSQL.
upgrade:
description:
- Indicates that minor version upgrades should be applied automatically.
- Used only when command=create or command=modify or command=restore or command=replicate.
type: bool
default: 'no'
option_group:
description:
- The name of the option group to use. If not specified then the default option group is used. Used only when command=create.
maint_window:
description:
- >
Maintenance window in format of ddd:hh24:mi-ddd:hh24:mi. (Example: Mon:22:00-Mon:23:15) If not specified then a random maintenance window is
assigned. Used only when command=create or command=modify.
backup_window:
description:
- Backup window in format of hh24:mi-hh24:mi. If not specified then a random backup window is assigned. Used only when command=create or command=modify.
backup_retention:
description:
- >
Number of days backups are retained. Set to 0 to disable backups. Default is 1 day. Valid range: 0-35. Used only when command=create or
command=modify.
zone:
description:
- availability zone in which to launch the instance. Used only when command=create, command=replicate or command=restore.
aliases: ['aws_zone', 'ec2_zone']
subnet:
description:
- VPC subnet group. If specified then a VPC instance is created. Used only when command=create.
snapshot:
description:
- Name of snapshot to take. When command=delete, if no snapshot name is provided then no snapshot is taken. If used with command=delete with
no instance_name, the snapshot is deleted. Used with command=facts, command=delete or command=snapshot.
aws_secret_key:
description:
- AWS secret key. If not set then the value of the AWS_SECRET_KEY environment variable is used.
aliases: [ 'ec2_secret_key', 'secret_key' ]
aws_access_key:
description:
- AWS access key. If not set then the value of the AWS_ACCESS_KEY environment variable is used.
aliases: [ 'ec2_access_key', 'access_key' ]
wait:
description:
- When command=create, replicate, modify or restore then wait for the database to enter the 'available' state. When command=delete wait for
the database to be terminated.
type: bool
default: 'no'
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 300
apply_immediately:
description:
- Used only when command=modify. If enabled, the modifications will be applied as soon as possible rather than waiting for the next
preferred maintenance window.
type: bool
default: 'no'
force_failover:
description:
- Used only when command=reboot. If enabled, the reboot is done using a MultiAZ failover.
type: bool
default: 'no'
version_added: "2.0"
new_instance_name:
description:
- Name to rename an instance to. Used only when command=modify.
version_added: "1.5"
character_set_name:
description:
- Associate the DB instance with a specified character set. Used with command=create.
version_added: "1.9"
publicly_accessible:
description:
- explicitly set whether the resource should be publicly accessible or not. Used with command=create, command=replicate. Requires boto >= 2.26.0
version_added: "1.9"
tags:
description:
- tags dict to apply to a resource. Used with command=create, command=replicate, command=restore. Requires boto >= 2.26.0
version_added: "1.9"
requirements:
- "python >= 2.6"
- "boto"
author:
- "Bruce Pennypacker (@bpennypacker)"
- "Will Thames (@willthames)"
extends_documentation_fragment:
- aws
- ec2
'''
# FIXME: the command stuff needs a 'state' like alias to make things consistent -- MPD
EXAMPLES = '''
# Basic mysql provisioning example
- rds:
command: create
instance_name: new-database
db_engine: MySQL
size: 10
instance_type: db.m1.small
username: mysql_admin
password: 1nsecure
tags:
Environment: testing
Application: cms
# Create a read-only replica and wait for it to become available
- rds:
command: replicate
instance_name: new-database-replica
source_instance: new_database
wait: yes
wait_timeout: 600
# Delete an instance, but create a snapshot before doing so
- rds:
command: delete
instance_name: new-database
snapshot: new_database_snapshot
# Get facts about an instance
- rds:
command: facts
instance_name: new-database
register: new_database_facts
# Rename an instance and wait for the change to take effect
- rds:
command: modify
instance_name: new-database
new_instance_name: renamed-database
wait: yes
# Reboot an instance and wait for it to become available again
- rds:
command: reboot
instance_name: database
wait: yes
# Restore a Postgres db instance from a snapshot, wait for it to become available again, and
# then modify it to add your security group. Also, display the new endpoint.
# Note that the "publicly_accessible" option is allowed here just as it is in the AWS CLI
- local_action:
module: rds
command: restore
snapshot: mypostgres-snapshot
instance_name: MyNewInstanceName
region: us-west-2
zone: us-west-2b
subnet: default-vpc-xx441xxx
publicly_accessible: yes
wait: yes
wait_timeout: 600
tags:
Name: pg1_test_name_tag
register: rds
- local_action:
module: rds
command: modify
instance_name: MyNewInstanceName
region: us-west-2
vpc_security_groups: sg-xxx945xx
- debug:
msg: "The new db endpoint is {{ rds.instance.endpoint }}"
'''
RETURN = '''
instance:
description: the rds instance
returned: always
type: complex
contains:
engine:
description: the name of the database engine
returned: when RDS instance exists
type: string
sample: "oracle-se"
engine_version:
description: the version of the database engine
returned: when RDS instance exists
type: string
sample: "11.2.0.4.v6"
license_model:
description: the license model information
returned: when RDS instance exists
type: string
sample: "bring-your-own-license"
character_set_name:
description: the name of the character set that this instance is associated with
returned: when RDS instance exists
type: string
sample: "AL32UTF8"
allocated_storage:
description: the allocated storage size in gigabytes (GB)
returned: when RDS instance exists
type: string
sample: "100"
publicly_accessible:
description: the accessibility options for the DB instance
returned: when RDS instance exists
type: boolean
sample: "true"
latest_restorable_time:
description: the latest time to which a database can be restored with point-in-time restore
returned: when RDS instance exists
type: string
sample: "1489707802.0"
secondary_availability_zone:
description: the name of the secondary AZ for a DB instance with multi-AZ support
returned: when RDS instance exists and is multy-AZ
type: string
sample: "eu-west-1b"
backup_window:
description: the daily time range during which automated backups are created if automated backups are enabled
returned: when RDS instance exists and automated backups are enabled
type: string
sample: "03:00-03:30"
auto_minor_version_upgrade:
description: indicates that minor engine upgrades will be applied automatically to the DB instance during the maintenance window
returned: when RDS instance exists
type: boolean
sample: "true"
read_replica_source_dbinstance_identifier:
description: the identifier of the source DB instance if this RDS instance is a read replica
returned: when read replica RDS instance exists
type: string
sample: "null"
db_name:
description: the name of the database to create when the DB instance is created
returned: when RDS instance exists
type: string
sample: "ASERTG"
endpoint:
description: the endpoint uri of the database instance
returned: when RDS instance exists
type: string
sample: "my-ansible-database.asdfaosdgih.us-east-1.rds.amazonaws.com"
port:
description: the listening port of the database instance
returned: when RDS instance exists
type: int
sample: 3306
parameter_groups:
description: the list of DB parameter groups applied to this RDS instance
returned: when RDS instance exists and parameter groups are defined
type: complex
contains:
parameter_apply_status:
description: the status of parameter updates
returned: when RDS instance exists
type: string
sample: "in-sync"
parameter_group_name:
description: the name of the DP parameter group
returned: when RDS instance exists
type: string
sample: "testawsrpprodb01spfile-1ujg7nrs7sgyz"
option_groups:
description: the list of option group memberships for this RDS instance
returned: when RDS instance exists
type: complex
contains:
option_group_name:
description: the option group name for this RDS instance
returned: when RDS instance exists
type: string
sample: "default:oracle-se-11-2"
status:
description: the status of the RDS instance's option group membership
returned: when RDS instance exists
type: string
sample: "in-sync"
pending_modified_values:
description: a dictionary of changes to the RDS instance that are pending
returned: when RDS instance exists
type: complex
contains:
db_instance_class:
description: the new DB instance class for this RDS instance that will be applied or is in progress
returned: when RDS instance exists
type: string
sample: "null"
db_instance_identifier:
description: the new DB instance identifier this RDS instance that will be applied or is in progress
returned: when RDS instance exists
type: string
sample: "null"
allocated_storage:
description: the new allocated storage size for this RDS instance that will be applied or is in progress
returned: when RDS instance exists
type: string
sample: "null"
backup_retention_period:
description: the pending number of days for which automated backups are retained
returned: when RDS instance exists
type: string
sample: "null"
engine_version:
description: indicates the database engine version
returned: when RDS instance exists
type: string
sample: "null"
iops:
description: the new provisioned IOPS value for this RDS instance that will be applied or is being applied
returned: when RDS instance exists
type: string
sample: "null"
master_user_password:
description: the pending or in-progress change of the master credentials for this RDS instance
returned: when RDS instance exists
type: string
sample: "null"
multi_az:
description: indicates that the single-AZ RDS instance is to change to a multi-AZ deployment
returned: when RDS instance exists
type: string
sample: "null"
port:
description: specifies the pending port for this RDS instance
returned: when RDS instance exists
type: string
sample: "null"
db_subnet_groups:
description: information on the subnet group associated with this RDS instance
returned: when RDS instance exists
type: complex
contains:
description:
description: the subnet group associated with the DB instance
returned: when RDS instance exists
type: string
sample: "Subnets for the UAT RDS SQL DB Instance"
name:
description: the name of the DB subnet group
returned: when RDS instance exists
type: string
sample: "samplesubnetgrouprds-j6paiqkxqp4z"
status:
description: the status of the DB subnet group
returned: when RDS instance exists
type: string
sample: "complete"
subnets:
description: the description of the DB subnet group
returned: when RDS instance exists
type: complex
contains:
availability_zone:
description: subnet availability zone information
returned: when RDS instance exists
type: complex
contains:
name:
description: avaialbility zone
returned: when RDS instance exists
type: string
sample: "eu-west-1b"
provisioned_iops_capable:
description: whether provisioned iops are available in AZ subnet
returned: when RDS instance exists
type: boolean
sample: "false"
identifier:
description: the identifier of the subnet
returned: when RDS instance exists
type: string
sample: "subnet-3fdba63e"
status:
description: the status of the subnet
returned: when RDS instance exists
type: string
sample: "active"
'''
import time
try:
import boto.rds
import boto.exception
except ImportError:
pass # Taken care of by ec2.HAS_BOTO
try:
import boto.rds2
import boto.rds2.exceptions
HAS_RDS2 = True
except ImportError:
HAS_RDS2 = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import AWSRetry
from ansible.module_utils.ec2 import HAS_BOTO, connect_to_aws, ec2_argument_spec, get_aws_connection_info
# Default TCP ports per supported RDS engine family; used by main() to fill
# in module.params['port'] on 'create' when the user does not specify one.
DEFAULT_PORTS = {
    'aurora': 3306,
    'mariadb': 3306,
    'mysql': 3306,
    'oracle': 1521,
    'sqlserver': 1433,
    'postgres': 5432,
}
class RDSException(Exception):
    """Normalized wrapper for the different boto RDS error shapes.

    boto.rds raises errors with error_message/error_code attributes while
    boto.rds2 raises errors whose details live in a parsed 'body' dict; this
    class flattens both into .message and .code.
    """

    def __init__(self, exc):
        if hasattr(exc, 'error_message') and exc.error_message:
            # boto.rds style error
            self.message = exc.error_message
            self.code = exc.error_code
        elif hasattr(exc, 'body') and 'Error' in exc.body:
            # boto.rds2 style error (parsed JSON body)
            self.message = exc.body['Error']['Message']
            self.code = exc.body['Error']['Code']
        else:
            self.message = str(exc)
            self.code = 'Unknown Error'
        # Initialize the base Exception so str(e)/repr(e) show the message
        # instead of an empty string (the original never called it).
        super(RDSException, self).__init__(self.message)
class RDSConnection:
    """Wrapper over a boto.rds connection.

    Presents the same interface as RDS2Connection so the rest of the module
    can treat both boto RDS APIs uniformly; boto server errors are re-raised
    as RDSException.
    """

    def __init__(self, module, region, **aws_connect_params):
        try:
            self.connection = connect_to_aws(boto.rds, region, **aws_connect_params)
        except boto.exception.BotoServerError as e:
            module.fail_json(msg=e.error_message)

    def get_db_instance(self, instancename):
        """Return an RDSDBInstance, or None if the instance does not exist."""
        try:
            return RDSDBInstance(self.connection.get_all_dbinstances(instancename)[0])
        except boto.exception.BotoServerError:
            return None

    def get_db_snapshot(self, snapshotid):
        """Return an RDSSnapshot, or None if the snapshot does not exist."""
        try:
            return RDSSnapshot(self.connection.get_all_dbsnapshots(snapshot_id=snapshotid)[0])
        except boto.exception.BotoServerError:
            return None

    def create_db_instance(self, instance_name, size, instance_class, db_engine,
                           username, password, **params):
        params['engine'] = db_engine
        try:
            result = self.connection.create_dbinstance(instance_name, size, instance_class,
                                                       username, password, **params)
            return RDSDBInstance(result)
        except boto.exception.BotoServerError as e:
            raise RDSException(e)

    def create_db_instance_read_replica(self, instance_name, source_instance, **params):
        try:
            # Bug fix: the boto.rds method is create_dbinstance_read_replica;
            # the previous call to createdb_instance_read_replica raised
            # AttributeError, so creating a replica always failed on boto.rds.
            result = self.connection.create_dbinstance_read_replica(instance_name, source_instance, **params)
            return RDSDBInstance(result)
        except boto.exception.BotoServerError as e:
            raise RDSException(e)

    def delete_db_instance(self, instance_name, **params):
        try:
            result = self.connection.delete_dbinstance(instance_name, **params)
            return RDSDBInstance(result)
        except boto.exception.BotoServerError as e:
            raise RDSException(e)

    def delete_db_snapshot(self, snapshot):
        try:
            result = self.connection.delete_dbsnapshot(snapshot)
            return RDSSnapshot(result)
        except boto.exception.BotoServerError as e:
            raise RDSException(e)

    def modify_db_instance(self, instance_name, **params):
        try:
            result = self.connection.modify_dbinstance(instance_name, **params)
            return RDSDBInstance(result)
        except boto.exception.BotoServerError as e:
            raise RDSException(e)

    def reboot_db_instance(self, instance_name, **params):
        # NOTE(review): params is accepted for interface parity with
        # RDS2Connection, but boto.rds reboot_dbinstance takes only the id,
        # so extra params are deliberately ignored here.
        try:
            result = self.connection.reboot_dbinstance(instance_name)
            return RDSDBInstance(result)
        except boto.exception.BotoServerError as e:
            raise RDSException(e)

    def restore_db_instance_from_db_snapshot(self, instance_name, snapshot, instance_type, **params):
        try:
            result = self.connection.restore_dbinstance_from_dbsnapshot(snapshot, instance_name, instance_type, **params)
            return RDSDBInstance(result)
        except boto.exception.BotoServerError as e:
            raise RDSException(e)

    def create_db_snapshot(self, snapshot, instance_name, **params):
        try:
            result = self.connection.create_dbsnapshot(snapshot, instance_name)
            return RDSSnapshot(result)
        except boto.exception.BotoServerError as e:
            raise RDSException(e)

    def promote_read_replica(self, instance_name, **params):
        try:
            result = self.connection.promote_read_replica(instance_name, **params)
            return RDSDBInstance(result)
        except boto.exception.BotoServerError as e:
            raise RDSException(e)
class RDS2Connection:
    """Wrapper over a boto.rds2 connection.

    Presents the same interface as RDSConnection; boto server errors are
    re-raised as RDSException and response envelopes are unwrapped to the
    inner DBInstance/DBSnapshot payloads.
    """

    def __init__(self, module, region, **aws_connect_params):
        try:
            self.connection = connect_to_aws(boto.rds2, region, **aws_connect_params)
        except boto.exception.BotoServerError as e:
            module.fail_json(msg=e.error_message)

    def get_db_instance(self, instancename):
        """Return an RDS2DBInstance, or None if the instance does not exist."""
        try:
            dbinstances = self.connection.describe_db_instances(
                db_instance_identifier=instancename
            )['DescribeDBInstancesResponse']['DescribeDBInstancesResult']['DBInstances']
            return RDS2DBInstance(dbinstances[0])
        except boto.rds2.exceptions.DBInstanceNotFound:
            return None
        # Fix: dropped the no-op "except Exception as e: raise e" clause; it
        # re-raised the same exception but discarded the traceback on py2.

    def get_db_snapshot(self, snapshotid):
        """Return an RDS2Snapshot, or None if the snapshot does not exist."""
        try:
            snapshots = self.connection.describe_db_snapshots(
                db_snapshot_identifier=snapshotid,
                snapshot_type='manual'
            )['DescribeDBSnapshotsResponse']['DescribeDBSnapshotsResult']['DBSnapshots']
            return RDS2Snapshot(snapshots[0])
        except boto.rds2.exceptions.DBSnapshotNotFound:
            return None

    def create_db_instance(self, instance_name, size, instance_class, db_engine,
                           username, password, **params):
        try:
            result = self.connection.create_db_instance(instance_name, size, instance_class, db_engine, username, password,
                                                        **params)['CreateDBInstanceResponse']['CreateDBInstanceResult']['DBInstance']
            return RDS2DBInstance(result)
        except boto.exception.BotoServerError as e:
            raise RDSException(e)

    def create_db_instance_read_replica(self, instance_name, source_instance, **params):
        try:
            result = self.connection.create_db_instance_read_replica(
                instance_name,
                source_instance,
                **params
            )['CreateDBInstanceReadReplicaResponse']['CreateDBInstanceReadReplicaResult']['DBInstance']
            return RDS2DBInstance(result)
        except boto.exception.BotoServerError as e:
            raise RDSException(e)

    def delete_db_instance(self, instance_name, **params):
        try:
            result = self.connection.delete_db_instance(instance_name, **params)['DeleteDBInstanceResponse']['DeleteDBInstanceResult']['DBInstance']
            return RDS2DBInstance(result)
        except boto.exception.BotoServerError as e:
            raise RDSException(e)

    def delete_db_snapshot(self, snapshot):
        try:
            result = self.connection.delete_db_snapshot(snapshot)['DeleteDBSnapshotResponse']['DeleteDBSnapshotResult']['DBSnapshot']
            return RDS2Snapshot(result)
        except boto.exception.BotoServerError as e:
            raise RDSException(e)

    def modify_db_instance(self, instance_name, **params):
        try:
            result = self.connection.modify_db_instance(instance_name, **params)['ModifyDBInstanceResponse']['ModifyDBInstanceResult']['DBInstance']
            return RDS2DBInstance(result)
        except boto.exception.BotoServerError as e:
            raise RDSException(e)

    def reboot_db_instance(self, instance_name, **params):
        try:
            result = self.connection.reboot_db_instance(instance_name, **params)['RebootDBInstanceResponse']['RebootDBInstanceResult']['DBInstance']
            return RDS2DBInstance(result)
        except boto.exception.BotoServerError as e:
            raise RDSException(e)

    def restore_db_instance_from_db_snapshot(self, instance_name, snapshot, instance_type, **params):
        # NOTE(review): instance_type is accepted for parity with
        # RDSConnection but not forwarded here; on rds2 the instance class is
        # expected to arrive via params -- confirm against callers.
        try:
            result = self.connection.restore_db_instance_from_db_snapshot(
                instance_name,
                snapshot,
                **params
            )['RestoreDBInstanceFromDBSnapshotResponse']['RestoreDBInstanceFromDBSnapshotResult']['DBInstance']
            return RDS2DBInstance(result)
        except boto.exception.BotoServerError as e:
            raise RDSException(e)

    def create_db_snapshot(self, snapshot, instance_name, **params):
        try:
            result = self.connection.create_db_snapshot(snapshot, instance_name, **params)['CreateDBSnapshotResponse']['CreateDBSnapshotResult']['DBSnapshot']
            return RDS2Snapshot(result)
        except boto.exception.BotoServerError as e:
            raise RDSException(e)

    def promote_read_replica(self, instance_name, **params):
        try:
            result = self.connection.promote_read_replica(instance_name, **params)['PromoteReadReplicaResponse']['PromoteReadReplicaResult']['DBInstance']
            return RDS2DBInstance(result)
        except boto.exception.BotoServerError as e:
            raise RDSException(e)
class RDSDBInstance:
    """Facts view over a boto.rds DBInstance object."""

    def __init__(self, dbinstance):
        self.instance = dbinstance
        self.name = dbinstance.id
        self.status = dbinstance.status

    def get_data(self):
        """Return a plain dict of facts describing this instance."""
        inst = self.instance
        facts = {
            'id': self.name,
            'create_time': inst.create_time,
            'status': self.status,
            'availability_zone': inst.availability_zone,
            'backup_retention': inst.backup_retention_period,
            'backup_window': inst.preferred_backup_window,
            'maintenance_window': inst.preferred_maintenance_window,
            'multi_zone': inst.multi_az,
            'instance_type': inst.instance_class,
            'username': inst.master_username,
            'iops': inst.iops,
            # Endpoint details are only present on the boto object once
            # available; default them and fill in below when we can.
            'endpoint': None,
            'port': None,
            'vpc_security_groups': None,
        }
        if hasattr(inst, 'endpoint'):
            facts['endpoint'], facts['port'] = inst.endpoint[0], inst.endpoint[1]
            groups = inst.vpc_security_groups
            if groups is not None:
                facts['vpc_security_groups'] = ','.join(g.vpc_group for g in groups)
        facts['DBName'] = inst.DBName if hasattr(inst, 'DBName') else None
        # ReadReplicaSourceDBInstanceIdentifier may or may not exist
        try:
            facts['replication_source'] = inst.ReadReplicaSourceDBInstanceIdentifier
        except Exception:
            facts['replication_source'] = None
        return facts
class RDS2DBInstance:
    """Facts view over a boto.rds2 DB instance dict."""

    def __init__(self, dbinstance):
        self.instance = dbinstance
        if 'DBInstanceIdentifier' not in dbinstance:
            self.name = None
        else:
            self.name = self.instance.get('DBInstanceIdentifier')
        self.status = self.instance.get('DBInstanceStatus')

    def get_data(self):
        """Return a plain dict of facts describing this instance."""
        d = {
            'id': self.name,
            'create_time': self.instance['InstanceCreateTime'],
            'engine': self.instance['Engine'],
            'engine_version': self.instance['EngineVersion'],
            'license_model': self.instance['LicenseModel'],
            'character_set_name': self.instance['CharacterSetName'],
            'allocated_storage': self.instance['AllocatedStorage'],
            'publicly_accessible': self.instance['PubliclyAccessible'],
            'latest_restorable_time': self.instance['LatestRestorableTime'],
            'status': self.status,
            'availability_zone': self.instance['AvailabilityZone'],
            'secondary_availability_zone': self.instance['SecondaryAvailabilityZone'],
            'backup_retention': self.instance['BackupRetentionPeriod'],
            'backup_window': self.instance['PreferredBackupWindow'],
            'maintenance_window': self.instance['PreferredMaintenanceWindow'],
            'auto_minor_version_upgrade': self.instance['AutoMinorVersionUpgrade'],
            'read_replica_source_dbinstance_identifier': self.instance['ReadReplicaSourceDBInstanceIdentifier'],
            'multi_zone': self.instance['MultiAZ'],
            'instance_type': self.instance['DBInstanceClass'],
            'username': self.instance['MasterUsername'],
            'db_name': self.instance['DBName'],
            'iops': self.instance['Iops'],
            'replication_source': self.instance['ReadReplicaSourceDBInstanceIdentifier']
        }
        if self.instance['DBParameterGroups'] is not None:
            parameter_groups = []
            for x in self.instance['DBParameterGroups']:
                parameter_groups.append({'parameter_group_name': x['DBParameterGroupName'],
                                         'parameter_apply_status': x['ParameterApplyStatus']})
            d['parameter_groups'] = parameter_groups
        if self.instance['OptionGroupMemberships'] is not None:
            option_groups = []
            for x in self.instance['OptionGroupMemberships']:
                option_groups.append({'status': x['Status'], 'option_group_name': x['OptionGroupName']})
            d['option_groups'] = option_groups
        if self.instance['PendingModifiedValues'] is not None:
            pdv = self.instance['PendingModifiedValues']
            d['pending_modified_values'] = {
                'multi_az': pdv['MultiAZ'],
                'master_user_password': pdv['MasterUserPassword'],
                'port': pdv['Port'],
                'iops': pdv['Iops'],
                'allocated_storage': pdv['AllocatedStorage'],
                'engine_version': pdv['EngineVersion'],
                'backup_retention_period': pdv['BackupRetentionPeriod'],
                'db_instance_class': pdv['DBInstanceClass'],
                'db_instance_identifier': pdv['DBInstanceIdentifier']
            }
        if self.instance["DBSubnetGroup"] is not None:
            dsg = self.instance["DBSubnetGroup"]
            db_subnet_groups = {}
            db_subnet_groups['vpc_id'] = dsg['VpcId']
            db_subnet_groups['name'] = dsg['DBSubnetGroupName']
            db_subnet_groups['status'] = dsg['SubnetGroupStatus'].lower()
            db_subnet_groups['description'] = dsg['DBSubnetGroupDescription']
            db_subnet_groups['subnets'] = []
            for x in dsg["Subnets"]:
                db_subnet_groups['subnets'].append({
                    'status': x['SubnetStatus'].lower(),
                    'identifier': x['SubnetIdentifier'],
                    'availability_zone': {
                        'name': x['SubnetAvailabilityZone']['Name'],
                        'provisioned_iops_capable': x['SubnetAvailabilityZone']['ProvisionedIopsCapable']
                    }
                })
            d['db_subnet_groups'] = db_subnet_groups
        if self.instance["VpcSecurityGroups"] is not None:
            d['vpc_security_groups'] = ','.join(x['VpcSecurityGroupId'] for x in self.instance['VpcSecurityGroups'])
        if "Endpoint" in self.instance and self.instance["Endpoint"] is not None:
            d['endpoint'] = self.instance["Endpoint"].get('Address', None)
            d['port'] = self.instance["Endpoint"].get('Port', None)
        else:
            d['endpoint'] = None
            d['port'] = None
        # Bug fix: self.instance is a dict, so hasattr(self.instance, 'DBName')
        # was always False and DBName was unconditionally overwritten with
        # None; use key membership instead.
        d['DBName'] = self.instance['DBName'] if 'DBName' in self.instance else None
        return d
class RDSSnapshot:
    """Facts view over a boto.rds DBSnapshot object."""

    def __init__(self, snapshot):
        self.snapshot = snapshot
        self.name = snapshot.id
        self.status = snapshot.status

    def get_data(self):
        """Return a plain dict of facts describing this snapshot."""
        snap = self.snapshot
        facts = {
            'id': self.name,
            'create_time': snap.snapshot_create_time,
            'status': self.status,
            'availability_zone': snap.availability_zone,
            'instance_id': snap.instance_id,
            'instance_created': snap.instance_create_time,
        }
        # These attributes only exist on boto >= 2.21.0.
        for attr in ('snapshot_type', 'iops'):
            if hasattr(snap, attr):
                facts[attr] = getattr(snap, attr)
        return facts
class RDS2Snapshot:
    """Facts view over a boto.rds2 DB snapshot dict."""

    def __init__(self, snapshot):
        # delete_db_snapshot() hands us the whole response envelope; unwrap
        # it so the rest of the class only ever sees the inner snapshot dict.
        if 'DeleteDBSnapshotResponse' in snapshot:
            snapshot = snapshot['DeleteDBSnapshotResponse']['DeleteDBSnapshotResult']['DBSnapshot']
        self.snapshot = snapshot
        self.name = self.snapshot.get('DBSnapshotIdentifier')
        self.status = self.snapshot.get('Status')

    def get_data(self):
        """Return a plain dict of facts describing this snapshot."""
        snap = self.snapshot
        return {
            'id': self.name,
            'create_time': snap['SnapshotCreateTime'],
            'status': self.status,
            'availability_zone': snap['AvailabilityZone'],
            'instance_id': snap['DBInstanceIdentifier'],
            'instance_created': snap['InstanceCreateTime'],
            'snapshot_type': snap['SnapshotType'],
            'iops': snap['Iops'],
        }
def await_resource(conn, resource, status, module):
    """Poll until ``resource`` reaches ``status`` or wait_timeout expires.

    Re-fetches the resource on each pass (with AWSRetry backoff to ride out
    throttling) and returns the last fetched object; returns None if the
    resource disappeared mid-wait, which is expected for status 'deleted'.
    Fails the module on timeout.
    """
    start_time = time.time()
    wait_timeout = module.params.get('wait_timeout') + start_time
    check_interval = 5
    while wait_timeout > time.time() and resource.status != status:
        time.sleep(check_interval)
        if wait_timeout <= time.time():
            module.fail_json(msg="Timeout waiting for RDS resource %s" % resource.name)
        if module.params.get('command') == 'snapshot':
            # Temporary until all the rds2 commands have their responses parsed
            if resource.name is None:
                module.fail_json(msg="There was a problem waiting for RDS snapshot %s" % resource.snapshot)
            # Back off if we're getting throttled, since we're just waiting anyway
            resource = AWSRetry.backoff(tries=5, delay=20, backoff=1.5)(conn.get_db_snapshot)(resource.name)
        else:
            # Temporary until all the rds2 commands have their responses parsed
            if resource.name is None:
                module.fail_json(msg="There was a problem waiting for RDS instance %s" % resource.instance)
            # Back off if we're getting throttled, since we're just waiting anyway
            resource = AWSRetry.backoff(tries=5, delay=20, backoff=1.5)(conn.get_db_instance)(resource.name)
            if resource is None:
                break
        # Some RDS resources take much longer than others to be ready. Check
        # less aggressively for slow ones to avoid throttling.
        if time.time() > start_time + 90:
            check_interval = 20
    return resource
def create_db_instance(module, conn):
    """Handle the 'create' command: create the RDS instance if absent.

    Exits the module with changed=True when a new instance was created,
    changed=False when one with that name already existed.
    """
    required_vars = ['instance_name', 'db_engine', 'size', 'instance_type', 'username', 'password']
    valid_vars = ['backup_retention', 'backup_window',
                  'character_set_name', 'db_name', 'engine_version',
                  'instance_type', 'iops', 'license_model', 'maint_window',
                  'multi_zone', 'option_group', 'parameter_group', 'port',
                  'subnet', 'upgrade', 'zone']
    # VPC-based instances (subnet given) take vpc_security_groups; classic
    # instances take security_groups.
    if module.params.get('subnet'):
        valid_vars.append('vpc_security_groups')
    else:
        valid_vars.append('security_groups')
    if HAS_RDS2:
        valid_vars.extend(['publicly_accessible', 'tags'])
    params = validate_parameters(required_vars, valid_vars, module)
    instance_name = module.params.get('instance_name')
    result = conn.get_db_instance(instance_name)
    if result:
        changed = False
    else:
        try:
            result = conn.create_db_instance(instance_name, module.params.get('size'),
                                             module.params.get('instance_type'), module.params.get('db_engine'),
                                             module.params.get('username'), module.params.get('password'), **params)
            changed = True
        except RDSException as e:
            module.fail_json(msg="Failed to create instance: %s" % e.message)
    if module.params.get('wait'):
        resource = await_resource(conn, result, 'available', module)
    else:
        resource = conn.get_db_instance(instance_name)
    module.exit_json(changed=changed, instance=resource.get_data())
def replicate_db_instance(module, conn):
    """Handle the 'replicate' command: create a read replica if absent."""
    required_vars = ['instance_name', 'source_instance']
    valid_vars = ['instance_type', 'port', 'upgrade', 'zone']
    if HAS_RDS2:
        valid_vars.extend(['iops', 'option_group', 'publicly_accessible', 'tags'])
    params = validate_parameters(required_vars, valid_vars, module)
    instance_name = module.params.get('instance_name')
    source_instance = module.params.get('source_instance')
    result = conn.get_db_instance(instance_name)
    if result:
        # Replica already exists; report no change.
        changed = False
    else:
        try:
            result = conn.create_db_instance_read_replica(instance_name, source_instance, **params)
            changed = True
        except RDSException as e:
            module.fail_json(msg="Failed to create replica instance: %s " % e.message)
    if module.params.get('wait'):
        resource = await_resource(conn, result, 'available', module)
    else:
        resource = conn.get_db_instance(instance_name)
    module.exit_json(changed=changed, instance=resource.get_data())
def delete_db_instance_or_snapshot(module, conn):
    """Handle the 'delete' command.

    Deletes the DB instance when instance_name is given (optionally taking a
    final snapshot first), otherwise deletes the named snapshot.
    """
    required_vars = []
    valid_vars = ['instance_name', 'snapshot', 'skip_final_snapshot']
    params = validate_parameters(required_vars, valid_vars, module)
    instance_name = module.params.get('instance_name')
    snapshot = module.params.get('snapshot')
    if not instance_name:
        result = conn.get_db_snapshot(snapshot)
    else:
        result = conn.get_db_instance(instance_name)
    if not result:
        # Already gone - nothing to do.
        module.exit_json(changed=False)
    if result.status == 'deleting':
        module.exit_json(changed=False)
    try:
        if instance_name:
            if snapshot:
                # Take a final snapshot before deleting the instance.
                params["skip_final_snapshot"] = False
                if HAS_RDS2:
                    params["final_db_snapshot_identifier"] = snapshot
                else:
                    params["final_snapshot_id"] = snapshot
            else:
                params["skip_final_snapshot"] = True
            result = conn.delete_db_instance(instance_name, **params)
        else:
            result = conn.delete_db_snapshot(snapshot)
    except RDSException as e:
        module.fail_json(msg="Failed to delete instance: %s" % e.message)
    # If we're not waiting for a delete to complete then we're all done
    # so just return
    if not module.params.get('wait'):
        module.exit_json(changed=True)
    try:
        await_resource(conn, result, 'deleted', module)
        module.exit_json(changed=True)
    except RDSException as e:
        # The resource disappearing mid-wait means the delete succeeded.
        if e.code == 'DBInstanceNotFound':
            module.exit_json(changed=True)
        else:
            module.fail_json(msg=e.message)
    except Exception as e:
        module.fail_json(msg=str(e))
def facts_db_instance_or_snapshot(module, conn):
    """Handle the 'facts' command for either a DB instance or a snapshot.

    Exactly one of instance_name / snapshot must be supplied; the matching
    resource's facts are returned with changed=False.
    """
    instance_name = module.params.get('instance_name')
    snapshot = module.params.get('snapshot')
    if instance_name and snapshot:
        module.fail_json(msg="Facts must be called with either instance_name or snapshot, not both")
    lookups = (
        (instance_name, conn.get_db_instance, "DB instance %s does not exist"),
        (snapshot, conn.get_db_snapshot, "DB snapshot %s does not exist"),
    )
    for identifier, fetch, missing_msg in lookups:
        if identifier:
            resource = fetch(identifier)
            if not resource:
                module.fail_json(msg=missing_msg % identifier)
    module.exit_json(changed=False, instance=resource.get_data())
def modify_db_instance(module, conn):
    """Handle the 'modify' command.

    Always exits with changed=True because there is no reliable way (yet) to
    detect whether the modification actually altered anything.
    """
    required_vars = ['instance_name']
    valid_vars = ['apply_immediately', 'backup_retention', 'backup_window',
                  'db_name', 'engine_version', 'instance_type', 'iops', 'license_model',
                  'maint_window', 'multi_zone', 'new_instance_name',
                  'option_group', 'parameter_group', 'password', 'size', 'upgrade']
    params = validate_parameters(required_vars, valid_vars, module)
    instance_name = module.params.get('instance_name')
    new_instance_name = module.params.get('new_instance_name')
    try:
        result = conn.modify_db_instance(instance_name, **params)
    except RDSException as e:
        module.fail_json(msg=e.message)
    if params.get('apply_immediately'):
        if new_instance_name:
            # Wait until the new instance name is valid
            new_instance = None
            while not new_instance:
                new_instance = conn.get_db_instance(new_instance_name)
                time.sleep(5)
            # Found instance but it briefly flicks to available
            # before rebooting so let's wait until we see it rebooting
            # before we check whether to 'wait'
            result = await_resource(conn, new_instance, 'rebooting', module)
    if module.params.get('wait'):
        resource = await_resource(conn, result, 'available', module)
    else:
        resource = conn.get_db_instance(instance_name)
    # guess that this changed the DB, need a way to check
    module.exit_json(changed=True, instance=resource.get_data())
def promote_db_instance(module, conn):
    """Handle the 'promote' command: promote a read replica to standalone."""
    required_vars = ['instance_name']
    valid_vars = ['backup_retention', 'backup_window']
    params = validate_parameters(required_vars, valid_vars, module)
    instance_name = module.params.get('instance_name')
    result = conn.get_db_instance(instance_name)
    if not result:
        module.fail_json(msg="DB Instance %s does not exist" % instance_name)
    # Only instances that currently replicate from a source can be promoted;
    # anything else is a no-op.
    if result.get_data().get('replication_source'):
        try:
            result = conn.promote_read_replica(instance_name, **params)
            changed = True
        except RDSException as e:
            module.fail_json(msg=e.message)
    else:
        changed = False
    if module.params.get('wait'):
        resource = await_resource(conn, result, 'available', module)
    else:
        resource = conn.get_db_instance(instance_name)
    module.exit_json(changed=changed, instance=resource.get_data())
def snapshot_db_instance(module, conn):
    """Handle the 'snapshot' command: create a DB snapshot if it does not exist."""
    required_vars = ['instance_name', 'snapshot']
    valid_vars = ['tags']
    params = validate_parameters(required_vars, valid_vars, module)
    instance_name = module.params.get('instance_name')
    snapshot = module.params.get('snapshot')
    changed = False
    result = conn.get_db_snapshot(snapshot)
    if not result:
        try:
            result = conn.create_db_snapshot(snapshot, instance_name, **params)
            changed = True
        except RDSException as e:
            module.fail_json(msg=e.message)
    if module.params.get('wait'):
        resource = await_resource(conn, result, 'available', module)
    else:
        resource = conn.get_db_snapshot(snapshot)
    module.exit_json(changed=changed, snapshot=resource.get_data())
def reboot_db_instance(module, conn):
    """Handle the 'reboot' command."""
    required_vars = ['instance_name']
    valid_vars = []
    # force_failover is only accepted by the rds2 API.
    if HAS_RDS2:
        valid_vars.append('force_failover')
    params = validate_parameters(required_vars, valid_vars, module)
    instance_name = module.params.get('instance_name')
    result = conn.get_db_instance(instance_name)
    changed = False
    try:
        result = conn.reboot_db_instance(instance_name, **params)
        changed = True
    except RDSException as e:
        module.fail_json(msg=e.message)
    if module.params.get('wait'):
        resource = await_resource(conn, result, 'available', module)
    else:
        resource = conn.get_db_instance(instance_name)
    module.exit_json(changed=changed, instance=resource.get_data())
def restore_db_instance(module, conn):
    """Handle the 'restore' command: restore an instance from a snapshot if absent."""
    required_vars = ['instance_name', 'snapshot']
    valid_vars = ['db_name', 'iops', 'license_model', 'multi_zone',
                  'option_group', 'port', 'publicly_accessible',
                  'subnet', 'tags', 'upgrade', 'zone']
    # instance_type is optional on rds2 but mandatory on the older rds API.
    if HAS_RDS2:
        valid_vars.append('instance_type')
    else:
        required_vars.append('instance_type')
    params = validate_parameters(required_vars, valid_vars, module)
    instance_name = module.params.get('instance_name')
    instance_type = module.params.get('instance_type')
    snapshot = module.params.get('snapshot')
    changed = False
    result = conn.get_db_instance(instance_name)
    if not result:
        try:
            result = conn.restore_db_instance_from_db_snapshot(instance_name, snapshot, instance_type, **params)
            changed = True
        except RDSException as e:
            module.fail_json(msg=e.message)
    if module.params.get('wait'):
        resource = await_resource(conn, result, 'available', module)
    else:
        resource = conn.get_db_instance(instance_name)
    module.exit_json(changed=changed, instance=resource.get_data())
def validate_parameters(required_vars, valid_vars, module):
    """Validate module parameters for the current command and map them to boto kwargs.

    Fails the module when a required parameter is missing or an inapplicable
    one was supplied; returns a dict of keyword arguments named for whichever
    boto API (rds or rds2) is in use.
    """
    command = module.params.get('command')
    for v in required_vars:
        if not module.params.get(v):
            module.fail_json(msg="Parameter %s required for %s command" % (v, command))
    # map to convert rds module options to boto rds and rds2 options
    optional_params = {
        'port': 'port',
        'db_name': 'db_name',
        'zone': 'availability_zone',
        'maint_window': 'preferred_maintenance_window',
        'backup_window': 'preferred_backup_window',
        'backup_retention': 'backup_retention_period',
        'multi_zone': 'multi_az',
        'engine_version': 'engine_version',
        'upgrade': 'auto_minor_version_upgrade',
        'subnet': 'db_subnet_group_name',
        'license_model': 'license_model',
        'option_group': 'option_group_name',
        'size': 'allocated_storage',
        'iops': 'iops',
        'new_instance_name': 'new_instance_id',
        'apply_immediately': 'apply_immediately',
    }
    # map to convert rds module options to boto rds options
    optional_params_rds = {
        'db_engine': 'engine',
        'password': 'master_password',
        'parameter_group': 'param_group',
        'instance_type': 'instance_class',
    }
    # map to convert rds module options to boto rds2 options
    optional_params_rds2 = {
        'tags': 'tags',
        'publicly_accessible': 'publicly_accessible',
        'parameter_group': 'db_parameter_group_name',
        'character_set_name': 'character_set_name',
        'instance_type': 'db_instance_class',
        'password': 'master_user_password',
        'new_instance_name': 'new_db_instance_identifier',
        'force_failover': 'force_failover',
    }
    if HAS_RDS2:
        optional_params.update(optional_params_rds2)
        sec_group = 'db_security_groups'
    else:
        optional_params.update(optional_params_rds)
        sec_group = 'security_groups'
    # Check for options only supported with rds2
    for k in set(optional_params_rds2.keys()) - set(optional_params_rds.keys()):
        if module.params.get(k):
            # Fixed message: these options need the newer boto.rds2 API,
            # not boto.rds.
            module.fail_json(msg="Parameter %s requires boto.rds2 (boto >= 2.26.0)" % k)
    params = {}
    for (k, v) in optional_params.items():
        if module.params.get(k) is not None and k not in required_vars:
            if k in valid_vars:
                params[v] = module.params[k]
            else:
                # Boolean module options default to False rather than None,
                # so a False value for an inapplicable flag is not an error.
                if module.params.get(k) is False:
                    pass
                else:
                    module.fail_json(msg="Parameter %s is not valid for %s command" % (k, command))
    if module.params.get('security_groups'):
        params[sec_group] = module.params.get('security_groups').split(',')
    vpc_groups = module.params.get('vpc_security_groups')
    if vpc_groups:
        if HAS_RDS2:
            params['vpc_security_group_ids'] = vpc_groups
        else:
            groups_list = []
            for x in vpc_groups:
                groups_list.append(boto.rds.VPCSecurityGroupMembership(vpc_group=x))
            params['vpc_security_groups'] = groups_list
    # Convert tags dict to the list of tuples that rds2 expects; list() is
    # required because dict.items() returns a view, not a list, on Python 3.
    if 'tags' in params:
        params['tags'] = list(module.params['tags'].items())
    return params
def main():
    """Module entry point: parse arguments and dispatch the requested command."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        command=dict(choices=['create', 'replicate', 'delete', 'facts', 'modify', 'promote', 'snapshot', 'reboot', 'restore'], required=True),
        instance_name=dict(required=False),
        source_instance=dict(required=False),
        db_engine=dict(choices=['mariadb', 'MySQL', 'oracle-se1', 'oracle-se2', 'oracle-se', 'oracle-ee', 'sqlserver-ee', 'sqlserver-se', 'sqlserver-ex',
                                'sqlserver-web', 'postgres', 'aurora'], required=False),
        size=dict(required=False),
        instance_type=dict(aliases=['type'], required=False),
        username=dict(required=False),
        password=dict(no_log=True, required=False),
        db_name=dict(required=False),
        engine_version=dict(required=False),
        parameter_group=dict(required=False),
        license_model=dict(choices=['license-included', 'bring-your-own-license', 'general-public-license', 'postgresql-license'], required=False),
        multi_zone=dict(type='bool', required=False),
        iops=dict(required=False),
        security_groups=dict(required=False),
        vpc_security_groups=dict(type='list', required=False),
        port=dict(required=False, type='int'),
        upgrade=dict(type='bool', default=False),
        option_group=dict(required=False),
        maint_window=dict(required=False),
        backup_window=dict(required=False),
        backup_retention=dict(required=False),
        zone=dict(aliases=['aws_zone', 'ec2_zone'], required=False),
        subnet=dict(required=False),
        wait=dict(type='bool', default=False),
        wait_timeout=dict(type='int', default=300),
        snapshot=dict(required=False),
        apply_immediately=dict(type='bool', default=False),
        new_instance_name=dict(required=False),
        tags=dict(type='dict', required=False),
        publicly_accessible=dict(required=False),
        character_set_name=dict(required=False),
        force_failover=dict(type='bool', required=False, default=False)
    )
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
    )
    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')
    # Map each supported command to its handler; all handlers exit the module
    # themselves via exit_json/fail_json.
    invocations = {
        'create': create_db_instance,
        'replicate': replicate_db_instance,
        'delete': delete_db_instance_or_snapshot,
        'facts': facts_db_instance_or_snapshot,
        'modify': modify_db_instance,
        'promote': promote_db_instance,
        'snapshot': snapshot_db_instance,
        'reboot': reboot_db_instance,
        'restore': restore_db_instance,
    }
    region, ec2_url, aws_connect_params = get_aws_connection_info(module)
    if not region:
        module.fail_json(msg="Region must be specified as a parameter, in EC2_REGION or AWS_REGION environment variables or in boto configuration file")
    # set port to per db defaults if not specified
    if module.params['port'] is None and module.params['db_engine'] is not None and module.params['command'] == 'create':
        if '-' in module.params['db_engine']:
            # e.g. 'oracle-se1' -> 'oracle'
            engine = module.params['db_engine'].split('-')[0]
        else:
            engine = module.params['db_engine']
        module.params['port'] = DEFAULT_PORTS[engine.lower()]
    # connect to the rds endpoint
    if HAS_RDS2:
        conn = RDS2Connection(module, region, **aws_connect_params)
    else:
        conn = RDSConnection(module, region, **aws_connect_params)
    invocations[module.params.get('command')](module, conn)


if __name__ == '__main__':
    main()
| aperigault/ansible | lib/ansible/modules/cloud/amazon/rds.py | Python | gpl-3.0 | 56,646 |
from django.http import HttpResponseRedirect, Http404
from django.utils.translation import ugettext as _
from django.contrib import messages
from notifications.models import CommentNotification, PostNotification, AlreadyNotifiedError
def notify(request, NotificationType, name,
           login_msg=_('You need to login to receive notifications.'),
           success_msg=_("You will receive emails alerting you to new posts."),
           already_msg=_("You already receive notifications for new posts"),
           *args):
    '''Common method for processing different types of notification view.

    Subscribes request.user to the named notification and redirects to
    request.GET['next']; the *_msg arguments let concrete views customize
    the flash messages, and extra positional args are forwarded to
    NotificationType.add_user().
    '''
    if not request.user.is_authenticated():
        messages.warning(request, login_msg)
    else:
        # Fixed: previously the subscription was attempted even for
        # anonymous users after warning them to log in; guard it with an
        # else branch like the sibling remove_notification() helper.
        try:
            c = NotificationType()
            c.add_user(name, request.user, *args)
            messages.info(request, success_msg)
        except AlreadyNotifiedError:
            messages.warning(request, already_msg)
        except (ValueError, KeyError):
            messages.warning(request, _("An unexpected error has occurred in "
                                        "the notification system"))
    try:
        return HttpResponseRedirect(request.GET['next'])
    except KeyError:
        # No 'next' parameter to redirect back to.
        raise Http404
def notify_post(request):
    '''Process notification request, typically after user submits form
    requesting to be notified of new posts.
    '''
    # Fall back to the generic 'post' notification name if none was posted.
    requested_name = 'post'
    try:
        requested_name = request.POST['name']
    except:
        pass
    return notify(request, PostNotification, requested_name)
def notify_comment(request):
    '''Process notification request, typically after user submits form requesting to be
    notified of new comments on a post.
    '''
    # Fall back to the generic 'post' notification name if none was posted.
    try:
        name = request.POST['name']
    except:
        name = 'post'
    # NOTE(review): login_msg is passed as None here, so unauthenticated
    # users would be shown a None warning message -- confirm this is intended.
    return notify(request, CommentNotification, name,
                  None,
                  _("You will receive emails notifying you of new comments on this post."),
                  _("You already receive emails notifying you of new comments on this post."),
                  request.POST['app_label'],
                  request.POST['model'], int(request.POST['pk']))
def remove_notification(request, NotificationType, name,
                        login_msg=_('You need to login to stop notifications.'),
                        success_msg=_('You will no longer receive emails notifying you of new posts.'),
                        already_msg=_('You do not get emailed notifications of new posts.'),
                        *args):
    '''Common method for removing a notification subscription for request.user.

    Mirrors notify(): unsubscribes the user from the named notification and
    redirects to request.GET['next'].
    '''
    # Fixed: is_authenticated was referenced without calling it, so the
    # expression was always truthy and the login warning never fired; the
    # sibling notify() helper calls it as a method.
    if not request.user.is_authenticated():
        messages.warning(request, login_msg)
    else:
        try:
            try:
                # Prefer an existing notification record; fall back to a
                # fresh instance (best-effort, matching original behavior).
                notification = NotificationType.objects.get(name=name)
            except:
                notification = NotificationType()
            notification.remove_user(request.user, *args)
            messages.info(request, success_msg)
        except:
            # Any failure is treated as "was not subscribed".
            messages.info(request, already_msg)
    try:
        return HttpResponseRedirect(request.GET['next'])
    except KeyError:
        # No 'next' parameter to redirect back to.
        raise Http404
def remove_post_notification(request):
    '''Unsubscribe the current user from new-post notifications.'''
    try:
        name = request.POST['name']
    # Only a missing POST key is expected here (was a bare ``except:``).
    except KeyError:
        name = 'post'
    return remove_notification(request, PostNotification, name)
def remove_comment_notification(request):
    '''Unsubscribe the current user from comment notifications on a post.'''
    try:
        name = request.POST['name']
    # Only a missing POST key is expected here (was a bare ``except:``).
    except KeyError:
        name = 'comment'
    return remove_notification(request, CommentNotification, name,
                               None,
                               _('You will no longer receive emails notifying you of new comments on this post.'),
                               _('You do not receive emails notifying you of new comments on this post.'),
                               request.POST['app_label'],
                               request.POST['model'],
                               int(request.POST['pk']))
| nathangeffen/tbonline-old | tbonlineproject/notifications/views.py | Python | mit | 3,929 |
#locomotion mirror
if scene.getMotion('ChrMarine@WalkCircleLf01') == None:
#locomotion smooth cycle
print 'locomotion smooth cycle'
smoothMotion = scene.getMotion("ChrMarine@RunCircleRt01")
smoothMotion.smoothCycle("ChrMarine@RunCircleRt01_smooth",0.1);
smoothMotion = scene.getMotion("ChrMarine@WalkCircleRt01")
smoothMotion.smoothCycle("ChrMarine@WalkCircleRt01_smooth",0.1);
smoothMotion = scene.getMotion("ChrMarine@WalkTightCircleRt01")
smoothMotion.smoothCycle("ChrMarine@WalkTightCircleRt01_smooth",0.1);
smoothMotion = scene.getMotion("ChrMarine@StrafeFastRt01")
smoothMotion.smoothCycle("ChrMarine@StrafeFastRt01_smooth",0.1);
smoothMotion = scene.getMotion("ChrMarine@Turn180Rt01")
smoothMotion.smoothCycle("ChrMarine@Turn180Rt01_smooth",0.1);
#locomotion mirror
#print 'locomotion mirror'
mirrorMotion = scene.getMotion("ChrMarine@WalkCircleRt01")
mirrorMotion.mirror("ChrMarine@WalkCircleLf01", "ChrBackovic.sk")
mirrorMotion = scene.getMotion("ChrMarine@WalkTightCircleRt01")
mirrorMotion.mirror("ChrMarine@WalkTightCircleLf01", "ChrBackovic.sk")
mirrorMotion = scene.getMotion("ChrMarine@StrafeFastRt01")
mirrorMotion.mirror("ChrMarine@StrafeFastLf01", "ChrBackovic.sk")
mirrorMotion = scene.getMotion("ChrMarine@StrafeSlowRt01")
mirrorMotion.mirror("ChrMarine@StrafeSlowLf01", "ChrBackovic.sk")
mirrorMotion = scene.getMotion("ChrMarine@RunCircleRt01")
mirrorMotion.mirror("ChrMarine@RunCircleLf01", "ChrBackovic.sk")
mirrorMotion = scene.getMotion("ChrMarine@RunTightCircleRt01")
mirrorMotion.mirror("ChrMarine@RunTightCircleLf01", "ChrBackovic.sk")
#idle turn
#print 'idle turn mirror'
mirrorMotion = scene.getMotion("ChrMarine@Turn90Rt01")
mirrorMotion.mirror("ChrMarine@Turn90Lf01", "ChrBackovic.sk")
mirrorMotion1 = scene.getMotion("ChrMarine@Turn180Rt01_smooth")
mirrorMotion1.mirror("ChrMarine@Turn180Lf01_smooth", "ChrBackovic.sk")
mirrorMotion = scene.getMotion("ChrMarine@Turn360Rt01")
mirrorMotion.mirror("ChrMarine@Turn360Lf01", "ChrBackovic.sk")
#mirroring for smooth cycle motion
mirrorMotion = scene.getMotion("ChrMarine@WalkCircleRt01_smooth")
mirrorMotion.mirror("ChrMarine@WalkCircleLf01_smooth", "ChrBackovic.sk")
mirrorMotion = scene.getMotion("ChrMarine@WalkTightCircleRt01_smooth")
mirrorMotion.mirror("ChrMarine@WalkTightCircleLf01_smooth", "ChrBackovic.sk")
mirrorMotion = scene.getMotion("ChrMarine@StrafeFastRt01_smooth")
mirrorMotion.mirror("ChrMarine@StrafeFastLf01_smooth", "ChrBackovic.sk")
mirrorMotion = scene.getMotion("ChrMarine@RunCircleRt01_smooth")
mirrorMotion.mirror("ChrMarine@RunCircleLf01_smooth", "ChrBackovic.sk")
#starting mirror
mirrorMotion = scene.getMotion("ChrMarine@Idle01_ToWalk01")
mirrorMotion.mirror("ChrMarine@Idle01_ToWalkLf01", "ChrBackovic.sk")
mirrorMotion = scene.getMotion("ChrMarine@Idle01_ToWalk01_Turn90Rt01")
mirrorMotion.mirror("ChrMarine@Idle01_ToWalk01_Turn90Lf01", "ChrBackovic.sk")
mirrorMotion = scene.getMotion("ChrMarine@Idle01_ToWalk01_Turn180Rt01")
mirrorMotion.mirror("ChrMarine@Idle01_ToWalk01_Turn180Lf01", "ChrBackovic.sk")
#step mirror
#print 'step mirror'
mirrorMotion = scene.getMotion("ChrMarine@Idle01_StepBackwardsRt01")
mirrorMotion.mirror("ChrMarine@Idle01_StepBackwardsLf01", "ChrBackovic.sk")
mirrorMotion = scene.getMotion("ChrMarine@Idle01_StepForwardRt01")
mirrorMotion.mirror("ChrMarine@Idle01_StepForwardLf01", "ChrBackovic.sk")
mirrorMotion = scene.getMotion("ChrMarine@Idle01_StepSidewaysRt01")
mirrorMotion.mirror("ChrMarine@Idle01_StepSidewaysLf01", "ChrBackovic.sk")
# locomotion main state
scene.run("locomotion-ChrMarine-state-Locomotion.py")
# starting state, starting locomotion with different angle
scene.run("locomotion-ChrMarine-state-StartingLeft.py")
scene.run("locomotion-ChrMarine-state-StartingRight.py")
# idle turn state, facing adjusting
scene.run("locomotion-ChrMarine-state-IdleTurn.py")
# step state, stepping adjusting
scene.run("locomotion-ChrMarine-state-Step.py")
# transitions
scene.run("locomotion-ChrMarine-transitions.py")
| USC-ICT/gift-integration-demo | GiftDemo/Assets/StreamingAssets/SB/locomotion-ChrMarine-init.py | Python | bsd-3-clause | 4,135 |
"""
Author: Dr. John T. Hwang <[email protected]>
This package is distributed under New BSD license.
Tensor-product of cos, exp, or tanh.
"""
import numpy as np
from smt.problems.problem import Problem
class TensorProduct(Problem):
    """Tensor-product test problem: f(x) = prod_i g(x_i) for a chosen
    one-dimensional kernel g (cos, exp, tanh, or gaussian) scaled by the
    ``width`` option, on the domain [-1, 1]^nx.
    """

    def _initialize(self):
        # Problem name, kernel choice, and the width factor 'a'.
        self.options.declare("name", "TP", types=str)
        self.options.declare("func", values=["cos", "exp", "tanh", "gaussian"])
        self.options.declare("width", 1.0, types=(float, int))

    def _setup(self):
        self.xlimits[:, 0] = -1.0
        self.xlimits[:, 1] = 1.0
        a = self.options["width"]
        # Dispatch table mapping each kernel name to (g, g') closures over
        # the width factor.
        kernels = {
            "cos": (
                lambda v: np.cos(a * np.pi * v),
                lambda v: -a * np.pi * np.sin(a * np.pi * v),
            ),
            "exp": (
                lambda v: np.exp(a * v),
                lambda v: a * np.exp(a * v),
            ),
            "tanh": (
                lambda v: np.tanh(a * v),
                lambda v: a / np.cosh(a * v) ** 2,
            ),
            "gaussian": (
                lambda v: np.exp(-2.0 * a * v ** 2),
                lambda v: -4.0 * a * v * np.exp(-2.0 * a * v ** 2),
            ),
        }
        self.func, self.dfunc = kernels[self.options["func"]]

    def _evaluate(self, x, kx):
        """
        Arguments
        ---------
        x : ndarray[ne, nx]
            Evaluation points.
        kx : int or None
            Index of derivative (0-based) to return values with respect to.
            None means return function value rather than derivative.

        Returns
        -------
        ndarray[ne, 1]
            Functions values if kx=None or derivative values if kx is an int.
        """
        num_pts, num_dims = x.shape
        y = np.ones((num_pts, 1), complex)
        if kx is None:
            # Product of the kernel over all dimensions.
            y[:, 0] = np.prod(self.func(x), 1).T
        else:
            # Partial derivative: differentiate the kx-th factor only.
            for dim in range(num_dims):
                factor = self.dfunc if dim == kx else self.func
                y[:, 0] *= factor(x[:, dim])
        return y
| relf/smt | smt/problems/tensor_product.py | Python | bsd-3-clause | 2,048 |
import os
import time
from random import Random
from pyqtgraph.Qt import QtCore, QtGui
app = QtGui.QApplication([])
from qtreactor import pyqt4reactor
pyqt4reactor.install()
from twisted.internet import defer, task, threads
from orchstr8.services import LbryServiceStack
import pyqtgraph as pg
class Profiler:
    """Collects named wall-clock timings and plots each series on a
    pyqtgraph plot widget.

    ``times`` maps a series name to a dict with keys 'start' (epoch time of
    the last ``start`` call, or None once stopped), 'data' (list of elapsed
    seconds), and 'plot' (the pyqtgraph plot item for the series).
    """

    # RGB pen colors, one per series; cycled when there are more series
    # than colors.
    pens = [
        (230, 25, 75),  # red
        (60, 180, 75),  # green
        (255, 225, 25),  # yellow
        (0, 130, 200),  # blue
        (245, 130, 48),  # orange
        (145, 30, 180),  # purple
        (70, 240, 240),  # cyan
        (240, 50, 230),  # magenta
        (210, 245, 60),  # lime
        (250, 190, 190),  # pink
        (0, 128, 128),  # teal
    ]

    def __init__(self, graph=None):
        self.times = {}
        self.graph = graph

    def start(self, name):
        """Start (or restart) the timer for *name*, creating its plot
        series on first use."""
        if name in self.times:
            self.times[name]['start'] = time.time()
        else:
            # Cycle through the palette so a 12th series no longer raises
            # IndexError (was: self.pens[len(self.times)]).
            pen = self.pens[len(self.times) % len(self.pens)]
            self.times[name] = {
                'start': time.time(),
                'data': [],
                'plot': self.graph.plot(
                    pen=pen,
                    symbolBrush=pen,
                    name=name
                )
            }

    def stop(self, name):
        """Record the elapsed time since the matching ``start`` call."""
        elapsed = time.time() - self.times[name]['start']
        self.times[name]['start'] = None
        self.times[name]['data'].append(elapsed)

    def draw(self):
        """Push every series' accumulated data to its plot item."""
        for plot in self.times.values():
            plot['plot'].setData(plot['data'])
class ThePublisherOfThings:
    """Benchmark driver: publishes many claims against a local LBRY stack
    while charting per-publish timings with the Profiler.

    NOTE(review): ``start_blocks`` is stored but not used anywhere in this
    class -- confirm whether it was meant to drive the initial generate().
    """

    def __init__(self, blocks=100, txns_per_block=100, seed=2015, start_blocks=110):
        # blocks x txns_per_block publishes are issued in total; the seed
        # makes the random bid amounts reproducible.
        self.blocks = blocks
        self.txns_per_block = txns_per_block
        self.start_blocks = start_blocks
        self.random = Random(seed)
        self.profiler = Profiler()
        self.service = LbryServiceStack(verbose=True, profiler=self.profiler)
        # Path of the file to publish; set in start().
        self.publish_file = None

    @defer.inlineCallbacks
    def start(self):
        """Boot the service stack, fund the wallet, and create the file
        that every publish will upload."""
        yield self.service.startup(
            after_lbrycrd_start=lambda: self.service.lbrycrd.generate(1010)
        )
        wallet = self.service.lbry.wallet
        address = yield wallet.get_least_used_address()
        sendtxid = yield self.service.lbrycrd.sendtoaddress(address, 100)
        # Mine a block so the funding transaction confirms.
        yield self.service.lbrycrd.generate(1)
        yield wallet.wait_for_tx_in_wallet(sendtxid)
        yield wallet.update_balance()
        self.publish_file = os.path.join(self.service.lbry.download_directory, 'the_file')
        with open(self.publish_file, 'w') as _publish_file:
            _publish_file.write('message that will be heard around the world\n')
        # Brief pause off the reactor thread before publishing begins.
        yield threads.deferToThread(time.sleep, 0.5)

    @defer.inlineCallbacks
    def generate_publishes(self):
        """Publish blocks*txns_per_block claims, timing each one and
        updating the live pyqtgraph window; mine one block per batch."""
        win = pg.GraphicsLayoutWidget(show=True)
        win.setWindowTitle('orchstr8: performance monitor')
        win.resize(1800, 600)
        p4 = win.addPlot()
        p4.addLegend()
        # Downsample/clip so the live plot stays responsive as data grows.
        p4.setDownsampling(mode='peak')
        p4.setClipToView(True)
        self.profiler.graph = p4
        for block in range(self.blocks):
            for txn in range(self.txns_per_block):
                name = f'block{block}txn{txn}'
                self.profiler.start('total')
                yield self.service.lbry.daemon.jsonrpc_publish(
                    name=name, bid=self.random.randrange(1, 5)/1000.0,
                    file_path=self.publish_file, metadata={
                        "description": "Some interesting content",
                        "title": "My interesting content",
                        "author": "Video shot by [email protected]",
                        "language": "en", "license": "LBRY Inc", "nsfw": False
                    }
                )
                self.profiler.stop('total')
                self.profiler.draw()
            # Confirm the batch of publishes in a new block.
            yield self.service.lbrycrd.generate(1)

    def stop(self):
        """Shut the stack down, keeping data dirs for later inspection."""
        return self.service.shutdown(cleanup=False)
@defer.inlineCallbacks
def generate_publishes(_):
    """Twisted entry point: run the publisher benchmark end to end.

    The ignored argument is the reactor passed in by ``task.react``.
    """
    pub = ThePublisherOfThings(50, 10)
    yield pub.start()
    yield pub.generate_publishes()
    yield pub.stop()
    # Print the data directories so the generated chain/wallet can be
    # inspected after the run (shutdown above used cleanup=False).
    print(f'lbrycrd: {pub.service.lbrycrd.data_path}')
    print(f'lbrynet: {pub.service.lbry.data_path}')
    print(f'lbryumserver: {pub.service.lbryumserver.data_path}')


if __name__ == "__main__":
    # task.react runs the reactor until the Deferred from
    # generate_publishes fires, then stops it.
    task.react(generate_publishes)
| lbryio/lbry | lbry/scripts/publish_performance.py | Python | mit | 4,344 |
# Copyright 2015 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import mock
import pytest
from f5.bigip import ManagementRoot
from f5.bigip.tm.ltm.nat import Nat
from f5.sdk_exception import MissingRequiredCreationParameter
@pytest.fixture
def FakeNat():
    """Return a Nat resource whose container is a MagicMock, so creation
    validation can be exercised without a live BIG-IP."""
    mock_container = mock.MagicMock()
    nat_resource = Nat(mock_container)
    return nat_resource
class TestCreate(object):
    """Unit tests for Nat.create(): distinct instances per access, and the
    required-parameter validation chain (name, partition,
    translationAddress, originatingAddress, and trafficGroup when
    inheritedTrafficGroup is 'false')."""

    def test_create_two(self, fakeicontrolsession):
        # Each attribute access on the collection must build a new object.
        b = ManagementRoot('192.168.1.1', 'admin', 'admin')
        n1 = b.tm.ltm.nats.nat
        n2 = b.tm.ltm.nats.nat
        assert n1 is not n2

    def test_create_no_args(self, FakeNat):
        """create() with no arguments must be rejected."""
        with pytest.raises(MissingRequiredCreationParameter):
            FakeNat.create()

    def test_create_name(self, FakeNat):
        """name alone is not sufficient."""
        with pytest.raises(MissingRequiredCreationParameter):
            FakeNat.create(name='myname')

    def test_create_partition(self, FakeNat):
        """name + partition still lack the address parameters."""
        with pytest.raises(MissingRequiredCreationParameter):
            FakeNat.create(name='myname', partition='Common')

    def test_create_translation(self, FakeNat):
        """translationAddress without originatingAddress is rejected."""
        with pytest.raises(MissingRequiredCreationParameter):
            FakeNat.create(name='myname', partition='Common',
                           translationAddress='192.168.1.1')

    def test_create_originating(self, FakeNat):
        """originatingAddress without translationAddress is rejected."""
        with pytest.raises(MissingRequiredCreationParameter):
            FakeNat.create(name='myname', partition='Common',
                           originatingAddress='192.168.2.1')

    def test_create_inheritedtrafficgroup_false_no_tg(self, FakeNat):
        """inheritedTrafficGroup='false' requires a trafficGroup."""
        with pytest.raises(MissingRequiredCreationParameter):
            FakeNat.create(name='mynat', partition='Common',
                           translationAddress='192.168.1.1',
                           originatingAddress='192.168.2.1',
                           inheritedTrafficGroup='false')

    def test_create_inheritedtrafficgroup_false_empty_tg(self, FakeNat):
        """An empty trafficGroup string must also be rejected."""
        with pytest.raises(MissingRequiredCreationParameter):
            FakeNat.create(name='mynat', partition='Common',
                           translationAddress='192.168.1.1',
                           originatingAddress='192.168.2.1',
                           inheritedTrafficGroup='false',
                           trafficGroup='')
| F5Networks/f5-common-python | f5/bigip/tm/ltm/test/unit/test_nat.py | Python | apache-2.0 | 2,807 |
import random
from igraph import *
from utils import weighted_choice
def BATFGraph(m0 = 10, m = 1):
    """Build a directed graph by preferential attachment (PA) plus a
    triad-formation (TF) step, Barabasi-Albert style.

    For each of the m0 vertices, one edge is added to a degree-weighted
    target, and -- if that target has neighbors -- a second edge to a
    random neighbor of the target.

    NOTE(review): the ``m`` parameter (conventionally edges-per-step) is
    accepted but never used in this body -- confirm intent.
    NOTE(review): on the first iteration every vertex has degree 0, so
    ``weighted_choice`` is presumably expected to cope with an all-zero
    weight vector -- verify against utils.weighted_choice.
    """
    # initialize graph with m0 vertices
    g = Graph(n = m0, directed=True)
    for v in xrange(m0):
        # PA step: pick u with probability proportional to current degree
        weights = g.vs.degree()
        u = weighted_choice(weights)
        g.add_edges((v,u))
        # TF step: also connect v to a random neighbor of u, closing a triad
        neighbors_u = g.neighbors(u)
        if neighbors_u:
            w = random.choice(neighbors_u)
            g.add_edges((v,w))
    return g
if __name__ == '__main__':
    # Smoke test: build a 1000-vertex graph and dump the degree sequence.
    g = BATFGraph(m0=1000)
    print g.vs.degree()
| talevy/hetnet | hetnet/graphs.py | Python | bsd-2-clause | 564 |
"""
fuzza.cli
---------
Entry point to the application.
"""
import click
from . import __description__
from . import __prog__
from . import __version__
from . import configuration as Configuration
from . import data as Data
from . import dispatcher as Dispatcher
from . import transformer as Transformer
from . import protocol as Protocol
from . import templater as Templater
def validate_comma_separated(ctx, param, value):
    """
    Click option callback: split a comma-separated option string into a
    list of strings.  Returns ``None`` unchanged when the option was not
    supplied, so downstream code can distinguish "absent" from "empty".
    """
    return None if value is None else value.split(',')
@click.group(help=__description__)
@click.version_option(version=__version__, prog_name=__prog__)
def cli():
    # Root command group; subcommands (init, fuzz) attach via @cli.command().
    pass
# Collect all fuzzer settings via prompts/flags and persist them with
# Configuration.to_file.  Options without a prompt are optional.
@cli.command()
@click.option(
    '--host',
    type=str,
    metavar='<host>',
    prompt='Target hostname or IP',
    help='The hostname or IP address of target to fuzz.'
)
@click.option(
    '--port',
    type=int,
    metavar='<port>',
    prompt='Target port',
    help='The port of target to fuzz.'
)
@click.option(
    '--data-path',
    type=str,
    metavar='<path>',
    prompt='Path to fuzz data',
    help='Path containing fuzz data. Support glob patterns.'
)
@click.option(
    '-c',
    '--data-chunk',
    is_flag=True,
    help='Read each fuzz data file in chunk, instead of line-by-line. [False]'
)
@click.option(
    '--template-path',
    type=str,
    metavar='[path]',
    help='Path containing template files. Support glob patterns. []'
)
@click.option(
    '--dispatcher',
    type=str,
    metavar='[dispatcher]',
    help='Type of dispatcher to use. [tcp]'
)
@click.option(
    '-r',
    '--dispatcher-reuse',
    is_flag=True,
    help='Enable dispatcher connection reuse. [False]'
)
@click.option(
    '--transformer',
    type=str,
    metavar='[transformer[, ...]]',
    callback=validate_comma_separated,
    help='List of transformations to be sequentially applied to fuzz data. []'
)
@click.option(
    '--protocol',
    type=str,
    metavar='[protocol]',
    help='Type of communication protocol. [textual]'
)
def init(**kwargs):
    """
    Create a fuzzer configuration file.
    """
    # Store configuration to file
    conf = Configuration.load(kwargs)
    Configuration.to_file(conf)
@cli.command()
def fuzz():
    """
    Execute the fuzzer.

    Pipeline: load config -> read data/templates -> transform data ->
    render payloads -> dispatch each payload via the configured protocol.
    """
    # Read configuration from file
    conf = Configuration.from_file()
    conf = Configuration.load(conf)

    # Load fuzz data and template
    data = Data.read(conf)
    templates = Templater.read(conf)

    # Transform the data using transformer
    transform = Transformer.init(conf)
    data = transform(data)

    # Initialize a dispatcher
    dispatch = Dispatcher.init(conf)

    # Initialize a protocol adapter
    adapt = Protocol.init(conf)

    # Dispatch the payloads
    payload = b''
    for payload in Templater.render(templates, data):
        dispatch(adapt(payload))

    # Hack: Ensure connection is closed by re-sending the last payload
    dispatch(adapt(payload), True)


if __name__ == '__main__':
    cli()
| Raphx/fuzza | fuzza/cli.py | Python | mit | 3,100 |
# This file is part of Wolnelektury, licensed under GNU Affero GPLv3 or later.
# Copyright © Fundacja Nowoczesna Polska. See NOTICE for more information.
#
from django import forms
from django.forms.utils import flatatt
from django.utils.html import format_html
class HeaderWidget(forms.widgets.Widget):
    """Widget that renders as an empty anchor tag carrying the widget's
    attributes, usable as an in-form section header/anchor."""

    def render(self, name, value, attrs=None, renderer=None):
        """Render ``<a ...attrs...></a>``.

        Merges the widget's own attrs over the per-render *attrs*.  Fixes
        the original, which mutated the caller-supplied dict in place and
        raised AttributeError when ``attrs`` was None (its default).
        """
        final_attrs = dict(attrs or {})
        final_attrs.update(self.attrs)
        return format_html('<a{0}></a>', flatatt(final_attrs))
| fnp/wolnelektury | src/contact/widgets.py | Python | agpl-3.0 | 459 |
#!/usr/bin/env python
from __future__ import print_function
import pyslurm
from time import gmtime, strftime
def display(block_dict):
    """Pretty-print a dict of slurm block-info dicts.

    Each top-level key is a block id mapping to a dict of attributes;
    attributes are printed one per line between '*' separators.

    Fixes: the inner loop iterated ``sorted(value.items())`` -- (key,
    value) tuples -- while indexing ``value[part_key]``, which raised
    KeyError on the first attribute lookup.  It now iterates sorted keys.
    """
    if block_dict:
        # Attribute names holding epoch timestamps; currently none.
        date_fields = [ ]
        print('{0:*^80}'.format(''))
        for key, value in block_dict.items():
            print("{0} :".format(key))
            for part_key in sorted(value):
                if part_key in date_fields:
                    ddate = value[part_key]
                    if ddate == 0:
                        print("\t{0:<17} : N/A".format(part_key))
                    elif ('reason_uid' in part_key) and (value['reason'] is None):
                        print("\t{0:<17} :".format(part_key))
                    else:
                        # Convert the epoch value to a readable date.
                        ddate = pyslurm.epoch2date(ddate)
                        print("\t{0:<17} : {1}".format(part_key, ddate))
                elif part_key == 'connection_type':
                    print("\t{0:<17} : {1}".format(part_key, pyslurm.get_connection_type(value[part_key])))
                elif part_key == 'state':
                    print("\t{0:<17} : {1}".format(part_key, value[part_key]))
                else:
                    print("\t{0:<17} : {1}".format(part_key, value[part_key]))
            print('{0:*^80}'.format(''))
if __name__ == "__main__":
a = pyslurm.block()
try:
a.load()
block_dict = a.get()
except ValueError as e:
print("Block query failed - {0}".format(e.args[0]))
else:
if len(block_dict) > 0:
display(block_dict)
print()
print("Block IDs - {0}".format(a.ids()))
print()
else:
print("No Blocks found !")
| edf-hpc/python-pyslurm | examples/blocks_list.py | Python | gpl-2.0 | 1,695 |
# -*- coding: utf-8 -*-
from django.shortcuts import get_object_or_404, render_to_response, redirect
from django.template import RequestContext
from django.core.context_processors import csrf
from django.views.decorators.csrf import csrf_exempt
from django.http import Http404, HttpResponse, HttpResponseForbidden, HttpResponseNotFound
from django.utils.encoding import smart_str
from django.conf import settings
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.decorators import login_required, user_passes_test
from django.http import HttpResponseRedirect
from django.db import connections
from django.core.paginator import InvalidPage, EmptyPage, Paginator
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.utils.translation import ugettext_lazy as _
from mezzanine.forms.forms import FormForForm
from mezzanine.forms.signals import form_invalid, form_valid
from mezzanine.pages.page_processors import processor_for
from mezzanine.utils.email import split_addresses, send_mail_template
from mezzanine.utils.views import is_spam
from django.core.mail import send_mail
from .fields import EMAIL
from .models import Form, FormEntry, Payment
from polybanking import PolyBanking
def start_payment(request, pk):
    """Start a PolyBanking payment for a form entry.

    Looks up the entry's Payment, asks PolyBanking for a transaction URL
    and redirects the user there.  Re-uses the existing redirect URL if a
    payment was already started but not completed; renders an error page
    when the form is full or already paid.
    """
    api = PolyBanking(settings.POLYBANKING_SERVER, settings.POLYBANKING_ID, settings.POLYBANKING_KEY_REQUEST, settings.POLYBANKING_KEY_IPN, settings.POLYBANKING_KEY_API)
    entry = get_object_or_404(FormEntry, pk=pk)
    if not entry.form.need_payment:
        # Payments are only valid for forms configured to require one.
        raise Http404
    payment = entry.get_payment()
    error = ''
    if not entry.form.can_start_payment():
        error = 'form_full'
    elif payment.started and not payment.is_valid:
        # A transaction is already pending: resume it instead of creating
        # a second one.
        request.session["current_payment"] = payment.id
        return HttpResponseRedirect(payment.redirect_url)
    if payment.is_valid:
        error = 'payment_already_ok'
    if not error:
        # Use the first email-type field of the form as the payer contact
        # in the transaction description.
        for field in entry.form.fields.all():
            if field.field_type == EMAIL:
                email_to = payment.entry.fields.filter(field_id=field.id).first().value
        # Amount is sent to PolyBanking in centimes.
        error, url = api.new_transaction(str(entry.form.amount * 100), payment.reference(), unicode(entry.form) + " - " + unicode(email_to))
        if error != 'OK':
            return render_to_response('forms/error.html', {'error': error}, context_instance=RequestContext(request))
        else:
            payment.redirect_url = url
            payment.started = True
            payment.save()
            request.session["current_payment"] = payment.id
            return HttpResponseRedirect(url)
@csrf_exempt
def ipn(request):
    """PolyBanking instant-payment-notification endpoint.

    Verifies the IPN signature, records the payment result, and on
    success sends the confirmation emails configured on the form (to the
    payer's email field and to any copy addresses).  Always returns an
    empty 200 response once the IPN is parsed.
    """
    api = PolyBanking(settings.POLYBANKING_SERVER, settings.POLYBANKING_ID, settings.POLYBANKING_KEY_REQUEST, settings.POLYBANKING_KEY_IPN, settings.POLYBANKING_KEY_API)
    ok, message, ref, status, status_ok, date = api.check_ipn(request.POST)
    if ok:
        # Payment pk is the trailing component of the reference string.
        payment = get_object_or_404(Payment, pk=ref.split('-')[-1])
        payment.is_valid = status_ok
        payment.save()
        entry = payment.entry
        subject = payment.entry.form.final_confirmation_subject
        context = {
            "message": payment.entry.form.final_confirmation_email,
        }
        if status_ok:
            email_from = payment.entry.form.email_from or settings.DEFAULT_FROM_EMAIL
            # The payer's address comes from the form's email-type field.
            for field in payment.entry.form.fields.all():
                if field.field_type == EMAIL:
                    email_to = payment.entry.fields.filter(field_id=field.id).first().value
            if email_to and payment.entry.form.send_email:
                send_mail_template(subject, "email/form_response_paid", email_from,
                                   email_to, context)
            headers = None
            if email_to:
                # Add the email entered as a Reply-To header
                headers = {'Reply-To': email_to}
            email_copies = split_addresses(payment.entry.form.email_copies)
            if email_copies:
                send_mail_template(subject, "email/form_response_copies_paid",
                                   email_from, email_copies, context, headers=headers)
    return HttpResponse('')
def result_ok(request):
    # Landing page after a successful PolyBanking redirect: show the
    # confirmation message of the form tied to the payment that
    # start_payment() stored in the session.
    return render_to_response('forms/payment_ok.html', {'final_confirmation_message': Payment.objects.get(pk=request.session["current_payment"]).entry.form.final_confirmation_message}, context_instance=RequestContext(request))
def result_err(request):
    # Landing page when PolyBanking reports a failed/cancelled payment.
    return render_to_response('forms/payment_error.html', {}, context_instance=RequestContext(request))
| agepoly/mezzanine | mezzanine/forms/views.py | Python | bsd-2-clause | 4,648 |
# Generated by Django 3.0.7 on 2021-04-03 18:37
import django.core.validators
from django.db import migrations, models
import report.models
class Migration(migrations.Migration):
    """Auto-generated migration: adds verbose_name/help_text to the asset,
    snippet, and description fields of ReportAsset/ReportSnippet; no
    schema-level changes beyond field metadata and validators."""

    dependencies = [
        ('report', '0014_purchaseorderreport_salesorderreport'),
    ]

    operations = [
        migrations.AlterField(
            model_name='reportasset',
            name='asset',
            field=models.FileField(help_text='Report asset file', upload_to=report.models.rename_asset, verbose_name='Asset'),
        ),
        migrations.AlterField(
            model_name='reportasset',
            name='description',
            field=models.CharField(help_text='Asset file description', max_length=250, verbose_name='Description'),
        ),
        migrations.AlterField(
            model_name='reportsnippet',
            name='description',
            field=models.CharField(help_text='Snippet file description', max_length=250, verbose_name='Description'),
        ),
        migrations.AlterField(
            model_name='reportsnippet',
            name='snippet',
            field=models.FileField(help_text='Report snippet file', upload_to=report.models.rename_snippet, validators=[django.core.validators.FileExtensionValidator(allowed_extensions=['html', 'htm'])], verbose_name='Snippet'),
        ),
    ]
| inventree/InvenTree | InvenTree/report/migrations/0015_auto_20210403_1837.py | Python | mit | 1,333 |
#!/usr/bin/env python
"""
Run a jsbsim model as a child process.
"""
import atexit
import errno
import fdpexpect
import math
import os
import select
import signal
import socket
import struct
import sys
import time
import pexpect
from pymavlink import fgFDM
from .. pysim import util
class control_state(object):
    """Mutable holder for the most recent SITL control commands plus the
    home ground height used to offset reported altitude."""

    def __init__(self):
        # All control surfaces start centered/idle; ground_height is filled
        # in later from the --home altitude.
        for field in ('aileron', 'elevator', 'throttle', 'rudder',
                      'ground_height'):
            setattr(self, field, 0)
# Module-wide state shared between the SITL input handler and the JSBSim
# output handler.
sitl_state = control_state()
def interpret_address(addrstr):
    """Interpret an "IP:port" string as an (ip, int_port) tuple."""
    host, port = addrstr.split(':')
    return (host, int(port))
def jsb_set(variable, value):
    """Send a ``set`` command for *variable* to the JSBSim console."""
    global jsb_console
    jsb_console.send('set {0} {1}\r\n'.format(variable, value))
def setup_template(home):
    """Generate the JSBSim config files from their templates.

    *home* is a "lat,lng,alt,hdg" string; it seeds the aircraft reset
    position, and the SITL output port (from --simout) seeds the FG
    output and console ports (baseport+3 and baseport+4).
    """
    global opts
    v = home.split(',')
    if len(v) != 4:
        print("home should be lat,lng,alt,hdg - '%s'" % home)
        sys.exit(1)
    latitude = float(v[0])
    longitude = float(v[1])
    altitude = float(v[2])
    heading = float(v[3])
    # Remember home altitude so AGL can be converted back to AMSL later.
    sitl_state.ground_height = altitude
    template = os.path.join('aircraft', 'Rascal', 'reset_template.xml')
    reset = os.path.join('aircraft', 'Rascal', 'reset.xml')
    xml = open(template).read() % {'LATITUDE': str(latitude),
                                   'LONGITUDE': str(longitude),
                                   'HEADING': str(heading)}
    open(reset, mode='w').write(xml)
    print("Wrote %s" % reset)
    baseport = int(opts.simout.split(':')[1])
    # FG FDM output socket definition (port baseport+3).
    template = os.path.join('jsb_sim', 'fgout_template.xml')
    out = os.path.join('jsb_sim', 'fgout.xml')
    xml = open(template).read() % {'FGOUTPORT': str(baseport+3)}
    open(out, mode='w').write(xml)
    print("Wrote %s" % out)
    # JSBSim telnet console definition (port baseport+4).
    template = os.path.join('jsb_sim', 'rascal_test_template.xml')
    out = os.path.join('jsb_sim', 'rascal_test.xml')
    xml = open(template).read() % {'JSBCONSOLEPORT': str(baseport+4)}
    open(out, mode='w').write(xml)
    print("Wrote %s" % out)
def process_sitl_input(buf):
    """Process control changes from SITL sim.

    *buf* is 14 little-endian uint16s: 11 PWM channels followed by wind
    speed, direction and turbulance (each scaled by 100).  Changed
    surface commands are forwarded to JSBSim, then a 'step' is issued.
    """
    control = list(struct.unpack('<14H', buf))
    pwm = control[:11]
    (speed, direction, turbulance) = control[11:]

    global wind
    # Wind fields were sent as value*100; undo the scaling.
    wind.speed = speed*0.01
    wind.direction = direction*0.01
    wind.turbulance = turbulance*0.01

    # Map PWM (1000-2000us) to normalized commands: surfaces to [-1, 1],
    # throttle to [0, 1].
    aileron = (pwm[0]-1500)/500.0
    elevator = (pwm[1]-1500)/500.0
    throttle = (pwm[2]-1000)/1000.0
    if opts.revthr:
        throttle = 1.0 - throttle
    rudder = (pwm[3]-1500)/500.0

    if opts.elevon:
        # fake an elevon plane
        ch1 = aileron
        ch2 = elevator
        aileron = (ch2-ch1)/2.0
        # the minus does away with the need for RC2_REV=-1
        elevator = -(ch2+ch1)/2.0

    if opts.vtail:
        # fake a vtail plane
        ch1 = elevator
        ch2 = rudder
        # this matches VTAIL_OUTPUT==2
        elevator = (ch2-ch1)/2.0
        rudder = (ch2+ch1)/2.0

    # Only send commands that actually changed, then advance one step.
    buf = ''
    if aileron != sitl_state.aileron:
        buf += 'set fcs/aileron-cmd-norm %s\n' % aileron
        sitl_state.aileron = aileron
    if elevator != sitl_state.elevator:
        buf += 'set fcs/elevator-cmd-norm %s\n' % elevator
        sitl_state.elevator = elevator
    if rudder != sitl_state.rudder:
        buf += 'set fcs/rudder-cmd-norm %s\n' % rudder
        sitl_state.rudder = rudder
    if throttle != sitl_state.throttle:
        buf += 'set fcs/throttle-cmd-norm %s\n' % throttle
        sitl_state.throttle = throttle
    buf += 'step\n'
    global jsb_console
    jsb_console.send(buf)
def update_wind(wind):
    """Push the wind generator's current speed/direction into JSBSim."""
    speed, direction = wind.current()
    jsb_set('atmosphere/psiw-rad', math.radians(direction))
    # JSBSim wants feet per second; 0.3048 m per foot.
    jsb_set('atmosphere/wind-mag-fps', speed / 0.3048)
def process_jsb_input(buf, simtime):
    """Process FG FDM input from JSBSim.

    Parses the FDM packet, optionally mirrors it to FlightGear (with
    altitude rebased to AMSL and a fake RPM for the display), and sends
    the packed state to the SITL sim.  Refused connections are ignored
    since either consumer may not be running yet.
    """
    global fdm, fg_out, sim_out
    fdm.parse(buf)
    if fg_out:
        try:
            agl = fdm.get('agl', units='meters')
            # Convert AGL back to AMSL using the stored home altitude.
            fdm.set('altitude', agl+sitl_state.ground_height, units='meters')
            # Fake a prop RPM proportional to throttle for the FG model.
            fdm.set('rpm', sitl_state.throttle*1000)
            fg_out.send(fdm.pack())
        except socket.error as e:
            if e.errno not in [errno.ECONNREFUSED]:
                raise

    # Timestamp in microseconds, followed by 17 doubles and a magic word.
    timestamp = int(simtime*1.0e6)

    simbuf = struct.pack('<Q17dI',
                         timestamp,
                         fdm.get('latitude', units='degrees'),
                         fdm.get('longitude', units='degrees'),
                         fdm.get('altitude', units='meters'),
                         fdm.get('psi', units='degrees'),
                         fdm.get('v_north', units='mps'),
                         fdm.get('v_east', units='mps'),
                         fdm.get('v_down', units='mps'),
                         fdm.get('A_X_pilot', units='mpss'),
                         fdm.get('A_Y_pilot', units='mpss'),
                         fdm.get('A_Z_pilot', units='mpss'),
                         fdm.get('phidot', units='dps'),
                         fdm.get('thetadot', units='dps'),
                         fdm.get('psidot', units='dps'),
                         fdm.get('phi', units='degrees'),
                         fdm.get('theta', units='degrees'),
                         fdm.get('psi', units='degrees'),
                         fdm.get('vcas', units='mps'),
                         0x4c56414f)
    try:
        sim_out.send(simbuf)
    except socket.error as e:
        if e.errno not in [errno.ECONNREFUSED]:
            raise
################## main program ##################
from optparse import OptionParser

# Command-line interface: --home and --script are mandatory; the defaults
# match the standard SITL port layout (5501-5503 on localhost).
parser = OptionParser("runsim.py [options]")
parser.add_option("--simin", help="SITL input (IP:port)", default="127.0.0.1:5502")
parser.add_option("--simout", help="SITL output (IP:port)", default="127.0.0.1:5501")
parser.add_option("--fgout", help="FG display output (IP:port)", default="127.0.0.1:5503")
parser.add_option("--home", type='string', help="home lat,lng,alt,hdg (required)")
parser.add_option("--script", type='string', help='jsbsim model script', default='jsb_sim/rascal_test.xml')
parser.add_option("--options", type='string', help='jsbsim startup options')
parser.add_option("--elevon", action='store_true', default=False, help='assume elevon input')
parser.add_option("--revthr", action='store_true', default=False, help='reverse throttle')
parser.add_option("--vtail", action='store_true', default=False, help='assume vtail input')
parser.add_option("--wind", dest="wind", help="Simulate wind (speed,direction,turbulance)", default='0,0,0')
parser.add_option("--rate", type='int', help="Simulation rate (Hz)", default=1000)
parser.add_option("--speedup", type='float', default=1.0, help="speedup from realtime")
(opts, args) = parser.parse_args()

# Bail out early if a required option is missing.
for m in ['home', 'script']:
    if not opts.__dict__[m]:
        print("Missing required option '%s'" % m)
        parser.print_help()
        sys.exit(1)

# JSBSim templates and aircraft data live under Tools/autotest.
os.chdir(util.reltopdir('Tools/autotest'))

# kill off child when we exit
atexit.register(util.pexpect_close_all)

# Generate reset.xml / fgout.xml / rascal_test.xml from templates.
setup_template(opts.home)
# start child: run JSBSim suspended so we can attach before stepping.
cmd = "JSBSim --realtime --suspend --nice --simulation-rate=%u --logdirectivefile=jsb_sim/fgout.xml --script=%s" % (opts.rate, opts.script)
if opts.options:
    cmd += ' %s' % opts.options

jsb = pexpect.spawn(cmd, logfile=sys.stdout, timeout=10)
jsb.delaybeforesend = 0
util.pexpect_autoclose(jsb)
# Scrape the dynamically-assigned console/FDM ports from JSBSim's output.
i = jsb.expect(["Successfully bound to socket for input on port (\d+)",
                "Could not bind to socket for input"])
if i == 1:
    print("Failed to start JSBSim - is another copy running?")
    sys.exit(1)
jsb_out_address = interpret_address("127.0.0.1:%u" % int(jsb.match.group(1)))
jsb.expect("Creating UDP socket on port (\d+)")
jsb_in_address = interpret_address("127.0.0.1:%u" % int(jsb.match.group(1)))
jsb.expect("Successfully connected to socket for output")
jsb.expect("JSBSim Execution beginning")

# setup output to jsbsim: a TCP console wrapped by fdpexpect for command I/O.
print("JSBSim console on %s" % str(jsb_out_address))
jsb_out = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
jsb_out.connect(jsb_out_address)
jsb_console = fdpexpect.fdspawn(jsb_out.fileno(), logfile=sys.stdout)
jsb_console.delaybeforesend = 0

# setup input from jsbsim: a non-blocking UDP socket for FDM packets.
print("JSBSim FG FDM input on %s" % str(jsb_in_address))
jsb_in = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
jsb_in.bind(jsb_in_address)
jsb_in.setblocking(0)
# socket addresses
sim_out_address = interpret_address(opts.simout)
sim_in_address = interpret_address(opts.simin)

# setup input from SITL sim (control packets, non-blocking UDP).
sim_in = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sim_in.bind(sim_in_address)
sim_in.setblocking(0)

# setup output to SITL sim (state packets).
sim_out = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sim_out.connect(interpret_address(opts.simout))
sim_out.setblocking(0)

# setup possible output to FlightGear for display
fg_out = None
if opts.fgout:
    fg_out = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    fg_out.connect(interpret_address(opts.fgout))

# setup wind generator
wind = util.Wind(opts.wind)

fdm = fgFDM.fgFDM()

# Unsuspend JSBSim, wait for trim to finish, then take the first step.
jsb_console.send('info\n')
jsb_console.send('resume\n')
jsb.expect(["trim computation time", "Trim Results"])
time.sleep(1.5)
jsb_console.send('step\n')
# Stop echoing console traffic now that startup is complete.
jsb_console.logfile = None

print("Simulator ready to fly")
def main_loop():
    """Run main loop.

    Multiplexes four file descriptors with select(): FDM packets from
    JSBSim (forwarded to SITL), control packets from SITL (forwarded to
    JSBSim), plus the two pexpect children for draining console output.
    Also updates wind, prints a status line every 3s, and paces frames to
    approximate the requested rate*speedup.
    """
    tnow = time.time()
    last_report = tnow
    last_sim_input = tnow
    last_wind_update = tnow
    frame_count = 0
    paused = False

    # One simulation step per frame at the configured rate.
    simstep = 1.0/opts.rate
    simtime = simstep
    frame_time = 1.0/opts.rate
    scaled_frame_time = frame_time/opts.speedup
    last_wall_time = time.time()
    achieved_rate = opts.speedup

    while True:
        new_frame = False
        rin = [jsb_in.fileno(), sim_in.fileno(), jsb_console.fileno(), jsb.fileno()]
        try:
            (rin, win, xin) = select.select(rin, [], [], 1.0)
        except select.error:
            # Interrupted select: make sure our parent is still alive.
            util.check_parent()
            continue

        tnow = time.time()

        if jsb_in.fileno() in rin:
            buf = jsb_in.recv(fdm.packet_size())
            process_jsb_input(buf, simtime)
            frame_count += 1
            new_frame = True

        if sim_in.fileno() in rin:
            simbuf = sim_in.recv(28)
            process_sitl_input(simbuf)
            simtime += simstep
            last_sim_input = tnow

        # show any jsbsim console output
        if jsb_console.fileno() in rin:
            util.pexpect_drain(jsb_console)
        if jsb.fileno() in rin:
            util.pexpect_drain(jsb)

        # only simulate wind above 5 meters, to prevent crashes while
        # waiting for takeoff
        if tnow - last_wind_update > 0.1:
            update_wind(wind)
            last_wind_update = tnow

        if tnow - last_report > 3:
            print("FPS %u asl=%.1f agl=%.1f roll=%.1f pitch=%.1f a=(%.2f %.2f %.2f) AR=%.1f" % (
                frame_count / (time.time() - last_report),
                fdm.get('altitude', units='meters'),
                fdm.get('agl', units='meters'),
                fdm.get('phi', units='degrees'),
                fdm.get('theta', units='degrees'),
                fdm.get('A_X_pilot', units='mpss'),
                fdm.get('A_Y_pilot', units='mpss'),
                fdm.get('A_Z_pilot', units='mpss'),
                achieved_rate))

            frame_count = 0
            last_report = time.time()

        if new_frame:
            now = time.time()
            if now < last_wall_time + scaled_frame_time:
                # NOTE(review): dt is computed but unused -- the sleep on
                # the next line uses the same expression directly.
                dt = last_wall_time+scaled_frame_time - now
                time.sleep(last_wall_time+scaled_frame_time - now)
                now = time.time()

            if now > last_wall_time and now - last_wall_time < 0.1:
                rate = 1.0/(now - last_wall_time)
                # Exponential moving average of the achieved frame rate,
                # nudging the frame time to converge on rate*speedup.
                achieved_rate = (0.98*achieved_rate) + (0.02*rate)
                if achieved_rate < opts.rate*opts.speedup:
                    scaled_frame_time *= 0.999
                else:
                    scaled_frame_time *= 1.001

            last_wall_time = now
def exit_handler():
    """Tear down the simulation: kill JSBSim and exit with status 1."""
    print("running exit handler")
    # Ignore further termination signals while cleanup is in progress.
    for signum in (signal.SIGINT, signal.SIGTERM):
        signal.signal(signum, signal.SIG_IGN)
    # JSBSim really doesn't like to die ...
    pid = getattr(jsb, 'pid', None)
    if pid is not None:
        os.kill(pid, signal.SIGKILL)
    jsb_console.send('quit\n')
    jsb.close(force=True)
    util.pexpect_close_all()
    sys.exit(1)
# Make sure child processes are cleaned up on Ctrl-C or kill.
signal.signal(signal.SIGINT, exit_handler)
signal.signal(signal.SIGTERM, exit_handler)
try:
    main_loop()
except Exception as ex:
    # Report the failure and tear everything down.
    print(ex)
    exit_handler()
    # NOTE(review): exit_handler() calls sys.exit(1), so this raise is
    # normally unreachable; it only fires if exit_handler itself fails early.
    raise
| WickedShell/ardupilot | Tools/autotest/jsb_sim/runsim.py | Python | gpl-3.0 | 12,739 |
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
    def deleteDuplicates(self, head):
        """
        Remove every value that appears more than once in a sorted linked
        list, keeping only values that occur exactly once.

        :type head: ListNode
        :rtype: ListNode
        """
        if head is None:
            return None
        # Dummy node so the original head can be deleted uniformly.
        dummy = ListNode(0)
        dummy.next = head
        node = dummy
        while node.next:
            if node.next.next and node.next.val == node.next.next.val:
                # Skip the entire run of nodes sharing this duplicated value.
                duplicate = node.next.val
                temp = node.next
                while temp and temp.val == duplicate:
                    temp = temp.next
                node.next = temp
            else:
                node = node.next
        return dummy.next
| hufeiya/leetcode | python/82_Remove_Duplicates_from_Sorted_List_II.py | Python | gpl-2.0 | 790 |
from flask import Blueprint
import os
# Blueprint for the site's main pages; static files are served from an absolute path.
main = Blueprint('main', __name__, static_folder="/app/app/static")
# Imported after 'main' exists so the view/error modules can register routes on it.
from . import views, errors
from ..models import Permission
@main.app_context_processor
def inject_permissions():
    """Expose the Permission class to every rendered template."""
    return {'Permission': Permission}
| adamwwt/chvac | app/main/__init__.py | Python | mit | 263 |
"""
Overview
========
Used to spawn new processes, this module works only on unix-like
platforms.
Commands
========
Command: hbash
Description: Start a new bash process whose output
is directed to a horizontal pane.
Command: vbash
Description: Start a new bash process whose output
is directed to a vertical pane.
Command: vpy
Description: Start a python interpreter process
in a vertical pane.
Command: hpy
Description: Start a python interpreter process
in a horizontal pane.
Command: vrb
Description: Start a ruby interpreter process
in a vertical pane.
Command: hrb
Description: Start a ruby interpreter process
in a horizontal pane.
Notes
=====
**Run python from your bash process**
tee -i >(stdbuf -o 0 python -i -u)
**Run ruby from your bash process**
stdbuf -o 0 irb --inf-ruby-mode
The above commands could be slightly modified
to work with other interpreters.
"""
from untwisted.iofile import Stdout, Stdin, LOAD, CLOSE
from untwisted.network import Device
from vyapp.plugins import ENV
from vyapp.ask import Ask
from subprocess import Popen, PIPE, STDOUT
from os import environ, setsid, killpg
from vyapp.plugins.spawn.base_spawn import BaseSpawn
from vyapp.areavi import AreaVi
from vyapp.app import root
from os import environ
class Spawn(BaseSpawn):
    """Run *cmd* in its own process group and wire its stdio to the editor."""

    def __init__(self, cmd):
        # setsid puts the child in a new process group, so dump_signal can
        # deliver signals to the whole group via killpg.
        self.child = Popen(cmd,
        shell=True, stdout=PIPE, stdin=PIPE, preexec_fn=setsid,
        stderr=STDOUT, env=environ)
        self.stdout = Device(self.child.stdout)
        self.stdin = Device(self.child.stdin)

    def install_events(self):
        """Install untwisted handlers for the child's stdio devices."""
        super(Spawn, self).install_events()
        Stdout(self.stdout)
        Stdin(self.stdin)
        # Child output is appended directly into the output AreaVi.
        self.stdout.add_map(LOAD, lambda con, data: \
        self.output.append(data))
        self.stdin.add_map(CLOSE, self.handle_close)
        self.stdout.add_map(CLOSE, self.handle_close)

    def dump_signal(self, num):
        # Signal the whole process group (the child may spawn its own children).
        killpg(self.child.pid, num)

    def terminate_process(self):
        self.child.kill()
        root.status.set_msg('Killed process!')

    def dump_line(self):
        # Send the current input line to the child's stdin and move down.
        data = self.input.get('insert linestart', 'insert +1l linestart')
        data = data.encode(self.input.charset)
        self.stdin.dump(data)
        self.input.down()

    def handle_close(self, dev, err):
        """Tear down both stdio devices once either side closes."""
        root.status.set_msg('Killed process!')
        self.stdout.destroy()
        self.stdin.destroy()
class HSpawn(Spawn):
    """Spawn whose process output goes to a new horizontal pane."""

    def __init__(self, cmd):
        # Start the child process, then bind it to a freshly created
        # horizontal pane of the active AreaVi.
        super(HSpawn, self).__init__(cmd)
        BaseSpawn.__init__(self, cmd, AreaVi.ACTIVE,
                           AreaVi.ACTIVE.master.master.create())
class VSpawn(Spawn):
    """Spawn whose process output goes to a new vertical pane."""

    def __init__(self, cmd):
        # Start the child process, then bind it to a freshly created
        # vertical pane of the active AreaVi.
        super(VSpawn, self).__init__(cmd)
        BaseSpawn.__init__(self, cmd, AreaVi.ACTIVE,
                           AreaVi.ACTIVE.master.master.master.create())
# Expose the spawn commands in the editor's command environment.
ENV['hspawn'] = HSpawn
ENV['vspawn'] = VSpawn
ENV['vbash'] = lambda : VSpawn('bash -i')
ENV['hbash'] = lambda : HSpawn('bash -i')
# Interpreters run under bash with unbuffered output (see module docstring).
ENV['hpy'] = lambda : HSpawn('bash -c "tee -i >(stdbuf -o 0 python -i -u)"')
ENV['vpy'] = lambda : VSpawn('bash -c "tee -i >(stdbuf -o 0 python -i -u)"')
ENV['hrb'] = lambda : HSpawn('bash -c "stdbuf -o 0 irb --inf-ruby-mode"')
ENV['vrb'] = lambda : VSpawn('bash -c "stdbuf -o 0 irb --inf-ruby-mode"')
| iogf/vy | vyapp/plugins/spawn/unix_platform.py | Python | mit | 3,231 |
# Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Schema fragment merged into the server-create request body:
# user_data must be a base64-encoded string.
server_create = {
    'user_data': {
        'type': 'string',
        'format': 'base64'
    },
}

# Legacy v2.0 variant: additionally accepts an explicit null user_data.
server_create_v20 = {
    'user_data': {
        'oneOf': [
            {'type': 'string', 'format': 'base64'},
            {'type': 'null'},
        ],
    },
}
| hanlind/nova | nova/api/openstack/compute/schemas/user_data.py | Python | apache-2.0 | 894 |
from abc import ABC, abstractmethod
from asyncio import Future, ensure_future
from collections import OrderedDict
from pulsar.api import chain_future, ImproperlyConfigured
from pulsar.apps.http import HttpClient
from pulsar.apps.greenio import GreenPool
from pulsar.apps.data.channels import Connector
from pulsar.utils.importer import module_attribute
from .utils.serializers import serializers
class MessageFuture(Future):
    """Future bound to a queued message, identified by ``message_id``."""

    def __init__(self, message_id, backend, *, loop=None):
        super().__init__(loop=loop)
        self.message_id = message_id
        self.backend = backend

    def wait(self):     # pragma    nocover
        """Synchronously wait for the result (the loop must not be running)."""
        assert not self._loop.is_running(), 'cannot wait if loop is running'
        return self._loop.run_until_complete(_wait(self))

    def _repr_info(self):
        # Extend the base repr fields with the message identifier.
        return super()._repr_info() + ['ID=%s' % self.message_id]
async def _wait(task_future):
await task_future.backend.channels.connect()
result = await task_future
return result
class BaseComponent:
    """Give a component access to the backend, its logger, config and loop."""

    def __init__(self, backend):
        self.backend = backend
        self.logger = backend.logger

    @property
    def cfg(self):
        """Backend configuration."""
        return self.backend.cfg

    @property
    def _loop(self):
        """Backend event loop."""
        return self.backend._loop

    def encode(self, message, serializer=None):
        """Encode a message with *serializer* (defaults to the configured one)."""
        name = serializer or self.cfg.message_serializer
        return serializers[name].encode(message)

    def decode(self, data, serializer=None):
        """Decode a message with *serializer* (defaults to the configured one)."""
        name = serializer or self.cfg.message_serializer
        return serializers[name].decode(data)
class Manager(BaseComponent):
    """Default component manager: pools, HTTP client and queue names."""

    def green_pool(self):
        """Create the greenlet pool bound to the backend loop."""
        return GreenPool(loop=self._loop)

    def http(self):
        """Create the HTTP client bound to the backend loop."""
        return HttpClient(loop=self._loop)

    def queues(self):
        """List of queue names for Message consumers
        """
        # The node's own queue first, then the configured task queues.
        return [self.backend.node_name] + list(self.cfg.task_queues)

    async def store_message(self, message):
        """Dummy function to store a message into a persistent database
        """
        pass

    def start(self):
        """Optional start method, called by the backend when it starts
        """
        pass

    def close(self):
        pass
class MQ(BaseComponent, Connector, ABC):
    """Interface class for a distributed message queue
    """
    def __init__(self, backend, store, namespace=None):
        super().__init__(backend)
        Connector.__init__(self, store, namespace=namespace)
        self.store = store
        # message_id -> MessageFuture resolved when the message is delivered.
        self.queued_messages = {}

    def __repr__(self):
        return 'message-broker - %s' % self.store

    def queue(self, message, callback=True):
        '''Queue the ``message``.

        If callback is True (default) returns a Future
        called back once the message is delivered,
        otherwise return a future called back once the messaged is queued
        '''
        future_done = MessageFuture(message.id, self.backend, loop=self._loop)
        if message.queue:
            self.queued_messages[message.id] = future_done
        else:  # the task is not queued instead it is executed immediately
            coro = self.backend.execute(message)
            return chain_future(coro, next=future_done)
        # queue the message
        coro = self._queue_message(message, future_done)
        if callback:
            ensure_future(coro, loop=self._loop)
            return future_done
        else:
            # Resolve once the message is *queued*, not once it is done.
            future = MessageFuture(message.id, self.backend, loop=self._loop)
            return chain_future(coro, next=future)

    @abstractmethod
    async def size(self, *queues):  # pragma    nocover
        '''Asynchronously retrieve the size of queues

        :return: the list of sizes
        '''
        pass

    @abstractmethod
    async def get_message(self, *queues):  # pragma    nocover
        '''Asynchronously retrieve a :class:`.Task` from queues

        :return: a :class:`.Task` or ``None``.
        '''
        pass

    @abstractmethod
    async def flush_queues(self, *queues):  # pragma    nocover
        '''Clear a list of task queues
        '''
        pass

    @abstractmethod
    async def queue_message(self, queue, message):  # pragma    nocover
        """Add a message to the ``queue``
        """
        pass

    @abstractmethod
    async def incr(self, name):
        """Increase the counter for name
        """
        pass

    @abstractmethod
    async def decr(self, name):
        """Decrease the counter for name
        """
        pass

    # INTERNALS
    async def _queue_message(self, message, future):
        '''Asynchronously queue a task
        '''
        await self.backend.publish('queued', message)
        try:
            await self.queue_message(message.queue, self.encode(message))
        except ConnectionRefusedError:
            # Best effort: log and drop rather than crash the producer.
            self.logger.critical('Could not queue task - connection error')
        else:
            self.logger.debug('%s in "%s"', message.lazy_info(), message.queue)
        message.done_callback = future
        return message
def register_broker(name, factory=None):
    """Register a broker *factory* under *name*, or look one up by name."""
    if factory is not None:
        # Explicit registration.
        brokers[name] = factory
        return factory
    # Lookup: resolve the registered dotted path to a callable.
    dotted_path = brokers.get(name)
    if not dotted_path:
        raise ImproperlyConfigured('No such message broker: %s' % name)
    factory = module_attribute(dotted_path, safe=True)
    if not factory:
        raise ImproperlyConfigured(
            '"%s" store not available' % dotted_path)
    return factory


# Registry of known broker factories / dotted paths, in insertion order.
brokers = OrderedDict()
| quantmind/pulsar-queue | pq/mq.py | Python | bsd-3-clause | 5,657 |
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db import api as oslo_db_api
from sqlalchemy.ext.hybrid import hybrid_property
from keystone.common import driver_hints
from keystone.common import sql
from keystone.credential.backends import base
from keystone import exception
class CredentialModel(sql.ModelBase, sql.ModelDictMixinWithExtras):
    # ORM model for a stored (encrypted) credential.
    __tablename__ = 'credential'
    attributes = [
        'id', 'user_id', 'project_id', 'encrypted_blob', 'type', 'key_hash'
    ]
    id = sql.Column(sql.String(64), primary_key=True)
    user_id = sql.Column(sql.String(64),
                         nullable=False)
    # Nullable: not every credential is project-scoped.
    project_id = sql.Column(sql.String(64))
    _encrypted_blob = sql.Column('encrypted_blob', sql.Text(), nullable=True)
    type = sql.Column(sql.String(255), nullable=False)
    # NOTE(review): presumably identifies the key used to encrypt the blob --
    # verify against the encryption provider before relying on this.
    key_hash = sql.Column(sql.String(64), nullable=True)
    extra = sql.Column(sql.JsonBlob())

    @hybrid_property
    def encrypted_blob(self):
        return self._encrypted_blob

    @encrypted_blob.setter
    def encrypted_blob(self, encrypted_blob):
        # Make sure to hand over the encrypted credential as a string value
        # to the backend driver to avoid the sql drivers (esp. psycopg2)
        # treating this as binary data and e.g. hex-escape it.
        if isinstance(encrypted_blob, bytes):
            encrypted_blob = encrypted_blob.decode('utf-8')
        self._encrypted_blob = encrypted_blob
class Credential(base.CredentialDriverBase):
    """SQL backend implementing credential CRUD."""

    # credential crud

    @sql.handle_conflicts(conflict_type='credential')
    def create_credential(self, credential_id, credential):
        """Persist a new credential and return it as a dict."""
        with sql.session_for_write() as session:
            model = CredentialModel.from_dict(credential)
            session.add(model)
            return model.to_dict()

    @driver_hints.truncated
    def list_credentials(self, hints):
        """Return all credentials, honoring the filter/limit hints."""
        with sql.session_for_read() as session:
            query = sql.filter_limit_query(
                CredentialModel, session.query(CredentialModel), hints)
            return [model.to_dict() for model in query]

    def list_credentials_for_user(self, user_id, type=None):
        """Return the credentials owned by *user_id*, optionally by type."""
        with sql.session_for_read() as session:
            query = session.query(CredentialModel).filter_by(user_id=user_id)
            if type:
                query = query.filter_by(type=type)
            return [model.to_dict() for model in query.all()]

    def _get_credential(self, session, credential_id):
        """Fetch a credential row or raise CredentialNotFound."""
        model = session.query(CredentialModel).get(credential_id)
        if model is None:
            raise exception.CredentialNotFound(credential_id=credential_id)
        return model

    def get_credential(self, credential_id):
        """Return a single credential as a dict."""
        with sql.session_for_read() as session:
            return self._get_credential(session, credential_id).to_dict()

    @sql.handle_conflicts(conflict_type='credential')
    def update_credential(self, credential_id, credential):
        """Overlay *credential* on the stored values and return the result."""
        with sql.session_for_write() as session:
            ref = self._get_credential(session, credential_id)
            merged = ref.to_dict()
            merged.update(credential)
            new_model = CredentialModel.from_dict(merged)
            # Copy every mapped attribute except the immutable primary key.
            for attr in CredentialModel.attributes:
                if attr != 'id':
                    setattr(ref, attr, getattr(new_model, attr))
            ref.extra = new_model.extra
            return ref.to_dict()

    def delete_credential(self, credential_id):
        """Delete one credential by id."""
        with sql.session_for_write() as session:
            session.delete(self._get_credential(session, credential_id))

    def delete_credentials_for_project(self, project_id):
        """Bulk-delete every credential scoped to *project_id*."""
        with sql.session_for_write() as session:
            session.query(CredentialModel).filter_by(
                project_id=project_id).delete()

    @oslo_db_api.wrap_db_retry(retry_on_deadlock=True)
    def delete_credentials_for_user(self, user_id):
        """Bulk-delete every credential owned by *user_id* (retries deadlocks)."""
        with sql.session_for_write() as session:
            session.query(CredentialModel).filter_by(user_id=user_id).delete()
| openstack/keystone | keystone/credential/backends/sql.py | Python | apache-2.0 | 4,819 |
#!/usr/bin/env python
import sys, re, operator, string, inspect
def read_stop_words():
    """ This function can only be called from a function
    named extract_words."""
    # Meta-level data: inspect.stack()
    # Style constraint of this exercise: inspect the call stack and refuse
    # to run unless the immediate caller is named 'extract_words'.
    if inspect.stack()[1][3] != 'extract_words':
        return None
    with open('../stop_words.txt') as f:
        stop_words = f.read().split(',')
    # Single letters count as stop words too.
    stop_words.extend(list(string.ascii_lowercase))
    return stop_words
def extract_words(path_to_file):
    # Meta-level data: locals() -- arguments are read through the frame's
    # locals mapping instead of by name (the point of this style exercise).
    with open(locals()['path_to_file']) as f:
        str_data = f.read()
    # NOTE(review): pattern should ideally be a raw string (r'[\W_]+').
    pattern = re.compile('[\W_]+')
    word_list = pattern.sub(' ', str_data).lower().split()
    stop_words = read_stop_words()
    return [w for w in word_list if not w in stop_words]
def frequencies(word_list):
    # Meta-level data: locals() -- the argument is read through the frame's
    # locals mapping instead of by name (the point of this style exercise).
    word_freqs = {}
    for w in locals()['word_list']:
        if w in word_freqs:
            word_freqs[w] += 1
        else:
            word_freqs[w] = 1
    return word_freqs
def sort(word_freq):
    # Meta-level data: locals(). Returns (word, count) pairs by decreasing
    # count; iteritems() makes this Python 2 only.
    return sorted(locals()['word_freq'].iteritems(), key=operator.itemgetter(1), reverse=True)
def main():
    # Tie the phases together and print the 25 most frequent words.
    word_freqs = sort(frequencies(extract_words(sys.argv[1])))
    for (w, c) in word_freqs[0:25]:
        print w, ' - ', c

if __name__ == "__main__":
    main()
| mathkann/exercises-in-programming-style | 16-introspective/tf-16.py | Python | mit | 1,338 |
from yanntricks import *
def KScolorD():
    # Draw a unit circle with a wavy NW arc, a plain SE arc and the two poles.
    pspict,fig = SinglePicture("KScolorD")
    pspict.dilatation(1)

    # NOTE(review): 'x' appears unused -- confirm before removing.
    x=var('x')
    C=Circle(Point(0,0),1)
    # Arcs over the 90-180 and 270-360 degree ranges.
    N1=C.graph(90,180)
    N2=C.graph(270,360)
    C.parameters.color="blue"
    N1.parameters.color="black"
    N2.parameters.color=N1.parameters.color
    # Only the NW arc is drawn wavy; the SE one stays smooth.
    N1.wave(0.1,0.2)
    #N2.wave(0.1,0.2)
    N=Point(0,1)
    S=Point(0,-1)
    pspict.axes.no_graduation()
    pspict.DrawGraphs(C,N1,N2,N,S)
    pspict.DrawDefaultAxes()
    fig.conclude()
    fig.write_the_file()
| LaurentClaessens/mazhe | src_yanntricks/yanntricksKScolorD.py | Python | gpl-3.0 | 526 |
#!/usr/bin/env python
# coding: utf8
from setuptools import setup, find_packages
# Get long_description from README
import os
here = os.path.dirname(os.path.abspath(__file__))
# Use a context manager so the file is closed even if read() raises.
with open(os.path.join(here, 'README.rst')) as f:
    long_description = f.read().strip()
setup(
    name='onkyo-eiscp',
    version='1.2.8',
    url='https://github.com/miracle2k/onkyo-eiscp',
    license='MIT',
    author='Michael Elsdörfer',
    author_email='[email protected]',
    description='Control Onkyo receivers over ethernet.',
    long_description=long_description,
    packages = find_packages(exclude=('tests*',)),
    # Installs an "onkyo" console script pointing at eiscp.script:run.
    entry_points="""[console_scripts]\nonkyo = eiscp.script:run\n""",
    install_requires=['docopt>=0.4.1', 'netifaces', 'xmltodict>=0.12.0'],
    platforms='any',
    classifiers=[
        'Topic :: System :: Networking',
        'Topic :: Games/Entertainment',
        'Topic :: Multimedia',
        'Intended Audience :: Developers',
        'Intended Audience :: End Users/Desktop',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
    ]
)
| miracle2k/onkyo-eiscp | setup.py | Python | mit | 1,149 |
"""
Tests for line search routines
"""
from __future__ import division, print_function, absolute_import
from numpy.testing import assert_, assert_equal, \
assert_array_almost_equal, assert_array_almost_equal_nulp, assert_warns
from scipy._lib._numpy_compat import suppress_warnings
import scipy.optimize.linesearch as ls
from scipy.optimize.linesearch import LineSearchWarning
import numpy as np
def assert_wolfe(s, phi, derphi, c1=1e-4, c2=0.9, err_msg=""):
    """
    Check that strong Wolfe conditions apply
    """
    phi0, derphi0 = phi(0), derphi(0)
    phi1, derphi1 = phi(s), derphi(s)
    msg = ("s = %s; phi(0) = %s; phi(s) = %s; phi'(0) = %s; phi'(s) = %s; %s"
           % (s, phi0, phi1, derphi0, derphi1, err_msg))

    # Sufficient decrease (Wolfe 1) and curvature (strong Wolfe 2).
    assert_(phi1 <= phi0 + c1*s*derphi0, "Wolfe 1 failed: " + msg)
    assert_(abs(derphi1) <= abs(c2*derphi0), "Wolfe 2 failed: " + msg)
def assert_armijo(s, phi, c1=1e-4, err_msg=""):
    """
    Check that Armijo condition applies
    """
    phi0, phi1 = phi(0), phi(s)
    msg = "s = %s; phi(0) = %s; phi(s) = %s; %s" % (s, phi0, phi1, err_msg)
    # Sufficient decrease relative to phi(0).
    assert_(phi1 <= (1 - c1*s)*phi0, msg)
def assert_line_wolfe(x, p, s, f, fprime, **kw):
    """Check the strong Wolfe conditions for f along the line x + s*p."""
    def phi(sp):
        return f(x + p*sp)

    def derphi(sp):
        # Directional derivative along p.
        return np.dot(fprime(x + p*sp), p)

    assert_wolfe(s, phi=phi, derphi=derphi, **kw)
def assert_line_armijo(x, p, s, f, **kw):
    """Check the Armijo condition for f along the line x + s*p."""
    def phi(sp):
        return f(x + p*sp)

    assert_armijo(s, phi=phi, **kw)
def assert_fp_equal(x, y, err_msg="", nulp=50):
    """Assert two arrays are equal, up to some floating-point rounding error"""
    try:
        assert_array_almost_equal_nulp(x, y, nulp)
    except AssertionError as e:
        # Re-raise with the caller's context appended to the message.
        raise AssertionError("{}\n{}".format(e, err_msg))
class TestLineSearch(object):
    """Tests for the scalar and vector line-search routines."""

    # -- scalar functions; must have dphi(0.) < 0
    def _scalar_func_1(self, s):
        self.fcount += 1
        p = -s - s**3 + s**4
        dp = -1 - 3*s**2 + 4*s**3
        return p, dp

    def _scalar_func_2(self, s):
        self.fcount += 1
        p = np.exp(-4*s) + s**2
        dp = -4*np.exp(-4*s) + 2*s
        return p, dp

    def _scalar_func_3(self, s):
        self.fcount += 1
        p = -np.sin(10*s)
        dp = -10*np.cos(10*s)
        return p, dp

    # -- n-d functions
    def _line_func_1(self, x):
        self.fcount += 1
        f = np.dot(x, x)
        df = 2*x
        return f, df

    def _line_func_2(self, x):
        self.fcount += 1
        f = np.dot(x, np.dot(self.A, x)) + 1
        df = np.dot(self.A + self.A.T, x)
        return f, df

    # --
    def setup_method(self):
        self.scalar_funcs = []
        self.line_funcs = []
        self.N = 20
        self.fcount = 0

        def bind_index(func, idx):
            # Remember Python's closure semantics!
            return lambda *a, **kw: func(*a, **kw)[idx]

        # Collect the _scalar_func_* / _line_func_* methods defined above
        # into (name, value-function, derivative-function) triples.
        for name in sorted(dir(self)):
            if name.startswith('_scalar_func_'):
                value = getattr(self, name)
                self.scalar_funcs.append(
                    (name, bind_index(value, 0), bind_index(value, 1)))
            elif name.startswith('_line_func_'):
                value = getattr(self, name)
                self.line_funcs.append(
                    (name, bind_index(value, 0), bind_index(value, 1)))

        np.random.seed(1234)
        self.A = np.random.randn(self.N, self.N)

    def scalar_iter(self):
        for name, phi, derphi in self.scalar_funcs:
            for old_phi0 in np.random.randn(3):
                yield name, phi, derphi, old_phi0

    def line_iter(self):
        for name, f, fprime in self.line_funcs:
            k = 0
            while k < 9:
                x = np.random.randn(self.N)
                p = np.random.randn(self.N)
                if np.dot(p, fprime(x)) >= 0:
                    # always pick a descent direction
                    continue
                k += 1
                old_fv = float(np.random.randn())
                yield name, f, fprime, x, p, old_fv

    # -- Generic scalar searches
    def test_scalar_search_wolfe1(self):
        c = 0
        for name, phi, derphi, old_phi0 in self.scalar_iter():
            c += 1
            s, phi1, phi0 = ls.scalar_search_wolfe1(phi, derphi, phi(0),
                                                    old_phi0, derphi(0))
            assert_fp_equal(phi0, phi(0), name)
            assert_fp_equal(phi1, phi(s), name)
            assert_wolfe(s, phi, derphi, err_msg=name)

        assert_(c > 3)  # check that the iterator really works...

    def test_scalar_search_wolfe2(self):
        for name, phi, derphi, old_phi0 in self.scalar_iter():
            s, phi1, phi0, derphi1 = ls.scalar_search_wolfe2(
                phi, derphi, phi(0), old_phi0, derphi(0))
            assert_fp_equal(phi0, phi(0), name)
            assert_fp_equal(phi1, phi(s), name)
            if derphi1 is not None:
                assert_fp_equal(derphi1, derphi(s), name)
            assert_wolfe(s, phi, derphi, err_msg="%s %g" % (name, old_phi0))

    def test_scalar_search_armijo(self):
        for name, phi, derphi, old_phi0 in self.scalar_iter():
            s, phi1 = ls.scalar_search_armijo(phi, phi(0), derphi(0))
            assert_fp_equal(phi1, phi(s), name)
            assert_armijo(s, phi, err_msg="%s %g" % (name, old_phi0))

    # -- Generic line searches
    def test_line_search_wolfe1(self):
        c = 0
        smax = 100
        for name, f, fprime, x, p, old_f in self.line_iter():
            f0 = f(x)
            g0 = fprime(x)
            self.fcount = 0
            s, fc, gc, fv, ofv, gv = ls.line_search_wolfe1(f, fprime, x, p,
                                                           g0, f0, old_f,
                                                           amax=smax)
            # fc + gc must match the actual number of evaluations counted.
            assert_equal(self.fcount, fc+gc)
            assert_fp_equal(ofv, f(x))
            if s is None:
                continue
            assert_fp_equal(fv, f(x + s*p))
            assert_array_almost_equal(gv, fprime(x + s*p), decimal=14)
            if s < smax:
                c += 1
                assert_line_wolfe(x, p, s, f, fprime, err_msg=name)

        assert_(c > 3)  # check that the iterator really works...

    def test_line_search_wolfe2(self):
        c = 0
        smax = 512
        for name, f, fprime, x, p, old_f in self.line_iter():
            f0 = f(x)
            g0 = fprime(x)
            self.fcount = 0
            # Non-convergence warnings are expected for some inputs.
            with suppress_warnings() as sup:
                sup.filter(LineSearchWarning,
                           "The line search algorithm could not find a solution")
                sup.filter(LineSearchWarning,
                           "The line search algorithm did not converge")
                s, fc, gc, fv, ofv, gv = ls.line_search_wolfe2(f, fprime, x, p,
                                                               g0, f0, old_f,
                                                               amax=smax)
            assert_equal(self.fcount, fc+gc)
            assert_fp_equal(ofv, f(x))
            assert_fp_equal(fv, f(x + s*p))
            if gv is not None:
                assert_array_almost_equal(gv, fprime(x + s*p), decimal=14)
            if s < smax:
                c += 1
                assert_line_wolfe(x, p, s, f, fprime, err_msg=name)
        assert_(c > 3)  # check that the iterator really works...

    def test_line_search_wolfe2_bounds(self):
        # See gh-7475

        # For this f and p, starting at a point on axis 0, the strong Wolfe
        # condition 2 is met if and only if the step length s satisfies
        # |x + s| <= c2 * |x|

        f = lambda x: np.dot(x, x)
        fp = lambda x: 2 * x
        p = np.array([1, 0])

        # Smallest s satisfying strong Wolfe conditions for these arguments is 30
        x = -60 * p
        c2 = 0.5

        s, _, _, _, _, _ = ls.line_search_wolfe2(f, fp, x, p, amax=30, c2=c2)
        assert_line_wolfe(x, p, s, f, fp)

        s, _, _, _, _, _ = assert_warns(LineSearchWarning,
                                        ls.line_search_wolfe2, f, fp, x, p,
                                        amax=29, c2=c2)
        assert_(s is None)

        # s=30 will only be tried on the 6th iteration, so this won't converge
        assert_warns(LineSearchWarning, ls.line_search_wolfe2, f, fp, x, p,
                     c2=c2, maxiter=5)

    def test_line_search_armijo(self):
        c = 0
        for name, f, fprime, x, p, old_f in self.line_iter():
            f0 = f(x)
            g0 = fprime(x)
            self.fcount = 0
            s, fc, fv = ls.line_search_armijo(f, x, p, g0, f0)
            c += 1
            assert_equal(self.fcount, fc)
            assert_fp_equal(fv, f(x + s*p))
            assert_line_armijo(x, p, s, f, err_msg=name)
        assert_(c >= 9)

    # -- More specific tests
    def test_armijo_terminate_1(self):
        # Armijo should evaluate the function only once if the trial step
        # is already suitable
        count = [0]

        def phi(s):
            count[0] += 1
            return -s + 0.01*s**2

        s, phi1 = ls.scalar_search_armijo(phi, phi(0), -1, alpha0=1)
        assert_equal(s, 1)
        assert_equal(count[0], 2)
        assert_armijo(s, phi)

    def test_wolfe_terminate(self):
        # wolfe1 and wolfe2 should also evaluate the function only a few
        # times if the trial step is already suitable
        def phi(s):
            count[0] += 1
            return -s + 0.05*s**2

        def derphi(s):
            count[0] += 1
            return -1 + 0.05*2*s

        for func in [ls.scalar_search_wolfe1, ls.scalar_search_wolfe2]:
            count = [0]
            r = func(phi, derphi, phi(0), None, derphi(0))
            assert_(r[0] is not None, (r, func))
            assert_(count[0] <= 2 + 2, (count, func))
            assert_wolfe(r[0], phi, derphi, err_msg=str(func))
| mbayon/TFG-MachineLearning | venv/lib/python3.6/site-packages/scipy/optimize/tests/test_linesearch.py | Python | mit | 9,849 |
# coding=utf-8
__author__ = "Gina Häußge <[email protected]>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
from flask import request, jsonify, make_response
from octoprint.settings import settings
from octoprint.printer import getConnectionOptions
from octoprint.server import printer, restricted_access, NO_CONTENT
from octoprint.server.api import api
import octoprint.util as util
@api.route("/connection", methods=["GET"])
def connectionState():
    """Report the current printer connection plus the selectable options."""
    state, port, baudrate = printer.getCurrentConnection()
    return jsonify({
        "current": {
            "state": state,
            "port": port,
            "baudrate": baudrate
        },
        "options": getConnectionOptions()
    })
@api.route("/connection", methods=["POST"])
@restricted_access
def connectionCommand():
    """Handle connect/disconnect commands posted to /connection."""
    valid_commands = {
        "connect": ["autoconnect"],
        "disconnect": []
    }

    command, data, response = util.getJsonCommandFromRequest(request, valid_commands)
    if response is not None:
        return response

    if command == "disconnect":
        printer.disconnect()
        return NO_CONTENT

    if command == "connect":
        options = getConnectionOptions()

        # Validate port/baudrate only when the caller supplied them.
        port = None
        if "port" in data:
            port = data["port"]
            if port not in options["ports"]:
                return make_response("Invalid port: %s" % port, 400)

        baudrate = None
        if "baudrate" in data:
            baudrate = data["baudrate"]
            if baudrate not in options["baudrates"]:
                return make_response("Invalid baudrate: %d" % baudrate, 400)

        if "save" in data and data["save"]:
            settings().set(["serial", "port"], port)
            settings().setInt(["serial", "baudrate"], baudrate)

        if "autoconnect" in data:
            settings().setBoolean(["serial", "autoconnect"], data["autoconnect"])

        settings().save()
        printer.connect(port=port, baudrate=baudrate)

    return NO_CONTENT
| koenkooi/OctoPrint | src/octoprint/server/api/connection.py | Python | agpl-3.0 | 1,794 |
from glob import glob
from sys import path
import numpy as np
from matplotlib import use as mpluse
# Select a non-interactive backend before pyplot is imported.
mpluse('Agg')
import matplotlib.pyplot as plt
# Project helpers live next to this script.
path.append("./../bin/")
from plothelp import plot_fit
from plothelp import read_logf
from filter import filter
import resx
# Python 2 script: fits motor torque against back-EMF current using the
# motor-calibration (mcal) log files.
I_EMFs = list()
T_MSs = list()
aliemfs = list()
altms = list()
ref_logs = glob("./../logs/mcal*11.10*.csv")
for i in range(0, len(ref_logs)):
    print "Calculating...",
    print " {}".format(ref_logs[i])
    __, st, __, __, f_spd1, r_spd1, f_spd2, r_spd2, cra, crb, T, Vpz, Vms, gamma_dot, tau, tag = read_logf(ref_logs[i])
    # mcal_[name]_[viscosity]_[date+time].csv
    v_term = ref_logs[i].split('_')[2]
    print "\tVisc. Term:", v_term
    # The viscosity term can be a literal float, a fluid name, or
    # "name@fraction" for a mixture; try each parse in turn.
    viscosity = 0.0
    try:
        viscosity = float(v_term) # if is any of the 'smart' options, this will not work
    except:
        try:
            viscosity = resx.get_mu_of_T(v_term, T) # will not work if is mixture
        except:
            parts = v_term.split("@")
            viscosity = resx.get_mu_of_T(parts[0], T, parts[1]) # will not work if is wrong
    print "\tViscosity:", np.average(viscosity)
    ## filtering!
    Vms = filter(st, Vms)
    cra = filter(st, cra)
    # EMF current = total motor-supply current minus the coil current.
    I_MS = resx.get_current(cra)
    I_CO = resx.get_current_coil(Vms)
    I_EMF = [0.0] * len(I_MS)
    for j in range(0, len(I_MS)):
        I_EMF[j] = I_MS[j] - I_CO[j]
    aliemfs.extend(I_EMF)
    I_EMFs.append(np.mean(I_EMF))
    stress = viscosity * gamma_dot
    torque = resx.get_torque(stress, 15)
    #print "\tStrain: ", np.average(gamma_dot)
    #print "\tStress: ", stress
    #print "\tTorque: ", torque
    #print "\tI emf : ", I_EMFs[-1]
    altms.extend(torque)
    T_MSs.append(np.mean(torque))
#print T_MSs
#print I_EMFs
# Linear (degree-1) fit of torque against EMF current.
fit, f_eqn, mot_cal = plot_fit(I_EMFs, T_MSs, 1, x_name="Iemf", y_name="T")
f = plt.figure()
ax = f.add_subplot(111)
ax.plot(aliemfs, altms, "x")
ax.plot(I_EMFs, T_MSs, "o")
ax.plot(I_EMFs, fit)
#plt.show()
plt.savefig("trd.png")
print "New fit:"
print "\tT = Iemf * {} + {}".format(mot_cal[0], mot_cal[1])
"""
Page objects for patternfly/bootstrap modal window.
Modal window is a window which makes itself the only active element on the
page, so that one needs to close it first to access the rest of the page again.
"""
# Copyright 2016 Red Hat
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from webstr.core import WebstrPage
from webstr.patternfly.modal import models as m_modal
class ModalWindow(WebstrPage):
    """
    Base page object class for any modal window.

    A modal window captures all interaction on the page, so it must be
    closed before the rest of the page becomes usable again.
    """
    _model = m_modal.ModalWindowModel
    _required_elems = ['header', 'body', 'footer', 'title', 'close_btn']

    def close(self):
        """
        Dismiss the modal window using the default close button
        (the "X" in the top right corner of the header).
        """
        close_button = self._model.close_btn
        close_button.click()

    def get_title(self):
        """Return the text of the modal window's title."""
        return self._model.title.text
| Webstr-framework/webstr | webstr/patternfly/modal/pages.py | Python | apache-2.0 | 1,363 |
# Copyright 2021 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import os
import unittest
import base64
from unittest import mock
from unittest.mock import MagicMock, patch
from os.path import dirname
from fedlearner_webconsole.sparkapp.schema import SparkAppInfo
from testing.common import BaseTestCase
from envs import Envs
BASE_DIR = Envs.BASE_DIR
class SparkAppApiTest(BaseTestCase):
    """API tests for /api/v2/sparkapps with the service layer mocked out."""

    def setUp(self):
        super().setUp()
        # Redirect the service's upload directory into the test tree.
        self._upload_path = os.path.join(BASE_DIR, 'test')
        self._upload_path_patcher = patch(
            'fedlearner_webconsole.sparkapp.service.UPLOAD_PATH',
            self._upload_path)
        self._upload_path_patcher.start()

    def tearDown(self):
        self._upload_path_patcher.stop()
        super().tearDown()

    @patch(
        'fedlearner_webconsole.sparkapp.service.SparkAppService.submit_sparkapp'
    )
    def test_submit_sparkapp(self, mock_submit_sparkapp: MagicMock):
        """POST /sparkapps builds a config from the body and submits it."""
        mock_submit_sparkapp.return_value = SparkAppInfo()
        tarball_file_path = os.path.join(
            BASE_DIR, 'test/fedlearner_webconsole/test_data/sparkapp.tar')
        with open(tarball_file_path, 'rb') as f:
            files_bin = f.read()

        self.post_helper(
            '/api/v2/sparkapps', {
                'name': 'fl-transformer-yaml',
                'files': base64.b64encode(files_bin).decode(),
                'image_url': 'dockerhub.com',
                'driver_config': {
                    'cores': 1,
                    'memory': '200m',
                    'core_limit': '4000m',
                },
                'executor_config': {
                    'cores': 1,
                    'memory': '200m',
                    'instances': 5,
                },
                'command': ['data.csv', 'data.rd'],
                'main_application': '${prefix}/convertor.py'
            }).json

        mock_submit_sparkapp.assert_called_once()
        _, kwargs = mock_submit_sparkapp.call_args
        # Was assertTrue(name, 'fl-transformer-yaml'), which can never fail:
        # assertTrue's second argument is only the failure message.
        self.assertEqual(kwargs['config'].name, 'fl-transformer-yaml')

    @patch(
        'fedlearner_webconsole.sparkapp.service.SparkAppService.get_sparkapp_info'
    )
    def test_get_sparkapp_info(self, mock_get_sparkapp: MagicMock):
        """GET /sparkapps/<name> queries the service by name."""
        mock_get_sparkapp.return_value = SparkAppInfo()

        self.get_helper('/api/v2/sparkapps/fl-transformer-yaml').json

        mock_get_sparkapp.assert_called_once_with('fl-transformer-yaml')

    @patch(
        'fedlearner_webconsole.sparkapp.service.SparkAppService.delete_sparkapp'
    )
    def test_delete_sparkapp(self, mock_delete_sparkapp: MagicMock):
        """DELETE /sparkapps/<name> deletes through the service by name."""
        mock_delete_sparkapp.return_value = SparkAppInfo()

        self.delete_helper('/api/v2/sparkapps/fl-transformer-yaml').json

        mock_delete_sparkapp.assert_called_once_with('fl-transformer-yaml')
if __name__ == '__main__':
    # Allow running this test module directly with the stdlib runner.
    unittest.main()
| bytedance/fedlearner | web_console_v2/api/test/fedlearner_webconsole/sparkapp/apis_test.py | Python | apache-2.0 | 3,416 |
from quipp import *
def run():
    """Build the factor-analysis model and return its sampling function.

    Declares a random function mapping a 2-component latent vector to a
    5-dimensional observed point, and returns a closure that draws one
    (latent, observed) sample.
    """
    num_components = 2
    point_dim = 5
    latent_type = Vector(num_components, Double)
    observed_type = Vector(point_dim, Double)
    get_point = rand_function(latent_type, observed_type)

    def sample():
        # Standard-normal latent factors, mapped to an observed point.
        latents = [normal(0, 1) for _ in range(num_components)]
        return (latents, get_point(latents))

    return sample
# Hand the model builder to quipp's generic factor-analysis driver.
run_factor_analysis_example(run)
| jessica-taylor/quipp2 | src/python/examples/factor_analysis.py | Python | mit | 394 |
from flask_tryton import Tryton
import flask_restful as restful
from flask_login import LoginManager, login_required
from .utils import output_xml
#### Extensions
# Shared extension singletons; the app factory is expected to call their
# init_app()-style setup elsewhere.
tryton = Tryton()
login_manager = LoginManager()
# Handle different outputs
class Api(restful.Api):
    """REST API that renders XML by default.

    All XML-ish media types (including FHIR's ``application/xml+fhir``)
    are serialized through :func:`output_xml`.
    """

    def __init__(self, *args, **kwargs):
        # Fall back to application/xml when the caller sets no default.
        kwargs.setdefault('default_mediatype', 'application/xml')
        super(Api, self).__init__(*args, **kwargs)
        xml_types = ('text/xml', 'application/xml', 'application/xml+fhir')
        self.representations = {mtype: output_xml for mtype in xml_types}
# Authentication on every resource
# Import this for authenticated routes
# Base resource requiring an authenticated session on every HTTP method;
# import this instead of flask_restful.Resource for protected routes.
class Resource(restful.Resource):
    method_decorators = [login_required]
#### /Extensions
# Public API of this extensions module.
__all__=['Resource', 'Api', 'tryton', 'login_manager']
| teffalump/fhir | common/extensions.py | Python | gpl-3.0 | 897 |
"""Implementation of packaging-related magic functions.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2018 The IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import os
import re
import shlex
import sys
from IPython.core.magic import Magics, magics_class, line_magic
def _is_conda_environment():
"""Return True if the current Python executable is in a conda env"""
# TODO: does this need to change on windows?
conda_history = os.path.join(sys.prefix, 'conda-meta', 'history')
return os.path.exists(conda_history)
def _get_conda_executable():
    """Find the path to the conda executable.

    Tries, in order:
    1. a ``conda`` binary next to the running Python (conda root env),
    2. the command recorded in the environment's conda history file,
    3. plain ``"conda"``, relying on the system PATH.
    """
    # Check if there is a conda executable in the same directory as the Python executable.
    # This is the case within conda's root environment.
    conda = os.path.join(os.path.dirname(sys.executable), 'conda')
    if os.path.isfile(conda):
        return conda
    # Otherwise, attempt to extract the executable from conda history.
    # This applies in any conda environment.
    # BUG FIX: the pattern used to end in the character class [create|install],
    # which matches any *single* character from that set (so lines such as
    # "cmd: conda activate" also matched); a non-capturing group expresses the
    # intended alternation.
    R = re.compile(r"^#\s*cmd:\s*(?P<command>.*conda)\s(?:create|install)")
    with open(os.path.join(sys.prefix, 'conda-meta', 'history')) as f:
        for line in f:
            match = R.match(line)
            if match:
                return match.groupdict()['command']
    # Fallback: assume conda is available on the system path.
    return "conda"
# Conda sub-commands that operate on an environment and therefore need an
# explicit --prefix when run from inside a kernel.
CONDA_COMMANDS_REQUIRING_PREFIX = {
    'install', 'list', 'remove', 'uninstall', 'update', 'upgrade',
}
# Sub-commands that normally prompt for confirmation and so need --yes when
# stdin is unavailable.
CONDA_COMMANDS_REQUIRING_YES = {
    'install', 'remove', 'uninstall', 'update', 'upgrade',
}
# Flags with which the user may already have selected a target environment.
CONDA_ENV_FLAGS = {'-p', '--prefix', '-n', '--name'}
# Flags with which the user may already have pre-confirmed prompts.
CONDA_YES_FLAGS = {'-y', '--y'}
@magics_class
class PackagingMagics(Magics):
    """Magics related to packaging & installation"""
    @line_magic
    def pip(self, line):
        """Run the pip package manager within the current kernel.

        Runs ``python -m pip <line>`` with the *kernel's* interpreter, so
        packages land in the environment the kernel is running in.

        Usage:
          %pip install [pkgs]
        """
        self.shell.system(' '.join([sys.executable, '-m', 'pip', line]))
        print("Note: you may need to restart the kernel to use updated packages.")
    @line_magic
    def conda(self, line):
        """Run the conda package manager within the current kernel.

        Refuses to run when the kernel is not a conda environment, and adds
        ``--yes`` / ``--prefix`` arguments as needed so the subprocess does
        not hang on prompts or install into the wrong environment.

        Usage:
          %conda install [pkgs]
        """
        if not _is_conda_environment():
            raise ValueError("The python kernel does not appear to be a conda environment. "
                             "Please use ``%pip install`` instead.")
        conda = _get_conda_executable()
        args = shlex.split(line)
        # First token is the conda sub-command (install, list, ...).
        # NOTE(review): an empty %conda line raises IndexError here — confirm
        # whether a friendlier message is wanted.
        command = args[0]
        args = args[1:]
        extra_args = []
        # When the subprocess does not allow us to respond "yes" during the installation,
        # we need to insert --yes in the argument list for some commands
        stdin_disabled = getattr(self.shell, 'kernel', None) is not None
        needs_yes = command in CONDA_COMMANDS_REQUIRING_YES
        has_yes = set(args).intersection(CONDA_YES_FLAGS)
        if stdin_disabled and needs_yes and not has_yes:
            extra_args.append("--yes")
        # Add --prefix to point conda installation to the current environment
        needs_prefix = command in CONDA_COMMANDS_REQUIRING_PREFIX
        has_prefix = set(args).intersection(CONDA_ENV_FLAGS)
        if needs_prefix and not has_prefix:
            extra_args.extend(["--prefix", sys.prefix])
        self.shell.system(' '.join([conda, command] + extra_args + args))
        print("\nNote: you may need to restart the kernel to use updated packages.")
| sserrot/champion_relationships | venv/Lib/site-packages/IPython/core/magics/packaging.py | Python | mit | 3,775 |
# Copyright 2018-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
import argparse
def parse_file_name():
    """Parse the command line and return the positional file name."""
    parser = argparse.ArgumentParser(description="Outputs given file.")
    parser.add_argument("file_name", type=str, help="Name of file to output")
    args = parser.parse_args()
    return args.file_name
def output_file(file_name):
    """Print the full contents of *file_name* to stdout."""
    with open(file_name, mode="r") as handle:
        contents = handle.read()
    print(contents)
if __name__ == "__main__":
    # CLI entry point: echo the file named on the command line.
    output_file(parse_file_name())
| facebook/buck | test/com/facebook/buck/testutil/endtoend/testdata/mobile/ios/plist/py_cat.py | Python | apache-2.0 | 578 |
# Reference data: (DESCRIPTOR, COUNT) pairs — apparently tallies of how often
# each NYT source descriptor occurs in the underlying corpus, grouped by the
# interleaved category comments.
# NOTE(review): this list literal is a bare expression, evaluated and
# discarded on import. If it is meant to be consumed, bind it to a name.
# NOTE(review): the stray ']' inside the 'ALL ALONE ON THE 68TH FLOOR' string
# looks like an extraction artifact — verify against the raw data before
# changing it.
[('GROUP', 10),
 ('UNITED STATES SENATOR', 8),
 ("''", 4),
 # media outlet
 ('NEWS ORGANIZATION', 5),
 ('NEWSPAPER', 4),
 ('MAGAZINE', 3),
 ('TELEVISION SHOW', 2),
 ('NEWS WEBSITE', 1),
 # media person
 ('BLOGGER, THE WASHINGTON POST', 1),
 ('ANCHOR, FOX NEWS', 1),
 ('FOX NEWS ANCHOR', 1),
 ("CONTRIBUTOR, 'THE VIEW'", 1),
 ("CONTRIBUTOR, 'MORNING EDITION'", 1),
 ('OPINION WRITER, THE WASHINGTON POST', 1),
 ('JOURNALIST, PBS', 1),
 ('JOURNALIST, NBC NEWS', 1),
 ('JOURNALIST, BLOOMBERG', 1),
 ('GLOBAL ANCHOR, YAHOO NEWS', 1),
 ('PUBLISHER, NEW HAMPSHIRE UNION LEADER', 1),
 ('REPORTER, THE NEW YORK TIMES', 3),
 ('COLUMNIST, THE NEW YORK TIMES', 3),
 ('COLUMNIST', 3),
 ('JOURNALIST, THE NEW YORK TIMES', 2),
 ('JOURNALIST', 2),
 ('WHITE HOUSE CORRESPONDENT, CBS', 1),
 ('WALL STREET EXECUTIVE, NEW YORK TIMES CONTRIBUTING WRITER', 1),
 ('TELEVISION PERSONALITY', 1),
 ('TELEVISION HOST, MSNBC', 1),
 ('TELEVISION HOST', 1),
 ('STAFF WRITER, FORBES', 1),
 ('REPORTER, THE ASSOCIATED PRESS', 1),
 ('REPORTER, FOX NEWS', 1),
 ('REPORTER, CBS NEWS', 1),
 ('POLITICO REPORTER', 1),
 ('EDITOR-IN-CHIEF, ROLL CALL', 1),
 ('EDITOR, VANITY FAIR', 1),
 ('EDITOR, THE WEEKLY STANDARD', 1),
 ('EDITOR, NATIONAL REVIEW', 1),
 ('EDITOR, FOX NEWS CHANNEL', 1),
 ('COLUMNIST, THE WASHINGTON POST', 1),
 ('COLUMNIST AND FOX NEWS CONTRIBUTOR', 1),
 ("CO-HOST, 'TODAY'", 1),
 ("CO-HOST, 'MORNING JOE'", 1),
 ("CO-ANCHOR, 'NEW DAY'", 1),
 ('CNN CONTRIBUTOR', 1),
 ('CNN ANCHOR', 1),
 ('CHIEF WASHINGTON CORRESPONDENT, CNBC', 1),
 ('CHIEF NATIONAL CORRESPONDENT, YAHOO NEWS', 1),
 ('FOUNDER, THE HUFFINGTON POST', 1),
 ("HOST, 'MORNING JOE'", 1),
 ("FORMER CO-HOST, 'THE VIEW'", 1),
 ("MODERATOR, 'MEET THE PRESS'", 1),
 ('CORRESPONDENT, NBC NEWS', 1),
 # media/pundit/commentator
 ('CONSERVATIVE COMMENTATOR', 1),
 ('POLITICAL CORRESPONDENT, MSNBC', 1),
 ('POLITICAL COMMENTATOR', 1),
 ('POLITICAL ANALYST, CNN', 1),
 ('POLITICAL ANALYST', 1),
 # political organization
 ('POLITICAL PARTY', 3),
 ('FORMER PRESIDENT OF THE UNITED STATES', 3),
 ('POLITICAL CONSULTANT', 2),
 ('POLITICAL ANALYST, FOX NEWS', 2),
 ('CNN NEWS PROGRAM', 2),
 # political: governor
 ('SOUTH CAROLINA GOVERNOR', 1),
 ('OHIO GOVERNOR', 1),
 # political: GOP rival
 ('FORMER NEW YORK GOVERNOR', 1),
 ('NEW JERSEY GOVERNOR', 1),
 ('WISCONSIN GOVERNOR', 1),
 ('FORMER LOUISIANA GOVERNOR', 1),
 ('FORMER FLORIDA GOVERNOR', 1),
 ('FLORIDA GOVERNOR', 1),
 ('RETIRED NEUROSURGEON', 1),
 ('FORMER TEXAS GOVERNOR', 1),
 # political: GOP misc
 ('FORMER NEW HAMPSHIRE GOVERNOR', 1),
 ('SUPREME COURT CHIEF JUSTICE', 1),
 ('FORMER PENNSYLVANIA GOVERNOR', 1),
 # campaign/staffer
 ('THE PRESIDENTIAL CAMPAIGN OF TED CRUZ', 1),
 ('THE PRESIDENTIAL CAMPAIGN OF JEB BUSH', 1),
 ('STAFFER FOR JOHN KASICH', 1),
 ('EMPLOYEE FOR JEB BUSH', 1),
 ('JEB BUSH, SUPPORTERS OF', 1),
 # foreign entity
 ('TERRORIST GROUP', 1),
 ('INTERNATIONAL ALLIANCE', 1),
 # political organization
 ('REPUBLICAN POLITICAL CONSULTANT', 1),
 # political: Democratic rival
 ('DEMOCRATIC CANDIDATE, FORMER GOVERNOR OF MARYLAND', 1),
 ('FORMER RHODE ISLAND GOVERNOR', 1),
 # political: other Democratic
 ('MARYLAND SENATOR', 1),
 ('MAYOR OF SAN JOSE, CALIF.', 1),
 ('MAYOR OF NEW YORK CITY', 1),
 ('FORMER MAYOR OF PHILADELPHIA', 1),
 ("PROTESTERS OF MR. TRUMP'S RALLIES", 1),
 # foreign leader
 ('PRINCE, SAUDI ARABIA', 1),
 ('GERMAN CHANCELLOR', 1),
 # business leader
 ('FORMER BUSINESS EXECUTIVE', 1),
 ('OWNER, THE NEW YORK JETS', 1),
 ('OWNER, THE NEW YORK DAILY NEWS', 1),
 ('HEDGE FUND MANAGER', 1),
 ('ENTREPRENEUR', 1),
 ('PRESIDENT OF THE UNITED STATES', 1),
 ('PRESIDENT AND CHIEF EXECUTIVE, THE FAMILY LEADER', 1),
 ('POLITICAL FUND-RAISING COMMITTEES', 1),
 ('PERFORMER', 1),
 ('MUSICIAN', 1),
 ('MOSTLY REPUBLICAN POLITICIANS', 1),
 ('MIXED MARTIAL ARTIST', 1),
 ('MISS UNIVERSE, 2014', 1),
 ('LAWYER', 1),
 ('FORMER WHITE HOUSE PRESS SECRETARY', 1),
 ("FORMER TRUMP EXECUTIVE AND AUTHOR OF 'ALL ALONE ON THE 68TH FLOOR']", 1),
 ('FORMER SECRETARY OF STATE', 1),
 ('FORMER POLITICAL ADVISER TO BILL CLINTON', 1),
 ('FORMER MASSACHUSETTS GOVERNOR', 1),
 ('FORMER DEPUTY WHITE HOUSE CHIEF OF STAFF', 1),
 ('EVANGELICAL LEADER', 1),
 ('DISTRICT JUDGE OF THE UNITED STATES DISTRICT COURT FOR THE SOUTHERN DISTRICT OF CALIFORNIA', 1),
 ('DEPUTY EDITOR, WALL STREET JOURNAL EDITORIAL PAGE', 1),
 ('CONSERVATIVE DONOR, BILLIONAIRE, PHILANTHROPIST', 1),
 ("COMEDIAN, HOST, 'LAST WEEK TONIGHT'", 1),
 ('CHIEF EXECUTIVE, T-MOBILE', 1),
 ('BOSTON MAYOR', 1),
 ("AUTHOR, 'LOST TYCOON: THE MANY LIVES OF DONALD J. TRUMP'", 1),
 ('ANTITAX POLITICAL GROUP', 1),
 ('ACTRESS AND TELEVISION PERSONALITY', 1),
 ('ACTOR', 1),
 ('', 1)]
| philpot/trump-insult-haiku | nytorg.py | Python | apache-2.0 | 4,644 |
from .ga import GeneticAlgorithm
| Ewande/kheppy | kheppy/evocom/ga/__init__.py | Python | mit | 33 |
import climate
import numpy as np
def main(dataset, output, codebook):
    """Write a random codebook to *output*.

    The codebook has ``int(codebook)`` standard-normal codewords whose
    dimensionality matches the feature dimension (second axis) of the
    .npy array at *dataset*. All arguments arrive as strings from the
    command line (climate maps them onto these parameters).
    """
    features = np.load(dataset)
    num_codewords = int(codebook, 10)
    np.save(output, np.random.randn(num_codewords, features.shape[1]))
if __name__ == '__main__':
    # climate maps command-line arguments onto main()'s parameters.
    climate.call(main)
| lmjohns3/movement-classify-experiment | scripts/train-random-codebook.py | Python | mit | 208 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from .. import models
# AutoRest-generated operations class for the CDN edgenodes endpoint; edit
# with care, regeneration will overwrite manual changes.
class EdgeNodesOperations(object):
    """EdgeNodesOperations operations.
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    :ivar api_version: Version of the API to be used with the client request. Current version is 2017-04-02. Constant value: "2017-04-02".
    """
    # Expose the generated models module on the class (AutoRest convention).
    models = models
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.api_version = "2017-04-02"
        self.config = config
    def list(
            self, custom_headers=None, raw=False, **operation_config):
        """Edgenodes are the global Point of Presence (POP) locations used to
        deliver CDN content to end users.
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: An iterator like instance of EdgeNode
        :rtype:
         ~azure.mgmt.cdn.models.EdgeNodePaged[~azure.mgmt.cdn.models.EdgeNode]
        :raises:
         :class:`ErrorResponseException<azure.mgmt.cdn.models.ErrorResponseException>`
        """
        # Page-fetcher callback handed to EdgeNodePaged: fetches the first
        # page when next_link is None, otherwise follows the given link.
        def internal_paging(next_link=None, raw=False):
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']
                # Construct parameters
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            else:
                # Continuation link already contains all query parameters.
                url = next_link
                query_parameters = {}
            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, stream=False, **operation_config)
            if response.status_code not in [200]:
                raise models.ErrorResponseException(self._deserialize, response)
            return response
        # Deserialize response
        deserialized = models.EdgeNodePaged(internal_paging, self._deserialize.dependencies)
        if raw:
            header_dict = {}
            client_raw_response = models.EdgeNodePaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response
        return deserialized
    list.metadata = {'url': '/providers/Microsoft.Cdn/edgenodes'}
| lmazuel/azure-sdk-for-python | azure-mgmt-cdn/azure/mgmt/cdn/operations/edge_nodes_operations.py | Python | mit | 3,846 |
#!/usr/bin/python
import argparse
import os
import signal
import sys
import numpy as np
from time import strftime
from rtlsdr import RtlSdr
import pysdrscan
def terminate(signum, frame):
    """SIGINT handler: persist whatever has been scanned so far and exit.

    Relies on the module-level ``header``, ``data`` and ``argv`` globals
    set up by the scan script below.
    """
    # (renamed the first parameter so it no longer shadows the signal module)
    print("\nCaught SIGINT. Salvaging data.")
    header['enddate'] = strftime("%Y-%m-%dT%H:%M:%S")
    # BUG FIX: this module defines no bare `save_fits`; mirror the normal
    # exit path's pysdrscan.io.save_fits call and signature so a Ctrl+C
    # actually writes the FITS file instead of raising NameError.
    pysdrscan.io.save_fits(argv.output_file, header, data, argv.overwrite)
    sys.exit(0)
def parse_arguments():
    """Define and parse the pysdrscan command line.

    Returns the argparse namespace with the scan range (MHz), SDR
    settings, FFT/window options and output-file options.
    """
    # NOTE(review): ArgumentParser(version=...) only exists in old/backported
    # argparse; on modern Python this kwarg raises TypeError — confirm the
    # targeted interpreter before changing it.
    parser = argparse.ArgumentParser(prog=pysdrscan.__title__,
                                     version=pysdrscan.__version__,
                                     description="Use librtl-supported SDR devices to scan wide spectrum data over time")
    parser.add_argument("startfreq",
                        help="Starting center frequency in MHz", type=float)
    parser.add_argument("endfreq",
                        help="Ending center frequency in MHz", type=float)
    parser.add_argument("-fs", "--sample_rate",
                        help="SDR sampling rate in Hz (negative for default)",
                        default=-1,
                        type=int)
    parser.add_argument("-p", "--passes",
                        help="Number of full spectrum passes",
                        default=1,
                        type=int)
    parser.add_argument("-g", "--gain",
                        help="SDR device gain (negative for auto)",
                        default=-1,
                        type=int)
    parser.add_argument("-t", "--time-per-segment",
                        help="Time in seconds to average per segment",
                        default=0,
                        type=int)
    parser.add_argument("-fft", "--fft-size",
                        help="FFT size per segment",
                        default=256,
                        type=int)
    parser.add_argument("-w", "--window",
                        help="Window function for FFT",
                        choices=['rectangular', 'hanning', 'hamming',
                                 'bartlett', 'blackman', 'kaiser'],
                        default='hanning',
                        type=str)
    parser.add_argument("-o", "--output-file",
                        help="Output file name for FITS data (defaults to start date and time)",
                        default=strftime("%Y-%m-%dT%H:%M:%S.fits"),
                        type=str)
    parser.add_argument("--overwrite",
                        help="Automatically overwrite any existing output files",
                        action='store_true',
                        default=False)
    parser.add_argument("--silence",
                        help="Do not report the status of the scanning phase",
                        choices=['none', 'segments', 'passes', 'all'],
                        default='none')
    return parser.parse_args()
# Main scan script: parse arguments, validate them, configure the SDR,
# then sweep the requested spectrum pass by pass and save a FITS file.
argv = parse_arguments()
# Create new generic header
header = {
    'version': pysdrscan.__version__,
    'startdate': strftime("%Y-%m-%dT%H:%M:%S"),
    'startfreq': argv.startfreq*1e6,
    'endfreq': argv.endfreq*1e6,
    'passes': argv.passes
}
# Perform sanity check on the arguments
if header['endfreq'] < header['startfreq']:
    print("Ending frequency must be greater than or equal to starting frequency.")
    sys.exit()
if header['passes'] < 1:
    print("Number of passes cannot be less than one.")
    sys.exit()
print("\nStarting center frequency: %f MHz" % (header['startfreq']/1e6))
print("Ending center frequency: %f MHz" % (header['endfreq']/1e6))
print("Number of passes: %d\n" % header['passes'])
print("Will write to output file '%s'" % argv.output_file)
# Resolve the window function: 'rectangular' maps to all-ones, every other
# choice is the numpy window function of the same name.
if argv.window == 'rectangular':
    windowfunc = np.ones
    header['winfunc'] = 'rectangular'
else:
    windowfunc = getattr(np, argv.window)
    header['winfunc'] = argv.window
print("Will use a %s window" % header['winfunc'])
header['fftsize'] = argv.fft_size
print("Using an FFT size of %f" % header['fftsize'])
# Initialize SDR
sdr = RtlSdr()
if argv.gain < 0:
    sdr.set_gain('auto')
    header['gain'] = 'auto'
    print("Using an automatic gain")
else:
    sdr.set_gain(argv.gain)
    header['gain'] = sdr.get_gain()
    print("Using a gain of %f" % sdr.get_gain())
if argv.sample_rate > 0:
    sdr.set_sample_rate(argv.sample_rate)
header['bandwidth'] = sdr.sample_rate
sdr.set_center_freq(header['startfreq'])
print("Device bandwidth: %f MHz" % ((sdr.sample_rate)/1e6))
# One segment covers one device bandwidth; segments tile the requested span.
num_segments = int(np.ceil((header['endfreq'] - header['startfreq'])/(sdr.get_sample_rate())))
print("Sampling %d segments per %d passes\n" % (num_segments, header['passes']))
data = np.zeros(shape=(header['passes'], num_segments, argv.fft_size), dtype=np.float64)
# Install the SIGINT handler so partial scans are still written out.
signal.signal(signal.SIGINT, terminate)
print("Press Ctrl+C to cancel scanning and save")
# A 'primer' pass may need to be done
for i in range(0, 8):
    sdr.read_samples(argv.fft_size)
segments = pysdrscan.util.secs_to_segments(argv.time_per_segment,
                                           argv.fft_size,
                                           sdr.sample_rate)
header['segavg'] = segments
for i in range(0, header['passes']):
    freq = header['startfreq']
    if argv.silence != 'passes' and argv.silence != 'all':
        print("\nBeginning pass %d of %d\n" % (i+1, header['passes']))
    for j in range(0, num_segments):
        sdr.set_center_freq(freq)
        if argv.silence != 'segments' and argv.silence != 'all':
            print("Scanning segment %d of %d at %f MHz..." %
                  (j+1, num_segments, (freq/1e6)))
        # Windowed FFT power spectrum of one capture, DC shifted to center.
        samples = sdr.read_samples(argv.fft_size)
        samples = windowfunc(len(samples)) * samples
        spectrum = np.fft.fft(samples)
        spectrum = np.fft.fftshift(spectrum)
        power = np.abs(spectrum)**2.0
        data[i][j] = power
        for k in range(1, segments):
            samples = sdr.read_samples(argv.fft_size)
            samples = windowfunc(len(samples)) * samples
            spectrum = np.fft.fft(samples)
            spectrum = np.fft.fftshift(spectrum)
            power = np.abs(spectrum)**2.0
            # NOTE(review): pairwise (old+new)/2 weights later reads more
            # than a true mean over all `segments` reads — confirm intended.
            data[i][j] = (data[i][j] + power)/2.0
        freq += sdr.get_sample_rate()
header['enddate'] = strftime("%Y-%m-%dT%H:%M:%S")
print("Writing to file '%s'..." % argv.output_file)
pysdrscan.io.save_fits(argv.output_file, header, data, argv.overwrite)
print("File '%s' written successfully" % argv.output_file)
| jordanhalase/pysdrscan | src/pysdrscan_cli/__main__.py | Python | isc | 5,857 |
# coding=utf-8
from __future__ import absolute_import
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2015 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
import mock
import os
import ddt
import sys
import octoprint.util
class BomAwareOpenTest(unittest.TestCase):
    """
    Tests for :func:`octoprint.util.bom_aware_open`.
    """
    def setUp(self):
        # Fixture files live in the _files directory next to this module.
        self.filename_utf8_with_bom = os.path.join(os.path.abspath(os.path.dirname(__file__)), "_files", "utf8_with_bom.txt")
        self.filename_utf8_without_bom = os.path.join(os.path.abspath(os.path.dirname(__file__)), "_files", "utf8_without_bom.txt")
    def test_bom_aware_open_with_bom(self):
        """Tests that the contents of a UTF8 file with BOM are loaded correctly (without the BOM)."""
        # test
        with octoprint.util.bom_aware_open(self.filename_utf8_with_bom, encoding="utf-8") as f:
            contents = f.readlines()
        # assert
        # NOTE(review): assertEquals is a deprecated alias of assertEqual.
        self.assertEquals(len(contents), 3)
        self.assertTrue(contents[0].startswith("#"))
    def test_bom_aware_open_without_bom(self):
        """Tests that the contents of a UTF8 file without BOM are loaded correctly."""
        # test
        with octoprint.util.bom_aware_open(self.filename_utf8_without_bom, encoding="utf-8") as f:
            contents = f.readlines()
        # assert
        self.assertEquals(len(contents), 3)
        self.assertTrue(contents[0].startswith("#"))
    def test_bom_aware_open_ascii(self):
        """Tests that the contents of a UTF8 file loaded as ASCII are replaced correctly if "replace" is specified on errors."""
        # test
        with octoprint.util.bom_aware_open(self.filename_utf8_with_bom, errors="replace") as f:
            contents = f.readlines()
        # assert: undecodable bytes become U+FFFD replacement characters
        self.assertEquals(len(contents), 3)
        self.assertTrue(contents[0].startswith(u"\ufffd" * 3 + "#"))
        self.assertTrue(contents[2].endswith(u"\ufffd\ufffd" * 6))
    def test_bom_aware_open_encoding_error(self):
        """Tests that an encoding error is thrown if not suppressed when opening a UTF8 file as ASCII."""
        try:
            with octoprint.util.bom_aware_open(self.filename_utf8_without_bom) as f:
                f.readlines()
            self.fail("Expected an exception")
        except UnicodeDecodeError:
            pass
    def test_bom_aware_open_parameters(self):
        """Tests that the parameters are propagated properly."""
        with mock.patch("codecs.open") as mock_open:
            with octoprint.util.bom_aware_open(self.filename_utf8_without_bom, mode="rb", encoding="utf-8", errors="ignore") as f:
                f.readlines()
        mock_open.assert_called_once_with(self.filename_utf8_without_bom, encoding="utf-8", mode="rb", errors="ignore")
class TestAtomicWrite(unittest.TestCase):
    """
    Tests for :func:`octoprint.util.atomic_write`.

    tempfile.NamedTemporaryFile and shutil.move are mocked throughout, so
    these tests verify only the call sequence (write to temp file, close,
    move into place), not real filesystem behavior.
    """
    def setUp(self):
        pass
    @mock.patch("shutil.move")
    @mock.patch("tempfile.NamedTemporaryFile")
    def test_atomic_write(self, mock_tempfile, mock_move):
        """Tests the regular basic "good" case."""
        # setup
        mock_file = mock.MagicMock()
        mock_file.name = "tempfile.tmp"
        mock_tempfile.return_value = mock_file
        # test
        with octoprint.util.atomic_write("somefile.yaml") as f:
            f.write("test")
        # assert: temp file written, closed, then moved over the target
        mock_tempfile.assert_called_once_with(mode="w+b", prefix="tmp", suffix="", delete=False)
        mock_file.write.assert_called_once_with("test")
        mock_file.close.assert_called_once_with()
        mock_move.assert_called_once_with("tempfile.tmp", "somefile.yaml")
    @mock.patch("shutil.move")
    @mock.patch("tempfile.NamedTemporaryFile")
    def test_atomic_write_error_on_write(self, mock_tempfile, mock_move):
        """Tests the error case where something in the wrapped code fails."""
        # setup
        mock_file = mock.MagicMock()
        mock_file.name = "tempfile.tmp"
        mock_file.write.side_effect = RuntimeError()
        mock_tempfile.return_value = mock_file
        # test
        try:
            with octoprint.util.atomic_write("somefile.yaml") as f:
                f.write("test")
            self.fail("Expected an exception")
        except RuntimeError:
            pass
        # assert: on write failure the target must NOT be replaced
        mock_tempfile.assert_called_once_with(mode="w+b", prefix="tmp", suffix="", delete=False)
        mock_file.close.assert_called_once_with()
        self.assertFalse(mock_move.called)
    @mock.patch("shutil.move")
    @mock.patch("tempfile.NamedTemporaryFile")
    def test_atomic_write_error_on_move(self, mock_tempfile, mock_move):
        """Tests the error case where the final move fails."""
        # setup
        mock_file = mock.MagicMock()
        mock_file.name = "tempfile.tmp"
        mock_tempfile.return_value = mock_file
        mock_move.side_effect = RuntimeError()
        # test: the move error must propagate to the caller
        try:
            with octoprint.util.atomic_write("somefile.yaml") as f:
                f.write("test")
            self.fail("Expected an exception")
        except RuntimeError:
            pass
        # assert
        mock_tempfile.assert_called_once_with(mode="w+b", prefix="tmp", suffix="", delete=False)
        mock_file.close.assert_called_once_with()
        self.assertTrue(mock_move.called)
    @mock.patch("shutil.move")
    @mock.patch("tempfile.NamedTemporaryFile")
    def test_atomic_write_parameters(self, mock_tempfile, mock_move):
        """Tests that the open parameters are propagated properly."""
        # setup
        mock_file = mock.MagicMock()
        mock_file.name = "tempfile.tmp"
        mock_tempfile.return_value = mock_file
        # test
        with octoprint.util.atomic_write("somefile.yaml", mode="w", prefix="foo", suffix="bar") as f:
            f.write("test")
        # assert
        mock_tempfile.assert_called_once_with(mode="w", prefix="foo", suffix="bar", delete=False)
        mock_file.close.assert_called_once_with()
        mock_move.assert_called_once_with("tempfile.tmp", "somefile.yaml")
@ddt.ddt
class IsHiddenPathTest(unittest.TestCase):
    """Tests for :func:`octoprint.util.is_hidden_path` against real files."""
    def setUp(self):
        import tempfile
        # Three fixture files: always visible, hidden only via the Windows
        # attribute, and hidden via the leading-dot convention.
        self.basepath = tempfile.mkdtemp()
        self.path_always_visible = os.path.join(self.basepath, "always_visible.txt")
        self.path_hidden_on_windows = os.path.join(self.basepath, "hidden_on_windows.txt")
        self.path_always_hidden = os.path.join(self.basepath, ".always_hidden.txt")
        for attr in ("path_always_visible", "path_hidden_on_windows", "path_always_hidden"):
            path = getattr(self, attr)
            # NOTE(review): writing a str to a "w+b" (binary) file is
            # Python-2 specific — would raise TypeError on Python 3.
            with open(path, "w+b") as f:
                f.write(attr)
        import sys
        if sys.platform == "win32":
            # we use ctypes and the windows API to set the hidden attribute on the file
            # only hidden on windows
            import ctypes
            ctypes.windll.kernel32.SetFileAttributesW(unicode(self.path_hidden_on_windows), 2)
    def tearDown(self):
        import shutil
        shutil.rmtree(self.basepath)
    # Data rows: (fixture attribute name or None, expected hidden-ness).
    @ddt.data(
        (None, False),
        ("path_always_visible", False),
        ("path_always_hidden", True),
        ("path_hidden_on_windows", sys.platform == "win32")
    )
    @ddt.unpack
    def test_is_hidden_path(self, path_id, expected):
        path = getattr(self, path_id) if path_id is not None else None
        self.assertEqual(octoprint.util.is_hidden_path(path), expected)
| masterhou/OctoPrint | tests/util/test_file_helpers.py | Python | agpl-3.0 | 6,690 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Programm name : IDDB.py
Date : 2012-10-25
Author : Steffen Eichhorn [[email protected]]
License : GPL
Description : abstrahiert den Zugriff auf Daten aus der SQL-Datenbank vom ID Scorer
"""
import os.path
import pyodbc
import datetime
import csv
class PsychPV(object):
    """A single PsychPV classification entry for a case."""

    def __init__(self):
        # Defaults: empty classification string, no date, not finalized.
        self.StatusStr = ''
        self.Date = ''
        self.Finished = False

    def __str__(self):
        return "{0} - {1}".format(self.Date, self.StatusStr)
class PsychStatus(object):
    """One psychiatric status record (per date) for a case."""

    def __init__(self):
        self.Date = ''
        self.Status = ''
        self.StatusStr = ''
        # All boolean care/flag attributes start out cleared.
        for flag in ('IntensiveCare', 'IntegratedCare', 'ParentsSettingCare',
                     'Hardship', 'NoStation', 'HalfDay', 'QE', 'Finished'):
            setattr(self, flag, False)
        # Reason lists are fresh per instance (never shared).
        self.IntensiveCareReasons = []
        self.HardshipReasons = []

    def __str__(self):
        return "{0} - {1}".format(self.Date, self.StatusStr)
class BI_Score(object):
    """Treatment-intensity (Behandlungsintensitaet) score for one date."""

    def __init__(self):
        self.Date = ''
        self.Finished = False
        # Sub-scores and the total all start at zero.
        self.SomaScore = 0
        self.PsyScore = 0
        self.SocialScore = 0
        self.TotalScore = 0

    def __str__(self):
        return "{0} - {1}".format(self.Date, self.TotalScore)
class Case(object):
    def __init__(self, connection_str, soarian_nr):
        """Create a case accessor and immediately resolve *soarian_nr*.

        The ODBC connection string is kept private (name-mangled) and
        reused by every query method.
        """
        self.PsychPV = ''
        self.__connection_str = connection_str
        self.setCaseNr(soarian_nr)
    def setCaseNr(self, soarian_nr):
        """
        Initialize a new case for querying: resolve the Soarian case number
        to the internal CASES id, its node, the node's EHR id and its child
        node. Any id that cannot be resolved stays ''.
        """
        SQLConn = pyodbc.connect(self.__connection_str)
        SQLCursor = SQLConn.cursor()
        self.__Soarian_Nr = soarian_nr
        self.__CaseID = ''
        self.__CaseNodeID = ''
        self.__CaseEHRID = ''
        self.__CaseNodeChildID = ''
        ## get CaseID
        sqlquery = """
            select ID from id_scorer.dbo.CASES
            where CID=?
            """
        SQLCursor.execute(sqlquery, self.__Soarian_Nr)
        try:
            self.__CaseID = SQLCursor.fetchone().ID
        # NOTE(review): bare except silently swallows everything, including
        # fetchone() returning None — deliberate best-effort lookup here.
        except:
            pass
        ## get CaseNodeID by CaseID
        if self.__CaseID != '':
            sqlquery = """
                select nodes_ID from id_scorer.dbo.CASES_NODES
                where CASES_ID=?
                """
            SQLCursor.execute(sqlquery, self.__CaseID)
            try:
                self.__CaseNodeID = SQLCursor.fetchone().nodes_ID
            except:
                pass
        if self.__CaseNodeID != '':
            ## get CaseEHRID by CaseNodeID
            sqlquery = """
                select ehrid from id_scorer.dbo.NODES
                where ID=?
                """
            SQLCursor.execute(sqlquery, self.__CaseNodeID)
            try:
                self.__CaseEHRID = SQLCursor.fetchone().ehrid
            except:
                pass
            ## get CaseNodeChildID by CaseNodeID
            sqlquery = """
                select ID from id_scorer.dbo.NODES
                where PARENTID=?
                """
            SQLCursor.execute(sqlquery, self.__CaseNodeID)
            try:
                self.__CaseNodeChildID = SQLCursor.fetchone().ID
            except:
                pass
        SQLConn.close()
    def getPsychPV(self):
        """
        Return the list of PsychPV entries for this case, sorted by date.

        Walks NODES of NODETYPEID 3 below the case's child node, reads
        their Date/Finished properties, and pulls the status string from
        the NODETYPEID-7 child node's 'Value' property.
        """
        PsychPVList = []
        if self.__CaseNodeChildID != '':
            # init SQL connections and cursors
            NodesSQLConn = pyodbc.connect(self.__connection_str)
            NodesSQLCursor = NodesSQLConn.cursor()
            NodeChildsSQLConn = pyodbc.connect(self.__connection_str)
            NodeChildsSQLCursor = NodeChildsSQLConn.cursor()
            PropertiesSQLConn = pyodbc.connect(self.__connection_str)
            PropertiesSQLCursor = PropertiesSQLConn.cursor()
            # fetch nodes
            sqlquery = """
                select * from id_scorer.dbo.NODES
                where NODETYPEID='3' and PARENTID=?
                """
            for node in NodesSQLCursor.execute(sqlquery, self.__CaseNodeChildID):
                newPsychPV = PsychPV()
                sqlquery = """
                    select * from id_scorer.dbo.PROPERTIES
                    where NodeID=?
                    """
                for property in PropertiesSQLCursor.execute(sqlquery, node.ID):
                    if property.PROPERTYNAME == 'Finished':
                        if property.PROPERTYVALUE == 'true':
                            newPsychPV.Finished = True
                    if property.PROPERTYNAME == 'Date':
                        # Property values are ISO timestamps; keep the date part.
                        newPsychPV.Date = datetime.datetime.strptime(
                            property.PROPERTYVALUE.split('T')[0],
                            "%Y-%m-%d").date()
                sqlquery = """
                    select * from id_scorer.dbo.NODES
                    where ParentID=?
                    and NODETYPEID='7'
                    """
                for ChildNode in NodeChildsSQLCursor.execute(sqlquery, node.ID):
                    sqlquery = """
                        select * from id_scorer.dbo.PROPERTIES
                        where NodeID=?
                        """
                    for ChildNodeProperty in PropertiesSQLCursor.execute(sqlquery, ChildNode.ID):
                        if ChildNodeProperty.PROPERTYNAME == 'Value':
                            newPsychPV.StatusStr = ChildNodeProperty.PROPERTYVALUE
                PsychPVList.append(newPsychPV)
                del newPsychPV
            # close SQL connections and cursors
            NodesSQLCursor.close()
            NodesSQLConn.close()
            NodeChildsSQLCursor.close()
            NodeChildsSQLConn.close()
            PropertiesSQLCursor.close()
            PropertiesSQLConn.close()
        PsychPVList.sort(key = lambda x: x.Date)
        return PsychPVList
def getLastPsychPVCode(self):
"""
Liefert letzten PsychPVCode als string zurück
"""
returnCode = ''
PsychPVList = self.getPsychPV()
if len(PsychPVList) > 0:
returnCode = PsychPVList[-1].StatusStr
return returnCode
    def getPsychStatus(self):
        """
        Return a list of all PsychStatus entries of the case, sorted by date
        (ascending).

        Each entry's StatusStr is assembled from the status value and a set
        of flag suffixes (see the mapping in the body).

        Returns an empty list when no case node child id is set.
        """
        PsychStatusList = []
        if self.__CaseNodeChildID != '':
            # init SQL connections and cursors
            # one connection/cursor per nesting level so the nested result
            # loops below do not invalidate each other's result sets
            NodesSQLConn = pyodbc.connect(self.__connection_str)
            NodesSQLCursor = NodesSQLConn.cursor()
            NodeChildsSQLConn = pyodbc.connect(self.__connection_str)
            NodeChildsSQLCursor = NodeChildsSQLConn.cursor()
            PropertiesSQLConn = pyodbc.connect(self.__connection_str)
            PropertiesSQLCursor = PropertiesSQLConn.cursor()
            ## fetch all Status Nodes
            # NODETYPEID '8' presumably marks PsychStatus nodes -- verify against schema
            sqlquery = """
                select ID from id_scorer.dbo.NODES
                where NODETYPEID='8'
                and PARENTID=?
                """
            for node in NodesSQLCursor.execute(sqlquery, self.__CaseNodeChildID):
                newPsychStatus = PsychStatus()
                sqlquery = """
                    select * from id_scorer.dbo.PROPERTIES
                    where NODEID=?
                    """
                for row in PropertiesSQLCursor.execute(sqlquery, node.ID):
                    if row.PROPERTYNAME == 'Date':
                        # value looks like an ISO timestamp; keep the date part only
                        newPsychStatus.Date = datetime.datetime.strptime(
                            row.PROPERTYVALUE.split('T')[0],
                            "%Y-%m-%d").date()
                    if row.PROPERTYNAME =='Finished':
                        if row.PROPERTYVALUE == 'false':
                            newPsychStatus.Finished = False
                        elif row.PROPERTYVALUE == 'true':
                            newPsychStatus.Finished = True
                ## get Child Nodes and Data
                sqlquery = """
                    select * from id_scorer.dbo.NODES
                    where PARENTID=?
                    """
                IntensiveCareNode = ''
                # NOTE(review): this local is assigned but never used; the
                # flag lives on newPsychStatus.Hardship instead
                Hardship = ''
                for node_row in NodeChildsSQLCursor.execute(sqlquery, node.ID):
                    sqlquery = """
                        select * from id_scorer.dbo.PROPERTIES
                        where NODEID=?
                        """
                    # map boolean 'true' properties onto the status flags
                    for nodeprop in PropertiesSQLCursor.execute(sqlquery, node_row.ID):
                        if nodeprop.PROPERTYNAME == 'IntensiveCare':
                            if nodeprop.PROPERTYVALUE == 'true':
                                newPsychStatus.IntensiveCare = True
                                # remember the node so its reasons can be read below
                                IntensiveCareNode = node_row.ID
                        if nodeprop.PROPERTYNAME == 'Integrated':
                            if nodeprop.PROPERTYVALUE == 'true':
                                newPsychStatus.IntegratedCare = True
                        if nodeprop.PROPERTYNAME == 'ParentsSetting':
                            if nodeprop.PROPERTYVALUE == 'true':
                                newPsychStatus.ParentsSettingCare = True
                        if nodeprop.PROPERTYNAME == 'NoStation':
                            if nodeprop.PROPERTYVALUE == 'true':
                                newPsychStatus.NoStation = True
                        if nodeprop.PROPERTYNAME == 'HalfDay':
                            if nodeprop.PROPERTYVALUE == 'true':
                                newPsychStatus.HalfDay = True
                        if nodeprop.PROPERTYNAME == 'ExtraWithdrawlTreatment':
                            if nodeprop.PROPERTYVALUE == 'true':
                                newPsychStatus.QE = True
                        if nodeprop.PROPERTYNAME == 'Hardship':
                            if nodeprop.PROPERTYVALUE == 'true':
                                newPsychStatus.Hardship = True
                        if nodeprop.PROPERTYNAME == 'Hardship1':
                            if nodeprop.PROPERTYVALUE == 'true':
                                newPsychStatus.HardshipReasons.append('1')
                        if nodeprop.PROPERTYNAME == 'Hardship2':
                            if nodeprop.PROPERTYVALUE == 'true':
                                newPsychStatus.HardshipReasons.append('2')
                        if nodeprop.PROPERTYNAME == 'Hardship3':
                            if nodeprop.PROPERTYVALUE == 'true':
                                newPsychStatus.HardshipReasons.append('3')
                        if nodeprop.PROPERTYNAME == 'Hardship4':
                            if nodeprop.PROPERTYVALUE == 'true':
                                newPsychStatus.HardshipReasons.append('4')
                        if nodeprop.PROPERTYNAME == 'Value':
                            newPsychStatus.Status = nodeprop.PROPERTYVALUE
                # collect the intensive-care reasons from the remembered node
                if IntensiveCareNode != '':
                    sqlquery = """
                        select * from id_scorer.dbo.NODES
                        where PARENTID=?
                        """
                    for childnode in NodeChildsSQLCursor.execute(sqlquery, IntensiveCareNode):
                        sqlquery = """
                            select * from id_scorer.dbo.PROPERTIES
                            where NODEID=?
                            """
                        for childnodeproperty in PropertiesSQLCursor.execute(sqlquery, childnode.ID):
                            # properties named Reason<N> with a non-empty value
                            # contribute their trailing digit as a reason code
                            if childnodeproperty.PROPERTYNAME[:6] == 'Reason':
                                if childnodeproperty.PROPERTYVALUE != '':
                                    newPsychStatus.IntensiveCareReasons.append(childnodeproperty.PROPERTYNAME[-1:])
                # build StatusStr: base code first ...
                # I<n> = intensive care with n reasons; later Status checks may
                # overwrite it ("0"->R, "1"->PSYK, "2"->PSOK, "5"->QE)
                if newPsychStatus.IntensiveCare:
                    newPsychStatus.StatusStr = "I" + str(len(newPsychStatus.IntensiveCareReasons))
                if newPsychStatus.Status == "0":
                    newPsychStatus.StatusStr = "R"
                if newPsychStatus.Status == "1":
                    newPsychStatus.StatusStr = "PSYK"
                if newPsychStatus.Status == "2":
                    newPsychStatus.StatusStr = "PSOK"
                if newPsychStatus.Status == "5":
                    newPsychStatus.StatusStr = "QE"
                # ... then flag suffixes: + integrated, * parents setting,
                # % extra withdrawal treatment, ? no station / half day
                if newPsychStatus.IntegratedCare:
                    newPsychStatus.StatusStr = "".join((newPsychStatus.StatusStr,"+"))
                if newPsychStatus.ParentsSettingCare:
                    newPsychStatus.StatusStr = "".join((newPsychStatus.StatusStr,"*"))
                if newPsychStatus.QE:
                    newPsychStatus.StatusStr = "".join((newPsychStatus.StatusStr,"%"))
                if newPsychStatus.NoStation:
                    newPsychStatus.StatusStr = "".join((newPsychStatus.StatusStr,"?"))
                if newPsychStatus.HalfDay:
                    newPsychStatus.StatusStr = "".join((newPsychStatus.StatusStr,"?"))
                PsychStatusList.append(newPsychStatus)
                # note: appended object is mutated afterwards -- the hardship
                # suffix still ends up on the entry already in the list
                if newPsychStatus.Hardship:
                    newPsychStatus.StatusStr = "".join((
                        newPsychStatus.StatusStr,
                        "!",
                        str(len(newPsychStatus.HardshipReasons))
                    ))
                del newPsychStatus
            # close SQL connections and cursors
            PropertiesSQLCursor.close()
            PropertiesSQLConn.close()
            NodeChildsSQLCursor.close()
            NodeChildsSQLConn.close()
            NodesSQLCursor.close()
            NodesSQLConn.close()
        PsychStatusList.sort(key = lambda x: x.Date)
        return PsychStatusList
def getLastPsychStatusCode(self):
"""
Liefert den letzten PsychStatus Code als string zurück
"""
returnCode = ''
PsychStatusList = self.getPsychStatus()
if len(PsychStatusList) > 0:
returnCode = PsychStatusList[-1].StatusStr
return returnCode
    def getProcedures(self):
        """
        Return the procedure codes of the case as a list of dicts with the
        keys 'cdate' (procedure date) and 'code' (OPS code value), ordered
        by procedure date.

        Returns an empty list when no case id is set.
        """
        ProceduresList = []
        if self.__CaseID != '':
            # init SQL connections and cursors
            # separate connection/cursor per table so lookups inside the
            # procedures loop do not invalidate the outer result set
            ProceduresSQLConn = pyodbc.connect(self.__connection_str)
            ProceduresSQLCursor = ProceduresSQLConn.cursor()
            Procedures_CodesSQLConn = pyodbc.connect(self.__connection_str)
            Procedures_CodesSQLCursor = Procedures_CodesSQLConn.cursor()
            CodesSQLConn = pyodbc.connect(self.__connection_str)
            CodesSQLCursor = CodesSQLConn.cursor()
            ## fetch all Procedures
            sqlquery = """
                select * from id_scorer.dbo.PROCEDURES
                where CID=?
                order by PDATE asc
                """
            for procedure in ProceduresSQLCursor.execute(sqlquery, self.__CaseID):
                # get Code ID
                sqlquery = """
                    select codes_ID from id_scorer.dbo.PROCEDURES_CODES
                    where PROCEDURES_ID=?
                    """
                Procedures_CodesSQLCursor.execute(sqlquery, procedure.ID)
                # NOTE(review): fetchone() returns None when a procedure has
                # no code mapping -- that would raise AttributeError here
                CodesID = Procedures_CodesSQLCursor.fetchone().codes_ID
                # get Code
                sqlquery = """
                    select * from id_scorer.dbo.CODES
                    where ID=?
                    """
                CodesSQLCursor.execute(sqlquery, CodesID)
                code = CodesSQLCursor.fetchone()
                ProceduresList.append({
                    'cdate': procedure.PDATE,
                    'code': code.VALUE
                })
            # close SQL connections and cursors
            ProceduresSQLCursor.close()
            ProceduresSQLConn.close()
            Procedures_CodesSQLCursor.close()
            Procedures_CodesSQLConn.close()
            CodesSQLCursor.close()
            CodesSQLConn.close()
        return ProceduresList
    def getBIScore(self):
        """
        Return a list of all treatment-intensity (BI) score entries of the
        case, sorted by date (ascending).

        Returns an empty list when no case node child id is set.
        """
        BIScoreList = []
        if self.__CaseNodeChildID != '':
            # init SQL connections and cursors
            NodesSQLConn = pyodbc.connect(self.__connection_str)
            NodesSQLCursor = NodesSQLConn.cursor()
            # NOTE(review): the NodeChilds connection/cursor is opened and
            # closed but never executed in this method
            NodeChildsSQLConn = pyodbc.connect(self.__connection_str)
            NodeChildsSQLCursor = NodeChildsSQLConn.cursor()
            PropertiesSQLConn = pyodbc.connect(self.__connection_str)
            PropertiesSQLCursor = PropertiesSQLConn.cursor()
            ## fetch all CareIntensityERW2013 Nodes
            # NODETYPEID '16' presumably marks CareIntensityERW2013 nodes -- verify
            sqlquery = """
                select ID from id_scorer.dbo.NODES
                where NODETYPEID='16'
                and PARENTID=?
                """
            for node in NodesSQLCursor.execute(sqlquery, self.__CaseNodeChildID):
                sqlquery = """
                    select * from id_scorer.dbo.PROPERTIES
                    where NODEID=?
                    """
                newBIScore = BI_Score()
                for property in PropertiesSQLCursor.execute(sqlquery, node.ID):
                    if property.PROPERTYNAME == "SomaScore":
                        newBIScore.SomaScore = int(property.PROPERTYVALUE)
                    if property.PROPERTYNAME == 'PsyScore':
                        newBIScore.PsyScore = int(property.PROPERTYVALUE)
                    if property.PROPERTYNAME == 'SocialScore':
                        newBIScore.SocialScore = int(property.PROPERTYVALUE)
                    if property.PROPERTYNAME == 'totalScore':
                        newBIScore.TotalScore = int(property.PROPERTYVALUE)
                    if property.PROPERTYNAME == 'Finished':
                        if property.PROPERTYVALUE == 'true':
                            newBIScore.Finished = True
                    if property.PROPERTYNAME == 'Date':
                        # value looks like an ISO timestamp; keep the date part only
                        newBIScore.Date = datetime.datetime.strptime(
                            property.PROPERTYVALUE.split('T')[0],
                            "%Y-%m-%d").date()
                BIScoreList.append(newBIScore)
                del newBIScore
            # close SQL connections and cursors
            NodesSQLCursor.close()
            NodesSQLConn.close()
            NodeChildsSQLCursor.close()
            NodeChildsSQLConn.close()
            PropertiesSQLCursor.close()
            PropertiesSQLConn.close()
        BIScoreList.sort(key = lambda x: x.Date)
        return BIScoreList
def getLastBIScore(self):
"""
Liefert den letzten Score der Betreuungsintensität als string zurück
"""
returnCode = ''
BIScoreList = self.getBIScore()
if len(BIScoreList) > 0:
returnCode = str(BIScoreList[-1].TotalScore)
return returnCode
class connection(object):
    """Connection factory for the ID database."""
    def __init__(self, *args, **kwargs):
        """
        Build the ODBC connection string for the ID database.

        Required keyword arguments:
        host
            string, database host
        session
            string, database session/instance ('' for none)
        dc
            string, domain controller used as login domain ('' for none)
        user
            string, user name
        passwd
            string, user password
        """
        # prepend the domain controller to the user name if one is given
        user_str = kwargs['user']
        if kwargs['dc'] != '':
            user_str = "\\".join([
                kwargs['dc'],
                kwargs['user']
            ])
        # append the session to the host if one is given
        server_str = kwargs['host']
        if kwargs['session'] != '':
            server_str = "\\".join([
                kwargs['host'],
                kwargs['session']
            ])
        # trailing '' keeps the string terminated with a ';'
        self.__connection_str = ';'.join([
            'DRIVER={FreeTDS}',
            'SERVER=%s' % server_str,
            'UID=%s' % user_str,
            'PWD=%s' % kwargs['passwd'],
            'CLIENTCHARSET=UTF-8',
            'TDS_Version=8.0',
            ''
        ])
    def con(self):
        """Open and return a new pyodbc connection to the database."""
        return pyodbc.connect(self.__connection_str)
    def case(self, soarian_nr):
        """
        Create a Case object through which case related data can be
        queried.
        """
        return Case(self.__connection_str, soarian_nr)
    def getCases(self):
        """
        Return the rows of the CASES table (psychiatric cases only).

        Returns an empty list when fetching fails.
        """
        # bug fix: casesID used to be undefined (NameError at the return)
        # when fetchall() raised inside the former bare except
        casesID = []
        SQLConn = pyodbc.connect(self.__connection_str)
        SQLCursor = SQLConn.cursor()
        sqlquery = """
            SELECT CID, WARD, ADMHOSPITAL, SEPHOSPITAL from id_scorer.dbo.CASES
            WHERE APSYCH = 1
            """
        SQLCursor.execute(sqlquery)
        try:
            casesID = SQLCursor.fetchall()
        except Exception:
            # keep the original best-effort semantics: return [] on failure
            pass
        SQLCursor.close()
        SQLConn.close()
        return casesID
    def Export_OPS_Codes(self, filename):
        """
        Export all OPS codes, matching the data export function of the
        SpezialPsych program.

        filename
            string, path of the CSV file to write

        Returns True on success, False otherwise.
        """
        returnstatus = False
        SQLConn = pyodbc.connect(self.__connection_str)
        SQLCursor = SQLConn.cursor()
        sqlquery = """
            SELECT
            Procedures.PDATE as 'Datum',
            Codes.VALUE as 'OPS_Code',
            Procedures.NAME as 'OPS_Name',
            Cases.CID as 'Fall_Nr',
            Patients.PID as 'Patienten_Nr',
            Cases.WARD as 'Station',
            Cases.ADMHOSPITAL as 'Aufnahmedatum',
            Cases.SEPHOSPITAL as 'Entlassdatum'
            FROM
            "id_scorer"."dbo"."PROCEDURES" Procedures
            JOIN "id_scorer"."dbo"."PROCEDURES_CODES" Procedures2Codes ON Procedures2Codes.PROCEDURES_ID = Procedures.ID
            JOIN "id_scorer"."dbo"."CODES" Codes ON Codes.ID = Procedures2Codes.codes_ID
            JOIN "id_scorer"."dbo"."CASES" Cases ON Cases.ID = Procedures.CID
            JOIN "id_scorer"."dbo"."PATIENTS" Patients ON Patients.ID = CASES.PID
            WHERE Cases.APSYCH = 1
            ORDER BY Procedures.PDATE
            """
        SQLCursor.execute(sqlquery)
        try:
            # context manager guarantees the file is closed on errors, too
            # (the original leaked the handle when a write failed)
            with open(filename, 'wt') as f_csv:
                csvwriter = csv.writer(f_csv, delimiter = ';', doublequote = True, quoting=csv.QUOTE_ALL)
                csvwriter.writerow([
                    "Patienten.Nr", "Fall.Nr", "Datum", "Zeit", "OPS.Code",
                    "Aufnahmedatum", "Entlassdatum", "Station"
                ])
                for i in SQLCursor.fetchall():
                    # 1899-12-30 is the OLE automation zero date, i.e. "no date"
                    codedatum = ""
                    if i.Datum.date() != datetime.date(1899, 12, 30):
                        codedatum = str(i.Datum.date())
                    aufnahmedatum = ""
                    if i.Aufnahmedatum.date() != datetime.date(1899, 12, 30):
                        aufnahmedatum = str(i.Aufnahmedatum.date())
                    entlassdatum = ""
                    if i.Entlassdatum.date() != datetime.date(1899, 12, 30):
                        entlassdatum = str(i.Entlassdatum.date())
                    csvwriter.writerow([
                        i.Patienten_Nr, i.Fall_Nr, codedatum, i.Datum.time(),
                        i.OPS_Code, aufnahmedatum, entlassdatum, i.Station
                    ])
            returnstatus = True
        except Exception:
            # bug fix: was a Python-2-only print statement; the call form
            # works under both Python 2 and 3
            print(sys.exc_info()[0])
        SQLCursor.close()
        SQLConn.close()
        return returnstatus
if __name__ == '__main__':
    # library module: nothing to run when executed directly
    pass
| indubio/BES | system/BESpy/IDDB.py | Python | agpl-3.0 | 23,819 |
#---------------------------------------------------------------------------------------------------
# Python Module File to describe a cleaner
#
# Author: C.Paus (Jun 16, 2016)
#---------------------------------------------------------------------------------------------------
import os,sys,re,string,socket
import rex
from task import Task
DEBUG = 0
#---------------------------------------------------------------------------------------------------
"""Class: Cleaner(condorTask)
A given condor task can be cleaned with this tool.
Method to remove all log files of successfully completed jobs and analysis of all failed jobs and
remove the remaining condor queue entires of the held jobs.
We are assuming that most failed jobs are converted into a held jobs but also consider jobs that are
not otherwise completed to be 'failed' jobs. We collect all failed jobs and safe those log files
into our web repository for browsing. Only the last copy of the failed job is kept as repeated
failure overwrites the old failed job's log files. The held job entries in the condor queue are
removed and all failed job are resubmitted.
It should be noted that if a job succeeds all log files including the ones from earlier failures
will be removed.
"""
#---------------------------------------------------------------------------------------------------
class Cleaner:
    "The cleaner of a given condor task."
    #-----------------------------------------------------------------------------------------------
    # constructor for new creation
    #-----------------------------------------------------------------------------------------------
    def __init__(self,task):
        """Bind the cleaner to a condor task and prepare the cleanup scripts."""
        self.task = task
        self.localUser = os.getenv('USER')
        self.activity = os.getenv('KRAKEN_ACTIVITY')
        # shell snippets accumulated by the cleanup steps below
        self.logRemoveScript = ''
        self.webRemoveScript = ''
        self.logSaveScript = ''
        # remote executor bound to the task's scheduler
        self.rex = rex.Rex(self.task.scheduler.host,self.task.scheduler.user)
    #-----------------------------------------------------------------------------------------------
    # analyze known failures
    #-----------------------------------------------------------------------------------------------
    def logCleanup(self):
        """Run the full cleanup sequence for the task (steps A-E below)."""
        print('\n ==== C l e a n e r ====')
        # A - take all completed jobs and remove all related logs
        self.removeCompletedLogs()
        # B - find all logs from the held and quietly failed jobs, save and generate summary
        self.saveFailedLogs()
        self.analyzeLogs()
        # C - remove all held jobs from the queue
        self.removeHeldJobs()
        # D - remove entire cache on scheduler (if dataset completed)
        self.removeCache()
        # E - other leftover directory stubs
        self.removeDirectoryStubs()
        return
    #-----------------------------------------------------------------------------------------------
    # analyze saved logs and produce a summary web page
    #-----------------------------------------------------------------------------------------------
    def analyzeLogs(self):
        """Analyze the saved failure logs and write a README summary."""
        cfg = self.task.request.config
        vers = self.task.request.version
        dset = self.task.request.sample.dataset
        local = os.getenv('KRAKEN_AGENTS_LOG') + '/reviewd/%s/%s/%s/README'%(cfg,vers,dset)
        print(' - analyze failed logs')
        cmd = "failedFiles.py --book=%s/%s --pattern=%s"%(cfg,vers,dset)
        if DEBUG > 0:
            print(" CMD: %s"%(cmd))
        (rc,out,err) = self.rex.executeLocalAction(cmd)
        #print(out)
        # make sure to reload this
        self.task.request.loadNFailedJobs()
        cmd = "analyzeErrors.py --config=%s --version=%s --dataset=%s >& %s"%(cfg,vers,dset,local)
        if DEBUG > 0:
            print(" CMD: %s"%(cmd))
        (rc,out,err) = self.rex.executeLocalAction(cmd)
        return
    #-----------------------------------------------------------------------------------------------
    # remove all log files of completed jobs or queued
    #-----------------------------------------------------------------------------------------------
    def removeCompletedLogs(self):
        """Remove remote and web log files of completed/no-catalog/queued jobs."""
        cfg = self.task.request.config
        vers = self.task.request.version
        dset = self.task.request.sample.dataset
        local = os.getenv('KRAKEN_AGENTS_LOG') + '/reviewd'
        print(' - remove completed logs')
        # the three job categories all get the same treatment; the original
        # repeated this loop body three times
        for jobs in (self.task.sample.completedJobs,
                     self.task.sample.noCatalogJobs,
                     self.task.sample.queuedJobs):
            for (file,job) in jobs.items():
                # we will make a lot of reference to the ID
                id = file.replace('.root','')
                cmd = 'rm -f %s/*/%s/%s/%s/*%s*\n'%(self.activity,cfg,vers,dset,id)
                self.logRemoveScript += cmd
                cmd = 'rm -f %s/%s/%s/%s/*%s*\n'%(local,cfg,vers,dset,id)
                self.webRemoveScript += cmd
        print(' -- LogRemoval')
        (irc,rc,out,err) = self.rex.executeLongAction(self.logRemoveScript)
        print(' -- WebRemoval')
        (rc,out,err) = self.rex.executeLocalLongAction(self.webRemoveScript)
        return
    #-----------------------------------------------------------------------------------------------
    # remove entire remote cache of this task
    #-----------------------------------------------------------------------------------------------
    def removeCache(self):
        """Remove the remote task cache once all jobs of the sample completed."""
        print(' - trying to remove task cache')
        if len(self.task.sample.completedJobs) == len(self.task.sample.allJobs):
            print(' job is complete, remove the potentially remaining cache.')
        else:
            return
        cmd = "rm -rf " + self.task.logs
        if DEBUG > 0:
            print(" CMD: %s"%(cmd))
        # bug fix: irc was undefined on the local-scheduler branch, which
        # raised a NameError in the DEBUG printout below
        irc = 0
        if self.task.scheduler.isLocal():
            (rc,out,err) = self.rex.executeLocalAction(cmd)
        else:
            (irc,rc,out,err) = self.rex.executeAction(cmd)
        if DEBUG > 0 and (irc != 0 or rc != 0):
            print(' IRC: %d'%(irc))
        if DEBUG > 0 and (irc != 0 or rc != 0):
            print(' RC: %d'%(rc))
            print(' ERR:\n%s'%(err))
            print(' OUT:\n%s'%(out))
        return
    #-----------------------------------------------------------------------------------------------
    # remove entire remote cache of this task
    #-----------------------------------------------------------------------------------------------
    def removeDirectoryStubs(self):
        """Remove leftover temporary directory stubs in the storage element."""
        print(' - trying to remove remaining directory stubs in storage')
        if len(self.task.sample.completedJobs) == len(self.task.sample.allJobs):
            print(' job is complete, remove the potentially remaining cache.')
        else:
            return
        cfg = self.task.request.config
        vers = self.task.request.version
        dset = self.task.request.sample.dataset
        prefix = os.getenv('KRAKEN_TMP_PREFIX')
        base = os.getenv('KRAKEN_SE_BASE')
        directory = '%s/%s/%s/%s/%s*'%(base,cfg,vers,dset,prefix)
        cmd = " removedir " + directory
        if DEBUG > 0:
            print(" CMD: %s"%(cmd))
        (rc,out,err) = self.rex.executeLocalAction(cmd)
        if DEBUG > 0 and rc != 0:
            print(' RC: %d'%(rc))
            print(' ERR:\n%s'%(err))
            print(' OUT:\n%s'%(out))
        return
    #-----------------------------------------------------------------------------------------------
    # remove held jobs from the queue
    #-----------------------------------------------------------------------------------------------
    def removeHeldJobs(self):
        """Remove all held (JobStatus==5) jobs of this task from the condor queue."""
        base = self.task.scheduler.base + "/%s/data"%self.activity
        iwd = base + "/%s/%s/%s"%\
            (self.task.request.config,self.task.request.version,self.task.request.sample.dataset)
        # constrain the removal to held jobs of exactly this working directory
        cmd = 'condor_rm -constraint "JobStatus==5 && Iwd==\\\"%s\\\""'%(iwd)
        irc = 0
        rc = 0
        if len(self.task.request.sample.heldJobs) > 0:
            print(' - remove held jobs (n=%d): %s'%(len(self.task.request.sample.heldJobs),cmd))
            if not self.task.scheduler.isLocal():
                (irc,rc,out,err) = self.rex.executeAction(cmd)
                if DEBUG > 0 and (irc != 0 or rc != 0):
                    print(' IRC: %d'%(irc))
            else:
                (rc,out,err) = self.rex.executeLocalAction(cmd)
            if DEBUG > 0 and (irc != 0 or rc != 0):
                print(' RC: %d'%(rc))
                print(' ERR:\n%s'%(err))
                print(' OUT:\n%s'%(out))
        else:
            print(' - no held jobs to remove')
        return
    #-----------------------------------------------------------------------------------------------
    # save the log files of the failed jobs
    #-----------------------------------------------------------------------------------------------
    def saveFailedLogs(self):
        """Tar up the logs of failed jobs and unpack them into the web area."""
        print(' - find failed logs')
        # make sure this is not a new workflow
        if (self.task.request.sample.isNew):
            print("   - savedFailedLogs: new workflow needs no logs saving.")
            return
        # shortcuts to relevant configuration
        cfg = self.task.request.config
        vers = self.task.request.version
        dset = self.task.request.sample.dataset
        local = os.getenv('KRAKEN_AGENTS_LOG') + '/reviewd'
        # make the directory in any case
        cmd = 'mkdir -p %s/%s/%s/%s/;'%(local,cfg,vers,dset)
        if DEBUG>0:
            print(' Mkdir: %s'%(cmd))
        (rc,out,err) = self.rex.executeLocalAction(cmd)
        # copy the indexer to make it pretty
        cmd = 'cp ' + os.getenv('KRAKEN_AGENTS_BASE') + '/html/index-sample.php ' \
            + '%s/%s/%s/%s/index.php'%(local,cfg,vers,dset)
        if DEBUG>0:
            print(' Index: %s'%(cmd))
        (rc,out,err) = self.rex.executeLocalAction(cmd)
        # construct the script to make the tar ball
        self.logSaveScript += 'cd %s/logs/%s/%s/%s\ntar --ignore-failed-read --create --gzip --file %s-%s-%s.tgz'\
            %(self.activity,cfg,vers,dset,cfg,vers,dset)
        # find out whether we have held jobs == failures
        haveFailures = False
        # OLD -- for file,job in self.task.sample.heldJobs.iteritems():
        for (file,job) in self.task.sample.missingJobs.items():
            id = file.replace('.root','')
            cmd = ' \\\n %s.{out,err}'%(id)
            self.logSaveScript += cmd
            haveFailures = True
        # no need to continue if there are no failures
        if not haveFailures:
            if DEBUG>0:
                print(' INFO - no failed jobs found.')
            return
        # log saver script
        (irc,rc,out,err) = self.rex.executeLongAction(self.logSaveScript)
        if DEBUG > 0:
            print(" CMD:%s\n IRC: %s RC: %s\n OUT: \n%s\n ERR: \n%s"%(self.logSaveScript,irc,rc,out,err))
        # pull the tar ball over
        cmd = 'scp ' + self.task.scheduler.user + '@' + self.task.scheduler.host \
            + ':%s/logs/%s/%s/%s/%s-%s-%s.tgz'%(self.activity,cfg,vers,dset,cfg,vers,dset) \
            + ' %s/%s/%s/%s/'%(local,cfg,vers,dset)
        if DEBUG>0:
            print(' Get tar: %s'%(cmd))
        (rc,out,err) = self.rex.executeLocalAction(cmd)
        cmd = 'cd %s/%s/%s/%s/\n'%(local,cfg,vers,dset) \
            + 'tar fvzx %s-%s-%s.tgz\n'%(cfg,vers,dset) \
            + 'chmod a+r *'
        if DEBUG>0:
            print(' Untar: %s'%(cmd))
        (rc,out,err) = self.rex.executeLocalAction(cmd)
        cmd = 'rm -f %s/%s/%s/%s/%s-%s-%s.tgz'%(local,cfg,vers,dset,cfg,vers,dset)
        if DEBUG>0:
            print(' Remove local tar: %s'%(cmd))
        (rc,out,err) = self.rex.executeLocalAction(cmd)
        cmd = 'rm -f %s/logs/%s/%s/%s/%s-%s-%s.tgz'%(self.activity,cfg,vers,dset,cfg,vers,dset)
        if DEBUG>0:
            print(' Remove remote tar: %s'%(cmd))
        (irc,rc,out,err) = self.rex.executeAction(cmd)
        return
| cpausmit/Kraken | python/cleaner.py | Python | mit | 12,891 |
"""Solution to Project Euler Problem 8
https://projecteuler.net/problem=8
"""
from functools import reduce
from operator import mul
FILENAME = "resources/p008_number.txt"
SEGMENT_SIZE = 13


def compute(filename=FILENAME, segment_size=SEGMENT_SIZE):
    """Compute the largest product of `segment_size` adjacent digits in the
    number given in the file `filename`.

    Fixes vs. the original implementation:
    * the input file is closed deterministically via a context manager
    * only full-length windows are scored (the original also scored
      truncated windows at the end of the number, which could in corner
      cases out-score every full window)
    """
    with open(filename, 'r') as number_file:
        number = number_file.read().strip()
    # product of every full window of `segment_size` adjacent digits
    products = (
        reduce(mul, (int(digit) for digit in number[start:start + segment_size]))
        for start in range(len(number) - segment_size + 1)
    )
    return max(products)
| 2Cubed/ProjectEuler | euler/p008.py | Python | mit | 676 |
# -*- coding: utf-8 -*-
"""
MIT License
Copyright (c) 2017 Christian Pfarr
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from codecs import StreamRecoder, getencoder, getdecoder, getreader, getwriter
from io import BytesIO, StringIO
from itertools import repeat
from multiprocessing import Pool, cpu_count
from sys import byteorder
class Key(object):
    """
    A Base Class for the Private- and Public-Key
    """
    # constants of the human-readable text packaging (_pack/_unpack)
    _FILLER = '#'
    _PACKAGE_WIDTH = 100
    _LINE_SEPARATOR = '\n'
    _EMPTY_SEPARATOR = ''
    # one UTF-32 code unit consisting only of zero padding bytes
    _EMPTY_UTF32_BYTE = b'\x00\x00\x00\x00'
    def __init__(self, modulo, exponent):
        """
        Creates the keystore
        :param modulo: the modulo of the key
        :param exponent: either the encipher or the decipher exponent
        """
        self._modulo = modulo
        self._exponent = exponent
    @property
    def bit_length(self):
        """
        Returns the length of the key in bit
        :return: Length of the key in bit
        :rtype: int
        """
        return self._modulo.bit_length()
    @staticmethod
    def is_public():
        """
        Implements the base method for the indicator of public key.
        Returns always False
        :return: False
        :rtype: bool
        """
        return False
    @staticmethod
    def is_private():
        """
        Implements the base method for the indicator of private key.
        Returns always False.
        :return: False
        :rtype: bool
        """
        return False
    @property
    def exponent(self):
        """
        Get the exponent of the key.
        :return: Exponent of the key.
        :rtype: int
        """
        return self._exponent
    @property
    def modulo(self):
        """
        Get the modulo of the key.
        :return: Modulo of the key.
        :rtype: int
        """
        return self._modulo
    def _modular_exponentiation(self, integer):
        """
        The operation of modular exponentiation calculates the remainder when an integer (the base)
        raised to the power of the exponent, divided by an modulo.
        :param integer: Integer to process the modular exponentiation.
        :return: The result of the modular exponentiation.
        :rtype: int
        """
        return pow(integer, self._exponent, self._modulo)
    def _stream_modular_exponentiation(self, utf32bytes):
        """
        Calculates the operation of modular exponentiation over a stream of UTF-32 formatted bytes.
        Works with a pool of processes by cpu_count().
        :param utf32bytes: UTF-32 formatted input bytes.
        :return: UTF-32 formatted output bytes.
        :rtype: bytes
        """
        # input stream of UTF-32 formatted bytes
        input_stream = BytesIO(utf32bytes)
        # clean memory
        del utf32bytes
        # output stream of UTF-32 formatted bytes
        output_stream = BytesIO()
        # biggest possible buffer size is length of the modulo in bytes
        buffer_length = self.bit_length // 8
        buffer = bytearray(buffer_length)
        input_integers = []
        # read the integer values into an array
        # NOTE(review): '> 1' silently drops a final read of exactly one
        # byte; '> 0' looks intended -- confirm before changing
        while input_stream.readinto(buffer) > 1:
            input_integers.append(int.from_bytes(buffer, byteorder))
            # clear the buffer after every read
            buffer = bytearray(buffer_length)
        # clean memory
        del buffer, input_stream
        # create a pool to compute the modular exponentiation in parallel processors
        worker = Pool(cpu_count())
        # compute the modular exponentiation parallel
        output_integers = worker.map(self._modular_exponentiation, input_integers)
        # clean memory
        # NOTE(review): the pool is never close()d/join()ed; worker
        # processes are reclaimed only via garbage collection
        del input_integers, worker
        # write the calculated integer values into an output stream
        for output_integer in output_integers:
            # save the output integers as bytes, trim empty UTF-32 formatted bytes and write them to the output stream
            output_stream.write(self._trim_empty_utf32bytes(output_integer.to_bytes(buffer_length, byteorder)))
        # clean memory
        del buffer_length
        # return the whole stream as bytes
        return output_stream.getvalue()
    @staticmethod
    def _utf8str2utf32bytes(utf8str):
        """
        Change the format of a given UTF-8 string into UTF-32 with a fix length of bytes.
        :param utf8str: UTF-8 string.
        :return: UTF-32 string.
        :rtype: string
        """
        return StreamRecoder(stream=BytesIO(utf8str.encode(encoding='utf8')),
                             encode=getencoder('utf32'),
                             decode=getdecoder('utf8'),
                             Reader=getreader('utf8'),
                             Writer=getwriter('utf32')).read()
    @staticmethod
    def _utf32bytes2utf8str(utf32bytes):
        """
        Change the format of a given UTF-32 string with a fix length of bytes into an UTF-8 string
        with variable length of bytes.
        :param utf32bytes: UTF-32 string.
        :return: UTF-8 string.
        :rtype: string
        """
        return StreamRecoder(stream=BytesIO(utf32bytes),
                             encode=getencoder('utf8'),
                             decode=getdecoder('utf32'),
                             Reader=getreader('utf32'),
                             Writer=getwriter('utf8')).read().decode(encoding='utf8')
    def _pack(self, content, title='OPEN RSA DATA',
              description='build with OpenRSA - a free and simple pure python implementation'):
        """
        Packs the content into an human readable format.
        :param content: Hex representation of an UTF-32 string.
        :return: Human readable format for the content.
        :rtype: string
        """
        # calculate configured length of the fillers
        # note: integer division -- for odd width/title combinations the
        # header line can come out one character short of _PACKAGE_WIDTH
        filler_length = (self._PACKAGE_WIDTH - len(title)) // 2
        input_stream = StringIO(content)
        # clean memory
        del content
        output_stream = StringIO()
        # write header line
        output_stream.write(self._EMPTY_SEPARATOR.join(repeat(self._FILLER, filler_length))
                            + title
                            + self._EMPTY_SEPARATOR.join(repeat(self._FILLER, filler_length))
                            + self._LINE_SEPARATOR)
        # write description
        output_stream.write(description + self._LINE_SEPARATOR)
        # clean memory
        del description, title, filler_length
        # split content into lines
        while True:
            # read input lines
            line = input_stream.read(self._PACKAGE_WIDTH)
            # check if something could be read
            if len(line) > 0:
                # write the line into the output stream
                output_stream.write(line + self._LINE_SEPARATOR)
            else:
                # clean memory
                del line, input_stream
                break
        # write footer line
        output_stream.write(self._EMPTY_SEPARATOR.join(repeat(self._FILLER, self._PACKAGE_WIDTH)))
        # return the output stream as a single string
        return output_stream.getvalue()
    def _unpack(self, packed):
        """
        Unpacks the content from an human readable format, into a raw hex representation.
        :param packed: Human readable format for the content.
        :return: Hex representation of an UTF-32 string.
        :rtype: string
        """
        # split the packed content into lines
        content = packed.splitlines()
        # remove header
        del content[0]
        # remove description
        del content[0]
        # remove footer
        del content[len(content) - 1]
        # join and return the remaining lines without a separator
        return self._EMPTY_SEPARATOR.join(content)
    def _trim_empty_utf32bytes(self, utf32bytes):
        """
        Usual trim function for UTF-32 bytes.
        :param utf32bytes: UTF-32 bytes.
        :return: Trimmed UTF-32 bytes.
        :rtype: bytes
        """
        input_stream = BytesIO(utf32bytes)
        # clean memory
        del utf32bytes
        output_stream = BytesIO()
        buffer = bytearray(32 // 8)
        # NOTE(review): '> 1' both drops a trailing 1-byte read and lets
        # partial 2/3-byte reads through unchanged -- confirm intent
        while input_stream.readinto(buffer) > 1:
            if buffer != self._EMPTY_UTF32_BYTE:
                output_stream.write(buffer)
        # clean memory
        del buffer, input_stream
        return output_stream.getvalue()
# coding=utf-8
# URL: http://code.google.com/p/sickbeard
# Originally written for SickGear
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import urllib
from sickbeard.providers import generic
from sickbeard import logger
from sickbeard import tvcache
from sickbeard.helpers import mapIndexersToShow
from sickrage.helper.exceptions import AuthException
class TitansOfTVProvider(generic.TorrentProvider):
    def __init__(self):
        """Set up provider name, API endpoints and the result cache."""
        generic.TorrentProvider.__init__(self, 'TitansOfTV')
        # provider can match shows numbered by absolute episode number
        self.supportsAbsoluteNumbering = True
        self.api_key = None
        self.ratio = None
        self.cache = TitansOfTVCache(self)
        self.url = 'http://titansof.tv/api/torrents'
        # download endpoint: the %s placeholders are (torrent id, api key)
        self.download_url = 'http://titansof.tv/api/torrents/%s/download?apikey=%s'
    def seedRatio(self):
        """Return the configured seed ratio for this provider (may be None)."""
        return self.ratio
def _checkAuth(self):
if not self.api_key:
raise AuthException('Your authentication credentials for ' + self.name + ' are missing, check your config.')
return True
def _checkAuthFromData(self, data):
if 'error' in data:
logger.log(u"Invalid api key. Check your settings", logger.WARNING)
return False
return True
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
# FIXME ADD MODE
self._checkAuth()
results = []
params = {}
self.headers.update({'X-Authorization': self.api_key})
if search_params:
params.update(search_params)
searchURL = self.url + '?' + urllib.urlencode(params)
logger.log(u"Search string: %s " % search_params, logger.DEBUG)
logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
parsedJSON = self.getURL(searchURL, json=True) # do search
if not parsedJSON:
logger.log(u"No data returned from provider", logger.DEBUG)
return results
if self._checkAuthFromData(parsedJSON):
try:
found_torrents = parsedJSON['results']
except Exception:
found_torrents = {}
for result in found_torrents:
title = result.get('release_name', '')
tid = result.get('id', '')
download_url = self.download_url % (tid, self.api_key)
# FIXME size, seeders, leechers
size = -1
seeders = 1
leechers = 0
if not all([title, download_url]):
continue
# Filter unseeded torrent
# if seeders < self.minseed or leechers < self.minleech:
# if mode != 'RSS':
# logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
# continue
item = title, download_url, size, seeders, leechers
logger.log(u"Found result: %s " % title, logger.DEBUG)
results.append(item)
# FIXME SORTING
return results
def _get_season_search_strings(self, ep_obj):
search_params = {'limit': 100}
search_params['season'] = 'Season %02d' % ep_obj.scene_season
if ep_obj.show.indexer == 1:
search_params['series_id'] = ep_obj.show.indexerid
elif ep_obj.show.indexer == 2:
tvdbid = mapIndexersToShow(ep_obj.show)[1]
if tvdbid:
search_params['series_id'] = tvdbid
return [search_params]
def _get_episode_search_strings(self, ep_obj, add_string=''):
if not ep_obj:
return [{}]
search_params = {'limit': 100}
# Do a general name search for the episode, formatted like SXXEYY
search_params['episode'] = 'S%02dE%02d' % (ep_obj.scene_season, ep_obj.scene_episode)
if ep_obj.show.indexer == 1:
search_params['series_id'] = ep_obj.show.indexerid
elif ep_obj.show.indexer == 2:
tvdbid = mapIndexersToShow(ep_obj.show)[1]
if tvdbid:
search_params['series_id'] = tvdbid
return [search_params]
class TitansOfTVCache(tvcache.TVCache):
    """Result cache for TitansOfTVProvider, refreshed via a generic search."""

    def __init__(self, provider_obj):
        tvcache.TVCache.__init__(self, provider_obj)

        # At least 10 minutes between queries
        self.minTime = 10

    def _getRSSData(self):
        # The tracker has no RSS feed; an unfiltered search capped at 100
        # results stands in for it.
        search_params = {'limit': 100}
        return self.provider._doSearch(search_params)


# Module-level singleton picked up by the provider registry.
provider = TitansOfTVProvider()
| hernandito/SickRage | sickbeard/providers/titansoftv.py | Python | gpl-3.0 | 5,198 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# ===========================================================================
# Copyright (c) 2007-2012 Barend Gehrels, Amsterdam, the Netherlands.
# Copyright (c) 2008-2012 Bruno Lalande, Paris, France.
# Copyright (c) 2009-2012 Mateusz Loskot ([email protected]), London, UK
#
# Use, modification and distribution is subject to the Boost Software License,
# Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
# ============================================================================
import os, sys
# Run from the script's own directory so all relative paths below resolve.
script_dir = os.path.dirname(__file__)
os.chdir(os.path.abspath(script_dir))
print("Boost.Geometry is making .qbk files in %s" % os.getcwd())

# The doxygen and doxygen_xml2qbk executables may be overridden through
# environment variables; otherwise whatever is on PATH is used.
if 'DOXYGEN' in os.environ:
    doxygen_cmd = os.environ['DOXYGEN']
else:
    doxygen_cmd = 'doxygen'

if 'DOXYGEN_XML2QBK' in os.environ:
    doxygen_xml2qbk_cmd = os.environ['DOXYGEN_XML2QBK']
else:
    doxygen_xml2qbk_cmd = 'doxygen_xml2qbk'

# Command template shared by all *_to_quickbook helpers below; the first %s
# is the doxygen XML basename, the second the output .qbk basename.
cmd = doxygen_xml2qbk_cmd
cmd = cmd + " --xml doxy/doxygen_output/xml/%s.xml"
cmd = cmd + " --start_include boost/geometry/"
cmd = cmd + " --convenience_header_path ../../../boost/geometry/"
cmd = cmd + " --convenience_headers geometry.hpp,geometries/geometries.hpp"
cmd = cmd + " --skip_namespace boost::geometry::"
cmd = cmd + " --copyright src/copyright_block.qbk"
cmd = cmd + " --output_member_variables false"
cmd = cmd + " > generated/%s.qbk"
def run_command(command):
    """Run *command* through the shell; raise Exception on non-zero exit."""
    status = os.system(command)
    if status != 0:
        raise Exception("Error running %s" % command)
def remove_all_files(dir):
    """Delete every file directly inside *dir*; no-op if *dir* is missing.

    BUG FIX: the original built paths with plain concatenation (`dir + f`),
    which silently produced wrong paths whenever *dir* lacked a trailing
    slash (e.g. 'xml' + 'file.xml' -> 'xmlfile.xml'). os.path.join handles
    both cases, so existing callers passing a trailing slash still work.
    """
    if os.path.exists(dir):
        for f in os.listdir(dir):
            os.remove(os.path.join(dir, f))
def call_doxygen():
    """Run doxygen in the doxy/ subdirectory after clearing old XML output."""
    os.chdir("doxy")
    remove_all_files("doxygen_output/xml/")
    run_command(doxygen_cmd)
    os.chdir("..")
# Each helper below converts one doxygen XML page into a .qbk file via the
# `cmd` template. The "_" -> "__" replacement mirrors doxygen's file-name
# mangling for identifiers containing underscores.

def group_to_quickbook(section):
    """Convert a doxygen group (@defgroup) page into generated/<section>.qbk."""
    run_command(cmd % ("group__" + section.replace("_", "__"), section))

def model_to_quickbook(section):
    """Convert a boost::geometry::model class page into a .qbk file."""
    run_command(cmd % ("classboost_1_1geometry_1_1model_1_1" + section.replace("_", "__"), section))

def model_to_quickbook2(classname, section):
    """Like model_to_quickbook, but takes an already-mangled class name."""
    run_command(cmd % ("classboost_1_1geometry_1_1model_1_1" + classname, section))

def struct_to_quickbook(section):
    """Convert a boost::geometry struct page into a .qbk file."""
    run_command(cmd % ("structboost_1_1geometry_1_1" + section.replace("_", "__"), section))

def class_to_quickbook(section):
    """Convert a boost::geometry class page into a .qbk file."""
    run_command(cmd % ("classboost_1_1geometry_1_1" + section.replace("_", "__"), section))

def strategy_to_quickbook(section):
    """Convert a strategy named 'namespace::name' into a ns_name.qbk file."""
    # Split "ns::strategy" into its namespace and class parts.
    p = section.find("::")
    ns = section[:p]
    strategy = section[p+2:]
    run_command(cmd % ("classboost_1_1geometry_1_1strategy_1_1"
        + ns.replace("_", "__") + "_1_1" + strategy.replace("_", "__"),
        ns + "_" + strategy))

def cs_to_quickbook(section):
    """Convert a coordinate-system struct (boost::geometry::cs) into a .qbk file."""
    run_command(cmd % ("structboost_1_1geometry_1_1cs_1_1" + section.replace("_", "__"), section))
# Regenerate the doxygen XML, then convert each documented entity listed
# below into a QuickBook (.qbk) page.
call_doxygen()

algorithms = ["append", "assign", "make", "clear"
    , "area", "buffer", "centroid", "convert", "correct", "covered_by"
    , "convex_hull", "crosses", "difference", "disjoint", "distance"
    , "envelope", "equals", "expand", "for_each", "is_simple", "is_valid"
    , "intersection", "intersects", "length", "num_geometries"
    , "num_interior_rings", "num_points", "overlaps", "perimeter"
    , "reverse", "simplify", "sym_difference", "touches", "transform"
    , "union", "unique", "within"]

access_functions = ["get", "set", "exterior_ring", "interior_rings"
    , "num_points", "num_interior_rings", "num_geometries"]

coordinate_systems = ["cartesian", "geographic", "polar", "spherical", "spherical_equatorial"]

core = ["closure", "coordinate_system", "coordinate_type", "cs_tag"
    , "dimension", "exception", "interior_type"
    , "degree", "radian"
    , "is_radian", "point_order"
    , "point_type", "ring_type", "tag", "tag_cast" ]

exceptions = ["exception", "centroid_exception"];

iterators = ["circular_iterator", "closing_iterator"
    , "ever_circling_iterator"]

models = ["point", "linestring", "box"
    , "polygon", "segment", "ring"
    , "multi_linestring", "multi_point", "multi_polygon", "referring_segment"]

strategies = ["distance::pythagoras", "distance::pythagoras_box_box"
    , "distance::pythagoras_point_box", "distance::haversine"
    , "distance::cross_track", "distance::projected_point"
    , "within::winding", "within::franklin", "within::crossings_multiply"
    , "area::surveyor", "area::huiller"
    , "buffer::point_circle", "buffer::point_square"
    , "buffer::join_round", "buffer::join_miter"
    , "buffer::end_round", "buffer::end_flat"
    , "buffer::distance_symmetric", "buffer::distance_asymmetric"
    , "buffer::side_straight"
    , "centroid::bashein_detmer", "centroid::average"
    , "convex_hull::graham_andrew"
    , "simplify::douglas_peucker"
    , "side::side_by_triangle", "side::side_by_cross_track", "side::spherical_side_formula"
    , "transform::inverse_transformer", "transform::map_transformer"
    , "transform::rotate_transformer", "transform::scale_transformer"
    , "transform::translate_transformer", "transform::ublas_transformer"
    ]

views = ["box_view", "segment_view"
    , "closeable_view", "reversible_view", "identity_view"]

# Convert each category with the matching doxygen page-name scheme.
for i in algorithms:
    group_to_quickbook(i)

for i in access_functions:
    group_to_quickbook(i)

for i in coordinate_systems:
    cs_to_quickbook(i)

for i in core:
    struct_to_quickbook(i)

for i in exceptions:
    class_to_quickbook(i)

for i in iterators:
    struct_to_quickbook(i)

for i in models:
    model_to_quickbook(i)

for i in strategies:
    strategy_to_quickbook(i)

for i in views:
    struct_to_quickbook(i)

# point_xy lives in a nested d2 namespace, hence the pre-mangled name.
model_to_quickbook2("d2_1_1point__xy", "point_xy")

group_to_quickbook("arithmetic")
group_to_quickbook("enum")
group_to_quickbook("register")
group_to_quickbook("svg")
class_to_quickbook("svg_mapper")
group_to_quickbook("wkt")

# Generate the index pages. NOTE(review): execfile is Python 2 only.
os.chdir("index")
execfile("make_qbk.py")
os.chdir("..")

# Use either bjam or b2 or ../../../b2 (the last should be done on Release branch)
run_command("b2")
| rkq/cxxexp | third-party/src/boost_1_56_0/libs/geometry/doc/make_qbk.py | Python | mit | 6,060 |
# Copyright 2015 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
import functools
from pyramid.settings import asbool
# Define default values for options.
DEFAULT_BUNDLESERVICE_URL = 'https://api.jujucharms.com/bundleservice/'
DEFAULT_CHARMSTORE_URL = 'https://api.jujucharms.com/charmstore/'
DEFAULT_PLANS_URL = 'https://api.jujucharms.com/omnibus/'
DEFAULT_PAYMENT_URL = 'https://api.jujucharms.com/payment/'
DEFAULT_TERMS_URL = 'https://api.jujucharms.com/terms/'
def update(settings):
    """Normalize and update the Juju GUI app settings.

    Modify the given settings object in place.
    """
    # Plain settings: missing or empty values fall back to the default.
    _update(settings, 'jujugui.apiAddress', default=None)
    _update(settings, 'jujugui.jujushellURL', default=None)
    _update(settings, 'jujugui.auth', default=None)
    _update(settings, 'jujugui.controllerSocketTemplate', default='/api')
    _update(settings, 'jujugui.GTM_enabled', default=False)
    _update(settings, 'jujugui.password', default=None)
    _update(settings, 'jujugui.socketTemplate', default='/model/$uuid/api')
    _update(settings, 'jujugui.stripe_key', default='')
    _update(settings, 'jujugui.user', default=None)
    _update(settings, 'jujugui.static_url', default='')
    _update(settings, 'jujugui.stats_url', default='')
    _update(settings, 'jujugui.flags', default={})
    # URL settings: additionally normalized to end with a trailing slash.
    _update_url(settings, 'jujugui.base_url', default='')
    _update_url(settings, 'jujugui.bundleservice_url',
                default=DEFAULT_BUNDLESERVICE_URL)
    _update_url(settings, 'jujugui.charmstore_url',
                default=DEFAULT_CHARMSTORE_URL)
    _update_url(settings, 'jujugui.payment_url', default=DEFAULT_PAYMENT_URL)
    _update_url(settings, 'jujugui.plans_url', default=DEFAULT_PLANS_URL)
    _update_url(settings, 'jujugui.terms_url', default=DEFAULT_TERMS_URL)
    # Boolean settings: coerced with pyramid's asbool.
    _update_bool(settings, 'jujugui.combine', default=True)
    _update_bool(settings, 'jujugui.gisf', default=False)
    _update_bool(settings, 'jujugui.gzip', default=True)
    _update_bool(settings, 'jujugui.insecure', default=False)
    _update_bool(settings, 'jujugui.interactive_login', default=False)
    _update_bool(settings, 'jujugui.raw', default=False)
def _update(settings, name, default=None, convert=lambda value: value):
"""Update the value with the given name on the given settings.
If the value is not found in settings, or it is empty, the given default is
used. If a convert callable is provided, it is called on the resulting
value.
Modify the given settings object in place.
"""
val = settings.get(name, default)
if val == '' or val is None:
val = default
settings[name] = convert(val)
def _ensure_trailing_slash(url):
if not url.endswith('/'):
url += '/'
return url
# Specialized updaters: _update with a fixed convert callable.
_update_bool = functools.partial(_update, convert=asbool)  # coerce to boolean
_update_url = functools.partial(_update, convert=_ensure_trailing_slash)  # normalize URL
| mitechie/juju-gui | jujugui/options.py | Python | agpl-3.0 | 2,966 |
#!/usr/bin/env python
import clip, logging, logtool, temporary
from path import Path
from cfgtool.cmdbase import CmdBase
LOG = logging.getLogger (__name__)
class Action (CmdBase):
  """cfgtool command: render a template file into an output file.

  Output destination is controlled by kwargs.out_file: a path writes there,
  "=" strips the template's last suffix, and no value renders to a temp
  file whose contents are then reported to the user.
  """

  @logtool.log_call
  def run (self):
    # Writes are destructive, so require an explicit --force.
    if not self.conf.force:
      self.error (" Must force writes (--force).")
      clip.exit (err = True)
    rc = False
    content = None
    in_file = Path (self.kwargs.in_file)
    if not in_file.isfile ():
      self.error ("Template file does not exist.")
      clip.exit (err = True)
    self.info (" Generate...")
    if self.kwargs.out_file == "=":
      # "=": derive the output name by stripping the template's suffix.
      f = Path (self.kwargs.in_file)
      out_file = f.parent / f.namebase
      rc = self.process_one_file (in_file, out_file, self.make_file)
      content = out_file.bytes ()
    elif self.kwargs.out_file:
      out_file = Path (self.kwargs.out_file)
      rc = self.process_one_file (in_file, out_file, self.make_file)
      content = out_file.bytes ()
    else:
      # No output requested: render into a temp file; contents shown below.
      with temporary.temp_file () as fname:
        out_file = Path (fname)
        rc = self.process_one_file (in_file, out_file, self.make_file)
        content = out_file.bytes ()
    if not self.kwargs.out_file:
      self.info ("Produced file:")
      self.report (content)
    # NOTE(review): rc truthy maps to err=True here — looks like rc carries
    # an error status from process_one_file; confirm against CmdBase.
    clip.exit (err = (True if rc else False))
| clearclaw/cfgtool | cfgtool/cmd_process.py | Python | lgpl-3.0 | 1,277 |
# coding: utf-8
"""
DocuSign REST API
The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign. # noqa: E501
OpenAPI spec version: v2.1
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class UpdateTransactionResponse(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """

    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'redirection_url': 'str'
    }

    attribute_map = {
        'redirection_url': 'redirectionUrl'
    }

    def __init__(self, redirection_url=None):  # noqa: E501
        """UpdateTransactionResponse - a model defined in Swagger"""  # noqa: E501

        self._redirection_url = None
        self.discriminator = None

        if redirection_url is not None:
            self.redirection_url = redirection_url

    @property
    def redirection_url(self):
        """Gets the redirection_url of this UpdateTransactionResponse.  # noqa: E501

          # noqa: E501

        :return: The redirection_url of this UpdateTransactionResponse.  # noqa: E501
        :rtype: str
        """
        return self._redirection_url

    @redirection_url.setter
    def redirection_url(self, redirection_url):
        """Sets the redirection_url of this UpdateTransactionResponse.

          # noqa: E501

        :param redirection_url: The redirection_url of this UpdateTransactionResponse.  # noqa: E501
        :type: str
        """

        self._redirection_url = redirection_url

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively serialize nested models, lists, and dicts of models.
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(UpdateTransactionResponse, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, UpdateTransactionResponse):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| docusign/docusign-python-client | docusign_esign/models/update_transaction_response.py | Python | mit | 3,447 |
# encoding: utf-8
"""
Step implementations for image characterization features
"""
from __future__ import absolute_import, print_function, unicode_literals
from behave import given, then, when
from docx.image.image import Image
from helpers import test_file
# given ===================================================
# BUG FIX (review): the original pattern contained the literal '(unknown)'
# rather than a behave placeholder, so the captured file name could never
# reach the step function's `filename` parameter. '{filename}' restores the
# intended capture, matching the parameter name expected below.
@given('the image file \'{filename}\'')
def given_image_filename(context, filename):
    # Record the full path on the scenario context for the later @when step.
    context.image_path = test_file(filename)
# when ====================================================
@when('I construct an image using the image path')
def when_construct_image_using_path(context):
    # Load the image from the path captured by the preceding @given step.
    context.image = Image.from_file(context.image_path)
# then ====================================================
# Assertion steps: each compares one property of context.image against the
# value captured from the step text.

@then('the image has content type \'{mime_type}\'')
def then_image_has_content_type(context, mime_type):
    content_type = context.image.content_type
    assert content_type == mime_type, (
        "expected MIME type '%s', got '%s'" % (mime_type, content_type)
    )

@then('the image has {horz_dpi_str} horizontal dpi')
def then_image_has_horizontal_dpi(context, horz_dpi_str):
    expected_horz_dpi = int(horz_dpi_str)
    horz_dpi = context.image.horz_dpi
    assert horz_dpi == expected_horz_dpi, (
        "expected horizontal dpi %d, got %d" % (expected_horz_dpi, horz_dpi)
    )

@then('the image has {vert_dpi_str} vertical dpi')
def then_image_has_vertical_dpi(context, vert_dpi_str):
    expected_vert_dpi = int(vert_dpi_str)
    vert_dpi = context.image.vert_dpi
    assert vert_dpi == expected_vert_dpi, (
        "expected vertical dpi %d, got %d" % (expected_vert_dpi, vert_dpi)
    )

@then('the image is {px_height_str} pixels high')
def then_image_is_cx_pixels_high(context, px_height_str):
    expected_px_height = int(px_height_str)
    px_height = context.image.px_height
    assert px_height == expected_px_height, (
        "expected pixel height %d, got %d" % (expected_px_height, px_height)
    )

@then('the image is {px_width_str} pixels wide')
def then_image_is_cx_pixels_wide(context, px_width_str):
    expected_px_width = int(px_width_str)
    px_width = context.image.px_width
    assert px_width == expected_px_width, (
        "expected pixel width %d, got %d" % (expected_px_width, px_width)
    )
| LuoZijun/uOffice | temp/pydocxx/features/steps/image.py | Python | gpl-3.0 | 2,273 |
"""
Context processor for lightweight session messages.
Time-stamp: <2008-07-19 23:16:19 carljm context_processors.py>
"""
from django.conf import settings as django_settings
from django.contrib import messages as django_messages
from django.utils.encoding import StrAndUnicode
from askbot.user_messages import get_and_delete_messages
def user_messages(request):
    """
    Returns session messages for the current session.
    """
    #don't delete messages on ajax requests b/c we can't show
    #them the same way as in the server side generated html
    if request.is_ajax():
        return {}
    if not request.path.startswith('/' + django_settings.ASKBOT_URL):
        #todo: a hack, for real we need to remove this middleware
        #and switch to the new-style session messages
        return {}
    if hasattr(request.user, 'get_and_delete_messages'):
        # Merge legacy user messages with django.contrib.messages.
        # NOTE(review): `+=` assumes get_and_delete_messages() returns a
        # type that supports concatenation with the messages storage —
        # confirm against the user model in use.
        messages = request.user.get_and_delete_messages()
        messages += django_messages.get_messages(request)
        return { 'user_messages': messages }
    else:
        return { 'user_messages': django_messages.get_messages(request) }
| knowledgepoint-devs/askbot-devel | askbot/user_messages/context_processors.py | Python | gpl-3.0 | 1,115 |
import os
import sys
import time
# get the utils from the parent directory
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from utils import SEx_head
from pipe_utils import tableio, deredden
# Find the eBV dust correction for each source in the catalogs
def DustCorrection(self):
    ''' This figures out the dust extinction and corrects the sextractor
    photometry that has been cleaned by the BuildColorCat function. It also
    puts the dust corrections into a series of dictions that are used by
    BuildColorCat. So if we don't run this function it doesn't include the
    dust correction. This is even true after it writes a dust file. I think
    the dust file is really just there for us to inspect for funny stuff.
    '''
    print()
    self.DustCat = self.tilename + ".dust"
    # Get RA,DEC from the detection catalog (i band)
    detCatalog = self.combcat['i']
    detcols = SEx_head(detCatalog, verb=None)
    cols = (detcols['NUMBER'], detcols['X_WORLD'], detcols['Y_WORLD'])
    (id, ra, dec) = tableio.get_data(detCatalog, cols)
    outColumns = ['ID', ]
    # Get e(B-V) for every source in the detection catalog
    print("# Computing e(B-V) for all %s ra,dec" % len(ra), file=sys.stderr)
    self.eBV = deredden.get_EBV(ra, dec)
    print("# Done...", file=sys.stderr)
    # Prepare the header for the output file.
    # BUG FIX: the original format string used '{0:.4f}, {0:.4f}, {0:.4f}',
    # which printed the *mean* three times; auto-numbered fields now report
    # mean, min and max as the label promises.
    header = '## {}\n'.format(time.ctime()) + \
             '## Dust correction extinction ' +\
             'for each object/filter in: {}\n'.format(self.tilename) +\
             '## This file was generated automatically by the BCS ' +\
             'Rutgers pipeline\n' +\
             '## These must be subtracted from the SExtractor ' +\
             'magnitudes \n' +\
             '## Dust Correction e(B-V), mean, min, max: ' +\
             '{:.4f}, {:.4f}, {:.4f}\n'.format(self.eBV.mean(),
                                               self.eBV.min(),
                                               self.eBV.max())
    VarsOut = [id]
    # Per-filter extinction correction; errors assume a 16% relative
    # uncertainty on the correction. (`filt` avoids shadowing builtin
    # `filter`.)
    for filt in self.filters:
        self.XCorr[filt] = deredden.filterFactor(filt) * self.eBV
        self.XCorrError[filt] = self.XCorr[filt] * 0.16
        # Some more work on the header
        header += "## Dust Correction %s, mean, min, max: %.4f %.4f, %.4f mags\n" % (
            filt, self.XCorr[filt].mean(), self.XCorr[filt].min(),
            self.XCorr[filt].max())
        outColumns.append(filt + '_MOSAICII Dust Correction')
        outColumns.append(filt + '_MOSAICII Dust Correction Error')
        VarsOut.append(self.XCorr[filt])
        VarsOut.append(self.XCorrError[filt])
    # Column index legend: ID first, then correction/error per filter.
    i = 0
    header += '# ' + str(i + 1) + '\t' + outColumns[i] + '\n'
    for filt in self.filters:
        header += '# {}\t{}\n'.format(str(i + 2), outColumns[i + 1])
        header += '# {}\t{}\n'.format(str(i + 3), outColumns[i + 2])
        i += 2
    vars = tuple(VarsOut)
    # Fixed-width rows: integer ID followed by float columns. (`row_format`
    # avoids shadowing builtin `format`; the keyword name is unchanged.)
    row_format = '%8i' + '%10.5f ' * (len(vars) - 1)
    print('# Writing Dust Extinction Catalog...', file=sys.stderr)
    tableio.put_data(self.DustCat,
                     vars,
                     header=header,
                     format=row_format,
                     append='no')
    print('# Dust file complete.', file=sys.stderr)
    return
| boada/planckClusters | MOSAICpipe/plugins/_dust.py | Python | mit | 3,312 |
# Copyright 2015: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from rally.plugins.openstack.context.murano import murano_environments
from rally.plugins.openstack.scenarios.murano import utils as murano_utils
from tests.unit import test
CTX = "rally.plugins.openstack.context.murano.murano_environments"
SCN = "rally.plugins.openstack.scenarios"
class MuranoEnvironmentGeneratorTestCase(test.TestCase):
    """Tests for the murano_environments.EnvironmentGenerator context."""

    def setUp(self):
        super(MuranoEnvironmentGeneratorTestCase, self).setUp()

    @staticmethod
    def _get_context():
        # Minimal fake task context: two tenants with one user each, and
        # one murano environment requested per tenant.
        return {
            "config": {
                "users": {
                    "tenants": 2,
                    "users_per_tenant": 1,
                    "concurrent": 1,
                },
                "murano_environments": {
                    "environments_per_tenant": 1
                }
            },
            "admin": {
                "credential": mock.MagicMock()
            },
            "task": mock.MagicMock(),
            "owner_id": "foo_uuid",
            "users": [
                {
                    "id": "user_0",
                    "tenant_id": "tenant_0",
                    "credential": mock.MagicMock()
                },
                {
                    "id": "user_1",
                    "tenant_id": "tenant_1",
                    "credential": mock.MagicMock()
                }
            ],
            "tenants": {
                "tenant_0": {"name": "tenant_0_name"},
                "tenant_1": {"name": "tenant_1_name"}
            }
        }

    @mock.patch("%s.murano.utils.MuranoScenario._create_environment" % SCN)
    def test_setup(self, mock_create_env):
        # setup() should create one environment per tenant and record it in
        # that tenant's "environments" list.
        murano_ctx = murano_environments.EnvironmentGenerator(
            self._get_context())
        murano_ctx.setup()

        self.assertEqual(2, len(murano_ctx.context["tenants"]))
        tenant_id = murano_ctx.context["users"][0]["tenant_id"]
        self.assertEqual([mock_create_env.return_value],
                         murano_ctx.context["tenants"][tenant_id][
                             "environments"])

    @mock.patch("%s.murano.utils.MuranoScenario._create_environment" % SCN)
    @mock.patch("%s.resource_manager.cleanup" % CTX)
    def test_cleanup(self, mock_cleanup, mock_create_env):
        # cleanup() must delegate to resource_manager.cleanup with the
        # murano.environments resource name and this context's users.
        murano_ctx = murano_environments.EnvironmentGenerator(
            self._get_context())
        murano_ctx.setup()
        murano_ctx.cleanup()

        mock_cleanup.assert_called_once_with(
            names=["murano.environments"],
            users=murano_ctx.context["users"],
            superclass=murano_utils.MuranoScenario,
            task_id="foo_uuid")
| yeming233/rally | tests/unit/plugins/openstack/context/murano/test_murano_environments.py | Python | apache-2.0 | 3,203 |
# -*- coding: utf-8 -*-
#Copyright (c) 2010, Walter Bender
#Copyright (c) 2010, Tony Forster
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in
#all copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
import gst, time
import gobject
from TurtleArt.tautils import debug_output
class Camera():
    ''' Sets up a pipe from the camera to a pixbuf and emits a signal
    when the image is ready. '''

    def __init__(self, device='/dev/video0'):
        ''' Prepare camera pipeline to pixbuf and signal watch '''
        # Pipeline: v4l2 capture -> colorspace conversion -> pixbuf sink.
        self.pipe = gst.Pipeline('pipeline')
        v4l2src = gst.element_factory_make('v4l2src', None)
        v4l2src.props.device = device
        self.pipe.add(v4l2src)
        ffmpegcolorspace = gst.element_factory_make('ffmpegcolorspace', None)
        self.pipe.add(ffmpegcolorspace)
        gdkpixbufsink = gst.element_factory_make('gdkpixbufsink', None)
        self.pipe.add(gdkpixbufsink)
        gst.element_link_many(v4l2src, ffmpegcolorspace, gdkpixbufsink)
        if self.pipe is not None:
            # Listen on the pipeline bus for the pixbuf message.
            self.bus = self.pipe.get_bus()
            self.bus.add_signal_watch()
            self.bus.connect('message', self._on_message)
            status = True
        else:
            # NOTE(review): `status` is assigned but never used or returned —
            # presumably leftover; confirm before removing.
            status = False

    def _on_message(self, bus, message):
        ''' We get a message if a pixbuf is available '''
        if message.structure is not None:
            if message.structure.get_name() == 'pixbuf':
                self.pixbuf = message.structure['pixbuf']
                self.image_ready = True

    def start_camera_input(self):
        ''' Start grabbing '''
        self.pixbuf = None
        self.image_ready = False
        self.pipe.set_state(gst.STATE_PLAYING)
        # Block on the bus until _on_message reports a pixbuf.
        while not self.image_ready:
            self.bus.poll(gst.MESSAGE_ANY, -1)

    def stop_camera_input(self):
        ''' Stop grabbing '''
        self.pipe.set_state(gst.STATE_NULL)
| walterbender/turtleartmini | plugins/camera_sensor/tacamera.py | Python | mit | 2,820 |
#!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: iam_managed_policy
short_description: Manage User Managed IAM policies
description:
- Allows creating and removing managed IAM policies
version_added: "2.4"
options:
policy_name:
description:
- The name of the managed policy.
required: True
policy_description:
description:
- A helpful description of this policy; this value is immutable and only set when creating a new policy.
default: ''
policy:
description:
- A properly json formatted policy
make_default:
description:
- Make this revision the default revision.
default: True
only_version:
description:
- Remove all other non default revisions, if this is used with C(make_default) it will result in all other versions of this policy being deleted.
required: False
default: False
state:
description:
- Should this managed policy be present or absent. Set to absent to detach all entities from this policy and remove it if found.
required: True
default: null
choices: [ "present", "absent" ]
author: "Dan Kozlowski (@dkhenry)"
requirements:
- boto3
- botocore
'''
EXAMPLES = '''
# Create Policy ex nihilo
- name: Create IAM Managed Policy
iam_managed_policy:
policy_name: "ManagedPolicy"
policy_description: "A Helpful managed policy"
policy: "{{ lookup('template', 'managed_policy.json.j2') }}"
state: present
# Update a policy with a new default version
- name: Create IAM Managed Policy
iam_managed_policy:
policy_name: "ManagedPolicy"
policy: "{{ lookup('file', 'managed_policy_update.json') }}"
state: present
# Update a policy with a new non default version
- name: Create IAM Managed Policy
iam_managed_policy:
policy_name: "ManagedPolicy"
policy: "{{ lookup('file', 'managed_policy_update.json') }}"
make_default: false
state: present
# Update a policy and make it the only version and the default version
- name: Create IAM Managed Policy
iam_managed_policy:
policy_name: "ManagedPolicy"
policy: "{ 'Version': '2012-10-17', 'Statement':[{'Effect': 'Allow','Action': '*','Resource': '*'}]}"
only_version: true
state: present
# Remove a policy
- name: Create IAM Managed Policy
iam_managed_policy:
policy_name: "ManagedPolicy"
state: absent
'''
RETURN = '''
policy:
description: Returns the policy json structure, when state == absent this will return the value of the removed policy.
returned: success
type: string
sample: '{
"arn": "arn:aws:iam::aws:policy/AdministratorAccess "
"attachment_count": 0,
"create_date": "2017-03-01T15:42:55.981000+00:00",
"default_version_id": "v1",
"is_attachable": true,
"path": "/",
"policy_id": "ANPALM4KLDMTFXGOOJIHL",
"policy_name": "AdministratorAccess",
"update_date": "2017-03-01T15:42:55.981000+00:00"
}'
'''
import json
import traceback
try:
import botocore
except ImportError:
pass # caught by imported HAS_BOTO3
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import (boto3_conn, get_aws_connection_info, ec2_argument_spec, AWSRetry,
camel_dict_to_snake_dict, HAS_BOTO3, compare_policies)
from ansible.module_utils._text import to_native
@AWSRetry.backoff(tries=5, delay=5, backoff=2.0)
def list_policies_with_backoff(iam):
    """Return all customer-managed (Scope='Local') policies, fully
    paginated, retrying with exponential backoff on transient errors."""
    paginator = iam.get_paginator('list_policies')
    return paginator.paginate(Scope='Local').build_full_result()
def get_policy_by_name(module, iam, name):
    """Return the managed-policy dict whose PolicyName equals *name*,
    or None when no such policy exists. Fails the module on API errors."""
    try:
        response = list_policies_with_backoff(iam)
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg="Couldn't list policies: %s" % str(e),
                         exception=traceback.format_exc(),
                         **camel_dict_to_snake_dict(e.response))
    for policy in response['Policies']:
        if policy['PolicyName'] == name:
            return policy
    return None
def delete_oldest_non_default_version(module, iam, policy):
    """Delete the single oldest non-default version of *policy*, freeing a
    slot when the per-policy version limit has been reached."""
    try:
        versions = [v for v in iam.list_policy_versions(PolicyArn=policy['Arn'])['Versions']
                    if not v['IsDefaultVersion']]
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg="Couldn't list policy versions: %s" % str(e),
                         exception=traceback.format_exc(),
                         **camel_dict_to_snake_dict(e.response))
    # Sorted newest-first, so the [-1:] slice selects the oldest version
    # (and is empty when there are no non-default versions).
    versions.sort(key=lambda v: v['CreateDate'], reverse=True)
    for v in versions[-1:]:
        try:
            iam.delete_policy_version(PolicyArn=policy['Arn'], VersionId=v['VersionId'])
        except botocore.exceptions.ClientError as e:
            module.fail_json(msg="Couldn't delete policy version: %s" % str(e),
                             exception=traceback.format_exc(),
                             **camel_dict_to_snake_dict(e.response))
# This needs to return policy_version, changed
def get_or_create_policy_version(module, iam, policy, policy_document):
    """Return (policy_version, changed).

    Reuses an existing version whose document matches *policy_document*
    (changed=False); otherwise creates a new version (changed=True),
    evicting the oldest non-default version once if the service's
    version limit is hit.
    """
    try:
        versions = iam.list_policy_versions(PolicyArn=policy['Arn'])['Versions']
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg="Couldn't list policy versions: %s" % str(e),
                         exception=traceback.format_exc(),
                         **camel_dict_to_snake_dict(e.response))
    for v in versions:
        try:
            document = iam.get_policy_version(PolicyArn=policy['Arn'],
                                              VersionId=v['VersionId'])['PolicyVersion']['Document']
        except botocore.exceptions.ClientError as e:
            module.fail_json(msg="Couldn't get policy version %s: %s" % (v['VersionId'], str(e)),
                             exception=traceback.format_exc(),
                             **camel_dict_to_snake_dict(e.response))
        # If the current policy matches the existing one
        if not compare_policies(document, json.loads(to_native(policy_document))):
            return v, False
    # No existing version so create one
    # There is a service limit (typically 5) of policy versions.
    #
    # Rather than assume that it is 5, we'll try to create the policy
    # and if that doesn't work, delete the oldest non default policy version
    # and try again.
    try:
        version = iam.create_policy_version(PolicyArn=policy['Arn'], PolicyDocument=policy_document)['PolicyVersion']
        return version, True
    except botocore.exceptions.ClientError as e:
        if e.response['Error']['Code'] == 'LimitExceeded':
            delete_oldest_non_default_version(module, iam, policy)
            try:
                version = iam.create_policy_version(PolicyArn=policy['Arn'], PolicyDocument=policy_document)['PolicyVersion']
                return version, True
            except botocore.exceptions.ClientError as e:
                pass
        # Handle both when the exception isn't LimitExceeded or
        # the second attempt still failed
        module.fail_json(msg="Couldn't create policy version: %s" % str(e),
                         exception=traceback.format_exc(),
                         **camel_dict_to_snake_dict(e.response))
def set_if_default(module, iam, policy, policy_version, is_default):
    """Promote *policy_version* to the default version when requested.

    Returns True only when the default was actually changed.
    """
    # Nothing to do unless promotion was requested and the version
    # is not already the default.
    if not is_default or policy_version['IsDefaultVersion']:
        return False
    try:
        iam.set_default_policy_version(PolicyArn=policy['Arn'], VersionId=policy_version['VersionId'])
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg="Couldn't set default policy version: %s" % str(e),
                         exception=traceback.format_exc(),
                         **camel_dict_to_snake_dict(e.response))
    return True
def set_if_only(module, iam, policy, policy_version, is_only):
    """Delete every non-default version when *is_only* is requested.

    Returns True when at least one version was removed.
    """
    if not is_only:
        return False
    try:
        doomed = [v for v in iam.list_policy_versions(PolicyArn=policy['Arn'])[
            'Versions'] if not v['IsDefaultVersion']]
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg="Couldn't list policy versions: %s" % str(e),
                         exception=traceback.format_exc(),
                         **camel_dict_to_snake_dict(e.response))
    for victim in doomed:
        try:
            iam.delete_policy_version(PolicyArn=policy['Arn'], VersionId=victim['VersionId'])
        except botocore.exceptions.ClientError as e:
            module.fail_json(msg="Couldn't delete policy version: %s" % str(e),
                             exception=traceback.format_exc(),
                             **camel_dict_to_snake_dict(e.response))
    return bool(doomed)
def detach_all_entities(module, iam, policy, **kwargs):
    """Detach *policy* from every group, user and role it is attached to.

    Recurses with a ``Marker`` keyword argument to walk paginated results.
    """
    try:
        entities = iam.list_entities_for_policy(PolicyArn=policy['Arn'], **kwargs)
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg="Couldn't detach list entities for policy %s: %s" % (policy['PolicyName'], str(e)),
                         exception=traceback.format_exc(),
                         **camel_dict_to_snake_dict(e.response))
    for g in entities['PolicyGroups']:
        try:
            iam.detach_group_policy(PolicyArn=policy['Arn'], GroupName=g['GroupName'])
        except botocore.exceptions.ClientError as e:
            module.fail_json(msg="Couldn't detach group policy %s: %s" % (g['GroupName'], str(e)),
                             exception=traceback.format_exc(),
                             **camel_dict_to_snake_dict(e.response))
    for u in entities['PolicyUsers']:
        try:
            iam.detach_user_policy(PolicyArn=policy['Arn'], UserName=u['UserName'])
        except botocore.exceptions.ClientError as e:
            module.fail_json(msg="Couldn't detach user policy %s: %s" % (u['UserName'], str(e)),
                             exception=traceback.format_exc(),
                             **camel_dict_to_snake_dict(e.response))
    for r in entities['PolicyRoles']:
        try:
            iam.detach_role_policy(PolicyArn=policy['Arn'], RoleName=r['RoleName'])
        except botocore.exceptions.ClientError as e:
            module.fail_json(msg="Couldn't detach role policy %s: %s" % (r['RoleName'], str(e)),
                             exception=traceback.format_exc(),
                             **camel_dict_to_snake_dict(e.response))
    if entities['IsTruncated']:
        # BUG FIX: boto3's pagination parameter for list_entities_for_policy
        # is 'Marker' (capitalised). The previous lowercase 'marker' keyword
        # was rejected by botocore parameter validation, so follow-up pages
        # were never fetched.
        detach_all_entities(module, iam, policy, Marker=entities['Marker'])
def main():
    """Entry point: create/update (state=present) or delete (state=absent)
    an IAM customer-managed policy."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        policy_name=dict(required=True),
        policy_description=dict(default=''),
        policy=dict(type='json'),
        make_default=dict(type='bool', default=True),
        only_version=dict(type='bool', default=False),
        # NOTE(review): fail_on_delete is accepted but never read below --
        # presumably kept for backwards compatibility; confirm before removing.
        fail_on_delete=dict(type='bool', default=True),
        state=dict(required=True, choices=['present', 'absent']),
    ))
    module = AnsibleModule(
        argument_spec=argument_spec,
        required_if=[['state', 'present', ['policy']]]
    )
    if not HAS_BOTO3:
        module.fail_json(msg='boto3 is required for this module')
    name = module.params.get('policy_name')
    description = module.params.get('policy_description')
    state = module.params.get('state')
    default = module.params.get('make_default')
    only = module.params.get('only_version')
    policy = None
    if module.params.get('policy') is not None:
        # Round-trip through json to normalise whitespace in the document.
        policy = json.dumps(json.loads(module.params.get('policy')))
    try:
        region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
        iam = boto3_conn(module, conn_type='client', resource='iam',
                         region=region, endpoint=ec2_url, **aws_connect_kwargs)
    except (botocore.exceptions.NoCredentialsError, botocore.exceptions.ProfileNotFound) as e:
        # (fixed: keyword was 'exceptions=', which fail_json does not
        # recognise -- 'exception=' is what renders the traceback)
        module.fail_json(msg="Can't authorize connection. Check your credentials and profile.",
                         exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
    p = get_policy_by_name(module, iam, name)
    if state == 'present':
        if p is None:
            # No Policy so just create one
            try:
                rvalue = iam.create_policy(PolicyName=name, Path='/',
                                           PolicyDocument=policy, Description=description)
            except Exception as e:
                # NOTE(review): a non-ClientError here has no .response
                # attribute and would raise AttributeError -- confirm intent.
                module.fail_json(msg="Couldn't create policy %s: %s" % (name, to_native(e)),
                                 exception=traceback.format_exc(),
                                 **camel_dict_to_snake_dict(e.response))
            module.exit_json(changed=True, policy=camel_dict_to_snake_dict(rvalue['Policy']))
        else:
            policy_version, changed = get_or_create_policy_version(module, iam, p, policy)
            changed = set_if_default(module, iam, p, policy_version, default) or changed
            changed = set_if_only(module, iam, p, policy_version, only) or changed
            # If anything has changed we need to refresh the policy
            if changed:
                try:
                    p = iam.get_policy(PolicyArn=p['Arn'])['Policy']
                except Exception as e:
                    module.fail_json(msg="Couldn't get policy: %s" % to_native(e),
                                     exception=traceback.format_exc(),
                                     **camel_dict_to_snake_dict(e.response))
            module.exit_json(changed=changed, policy=camel_dict_to_snake_dict(p))
    else:
        # Check for existing policy
        if p:
            # Detach policy
            detach_all_entities(module, iam, p)
            # Delete Versions
            try:
                versions = iam.list_policy_versions(PolicyArn=p['Arn'])['Versions']
            except botocore.exceptions.ClientError as e:
                module.fail_json(msg="Couldn't list policy versions: %s" % to_native(e),
                                 exception=traceback.format_exc(),
                                 **camel_dict_to_snake_dict(e.response))
            for v in versions:
                if not v['IsDefaultVersion']:
                    try:
                        iam.delete_policy_version(PolicyArn=p['Arn'], VersionId=v['VersionId'])
                    except botocore.exceptions.ClientError as e:
                        module.fail_json(msg="Couldn't delete policy version %s: %s" %
                                         (v['VersionId'], to_native(e)),
                                         exception=traceback.format_exc(),
                                         **camel_dict_to_snake_dict(e.response))
            # Delete policy
            try:
                iam.delete_policy(PolicyArn=p['Arn'])
            except Exception as e:
                module.fail_json(msg="Couldn't delete policy %s: %s" % (p['PolicyName'], to_native(e)),
                                 exception=traceback.format_exc(),
                                 **camel_dict_to_snake_dict(e.response))
            # This is the one case where we will return the old policy
            module.exit_json(changed=True, policy=camel_dict_to_snake_dict(p))
        else:
            module.exit_json(changed=False, policy=None)
# end main
if __name__ == '__main__':
    main()
| tsdmgz/ansible | lib/ansible/modules/cloud/amazon/iam_managed_policy.py | Python | gpl-3.0 | 15,784 |
"""
A set of request processors that return dictionaries to be merged into a
template context. Each function takes the request object as its only parameter
and returns a dictionary to add to the context.
These are referenced from the 'context_processors' option of the configuration
of a DjangoTemplates backend and used by RequestContext.
"""
import itertools
from django.conf import settings
from django.middleware.csrf import get_token
from django.utils.functional import SimpleLazyObject, lazy
def csrf(request):
    """
    Context processor that provides a CSRF token, or the string 'NOTPROVIDED' if
    it has not been provided by either a view decorator or the middleware
    """
    def _resolve_token():
        token = get_token(request)
        # A sentinel (rather than an empty value) keeps misconfiguration
        # visible in rendered templates.
        return 'NOTPROVIDED' if token is None else token

    return {'csrf_token': SimpleLazyObject(_resolve_token)}
def debug(request):
    """
    Return context variables helpful for debugging.
    """
    if not settings.DEBUG:
        return {}
    if request.META.get('REMOTE_ADDR') not in settings.INTERNAL_IPS:
        return {}
    from django.db import connections

    def _collect_queries():
        return list(itertools.chain.from_iterable(
            connections[alias].queries for alias in connections))

    # Lazy reference: computed on access, so queries triggered after this
    # processor runs are still included.
    return {'debug': True, 'sql_queries': lazy(_collect_queries, list)}
def i18n(request):
    """Expose the configured languages and the active language/direction."""
    from django.utils import translation

    context = {'LANGUAGES': settings.LANGUAGES}
    context['LANGUAGE_CODE'] = translation.get_language()
    context['LANGUAGE_BIDI'] = translation.get_language_bidi()
    return context
def tz(request):
    """Expose the active time zone name as TIME_ZONE."""
    from django.utils import timezone

    return dict(TIME_ZONE=timezone.get_current_timezone_name())
def static(request):
    """
    Add static-related context variables to the context.
    """
    return dict(STATIC_URL=settings.STATIC_URL)
def media(request):
    """
    Add media-related context variables to the context.
    """
    return dict(MEDIA_URL=settings.MEDIA_URL)
def request(request):
    """Expose the request object itself to the template context."""
    return dict(request=request)
| sametmax/Django--an-app-at-a-time | ignore_this_directory/django/template/context_processors.py | Python | mit | 2,407 |
from setuptools import setup
# Auto-generated Poetry-style setup.py fixture: the package lives under
# src/ and ships all of its package data files.
package_dir = {"": "src"}
packages = ["extended"]
package_data = {"": ["*"]}
setup_kwargs = {
    "name": "extended",
    "version": "0.1",
    "description": "Some description.",
    "long_description": "Module 1\n========\n",
    "author": "Sébastien Eustace",
    "author_email": "[email protected]",
    "maintainer": "None",
    "maintainer_email": "None",
    "url": "https://python-poetry.org/",
    "package_dir": package_dir,
    "packages": packages,
    "package_data": package_data,
}
# The generated build module (expected alongside this script) mutates
# setup_kwargs in place -- e.g. to register extension modules -- before
# setup() consumes it.
from build import *
build(setup_kwargs)
setup(**setup_kwargs)
| python-poetry/poetry-core | tests/masonry/builders/fixtures/src_extended/setup.py | Python | mit | 598 |
from mock import patch
from django.test import TestCase
from django.core.urlresolvers import reverse
from ..models import Contact
class ContactViewTest(TestCase):
    """Tests for the contact form view: GET rendering, POST validation,
    persistence of submitted data, the GeoIP lookup, and the redirect to
    the thank-you page."""

    # Canned payload for the mocked GeoIP service. Previously this dict
    # was duplicated verbatim in every mocked test method.
    GEOIP_RESPONSE = {
        "longitude": 100.5014,
        "latitude": 13.754,
        "asn": "AS4750",
        "offset": "7",
        "ip": "58.137.162.34",
        "area_code": "0",
        "continent_code": "AS",
        "dma_code": "0",
        "city": "Bangkok",
        "timezone": "Asia/Bangkok",
        "region": "Krung Thep",
        "country_code": "TH",
        "isp": "CS LOXINFO PUBLIC COMPANY LIMITED",
        "country": "Thailand",
        "country_code3": "THA",
        "region_code": "40"
    }

    def setUp(self):
        self.url = reverse('contact')
        self.response = self.client.get(self.url)

    def _mock_geoip(self, mock):
        """Point the patched GeoIP client at the canned response."""
        mock.return_value.getGeoIP.return_value = self.GEOIP_RESPONSE

    def test_contact_view_is_accessible(self):
        self.assertEqual(self.response.status_code, 200)

    def test_contact_view_should_have_form_tag(self):
        expected = '<form action="." method="post">'
        self.assertContains(self.response, expected, status_code=200)

    def test_contact_view_should_have_firstname_input(self):
        expected = '<label>Firstname: '
        self.assertContains(self.response, expected, status_code=200)
        expected = '<input id="id_firstname" maxlength="100" name="firstname" '
        expected += 'type="text" />'
        self.assertContains(self.response, expected, status_code=200)

    def test_contact_view_should_have_lastname_and_input(self):
        expected = '<label>Last Name:</label>'
        self.assertContains(self.response, expected, status_code=200)
        expected = '<input id="id_lastname" maxlength="100" name="lastname" '
        expected += 'type="text" />'
        self.assertContains(self.response, expected, status_code=200)

    def test_contact_view_should_have_email_and_input(self):
        expected = '<label>Email:</label>'
        self.assertContains(self.response, expected, status_code=200)
        expected = '<input id="id_email" maxlength="100" name="email" '
        expected += 'type="email" />'
        self.assertContains(self.response, expected, status_code=200)

    def test_contact_view_should_have_submit_button(self):
        expected = '<input type="submit" value="Submit">'
        self.assertContains(self.response, expected, status_code=200)

    def test_contact_view_should_accessible_by_post(self):
        response = self.client.post(self.url)
        self.assertEqual(response.status_code, 200)

    @patch('contacts.views.GeoIP')
    def test_submit_contact_data_successfully(self, mock):
        self._mock_geoip(mock)
        data = {
            'firstname': 'John',
            'lastname': 'Smith',
            'email': '[email protected]'
        }
        self.client.post(self.url, data=data)
        contact = Contact.objects.get(firstname='John')
        self.assertEqual(contact.firstname, 'John')
        self.assertEqual(contact.lastname, 'Smith')
        self.assertEqual(contact.email, '[email protected]')
        # The persisted location fields come from the mocked GeoIP payload.
        self.assertEqual(contact.ip, '58.137.162.34')
        self.assertEqual(contact.lat, '13.754')
        self.assertEqual(contact.lng, '100.5014')

    def test_submit_contact_data_without_firstname_should_not_save_data(self):
        data = {
            'firstname': '',
            'lastname': 'Smith',
            'email': '[email protected]'
        }
        self.client.post(self.url, data=data)
        contact_count = Contact.objects.filter(lastname='Smith').count()
        self.assertEqual(contact_count, 0)

    def test_submit_contact_data_without_lastname_should_not_save_data(self):
        data = {
            'firstname': 'John',
            'lastname': '',
            'email': '[email protected]'
        }
        self.client.post(self.url, data=data)
        contact_count = Contact.objects.all().count()
        self.assertEqual(contact_count, 0)

    def test_submit_contact_data_without_email_should_not_save_data(self):
        data = {
            'firstname': 'John',
            'lastname': 'Smith',
            'email': ''
        }
        self.client.post(self.url, data=data)
        contact_count = Contact.objects.filter(lastname='Smith').count()
        self.assertEqual(contact_count, 0)

    def test_submit_contact_data_without_firstname_should_get_error_message(self):
        data = {
            'firstname': '',
            'lastname': 'Smith',
            'email': '[email protected]'
        }
        response = self.client.post(self.url, data=data)
        self.assertContains(response, 'This field is required.', status_code=200)

    def test_submit_contact_data_without_email_should_get_error_message(self):
        data = {
            'firstname': 'John',
            'lastname': 'Smith',
            'email': ''
        }
        response = self.client.post(self.url, data=data)
        self.assertContains(response, 'This field is required.', status_code=200)

    def test_submit_contact_data_without_lastname_should_get_error_message(self):
        data = {
            'firstname': 'John',
            'lastname': '',
            'email': '[email protected]'
        }
        response = self.client.post(self.url, data=data)
        self.assertContains(response, 'This field is required.', status_code=200)

    @patch('contacts.views.GeoIP')
    def test_redirect_to_thank_you_page_successfully(self, mock):
        self._mock_geoip(mock)
        data = {
            'firstname': 'John',
            'lastname': 'Smith',
            'email': '[email protected]'
        }
        response = self.client.post(self.url, data=data)
        self.assertRedirects(
            response,
            '/thankyou/?firstname=John',
            status_code=302,
            target_status_code=200
        )

    @patch('contacts.views.GeoIP')
    def test_redirected_page_should_contain_firstname(self, mock):
        self._mock_geoip(mock)
        data = {
            'firstname': 'John',
            'lastname': 'Smith',
            'email': '[email protected]'
        }
        response = self.client.post(self.url, data=data, follow=True)
        self.assertContains(response, 'Firstname: John', status_code=200)

    @patch('contacts.views.GeoIP')
    def test_thank_you_page_should_contain_lastname(self, mock):
        self._mock_geoip(mock)
        data = {
            'firstname': 'lnwBoss',
            'lastname': 'yong',
            'email': '[email protected]'
        }
        response = self.client.post(self.url, data=data, follow=True)
        self.assertContains(response, 'Lastname: yong', status_code=200)

    @patch('contacts.views.GeoIP')
    def test_call_geoip_api_successfully(self, mock):
        self._mock_geoip(mock)
        data = {
            'firstname': 'John',
            'lastname': 'Smith',
            'email': '[email protected]'
        }
        self.client.post(self.url, data=data)
        # BUG FIX: the original asserted on
        # mock.return_value_getGeoIP.assert_once_with(), both of which Mock
        # silently auto-creates, so the assertion could never fail.
        # Check the call count for real instead.
        self.assertEqual(mock.return_value.getGeoIP.call_count, 1)

    @patch('contacts.views.GeoIP')
    def test_thank_you_page_should_contain_ip(self, mock):
        self._mock_geoip(mock)
        data = {
            'firstname': 'lnwBoss',
            'lastname': 'yong',
            'email': '[email protected]'
        }
        response = self.client.post(self.url, data=data, follow=True)
        self.assertContains(response, 'IP: 58.137.162.34', status_code=200)

    @patch('contacts.views.GeoIP')
    def test_thank_you_page_should_contain_lat(self, mock):
        self._mock_geoip(mock)
        data = {
            'firstname': 'lnwBoss',
            'lastname': 'yong',
            'email': '[email protected]'
        }
        response = self.client.post(self.url, data=data, follow=True)
        self.assertContains(response, 'Lat: 13.754', status_code=200)

    @patch('contacts.views.GeoIP')
    def test_thank_you_page_should_contain_lng(self, mock):
        self._mock_geoip(mock)
        data = {
            'firstname': 'lnwBoss',
            'lastname': 'yong',
            'email': '[email protected]'
        }
        response = self.client.post(self.url, data=data, follow=True)
        self.assertContains(response, 'Lng: 100.5014', status_code=200)

    @patch('contacts.views.GeoIP')
    def test_thank_you_page_should_contain_email(self, mock):
        self._mock_geoip(mock)
        data = {
            'firstname': 'lnwBoss',
            'lastname': 'yong',
            'email': '[email protected]'
        }
        response = self.client.post(self.url, data=data, follow=True)
        self.assertContains(response, 'Email: [email protected]', status_code=200)
class ThankYouViewTest(TestCase):
    """Smoke tests for the thank-you page."""

    def setUp(self):
        self.url = reverse('thankyou')
        self.response = self.client.get(self.url)

    def test_thank_you_view_is_accessible(self):
        self.assertEqual(self.response.status_code, 200)

    def test_thank_you_page_should_contain_title_thank_you(self):
        self.assertContains(
            self.response, '<h1>Thank you</h1>', status_code=200)
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class GrantAccessData(Model):
    """Data used for requesting a SAS.

    All required parameters must be populated in order to send to Azure.

    :param access: Required. Possible values include: 'None', 'Read'
    :type access: str or
     ~azure.mgmt.compute.v2016_04_30_preview.models.AccessLevel
    :param duration_in_seconds: Required. Time duration in seconds until the
     SAS access expires.
    :type duration_in_seconds: int
    """

    # msrest validation rules: both fields must be supplied before the
    # request is serialized.
    _validation = {
        'access': {'required': True},
        'duration_in_seconds': {'required': True},
    }

    # Maps Python attribute names to the wire (JSON) keys and msrest types.
    _attribute_map = {
        'access': {'key': 'access', 'type': 'AccessLevel'},
        'duration_in_seconds': {'key': 'durationInSeconds', 'type': 'int'},
    }

    def __init__(self, *, access, duration_in_seconds: int, **kwargs) -> None:
        super(GrantAccessData, self).__init__(**kwargs)
        self.access = access
        self.duration_in_seconds = duration_in_seconds
| lmazuel/azure-sdk-for-python | azure-mgmt-compute/azure/mgmt/compute/v2016_04_30_preview/models/grant_access_data_py3.py | Python | mit | 1,464 |
import requests
from requests.auth import HTTPBasicAuth
class GithubRequester:
    """Thin wrapper around the GitHub REST issues endpoints.

    ``auth`` is a dict with ``'user'`` and ``'pass'`` keys, used for HTTP
    basic authentication on every request.
    """

    def __init__(self, auth):
        self.auth = auth

    def retrieve_issues(self, repo, filters):
        """Return the issues of ``repo`` (``owner/name``) matching ``filters``."""
        # GET /repos/:owner/:repo/issues
        f = self._join_filters(filters)
        # BUG FIX: was 'http://' -- inconsistent with retrieve_issue_comments
        # and the GitHub API requires TLS; plain HTTP would also transmit the
        # basic-auth credentials in clear text.
        r = requests.get('https://api.github.com/repos/' + repo + '/issues',
                         params=f, auth=HTTPBasicAuth(self.auth['user'],
                                                      self.auth['pass']))
        return r

    def retrieve_issue_comments(self, repo, number):
        """Return the comments of issue ``number`` in ``repo``."""
        # GET /repos/:owner/:repo/issues/:number/comments
        r = requests.get('https://api.github.com/repos/' + repo + '/issues/'
                         + number + '/comments',
                         auth=HTTPBasicAuth(self.auth['user'],
                                            self.auth['pass']))
        return r

    def _join_filters(self, filters):
        """Collapse each list-valued filter into a comma-separated string."""
        f_joined = {}
        for f in filters:
            f_joined[f] = ",".join(filters[f])
        return f_joined
| sterchelen/ghtracker | ghtracker/github_requests.py | Python | apache-2.0 | 981 |
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/usr/bin/env python
"""Convoluational MLP Agent."""
import os
import time
import numpy as np
from ravens import cameras
from ravens import utils
from ravens.models import mdn_utils
from ravens.models import Regression
import tensorflow as tf
import transformations
class ConvMlpAgent:
"""Convoluational MLP Agent."""
def __init__(self, name, task):
self.name = name
self.task = task
self.total_iter = 0
self.pixel_size = 0.003125
self.input_shape = (320, 160, 6)
self.camera_config = cameras.RealSenseD415.CONFIG
self.bounds = np.array([[0.25, 0.75], [-0.5, 0.5], [0, 0.28]])
self.total_iter = 0
# A place to save pre-trained models.
self.models_dir = os.path.join('checkpoints', self.name)
if not os.path.exists(self.models_dir):
os.makedirs(self.models_dir)
self.batch_size = 4
self.use_mdn = True
self.theta_scale = 10.0
def show_images(self, colormap, heightmap):
import matplotlib.pyplot as plt # pylint: disable=g-import-not-at-top
plt.imshow(colormap)
plt.show()
plt.imshow(heightmap)
plt.show()
def extract_x_y_theta(self,
object_info,
t_worldaug_world=None,
preserve_theta=False):
"""Extract in-plane theta."""
object_position = object_info[0]
object_quat_xyzw = object_info[1]
if t_worldaug_world is not None:
object_quat_wxyz = (object_quat_xyzw[3], object_quat_xyzw[0],
object_quat_xyzw[1], object_quat_xyzw[2])
t_world_object = transformations.quaternion_matrix(object_quat_wxyz)
t_world_object[0:3, 3] = np.array(object_position)
t_worldaug_object = t_worldaug_world @ t_world_object
object_quat_wxyz = transformations.quaternion_from_matrix(
t_worldaug_object)
if not preserve_theta:
object_quat_xyzw = (object_quat_wxyz[1], object_quat_wxyz[2],
object_quat_wxyz[3], object_quat_wxyz[0])
object_position = t_worldaug_object[0:3, 3]
object_xy = object_position[0:2]
object_theta = -np.float32(
utils.quatXYZW_to_eulerXYZ(object_quat_xyzw)
[2]) / self.theta_scale
return np.hstack(
(object_xy,
object_theta)).astype(np.float32), object_position, object_quat_xyzw
def act_to_gt_act(self, act, t_worldaug_world=None):
# dont update theta due to suction invariance to theta
pick_se2, _, _ = self.extract_x_y_theta(
act['params']['pose0'], t_worldaug_world, preserve_theta=True)
place_se2, _, _ = self.extract_x_y_theta(
act['params']['pose1'], t_worldaug_world, preserve_theta=True)
return np.hstack((pick_se2, place_se2)).astype(np.float32)
def get_data_batch(self, dataset, augment=True):
"""Sample batch."""
batch_obs = []
batch_act = []
for _ in range(self.batch_size):
obs, act, _ = dataset.random_sample()
# Get heightmap from RGB-D images.
configs = act['camera_config']
colormap, heightmap = self.get_heightmap(obs, configs)
# self.show_images(colormap, heightmap)
# Concatenate color with depth images.
input_image = np.concatenate((colormap, heightmap[Ellipsis, None],
heightmap[Ellipsis, None], heightmap[Ellipsis, None]),
axis=2)
# or just use rgb
# input_image = colormap
# Apply augmentation
if augment:
# note: these pixels are made up,
# just to keep the perturb function happy.
p0 = (160, 80)
p1 = (160, 80)
input_image, _, _, transform_params = utils.perturb(
input_image, [p0, p1], set_theta_zero=False)
t_world_center, t_world_centeraug = utils.get_se3_from_image_transform(
*transform_params, heightmap, self.bounds, self.pixel_size)
t_worldaug_world = t_world_centeraug @ np.linalg.inv(t_world_center)
else:
t_worldaug_world = np.eye(4)
batch_obs.append(input_image)
batch_act.append(self.act_to_gt_act(
act, t_worldaug_world)) # this samples pick points from surface
batch_obs = np.array(batch_obs)
batch_act = np.array(batch_act)
return batch_obs, batch_act
def train(self, dataset, num_iter, writer, validation_dataset):
"""Train on dataset for a specific number of iterations."""
validation_rate = 100
@tf.function
def pick_train_step(model, optim, in_tensor, yxtheta, loss_criterion):
with tf.GradientTape() as tape:
output = model(in_tensor)
loss = loss_criterion(yxtheta, output)
grad = tape.gradient(loss, model.trainable_variables)
optim.apply_gradients(zip(grad, model.trainable_variables))
return loss
@tf.function
def pick_valid_step(model, optim, in_tensor, yxtheta, loss_criterion):
del optim
with tf.GradientTape() as tape: # pylint: disable=unused-variable
output = model(in_tensor)
loss = loss_criterion(yxtheta, output)
return loss
for i in range(num_iter):
start = time.time()
batch_obs, batch_act = self.get_data_batch(dataset)
# Compute train loss
loss0 = self.regression_model.train_pick(batch_obs, batch_act,
pick_train_step)
with writer.as_default():
tf.summary.scalar(
'pick_loss',
self.regression_model.metric.result(),
step=self.total_iter + i)
print(f'Train Iter: {self.total_iter + i} Loss: {loss0:.4f} Iter time:',
time.time() - start)
if (self.total_iter + i) % validation_rate == 0:
print('Validating!')
tf.keras.backend.set_learning_phase(0)
batch_obs, batch_act = self.get_data_batch(
validation_dataset, augment=False)
# Compute valid loss
loss0 = self.regression_model.train_pick(
batch_obs, batch_act, pick_valid_step, validate=True)
with writer.as_default():
tf.summary.scalar(
'validation_pick_loss',
self.regression_model.val_metric.result(),
step=self.total_iter + i)
tf.keras.backend.set_learning_phase(1)
self.total_iter += num_iter
self.save()
def act(self, obs, gt_act, info):
"""Run inference and return best action given visual observations."""
del gt_act
del info
self.regression_model.set_batch_size(1)
act = {'camera_config': self.camera_config, 'primitive': None}
if not obs:
return act
# Get heightmap from RGB-D images.
colormap, heightmap = self.get_heightmap(obs, self.camera_config)
# Concatenate color with depth images.
input_image = np.concatenate(
(colormap, heightmap[Ellipsis, None], heightmap[Ellipsis, None], heightmap[Ellipsis,
None]),
axis=2)[None, Ellipsis]
# or just use rgb
# input_image = colormap[None, ...]
# Regression
prediction = self.regression_model.forward(input_image)
if self.use_mdn:
mdn_prediction = prediction
pi, mu, var = mdn_prediction
# prediction = mdn_utils.pick_max_mean(pi, mu, var)
prediction = mdn_utils.sample_from_pdf(pi, mu, var)
prediction = prediction[:, 0, :]
prediction = prediction[0]
p0_position = np.hstack((prediction[0:2], 0.02))
p1_position = np.hstack((prediction[3:5], 0.02))
p0_rotation = utils.eulerXYZ_to_quatXYZW(
(0, 0, -prediction[2] * self.theta_scale))
p1_rotation = utils.eulerXYZ_to_quatXYZW(
(0, 0, -prediction[5] * self.theta_scale))
act['primitive'] = 'pick_place'
if self.task == 'sweeping':
act['primitive'] = 'sweep'
elif self.task == 'pushing':
act['primitive'] = 'push'
params = {
'pose0': (p0_position, p0_rotation),
'pose1': (p1_position, p1_rotation)
}
act['params'] = params
self.regression_model.set_batch_size(self.batch_size)
return act
#-------------------------------------------------------------------------
# Helper Functions
#-------------------------------------------------------------------------
def preprocess(self, image):
"""Pre-process images (subtract mean, divide by std).
Args:
image: shape: [B, H, W, C]
Returns:
preprocessed image.
"""
color_mean = 0.18877631
depth_mean = 0.00509261
color_std = 0.07276466
depth_std = 0.00903967
del depth_mean
del depth_std
image[:, :, :, :3] = (image[:, :, :, :3] / 255 - color_mean) / color_std
# image[:, :, :, 3:] = (image[:, :, :, 3:] - depth_mean) / depth_std
return image
  def get_heightmap(self, obs, configs):
    """Reconstruct orthographic heightmaps with segmentation masks.

    Args:
      obs: dict with 'color' and 'depth' image lists, one entry per camera.
      configs: camera configurations matching the entries in `obs`.

    Returns:
      (colormap, heightmap): fused top-down uint8 color image and float32
      height image covering `self.bounds` at `self.pixel_size` resolution.
    """
    heightmaps, colormaps = utils.reconstruct_heightmaps(
        obs['color'], obs['depth'], configs, self.bounds, self.pixel_size)
    colormaps = np.float32(colormaps)
    heightmaps = np.float32(heightmaps)
    # Fuse maps from different views.
    # A pixel counts as observed by a view if any color channel is non-zero.
    valid = np.sum(colormaps, axis=3) > 0
    # Number of views that observed each pixel, clamped to 1 to avoid 0/0.
    repeat = np.sum(valid, axis=0)
    repeat[repeat == 0] = 1
    # Average color over contributing views; take the max height across views.
    colormap = np.sum(colormaps, axis=0) / repeat[Ellipsis, None]
    colormap = np.uint8(np.round(colormap))
    heightmap = np.max(heightmaps, axis=0)
    return colormap, heightmap
  def load(self, num_iter):
    """No-op checkpoint restore; this agent does not persist weights."""
    pass
  def save(self):
    """No-op checkpoint save; this agent does not persist weights."""
    pass
class PickPlaceConvMlpAgent(ConvMlpAgent):
  """Pick-and-place agent: a ConvMlpAgent wired to a concrete regression model."""

  def __init__(self, name, task):
    super().__init__(name, task)
    # Build the regression network from the parent-configured input shape,
    # preprocessing function, and (optional) mixture-density output head.
    self.regression_model = Regression(
        input_shape=self.input_shape,
        preprocess=self.preprocess,
        use_mdn=self.use_mdn)
    self.regression_model.set_batch_size(self.batch_size)
| google-research/google-research | ravens/ravens/agents/conv_mlp.py | Python | apache-2.0 | 10,399 |
#!/usr/bin/env python
from __future__ import print_function
import argparse
import datetime
import errno
import json
import os
import shutil
import subprocess
import sys
import tarfile
from enum import Enum
try:
# Python3
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
DATA_TABLE_NAME = "kraken2_databases"
class KrakenDatabaseTypes(Enum):
    """Top-level kraken2 database flavors selectable via --database-type."""
    standard = 'standard'
    minikraken = 'minikraken'
    special = 'special'
    custom = 'custom'
    def __str__(self):
        # argparse renders choices via str(); show the raw value.
        return self.value
class SpecialDatabaseTypes(Enum):
    """16S databases supported by `kraken2-build --special`."""
    rdp = 'rdp'
    greengenes = 'greengenes'
    silva = 'silva'
    def __str__(self):
        # argparse renders choices via str(); show the raw value.
        return self.value
class Minikraken2Versions(Enum):
    """Published MiniKraken2 database versions available for download."""
    v1 = 'v1'
    v2 = 'v2'
    def __str__(self):
        # argparse renders choices via str(); show the raw value.
        return self.value
def kraken2_build_standard(kraken2_args, target_directory, data_table_name=DATA_TABLE_NAME):
    """Build the standard kraken2 database with `kraken2-build --standard`.

    Args:
        kraken2_args (dict): kmer_len, minimizer_len, minimizer_spaces,
            threads, and the `clean` flag.
        target_directory (str): working directory the database is built in.
        data_table_name (str): Galaxy data table to register the entry under.

    Returns:
        dict: Galaxy data-table entry describing the new database.
    """
    # Timestamp makes the database value unique per build.
    now = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H%M%SZ")
    database_value = "_".join([
        now,
        "standard",
        "kmer-len", str(kraken2_args["kmer_len"]),
        "minimizer-len", str(kraken2_args["minimizer_len"]),
        "minimizer-spaces", str(kraken2_args["minimizer_spaces"]),
    ])
    database_name = " ".join([
        "Standard",
        "(Created:",
        now + ",",
        "kmer-len=" + str(kraken2_args["kmer_len"]) + ",",
        "minimizer-len=" + str(kraken2_args["minimizer_len"]) + ",",
        "minimizer-spaces=" + str(kraken2_args["minimizer_spaces"]) + ")",
    ])
    database_path = database_value
    args = [
        '--threads', str(kraken2_args["threads"]),
        '--standard',
        '--kmer-len', str(kraken2_args["kmer_len"]),
        '--minimizer-len', str(kraken2_args["minimizer_len"]),
        '--minimizer-spaces', str(kraken2_args["minimizer_spaces"]),
        '--db', database_path
    ]
    subprocess.check_call(['kraken2-build'] + args, cwd=target_directory)
    # Optionally remove intermediate build files to save disk space.
    if kraken2_args["clean"]:
        args = [
            '--threads', str(kraken2_args["threads"]),
            '--clean',
            '--db', database_path
        ]
        subprocess.check_call(['kraken2-build'] + args, cwd=target_directory)
    data_table_entry = {
        'data_tables': {
            data_table_name: [
                {
                    "value": database_value,
                    "name": database_name,
                    "path": database_path,
                }
            ]
        }
    }
    return data_table_entry
def kraken2_build_minikraken(minikraken2_version, target_directory, data_table_name=DATA_TABLE_NAME):
    """Download and unpack a prebuilt MiniKraken2 database.

    Args:
        minikraken2_version (str): 'v1' or 'v2'.
        target_directory (str): directory the database directory is created in.
        data_table_name (str): Galaxy data table to register the entry under.

    Returns:
        dict: Galaxy data-table entry describing the new database.
    """
    now = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H%M%SZ")
    database_value = "_".join([
        now,
        "minikraken2",
        minikraken2_version,
        "8GB",
    ])
    database_name = " ".join([
        "Minikraken2",
        minikraken2_version,
        "(Created:",
        now + ")"
    ])
    database_path = database_value
    # download the minikraken2 data
    src = urlopen(
        'ftp://ftp.ccb.jhu.edu/pub/data/kraken2_dbs/minikraken2_%s_8GB_201904_UPDATE.tgz'
        % minikraken2_version
    )
    try:
        with open('tmp_data.tar.gz', 'wb') as dst:
            shutil.copyfileobj(src, dst)
    finally:
        # Fix: urlopen handles were previously leaked; close explicitly
        # (urllib2 handles are not context managers on Python 2).
        src.close()
    # unpack the downloaded archive to the target directory
    with tarfile.open('tmp_data.tar.gz', 'r:gz') as fh:
        for member in fh.getmembers():
            if member.isreg():
                # Flatten the archive: strip leading directories from each
                # member (also guards against path traversal in names).
                member.name = os.path.basename(member.name)
                fh.extract(member, os.path.join(target_directory, database_path))
    # Fix: remove the temporary download instead of leaving it in the cwd.
    os.remove('tmp_data.tar.gz')
    data_table_entry = {
        'data_tables': {
            data_table_name: [
                {
                    "value": database_value,
                    "name": database_name,
                    "path": database_path,
                }
            ]
        }
    }
    return data_table_entry
def kraken2_build_special(kraken2_args, target_directory, data_table_name=DATA_TABLE_NAME):
    """Build one of the special 16S databases via `kraken2-build --special`.

    Args:
        kraken2_args (dict): special_database_type ('rdp'/'greengenes'/'silva'),
            kmer_len, minimizer_len, minimizer_spaces, threads, `clean` flag.
        target_directory (str): working directory the database is built in.
        data_table_name (str): Galaxy data table to register the entry under.

    Returns:
        dict: Galaxy data-table entry describing the new database.
    """
    now = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H%M%SZ")
    # Human-readable display names for the supported special databases.
    special_database_names = {
        "rdp": "RDP",
        "greengenes": "Greengenes",
        "silva": "Silva",
    }
    database_value = "_".join([
        now,
        kraken2_args["special_database_type"],
        "kmer-len", str(kraken2_args["kmer_len"]),
        "minimizer-len", str(kraken2_args["minimizer_len"]),
        "minimizer-spaces", str(kraken2_args["minimizer_spaces"]),
    ])
    database_name = " ".join([
        special_database_names[kraken2_args["special_database_type"]],
        "(Created:",
        now + ",",
        "kmer-len=" + str(kraken2_args["kmer_len"]) + ",",
        "minimizer-len=" + str(kraken2_args["minimizer_len"]) + ",",
        "minimizer-spaces=" + str(kraken2_args["minimizer_spaces"]) + ")",
    ])
    database_path = database_value
    args = [
        '--threads', str(kraken2_args["threads"]),
        '--special', kraken2_args["special_database_type"],
        '--kmer-len', str(kraken2_args["kmer_len"]),
        '--minimizer-len', str(kraken2_args["minimizer_len"]),
        '--minimizer-spaces', str(kraken2_args["minimizer_spaces"]),
        '--db', database_path
    ]
    subprocess.check_call(['kraken2-build'] + args, cwd=target_directory)
    # Optionally remove intermediate build files to save disk space.
    if kraken2_args["clean"]:
        args = [
            '--threads', str(kraken2_args["threads"]),
            '--clean',
            '--db', database_path
        ]
        subprocess.check_call(['kraken2-build'] + args, cwd=target_directory)
    data_table_entry = {
        'data_tables': {
            data_table_name: [
                {
                    "value": database_value,
                    "name": database_name,
                    "path": database_path,
                }
            ]
        }
    }
    return data_table_entry
def kraken2_build_custom(kraken2_args, custom_database_name, target_directory, data_table_name=DATA_TABLE_NAME):
    """Build a custom kraken2 database from a user-supplied FASTA file.

    Runs the four kraken2-build stages in order: download taxonomy,
    add-to-library, build, and (optionally) clean.

    Args:
        kraken2_args (dict): custom_fasta, skip_maps, kmer_len, minimizer_len,
            minimizer_spaces, threads, and the `clean` flag.
        custom_database_name (str): name (and directory) of the new database.
        target_directory (str): working directory the database is built in.
        data_table_name (str): Galaxy data table to register the entry under.

    Returns:
        dict: Galaxy data-table entry describing the new database.
    """
    # Stage 1: fetch the NCBI taxonomy (optionally skipping accession maps).
    args = [
        '--threads', str(kraken2_args["threads"]),
        '--download-taxonomy',
        '--db', custom_database_name,
    ]
    if kraken2_args['skip_maps']:
        args.append('--skip-maps')
    subprocess.check_call(['kraken2-build'] + args, cwd=target_directory)
    # Stage 2: add the user's sequences to the library.
    args = [
        '--threads', str(kraken2_args["threads"]),
        '--add-to-library', kraken2_args["custom_fasta"],
        '--db', custom_database_name
    ]
    subprocess.check_call(['kraken2-build'] + args, cwd=target_directory)
    # Stage 3: build the database with the requested k-mer parameters.
    args = [
        '--threads', str(kraken2_args["threads"]),
        '--build',
        '--kmer-len', str(kraken2_args["kmer_len"]),
        '--minimizer-len', str(kraken2_args["minimizer_len"]),
        '--minimizer-spaces', str(kraken2_args["minimizer_spaces"]),
        '--db', custom_database_name
    ]
    subprocess.check_call(['kraken2-build'] + args, cwd=target_directory)
    # Stage 4 (optional): remove intermediate build files to save disk space.
    if kraken2_args["clean"]:
        args = [
            '--threads', str(kraken2_args["threads"]),
            '--clean',
            '--db', custom_database_name
        ]
        subprocess.check_call(['kraken2-build'] + args, cwd=target_directory)
    data_table_entry = {
        'data_tables': {
            data_table_name: [
                {
                    "value": custom_database_name,
                    "name": custom_database_name,
                    "path": custom_database_name
                }
            ]
        }
    }
    return data_table_entry
def main():
    """Parse arguments, build the requested kraken2 database, and write the
    resulting Galaxy data-table JSON back over the data manager file."""
    parser = argparse.ArgumentParser()
    parser.add_argument('data_manager_json')
    parser.add_argument('--kmer-len', dest='kmer_len', type=int, default=35, help='kmer length')
    parser.add_argument('--minimizer-len', dest='minimizer_len', type=int, default=31, help='minimizer length')
    # Fix: validate numeric inputs with type=int (values are str()'d later,
    # so accepted command lines behave exactly as before).
    parser.add_argument('--minimizer-spaces', dest='minimizer_spaces', type=int, default=6, help='minimizer spaces')
    parser.add_argument('--threads', dest='threads', type=int, default=1, help='threads')
    parser.add_argument('--database-type', dest='database_type', type=KrakenDatabaseTypes, choices=list(KrakenDatabaseTypes), required=True, help='type of kraken database to build')
    parser.add_argument('--minikraken2-version', dest='minikraken2_version', type=Minikraken2Versions, choices=list(Minikraken2Versions), help='MiniKraken2 version (only applies to --database-type minikraken)')
    parser.add_argument('--special-database-type', dest='special_database_type', type=SpecialDatabaseTypes, choices=list(SpecialDatabaseTypes), help='type of special database to build (only applies to --database-type special)')
    parser.add_argument('--custom-fasta', dest='custom_fasta', help='fasta file for custom database (only applies to --database-type custom)')
    parser.add_argument('--custom-database-name', dest='custom_database_name', help='Name for custom database (only applies to --database-type custom)')
    parser.add_argument('--skip-maps', dest='skip_maps', action='store_true', help='')
    parser.add_argument('--clean', dest='clean', action='store_true', help='Clean up extra files')
    args = parser.parse_args()
    # Fix: read the bootstrap JSON via a context manager (handle was leaked).
    with open(args.data_manager_json) as fh:
        data_manager_input = json.load(fh)
    target_directory = data_manager_input['output_data'][0]['extra_files_path']
    try:
        os.mkdir(target_directory)
    except OSError as exc:
        # An already-existing directory is fine; anything else is fatal.
        if exc.errno == errno.EEXIST and os.path.isdir(target_directory):
            pass
        else:
            raise
    data_manager_output = {}
    if str(args.database_type) == 'standard':
        kraken2_args = {
            "kmer_len": args.kmer_len,
            "minimizer_len": args.minimizer_len,
            "minimizer_spaces": args.minimizer_spaces,
            "threads": args.threads,
            "clean": args.clean,
        }
        data_manager_output = kraken2_build_standard(
            kraken2_args,
            target_directory,
        )
    elif str(args.database_type) == 'minikraken':
        data_manager_output = kraken2_build_minikraken(
            str(args.minikraken2_version),
            target_directory
        )
    elif str(args.database_type) == 'special':
        kraken2_args = {
            "special_database_type": str(args.special_database_type),
            "kmer_len": args.kmer_len,
            "minimizer_len": args.minimizer_len,
            "minimizer_spaces": args.minimizer_spaces,
            "threads": args.threads,
            "clean": args.clean,
        }
        data_manager_output = kraken2_build_special(
            kraken2_args,
            target_directory,
        )
    elif str(args.database_type) == 'custom':
        kraken2_args = {
            "custom_fasta": args.custom_fasta,
            "skip_maps": args.skip_maps,
            "kmer_len": args.kmer_len,
            "minimizer_len": args.minimizer_len,
            "minimizer_spaces": args.minimizer_spaces,
            "threads": args.threads,
            "clean": args.clean,
        }
        data_manager_output = kraken2_build_custom(
            kraken2_args,
            args.custom_database_name,
            target_directory,
        )
    else:
        # Unreachable in practice (argparse enforces choices); kept defensive.
        sys.exit("Invalid database type")
    # Fix: write the result via a context manager (handle was leaked).
    with open(args.data_manager_json, 'w') as fh:
        json.dump(data_manager_output, fh, sort_keys=True)
if __name__ == "__main__":
    main()
| Delphine-L/tools-iuc | data_managers/data_manager_build_kraken2_database/data_manager/kraken2_build_database.py | Python | mit | 11,226 |
import os
import boto
# Before running this script, set envars AWS_ACCESS_KEY_ID
# and AWS_SECRET_ACCESS_KEY, Boto uses them automatically
# ember build --environment="production"
# NOTE: Python 2 script (print statements, classic boto API).
print boto.s3.regions()
conn = boto.connect_s3()
bucket = conn.get_bucket('nodember-rain')
# Mirror the built Ember app into the bucket, stripping the
# 'frontend/dist' prefix so keys are rooted at the bucket top level.
for root, folder, files in os.walk('frontend/dist'):
    for filename in files:
        file_path = os.path.join(root, filename)
        print 'Uploading file: {}'.format(file_path)
        key = bucket.new_key(file_path[len('frontend/dist'):])
        key.size = os.stat(file_path).st_size
        key.set_contents_from_filename(file_path)
        # Objects must be world-readable to be served as a static site.
        key.set_acl('public-read')
print 'Done uploading'
| holandes22/nodember-rain | deploy.py | Python | mit | 674 |
import interop
# Connection parameters for the AUVSI interoperability judge server.
# SECURITY NOTE(review): credentials are hard-coded and committed; consider
# moving them to environment variables or a config file.
url = "http://10.10.130.10:80"
username = "ucsd-auvsi"
password = "1979543783"
connection = interop.InterOp(url, username, password)
#start interop connection
# Blocks here servicing the interop command loop.
connection.commandLoop()
#start the sending of data
#connection.start_interop()
| UCSD-AUVSI/MissionDirector | interoperability/test.py | Python | gpl-3.0 | 259 |
import sys, os, glob
# add the converter tools to the syspath
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
from pynmrml import io
from pynmrml import nmrML
#for filename in files:
input_file = "sucrose.fid"
output_file = "sucrose.nmrML"
# Build an nmrML writer from the Varian FID input.
writer = io.factory.varian_converter(input_file)
# Add additional information to the document
doc = writer.doc()
# Add some contacts
contactList = nmrML.ContactListType()
contact = nmrML.ContactType(id= "ID004",fullname= "Michael Wilson",email= "[email protected]" )
contactList.add_contact(contact)
doc.set_contactList(contactList)
# Add some software
software_list = nmrML.SoftwareListType()
software_list.add_software(nmrML.SoftwareType(
    id="SOFTWARE_1", cvRef="NMRCV", accession="NMR:1000277",
    name="VnmrJ software", version="2.2C"))
doc.set_softwareList(software_list)
# Add some instrument configurations
configList = doc.get_instrumentConfigurationList()
instconfig = nmrML.InstrumentConfigurationType(id="INST_CONFIG_1")
instconfig.add_cvTerm(
    nmrML.CVTermType(cvRef="NMRCV", accession="NMR:400234", name="Varian NMR instrument"))
instconfig.add_cvTerm(
    nmrML.CVTermType(cvRef="NMRCV", accession="??", name="Varian VNMRS 600 NMR spectrometer"))
instconfig.add_userParam(
    nmrML.UserParamType(name="5 mm inverse detection cryoprobe"))
# Fix: write to the configured output_file (the filename was previously
# hard-coded, ignoring the variable) and close the handle deterministically.
with open(output_file, "w") as out_handle:
    writer.write(out_handle)
| nmrML/nmrML | tools/Parser_and_Converters/python/pynmrml/examples/adding_annotation.py | Python | mit | 1,379 |
import abc
import warnings
from pathlib import Path
import pandas as pd
import ibis
import ibis.expr.types as ir
from ibis.backends.base import BaseBackend, Database
from ibis.backends.pandas.core import execute_and_reset
# Load options of pandas backend
ibis.pandas
class FileDatabase(Database):
    """Ibis Database backed by a directory of files on disk."""
    def __init__(self, name, client):
        super().__init__(name, client)
        # Root directory this database reads tables (files) from.
        self.path = client.path
    def __str__(self):
        return '{0.__class__.__name__}({0.name})'.format(self)
    def __dir__(self):
        # Expose both sub-databases (directories) and tables (files) for
        # tab completion.
        dbs = self.list_databases(path=self.path)
        tables = self.list_tables(path=self.path)
        return sorted(set(dbs).union(set(tables)))
    def __getattr__(self, name):
        # Attribute access resolves to a table first, then falls back to a
        # sub-database of the same name.
        try:
            return self.table(name, path=self.path)
        except AttributeError:
            return self.database(name, path=self.path)
    def table(self, name, path):
        """Return the table `name` located under `path`."""
        return self.client.table(name, path=path)
    def database(self, name=None, path=None):
        """Return a (sub-)database handle from the owning client."""
        return self.client.database(name=name, path=path)
    def list_databases(self, path=None):
        """Sorted sub-database names under `path` (defaults to this db's path)."""
        if path is None:
            path = self.path
        return sorted(self.client.list_databases(path=path))
    def list_tables(self, path=None, database=None):
        """Sorted table names under `path` (defaults to this db's path)."""
        if path is None:
            path = self.path
        return sorted(self.client.list_tables(path=path, database=database))
class BaseFileBackend(BaseBackend):
    """
    Base backend class for pandas pseudo-backends for file formats.

    Subclasses define `extension` (file suffix without the dot) plus the
    abstract `table` and `insert` methods.
    """
    database_class = FileDatabase
    def connect(self, path):
        """Create a Client for use with Ibis
        Parameters
        ----------
        path : str or pathlib.Path
        Returns
        -------
        Backend
        """
        new_backend = self.__class__()
        # `path` may later be narrowed by `database()`; `root` stays fixed.
        new_backend.path = new_backend.root = Path(path)
        new_backend.dictionary = {}
        return new_backend
    @property
    def version(self) -> str:
        # File backends execute via pandas, so report its version.
        return pd.__version__
    def list_tables(
        self, path: Path = None, like: str = None, database: str = None
    ):
        """List table names: files with this backend's extension under `path`."""
        # For file backends, we return files in the `path` directory.
        def is_valid(path):
            return path.is_file() and path.suffix == '.' + self.extension
        path = path or self.path
        if path.is_dir():
            tables = [f.stem for f in path.iterdir() if is_valid(f)]
        elif is_valid(path):
            # `path` points directly at a single table file.
            tables = [path.stem]
        else:
            tables = []
        return self._filter_with_like(tables, like)
    @property
    def current_database(self):
        # Databases for the file backend are a bit confusing
        # `list_databases()` will return the directories in the current path
        # The `current_database` is not in that list. Probably we want to
        # rethink this eventually. For now we return the placeholder '.'
        # (note: despite earlier plans to return None, callers get '.').
        return '.'
    def compile(self, expr, *args, **kwargs):
        # No compilation step: expressions are executed directly by pandas.
        return expr
    def _list_databases_dirs(self, path=None):
        # NOTE(review): despite the `None` default, callers must pass a real
        # Path — a None `path` would raise on `path.is_dir()`.
        tables = []
        if path.is_dir():
            for d in path.iterdir():
                if d.is_dir():
                    tables.append(d.name)
        return tables
    def _list_tables_files(self, path=None):
        # tables are files in a dir
        if path is None:
            path = self.root
        tables = []
        if path.is_dir():
            for d in path.iterdir():
                if d.is_file():
                    if str(d).endswith(self.extension):
                        tables.append(d.stem)
        elif path.is_file():
            # A direct file path yields at most one table.
            if str(path).endswith(self.extension):
                tables.append(path.stem)
        return tables
    def list_databases(self, path=None, like=None):
        """List database names: '.' plus sub-directories of `path`."""
        if path is None:
            path = self.path
        else:
            warnings.warn(
                'The `path` argument of `list_databases` is deprecated and '
                'will be removed in a future version of Ibis. Connect to a '
                'different path with the `connect()` method instead.',
                FutureWarning,
            )
        databases = ['.'] + self._list_databases_dirs(path)
        return self._filter_with_like(databases, like)
    @abc.abstractmethod
    def insert(self, path, expr, **kwargs):
        """Write `expr` to `path`; implemented by concrete file backends."""
        pass
    @abc.abstractmethod
    def table(self, name, path):
        """Return the named table; implemented by concrete file backends."""
        pass
    def database(self, name=None, path=None):
        """Return a database handle, narrowing `self.path` to the selection.

        NOTE(review): this mutates `self.path` as a side effect, so the
        backend's notion of "current path" changes after each call.
        """
        if name is None:
            self.path = path or self.path
            return super().database(name)
        if path is None:
            path = self.root
        if name not in self.list_databases(path):
            raise AttributeError(name)
        new_name = f"{name}.{self.extension}"
        # `name` may be a directory of tables or a single table file.
        if (self.root / name).is_dir():
            path /= name
        elif not str(path).endswith(new_name):
            path /= new_name
        self.path = path
        return super().database(name)
    def execute(self, expr, params=None, **kwargs):  # noqa
        """Execute an ibis expression with the pandas execution engine."""
        assert isinstance(expr, ir.Expr)
        return execute_and_reset(expr, params=params, **kwargs)
| cloudera/ibis | ibis/backends/base/file/__init__.py | Python | apache-2.0 | 5,137 |
# Time: O(n)
# Space: O(n)
# Given a binary tree, return the tilt of the whole tree.
#
# The tilt of a tree node is defined as the absolute difference
# between the sum of all left subtree node values and
# the sum of all right subtree node values. Null node has tilt 0.
#
# The tilt of the whole tree is defined as the sum of all nodes' tilt.
#
# Example:
# Input:
# 1
# / \
# 2 3
# Output: 1
# Explanation:
# Tilt of node 2 : 0
# Tilt of node 3 : 0
# Tilt of node 1 : |2-3| = 1
# Tilt of binary tree : 0 + 0 + 1 = 1
# Note:
#
# The sum of node values in any subtree won't exceed
# the range of 32-bit integer.
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def findTilt(self, root):
        """
        :type root: TreeNode
        :rtype: int

        Post-order walk: each subtree reports its value sum; the absolute
        difference of the two child sums is accumulated into the total tilt.
        """
        total_tilt = [0]  # mutable cell so the nested closure can accumulate
        def subtree_sum(node):
            # Returns the sum of all values in the subtree rooted at `node`.
            if node is None:
                return 0
            left_sum = subtree_sum(node.left)
            right_sum = subtree_sum(node.right)
            total_tilt[0] += abs(left_sum - right_sum)
            return node.val + left_sum + right_sum
        subtree_sum(root)
        return total_tilt[0]
| kamyu104/LeetCode | Python/binary-tree-tilt.py | Python | mit | 1,289 |
from django.urls import path
from wishlist import views
app_name = 'wishlist'
# Routes for adding/removing a product to/from the user's wishlist;
# both take the target product's primary key.
urlpatterns = [
    path('add/<int:product_id>/', views.add, name='add'),
    path('remove/<int:product_id>/', views.remove, name='remove')
]
| pmaigutyak/mp-shop | wishlist/urls.py | Python | isc | 229 |
'''
FBO example
===========
This is an example of how to use FBO (Frame Buffer Object) to speedup graphics.
An Fbo is like a texture that you can draw on it.
By default, all the children are added in the canvas of the parent.
When you are displaying thousand of widget, you'll do thousands of graphics
instructions each frame.
The idea is to do this drawing only one time in a Fbo, and then, draw the Fbo
every frame instead of all children's graphics instructions.
We created a FboFloatLayout that create his canvas, and a Fbo.
After the Fbo is created, we are adding Color and Rectangle instruction to
display the texture of the Fbo itself.
The overload of on_pos/on_size are here to update size of Fbo if needed, and
adapt the position/size of the rectangle too.
Then, when a child is added or removed, we are redirecting addition/removal of
graphics instruction to our Fbo. This is why add_widget/remove_widget are
overloaded too.
.. note::
This solution can be helpful but not ideal. Multisampling are not available
in Framebuffer. We will work to add the support of it if the hardware is
capable of, but it could be not the same.
'''
# needed to create Fbo, must be resolved in future kivy version
from kivy.core.window import Window
from kivy.graphics import Color, Rectangle, Canvas, Callback
from kivy.graphics.fbo import Fbo
from kivy.uix.floatlayout import FloatLayout
from kivy.properties import ObjectProperty, BooleanProperty
from kivy.resources import resource_find
from kivy.graphics.opengl import *
from kivy.graphics import *
from kivy.graphics.texture import Texture
class FboFloatLayout(FloatLayout):
    """FloatLayout that renders its children into an offscreen Fbo and then
    draws that Fbo's texture, so child graphics are executed only when the
    Fbo is redrawn rather than every frame."""
    # Texture of the offscreen buffer, exposed so kv/other widgets can use it.
    texture = ObjectProperty(None, allownone=True)
    # When True, the blend callback uses straight copy (ONE/ZERO) blending.
    alpha_blending = BooleanProperty(False)
    def __init__(self, **kwargs):
        # Build the canvas by hand before calling super() so that the Fbo
        # and its display rectangle exist when children get attached.
        self.canvas = Canvas()
        with self.canvas.before:
            Callback(self._set_blend_func)
        #self.size
        self.fbo_texture = Texture.create(size=self.size,
                                          colorfmt='rgba')
        self.fbo_texture.mag_filter = 'linear'
        self.fbo_texture.min_filter = 'linear'
        with self.canvas:
            #self.cbs = Callback(self.prepare_canvas)
            self.fbo = Fbo(size=self.size, texture=self.fbo_texture)
            #Color(0, 0, 0, 1)
            #self.fbo_rect = Rectangle(size=self.size)
        with self.fbo:
            # Clear the offscreen buffer to opaque black, then draw the
            # rectangle that will show the fused texture.
            ClearColor(0.0, 0.0, 0.0, 1.0)
            ClearBuffers()
            self.fbo_rect = Rectangle(size=self.size)
        #self.fbo.shader.source = resource_find('./kivy3dgui/gles2.0/shaders/invert.glsl')
        #with self.fbo.after:
        #    self.cbr = Callback(self.reset_gl_context)
        #    PopMatrix()
        with self.canvas.before:
            Callback(self._set_blend_func)
        # wait that all the instructions are in the canvas to set texture
        self.texture = self.fbo.texture
        # NOTE(review): if any of these kwargs is missing, the bare except
        # below swallows the KeyError and super().__init__ is never called —
        # callers must always pass size, size_hint and clear_color.
        try:
            self.size = kwargs.pop("size")
            self.size_hint = kwargs.pop("size_hint")
            self.clear_color = kwargs.pop("clear_color")
            super(FboFloatLayout, self).__init__(**kwargs)
        except:
            print(kwargs)
    def prepare_canvas(self, *args):
        # Apparently unused: the Callback that referenced it is commented out
        # in __init__ — TODO confirm before removing.
        glEnable(GL_BLEND)
        glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_ONE, GL_ONE)
        glEnable(GL_DEPTH_TEST)
    def _set_blend_func(self, instruction):
        # clobber the blend mode
        if self.alpha_blending:
            glBlendFunc(GL_ONE,
                        GL_ZERO)
        else:
            glBlendFunc(GL_SRC_ALPHA,
                        GL_ONE_MINUS_SRC_ALPHA)
        glDisable(GL_CULL_FACE)
        self.fbo.draw()
        # Restore the default alpha blend once the Fbo has been drawn.
        glBlendFunc(GL_SRC_ALPHA,
                    GL_ONE_MINUS_SRC_ALPHA)
    def setup_gl_context(self, *args):
        glEnable(GL_BLEND)
        glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA)
        glEnable(GL_DEPTH_TEST)
    def reset_gl_context(self, *args):
        glDisable(GL_DEPTH_TEST)
        glDisable(GL_CULL_FACE)
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
    def add_widget(self, *largs):
        # trick to attach kivy3dgui instructino to fbo instead of canvas
        canvas = self.canvas
        self.canvas = self.fbo
        ret = super(FboFloatLayout, self).add_widget(*largs)
        self.canvas = canvas
        return ret
    def remove_widget(self, *largs):
        # Same canvas swap as add_widget: children live in the Fbo.
        canvas = self.canvas
        self.canvas = self.fbo
        super(FboFloatLayout, self).remove_widget(*largs)
        self.canvas = canvas
    def on_size(self, instance, value):
        # Keep the offscreen buffer and display rect in sync with the widget.
        self.fbo.size = value
        self.texture = self.fbo_texture
        self.fbo_rect.size = value
    def on_pos(self, instance, value):
        self.fbo_rect.pos = value
    def on_texture(self, instance, value):
        self.fbo_rect.texture = value
    def on_touch_down(self, touch):
        return super(FboFloatLayout, self).on_touch_down(touch)
    def on_touch_move(self, touch):
        return super(FboFloatLayout, self).on_touch_move(touch)
    """def on_touch_up(self, touch):
        return super(FboFloatLayout, self).on_touch_up(touch)"""
    def on_touch_up(self, touch):
        # NOTE(review): unlike the default dispatch, this forwards the event
        # only to the first colliding child and returns None when nothing
        # collides — confirm that behavior is intended.
        for e in self.children:
            if e.collide_point(touch.x, touch.y):
                return e.on_touch_up(touch)
if __name__ == '__main__':
    from kivy.uix.button import Button
    from kivy.app import App
    class TestFboApp(App):
        # Stress-test demo: 5000 buttons rendered through the Fbo layout.
        def build(self):
            # test with FboFloatLayout or FloatLayout
            # comment/uncomment to test it
            root = FboFloatLayout()
            #root = FloatLayout()
            # this part of creation can be slow. try to optimize the loop a
            # little bit.
            s = 30
            size = (s, s)
            sh = (None, None)
            add = root.add_widget
            print('Creating 5000 widgets...')
            for i in range(5000):
                # Lay buttons out on a 40-wide grid.
                x = (i % 40) * s
                y = int(i / 40) * s
                add(Button(text=str(i), pos=(x, y), size_hint=sh, size=size))
                if i % 1000 == 1000 - 1:
                    print(5000 - i - 1, 'left...')
            return root
    TestFboApp().run()
| kpiorno/kivy3dgui | kivy3dgui/fbowidget.py | Python | mit | 6,221 |
#!/usr/bin/env python
#/******************************************************************************
# * $Id$
# *
# * Project: GDAL Make Histogram and Cumulative graph from Tab delimited tab as
# generated by gdal_hist.py
# * Purpose: Take a gdal_hist.py output and create a histogram plot using matplotlib
# * Author: Trent Hare, [email protected]
# *
# ******************************************************************************
# * Public domain licenes (unlicense)
# *
# * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# * DEALINGS IN THE SOFTWARE.
# ****************************************************************************/
import sys
import os
import math
import numpy as np
import pandas as pd
from pandas.tools.plotting import table
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
def usage():
    """Print usage examples and exit with status 0 (Python 2 script)."""
    print 'Usage: slope_histogram_cumulative_graph.py -name "InSight E1" slope_histogram_table.tab outfile.png'
    print "  This program is geared to run on a table as generated by gdal_hist.py"
    print 'slope_histogram_cumulative_graph.py -name "E_Marg_CE 01" DEM_1m_E_Marg_CE_adir_1m_hist.xls DEM_1m_E_Marg_CE_adir_1m_hist.png'
    sys.exit(0)
#set None for commandline options
name = ""
infile = None
outfile = None
# =============================================================================
# Parse command line arguments.
# =============================================================================
i = 1
while i < len(sys.argv):
arg = sys.argv[i]
if arg == '-name':
i = i + 1
name = sys.argv[i]
elif infile is None:
infile = arg
elif outfile is None:
outfile = arg
else:
Usage()
i = i + 1
if infile is None:
usage()
if not(os.path.isfile(infile)):
input = sys.argv[1]
print "filename %s does not exist." % (infile)
sys.exit(1)
#load table
df = pd.DataFrame.from_csv(infile, sep='\t', header=1)
#initialize figure
fig, ax1 = plt.subplots()
#calculate unscaled values
#df.value = (df.value * 5) - 0.2
#df.ix[df.value < 0] = 0; df
#not to reverse histogram before calculating 'approx' stats
#min = round(df.value.min(),2)
#max = round(df.value.max(),2)
#mean = round(df.value.mean(),2)
#stddev = round(df.value.std(),2)
#rms = round(math.sqrt((mean * mean) + (stddev * stddev)),2)
#statsDict = {'Min':min,'Max':max,'Mean':mean \
             #,'StdDev':stddev,'RMS':rms}
#statsSeries = pd.Series(statsDict,name='stats')
#statsSeries.sort()
#t = table(ax1, statsSeries, \
          #loc='lower right', colWidths=[0.1] * 2)
#t.set_fontsize(18)
#props = t.properties()
#cells = props['child_artists']
#for c in cells:
    #c.set_height(0.05)
#Plot frequency histogram from input table
ax1.fill(df.value,df['count'],'gray')
#df.plot(ax1=ax1, kind='area', color='gray', legend=True)
ax1.ticklabel_format(style='sci', axis='y', scilimits=(0,0))
ax1.get_yaxis().set_tick_params(direction='out')
#get min and max as found by pandas for plotting 'arrow' at X=15
#minY = round(df['count'].min(),0)
#maxY = round(df['count'].max(),0)
#grab existing ax1 axes
#ax = plt.axes()
#ax.arrow(15, minY, 0, maxY, head_width=0, head_length=0, fc='k', ec='k')
# Vertical marker at the 15-degree slope threshold.
ax1.axvline(x=15, color='black', alpha=0.5)
#add cumulative plot on 'Y2' axis using save X axes
ax2 = ax1.twinx()
ax2.plot(df.value,df['cumulative'],'blue')
#df.plot(ax2=ax2, df.value,df['cumulative'],'blue')
ax2.get_yaxis().set_tick_params(direction='out')
#define labels
ax1.set_xlabel('Slope (degrees)')
ax1.set_ylabel('Count')
ax2.set_ylabel('Cumulative')
plt.suptitle(name + ' Slope Histogram and Cumulative Plot')
#save out PNG
plt.savefig(outfile)
print "Graph exported to %s" % (outfile)
| USGS-Astrogeology/GDAL_scripts | gdal_baseline_slope/python2/slope_histogram_cumulative_graph.py | Python | unlicense | 4,103 |
from __future__ import absolute_import
from .version import __version__
from .auxiliary import *
from .simulation import Simulation
from .data_record import DataRecord
from .server import Server
from .individual import Individual
from .arrival_node import ArrivalNode
from .exit_node import ExitNode
from .node import Node
from .processor_sharing import PSNode
from .exactnode import *
from .import_params import *
from .network import *
import ciw.dists
import ciw.deadlock
import ciw.trackers | CiwPython/Ciw | ciw/__init__.py | Python | mit | 495 |
#!/usr/bin/env python3
# ################################################################
# Copyright (c) 2018-2020, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under both the BSD-style license (found in the
# LICENSE file in the root directory of this source tree) and the GPLv2 (found
# in the COPYING file in the root directory of this source tree).
# You may select, at your option, one of the above-listed licenses.
# ##########################################################################
# Rate limiter, replacement for pv
# this rate limiter does not "catch up" after a blocking period
# Limitations:
# - only accepts limit speed in MB/s
import sys
import time
MB = 1024 * 1024
# Target throughput in bytes/second, taken from argv[1] (MB/s).
rate = float(sys.argv[1]) * MB
start = time.time()
total_read = 0
# sys.stderr.close() # remove error message, for Ctrl+C
try:
    # Non-empty sentinel so the loop body runs at least once; the loop
    # exits when stdin reaches EOF (read returns b'').
    buf = " "
    while len(buf):
        now = time.time()
        # Bytes allowed by the elapsed interval, clamped to [1, 1 MB].
        to_read = max(int(rate * (now - start)), 1)
        max_buf_size = 1 * MB
        to_read = min(to_read, max_buf_size)
        start = now
        buf = sys.stdin.buffer.read(to_read)
        sys.stdout.buffer.write(buf)
except (KeyboardInterrupt, BrokenPipeError) as e:
    # Exit silently on Ctrl+C or a closed downstream pipe.
    pass
| Cyan4973/zstd | tests/rateLimiter.py | Python | bsd-3-clause | 1,168 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from typing import Optional
from flask import flash, request, Response
from flask_appbuilder import expose
from flask_appbuilder.security.decorators import has_access_api
from werkzeug.utils import redirect
from superset import db, event_logger
from superset.models import core as models
from superset.typing import FlaskResponse
from superset.views.base import BaseSupersetView
logger = logging.getLogger(__name__)
class R(BaseSupersetView):  # pylint: disable=invalid-name
    """used for short urls"""
    @staticmethod
    def _validate_url(url: Optional[str] = None) -> bool:
        """Return True iff `url` targets an internal dashboard/explore page.

        Only relative URLs of these forms may be stored or redirected to,
        preventing open-redirect abuse through the shortener.
        """
        # str.startswith accepts a tuple of prefixes, so both checks
        # collapse into one call.
        return bool(url) and url.startswith(
            ("//superset/dashboard/", "//superset/explore/")
        )
    @event_logger.log_this
    @expose("/<int:url_id>")
    def index(self, url_id: int) -> FlaskResponse:
        """Resolve a short-URL id and redirect to its stored target.

        Unknown ids or invalid stored URLs redirect to the home page.
        """
        url = db.session.query(models.Url).get(url_id)
        if url and url.url:
            explore_url = "//superset/explore/?"
            if url.url.startswith(explore_url):
                # Explore links are re-issued as ?r=<id> so the long form
                # state stays behind the short id.
                explore_url += f"r={url_id}"
                return redirect(explore_url[1:])
            if self._validate_url(url.url):
                # Stored URLs start with '//'; strip one leading slash.
                return redirect(url.url[1:])
            return redirect("/")
        flash("URL to nowhere...", "danger")
        return redirect("/")
    @event_logger.log_this
    @has_access_api
    @expose("/shortner/", methods=["POST"])
    def shortner(self) -> FlaskResponse:
        """Store a validated internal URL and return its short form."""
        url = request.form.get("data")
        if not self._validate_url(url):
            logger.warning("Invalid URL")
            return Response("Invalid URL", 400)
        obj = models.Url(url=url)
        db.session.add(obj)
        db.session.commit()
        return Response(
            "{scheme}://{request.headers[Host]}/r/{obj.id}".format(
                scheme=request.scheme, request=request, obj=obj
            ),
            mimetype="text/plain",
        )
| apache/incubator-superset | superset/views/redirects.py | Python | apache-2.0 | 2,758 |
from ConfigParser import SafeConfigParser, NoSectionError, NoOptionError, Error
import os
import logging
import sys
class Config:
    """Layered configuration loader.

    Reads /etc/myops2/settings.ini (system-wide) then ~/.myops2/settings.ini
    (per-user); values in the user file override the system file.
    """
    # Default rethinkdb connection parameters (overridable via settings.ini).
    rethinkdb = {
        "host" : "localhost",
        "port" : 28015,
        "db" : "myops2"
    }
    def __init__(self):
        self.system = os.path.abspath("/etc/myops2")
        self.user = os.path.expanduser("~/.myops2")
        # Fix: these lines previously referenced the non-existent attributes
        # self.general / self.local, raising AttributeError on construction.
        self.files = [self.system + "/settings.ini", self.user + "/settings.ini"]
        if not os.path.exists(self.system):
            # The system-wide directory may legitimately be absent.
            pass
        if not os.path.exists(self.user):
            os.mkdir(self.user)
        self.parser = SafeConfigParser()
        # read() silently skips files that do not exist.
        self.parser.read(self.files)
    def get(self, section, key, default = None):
        """Return the value for (section, key), or `default` when missing."""
        try:
            value = self.parser.get(section, key)
        except Error:
            # Error is the ConfigParser base class: it covers both
            # NoSectionError and NoOptionError (previously caught separately).
            value = default
        return value
| onelab-eu/myslice-v2 | myops2/settings.py | Python | mit | 1,049 |
#!/usr/bin/env python
# coding=utf-8
# Contributor:
# Phus Lu <[email protected]>
__version__ = '2.1.11'
# Shared secret clients must present; empty string disables the check.
__password__ = ''
# Host suffixes this proxy refuses to fetch (tuple for str.endswith).
__hostsdeny__ = () # __hostsdeny__ = ('.youtube.com', '.youku.com')
import sys
import os
import re
import time
import struct
import zlib
import binascii
import logging
import httplib
import urlparse
import base64
import cStringIO
import hashlib
import hmac
import errno
try:
from google.appengine.api import urlfetch
from google.appengine.runtime import apiproxy_errors
except ImportError:
urlfetch = None
try:
import sae
except ImportError:
sae = None
try:
import socket, select, ssl, thread
except:
socket = None
# Default number of urlfetch attempts per proxied request.
FetchMax = 2
# Range size (bytes) requested when a response is too large for urlfetch.
FetchMaxSize = 1024*1024*4
# Only bodies smaller than this are considered for deflate/gzip compression.
DeflateMaxSize = 1024*1024*4
# Base per-fetch deadline in seconds (doubled on retry).
Deadline = 60
def error_html(errno, error, description=''):
    """Render a simple Google-style HTML error page.

    Parameters
    ----------
    errno : int or str
        Error code shown in the page title.
    error : str
        Short error name shown as the heading.
    description : str
        Optional longer explanation.

    Values are coerced to ``str`` so numeric error codes are accepted
    (the original raised TypeError on non-string arguments).
    """
    ERROR_TEMPLATE = '''
<html><head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<title>{{errno}} {{error}}</title>
<style><!--
body {font-family: arial,sans-serif}
div.nav {margin-top: 1ex}
div.nav A {font-size: 10pt; font-family: arial,sans-serif}
span.nav {font-size: 10pt; font-family: arial,sans-serif; font-weight: bold}
div.nav A,span.big {font-size: 12pt; color: #0000cc}
div.nav A {font-size: 10pt; color: black}
A.l:link {color: #6f6f6f}
A.u:link {color: green}
//--></style>
</head>
<body text=#000000 bgcolor=#ffffff>
<table border=0 cellpadding=2 cellspacing=0 width=100%>
<tr><td bgcolor=#3366cc><font face=arial,sans-serif color=#ffffff><b>Error</b></td></tr>
<tr><td>&nbsp;</td></tr></table>
<blockquote>
<H1>{{error}}</H1>
{{description}}
<p>
</blockquote>
<table width=100% cellpadding=0 cellspacing=0><tr><td bgcolor=#3366cc><img alt="" width=1 height=4></td></tr></table>
</body></html>
'''
    kwargs = dict(errno=errno, error=error, description=description)
    template = ERROR_TEMPLATE
    for keyword, value in kwargs.items():
        # str() guards against non-string values (e.g. int error codes).
        template = template.replace('{{%s}}' % keyword, str(value))
    return template
def socket_forward(local, remote, timeout=60, tick=2, bufsize=8192, maxping=None, maxpong=None, idlecall=None, bitmask=None):
    """Pump bytes bidirectionally between two sockets until EOF or timeout.

    local/remote: connected sockets.
    timeout: overall idle budget in seconds, decremented every ``tick``.
    maxping/maxpong: value the budget is reset to after local->remote /
        remote->local traffic respectively (falls back to ``timeout``).
    idlecall: optional callback fired once when a select interval passes
        with no traffic, and again on exit if it never fired.
    bitmask: if truthy, every byte is XORed with it in both directions
        (simple traffic obfuscation).
    """
    timecount = timeout
    try:
        while 1:
            timecount -= tick
            if timecount <= 0:
                break
            (ins, _, errors) = select.select([local, remote], [], [local, remote], tick)
            if errors:
                break
            if ins:
                for sock in ins:
                    data = sock.recv(bufsize)
                    if bitmask:
                        # XOR-deobfuscate/obfuscate the chunk byte by byte.
                        data = ''.join(chr(ord(x)^bitmask) for x in data)
                    if data:
                        if sock is local:
                            remote.sendall(data)
                            # Traffic seen: refresh the idle budget.
                            timecount = maxping or timeout
                        else:
                            local.sendall(data)
                            timecount = maxpong or timeout
                    else:
                        # recv returned '' -> peer closed; stop forwarding.
                        return
            else:
                if idlecall:
                    try:
                        idlecall()
                    except Exception:
                        logging.exception('socket_forward idlecall fail')
                    finally:
                        # Only ever invoke the idle callback once.
                        idlecall = None
    except Exception:
        logging.exception('socket_forward error')
        raise
    finally:
        # Guarantee the idle callback runs at least once.
        if idlecall:
            idlecall()
def socks5_handler(sock, address, hls={'hmac':{}}):
    """Handle one client connection: HTTP Upgrade handshake, then SOCKS5.

    The client sends an HTTP request whose path embeds a 32-hex-char HMAC
    digest; the digest maps (via the precomputed table in ``hls``) to the
    XOR bitmask used to obfuscate the rest of the stream. After replying
    101 Switching Protocols, a minimal SOCKS5 exchange (CONNECT only) is
    performed and traffic is relayed with socket_forward.

    NOTE: ``hls`` is a deliberately shared mutable default used as a
    per-process cache of digest -> bitmask.
    """
    if not hls['hmac']:
        # Precompute HMAC(password, byte) for every possible bitmask 0..255.
        hls['hmac'] = dict((hmac.new(__password__, chr(x)).hexdigest(),x) for x in xrange(256))
    bufsize = 8192
    rfile = sock.makefile('rb', bufsize)
    wfile = sock.makefile('wb', 0)
    remote_addr, remote_port = address
    MessageClass = dict
    try:
        # --- HTTP Upgrade handshake ---
        line = rfile.readline(bufsize)
        if not line:
            raise socket.error('empty line')
        method, path, version = line.rstrip().split(' ', 2)
        headers = MessageClass()
        while 1:
            line = rfile.readline(bufsize)
            if not line or line == '\r\n':
                break
            keyword, _, value = line.partition(':')
            keyword = keyword.title()
            value = value.strip()
            headers[keyword] = value
        logging.info('%s:%s "%s %s %s" - -', remote_addr, remote_port, method, path, version)
        if headers.get('Connection', '').lower() != 'upgrade':
            logging.error('%s:%s Connection(%s) != "upgrade"', remote_addr, remote_port, headers.get('Connection'))
            return
        # The request path must carry the 32-hex-digit HMAC digest.
        m = re.search('([0-9a-f]{32})', path)
        if not m:
            logging.error('%s:%s Path(%s) not valid', remote_addr, remote_port, path)
            return
        need_digest = m.group(1)
        bitmask = hls['hmac'].get(need_digest)
        if bitmask is None:
            logging.error('%s:%s Digest(%s) not match', remote_addr, remote_port, need_digest)
            return
        else:
            logging.info('%s:%s Digest(%s) return bitmask=%r', remote_addr, remote_port, need_digest, bitmask)
        wfile.write('HTTP/1.1 101 Switching Protocols\r\nConnection: Upgrade\r\n\r\n')
        wfile.flush()
        # All subsequent bytes are XOR-obfuscated with the agreed bitmask.
        rfile_read = lambda n:''.join(chr(ord(x)^bitmask) for x in rfile.read(n))
        wfile_write = lambda s:wfile.write(''.join(chr(ord(x)^bitmask) for x in s))
        # 1. Greeting: read version+nmethods, skip the methods list,
        #    reply "version 5, no authentication".
        rfile_read(ord(rfile_read(2)[-1]))
        wfile_write(b'\x05\x00');
        # 2. Request
        data = rfile_read(4)
        mode = ord(data[1])
        addrtype = ord(data[3])
        if addrtype == 1: # IPv4
            addr = socket.inet_ntoa(rfile_read(4))
        elif addrtype == 3: # Domain name
            addr = rfile_read(ord(rfile_read(1)[0]))
        port = struct.unpack('>H',rfile_read(2))
        reply = b'\x05\x00\x00\x01'
        try:
            logging.info('%s:%s socks5 mode=%r', remote_addr, remote_port, mode)
            if mode == 1: # 1. TCP Connect
                remote = socket.create_connection((addr, port[0]))
                logging.info('%s:%s TCP Connect to %s:%s', remote_addr, remote_port, addr, port[0])
                local = remote.getsockname()
                # Reply with the bound local address/port per RFC 1928.
                reply += socket.inet_aton(local[0]) + struct.pack(">H", local[1])
            else:
                reply = b'\x05\x07\x00\x01' # Command not supported
        except socket.error:
            # Connection refused
            reply = '\x05\x05\x00\x01\x00\x00\x00\x00\x00\x00'
        wfile_write(reply)
        # 3. Transfering
        if reply[1] == '\x00': # Success
            if mode == 1: # 1. Tcp connect
                socket_forward(sock, remote, bitmask=bitmask)
    except socket.error as e:
        # 10053 = WSAECONNABORTED (Windows); benign disconnects are ignored.
        if e[0] not in (10053, errno.EPIPE, 'empty line'):
            raise
    finally:
        rfile.close()
        wfile.close()
        sock.close()
def paas_application(environ, start_response):
    """WSGI generator app for generic PAAS hosts: replays a wrapped request.

    Wire format (POST body): 2-byte big-endian length, then that many bytes
    of raw-deflate "Key:Value" header lines; G-Method/G-Url carry the real
    request, remaining G-* keys become control kwargs (e.g. password).
    Plain GETs are bounced to google.com.
    """
    if environ['REQUEST_METHOD'] == 'GET':
        start_response('302 Found', [('Location', 'https://www.google.com')])
        # StopIteration ends this generator-based WSGI app early.
        raise StopIteration
    # inflate = lambda x:zlib.decompress(x, -15)
    wsgi_input = environ['wsgi.input']
    data = wsgi_input.read(2)
    metadata_length, = struct.unpack('!h', data)
    metadata = wsgi_input.read(metadata_length)
    metadata = zlib.decompress(metadata, -15)
    headers = dict(x.split(':', 1) for x in metadata.splitlines() if x)
    method = headers.pop('G-Method')
    url = headers.pop('G-Url')
    kwargs = {}
    # Move G-* control headers into kwargs (prefix stripped, lower-cased).
    any(kwargs.__setitem__(x[2:].lower(), headers.pop(x)) for x in headers.keys() if x.startswith('G-'))
    headers['Connection'] = 'close'
    payload = environ['wsgi.input'].read() if 'Content-Length' in headers else None
    if 'Content-Encoding' in headers:
        if headers['Content-Encoding'] == 'deflate':
            # Client deflated the body; inflate before forwarding upstream.
            payload = zlib.decompress(payload, -15)
            headers['Content-Length'] = str(len(payload))
            del headers['Content-Encoding']
    if __password__ and __password__ != kwargs.get('password'):
        # Wrong password: proxy a request to a bogus time-derived host so the
        # caller receives that host's error response rather than a clear 403.
        random_host = 'g%d%s' % (int(time.time()*100), environ['HTTP_HOST'])
        conn = httplib.HTTPConnection(random_host, timeout=3)
        conn.request('GET', '/')
        response = conn.getresponse(True)
        status_line = '%s %s' % (response.status, httplib.responses.get(response.status, 'OK'))
        start_response(status_line, response.getheaders())
        yield response.read()
        raise StopIteration
    if __hostsdeny__ and urlparse.urlparse(url).netloc.endswith(__hostsdeny__):
        start_response('403 Forbidden', [('Content-Type', 'text/html')])
        yield error_html('403', 'Hosts Deny', description='url=%r' % url)
        raise StopIteration
    timeout = Deadline
    logging.info('%s "%s %s %s" - -', environ['REMOTE_ADDR'], method, url, 'HTTP/1.1')
    if method != 'CONNECT':
        try:
            scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
            HTTPConnection = httplib.HTTPSConnection if scheme == 'https' else httplib.HTTPConnection
            if params:
                path += ';' + params
            if query:
                path += '?' + query
            conn = HTTPConnection(netloc, timeout=timeout)
            conn.request(method, path, body=payload, headers=headers)
            response = conn.getresponse()
            # Real upstream status is tunneled in X-Status; the outer reply
            # is always 200 so intermediaries do not interfere.
            headers = [('X-Status', str(response.status))]
            headers += [(k, v) for k, v in response.msg.items() if k != 'transfer-encoding']
            start_response('200 OK', headers)
            bufsize = 8192
            while 1:
                data = response.read(bufsize)
                if not data:
                    response.close()
                    break
                yield data
        except httplib.HTTPException as e:
            raise
def gae_application(environ, start_response):
    """WSGI generator app for Google App Engine: replays a wrapped request
    through urlfetch and streams back a packed status/header blob plus body.

    Same wire format as paas_application: 2-byte length + raw-deflate
    "Key:Value" metadata with G-Method/G-Url and G-* control kwargs
    (password, fetchmax, fetchmaxsize, ...). GETs serve a status page or,
    with '204' in the query string, an empty connectivity-probe reply.
    """
    if environ['REQUEST_METHOD'] == 'GET':
        if '204' in environ['QUERY_STRING']:
            start_response('204 No Content', [])
            yield ''
        else:
            # Deploy time is encoded in the upper bits of the second dotted
            # component of CURRENT_VERSION_ID; shown shifted to UTC+8.
            timestamp = long(os.environ['CURRENT_VERSION_ID'].split('.')[1])/pow(2,28)
            ctime = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(timestamp+8*3600))
            html = u'GoAgent Python Server %s \u5df2\u7ecf\u5728\u5de5\u4f5c\u4e86\uff0c\u90e8\u7f72\u65f6\u95f4 %s\n' % (__version__, ctime)
            start_response('200 OK', [('Content-Type', 'text/plain; charset=utf-8')])
            yield html.encode('utf8')
        raise StopIteration
    # inflate = lambda x:zlib.decompress(x, -15)
    wsgi_input = environ['wsgi.input']
    data = wsgi_input.read(2)
    metadata_length, = struct.unpack('!h', data)
    metadata = wsgi_input.read(metadata_length)
    metadata = zlib.decompress(metadata, -15)
    headers = dict(x.split(':', 1) for x in metadata.splitlines() if x)
    method = headers.pop('G-Method')
    url = headers.pop('G-Url')
    kwargs = {}
    # Move G-* control headers into kwargs (prefix stripped, lower-cased).
    any(kwargs.__setitem__(x[2:].lower(), headers.pop(x)) for x in headers.keys() if x.startswith('G-'))
    #logging.info('%s "%s %s %s" - -', environ['REMOTE_ADDR'], method, url, 'HTTP/1.1')
    #logging.info('request headers=%s', headers)
    if __password__ and __password__ != kwargs.get('password', ''):
        start_response('403 Forbidden', [('Content-Type', 'text/html')])
        yield error_html('403', 'Wrong password', description='GoAgent proxy.ini password is wrong!')
        raise StopIteration
    if __hostsdeny__ and urlparse.urlparse(url).netloc.endswith(__hostsdeny__):
        start_response('403 Forbidden', [('Content-Type', 'text/html')])
        yield error_html('403', 'Hosts Deny', description='url=%r' % url)
        raise StopIteration
    # urlfetch exposes method constants by name (GET, POST, ...).
    fetchmethod = getattr(urlfetch, method, '')
    if not fetchmethod:
        start_response('501 Unsupported', [('Content-Type', 'text/html')])
        yield error_html('501', 'Invalid Method: %r'% method, description='Unsupported Method')
        raise StopIteration
    deadline = Deadline
    headers = dict(headers)
    headers['Connection'] = 'close'
    payload = environ['wsgi.input'].read() if 'Content-Length' in headers else None
    if 'Content-Encoding' in headers:
        if headers['Content-Encoding'] == 'deflate':
            # Client deflated the body; inflate before forwarding upstream.
            payload = zlib.decompress(payload, -15)
            headers['Content-Length'] = str(len(payload))
            del headers['Content-Encoding']
    accept_encoding = headers.get('Accept-Encoding', '')
    errors = []
    # Retry loop: deadline is widened (and Range shrunk) on known failures.
    # NOTE(review): if every attempt fails without raising the generic
    # Exception branch (e.g. repeated DeadlineExceededError), `response` is
    # never bound and the code below raises NameError -- confirm intended.
    for i in xrange(int(kwargs.get('fetchmax', FetchMax))):
        try:
            response = urlfetch.fetch(url, payload, fetchmethod, headers, allow_truncated=False, follow_redirects=False, deadline=deadline, validate_certificate=False)
            break
        except apiproxy_errors.OverQuotaError as e:
            time.sleep(5)
        except urlfetch.DeadlineExceededError as e:
            errors.append('%r, deadline=%s' % (e, deadline))
            logging.error('DeadlineExceededError(deadline=%s, url=%r)', deadline, url)
            time.sleep(1)
            deadline = Deadline * 2
        except urlfetch.DownloadError as e:
            errors.append('%r, deadline=%s' % (e, deadline))
            logging.error('DownloadError(deadline=%s, url=%r)', deadline, url)
            time.sleep(1)
            deadline = Deadline * 2
        except urlfetch.ResponseTooLargeError as e:
            response = e.response
            logging.error('ResponseTooLargeError(deadline=%s, url=%r) response(%r)', deadline, url, response)
            # Fall back to ranged fetching, continuing from any existing
            # Range start or from byte 0.
            m = re.search(r'=\s*(\d+)-', headers.get('Range') or headers.get('range') or '')
            if m is None:
                headers['Range'] = 'bytes=0-%d' % int(kwargs.get('fetchmaxsize', FetchMaxSize))
            else:
                headers.pop('Range', '')
                headers.pop('range', '')
                start = int(m.group(1))
                headers['Range'] = 'bytes=%s-%d' % (start, start+int(kwargs.get('fetchmaxsize', FetchMaxSize)))
            deadline = Deadline * 2
        except Exception as e:
            errors.append(str(e))
            if i==0 and method=='GET':
                # First GET attempt: retry once more with a wider deadline.
                deadline = Deadline * 2
            else:
                start_response('500 Internal Server Error', [('Content-Type', 'text/html')])
                yield error_html('502', 'Python Urlfetch Error: %r' % method, description='<br />\n'.join(errors) or 'UNKOWN')
                raise StopIteration
    #logging.debug('url=%r response.status_code=%r response.headers=%r response.content[:1024]=%r', url, response.status_code, dict(response.headers), response.content[:1024])
    data = response.content
    # Compress small text-like bodies when the client advertises support.
    if 'content-encoding' not in response.headers and len(response.content) < DeflateMaxSize and response.headers.get('content-type', '').startswith(('text/', 'application/json', 'application/javascript')):
        if 'deflate' in accept_encoding:
            response.headers['Content-Encoding'] = 'deflate'
            # [2:-4] strips the zlib header/checksum -> raw deflate stream.
            data = zlib.compress(data)[2:-4]
        elif 'gzip' in accept_encoding:
            response.headers['Content-Encoding'] = 'gzip'
            compressobj = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS, zlib.DEF_MEM_LEVEL, 0)
            dataio = cStringIO.StringIO()
            # Minimal fixed gzip header (magic, deflate, mtime=0).
            dataio.write('\x1f\x8b\x08\x00\x00\x00\x00\x00\x02\xff')
            dataio.write(compressobj.compress(data))
            dataio.write(compressobj.flush())
            # gzip trailer: CRC32 and length, both masked to 32 bits.
            dataio.write(struct.pack('<LL', zlib.crc32(data)&0xFFFFFFFFL, len(data)&0xFFFFFFFFL))
            data = dataio.getvalue()
        response.headers['Content-Length'] = str(len(data))
    # Upstream status/headers travel raw-deflated inside the 200 reply;
    # x-google-* bookkeeping headers are dropped.
    response_headers = zlib.compress('\n'.join('%s:%s'%(k.title(),v) for k, v in response.headers.items() if not k.startswith('x-google-')))[2:-4]
    start_response('200 OK', [('Content-Type', 'image/gif')])
    yield struct.pack('!hh', int(response.status_code), len(response_headers))+response_headers
    yield data
# Pick the WSGI entry point: GAE urlfetch backend when available, generic
# PAAS backend otherwise; wrap for SAE when that runtime is importable.
app = gae_application if urlfetch else paas_application
application = app if sae is None else sae.create_wsgi_app(app)
if __name__ == '__main__':
    # Standalone mode (outside GAE/SAE): serve with gevent.
    # Options: -l listen address, -p port, -a app ('socks5' or WSGI PAAS).
    logging.basicConfig(level=logging.INFO, format='%(levelname)s - - %(asctime)s %(message)s', datefmt='[%b %d %H:%M:%S]')
    import gevent, gevent.server, gevent.wsgi, gevent.monkey, getopt
    # gevent >= 1.0 supports cooperative DNS resolution.
    gevent.monkey.patch_all(dns=gevent.version_info[0]>=1)
    options = dict(getopt.getopt(sys.argv[1:], 'l:p:a:')[0])
    host = options.get('-l', '0.0.0.0')
    port = options.get('-p', '80')
    app = options.get('-a', 'socks5')
    if app == 'socks5':
        server = gevent.server.StreamServer((host, int(port)), socks5_handler)
    else:
        server = gevent.wsgi.WSGIServer((host, int(port)), paas_application)
    logging.info('serving %s at http://%s:%s/', app.upper(), server.address[0], server.address[1])
    server.serve_forever()
| imwiththou/GoAgent | server/python/wsgi.py | Python | mit | 16,708 |
import base64
import os
import re
import logging
from datetime import datetime
from uuid import uuid4
from django.conf import settings
# Configure root logging from optional Django settings (LOG_FORMAT /
# LOG_LEVEL), defaulting to DEBUG, before the model imports below run.
fmt = getattr(settings, 'LOG_FORMAT', None)
lvl = getattr(settings, 'LOG_LEVEL', logging.DEBUG)
logging.basicConfig(format=fmt, level=lvl)
from watson import search as watson
from auditlog.registry import auditlog
from django.conf import settings
from django.contrib import admin
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.core.validators import RegexValidator
from django.db import models
from django.db.models import Q
from django.utils.timezone import now
from imagekit.models import ImageSpecField
from imagekit.processors import ResizeToCover
from django.utils import timezone
from pytz import all_timezones
from tagging.registry import register as tag_register
from multiselectfield import MultiSelectField
import hashlib
class System_Settings(models.Model):
    """Site-wide DefectDojo configuration flags and integration credentials."""

    enable_deduplication = models.BooleanField(default=False,
                                               blank=False,
                                               verbose_name='Deduplicate findings',
                                               help_text='With this setting turned on, Dojo deduplicates findings by comparing endpoints, ' \
                                                         'cwe fields, and titles. ' \
                                                         'If two findings share a URL and have the same CWE or title, Dojo marks the ' \
                                                         'less recent finding as a duplicate. When deduplication is enabled, a list of ' \
                                                         'deduplicated findings is added to the engagement view.')
    # NOTE(review): field name keeps the historical 'dupulicates' typo;
    # renaming would require a schema migration and break existing references.
    delete_dupulicates = models.BooleanField(default=False, blank=False)
    max_dupes = models.IntegerField(blank=True, null=True, verbose_name='Max Duplicates', help_text='When enabled, if' \
                                    'a single issue reaches the maximum number of duplicates, the oldest will be' \
                                    'deleted.')
    # Integration toggles and credentials: JIRA, Slack, HipChat.
    enable_jira = models.BooleanField(default=False, verbose_name='Enable JIRA integration', blank=False)
    enable_slack_notifications = models.BooleanField(default=False, verbose_name='Enable Slack notifications', blank=False)
    slack_channel = models.CharField(max_length=100, default='', blank=True)
    slack_token = models.CharField(max_length=100, default='', blank=True, help_text='Token required for interacting with Slack. Get one at https://api.slack.com/tokens')
    slack_username = models.CharField(max_length=100, default='', blank=True)
    enable_hipchat_notifications = models.BooleanField(default=False, verbose_name='Enable HipChat notifications', blank=False)
    hipchat_site = models.CharField(max_length=100, default='', blank=True, help_text='The full fqdn of your hipchat site, e.g. "yoursite.hipchat.com"')
    hipchat_channel = models.CharField(max_length=100, default='', blank=True)
    hipchat_token = models.CharField(max_length=100, default='', blank=True, help_text='Token required for interacting with HipChat. Get one at https://patriktest.hipchat.com/addons/')
    # Outbound mail notification settings.
    enable_mail_notifications = models.BooleanField(default=False, blank=False)
    mail_notifications_from = models.CharField(max_length=200, default='[email protected]', blank=True)
    mail_notifications_to = models.CharField(max_length=200, default='', blank=True)
    # When on, severities display as S0..S4 instead of Critical/High/...
    s_finding_severity_naming = models.BooleanField(default=False,
                                                    blank=False,
                                                    help_text='With this setting turned on, Dojo will display S0, S1, S2, etc ' \
                                                              'in most places, whereas if turned off Critical, High, Medium, etc will be displayed.')
    false_positive_history = models.BooleanField(default=False)
    url_prefix = models.CharField(max_length=300, default='', blank=True)
    team_name = models.CharField(max_length=100, default='', blank=True)
    # All pytz timezone names are offered as choices.
    time_zone = models.CharField(max_length=50,
                                 choices=[(tz,tz) for tz in all_timezones],
                                 default='UTC',blank=False)
def get_current_date():
    """Return today's date in the active timezone (used as a field default)."""
    right_now = timezone.now()
    return right_now.date()
def get_current_datetime():
    """Return the current timezone-aware datetime (used as a field default)."""
    current = timezone.now()
    return current
# proxy class for convenience and UI
class Dojo_User(User):
    """Proxy of the auth User model adding display-name helpers."""

    class Meta:
        proxy = True

    def get_full_name(self):
        """
        Returns the first_name plus the last_name, with a space in between.
        """
        display = '{0} {1} ({2})'.format(self.first_name,
                                         self.last_name,
                                         self.username)
        return display.strip()

    def __unicode__(self):
        return self.get_full_name()
class UserContactInfo(models.Model):
    """Extended contact details (phone numbers, chat handles) for one user."""
    user = models.OneToOneField(User)
    title = models.CharField(blank=True, null=True, max_length=150)
    # Validator shared by both phone fields: optional '+', 9-15 digits.
    phone_regex = RegexValidator(regex=r'^\+?1?\d{9,15}$',
                                 message="Phone number must be entered in the format: '+999999999'. "
                                         "Up to 15 digits allowed.")
    phone_number = models.CharField(validators=[phone_regex], blank=True, max_length=15,
                                    help_text="Phone number must be entered in the format: '+999999999'. "
                                              "Up to 15 digits allowed.")
    cell_number = models.CharField(validators=[phone_regex], blank=True, max_length=15,
                                   help_text="Phone number must be entered in the format: '+999999999'. "
                                             "Up to 15 digits allowed.")
    twitter_username = models.CharField(blank=True, null=True, max_length=150)
    github_username = models.CharField(blank=True, null=True, max_length=150)
    slack_username = models.CharField(blank=True, null=True, max_length=150)
    hipchat_username = models.CharField(blank=True, null=True, max_length=150)
class Contact(models.Model):
    """A named contact person with team and permission flags."""
    name = models.CharField(max_length=100)
    email = models.EmailField()
    team = models.CharField(max_length=100)
    is_admin = models.BooleanField(default=False)
    is_globally_read_only = models.BooleanField(default=False)
    # NOTE(review): no default/auto_now -- callers must set this before
    # saving or the save will fail; confirm whether auto_now was intended.
    updated = models.DateTimeField(editable=False)
class Product_Type(models.Model):
    """Category of products, with severity-presence and health rollups."""
    name = models.CharField(max_length=300)
    critical_product = models.BooleanField(default=False)
    key_product = models.BooleanField(default=False)

    def critical_present(self):
        """True when any Critical finding exists under this product type.

        Uses .exists() (single EXISTS query) instead of counting rows and
        returns an explicit bool; the original returned True or None.
        """
        return Finding.objects.filter(test__engagement__product__prod_type=self,
                                      severity='Critical').exists()

    def high_present(self):
        """True when any High finding exists under this product type."""
        return Finding.objects.filter(test__engagement__product__prod_type=self,
                                      severity='High').exists()

    def calc_health(self):
        """Health score in [5, 100], penalizing Critical findings heavily
        (base 40, -5 each additional) and High findings lightly
        (base 60, -2 each additional)."""
        h_count = Finding.objects.filter(test__engagement__product__prod_type=self,
                                         severity='High').count()
        c_count = Finding.objects.filter(test__engagement__product__prod_type=self,
                                         severity='Critical').count()
        health = 100
        if c_count > 0:
            health = 40
            health = health - ((c_count - 1) * 5)
        if h_count > 0:
            if health == 100:
                health = 60
            health = health - ((h_count - 1) * 2)
        # Floor the score at 5.
        return max(health, 5)

    def findings_count(self):
        """Count open, verified, non-duplicate, in-scope findings of any
        standard severity under this product type."""
        return Finding.objects.filter(mitigated__isnull=True,
                                      verified=True,
                                      false_p=False,
                                      duplicate=False,
                                      out_of_scope=False,
                                      test__engagement__product__prod_type=self).filter(Q(severity="Critical") |
                                                                                        Q(severity="High") |
                                                                                        Q(severity="Medium") |
                                                                                        Q(severity="Low")).count()

    def products_count(self):
        """Number of products assigned to this type."""
        return Product.objects.filter(prod_type=self).count()

    def __unicode__(self):
        return self.name

    def get_breadcrumbs(self):
        bc = [{'title': self.__unicode__(),
               'url': reverse('edit_product_type', args=(self.id,))}]
        return bc
class Product_Line(models.Model):
    """A named product grouping with a free-text description."""
    name = models.CharField(max_length=300)
    description = models.CharField(max_length=2000)

    def __unicode__(self):
        return self.name
class Report_Type(models.Model):
    # Label identifying a kind of report an engagement can produce.
    name = models.CharField(max_length=300)
class Test_Type(models.Model):
    """Named kind of test that a Test record can reference."""
    name = models.CharField(max_length=200)

    def __unicode__(self):
        return self.name

    def get_breadcrumbs(self):
        # Test types have no dedicated view; the crumb carries no URL.
        bc = [{'title': self.__unicode__(),
               'url': None}]
        return bc
class Product(models.Model):
    """A product/application that engagements, tests and findings roll up to."""
    name = models.CharField(max_length=300)
    description = models.CharField(max_length=4000)
    '''
    The following three fields are deprecated and no longer in use.
    They remain in model for backwards compatibility and will be removed
    in a future release. prod_manager, tech_contact, manager
    The admin script migrate_product_contacts should be used to migrate data from
    these fields to their replacements. ./manage.py migrate_product_contacts
    '''
    prod_manager = models.CharField(default=0, max_length=200)  # unused
    tech_contact = models.CharField(default=0, max_length=200)  # unused
    manager = models.CharField(default=0, max_length=200)  # unused
    product_manager = models.ForeignKey(Dojo_User, null=True, blank=True, related_name='product_manager')
    technical_contact = models.ForeignKey(Dojo_User, null=True, blank=True, related_name='technical_contact')
    team_manager = models.ForeignKey(Dojo_User, null=True, blank=True, related_name='team_manager')
    created = models.DateTimeField(editable=False, null=True, blank=True)
    prod_type = models.ForeignKey(Product_Type, related_name='prod_type',
                                  null=True, blank=True)
    updated = models.DateTimeField(editable=False, null=True, blank=True)
    tid = models.IntegerField(default=0, editable=False)
    authorized_users = models.ManyToManyField(User, blank=True)

    def __unicode__(self):
        return self.name

    class Meta:
        ordering = ('name',)

    @property
    def findings_count(self):
        """Count of open, verified, non-duplicate, in-scope findings."""
        return Finding.objects.filter(mitigated__isnull=True,
                                      verified=True,
                                      false_p=False,
                                      duplicate=False,
                                      out_of_scope=False,
                                      test__engagement__product=self).count()

    @property
    def endpoint_count(self):
        """Number of distinct hosts (port suffix ignored) that carry active,
        verified, unmitigated findings for this product."""
        endpoints = Endpoint.objects.filter(finding__test__engagement__product=self,
                                            finding__active=True,
                                            finding__verified=True,
                                            finding__mitigated__isnull=True)
        # A set deduplicates hosts in O(1) per lookup; the original kept a
        # parallel 'ids' list that was never used and scanned a list per host.
        hosts = set()
        for e in endpoints:
            if ":" in e.host:
                hosts.add(e.host[:e.host.index(':')])
            else:
                hosts.add(e.host)
        return len(hosts)

    def open_findings(self, start_date=None, end_date=None):
        """Per-severity counts of open findings dated within the range.

        Returns {} when either bound is missing, otherwise a dict with
        'Critical'/'High'/'Medium'/'Low' counts plus their 'Total'.
        """
        if start_date is None or end_date is None:
            return {}
        # One base queryset replaces four copy-pasted filter chains.
        base = Finding.objects.filter(test__engagement__product=self,
                                      mitigated__isnull=True,
                                      verified=True,
                                      false_p=False,
                                      duplicate=False,
                                      out_of_scope=False,
                                      date__range=[start_date,
                                                   end_date])
        counts = dict((sev, base.filter(severity=sev).count())
                      for sev in ('Critical', 'High', 'Medium', 'Low'))
        counts['Total'] = (counts['Critical'] + counts['High'] +
                           counts['Medium'] + counts['Low'])
        return counts

    def get_breadcrumbs(self):
        bc = [{'title': self.__unicode__(),
               'url': reverse('view_product', args=(self.id,))}]
        return bc
class ScanSettings(models.Model):
    """Per-product configuration for scheduled network scans."""
    product = models.ForeignKey(Product, default=1, editable=False)
    # Comma-separated address list; see addresses_as_list().
    addresses = models.TextField(default="none")
    user = models.ForeignKey(User, editable=False)
    date = models.DateTimeField(editable=False, blank=True,
                                default=get_current_datetime)
    frequency = models.CharField(max_length=10000, null=True,
                                 blank=True)
    email = models.CharField(max_length=512)
    protocol = models.CharField(max_length=10, default='TCP')

    def addresses_as_list(self):
        """Split the comma-separated address list into stripped entries.

        Blank entries (from trailing or doubled commas) are dropped; the
        original returned them as empty strings.
        """
        if self.addresses:
            return [a.strip() for a in self.addresses.split(',') if a.strip()]
        return []

    def get_breadcrumbs(self):
        bc = self.product.get_breadcrumbs()
        bc += [{'title': "Scan Settings",
                'url': reverse('view_scan_settings', args=(self.product.id, self.id,))}]
        return bc
"""
Modified by Fatimah and Micheal
removed ip_scans field
"""
class Scan(models.Model):
    """A single execution of a ScanSettings configuration."""
    scan_settings = models.ForeignKey(ScanSettings, default=1, editable=False)
    date = models.DateTimeField(editable=False, blank=True,
                                default=get_current_datetime)
    protocol = models.CharField(max_length=10, default='TCP')
    # Lifecycle state string; starts as 'Pending', not user-editable.
    status = models.CharField(max_length=10, default='Pending', editable=False)
    baseline = models.BooleanField(default=False,
                                   verbose_name="Current Baseline")

    def __unicode__(self):
        return self.scan_settings.protocol + " Scan " + str(self.date)

    def get_breadcrumbs(self):
        bc = self.scan_settings.get_breadcrumbs()
        bc += [{'title': self.__unicode__(),
                'url': reverse('view_scan', args=(self.id,))}]
        return bc
"""
Modified by Fatimah and Micheal
Changed services from a ManytToMany field to a formatted string
"port,protocol,status"
Added scan_id
"""
class IPScan(models.Model):
    """Per-address result of a Scan; services is a formatted string
    "port,protocol,status" (see the module note above this class)."""
    address = models.TextField(editable=False, default="none")
    services = models.CharField(max_length=800, null=True)
    scan = models.ForeignKey(Scan, default=1, editable=False)
class Engagement_Type(models.Model):
    # Label describing a kind of engagement.
    name = models.CharField(max_length=200)
class Engagement(models.Model):
    """A scoped testing effort against a Product over a target date range."""
    name = models.CharField(max_length=300, null=True, blank=True)
    description = models.CharField(max_length=2000, null=True, blank=True)
    version = models.CharField(max_length=100, null=True, blank=True)
    eng_type = models.ForeignKey(Engagement_Type, null=True, blank=True)
    first_contacted = models.DateField(null=True, blank=True)
    target_start = models.DateField(null=False, blank=False)
    target_end = models.DateField(null=False, blank=False)
    lead = models.ForeignKey(User, editable=True, null=True)
    requester = models.ForeignKey(Contact, null=True, blank=True)
    reason = models.CharField(max_length=2000, null=True, blank=True)
    report_type = models.ForeignKey(Report_Type, null=True, blank=True)
    product = models.ForeignKey(Product)
    updated = models.DateTimeField(editable=False, null=True, blank=True)
    active = models.BooleanField(default=True, editable=False)
    test_strategy = models.URLField(editable=True, blank=True, null=True)
    # Activity checklist flags for the engagement.
    threat_model = models.BooleanField(default=True)
    api_test = models.BooleanField(default=True)
    pen_test = models.BooleanField(default=True)
    check_list = models.BooleanField(default=True)
    status = models.CharField(editable=True, max_length=2000, default='',
                              null=True,
                              choices=(('In Progress', 'In Progress'),
                                       ('On Hold', 'On Hold'),
                                       ('Completed', 'Completed')))
    # Workflow stage; not user-editable.
    progress = models.CharField(max_length=100,
                                default='threat_model', editable=False)
    tmodel_path = models.CharField(max_length=1000, default='none',
                                   editable=False, blank=True, null=True)
    risk_path = models.CharField(max_length=1000, default='none',
                                 editable=False, blank=True, null=True)
    risk_acceptance = models.ManyToManyField("Risk_Acceptance",
                                             default=None,
                                             editable=False,
                                             blank=True)
    done_testing = models.BooleanField(default=False, editable=False)

    class Meta:
        # Most recent engagements first.
        ordering = ['-target_start']

    def __unicode__(self):
        return "Engagement: %s (%s)" % (self.name if self.name else '',
                                        self.target_start.strftime(
                                            "%b %d, %Y"))

    def get_breadcrumbs(self):
        bc = self.product.get_breadcrumbs()
        bc += [{'title': self.__unicode__(),
                'url': reverse('view_engagement', args=(self.id,))}]
        return bc
class CWE(models.Model):
    """A Common Weakness Enumeration entry (number, description, link)."""
    url = models.CharField(max_length=1000)
    description = models.CharField(max_length=2000)
    number = models.IntegerField()
class Endpoint(models.Model):
    """A network endpoint (protocol://host:port/path?query#fragment) that
    findings attach to, scoped to a product."""
    protocol = models.CharField(null=True, blank=True, max_length=10,
                                help_text="The communication protocol such as 'http', 'ftp', etc.")
    host = models.CharField(null=True, blank=True, max_length=500,
                            help_text="The host name or IP address, you can also include the port number. For example"
                                      "'127.0.0.1', '127.0.0.1:8080', 'localhost', 'yourdomain.com'.")
    fqdn = models.CharField(null=True, blank=True, max_length=500)
    port = models.IntegerField(null=True, blank=True, help_text="The network port associated with the endpoint.")
    path = models.CharField(null=True, blank=True, max_length=500,
                            help_text="The location of the resource, it should start with a '/'. For example"
                                      "/endpoint/420/edit")
    query = models.CharField(null=True, blank=True, max_length=5000,
                             help_text="The query string, the question mark should be omitted."
                                       "For example 'group=4&team=8'")
    fragment = models.CharField(null=True, blank=True, max_length=500,
                                help_text="The fragment identifier which follows the hash mark. The hash mark should "
                                          "be omitted. For example 'section-13', 'paragraph-2'.")
    product = models.ForeignKey(Product, null=True, blank=True, )

    class Meta:
        ordering = ['product', 'protocol', 'host', 'path', 'query', 'fragment']

    def __unicode__(self):
        """Reassemble the endpoint into a URL string, following urlparse's
        netloc rules for when '//' is prepended."""
        from urlparse import uses_netloc
        netloc = self.host
        port = self.port
        scheme = self.protocol
        url = self.path if self.path else ''
        query = self.query
        fragment = self.fragment
        if port:
            netloc += ':%s' % port
        if netloc or (scheme and scheme in uses_netloc and url[:2] != '//'):
            if url and url[:1] != '/': url = '/' + url
            if scheme and scheme in uses_netloc and url[:2] != '//':
                url = '//' + (netloc or '') + url
            else:
                url = (netloc or '') + url
        if scheme:
            url = scheme + ':' + url
        if query:
            url = url + '?' + query
        if fragment:
            url = url + '#' + fragment
        return url

    def __eq__(self, other):
        # NOTE(review): __eq__ is defined without __hash__; under Python 3
        # this would make Endpoint unhashable. Left unchanged to avoid
        # altering set/dict behavior -- confirm before porting.
        if isinstance(other, Endpoint):
            return self.__unicode__() == other.__unicode__()
        else:
            return NotImplemented

    def finding_count(self):
        """Count active, verified, in-scope findings on any endpoint of this
        product that shares this endpoint's host (ignoring port)."""
        host = self.host_no_port
        # re.escape prevents regex metacharacters in the host (e.g. '.')
        # from matching unrelated hosts ('a.com' no longer matches 'aXcom').
        endpoints = Endpoint.objects.filter(host__regex="^" + re.escape(host) + ":?",
                                            product=self.product).distinct()
        findings = Finding.objects.filter(endpoints__in=endpoints,
                                          active=True,
                                          verified=True,
                                          out_of_scope=False).distinct()
        return findings.count()

    def active_findings(self):
        """Open, verified, non-duplicate findings for all endpoints sharing
        this host, ordered by numerical severity."""
        host = self.host_no_port
        endpoints = Endpoint.objects.filter(host__regex="^" + re.escape(host) + ":?",
                                            product=self.product).distinct()
        return Finding.objects.filter(endpoints__in=endpoints,
                                      active=True,
                                      verified=True,
                                      mitigated__isnull=True,
                                      false_p=False,
                                      duplicate=False).distinct().order_by('numerical_severity')

    def get_breadcrumbs(self):
        bc = self.product.get_breadcrumbs()
        bc += [{'title': self.host_no_port,
                'url': reverse('view_endpoint', args=(self.id,))}]
        return bc

    @staticmethod
    def from_uri(uri):
        # Stub: returns an empty Endpoint regardless of the URI.
        return Endpoint()

    @property
    def host_no_port(self):
        """The host with any ':port' suffix stripped."""
        if ":" in self.host:
            return self.host[:self.host.index(":")]
        else:
            return self.host
class Notes(models.Model):
    """A free-form note with its author and creation timestamp."""
    entry = models.CharField(max_length=2400)
    date = models.DateTimeField(null=False, editable=False,
                                default=get_current_datetime)
    author = models.ForeignKey(User, editable=False)

    class Meta:
        # Newest notes first.
        ordering = ['-date']

    def __unicode__(self):
        return self.entry
class Multi_Usage_Notes(models.Model):
    """A note shared across several occurrences, linked back to a base Note."""
    entry = models.CharField(max_length=2400)
    date = models.DateTimeField(null=False, editable=False,
                                default=get_current_datetime)
    # Identifier of the occurrence this entry applies to.
    occurrence_number = models.CharField(max_length=2000)
    note = models.ForeignKey(Notes)

    class Meta:
        # Newest first.
        ordering = ['-date']

    def __unicode__(self):
        return self.entry
class Development_Environment(models.Model):
    """A named environment (e.g. staging, production) that tests run against."""
    name = models.CharField(max_length=200)

    def __unicode__(self):
        return self.name

    def get_breadcrumbs(self):
        """Single-crumb trail pointing at this environment's edit page."""
        return [{"title": self.__unicode__(), "url": reverse("edit_dev_env", args=(self.id,))}]
class Test(models.Model):
    """A single test run (scan or assessment) inside an Engagement."""
    engagement = models.ForeignKey(Engagement, editable=False)
    lead = models.ForeignKey(User, editable=True, null=True)
    test_type = models.ForeignKey(Test_Type)
    target_start = models.DateTimeField()
    target_end = models.DateTimeField()
    estimated_time = models.TimeField(null=True, blank=True, editable=False)
    actual_time = models.TimeField(null=True, blank=True, editable=False, )
    percent_complete = models.IntegerField(null=True, blank=True,
                                           editable=True)
    notes = models.ManyToManyField(Notes, blank=True,
                                   editable=False)
    environment = models.ForeignKey(Development_Environment, null=True,
                                    blank=False)

    def __unicode__(self):
        # e.g. "Nessus Scan (Jan 01, 2017)"
        return "%s (%s)" % (self.test_type,
                            self.target_start.strftime("%b %d, %Y"))

    def get_breadcrumbs(self):
        """Engagement breadcrumb trail plus a crumb for this test."""
        bc = self.engagement.get_breadcrumbs()
        bc += [{'title': self.__unicode__(),
                'url': reverse('view_test', args=(self.id,))}]
        return bc

    def verified_finding_count(self):
        """Number of verified findings recorded against this test."""
        return Finding.objects.filter(test=self, verified=True).count()
class VA(models.Model):
    """A vulnerability-assessment request record."""
    address = models.TextField(editable=False, default="none")
    user = models.ForeignKey(User, editable=False)
    result = models.ForeignKey(Test, editable=False, null=True, blank=True)
    # Whether the assessment has completed.
    status = models.BooleanField(default=False, editable=False)
    start = models.CharField(max_length=100)
class Finding(models.Model):
    """A single security finding attached to a Test.

    Carries the vulnerability text (description/mitigation/impact),
    severity, lifecycle flags (active, verified, duplicate, ...),
    review/JIRA bookkeeping and the endpoints it was observed on.
    """
    title = models.TextField(max_length=1000)
    date = models.DateField(default=get_current_date)
    cwe = models.IntegerField(default=0, null=True, blank=True)
    url = models.TextField(null=True, blank=True, editable=False)
    severity = models.CharField(max_length=200)
    description = models.TextField()
    mitigation = models.TextField()
    impact = models.TextField()
    endpoints = models.ManyToManyField(Endpoint, blank=True, )
    # Scratch space used by importers before the finding is saved;
    # these are plain class attributes, not model fields.
    unsaved_endpoints = []
    unsaved_request = None
    unsaved_response = None
    unsaved_tags = None
    references = models.TextField(null=True, blank=True, db_column="refs")
    test = models.ForeignKey(Test, editable=False)
    # TODO: Will be deprecated soon
    is_template = models.BooleanField(default=False)
    active = models.BooleanField(default=True)
    verified = models.BooleanField(default=True)
    false_p = models.BooleanField(default=False, verbose_name="False Positive")
    duplicate = models.BooleanField(default=False)
    duplicate_finding = models.ForeignKey('self', editable=False, null=True, related_name='original_finding', blank=True)
    duplicate_list = models.ManyToManyField("self", editable=False, blank=True)
    out_of_scope = models.BooleanField(default=False)
    under_review = models.BooleanField(default=False)
    review_requested_by = models.ForeignKey(Dojo_User, null=True, blank=True, related_name='review_requested_by')
    reviewers = models.ManyToManyField(Dojo_User, blank=True)
    # Defect Tracking Review
    under_defect_review = models.BooleanField(default=False)
    defect_review_requested_by = models.ForeignKey(Dojo_User, null=True, blank=True, related_name='defect_review_requested_by')
    thread_id = models.IntegerField(default=0, editable=False)
    mitigated = models.DateTimeField(editable=False, null=True, blank=True)
    mitigated_by = models.ForeignKey(User, null=True, editable=False, related_name="mitigated_by")
    reporter = models.ForeignKey(User, editable=False, related_name='reporter')
    notes = models.ManyToManyField(Notes, blank=True,
                                   editable=False)
    numerical_severity = models.CharField(max_length=4)
    last_reviewed = models.DateTimeField(null=True, editable=False)
    last_reviewed_by = models.ForeignKey(User, null=True, editable=False, related_name='last_reviewed_by')
    images = models.ManyToManyField('FindingImage', blank=True)
    issue_id = models.TextField(null=True, blank=True)
    line_number = models.TextField(null=True, blank=True)
    sourcefilepath = models.TextField(null=True, blank=True)
    sourcefile = models.TextField(null=True, blank=True)
    param = models.TextField(null=True, blank=True)
    payload = models.TextField(null=True, blank=True)
    # alter table dojo_finding add column function longtext;
    function = models.TextField(null=True, blank=True)

    # Sort weight per severity label (lower sorts first).
    SEVERITIES = {'Info': 4, 'Low': 3, 'Medium': 2,
                  'High': 1, 'Critical': 0}

    class Meta:
        ordering = ('numerical_severity', '-date', 'title')

    def get_hash_code(self):
        """SHA-256 hex digest of title + description, used for deduplication.

        The text is encoded explicitly: hashing a unicode string directly
        raises for non-ASCII content (and always raises on Python 3).
        """
        hash_string = self.title + self.description
        return hashlib.sha256(hash_string.encode('utf-8')).hexdigest()

    @staticmethod
    def get_numerical_severity(severity):
        """Map a severity label to its sortable 'S0'..'S4' code.

        Unknown labels (including Info) fall through to 'S4'.
        """
        return {
            'Critical': 'S0',
            'High': 'S1',
            'Medium': 'S2',
            'Low': 'S3',
        }.get(severity, 'S4')

    def __unicode__(self):
        return self.title

    def status(self):
        """Human-readable, comma-separated status summary."""
        status = []
        if self.active:
            status += ['Active']
        else:
            status += ['Inactive']
        if self.verified:
            status += ['Verified']
        if self.mitigated:
            status += ['Mitigated']
        if self.false_p:
            status += ['False Positive']
        if self.out_of_scope:
            status += ['Out Of Scope']
        if self.duplicate:
            status += ['Duplicate']
        # exists() issues a cheap EXISTS query instead of fetching every row.
        if self.risk_acceptance_set.exists():
            status += ['Accepted']
        if not status:
            status += ['Initial']
        return ", ".join([str(s) for s in status])

    def age(self):
        """Days the finding has been (or was) open; never negative."""
        opened = datetime.combine(self.date, datetime.min.time()).date()
        if self.mitigated:
            days = (self.mitigated.date() - opened).days
        else:
            days = (get_current_date() - opened).days
        return days if days > 0 else 0

    def jira(self):
        """The linked JIRA_Issue, or None when there is none."""
        try:
            return JIRA_Issue.objects.get(finding=self)
        except Exception:
            # A bare except here would also swallow SystemExit/KeyboardInterrupt.
            return None

    def jira_conf(self):
        """The JIRA configuration of this finding's product, or None."""
        try:
            jpkey = JIRA_PKey.objects.get(product=self.test.engagement.product)
            return jpkey.conf
        except Exception:
            return None

    def long_desc(self):
        """JIRA-markup description of the finding for issue bodies."""
        long_desc = ''
        long_desc += '*' + self.title + '*\n\n'
        long_desc += '*Severity:* ' + self.severity + '\n\n'
        long_desc += '*Systems*: \n'
        for e in self.endpoints.all():
            long_desc += str(e) + '\n\n'
        long_desc += '*Description*: \n' + self.description + '\n\n'
        long_desc += '*Mitigation*: \n' + self.mitigation + '\n\n'
        long_desc += '*Impact*: \n' + self.impact + '\n\n'
        # references is nullable; concatenating None raises TypeError.
        long_desc += '*References*:' + (self.references or '')
        return long_desc

    def save(self, *args, **kwargs):
        super(Finding, self).save(*args, **kwargs)
        # NOTE(review): hash_code is set after save and there is no hash_code
        # model field here, so it is only an in-memory attribute — presumably
        # consumed by the async dedupe task; confirm before reordering.
        self.hash_code = self.get_hash_code()
        system_settings = System_Settings.objects.get()
        if system_settings.enable_deduplication:
            # Imported lazily to avoid a circular import with dojo.tasks.
            from dojo.tasks import async_dedupe
            async_dedupe.delay(self, *args, **kwargs)
        if system_settings.false_positive_history:
            from dojo.tasks import async_false_history
            async_false_history.delay(self, *args, **kwargs)

    def clean(self):
        """Backfill placeholder text for empty title/long-text fields."""
        no_check = ["test", "reporter"]
        bigfields = ["description", "mitigation", "references", "impact", "url"]
        for field_obj in self._meta.fields:
            field = field_obj.name
            if field not in no_check:
                val = getattr(self, field)
                if not val and field == "title":
                    setattr(self, field, "No title given")
                if not val and field in bigfields:
                    setattr(self, field, "No %s given" % field)

    def severity_display(self):
        """Numerical or textual severity, per the system-wide setting."""
        try:
            system_settings = System_Settings.objects.get()
            if system_settings.s_finding_severity_naming:
                return self.numerical_severity
            else:
                return self.severity
        except Exception:
            # Settings row missing/unreadable: fall back to the textual label.
            return self.severity

    def get_breadcrumbs(self):
        """Test breadcrumb trail plus a crumb for this finding."""
        bc = self.test.get_breadcrumbs()
        bc += [{'title': self.__unicode__(),
                'url': reverse('view_finding', args=(self.id,))}]
        return bc

    # def get_request(self):
    #     if self.burprawrequestresponse_set.count() > 0:
    #         reqres = BurpRawRequestResponse.objects.get(finding=self)
    #         return base64.b64decode(reqres.burpRequestBase64)
    #
    # def get_response(self):
    #     if self.burprawrequestresponse_set.count() > 0:
    #         reqres = BurpRawRequestResponse.objects.get(finding=self)
    #         res = base64.b64decode(reqres.burpResponseBase64)
    #         # Removes all blank lines
    #         res = re.sub(r'\n\s*\n', '\n', res)
    #         return res
# Give the auto-generated Finding<->Endpoint through model a readable label.
Finding.endpoints.through.__unicode__ = lambda x: "Endpoint: " + x.endpoint.host
class Stub_Finding(models.Model):
    """A lightweight 'potential finding' captured before full triage."""
    title = models.TextField(max_length=1000, blank=False, null=False)
    date = models.DateField(default=get_current_date, blank=False, null=False)
    severity = models.CharField(max_length=200, blank=True, null=True)
    description = models.TextField(blank=True, null=True)
    test = models.ForeignKey(Test, editable=False)
    reporter = models.ForeignKey(User, editable=False)

    class Meta:
        ordering = ('-date', 'title')

    def __unicode__(self):
        return self.title

    def get_breadcrumbs(self):
        """Test breadcrumb trail plus a crumb for this potential finding."""
        bc = self.test.get_breadcrumbs()
        bc += [{'title': "Potential Finding: " + self.__unicode__(),
                'url': reverse('view_potential_finding', args=(self.id,))}]
        return bc
class Finding_Template(models.Model):
    """Reusable boilerplate for creating findings of a known type."""
    title = models.TextField(max_length=1000)
    cwe = models.IntegerField(default=None, null=True, blank=True)
    severity = models.CharField(max_length=200, null=True, blank=True)
    description = models.TextField(null=True, blank=True)
    mitigation = models.TextField(null=True, blank=True)
    impact = models.TextField(null=True, blank=True)
    references = models.TextField(null=True, blank=True, db_column="refs")
    numerical_severity = models.CharField(max_length=4, null=True, blank=True, editable=False)

    # Sort weight per severity label (lower sorts first).
    SEVERITIES = {'Info': 4, 'Low': 3, 'Medium': 2,
                  'High': 1, 'Critical': 0}

    class Meta:
        ordering = ['-cwe']

    def __unicode__(self):
        return self.title

    def get_breadcrumbs(self):
        """Single-crumb trail pointing at this template's view page."""
        bc = [{'title': self.__unicode__(),
               'url': reverse('view_template', args=(self.id,))}]
        return bc
class Check_List(models.Model):
    """Per-engagement security checklist: a pass/fail status plus the
    related findings for each checklist category."""
    session_management = models.CharField(max_length=50, default='none')
    session_issues = models.ManyToManyField(Finding,
                                            related_name='session_issues',
                                            blank=True)
    encryption_crypto = models.CharField(max_length=50, default='none')
    crypto_issues = models.ManyToManyField(Finding,
                                           related_name='crypto_issues',
                                           blank=True)
    configuration_management = models.CharField(max_length=50, default='')
    config_issues = models.ManyToManyField(Finding,
                                           related_name='config_issues',
                                           blank=True)
    authentication = models.CharField(max_length=50, default='none')
    auth_issues = models.ManyToManyField(Finding,
                                         related_name='auth_issues',
                                         blank=True)
    authorization_and_access_control = models.CharField(max_length=50,
                                                        default='none')
    author_issues = models.ManyToManyField(Finding,
                                           related_name='author_issues',
                                           blank=True)
    data_input_sanitization_validation = models.CharField(max_length=50,
                                                          default='none')
    data_issues = models.ManyToManyField(Finding, related_name='data_issues',
                                         blank=True)
    sensitive_data = models.CharField(max_length=50, default='none')
    sensitive_issues = models.ManyToManyField(Finding,
                                              related_name='sensitive_issues',
                                              blank=True)
    other = models.CharField(max_length=50, default='none')
    other_issues = models.ManyToManyField(Finding, related_name='other_issues',
                                          blank=True)
    engagement = models.ForeignKey(Engagement, editable=False,
                                   related_name='eng_for_check')

    @staticmethod
    def get_status(pass_fail):
        """Map a Pass/Fail label to a Bootstrap CSS context class."""
        if pass_fail == 'Pass':
            return 'success'
        elif pass_fail == 'Fail':
            return 'danger'
        else:
            return 'warning'

    def get_breadcrumb(self):
        """Engagement breadcrumb trail plus a crumb for this checklist."""
        # NOTE(review): other models define/call get_breadcrumbs() (plural);
        # confirm Engagement actually provides get_breadcrumb().
        bc = self.engagement.get_breadcrumb()
        bc += [{'title': "Check List",
                'url': reverse('complete_checklist', args=(self.engagement.id,))}]
        return bc
class BurpRawRequestResponse(models.Model):
    """Raw Burp request/response pair stored base64-encoded on a finding."""
    finding = models.ForeignKey(Finding, blank=True, null=True)
    burpRequestBase64 = models.BinaryField()
    burpResponseBase64 = models.BinaryField()

    def get_request(self):
        """Decoded raw HTTP request."""
        return base64.b64decode(self.burpRequestBase64)

    def get_response(self):
        """Decoded raw HTTP response with blank lines collapsed."""
        decoded = base64.b64decode(self.burpResponseBase64)
        # Removes all blank lines
        return re.sub(r'\n\s*\n', '\n', decoded)
class Risk_Acceptance(models.Model):
    """A signed-off acceptance of one or more findings, with evidence file."""
    path = models.FileField(upload_to='risk/%Y/%m/%d',
                            editable=False, null=False,
                            blank=False, verbose_name="Risk Acceptance File")
    accepted_findings = models.ManyToManyField(Finding)
    reporter = models.ForeignKey(User, editable=False)
    notes = models.ManyToManyField(Notes, editable=False)
    created = models.DateTimeField(null=False, editable=False,
                                   default=now)

    def __unicode__(self):
        return "Risk Acceptance added on %s" % self.created.strftime(
            "%b %d, %Y")

    def filename(self):
        """Basename of the uploaded evidence file, or '' when absent.

        A FileField attribute is a FieldFile and is never None, so the
        previous `is not None` check always passed and crashed on a missing
        file; truthiness correctly detects an absent upload.
        """
        return os.path.basename(self.path.name) if self.path else ''

    def get_breadcrumbs(self):
        """Owning engagement's breadcrumb trail plus a crumb for this record."""
        bc = self.engagement_set.first().get_breadcrumbs()
        bc += [{'title': self.__unicode__(),
                'url': reverse('view_risk', args=(self.engagement_set.first().product.id, self.id,))}]
        return bc
class Report(models.Model):
    """An asynchronously generated report (tracked by celery task_id)."""
    name = models.CharField(max_length=200)
    type = models.CharField(max_length=100, default='Finding')
    format = models.CharField(max_length=15, default='AsciiDoc')
    requester = models.ForeignKey(User)
    # Celery task identifier used to poll generation progress.
    task_id = models.CharField(max_length=50)
    file = models.FileField(upload_to='reports/%Y/%m/%d', verbose_name='Report File', null=True)
    status = models.CharField(max_length=10, default='requested')
    options = models.TextField()
    datetime = models.DateTimeField(auto_now_add=True)
    done_datetime = models.DateTimeField(null=True)

    def __unicode__(self):
        return self.name

    def get_url(self):
        """Download URL for the finished report."""
        return reverse('download_report', args=(self.id,))

    class Meta:
        # Most recently requested first.
        ordering = ['-datetime']
class FindingImage(models.Model):
    """Screenshot/evidence image with derived JPEG renditions in several sizes."""
    image = models.ImageField(upload_to='finding_images', null=True)
    # Derived renditions are generated lazily by imagekit from `image`.
    image_thumbnail = ImageSpecField(source='image',
                                     processors=[ResizeToCover(100, 100)],
                                     format='JPEG',
                                     options={'quality': 70})
    image_small = ImageSpecField(source='image',
                                 processors=[ResizeToCover(640, 480)],
                                 format='JPEG',
                                 options={'quality': 100})
    image_medium = ImageSpecField(source='image',
                                  processors=[ResizeToCover(800, 600)],
                                  format='JPEG',
                                  options={'quality': 100})
    image_large = ImageSpecField(source='image',
                                 processors=[ResizeToCover(1024, 768)],
                                 format='JPEG',
                                 options={'quality': 100})

    def __unicode__(self):
        return self.image.name
class FindingImageAccessToken(models.Model):
    """This will allow reports to request the images without exposing the media root to the world without
    authentication"""
    user = models.ForeignKey(User, null=False, blank=False)
    image = models.ForeignKey(FindingImage, null=False, blank=False)
    token = models.CharField(max_length=255)
    # Which rendition of the image the token grants access to.
    size = models.CharField(max_length=9,
                            choices=(
                                ('small', 'Small'),
                                ('medium', 'Medium'),
                                ('large', 'Large'),
                                ('thumbnail', 'Thumbnail'),
                                ('original', 'Original')),
                            default='medium')

    def save(self, *args, **kwargs):
        # Generate a random token on first save only.
        if not self.token:
            self.token = uuid4()
        return super(FindingImageAccessToken, self).save(*args, **kwargs)
class JIRA_Conf(models.Model):
    """Connection settings and severity/priority mapping for a JIRA server."""
    url = models.URLField(max_length=2000, verbose_name="JIRA URL")
    # product = models.ForeignKey(Product)
    username = models.CharField(max_length=2000)
    # NOTE(review): stored as a plain CharField — presumably encrypted
    # elsewhere or accepted risk; confirm.
    password = models.CharField(max_length=2000)
    # project_key = models.CharField(max_length=200,null=True, blank=True)
    # enabled = models.BooleanField(default=True)
    default_issue_type = models.CharField(max_length=9,
                                          choices=(
                                              ('Task', 'Task'),
                                              ('Story', 'Story'),
                                              ('Epic', 'Epic'),
                                              ('Spike', 'Spike'),
                                              ('Bug', 'Bug')),
                                          default='Bug')
    epic_name_id = models.IntegerField()
    open_status_key = models.IntegerField()
    close_status_key = models.IntegerField()
    low_mapping_severity = models.CharField(max_length=200)
    medium_mapping_severity = models.CharField(max_length=200)
    high_mapping_severity = models.CharField(max_length=200)
    critical_mapping_severity = models.CharField(max_length=200)
    finding_text = models.TextField(null=True, blank=True)

    def __unicode__(self):
        return self.url + " | " + self.username

    def get_priority(self, status):
        """Map a Dojo severity label to the configured JIRA priority.

        Unknown labels return 'N/A'. A mapping replaces the previous
        repetitive if/elif chain.
        """
        mapping = {
            'Low': self.low_mapping_severity,
            'Medium': self.medium_mapping_severity,
            'High': self.high_mapping_severity,
            'Critical': self.critical_mapping_severity,
        }
        return mapping.get(status, 'N/A')
class JIRA_Issue(models.Model):
    """Link between a Dojo finding/engagement and its JIRA issue."""
    jira_id = models.CharField(max_length=200)
    jira_key = models.CharField(max_length=200)
    finding = models.OneToOneField(Finding, null=True, blank=True)
    engagement = models.OneToOneField(Engagement, null=True, blank=True)
class JIRA_Clone(models.Model):
    """Mapping from a JIRA issue to its cloned counterpart."""
    jira_id = models.CharField(max_length=200)
    jira_clone_id = models.CharField(max_length=200)
class JIRA_Details_Cache(models.Model):
    """Cached status/resolution of a JIRA issue to avoid repeated API calls."""
    jira_id = models.CharField(max_length=200)
    jira_key = models.CharField(max_length=200)
    jira_status = models.CharField(max_length=200)
    jira_resolution = models.CharField(max_length=200)
class JIRA_PKey(models.Model):
    """Per-product JIRA project binding and push behaviour flags."""
    project_key = models.CharField(max_length=200, blank=True)
    product = models.ForeignKey(Product)
    conf = models.ForeignKey(JIRA_Conf, verbose_name="JIRA Configuration", null=True, blank=True)
    component = models.CharField(max_length=200, blank=True)
    # Push every issue automatically instead of on demand.
    push_all_issues = models.BooleanField(default=False, blank=True)
    enable_engagement_epic_mapping = models.BooleanField(default=False, blank=True)
    push_notes = models.BooleanField(default=False, blank=True)
# Delivery channels selectable for each notification type below.
NOTIFICATION_CHOICES = (("slack", "slack"), ("hipchat", "hipchat"), ("mail", "mail"), ("alert", "alert"))
class Notifications(models.Model):
    """Per-user (or, with user=None, system default) notification channels
    chosen for each event type."""
    engagement_added = MultiSelectField(choices=NOTIFICATION_CHOICES, default='alert', blank=True)
    test_added = MultiSelectField(choices=NOTIFICATION_CHOICES, default='alert', blank=True)
    results_added = MultiSelectField(choices=NOTIFICATION_CHOICES, default='alert', blank=True)
    report_created = MultiSelectField(choices=NOTIFICATION_CHOICES, default='alert', blank=True)
    jira_update = MultiSelectField(choices=NOTIFICATION_CHOICES, default='alert', blank=True)
    upcoming_engagement = MultiSelectField(choices=NOTIFICATION_CHOICES, default='alert', blank=True)
    user_mentioned = MultiSelectField(choices=NOTIFICATION_CHOICES, default='alert', blank=True)
    other = MultiSelectField(choices=NOTIFICATION_CHOICES, default='alert', blank=True)
    # Null user means these are the global defaults.
    user = models.ForeignKey(User, default=None, null=True, editable=False)
class Tool_Type(models.Model):
    """A category of external tool (scanner, tracker, ...)."""
    name = models.CharField(max_length=200)
    description = models.CharField(max_length=2000, null=True)

    class Meta:
        ordering = ['name']

    def __unicode__(self):
        return self.name
class Tool_Configuration(models.Model):
    """A configured instance of an external tool, including its credentials."""
    name = models.CharField(max_length=200, null=False)
    description = models.CharField(max_length=2000, null=True, blank=True)
    url = models.URLField(max_length=2000, null=True)
    tool_type = models.ForeignKey(Tool_Type, related_name='tool_type')
    # How Dojo authenticates against the tool; the matching credential
    # fields below apply depending on this value.
    authentication_type = models.CharField(max_length=15,
                                           choices=(
                                               ('API', 'API Key'),
                                               ('Password', 'Username/Password'),
                                               ('SSH', 'SSH')),
                                           null=True, blank=True)
    username = models.CharField(max_length=200, null=True, blank=True)
    password = models.CharField(max_length=600, null=True, blank=True)
    auth_title = models.CharField(max_length=200, null=True, blank=True, verbose_name="Title for SSH/API Key")
    ssh = models.CharField(max_length=6000, null=True, blank=True)
    api_key = models.CharField(max_length=600, null=True, blank=True, verbose_name="API Key")

    class Meta:
        ordering = ['name']

    def __unicode__(self):
        return self.name
class Tool_Product_Settings(models.Model):
    """Binds a Tool_Configuration to a Product (e.g. the tool-side project id)."""
    name = models.CharField(max_length=200, null=False)
    description = models.CharField(max_length=2000, null=True, blank=True)
    url = models.URLField(max_length=2000, null=True, blank=True)
    product = models.ForeignKey(Product, default=1, editable=False)
    tool_configuration = models.ForeignKey(Tool_Configuration, null=False, related_name='tool_configuration')
    # Identifier of the project on the external tool's side.
    tool_project_id = models.CharField(max_length=200, null=True, blank=True)
    notes = models.ManyToManyField(Notes, blank=True, editable=False)

    class Meta:
        ordering = ['name']
class Tool_Product_History(models.Model):
    """Record of a tool run against a product's tool settings."""
    product = models.ForeignKey(Tool_Product_Settings, editable=False)
    last_scan = models.DateTimeField(null=False, editable=False, default=now)
    # NOTE(review): field name is misspelled ('succesfull'); renaming would
    # require a schema migration, so it is documented rather than changed.
    succesfull = models.BooleanField(default=True, verbose_name="Succesfully")
    configuration_details = models.CharField(max_length=2000, null=True, blank=True)
class Alerts(models.Model):
    """An in-app alert/notification entry shown to a user."""
    title = models.CharField(max_length=100, default='', null=False)
    description = models.CharField(max_length=2000, null=True)
    url = models.URLField(max_length=2000, null=True)
    source = models.CharField(max_length=100, default='Generic')
    icon = models.CharField(max_length=25, default='icon-user-check')
    user_id = models.ForeignKey(User, null=True, editable=False)
    created = models.DateTimeField(null=False, editable=False, default=now)

    class Meta:
        # Newest alerts first.
        ordering = ['-created']
class Cred_User(models.Model):
    """A stored application credential used during authenticated scans."""
    name = models.CharField(max_length=200, null=False)
    username = models.CharField(max_length=200, null=False)
    # NOTE(review): stored as a plain CharField — presumably encrypted
    # elsewhere in the app; confirm before relying on it.
    password = models.CharField(max_length=600, null=False)
    role = models.CharField(max_length=200, null=False)
    authentication = models.CharField(max_length=15,
                                      choices=(
                                          ('Form', 'Form Authentication'),
                                          ('SSO', 'SSO Redirect')),
                                      default='Form')
    http_authentication = models.CharField(max_length=15,
                                           choices=(
                                               ('Basic', 'Basic'),
                                               ('NTLM', 'NTLM')),
                                           null=True, blank=True)
    description = models.CharField(max_length=2000, null=True, blank=True)
    url = models.URLField(max_length=2000, null=False)
    environment = models.ForeignKey(Development_Environment, null=False)
    # Regexes used to detect logged-in / logged-out pages during scans.
    login_regex = models.CharField(max_length=200, null=True, blank=True)
    logout_regex = models.CharField(max_length=200, null=True, blank=True)
    notes = models.ManyToManyField(Notes, blank=True, editable=False)
    is_valid = models.BooleanField(default=True, verbose_name="Login is valid")
    # selenium_script = models.CharField(max_length=1000, default='none',
    #     editable=False, blank=True, null=True, verbose_name="Selenium Script File")

    class Meta:
        ordering = ['name']

    def __unicode__(self):
        return self.name + " (" + self.role + ")"
class Cred_Mapping(models.Model):
    """Associates a stored credential with a product/engagement/test/finding."""
    cred_id = models.ForeignKey(Cred_User, null=False, related_name="cred_user", verbose_name="Credential")
    product = models.ForeignKey(Product, null=True, blank=True, related_name="product")
    finding = models.ForeignKey(Finding, null=True, blank=True, related_name="finding")
    engagement = models.ForeignKey(Engagement, null=True, blank=True, related_name="engagement")
    test = models.ForeignKey(Test, null=True, blank=True, related_name="test")
    # Marks the credential that provides authentication for the scan itself.
    is_authn_provider = models.BooleanField(default=False, verbose_name="Authentication Provider")
    url = models.URLField(max_length=2000, null=True, blank=True)

    def __unicode__(self):
        return self.cred_id.name + " (" + self.cred_id.role + ")"
# Register for automatic logging to database
auditlog.register(Dojo_User)
auditlog.register(Endpoint)
auditlog.register(Engagement)
auditlog.register(Finding)
auditlog.register(Product)
auditlog.register(Test)
auditlog.register(Risk_Acceptance)
auditlog.register(Finding_Template)
auditlog.register(Cred_User)

# Register tagging for models
tag_register(Product)
tag_register(Test)
tag_register(Finding)
tag_register(Engagement)
tag_register(Endpoint)
tag_register(Finding_Template)

# Expose the models in the Django admin site.
admin.site.register(Test)
admin.site.register(Finding)
admin.site.register(FindingImage)
admin.site.register(FindingImageAccessToken)
admin.site.register(Stub_Finding)
admin.site.register(Engagement)
admin.site.register(Risk_Acceptance)
admin.site.register(Check_List)
admin.site.register(Test_Type)
admin.site.register(Endpoint)
admin.site.register(Product)
admin.site.register(Product_Type)
admin.site.register(Dojo_User)
admin.site.register(UserContactInfo)
admin.site.register(Notes)
admin.site.register(Multi_Usage_Notes)
admin.site.register(Report)
admin.site.register(Scan)
admin.site.register(ScanSettings)
admin.site.register(IPScan)
admin.site.register(Alerts)
admin.site.register(JIRA_Issue)
admin.site.register(Tool_Configuration)
admin.site.register(Tool_Product_Settings)
admin.site.register(Tool_Type)
admin.site.register(Cred_User)
admin.site.register(Cred_Mapping)
admin.site.register(System_Settings)

# Make these models full-text searchable via django-watson.
watson.register(Product)
watson.register(Test)
watson.register(Finding)
| wso2/security-tools | external/django-DefectDojo-1.2.1/dojo/models.py | Python | apache-2.0 | 53,517 |
# -*- coding: utf-8 -*-
# Copyright: See the LICENSE file.
from __future__ import unicode_literals
import collections
import logging
import warnings
from . import builder
from . import declarations
from . import enums
from . import errors
from . import utils
logger = logging.getLogger('factory.generate')
# Factory metaclasses
def get_factory_bases(bases):
    """Retrieve all FactoryMetaClass-derived bases from a list."""
    def _is_factory(candidate):
        return issubclass(candidate, BaseFactory)
    return list(filter(_is_factory, bases))
def resolve_attribute(name, bases, default=None):
    """Find the first definition of an attribute according to MRO order."""
    for base in bases:
        # EAFP: a single getattr instead of hasattr-then-getattr.
        try:
            return getattr(base, name)
        except AttributeError:
            continue
    return default
class FactoryMetaClass(type):
    """Factory metaclass for handling ordered declarations."""

    def __call__(cls, **kwargs):
        """Override the default Factory() syntax to call the default strategy.

        Returns an instance of the associated class.
        """
        # Dispatch on the strategy configured in the factory's Meta.
        if cls._meta.strategy == enums.BUILD_STRATEGY:
            return cls.build(**kwargs)
        elif cls._meta.strategy == enums.CREATE_STRATEGY:
            return cls.create(**kwargs)
        elif cls._meta.strategy == enums.STUB_STRATEGY:
            return cls.stub(**kwargs)
        else:
            raise errors.UnknownStrategy('Unknown Meta.strategy: {0}'.format(
                cls._meta.strategy))

    def __new__(mcs, class_name, bases, attrs):
        """Record attributes as a pattern for later instance construction.

        This is called when a new Factory subclass is defined; it will collect
        attribute declaration from the class definition.

        Args:
            class_name (str): the name of the class being created
            bases (list of class): the parents of the class being created
            attrs (str => obj dict): the attributes as defined in the class
                definition

        Returns:
            A new class
        """
        parent_factories = get_factory_bases(bases)
        if parent_factories:
            base_factory = parent_factories[0]
        else:
            base_factory = None

        # Meta/Params are popped so they don't become regular class attributes.
        attrs_meta = attrs.pop('Meta', None)
        attrs_params = attrs.pop('Params', None)

        base_meta = resolve_attribute('_meta', bases)
        options_class = resolve_attribute('_options_class', bases, FactoryOptions)

        meta = options_class()
        attrs['_meta'] = meta

        new_class = super(FactoryMetaClass, mcs).__new__(
            mcs, class_name, bases, attrs)

        # The options object does the heavy lifting of collecting declarations.
        meta.contribute_to_class(
            new_class,
            meta=attrs_meta,
            base_meta=base_meta,
            base_factory=base_factory,
            params=attrs_params,
        )

        return new_class

    def __str__(cls):
        if cls._meta.abstract:
            return '<%s (abstract)>' % cls.__name__
        else:
            return '<%s for %s>' % (cls.__name__, cls._meta.model)
class BaseMeta:
    # Default Meta for the base factory: abstract (no model) and
    # create() as the default instantiation strategy.
    abstract = True
    strategy = enums.CREATE_STRATEGY
class OptionDefault(object):
    """Declarative default for a single factory option.

    Attributes:
        name: str, the name of the option ('class Meta' attribute)
        value: object, the default value for the option
        inherit: bool, whether to inherit the value from the parent factory's
            `class Meta` when no value is provided
        checker: callable or None, an optional function used to detect invalid
            option values at declaration time
    """

    def __init__(self, name, value, inherit=False, checker=None):
        self.name = name
        self.value = value
        self.inherit = inherit
        self.checker = checker

    def apply(self, meta, base_meta):
        """Resolve the option: built-in default, then parent Meta, then own Meta."""
        resolved = self.value
        # Consult the parent's Meta first so the child's own Meta wins.
        sources = []
        if self.inherit and base_meta is not None:
            sources.append(base_meta)
        if meta is not None:
            sources.append(meta)
        for source in sources:
            resolved = getattr(source, self.name, resolved)
        if self.checker is not None:
            self.checker(meta, resolved)
        return resolved

    def __str__(self):
        return '%s(%r, %r, inherit=%r)' % (
            self.__class__.__name__,
            self.name, self.value, self.inherit)
class FactoryOptions(object):
    """Per-factory configuration: collected declarations, Meta options
    and the shared sequence counter."""

    def __init__(self):
        self.factory = None           # the Factory class these options belong to
        self.base_factory = None      # nearest Factory ancestor, if any
        self.base_declarations = {}   # name -> declaration, inherited + own
        self.parameters = {}          # name -> Params entries
        self.parameters_dependencies = {}
        self.pre_declarations = builder.DeclarationSet()
        self.post_declarations = builder.DeclarationSet()
        self._counter = None          # lazily created shared sequence counter
        self.counter_reference = None  # FactoryOptions owning the counter
@property
def declarations(self):
    """All effective declarations: base declarations overlaid with the
    declarations each parameter expands to (in declaration order)."""
    base_declarations = dict(self.base_declarations)
    for name, param in utils.sort_ordered_objects(self.parameters.items(), getter=lambda item: item[1]):
        base_declarations.update(param.as_declarations(name, base_declarations))
    return base_declarations
def _build_default_options(self):
    """Provide the default value for all allowed fields.

    Custom FactoryOptions classes should override this method
    to update() its return value.
    """
    return [
        OptionDefault('model', None, inherit=True),
        OptionDefault('abstract', False, inherit=False),
        OptionDefault('strategy', enums.CREATE_STRATEGY, inherit=True),
        OptionDefault('inline_args', (), inherit=True),
        OptionDefault('exclude', (), inherit=True),
        OptionDefault('rename', {}, inherit=True),
    ]
def _fill_from_meta(self, meta, base_meta):
    """Copy every known option from `meta`/`base_meta` onto self,
    rejecting unknown Meta attributes."""
    # Exclude private/protected fields from the meta
    if meta is None:
        meta_attrs = {}
    else:
        meta_attrs = dict(
            (k, v)
            for (k, v) in vars(meta).items()
            if not k.startswith('_')
        )
    for option in self._build_default_options():
        assert not hasattr(self, option.name), "Can't override field %s." % option.name
        value = option.apply(meta, base_meta)
        # Consume the attribute so leftovers can be reported below.
        meta_attrs.pop(option.name, None)
        setattr(self, option.name, value)
    if meta_attrs:
        # Some attributes in the Meta aren't allowed here
        raise TypeError(
            "'class Meta' for %r got unknown attribute(s) %s"
            % (self.factory, ','.join(sorted(meta_attrs.keys()))))
def contribute_to_class(self, factory, meta=None, base_meta=None, base_factory=None, params=None):
    """Wire these options onto `factory`: resolve Meta options, collect
    declarations from the MRO and the class body, and parse them."""
    self.factory = factory
    self.base_factory = base_factory

    self._fill_from_meta(meta=meta, base_meta=base_meta)

    self.model = self.get_model_class()
    if self.model is None:
        # No model means the factory can't be instantiated directly.
        self.abstract = True

    self.counter_reference = self._get_counter_reference()

    # Scan the inheritance chain, starting from the furthest point,
    # excluding the current class, to retrieve all declarations.
    for parent in reversed(self.factory.__mro__[1:]):
        if not hasattr(parent, '_meta'):
            continue
        self.base_declarations.update(parent._meta.base_declarations)
        self.parameters.update(parent._meta.parameters)

    # Declarations on the class body itself override inherited ones.
    for k, v in vars(self.factory).items():
        if self._is_declaration(k, v):
            self.base_declarations[k] = v

    if params is not None:
        for k, v in utils.sort_ordered_objects(vars(params).items(), getter=lambda item: item[1]):
            if not k.startswith('_'):
                self.parameters[k] = declarations.SimpleParameter.wrap(v)

    self._check_parameter_dependencies(self.parameters)

    self.pre_declarations, self.post_declarations = builder.parse_declarations(self.declarations)
def _get_counter_reference(self):
    """Identify which factory should be used for a shared counter."""
    base = self.base_factory
    # Share the parent's counter only when both factories target concrete,
    # related models (our model subclassing the parent's model); in every
    # other case this factory owns its counter.
    if self.model is None or base is None:
        return self
    if base._meta.model is None:
        return self
    if not issubclass(self.model, base._meta.model):
        return self
    return base._meta.counter_reference
def _initialize_counter(self):
    """Initialize our counter pointer.

    If we're the top-level factory, instantiate a new counter;
    otherwise, point to the top-level factory's counter.
    """
    if self._counter is not None:
        return  # already initialized
    if self.counter_reference is self:
        # We own the counter: create it, seeded by the factory.
        self._counter = _Counter(seq=self.factory._setup_next_sequence())
        return
    # Delegate initialization upwards, then share the owner's counter object.
    self.counter_reference._initialize_counter()
    self._counter = self.counter_reference._counter
def next_sequence(self):
    """Retrieve a new sequence ID.

    This will call, in order:
    - next_sequence from the base factory, if provided
    - _setup_next_sequence, if this is the 'toplevel' factory and the
      sequence counter wasn't initialized yet; then increase it.
    """
    self._initialize_counter()
    return self._counter.next()
def reset_sequence(self, value=None, force=False):
    """Reset the shared counter to `value` (or the factory's initial seed).

    Resetting from a descendant factory is refused unless force=True,
    since it would silently affect sibling factories sharing the counter.
    """
    self._initialize_counter()
    if self.counter_reference is not self and not force:
        raise ValueError(
            "Can't reset a sequence on descendant factory %r; reset sequence on %r or use `force=True`."
            % (self.factory, self.counter_reference.factory))
    if value is None:
        value = self.counter_reference.factory._setup_next_sequence()
    self._counter.reset(value)
def prepare_arguments(self, attributes):
    """Convert an attributes dict to a (args, kwargs) tuple."""
    # 1. Extension point: let the factory adjust the raw kwargs first.
    kwargs = self.factory._adjust_kwargs(**dict(attributes))
    # 2. Remove hidden entries: excluded fields, Params, and SKIP markers.
    cleaned = {}
    for key, value in kwargs.items():
        if key in self.exclude or key in self.parameters:
            continue
        if value is declarations.SKIP:
            continue
        cleaned[key] = value
    # 3. Apply field renames.
    for old_name, new_name in self.rename.items():
        if old_name in cleaned:
            cleaned[new_name] = cleaned.pop(old_name)
    # 4. Extract positional (inline) arguments, in declared order.
    args = tuple(cleaned.pop(arg_name) for arg_name in self.inline_args)
    return args, cleaned
def instantiate(self, step, args, kwargs):
    """Instantiate the model according to the step's strategy.

    BUILD -> factory._build, CREATE -> factory._create,
    STUB -> a plain StubObject holding the kwargs.
    """
    strategy = step.builder.strategy
    model = self.get_model_class()
    if strategy == enums.BUILD_STRATEGY:
        return self.factory._build(model, *args, **kwargs)
    if strategy == enums.CREATE_STRATEGY:
        return self.factory._create(model, *args, **kwargs)
    assert strategy == enums.STUB_STRATEGY
    return StubObject(**kwargs)
def use_postgeneration_results(self, step, instance, results):
    """Forward post-generation results to the factory's hook."""
    created = step.builder.strategy == enums.CREATE_STRATEGY
    self.factory._after_postgeneration(
        instance,
        create=created,
        results=results,
    )
def _is_declaration(self, name, value):
    """Determine whether a class attribute is a field value declaration.

    classmethods/staticmethods are behaviour, never declarations; anything
    carrying a builder phase always is; otherwise, public (non-underscore)
    names count as declarations.
    """
    if isinstance(value, (classmethod, staticmethod)):
        return False
    if enums.get_builder_phase(value):
        # All objects with a defined 'builder phase' are declarations.
        return True
    return not name.startswith("_")
def _check_parameter_dependencies(self, parameters):
    """Find out in what order parameters should be called.

    Returns a dict mapping each parameter name to the set of parameter
    names that directly depend on it.

    Raises:
        errors.CyclicDefinitionError: if any parameter ends up (even
        indirectly) among its own dependencies.
    """
    # Warning: parameters only provide reverse dependencies; we reverse them into standard dependencies.
    # deep_revdeps: set of fields a field depend indirectly upon
    deep_revdeps = collections.defaultdict(set)
    # Actual, direct dependencies
    deps = collections.defaultdict(set)
    for name, parameter in parameters.items():
        if isinstance(parameter, declarations.Parameter):
            field_revdeps = parameter.get_revdeps(parameters)
            if not field_revdeps:
                continue
            # Transitive reverse-deps: union of each direct revdep's own
            # transitive set, plus the direct revdeps themselves.
            deep_revdeps[name] = set.union(*(deep_revdeps[dep] for dep in field_revdeps))
            deep_revdeps[name] |= set(field_revdeps)
            for dep in field_revdeps:
                deps[dep].add(name)
    # Check for cyclical dependencies: a name appearing in its own
    # transitive reverse-dependency set closes a cycle.
    cyclic = [name for name, field_deps in deep_revdeps.items() if name in field_deps]
    if cyclic:
        raise errors.CyclicDefinitionError(
            "Cyclic definition detected on %r; Params around %s"
            % (self.factory, ', '.join(cyclic)))
    return deps
def get_model_class(self):
    """Extension point for loading model classes.

    This can be overridden in framework-specific subclasses to hook into
    existing model repositories, for instance.  The base implementation
    simply returns the statically configured model.
    """
    return self.model
def __str__(self):
    """Readable description: this options class and its owning factory class."""
    return "<{0} for {1}>".format(
        self.__class__.__name__, self.factory.__class__.__name__)
def __repr__(self):
    # Debug representation deliberately mirrors __str__.
    return str(self)
# Factory base classes
class _Counter(object):
    """Simple, naive monotonically-increasing counter.

    Attributes:
        seq (int): the value the next call to next() will return.
    """
    def __init__(self, seq):
        self.seq = seq

    def next(self):
        """Return the current value, then advance by one."""
        current = self.seq
        self.seq = current + 1
        return current

    def reset(self, next_value=0):
        """Restart the counter so next() returns ``next_value``."""
        self.seq = next_value
class BaseFactory(object):
    """Factory base support for sequences, attributes and stubs."""
    # Backwards compatibility: expose the exception classes on the factory.
    UnknownStrategy = errors.UnknownStrategy
    UnsupportedStrategy = errors.UnsupportedStrategy

    def __new__(cls, *args, **kwargs):
        """Would be called if trying to instantiate the class."""
        # Factories are used via classmethods only; direct instantiation
        # is always a user error.
        raise errors.FactoryError('You cannot instantiate BaseFactory')

    # Placeholder options; subclasses get a real FactoryOptions from the
    # metaclass machinery.
    _meta = FactoryOptions()

    # ID to use for the next 'declarations.Sequence' attribute.
    _counter = None

    @classmethod
    def reset_sequence(cls, value=None, force=False):
        """Reset the sequence counter.

        Args:
            value (int or None): the new 'next' sequence value; if None,
                recompute the next value from _setup_next_sequence().
            force (bool): whether to force-reset parent sequence counters
                in a factory inheritance chain.
        """
        cls._meta.reset_sequence(value, force=force)

    @classmethod
    def _setup_next_sequence(cls):
        """Set up an initial sequence value for Sequence attributes.

        Returns:
            int: the first available ID to use for instances of this factory.
        """
        return 0

    @classmethod
    def attributes(cls, create=False, extra=None):
        """Build a dict of attribute values, respecting declaration order.

        Deprecated; kept for backwards compatibility only.

        The process is:
        - Handle 'orderless' attributes, overriding defaults with provided
          kwargs when applicable
        - Handle ordered attributes, overriding them with provided kwargs when
          applicable; the current list of computed attributes is available
          to the currently processed object.
        """
        warnings.warn(
            "Usage of Factory.attributes() is deprecated.",
            DeprecationWarning,
            stacklevel=2,
        )
        # NOTE: this local name shadows the module-level `declarations`
        # import for the rest of this method.
        declarations = cls._meta.pre_declarations.as_dict()
        declarations.update(extra or {})
        from . import helpers
        return helpers.make_factory(dict, **declarations)

    @classmethod
    def declarations(cls, extra_defs=None):
        """Retrieve a copy of the declared attributes.

        Deprecated; use Factory._meta.pre_declarations instead.

        Args:
            extra_defs (dict): additional definitions to insert into the
                retrieved DeclarationDict.
        """
        warnings.warn(
            "Factory.declarations is deprecated; use Factory._meta.pre_declarations instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        decls = cls._meta.pre_declarations.as_dict()
        decls.update(extra_defs or {})
        return decls

    @classmethod
    def _adjust_kwargs(cls, **kwargs):
        """Extension point for custom kwargs adjustment."""
        return kwargs

    @classmethod
    def _generate(cls, strategy, params):
        """generate the object.

        Args:
            params (dict): attributes to use for generating the object
            strategy: the strategy to use
        """
        if cls._meta.abstract:
            raise errors.FactoryError(
                "Cannot generate instances of abstract factory %(f)s; "
                "Ensure %(f)s.Meta.model is set and %(f)s.Meta.abstract "
                "is either not set or False." % dict(f=cls.__name__))
        step = builder.StepBuilder(cls._meta, params, strategy)
        return step.build()

    @classmethod
    def _after_postgeneration(cls, instance, create, results=None):
        """Hook called after post-generation declarations have been handled.

        Args:
            instance (object): the generated object
            create (bool): whether the strategy was 'build' or 'create'
            results (dict or None): result of post-generation declarations
        """
        pass

    @classmethod
    def _build(cls, model_class, *args, **kwargs):
        """Actually build an instance of the model_class.

        Customization point, will be called once the full set of args and kwargs
        has been computed.

        Args:
            model_class (type): the class for which an instance should be
                built
            args (tuple): arguments to use when building the class
            kwargs (dict): keyword arguments to use when building the class
        """
        return model_class(*args, **kwargs)

    @classmethod
    def _create(cls, model_class, *args, **kwargs):
        """Actually create an instance of the model_class.

        Customization point, will be called once the full set of args and kwargs
        has been computed.

        Args:
            model_class (type): the class for which an instance should be
                created
            args (tuple): arguments to use when creating the class
            kwargs (dict): keyword arguments to use when creating the class
        """
        return model_class(*args, **kwargs)

    @classmethod
    def build(cls, **kwargs):
        """Build an instance of the associated class, with overriden attrs."""
        return cls._generate(enums.BUILD_STRATEGY, kwargs)

    @classmethod
    def build_batch(cls, size, **kwargs):
        """Build a batch of instances of the given class, with overriden attrs.

        Args:
            size (int): the number of instances to build

        Returns:
            object list: the built instances
        """
        return [cls.build(**kwargs) for _ in range(size)]

    @classmethod
    def create(cls, **kwargs):
        """Create an instance of the associated class, with overriden attrs."""
        return cls._generate(enums.CREATE_STRATEGY, kwargs)

    @classmethod
    def create_batch(cls, size, **kwargs):
        """Create a batch of instances of the given class, with overriden attrs.

        Args:
            size (int): the number of instances to create

        Returns:
            object list: the created instances
        """
        return [cls.create(**kwargs) for _ in range(size)]

    @classmethod
    def stub(cls, **kwargs):
        """Retrieve a stub of the associated class, with overriden attrs.

        This will return an object whose attributes are those defined in this
        factory's declarations or in the extra kwargs.
        """
        return cls._generate(enums.STUB_STRATEGY, kwargs)

    @classmethod
    def stub_batch(cls, size, **kwargs):
        """Stub a batch of instances of the given class, with overriden attrs.

        Args:
            size (int): the number of instances to stub

        Returns:
            object list: the stubbed instances
        """
        return [cls.stub(**kwargs) for _ in range(size)]

    @classmethod
    def generate(cls, strategy, **kwargs):
        """Generate a new instance.

        The instance will be created with the given strategy (one of
        BUILD_STRATEGY, CREATE_STRATEGY, STUB_STRATEGY).

        Args:
            strategy (str): the strategy to use for generating the instance.

        Returns:
            object: the generated instance
        """
        assert strategy in (enums.STUB_STRATEGY, enums.BUILD_STRATEGY, enums.CREATE_STRATEGY)
        # Strategy names double as method names (build/create/stub).
        action = getattr(cls, strategy)
        return action(**kwargs)

    @classmethod
    def generate_batch(cls, strategy, size, **kwargs):
        """Generate a batch of instances.

        The instances will be created with the given strategy (one of
        BUILD_STRATEGY, CREATE_STRATEGY, STUB_STRATEGY).

        Args:
            strategy (str): the strategy to use for generating the instance.
            size (int): the number of instances to generate

        Returns:
            object list: the generated instances
        """
        assert strategy in (enums.STUB_STRATEGY, enums.BUILD_STRATEGY, enums.CREATE_STRATEGY)
        batch_action = getattr(cls, '%s_batch' % strategy)
        return batch_action(size, **kwargs)

    @classmethod
    def simple_generate(cls, create, **kwargs):
        """Generate a new instance.

        The instance will be either 'built' or 'created'.

        Args:
            create (bool): whether to 'build' or 'create' the instance.

        Returns:
            object: the generated instance
        """
        strategy = enums.CREATE_STRATEGY if create else enums.BUILD_STRATEGY
        return cls.generate(strategy, **kwargs)

    @classmethod
    def simple_generate_batch(cls, create, size, **kwargs):
        """Generate a batch of instances.

        These instances will be either 'built' or 'created'.

        Args:
            size (int): the number of instances to generate
            create (bool): whether to 'build' or 'create' the instances.

        Returns:
            object list: the generated instances
        """
        strategy = enums.CREATE_STRATEGY if create else enums.BUILD_STRATEGY
        return cls.generate_batch(strategy, size, **kwargs)
# Note: we're calling str() on the class name to avoid issues with Py2's type() expecting bytes
# instead of unicode.
Factory = FactoryMetaClass(str('Factory'), (BaseFactory,), {
'Meta': BaseMeta,
'__doc__': """Factory base with build and create support.
This class has the ability to support multiple ORMs by using custom creation
functions.
""",
})
# Backwards compatibility
Factory.AssociatedClassError = errors.AssociatedClassError
class StubObject(object):
    """A generic container: every keyword argument becomes an attribute."""
    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)
class StubFactory(Factory):
    """Factory whose only supported strategy is stubbing."""
    class Meta:
        strategy = enums.STUB_STRATEGY
        model = StubObject

    @classmethod
    def build(cls, **kwargs):
        # Building a stub factory is the same as stubbing it.
        return cls.stub(**kwargs)

    @classmethod
    def create(cls, **kwargs):
        # Stubs have no persistence layer to create into.
        raise errors.UnsupportedStrategy()
class BaseDictFactory(Factory):
    """Factory for dictionary-like classes."""
    class Meta:
        abstract = True

    @classmethod
    def _build(cls, model_class, *args, **kwargs):
        # Positional args make no sense for a mapping: reject them.
        if args:
            raise ValueError(
                "DictFactory %r does not support Meta.inline_args." % cls)
        return model_class(**kwargs)

    @classmethod
    def _create(cls, model_class, *args, **kwargs):
        # Creating a dict is identical to building one.
        return cls._build(model_class, *args, **kwargs)
class DictFactory(BaseDictFactory):
    """Concrete dict factory producing plain ``dict`` instances."""
    class Meta:
        model = dict
class BaseListFactory(Factory):
    """Factory for list-like classes."""
    class Meta:
        abstract = True

    @classmethod
    def _build(cls, model_class, *args, **kwargs):
        if args:
            raise ValueError(
                "ListFactory %r does not support Meta.inline_args." % cls)
        # Field names are stringified indexes ('0', '1', ...); order the
        # values numerically.  Sorting keeps the behaviour explicit even
        # though dicts preserve insertion order on Python >= 3.6.
        values = [kwargs[key] for key in sorted(kwargs, key=int)]
        return model_class(values)

    @classmethod
    def _create(cls, model_class, *args, **kwargs):
        # Creating a list is identical to building one.
        return cls._build(model_class, *args, **kwargs)
class ListFactory(BaseListFactory):
    """Concrete list factory producing plain ``list`` instances."""
    class Meta:
        model = list
def use_strategy(new_strategy):
    """Class decorator forcing the use of a different strategy.

    This is an alternative to setting default_strategy in the class definition.
    """
    def decorator(klass):
        klass._meta.strategy = new_strategy
        return klass
    return decorator
| rbarrois/factory_boy | factory/base.py | Python | mit | 25,261 |
from .drastic import nonable, init, strict, enable, disable | clemtoy/drastic | drastic/__init__.py | Python | mit | 59 |
# Build script for the `imagemagick` C extension.
# NOTE(review): `commands` is Python 2 only (removed in Python 3, where
# subprocess.check_output would be the replacement) — this script assumes
# a Python 2 interpreter.
from distutils.core import setup, Extension
import commands
# Ask ImageMagick's `Wand-config` tool for its linker flags, e.g.
# "-L/some/dir -lMagickWand ...".
raw_libs = commands.getoutput('Wand-config --libs').split(' ')
libs = []
libdirs = ['/usr/local/lib']
for k,lib in enumerate(raw_libs):
    # '-lNAME' -> library name, '-LDIR' -> extra library search path.
    if lib[0:2] == '-l':
        libs.append(lib[2:])
    elif lib[0:2] == '-L':
        libdirs.append(lib[2:])
imagemagick = Extension('imagemagick',
                        sources = ['imagemagick.c'],
                        include_dirs = ['/usr/local/include'],
                        library_dirs = libdirs,
                        libraries = libs)
setup (name = 'imagemagick',
    version = '1.0',
    description = 'ImageMagick procedures we need for this software.',
    long_description = 'ImageMagick procedures we need for this software.',
    ext_modules = [imagemagick],
    author = 'Douglas Webster',
)
| hsuaz/ferrox | ferrox/contrib/needed_imagemagick_calls/setup.py | Python | mit | 828 |
import os
class Config(object):
    """Base configuration shared by every environment (Flask/SQLAlchemy)."""
    DEBUG = False
    TESTING = False
    # Read at import time: a missing DATABASE_URL raises KeyError
    # immediately rather than failing at the first database query.
    SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
    SQLALCHEMY_TRACK_MODIFICATIONS = True
class ProductionConfig(Config):
    """Production settings: debugging explicitly disabled."""
    DEBUG = False
class StagingConfig(Config):
    """Staging settings: debug/development flags enabled."""
    DEBUG = True
    DEVELOPMENT = True
class DevelopmentConfig(Config):
    """Local development settings: debug/development flags enabled."""
    DEBUG = True
    DEVELOPMENT = True
class TestingConfig(Config):
    """Test-run settings: enables Flask's testing mode."""
    TESTING = True
| mdsrosa/routes_api_python | config.py | Python | mit | 419 |
# -*- coding: utf-8 -*-
"""solve problem 2"""
n = 1
m = 1
result = 0
while n < 4000000:
s = n + m
n = m
m = s
if s % 2 == 0:
result += s
print result
| zikrach/euler | euler_2.py | Python | gpl-2.0 | 183 |
import numpy as np
import cv2
def detect(img, cascade):
    """Run the cascade classifier over *img*.

    Returns an empty list when nothing is found; otherwise an (n, 4) array
    of corner coordinates (detectMultiScale yields [x, y, w, h] rows; the
    width/height columns are converted to x2/y2 in place).
    """
    found = cascade.detectMultiScale(
        img, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30))
    if len(found) == 0:
        return []
    found[:, 2:] += found[:, :2]
    return found
def detect_turned(img, cascade):
    """Detect objects at 0 and +/-90 degrees rotation.

    Returns a list of (x1, y1, x2, y2, vx, vy) tuples in the coordinates of
    the original image; (vx, vy) appears to encode the detection's
    orientation vector (used downstream to align face crops — confirm).
    """
    img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    img = cv2.equalizeHist(img)
    # transpose + flip == rotate by 90 degrees (clockwise / counter-cw).
    img_t = cv2.transpose(img)
    img_cw = cv2.flip(img_t, 1)
    img_ccw = cv2.flip(img_t, 0)
    r = detect(img, cascade)
    r_cw = detect(img_cw, cascade)
    r_ccw = detect(img_ccw, cascade)
    h, w = img.shape[:2]
    rects = []
    # Map each rotated detection back into original-image coordinates.
    rects += [(x1, y1, x2, y2, 1, 0) for x1, y1, x2, y2 in r]
    rects += [(y1, h-x1-1, y2, h-x2-1, 0, -1) for x1, y1, x2, y2 in r_cw]
    rects += [(w-y1-1, x1, w-y2-1, x2, 0, 1) for x1, y1, x2, y2 in r_ccw]
    return rects
def process_image(fn, cascade, extract_faces=True):
    """Detect objects in the image file *fn* with *cascade*.

    Returns:
        (small, rects, faces): the annotated down-scaled preview image, an
        (n, 6) float32 array of [x1, y1, x2, y2, vx, vy] rows mapped back
        to full-resolution coordinates, and a list of square face crops
        (empty unless extract_faces).
    """
    img = cv2.imread(fn)
    h, w = img.shape[:2]
    # Detect on a copy no larger than 512px on the long side, for speed.
    scale = max(h, w) / 512.0
    small = cv2.resize(img, (int(w/scale), int(h/scale)), interpolation=cv2.INTER_AREA)
    rects = detect_turned(small, cascade)
    # Annotate the preview: rectangle, origin-corner dot, and index label.
    for i, (x1, y1, x2, y2, vx, vy) in enumerate(rects):
        cv2.rectangle(small, (x1, y1), (x2, y2), (0, 255, 0))
        cv2.circle(small, (x1, y1), 2, (0, 0, 255), -1)
        cv2.putText(small, str(i), ((x1+x2)/2, (y1+y2)/2), cv2.FONT_HERSHEY_PLAIN, 1.0, (0, 255, 0))
    # Map detection coordinates back to the full-resolution image.
    rects = np.float32(rects).reshape(-1,6)
    rects[:,:4] = np.around(rects[:,:4]*scale)
    faces = []
    if extract_faces:
        # BUGFIX: removed the unused `path, name, ext = splitfn(fn)` line.
        # Besides being dead code, `splitfn` is only imported inside the
        # __main__ guard, so that line raised NameError whenever this
        # module was imported as a library.
        face_sz = 256
        for i, r in enumerate(rects):
            p1, p2, u = r.reshape(3, 2)
            # v is perpendicular to the orientation vector u.
            v = np.float32( [-u[1], u[0]] )
            w = np.abs(p2-p1).max()
            fscale = w / face_sz
            p0 = 0.5*(p1+p2 - w*(u+v))
            M = np.float32([u*fscale, v*fscale, p0]).T
            face = cv2.warpAffine(img, M, (face_sz, face_sz), flags=cv2.WARP_INVERSE_MAP | cv2.INTER_AREA)
            faces.append(face)
    return small, rects, faces
if __name__ == '__main__':
    import sys
    import getopt
    from glob import glob
    from common import splitfn, image_extensions

    # Usage: obj_detect.py [--cascade=FILE] [--outdir=DIR] [image masks...]
    args, img_args = getopt.getopt(sys.argv[1:], '', ['cascade=', 'outdir='])
    args = dict(args)
    cascade_fn = args.get('--cascade', "../../data/haarcascades/haarcascade_frontalface_alt.xml")
    outdir = args.get('--outdir')

    # Expand the glob masks into a list of image files (default: lena).
    img_list = []
    if len(img_args) == 0:
        img_list = ['../cpp/lena.jpg']
    else:
        for mask in img_args:
            img_list.extend(glob(mask))
    img_list = [fn for fn in img_list if splitfn(fn)[-1].lower() in image_extensions]

    cascade = cv2.CascadeClassifier(cascade_fn)
    for i, fn in enumerate(img_list):
        # Python 2 print statement: trailing comma keeps the line open for
        # the "- N object(s) found" suffix below.
        print '%d / %d %s' % (i+1, len(img_list), fn),
        vis, rects, faces = process_image(fn, cascade)
        # Save the annotated preview and each face crop if --outdir given.
        if len(faces) > 0 and outdir is not None:
            path, name, ext = splitfn(fn)
            cv2.imwrite('%s/%s_all.bmp' % (outdir, name), vis)
            for face_i, face in enumerate(faces):
                cv2.imwrite('%s/%s_obj%02d.bmp' % (outdir, name, face_i), face)
        print ' - %d object(s) found' % len(faces)
        cv2.imshow('img', vis)
        cv2.waitKey(50)
    # Wait for a final keypress before exiting.
    cv2.waitKey()
import _plotly_utils.basevalidators
class ThicknessmodeValidator(_plotly_utils.basevalidators.EnumeratedValidator):
    # Auto-generated validator for the plotly attribute
    # `histogram2dcontour.colorbar.thicknessmode`; restricts values to the
    # enumerated set {"fraction", "pixels"}.
    def __init__(
        self,
        plotly_name="thicknessmode",
        parent_name="histogram2dcontour.colorbar",
        **kwargs
    ):
        super(ThicknessmodeValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            # pop() lets callers override while supplying defaults here.
            edit_type=kwargs.pop("edit_type", "colorbars"),
            values=kwargs.pop("values", ["fraction", "pixels"]),
            **kwargs
        )
# Demo script: download a file with a tqdm progress bar.
from time import sleep
from tqdm import tqdm
import requests
url = "http://raw.githubusercontent.com/Alafazam/lecture_notes/master/Cormen%20.pdf"
# stream=True lets us write chunks (and tick the bar) as they arrive
# instead of buffering the whole file in memory.
response = requests.get(url, stream=True)
with open("10MB", "wb") as handle:
    # Total size in KB, matching the 1024-byte chunks below.
    # NOTE(review): on Python 3 '/' yields a float here — tqdm accepts
    # float totals, but '//' would give the exact chunk count; confirm.
    total_length = int(response.headers.get('content-length'))/1024
    for data in tqdm(response.iter_content(chunk_size=1024),total=total_length, leave=True, unit='KB'):
        handle.write(data)
# with open("10MB", 'wb') as f:
# 	r = requests.get(url, stream=True)
# 	for chunk in tqdm(r.iter_content()):
# 		f.write(chunk)
# from tqdm import tqdm
# for i in tqdm(range(10000)):
# 	sleep(0.01)
# sleep(0.01) | Alafazam/simple_projects | misc/test_tqdm.py | Python | mit | 615 |
# -*- coding: utf-8 -*-
"""
Exif compare unit tests.
"""
import unittest
from exif_compare import main
# pylint: disable=E1103
class ExifCompareViewsTestCase(unittest.TestCase):
    """
    Views tests.
    """
    def setUp(self):
        """
        Before each test, set up an environment.
        """
        # Flask test client: requests go through the WSGI stack without
        # starting a real server.
        self.client = main.app.test_client()

    def tearDown(self):
        """
        Get rid of unused objects after each test.
        """
        pass

    def test_mainpage(self):
        """
        Test main page redirect.
        """
        # NOTE(review): the docstring says "redirect" but the assertion
        # expects 200 OK — confirm which behaviour is intended.
        resp = self.client.get('/')
        self.assertEqual(resp.status_code, 200)
class ExifCompareUtilsTestCase(unittest.TestCase):
    """
    Utility functions tests (placeholder: no utility tests written yet).
    """
    def setUp(self):
        """
        Before each test, set up an environment (nothing to do yet).
        """
        pass

    def tearDown(self):
        """
        Get rid of unused objects after each test (nothing to do yet).
        """
        pass
def suite():
    """
    Build the default test suite containing all test cases.

    Returns:
        unittest.TestSuite: suite with the views and utils test cases.
    """
    # Use a local name that does not shadow this function itself.
    # NOTE(review): unittest.makeSuite is deprecated since Python 3.11;
    # defaultTestLoader.loadTestsFromTestCase is the modern spelling.
    test_suite = unittest.TestSuite()
    test_suite.addTest(unittest.makeSuite(ExifCompareViewsTestCase))
    test_suite.addTest(unittest.makeSuite(ExifCompareUtilsTestCase))
    return test_suite
if __name__ == '__main__':
    # Discover and run the tests in this module when executed directly.
    unittest.main()
| sargo/exif-compare | src/exif_compare/tests.py | Python | mit | 1,237 |
import copy
import os
import pytest
import salt.modules.grains as grainsmod
import salt.utils.dictupdate as dictupdate
from salt.exceptions import SaltException
from salt.utils.odict import OrderedDict
from tests.support.mock import MagicMock, patch
from tests.support.runtests import RUNTIME_VARS
@pytest.fixture
def configure_loader_modules():
    """Provide the grains module's ``__opts__``/``__salt__`` dunders.

    Points conf_file/cachedir at per-run temp paths and stubs out
    saltutil.refresh_grains so the tests never touch a real minion.
    """
    conf_file = os.path.join(RUNTIME_VARS.TMP, "__salt_test_grains")
    cachedir = os.path.join(RUNTIME_VARS.TMP, "__salt_test_grains_cache_dir")
    # exist_ok avoids the check-then-create race of the previous
    # `if not os.path.isdir(...)` guard.
    os.makedirs(cachedir, exist_ok=True)
    return {
        grainsmod: {
            "__opts__": {"conf_file": conf_file, "cachedir": cachedir},
            "__salt__": {"saltutil.refresh_grains": MagicMock()},
        }
    }
def test_filter_by():
    """Exercise grains.filter_by: grain selection, defaults, merge dicts,
    wildcard/sequence keys, list-valued grains, and the `base` argument."""
    with patch.dict(
        grainsmod.__grains__,
        {"os_family": "MockedOS", "1": "1", "2": "2", "roles": ["A", "B"]},
    ):
        dict1 = {"A": "B", "C": {"D": {"E": "F", "G": "H"}}}
        dict2 = {
            "default": {"A": "B", "C": {"D": "E"}},
            "1": {"A": "X"},
            "2": {"C": {"D": "H"}},
            "MockedOS": {"A": "Z"},
        }
        mdict1 = {"D": {"E": "I"}, "J": "K"}
        mdict2 = {"A": "Z"}
        mdict3 = {"C": {"D": "J"}}

        # test None result with non existent grain and no default
        res = grainsmod.filter_by(dict1, grain="xxx")
        assert res is None
        # test None result with os_family grain and no matching result
        res = grainsmod.filter_by(dict1)
        assert res is None
        # test with non existent grain, and a given default key
        res = grainsmod.filter_by(dict1, grain="xxx", default="C")
        assert res == {"D": {"E": "F", "G": "H"}}
        # add a merge dictionary, F disappears
        res = grainsmod.filter_by(dict1, grain="xxx", merge=mdict1, default="C")
        assert res == {"D": {"E": "I", "G": "H"}, "J": "K"}
        # dict1 was altered, reestablish
        dict1 = {"A": "B", "C": {"D": {"E": "F", "G": "H"}}}
        # default is not present in dict1, check we only have merge in result
        res = grainsmod.filter_by(dict1, grain="xxx", merge=mdict1, default="Z")
        assert res == mdict1
        # default is not present in dict1, and no merge, should get None
        res = grainsmod.filter_by(dict1, grain="xxx", default="Z")
        assert res is None
        # test giving a list as merge argument raise exception
        pytest.raises(SaltException, grainsmod.filter_by, dict1, "xxx", ["foo"], "C")
        # Now, re-test with an existing grain (os_family), but with no match.
        res = grainsmod.filter_by(dict1)
        assert res is None
        res = grainsmod.filter_by(dict1, default="C")
        assert res == {"D": {"E": "F", "G": "H"}}
        res = grainsmod.filter_by(dict1, merge=mdict1, default="C")
        assert res == {"D": {"E": "I", "G": "H"}, "J": "K"}
        # dict1 was altered, reestablish
        dict1 = {"A": "B", "C": {"D": {"E": "F", "G": "H"}}}
        res = grainsmod.filter_by(dict1, merge=mdict1, default="Z")
        assert res == mdict1
        res = grainsmod.filter_by(dict1, default="Z")
        assert res is None
        # this one is in fact a traceback in updatedict, merging a string
        # with a dictionary
        pytest.raises(TypeError, grainsmod.filter_by, dict1, merge=mdict1, default="A")
        # Now, re-test with a matching grain.
        dict1 = {"A": "B", "MockedOS": {"D": {"E": "F", "G": "H"}}}
        res = grainsmod.filter_by(dict1)
        assert res == {"D": {"E": "F", "G": "H"}}
        res = grainsmod.filter_by(dict1, default="A")
        assert res == {"D": {"E": "F", "G": "H"}}
        res = grainsmod.filter_by(dict1, merge=mdict1, default="A")
        assert res == {"D": {"E": "I", "G": "H"}, "J": "K"}
        # dict1 was altered, reestablish
        dict1 = {"A": "B", "MockedOS": {"D": {"E": "F", "G": "H"}}}
        res = grainsmod.filter_by(dict1, merge=mdict1, default="Z")
        assert res == {"D": {"E": "I", "G": "H"}, "J": "K"}
        # dict1 was altered, reestablish
        dict1 = {"A": "B", "MockedOS": {"D": {"E": "F", "G": "H"}}}
        res = grainsmod.filter_by(dict1, default="Z")
        assert res == {"D": {"E": "F", "G": "H"}}
        # Test when grain value is a list
        dict1 = {"A": "B", "C": {"D": {"E": "F", "G": "H"}}}
        res = grainsmod.filter_by(dict1, grain="roles", default="C")
        assert res == "B"
        # Test default when grain value is a list
        dict1 = {"Z": "B", "C": {"D": {"E": "F", "G": "H"}}}
        res = grainsmod.filter_by(dict1, grain="roles", default="C")
        assert res == {"D": {"E": "F", "G": "H"}}
        # Test with wildcard pattern in the lookup_dict keys
        dict1 = {"*OS": "B", "C": {"D": {"E": "F", "G": "H"}}}
        res = grainsmod.filter_by(dict1)
        assert res == "B"
        # Test with non-strings in lookup_dict keys
        # Issue #38094
        dict1 = {1: 2, 3: {4: 5}, "*OS": "B"}
        res = grainsmod.filter_by(dict1)
        assert res == "B"
        # Test with sequence pattern with roles
        dict1 = {"Z": "B", "[BC]": {"D": {"E": "F", "G": "H"}}}
        res = grainsmod.filter_by(dict1, grain="roles", default="Z")
        assert res == {"D": {"E": "F", "G": "H"}}

        # Base tests
        # NOTE: these may fail to detect errors if dictupdate.update() is broken
        # but then the unit test for dictupdate.update() should fail and expose
        # that. The purpose of these tests is it validate the logic of how
        # in filter_by() processes its arguments.
        # Test with just the base
        res = grainsmod.filter_by(dict2, grain="xxx", default="xxx", base="default")
        assert res == dict2["default"]
        # Test the base with the OS grain look-up
        res = grainsmod.filter_by(dict2, default="xxx", base="default")
        assert res == dictupdate.update(
            copy.deepcopy(dict2["default"]), dict2["MockedOS"]
        )
        # Test the base with default
        res = grainsmod.filter_by(dict2, grain="xxx", base="default")
        assert res == dict2["default"]
        res = grainsmod.filter_by(dict2, grain="1", base="default")
        assert res == dictupdate.update(copy.deepcopy(dict2["default"]), dict2["1"])
        res = grainsmod.filter_by(dict2, base="default", merge=mdict2)
        assert res == dictupdate.update(
            dictupdate.update(copy.deepcopy(dict2["default"]), dict2["MockedOS"]),
            mdict2,
        )
        res = grainsmod.filter_by(dict2, base="default", merge=mdict3)
        assert res == dictupdate.update(
            dictupdate.update(copy.deepcopy(dict2["default"]), dict2["MockedOS"]),
            mdict3,
        )
def test_append_not_a_list():
    """Appending to a non-list grain returns an error string, grain untouched."""
    with patch.dict(grainsmod.__grains__, {"b": "bval"}):
        res = grainsmod.append("b", "d")
        assert res == "The key b is not a valid list"
        assert grainsmod.__grains__ == {"b": "bval"}
    # Failing append to an existing dict
    with patch.dict(grainsmod.__grains__, {"b": {"b1": "bval1"}}):
        res = grainsmod.append("b", "d")
        assert res == "The key b is not a valid list"
        assert grainsmod.__grains__ == {"b": {"b1": "bval1"}}


def test_append_already_in_list():
    """Appending a value already present is reported and is a no-op."""
    with patch.dict(grainsmod.__grains__, {"a_list": ["a", "b", "c"], "b": "bval"}):
        res = grainsmod.append("a_list", "b")
        assert res == "The val b was already in the list a_list"
        assert grainsmod.__grains__ == {"a_list": ["a", "b", "c"], "b": "bval"}


def test_append_ok():
    """Successful appends: existing list, new key, and convert=True coercion."""
    with patch.dict(grainsmod.__grains__, {"a_list": ["a", "b", "c"], "b": "bval"}):
        res = grainsmod.append("a_list", "d")
        assert res == {"a_list": ["a", "b", "c", "d"]}
        assert grainsmod.__grains__ == {"a_list": ["a", "b", "c", "d"], "b": "bval"}
    with patch.dict(grainsmod.__grains__, {"b": "bval"}):
        res = grainsmod.append("a_list", "d")
        assert res == {"a_list": ["d"]}
        assert grainsmod.__grains__ == {"a_list": ["d"], "b": "bval"}
    # convert=True wraps the existing scalar/dict value in a list first.
    with patch.dict(grainsmod.__grains__, {"b": "bval"}):
        res = grainsmod.append("b", "d", convert=True)
        assert res == {"b": ["bval", "d"]}
        assert grainsmod.__grains__ == {"b": ["bval", "d"]}
    with patch.dict(grainsmod.__grains__, {"b": {"b1": "bval1"}}):
        res = grainsmod.append("b", "d", convert=True)
        assert res == {"b": [{"b1": "bval1"}, "d"]}
        assert grainsmod.__grains__ == {"b": [{"b1": "bval1"}, "d"]}


def test_append_nested_not_a_list():
    """Colon-delimited nested appends to non-list values fail the same way."""
    with patch.dict(grainsmod.__grains__, {"a": {"b": "bval"}}):
        res = grainsmod.append("a:b", "d")
        assert res == "The key a:b is not a valid list"
        assert grainsmod.__grains__ == {"a": {"b": "bval"}}
    with patch.dict(grainsmod.__grains__, {"a": {"b": {"b1": "bval1"}}}):
        res = grainsmod.append("a:b", "d")
        assert res == "The key a:b is not a valid list"
        assert grainsmod.__grains__ == {"a": {"b": {"b1": "bval1"}}}


def test_append_nested_already_in_list():
    """Nested append of an already-present value is a reported no-op."""
    with patch.dict(
        grainsmod.__grains__, {"a": {"a_list": ["a", "b", "c"], "b": "bval"}}
    ):
        res = grainsmod.append("a:a_list", "b")
        assert res == "The val b was already in the list a:a_list"
        assert grainsmod.__grains__ == {"a": {"a_list": ["a", "b", "c"], "b": "bval"}}


def test_append_nested_ok():
    """Successful nested appends, including convert=True coercion."""
    with patch.dict(
        grainsmod.__grains__, {"a": {"a_list": ["a", "b", "c"], "b": "bval"}}
    ):
        res = grainsmod.append("a:a_list", "d")
        assert res == {"a": {"a_list": ["a", "b", "c", "d"], "b": "bval"}}
        assert grainsmod.__grains__ == {
            "a": {"a_list": ["a", "b", "c", "d"], "b": "bval"}
        }
    with patch.dict(grainsmod.__grains__, {"a": {"b": "bval"}}):
        res = grainsmod.append("a:a_list", "d")
        assert res == {"a": {"a_list": ["d"], "b": "bval"}}
        assert grainsmod.__grains__ == {"a": {"a_list": ["d"], "b": "bval"}}
    with patch.dict(grainsmod.__grains__, {"a": {"b": "bval"}}):
        res = grainsmod.append("a:b", "d", convert=True)
        assert res == {"a": {"b": ["bval", "d"]}}
        assert grainsmod.__grains__ == {"a": {"b": ["bval", "d"]}}
    with patch.dict(grainsmod.__grains__, {"a": {"b": {"b1": "bval1"}}}):
        res = grainsmod.append("a:b", "d", convert=True)
        assert res == {"a": {"b": [{"b1": "bval1"}, "d"]}}
        assert grainsmod.__grains__ == {"a": {"b": [{"b1": "bval1"}, "d"]}}


def test_append_to_an_element_of_a_list():
    """Appending 'into' a list element is silently ignored (returns grain)."""
    with patch.dict(grainsmod.__grains__, {"a": ["b", "c"]}):
        res = grainsmod.append("a:b", "d")
        assert res == {"a": ["b", "c"]}
        assert grainsmod.__grains__ == {"a": ["b", "c"]}
def test_set_value_already_set():
    """grains.set is a no-op (result=True) when the value already matches."""
    with patch.dict(grainsmod.__grains__, {"a": 12, "c": 8}):
        res = grainsmod.set("a", 12)
        assert res["result"]
        assert res["comment"] == "Grain is already set"
        assert grainsmod.__grains__ == {"a": 12, "c": 8}
    with patch.dict(grainsmod.__grains__, {"a": ["item", 12], "c": 8}):
        res = grainsmod.set("a", ["item", 12])
        assert res["result"]
        assert res["comment"] == "Grain is already set"
        assert grainsmod.__grains__ == {"a": ["item", 12], "c": 8}
    # Nested lookups honor a custom delimiter.
    with patch.dict(
        grainsmod.__grains__, {"a": "aval", "b": {"nested": "val"}, "c": 8}
    ):
        res = grainsmod.set("b,nested", "val", delimiter=",")
        assert res["result"]
        assert res["comment"] == "Grain is already set"
        assert grainsmod.__grains__ == {"a": "aval", "b": {"nested": "val"}, "c": 8}


def test_set_fail_replacing_existing_complex_key():
    """Without force=True, complex (dict/list) values refuse replacement."""
    with patch.dict(grainsmod.__grains__, {"a": "aval", "c": 8}):
        res = grainsmod.set("a", ["item", 12])
        assert not res["result"]
        assert (
            res["comment"] == "The key 'a' exists and the given value is a "
            "dict or a list. Use 'force=True' to overwrite."
        )
        assert grainsmod.__grains__ == {"a": "aval", "c": 8}
    with patch.dict(grainsmod.__grains__, {"a": ["item", 12], "c": 8}):
        res = grainsmod.set("a", ["item", 14])
        assert not res["result"]
        assert (
            res["comment"] == "The key 'a' exists but is a dict or a list. "
            "Use 'force=True' to overwrite."
        )
        assert grainsmod.__grains__ == {"a": ["item", 12], "c": 8}
    with patch.dict(
        grainsmod.__grains__, {"a": "aval", "b": ["l1", {"l2": ["val1"]}], "c": 8}
    ):
        res = grainsmod.set("b,l2", "val2", delimiter=",")
        assert not res["result"]
        assert (
            res["comment"] == "The key 'b,l2' exists but is a dict or a "
            "list. Use 'force=True' to overwrite."
        )
        assert grainsmod.__grains__ == {
            "a": "aval",
            "b": ["l1", {"l2": ["val1"]}],
            "c": 8,
        }


def test_set_nested_fails_replace_simple_value():
    """A nested path through a scalar value fails without force=True."""
    with patch.dict(grainsmod.__grains__, {"a": "aval", "b": "l1", "c": 8}):
        res = grainsmod.set("b,l3", "val3", delimiter=",")
        assert not res["result"]
        assert (
            res["comment"] == "The key 'b' value is 'l1', which is different from "
            "the provided key 'l3'. Use 'force=True' to overwrite."
        )
        assert grainsmod.__grains__ == {"a": "aval", "b": "l1", "c": 8}
def test_set_simple_value():
    """Setting a brand-new simple grain succeeds and reports the change."""
    with patch.dict(grainsmod.__grains__, {"a": ["b", "c"], "c": 8}):
        res = grainsmod.set("b", "bval")
        assert res["result"]
        assert res["changes"] == {"b": "bval"}
        assert grainsmod.__grains__ == {"a": ["b", "c"], "b": "bval", "c": 8}


def test_set_replace_value():
    """Replacing a simple value with another simple value succeeds."""
    with patch.dict(grainsmod.__grains__, {"a": "aval", "c": 8}):
        res = grainsmod.set("a", 12)
        assert res["result"]
        assert res["changes"] == {"a": 12}
        assert grainsmod.__grains__ == {"a": 12, "c": 8}


def test_set_None_ok():
    """Setting a new grain to None stores the None value."""
    with patch.dict(grainsmod.__grains__, {"a": "aval", "c": 8}):
        res = grainsmod.set("b", None)
        assert res["result"]
        assert res["changes"] == {"b": None}
        assert grainsmod.__grains__ == {"a": "aval", "b": None, "c": 8}


def test_set_None_ok_destructive():
    """destructive=True with None removes the key instead of storing None."""
    with patch.dict(grainsmod.__grains__, {"a": "aval", "c": 8}):
        res = grainsmod.set("b", None, destructive=True)
        assert res["result"]
        assert res["changes"] == {"b": None}
        assert grainsmod.__grains__ == {"a": "aval", "c": 8}


def test_set_None_replace_ok():
    """Replacing an existing simple value with None succeeds."""
    with patch.dict(grainsmod.__grains__, {"a": "aval", "c": 8}):
        res = grainsmod.set("a", None)
        assert res["result"]
        assert res["changes"] == {"a": None}
        assert grainsmod.__grains__ == {"a": None, "c": 8}


def test_set_None_force_destructive():
    """force+destructive with None deletes the existing key outright."""
    with patch.dict(grainsmod.__grains__, {"a": "aval", "c": 8}):
        res = grainsmod.set("a", None, force=True, destructive=True)
        assert res["result"]
        assert res["changes"] == {"a": None}
        assert grainsmod.__grains__ == {"c": 8}


def test_set_replace_value_was_complex_force():
    """force=True allows replacing a complex value with a simple one."""
    with patch.dict(grainsmod.__grains__, {"a": ["item", 12], "c": 8}):
        res = grainsmod.set("a", "aval", force=True)
        assert res["result"]
        assert res["changes"] == {"a": "aval"}
        assert grainsmod.__grains__ == {"a": "aval", "c": 8}


def test_set_complex_value_force():
    """force=True allows replacing a simple value with a complex one."""
    with patch.dict(grainsmod.__grains__, {"a": "aval", "c": 8}):
        res = grainsmod.set("a", ["item", 12], force=True)
        assert res["result"]
        assert res["changes"] == {"a": ["item", 12]}
        assert grainsmod.__grains__ == {"a": ["item", 12], "c": 8}


def test_set_nested_create():
    """A delimited path creates the intermediate dict when missing."""
    with patch.dict(grainsmod.__grains__, {"a": "aval", "c": 8}):
        res = grainsmod.set("b,nested", "val", delimiter=",")
        assert res["result"]
        assert res["changes"] == {"b": {"nested": "val"}}
        assert grainsmod.__grains__ == {"a": "aval", "b": {"nested": "val"}, "c": 8}


def test_set_nested_update_dict():
    """A delimited path updates an existing nested value in place."""
    with patch.dict(
        grainsmod.__grains__, {"a": "aval", "b": {"nested": "val"}, "c": 8}
    ):
        res = grainsmod.set("b,nested", "val2", delimiter=",")
        assert res["result"]
        assert res["changes"] == {"b": {"nested": "val2"}}
        assert grainsmod.__grains__ == {"a": "aval", "b": {"nested": "val2"}, "c": 8}


def test_set_nested_update_dict_remove_key():
    """destructive=True with None removes the nested key, keeping the parent."""
    with patch.dict(
        grainsmod.__grains__, {"a": "aval", "b": {"nested": "val"}, "c": 8}
    ):
        res = grainsmod.set("b,nested", None, delimiter=",", destructive=True)
        assert res["result"]
        assert res["changes"] == {"b": {}}
        assert grainsmod.__grains__ == {"a": "aval", "b": {}, "c": 8}
def test_set_nested_update_dict_new_key():
with patch.dict(
grainsmod.__grains__, {"a": "aval", "b": {"nested": "val"}, "c": 8}
):
res = grainsmod.set("b,b2", "val2", delimiter=",")
assert res["result"]
assert res["changes"] == {"b": {"b2": "val2", "nested": "val"}}
assert grainsmod.__grains__ == {
"a": "aval",
"b": {"b2": "val2", "nested": "val"},
"c": 8,
}
def test_set_nested_list_replace_key():
with patch.dict(
grainsmod.__grains__, {"a": "aval", "b": ["l1", "l2", "l3"], "c": 8}
):
res = grainsmod.set("b,l2", "val2", delimiter=",")
assert res["result"]
assert res["changes"] == {"b": ["l1", {"l2": "val2"}, "l3"]}
assert grainsmod.__grains__ == {
"a": "aval",
"b": ["l1", {"l2": "val2"}, "l3"],
"c": 8,
}
def test_set_nested_list_update_dict_key():
with patch.dict(
grainsmod.__grains__, {"a": "aval", "b": ["l1", {"l2": "val1"}], "c": 8}
):
res = grainsmod.set("b,l2", "val2", delimiter=",")
assert res["result"]
assert res["changes"] == {"b": ["l1", {"l2": "val2"}]}
assert grainsmod.__grains__ == {
"a": "aval",
"b": ["l1", {"l2": "val2"}],
"c": 8,
}
def test_set_nested_list_update_dict_key_overwrite():
with patch.dict(
grainsmod.__grains__, {"a": "aval", "b": ["l1", {"l2": ["val1"]}], "c": 8}
):
res = grainsmod.set("b,l2", "val2", delimiter=",", force=True)
assert res["result"]
assert res["changes"] == {"b": ["l1", {"l2": "val2"}]}
assert grainsmod.__grains__ == {
"a": "aval",
"b": ["l1", {"l2": "val2"}],
"c": 8,
}
def test_set_nested_list_append_dict_key():
with patch.dict(
grainsmod.__grains__, {"a": "aval", "b": ["l1", {"l2": "val2"}], "c": 8}
):
res = grainsmod.set("b,l3", "val3", delimiter=",")
assert res["result"]
assert res["changes"] == {"b": ["l1", {"l2": "val2"}, {"l3": "val3"}]}
assert grainsmod.__grains__ == {
"a": "aval",
"b": ["l1", {"l2": "val2"}, {"l3": "val3"}],
"c": 8,
}
def test_set_nested_existing_value_is_the_key():
with patch.dict(grainsmod.__grains__, {"a": "aval", "b": "l3", "c": 8}):
res = grainsmod.set("b,l3", "val3", delimiter=",")
assert res["result"]
assert res["changes"] == {"b": {"l3": "val3"}}
assert grainsmod.__grains__ == {"a": "aval", "b": {"l3": "val3"}, "c": 8}
def test_set_nested_existing_value_overwrite():
with patch.dict(grainsmod.__grains__, {"a": "aval", "b": "l1", "c": 8}):
res = grainsmod.set("b,l3", "val3", delimiter=",", force=True)
assert res["result"]
assert res["changes"] == {"b": {"l3": "val3"}}
assert grainsmod.__grains__ == {"a": "aval", "b": {"l3": "val3"}, "c": 8}
def test_set_deeply_nested_update():
with patch.dict(
grainsmod.__grains__,
{"a": "aval", "b": {"l1": ["l21", "l22", {"l23": "l23val"}]}, "c": 8},
):
res = grainsmod.set("b,l1,l23", "val", delimiter=",")
assert res["result"]
assert res["changes"] == {"b": {"l1": ["l21", "l22", {"l23": "val"}]}}
assert grainsmod.__grains__ == {
"a": "aval",
"b": {"l1": ["l21", "l22", {"l23": "val"}]},
"c": 8,
}
def test_set_deeply_nested_create():
with patch.dict(
grainsmod.__grains__,
{"a": "aval", "b": {"l1": ["l21", "l22", {"l23": "l23val"}]}, "c": 8},
):
res = grainsmod.set("b,l1,l24,l241", "val", delimiter=",")
assert res["result"]
assert res["changes"] == {
"b": {
"l1": [
"l21",
"l22",
{"l23": "l23val"},
{"l24": {"l241": "val"}},
]
}
}
assert grainsmod.__grains__ == {
"a": "aval",
"b": {
"l1": [
"l21",
"l22",
{"l23": "l23val"},
{"l24": {"l241": "val"}},
]
},
"c": 8,
}
def test_get_ordered():
    """get() preserves insertion order by default, returning an OrderedDict."""
    with patch.dict(
        grainsmod.__grains__,
        OrderedDict(
            [
                ("a", "aval"),
                (
                    "b",
                    OrderedDict(
                        [
                            ("z", "zval"),
                            (
                                "l1",
                                ["l21", "l22", OrderedDict([("l23", "l23val")])],
                            ),
                        ]
                    ),
                ),
                ("c", 8),
            ]
        ),
    ):
        res = grainsmod.get("b")
        # Identity comparison: the result must be exactly OrderedDict
        # (``type(res) == OrderedDict`` is the same check, less idiomatically).
        assert type(res) is OrderedDict
        # Check that order really matters
        assert res == OrderedDict(
            [
                ("z", "zval"),
                ("l1", ["l21", "l22", OrderedDict([("l23", "l23val")])]),
            ]
        )
        assert res != OrderedDict(
            [
                ("l1", ["l21", "l22", OrderedDict([("l23", "l23val")])]),
                ("z", "zval"),
            ]
        )
def test_get_unordered():
    """get(ordered=False) downgrades the result to a plain dict."""
    with patch.dict(
        grainsmod.__grains__,
        OrderedDict(
            [
                ("a", "aval"),
                (
                    "b",
                    OrderedDict(
                        [
                            ("z", "zval"),
                            (
                                "l1",
                                ["l21", "l22", OrderedDict([("l23", "l23val")])],
                            ),
                        ]
                    ),
                ),
                ("c", 8),
            ]
        ),
    ):
        res = grainsmod.get("b", ordered=False)
        # Must be exactly ``dict``; isinstance() would wrongly accept
        # OrderedDict (a dict subclass), so compare the type by identity.
        assert type(res) is dict
        # Check that order doesn't matter
        assert res == OrderedDict(
            [
                ("l1", ["l21", "l22", OrderedDict([("l23", "l23val")])]),
                ("z", "zval"),
            ]
        )
def test_equals():
    """equals() compares a (possibly nested, colon-delimited) grain to a value."""
    with patch.dict(
        grainsmod.__grains__,
        OrderedDict(
            [
                ("a", "aval"),
                (
                    "b",
                    OrderedDict(
                        [
                            ("z", "zval"),
                            (
                                "l1",
                                ["l21", "l22", OrderedDict([("l23", "l23val")])],
                            ),
                        ]
                    ),
                ),
                ("c", 8),
            ]
        ),
    ):
        res = grainsmod.equals("a", "aval")
        # isinstance is the idiomatic type check; bool cannot be subclassed,
        # so this is equivalent to the previous ``type(res) == bool``.
        assert isinstance(res, bool)
        assert res
        res = grainsmod.equals("b:z", "zval")
        assert res
        res = grainsmod.equals("b:z", "aval")
        assert not res
def test_grains_setval_refresh_pillar():
    """
    Test that refresh_pillar kwarg is being passed correctly from grains.setval to saltutil.refresh_grains
    """
    # Only the mock's call arguments matter here; the previously captured but
    # never-used ``ret = ...`` assignments (lint F841) have been dropped.
    # Default: refresh_pillar is forwarded as True when not supplied.
    grainsmod.setval("test_grains_setval_refresh_pillar", "saltopotamus")
    grainsmod.__salt__["saltutil.refresh_grains"].assert_called_with(
        refresh_pillar=True
    )
    # Explicit refresh_pillar=True is forwarded unchanged.
    grainsmod.setval(
        "test_grains_setval_refresh_pillar", "saltopotamus", refresh_pillar=True
    )
    grainsmod.__salt__["saltutil.refresh_grains"].assert_called_with(
        refresh_pillar=True
    )
    # Explicit refresh_pillar=False is forwarded unchanged.
    grainsmod.setval(
        "test_grains_setval_refresh_pillar", "saltopotamus", refresh_pillar=False
    )
    grainsmod.__salt__["saltutil.refresh_grains"].assert_called_with(
        refresh_pillar=False
    )
def test_setval_unicode():
    """setvals round-trips a non-ASCII key/value through the conf grains."""
    key = "塩"  # salt
    value = "塩人生です"  # salt is life
    # Note: call setvals 2 times is important
    # 1: add key to conf grains
    # 2: update and read key from conf grains
    for _ in range(2):
        ret = grainsmod.setvals({key: value})
        assert key in ret
        assert ret[key] == value
def test_delval_single():
    """delval on a top-level key sets its value to None (key is kept)."""
    with patch.dict(
        grainsmod.__grains__, {"a": "aval", "b": {"nested": "val"}, "c": 8}
    ):
        res = grainsmod.delval("a")
        assert res["result"]
        assert res["changes"] == {"a": None}
        assert grainsmod.__grains__ == {"a": None, "b": {"nested": "val"}, "c": 8}
def test_delval_nested():
    """delval on a colon-delimited key nulls the nested value (key is kept)."""
    with patch.dict(
        grainsmod.__grains__, {"a": "aval", "b": {"nested": "val"}, "c": 8}
    ):
        res = grainsmod.delval("b:nested")
        assert res["result"]
        assert res["changes"] == {"b": {"nested": None}}
        assert grainsmod.__grains__ == {"a": "aval", "b": {"nested": None}, "c": 8}
def test_delkey_single_key():
    """delkey removes a top-level key entirely."""
    with patch.dict(
        grainsmod.__grains__, {"a": "aval", "b": {"nested": "val"}, "c": 8}
    ):
        res = grainsmod.delkey("a")
        assert res["result"]
        assert res["changes"] == {"a": None}
        assert grainsmod.__grains__ == {"b": {"nested": "val"}, "c": 8}
def test_delkey_nested_key():
    """delkey removes a nested key from its parent dict."""
    with patch.dict(
        grainsmod.__grains__, {"a": "aval", "b": {"nested": "val"}, "c": 8}
    ):
        res = grainsmod.delkey("b:nested")
        assert res["result"]
        assert res["changes"] == {"b": {}}
        assert grainsmod.__grains__ == {"a": "aval", "b": {}, "c": 8}
def test_delkey_nested_key_force_needed():
    """deleting a key that holds a dict succeeds when force=True is passed."""
    with patch.dict(
        grainsmod.__grains__, {"a": "aval", "b": {"nested": "val"}, "c": 8}
    ):
        res = grainsmod.delkey("b", force=True)
        # With force=True the "Use 'force=True'" refusal must NOT appear.
        assert res["comment"].find("Use 'force=True' to overwrite.") == -1
        assert res["result"]
        assert res["changes"] == {"b": None}
        assert grainsmod.__grains__ == {"a": "aval", "c": 8}
| saltstack/salt | tests/pytests/unit/modules/test_grains.py | Python | apache-2.0 | 26,855 |
"""All forms for the extension."""
from django.forms import ModelForm
from .models import WebResource
class WebResourceForm(ModelForm):
    """ModelForm for creating or editing a single WebResource instance."""
    class Meta:
        """Bind the form to WebResource and whitelist the editable fields."""
        model = WebResource
        # Only these user-facing attributes are exposed on the form.
        fields = ('name', 'description', 'url', 'colour', 'symbol')
| ExCiteS/geokey-webresources | geokey_webresources/forms.py | Python | mit | 321 |
import __init__ as markdown
import re
def isString(s):
    """Return True if *s* is a text object (``unicode`` or ``str``).

    Python 2 module: ``unicode`` is the Py2 builtin. The tuple form checks
    the classes in the same order as the original chained ``or``.
    """
    return isinstance(s, (unicode, str))
class Processor:
    """Minimal base class for processors.

    Stores the owning Markdown instance as ``self.markdown`` when one is
    supplied; otherwise the attribute is left unset entirely.
    """
    def __init__(self, markdown_instance=None):
        # Truthiness check (not ``is not None``): a falsy instance is also
        # treated as "no instance" and leaves the attribute unset.
        if markdown_instance:
            self.markdown = markdown_instance
class Treeprocessor(Processor):
    """
    Treeprocessors are run on the ElementTree object before serialization.
    Each Treeprocessor implements a "run" method that takes a pointer to an
    ElementTree, modifies it as necessary and returns an ElementTree
    object.
    Treeprocessors must extend markdown.Treeprocessor.
    """
    def run(self, root):
        """
        Subclasses of Treeprocessor should implement a `run` method, which
        takes a root ElementTree. This method can return another ElementTree
        object, and the existing root ElementTree will be replaced, or it can
        modify the current tree and return None.
        """
        # Intentionally a no-op: this is the base-class hook that concrete
        # treeprocessors override.
        pass
class InlineProcessor(Treeprocessor):
    """
    A Treeprocessor that traverses a tree, applying inline patterns.
    """
    def __init__ (self, md):
        self.__placeholder_prefix = markdown.INLINE_PLACEHOLDER_PREFIX
        self.__placeholder_suffix = markdown.ETX
        self.__placeholder_length = 4 + len(self.__placeholder_prefix) \
                                      + len(self.__placeholder_suffix)
        self.__placeholder_re = re.compile(markdown.INLINE_PLACEHOLDER % r'([0-9]{4})')
        self.markdown = md
    def __makePlaceholder(self, type):
        """ Generate a placeholder based on the current stash size. """
        id = "%04d" % len(self.stashed_nodes)
        hash = markdown.INLINE_PLACEHOLDER % id
        return hash, id
    def __findPlaceholder(self, data, index):
        """
        Extract id from data string, start from index
        Keyword arguments:
        * data: string
        * index: index, from which we start search
        Returns: placeholder id and string index, after the found placeholder.
        """
        m = self.__placeholder_re.search(data, index)
        if m:
            return m.group(1), m.end()
        else:
            # No placeholder found at/after index: advance by one character.
            return None, index + 1
    def __stashNode(self, node, type):
        """ Add node to stash and return its placeholder string. """
        placeholder, id = self.__makePlaceholder(type)
        self.stashed_nodes[id] = node
        return placeholder
    def __handleInline(self, data, patternIndex=0):
        """
        Process string with inline patterns and replace it
        with placeholders
        Keyword arguments:
        * data: A line of Markdown text
        * patternIndex: The index of the inlinePattern to start with
        Returns: String with placeholders.
        """
        # AtomicString is the opt-out marker: it is returned untouched.
        if not isinstance(data, markdown.AtomicString):
            startIndex = 0
            while patternIndex < len(self.markdown.inlinePatterns):
                data, matched, startIndex = self.__applyPattern(
                    self.markdown.inlinePatterns.value_for_index(patternIndex),
                    data, patternIndex, startIndex)
                if not matched:
                    patternIndex += 1
        return data
    def __processElementText(self, node, subnode, isText=True):
        """
        Process placeholders in Element.text or Element.tail
        of Elements popped from self.stashed_nodes.
        Keyword arguments:
        * node: parent node
        * subnode: processing node
        * isText: bool variable, True - it's text, False - it's tail
        Returns: None
        """
        if isText:
            text = subnode.text
            subnode.text = None
        else:
            text = subnode.tail
            subnode.tail = None
        childResult = self.__processPlaceholders(text, subnode)
        if not isText and node is not subnode:
            pos = node.getchildren().index(subnode)
            node.remove(subnode)
        else:
            pos = 0
        # Insert in reverse so repeated insert(pos, ...) keeps original order.
        childResult.reverse()
        for newChild in childResult:
            node.insert(pos, newChild)
    def __processPlaceholders(self, data, parent):
        """
        Process string with placeholders and generate ElementTree tree.
        Keyword arguments:
        * data: string with placeholders instead of ElementTree elements.
        * parent: Element, which contains processing inline data
        Returns: list with ElementTree elements with applied inline patterns.
        """
        def linkText(text):
            # Attach plain text either to the tail of the last emitted
            # element or to the parent's text if nothing was emitted yet.
            if text:
                if result:
                    if result[-1].tail:
                        result[-1].tail += text
                    else:
                        result[-1].tail = text
                else:
                    if parent.text:
                        parent.text += text
                    else:
                        parent.text = text
        result = []
        start_index = 0  # renamed from the original misspelled ``strartIndex``
        while data:
            index = data.find(self.__placeholder_prefix, start_index)
            if index != -1:
                id, phEndIndex = self.__findPlaceholder(data, index)
                if id in self.stashed_nodes:
                    node = self.stashed_nodes.get(id)
                    if index > 0:
                        text = data[start_index:index]
                        linkText(text)
                    if not isString(node): # it's Element
                        for child in [node] + node.getchildren():
                            if child.tail:
                                if child.tail.strip():
                                    self.__processElementText(node, child, False)
                            if child.text:
                                if child.text.strip():
                                    self.__processElementText(child, child)
                    else: # it's just a string
                        linkText(node)
                        start_index = phEndIndex
                        continue
                    start_index = phEndIndex
                    result.append(node)
                else: # wrong placeholder
                    # BUG FIX: the original referenced an undefined name
                    # ``prefix`` here (NameError whenever a malformed
                    # placeholder was encountered); skip past the known
                    # placeholder prefix instead.
                    end = index + len(self.__placeholder_prefix)
                    linkText(data[start_index:end])
                    start_index = end
            else:
                # No more placeholders: flush the remaining text and stop.
                text = data[start_index:]
                linkText(text)
                data = ""
        return result
    def __applyPattern(self, pattern, data, patternIndex, startIndex=0):
        """
        Check if the line fits the pattern, create the necessary
        elements, add it to stashed_nodes.
        Keyword arguments:
        * data: the text to be processed
        * pattern: the pattern to be checked
        * patternIndex: index of current pattern
        * startIndex: string index, from which we starting search
        Returns: String with placeholders instead of ElementTree elements.
        """
        match = pattern.getCompiledRegExp().match(data[startIndex:])
        leftData = data[:startIndex]
        if not match:
            return data, False, 0
        node = pattern.handleMatch(match)
        if node is None:
            return data, True, len(leftData) + match.span(len(match.groups()))[0]
        if not isString(node):
            if not isinstance(node.text, markdown.AtomicString):
                # We need to process current node too
                # NOTE(review): the inner check tests ``node`` rather than
                # ``child`` -- looks suspicious but is kept as-is to
                # preserve behaviour.
                for child in [node] + node.getchildren():
                    if not isString(node):
                        if child.text:
                            child.text = self.__handleInline(child.text,
                                                            patternIndex + 1)
                        if child.tail:
                            child.tail = self.__handleInline(child.tail,
                                                            patternIndex)
        placeholder = self.__stashNode(node, pattern.type())
        return "%s%s%s%s" % (leftData,
                             match.group(1),
                             placeholder, match.groups()[-1]), True, 0
    def run(self, tree):
        """Apply inline patterns to a parsed Markdown tree.
        Iterate over ElementTree, find elements with inline tag, apply inline
        patterns and append newly created Elements to tree. If you don't
        want process your data with inline paterns, instead of normal string,
        use subclass AtomicString:
        node.text = markdown.AtomicString("data won't be processed with inline patterns")
        Arguments:
        * markdownTree: ElementTree object, representing Markdown tree.
        Returns: ElementTree object with applied inline patterns.
        """
        self.stashed_nodes = {}
        stack = [tree]
        while stack:
            currElement = stack.pop()
            insertQueue = []
            for child in currElement.getchildren():
                if child.text and not isinstance(child.text, markdown.AtomicString):
                    text = child.text
                    child.text = None
                    lst = self.__processPlaceholders(self.__handleInline(
                        text), child)
                    stack += lst
                    insertQueue.append((child, lst))
                if child.getchildren():
                    stack.append(child)
            for element, lst in insertQueue:
                if element.text:
                    element.text = \
                        markdown.inlinepatterns.handleAttributes(element.text,
                                                                 element)
                i = 0
                for newChild in lst:
                    # Processing attributes
                    if newChild.tail:
                        newChild.tail = \
                            markdown.inlinepatterns.handleAttributes(newChild.tail,
                                                                    element)
                    if newChild.text:
                        newChild.text = \
                            markdown.inlinepatterns.handleAttributes(newChild.text,
                                                                    newChild)
                    element.insert(i, newChild)
                    i += 1
        return tree
class PrettifyTreeprocessor(Treeprocessor):
    """ Add linebreaks to the html document. """
    def _prettifyETree(self, elem):
        """ Recursively add linebreaks to ElementTree children. """
        newline = "\n"
        if markdown.isBlockLevel(elem.tag) and elem.tag not in ['code', 'pre']:
            # Break before the first child if it is block-level and the
            # element has no meaningful leading text of its own.
            text_is_blank = not elem.text or not elem.text.strip()
            if text_is_blank and len(elem) and markdown.isBlockLevel(elem[0].tag):
                elem.text = newline
            for child in elem:
                if markdown.isBlockLevel(child.tag):
                    self._prettifyETree(child)
            if not elem.tail or not elem.tail.strip():
                elem.tail = newline
        if not elem.tail or not elem.tail.strip():
            elem.tail = newline
    def run(self, root):
        """ Add linebreaks to ElementTree root object. """
        self._prettifyETree(root)
        # <br /> elements are handled separately: they often sit in the
        # middle of inline content and are missed by _prettifyETree.
        for br in root.getiterator('br'):
            if not br.tail or not br.tail.strip():
                br.tail = '\n'
            else:
                br.tail = '\n%s' % br.tail
| natestedman/Observatory | observatory/lib/markdown/treeprocessors.py | Python | isc | 11,444 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._firewall_rules_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request
T = TypeVar('T')
# Signature of the optional ``cls`` response hook each operation accepts:
# it receives the pipeline response, the deserialized body and a headers dict.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class FirewallRulesOperations:
    """FirewallRulesOperations async operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.redis.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # NOTE: AutoRest-generated code (see the file header); manual edits will
    # be overwritten the next time this client is regenerated.
    models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        # Pipeline client and (de)serializers are injected by the generated
        # service client and shared across all operation groups.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    @distributed_trace
    def list(
        self,
        resource_group_name: str,
        cache_name: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.RedisFirewallRuleListResult"]:
        """Gets all firewall rules in the specified redis cache.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param cache_name: The name of the Redis cache.
        :type cache_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either RedisFirewallRuleListResult or the result of
         cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.redis.models.RedisFirewallRuleListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.RedisFirewallRuleListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        def prepare_request(next_link=None):
            # First page uses the operation's templated URL; later pages
            # follow the service-provided continuation link verbatim.
            if not next_link:
                request = build_list_request(
                    subscription_id=self._config.subscription_id,
                    resource_group_name=resource_group_name,
                    cache_name=cache_name,
                    template_url=self.list.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_request(
                    subscription_id=self._config.subscription_id,
                    resource_group_name=resource_group_name,
                    cache_name=cache_name,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        async def extract_data(pipeline_response):
            # Unpack one page: returns (continuation token, async iterable of items).
            deserialized = self._deserialize("RedisFirewallRuleListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/redis/{cacheName}/firewallRules'}  # type: ignore
    @distributed_trace_async
    async def create_or_update(
        self,
        resource_group_name: str,
        cache_name: str,
        rule_name: str,
        parameters: "_models.RedisFirewallRule",
        **kwargs: Any
    ) -> "_models.RedisFirewallRule":
        """Create or update a redis cache firewall rule.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param cache_name: The name of the Redis cache.
        :type cache_name: str
        :param rule_name: The name of the firewall rule.
        :type rule_name: str
        :param parameters: Parameters supplied to the create or update redis firewall rule operation.
        :type parameters: ~azure.mgmt.redis.models.RedisFirewallRule
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: RedisFirewallRule, or the result of cls(response)
        :rtype: ~azure.mgmt.redis.models.RedisFirewallRule
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.RedisFirewallRule"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
        _json = self._serialize.body(parameters, 'RedisFirewallRule')
        request = build_create_or_update_request(
            resource_group_name=resource_group_name,
            cache_name=cache_name,
            rule_name=rule_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            json=_json,
            template_url=self.create_or_update.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        # Both 200 (updated) and 201 (created) return a RedisFirewallRule body.
        if response.status_code == 200:
            deserialized = self._deserialize('RedisFirewallRule', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('RedisFirewallRule', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/redis/{cacheName}/firewallRules/{ruleName}'}  # type: ignore
    @distributed_trace_async
    async def get(
        self,
        resource_group_name: str,
        cache_name: str,
        rule_name: str,
        **kwargs: Any
    ) -> "_models.RedisFirewallRule":
        """Gets a single firewall rule in a specified redis cache.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param cache_name: The name of the Redis cache.
        :type cache_name: str
        :param rule_name: The name of the firewall rule.
        :type rule_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: RedisFirewallRule, or the result of cls(response)
        :rtype: ~azure.mgmt.redis.models.RedisFirewallRule
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.RedisFirewallRule"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_get_request(
            resource_group_name=resource_group_name,
            cache_name=cache_name,
            rule_name=rule_name,
            subscription_id=self._config.subscription_id,
            template_url=self.get.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('RedisFirewallRule', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/redis/{cacheName}/firewallRules/{ruleName}'}  # type: ignore
    @distributed_trace_async
    async def delete(
        self,
        resource_group_name: str,
        cache_name: str,
        rule_name: str,
        **kwargs: Any
    ) -> None:
        """Deletes a single firewall rule in a specified redis cache.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param cache_name: The name of the Redis cache.
        :type cache_name: str
        :param rule_name: The name of the firewall rule.
        :type rule_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_delete_request(
            resource_group_name=resource_group_name,
            cache_name=cache_name,
            rule_name=rule_name,
            subscription_id=self._config.subscription_id,
            template_url=self.delete.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200 and 204 both indicate successful deletion; there is no body.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/redis/{cacheName}/firewallRules/{ruleName}'}  # type: ignore
| Azure/azure-sdk-for-python | sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_firewall_rules_operations.py | Python | mit | 13,189 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.