| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
vxgmichel/aioconsole
|
example/cli.py
|
Python
|
gpl-3.0
| 2,320
| 0.000862
|
"""Command line interface for echo server."""
import fnmatch
import asyncio
import argparse
from aioconsole import AsynchronousCli, start_interactive_server
from aioconsole.server import parse_server, print_server
from . import echo
async def get_history(reader, writer, pattern=None):
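# The echo server (imported above) is expected to stash a host -> messages
# mapping on the running event loop as `history`.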
history = asyncio.get_event_loop().history
if not history:
return "No message in the history"
if pattern:
history = {host: history[host] for host in fnmatch.filter(history, pattern)}
if not history:
return "No host matches the given pattern"
for host in history:
writer.write(f"Host {host}:\n".encode())
for i, message in enumerate(history[host]):
writer.write(f" {i}. {message}\n".encode())
def make_cli(streams=None):
parser = argparse.ArgumentParser(description="Display the message history")
parser.add_argument("--pattern", "-p", type=str, help="pattern to filter hostnames")
commands = {"history": (get_history, parser)}
return AsynchronousCli(commands, streams, prog="echo")
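# As get_history's signature above suggests, AsynchronousCli maps each command
# name to a (coroutine, ArgumentParser) pair; the coroutine receives the CLI's
# (reader, writer) streams plus the parsed options as keyword arguments.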
def parse_args(args=None):
parser = argparse.ArgumentParser(
description="Run the echo server and a command line interface."
)
parser.add_argument(
"server",
metavar="[HOST:]PORT",
type=str,
help="interface for the echo server, default host is localhost",
)
parser.add_argument(
"--serve-cli",
metavar="[HOST:]PORT",
type=str,
help="serve the command line interface on the given host+port "
"instead of using the standard streams",
)
namespace = parser.parse_args(args)
host, port = parse_server(namespace.server, parser)
if namespace.serve_cli is not None:
serve_cli = parse_server(namespace.serve_cli, parser)
else:
serve_cli = None
return host, port, serve_cli
def main(args=None):
host, port, serve_cli = parse_args(args)
if serve_cli:
cli_host, cli_port = serve_cli
coro = start_interactive_server(make_cli, cli_host, cli_port)
server = asyncio.get_event_loop().run_until_complete(coro)
print_server(server, "command line interface")
else:
asyncio.ensure_future(make_cli().interact())
return echo.run(host, port)
if __name__ == "__main__":
main()
|
psi4/psi4
|
psi4/driver/molutil.py
|
Python
|
lgpl-3.0
| 9,885
| 0.004856
|
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2022 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
"""Module with utility functions that act on molecule objects."""
from typing import Dict, Tuple, Union
import numpy as np
import qcelemental as qcel
from psi4 import core
from psi4.driver.p4util import temp_circular_import_blocker
from psi4.driver import qcdb
from psi4.driver.p4util.exceptions import *
def molecule_set_attr(self, name, value):
"""Function to redefine __setattr__ method of molecule class."""
fxn = object.__getattribute__(self, "is_variable")
isvar = fxn(name)
if isvar:
fxn = object.__getattribute__(self, "set_variable")
fxn(name, value)
return
object.__setattr__(self, name, value)
def molecule_get_attr(self, name):
"""Function to redefine __getattr__ method of molecule class."""
fxn = object.__getattribute__(self, "is_variable")
isvar = fxn(name)
if isvar:
fxn = object.__getattribute__(self, "get_variable")
return fxn(name)
return object.__getattribute__(self, name)
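# Together, these two hooks route attribute access for names the molecule
# reports as "variables" through set_variable/get_variable instead of the
# instance __dict__.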
@classmethod
def _molecule_from_string(cls,
molstr,
dtype=None,
name=None,
fix_com=None,
fix_orientation=None,
fix_symmetry=None,
return_dict=False,
enable_qm=True,
enable_efp=True,
missing_enabled_return_qm='none',
missing_enabled_return_efp='none',
verbose=1):
molrec = qcel.molparse.from_string(
molstr=molstr,
dtype=dtype,
name=name,
fix_com=fix_com,
fix_orientation=fix_orientation,
fix_symmetry=fix_symmetry,
return_processed=False,
enable_qm=enable_qm,
enable_efp=enable_efp,
missing_enabled_return_qm=missing_enabled_return_qm,
missing_enabled_return_efp=missing_enabled_return_efp,
verbose=verbose)
if return_dict:
return core.Molecule.from_dict(molrec['qm']), molrec
else:
return core.Molecule.from_dict(molrec['qm'])
@classmethod
def _molecule_from_arrays(cls,
geom=None,
elea=None,
elez=None,
elem=None,
mass=None,
real=None,
elbl=None,
name=None,
units='Angstrom',
input_units_to_au=None,
fix_com=None,
fix_orientation=None,
fix_symmetry=None,
fragment_separators=None,
fragment_charges=None,
fragment_multiplicities=None,
molecular_charge=None,
molecular_multiplicity=None,
comment=None,
provenance=None,
connectivity=None,
missing_enabled_return='error',
tooclose=0.1,
zero_ghost_fragments=False,
nonphysical=False,
mtol=1.e-3,
verbose=1,
return_dict=False):
"""Construct Molecule from unvalidated arrays and variables.
Light wrapper around :py:func:`~qcelemental.molparse.from_arrays`
that is a full-featured constructor producing a dictionary
representation of Molecule. This goes one step further and
returns a Molecule instance.
Parameters
----------
See :py:func:`~qcelemental.molparse.from_arrays`.
Returns
-------
:py:class:`psi4.core.Molecule`
"""
molrec = qcel.molparse.from_arrays(
geom=geom,
elea=elea,
elez=elez,
elem=elem,
mass=mass,
real=real,
elbl=elbl,
name=name,
units=units,
input_units_to_au=input_units_to_au,
fix_com=fix_com,
fix_orientation=fix_orientation,
fix_symmetry=fix_symmetry,
fragment_separators=fragment_separators,
fragment_charges=fragment_charges,
fragment_multiplicities=fragment_multiplicities,
molecular_charge=molecular_charge,
molecular_multiplicity=molecular_multiplicity,
comment=comment,
provenance=provenance,
connectivity=connectivity,
domain='qm',
missing_enabled_return=missing_enabled_return,
tooclose=tooclose,
zero_ghost_fragments=zero_ghost_fragments,
nonphysical=nonphysical,
mtol=mtol,
verbose=verbose)
if return_dict:
return core.Molecule.from_dict(molrec), molrec
else:
return core.Molecule.from_dict(molrec)
@classmethod
def _molecule_from_schema(cls, molschema: Dict, return_dict: bool = False, nonphysical: bool = False, verbose: int = 1) -> Union[core.Molecule, Tuple[core.Molecule, Dict]]:
"""Construct Molecule from non-Psi4 schema.
Light wrapper around :py:func:`~psi4.core.Molecule.from_arrays`.
Parameters
----------
molschema
Dictionary form of Molecule following known schema.
return_dict
Additionally return Molecule dictionary intermediate.
nonphysical
Do allow masses outside an element's natural range to pass validation?
verbose
Amount of printing.
Returns
-------
mol : :py:class:`psi4.core.Molecule`
molrec : dict
Dictionary representation of instance.
Only provided if `return_dict` is True.
"""
molrec = qcel.molparse.from_schema(molschema, nonphysical=nonphysical, verbose=verbose)
qmol = core.Molecule.from_dict(molrec)
geom = np.array(molrec["geom"]).reshape((-1, 3))
qmol._initial_cartesian = core.Matrix.from_array(geom)
if return_dict:
return qmol, molrec
else:
return qmol
def dynamic_variable_bind(cls):
"""Function to dynamically add extra members to
the core.Molecule class.
"""
cls.__setattr__ = molecule_set_attr
cls.__getattr__ = molecule_get_attr
cls.to_arrays = qcdb.Molecule.to_arrays
cls.to_dict = qcdb.Molecule.to_dict
cls.BFS = qcdb.Molecule.BFS
cls.B787 = qcdb.Molecule.B787
cls.scramble = qcdb.Molecule.scramble
cls.from_arrays = _molecule_from_arrays
cls.from_string = _molecule_from_string
cls.to_string = qcdb.Molecule.to_string
cls.from_schema = _molecule_from_schema
cls.to_schema = qcdb.Molecule.to_schema
cls.run_dftd3 = qcdb.Molecule.run_dftd3
cls.run_dftd4 = qcdb.Molecule.run_dftd4
cls.run_gcp = qcdb.Molecule.run_gcp
cls.format_molecule_for_mol = qcdb.Molecule.format_molecule_for_mol
dynamic_variable_bind(core.Molecule) # pass class type, not class instance
#
# Define geometry to be used by PSI4.
# The molecule created by this will be set in options.
#
# geometry("
# O 1.0 0.0 0.0
# H 0.0 1.0 0.0
# H 0.0 0.0 0.0
#
def geometry(geom, name="default"):
"""Function to create a molecule object of name *name* from the
geometry in string *geom*. Permitted for user use but deprecated
|
owenson/ardupilot-sdk-python
|
pymavlink/rotmat.py
|
Python
|
lgpl-3.0
| 12,026
| 0.004906
|
#!/usr/bin/env python
#
# vector3 and rotation matrix classes
# This follows the conventions in the ArduPilot code,
# and is essentially a python version of the AP_Math library
#
# Andrew Tridgell, March 2012
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 2.1 of the License, or (at your
# option) any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
'''rotation matrix class
'''
from math import sin, cos, sqrt, asin, atan2, pi, radians, acos, degrees
class Vector3:
'''a vector'''
def __init__(self, x=None, y=None, z=None):
if x != None and y != None and z != None:
self.x = float(x)
self.y = float(y)
self.z = float(z)
elif x != None and len(x) == 3:
self.x = float(x[0])
self.y = float(x[1])
self.z = float(x[2])
elif x != None:
raise ValueError('bad initialiser')
else:
self.x = float(0)
self.y = float(0)
self.z = float(0)
def __repr__(self):
return 'Vector3(%.2f, %.2f, %.2f)' % (self.x,
self.y,
self.z)
def __add__(self, v):
return Vector3(self.x + v.x,
self.y + v.y,
self.z + v.z)
__radd__ = __add__
def __sub__(self, v):
return Vector3(self.x - v.x,
self.y - v.y,
self.z - v.z)
def __neg__(self):
return Vector3(-self.x, -self.y, -self.z)
def __rsub__(self, v):
return Vector3(v.x - self.x,
v.y - self.y,
v.z - self.z)
def __mul__(self, v):
if isinstance(v, Vector3):
'''dot product'''
return self.x*v.x + self.y*v.y + self.z*v.z
return Vector3(self.x * v,
self.y * v,
self.z * v)
__rmul__ = __mul__
def __div__(self, v):
return Vector3(self.x / v,
self.y / v,
self.z / v)
def __mod__(self, v):
'''cross product'''
return Vector3(self.y*v.z - self.z*v.y,
self.z*v.x - self.x*v.z,
self.x*v.y - self.y*v.x)
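# Illustrative usage (not part of the original source): `*` is the dot
# product when given another Vector3 and scalar multiplication otherwise,
# while `%` is the cross product, e.g.
# Vector3(1, 0, 0) % Vector3(0, 1, 0) gives Vector3(0.00, 0.00, 1.00).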
def __copy__(self):
return Vector3(self.x, self.y, self.z)
copy = __copy__
def length(self):
return sqrt(self.x**2 + self.y**2 + self.z**2)
def zero(self):
self.x = self.y = self.z = 0
def angle(self, v):
'''return the angle between this vector and another vector'''
return acos((self * v) / (self.length() * v.length()))
def normalized(self):
return self.__div__(self.length())
def normalize(self):
v = self.normalized()
self.x = v.x
self.y = v.y
self.z = v.z
class Matrix3:
'''a 3x3 matrix, intended as a rotation matrix'''
def __init__(self, a=None, b=None, c=None):
if a is not None and b is not None and c is not None:
self.a = a.copy()
self.b = b.copy()
self.c = c.copy()
else:
self.identity()
def __repr__(self):
return 'Matrix3((%.2f, %.2f, %.2f), (%.2f, %.2f, %.2f), (%.2f, %.2f, %.2f))' % (
self.a.x, self.a.y, self.a.z,
self.b.x, self.b.y, self.b.z,
self.c.x, self.c.y, self.c.z)
def identity(self):
self.a = Vector3(1,0,0)
self.b = Vector3(0,1,0)
self.c = Vector3(0,0,1)
def transposed(self):
return Matrix3(Vector3(self.a.x, self.b.x, self.c.x),
Vector3(self.a.y, self.b.y, self.c.y),
Vector3(self.a.z, self.b.z, self.c.z))
def from_euler(self, roll, pitch, yaw):
'''fill the matrix from Euler angles in radians'''
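# The assignments below form the standard 3-2-1 rotation matrix
# R = Rz(yaw) * Ry(pitch) * Rx(roll).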
cp = cos(pitch)
sp = sin(pitch)
sr = sin(roll)
cr = cos(roll)
sy = sin(yaw)
cy = cos(yaw)
self.a.x = cp * cy
self.a.y = (sr * sp * cy) - (cr * sy)
self.a.z = (cr * sp * cy) + (sr * sy)
self.b.x = cp * sy
self.b.y = (sr * sp * sy) + (cr * cy)
self.b.z = (cr * sp * sy) - (sr * cy)
self.c.x = -sp
self.c.y = sr * cp
self.c.z = cr * cp
def to_euler(self):
'''find Euler angles for the matrix'''
if self.c.x >= 1.0:
pitch = pi
elif self.c.x <= -1.0:
pitch = -pi
else:
pitch = -asin(self.c.x)
roll = atan2(self.c.y, self.c.z)
yaw = atan2(self.b.x, self.a.x)
return (roll, pitch, yaw)
def __add__(self, m):
return Matrix3(self.a + m.a, self.b + m.b, self.c + m.c)
__radd__ = __add__
def __sub__(self, m):
return Matrix3(self.a - m.a, self.b - m.b, self.c - m.c)
def __rsub__(self, m):
return Matrix3(m.a - self.a, m.b - self.b, m.c - self.c)
def __mul__(self, other):
if isinstance(other, Vector3):
v = other
return Vector3(self.a.x * v.x + self.a.y * v.y + self.a.z * v.z,
self.b.x * v.x + self.b.y * v.y + self.b.z * v.z,
self.c.x * v.x + self.c.y * v.y + self.c.z * v.z)
elif isinstance(other, Matrix3):
m = other
return Matrix3(Vector3(self.a.x * m.a.x + self.a.y * m.b.x + self.a.z * m.c.x,
self.a.x * m.a.y + self.a.y * m.b.y + self.a.z * m.c.y,
self.a.x * m.a.z + self.a.y * m.b.z + self.a.z * m.c.z),
Vector3(self.b.x * m.a.x + self.b.y * m.b.x + self.b.z * m.c.x,
self.b.x * m.a.y + self.b.y * m.b.y + self.b.z * m.c.y,
self.b.x * m.a.z + self.b.y * m.b.z + self.b.z * m.c.z),
Vector3(self.c.x * m.a.x + self.c.y * m.b.x + self.c.z * m.c.x,
self.c.x * m.a.y + self.c.y * m.b.y + self.c.z * m.c.y,
self.c.x * m.a.z + self.c.y * m.b.z + self.c.z * m.c.z))
v = other
return Matrix3(self.a * v, self.b * v, self.c * v)
def __div__(self, v):
return Matrix3(self.a / v, self.b / v, self.c / v)
def __neg__(self):
return Matrix3(-self.a, -self.b, -self.c)
def __copy__(self):
return Matrix3(self.a, self.b, self.c)
copy = __copy__
def rotate(self, g):
'''rotate the matrix by a given amount on 3 axes'''
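# Each row r of the matrix gains the increment r cross g (computed below),
# i.e. M += M * skew(g): the first-order update for a small rotation g,
# where g is a Vector3 of rotation angles in radians.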
temp_matrix = Matrix3()
a = self.a
b = self.b
c = self.c
temp_matrix.a.x = a.y * g.z - a.z * g.y
temp_matrix.a.y = a.z * g.x - a.x * g.z
temp_matrix.a.z = a.x * g.y - a.y * g.x
temp_matrix.b.x = b.y * g.z - b.z * g.y
temp_matrix.b.y = b.z * g.x - b.x * g.z
temp_matrix.b.z = b.x * g.y - b.y * g.x
temp_matrix.c.x = c.y * g.z - c.z * g.y
temp_matrix.c.y = c.z * g.x - c.x * g.z
temp_matrix.c.z = c.x * g.y - c.y * g.x
self.a += temp_matrix.a
self.b += temp_matrix.b
self.c += temp_matrix.c
def normalize(self):
'''re-normalise a rotation matrix'''
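# Standard DCM renormalization: error = a . b measures the drift from
# orthogonality; half of the error is removed from each of a and b, c is
# rebuilt as the cross product t0 % t1, and all three rows are rescaled
# to unit length.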
error = self.a * self.b
t0 = self.a - (self.b * (0.5 * error))
t1 = self.b - (self.a * (0.5 * error))
t2 = t0 % t1
self.a = t0 * (1.0 / t0.length())
self.b = t1 * (1.0 / t1.length())
self.c = t2 * (1.0 / t2.length())
def trace(s
|
jordanemedlock/psychtruths
|
temboo/core/Library/Google/ComputeEngine/Instances/GetInstance.py
|
Python
|
apache-2.0
| 5,317
| 0.005266
|
# -*- coding: utf-8 -*-
###############################################################################
#
# GetInstance
# Retrieves information about the specified Instance.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class GetInstance(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the GetInstance Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(GetInstance, self).__init__(temboo_session, '/Library/Google/ComputeEngine/Instances/GetInstance')
def new_input_set(self):
return GetInstanceInputSet()
def _make_result_set(self, result, path):
return GetInstanceResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return GetInstanceChoreographyExecution(session, exec_id, path)
class GetInstanceInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the GetInstance
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AccessToken(self, value):
"""
Set the value of the AccessToken input for this Choreo. ((optional, string) A valid access token retrieved during the OAuth process. This is required unless you provide the ClientID, ClientSecret, and RefreshToken to generate a new access token.)
"""
super(GetInstanceInputSet, self)._set_input('AccessToken', value)
def set_ClientID(self, value):
"""
Set the value of the ClientID input for this Choreo. ((conditional, string) The Client ID provided by Google. Required unless providing a valid AccessToken.)
"""
super(GetInstanceInputSet, self)._set_input('ClientID', value)
def set_ClientSecret(self, value):
"""
Set the value of the ClientSecret input for this Choreo. ((conditional, string) The Client Secret provided by Google. Required unless providing a valid AccessToken.)
"""
super(GetInstanceInputSet, self)._set_input('ClientSecret', value)
def set_Fields(self, value):
"""
Set the value of the Fields input for this Choreo. ((optional, string) Comma-separated list of fields you want to include in the response.)
"""
super(GetInstanceInputSet, self)._set_input('Fields', value)
def set_Instance(self, value):
"""
Set the value of the Instance input for this Choreo. ((required, string) The name of the instance to retrieve.)
"""
super(GetInstanceInputSet, self)._set_input('Instance', value)
def set_Project(self, value):
"""
Set the value of the Project input for this Choreo. ((required, string) The ID of a Google Compute project.)
"""
super(GetInstanceInputSet, self)._set_input('Project', value)
def set_RefreshToken(self, value):
"""
Set the value of the RefreshToken input for this Choreo. ((conditional, string) An OAuth refresh token used to generate a new access token when the original token is expired. Required unless providing a valid AccessToken.)
"""
super(GetInstanceInputSet, self)._set_input('RefreshToken', value)
def set_Zone(self, value):
"""
Set the value of the Zone input for this Choreo. ((required, string) The name of the zone associated with this request.)
"""
super(GetInstanceInputSet, self)._set_input('Zone', value)
class GetInstanceResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the GetInstance Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Google.)
"""
return self._output.get('Response', None)
def get_NewAccessToken(self):
"""
Retrieve the value for the "NewAccessToken" output from this Choreo execution. ((string) Contains a new AccessToken when the RefreshToken is provided.)
"""
return self._output.get('NewAccessToken', None)
class GetInstanceChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return GetInstanceResultSet(response, path)
|
angvp/angelvelasquez-crunchyfrog
|
cf/config/__init__.py
|
Python
|
gpl-3.0
| 4,652
| 0.001505
|
# -*- coding: utf-8 -*-
# crunchyfrog - a database schema browser and query tool
# Copyright (C) 2008 Andi Albrecht <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Configuration"""
import gobject
from os.path import abspath, dirname, join
from configobj import ConfigObj
from gettext import gettext as _
import logging
log = logging.getLogger("CONFIG")
class Config(gobject.GObject):
"""Configuration object
An instance of this class is accessible through the ``config``
attribute of a `CFApplication`_ instance.
The Config class is a simplified wrapper around a ConfigObj
instance. It merges a default configuration located as package
data in this package with a user configuration.
The ``-c`` command line switch determines which user configuration
file is used. If it's not set, it defaults to
``~/.config/crunchyfrog/config``.
This wrapper provides only a getter and a setter for configuration
values and expects that option names are dotted strings (but only by
convention).
Values can be any basic Python types since it uses ConfigObj's
``unrepr`` mode (Read the `ConfigObj documentation`_ for details).
Plugins can connect to the `changed` signal to track configuration
changes, i.e. the SQL editor uses this signal to reflect changes
made through the preference dialog.
.. Note:: The runtime Config instance (``app.config``) is bound
to the application. So it is not possible to store instance
specific data here.
:Usage example:
.. sourcecode:: python
>>> app.config.get("foo.bar") # Not set yet, None is default
None
>>> app.config.set("foo.bar", True)
>>> app.config.get("foo.bar")
True
>>> app.config.set("foo.bar", ["Completely", "different"]) # No type check!
>>> print " ".join(app.config.get("foo.bar"))
Completely different
:Signals:
changed
``def callback(config, key, value, user_param1, ...)``
Emitted when an option has changed.
.. _CFApplication: cf.app.CFApplication.html
.. _ConfigObj documentation: http://www.voidspace.org.uk/python/configobj.html#unrepr-mode
"""
__gsignals__ = {
"changed" : (gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(str, gobject.TYPE_PYOBJECT)),
}
def __init__(self, app, config_file):
"""
The constructor of this class takes two arguments:
:Parameter:
app
`CFApplication`_ instance
config_file
Path to user configuration file
.. _CFApplication: cf.app.CFApplication.html
"""
self.app = app
self.__gobject_init__() # IGNORE:E1101
self.__conf = None
self.__config_file = config_file
self.__init_conf()
self.app.register_shutdown_task(self.on_app_shutdown,
_(u"Writing configuration"))
def on_app_shutdown(self, *args): # IGNORE:W0613
"""Callback: write configuration file to disk"""
self.write()
def __init_conf(self):
"""Initialize the configuration system"""
self.__conf = ConfigObj(abspath(join(dirname(__file__), "default.cfg")),
unrepr=True)
log.info("Loading configuration file %r" % self.__config_file)
self.__conf.update(ConfigObj(self.__config_file, unrepr=True))
def init(self):
"""Loads configuration"""
pass
def get(self, key, default=None):
"""Returns value or default for key"""
return self.__conf.get(key, default)
def set(self, key, value):
"""Sets key to value"""
self.__conf[key] = value
self.emit("changed", key, value) # IGNORE:E1101
def write(self, fname=None):
"""Writes configuration file"""
if not fname:
fname = self.__config_file
fp = open(fname, "w")
self.__conf.write(fp)
fp.close()
|
pni-libraries/python-pni
|
doc/examples/old_examples/test_string2.py
|
Python
|
gpl-2.0
| 494
| 0.036437
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import numpy
import pni.io.nx.h5 as nexus
f = nexus.create_file("test_string2.nxs",True);
d = f.root().create_group("scan_1","NXentry").\
create_group("detector","NXdetector")
sa= d.create_field("ListofStrings","string",shape=(3,2))
sa[0,0]="safdfdsffdsfd"
sa[1,0]="safdsfsfdsffdsfd"
sa[2,0]="safdfsfd"
print(sa[0,0])
print(sa[1,0])
print(sa[2,0])
print(sa[...])
f.close()
|
nylas/sync-engine
|
migrations/versions/004_drafts_as_required_folder.py
|
Python
|
agpl-3.0
| 459
| 0.006536
|
"""Drafts as required folder
Revision ID: 41a7e825d108
Revises: 269247bc37d3
Create Date: 2014-03-13 21:14:25.652333
"""
# revision identifiers, used by Alembic.
revision = '41a7e825d108'
down_revision = '269247bc37d3'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('imapaccount', sa.Column('drafts_folder_name', sa.String(255), nullable=True))
def downgrade():
op.drop_column('imapaccount', 'drafts_folder_name')
|
gx1997/chrome-loongson
|
third_party/webdriver/python/test/selenium/webdriver/firefox/test_ff_api.py
|
Python
|
bsd-3-clause
| 1,189
| 0.001682
|
#!/usr/bin/python
#
# Copyright 2008-2010 WebDriver committers
# Copyright 2008-2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from selenium import webdriver
from selenium.test.selenium.webdriver.common import api_examples
from selenium.test.selenium.webdriver.common.webserver import SimpleWebServer
def setup_module(module):
webserver = SimpleWebServer()
webserver.start()
FirefoxApiExampleTest.webserver = webserver
FirefoxApiExampleTest.driver = webdriver.Firefox()
class FirefoxApiExampleTest(api_examples.ApiExampleTest):
pass
def teardown_module(module):
FirefoxApiExampleTest.driver.quit()
FirefoxApiExampleTest.webserver.stop()
|
PhoenixRacing/PhoenixRacingWebApp-noregrets
|
run_server.py
|
Python
|
bsd-3-clause
| 327
| 0.003058
|
from application import app as application
from gevent import monkey
from socketio.server import SocketIOServer
monkey.patch_all()
if __name__ == '__main__':
# SocketIOServer(
# ('', application.config['PORT']),
# application,
# resource="socket.io").serve_forever()
socketio.run(application)
|
|
foligny/browsershots-psycopg2
|
shotserver/shotserver04/accounts/urls.py
|
Python
|
gpl-3.0
| 1,222
| 0.000818
|
# browsershots.org - Test your web design in different browsers
# Copyright (C) 2007 Johann C. Rocholl <[email protected]>
#
# Browsershots is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Browsershots is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
URL configuration for the accounts app.
"""
__revision__ = "$Rev: 2160 $"
__date__ = "$Date: 2007-09-18 19:12:50 -0400 (Tue, 18 Sep 2007) $"
__author__ = "$Author: johann $"
from django.conf.urls.defaults import patterns
urlpatterns = patterns('shotserver04.accounts.views',
(r'^login/$', 'login'),
(r'^logout/$', 'logout'),
(r'^profile/$', 'profile'),
(r'^email/$', 'email'),
(r'^verify/(?P<hashkey>[0-9a-f]{32})/$', 'verify'),
)
|
amoschou/openiv
|
public/views.py
|
Python
|
mpl-2.0
| 3,942
| 0.011935
|
from django.shortcuts import render, redirect, get_object_or_404
from django.core.urlresolvers import reverse
from openiv.settings import *
# Create your views here.
def index(request):
return redirect(reverse('public:event'))
# Comment the above to have an independent home page.
context = {
'imagesource': 'public/images/image-1.jpg',
'activetab': 'home',
'titletext': EVENT_MEDIUM_NAME,
'text1': [
'The ' + EVENT_ANNIVERSARY + ' AIVCF will take place in ' + EVENT_CITY + ' in ' + EVENT_YEAR + '.',
'Out of courtesy to the upcoming festivals (68th in Perth 2017, 69th in Melbourne 2018), we won’t have any news until Melbourne 2018 has begun. Festival details will be revealed in ' + EVENT_PRIOR_YEAR + '.',
],
}
return render(request,'public/index.html', context)
def event(request):
context = {
'imagesource': 'public/images/image-1.jpg',
'activetab': 'event',
'titletext': 'About ' + EVENT_MEDIUM_NAME,
'text1': [
'Intervarsity choral festivals (IVs) have been an annual event since 1950 when the Melbourne University Choral Society travelled to Sydney to present a combined concert with the Sydney University Musical Society. IVs quickly expanded to include other university choirs and are now hosted in many cities across Australia with participation drawing from the wider choral community in Australia and occasionally overseas.',
EVENT_YEAR + ' sees the ' + EVENT_ANNIVERSARY + ' IV, hosted in ' + EVENT_CITY + ' by ' + EVENT_HOSTED_BY + '. Choristers from across the country will be in ' + EVENT_CITY + ' for intensive rehearsals to produce a grand concert.',
'Out of courtesy to the upcoming festivals (' + EVENT_UPCOMING_EVENTS + '), we won’t have any news until ' + EVENT_PRIOR_CITY + ' ' + EVENT_PRIOR_YEAR + ' has begun. Festival details will be revealed in ' + EVENT_PRIOR_YEAR + '.',
ORGANISATION_SHORT_NAME + ' acknowledges that ' + EVENT_SHORT_NAME + ' is being held on the traditional lands of the ' + EVENT_ABORIGINAL_COUNTRY + ' people; we pay respect to the elders of the community and extend our recognition to their descendants.',
],
'titletext2': 'Past ' + EVENT_CITY + ' IVs'
}
return render(request,'public/event.html', context)
def organisation(request):
context = {
'imagesource': 'public/images/image-1.jpg',
'activetab': 'organisation',
'titletext': ORGANISATION_SHORT_NAME,
'text1': [
'The ' + EVENT_ANNIVERSARY + ' Australian Intervarsity Choral Festival is presented by ' + ORGANISATION_SHORT_NAME + ' in ' + EVENT_YEAR + '. The organisation was elected by the members of ' + EVENT_HOSTED_BY + '.',
'We represent the ' + EVENT_CITY + ' contingent of a wider choral community across Australia with combined membership of over a thousand nationally in the Australian Intervarsity Choral Societies Association (AICSA).',
]
}
return render(request,'public/index.html', context)
def participate(request):
context = {
'imagesource': 'public/images/image-2.jpg',
'activetab': 'participate',
'titletext': 'Participate',
}
return render(request,'public/participateindex.html', context)
def participatefundraising(request):
context = {
'imagesource': 'public/images/image-2.jpg',
'activetab': 'participate',
'titletext': 'Participate: Fundraising',
}
return render(request,'public/participatefundraisingindex.html', context)
def help(request):
context = {
'titletext': 'Help',
}
return render(request,'public/help.html', context)
def privacy(request):
context = {
'titletext': 'Privacy policy',
}
return render(request,'public/help.html', context)
def privacyaffiliates(request):
context = {
'titletext': 'Affiliates',
}
return render(request,'public/help.html', context)
def conduct(request):
context = {
'titletext': 'Code of conduct',
}
return render(request,'public/help.html', context)
|
jebaum/neosyntax
|
rplugin/python3/neosyntax.py
|
Python
|
gpl-3.0
| 8,728
| 0.007218
|
import neovim
# TODO figure out the python way to do these imports, this is probably wrong
import pygments
import pygments.lexers
import pygments.token
@neovim.plugin
class Neosyntax(object):
def __init__(self, nvim):
self.nvim = nvim
# swap src_ids. from brefdl: allocate two ids, and swap, adding before clearing, so things that don't change won't appear to flicker
self.srcset = True
self.pygmap = {}
t = pygments.token
self.pygmap[t.Comment.Hashbang] = "Comment"
self.pygmap[t.Comment.Single] = "Comment"
self.pygmap[t.Comment] = "Comment" # older versions of pygments don't have Single and Hashbang?
self.pygmap[t.Keyword.Namespace] = "Include"
self.pygmap[t.Keyword] = "Conditional"
self.pygmap[t.Literal.Number.Integer] = "Number"
self.pygmap[t.Literal.String.Double] = "String"
self.pygmap[t.Literal.String.Single] = "String"
self.pygmap[t.Literal.String] = "String" # same comment as above
self.pygmap[t.Name.Builtin.Pseudo] = "Boolean"
self.pygmap[t.Name.Builtin] = "Function"
self.pygmap[t.Name.Decorator] = "PreProc"
self.pygmap[t.Operator.Word] = "Conditional"
def msg(self, m):
self.nvim.command("echom '" + str(m) + "'")
@neovim.autocmd('BufEnter', pattern='*', eval='expand("<abuf>")', sync=False)
def autocmd_handler1(self, bufnr): # TODO how to pass in multiple arguments?
self.highlight_buffer(int(bufnr))
@neovim.autocmd('TextChanged', pattern='*', eval='expand("<abuf>")', sync=False)
def autocmd_handler2(self, bufnr):
self.highlight_buffer(int(bufnr))
@neovim.autocmd('TextChangedI', pattern='*', eval='expand("<abuf>")', sync=False)
def autocmd_handler3(self, bufnr):
# TODO do special thing here if the user is currently typing inside a string or comment
# to extend that highlight group a bunch of columns ahead
# not sure where the best place to implement that will be
# TODO I was hoping that performance with syntax highlighting being done by this autocmd
# would be comparable to plain old :syntax off and without this plugin
# I think it is better, although I'll have to find a way to test that empirically
# But, it still isn't as good as I hoped. Some flickering is still present
# This may be a limitation of the tui and its ability to process remote api calls
# Maybe this will work better in the eventual gui?
# If nothing else, this function gives the option to have syntax highlighting turned off during
# insert mode, then handled once you leave insert mode. Just have to remove the TextChangedI autocmd
# and keep the TextChanged one (no I).
# This is less than ideal for lots of situations, but is better than nothing
# TODO figure out a way to queue these calls somehow? with the swapping src_id strategy,
# flicker is gone when typing fast in insert mode, but typing too fast can still cause a
# call backlog that can either crash the python host or just appear as lots of lag to the user
# a timer? when this is called, start a timer that counts down from X seconds
# throw away any subsequent calls that come in before the timer is up
# maybe highlight_buffer should take lines as an argument to facilitate the viewport shit?
self.highlight_buffer(int(bufnr))
@neovim.function('UnHighlightBuffer', sync=False)
def unhighlight_buffer(self, bufnr):
bufnr = int(bufnr)
for b in self.nvim.buffers:
if b.number == bufnr: # TODO what if it isn't found?
buf = b
break
end = len([line for line in buf])
buf.clear_highlight(src_id=1, line_start=0, line_end=end, async=True)
buf.clear_highlight(src_id=2, line_start=0, line_end=end, async=True)
@neovim.function('HighlightBuffer', sync=False)
def highlight_buffer(self, bufnr):
# XXX some ideas to help with flickering:
# use cursorholdi instead of textchangedi
# still use textchangedi, but also use a timer, and if the highlight is less than X seconds old, don't recompute, just return
# in insert mode, only recompute highlight groups on the line, or couple of lines surrounding the cursor
# get the viewport of the current window, render that region only or first before the rest of the buffer
# also, should cache a map of buffer -> lexer so this doesn't have to be done every time
for b in self.nvim.buffers:
if b.number == bufnr: # TODO what if it isn't found?
buf = b
break
# TODO - can I be more intelligent than doing the whole buffer every time? just the area around a change?
fullbuf = "\n".join([line for line in buf]) # TODO can i cache this somehow?
self.msg(fullbuf)
mylexer = pygments.lexers.guess_lexer(fullbuf) # TODO cache this
# TODO these numbers need to be per buffer
addid = 1 if self.srcset else 2
rmid = 2 if self.srcset else 1
self.srcset = not self.srcset
arglist = []
linenum = 0
lastnewlineindex = -1
for (index, tokentype, value) in mylexer.get_tokens_unprocessed(fullbuf):
self.msg("line: " + str(linenum))
self.msg("idx : " + str(index))
self.msg("lni : " + str(lastnewlineindex))
self.msg("tok : " + str(tokentype))
self.msg("val : " + str(value))
self.msg("--------")
# XXX issue with highlight groups
# if `:syntax off` is set from vimrc, which is the entire goal of this plugin
# then a lot (maybe all) of the language specific highlight groups will never be loaded
# e.g., the "Comment" highlight group will probably exist (assuming the colorscheme
# defines it), but "pythonComment" will not.
# This isn't great, because I want to maintain the ability of users to modify individual
# language highlight groups if they feel like it
# I am not going to worry about this just yet, but I will need to find a way to address this eventually
# For now, my solution is to just not use those language specific groups while I get the basics working
# Also, it would be really swell if I didn't have to write this code for every single languages someone
# might edit in vim. Actually, that's really the only way to do it.
# I need to make the core functionality as generic as possible, while having an easy way to override settings
# for a specific language if the generic way just won't work in all edge cases
# This should be possible both within this python code, and from vimscript
# entire file is sent to pygments in a single big list, so column indexes are relative to the entire file, not per line
# keep track of the last index where a newline was found
# the index for the 0th column for the next line will be 1 after the lastnewlineindex
# at the same time, also track line numbers
# TODO newlines are their own tokens in python, but not in bash, and probably other languages
# I assume any language where newlines don't have semantic meaning won't have them as tokens
# need to find a better way to keep track of line numbers
# shit.
# so i can either override each lexer that doesn't have newlines as tokens, see here:
# http://pygments.org/docs/lexerdevelopment/#modifying-token-streams
# or, note down the byte index of newlines in the fullbuf stream and work with that
# first method might be marginally faster, but is so ugly it makes me want to cry
# probably will go with second method.
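# with that bookkeeping, a token's 0-based column is
# index - (lastnewlineindex + 1): `index` counts from the start of fullbuf,
# not from the start of the current line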
if value == '\n':
linenum += 1
lastnewlineindex = index
# self.msg('found newline')
elif tokentyp
|
dpazel/music_rep
|
tests/transformation_tests/reflection_tests/test_t_chromatic_reflection.py
|
Python
|
mit
| 7,963
| 0.003014
|
import unittest
import logging
from harmoniccontext.harmonic_context import HarmonicContext
from harmoniccontext.harmonic_context_track import HarmonicContextTrack
from harmonicmodel.secondary_chord_template import SecondaryChordTemplate
from harmonicmodel.tertian_chord_template import TertianChordTemplate
from structure.LineGrammar.core.line_grammar_executor import LineGrammarExecutor
from structure.line import Line
from structure.note import Note
from timemodel.duration import Duration
from tonalmodel.diatonic_foundation import DiatonicFoundation
from tonalmodel.modality import ModalityType
from tonalmodel.tonality import Tonality
from transformation.reflection.t_chromatic_reflection import TChromaticReflection
from misc.interval import Interval
from tonalmodel.diatonic_pitch import DiatonicPitch
from fractions import Fraction
class TestTChromaticFlip(unittest.TestCase):
logging.basicConfig(level=logging.DEBUG)
def setUp(self):
pass
def tearDown(self):
pass
def test_hct_rebuild_perfect_overlap(self):
print('----- test_hct_rebuild_perfect_overlap -----')
line_str = '{<C-Major: I> hA:5 <:IV> B qC G <:VI> hD}'
lge = LineGrammarExecutor()
target_line, target_hct = lge.parse(line_str)
print('--- before transformation ---')
TestTChromaticFlip.print_notes(target_line)
TestTChromaticFlip.print_hct(target_hct)
cue = DiatonicPitch(5, 'c')
f = TChromaticReflection(target_line, target_hct, cue)
temporal_extent = Interval(Fraction(1, 2), Fraction(3, 2))
score_line, score_hct = f.apply(temporal_extent, cue)
print('--- after transformation ---')
TestTChromaticFlip.print_notes(score_line)
TestTChromaticFlip.print_hct(score_hct)
print('--- transformation ---')
TestTChromaticFlip.print_function(f, target_hct)
notes = score_line.get_all_notes()
assert 'Db:4' == str(notes[1].diatonic_pitch)
assert 'C:5' == str(notes[2].diatonic_pitch)
assert 'F:4' == str(notes[3].diatonic_pitch)
hc_list = score_hct.hc_list()
assert len(hc_list) == 3
assert hc_list[1].chord.chord_template.scale_degree == 1
assert {t[0].diatonic_symbol for t in hc_list[1].chord.tones} == {'G', 'C', 'Eb'}
assert hc_list[1].chord.chord_template.inversion == 3
def test_mozart(self):
print('----- Mozart -----')
line_str = '{<C-Major: I> hC:5 qE G <:VMaj7> q@b:4 sC:5 D <:I> hC}'
lge = LineGrammarExecutor()
target_line, target_hct = lge.parse(line_str)
print('--- before transformation ---')
TestTChromaticFlip.print_notes(target_line)
TestTChromaticFlip.print_hct(target_hct)
cue = DiatonicPitch(5, 'c')
f = TChromaticReflection(target_line, target_hct, cue)
score_line, score_hct = f.apply()
print('--- after transformation ---')
TestTChromaticFlip.print_notes(score_line)
TestTChromaticFlip.print_hct(score_hct)
print('--- transformation ---')
TestTChromaticFlip.print_function(f, target_hct)
notes = score_line.get_all_notes()
assert 'C:5' == str(notes[0].diatonic_pitch)
assert 'Ab:4' == str(notes[1].diatonic_pitch)
assert 'F:4' == str(notes[2].diatonic_pitch)
assert 'Db:5' == str(notes[3].diatonic_pitch)
assert 'C:5' == str(notes[4].diatonic_pitch)
assert 'Bb:4' == str(notes[5].diatonic_pitch)
assert 'C:5' == str(notes[6].diatonic_pitch)
hc_list = score_hct.hc_list()
assert len(hc_list) == 3
assert hc_list[0].chord.chord_template.scale_degree == 4
assert {t[0].diatonic_symbol for t in hc_list[0].chord.tones} == {'C', 'F', 'Ab'}
assert hc_list[0].chord.chord_template.inversion == 3
assert hc_list[1].chord.chord_template.scale_degree == 7
assert {t[0].diatonic_symbol for t in hc_list[1].chord.tones} == {'F', 'Bb', 'Db', 'Gb'}
assert hc_list[1].chord.chord_template.inversion == 3
assert hc_list[2].chord.chord_template.scale_degree == 4
assert {t[0].diatonic_symbol for t in hc_list[2].chord.tones} == {'C', 'F', 'Ab'}
assert hc_list[2].chord.chord_template.inversion == 3
def test_secondary_chord(self):
print('----- test_secondary_tonality -----')
diatonic_tonality = Tonality.create(ModalityType.Major, DiatonicFoundation.get_tone("C"))
chort_t_i = TertianChordTemplate.parse('tI')
chord_i = chort_t_i.create_chord(diatonic_tonality)
chord_v_ii = SecondaryChordTemplate.parse('V/ii').create_chord(diatonic_tonality)
chord_vi_v = SecondaryChordTemplate.parse('vi/V').create_chord(diatonic_tonality)
chord_t_ii = TertianChordTemplate.parse('tii')
chord_ii = chord_t_ii.create_chord(diatonic_tonality)
hc_track = HarmonicContextTrack()
hc_track.append(HarmonicContext(diatonic_tonality, chord_i, Duration(1)))
hc_track.append(HarmonicContext(diatonic_tonality, chord_v_ii, Duration(1)))
hc_track.append(HarmonicContext(diatonic_tonality, chord_vi_v, Duration(1)))
hc_track.append(HarmonicContext(diatonic_tonality, chord_ii, Duration(1)))
TestTChromaticFlip.print_hct(hc_track)
tune = [('C:5', (1, 1)), ('E:5', (1, 1)), ('E:5', (1, 1)), ('G:5', (1, 1))]
line = TestTChromaticFlip.build_line(tune)
cue = DiatonicPitch(5, 'd')
tflip = TChromaticReflection(line, hc_track, cue)
temporal_extent = Interval(Fraction(0), Fraction(4))
score_line, score_hct = tflip.apply()
TestTChromaticFlip.print_notes(score_line)
TestTChromaticFlip.print_hct(score_hct)
@staticmethod
def print_hct(hct):
hcs = hct.hc_list()
index = 0
for hc in hcs:
print('[{0}] {1} {2}'.format(index, hc, hc.position))
index += 1
print("--------")
@staticmethod
def print_notes(line):
for note in line.get_all_notes():
print(note)
print("--------")
@staticmethod
def print_map(f, source_hct, cue):
for hc in source_hct.hc_list():
if hc in f.hc_flip_map:
pitch_map = f.hc_flip_map[hc]
map_list = list()
for tone in pitch_map.domain_tonality.annotation[:-1]:
ft = pitch_map.tonal_function[tone]
map_list.append('{0}-->{1}'.format(tone.diatonic_symbol, ft.diatonic_symbol))
print('[{0}] ({1}) {2}'.format(hc, pitch_map.range_tonality, ', '.join([s for s in map_list])))
@staticmethod
def print_function(f, source_hct):
for hc in source_hct.hc_list():
if hc in f.hc_flip_map:
pitch_map = f.hc_flip_map[hc]
domain = sorted([p for p in pitch_map.domain], key=lambda p: p.chromatic_distance)
domain_tones = pitch_map.domain_tonality.annotation[:-1]
map_list = list()
for p in domain:
r = pitch_map[p]
if p.diatonic_tone in domain_tones:
map_list.append('{0} --> {1}'.format(p, r))
print('[{0}] ({1}) {2}: {3}'.format(pitch_map.domain_tonality,
pitch_map.cue_pitch,
pitch_map.range_tonality,
', '.join([s for s in map_list])
)
)
@staticmethod
def build_line(note_spec_list):
note_list = list()
for spec in note_spec_list:
pitch = DiatonicPitch.parse(spec[0])
n = Note(pitch, Duration(spec[1][0], spec[1][1]))
note_list.append(n)
return Line(note_list)
|
WeirdCoder/rss-2014-team-3
|
devel/lib/python2.7/dist-packages/robotbrain/__init__.py
|
Python
|
mit
| 1,010
| 0.00099
|
# generated from catkin/cmake/template/__init__.py.in
# keep symbol table as clean as possible by deleting all unnecessary symbols
from os import path as os_path
from sys import path as sys_path
from pkgutil import extend_path
__extended_path = "/home/rss-student/rss-2014-team-3/src/robotbrain/src".split(";")
for p in reversed(__extended_path):
sys_path.insert(0, p)
del p
del sys_path
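# pkgutil.extend_path makes this a namespace-style package: __path__ below is
# extended with any same-named package directories found elsewhere on sys.path.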
__path__ = extend_path(__path__, __name__)
del extend_path
__execfiles = []
for p in __extended_path:
src_init_file = os_path.join(p, __name__ + '.py')
if os_path.isfile(src_init_file):
__execfiles.append(src_init_file)
else:
src_init_file = os_path.join(p, __name__, '__init__.py')
if os_path.isfile(src_init_file):
__execfiles.append(src_init_file)
del src_init_file
del p
del os_path
del __extended_path
for __execfile in __execfiles:
with open(__execfile, 'r') as __fh:
exec(__fh.read())
del __fh
del __execfile
del __execfiles
|
twchad/Adafruit_Python_SSD1351
|
Adafruit_SSD1351/SSD1351.py
|
Python
|
mit
| 11,355
| 0.026244
|
# Copyright (c) 2014 Adafruit Industries
# Author: Tony DiCola
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import logging
import time
import Adafruit_GPIO as GPIO
import Adafruit_GPIO.SPI as SPI
# Constants
SSD1351_I2C_ADDRESS = 0x3C # 011110+SA0+RW - 0x3C or 0x3D
SSD1351_SETCONTRAST = 0x81
SSD1351_DISPLAYALLON_RESUME = 0xA4
SSD1351_DISPLAYALLON = 0xA5
SSD1351_NORMALDISPLAY = 0xA6
SSD1351_INVERTDISPLAY = 0xA7
SSD1351_DISPLAYOFF = 0xAE
SSD1351_DISPLAYON = 0xAF
SSD1351_SETDISPLAYOFFSET = 0xD3
SSD1351_SETCOMPINS = 0xDA
SSD1351_SETVCOMDETECT = 0xDB
SSD1351_SETDISPLAYCLOCKDIV = 0xD5
SSD1351_SETPRECHARGE = 0xD9
SSD1351_SETMULTIPLEX = 0xA8
SSD1351_SETLOWCOLUMN = 0x00
SSD1351_SETHIGHCOLUMN = 0x10
SSD1351_SETSTARTLINE = 0x40
SSD1351_MEMORYMODE = 0x20
SSD1351_COLUMNADDR = 0x21
SSD1351_PAGEADDR = 0x22
SSD1351_COMSCANINC = 0xC0
SSD1351_COMSCANDEC = 0xC8
SSD1351_SEGREMAP = 0xA0
SSD1351_CHARGEPUMP = 0x8D
SSD1351_EXTERNALVCC = 0x1
SSD1351_SWITCHCAPVCC = 0x2
# Scrolling constants
SSD1351_ACTIVATE_SCROLL = 0x2F
SSD1351_DEACTIVATE_SCROLL = 0x2E
SSD1351_SET_VERTICAL_SCROLL_AREA = 0xA3
SSD1351_RIGHT_HORIZONTAL_SCROLL = 0x26
SSD1351_LEFT_HORIZONTAL_SCROLL = 0x27
SSD1351_VERTICAL_AND_RIGHT_HORIZONTAL_SCROLL = 0x29
SSD1351_VERTICAL_AND_LEFT_HORIZONTAL_SCROLL = 0x2A
#? SSD1351_DELAYS_HWFILL (3)
#? SSD1351_DELAYS_HWLINE (1)
# SSD1351 Commands
SSD1351_SETCOLUMN = 0x15
SSD1351_SETROW = 0x75
SSD1351_WRITERAM = 0x5C
SSD1351_READRAM = 0x5D
SSD1351_SETREMAP = 0xA0
SSD1351_STARTLINE = 0xA1
SSD1351_DISPLAYOFFSET = 0xA2
SSD1351_DISPLAYALLOFF = 0xA4
SSD1351_DISPLAYALLON = 0xA5
SSD1351_NORMALDISPLAY = 0xA6
SSD1351_INVERTDISPLAY = 0xA7
SSD1351_FUNCTIONSELECT = 0xAB
SSD1351_DISPLAYOFF = 0xAE
SSD1351_DISPLAYON = 0xAF
SSD1351_PRECHARGE = 0xB1
SSD1351_DISPLAYENHANCE = 0xB2
SSD1351_CLOCKDIV = 0xB3
SSD1351_SETVSL = 0xB4
SSD1351_SETGPIO = 0xB5
SSD1351_PRECHARGE2 = 0xB6
SSD1351_SETGRAY = 0xB8
SSD1351_USELUT = 0xB9
SSD1351_PRECHARGELEVEL = 0xBB
SSD1351_VCOMH = 0xBE
SSD1351_CONTRASTABC = 0xC1
SSD1351_CONTRASTMASTER = 0xC7
SSD1351_MUXRATIO = 0xCA
SSD1351_COMMANDLOCK = 0xFD
SSD1351_HORIZSCROLL = 0x96
SSD1351_STOPSCROLL = 0x9E
SSD1351_STARTSCROLL = 0x9F
class SSD1351Base(object):
"""Base class for SSD1351-based OLED displays. Implementors should subclass
and provide an implementation for the _initialize function.
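A minimal subclass sketch (illustrative only, not a real driver):
    class MyOLED(SSD1351Base):
        def _initialize(self):
            self.command(SSD1351_DISPLAYOFF)
            self.command(SSD1351_NORMALDISPLAY)
            # ... remaining chip-specific init sequence ...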
"""
def __init__(self, width, height, rst, dc=None, sclk=None, din=None, cs=None,
gpio=None, spi=None, i2c_bus=None, i2c_address=SSD1351_I2C_ADDRESS,
i2c=None):
self._log = logging.getLogger('Adafruit_SSD1351.SSD1351Base')
self._spi = None
self._i2c = None
self.width = width
self.height = height
self._pages = height/8
self._buffer = [0]*(width*height)
# Default to platform GPIO if not provided.
self._gpio = gpio
if self._gpio is None:
self._gpio = GPIO.get_platform_gpio()
# Setup reset pin.
self._rst = rst
self._gpio.setup(self._rst, GPIO.OUT)
# Handle hardware SPI
if spi is not None:
self._log.debug('Using hardware SPI')
self._spi = spi
self._spi.set_clock_hz(8000000)
# Handle software SPI
elif sclk is not None and din is not None and cs is not None:
self._log.debug('Using software SPI')
self._spi = SPI.BitBang(self._gpio, sclk, din, None, cs)
# Handle hardware I2C
elif i2c is not None:
self._log.debug('Using hardware I2C with custom I2C provider.')
self._i2c = i2c.get_i2c_device(i2c_address)
else:
self._log.debug('Using hardware I2C with platform I2C provider.')
import Adafruit_GPIO.I2C as I2C
if i2c_bus is None:
self._i2c = I2C.get_i2c_device(i2c_address)
else:
self._i2c = I2C.get_i2c_device(i2c_address, busnum=i2c_bus)
# Initialize DC pin if using SPI.
if self._spi is not None:
if dc is None:
raise ValueError('DC pin must be provided when using SPI.')
self._dc = dc
self._gpio.setup(self._dc, GPIO.OUT)
def _initialize(self):
raise NotImplementedError
def command(self, c):
"""Send command byte to display."""
if self._spi is not None:
# SPI write.
self._gpio.set_low(self._dc)
self._spi.write([c])
else:
# I2C write.
control = 0x00 # Co = 0, DC = 0
self._i2c.write8(control, c)
def data(self, c):
"""Send byte of data to display."""
if self._spi is not None:
# SPI write.
self._gpio.set_high(self._dc)
self._spi.write([c])
else:
# I2C write.
control = 0x40 # Co = 0, DC = 0
self._i2c.write8(control, c)
def begin(self, vccstate=SSD1351_SWITCHCAPVCC):
"""Initialize display."""
# Save vcc state.
self._vccstate = vccstate
# Reset and initialize display.
self.reset()
self._initialize()
# Turn on the display.
self.command(SSD1351_DISPLAYON)
def reset(self):
"""Reset the display."""
# Set reset high for a millisecond.
self._gpio.set_high(self._rst)
time.sleep(0.001)
# Set reset low for 10 milliseconds.
self._gpio.set_low(self._rst)
time.sleep(0.010)
# Set reset high again.
self._gpio.set_high(self._rst)
def display(self):
"""Write display buffer to physical display."""
self.command(SSD1351_SETCOLUMN)
self.data(0) # Column start address. (0 = reset)
self.data(self.width-1) # Column end address.
self.command(SSD1351_SETROW)
self.data(0) # Page start address. (0 = reset)
self.data(self.height-1) # Page end address.
# Write buffer data.
if self._spi is not None:
# Set DC high for data.
self._gpio.set_high(self._dc)
# Write buffer.
self.command(SSD1351_WRITERAM)
self._spi.write(self._buffer)
else:
for i in range(0, len(self._buffer), 16):
control = 0x40 # Co = 0, DC = 0
self._i2c.writeList(control, self._buffer[i:i+16])
def image(self, image):
"""Set buffer to value of Python Imaging Library image. The image should
be in 1 bit mode and a size equal to the display size.
"""
# if image.mode != '1':
# raise ValueError('Image must be in mode 1.')
imwidth, imheight = image.size
if imwidth != self.width or imheight != self.height:
raise ValueError('Image must be same dimensions as display ({0}x{1}).' \
.format(self.width, self.height))
# Grab all the pixels from the image, faster than getpixel.
pix = image.load()
# Iterate through the memory pages
index = 0
for page in range(self.height):
# Iterate through all x axis columns.
for x in range(self.width):
# Set the bits for the column of pixels at the current position.
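# (After the loop below, bit k of the byte holds the pixel at row
# page*8 + k, so the LSB is the topmost pixel of the page.)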
bits = 0
# Don't use range here as it's a bit slow
for bit in [0, 1, 2, 3, 4, 5, 6, 7]:
bits = bits << 1
bits |= 0 if pix[(x, page*8+7-bit)] == 0 else 1
# Update buffer byte and increment to next byte.
self._buffer[index] = bits
index += 1
def clear(self):
"""Clear contents of image buffer."""
self._buffer = [0]*(self.width*self.height)
def set_contrast(self, contrast):
"""Sets the contrast of the display. Contrast should be a value between
0 and 255."""
if contrast < 0 or contrast > 255:
raise ValueError('Contrast must be a value from 0 to 255 (inclusive).')
self.command(SSD1351_CONTRASTMASTER)
self.command(contrast)
def dim(self, dim):
"""Adjusts contrast to dim the
|
omardroubi/Artificial-Intelligence
|
Projects/Project4/bayesNets/util.py
|
Python
|
apache-2.0
| 25,733
| 0.014728
|
# util.py
# -------
# Licensing Information: You are free to use or extend these projects for
# educational purposes provided that (1) you do not distribute or publish
# solutions, (2) you retain this notice, and (3) you provide clear
# attribution to UC Berkeley, including a link to http://ai.berkeley.edu.
#
# Attribution Information: The Pacman AI projects were developed at UC Berkeley.
# The core projects and autograders were primarily created by John DeNero
# ([email protected]) and Dan Klein ([email protected]).
# Student side autograding was added by Brad Miller, Nick Hay, and
# Pieter Abbeel ([email protected]).
import sys
import inspect
import heapq, random
import cStringIO
class FixedRandom:
def __init__(self):
fixedState = (3, (2147483648L, 507801126L, 683453281L, 310439348L, 2597246090L, \
2209084787L, 2267831527L, 979920060L, 3098657677L, 37650879L, 807947081L, 3974896263L, \
881243242L, 3100634921L, 1334775171L, 3965168385L, 746264660L, 4074750168L, 500078808L, \
776561771L, 702988163L, 1636311725L, 2559226045L, 157578202L, 2498342920L, 2794591496L, \
4130598723L, 496985844L, 2944563015L, 3731321600L, 3514814613L, 3362575829L, 3038768745L, \
2206497038L, 1108748846L, 1317460727L, 3134077628L, 988312410L, 1674063516L, 746456451L, \
3958482413L, 1857117812L, 708750586L, 1583423339L, 3466495450L, 1536929345L, 1137240525L, \
3875025632L, 2466137587L, 1235845595L, 4214575620L, 3792516855L, 657994358L, 1241843248L, \
1695651859L, 3678946666L, 1929922113L, 2351044952L, 2317810202L, 2039319015L, 460787996L, \
3654096216L, 4068721415L, 1814163703L, 2904112444L, 1386111013L, 574629867L, 2654529343L, \
3833135042L, 2725328455L, 552431551L, 4006991378L, 1331562057L, 3710134542L, 303171486L, \
1203231078L, 2670768975L, 54570816L, 2679609001L, 578983064L, 1271454725L, 3230871056L, \
2496832891L, 2944938195L, 1608828728L, 367886575L, 2544708204L, 103775539L, 1912402393L, \
1098482180L, 2738577070L, 3091646463L, 1505274463L, 2079416566L, 659100352L, 839995305L, \
1696257633L, 274389836L, 3973303017L, 671127655L, 1061109122L, 517486945L, 1379749962L, \
3421383928L, 3116950429L, 2165882425L, 2346928266L, 2892678711L, 2936066049L, 1316407868L, \
2873411858L, 4279682888L, 2744351923L, 3290373816L, 1014377279L, 955200944L, 4220990860L, \
2386098930L, 1772997650L, 3757346974L, 1621616438L, 2877097197L, 442116595L, 2010480266L, \
2867861469L, 2955352695L, 605335967L, 2222936009L, 2067554933L, 4129906358L, 1519608541L, \
1195006590L, 1942991038L, 2736562236L, 279162408L, 1415982909L, 4099901426L, 1732201505L, \
2934657937L, 860563237L, 2479235483L, 3081651097L, 2244720867L, 3112631622L, 1636991639L, \
3860393305L, 2312061927L, 48780114L, 1149090394L, 2643246550L, 1764050647L, 3836789087L, \
3474859076L, 4237194338L, 1735191073L, 2150369208L, 92164394L, 756974036L, 2314453957L, \
323969533L, 4267621035L, 283649842L, 810004843L, 727855536L, 1757827251L, 3334960421L, \
3261035106L, 38417393L, 2660980472L, 1256633965L, 2184045390L, 811213141L, 2857482069L, \
2237770878L, 3891003138L, 2787806886L, 2435192790L, 2249324662L, 3507764896L, 995388363L, \
856944153L, 619213904L, 3233967826L, 3703465555L, 3286531781L, 3863193356L, 2992340714L, \
413696855L, 3865185632L, 1704163171L, 3043634452L, 2225424707L, 2199018022L, 3506117517L, \
3311559776L, 3374443561L, 1207829628L, 668793165L, 1822020716L, 2082656160L, 1160606415L, \
3034757648L, 741703672L, 3094328738L, 459332691L, 27
|
02383376L, 1610239915L, 4162939394L, \
557861574L, 3805706338L, 3832520705L, 1248934879L, 3250424034L, 892335058L, 74323433L, \
3209751608L, 3213220797L, 3444035873L, 3743886725L, 1783837251L, 610968664L, 580745246L, \
4041979504L, 201684874L, 2673219253L, 1377283008L, 3497299167L, 2344209394L, 2304982920L, \
3081403782L, 2599256854L, 3184475235L, 3373055826L, 695186388L, 2423332338L, 222864327L, \
1258227992L, 3627871647L, 3487724980L, 4027953
|
808L, 3053320360L, 533627073L, 3026232514L, \
2340271949L, 867277230L, 868513116L, 2158535651L, 2487822909L, 3428235761L, 3067196046L, \
3435119657L, 1908441839L, 788668797L, 3367703138L, 3317763187L, 908264443L, 2252100381L, \
764223334L, 4127108988L, 384641349L, 3377374722L, 1263833251L, 1958694944L, 3847832657L, \
1253909612L, 1096494446L, 555725445L, 2277045895L, 3340096504L, 1383318686L, 4234428127L, \
1072582179L, 94169494L, 1064509968L, 2681151917L, 2681864920L, 734708852L, 1338914021L, \
1270409500L, 1789469116L, 4191988204L, 1716329784L, 2213764829L, 3712538840L, 919910444L, \
1318414447L, 3383806712L, 3054941722L, 3378649942L, 1205735655L, 1268136494L, 2214009444L, \
2532395133L, 3232230447L, 230294038L, 342599089L, 772808141L, 4096882234L, 3146662953L, \
2784264306L, 1860954704L, 2675279609L, 2984212876L, 2466966981L, 2627986059L, 2985545332L, \
2578042598L, 1458940786L, 2944243755L, 3959506256L, 1509151382L, 325761900L, 942251521L, \
4184289782L, 2756231555L, 3297811774L, 1169708099L, 3280524138L, 3805245319L, 3227360276L, \
3199632491L, 2235795585L, 2865407118L, 36763651L, 2441503575L, 3314890374L, 1755526087L, \
17915536L, 1196948233L, 949343045L, 3815841867L, 489007833L, 2654997597L, 2834744136L, \
417688687L, 2843220846L, 85621843L, 747339336L, 2043645709L, 3520444394L, 1825470818L, \
647778910L, 275904777L, 1249389189L, 3640887431L, 4200779599L, 323384601L, 3446088641L, \
4049835786L, 1718989062L, 3563787136L, 44099190L, 3281263107L, 22910812L, 1826109246L, \
745118154L, 3392171319L, 1571490704L, 354891067L, 815955642L, 1453450421L, 940015623L, \
796817754L, 1260148619L, 3898237757L, 176670141L, 1870249326L, 3317738680L, 448918002L, \
4059166594L, 2003827551L, 987091377L, 224855998L, 3520570137L, 789522610L, 2604445123L, \
454472869L, 475688926L, 2990723466L, 523362238L, 3897608102L, 806637149L, 2642229586L, \
2928614432L, 1564415411L, 1691381054L, 3816907227L, 4082581003L, 1895544448L, 3728217394L, \
3214813157L, 4054301607L, 1882632454L, 2873728645L, 3694943071L, 1297991732L, 2101682438L, \
3952579552L, 678650400L, 1391722293L, 478833748L, 2976468591L, 158586606L, 2576499787L, \
662690848L, 3799889765L, 3328894692L, 2474578497L, 2383901391L, 1718193504L, 3003184595L, \
3630561213L, 1929441113L, 3848238627L, 1594310094L, 3040359840L, 3051803867L, 2462788790L, \
954409915L, 802581771L, 681703307L, 545982392L, 2738993819L, 8025358L, 2827719383L, \
770471093L, 3484895980L, 3111306320L, 3900000891L, 2116916652L, 397746721L, 2087689510L, \
721433935L, 1396088885L, 2751612384L, 1998988613L, 2135074843L, 2521131298L, 707009172L, \
2398321482L, 688041159L, 2264560137L, 482388305L, 207864885L, 3735036991L, 3490348331L, \
1963642811L, 3260224305L, 3493564223L, 1939428454L, 1128799656L, 1366012432L, 2858822447L, \
1428147157L, 2261125391L, 1611208390L, 1134826333L, 2374102525L, 3833625209L, 2266397263L, \
3189115077L, 770080230L, 2674657172L, 4280146640L, 3604531615L, 4235071805L, 3436987249L, \
509704467L, 2582695198L, 4256268040L, 3391197562L, 1460642842L, 1617931012L, 457825497L, \
1031452907L, 1330422862L, 4125947620L, 2280712485L, 431892090L, 2387410588L, 2061126784L, \
896457479L, 3480499461L, 2488196663L, 4021103792L, 1877063114L, 2744470201L, 1046140599L, \
2129952955L, 3583049218L, 4217723693L, 2720341743L, 820661843L, 1079873609L, 3360954200L, \
3652304997L, 3335838575L, 2178810636L, 1908053374L, 4026721976L, 1793145418L, 476541615L, \
9734
|
inovtec-solutions/OpenERP
|
openerp/addons/hr_attendance/__openerp__.py
|
Python
|
agpl-3.0
| 2,163
| 0.001849
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Attendances',
'version': '1.1',
'category': 'Human Resources',
'description': """
This module aims to manage employees' attendances.
==================================================
Keeps account of the attendances of the employees on the basis of the
actions (Sign in/Sign out) performed by them.
""",
'author': 'OpenERP SA',
'images': ['images/hr_attendances.jpeg'],
'depends': ['hr'],
'data': [
'security/ir_rule.xml',
'security/ir.model.access.csv',
'hr_attendance_view.xml',
'hr_attendance_report.xml',
'wizard/hr_attendance_bymonth_view.xml',
'wizard/hr_attendance_byweek_view.xml',
'wizard/hr_attendance_error_view.xml',
'res_confi
|
g_view.xml',
],
'demo': ['hr_attendance_demo.xml'],
'test': [
'test/attendance_process.yml',
'test/hr_attendance_report.yml',
],
'installable': True,
'auto_install': False,
#web
"js": ["static/src/js/attendance.js"],
'qweb' : ["static/src/xml/attendance.xml"],
'css' : ["static/src/css/slider.css"],
}
# vim:expandtab:smartin
|
dent:tabstop=4:softtabstop=4:shiftwidth=4:
|
theetcher/fxpt
|
fxpt/fx_refsystem/replace_with_ref_dialog_ui2.py
|
Python
|
mit
| 4,622
| 0.004976
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'replace_with_ref_dialog_ui.ui'
#
# Created: Fri Nov 18 22:58:33 2016
# by: pyside2-uic running on PySide2 2.0.0~alpha0
#
# WARNING! All changes made in this file will be lost!
from PySide2 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(520, 174)
self.verticalLayout = QtWidgets.QVBoxLayout(Dialog)
self.verticalLayout.setContentsMargins(6, 6, 6, 6)
self.verticalLayout.setObjectName("verticalLayout")
self.groupBox = QtWidgets.QGroupBox(Dialog)
self.groupBox.setTitle("")
self.groupBox.setObjectName("groupBox")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.groupBox)
self.horizontalLayout.setObjectName("horizontalLayout")
self.uiLBL_text = QtWidgets.QLabel(self.groupBox)
self.uiLBL_text.setTextFormat(QtCore.Qt.RichText)
self.uiLBL_text.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.uiLBL_text.setWordWrap(True)
self.uiLBL_text.setObjectName("uiLBL_text")
self.horizontalLayout.addWidget(self.uiLBL_text)
self.verticalLayout.addWidget(self.groupBox)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem)
self.uiBTN_saveReplace = QtWidgets.QPushButton(Dialog)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.uiBTN_saveReplace.sizePolicy().hasHeightForWidth())
self.uiBTN_saveReplace.setSizePolicy(sizePolicy)
self.uiBTN_saveReplace.setObjectName("uiBTN_saveReplace")
self.horizontalLayout_2.addWidget(self.uiBTN_saveReplace)
self.uiBTN_replace = QtWidgets.QPushButton(Dialog)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.uiBTN_replace.sizePolicy().hasHeightForWidth())
self.uiBTN_replace.setSizePolicy(sizePolicy)
self.uiBTN_replace.setObjectName("uiBTN_replace")
self.horizontalLayout_2.addWidget(self.uiBTN_replace)
self.uiBTN_cancel = QtWidgets.QPushButton(Dialog)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.uiBTN_cancel.sizePolicy().hasHeightForWidth())
self.uiBTN_cancel.setSizePolicy(sizePolicy)
self.uiBTN_cancel.setObjectName("uiBTN_cancel")
self.horizontalLayout_2.addWidget(self.uiBTN_cancel)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem1)
self.horizontalLayout_2.setStretch(1, 1)
self.horizontalLayout_2.setStretch(2, 1)
self.horizontalLayout_2.setStretch(3, 1)
self.vert
|
icalLayout.addLayout(self.horizontalLayout_2)
self.verticalLayout.setStretch(0, 1)
self.retranslateUi(Dialog)
QtCore.QObject.connect(self.uiBTN_saveReplace, QtCore.SIGNAL("clicked()"), Dialog.onSaveReplaceClicked)
QtCore.QObject.connect(self.uiBT
|
N_replace, QtCore.SIGNAL("clicked()"), Dialog.onReplaceClicked)
QtCore.QObject.connect(self.uiBTN_cancel, QtCore.SIGNAL("clicked()"), Dialog.onCancelClicked)
QtCore.QObject.connect(Dialog, QtCore.SIGNAL("finished(int)"), Dialog.onDialogFinished)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(QtWidgets.QApplication.translate("Dialog", "Replace With Reference", None, -1))
self.uiLBL_text.setText(QtWidgets.QApplication.translate("Dialog", "Text", None, -1))
self.uiBTN_saveReplace.setText(QtWidgets.QApplication.translate("Dialog", "Save and Replace", None, -1))
self.uiBTN_replace.setText(QtWidgets.QApplication.translate("Dialog", "Replace", None, -1))
self.uiBTN_cancel.setText(QtWidgets.QApplication.translate("Dialog", "Cancel", None, -1))
|
fluentstream/asterisk-p2p
|
res/pjproject/tests/pjsua/scripts-pesq/201_codec_l16_16000.py
|
Python
|
gpl-2.0
| 537
| 0.01676
|
# $Id: 201_codec_l16_16000.py 369517 2012-07-01 17:28:57Z file $
#
from inc_cfg im
|
port *
# Call with L16/16000/1 codec
test_param = TestParam(
"PESQ codec L16/16000/1 (RX side uses snd dev)",
[
InstanceParam("UA1", "--max-calls=1 --add-codec L16/16000/1 --clock-rate 16000 --play-file wavs/input.16.wav --null-audio"),
InstanceParam("UA2", "--max-calls=1 --add-
|
codec L16/16000/1 --clock-rate 16000 --rec-file wavs/tmp.16.wav --auto-answer 200")
]
)
if (HAS_SND_DEV == 0):
test_param.skip = True
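# PESQ yields a MOS-like score (roughly 1.0 = bad to 4.5 = transparent);
# the threshold below is presumably the minimum score for the test to pass.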
pesq_threshold = 3.5
|
diplomacy/research
|
diplomacy_research/scripts/dataset/dataset_010_redis.py
|
Python
|
mit
| 3,931
| 0.00407
|
# ==============================================================================
# Copyright 2019 - Philip Paquette
#
# NOTICE: Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# ==============================================================================
""" Redis dataset
- Populates the Redis server with the supervised games
- Saves the Redis database on disk for faster boot time.
"""
import logging
import os
import pickle
import shutil
from threading import Thread
from tqdm import tqdm
from diplomacy_research.models.training.memory_buffer import MemoryBuffer
from diplomacy_research.models.training.memory_buffer.expert_games import save_expert_games
from diplomacy_research.proto.diplomacy_proto.game_pb2 import SavedGame as SavedGameProto
from diplomacy_research.utils.process import start_redis
from diplomacy_research.utils.proto import bytes_to_zlib, bytes_to_proto, read_next_bytes
from diplomacy_research.settings import PROTO_DATASET_PATH, REDIS_DATASET_PATH, WORKING_DIR, \
PHASES_COUNT_DATASET_PATH, IN_PRODUCTION
# Constants
LOGGER = logging.getLogger(__name__)
def run(**kwargs):
""" Run the script - Determines if we need to build the dataset or not. """
del kwargs # Unused args
if os.path.exists(REDIS_DATASET_PATH):
LOGGER.info('... Dataset already exists. Skipping.')
else:
build()
def build():
""" Building the Redis dataset """
if not os.path.exists(PROTO_DATASET_PATH):
raise RuntimeError('Unable to find the proto dataset at %s' % PROTO_DATASET_PATH)
# Creating output directory if it doesn't exist
os.makedirs(os.path.join(WORKING_DIR, 'containers', 'redis'), exist_ok=True)
# Starting the Redis server and blocking on that thread
redis_thread = Thread(target=start_redis, kwargs={'save_dir': os.path.join(WORKING_DIR, 'containers'),
'log_file_path': os.devnull,
'clear': True})
redis_thread.start()
# Creating a memory buffer object to save games in Redis
memory_buffer = MemoryBuffer()
memory_buffer.clear()
# Loading the phases count dataset to get the number of games
|
total = None
if os.path.exists(PHASES_COUNT_DATASET_PATH):
with open(PHASES_COUNT_DATASET_PATH, 'rb') as file:
total = len(pickle.load(file))
progress_bar = tqdm(total=total)
# Loading dataset and converting
LOGGER.info('... Creating redis dataset.')
with open(PROTO_DATASET_PATH, 'rb
|
') as file:
while True:
saved_game_bytes = read_next_bytes(file)
if saved_game_bytes is None:
break
progress_bar.update(1)
saved_game_proto = bytes_to_proto(saved_game_bytes, SavedGameProto)
save_expert_games(memory_buffer, [bytes_to_zlib(saved_game_bytes)], [saved_game_proto.id])
# Saving
memory_buffer.save(sync=True)
# Moving file
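    # (dict-dispatch on the IN_PRODUCTION flag: the True branch is the fixed
    # path inside the container, the False branch the local working dir)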
redis_db_path = {True: '/work_dir/redis/saved_redis.rdb',
False: os.path.join(WORKING_DIR, 'containers', 'redis', 'saved_redis.rdb')}.get(IN_PRODUCTION)
shutil.move(redis_db_path, REDIS_DATASET_PATH)
LOGGER.info('... Done creating redis dataset.')
# Stopping Redis and thread
progress_bar.close()
memory_buffer.shutdown()
redis_thread.join(timeout=60)
|
kxgames/kxg
|
demos/guess_my_number.py
|
Python
|
mit
| 5,672
| 0.001587
|
#!/usr/bin/env python3
# vim: tw=76
import kxg
import random
import pyglet
LOWER_BOUND, UPPER_BOUND = 0, 5000
class World(kxg.World):
"""
Keep track of the secret number, the range of numbers that haven't been
eliminated yet, and the winner (if there is one).
"""
def __init__(self):
super().__init__()
self.number = 0
self.lower_bound = 0
self.upper_bound = 0
self.winner = 0
class Referee(kxg.Referee):
"""
Pick the secret number.
"""
def on_start_game(self, num_players):
number = random.randint(LOWER_BOUND + 1, UPPER_BOUND - 1)
self >> PickNumber(number, LOWER_BOUND, UPPER_BOUND)
class PickNumber(kxg.Message):
"""
Pick the secret number and communicate that choice to all the clients.
"""
def __init__(self, number, lower_bound, upper_bound):
self.number = number
self.lower_bound = lower_bound
self.upper_bound = upper_bound
def on_check(self, world):
if world.number:
raise kxg.MessageCheck("number already picked")
def on_execute(self, world):
world.number = self.number
world.lower_bound = self.lower_bound
world.upper_bound = self.upper_bound
class GuessNumber(kxg.Message):
"""
Make a guess on behalf of the given player. If the guess is
right, that player wins the game. If the guess is wrong, the
range of numbers that the secret number could be is narrowed
accordingly.
"""
def __init__(self, player, guess):
self.player = player
self.guess = guess
def on_check(self, world):
pass
def on_execute(self, world):
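        # A correct guess ends the game; a wrong guess tightens one bound.
        # E.g. with number=42, guessing 30 raises lower_bound to 30, while
        # guessing 50 lowers upper_bound to 50.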
if self.guess == world.number:
world.winner = self.player
world.end_game()
elif self.guess < world.number:
world.lower_bound = max(self.guess, world.lower_bound)
elif self.guess > world.number:
world.upper_bound = min(self.guess, world.upper_bound)
class Gui:
"""
Manage GUI objects like the window, which exist before and after the game
itself.
"""
def __init__(self):
self.width, self.height = 600, 400
self.window = pyglet.window.Window()
self.window.set_size(self.width, self.height)
self.window.set_visible(True)
self.label = pyglet.text.Label(
"",
color=(255, 255, 255, 255),
font_name='Deja Vu Sans', font_size=32,
x=self.width//2, y=self.height//2,
anchor_x='center', anchor_y='center',
)
def on_refresh_gui(self):
self.window.clear()
self.label.draw()
class GuiActor(kxg.Actor):
"""
Show the players the range of numbers that haven't been eliminated yet,
and allow the player to guess what the number is.
"""
def __init__(self):
super().__init__()
self.guess = None
self.prompt = "{0.lower_bound} < {1} < {0.upper_bound}"
def on_setup_gui(self, gui):
self.gui = gui
self.gui.window.set_handlers(self)
def on_draw(self):
self.gui.on_refresh_gui()
def on_mouse_scroll(self, x, y, dx, dy):
# If the user scrolls the mouse wheel, update the guess accordingly.
if self.guess is None:
if dy < 0:
self.guess = self.world.upper_bound
else:
self.guess = self.world.lower_bound
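        # Clamp the adjusted guess to [lower_bound, upper_bound] via a
        # median-of-three: sort the three values and keep the middle one.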
self.guess = sorted([
self.world.lower_bound,
self.guess + dy,
self.world.upper_bound,
])[1]
self.on_update_prompt()
def on_key_press(self, symbol, modifiers):
# If the user types a number, add that digit to the guess.
try:
digit = int(chr(symbol))
self.guess = 10 * (self.guess or 0) + digit
except ValueError:
pass
# If the user hits backspace, remove the last digit from the guess.
if symbol == pyglet.window.key.BACKSPACE:
if self.guess is not None:
guess_str = str(self.guess)[:-1]
self.guess = int(guess_str) if guess_str else None
# If the user hits enter, guess the current number.
if symbol == pyglet.window.key.ENTER:
if self.guess:
self >> GuessNumber(self.id, self.guess)
self.guess = None
self.on_update_prompt()
@kxg.subscribe_to_message(PickNumber)
@kxg.subscribe_to_message(GuessNumber)
def on_update_prompt(self, message=None):
guess_str = '???' if self.guess is None else str(self.guess)
self.gui.label.text = self.prompt.format(self.world, guess_str)
def on_finish_game(self):
self.gui.window.pop_handlers()
if self.world.winner == self.id:
self.gui.label.text = "You won!"
else:
self.gui.label.text = "You lost!"
class AiActor(kxg.Actor):
"""
Wait a random amount of time, then guess a random number within the
remaining range.
"""
def __init__(self):
super().__init__()
self.reset_timer()
def on_update_game(self, dt):
self.timer -= dt
if self.timer < 0:
lower_bound = self.world.lower_bound + 1
upper_bound = self.world.upper_bound - 1
gue
|
ss = random.randint(lower_bound, upper_bound)
self >> GuessNumber(self.id, gue
|
ss)
self.reset_timer()
def reset_timer(self):
self.timer = random.uniform(1, 3)
if __name__ == '__main__':
kxg.quickstart.main(World, Referee, Gui, GuiActor, AiActor)
|
Princessgladys/googleresourcefinder
|
tests/docs_test.py
|
Python
|
apache-2.0
| 2,824
| 0
|
from selenium_test_case import Seleniu
|
mTestCase
class DocsTest(SeleniumTestCase):
def test_links_between_pages(self):
self.open_path('/help')
self.assert_text_present('Frequently Asked Questions')
self.click_and_wait('link=Terms of Service')
self.assert_text_present('Terms of Service for Google Resource Finder')
self.click_and_wait('link=Privacy')
self.assert_text_present('Google Resource Finder Privacy Policy'
|
)
self.click_and_wait('link=Help')
self.assert_text_present('Frequently Asked Questions')
def test_languages(self):
# English (en)
self.open_path('/help?lang=en')
self.assert_text_present('Frequently Asked Questions')
self.click_and_wait('link=Terms of Service')
self.assert_text_present('Terms of Service for Google Resource Finder')
self.click_and_wait('link=Privacy')
self.assert_text_present('Google Resource Finder Privacy Policy')
self.click_and_wait('link=Help')
self.assert_text_present('Frequently Asked Questions')
# Spanish (es-419)
self.open_path('/help?lang=es')
self.assert_text_present('Preguntas frecuentes')
self.click_and_wait('link=Condiciones del servicio')
self.assert_text_present(
'Condiciones del servicio del Buscador de recursos de Google')
self.click_and_wait(u'link=Privacidad')
self.assert_text_present(
u'Pol\u00edtica de privacidad del Buscador de recursos de Google')
self.click_and_wait(u'link=Ayuda')
self.assert_text_present('Preguntas frecuentes')
# French (fr)
self.open_path('/help?lang=fr')
self.assert_text_present(u'Questions fr\u00e9quentes')
self.click_and_wait('link=Conditions d\'utilisation')
self.assert_text_present(
u'Conditions d\'utilisation de Google Resource Finder')
self.click_and_wait(u'link=Confidentialit\u00e9')
self.assert_text_present(
u'R\u00e8gles de confidentialit\u00e9 de Google Resource Finder')
self.click_and_wait(u'link=Aide')
self.assert_text_present(u'Questions fr\u00e9quentes')
# Kreyol (ht)
self.open_path('/help?lang=ht')
self.assert_text_present(u'Kesyon Div\u00e8s Moun Poze Tout Tan')
self.click_and_wait(u'link=Kondisyon S\u00e8vis yo')
self.assert_text_present(
u'Kondisyon S\u00e8vis pou Resource Finder Google')
self.click_and_wait(u'link=Vi prive')
self.assert_text_present(u'Politik Resp\u00e8 Pou Moun ak ' +
u'\u201cResource Finder\u201d nan Google')
self.click_and_wait(u'link=Ed')
self.assert_text_present(u'Kesyon Div\u00e8s Moun Poze Tout Tan')
|
lisitsky/one-button-ftp
|
pyftp1.py
|
Python
|
mit
| 15,763
| 0.004916
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys, time, os, hashlib, atexit
import ftplib
import traceback
from PyQt5.QtWidgets import QApplication, QWidget, QFileDialog
from PyQt5.QtWidgets import QPushButton, QHBoxLayout, QVBoxLayout, \
QScrollArea, QLineEdit, QCheckBox, QMessageBox, QMenu
from PyQt5 import QtGui
from PyQt5.QtGui import QIcon, QPalette, QLinearGradient, QColor, QBrush, QCursor
from PyQt5.QtCore import Qt, QObject, QThread, pyqtSignal, pyqtSlot, QEvent, QSettings
album_uploaders = {}
class MainWindow(QWidget):
_album_buttons = {}
def __init__(self):
super().__init__()
self.initUI()
def initUI(self):
def _start():
try:
self.start_work(name.text(), passwd.text(), remember.checkState())
except ftplib.error_perm as e:
                QMessageBox.critical(self, 'Error', 'Wrong password!', QMessageBox.Ok)
except Exception as e:
s = traceback.format_exc()
                QMessageBox.critical(self, 'Error', 'Please send this information to the developers:\n\n %s' % s,
                                     QMessageBox.Ok)
self.resize(600, 400)
        self.setWindowTitle('FTP upload to the photo bank')
self.setWindowIcon(QIcon('resources/favicon.ico'))
# set layouts
name = QLineEdit(settings.login)
name.returnPressed.connect(_start)
        name.setPlaceholderText('Photo bank login')
passwd = QLineEdit(settings.passwd)
passwd.returnPressed.connect(_start)
        passwd.setPlaceholderText('Photo bank password')
passwd.setEchoMode(QLineEdit.Password)
        remember = QCheckBox('Remember?', checked=settings.remember)
        login = QPushButton('Log in')
auth_panel = QHBoxLayout()
auth_panel.addWidget(name)
auth_panel.addWidget(passwd)
auth_panel.addWidget(remember)
auth_panel.addWidget(login)
login.clicked.connect(_start)
# login.clicked.connect(lambda: self.start_work(name.text(), passwd.text(), remember.checkState()))
btn_area = QScrollArea()
btn_area_widget = QWidget()
btn_area.setWidget(btn_area_widget)
self.__btn_area_layout = btn_area_layout = QVBoxLayout(btn_area_widget)
btn_area.setWidgetResizable(True)
central_box = QHBoxLayout()
central_box.addWidget(btn_area)
vbox = QVBoxLayout()
# vbox.addStretch()
vbox.addLayout(auth_panel)
vbox.addLayout(central_box)
self.setLayout(vbox)
self.show()
def set_ftp_credentials(self, login, passwd, remember):
# set ftp credentials
#print (login, passwd, remember)
self.__ftp_login = login
self.__ftp_passwd = passwd
self.__ftp_remember = remember
def add_album_buttons(self, albums):
# adds album buttons
layout = self.__btn_area_layout
for name in albums:
if name not in self._album_buttons:
button = AlbumButton(name, self.__ftp_login, self.__ftp_passwd)
layout.addWidget(button)
self._album_buttons[name] = button
def start_work(self, login, passwd, remember):
# start work:
# - remember credentials
# - establish connection
# - get albums
self.set_ftp_credentials(login, passwd, remember)
if remember:
save_settings(login=login, passwd=passwd, remember=remember)
self.__ftp = start_ftp(login, passwd)
albums = sort_albums(get_albums(self.__ftp))
self.add_album_buttons(albums)
def enqueueFiles_XXX(self, album_name, fileslist):
# enqueue files to specific folder uploader
worker = album_uploaders.get(album_name)
if worker is None:
# start new uploader
worker = AlbumUploader()
            worker.setName(album_name, self.__ftp_login, self.__ftp_passwd)
thread = QThread(self)
worker.moveToThread(thread)
thread.started.connect(worker.process)
worker.finished.connect(thread.quit)
worker.finished.connect(worker.deleteLater)
thread.finished.connect(thread.deleteLater)
# worker.message.connect(self.text)
thread.start()
album_uploaders[album_name] = worker
def clo
|
seEvent(self, event):
# check and exit
workers = len(album_uploaders)
if workers > 0:
            reply = QMessageBox.question(self, 'Close the program?',
                'Are you sure you want to quit?\n%s album(s) are uploading right now' % workers,
QMessageBox.
|
Yes | QMessageBox.No, QMessageBox.No)
if reply != QMessageBox.Yes:
event.ignore()
return
event.accept()
class AlbumUploader(QObject):
name = ''
finished = pyqtSignal()
message = pyqtSignal(int)
progress_message = pyqtSignal(str, float, bool)
fileslist = None
ftp = None # connection to server
i = 0
active = False
progress = 0.0
def setName(self, name, ftp_login, ftp_passwd):
self.name = name
self.ftp_login = ftp_login
self.ftp_passwd = ftp_passwd
def __str__(self):
return 'AlbumUploader @%s ftp=%s name="%s" i=%s len=%s fileslist=%s' % \
(id(self), self.ftp, self.name, self.i, len(self.fileslist), self.fileslist)
def prepareFtp(self):
self.ftp = start_ftp(self.ftp_login, self.ftp_passwd)
#print('FTP conn: %s', self.ftp)
cwd = '/' + self.name
self.ftp.cwd(cwd)
self.ftp.set_pasv(True)
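        # 'TYPE I' switches the session to binary (image) transfer mode, so
        # uploads are not mangled by ASCII line-ending translation.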
self.ftp.sendcmd('TYPE I')
self.ftp.set_debuglevel(2)
def uploadFile(self, f):
# upload file to server
#print('Uploading file "%s"' % f)
fil = open(f, 'rb')
size_local = os.path.getsize(f)
basename = os.path.basename(f)
self.ftp.storbinary('STOR '+basename, fil)
size_remote = self.ftp.size(basename)
        fil.seek(0)  # rewind: storbinary left the file pointer at EOF
        md5 = hashlib.md5(fil.read()).hexdigest()
#print('Uploaded file %s md5=%s size_local=%s size_remote=%s' % (f, md5, size_local, size_remote))
fil.close()
if size_remote != size_local:
raise Exception("Sizes don't match!")
def getProgress(self):
# return current progress percent
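        # e.g. startlen=10 with 4 files still queued -> (10-4)/10 = 0.6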
if self.startlen == 0:
return 0.0
return float(self.startlen-len(self.fileslist))/self.startlen
def updateButton(self):
# update album button style:
# progressbar
# show activity
# percent = float(self.startlen-len(self.fileslist))/self.startlen
self.progress_message.emit(self.name, self.getProgress(), self.active)
@pyqtSlot()
def process(self):
#print('START %s', self)
#print('fileslist: %s' % self.fileslist)
self.prepareFtp()
self.i = 0
self.startlen = len(self.fileslist)
self.active = True
self.updateButton()
while True:
# get first file name
#print("New load cycle by %s" % self)
try:
f = self.fileslist.pop(0)
self.uploadFile(f)
#print('sleep', self.i, len(self.fileslist), f)
self.message.emit(self.i)
self.updateButton()
# time.sleep(2.0)
# 1/(1-1)
except IndexError as err:
#print('upload fileslist is empty. \nGot error: %s\n' % err)
break
except Exception as err:
#print('Fatal!!!! \nWhile uploading file "%s" got error: \n%s' % (f, err))
traceback.print_exc(file=sys.stdout)
self.fileslist.append(f)
time.sleep(2.0)
self.prepareFtp()
#print('FINISHED')
#print('These file(s) were not uploaded: %s' % self.fileslist)
        self.active = False
self.updateButton()
self.finished.emit()
self.fileslist = []
self.startlen = len(sel
|
hashamali/pyScss
|
scss/compiler.py
|
Python
|
mit
| 59,822
| 0.000953
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from collections import defaultdict
from enum import Enum
import logging
from pathlib import Path
import re
import sys
import warnings
try:
from collections import OrderedDict
except ImportError:
# Backport
from ordereddict import OrderedDict
import six
from scss.calculator import Calculator
from scss.cssdefs import _spaces_re
from scss.cssdefs import _escape_chars_re
from scss.cssdefs import _prop_split_re
from scss.errors import SassError
from scss.errors import SassBaseError
from scss.errors import SassImportError
from scss.extension import Extension
from scss.extension.core import CoreExtension
from scss.extension import NamespaceAdapterExtension
from scss.grammar import locate_blocks
from scss.rule import BlockAtRuleHeader
from scss.rule import Namespace
from scss.rule import RuleAncestry
from scss.rule import SassRule
from scss.rule import UnparsedBl
|
ock
from scss.selector import Selector
from scss.source import SourceFile
from scss.types import Arglist
from scss.types import List
from scss.types import Null
from scss.types import Number
from scss.types import String
from scss.types import Undefined
from scss.types import Url
from scss.util import normalize_var # TODO put in... namespace maybe?
# TODO should mention logging for the programmatic interface in the
# documen
|
tation
# TODO or have a little helper (or compiler setting) to turn it on
log = logging.getLogger(__name__)
_xcss_extends_re = re.compile(r'\s+extends\s+')
class OutputStyle(Enum):
nested = ()
compact = ()
compressed = ()
expanded = ()
legacy = () # ???
class SassDeprecationWarning(UserWarning):
# Note: DO NOT inherit from DeprecationWarning; it's turned off by default
# in 2.7 and later!
pass
def warn_deprecated(rule, message):
warnings.warn(
"{0} (at {1})".format(message, rule.file_and_line),
SassDeprecationWarning,
stacklevel=2,
)
class Compiler(object):
"""A Sass compiler. Stores settings and knows how to fire off a
compilation. Main entry point into compiling Sass.
"""
def __init__(
self, root=Path(), search_path=(),
namespace=None, extensions=(CoreExtension,),
import_static_css=False,
output_style='nested', generate_source_map=False,
live_errors=False, warn_unused_imports=False,
ignore_parse_errors=False,
loops_have_own_scopes=True,
undefined_variables_fatal=True,
super_selector='',
):
"""Configure a compiler.
:param root: Directory to treat as the "project root". Search paths
and some custom extensions (e.g. Compass) are relative to this
directory. Defaults to the current directory.
:type root: :class:`pathlib.Path`
:param search_path: List of paths to search for ``@import``s, relative
to ``root``. Absolute and parent paths are allowed here, but
``@import`` will refuse to load files that aren't in one of the
directories here. Defaults to only the root.
:type search_path: list of strings, :class:`pathlib.Path` objects, or
something that implements a similar interface (useful for custom
pseudo filesystems)
"""
# TODO perhaps polite to automatically cast any string paths to Path?
# but have to be careful since the api explicitly allows dummy objects.
if root is None:
self.root = None
else:
self.root = root.resolve()
self.search_path = tuple(
self.normalize_path(path)
for path in search_path
)
self.extensions = []
if namespace is not None:
self.extensions.append(NamespaceAdapterExtension(namespace))
for extension in extensions:
if isinstance(extension, Extension):
self.extensions.append(extension)
elif (isinstance(extension, type) and
issubclass(extension, Extension)):
self.extensions.append(extension())
elif isinstance(extension, Namespace):
self.extensions.append(
NamespaceAdapterExtension(extension))
else:
raise TypeError(
"Expected an Extension or Namespace, got: {0!r}"
.format(extension)
)
if import_static_css:
self.dynamic_extensions = ('.scss', '.sass', '.css')
self.static_extensions = ()
else:
self.dynamic_extensions = ('.scss', '.sass')
self.static_extensions = ('.css',)
self.output_style = output_style
self.generate_source_map = generate_source_map
self.live_errors = live_errors
self.warn_unused_imports = warn_unused_imports
self.ignore_parse_errors = ignore_parse_errors
self.loops_have_own_scopes = loops_have_own_scopes
self.undefined_variables_fatal = undefined_variables_fatal
self.super_selector = super_selector
def normalize_path(self, path):
if isinstance(path, six.string_types):
path = Path(path)
if path.is_absolute():
return path
if self.root is None:
raise IOError("Can't make absolute path when root is None")
return self.root / path
def make_compilation(self):
return Compilation(self)
def call_and_catch_errors(self, f, *args, **kwargs):
"""Call the given function with the given arguments. If it succeeds,
return its return value. If it raises a :class:`scss.errors.SassError`
and `live_errors` is turned on, return CSS containing a traceback and
error message.
"""
try:
return f(*args, **kwargs)
except SassError as e:
if self.live_errors:
# TODO should this setting also capture and display warnings?
return e.to_css()
else:
raise
def compile(self, *filenames):
# TODO this doesn't spit out the compilation itself, so if you want to
# get something out besides just the output, you have to copy this
# method. that sucks.
# TODO i think the right thing is to get all the constructors out of
# SourceFile, since it's really the compiler that knows the import
# paths and should be consulted about this. reconsider all this (but
# preserve it for now, SIGH) once importers are a thing
compilation = self.make_compilation()
for filename in filenames:
# TODO maybe SourceFile should not be exposed to the end user, and
# instead Compilation should have methods for add_string etc. that
# can call normalize_path.
# TODO it's not possible to inject custom files into the
# /compiler/ as persistent across compiles, nor to provide "fake"
# imports. do we want the former? is the latter better suited to
# an extension?
source = SourceFile.from_filename(self.normalize_path(filename))
compilation.add_source(source)
return self.call_and_catch_errors(compilation.run)
def compile_sources(self, *sources):
# TODO this api is not the best please don't use it. this all needs to
# be vastly simplified, still, somehow.
compilation = self.make_compilation()
for source in sources:
compilation.add_source(source)
return self.call_and_catch_errors(compilation.run)
def compile_string(self, string):
source = SourceFile.from_string(string)
compilation = self.make_compilation()
compilation.add_source(source)
return self.call_and_catch_errors(compilation.run)
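# Minimal usage sketch (editor's addition; assumes only the API defined above
# and is not taken from the original file):
#     compiler = Compiler(search_path=('scss',))
#     css = compiler.compile_string('$c: red; a { color: $c; }')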
def compile_file(filename, compiler_class=Compiler, **kwargs):
"""Compile a single file (provided as a :class:`pathlib.Path`), and re
|
napalm-automation/napalm-yang
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs_/adjacency_sid/sid/state/__init__.py
|
Python
|
apache-2.0
| 30,567
| 0.001374
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-isn/neighbors/neighbor/subTLVs/subTLVs/adjacency-sid/sid/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: State parameters of Adjacency-SID.
"""
__slots__ = ("_path_helper", "_extmethods", "__value", "__flags", "__weight")
_yang_name = "state"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_he
|
lper = False
self._extmethods = False
self.__value = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
|
yang_name="value",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint32",
is_config=False,
)
self.__flags = YANGDynClass(
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"ADDRESS_FAMILY": {},
"BACKUP": {},
"VALUE": {},
"LOCAL": {},
"SET": {},
},
)
),
is_leaf=False,
yang_name="flags",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="enumeration",
is_config=False,
)
self.__weight = YANGDynClass(
base=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
),
is_leaf=True,
yang_name="weight",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint8",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"link-state-database",
"lsp",
"tlvs",
"tlv",
"mt-isn",
"neighbors",
"neighbor",
"subTLVs",
"subTLVs",
"adjacency-sid",
"sid",
"state",
]
def _get_value(self):
"""
Getter method for value, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs/adjacency_sid/sid/state/value (uint32)
YANG Description: Adjacency-SID value.
"""
return self.__value
def _set_value(self, v, load=False):
"""
Setter method for value, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs/adjacency_sid/sid/state/value (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_value is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_value() directly.
YANG Description: Adjacency-SID value.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="value",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """value must be of a type compatible with uint32""",
"defined-type": "uint32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="value", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)""",
}
)
self.__value = t
if hasattr(self, "_set"):
self._set()
def _unset_value(self):
self.__value = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="value",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance
|
auready/django
|
tests/admin_docs/test_views.py
|
Python
|
bsd-3-clause
| 14,791
| 0.003042
|
import sys
import unittest
from django.conf import settings
from django.contrib.admindocs import utils, views
from django.contrib.admindocs.views import get_return_data_type, simplify_regex
from django.contrib.sites.models import Site
from django.db import models
from django.db.models import fields
from django.test import SimpleTestCase, modify_settings, override_settings
from django.test.utils import captured_stderr
from django.urls import reverse
from .models import Company, Person
from .tests import AdminDocsTestCase, TestDataMixin
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class AdminDocViewTests(TestDataMixin, AdminDocsTestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_index(self):
response = self.client.get(reverse('django-admindocs-docroot'))
self.assertContains(response, '<h1>Documentation</h1>', html=True)
self.assertContains(response, '<h1 id="site-name"><a href="/admin/">Django administration</a></h1>')
self.client.logout()
response = self.client.get(reverse('django-admindocs-docroot'), follow=True)
# Should display the login screen
self.assertContains(response, '<input type="hidden" name="next" value="/admindocs/" />', html=True)
def test_bookmarklets(self):
response = self.client.get(reverse('django-admindocs-bookmarklets'))
self.assertContains(response, '/admindocs/views/')
def test_templatetag_index(self):
response = self.client.get(reverse('django-admindocs-tags'))
self.assertContains(response, '<h3 id="built_in-extends">extends</h3>', html=True)
def test_templatefilter_index(self):
response = self.client.get(reverse('django-admindocs-filters'))
self.assertContains(response, '<h3 id="built_in-first">first</h3>', html=True)
def test_view_index(self):
response = self.client.get(reverse('django-admindocs-views-index'))
self.assertContains(
response,
'<h3><a href="/admindocs/views/django.contrib.admindocs.views.BaseAdminDocsView/">/admindocs/</a></h3>',
html=True
)
self.assertContains(response, 'Views by namespace test')
self.assertContains(response, 'Name: <code>test:func</code>.')
def test_view_index_with_method(self):
"""
Views that are methods are listed correctly.
"""
response = self.client.get(reverse('django-admindocs-views-index'))
self.assertContains(
response,
'<h3><a href="/admindocs/views/django.contrib.admin.sites.AdminSite.index/">/admin/</a></h3>',
html=True
)
def test_view_detail(self):
url = reverse('django-admindocs-views-detail', args=['django.contrib.admindocs.views.BaseAdminDocsView'])
response = self.client.get(url)
# View docstring
self.assertContains(response, 'Base view for admindocs views.')
@override_settings(ROOT_URLCONF='admin_docs.namespace_urls')
def test_namespaced_view_detail(self):
url = reverse('django-admindocs-views-detail', args=['admin_docs.views.XViewClass'])
response = self.client.get(url)
self.assertContains(response, '<h1>admin_docs.views.XViewClass</h1>')
def test_view_detail_illegal_import(self):
url = reverse('django-admindocs-views-detail', args=['urlpatterns_reverse.nonimported_module.view'])
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules)
def test_view_detail_as_method(self):
"""
Views that are methods can be displayed.
"""
url = reverse('django-admindocs-views-detail', args=['django.contrib.admin.sites.AdminSite.index'])
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_model_index(self):
response = self.client.get(reverse('django-admindocs-models-index'))
self.assertContains(
response,
'<h2 id="app-auth">Authentication and Authorization (django.contrib.auth)</h2>',
html=True
)
def test_template_detail(self):
response = self.client.get(reverse('django-admindocs-templates', args=['admin_doc/template_detail.html']))
self.assertContains(response, '<h1>Template: "admin_doc/template_detail.html"</h1>', html=True)
def test_missing_docutils(self):
utils.docutils_is_available = False
try:
response = self.client.get(reverse('django-admindocs-docroot'))
self.assertContains(
response,
'<h3>The admin documentation system requires Python\'s '
'<a href="http://docutils.sf.net/">docutils</a> library.</h3>',
html=True
)
self.assertContains(response, '<h1 id="site-name"><a href="/admin/">Django administration</a></h1>')
finally:
utils.docutils_is_available = True
@modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'})
@override_settings(SITE_ID=None) # will restore SITE_ID after the test
def test_no_sites_framework(self):
"""
Without the sites framework, should not access SITE_ID o
|
r Site
objects. Deleting settings is fine here as UserSettingsHolder is used.
"""
Site.objects.all().delete()
del settings.SITE_ID
response = self.client.get(reverse('django-admindocs-views-index'))
self.assertContains(response, 'View documentation')
@override_settings(TEMPLATES=[{
'NAME': 'ONE',
'B
|
ACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
}, {
'NAME': 'TWO',
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
}])
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class AdminDocViewWithMultipleEngines(AdminDocViewTests):
def test_templatefilter_index(self):
# Overridden because non-trivial TEMPLATES settings aren't supported
# but the page shouldn't crash (#24125).
response = self.client.get(reverse('django-admindocs-filters'))
self.assertContains(response, '<title>Template filters</title>', html=True)
def test_templatetag_index(self):
# Overridden because non-trivial TEMPLATES settings aren't supported
# but the page shouldn't crash (#24125).
response = self.client.get(reverse('django-admindocs-tags'))
self.assertContains(response, '<title>Template tags</title>', html=True)
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class TestModelDetailView(TestDataMixin, AdminDocsTestCase):
def setUp(self):
self.client.force_login(self.superuser)
with captured_stderr() as self.docutils_stderr:
self.response = self.client.get(reverse('django-admindocs-models-detail', args=['admin_docs', 'Person']))
def test_method_excludes(self):
"""
Methods that begin with strings defined in
``django.contrib.admindocs.views.MODEL_METHODS_EXCLUDE``
shouldn't be displayed in the admin docs.
"""
self.assertContains(self.response, "<td>get_full_name</td>")
self.assertNotContains(self.response, "<td>_get_full_name</td>")
self.assertNotContains(self.response, "<td>add_image</td>")
self.assertNotContains(self.response, "<td>delete_image</td>")
self.assertNotContains(self.response, "<td>set_status</td>")
self.assertNotContains(self.response, "<td>save_changes</td>")
def test_methods_with_arguments(self):
"""
        Methods that take arguments should also be displayed.
"""
self.assertContains(self.response, "<h3>Methods with arguments</h3>")
self.assertContains(self.response, "<td>rename_company</td>")
self.assertContains(self.response, "<td>dummy_function</td>")
self.assertContains(self.response, "<td>suffix_company_name</td>")
def test_methods_with_arguments_display_
|
kreatorkodi/repository.torrentbr
|
plugin.video.yatp/site-packages/hachoir_parser/archive/rar.py
|
Python
|
gpl-2.0
| 14,384
| 0.005284
|
"""
RAR parser
Status: can only read higher-level structures
Author: Christophe Gisquet
"""
from hachoir_parser import Parser
from hachoir_core.field import (StaticFieldSet, FieldSet,
Bit, Bits, Enum,
UInt8, UInt16, UInt32, UInt64,
String, TimeDateMSDOS32,
NullBytes, NullBits, RawBytes)
from hachoir_core.text_handler import textHandler, filesizeHandler, hexadecimal
from hachoir_core.endian import LITTLE_ENDIAN
from hachoir_parser.common.msdos import MSDOSFileAttr32
from datetime import timedelta
MAX_FILESIZE = 1000 * 1024 * 1024
BLOCK_NAME = {
0x72: "Marker",
0x73: "Archive",
0x74: "File",
0x75: "Comment",
0x76: "Extra info",
0x77: "Subblock",
0x78: "Recovery record",
0x79: "Archive authenticity",
0x7A: "New-format subblock",
0x7B: "Archive end",
}
COMPRESSION_NAME = {
0x30: "Storing",
0x31: "Fastest compression",
0x32: "Fast compression",
0x33: "Normal compression",
0x34: "Good compression",
0x35: "Best compression"
}
OS_MSDOS = 0
OS_WIN32 = 2
OS_NAME = {
0: "MS DOS",
1: "OS/2",
2: "Win32",
3: "Unix",
}
DICTIONARY_SIZE = {
0: "Dictionary size 64 Kb",
1: "Dictionary size 128 Kb",
2: "Dictionary size 256 Kb",
3: "Dictionary size 512 Kb",
4: "Dictionary size 1024 Kb",
7: "File is a directory",
}
def formatRARVersion(field):
"""
Decodes the RAR version stored on 1 byte
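    e.g. a stored value of 29 decodes to "2.9" (divmod(29, 10) == (2, 9))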
"""
return "%u.%u" % divmod(field.value, 10)
def markerFlags(s):
yield UInt16(s, "flags", "Marker flags, always 0x1a21")
commonFlags = (
(Bit, "is_ignorable", "Old versions of RAR should ignore this block when copying data"),
(Bit, "has_added_size", "Additional field indicating additional size"),
)
class ArchiveFlags(StaticFieldSet):
format = (
(Bit, "vol", "Archive volume"),
(Bit, "has_comment", "Whether there is a comment"),
(Bit, "is_locked", "Archive volume"),
(Bit, "is_solid", "Whether files can be extracted separately"),
(Bit, "new_numbering", "New numbering, or compressed comment"), # From unrar
(Bit, "has_authenticity_information", "The integrity/authenticity of the archive can be checked"),
(Bit, "is_protected", "The integrity/authenticity of the archive can be checked"),
(Bit, "is_passworded", "Needs a password to be decrypted"),
(Bit, "is_first_vol", "Whether it is the first volume"),
(Bit, "is_encrypted", "Whether the encryption version is present"),
(NullBits, "internal", 4, "Reserved for 'internal use'"),
) + commonFlags
def archiveFlags(s):
yield ArchiveFlags(s, "flags", "Archiver block flags")
def archiveHeader(s):
yield NullBytes(s, "reserved[]", 2, "Reserved word")
yield NullBytes(s, "reserved[]", 4, "Reserved dword")
def commentHeader(s):
yield filesizeHandler(UInt16(s, "total_size", "Comment header size + comment size"))
yield filesizeHandler(UInt16(s, "uncompressed_size", "Uncompressed comment size"))
yield UInt8(s, "required_version", "RAR version needed to extract comment")
yield UInt8(s, "packing_method", "Comment packing method")
yield UInt16(s, "comment_crc16", "Comment CRC")
def commentBody(s):
size = s["total_size"].value - s.current_size
if size > 0:
yield RawBytes(s, "comment_data", size, "Compressed comment data")
def signatureHeader(s):
yield TimeDateMSDOS32(s, "creation_time")
yield filesizeHandler(UInt16(s, "arc_name_size"))
yield filesizeHandler(UInt16(s, "user_name_size"))
def recoveryHeader(s):
yield filesizeHandler(UInt32(s, "total_size"))
yield textHandler(UInt8(s, "version"), hexadecimal)
yield UInt16(s, "rec_sectors")
yield UInt32(s, "total_blocks")
yield RawBytes(s, "mark", 8)
def avInfoHeader(s):
yield filesizeHandler(UInt16(s, "total_size", "Total block size"))
yield UInt8(s, "version", "Version needed to decompress", handler=hexadecimal)
yield UInt8(s, "method", "Compression method", handler=hexadecimal)
yield UInt8(s, "av_version", "Version for AV", handler=hexadecimal)
yield UInt32(s, "av_crc", "AV info CRC32", handler=hexadecimal)
def avInfoBody(s):
size = s["total_size"].value - s.current_size
if size > 0:
yield RawBytes(s, "av_info_data", size, "AV info")
class FileFlags(FieldSet):
static_size = 16
def createFields(self):
yield Bit(self, "continued_from", "File continued from previous volume")
yield Bit(self, "continued_in", "File continued in next volume")
yield Bit(self, "is_encrypted", "File encrypted with password")
yield Bit(self, "has_comment", "File comment present")
yield Bit(self, "is_solid", "Information from previous files is
|
used (solid flag)"
|
)
        # The following three fields are what keep this from being a StaticFieldSet
yield Enum(Bits(self, "dictionary_size", 3, "Dictionary size"), DICTIONARY_SIZE)
yield Bit(self, "is_large", "file64 operations needed")
yield Bit(self, "is_unicode", "Filename also encoded using Unicode")
yield Bit(self, "has_salt", "Has salt for encryption")
yield Bit(self, "uses_file_version", "File versioning is used")
yield Bit(self, "has_ext_time", "Extra time info present")
yield Bit(self, "has_ext_flags", "Extra flag ??")
for field in commonFlags:
yield field[0](self, *field[1:])
def fileFlags(s):
yield FileFlags(s, "flags", "File block flags")
class ExtTimeFlags(FieldSet):
static_size = 16
def createFields(self):
for name in ['arctime', 'atime', 'ctime', 'mtime']:
yield Bits(self, "%s_count" % name, 2, "Number of %s bytes" % name)
yield Bit(self, "%s_onesec" % name, "Add one second to the timestamp?")
yield Bit(self, "%s_present" % name, "Is %s extra time present?" % name)
class ExtTime(FieldSet):
def createFields(self):
yield ExtTimeFlags(self, "time_flags")
for name in ['mtime', 'ctime', 'atime', 'arctime']:
if self['time_flags/%s_present' % name].value:
if name != 'mtime':
yield TimeDateMSDOS32(self, "%s" % name, "%s DOS timestamp" % name)
count = self['time_flags/%s_count' % name].value
if count:
yield Bits(self, "%s_remainder" % name, 8 * count, "%s extra precision time (in 100ns increments)" % name)
def createDescription(self):
out = 'Time extension'
pieces = []
for name in ['mtime', 'ctime', 'atime', 'arctime']:
if not self['time_flags/%s_present' % name].value:
continue
if name == 'mtime':
basetime = self['../ftime'].value
else:
basetime = self['%s' % name].value
delta = timedelta()
if self['time_flags/%s_onesec' % name].value:
delta += timedelta(seconds=1)
if '%s_remainder'%name in self:
delta += timedelta(microseconds=self['%s_remainder' % name].value / 10.0)
pieces.append('%s=%s' % (name, basetime + delta))
if pieces:
out += ': ' + ', '.join(pieces)
return out
def specialHeader(s, is_file):
yield filesizeHandler(UInt32(s, "compressed_size", "Compressed size (bytes)"))
yield filesizeHandler(UInt32(s, "uncompressed_size", "Uncompressed size (bytes)"))
yield Enum(UInt8(s, "host_os", "Operating system used for archiving"), OS_NAME)
yield textHandler(UInt32(s, "crc32", "File CRC32"), hexadecimal)
yield TimeDateMSDOS32(s, "ftime", "Date and time (MS DOS format)")
yield textHandler(UInt8(s, "version", "RAR version needed to extract file"), formatRARVersion)
yield Enum(UInt8(s, "method", "Packing method"), COMPRESSION_NAME)
yield filesizeHandler(UInt16(s, "filename_length", "File name size"))
if s["host_os"].value in (OS_MSDOS, OS_WIN32):
yield MSDOSFileAttr32(s, "file_attr", "File attributes")
else:
yield textHandler(UInt32(s, "file_attr", "File attributes"), hexadecimal)
# Start additional field from unrar
if s["flags/is_large"].valu
|
podhmo/komet
|
komet/executors.py
|
Python
|
mit
| 3,149
| 0.000318
|
# -*- coding:utf-8 -*-
import copy
from zope.interface import implementer
from .interfaces import (
IExecutor,
ISchemaValidation,
IDataValidation,
ICreate,
IDelete,
IEdit
)
from alchemyjsonschema.dictify import (
normalize,
validate_all,
ErrorFound
)
from jsonschema import FormatChecker
from jsonschema.validators import Draft4Validator
class ValidationError(Exception):
pass
@implementer(IExecutor)
class Executor(object):
def __init__(self, context, params):
self.context = context
self.raw_params = params
self.params = None
    def validation(self, ob=None):
        raise NotImplementedError
    def execute(self, ob=None):
        raise NotImplementedError
def default_validation(self, iface, ob=None, name=""):
fn = self.context.customized_or_default(iface, ISchemaValidation, name=name)
params = fn(self.context, self.raw_params)
fn2 = self.context.customized_or_default(iface, IDataValidation, name=name)
fn2(self.context, params, ob)
return params
class CreateExecutor(Executor):
def validation(self, ob=None):
        self.params = self.default_validation(ICreate, ob)
def execute(self, ob=None):
if self.params is None:
raise RuntimeError("execute after validation")
ob = self.context.modelclass(**self.params)
self.context.session.add(ob)
self.context.session.flush()
return ob
class EditExecutor(Executor):
def validation(self, ob=None):
        self.params = self.default_validation(IEdit, ob)
def execute(self, ob):
if self.params is None:
raise RuntimeError("execute after validation")
for k, v in self.params.items():
setattr(ob, k, v)
self.context.session.add(ob)
return ob
class DeleteExecutor(Executor):
def validation(self, ob=None):
        self.params = self.default_validation(IDelete, ob)
def execute(self, ob):
self.context.session.delete(ob)
return ob
def create_jsonschema_validation(context, params, ob=None):
def customize_schema(schema):
schema = copy.deepcopy(schema)
# when creating model, id is not needed.
if "id" in schema["required"]:
schema["required"].remove("id")
if "id" in schema["properties"]:
schema["properties"].pop("id")
return schema
schema = customize_schema(context.schema)
schema_validator = Draft4Validator(schema, format_checker=FormatChecker())
try:
validate_all(params, schema_validator)
except ErrorFound as err:
raise ValidationError({e.path[0]: e.message for e in err.errors})
return normalize(params, schema)
def edit_jsonschema_validation(context, params):
schema = context.schema
schema_validator = Draft4Validator(schema, format_checker=FormatChecker())
try:
validate_all(params, schema_validator)
except ErrorFound as err:
raise ValidationError({e.path[0]: e.message for e in err.errors})
return normalize(params, schema)
def delete_jsonschema_validation(context, params):
return params
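# A minimal usage sketch (commented out; `context` is a hypothetical komet
# resource context, not defined in this module). The intended flow is
# validation() first, then execute():
#
#     executor = CreateExecutor(context, {"name": "foo"})
#     executor.validation()    # may raise ValidationError
#     ob = executor.execute()  # adds the new object to context.session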
|
openpermissions/accounts-srv
|
accounts/controllers/roles_handler.py
|
Python
|
gpl-3.0
| 1,354
| 0
|
# -*- coding: utf-8 -*-
# Copyright © 2014-2016 Digital Catapult and The Copyright Hub Foundation
# (together the Open Permissions Platform Coalition)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""API Roles handler. Allows to create and modify roles
"""
from koi import auth
from perch import Token, User
from tornado.gen import coroutine
from .base import BaseHandler
class RolesHandler(BaseHandler):
"""Responsible for managing role resources
"""
@auth.auth_required(Token.valid)
@coroutine
def get(self):
"""Get all roles"""
roles = {x.value for x in User.roles}
result = [{'id': x, 'name': x.title()} for x in roles]
self.finish({
'status': 200,
'data': result
})
|
BdEINSALyon/resa
|
account/migrations/0004_auto_20170705_1003.py
|
Python
|
gpl-3.0
| 620
| 0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-07-05 10:03
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    dependencies = [
('account', '0003_auto_20170705_0958'),
]
operations = [
migrations.RenameField(
model_name='oauthtoken',
old_name='renew_token',
new_name='refresh_token',
),
migrations.RenameField(
model_name='oauthtoken',
            old_name='renew_token_expiration',
new_name='refresh_token_expiration',
),
]
|
opendatatrentino/opendata-harvester
|
harvester/cli.py
|
Python
|
bsd-2-clause
| 1,858
| 0
|
import logging
import sys
from cliff.app import App
from cliff.commandmanager import CommandManager
# from .utils import ColorLogFormatter
from nicelog.formatters import ColorLineFormatter
class HarvesterApp(App):
    logger = logging.getLogger(__name__)
def __init__(self):
super(HarvesterApp, self).__init__(
description='Harvester application CLI',
version='0.1',
command_manager=CommandManager('harvester.commands'))
def configure_logging(self):
"""
Create logging handlers for any log output.
        Modified version to set a custom formatter for the console
"""
root_logger = logging.getLogger('')
root_logger.setLevel(logging.DEBUG)
# Set up logging to a file
if self.options.log_file:
file_handler = logging.FileHandler(
filename=self.options.log_file,
)
formatter = logging.Formatter(self.LOG_FILE_MESSAGE_FORMAT)
file_handler.setFormatter(formatter)
root_logger.addHandler(file_handler)
# Always send higher-level messages to the console via stderr
console = logging.StreamHandler(self.stderr)
console_level = {0: logging.WARNING,
1: logging.INFO,
2: logging.DEBUG,
}.get(self.options.verbose_level, logging.DEBUG)
console.setLevel(console_level)
# formatter = logging.Formatter(self.CONSOLE_MESSAGE_FORMAT)
formatter = ColorLineFormatter(
show_date=True, show_function=True, show_filename=True)
console.setFormatter(formatter)
root_logger.addHandler(console)
return
def main(argv=sys.argv[1:]):
myapp = HarvesterApp()
return myapp.run(argv)
if __name__ == '__main__':
sys.exit(main())
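# A usage sketch (assuming the package is installed; cliff supplies --help and
# the -v verbosity flags consumed in configure_logging above):
#
#     python -m harvester.cli --help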
|
apoikos/servermon
|
hwdoc/vendor/dummy.py
|
Python
|
isc
| 2,909
| 0.005846
|
# -*- coding: utf-8 -*- vim:fileencoding=utf-8:
# vim: tabstop=4:shiftwidth=4:softtabstop=4:expandtab
# Copyright © 2010-2012 Greek Research and Technology Network (GRNET S.A.)
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
# OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF
# USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
'''
Module containing dummy implementations of django management commands
Idea is to be able to use it for unit tests and as a reference
'''
def power_on(hostname, username, password, **kwargs):
'''
Power on command
'''
return True
def power_off(hostname, username, password, **kwargs):
'''
Power off command
'''
return True
def power_off_acpi(hostname, username, password, **kwargs):
'''
Power off using ACPI command
'''
return True
def power_cycle(hostname, username, password, **kwargs):
'''
Cold boot command
'''
return True
def power_reset(hostname, username, password, **kwargs):
'''
Warm boot command
'''
return True
def pass_change(hostname, username, password, **kwargs):
'''
Change BMC password
'''
return True
def set_settings(hostname, username, password, **kwargs):
'''
Set BMC settings
'''
return True
def set_ldap_settings(hostname, username, password, **kwargs):
'''
Set BMC LDAP settings
'''
return True
def boot_order(hostname, username, password, **kwargs):
'''
Set boot order
'''
return True
def license_set(hostname, username, password, **kwargs):
'''
Set BMC License
'''
return True
def bmc_reset(hostname, username, password, **kwargs):
'''
Reset BMC
'''
return True
def bmc_factory_defaults(hostname, username, password, **kwargs):
'''
Reset BMC to factory defaults
'''
return True
def add_user(hostname, username, password, **kwargs):
'''
Add a user to the BMC
'''
return True
def remove_user(hostname, username, password, **kwargs):
'''
Remove a User from the BMC
'''
return True
def get_all_users(hostname, username, password, **kwargs):
'''
Get a list of all configured users on the BMC
'''
return True
def firmware_update(hostname, username, password, **kwargs):
'''
Performs a firmware update of the BMC
'''
return True
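# A minimal test sketch (hypothetical credentials; every command above simply
# returns True, which is what makes this module usable as a unit-test stand-in):
#
#     assert power_on('bmc.example.com', 'admin', 'secret')
#     assert firmware_update('bmc.example.com', 'admin', 'secret')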
|
ryandoherty/RaceCapture_App
|
install/hooks/hook-autosportlabs.racecapture.views.configuration.rcp.scriptview.py
|
Python
|
gpl-3.0
| 28
| 0
|
hiddenimports = ['decimal']
|
hollylemos/think-python
|
chapter-03/3.py
|
Python
|
mit
| 1,541
| 0.035042
|
#Problem 1:
#Python provides a built-in function called len that returns the length of a string
#so the value of len('allen') is 5.
#Write a function named right_justify that takes a string named s as a parameter and prints
#the string with enough leading spaces so that the last letter of the string is in column 70
#of the display.
#word = raw_input('Type a word to send over there ---->\n')
def right_justify(word):
print " " * (70 - len(word)) + word
#right_justify(word)
#Problem 2:
#1. Type this example into a script and test it:
#def do_twice(f):
#f()
#f()
#2. Modify do_twice so that it takes two arguments, a function object and a value,
#and calls the function twice, passing the value as an argument.
#3. Write a more general version of print_spam, called print_twice, that takes a
#string as a parameter and prints it twice.
#4. Use the modified version of do_twice to call print_twice twice, passing 'spam'
#as an argument.
#5. Define a new function called do_four that takes a function object and a value
#and calls the function four times, passing the value as a parameter. There should
#be only two statements in the body of this function, not four.
word = raw_input('Type a word to repeat\n')
string = raw_input('Type something here\n')
def do_twice(f, value):
    f(value)
    f(value)
def print_spam(word):
    print word
def print_twice(string):
    print string
    print string
def do_four(f, value):
    do_twice(f, value)
    do_twice(f, value)
do_four(print_twice, 'spam')
print ''
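# Expected output of the call above: do_four runs do_twice twice, do_twice
# calls print_twice twice, and print_twice prints twice, so 'spam' appears
# 8 times in total.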
|
lgiordani/punch
|
tests/test_vcs_configuration.py
|
Python
|
isc
| 7,655
| 0
|
import pytest
from punch import vcs_configuration as vc
@pytest.fixture
def global_variables():
return {
'serializer': '{{ major }}.{{ minor }}.{{ patch }}',
'mark': 'just a mark'
}
@pytest.fixture
def vcs_config_dict():
return {
'name': 'git',
'commit_message': "Version updated to {{ new_version }}",
'finish_release': True,
'options': {
'make_release_branch': False,
'annotate_tags': False,
'annotation_message': '',
}
}
@pytest.fixture
def vcs_config_dict_with_include_files(vcs_config_dict):
vcs_config_dict['include_files'] = ['HISTORY.rst']
return vcs_config_dict
@pytest.fixture
def vcs_config_dict_with_include_all_files(vcs_config_dict):
vcs_config_dict['include_all_files'] = True
return vcs_config_dict
@pytest.fixture
def special_variables():
return {
'current_version': '1.2.3',
'new_version': '1.3.0'
}
def test_vcs_configuration_from_string(
vcs_config_dict, global_variables, special_variables):
vcsconf = vc.VCSConfiguration(vcs_config_dict['name'],
vcs_config_dict['options'],
global_variables,
special_variables,
vcs_config_dict['commit_message']
)
expected_options = {
'make_release_branch': False,
'annotate_tags': False,
'annotation_message': '',
'current_version': '1.2.3',
'new_version': '1.3.0'
}
assert vcsconf.name == 'git'
assert vcsconf.commit_message == "Version updated to 1.3.0"
assert vcsconf.include_files == []
assert vcsconf.finish_release is True
assert vcsconf.options == expected_options
def test_vcs_configuration_from_string_with_include_files(
vcs_config_dict_with_include_files,
global_variables, special_variables):
vcsconf = vc.VCSConfiguration(
vcs_config_dict_with_include_files['name'],
vcs_config_dict_with_include_files['options'],
global_variables,
special_variables,
vcs_config_dict_with_include_files['commit_message'],
include_files=vcs_config_dict_with_include_files['include_files']
)
assert vcsconf.include_files == ['HISTORY.rst']
def test_vcs_configuration_from_string_with_include_all_files(
vcs_config_dict_with_include_all_files,
global_variables, special_variables):
vcsconf = vc.VCSConfiguration(
vcs_config_dict_with_include_all_files['name'],
vcs_config_dict_with_include_all_files['options'],
global_variables,
special_variables,
vcs_config_dict_with_include_all_files['commit_message'],
include_all_files=vcs_config_dict_with_include_all_files[
'include_all_files']
)
assert vcsconf.include_all_files is True
def test_vcs_configuration_from_dict(
vcs_config_dict, global_variables, special_variables):
vcsconf = vc.VCSConfiguration.from_dict(
vcs_config_dict,
global_variables,
special_variables
)
expected_options = {
'make_release_branch': False,
'annotate_tags': False,
'annotation_message': '',
'current_version': '1.2.3',
'new_version': '1.3.0'
}
assert vcsconf.name == 'git'
assert vcsconf.commit_message == "Version updated to 1.3.0"
assert vcsconf.include_files == []
assert vcsconf.finish_release is True
assert vcsconf.options == expected_options
def test_vcs_configuration_from_dict_with_include_files(
vcs_config_dict_with_include_files,
global_variables, special_variables):
vcsconf = vc.VCSConfiguration.from_dict(
        vcs_config_dict_with_include_files,
global_variables,
special_variables
)
assert vcsconf.include_files == ['HISTORY.rst']
def test_vcs_configuration_from_dict_with_include_all_files(
vcs_config_dict_with_include_all_files,
global_variables, special_variables):
vcsconf = vc.VCSConfiguration.from_dict(
vcs_config_dict_with_include_all_files,
global_variables,
special_variables
)
assert vcsconf.include_all_files is True
def test_vcs_configuration_from_dict_without_commit_message(
vcs_config_dict, global_variables, special_variables):
vcs_config_dict.pop('commit_message')
vcsconf = vc.VCSConfiguration.from_dict(
vcs_config_dict,
global_variables,
special_variables
)
expected_options = {
'make_release_branch': False,
'annotate_tags': False,
'annotation_message': '',
'current_version': '1.2.3',
'new_version': '1.3.0'
}
assert vcsconf.name == 'git'
assert vcsconf.commit_message == "Version updated 1.2.3 -> 1.3.0"
assert vcsconf.include_files == []
assert vcsconf.finish_release is True
assert vcsconf.options == expected_options
def test_vcs_configuration_from_dict_without_finish_release(
vcs_config_dict, global_variables, special_variables):
vcs_config_dict.pop('finish_release')
vcsconf = vc.VCSConfiguration.from_dict(
vcs_config_dict,
global_variables,
special_variables
)
expected_options = {
'make_release_branch': False,
'annotate_tags': False,
'annotation_message': '',
'current_version': '1.2.3',
'new_version': '1.3.0'
}
assert vcsconf.name == 'git'
assert vcsconf.commit_message == "Version updated to 1.3.0"
assert vcsconf.include_files == []
assert vcsconf.finish_release is True
assert vcsconf.options == expected_options
def test_vcs_configuration_from_dict_without_options(
vcs_config_dict, global_variables, special_variables):
vcs_config_dict.pop('options')
vcsconf = vc.VCSConfiguration.from_dict(
vcs_config_dict,
global_variables,
special_variables
)
assert vcsconf.name == 'git'
assert vcsconf.commit_message == "Version updated to 1.3.0"
assert vcsconf.finish_release is True
def test_vcs_configuration_from_dict_can_use_global_variables(
vcs_config_dict, global_variables, special_variables):
vcs_config_dict['commit_message'] = "Mark: {{ mark }}"
vcsconf = vc.VCSConfiguration.from_dict(
vcs_config_dict,
global_variables,
special_variables
)
assert vcsconf.commit_message == "Mark: just a mark"
def test_vcs_configuration_from_dict_special_variables_take_precedence(
vcs_config_dict, global_variables, special_variables):
vcs_config_dict['commit_message'] = "{{ current_version }}"
global_variables['current_version'] = "5.0.0"
vcsconf = vc.VCSConfiguration.from_dict(
vcs_config_dict,
global_variables,
special_variables
)
assert vcsconf.commit_message == "1.2.3"
def test_vcs_configuration_from_dict_options_templates_are_processed(
vcs_config_dict, global_variables, special_variables):
vcs_config_dict['options']['annotation_message'] = \
"Updated {{ current_version}} -> {{ new_version }}"
vcsconf = vc.VCSConfiguration.from_dict(
vcs_config_dict,
global_variables,
special_variables
)
expected_options = {
'make_release_branch': False,
'annotate_tags': False,
'annotation_message': 'Updated 1.2.3 -> 1.3.0',
'current_version': '1.2.3',
'new_version': '1.3.0'
}
assert vcsconf.options == expected_options
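# A behavioural sketch distilled from the tests above (commented out; names
# assume the same punch package): from_dict renders the Jinja-style templates
# with the global variables, and special variables take precedence over them.
#
#     conf = vc.VCSConfiguration.from_dict(
#         {'name': 'git', 'commit_message': '{{ new_version }}'},
#         {}, {'new_version': '1.3.0'})
#     assert conf.commit_message == '1.3.0'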
|
fogbow/fogbow-dashboard
|
openstack_dashboard/dashboards/fogbow/usage/urls.py
|
Python
|
apache-2.0
| 404
| 0.00495
|
from django.conf.urls.defaults import patterns # noqa
from django.conf.urls.defaults import url # noqa
from openstack_dashboard.dashboards.fogbow.usage import views
from openstack_dashboard.dashboards.fogbow.usage.views import IndexView
urlpatterns = patterns('',
url(r'^$', IndexView.as_view(), name='index'),
    url(r'^(?P<member_id>.*)/usage$', views.getSpecificMemberUsage, name='usage'),
)
|
|
asanfilippo7/osf.io
|
api/comments/views.py
|
Python
|
apache-2.0
| 12,700
| 0.003622
|
from rest_framework import generics, permissions as drf_permissions
from rest_framework.exceptions import NotFound, ValidationError, PermissionDenied
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from api.base.exceptions import Gone
from api.base import permissions as base_permissions
from api.base.views import JSONAPIBaseView
from api.comments.permissions import (
CommentDetailPermissions,
CommentReportsPermissions
)
from api.comments.serializers import (
CommentSerializer,
CommentDetailSerializer,
CommentReportSerializer,
CommentReportDetailSerializer,
CommentReport
)
from framework.auth.core import Auth
from framework.auth.oauth_scopes import CoreScopes
from framework.exceptions import PermissionsError
from website.project.model import Comment
class CommentMixin(object):
"""Mixin with convenience methods for retrieving the current comment based on the
current URL. By default, fetches the comment based on the comment_id kwarg.
"""
serializer_class = CommentSerializer
comment_lookup_url_kwarg = 'comment_id'
def get_comment(self, check_permissions=True):
pk = self.kwargs[self.comment_lookup_url_kwarg]
try:
comment = Comment.find_one(Q('_id', 'eq', pk) & Q('root_target', 'ne', None))
except NoResultsFound:
raise NotFound
# Deleted root targets still appear as tuples in the database and are included in
# the above query, requiring an additional check
if comment.root_target.referent.is_deleted:
comment.root_target = None
comment.save()
if comment.root_target is None:
raise NotFound
if check_permissions:
# May raise a permission denied
self.check_object_permissions(self.request, comment)
return comment
class CommentDetail(JSONAPIBaseView, generics.RetrieveUpdateDestroyAPIView, CommentMixin):
"""Details about a specific comment. *Writeable*.
###Permissions
Comments on public nodes are given read-only access to everyone. Comments on private nodes are only visible
to contributors and administrators on the parent node. Only the user who created the comment has permission
to edit and delete the comment.
Note that if an anonymous view_only key is being used, the user relationship will not be exposed.
##Attributes
OSF comment entities have the "comments" `type`.
name type description
=================================================================================
content string content of the comment
date_created iso8601 timestamp timestamp that the comment was created
date_modified iso8601 timestamp timestamp when the comment was last updated
modified boolean has this comment been edited?
deleted boolean is this comment deleted?
is_abuse boolean has this comment been reported by the current user?
has_children boolean does this comment have replies?
can_edit boolean can the current user edit this comment?
##Relationships
###User
The user who created the comment.
###Node
The project associated with this comment.
###Target
The "parent" of the comment. If the comment was made on a node, the target is the node. If the comment
is a reply, its target is the comment it was in reply to.
###Replies
List of replies to this comment. New replies can be created through this endpoint.
###Reports
    List of spam reports for this comment. Only users with permission to create comments can
    access this endpoint, and users can only see reports that they have created.
##Links
    self: the canonical api endpoint of this comment
##Actions
###Update
Method: PUT / PATCH
URL: /links/self
Query Params: <none>
Body (JSON): {
"data": {
"type": "comments", # required
"id": {comment_id}, # required
"attributes": {
"content": {content}, # mandatory
"deleted": {is_deleted}, # mandatory
}
}
}
Success: 200 OK + comment representation
To update a comment, issue either a PUT or a PATCH request against the `/links/self` URL. The `content`
and `deleted` fields are mandatory if you PUT and optional if you PATCH. Non-string values will be accepted and
stringified, but we make no promises about the stringification output. So don't do that.
To restore a deleted comment, issue a PATCH request against the `/links/self` URL, with `deleted: False`.
###Delete
Method: DELETE
URL: /links/self
Query Params: <none>
Success: 204 No Content
To delete a comment send a DELETE request to the `/links/self` URL. Nothing will be returned in the response
body. Attempting to delete an already deleted comment will result in a 400 Bad Request response.
##Query Params
*None*.
#This Request/Response
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
CommentDetailPermissions,
base_permissions.TokenHasScope,
)
required_read_scopes = [CoreScopes.NODE_COMMENTS_READ]
required_write_scopes = [CoreScopes.NODE_COMMENTS_WRITE]
serializer_class = CommentDetailSerializer
view_category = 'comments'
view_name = 'comment-detail'
# overrides RetrieveAPIView
def get_object(self):
return self.get_comment()
def perform_destroy(self, instance):
auth = Auth(self.request.user)
if instance.is_deleted:
raise ValidationError('Comment already deleted.')
else:
try:
instance.delete(auth, save=True)
except PermissionsError:
raise PermissionDenied('Not authorized to delete this comment.')
class CommentReportsList(JSONAPIBaseView, generics.ListCreateAPIView, CommentMixin):
"""List of reports made for a comment. *Writeable*.
Paginated list of reports for a comment. Each resource contains the full representation of the
report, meaning additional requests to an individual comment's report detail view are not necessary.
###Permissions
The comment reports endpoint can only be viewed by users with permission to comment on the node. Users
are only shown comment reports that they have made.
##Attributes
OSF comment report entities have the "comment_reports" `type`.
name type description
=====================================================================================
category string the type of spam, must be one of the allowed values
message string description of why the comment was reported
##Links
See the [JSON-API spec regarding pagination](http://jsonapi.org/format/1.0/#fetching-pagination).
##Actions
###Create
Method: POST
URL: /links/self
Query Params: <none>
Body (JSON): {
"data": {
"type": "comment_reports", # required
"attributes": {
"category": {category}, # mandatory
"message": {text}, # optional
}
}
}
Success: 201 CREATED + comment report representation
To create a report for this comment, issue a POST request against this endpoint. The `category` field is mandatory,
    and must be one of the following: "spam", "hate" or "violence". The `message` field is optional. If the comment
|
jeremiah-c-leary/vhdl-style-guide
|
vsg/vhdlFile/extract/get_n_token_after_tokens.py
|
Python
|
gpl-3.0
| 688
| 0.001453
|
from vsg.vhdlFile.extract import tokens
def get_n_token_after_tokens(iToken, lTokens, lAllTokens, oTokenMap):
lReturn = []
lIndexes = []
for oToken in lTokens:
lTemp = oTokenMap.get_token_indexes(oToken)
for iTemp in lTemp:
iTokenIndex = iTemp
for iCount in range(0, iToken):
                iTokenIndex = oTokenMap.get_index_of_next_non_whitespace_token(iTokenIndex, bExcludeComments=True)
lIndexes.append(iTokenIndex)
lIndexes.sort()
for iIndex in lIndexes:
iLine = oTokenMap.get_line_number_of_index(iIndex)
lReturn.append(tokens.New(iIndex, iLine, [lAllTokens[iIndex]]))
return lReturn
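# A hypothetical usage sketch (commented out; the token class name below is
# assumed for illustration and is not necessarily vsg's real API): collect the
# token found 2 non-whitespace positions after every `entity` keyword.
#
#     hits = get_n_token_after_tokens(2, [token.entity_keyword], lAllTokens, oTokenMap)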
|
pam-phy/python-notes
|
byte-of-python/if.py
|
Python
|
gpl-2.0
| 600
| 0.020833
|
#!/usr/bin/env python3
number = 23
guess = int(input('Enter an integer : '))
if guess == number:
    # New block starts here
    print('Congratulations, you guessed it.')
    print('(but you do not win any pizzas!)')
    # New block ends here
elif guess < number:
    # Another code block
print('No, it is a little higher than that')
    # You can do whatever you want inside this code block
else:
    print('No, it is a little lower than that')
    # You can only get here by guessing a number greater than (>) the set number
print('Done')
# This final statement runs after
# the if statement has finished executing.
|
conan-io/conan
|
conans/client/generators/visualstudio.py
|
Python
|
mit
| 5,803
| 0.00224
|
import os
import re
from conans.model import Generator
from conans.paths import BUILD_INFO_VISUAL_STUDIO
from conans.client.tools.files import VALID_LIB_EXTENSIONS
class VisualStudioGenerator(Generator):
template = '''<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ImportGroup Label="PropertySheets" />
<PropertyGroup Label="UserMacros" />
<PropertyGroup Label="Conan-RootDirs">{item_properties}
</PropertyGroup>
{properties}
<ItemGroup />
</Project>'''
properties_template = '''<PropertyGroup Label="ConanVariables"{condition}>
<ConanPackageName>{conan_package_name}</ConanPackageName>
<ConanPackageVersion>{conan_package_version}</ConanPackageVersion>
<ConanCompilerFlags>{compiler_flags}</ConanCompilerFlags>
<ConanLinkerFlags>{linker_flags}</ConanLinkerFlags>
<ConanPreprocessorDefinitions>{definitions}</ConanPreprocessorDefinitions>
<ConanIncludeDirectories>{include_dirs}</ConanIncludeDirectories>
<ConanResourceDirectories>{res_dirs}</ConanResourceDirectories>
<ConanLibraryDirectories>{lib_dirs}</ConanLibraryDirectories>
<ConanBinaryDirectories>{bin_dirs}</ConanBinaryDirectories>
<ConanLibraries>{libs}</ConanLibraries>
<ConanSystemDeps>{system_libs}</ConanSystemDeps>
</PropertyGroup>
<PropertyGroup{condition}>
<LocalDebuggerEnvironment>PATH=%PATH%;{bin_dirs}</LocalDebuggerEnvironment>
<DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
</PropertyGroup>
<ItemDefinitionGroup{condition}>
<ClCompile>
<AdditionalIncludeDirectories>$(ConanIncludeDirectories)%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<PreprocessorDefinitions>$(ConanPreprocessorDefinitions)%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalOptions>$(ConanCompilerFlags) %(AdditionalOptions)</AdditionalOptions>
</ClCompile>
<Link>
<AdditionalLibraryDirectories>$(ConanLibraryDirectories)%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
<AdditionalDependencies>$(ConanLibraries)%(AdditionalDependencies)</AdditionalDependencies>
<AdditionalDependencies>$(ConanSystemDeps)%(AdditionalDependencies)</AdditionalDependencies>
<AdditionalOptions>$(ConanLinkerFlags) %(AdditionalOptions)</AdditionalOptions>
</Link>
<Midl>
<AdditionalIncludeDirectories>$(ConanIncludeDirectories)%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
</Midl>
<ResourceCompile>
<AdditionalIncludeDirectories>$(ConanIncludeDirectories)%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<PreprocessorDefinitions>$(ConanPreprocessorDefinitions)%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalOptions>$(ConanCompilerFlags) %(AdditionalOptions)</AdditionalOptions>
</ResourceCompile>
</ItemDefinitionGroup>'''
item_template = '''
<Conan-{name}-Root>{root_dir}</Conan-{name}-Root>'''
def _format_items(self):
sections = []
for dep_name, cpp_info in self._deps_build_info.dependencies:
fields = {
'root_dir': cpp_info.rootpath,
'name': dep_name.replace(".", "-")
}
section = self.item_template.format(**fields)
sections.append(section)
return "".join(sections)
@property
def filename(self):
return BUILD_INFO_VISUAL_STUDIO
def _format_properties(self, build_info, condition):
def has_valid_ext(lib):
ext = os.path.splitext(lib)[1]
return ext in VALID_LIB_EXTENSIONS
fields = {
'conan_package_name': self.conanfile.name if self.conanfile.name else "",
            'conan_package_version': self.conanfile.version if self.conanfile.version else "",
'condition': condition,
'bin_dirs': "".join("%s;" % p for p in build_info.bin_paths),
'res_dirs': "".join("%s;" % p for p in build_info.res_paths),
'include_dirs': "".join("%s;" % p for p in build_info.include_paths),
'lib_dirs': "".join("%s;" % p for p
|
in build_info.lib_paths),
'libs': "".join(['%s.lib;' % lib if not has_valid_ext(lib)
else '%s;' % lib for lib in build_info.libs]),
'system_libs': "".join(['%s.lib;' % sys_dep if not has_valid_ext(sys_dep)
else '%s;' % sys_dep for sys_dep in build_info.system_libs]),
'definitions': "".join("%s;" % d for d in build_info.defines),
'compiler_flags': " ".join(build_info.cxxflags + build_info.cflags),
'linker_flags': " ".join(build_info.sharedlinkflags),
'exe_flags': " ".join(build_info.exelinkflags)
}
formatted_template = self.properties_template.format(**fields)
return formatted_template
@property
def content(self):
per_item_props = self._format_items()
properties = [self._format_properties(self._deps_build_info, condition='')]
for config, cpp_info in self._deps_build_info.configs.items():
condition = " Condition=\"'$(Configuration)' == '%s'\"" % config
properties.append(self._format_properties(cpp_info, condition=condition))
fields = {
'item_properties': per_item_props,
'properties': '\n'.join(properties)
}
formatted_template = self.template.format(**fields)
userprofile = os.getenv("USERPROFILE")
if userprofile:
userprofile = userprofile.replace("\\", "\\\\")
formatted_template = re.sub(userprofile, "$(USERPROFILE)", formatted_template,
flags=re.I)
return formatted_template
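# Usage note (a sketch, not part of this file): with Conan v1 this generator is
# selected by name on install, e.g. `conan install . -g visual_studio`, which
# writes the property sheet above (BUILD_INFO_VISUAL_STUDIO) for inclusion in a
# Visual Studio project.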
|
barosl/homu
|
homu/main.py
|
Python
|
mit
| 37,593
| 0.003937
|
import argparse
import github3
import toml
import json
import re
from . import utils
import logging
from threading import Thread, Lock
import time
import traceback
import sqlite3
import requests
from contextlib import contextmanager
from itertools import chain
from queue import Queue
import os
import subprocess
from .git_helper import SSH_KEY_FILE
import shlex
import sys
STATUS_TO_PRIORITY = {
'success': 0,
'pending': 1,
'approved': 2,
'': 3,
'error': 4,
'failure': 5,
}
INTERRUPTED_BY_HOMU_FMT = 'Interrupted by Homu ({})'
INTERRUPTED_BY_HOMU_RE = re.compile(r'Interrupted by Homu \((.+?)\)')
TEST_TIMEOUT = 3600 * 10
@contextmanager
def buildbot_sess(repo_cfg):
sess = requests.Session()
sess.post(repo_cfg['buildbot']['url'] + '/login', allow_redirects=False, data={
'username': repo_cfg['buildbot']['username'],
'passwd': repo_cfg['buildbot']['password'],
})
yield sess
sess.get(repo_cfg['buildbot']['url'] + '/logout', allow_redirects=False)
db_query_lock = Lock()
def db_query(db, *args):
with db_query_lock:
db.execute(*args)
class PullReqState:
num = 0
priority = 0
rollup = False
title = ''
body = ''
head_ref = ''
base_ref = ''
assignee = ''
delegate = ''
def __init__(self, num, head_sha, status, db, repo_label, mergeable_que, gh, owner, name, repos):
self.head_advanced('', use_db=False)
self.num = num
self.head_sha = head_sha
self.status = status
self.db = db
self.repo_label = repo_label
self.mergeable_que = mergeable_que
self.gh = gh
self.owner = owner
self.name = name
self.repos = repos
self.test_started = time.time() # FIXME: Save in the local database
def head_advanced(self, head_sha, *, use_db=True):
self.head_sha = head_sha
self.approved_by = ''
self.status = ''
self.merge_sha = ''
self.build_res = {}
self.try_ = False
self.mergeable = None
if use_db:
self.set_status('')
self.set_mergeable(None)
self.init_build_res([])
def __repr__(self):
return 'PullReqState:{}/{}#{}(approved_by={}, priority={}, status={})'.format(
self.owner,
self.name,
self.num,
self.approved_by,
self.priority,
self.status,
)
def sort_key(self):
return [
STATUS_TO_PRIORITY.get(self.get_status(), -1),
1 if self.mergeable is False else 0,
0 if self.approved_by else 1,
1 if self.rollup else 0,
-self.priority,
self.num,
]
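    # Note: __lt__ below compares these keys lexicographically, so queue order
    # puts the best status first (success < pending < approved < '' < error <
    # failure), sinks known-unmergeable PRs, unapproved PRs and rollups, floats
    # higher .priority values up, and breaks ties by PR number.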
def __lt__(self, other):
return self.sort_key() < other.sort_key()
def get_issue(self):
issue = getattr(self, 'issue', None)
if not issue:
issue = self.issue = self.get_repo().issue(self.num)
return issue
def add_comment(self, text):
self.get_issue().create_comment(text)
def set_status(self, status):
self.status = status
db_query(self.db, 'UPDATE pull SET status = ? WHERE repo = ? AND num = ?', [self.status, self.repo_label, self.num])
# FIXME: self.try_ should also be saved in the database
if not self.try_:
db_query(self.db, 'UPDATE pull SET merge_sha = ? WHERE repo = ? AND num = ?', [self.merge_sha, self.repo_label, self.num])
def get_status(self):
return 'approved' if self.status == '' and self.approved_by and self.mergeable is not False else self.status
def set_mergeable(self, mergeable, *, cause=None, que=True):
if mergeable is not None:
self.mergeable = mergeable
db_query(self.db, 'INSERT OR REPLACE INTO mergeable (repo, num, mergeable) VALUES (?, ?, ?)', [self.repo_label, self.num, self.mergeable])
else:
if que:
self.mergeable_que.put([self, cause])
else:
self.mergeable = None
db_query(self.db, 'DELETE FROM mergeable WHERE repo = ? AND num = ?', [self.repo_label, self.num])
def init_build_res(self, builders, *, use_db=True):
self.build_res = {x: {
'res': None,
'url': '',
} for x in builders}
if use_db:
db_query(self.db, 'DELETE FROM build_res WHERE repo = ? AND num = ?', [self.repo_label, self.num])
def set_build_res(self, builder, res, url):
if builder not in self.build_res:
raise Exception('Invalid builder: {}'.format(builder))
self.build_res[builder] = {
'res': res,
'url': url,
}
db_query(self.db, 'INSERT OR REPLACE INTO build_res (repo, num, builder, res, url, merge_sha) VALUES (?, ?, ?, ?, ?, ?)', [
self.repo_label,
self.num,
builder,
res,
url,
self.merge_sha,
])
def build_res_summary(self):
return ', '.join('{}: {}'.format(builder, data['res'])
for builder, data in self.build_res.items())
def get_repo(self):
repo = self.repos[self.repo_label]
if not repo:
self.repos[self.repo_label] = repo = self.gh.repository(self.owner, self.name)
assert repo.owner.login == self.owner
assert repo.name == self.name
return repo
def save(self):
db_query(self.db, 'INSERT OR REPLACE INTO pull (repo, num, status, merge_sha, title, body, head_sha, head_ref, base_ref, assignee, approved_by, priority, try_, rollup, delegate) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', [
self.repo_label,
self.num,
self.status,
self.merge_sha,
self.title,
self.body,
self.head_sha,
self.head_ref,
self.base_ref,
self.assignee,
self.approved_by,
self.priority,
self.try_,
self.rollup,
self.delegate,
])
def refresh(self):
issue = self.get_repo().issue(self.num)
self.title = issue.title
self.body = issue.body
def fake_merge(self, repo_cfg):
        if repo_cfg.get('linear', False) or repo_cfg.get('autosquash', False):
            msg = '''!!! Temporary commit !!!
This commit is artificially made up to mark PR {} as merged.
If this commit remained in the history, you can safely reset HEAD to {}.
This is possibly due to protected branches, which forbid force-pushing.
You are advised to turn off protected branches, or disable certain Homu
features that require force-pushing, such as linear history or
auto-squashing.
[ci skip]'''.format(self.num, self.merge_sha)
def inner():
# `merge()` will return `None` if the `head_sha` commit is already part of the `base_ref` branch, which means rebasing didn't have to modify the original commit
merge_commit = self.get_repo().merge(self.base_ref, self.head_sha, msg)
if merge_commit:
self.fake_merge_sha = merge_commit.sha
def fail(err):
self.add_comment(':warning: Unable to mark this PR as merged. Closing instead. ({})'.format(err))
self.get_issue().close()
utils.retry_until(inner, fail, self)
def sha_cmp(short, full):
return len(short) >= 4 and short == full[:len(short)]
def sha_or_blank(sha):
return sha if re.match(r'^[0-9a-f]+$', sha) else ''
def parse_commands(body, username, repo_cfg, state, my_username, db, states, *, realtime=False, sha=''):
try_only = False
if username not in repo_cfg['reviewers'] and username != my_username:
if username.lower() == state.delegate.lower():
pass # Allow users who have been delegated review powers
elif username in repo_cfg.get('try_users', []):
try_only = True
else:
return False
state_changed = False
words = list(chain.from_iterable(re.findall(r'\S+', x) for x in body.split
|
flungo/python-yaml-config
|
lib/yamlconfig/config.py
|
Python
|
mit
| 821
| 0.002436
|
__author__ = 'Fabrizio Lungo<[email protected]>'
import os
import yaml
from __exceptions__.FileNotFound import FileNotFound
from section import ConfigurationSection
class Configuration(ConfigurationSection):
def __init__(self, fn='config.yml', name=None, create=False):
self._fn = fn
self._create = create
self.reload()
if name is None:
name=fn
self._name = name
def reload(self):
if self._create and not os.path.exists(self._fn):
            self._config = {}
elif os.path.exists(self._fn):
with open(self._fn, "r") as f:
self._config = yaml.load(f)
else:
raise FileNotFound(filename=self._fn)
def save(self):
with open(self._fn, "w") as f:
yaml.dump(self._config, f)
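# A minimal usage sketch (commented out since it writes to disk; `create=True`
# starts from an empty config when the file does not exist yet):
#
#     cfg = Configuration(fn='config.yml', create=True)
#     cfg.save()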
|
daicang/Leetcode-solutions
|
268-missing-number.py
|
Python
|
mit
| 388
| 0.028351
|
class Solution(object):
def missingNumber(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
xor = len(nums)
for i, n in enumerate(nums):
xor ^= n
xor ^= i
return xor
inputs = [
[0],
[1],
[3,0,1],
[9,6,4,2,3,5,7,0,1]
]
s = Solution()
for i in inputs:
print s.missingNumber(i)
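# Why XOR works: nums holds 0..n with one value missing, so XOR-ing every index
# (plus n itself, seeded via len(nums)) against every value cancels all pairs
# and leaves the missing number. A runnable worked example:
assert Solution().missingNumber([3, 0, 1]) == 2  # 3 ^ (0^3) ^ (1^0) ^ (2^1) == 2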
|
andrebellafronte/stoq
|
stoqlib/gui/test/test_search.py
|
Python
|
gpl-2.0
| 12,901
| 0.001783
|
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2008 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
## Author(s): Stoq Team <[email protected]>
##
import datetime
import locale
import mock
import os
import unittest
from dateutil import relativedelta
from dateutil.relativedelta import SU, MO, SA, relativedelta as delta
from stoqlib.api import api
from stoqlib.domain.product import Product
from stoqlib.domain.test.domaintest import DomainTest
from stoqlib.gui.editors.producteditor import ProductEditor
from stoqlib.gui.events import SearchDialogSetupSearchEvent
from stoqlib.gui.search.productsearch import ProductSearch
from stoqlib.gui.search.searchextension import SearchExtension
from stoqlib.gui.search.searchcolumns import SearchColumn, QuantityColumn
from stoqlib.gui.search.searchdialog import SearchDialog
from stoqlib.gui.search.searchfilters import (StringSearchFilter, DateSearchFilter,
ComboSearchFilter, NumberSearchFilter)
from stoqlib.gui.search.searchoptions import (ThisWeek, LastWeek, NextWeek, ThisMonth,
LastMonth, NextMonth)
from stoqlib.gui.test.uitestutils import GUITest
from stoqlib.lib.defaults import get_weekday_start
from stoqlib.lib.introspection import get_all_classes
class TestDateOptions(unittest.TestCase):
def setUp(self):
self._original_locale = locale.getlocale(locale.LC_ALL)
def tearDown(self):
self._set_locale(self._original_locale)
def _get_week_interval(self, today):
weekday = get_weekday_start()
start = today + delta(weekday=weekday(-1))
end = start + delta(days=+6)
return start, end
def _get_month_interval(self, today):
start = today + delta(day=1)
end = start + delta(day=31)
return start, end
def _get_locales(self):
# en_US: week starts on sunday
# es_ES: week starts on monday
return ["en_US.UTF-8", "es_ES.UTF-8"]
def _starts_on_sunday(self, loc):
return loc.startswith("en_US")
def _set_locale(self, loc):
try:
loc = locale.setlocale(locale.LC_ALL, loc)
except locale.Error:
            # Some locales may not be available on the user's machine, which
            # would make the test fail for the wrong reason, so skip it and
            # report the problem.
raise unittest.SkipTest("Locale %s not available" % (loc, ))
else:
os.environ['LC_ALL'] = loc
def _testWeekday(self, loc, interval):
if self._starts_on_sunday(loc):
self.assertEqual(
relativedelta.weekday(interval[0].weekday()), SU)
self.assertEqual(
relativedelta.weekday(interval[1].weekday()), SA)
else:
self.assertEqual(
relativedelta.weekday(interval[0].weekday()), MO)
self.assertEqual(
relativedelta.weekday(interval[1].weekday()), SU)
def test_this_week(self):
option = ThisWeek()
for loc in self._get_locales():
self._set_locale(loc)
# starting in 2008/01/01, wednesday
for i in range(1, 8):
get_today_date = lambda: datetime.date(2008, 1, i)
option.get_today_date = get_today_date
self.assertEqual(option.get_interval(),
self._get_week_interval(get_today_date()))
self._testWeekday(loc, option.get_interval())
def test_last_week(self):
option = LastWeek()
for loc in self._get_locales():
self._set_locale(loc)
# starting in 2008/01/01, wednesday
for i in range(1, 8):
get_today_date = lambda: datetime.date(2008, 1, i)
option.get_today_date = get_today_date
last_week_day = get_today_date() + delta(weeks=-1)
self.assertEqual(option.get_interval(),
self._get_week_interval(last_week_day))
self._testWeekday(loc, option.get_interval())
def test_next_week(self):
option = NextWeek()
for loc in self._get_locales():
self._set_locale(loc)
# starting in 2008/01/01, wednesday
for i in range(1, 8):
get_today_date = lambda: datetime.date(2008, 1, i)
option.get_today_date = get_today_date
next_week_day = get_today_date() + delta(weeks=+1)
self.assertEqual(option.get_interval(),
self._get_week_interval(next_week_day))
self._testWeekday(loc, option.get_interval())
def test_this_month(self):
option = ThisMonth()
for loc in self._get_locales():
self._set_locale(loc)
for month_day in [datetime.date(2007, 1, 1),
datetime.date(2007, 1, 15),
datetime.date(2007, 1, 31)]:
option.get_today_date = lambda: month_day
self.assertEqual(option.get_interval(),
self._get_month_interval(month_day))
def test_last_month(self):
option = LastMonth()
for loc in self._get_locales():
self._set_locale(loc)
for month_day in [datetime.date(2007, 1, 1),
datetime.date(2007, 1, 15),
datetime.date(2007, 1, 31)]:
option.get_today_date = lambda: month_day
last_month_day = month_day + delta(months=-1)
self.assertEqual(option.get_interval(),
self._get_month_interval(last_month_day))
def test_next_month(self):
option = NextMonth()
for loc in self._get_locales():
self._set_locale(loc)
            for month_day in [datetime.date(2007, 1, 1),
                              datetime.date(2007, 1, 15),
                              datetime.date(2007, 1, 31)]:
                option.get_today_date = lambda: month_day
next_month_day = month_day + delta(months=+1)
self.assertEqual(option.get_interval(),
self._get_month_interval(next_month_day))
class TestSearchEditor(GUITest):
"""Tests for SearchEditor"""
@mock.patch('stoqlib.gui.search.searcheditor.api.new_store')
@mock.patch('stoqlib.gui.search.searcheditor.run_dialog')
def test_run_editor(self, run_dialog, new_store):
run_dialog.return_value = True
new_store.return_value = self.store
dialog = ProductSearch(store=self.store)
dialog.search.refresh()
dialog.results.select(dialog.results[0])
product = dialog.results[0].product
with mock.patch.object(self.store, 'commit'):
with mock.patch.object(self.store, 'close'):
self.click(dialog._toolbar.edit_button)
run_dialog.assert_called_once_with(ProductEditor, dialog,
self.store, product,
visual_mode=False)
class TestSearchEvent(GUITest):
def test_search_dialog_setup_search(self):
class ProductSearchExtention(SearchExtension):
spec_attributes = dict(ncm=Product.ncm)
def get_columns(self):
|
sveetch/Sveetoy
|
project/githubpages_settings.py
|
Python
|
mit
| 428
| 0
|
# -*- coding: utf-8 -*-
"""
Production settings file for project 'project'
"""
from project.settings import *
DEBUG = False
SITE_DOMAIN = 'sveetch.github.io/Sveetoy'
# Directory where all stuff will be builded
PUBLISH_DIR = os.path.join(PROJECT_DIR, '../docs')
# Path where will be moved all the static files, usually this is a directory in
# the ``PUBLISH_DIR``
STATIC_DIR = os.path.join(PROJECT_DIR, PUBLISH_DIR, 'static')
|
Udzu/pudzu
|
dataviz/flagstriband.py
|
Python
|
mit
| 3,039
| 0.014149
|
from pudzu.charts import *
df = pd.read_csv("datasets/flagstriband.csv")
df = pd.concat([pd.DataFrame(df.colours.apply(list).tolist(), columns=list("TMB")), df], axis=1).set_index("colours")
FONT, SIZE = calibri, 24
fg, bg = "black", "#EEEEEE"
default_img = "https://s-media-cache-ak0.pinimg.com/736x/0d/36/e7/0d36e7a476b06333d9fe9960572b66b9.jpg"
COLORS = { "W": "white", "Y": "yellow", "R": "red", "G": "green", "B": "blue", "K": "black", }
W, H = 320, 200
def label(c, size):
w, h = size
label = Image.from_text_bounded(" ", (W,H), SIZE, partial(FONT, bold=True), beard_line=True)
description = Image.from_text_bounded(" ", (W,H), SIZE, partial(FONT, italics=True), beard_line=True)
if c == "Y":
flag = Triangle(max(w,h), "orange", "yellow", p=1.0).crop_to_aspect(w,h).trim(1).pad(1, "grey")
else:
flag = Rectangle((w-2, h-2), RGBA(COLORS.get(c)).blend(bg, 0.1)).pad(1, "grey")
return Image.from_column([label, description, flag], padding=2, bg=bg)
def process(d):
if non(d['name']): return None
label = Image.from_text_bounded(d['name'].replace("*","").upper(), (W,H), SIZE, partial(FONT, bold=True), beard_line=True)
description = Image.from_text_bounded(get_non(d, 'description', " "), (W,H), SIZE, partial(FONT, italics=True), beard_line=True)
flag = Image.from_url_with_cache(get_non(d, 'flag', default_img)).to_rgba()
flag = flag.resize_fixed_aspect(height=H-2) if flag.width / flag.height < 1.3 else flag.resize((W-2,H-2))
flag = flag.pad(1, "grey")
flaglabel = Image.from_column([label, description, flag], padding=2, bg=bg)
if "*" in d['name']: flaglabel = flaglabel.blend(Rectangle(flaglabel.size, bg), 0.3)
return flaglabel
def grid(middle):
ms = df[df.M == middle]
colors = "".join(COLORS).replace(middle,"")
array = [[dict(ms.loc[b+middle+t][["name", "description", "flag"]]) for b in colors] for t in colors]
data = pd.DataFrame(array, index=list(colors), columns=list(colors))
    grid = grid_chart(data, process, padding=(10,20), fg=fg, bg=bg, yalign=1,
                      row_label=lambda row: label(data.index[row], (100, H)),
                      col_label=lambda col: label(data.columns[col], (W,100)),
                      corner_label=label(middle, (100,100)))
return grid
PAD = 100
grids = list(generate_batches([grid(c) for c in COLORS], 2))
grid = Image.from_array(grids, padding=(PAD,PAD//2), bg=bg)
title = Image.from_column([
Image.from_text_bounded("From Austria to Zanzibar".upper(), grid.size, 360, partial(FONT, bold=True), fg=fg, bg=bg, padding=(PAD,20)),
    Image.from_text_bounded("a catalog of horizontal triband flags".upper(), grid.size, 240, partial(FONT, bold=True), fg=fg, bg=bg, padding=(PAD,20)),
], padding=0)
img = Image.from_column([title, grid], bg=bg, padding=(20,0)).pad(10, bg)
img.place(Image.from_text("/u/Udzu", FONT(48), fg=fg, bg=bg, padding=10).pad((2,2,0,0), fg), align=1, padding=10, copy=False)
img.save("output/flagstriband.png")
img.resize_fixed_aspect(scale=0.5).save("output/flagstriband2.png")
|
somini/gpodder
|
src/gpodder/escapist_videos.py
|
Python
|
gpl-3.0
| 4,242
| 0.003772
|
# -*- coding: utf-8 -*-
#
# gPodder - A media aggregator and podcast client
# Copyright (c) 2005-2014 Thomas Perl and the gPodder Team
#
# gPodder is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# gPodder is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# gpodder.escapist - Escapist Videos download magic
# somini <[email protected]>; 2014-09-14
#
import gpodder
from gpodder import util
import logging
logger = logging.getLogger(__name__)
try:
# For Python < 2.6, we use the "simplejson" add-on module
import simplejson as json
except ImportError:
# Python 2.6 already ships with a nice "json" module
import json
import re
# This matches the more reliable URL
ESCAPIST_NUMBER_RE = re.compile(r'http://www.escapistmagazine.com/videos/view/(\d+)', re.IGNORECASE)
# This matches regular URL, mainly those that come in the RSS feeds
ESCAPIST_REGULAR_RE = re.compile(r'http://www.escapistmagazine.com/videos/view/([\w-]+)/(\d+)-', re.IGNORECASE)
# This finds the RSS for a given URL
DATA_RSS_RE = re.compile(r'http://www.escapistmagazine.com/rss/videos/list/([1-9][0-9]*)\.xml')
# This matches the flash player's configuration. It's a JSON, but it's always malformed
DATA_CONFIG_RE = re.compile(r'flashvars=.*config=(http.*\.js)', re.IGNORECASE)
# This matches the actual MP4 url, inside the "JSON"
DATA_CONFIG_DATA_RE = re.compile(r'http[:/\w.?&-]*\.mp4')
# This matches the cover art for an RSS. We shouldn't parse XML with regex.
DATA_COVERART_RE = re.compile(r'<url>(http:.+\.jpg)</url>')
class EscapistError(BaseException): pass
def get_real_download_url(url):
video_id = get_escapist_id(url)
if video_id is None:
return url
web_data = get_escapist_web(video_id)
data_config_frag = DATA_CONFIG_RE.search(web_data)
if data_config_frag is None:
raise EscapistError('Cannot get flashvars URL from The Escapist')
data_config_url = data_config_frag.group(1)
logger.debug('Config URL: %s', data_config_url)
data_config_data = util.urlopen(data_config_url).read().decode('utf-8')
data_config_data_frag = DATA_CONFIG_DATA_RE.search(data_config_data)
if data_config_data_frag is None:
raise EscapistError('Cannot get configuration JS from The Escapist')
real_url = data_config_data_frag.group(0)
if real_url is None:
raise EscapistError('Cannot get MP4 URL from The Escapist')
elif "-ad-rotation/" in real_url:
raise EscapistError('Oops, seems The Escapist blocked this IP. Wait a few days/weeks to get it unblocked')
else:
return real_url
def get_escapist_id(url):
result = ESCAPIST_NUMBER_RE.match(url)
if result is not None:
return result.group(1)
result = ESCAPIST_REGULAR_RE.match(url)
if result is not None:
return result.group(2)
return None
def is_video_link(url):
return (get_escapist_id(url) is not None)
def get_real_channel_url(url):
video_id = get_escapist_id(url)
if video_id is None:
return url
web_data = get_escapist_web(video_id)
data_config_frag = DATA_RSS_RE.search(web_data)
if data_config_frag is None:
raise EscapistError('Cannot get RSS URL from The Escapist')
return data_config_frag.group(0)
def get_real_cover(url):
rss_url = get_real_channel_url(url)
if rss_url is None:
return None
rss_data = util.urlopen(rss_url).read()
rss_data_frag = DATA_COVERART_RE.search(rss_data)
if rss_data_frag is None:
return None
return rss_data_frag.group(1)
def get_escapist_web(video_id):
if video_id is None:
return None
web_url = 'http://www.escapistmagazine.com/videos/view/%s' % video_id
return util.urlopen(web_url).read()
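# A usage sketch (commented out since it performs network requests; the URL
# shape follows the regexes above):
#
#     url = 'http://www.escapistmagazine.com/videos/view/12345'
#     if is_video_link(url):
#         mp4_url = get_real_download_url(url)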
|
ProteinDF/ProteinDF_bridge
|
proteindf_bridge/modeling.py
|
Python
|
gpl-3.0
| 24,765
| 0.000371
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2014 The ProteinDF development team.
# see also AUTHORS and README if provided.
#
# This file is a part of the ProteinDF software package.
#
# The ProteinDF is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# The ProteinDF is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ProteinDF. If not, see <http://www.gnu.org/licenses/>.
from .superposer import Superposer
from .matrix import Matrix
from .atomgroup import AtomGroup
from .atom import Atom
from .functions import load_msgpack
from .position import Position
from .error import BrInputError
# from .xyz import Xyz
import os
import math
import re
import logging
logger = logging.getLogger(__name__)
class Modeling:
_ACE_ALA_NME_path_base = os.path.join(
os.environ.get('PDF_HOME', '.'),
'data',
"ACE_ALA_NME_{}.brd")
_ACE_ALA_NME_comformers = ["trans1", "trans2", "cis1", "cis2"]
def __init__(self):
self._ACE_ALA_NME = {}
for comformer in self._ACE_ALA_NME_comformers:
brd_path = self._ACE_ALA_NME_path_base.format(comformer)
# print(comformer, brd_path)
atomgroup = AtomGroup(load_msgpack(brd_path))
assert(atomgroup.get_number_of_all_atoms() > 0)
self._ACE_ALA_NME[comformer] = atomgroup
def _get_ACE_ALA_NME(self, comformer):
assert(comformer in self._ACE_ALA_NME_comformers)
return self._ACE_ALA_NME[comformer]
# -----------------------------------------------------------------
def get_ACE_simple(self, next_aa):
"""
        Turn the neighboring C-alpha position into a methyl group.
"""
answer = AtomGroup()
CAs = next_aa.pickup_atoms('CA')
if len(CAs) > 0:
answer.set_atom('CA', CAs[0])
else:
raise BrInputError(next_aa,
'cannot found "CA" atom on building ACE.')
Cs = next_aa.pickup_atoms('C')
if len(Cs) > 0:
answer.set_atom('C', Cs[0])
else:
raise BrInputError(next_aa,
'cannot found "C" atom on building ACE.')
Os = next_aa.pickup_atoms('O')
if len(Os) > 0:
answer.set_atom('O', Os[0])
else:
raise BrInputError(next_aa,
'cannot found "O" atom on building ACE.')
answer |= self.add_methyl(answer['CA'], answer['C'])
answer.path = '/ACE'
return answer
def get_NME_simple(self, next_aa):
"""
        Turn the neighboring C-alpha position into a methyl group.
"""
answer = AtomGroup()
CAs = next_aa.pickup_atoms('CA')
if len(CAs) > 0:
answer.set_atom('CA', CAs[0])
else:
raise BrInputError(next_aa,
'cannot found "CA" atom on building NME.')
Ns = next_aa.pickup_atoms('N')
if len(Ns) > 0:
answer.set_atom('N', Ns[0])
else:
raise BrInputError(next_aa,
'cannot found "N" atom on building NME.')
Hs = next_aa.pickup_atoms('H')
if len(Hs) > 0:
answer.set_atom('H', Hs[0])
else:
# for proline
CDs = next_aa.pickup_atoms('CD')
if len(CDs) > 0:
dummy_H = Atom(CDs[0])
dummy_H.symbol = 'H'
answer.set_atom('H', dummy_H)
else:
raise BrInputError(next_aa,
'cannot found "H" or "CD" atom(for proline) on building NME.')
answer |= self.add_methyl(answer['CA'], answer['N'])
answer.path = '/NME'
return answer
# -----------------------------------------------------------------
def get_ACE(self, res, next_aa=None):
"""
template (ACE-ALA-NME) format:
HH3[1-3]-CH3-C - N-CA(HA)-C- N-CH3-HH3[1-3]
|| | | || |
O H CB O H
"""
AAN = None
rmsd_min = 1000.0
for comformer in self._ACE_ALA_NME_comformers:
ref_AAN = self._get_ACE_ALA_NME(comformer)
(matched, rmsd) = self._match_ACE(ref_AAN, res, next_aa)
# print(comformer, rmsd)
if rmsd < rmsd_min:
rmsd_min = rmsd
AAN = matched
if rmsd_min > 1.0:
logger.warn("RMSD value is too large: {}".format(rmsd))
answer = AtomGroup(AAN['1'])
answer.path = '/ACE'
return answer
def _match_ACE(self, AAN, res, next_aa):
'''AAN (ACE-ALA-NME)
'''
assert(isinstance(AAN, AtomGroup))
assert(isinstance(res, AtomGroup))
(AAN_part, res_part) = self._match_residues(AAN['2'], res)
# for ACE
if next_aa is not None:
if next_aa.has_atom('N'):
AAN_part.set_atom('N2', AAN['3']['N'])
res_part.set_atom('N2', next_aa['N'])
if next_aa.has_atom('H'):
AAN_part.set_atom('NH2', AAN['3']['H'])
res_part.set_atom('NH2', next_aa['H'])
if next_aa.has_atom('CA'):
AAN_part.set_atom('CH3', AAN['3']['CH3'])
res_part.set_atom('CH3', next_aa['CA'])
sp = Superposer(AAN_part, res_part)
rmsd = sp.rmsd
matched_AAN = sp.superimpose(AAN)
return (matched_AAN, rmsd)
def get_NME(self, res, next_aa=None):
"""
template (ACE-ALA-NME) format:
HH3[1-3]-CH3-C - N-CA(HA)-C- N-CH3-HH3[1-3]
|| | | || |
O H CB O H
"""
        AAN = None
rmsd_min = 1000.0
for comformer in self._ACE_ALA_NME_comformers:
ref_AAN = self._get_ACE_ALA_NME(comformer)
(matched, rmsd) = self._match_NME(ref_AAN, res, next_aa)
# print(comformer, rmsd)
if rmsd < rmsd_min:
                rmsd_min = rmsd
AAN = matched
if rmsd_min > 1.0:
            logger.warning("RMSD value is too large: {}".format(rmsd_min))
answer = AtomGroup(AAN['3'])
answer.path = '/NME'
return answer
def _match_NME(self, AAN, res, next_aa):
'''AAN (ACE-ALA-NME)
'''
assert(isinstance(AAN, AtomGroup))
assert(isinstance(res, AtomGroup))
(AAN_part, res_part) = self._match_residues(AAN['2'], res)
# for NME
if next_aa is not None:
if next_aa.has_atom('C'):
AAN_part.set_atom('C2', AAN['1']['C'])
res_part.set_atom('C2', next_aa['C'])
if next_aa.has_atom('O'):
AAN_part.set_atom('O2', AAN['1']['O'])
res_part.set_atom('O2', next_aa['O'])
if next_aa.has_atom('CA'):
AAN_part.set_atom('CH3', AAN['1']['CH3'])
res_part.set_atom('CH3', next_aa['CA'])
sp = Superposer(AAN_part, res_part)
rmsd = sp.rmsd
matched_AAN = sp.superimpose(AAN)
return (matched_AAN, rmsd)
def _match_residues(self, res1, res2, max_number_of_atoms=-1):
"""
2つのアミノ酸残基のN, H, CA, HA, C, Oの原子を突き合わせる。
アミノ酸残基がプロリンだった場合は、CDの炭素をHに命名する。
GLYはHA1, HA2とあるので突き合せない。
"""
atom_names = ['CA', 'O', 'C', 'N', 'CB', 'HA']
if max_number_of_atoms == -1:
max_number_of_atoms = len(atom_names)
ans_res1 = AtomGroup()
ans_res2 = AtomGroup()
for atom_name in atom_names:
pickup_atoms1 = res1.pickup_atoms(atom_name)
if len(pickup_atoms1) > 0:
pickup_atoms2 = res2.pickup_atoms(atom_name)
i
|
GeotrekCE/Geotrek-admin
|
geotrek/trekking/tests/test_models.py
|
Python
|
bsd-2-clause
| 21,282
| 0.001786
|
from django.test import TestCase
from django.contrib.gis.geos import (LineString, Polygon, MultiPolygon,
MultiLineString, MultiPoint, Point)
from django.core.exceptions import ValidationError
from django.conf import settings
from django.test.utils import override_settings
from unittest import skipIf
from bs4 import BeautifulSoup
from geotrek.common.tests import TranslationResetMixin
from geotrek.core.tests.factories import PathFactory
from geotrek.zoning.tests.factories import DistrictFactory, CityFactory
from geotrek.trekking.tests.factories import (POIFactory, TrekFactory,
TrekWithPOIsFactory, ServiceFactory,
RatingFactory, RatingScaleFactory)
from geotrek.trekking.models import Trek, OrderedTrekChild
class TrekTest(TranslationResetMixin, TestCase):
def test_is_publishable(self):
t = TrekFactory.create()
t.geom = LineString((0, 0), (1, 1))
self.assertTrue(t.has_geom_valid())
t.description_teaser = ''
self.assertFalse(t.is_complete())
self.assertFalse(t.is_publishable())
t.description_teaser = 'ba'
t.departure = 'zin'
t.arrival = 'ga'
self.assertTrue(t.is_complete())
self.assertTrue(t.is_publishable())
t.geom = MultiLineString([LineString((0, 0), (1, 1)), LineString((2, 2), (3, 3))])
self.assertFalse(t.has_geom_valid())
self.assertFalse(t.is_publishable())
def test_any_published_property(self):
t = TrekFactory.create(published=False)
t.published_fr = False
t.published_it = False
t.save()
self.assertFalse(t.any_published)
t.published_it = True
t.save()
self.assertTrue(t.any_published)
@override_settings(PUBLISHED_BY_LANG=False)
def test_any_published_without_published_by_lang(self):
t = TrekFactory.create(published=False)
t.published_fr = True
t.save()
self.assertFalse(t.any_published)
def test_published_status(self):
t = TrekFactory.create(published=False)
t.published_fr = False
t.published_it = True
t.save()
self.assertEqual(t.published_status, [
{'lang': 'en', 'language': 'English', 'status': False},
{'lang': 'es', 'language': 'Spanish', 'status': False},
{'lang': 'fr', 'language': 'French', 'status': False},
{'lang': 'it', 'language': 'Italian', 'status': True}])
@override_settings(PUBLISHED_BY_LANG=False)
def test_published_status_without_published_by_lang(self):
t = TrekFactory.create(published=True)
t.published_fr = False
t.published_it = False
t.save()
self.assertEqual(t.published_status, [
{'lang': 'en', 'language': 'English', 'status': True},
{'lang': 'es', 'language': 'Spanish', 'status': True},
{'lang': 'fr', 'language': 'French', 'status': True},
{'lang': 'it', 'language': 'Italian', 'status': True}])
@override_settings(PUBLISHED_BY_LANG=False)
def test_published_langs_without_published_by_lang_not_published(self):
t = TrekFactory.create(published=False)
t.published_fr = True
t.published_it = True
t.save()
self.assertEqual(t.published_langs, [])
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
def test_kml_coordinates_should_be_3d(self):
trek = TrekWithPOIsFactory.create()
kml = trek.kml()
parsed = BeautifulSoup(kml, 'lxml')
for placemark in parsed.findAll('placemark'):
coordinates = placemark.find('coordinates')
tuples = [s.split(',') for s in coordinates.string.split(' ')]
self.assertTrue(all([len(i) == 3 for i in tuples]))
def test_pois_types(self):
trek = TrekWithPOIsFactory.create()
type0 = trek.pois[0].type
type1 = trek.pois[1].type
self.assertEqual(2, len(trek.poi_types))
self.assertIn(type0, trek.poi_types)
self.assertIn(type1, trek.poi_types)
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
def test_delete_cascade(self):
p1 = PathFactory.create()
p2 = PathFactory.create()
t = TrekFactory.create(paths=[p1, p2])
# Everything should be all right before delete
self.assertTrue(t.published)
self.assertFalse(t.deleted)
self.assertEqual(t.aggregations.count(), 2)
# When a path is deleted
p1.delete()
t = Trek.objects.get(pk=t.pk)
self.assertFalse(t.published)
self.assertFalse(t.deleted)
self.assertEqual(t.aggregations.count(), 1)
# Reset published status
t.published = True
t.save()
# When all paths are deleted
p2.delete()
t = Trek.objects.get(pk=t.pk)
self.assertFalse(t.published)
self.assertTrue(t.deleted)
self.assertEqual(t.aggregations.count(), 0)
def test_treks_are_sorted_by_name(self):
TrekFactory.create(name='Cb')
TrekFactory.create(name='Ca')
TrekFactory.create(name='A')
TrekFactory.create(name='B')
self.assertQuerysetEqual(Trek.objects.all(),
['<Trek: A>', '<Trek: B>', '<Trek: Ca>', '<Trek: Cb>'],
ordered=False)
def test_trek_itself_as_parent(self):
"""
        Test that a trek cannot be its own parent
"""
trek1 = TrekFactory.create(name='trek1')
OrderedTrekChild.objects.create(parent=trek1, child=trek1)
self.assertRaisesMessage(ValidationError,
"Cannot use itself as child trek.",
trek1.full_clean)
class TrekPublicationDateTest(TranslationResetMixin, TestCase):
def setUp(self):
self.trek = TrekFactory.create(published=False)
def test_default_value_is_null(self):
self.assertIsNone(self.trek.publication_date)
def test_takes_current_date_when_published_becomes_true(self):
self.trek.published = True
self.trek.save()
self.assertIsNotNone(self.trek.publication_date)
def test_becomes_null_when_unpublished(self):
self.test_takes_current_date_when_published_becomes_true()
        self.trek.published = False
self.trek.save()
self.assertIsNone(self.trek.publication_date)
    def test_date_is_not_updated_when_saved_again(self):
import datetime
self.test_takes_current_date_when_published_becomes_true()
old_date = datetime.date(2003, 8, 6)
self.trek.publication_date = old_date
self.trek.save()
self.assertEqual(self.trek.publication_date, old_date)
class RelatedObjectsTest(TranslationResetMixin, TestCase):
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
def test_helpers(self):
p1 = PathFactory.create(geom=LineString((0, 0), (4, 4)))
p2 = PathFactory.create(geom=LineString((4, 4), (8, 8)))
trek = TrekFactory.create(paths=[(p1, 0.5, 1), (p2, 0, 1)])
poi = POIFactory.create(paths=[(p1, 0.6, 0.6)])
poi2 = POIFactory.create(paths=[(p1, 0.6, 0.6)])
service = ServiceFactory.create(paths=[(p1, 0.7, 0.7)])
service.type.practices.add(trek.practice)
trek.pois_excluded.add(poi2.pk)
# /!\ District are automatically linked to paths at DB level
d1 = DistrictFactory.create(geom=MultiPolygon(
Polygon(((-2, -2), (3, -2), (3, 3), (-2, 3), (-2, -2)))))
# Ensure related objects are accessible
self.assertCountEqual(trek.pois_excluded.all(), [poi2])
self.assertCountEqual(trek.all_pois, [poi, poi2])
self.assertCountEqual(trek.pois, [poi])
self.assertCountEqual(trek.services, [service])
self.assertCountEqual(poi.treks, [trek])
self.assertCountEqual(service.treks, [trek])
self.assertCountEqual(trek.districts, [d1])
|
winstein27/social
|
social/feed/migrations/0006_post_author.py
|
Python
|
agpl-3.0
| 660
| 0.001515
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-06-20 23:54
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('authentication', '0003_auto_20160620_2027'),
('feed', '0005_auto_20160620_1547'),
]
operations = [
migrations.AddField(
model_name='post',
            name='author',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='posts', to='authentication.Profile'),
preserve_default=False,
),
]
|
dokterbob/python-postnl-checkout
|
tests/test_django.py
|
Python
|
agpl-3.0
| 13,320
| 0
|
import datetime
import decimal
from django.test import TestCase
from django.core.cache import cache
from httmock import HTTMock
from django_dynamic_fixture import G, N
from postnl_checkout.contrib.django_postnl_checkout.models import Order
from .base import PostNLTestMixin
class OrderTests(PostNLTestMixin, TestCase):
""" Tests for Order model. """
maxDiff = None
def setUp(self):
super(OrderTests, self).setUp()
self.order_datum = datetime.datetime(
year=2011, month=7, day=21,
hour=20, minute=11, second=0
)
self.verzend_datum = datetime.datetime(
year=2011, month=7, day=22,
hour=20, minute=11, second=0
)
def test_save(self):
""" Test saving an Order model. """
instance = N(Order)
instance.clean()
instance.save()
def test_prepare_order(self):
""" Test prepare_order class method. """
# Setup mock response
def response(url, request):
self.assertXMLEqual(
request.body, self.read_file('prepare_order_request.xml')
)
return self.read_file('prepare_order_response.xml')
kwargs = {
'AangebodenBetaalMethoden': {
'PrepareOrderBetaalMethode': {
'Code': 'IDEAL',
'Prijs': '5.00'
}
},
'AangebodenCommunicatieOpties': {
'PrepareOrderCommunicatieOptie': {
'Code': 'NEWS'
}
},
# FIXME: the following is not submitted by SUDS
# Most probably because it is not properly defined in the WSDL
# Contact PostNL about this.
# 'AangebodenOpties': {
# 'PrepareOrderOptie': {
# 'Code': 'WRAP',
# 'Prijs': '2.50'
# }
# },
# 'AfleverOpties': {
# 'AfleverOptie': {
# 'Code': 'PG',
# 'Kosten': '0.00',
# 'Toegestaan': True
# }
# },
'Consument': {
'ExtRef': '[email protected]'
},
'Contact': {
'Url': 'http://www.kadowereld.nl/url/contact'
},
'Order': {
'ExtRef': '1105_900',
'OrderDatum': self.order_datum,
'Subtotaal': '125.00',
'VerzendDatum': self.verzend_datum,
'VerzendKosten': '12.50'
},
'Retour': {
'BeschrijvingUrl': 'http://www.kadowereld.nl/url/beschrijving',
'PolicyUrl': 'http://www.kadowereld.nl/url/policy',
'RetourTermijn': 28,
'StartProcesUrl': 'http://www.kadowereld.nl/url/startproces'
},
'Service': {
'Url': 'http://www.kadowereld.nl/url/service'
}
}
# Execute API call
with HTTMock(response):
instance = Order.prepare_order(**kwargs)
# Assert model field values
self.assertTrue(instance.pk)
self.assertEquals(
instance.order_token, '0cfb4be2-47cf-4eac-865c-d66657953d5c'
)
        self.assertEquals(
            instance.order_ext_ref, '1105_900'
)
self.assertEquals(
instance.order_date, self.order_datum
)
# Assert JSON values
self.assertEquals(instance.prepare_order_request, kwargs)
self.assertEquals(instance.prepare_order_response, {
            'Checkout': {
'OrderToken': '0cfb4be2-47cf-4eac-865c-d66657953d5c',
'Url': (
'http://tpppm-test.e-id.nl/Orders/OrderCheckout'
'?token=0cfb4be2-47cf-4eac-865c-d66657953d5c'
)
},
'Webshop': {
'IntRef': 'a0713e4083a049a996c302f48bb3f535'
}
})
def test_read_order(self):
""" Test read_order method. """
# Setup mock response
def response(url, request):
self.assertXMLEqual(
request.body, self.read_file('read_order_request.xml')
)
return self.read_file('read_order_response.xml')
instance = G(
Order,
order_token='0cfb4be2-47cf-4eac-865c-d66657953d5c'
)
# Read order data
with HTTMock(response):
new_instance = instance.read_order()
response_data = new_instance.read_order_response
self.assertTrue(response_data)
self.assertEquals(response_data, {
'Voorkeuren': {
'Bezorging': {
'Tijdvak': {
'Start': u'10:30',
'Eind': u'08:30'
},
'Datum': datetime.datetime(2012, 4, 26, 0, 0)
}
},
'Consument': {
'GeboorteDatum': datetime.datetime(1977, 6, 15, 0, 0),
'ExtRef': u'jjansen',
'TelefoonNummer': u'06-12345678',
'Email': u'[email protected]'
},
'Facturatie': {
'Adres': {
'Huisnummer': u'1',
'Initialen': u'J',
'Geslacht': u'Meneer',
'Deurcode': None,
'Gebruik': u'P',
'Gebouw': None,
'Verdieping': None,
'Achternaam': u'Jansen',
'Afdeling': None,
'Regio': None,
'Land': u'NL',
'Wijk': None,
'Postcode': u'4131LV',
'Straat': 'Lage Biezenweg',
'Bedrijf': None,
'Plaats': u'Vianen',
'Tussenvoegsel': None,
'Voornaam': u'Jan',
'HuisnummerExt': None
}
},
'Webshop': {
'IntRef': u'a0713e4083a049a996c302f48bb3f535'
},
'CommunicatieOpties': {
'ReadOrderResponseCommunicatieOptie': [
{
'Text': u'Do not deliver to neighbours',
'Code': u'REMARK'
}
]
},
'Bezorging': {
'ServicePunt': {
'Huisnummer': None,
'Initialen': None,
'Geslacht': None,
'Deurcode': None,
'Gebruik': None,
'Gebouw': None,
'Verdieping': None,
'Achternaam': None,
'Afdeling': None,
'Regio': None,
'Land': None,
'Wijk': None,
'Postcode': None,
'Straat': None,
'Bedrijf': None,
'Plaats': None,
'Tussenvoegsel': None,
'Voornaam': None,
'HuisnummerExt': None
},
'Geadresseerde': {
'Huisnummer': u'1',
'Initialen': u'J',
'Geslacht': u'Meneer',
'Deurcode': None,
'Gebruik': u'Z',
'Gebouw': None,
'Verdieping': None,
'Achternaam': u'Janssen',
'Afdeling': None,
'Regio': None,
'Land': u'NL',
'Wijk': None,
'Postcode': u'4131LV',
'Straat': u'Lage Biezenweg ',
'Bedrijf': u'E-ID',
'Plaats': u'Vianen',
'Tussenvoegsel': None,
'Voornaam': u'Jan',
'HuisnummerExt': None
}
},
'Opties': {
'ReadOrderResp
|
dimbyd/latextree
|
latextree/parser/bibliography.py
|
Python
|
mit
| 6,260
| 0.005591
|
# bibliography.py
r'''
Defines the BibItem() and Bibliography() classes (both sub-classed from Node)
The Bibliography() object is initialized directly from
a .bib file using the `bibtexparser` package.
We use registry.ClassFactory for unlisted fields
'''
import os
import logging
log = logging.getLogger(__name__)
import re, bibtexparser
from .registry import ClassFactory
from .command import Command
from .content import Text
class BibItem(Command):
def __init__(self, citation_key=None):
Command.__init__(self)
self.citation_key = citation_key
def __repr__(self):
if self.citation_key:
return '{}:{}({})'.format(self.genus, self.species, self.citation_key)
return '{}:{}()'.format(self.genus, self.species)
def harvard_dict(self):
'''
Create a dictionary of fields required for harvard-style citations.
        Returns a dict mapping bibtex field names to bibliographic information in the required format.
The main difficulty is with the 'author' key.
'''
bibtex_tags = ('title', 'author', 'year', 'publisher', 'isbn')
harv = dict()
surnames = list()
initials = list()
for child in self.children:
# deal with author field
if child.species == 'author':
# split on
# (1) authors: delimited by a comma (,) or an 'and', then
# (2) names: delimited by a point (.) or a space
author_str = child.content
author_list = [x.split(' ') for x in re.split(',|and', author_str)]
author_list = [[x.strip() for x in au if x] for au in author_list]
for author in author_list:
surnames.append(author[-1])
initials.append('.'.join([x[0] for x in author[:-1]]) + '.')
names = ['%s, %s' % name for name in zip(surnames, initials)]
harv['author'] = ' and '.join([', '.join(names[:-1]), names[-1]])
# copy bibtex (tag, content) pairs for tags in bibtex_fields
else:
if child.species in bibtex_tags:
harv[child.species] = child.content
# set citation text e.g. (Evans 2012)
if len(surnames) == 1:
harv['citation'] = '(%s, %s)' % (surnames[0], harv['year'])
elif len(surnames) == 2:
harv['citation'] = '(%s & %s, %s)' % (surnames[0], surnames[1], harv['year'])
        elif len(surnames) >= 3:
harv['citation'] = '(%s et al. %s)' % (surnames[0], harv['year'])
return harv
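    # Worked example of the author handling above (hypothetical input):
    #   'A. Evans and B. Jones' -> surnames ['Evans', 'Jones'], initials
    #   ['A.', 'B.'], so harv['author'] == 'Evans, A. and Jones, B.' and
    #   harv['citation'] == '(Evans & Jones, <year>)'.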
def harvard(self):
''' print harvard-style item (should be done in a template!) '''
title = ''
author = ''
year = ''
publisher = ''
for child in self.children:
if child.species == 'title':
title = child.content
elif child.species == 'author':
author_str = child.content
auth_list = [x.split('.') for x in re.split(',|and', author_str)]
auth_list = [[x.strip() for x in au] for au in auth_list]
auth_parts = []
for auth in auth_list:
name = auth[-1] + ' ' + '.'.join([x[0] for x in auth[:-1]]) + '.'
auth_parts.append(name)
author = ' and '.join([', '.join(auth_parts[:-1]), auth_parts[-1]])
elif child.species == 'year':
year = child.content
            elif child.species == 'publisher':
                publisher = child.content
            else:
                pass
return '%s (%s) %s. %s.' % (author, year, title, publisher)
class Bibliography(Command):
r'''
    Bibliography is a block command whose `children` is a list of BibItem objects.
    This is an example of a Command which logically encloses what follows.
The data is read from a .bib file then parsed into a dictionary by the
`bibtexparser` package.
At the moment it can only pull contents from a single bib file whereas
the command allows for \bibliography{refs1.bib, refs2.bib} etc.
'''
def __init__(self, bibtex_filename=None, LATEX_ROOT=None):
Command.__init__(self)
self.filename = bibtex_filename
if bibtex_filename:
if LATEX_ROOT:
bibtex_filename = os.path.join(LATEX_ROOT, bibtex_filename)
self.read_bibtex_file(bibtex_filename)
def read_bibtex_file(self, bibtex_filename):
if not bibtex_filename[-4:] == '.bib':
bibtex_filename = bibtex_filename + '.bib'
try:
with open(bibtex_filename) as bibtex_file:
chars = bibtex_file.read()
except FileNotFoundError as e:
raise Exception('Bibtex file \'{}\' not found'.format(e.filename))
# call bibtexparser
bibtex_db = bibtexparser.loads(chars)
for entry in bibtex_db.entries:
bibitem = BibItem()
for key, val in entry.items():
if key == 'ID':
bibitem.citation_key = val
else:
node = ClassFactory(str(key), [], BaseClass=Text)()
node.content = val
bibitem.append_child(node)
self.append_child(bibitem)
def chars(self):
'''
The raw format is the original command "\bibliography{refs.bib}"
We are not testing bibtexparser!
'''
return r'\bibliography{{{}}}{}'.format(self.filename, self.post_space)
def add_item(self, bibitem):
if not isinstance(bibitem, BibItem):
            raise Exception('Bibliography objects can only contain BibItem objects')
self.children.append(bibitem)
def harvard(self):
''' string harvard entries together '''
return '\n'.join([x.harvard() for x in self.children])
def test_bibtex():
bibtex_filename = './test_docs/test_article/references.bib'
bib = Bibliography(bibtex_filename)
print(bib.pretty_print())
print(bib.harvard())
print(bib.chars())
if __name__ == '__main__':
test_bibtex()
|
rbarzic/arty-cm0-designstart
|
synt/yaml2mmi.py
|
Python
|
gpl-2.0
| 1,294
| 0.017002
|
import yaml
header="""
<?xml version="1.0" encoding="UTF-8"?>
<MemInfo Version="1" Minor="0">
<Processor Endianness="Little" InstPath="design/cortex">
<AddressSpace
Name="design_1_i_microblaze_0.design_1_i_microblaze_0_local_memory_dlmb_bram_if_cntlr" Begin="0" End="8191">
<BusBlock>
"""
footer="""
</BusBlock>
</AddressSpace>
</Processor>
<Config>
<Option Name="Part" Val="xc7a35tcsg324-1"/>
</Config>
</MemInfo>
"""
bitlane="""
<BitLane MemType="{type}" Placement="{placement}">
<DataWidth MSB="{msb}" LSB="{lsb}"/>
<AddressRange Begin="0" End="{end_address}"/>
<Parity ON="false" NumBits="0"/>
</BitLane>
"""
remap = [3,2,1,0,7,6,5,4,11,10,9,8,15,14,13,12]
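# The remap table reorders the BRAM entries read from bram.yaml so each 2-bit
# lane is emitted in the physical column order the MMI file expects (the
# exact ordering here is an assumption inherited from the board's placement).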
bram = open("bram.yaml", "r")
doc = yaml.load(bram)
bit_pos = 0
bit_width = 2
output = header
# for bram in doc['bram']:
for i in range(len(doc['bram'])):
bram = doc['bram'][remap[i]]
data = dict()
# print bram
data['lsb'] = bit_pos
    data['msb'] = bit_pos + bit_width - 1
data['end_address'] = 16383
data['type'] = 'RAMB36E1'
data['placement'] = bram['SITE'].split('_')[1] # remove RAMB36_ in front of the position string
bit_pos += bit_width
output += bitlane.format(**data)
output += footer
print(output)
|
saltstack/salt
|
tests/pytests/unit/states/test_openvswitch_port.py
|
Python
|
apache-2.0
| 3,213
| 0.000934
|
import pytest
import salt.states.openvswitch_port as openvswitch_port
from tests.support.mock import MagicMock, patch
@pytest.fixture
def configure_loader_modules():
return {openvswitch_port: {"__opts__": {"test": False}}}
def test_present():
"""
Test to verify that the named port exists on bridge, eventually creates it.
"""
name = "salt"
bridge = "br-salt"
ret = {"name": name, "result": None, "comment": "", "changes": {}}
mock = MagicMock(return_value=True)
mock_l = MagicMock(return_value=["salt"])
mock_n = MagicMock(return_value=[])
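    # Fixture roles (sketch): `mock` answers True for bridge/port calls,
    # `mock_l` returns a port list that already contains 'salt', and
    # `mock_n` returns an empty port list (port absent).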
with patch.dict(
openvswitch_port.__salt__,
{
"openvswitch.bridge_exists": mock,
"openvswitch.interface_get_type": MagicMock(return_value='""'),
"openvswitch.port_list": mock_l,
},
):
comt = "Port salt already exists."
ret.update({"comment": comt, "result": True})
assert openvswitch_port.present(name, bridge) == ret
with patch.dict(
openvswitch_port.__salt__,
{
"openvswitch.bridge_exists": mock,
"openvswitch.interface_get_type": MagicMock(return_value='""'),
"openvswitch.port_list": mock_n,
"openvswitch.port_add": mock,
},
):
comt = "Port salt created on bridge br-salt."
ret.update(
{
"comment": comt,
"result": True,
"changes": {
"salt": {
"new": "Created port salt on bridge br-salt.",
"old": "No port named salt present.",
},
},
}
)
assert openvswitch_port.present(name, bridge) == ret
with patch.dict(
openvswitch_port.__salt__,
{
"openvswitch.bridge_exists": mock,
"openvswitch.port_list": mock_n,
"openvswitch.port_add": mock,
"openvswitch.interface_get_options": mock_n,
"openvswitch.interface_get_type": MagicMock(return_value=""),
"openvswitch.port_create_gre": mock,
"dig.check_ip": mock,
},
):
comt = "Port salt created on bridge br-salt."
ret.update(
{
"result": True,
"comment": (
"Created GRE tunnel interface salt with remote ip 10.0.0.1 and key"
" 1 on bridge br-salt."
),
"changes": {
"salt
|
": {
"new": (
"Created GRE tunnel interface salt with remote ip 10.0.0.1"
" and key 1 on bridge br-salt."
),
"old": (
"No GRE
|
tunnel interface salt with remote ip 10.0.0.1 and"
" key 1 on bridge br-salt present."
),
},
},
}
)
assert (
openvswitch_port.present(
name, bridge, tunnel_type="gre", id=1, remote="10.0.0.1"
)
== ret
)
|
hobson/pug
|
pug/setup_util.py
|
Python
|
mit
| 915
| 0.006557
|
# Handy for debugging setup.py
"""Utilities creating reusable, DRY, setup.py installation scripts
Typical usage in setup.py:
>>> global_env, local_env = {}, {}
>>> execfile(join('pug', 'setup_util.py'), global_env, local_env)
>>> get_variable = local_env['get_variable']
"""
import os
def setup(*args, **kwargs):
    print('setup() args = {0}'.format(args))
print('setup() kwargs = {0}'.format(kwargs))
def get_variable(relpath, keyword='__version__'):
"""Read __version__ or other properties from a python file without importing it
    from gist.github.com/technonik/406623 but with added keyword kwarg """
for line in open(os.path.join(os.path.dirname(__file__), relpath), encoding='cp437'):
if keyword in line:
if '"' in line:
return line.split('"')[1]
elif "'" in line:
return line.split("'")[1]
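# Hypothetical usage, assuming a sibling __init__.py that defines __version__:
#   version = get_variable('__init__.py')  # -> e.g. '0.1.0'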
|
taohungyang/cloud-custodian
|
c7n/output.py
|
Python
|
apache-2.0
| 9,658
| 0.000207
|
# Copyright 2015-2017 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Outputs metrics, logs, structured records across
a variety of sources.
See docs/usage/outputs.rst
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import datetime
import gzip
import logging
import shutil
import tempfile
import os
from c7n.registry import PluginRegistry
from c7n.log import CloudWatchLogHandler
from c7n.utils import chunks, local_session, parse_s3, get_retry
DEFAULT_NAMESPACE = "CloudMaid"
log = logging.getLogger('custodian.output')
metrics_outputs = PluginRegistry('c7n.blob-outputs')
blob_outputs = PluginRegistry('c7n.blob-outputs')
@metrics_outputs.register('aws')
class MetricsOutput(object):
"""Send metrics data to cloudwatch
"""
permissions = ("cloudWatch:PutMetricData",)
retry = staticmethod(get_retry(('Throttling',)))
BUFFER_SIZE = 20
@staticmethod
def select(metrics_selector):
if not metrics_selector:
return NullMetricsOutput
# Compatibility for boolean configuration
if isinstance(metrics_selector, bool):
metrics_selector = 'aws'
for k in metrics_outputs.keys():
if k.startswith(metrics_selector):
return metrics_outputs[k]
raise ValueError("invalid metrics option %r" % metrics_selector)
def __init__(self, ctx, namespace=DEFAULT_NAMESPACE):
self.ctx = ctx
self.namespace = namespace
self.buf = []
def get_timestamp(self):
"""
Now, if C7N_METRICS_TZ is set to TRUE, UTC timestamp will be used.
For backwards compatibility, if it is not set, UTC will be the default.
To disable this and use the system's time zone, C7N_METRICS_TZ shoule be set to FALSE.
"""
if os.getenv("C7N_METRICS_TZ", 'TRUE').upper() in ('TRUE', ''):
return datetime.datetime.utcnow()
else:
return datetime.datetime.now()
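        # e.g. C7N_METRICS_TZ=FALSE -> local system time; unset or TRUE -> UTC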
def flush(self):
if self.buf:
self._put_metrics(self.namespace, self.buf)
self.buf = []
def put_metric(self, key, value, unit, buffer=True, **dimensions):
point = self._format_metric(key, value, unit, dimensions)
self.buf.append(point)
if buffer:
# Max metrics in a single request
            if len(self.buf) >= self.BUFFER_SIZE:
self.flush()
else:
self.flush()
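    # Intended use, sketched (hypothetical policy execution context `ctx`):
    #   out = MetricsOutput(ctx)
    #   out.put_metric('ResourceCount', 42, 'Count')  # buffered
    #   out.flush()                                   # pushes buffer to CloudWatch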
def _format_metric(self, key, value, unit, dimensions):
d = {
"MetricName": key,
"Timestamp": self.get_timestamp(),
"Value": value,
"Unit": unit}
d["Dimensions"] = [
{"Name": "Policy", "Value": self.ctx.policy.name},
{"Name": "ResType", "Value": self.ctx.policy.resource_type}]
for k, v in dimensions.items():
d['Dimensions'].append({"Name": k, "Value": v})
return d
    def _put_metrics(self, ns, metrics):
        watch = local_session(self.ctx.session_factory).client('cloudwatch')
        for metric_values in chunks(metrics, self.BUFFER_SIZE):
            self.retry(
                watch.put_metric_data, Namespace=ns, MetricData=metric_values)
class NullMetricsOutput(MetricsOutput):
permissions = ()
def __init__(self, ctx, namespace=DEFAULT_NAMESPACE):
super(NullMetricsOutput, self).__init__(ctx, namespace)
self.data = []
def _put_metrics(self, ns, metrics):
self.data.append({'Namespace': ns, 'MetricData': metrics})
for m in metrics:
if m['MetricName'] not in ('ActionTime', 'ResourceTime'):
log.debug(self.format_metric(m))
def format_metric(self, m):
label = "metric:%s %s:%s" % (m['MetricName'], m['Unit'], m['Value'])
for d in m['Dimensions']:
label += " %s:%s" % (d['Name'].lower(), d['Value'].lower())
return label
class LogOutput(object):
log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
def __init__(self, ctx):
self.ctx = ctx
def get_handler(self):
raise NotImplementedError()
def __enter__(self):
log.debug("Storing output with %s" % repr(self))
self.join_log()
return self
def __exit__(self, exc_type=None, exc_value=None, exc_traceback=None):
self.leave_log()
if exc_type is not None:
log.exception("Error while executing policy")
def join_log(self):
self.handler = self.get_handler()
self.handler.setLevel(logging.DEBUG)
self.handler.setFormatter(logging.Formatter(self.log_format))
mlog = logging.getLogger('custodian')
mlog.addHandler(self.handler)
def leave_log(self):
mlog = logging.getLogger('custodian')
mlog.removeHandler(self.handler)
self.handler.flush()
self.handler.close()
class CloudWatchLogOutput(LogOutput):
log_format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s'
def get_handler(self):
return CloudWatchLogHandler(
log_group=self.ctx.options.log_group,
log_stream=self.ctx.policy.name,
session_factory=lambda x=None: self.ctx.session_factory(
assume=False))
def __repr__(self):
return "<%s to group:%s stream:%s>" % (
self.__class__.__name__,
self.ctx.options.log_group,
self.ctx.policy.name)
class FSOutput(LogOutput):
@staticmethod
    def select(path):
        for k in blob_outputs.keys():
if path.startswith('%s://' % k):
return blob_outputs[k]
# Fall back local disk
return blob_outputs['file']
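    # e.g. select('s3://bucket/prefix') -> S3Output;
    #      select('/tmp/out')           -> DirectoryOutput (local fallback)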
@staticmethod
def join(*parts):
return os.path.join(*parts)
def __init__(self, ctx):
super(FSOutput, self).__init__(ctx)
self.root_dir = self.ctx.output_path or tempfile.mkdtemp()
    def get_handler(self):
return logging.FileHandler(
os.path.join(self.root_dir, 'custodian-run.log'))
def compress(self):
        # Compress files individually so it is easy to walk them without
        # downloading a tarball and extracting it.
for root, dirs, files in os.walk(self.root_dir):
for f in files:
fp = os.path.join(root, f)
with gzip.open(fp + ".gz", "wb", compresslevel=7) as zfh:
with open(fp, "rb") as sfh:
shutil.copyfileobj(sfh, zfh, length=2**15)
os.remove(fp)
@blob_outputs.register('file')
class DirectoryOutput(FSOutput):
permissions = ()
def __init__(self, ctx):
super(DirectoryOutput, self).__init__(ctx)
if self.root_dir.startswith('file://'):
self.root_dir = self.root_dir[len('file://'):]
if self.ctx.output_path is not None:
if not os.path.exists(self.root_dir):
os.makedirs(self.root_dir)
def __repr__(self):
return "<%s to dir:%s>" % (self.__class__.__name__, self.root_dir)
@blob_outputs.register('s3')
class S3Output(FSOutput):
"""
Usage:
.. code-block:: python
with S3Output(session_factory, 's3://bucket/prefix'):
log.info('xyz') # -> log messages sent to custodian-run.log.gz
"""
permissions = ('S3:PutObject',)
def __init__(self, ctx):
super(S3Output, self).__init__(ctx)
self.date_path = datetime.datetime.now().strftime('%Y/%m/%d/%H')
self.s3_path, self.bucket, self.key_prefix = parse_s3(
self.ctx.output_path)
self.root_dir = tempfile.mkdtemp()
self.tran
|
knimon-software/ffos-meets-closure
|
tools/sub/download.py
|
Python
|
mit
| 567
| 0
|
#!/usr/bin/env python
import sys
urllib_urlretrieve = None
try:
# Python 3.x or later
import urllib.request
urllib_urlretrieve = urllib.request.urlretrieve
except ImportError:
# Python 2.x
import urllib
urllib_urlretrieve = urllib.urlretrieve
def download(url, target_path):
    urllib_urlretrieve(url, target_path)
if __name__ == '__main__':
if len(sys.argv) != 3:
        print('Usage: python %s url target_path' % sys.argv[0])
sys.exit()
url = sys.argv[1]
    target_path = sys.argv[2]
download(url, target_path)
|
schukinp/python_training
|
fixture/application.py
|
Python
|
apache-2.0
| 1,091
| 0.003666
|
from selenium import webdriver
from fixture.session import SessionHelper
from fixture.group import GroupHelper
from fixture.contact import ContactHelper
class Application:
def __init__(self, browser, base_url):
if browser == "firefox":
self.wd = webdriver.Firefox(capabilities={"marionette": False}, firefox_binary="C:/Program Files/Mozilla Firefox/firefox.exe")
elif browser == "chrome":
self.wd = webdriver.Chrome()
elif browser == "ie":
self.wd = webdriver.Ie()
else:
raise ValueError("Unrecognized browser %s" % browser)
self.session = SessionHelper(self)
self.group = GroupHelper(self)
        self.contact = ContactHelper(self)
self.base_url = base_url
def is_valid(self):
try:
self.wd.current_url
return True
        except Exception:
return False
def open_homepage(self):
wd = self.wd
        if not wd.current_url.endswith("addressbook/"):
wd.get(self.base_url)
def destroy(self):
self.wd.quit()
|
openstack/nova
|
nova/api/openstack/compute/keypairs.py
|
Python
|
apache-2.0
| 9,891
| 0
|
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Keypair management extension."""
import webob
import webob.exc
from nova.api.openstack import api_version_request
from nova.api.openstack import common
from nova.api.openstack.compute.schemas import keypairs
from nova.api.openstack.compute.views import keypairs as keypairs_view
from nova.api.openstack import wsgi
from nova.api import validation
from nova.compute import api as compute_api
from nova import exception
from nova.objects import keypair as keypair_obj
from nova.policies import keypairs as kp_policies
class KeypairController(wsgi.Controller):
"""Keypair API controller for the OpenStack API."""
_view_builder_class = keypairs_view.ViewBuilder
def __init__(self):
super(KeypairController, self).__init__()
self.api = compute_api.KeypairAPI()
@wsgi.Controller.api_version("2.10")
@wsgi.response(201)
@wsgi.expected_errors((400, 403, 409))
@validation.schema(keypairs.create_v210)
def create(self, req, body):
"""Create or import keypair.
A policy check restricts users from creating keys for other users
params: keypair object with:
name (required) - string
public_key (optional) - string
type (optional) - string
user_id (optional) - string
"""
# handle optional user-id for admin only
user_id = body['keypair'].get('user_id')
return self._create(req, body, key_type=True, user_id=user_id)
@wsgi.Controller.api_version("2.2", "2.9") # noqa
@wsgi.response(201)
@wsgi.expected_errors((400, 403, 409))
@validation.schema(keypairs.create_v22)
def create(self, req, body): # noqa
"""Create or import keypair.
Sending name will generate a key and return private_key
and fingerprint.
Keypair will have the type ssh or x509, specified by type.
You can send a public_key to add an existing ssh/x509 key.
params: keypair object with:
name (required) - string
public_key (optional) - string
type (optional) - string
"""
return self._create(req, body, key_type=True)
@wsgi.Controller.api_version("2.1", "2.1") # noqa
@wsgi.expected_errors((400, 403, 409))
@validation.schema(keypairs.create_v20, "2.0", "2.0")
@validation.schema(keypairs.create, "2.1", "2.1")
def create(self, req, body): # noqa
"""Create or import keypair.
Sending name will generate a key and return private_key
and fingerprint.
You can send a public_key to add an existing ssh key.
params: keypair object with:
name (required) - string
public_key (optional) - string
"""
return self._create(req, body)
def _create(self, req, body, user_id=None, key_type=False):
context = req.environ['nova.context']
params = body['keypair']
name = common.normalize_name(params['name'])
key_type_value = params.get('type', keypair_obj.KEYPAIR_TYPE_SSH)
user_id = user_id or context.user_id
context.can(kp_policies.POLICY_ROOT % 'create',
target={'user_id': user_id})
return_priv_key = False
try:
if 'public_key' in params:
keypair = self.api.import_key_pair(
context, user_id, name, params['public_key'],
key_type_value)
else:
keypair, private_key = self.api.create_key_pair(
context, user_id, name, key_type_value)
keypair['private_key'] = private_key
return_priv_key = True
except exception.KeypairLimitExceeded as e:
raise webob.exc.HTTPForbidden(explanation=str(e))
except exception.InvalidKeypair as exc:
raise webob.exc.HTTPBadRequest(explanation=exc.format_message())
except exception.KeyPairExists as exc:
raise webob.exc.HTTPConflict(explanation=exc.format_message())
return self._view_builder.create(keypair,
private_key=return_priv_key,
key_type=key_type)
@wsgi.Controller.api_version("2.1", "2.1")
@validation.query_schema(keypairs.delete_query_schema_v20)
@wsgi.response(202)
@wsgi.expected_errors(404)
def delete(self, req, id):
self._delete(req, id)
@wsgi.Controller.api_version("2.2", "2.9") # noqa
@validation.query_schema(keypairs.delete_query_schema_v20)
@wsgi.response(204)
@wsgi.expected_errors(404)
def delete(self, req, id): # noqa
self._delete(req, id)
@wsgi.Controller.api_version("2.10") # noqa
@validation.query_schema(keypairs.delete_query_schema_v275, '2.75')
@validation.query_schema(keypairs.delete_query_schema_v210, '2.10', '2.74')
@wsgi.response(204)
@wsgi.expected_errors(404)
def delete(self, req, id): # noqa
# handle optional user-id for admin only
user_id = self._get_user_id(req)
self._delete(req, id, user_id=user_id)
def _delete(self, req, id, user_id=None):
"""Delete a keypair with a given name."""
|
context = req.environ['nova.context']
# handle optional user-id for admin only
user_id = user_id or context.user_id
        context.can(kp_policies.POLICY_ROOT % 'delete',
                    target={'user_id': user_id})
try:
self.api.delete_key_pair(context, user_id, id)
except exception.KeypairNotFound as exc:
raise webob.exc.HTTPNotFound(explanation=exc.format_message())
def _get_user_id(self, req):
if 'user_id' in req.GET.keys():
user_id = req.GET.getall('user_id')[0]
return user_id
@wsgi.Controller.api_version("2.10")
@validation.query_schema(keypairs.show_query_schema_v275, '2.75')
@validation.query_schema(keypairs.show_query_schema_v210, '2.10', '2.74')
@wsgi.expected_errors(404)
def show(self, req, id):
# handle optional user-id for admin only
user_id = self._get_user_id(req)
return self._show(req, id, key_type=True, user_id=user_id)
@wsgi.Controller.api_version("2.2", "2.9") # noqa
@validation.query_schema(keypairs.show_query_schema_v20)
@wsgi.expected_errors(404)
def show(self, req, id): # noqa
return self._show(req, id, key_type=True)
@wsgi.Controller.api_version("2.1", "2.1") # noqa
@validation.query_schema(keypairs.show_query_schema_v20)
@wsgi.expected_errors(404)
def show(self, req, id): # noqa
return self._show(req, id)
def _show(self, req, id, key_type=False, user_id=None):
"""Return data for the given key name."""
context = req.environ['nova.context']
user_id = user_id or context.user_id
context.can(kp_policies.POLICY_ROOT % 'show',
target={'user_id': user_id})
try:
keypair = self.api.get_key_pair(context, user_id, id)
except exception.KeypairNotFound as exc:
raise webob.exc.HTTPNotFound(explanation=exc.format_message())
return self._view_builder.show(keypair, key_type=key_type)
@wsgi.Controller.api_version("2.35")
@validation.query_schema(keypairs.index_query_schema_v275, '2.75')
@validation.query_schema(keypairs.index_query_schema_v235, '2.35', '2.74')
@wsgi.expected_errors(400)
def index(self, req):
user_id = self._get_user_id(req)
re
|
dl1ksv/gr-display
|
python/qa_display_text_msg.py
|
Python
|
gpl-3.0
| 951
| 0.005258
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2020 dl1ksv.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
from gnuradio import gr, gr_unittest
from PyQt5 import Qt
import sip
# from gnuradio import blocks
try:
from display import text_msg
except ImportError:
import os
import sys
dirname, filename = os.path.split(os.path.abspath(__file__))
sys.path.append(os.path.join(dirname, "bindings"))
    from display import text_msg
class qa_display_text_msg(gr_unittest.TestCase):
def setUp(self):
self.tb = gr.top_block()
def tearDown(self):
self.tb = None
def test_instance(self):
instance = text_msg('TestString','test',80,None)
b = sip.wrapinstance(instance.pyqwidget(),Qt.QWidget)
def test_001_descriptive_test_name(self):
# set up fg
self.tb.run()
# check data
if __name__ == '__main__':
gr_unittest.run(qa_display_text_msg)
|
ipa-mdl/catkin_pkg
|
test/test_templates.py
|
Python
|
bsd-3-clause
| 14,395
| 0.001389
|
import os
import unittest
import tempfile
import shutil
from mock import MagicMock, Mock
from catkin_pkg.package_templates import _safe_write_files, create_package_files, \
create_cmakelists, create_package_xml, PackageTemplate, _create_include_macro, \
_create_targetlib_args
from catkin_pkg.package import parse_package, Dependency, Export, Url, PACKAGE_MANIFEST_FILENAME
from catkin_pkg.python_setup import generate_distutils_setup
def u(str):
try:
return unicode(str)
except NameError:
return str
class TemplateTest(unittest.TestCase):
def get_maintainer(self):
maint = Mock()
maint.email = '[email protected]'
maint.name = 'John Foo'
return maint
def test_safe_write_files(self):
file1 = os.path.join('foo', 'bar')
file2 = os.path.join('foo', 'baz')
newfiles = {file1: 'foobar', file2: 'barfoo'}
try:
rootdir = tempfile.mkdtemp()
_safe_write_files(newfiles, rootdir)
self.assertTrue(os.path.isfile(os.path.join(rootdir, file1)))
self.assertTrue(os.path.isfile(os.path.join(rootdir, file2)))
self.assertRaises(ValueError, _safe_write_files, newfiles, rootdir)
finally:
shutil.rmtree(rootdir)
def test_create_cmakelists(self):
mock_pack = MagicMock()
mock_pack.name = 'foo'
mock_pack.catkin_deps = []
result = create_cmakelists(mock_pack, 'groovy')
self.assertTrue('project(foo)' in result, result)
self.assertTrue('find_package(catkin REQUIRED)' in result, result)
mock_pack.catkin_deps = ['bar', 'baz']
result = create_cmakelists(mock_pack, 'groovy')
self.assertTrue('project(foo)' in result, result)
expected = """find_package(catkin REQUIRED COMPONENTS
bar
baz
)"""
self.assertTrue(expected in result, result)
def test_create_package_xml(self):
maint = self.get_maintainer()
pack = PackageTemplate(name='foo',
description='foo',
version='0.0.0',
maintainers=[maint],
licenses=['BSD'])
result = create_package_xml(pack, 'groovy')
self.assertTrue('<name>foo</name>' in result, result)
def test_create_targetlib_args(self):
mock_pack = MagicMock()
mock_pack.name = 'foo'
mock_pack.catkin_deps = []
mock_pack.boost_comps = []
mock_pack.system_deps = []
statement = _create_targetlib_args(mock_pack)
self.assertEqual('# ${catkin_LIBRARIES}\n', statement)
mock_pack.catkin_deps = ['roscpp', 'rospy']
mock_pack.boost_comps = []
mock_pack.system_deps = []
statement = _create_targetlib_args(mock_pack)
self.assertEqual('# ${catkin_LIBRARIES}\n', statement)
mock_pack.catkin_deps = ['roscpp']
mock_pack.boost_comps = ['thread', 'filesystem']
mock_pack.system_deps = []
statement = _create_targetlib_args(mock_pack)
self.assertEqual('# ${catkin_LIBRARIES}\n# ${Boost_LIBRARIES}\n', statement)
mock_pack.catkin_deps = ['roscpp']
mock_pack.boost_comps = []
mock_pack.system_deps = ['log4cxx', 'BZip2']
statement = _create_targetlib_args(mock_pack)
self.assertEqual('# ${catkin_LIBRARIES}\n# ${log4cxx_LIBRARIES}\n# ${BZip2_LIBRARIES}\n', statement)
mock_pack.catkin_deps = ['roscpp']
mock_pack.boost_comps = ['thread', 'filesystem']
mock_pack.system_deps = ['log4cxx', 'BZip2']
statement = _create_targetlib_args(mock_pack)
self.assertEqual('# ${catkin_LIBRARIES}\n# ${Boost_LIBRARIES}\n# ${log4cxx_LIBRARIES}\n# ${BZip2_LIBRARIES}\n', statement)
def test_create_include_macro(self):
mock_pack = MagicMock()
mock_pack.name = 'foo'
mock_pack.catkin_deps = []
mock_pack.boost_comps = []
mock_pack.system_deps = []
statement = _create_include_macro(mock_pack)
self.assertEqual('# include_directories(include)', statement)
mock_pack.catkin_deps = ['roscpp', 'rospy']
mock_pack.boost_comps = []
mock_pack.system_deps = []
statement = _create_include_macro(mock_pack)
self.assertEqual('# include_directories(include)\ninclude_directories(\n ${catkin_INCLUDE_DIRS}\n)', statement)
mock_pack.catkin_deps = ['roscpp']
mock_pack.boost_comps = ['thread', 'filesystem']
mock_pack.system_deps = []
statement = _create_include_macro(mock_pack)
self.assertEqual('# include_directories(include)\ninclude_directories(\n ${catkin_INCLUDE_DIRS}\n ${Boost_INCLUDE_DIRS}\n)', statement)
mock_pack.catkin_deps = ['roscpp']
mock_pack.boost_comps = []
mock_pack.system_deps = ['log4cxx', 'BZip2']
statement = _create_include_macro(mock_pack)
self.assertEqual('# include_directories(include)\n# TODO: Check names of system library include directories (log4cxx, BZip2)\ninclude_directories(\n ${catkin_INCLUDE_DIRS}\n ${log4cxx_INCLUDE_DIRS}\n ${BZip2_INCLUDE_DIRS}\n)', statement)
mock_pack.catkin_deps = ['roscpp']
mock_pack.boost_comps = ['thread', 'filesystem']
mock_pack.system_deps = ['log4cxx', 'BZip2']
statement = _create_include_macro(mock_pack)
self.assertEqual('# include_directories(include)\n# TODO: Check names of system library include directories (log4cxx, BZip2)\ninclude_directories(\n ${catkin_INCLUDE_DIRS}\n ${Boost_INCLUDE_DIRS}\n ${log4cxx_INCLUDE_DIRS}\n ${BZip2_INCLUDE_DIRS}\n)', statement)
def test_create_package(self):
maint = self.get_maintainer()
pack = PackageTemplate(name='bar',
description='bar',
package_format='1',
version='0.0.0',
version_abi='pabi',
maintainers=[maint],
licenses=['BSD'])
try:
rootdir = tempfile.mkdtemp()
file1 = os.path.join(rootdir, 'CMakeLists.txt')
file2 = os.path.join(rootdir, PACKAGE_MANIFEST_FILENAME)
            create_package_files(rootdir, pack, 'groovy', {file1: ''})
self.assertTrue(os.path.isfile(file1))
self.assertTrue(os.path.isfile(file2))
finally:
shutil.rmtree(rootdir)
def test_create_package_template(self):
template = PackageTemplate._create_package_template(
package_name='bar2',
            catkin_deps=['dep1', 'dep2'])
self.assertEqual('dep1', template.build_depends[0].name)
self.assertEqual('dep2', template.build_depends[1].name)
def test_parse_generated(self):
maint = self.get_maintainer()
pack = PackageTemplate(name='bar',
package_format=1,
version='0.0.0',
version_abi='pabi',
urls=[Url('foo')],
description='pdesc',
maintainers=[maint],
licenses=['BSD'])
try:
rootdir = tempfile.mkdtemp()
file1 = os.path.join(rootdir, 'CMakeLists.txt')
file2 = os.path.join(rootdir, PACKAGE_MANIFEST_FILENAME)
create_package_files(rootdir, pack, 'groovy')
self.assertTrue(os.path.isfile(file1))
self.assertTrue(os.path.isfile(file2))
pack_result = parse_package(file2)
self.assertEqual(pack.name, pack_result.name)
self.assertEqual(pack.package_format, pack_result.package_format)
self.assertEqual(pack.version, pack_result.version)
self.assertEqual(pack.version_abi, pack_result.version_abi)
self.assertEqual(pack.description, pack_result.description)
self.assertEqual(pack.maintainers[0].name, pack_result.maintainers[0].name)
self.assertEqual(pack.maintaine
|
ciyer/opensnp-fun
|
run_plink_reformat.py
|
Python
|
mit
| 886
| 0.023702
|
#!/usr/bin/env python
# encoding: utf-8
import glob
import os
import subprocess
'''
Convert 23andMe files to
PLINK format
'''
def twenty3_and_me_files():
"""Return the opensnp files that are 23 and me format"""
    all_twenty3_and_me_files = glob.glob('../opensnp_datadump.current/*.23andme.txt')
fifteen_mb = 15 * 1000 * 1000
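    # files below ~15 MB are assumed to be junk/partial uploads and skipped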
    non_junk_files = [path for path in all_twenty3_and_me_files if os.path.getsize(path) > fifteen_mb]
return non_junk_files
def run_plink_format(usable_files):
"""Reformat the 23andMe files into plink binary stuff"""
for f in usable_files:
        gid = f.split("/")[-1].split("_")[1].replace("file","")
call = "../plink_v190/plink --23file "+ f + " F" + gid + "ID" + gid + "I 1"
call += " --out ../plink_binaries/" + gid
print "convert gid " + gid
subprocess.call(call,shell=True)
usable_files = twenty3_and_me_files()
run_plink_format(usable_files)
|
hadithhouse/hadithhouse
|
hadiths/initial_data.py
|
Python
|
mit
| 5,391
| 0.000518
|
# -*- coding: utf-8 -*-
"""
Contains data that initially get added to the database to bootstrap it.
"""
from __future__ import unicode_literals
# pylint: disable=invalid-name
prophet_muhammad = {
'title': u'Prophet',
'display_name': u'النبي محمد (صلى الله عليه وآله وسلم)'.strip(),
'full_name': u'محمد بن عبد الله بن عبد المطلب بن هاشم'.strip(),
'brief_desc': u'نبي الإسلام، عليه وعلى آله الصلاة والسلام'.strip(),
'birth_year': 570,
'death_year': 632
}
imam_alsadiq = {
'title': u'Imam',
'display_name': u'الإمام الصادق (عليه السلام)',
'full_name': u"جعفر بن محمد الصادق",
'brief_desc': u'إمام من أئمة المسلمين وسادس أئمة الشيعة الاثنى عشرية'
}
# pylint: disable=line-too-long
first_shia_hadith_text = u'''
نضر الله عبدا سمع مقالتي فوعاها وحفظها وبلغها من لم يسمعها، فرب حامل فقه غير فقيه ورب حامل فقه إلى من هو أفقه منه، ثلاث لا يغل عليهن قلب امرئ مسلم: إخلاص العمل لله، والنصحية لائمة المسلمين، واللزوم لجماعتهم، فإن دعوتهم محيطة من ورائهم، المسلمون إخوة تتكافى دماؤهم ويسعى بذمتهم أدناهم.
'''.strip()
first_sunni_hadith_text = u'''
نضر الله عبدا سمع مقالتي فحفظها ووعاها واداها ، فرب حامل فقه غير فقيه ، ورب حامل فقه الى من هو افقه منه ، ثلاث لا يغل عليهن قلب مسلم : اخلاص العمل لله ، والنصيحة للمسلمين ، ولزوم جماعتهم ، فان دعوتهم تحيط من ورايهم
'''.strip()
# pylint: enable=line-too-long
shia_first_hadith_persons = [
u"عبد الله بن أبي يعفور العبدي".strip(),
u"ابان بن عثمان الأحمر البجلي".strip(),
u"احمد بن محمد بن عمرو بن ابي نصر البزنطي".strip(),
u"احمد بن عيسى".strip()]
sunni_first_hadith_persons = [
u"عبد الله بن مسعود".strip(),
u"عبد الرحمن بن عبد الله الهذلي".strip(),
u"عبد الملك بن عمير اللخمي".strip(),
u"سفيان بن عيينة الهلالي".strip(),
]
holy_quran = u"القرآن الكريم"
holy_quran_suras = [
u"الفاتحة",
u"البقرة",
u"آل عمران",
u"النساء",
u"المائدة",
u"اﻷنعام",
u"اﻷعراف",
u"اﻷنفال",
u"التوبة",
u"يونس",
u"هود",
u"يوسف",
u"الرعد",
u"إبراهيم",
u"الحجر",
u"النحل",
u"اﻹسراء",
u"الكهف",
u"مريم",
u"طه",
u"اﻷنبياء",
u"الحج",
u"المؤمنون",
u"النور",
u"الفرقان",
u"الشعراء",
u"النمل",
u"القصص",
u"العنكبوت",
u"الروم",
u"لقمان",
u"السجدة",
u"اﻷحزاب",
u"سبأ",
u"فاطر",
u"يس",
u"الص
|
افات",
u"ص",
u"الزمر",
u"غافر",
u"فصلت",
u"الشورى",
u"الزخرف",
u"الدخان",
u"الجاثية",
u"اﻷحقاف",
u"محمد",
u"الفتح",
u"الحجرات",
u"ق",
u"الذاريات",
u"الطور",
u"النجم",
u"القمر",
u"الرحمن",
u"الواقعة",
u"الحديد",
u"المج
|
ادلة",
u"الحشر",
u"الممتحنة",
u"الصف",
u"الجمعة",
u"المنافقون",
u"التغابن",
u"الطلاق",
u"التحريم",
u"الملك",
u"القلم",
u"الحاقة",
u"المعارج",
u"نوح",
u"الجن",
u"المزمل",
u"المدثر",
u"القيامة",
u"اﻹنسان",
u"المرسلات",
u"النبأ",
u"النازعات",
u"عبس",
u"التكوير",
u"الانفطار",
u"المطففين",
u"الانشقاق",
u"البروج",
u"الطارق",
u"اﻷعلى",
u"الغاشية",
u"الفجر",
u"البلد",
u"الشمس",
u"الليل",
u"الضحى",
u"الشرح",
u"التين",
u"العلق",
u"القدر",
u"البينة",
u"الزلزلة",
u"العاديات",
u"القارعة",
u"التكاثر",
u"العصر",
u"الهمزة",
u"الفيل",
u"قريش",
u"الماعون",
u"الكوثر",
u"الكافرون",
u"النصر",
u"المسد",
u"اﻹخلاص",
u"الفلق",
u"الناس"]
# Al-Kafi, the chapter on the Prophet's (peace be upon him and his family)
# command of sincere counsel to the leaders of the Muslims and adherence to
# their community, and who they are
# http://www.mezan.net/books/kafi/kafi1/html/ara/books/al-kafi-1/166.html
shia_first_hadith_book = u"الكافي"
# Musnad al-Shafi'i, hadith 1105
# https://library.islamweb.net/hadith/display_hbook.php?bk_no=51&hid=1105&pid=
sunni_first_hadith_book = u"مسند الشافعي"
first_hadith_tag = u'علم الحديث'
|
daohu527/leetcode_learning
|
665. Non-decreasing Array/code.py
|
Python
|
gpl-3.0
| 554
| 0.001805
|
class Solution(object):
    def checkPossibility(self, nums):
"""
:type nums: List[int]
:rtype: bool
"""
n = len(nums)
t = 0
        for i in range(n - 1):
if nums[i] > nums[i+1]:
if i-1 < 0 or i+2 > n-1:
t += 1
elif nums[i-1] <= nums[i+1]:
t += 1
elif nums[i+2] >= nums[i]:
t += 1
else:
return False
        return t <= 1
|
wisechengyi/pants
|
src/python/pants/rules/core/test_test.py
|
Python
|
apache-2.0
| 8,678
| 0.001498
|
# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from abc import ABCMeta, abstractmethod
from pathlib import PurePath
from textwrap import dedent
from typing import List, Tuple, Type
from unittest.mock import Mock
import pytest
from pants.base.exceptions import ResolveError
from pants.build_graph.address import Address
from pants.engine.fs import (
EMPTY_DIRECTORY_DIGEST,
Digest,
FileContent,
InputFilesContent,
Workspace,
)
from pants.engine.interactive_runner import InteractiveProcessRequest, InteractiveRunner
from pants.engine.legacy.graph import HydratedTargetsWithOrigins, HydratedTargetWithOrigin
from pants.engine.legacy.structs import TargetAdaptorWithOrigin
from pants.engine.rules import UnionMembership
from pants.rules.core.fmt_test import FmtTest
from pants.rules.core.test import (
AddressAndTestResult,
CoverageDataBatch,
CoverageReport,
FilesystemCoverageReport,
Status,
Test,
TestDebugRequest,
TestResult,
TestRunner,
WrappedTestRunner,
run_tests,
)
from pants.testutil.engine.util import MockConsole, MockGet, run_rule
from pants.testutil.test_base import TestBase
from pants.util.ordered_set import OrderedSet
# TODO(#9141): replace this with a proper util to create `GoalSubsystem`s
class MockOptions:
def __init__(self, **values):
self.values = Mock(**values)
class MockTestRunner(TestRunner, metaclass=ABCMeta):
@staticmethod
def is_valid_target(_: TargetAdaptorWithOrigin) -> bool:
return True
@staticmethod
@abstractmethod
def status(_: Address) -> Status:
pass
@staticmethod
def stdout(_: Address) -> str:
return ""
@staticmethod
def stderr(_: Address) -> str:
return ""
@property
def test_result(self) -> TestResult:
address = self.adaptor_with_origin.adaptor.address
return TestResult(self.status(address), self.stdout(address), self.stderr(address))
class SuccessfulTestRunner(MockTestRunner):
@staticmethod
def status(_: Address) -> Status:
return Status.SUCCESS
@staticmethod
def stdout(address: Address) -> str:
return f"Successful test runner: Passed for {address}!"
class ConditionallySucceedsTestRunner(MockTestRunner):
@staticmethod
def status(address: Address) -> Status:
return Status.FAILURE if address.target_name == "bad" else Status.SUCCESS
@staticmethod
def stdout(address: Address) -> str:
return (
f"Conditionally succeeds test runner: Passed for {address}!"
if address.target_name != "bad"
else ""
)
@staticmethod
def stderr(address: Address) -> str:
return (
f"Conditionally succeeds test runner: Had an issue for {address}! Oh no!"
if address.target_name == "bad"
else ""
)
class InvalidTargetTestRunner(MockTestRunner):
@staticmethod
def is_valid_target(_: TargetAdaptorWithOrigin) -> bool:
return False
@staticmethod
def status(_: Address) -> Status:
return Status.FAILURE
class TestTest(TestBase):
def make_ipr(self) -> InteractiveProcessRequest:
input_files_content = InputFilesContent(
(FileContent(path="program.py", content=b"def test(): pass"),)
)
digest = self.request_single_product(Digest, input_files_content)
return InteractiveProcessRequest(
argv=("/usr/bin/python", "program.py",), run_in_workspace=False, input_files=digest,
)
def run_test_rule(
self,
*,
test_runner: Type[TestRunner],
targets: List[HydratedTargetWithOrigin],
debug: bool = False,
) -> Tuple[int, str]:
console = MockConsole(use_colors=False)
options = MockOptions(debug=debug, run_coverage=False)
interactive_runner = InteractiveRunner(self.scheduler)
workspace = Workspace(self.scheduler)
union_membership = UnionMembership({TestRunner: OrderedSet([test_runner])})
def mock_coordinator_of_tests(
wrapped_test_runner: WrappedTestRunner,
) -> AddressAndTestResult:
runner = wrapped_test_runner.runner
return AddressAndTestResult(
address=runner.adaptor_with_origin.adaptor.address,
test_result=runner.test_result, # type: ignore[attr-defined]
)
result: Test = run_rule(
run_tests,
rule_args=[
console,
options,
interactive_runner,
HydratedTargetsWithOrigins(targets),
workspace,
union_membership,
],
mock_gets=[
MockGet(
product_type=AddressAndTestResult,
subject_type=WrappedTestRunner,
mock=lambda wrapped_test_runner: mock_coordinator_of_tests(wrapped_test_runner),
),
MockGet(
product_type=TestDebugRequest,
subject_type=TestRunner,
mock=lambda _: TestDebugRequest(self.make_ipr()),
),
MockGet(
product_type=CoverageReport,
subject_type=CoverageDataBatch,
mock=lambda _: FilesystemCoverageReport(
result_digest=EMPTY_DIRECTORY_DIGEST,
directory_to_materialize_to=PurePath("mockety/mock"),
),
),
],
union_membership=union_membership,
)
return result.exit_code, console.stdout.getvalue()
def test_empty_target_noops(self) -> None:
exit_code, stdout = self.run_test_rule(
test_runner=SuccessfulTestRunner,
targets=[FmtTest.make_hydrated_target_with_origin(include_sources=False)],
)
assert exit_code == 0
assert stdout.strip() == ""
def test_invalid_target_noops(self) -> None:
exit_code, stdout = self.run_test_rule(
test_runner=InvalidTargetTestRunner,
targets=[FmtTest.make_hydrated_target_with_origin()],
)
assert exit_code == 0
assert stdout.strip() == ""
def test_single_target(self) -> None:
target_with_origin = FmtTest.make_hydrated_target_with_origin()
address = target_with_origin.target.adaptor.address
exit_code, stdout = self.run_test_rule(
test_runner=SuccessfulTestRunner, targets=[target_with_origin],
)
assert exit_code == 0
assert stdout == dedent(
f"""\
{address} stdout:
{SuccessfulTestRunner.stdout(address)}
{address} ..... SUCCESS
"""
)
def test_multiple_targets(self) -> None:
good_target = FmtTest.make_hydrated_target_with_origin(name="good")
good_address = good_target.target.adaptor.address
bad_target = FmtTest.make_hydrated_target_with_origin(name="bad")
bad_address = bad_target.target.adaptor.address
exit_code, stdout = self.run_test_rule(
test_runner=ConditionallySucceedsTestRunner, targets=[good_target, bad_target],
)
assert exit_code == 1
assert stdout == dedent(
f"""\
{good_address} stdout:
{ConditionallySucceedsTestRunner.stdout(good_address)}
{bad_address} stderr:
{ConditionallySucceedsTestRunner.stderr(bad_address)}
{good_address} ..... SUCCESS
{bad_address} ..... FAILURE
"""
)
def test_single_debug_target(self) -> None:
exit_code, stdout = self.run_test_rule(
test_runner=SuccessfulTestRunner,
targets=[FmtTest.make_hydrated_tar
|
mfrey/baltimore
|
analysis/hopcountanalysis.py
|
Python
|
gpl-3.0
| 3,831
| 0.006004
|
#!/usr/bin/env python2.7
import logging
import numpy as np
from .analysis import Analysis
class HopCountAnalysis(Analysis):
def __init__(self, scenario, location, repetitions, csv):
Analysis.__init__(self, scenario, location, "hopCount", repetitions, csv)
self.logger = logging.getLogger('baltimore.analysis.HopCountAnalysis')
self.logger.debug('creating an instance of HopCountAnalysis for scenario %s', scenario)
self.data_min = {}
self.data_max = {}
self.data_median = {}
self.data_std = {}
self.data_avg = {}
def evaluate(self, experiment_results, is_verbose=False):
self.logger.info("running hop count analysis")
hop_count = {}
raw_data = []
for repetition in experiment_results:
nodes = experiment_results.nodes_have_metric("hopCount", repetition)
for node in nodes:
data = experiment_results.get_tuple_metric_per_node("hopC
|
ount", node, repetition)
for element in data:
raw_data.append([repetition, node, float(element[0]), int(element[1])])
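# each raw row is [repetition, node, timestamp, hop count]
# (this matches the header written by export_csv_raw below)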
if node not in hop_count:
hop_count[node] = []
hop_count[node].append(raw_data)
raw_data = []
for node, data in list(hop_count.items()):
hop_count_data = [element[3] for repetition i
|
n data for element in repetition]
self.data_min[node] = np.amin(hop_count_data)
self.data_max[node] = np.amax(hop_count_data)
self.data_median[node] = np.median(hop_count_data)
self.data_std[node] = np.std(hop_count_data)
self.data_avg[node] = np.average(hop_count_data)
self.logger.info("Printing hop count statistics for node %s", node)
self.logger.info("Minimum hop count = %f nodes", self.data_min[node])
self.logger.info("Maximum hop count = %f nodes", self.data_max[node])
self.logger.info("Std.Deviation = %f nodes", self.data_std[node])
self.logger.info("Average hop count = %f nodes", self.data_avg[node])
self.logger.info("Median hop count = %f nodes", self.data_median[node])
if self.draw:
for node in hop_count:
self.metric = "hop_count_node-" + str(node)
self.plot_boxplot("Average Hop Count (Node " + str(node) + ")", "Repetition", "Hop Count", self.data_avg[node])
if self.csv:
self.export_csv()
self.export_csv_raw(hop_count)
def export_csv(self):
self.metric = "hopCount"
file_name = self.scenario + "_" + self.metric + "_aggregated.csv"
disclaimer = [['#'],['#'], ['# ' + str(self.date) + ' - hop count for scenario ' + self.scenario],['# aggregated over ' + str(self.repetitions) + ' repetitions'],['#']]
header = ['node', 'min', 'max', 'median', 'std', 'avg']
data = []
for node in self.data_min:
data.append([node, self.data_min[node], self.data_max[node], self.data_median[node], self.data_std[node], self.data_avg[node]])
self._write_csv_file(file_name, disclaimer, header, data)
def export_csv_raw(self, raw_data):
self.metric = "hopCount"
file_name = self.scenario + "_" + self.metric + ".csv"
disclaimer = [['#'],['#'], ['# ' + str(self.date) + ' - hop count for scenario ' + self.scenario],['#']]
header = ['node', 'repetition', 'timestamp', 'hop count']
data = []
for node, hop_counts in list(raw_data.items()):
for values in hop_counts:
for element in values:
data.append([node, element[0], element[2], element[3]])
self._write_csv_file(file_name, disclaimer, header, data)
|
KDE/twine2
|
kbindinggenerator/cmake.py
|
Python
|
lgpl-3.0
| 15,357
| 0.004428
|
#!env python
# Copyright 2008 Simon Edwards <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a c
|
opy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston,
|
MA 02110-1301 USA
import re
import os.path
import glob
import kbindinggenerator.cmakeparser as cmakeparser
def ExtractInstallFiles(filename=None,input=None,variables=None):
if variables is None:
variables = {}
else:
variables = variables.copy()
install_list = []
if filename is not None:
variables['cmake_current_source_dir'] = [os.path.dirname(filename)]
ExtractInstallFilesWithContext(variables, install_list, filename,input)
# print(repr(variables))
# print(repr(install_list))
return install_list
def ExtractInstallFilesWithContext(variables, install_list, filename=None, input=None, fileprefix=""):
inputstring = ""
currentdir = ""
if input:
inputstring = input
elif filename:
currentdir = os.path.dirname(filename)
fhandle = open(filename)
inputstring= fhandle.read()
fhandle.close()
parser = cmakeparser.CMakeParser()
command_list = parser.parse(inputstring, filename)
include_dirs = []
for commandobject in command_list:
command = commandobject.command().lower()
args = [arg.value() for arg in commandobject.arguments()]
if command=="set":
variables[args[0].lower()] = ExpandArgs(variables, args[1:], filename)
elif command=="install":
install_args = ExpandArgs(variables, args, filename)
for arg in install_args:
if arg.endswith('.h'):
for basepath in [currentdir, fileprefix] + include_dirs:
fullpath = os.path.join(basepath, arg)
# print(fullpath)
if os.path.exists(fullpath):
install_list.append(fullpath)
break
else:
fullpath = os.path.join(currentdir, basepath, arg)
if os.path.exists(fullpath):
install_list.append(fullpath)
break
else:
print("Unable to find header file " + arg)
elif command=="include":
if filename is not None:
command_args = ExpandArgs(variables, args, filename)
this_dir = os.path.dirname(filename)
for arg in command_args:
if len(arg.strip())!=0:
include_filename = os.path.join(this_dir,arg)
if os.path.exists(include_filename):
ExtractInstallFilesWithContext(variables, install_list, include_filename)
elif command=="add_subdirectory":
if filename is not None:
command_args = ExpandArgs(variables, args, filename)
this_dir = os.path.dirname(filename)
for arg in command_args:
if len(arg.strip())!=0:
include_filename = os.path.join(this_dir,arg,"CMakeLists.txt")
if os.path.exists(include_filename):
ExtractInstallFilesWithContext(variables, install_list, include_filename, fileprefix=os.path.join(fileprefix,arg))
elif command=="file":
# This is a minimal cmake FILE() implementation; only GLOB is supported.
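# e.g. a hypothetical CMakeLists.txt line this branch can handle:
#   file(GLOB camera_HDRS RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.h)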
command_args = ExpandArgs(variables, args, filename)
varname = None
result = None
try:
it = iter(command_args)
arg = next(it)
if arg.lower()=='glob' and filename is not None:
arg = next(it)
varname = arg
arg = next(it)
relative_dir = os.path.dirname(filename)
if arg.lower()=='relative':
arg = next(it)
relative_dir = arg
arg = next(it)
if not relative_dir.endswith('/'):
relative_dir += '/'
result = []
current_dir = variables['cmake_current_source_dir'][0]
while True:
for x in glob.iglob(os.path.join(current_dir, arg)):
if x.startswith(relative_dir):
x = x[len(relative_dir):]
result.append(x)
arg = next(it)
except StopIteration:
if varname is not None and result is not None:
variables[varname.lower()] = result
elif command=="ecm_generate_headers":
header_args = ExpandArgs(variables, args, filename)
# print("ecm_generate_headers:"+repr(header_args))
prefix=""
if "RELATIVE" in header_args:
prefix = header_args[header_args.index("RELATIVE")+1]
for item in header_args:
if item == "REQUIRED_HEADERS" or item == "RELATIVE":
break
headername = os.path.join(currentdir, prefix, item.lower() + ".h")
if os.path.exists(headername):
install_list.append(headername)
elif command == "target_include_directories":
include_args = ExpandArgs(variables, args, filename)
if "PUBLIC" in include_args:
for item in include_args[include_args.index("PUBLIC")+1:]:
include_dirs.append(item)
#print("include dirs:",repr(include_dirs))
def ExpandArgs(variables, args, filename=None):
rex = re.compile(r'(\$\{[^\}]+\})')
fixed_args = []
for arg in args:
fixed_parts = []
if arg.startswith("$<BUILD_INTERFACE:"):
arg = arg[len("$<BUILD_INTERFACE:"): -1]
parts = rex.split(arg)
for part in parts:
if part.startswith("${"):
name = part[2:-1].lower()
if name in variables:
value = variables[name]
if len(value)==1:
fixed_parts.append(variables[name][0])
else:
fixed_args.extend(value)
else:
print("Undefined cmake variable '" + name + "' in " + filename)
else:
fixed_parts.append(part)
fixed_args.append(''.join(fixed_parts))
return fixed_args
def __FetchCommands(lexer):
topmode = True
command_list = []
command = None
args = []
tok = lexer.token()
while 1:
if not tok:
if command:
command_list.append( (command,args) )
break # No more input
if topmode:
if tok.type=="COMMAND":
command = tok.value
topmode = False
else:
print("Fail")
# Fail
tok = lexer.token()
else:
# Grab arguments
if tok.type=="COMMAND":
if command:
command_list.append( (command,args) )
command = None
args = []
topmode = True
continue
args.append(tok.value)
tok =
|
alobbs/qvm
|
qvm/qvm-stop.py
|
Python
|
apache-2.0
| 171
| 0.017544
|
import os
import re
i
|
mport cmd
import sys
import time
import util
host = sys.argv[1]
cmd.run ("virsh shutdown %s"%(host))
while util.vm_is_running(host):
time.slee
|
p(1)
|
hryamzik/ansible
|
lib/ansible/modules/network/cnos/cnos_factory.py
|
Python
|
gpl-3.0
| 5,299
| 0.00434
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
#
# Copyright (C) 2017 Lenovo, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Module to Reset to factory settings of Lenovo Switches
# Lenovo Networking
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cnos_factory
author: "Anil Kumar Muraleedharan (@amuraleedhar)"
short_description: Reset the switch's startup configuration to default (factory) on devices running Lenovo CNOS
description:
- This module allows you to reset a switch's startup configuration. The method provides a way to reset the
startup configuration to its factory settings. This is helpful when you want to move the switch to another
topology as a new network device.
This module uses SSH to manage network device configuration.
The results of the operation can be viewed in the results directory.
For more information about this module from Lenovo and customizing its usage for your
use cases, please visit U(http://systemx.lenovofiles.com/help/index.jsp?topic=%2Fcom.lenovo.switchmgt.ansible.doc%2Fcnos_factory.html)
version_added: "2.3"
extends_documentation_fragment: cnos
options: {}
'''
EXAMPLES = '''
Tasks : The following are examples of using the module cnos_factory. These are written in the main.yml file of the tasks directory.
---
- name: Test Reset to factory
cnos_factory:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}"
password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}"
outputfile: "./results/test_factory_{{ inventory_hostname }}_output.txt"
'''
RET
|
URN = '''
msg:
description: Success or failure message
returned: always
type: string
sample: "Switch Startup Config is Reset to factory settings"
'''
import sys
try:
import paramiko
HAS_PARAMIKO = True
except ImportError:
HAS_PARAMIKO = False
import time
import socket
import array
import json
import time
import re
try:
from ansible.module_utils.network.cnos import cnos
HAS_LIB = True
except:
HAS_LIB = False
from ansible.module_utils.basic import AnsibleModule
from co
|
llections import defaultdict
def main():
module = AnsibleModule(
argument_spec=dict(
outputfile=dict(required=True),
host=dict(required=True),
username=dict(required=True),
password=dict(required=True, no_log=True),
enablePassword=dict(required=False, no_log=True),
deviceType=dict(required=True),),
supports_check_mode=False)
username = module.params['username']
password = module.params['password']
enablePassword = module.params['enablePassword']
cliCommand = "save erase \n"
outputfile = module.params['outputfile']
hostIP = module.params['host']
deviceType = module.params['deviceType']
output = ""
if not HAS_PARAMIKO:
module.fail_json(msg='paramiko is required for this module')
# Create instance of SSHClient object
remote_conn_pre = paramiko.SSHClient()
# Automatically add untrusted hosts (make sure this is acceptable under your security policy)
remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# initiate SSH connection with the switch
remote_conn_pre.connect(hostIP, username=username, password=password)
time.sleep(2)
# Use invoke_shell to establish an 'interactive session'
remote_conn = remote_conn_pre.invoke_shell()
time.sleep(2)
# Enable and enter configure terminal then send command
output = output + cnos.waitForDeviceResponse("\n", ">", 2, remote_conn)
output = output + cnos.enterEnableModeForDevice(enablePassword, 3, remote_conn)
# Make terminal length = 0
output = output + cnos.waitForDeviceResponse("terminal length 0\n", "#", 2, remote_conn)
# cnos.debugOutput(cliCommand)
# Send the CLi command
output = output + cnos.waitForDeviceResponse(cliCommand, "[n]", 2, remote_conn)
output = output + cnos.waitForDeviceResponse("y" + "\n", "#", 2, remote_conn)
# Save it into the file
file = open(outputfile, "a")
file.write(output)
file.close()
errorMsg = cnos.checkOutputForError(output)
if(errorMsg is None):
module.exit_json(changed=True, msg="Switch Startup Config is Reset to factory settings ")
else:
module.fail_json(msg=errorMsg)
if __name__ == '__main__':
main()
|
memsharded/conan
|
conans/test/unittests/util/client_conf_test.py
|
Python
|
mit
| 1,569
| 0.001275
|
import os
import unittest
from conans.client.cache.cache import CONAN_CONF
from conans.client.conf import ConanClientConfigParser
from conans.paths import DEFAULT_PROFILE_NAME
from conans.test.utils.test_files import temp_folder
from conans.util.files import save
default_client_conf = '''[storage]
path: ~/.conan/data
[log]
trace_file = "Path/with/quotes"
[general]
'''
default_profile = '''
[settings]
arch=x86_64
build_type=Release
compiler=gcc
compiler.libcxx=libstdc++
compiler.version=4.9
os=Linux
'''
class ClientConfTest(unittest.TestCase):
def test_quotes(self):
tmp_dir = temp_folder()
save(os.path.join(tmp_dir, CONAN_CONF), default_client_conf)
save(os.path.join(tmp_dir, DEFAULT_PROFILE_NAME), default_profile)
config = ConanClientConfigParser(os.path.join(tmp_dir, CONAN_CONF))
self.assertEqual(config.env_vars["CONAN_TRACE_FI
|
LE"], "Path/with/quotes")
def test_proxies(self):
tmp_dir = temp_folder()
save(os.path.join(tmp_dir, CONAN_CONF), "")
config = ConanClientConfigParser(os.path.join(tmp_dir, CONAN_CONF))
self.assertEqual(None, config.proxies)
save(os.path.join(tmp_dir, CONAN_CONF), "[proxies]")
config = ConanClientConfigParser(os.path.join(tmp_di
|
r, CONAN_CONF))
self.assertNotIn("no_proxy", config.proxies)
save(os.path.join(tmp_dir, CONAN_CONF), "[proxies]\nno_proxy=localhost")
config = ConanClientConfigParser(os.path.join(tmp_dir, CONAN_CONF))
self.assertEqual(config.proxies["no_proxy"], "localhost")
|
vishdha/erpnext
|
erpnext/setup/doctype/sales_partner/sales_partner.py
|
Python
|
gpl-3.0
| 1,584
| 0.024621
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import cstr, f
|
ilter_strip_join
from frappe.website.website_generator import WebsiteGenerator
from frappe.contacts.address_and_contact import load_address_and_contact
class SalesPartner(WebsiteGenerator):
website = frappe._dict(
page_title_field = "partner_name",
condition_field = "show_in_website",
template = "templates/generators/sales_partner.html"
)
def onload(self):
"""Load address and contacts in `__onload`"""
load_address_and_contact(self, "sales_partner")
|
def autoname(self):
self.name = self.partner_name
def validate(self):
if not self.route:
self.route = "partners/" + self.scrub(self.partner_name)
super(SalesPartner, self).validate()
if self.partner_website and not self.partner_website.startswith("http"):
self.partner_website = "http://" + self.partner_website
def get_context(self, context):
address = frappe.db.get_value("Address",
{"sales_partner": self.name, "is_primary_address": 1},
"*", as_dict=True)
if address:
city_state = ", ".join(filter(None, [address.city, address.state]))
address_rows = [address.address_line1, address.address_line2,
city_state, address.pincode, address.country]
context.update({
"email": address.email_id,
"partner_address": filter_strip_join(address_rows, "\n<br>"),
"phone": filter_strip_join(cstr(address.phone).split(","), "\n<br>")
})
return context
|
mame82/P4wnP1
|
hidtools/hidsrv9.py
|
Python
|
gpl-3.0
| 10,097
| 0.029712
|
#!/usr/bin/python
# This file is part of P4wnP1.
#
# Copyright (c) 2017, Marcus Mengs.
#
# P4wnP1 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# P4wnP1 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with P4wnP1. If not, see <http://www.gnu.org/licenses/>.
# Works with SendHID6.ps1
import sys
import struct
import Queue
import subprocess
import thread
import signal
from select import select
import time
chunks = lambda A, chunksize=60: [A[i:i+chunksize] for i in range(0, len(A), chunksize)]
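# e.g. chunks("x" * 130) -> ["x" * 60, "x" * 60, "x" * 10]; used below to fit
# payloads into the 60-byte report body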
# single packet for a data stream to send
# 0: 1 Byte src
# 1: 1 Byte dst
# 2: 1 Byte snd
# 3: 1 Byte rcv
# 4-63 60 Bytes Payload
# client dst
# 1 stdin
# 2 stdout
# 3 stderr
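# Illustrative example (derived from send_packet/read_packet below):
#   send_packet(f, src=1, dst=2, data="hi") writes
#   struct.pack('!BBBB60s', 1, 2, 2, 0, "hi")
# i.e. a fixed 64-byte report whose payload is zero-padded to 60 bytes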
# reassemble received report fragments and enqueue completed streams (keyed by src/dst)
def fragment_rcvd(qin, fragment_assembler, src=0, dst=0, data=""):
stream_id = (src, dst)
# if src == dst == 0, ignore (heartbeat)
if (src != 0 or dst != 0):
# check if stream already present
if fragment_assembler.has_key(stream_id):
# check if closing fragment (snd length = 0)
if (len(data) == 0):
# end of stream - add to input queue
stream = [src, dst, fragment_assembler[stream_id][2]]
qin.put(stream)
# delete from fragment_assembler
del fragment_assembler[stream_id]
else:
# append data to stream
fragment_assembler[stream_id][2] += data
#print repr(fragment_assembler[stream_id][2])
else:
# start stream, if not existing
data_arr = [src, dst, data]
fragment_assembler[stream_id] = data_arr
def send_datastream(qout, src=1, dst=1, data=""):
# split data into chunks fitting into packet payload (60 bytes)
chnks = chunks(data)
for chunk in chnks:
data_arr = [src, dst, chunk]
qout.put(data_arr)
# append empty packet to close stream
qout.put([src, dst, ""])
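# e.g. send_datastream(qout, 1, 2, "x" * 130) enqueues three data packets
# (60 + 60 + 10 bytes of payload) followed by one empty packet closing the stream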
def send_packet(f, src=1, dst=1, data="", rcv=0):
snd = len(data)
#print "Send size: " + str(snd)
packet = struct.pack('!BBBB60s', src, dst, snd, rcv, data)
#print packet.encode("hex")
f.write(packet)
def read_packet(f):
hidin = f.read(0x40)
#print "Input received (" + str(len(hidin)) + " bytes):"
#print hidin.encode("hex")
data = struct.unpack('!BBBB60s', hidin)
src = data[0]
dst = data[1]
snd = data[2]
rcv = data[3]
# reduce msg to real size
msg = data[4][0:snd]
return [src, dst, snd, rcv, msg]
def process_input(qin, subproc):
# HID in loop, should go to a thread
# check if input queue contains data
while True:
if not qin.empty():
input = qin.get()
src=input[0]
dst=input[1]
stream=input[2]
# process received input
# stdin (redirect to bash)
if dst == 1:
command=stream
if command.upper() == "RESET_BASH":
# send sigint to bash
print "Restarting bash process"
reset_bash(subproc)
else:
print "running command '" + command + "'"
run_local_command(command, subproc)
# stdout
elif dst == 2:
print "Data received on stdout"
print stream
pass
# stderr
elif dst == 3:
pass
# getfile
elif dst == 4:
print "Data receiveced on dst=4 (getfile): " + stream
args=stream.split(" ",3)
if (len(args) < 3):
# too few arguments, echo this back with src=2, dst=3 (stderr)
print "To few arguments"
send_datastream(qout, 4, 3, "P4wnP1 received 'getfile' with too few arguments")
# ToDo: files are reassembled here, this code should be moved into a separate method
else:
# check if first word is "getfile" ignore otherwise
if not args[0].strip().lower() == "getfile":
send_datastream(qout, 4, 3, "P4wnP1 received data on dst=4 (getfile) but wrong request format was choosen")
continue
filename = args[1].strip()
varname = args[2].strip()
content = None
# try to open file, send error if not possible
try:
with open(filename, "rb") as f:
content = f.read() # naive approach, reading whole file at once (we split into chunks anyway)
except IOError as e:
# deliver Error to Client errorstream
send_datastream(qout, 4, 3, "Error on getfile: " + e.strerror)
continue
# send header
print "Varname " + str(varname)
send_datastream(qout, 4, 4, "BEGINFILE " + filename + " " + varname)
# send filecontent (should be chunked into multiple streams, but would need reassembling on layer5)
# note: the client has to read (and recognize) ASCII-based header and footer streams, but content could be in binary form
if content == None:
send_datastream(qout, 4, 3, "Error on getfile: No file content read")
else:
#send_datastream(qout, 4, 4, content)
streamchunksize=600
for chunk in chunks(content, streamchunksize):
send_datastream(qout, 4, 4, chunk)
# send footer
send_datastream(qout, 4, 4, "ENDFILE " + filename + " " + varname)
else:
print "Input in input queue:"
print input
def run_local_command(command, subproc):
bash = subproc[0]
sin = bash.stdin
sin.write(command + "\n")
sin.flush()
return
def process_bash_output(qout, subproc):
buf = ""
while True:
bash = subproc[0]
outstream = bash.stdout
#print "Reading stdout of bash on " + str(outstream)
# check for output which needs to be delivered from backing bash
try:
r,w,ex = select([outstream], [], [], 0.1)
except ValueError:
# we should land here if the output stream is closed
# because a new bash process was started
pass
if outstream in r:
byte = outstream.read(1)
if byte == "\n":
# full line received from subprocess, send it to HID
# note: the newline char isn't sent, as each outputstream is printed in a separate line by the powershell client
# we set src=1 as we receive bash commands on dst=1
# dst = 2 (stdout of client)
send_datastream(qout, 2, 2, buf)
# clear buffer
buf = ""
else:
buf += byte
def process_bash_error(qout, subproc):
buf = ""
while True:
bash = subproc[0]
errstream = bash.stderr
# check for output which needs to be delivered from backing bash stderr
try:
r,w,ex = select([errstream], [], [], 0.1)
except ValueError:
# we should land here if the error stream is closed
# because a new bash process was started
pass
if errstream in r:
byte = errstream.read(1)
if byte == "\n":
# full line received from subprocess, send it to HID
# note: the newline char isn't sent
|
, as each outputstream is printed in a separate line by the powershell client
# dst = 3 (stderr of client)
send_datastream(qout, 3, 3, buf)
# clear buffer
buf = ""
else:
buf += byte
# As we don't pipe the CTRL+C interrupt from client through
#
|
HID data stream, there has to be another option to reset the bash process if it stalls
# This could easily happen, as we don't support interactive commands, waiting for input
# (this non-interactive shell restriction should be a known hurdle to every pentester out there)
def reset_bash(subproc):
bash = subproc[0]
bash.stdout.close()
bash.kill()
send_datastream(qout, 3, 3, "Bash process terminated")
bash = subprocess.Popen(["bash"], stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
subproc[0] = bash
if bash.poll() is None:
send_datastream(qout, 3, 3, "New bash process started")
else:
send_datastream(qout, 3, 3, "Restarting bash failed")
# prepare a stream to answer a getfile request
def stream_from_getfile(filename):
with open(filename,"rb") as f:
content = f.read()
return content
# main code
qout = Queue.Queue()
qin = Queue.Queue()
fragment_assembler = {}
bash = subprocess.Popen(["bash"], stdout=subprocess.PIPE, stdin=subprocess.PIPE, stder
|
ocadotechnology/django-tastypie
|
tests/core/tests/fields.py
|
Python
|
bsd-3-clause
| 56,800
| 0.001268
|
import datetime
from dateutil.tz import tzoffset
from decimal import Decimal
from django.db import models
from django.contrib.auth.models import User
from django.test import TestCase
from django.http import HttpRequest
from tastypie.bundle import Bundle
from tastypie.exceptions import ApiFieldError, NotFound
from tastypie.fields import NOT_PROVIDED, ApiField, BooleanField, CharField,\
DateField, DateTimeField, DecimalField, DictField, FileField, FloatField,\
IntegerField, ListField, TimeField, ToOneField, ToManyField
from tastypie.resources import ModelResource
from tastypie.utils import aware_datetime
from core.models import Note, Subject, MediaBit
from core.tests.mocks import MockRequest
class ApiFieldTestCase(TestCase):
fixtures = ['note_testdata.json']
def test_init(self):
field_1 = ApiField()
self.assertEqual(field_1.instance_name, None)
self.assertEqual(field_1.attribute, None)
self.assertEqual(field_1._default, NOT_PROVIDED)
self.assertEqual(field_1.null, False)
self.assertEqual(field_1.help_text, '')
self.assertEqual(field_1.use_in, 'all')
field_2 = ApiField(attribute='foo', default=True, null=True, readonly=True, help_text='Foo.', use_in="foo")
self.assertEqual(field_2.instance_name, None)
self.assertEqual(field_2.attribute, 'foo')
self.assertEqual(field_2._default, True)
self.assertEqual(field_2.null, True)
self.assertEqual(field_2.readonly, True)
self.assertEqual(field_2.help_text, 'F
|
oo.')
self.assertEqual(field_1.use_in, 'all')
field_3 = ApiField(use_in="list")
self.assertEqual(field_3.use_in, 'list')
field_4 = ApiField(use_in="detail")
self.assertEqual(field_4.use_in, 'detail')
def use_
|
in_callable(x):
return True
field_5 = ApiField(use_in=use_in_callable)
self.assertTrue(field_5.use_in is use_in_callable)
def test_dehydrated_type(self):
field_1 = ApiField()
self.assertEqual(field_1.dehydrated_type, 'string')
def test_has_default(self):
field_1 = ApiField()
self.assertEqual(field_1.has_default(), False)
field_2 = ApiField(default=True)
self.assertEqual(field_2.has_default(), True)
def test_default(self):
field_1 = ApiField()
self.assertEqual(isinstance(field_1.default, NOT_PROVIDED), True)
field_2 = ApiField(default=True)
self.assertEqual(field_2.default, True)
def test_dehydrate(self):
note = Note.objects.get(pk=1)
bundle = Bundle(obj=note)
# With no attribute or default, we should get ``None``.
field_1 = ApiField()
self.assertEqual(field_1.dehydrate(bundle), None)
# Still no attribute, so we should pick up the default
field_2 = ApiField(default=True)
self.assertEqual(field_2.dehydrate(bundle), True)
# Wrong attribute should yield default.
field_3 = ApiField(attribute='foo', default=True)
self.assertEqual(field_3.dehydrate(bundle), True)
# Wrong attribute should yield null.
field_4 = ApiField(attribute='foo', null=True)
self.assertEqual(field_4.dehydrate(bundle), None)
# Correct attribute.
field_5 = ApiField(attribute='title', default=True)
self.assertEqual(field_5.dehydrate(bundle), u'First Post!')
# Correct callable attribute.
field_6 = ApiField(attribute='what_time_is_it', default=True)
self.assertEqual(field_6.dehydrate(bundle), aware_datetime(2010, 4, 1, 0, 48))
def test_convert(self):
field_1 = ApiField()
self.assertEqual(field_1.convert('foo'), 'foo')
self.assertEqual(field_1.convert(True), True)
def test_hydrate(self):
note = Note.objects.get(pk=1)
bundle = Bundle(obj=note)
# With no value, default or nullable, we should get an ``ApiFieldError``.
field_1 = ApiField()
field_1.instance_name = 'api'
self.assertRaises(ApiFieldError, field_1.hydrate, bundle)
# The default.
field_2 = ApiField(default='foo')
field_2.instance_name = 'api'
self.assertEqual(field_2.hydrate(bundle), 'foo')
# The callable default.
def foo():
return 'bar'
field_3 = ApiField(default=foo)
field_3.instance_name = 'api'
self.assertEqual(field_3.hydrate(bundle), 'bar')
# The nullable case.
field_4 = ApiField(null=True)
field_4.instance_name = 'api'
self.assertEqual(field_4.hydrate(bundle), None)
# The readonly case.
field_5 = ApiField(readonly=True)
field_5.instance_name = 'api'
bundle.data['api'] = 'abcdef'
self.assertEqual(field_5.hydrate(bundle), None)
# A real, live attribute!
field_6 = ApiField(attribute='title')
field_6.instance_name = 'api'
bundle.data['api'] = note.title
self.assertEqual(field_6.hydrate(bundle), u'First Post!')
# Make sure it uses attribute when there's no data
field_7 = ApiField(attribute='title')
field_7.instance_name = 'notinbundle'
self.assertEqual(field_7.hydrate(bundle), u'First Post!')
# Make sure it falls back to instance name if there is no attribute
field_8 = ApiField()
field_8.instance_name = 'title'
self.assertEqual(field_8.hydrate(bundle), u'First Post!')
# Attribute & null regression test.
# First, simulate data missing from the bundle & ``null=True``.
field_9 = ApiField(attribute='notinbundle', null=True)
field_9.instance_name = 'notinbundle'
self.assertEqual(field_9.hydrate(bundle), None)
# The do something in the bundle also with ``null=True``.
field_10 = ApiField(attribute='title', null=True)
field_10.instance_name = 'title'
self.assertEqual(field_10.hydrate(bundle), u'First Post!')
# The blank case.
field_11 = ApiField(attribute='notinbundle', blank=True)
field_11.instance_name = 'notinbundle'
self.assertEqual(field_11.hydrate(bundle), None)
bundle.data['title'] = note.title
field_12 = ApiField(attribute='title', blank=True)
field_12.instance_name = 'title'
self.assertEqual(field_12.hydrate(bundle), u'First Post!')
class CharFieldTestCase(TestCase):
fixtures = ['note_testdata.json']
def test_init(self):
field_1 = CharField()
self.assertEqual(field_1.help_text, 'Unicode string data. Ex: "Hello World"')
field_2 = CharField(help_text="Custom.")
self.assertEqual(field_2.help_text, 'Custom.')
def test_dehydrated_type(self):
field_1 = CharField()
self.assertEqual(field_1.dehydrated_type, 'string')
def test_dehydrate(self):
note = Note.objects.get(pk=1)
bundle = Bundle(obj=note)
field_1 = CharField(attribute='title', default=True)
self.assertEqual(field_1.dehydrate(bundle), u'First Post!')
field_2 = CharField(default=20)
self.assertEqual(field_2.dehydrate(bundle), u'20')
class FileFieldTestCase(TestCase):
fixtures = ['note_testdata.json']
def test_init(self):
field_1 = FileField()
self.assertEqual(field_1.help_text, 'A file URL as a string. Ex: "http://media.example.com/media/photos/my_photo.jpg"')
field_2 = FileField(help_text="Custom.")
self.assertEqual(field_2.help_text, 'Custom.')
def test_dehydrated_type(self):
field_1 = FileField()
self.assertEqual(field_1.dehydrated_type, 'string')
def test_dehydrate(self):
bit = MediaBit.objects.get(pk=1)
bundle = Bundle(obj=bit)
field_1 = FileField(attribute='image', default=True)
self.assertEqual(field_1.dehydrate(bundle), u'http://localhost:8080/media/lulz/catz.gif')
field_2 = FileField(default='http://media.example.com/img/default_avatar.jpg')
self.assertEqual(field_2.dehydrate(bundle), u'http://media.example.com/img/default_avatar.jpg')
bit = MediaBit.objects
|
dschep/django-photomap
|
photomap/migrations/0004_copy_exif_data_to_model.py
|
Python
|
mit
| 1,128
| 0.003546
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.contrib.gis.geos import geometry
from PIL import
|
Image
from PIL.ExifTags import TAGS
from ..util import point_from_exif
class Migration(DataMigration):
def forwards(self, orm):
for photo in orm['photomap.Photo'].objects.all():
photo.location = point_from_exif(photo.image.path)
photo.save()
def backwards(self, orm):
|
raise NotImplementedError('Too lazy to write a method to write the'
' coordinates to the EXIF of the files')
models = {
u'photomap.photo': {
'Meta': {'object_name': 'Photo'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True'})
}
}
complete_apps = ['photomap']
symmetrical = True
|
ZhangXFeng/hadoop
|
src/hadoop-mapreduce1-project/src/contrib/hod/hodlib/AllocationManagers/goldAllocationManager.py
|
Python
|
apache-2.0
| 4,244
| 0.013431
|
#Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
"""Gold Allocation Manager Implementation"""
# -*- python -*-
import sys, httplib
import sha, base64, hmac
import xml.dom.minidom
from hodlib.Common.util import *
class goldAllocationManager:
def __init__(self, cfg, log):
self.__GOLD_SECRET_KEY_FILE = cfg['auth-file']
(self.__goldHost, self.__goldPort) = (cfg['allocation-manager-address'][0],
cfg['allocation-manager-address'][1])
self.cfg = cfg
self.log = log
def getQuote(self, user, project, ignoreErrors=True):
# Get Secret Key from File
secret = ''
try:
secretFile = open(self.__GOLD_SECRET_KEY_FILE)
secret = secretFile.readline()
except Exception, e:
self.log.error("Unable to open file %s" % self.__GOLD_SECRET_KEY_FILE)
self.log.debug(get_exception_string())
return (ignoreErrors or False)
secretFile.close()
secret = secret.rstrip()
# construct the SSRMAP request body
body = '<Body><Request action="Quote" actor="hod"><Object>Job</Object><Data><Job><ProjectId>%s</ProjectId><UserId>%s</UserId><WallDuration>10</WallDuration></Job></Data></Request></Body>' % (project, user)
# compute digest
message = sha.new()
message.update(body)
digest = message.digest()
digestStr = base64.b64encode(digest)
# compute signature
message = hmac.new(secret, digest, sha)
signatureStr = base64.b64encode(message.digest())
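# Illustrative Python 3 equivalent of the digest/signature steps above
# (assumption -- the original targets Python 2's deprecated sha module):
#   digest = hashlib.sha1(body.encode()).digest()
#   signature = base64.b64encode(hmac.new(secret, digest, hashlib.sha1).digest())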
# construct the SSSRMAP Message
sssrmapRequest = '<?xml version="1.0" encoding="UTF-8"?>\
<Envelope>%s<Signature><DigestValue>%s</DigestValue><SignatureValue>%s</SignatureValue><SecurityToken type="Symmetric"></SecurityToken></Signature></Envelope>' % (body, digestStr, signatureStr)
self.log.info('sssrmapRequest: %s' % sssrmapRequest)
try:
# post message to GOLD server
webservice = httplib.HTTP(self.__goldHost, self.__goldPort)
webservice.putrequest("POST", "/SSSRMAP3 HTTP/1.1")
webservice.putheader("Content-Type", "text/xml; charset=\"utf-8\"")
webservice.putheader("Transfer-Encoding", "chunked")
webservice.endheaders()
webservice.send("%X" % len(sssrmapRequest) + "\r\n" + sssrmapRequest + '0\r\n')
# handle the response
statusCode, statusmessage, header = webservice.getreply()
responseStr = webservice.getfile().read()
self.log.debug("httpStatusCode: %d" % statusCode)
self.log.info('responseStr: %s' % responseStr)
# parse XML response
if (statusCode == 200):
responseArr = responseStr.split("\n")
responseBody = responseArr[2]
try:
doc = xml.dom.minidom.parseString(responseBody)
responseVal = doc.getElementsByTagName("Value")[0].firstChild.nodeValue
self.log.info("responseVal: %s" % responseVal)
if (responseVal == 'Success'):
|
return True
else:
return False
except Exception, e:
self.log.error("Unable to parse GOLD responseBody XML \"(%s)\" to get responseVal" % (responseBody))
self.log.debug(get_exception_string())
|
return (ignoreErrors or False)
else:
self.log.error("Invalid HTTP statusCode %d" % statusCode)
except Exception, e:
self.log.error("Unable to POST message to GOLD server (%s, %d)" %
(self.__goldHost, self.__goldPort))
self.log.debug(get_exception_string())
return (ignoreErrors or False)
return True
|
noslenfa/tdjangorest
|
uw/lib/python2.7/site-packages/IPython/terminal/console/tests/test_console.py
|
Python
|
apache-2.0
| 1,726
| 0.006952
|
"""Tests for two-process terminal frontend
Currently only has the most simp
|
le test possible, starting a console and running
a single command.
Authors:
* Min RK
"""
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import sys
import time
import nose.tools as nt
from nose import SkipTest
import IPython.testing.tools as tt
from IPython.testing im
|
port decorators as dec
from IPython.utils import py3compat
#-----------------------------------------------------------------------------
# Tests
#-----------------------------------------------------------------------------
@dec.skip_win32
def test_console_starts():
"""test that `ipython console` starts a terminal"""
from IPython.external import pexpect
args = ['console', '--colors=NoColor']
# FIXME: remove workaround for 2.6 support
if sys.version_info[:2] > (2,6):
args = ['-m', 'IPython'] + args
cmd = sys.executable
else:
cmd = 'ipython'
try:
p = pexpect.spawn(cmd, args=args)
except IOError:
raise SkipTest("Couldn't find command %s" % cmd)
# timeout after one minute
t = 60
idx = p.expect([r'In \[\d+\]', pexpect.EOF], timeout=t)
p.sendline('5')
idx = p.expect([r'Out\[\d+\]: 5', pexpect.EOF], timeout=t)
idx = p.expect([r'In \[\d+\]', pexpect.EOF], timeout=t)
# send ctrl-D;ctrl-D to exit
p.sendeof()
p.sendeof()
p.expect([pexpect.EOF, pexpect.TIMEOUT], timeout=t)
if p.isalive():
p.terminate()
def test_help_output():
"""ipython console --help-all works"""
tt.help_all_output_test('console')
|
Royal-Society-of-New-Zealand/NZ-ORCID-Hub
|
orcid_api_v3/models/work_title_v30_rc2.py
|
Python
|
mit
| 4,930
| 0.000203
|
# coding: utf-8
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from orcid_api_v3.models.subtitle_v30_rc2 import SubtitleV30Rc2 # noqa: F401,E501
from orcid_api_v3.models.title_v30_rc2 import TitleV30Rc2 # noqa: F401,E501
from orcid_api_v3.models.translated_title_v30_rc2 import TranslatedTitleV30Rc2 # noqa: F401,E501
class WorkTitleV30Rc2(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'title': 'TitleV30Rc2',
'subtitle': 'SubtitleV30Rc2',
'translated_title': 'TranslatedTitleV30Rc2'
}
attribute_map = {
'title': 'title',
'subtitle': 'subtitle',
'translated_title': 'translated-title'
}
def __init__(self, title=None, subtitle=None, translated_title=None): # noqa: E501
"""WorkTitleV30Rc2 - a model defined in Swagger""" # noqa: E501
self._title = None
self._subtitle = None
self._translated_title = None
self.discriminator = None
if title is not None:
self.title = title
if subtitle is not None:
self.subtitle = subtitle
if translated_title is not None:
self.translated_title = translated_title
@property
def title(self):
"""Gets the title of this WorkTitleV30Rc2. # noqa: E501
:return: The title of this WorkTitleV30Rc2. # noqa: E501
:rtype: TitleV30Rc2
"""
return self._title
@title.setter
def title(self, title):
"""Sets the title of this WorkTitleV30Rc2.
:param title: The title of this WorkTitleV30Rc2. # noqa: E501
:type: TitleV30Rc2
"""
self._title = title
@property
def subtitle(self):
"""Gets the subtitle of this WorkTitleV30Rc2. # noqa: E501
:return: The subtitle of this WorkTitleV30Rc2. # noqa: E501
:rtype: SubtitleV30Rc2
"""
return self._subtitle
@subtitle.setter
def subtitle(self, subtitle):
"""Sets the subtitle of this WorkTitleV30Rc2.
:param subtitle: The subtitle of this WorkTitleV30Rc2. # noqa: E501
:type: SubtitleV30Rc2
"""
self._subtitle = subtitle
@property
def translated_title(self):
"""Gets the translated_title of this WorkTitleV30Rc2. # noqa: E501
:return: The translated_title of this WorkTitleV30Rc2. # noqa: E501
:rtype: TranslatedTitleV30Rc2
"""
return self._translated_title
@translated_title.setter
def translated_title(self, translated_title):
"""Sets the translated_title of this WorkTitleV30Rc2.
:param translated_title: The translated_title of this WorkTitleV30Rc2. # noqa: E501
:type: TranslatedTitleV30Rc2
"""
self._translated_title = translated_title
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(WorkTitleV30Rc2, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
|
return self.to_str()
def __eq__(self, ot
|
her):
"""Returns true if both objects are equal"""
if not isinstance(other, WorkTitleV30Rc2):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
avanzosc/odoo-addons
|
event_name_code_year_id/tests/test_event_name_code_year_id.py
|
Python
|
agpl-3.0
| 1,353
| 0
|
# Copyright
|
2021 Alfredo de la Fuente - Avanzosc S.L.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo.tests import common
from odoo.tests import tagged
@tagged("post_install", "-at_install")
class TestNameCodeYearId(common.SavepointCase):
@classmethod
def s
|
etUpClass(cls):
super(TestNameCodeYearId, cls).setUpClass()
cls.event_obj = cls.env['event.event']
cls.skill_type_lang = cls.env.ref('hr_skills.hr_skill_type_lang')
cls.skill_spanish = cls.env.ref('hr_skills.hr_skill_spanish')
cls.skill_filipino = cls.env.ref('hr_skills.hr_skill_filipino')
cls.skill_type_lang.skill_language = True
cls.skill_spanish.code = 'SP'
cls.skill_filipino.code = 'FI'
def test_event_name_code_year_id(self):
vals = {'name': 'User for event lang level',
'date_begin': '2025-01-06 08:00:00',
'date_end': '2025-01-15 10:00:00',
'lang_id': self.skill_spanish.id}
event = self.event_obj.create(vals)
name = 'SP-{}-2025'.format(event.id)
self.assertEqual(event.name, name)
vals = {'date_begin': '2024-01-06 08:00:00',
'lang_id': self.skill_filipino.id}
event.write(vals)
name = 'FI-{}-2024'.format(event.id)
self.assertEqual(event.name, name)
|
gradiuscypher/internet_illithid
|
bag_of_holding/libs/user_manager.py
|
Python
|
mit
| 2,633
| 0.001899
|
import traceback
from sqlalchemy import Column, Boolean, Integer, String, ForeignKey, create_engine
from sqlalchemy.orm import relationship, sessionmaker, scoped_session
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
engine = create_engine('sqlite:///bag_of_holding.db')
Base.metadata.bind = engine
session_factory = sessionmaker(bind=engine)
Session = scoped_session(session_factory)
session = Session()
class UserManager:
def build_db(self):
Base.metadata.create_all(engine)
def add_user(self, user_name):
"""
Create a new user
:return:
"""
def remove_user(self, user_name):
"""
Remove a current user
:param user_name:
:return:
"""
def add_user_profile(self, user_name, service_name):
"""
Add a service profile to a user_name
:param user_name:
:param service_name:
:return:
"""
def remove_user_profile(self, user_name, service_id):
"""
remove a service profile from a user_name
:param user_name:
:param service_id:
:return:
"""
def add_profile_trait(self, user_name, service_id, trait_name, trait_value):
"""
Add a trait to a service profile
:param user_name:
:param service_id:
:param trait_na
|
me
:param trait_value
:return:
"""
def remove_profile_trait(self, user_name, service_id, trait_id):
"""
Add a trait to a service profile
:param user_name:
:param service_id:
:param trait_id
:return:
"""
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Col
|
umn(String)
profiles = relationship('UserProfile')
def __repr__(self):
return '<User(id={})>'.format(self.id)
class UserProfile(Base):
__tablename__ = 'userprofiles'
id = Column(Integer, primary_key=True)
service_name = Column(String)
service_url = Column(String)
profile_id = Column(String)
user_id = Column(Integer, ForeignKey('users.id'))
def __repr__(self):
return '<UserProfile(id={}, service_name={}, service_url={}, profile_id={})>'\
.format(self.id, self.service_name, self.service_url, self.profile_id)
class ProfileTrait(Base):
__tablename__ = 'profiletraits'
id = Column(Integer, primary_key=True)
name = Column(String)
value = Column(String)
def __repr__(self):
return '<ProfileTrait(id={}, name={}, value={})>'.format(self.id, self.name, self.value)
|
spradeepv/dive-into-python
|
hackerrank/domain/algorithms/implementation/caesar_cipher/solution.py
|
Python
|
mit
| 1,399
| 0.002144
|
def get_encrypted_char(k, ascii_val, ascii_list, limit):
diff = k % 26
rotate_val = ascii_val + diff
encrypted_char = ''
if rotate_val not in ascii_list:
rotate_val -= limit
for i in ascii_list:
rotate_val -= 1
if rotate_val == 0:
encrypted_char += chr(i)
else:
encrypted_char += chr(rotate_val)
return encrypted_char
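# Illustrative check (not in the original):
#   get_encrypted_char(2, ord('z'), [i for i in range(97, 123)], 122) -> 'b'
# i.e. 'z' rotated by 2 wraps around past 'a' to 'b'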
def encrypt(s, k):
"""
a-z : 97-122
A-Z : 65-90
:param s: string to be encrypted
:param k: Integer, by which each character is rotated
:return: Encrypted string
"""
lower_ascii_list = [i for i in range(97, 123)]
upper_ascii_list = [i
|
for i in range(65, 91)]
lower_case_limit = 122
upper_case_limit = 90
encrypted_string = str()
for c in s:
ascii_val = ord(c)
if ascii_val in lower_ascii_list or ascii_val in upper_ascii_list:
|
limit = lower_case_limit
ascii_list = lower_ascii_list
if ascii_val in upper_ascii_list:
limit = upper_case_limit
ascii_list = upper_ascii_list
encrypted_string += get_encrypted_char(k, ascii_val, ascii_list,
limit)
else:
encrypted_string += c
return encrypted_string
l = raw_input()
s = raw_input()
k = int(raw_input())
print encrypt(s, k)
|
CompassionCH/l10n-switzerland
|
l10n_ch_fds_upload_sepa/models/account_payment_order.py
|
Python
|
agpl-3.0
| 1,096
| 0
|
# -*- coding: utf-8 -*-
# © 2015 Compassion CH (Nicolas Tran)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, models
class AccountPaymentOrder(m
|
odels.Model):
_inherit = 'account.payment.order'
@api.multi
def open2generated(self):
"""
Replace action to propose upload SEPA file to FDS.
:return: window action
"""
action = super(AccountPaymentOrder, self).open2generated()
if self.payment_method
|
_id.code == 'sepa_credit_transfer':
upload_obj = self.env['payment.order.upload.sepa.wizard']
attachment_id = action['res_id']
upload_wizard = upload_obj.create({
'attachment_id': attachment_id,
'payment_order_id': self.id,
})
del action['view_id']
action.update({
'res_model': upload_obj._name,
'res_id': upload_wizard.id,
'flags': {'initial_mode': 'edit'},
'attachment_id': attachment_id
})
return action
|
sboily/flask-blog
|
flask_blog/plugins/posts/views.py
|
Python
|
gpl-3.0
| 1,743
| 0.003442
|
from flask import Blueprint, render_template, redirect, url_for
from flask_blog.extensions import mongo
fr
|
om flask_blog.helpers import convertToObj
from flask.ext.login import login_required, current_user
from forms import PostsForm
posts = Blueprint('posts', __name__, template_folder='templates',
static_folder='static', static_url_path='/%s' % __name__)
@posts.rou
|
te("/posts")
@login_required
def list():
posts = mongo.db.posts.find()
return render_template('posts_list.html', posts=posts)
@posts.route("/posts/add", methods=['GET', 'POST'])
@login_required
def add():
form = PostsForm()
if form.validate_on_submit():
mongo.db.posts.insert(_add_username(form.data))
return redirect(url_for("posts.list"))
return render_template('post_add.html', form=form)
@posts.route("/posts/get/<ObjectId:id>")
def get(id):
post = mongo.db.posts.find_one_or_404(id)
return render_template('post_get.html', post=post)
@posts.route("/posts/edit/<ObjectId:id>", methods=['GET', 'POST'])
@login_required
def edit(id):
post = mongo.db.posts.find_one_or_404(id)
form = PostsForm(obj=convertToObj(**post))
if form.validate_on_submit():
form.populate_obj(convertToObj(**post))
mongo.db.posts.update({'_id': id},
{'$set': form.data}
)
return redirect(url_for("posts.list"))
return render_template('post_edit.html', form=form, post=post)
@posts.route("/posts/delete/<ObjectId:id>")
@login_required
def delete(id):
mongo.db.posts.remove(id)
return redirect(url_for("posts.list"))
def _add_username(form):
post = form
post.update({"author": current_user.username})
return post
|
esparta/logilab_common3
|
test/unittest_registry.py
|
Python
|
gpl-2.0
| 6,824
| 0.001465
|
# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:[email protected]
#
# This file is part of Logilab-Common.
#
# Logilab-Common is free software: you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option)
# any later version.
#
# Logilab-Common is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Logilab-Common. If not, see <http://www.gnu.org/licenses/>.
"""unit tests for selectors mechanism"""
from __future__ import with_statement
import gc
import logging
import os.path as osp
import sys
from operator import eq, lt, le, gt
from contextlib
|
import contextmanager
logging.basicConfig(level=logging.ERROR)
from logilab.common.testlib
|
import TestCase, unittest_main
from logilab.common.registry import *
class _1_(Predicate):
def __call__(self, *args, **kwargs):
return 1
class _0_(Predicate):
def __call__(self, *args, **kwargs):
return 0
def _2_(*args, **kwargs):
return 2
class SelectorsTC(TestCase):
def test_basic_and(self):
selector = _1_() & _1_()
self.assertEqual(selector(None), 2)
selector = _1_() & _0_()
self.assertEqual(selector(None), 0)
selector = _0_() & _1_()
self.assertEqual(selector(None), 0)
def test_basic_or(self):
selector = _1_() | _1_()
self.assertEqual(selector(None), 1)
selector = _1_() | _0_()
self.assertEqual(selector(None), 1)
selector = _0_() | _1_()
self.assertEqual(selector(None), 1)
selector = _0_() | _0_()
self.assertEqual(selector(None), 0)
def test_selector_and_function(self):
selector = _1_() & _2_
self.assertEqual(selector(None), 3)
selector = _2_ & _1_()
self.assertEqual(selector(None), 3)
def test_three_and(self):
selector = _1_() & _1_() & _1_()
self.assertEqual(selector(None), 3)
selector = _1_() & _0_() & _1_()
self.assertEqual(selector(None), 0)
selector = _0_() & _1_() & _1_()
self.assertEqual(selector(None), 0)
def test_three_or(self):
selector = _1_() | _1_() | _1_()
self.assertEqual(selector(None), 1)
selector = _1_() | _0_() | _1_()
self.assertEqual(selector(None), 1)
selector = _0_() | _1_() | _1_()
self.assertEqual(selector(None), 1)
selector = _0_() | _0_() | _0_()
self.assertEqual(selector(None), 0)
def test_composition(self):
selector = (_1_() & _1_()) & (_1_() & _1_())
self.assertTrue(isinstance(selector, AndPredicate))
self.assertEqual(len(selector.selectors), 4)
self.assertEqual(selector(None), 4)
selector = (_1_() & _0_()) | (_1_() & _1_())
self.assertTrue(isinstance(selector, OrPredicate))
self.assertEqual(len(selector.selectors), 2)
self.assertEqual(selector(None), 2)
def test_search_selectors(self):
sel = _1_()
self.assertIs(sel.search_selector(_1_), sel)
csel = AndPredicate(sel, Predicate())
self.assertIs(csel.search_selector(_1_), sel)
csel = AndPredicate(Predicate(), sel)
self.assertIs(csel.search_selector(_1_), sel)
self.assertIs(csel.search_selector((AndPredicate, OrPredicate)), csel)
self.assertIs(csel.search_selector((OrPredicate, AndPredicate)), csel)
self.assertIs(csel.search_selector((_1_, _0_)), sel)
self.assertIs(csel.search_selector((_0_, _1_)), sel)
def test_inplace_and(self):
selector = _1_()
selector &= _1_()
selector &= _1_()
self.assertEqual(selector(None), 3)
selector = _1_()
selector &= _0_()
selector &= _1_()
self.assertEqual(selector(None), 0)
selector = _0_()
selector &= _1_()
selector &= _1_()
self.assertEqual(selector(None), 0)
selector = _0_()
selector &= _0_()
selector &= _0_()
self.assertEqual(selector(None), 0)
def test_inplace_or(self):
selector = _1_()
selector |= _1_()
selector |= _1_()
self.assertEqual(selector(None), 1)
selector = _1_()
selector |= _0_()
selector |= _1_()
self.assertEqual(selector(None), 1)
selector = _0_()
selector |= _1_()
selector |= _1_()
self.assertEqual(selector(None), 1)
selector = _0_()
selector |= _0_()
selector |= _0_()
self.assertEqual(selector(None), 0)
def test_wrap_selectors(self):
class _temp_(Predicate):
def __call__(self, *args, **kwargs):
return 0
del _temp_ # test weakref
s1 = _1_() & _1_()
s2 = _1_() & _0_()
s3 = _0_() & _1_()
gc.collect()
self.count = 0
def decorate(f, self=self):
def wrapper(*args, **kwargs):
self.count += 1
return f(*args, **kwargs)
return wrapper
wrap_predicates(decorate)
self.assertEqual(s1(None), 2)
self.assertEqual(s2(None), 0)
self.assertEqual(s3(None), 0)
self.assertEqual(self.count, 8)
@contextmanager
def prepended_syspath(path):
sys.path.insert(0, path)
yield
sys.path = sys.path[1:]
class RegistryStoreTC(TestCase):
def test_autoload(self):
store = RegistryStore()
store.setdefault('zereg')
with prepended_syspath(self.datadir):
store.register_objects([self.datapath('regobjects.py'),
self.datapath('regobjects2.py')])
self.assertEqual(['zereg'], store.keys())
self.assertEqual(set(('appobject1', 'appobject2', 'appobject3')),
set(store['zereg']))
class RegistrableInstanceTC(TestCase):
def test_instance_modulename(self):
# no inheritance
obj = RegistrableInstance()
self.assertEqual(obj.__module__, 'unittest_registry')
# with inheritance from another python file
with prepended_syspath(self.datadir):
from regobjects2 import instance, MyRegistrableInstance
instance2 = MyRegistrableInstance()
self.assertEqual(instance.__module__, 'regobjects2')
self.assertEqual(instance2.__module__, 'unittest_registry')
if __name__ == '__main__':
unittest_main()
|
JohnGiorgi/SRA-RNAseq-Workflow
|
SRA_RNAseq_Workflow/helpers.py
|
Python
|
gpl-3.0
| 366
| 0.030055
|
# /usr/bin/env python
|
import os
# Context manager
class cd:
"""
Context manager for safely changing the current working directory
"""
def __init__(self, newPath):
self.newPath = os.path.expanduser(newPath)
def __enter__(self):
self.savedPath = os.getcwd()
os.chdir(self.newPath)
|
def __exit__(self, etype, value, traceback):
os.chdir(self.savedPath)
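# A minimal usage sketch: any exception raised inside the block still
# restores the previous working directory, because __exit__ always runs.
# The "." path below is just a stand-in target directory.
if __name__ == "__main__":
    with cd("."):
        print(os.getcwd())  # inside the target directory
    print(os.getcwd())      # restored afterwards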
|
Hemisphere-Project/Telemir-DatabitMe
|
Telemir-EEG/pyacq/pyacq/core/devices/emotiv.py
|
Python
|
gpl-2.0
| 12,623
| 0.01323
|
# -*- coding: utf-8 -*-
"""
Emotiv acquisition :
Reverse engineering and original crack code written by
Cody Brocious (http://github.com/daeken)
Kyle Machulis (http://github.com/qdot)
Many thanks for their contribution.
Need python-crypto.
"""
import multiprocessing as mp
import numpy as np
import msgpack
import time
from collections import OrderedDict
from .base import DeviceBase
import platform
WINDOWS = (platform.system() == "Windows")
try:
import pywinusb.hid as hid
except ImportError:
pass
import os
from subprocess import check_output
from Crypto.Cipher import AES
from Crypto import Random
import Queue
tasks = Queue.Queue()
_channel_names = [ 'F3', 'F4', 'P7', 'FC6', 'F7', 'F8','T7','P8','FC5','AF4','T8','O2','O1','AF3']
sensorBits = {
'F3': [10, 11, 12, 13, 14, 15, 0, 1, 2, 3, 4, 5, 6, 7],
'FC5': [28, 29, 30, 31, 16, 17, 18, 19, 20, 21, 22, 23, 8, 9],
'AF3': [46, 47, 32, 33, 34, 35, 36, 37, 38, 39, 24, 25, 26, 27],
'F7': [48, 49, 50, 51, 52, 53, 54, 55, 40, 41, 42, 43, 44, 45],
'T7': [66, 67, 68, 69, 70, 71, 56, 57, 58, 59, 60, 61, 62, 63],
'P7': [84, 85, 86, 87, 72, 73, 74, 75, 76, 77, 78, 79, 64, 65],
'O1': [102, 103, 88, 89, 90, 91, 92, 93, 94, 95, 80, 81, 82, 83],
'O2': [140, 141, 142, 143, 128, 129, 130, 131, 132, 133, 134, 135, 120, 121],
'P8': [158, 159, 144, 145, 146, 147, 148, 149, 150, 151, 136, 137, 138, 139],
'T8': [160, 161, 162, 163, 164, 165, 166, 167, 152, 153, 154, 155, 156, 157],
'F8': [178, 179, 180, 181, 182, 183, 168, 169, 170, 171, 172, 173, 174, 175],
'AF4': [196, 197, 198, 199, 184, 185, 186, 187, 188, 189, 190, 191, 176, 177],
'FC6': [214, 215, 200, 201, 202, 203, 204, 205, 206, 207, 192, 193, 194, 195],
'F4': [216, 217, 218, 219, 220, 221, 222, 223, 208, 209, 210, 211, 212, 213]
}
quality_bits = [99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112]
def create_analog_subdevice_param(channel_names):
n = len(channel_names)
d = {
'type' : 'AnalogInput',
'nb_channel' : n,
'params' :{ },
'by_channel_params' : {
'channel_indexes' : range(n),
'channel_names' : channel_names,
}
}
return d
def get_info(device):
info = { }
info['class'] = 'EmotivMultiSignals'
if WINDOWS:
# EMOTIV
info['device_path'] = device.device_path
info['board_name'] = '{} #{}'.format(device.vendor_name, device.serial_number).replace('\n', '').replace('\r', '')
info['serial'] = device.serial_number
info['hid'] = device
else:
info['device_path'] = device
        name = device.strip('/dev/')  # 'device' holds the hidraw path on this branch
realInputPath = os.path.realpath("/sys/class/hidraw/" + name)
path = '/'.join(realInputPath.split('/')[:-4])
with open(path + "/manufacturer", 'r') as f:
manufacturer = f.readline()
with open(path + "/serial", 'r') as f:
serial = f.readline().strip()
info['board_name'] = '{} #{}'.format(manufacturer, serial).replace('\n', '').replace('\r', '')
info['serial'] = serial
# PYACQ
info['global_params'] = {'buffer_length' : 60.,}
info['subdevices'] = [ ]
info['subdevices'].append(create_analog_subdevice_param(_channel_names))
quality_name = ['Quality {}'.format(n) for n in _channel_names]
info['subdevices'].append(create_analog_subdevice_param(quality_name))
info['subdevices'].append(create_analog_subdevice_param([ 'X','Y']))
return info
def dump(obj):
for attr in dir(obj):
print "obj.%s = %s" % (attr, getattr(obj, attr))
class EmotivMultiSignals(DeviceBase):
def __init__(self, **kargs):
DeviceBase.__init__(self, **kargs)
@classmethod
def get_available_devices(cls):
devices = OrderedDict()
if WINDOWS:
try:
for device in hid.find_all_hid_devices():
print "device : ", device
if (device.product_name == 'Emotiv RAW DATA' or device.product_name == 'EPOC BCI'):
devices['Emotiv '+device.serial_number] = get_info(device)
finally:
pass
else:
serials = { }
for name in os.listdir("/sys/class/hidraw"):
realInputPath = os.path.realpath("/sys/class/hidraw/" + name)
path = '/'.join(realInputPath.split('/')[:-4])
try:
with open(path + "/manufacturer", 'r') as f:
manufacturer = f.readline()
if "emotiv" in manufacturer.lower():
with open(path + "/serial", 'r') as f:
serial = f.readline().strip()
if serial not in serials:
serials[serial] = [ ]
serials[serial].append(name)
except IOError as e:
print "Couldn't open file: %s" % e
for serial, names in serials.items():
device_path = '/dev/'+names[1]
info = get_info(device_path)
devices['Emotiv '+device_path] = info
return devices
def configure(self, buffer_length = 60,
subdevices = None,
):
self.params = {'buffer_length' : buffer_length,
'subdevices' : subdevices,
}
self.__dict__.update(self.params)
self.configured = True
def initialize(self):
devices = EmotivMultiSignals.get_available_devices()
self.device = devices.values()[0]
if self.subdevices is None:
self.subdevices = self.device['subdevices']
self.sampling_rate = 128.
self.packet_size = 1
l = int(self.sampling_rate*self.buffer_length)
self.buffer_length = (l - l%self.packet_size)/self.sampling_rate
self.name = '{}'.format(self.device['board_name'])
self.streams = [ ]
for s, sub in enumerate(self.subdevices):
stream = self.streamhandler.new_AnalogSignalSharedMemStream(name = self.name+str(s) , sampling_rate = self.sampling_rate,
                                                        nb_channel = sub['nb_channel'], buffer_length = self.buffer_length,
packet_size = self.packet_size, dtype = np.float64,
channel_names = sub['by_channel_params']['channel_names'],
channel_indexes = sub['by_channel_params']['channel_indexes'],
)
self.streams.append(stream)
def start(self):
        self.stop_flag = mp.Value('i', 0) # multiproc flag shared across processes
self.process = mp.Process(target = emotiv_mainLoop, args=(self.stop_flag, self.streams, self.device) )
self.process.start()
        print 'EmotivMultiSignals started:', self.name
self.running = True
def stop(self):
self.stop_flag.value = 1
self.process.join()
        print 'EmotivMultiSignals stopped:', self.name
self.running = False
def close(self):
if WINDOWS:
self.device['hid'].close()
else:
pass
# for ii in self.streams:
# self.streams[ii].stop()
def setupCrypto(serial):
type = 0 #feature[5]
type &= 0xF
type = 0
#I believe type == True is for the Dev headset, I'm not using that. That's the point of this library in the first place I thought.
k = ['\0'] * 16
k[0] = serial[-1]
k[1] = '\0'
k[2] = serial[-2]
if type:
k[3] = 'H'
k[4] = serial[-1]
k[5] = '\0'
k[6] = serial[-2]
k[7] = 'T'
k[8] = serial[-3]
k[9] = '\x10'
k[10] = serial[-4]
k[11] = 'B'
else:
k[3] = 'T'
k[4] = serial[-3]
k[
|
uranusjr/snafu
|
tests/test_versions.py
|
Python
|
isc
| 4,596
| 0
|
import json
import pathlib
import re
import pytest
import snafu.versions
version_paths = list(snafu.versions.VERSIONS_DIR_PATH.iterdir())
version_names = [p.stem for p in version_paths]
@pytest.mark.parametrize('path', version_paths, ids=version_names)
def test_version_definitions(path):
assert path.suffix == '.json', '{} has wrong extension'.format(path)
assert re.match(r'^\d\.\d(?:\-32)?$', path.stem), \
'{} has invalid name'.format(path)
with path.open() as f:
data = json.load(f)
schema = data.pop('type')
possible_types = snafu.versions.InstallerType.__members__
assert schema in possible_types
assert isinstance(data.pop('version_info'), list)
if schema == 'cpython_msi':
for key in ('x86', 'amd64'):
d = data.pop(key)
assert d.pop('url')
assert re.match(r'^[a-f\d]{32}$', d.pop('md5_sum'))
elif schema == 'cpython':
assert data.pop('url')
assert re.match(r'^[a-f\d]{32}$', data.pop('md5_sum'))
    assert not data, 'superfluous keys: {}'.format(', '.join(data.keys()))
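# For reference, a "cpython" definition file matching the schema checked
# above looks roughly like this (values mirror the 3.5 fixture used below):
#
# {
#     "type": "cpython",
#     "version_info": [3, 5, 4],
#     "url": "https://www.python.org/ftp/python/3.5.4/python-3.5.4-amd64.exe",
#     "md5_sum": "4276742a4a75a8d07260f13fe956eec4"
# }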
def test_get_version_cpython_msi():
version = snafu.versions.get_version('3.4', force_32=False)
assert version == snafu.versions.CPythonMSIVersion(
name='3.4',
url='https://www.python.org/ftp/python/3.4.4/python-3.4.4.amd64.msi',
md5_sum='963f67116935447fad73e09cc561c713',
version_info=(3, 4, 4),
)
def test_get_version_cpython_msi_switch():
version = snafu.versions.get_version('3.4', force_32=True)
assert version == snafu.versions.CPythonMSIVersion(
name='3.4',
url='https://www.python.org/ftp/python/3.4.4/python-3.4.4.msi',
md5_sum='e96268f7042d2a3d14f7e23b2535738b',
version_info=(3, 4, 4),
)
def test_get_version_cpython():
version = snafu.versions.get_version('3.5', force_32=False)
assert version == snafu.versions.CPythonVersion(
name='3.5',
url='https://www.python.org/ftp/python/3.5.4/python-3.5.4-amd64.exe',
md5_sum='4276742a4a75a8d07260f13fe956eec4',
version_info=(3, 5, 4),
)
def test_get_version_cpython_switch():
version = snafu.versions.get_version('3.5', force_32=True)
assert version == snafu.versions.CPythonVersion(
name='3.5-32',
url='https://www.python.org/ftp/python/3.5.4/python-3.5.4.exe',
md5_sum='9693575358f41f452d03fd33714f223f',
version_info=(3, 5, 4),
forced_32=True,
)
def test_get_version_not_found():
with pytest.raises(snafu.versions.VersionNotFoundError) as ctx:
snafu.versions.get_version('2.8', force_32=False)
assert str(ctx.value) == '2.8'
@pytest.mark.parametrize('name, force_32, result', [
('3.6', False, 'Python 3.6'),
('3.6', True, 'Python 3.6-32'),
('3.4', False, 'Python 3.4'),
('3.4', True, 'Python 3.4'),
])
def test_str(name, force_32, result):
version = snafu.versions.get_version(name, force_32=force_32)
assert str(version) == result
@pytest.mark.parametrize('name, force_32, cmd', [
('3.6', False, 'python3.exe'),
('3.6', True, 'python3.exe'),
('2.7', False, 'python2.exe'),
('2.7', True, 'python2.exe'),
])
def test_python_major_command(mocker, name, force_32, cmd):
mocker.patch.object(snafu.versions, 'configs', **{
'get_scripts_dir_path.return_value': pathlib.Path(),
})
version = snafu.versions.get_version(name, force_32=force_32)
assert version.python_major_command == pathlib.Path(cmd)
@pytest.mark.parametrize('name, force_32, result', [
('3.6', False, '3.6'),
('3.6', True, '3.6'),
('3.4', False, '3.4'),
('3.4', True, '3.4'),
])
def test_arch_free_name(name, force_32, result):
version = snafu.versions.get_version(name, force_32=force_32)
assert version.arch_free_name == result
@pytest.mark.parametrize('name, force_32, result', [
('3.6', False, {'3.6'}),
('3.6', True, {'3.6', '3.6-32'}),
('3.6-32', False, {'3.6-32'}),
('3.4', False, {'3.4'}),
('3.4', True, {'3.4'}),
])
def test_script_version_names(name, force_32, result):
version = snafu.versions.get_version(name, force_32=force_32)
assert version.script_version_names == result
def test_is_installed(tmpdir, mocker):
    mock_metadata = mocker.patch.object(snafu.versions, 'metadata', **{
        'get_install_path.return_value': pathlib.Path(str(tmpdir)),
    })
version = snafu.versions.get_version('3.6', force_32=False)
assert version.is_installed()
mock_metadata.get_install_path.assert_called_once_with('3.6')
|
mozilla/normandy
|
contract-tests/v3_api/support/assertions.py
|
Python
|
mpl-2.0
| 461
| 0
|
import json
from os.path import join, dirname
from jsonschema import validate
SCHEMA_FILE = "normandy-schema.json"
def assert_valid_schema(data):
schema = _load_json_schema()
return validate(data, schema)
def _load_json_schema():
relative_path = join("schemas", SCHEMA_FILE)
absolute_path = join(dirname(__file__), relative_path)
with open(absolute_path) as schema_file:
        return json.loads(schema_file.read())
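# A minimal usage sketch in a test (the fixture names and endpoint are
# hypothetical; jsonschema's validate raises on any mismatch):
#
#   def test_recipe_list(requests_session, server):
#       response = requests_session.get(server + "/api/v3/recipe/")
#       assert response.status_code == 200
#       assert_valid_schema(response.json())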
|
tensorflow/addons
|
tensorflow_addons/seq2seq/decoder.py
|
Python
|
apache-2.0
| 23,035
| 0.000781
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Base classes and functions for dynamic decoding."""
import abc
import tensorflow as tf
from tensorflow_addons.utils.types import TensorLike
from typeguard import typechecked
from typing import Any, Optional, Tuple, Union
# TODO: Find public API alternatives to these
from tensorflow.python.ops import control_flow_util
class Decoder(metaclass=abc.ABCMeta):
"""An RNN Decoder abstract interface object.
Concepts used by this interface:
- `inputs`: (structure of) tensors and TensorArrays that is passed as input
to the RNN cell composing the decoder, at each time step.
- `state`: (structure of) tensors and TensorArrays that is passed to the
RNN cell instance as the state.
- `finished`: boolean tensor telling whether each sequence in the batch is
finished.
- `training`: boolean whether it should behave in training mode or in
inference mode.
- `outputs`: instance of `tfa.seq2seq.BasicDecoderOutput`. Result of the decoding, at
each time step.
"""
@property
def batch_size(self):
"""The batch size of input values."""
raise NotImplementedError
@property
def output_size(self):
"""A (possibly nested tuple of...) integer[s] or `TensorShape`
object[s]."""
raise NotImplementedError
@property
def output_dtype(self):
"""A (possibly nested tuple of...) dtype[s]."""
raise NotImplementedError
@abc.abstractmethod
def initialize(self, name=None):
"""Called before any decoding iterations.
        This method must compute initial input values and initial state.
Args:
name: Name scope for any created operations.
Returns:
`(finished, initial_inputs, initial_state)`: initial values of
'finished' flags, inputs and state.
"""
raise NotImplementedError
@abc.abstractmethod
def step(self, time, inputs, state, training=None, name=None):
"""Called per step of decoding (but only once for dynamic decoding).
Args:
time: Scalar `int32` tensor. Current step number.
inputs: RNN cell input (possibly nested tuple of) tensor[s] for this
time step.
state: RNN cell state (possibly nested tuple of) tensor[s] from
previous time step.
training: Python boolean. Indicates whether the layer should behave
in training mode or in inference mode. Only relevant
when `dropout` or `recurrent_dropout` is used.
name: Name scope for any created operations.
Returns:
`(outputs, next_state, next_inputs, finished)`: `outputs` is an
object containing the decoder output, `next_state` is a (structure
of) state tensors and TensorArrays, `next_inputs` is the tensor that
should be used as input for the next step, `finished` is a boolean
tensor telling whether the sequence is complete, for each sequence in
the batch.
"""
raise NotImplementedError
def finalize(self, outputs, final_state, sequence_lengths):
raise NotImplementedError
@property
def tracks_own_finished(self):
"""Describes whether the Decoder keeps track of finished states.
Most decoders will emit a true/false `finished` value independently
at each time step. In this case, the `tfa.seq2seq.dynamic_decode` function keeps
track of which batch entries are already finished, and performs a
logical OR to insert new batches to the finished set.
Some decoders, however, shuffle batches / beams between time steps and
`tfa.seq2seq.dynamic_decode` will mix up the finished state across these entries
    because it does not track the reshuffle across time steps. In this
case, it is up to the decoder to declare that it will keep track of its
own finished state by setting this property to `True`.
Returns:
Python bool.
"""
return False
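# A minimal sketch of how a concrete Decoder is usually driven (cell and
# sampler are placeholder objects, not defined here):
#
#   decoder = tfa.seq2seq.BasicDecoder(cell, sampler)
#   outputs, state, lengths = dynamic_decode(decoder, maximum_iterations=10)
#
# dynamic_decode (defined later in this module) calls decoder.initialize()
# once, then decoder.step() per time step, until every sequence in the batch
# reports finished or maximum_iterations is reached.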
class BaseDecoder(tf.keras.layers.Layer):
"""An RNN Decoder that is based on a Keras layer.
Concepts used by this interface:
- `inputs`: (structure of) Tensors and TensorArrays that is passed as input
      to the RNN cell composing the decoder, at each time step.
- `state`: (structure of) Tensors and TensorArrays that is passed to the
RNN cell instance as the state.
- `memory`: tensor that is usually the full output of the encoder, which
will be used for the attention wrapper for the RNN cell.
- `finished`: boolean tensor telling whether each sequence in the batch is
finished.
- `training`: boolean whether it should behave in training mode or in
inference mode.
- `outputs`: instance of `tfa.seq2seq.BasicDecoderOutput`. Result of the decoding, at
each time step.
"""
@typechecked
def __init__(
self,
output_time_major: bool = False,
impute_finished: bool = False,
maximum_iterations: Optional[TensorLike] = None,
parallel_iterations: int = 32,
swap_memory: bool = False,
**kwargs,
):
self.output_time_major = output_time_major
self.impute_finished = impute_finished
self.maximum_iterations = maximum_iterations
self.parallel_iterations = parallel_iterations
self.swap_memory = swap_memory
super().__init__(**kwargs)
def call(self, inputs, initial_state=None, training=None, **kwargs):
init_kwargs = kwargs
init_kwargs["initial_state"] = initial_state
return dynamic_decode(
self,
output_time_major=self.output_time_major,
impute_finished=self.impute_finished,
maximum_iterations=self.maximum_iterations,
parallel_iterations=self.parallel_iterations,
swap_memory=self.swap_memory,
training=training,
decoder_init_input=inputs,
decoder_init_kwargs=init_kwargs,
)
@property
def batch_size(self):
"""The batch size of input values."""
raise NotImplementedError
@property
def output_size(self):
"""A (possibly nested tuple of...) integer[s] or `TensorShape`
object[s]."""
raise NotImplementedError
@property
def output_dtype(self):
"""A (possibly nested tuple of...) dtype[s]."""
raise NotImplementedError
def initialize(self, inputs, initial_state=None, **kwargs):
"""Called before any decoding iterations.
        This method must compute initial input values and initial state.
Args:
inputs: (structure of) tensors that contains the input for the
decoder. In the normal case, it's a tensor with shape
[batch, timestep, embedding].
initial_state: (structure of) tensors that contains the initial state
for the RNN cell.
**kwargs: Other arguments that are passed in from layer.call()
              method. It could contain items like input `sequence_length`, or
masking for input.
Returns:
`(finished, initial_inputs, initial_state)`: initial values of
'finished' flags, inputs and state.
"""
raise NotImplementedError
def step(self, time, inputs, state, training):
"""Called per step of decoding (but only once for dynamic decoding).
Args:
|
ethiery/heat-solver
|
trunk/test_matgen.py
|
Python
|
mit
| 1,289
| 0.006206
|
# author : Etienne THIERY
from matgen import *
import random
import numpy
def test_symmetricPositiveDefinite():
for i in range(10):
print(".", end="", flush=True)
size = random.randint(400, 500)
maxVal = random.randint(0, 1000)
M = symmetricPositiveDefinite(size, maxVal)
        if not (isSymmetric(M) and isDefinitePositive(M)):
            return False
return True
def test_symmetricSparsePositiveDefinite():
for i in range(10):
print(".", end="", flush=True)
size = random.randint(400, 500)
maxVal = random.randint(0, 1000)
nbZeros = random.randint(0, size*(size-1))
M = symmetricSparsePositiveDefinite(size, nbZeros, maxVal)
if not (isSymmetric(M) and isDefinitePositive(M) and abs(numberOfZeros(M)-nbZeros) <= 1):
return False
return True
def numberOfZeros(M):
count = 0
for line in M:
for coeff in line:
if coeff == 0:
count+=1
return count
def printTest(test_func):
print("Testing " + test_func.__name__[5:] + " : ", end="", flush=True)
print(("" if test_func() else "un") + "expected behaviour", flush=True)
printTest(test_symmetricPositiveDefinite)
printTest(test_symmetricSparsePositiveDefinite)
|
fcchou/HelixMC
|
doc/source/conf.py
|
Python
|
gpl-3.0
| 8,069
| 0.005453
|
# -*- coding: utf-8 -*-
#
# HelixMC documentation build configuration file, created by
# sphinx-quickstart on Thu Mar 21 15:51:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
#import sys
#import os
import time
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
#sys.path.insert(0, os.path.abspath('sphinxext'))
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.autosummary',
'sphinx.ext.mathjax', 'numpydoc'
]
#Autodoc Stuffs
autosummary_generate = True
numpydoc_show_class_members = False
def skip(app, what, name, obj, skip, options):
if name == "__init__" or name == '__call__':
return False
return skip
def setup(app):
app.connect("autodoc-skip-member", skip)
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'HelixMC'
copyright = u'2013-%s, Fang-Chieh Chou (GPLv3 Licence)' % time.strftime('%Y')
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
import helixmc
version = helixmc.__version__
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_trees = ['_templates']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
#pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'default'
# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = '_static/logo.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = '_static/icon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'HelixMCdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [(
'index', 'HelixMC.tex', u'HelixMC Documentation',
u'Fang-Chieh Chou', 'manual'
)]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
latex_logo = '_static/logo.png'
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output -------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'helixmc', u'HelixMC Documentation',
[u'Fang-Chieh Chou'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [(
'index', 'HelixMC', u'HelixMC Documentation',
u'Fang-Chieh Chou', 'HelixMC', 'One line description of project.',
'Miscellaneous'
)]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
trim_doctest_flags = True
|
cdcq/jzyzj
|
syzoj/update_assistant/misc.py
|
Python
|
mit
| 163
| 0
|
from syzoj.models import JudgeState
from syzoj import db
db.create_all()
all_judge = JudgeState.query.all()
for item in all_judge:
    item.update_userac_info()
|
joetainment/mmmmtools
|
MmmmToolsMod/Dynamic/CapsDisabler.py
|
Python
|
gpl-3.0
| 2,060
| 0.019417
|
import subprocess as Subprocess
import pymel.all as pm
class CapsDisabler(object):
def __init__(self, parentRef, go=False):
self.parentRef = parentRef
self.ini = self.parentRef.ini
self.conf = self.ini.conf
self.enabled = False
self.autohotkeyProcess = None
if go==True:
self.go()
|
def go(self):
try:
            if int( self.ini.getItem("disable_capslock") ) == 1:
self.enabled = True
else:
#print("Hotkeys not enabled.")
pass
except:
            print("\n Could not start CapsLock disabling system or could "
                "not find info on its configuration, perhaps because of "
                "missing info in the ini file. \n")
if self.enabled:
self.disableCapslock()
def killAutohotkeyProcess(self):
if isinstance( self.autohotkeyProcess, Subprocess.Popen ):
try:
self.autohotkeyProcess.kill()
except:
                print( "Autohotkey process not stopped. Perhaps it had "
                    "not been started." )
self.autohotkeyProcess = None
else:
self.autohotkeyProcess = None
def disableCapslock(self):
self.killAutohotkeyProcess()
self.autohotkeyProcess = None
self.autohotkeyProcess = Subprocess.Popen( self.parentRef.env.conf.autohotkey_command )
def startDisablingCapslock(self):
self.disableCapslock()
def stopDisablingCapslock(self):
self.killAutohotkeyProcess()
def setDisableCaplockOn(self):
print( "pretending to set disable_capslock to ON" )
#self.ini.setItem( disable_capslock, 1 ) ##untested code
pass
def setDisableCapslockOff(self):
print( "pretending to set disable_capslock to OFF" )
#self.ini.setItem( disable_capslock, 0 )
pass
|
AragurDEV/yowsup
|
yowsup/layers/protocol_iq/protocolentities/iq.py
|
Python
|
gpl-3.0
| 2,304
| 0.010417
|
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
class IqProtocolEntity(ProtocolEntity):
'''
<iq type="{{get | set}}" id="{{id}}" xmlns="{{xmlns}}" to="{{TO}}" from="{{FROM}}">
</iq>
'''
TYPE_SET = "set"
TYPE_GET = "get"
TYPE_ERROR = "error"
TYPE_RESULT = "result"
TYPE_DELETE = "delete"
TYPES = (TYPE_SET, TYPE_GET, TYPE_RESULT, TYPE_ERROR, TYPE_DELETE)
def __init__(self, xmlns = None, _id = None, _type = None, to = None, _from = None):
|
super(IqProtocolEntity, self).__init__("iq")
assert _type in self.__class__.TYPES, "Iq of type %s is not implemented, can accept only (%s)" % (_type," | ".join(self.__class__.TYPES))
assert not to or not _from, "Can't set from and to at the same time"
self._id = self._generateId(True) if _id is None else _id
|
self._from = _from
self._type = _type
self.xmlns = xmlns
self.to = to
def getId(self):
return self._id
def getType(self):
return self._type
def getXmlns(self):
return self.xmlns
def getFrom(self, full = True):
return self._from if full else self._from.split('@')[0]
def getTo(self):
return self.to
def toProtocolTreeNode(self):
attribs = {
"id" : self._id,
"type" : self._type
}
if self.xmlns:
attribs["xmlns"] = self.xmlns
if self.to:
attribs["to"] = self.to
elif self._from:
attribs["from"] = self._from
return self._createProtocolTreeNode(attribs, None, data = None)
def __str__(self):
out = "Iq:\n"
out += "ID: %s\n" % self._id
out += "Type: %s\n" % self._type
if self.xmlns:
out += "xmlns: %s\n" % self.xmlns
if self.to:
out += "to: %s\n" % self.to
elif self._from:
out += "from: %s\n" % self._from
return out
@staticmethod
def fromProtocolTreeNode(node):
return IqProtocolEntity(
node.getAttributeValue("xmlns"),
node.getAttributeValue("id"),
node.getAttributeValue("type"),
node.getAttributeValue("to"),
node.getAttributeValue("from")
)
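# A minimal round-trip sketch (the xmlns and jid values are illustrative):
#
#   iq = IqProtocolEntity(xmlns="w:profile:picture",
#                         _type=IqProtocolEntity.TYPE_GET,
#                         to="123456789@s.whatsapp.net")
#   node = iq.toProtocolTreeNode()
#   same = IqProtocolEntity.fromProtocolTreeNode(node)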
|
domcleal/tito
|
test/unit/test_build_target_parser.py
|
Python
|
gpl-2.0
| 3,872
| 0.000517
|
import unittest
from tito.buildparser import BuildTargetParser
from ConfigParser import ConfigParser
from tito.exception import TitoException
class BuildTargetParserTests(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
self.valid_branches = ["branch1", "branch2"]
self.release_target = "project-x.y.z"
self.releasers_config = ConfigParser()
self.releasers_config.add_section(self.release_target)
self.releasers_config.set(self.release_target, "build_targets",
"branch1:project-x.y.z-candidate")
def test_parser_gets_correct_targets(self):
parser = BuildTargetParser(self.releasers_config, self.release_target,
self.valid_branches)
release_targets = parser.get_build_targets()
self.assertTrue("branch1" in release_targets)
self.assertEqual("project-x.y.z-candidate", release_targets["branch1"])
self.assertFalse("branch2" in release_targets)
def test_invalid_branch_raises_exception(self):
self.releasers_config.set(self.release_target, "build_targets",
"invalid-branch:project-x.y.z-candidate")
parser = BuildTargetParser(self.releasers_config, self.release_target,
self.valid_branches)
self.assertRaises(TitoException, parser.get_build_targets)
def test_missing_semicolon_raises_exception(self):
self.releasers_config.set(self.release_target, "build_targets",
"invalid-branchproject-x.y.z-candidate")
parser = BuildTargetParser(self.releasers_config, self.release_target,
self.valid_branches)
self.assertRaises(TitoException, parser.get_build_targets)
def test_empty_branch_raises_exception(self):
self.releasers_config.set(self.release_target, "build_targets",
":project-x.y.z-candidate")
parser = BuildTargetParser(self.releasers_config, self.release_target,
self.valid_branches)
self.assertRaises(TitoException, parser.get_build_targets)
def test_empty_target_raises_exception(self):
self.releasers_config.set(self.release_target, "build_targets",
"branch1:")
parser = BuildTargetParser(self.releasers_config, self.release_target,
self.valid_branches)
self.assertRaises(TitoException, parser.get_build_targets)
def test_multiple_spaces_ok(self):
self.releasers_config.set(self.release_target, "build_targets",
" branch1:project-x.y.z-candidate ")
parser = BuildTargetParser(self.releasers_config, self.release_target,
self.valid_branches)
release_targets = parser.get_build_targets()
self.assertEqual(1, len(release_targets))
        self.assertTrue("branch1" in release_targets)
self.assertEqual("project-x.y.z-candidate", release_targets["branch1"])
def test_multiple_branches_supported(self):
        self.releasers_config.set(self.release_target, "build_targets",
                                  "branch1:project-x.y.z-candidate branch2:second-target")
parser = BuildTargetParser(self.releasers_config, self.release_target,
self.valid_branches)
release_targets = parser.get_build_targets()
self.assertEquals(2, len(release_targets))
self.assertTrue("branch1" in release_targets)
self.assertEqual("project-x.y.z-candidate", release_targets["branch1"])
self.assertTrue("branch2" in release_targets)
self.assertEqual("second-target", release_targets['branch2'])
|
heraldmatias/django-payroll
|
src/inei/planilla/forms.py
|
Python
|
gpl-3.0
| 3,110
| 0.006431
|
from django.forms.formsets import BaseFormSet, formset_factory
from django.forms.models import BaseModelFormSet
from django.forms.models import modelformset_factory
from django import forms
from models import PlanillaHistoricas, ConceptosFolios, Folios, Tomos
class PlanillaHistoricasForm(forms.Form):
codi_empl_per = forms.CharField(max_length=50, widget=forms.TextInput(attrs={'class': 'nombre', 'placeholder': 'Apellidos y Nombres'}))
desc_plan_stp = forms.CharField(max_length=200, widget=forms.Textarea(attrs={'rows': 1}))
def __init__(self, concepto, *args, **kwargs):
super(PlanillaHistoricasForm, self).__init__(*args, **kwargs)
campos = dict()
egr = 'border-color: #e9322d; -webkit-box-shadow: 0 0 6px #f8b9b7; -moz-box-shadow: 0 0 6px #f8b9b7; box-shadow: 0 0 6px #f8b9b7;';
ing = 'border-color: #2D78E9; -webkit-box-shadow: 0 0 6px #2D78E9; -moz-box-shadow: 0 0 6px #2D78E9; box-shadow: 0 0 6px #2D78E9;';
total = 'border-color: rgb(70, 136, 71); -webkit-box-shadow: 0 0 6px rgb(70, 136, 71); -moz-box-shadow: 0 0 6px rgb(70, 136, 71); box-shadow: 0 0 6px rgb(70, 136, 71);';
for conc in concepto:
codigo = conc.codi_conc_tco.codi_conc_tco
descripcion = conc.codi_conc_tco.desc_cort_tco
tipo = conc.codi_conc_tco.tipo_conc_tco
clase = 'remuneraciones' if codigo == 'C373' else 'descuentos' if codigo == 'C374' else 'total' if codigo == 'C12' else 'monto'
attrs = {
'class': clase + ' error',
'data-title': descripcion,
'data-tipo': tipo,
'style': 'width:auto;font-size:15px;' + (ing if tipo == '1' else egr if tipo == '2' else total if codigo in ('C373', 'C12', 'C374') else ''),
'maxlength': 35,
'placeholder': descripcion
}
if codigo in campos:
campos[codigo] += 1
else:
campos[codigo] = 1
index = campos[codigo]
flag = '_%s' % index
self.fields['%s%s' % (codigo, flag)] = forms.CharField(widget=forms.TextInput(attrs=attrs))
self.fields['codigos'] = forms.CharField(max_length=700, widget=forms.HiddenInput())
class BasePlanillaHistoricasFormSet(BaseFormSet):
def __init__(self, *args, **kwargs):
self.concepto = kwargs['concepto']
del kwargs['concepto']
super(BasePlanillaHistoricasFormSet, self).__init__(*args, **kwargs)
    def _construct_form(self, i, **kwargs):
kwargs['concepto'] = self.concepto
return super(BasePlanillaHistoricasFormSet, self)._construct_form(i, **kwargs)
def add_fields(self, form, index):
        super(BasePlanillaHistoricasFormSet, self).add_fields(form, index)
PlanillaHistoricasFormSet = formset_factory(#form=PlanillaHistoricasForm,
form=PlanillaHistoricasForm,
formset=BasePlanillaHistoricasFormSet,
extra=0, can_delete=False) #exclude=('id', ))
|
lmr/avocado-vt
|
virttest/utils_disk.py
|
Python
|
gpl-2.0
| 21,332
| 0.000281
|
"""
Virtualization test - Virtual disk related utility functions
:copyright: Red Hat Inc.
"""
import os
import glob
import shutil
import stat
import tempfile
import logging
import re
try:
import configparser as ConfigParser
except ImportError:
import ConfigParser
from avocado.core import exceptions
from avocado.utils import process
from avocado.utils.service import SpecificServiceManager
from virttest import error_context
from virttest.compat_52lts import decode_to_text
# Whether to print all shell commands called
DEBUG = False
def copytree(src, dst, overwrite=True, ignore=''):
"""
Copy dirs from source to target.
:param src: source directory
:param dst: destination directory
:param overwrite: overwrite file if exist or not
:param ignore: files want to ignore
"""
ignore = glob.glob(os.path.join(src, ignore))
for root, dirs, files in os.walk(src):
dst_dir = root.replace(src, dst)
if not os.path.exists(dst_dir):
os.makedirs(dst_dir)
for _ in files:
if _ in ignore:
continue
src_file = os.path.join(root, _)
dst_file = os.path.join(dst_dir, _)
if os.path.exists(dst_file):
if overwrite:
os.remove(dst_file)
else:
continue
shutil.copy(src_file, dst_dir)
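# A minimal usage sketch (paths and pattern are illustrative): copy a tree,
# overwriting existing files but skipping ISO images at the top level of src.
#
#   copytree("/tmp/src_tree", "/tmp/dst_tree", overwrite=True, ignore="*.iso")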
def is_mount(src, dst=None, fstype=None, options=None, verbose=False,
session=None):
"""
Check is src or dst mounted.
:param src: source device or directory
:param dst: mountpoint, if None will skip to check
:param fstype: file system type, if None will skip to check
    :param options: mount options, separated by ","
:param session: check within the session if given
:return: True if mounted, else return False
"""
mount_str = "%s %s %s" % (src, dst, fstype)
mount_str = mount_str.replace('None', '').strip()
mount_list_cmd = 'cat /proc/mounts'
if session:
mount_result = session.cmd_output_safe(mount_list_cmd)
else:
mount_result = decode_to_text(process.system_output(mount_list_cmd, shell=True))
if verbose:
logging.debug("/proc/mounts contents:\n%s", mount_result)
for result in mount_result.splitlines():
if mount_str in result:
if options:
options =
|
options.split(",")
options_result = result.split()[3].split(",")
for op in options:
|
if op not in options_result:
if verbose:
logging.info("%s is not mounted with given"
" option %s", src, op)
return False
if verbose:
logging.info("%s is mounted", src)
return True
if verbose:
logging.info("%s is not mounted", src)
return False
def mount(src, dst, fstype=None, options=None, verbose=False, session=None):
"""
    Mount src under dst; if it is already mounted, remount with the given options.
:param src: source device or directory
:param dst: mountpoint
:param fstype: filesystem type need to mount
:param options: mount options
:param session: mount within the session if given
:return: if mounted return True else return False
"""
options = (options and [options] or [''])[0]
if is_mount(src, dst, fstype, options, verbose, session):
if 'remount' not in options:
options = 'remount,%s' % options
cmd = ['mount']
if fstype:
cmd.extend(['-t', fstype])
if options:
cmd.extend(['-o', options])
cmd.extend([src, dst])
cmd = ' '.join(cmd)
if session:
return session.cmd_status(cmd, safe=True) == 0
return process.system(cmd, verbose=verbose) == 0
def umount(src, dst, fstype=None, verbose=False, session=None):
"""
    Unmount src from dst, if src is really mounted under dst.
:param src: source device or directory
:param dst: mountpoint
:param fstype: fstype used to check if mounted as expected
:param session: umount within the session if given
:return: if unmounted return True else return False
"""
mounted = is_mount(src, dst, fstype, verbose=verbose, session=session)
if mounted:
from . import utils_package
package = "psmisc"
# check package is available, if not try installing it
if not utils_package.package_install(package):
logging.error("%s is not available/installed for fuser", package)
fuser_cmd = "fuser -km %s" % dst
umount_cmd = "umount %s" % dst
if session:
session.cmd_output_safe(fuser_cmd)
return session.cmd_status(umount_cmd, safe=True) == 0
process.system(fuser_cmd, ignore_status=True, verbose=True, shell=True)
return process.system(umount_cmd, ignore_status=True, verbose=True) == 0
return True
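# A minimal usage sketch (device and mountpoint names are illustrative):
#
#   if mount("/dev/sdb1", "/mnt/data", fstype="ext4", options="ro"):
#       pass  # filesystem is now mounted read-only
#   umount("/dev/sdb1", "/mnt/data", fstype="ext4")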
@error_context.context_aware
def cleanup(folder):
"""
If folder is a mountpoint, do what is possible to unmount it. Afterwards,
try to remove it.
:param folder: Directory to be cleaned up.
"""
error_context.context(
"cleaning up unattended install directory %s" % folder)
umount(None, folder)
if os.path.isdir(folder):
shutil.rmtree(folder)
@error_context.context_aware
def clean_old_image(image):
"""
Clean a leftover image file from previous processes. If it contains a
mounted file system, do the proper cleanup procedures.
:param image: Path to image to be cleaned up.
"""
error_context.context("cleaning up old leftover image %s" % image)
if os.path.exists(image):
umount(image, None)
os.remove(image)
class Disk(object):
"""
Abstract class for Disk objects, with the common methods implemented.
"""
def __init__(self):
self.path = None
def get_answer_file_path(self, filename):
return os.path.join(self.mount, filename)
def copy_to(self, src):
logging.debug("Copying %s to disk image mount", src)
dst = os.path.join(self.mount, os.path.basename(src))
if os.path.isdir(src):
shutil.copytree(src, dst)
elif os.path.isfile(src):
shutil.copyfile(src, dst)
def close(self):
os.chmod(self.path, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP |
stat.S_IROTH | stat.S_IXOTH)
cleanup(self.mount)
logging.debug("Disk %s successfully set", self.path)
class FloppyDisk(Disk):
"""
Represents a floppy disk. We can copy files to it, and setup it in
convenient ways.
"""
@error_context.context_aware
def __init__(self, path, qemu_img_binary, tmpdir, vfd_size):
error_context.context(
"Creating unattended install floppy image %s" % path)
self.mount = tempfile.mkdtemp(prefix='floppy_virttest_', dir=tmpdir)
self.path = path
self.vfd_size = vfd_size
clean_old_image(path)
try:
c_cmd = '%s create -f raw %s %s' % (qemu_img_binary, path,
self.vfd_size)
process.run(c_cmd, verbose=DEBUG)
f_cmd = 'mkfs.msdos -s 1 %s' % path
process.run(f_cmd, verbose=DEBUG)
except process.CmdError as e:
logging.error("Error during floppy initialization: %s" % e)
cleanup(self.mount)
raise
def close(self):
"""
Copy everything that is in the mountpoint to the floppy.
"""
pwd = os.getcwd()
try:
os.chdir(self.mount)
path_list = glob.glob('*')
for path in path_list:
self.copy_to(path)
finally:
os.chdir(pwd)
cleanup(self.mount)
def copy_to(self, src):
logging.debug("Copying %s to floppy image", src)
mcopy_cmd = "mcopy -s -o -n -i %s %s ::/" % (self.path, src)
process.run(mcopy_cmd, verbose=DEBUG)
def _copy_virtio_drivers(self, virtio_floppy):
"""
Copy
|
jacekdalkowski/bike-timer
|
web-api/biketimerwebapi/db/repositories/cassandra/cassandra_repositories_module.py
|
Python
|
apache-2.0
| 1,685
| 0.005935
|
from injector import Module
from cassandra.cqlengine import connection
from cassandra.cluster import Cluster
from cassandra.cqlengine.management import create_keyspace_simple, sync_table, sync_type
from cassandra.cqlengine.usertype import UserType
from ...entities.track_type import TrackType
from cassandra_users_repository import CassandraUsersRepository
from cassandra_spots_repository import CassandraSpotsRepository
from runs.cassandra_runs_repository import CassandraRunsRepository
from cassandra_checkpoint_passes_repository import CassandraCheckpointPassesRepository
from ..repositories_definitions import UsersRepository
from ..repositories_definitions import SpotsRepository
from ..repositories_definitions import RunsRepository
from ..repositories_definitions import CheckpointPassesRepository
class CassandraRepositoriesModule(Module):
def configure(self, binder):
connection.setup(['cassandrahost'], 'biketimer', protocol_version=4)
cluster = Cluster(['cassandrahost'])
session = cluster.connect('biketimer')
users_repository_instance = CassandraUsersRepository(cluster, session)
binder.bind(UsersRepository, to=users_repository_instance)
spots_repository_instance = CassandraSpotsRepository(cluster, session)
binder.bind(SpotsRepository, to=spots_repository_instance)
runs_repository_instance = CassandraRunsRepository(cluster, session)
binder.bind(RunsRepository, to=runs_repository_instance)
checkpoint_passes_repository_instance = CassandraCheckpointPassesRepository(cluster, session)
binder.bind(CheckpointPassesRepository, to=checkpoint_passes_repository_instance)
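# A minimal wiring sketch using the injector API (names as defined above):
#
#   from injector import Injector
#   injector = Injector([CassandraRepositoriesModule()])
#   users_repo = injector.get(UsersRepository)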
|
dannyp11/gossip_network
|
main_v2.py
|
Python
|
gpl-2.0
| 4,267
| 0.023904
|
import sys, math
# **************** Main program *********************************************
def main():
# File IO ###############################################
txt = open("in9.txt", 'r')
N = int (txt.readline())
n = 2 * N
a = [[0 for x in range(1)] for y in range(n)]
# print >> sys.stderr, a
print >> sys.stderr, "N=", N
for line in range (0, N):
x , y = [int(j) for j in txt.readline().split()]
#print >> sys.stderr, '%d %d' % (x, y)
m, n = sortTwo(x, y)
a[m].append(n)
a[n].append(m)
a[m][0] += 1
a[n][0] += 1
print >> sys.stderr, "Done file IO \n \n"
##############################################################
# Init vars-------------------------------------------------
#print >> sys.stderr, a
while (a[-1]==[0]): # check for abundant [0]
a.pop()
relationship = a
n = len(relationship)
print >> sys.stderr, "total nodes:" , n
level = [0] * n # contains level of nodes
# print >> sys.stderr, level
#print >> sys.stderr, "relationship: \n" , relationship
countOne = 0
oneList = []
for elem in range(0, n):
if (relationship[elem][0] == 1):
countOne += 1
oneList.append(elem)
print >> sys.stderr, "countONe:", countOne
# print >> sys.stderr,"oneList:", oneList
print >> sys.stderr, "Done Var init \n \n"
# -------------------------------------------------------------
# Engine ---------------------------------------------------
for i in range(0, countOne):
node = oneList[i]
level[i] = findSingleMaxLength(node, node, oneList, countOne, relationship, n)
# ------------------------------------------------------------
# Report -------------------------------------------------
#---------------------------------------------------------
# No touch area ------------------------------------------
maxi = max(level)
if (maxi % 2 == 0):
ans = maxi / 2
else:
ans = (maxi + 1) / 2
print >> sys.stderr, "Answer:", ans
#*********************** End program ************************************************
def spreadRumorNode(node, relationship, relationship_len): # update relationship and provide bag
bag = []
new_relationship = relationship
if (new_relationship[node][0] > 0):
for bag_elem in range (1, 1 + relationship[node][0]):
node_child = relationship[node][bag_elem]
if (relationship[node_child][0] > 0):
bag.append(node_child)
new_relationship[node][0] = -2
return bag, new_relationship
def spreadRumorOnce(target_list, relationship, relationship_len):
new_target_list = []
new_relationship = relationship
number_of_target = len(target_list)
target_bag = [[] for y in range(number_of_target)]
for i in range(number_of_target):
node = target_list[i]
target_bag[i], new_relationship = spreadRumorNode(node, new_relationship, relationship_len)
new_target_list.extend(target_bag[i])
return new_target_list, new_relationship
def findSingleMaxLength(x, x_pos, oneList, oneList_len, relationship, relationship_len):
new_relationship = relationship
step = -1
try:
i = oneList.index(x)
except ValueError:
return -1 # no match
nowhere_to_go = 0
target_list = [x]
while (nowhere_to_go == 0):
step += 1
target_list, new_relationship = spreadRumorOnce(target_list, new_relationship, relationship_len)
if (target_list == []):
nowhere_to_go = 1
return step
def findMin(a, b):
res = a
if (res > b):
        res = b
return res
def sortTwo(a, b):
if (a < b):
x = a
y = b
else:
x = b
y = a
return x, y
|
main()
|
natecavanaugh/GitGutter
|
git_gutter_events.py
|
Python
|
mit
| 3,715
| 0.000538
|
import time
import sublime
import sublime_plugin
ST3 = int(sublime.version()) >= 3000
if ST3:
from .view_collection import ViewCollection
from .git_gutter_popup import show_diff_popup
else:
from view_collection import ViewCollection
from git_gutter_popup import show_diff_popup
def async_event_listener(EventListener):
if ST3:
async_methods = set([
'on_new',
'on_clone',
'on_load',
'on_pre_save',
'on_post_save',
'on_modified',
'on_selection_modified',
'on_activated',
'on_deactivated',
])
for attr_name in dir(EventListener):
if attr_name in async_methods:
attr = getattr(EventListener, attr_name)
setattr(EventListener, attr_name + '_async', attr)
delattr(EventListener, attr_name)
return EventListener
@async_event_listener
class GitGutterEvents(sublime_plugin.EventListener):
def __init__(self):
self._settings_loaded = False
self.latest_keypresses = {}
# Synchronous
def on_modified(self, view):
if self.settings_loaded() and self.live_mode:
self.debounce(view, 'modified', ViewCollection.add)
def on_clone(self, view):
if self.settings_loaded():
self.debounce(view, 'clone', ViewCollection.add)
def on_post_save(self, view):
if self.settings_loaded():
self.debounce(view, 'post-save', ViewCollection.add)
def on_load(self, view):
if self.settings_loaded() and self.live_mode:
self.debounce(view, 'load', ViewCollection.add)
def on_activated(self, view):
if self.settings_loaded() and self.focus_change_mode:
self.debounce(view, 'activated', ViewCollection.add)
def on_hover(self, view, point, hover_zone):
if hover_zone != sublime.HOVER_GUTTER:
return
# don't let the popup flicker / fight with other packages
        if view.is_popup_visible():
return
if not settings.get("enable_hover_diff_popup"):
return
show_diff_popup(view, point, flags=sublime.HIDE_ON_MOUSE_MOVE_AWAY)
# Asynchronous
def debounce(self, view, event_type, func):
if self.non_blocking:
key = (event_type, view.file_name())
|
this_keypress = time.time()
self.latest_keypresses[key] = this_keypress
def callback():
latest_keypress = self.latest_keypresses.get(key, None)
if this_keypress == latest_keypress:
func(view)
if ST3:
set_timeout = sublime.set_timeout_async
else:
set_timeout = sublime.set_timeout
set_timeout(callback, settings.get("debounce_delay"))
else:
func(view)
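    # Debounce sketch: each (event_type, file_name) key remembers the
    # timestamp of its latest event; the delayed callback runs the real
    # handler only if no newer event arrived in the meantime, so bursts of
    # keypresses collapse into one refresh per debounce_delay window.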
# Settings
def settings_loaded(self):
if settings and not self._settings_loaded:
self._settings_loaded = self.load_settings()
return self._settings_loaded
def load_settings(self):
self.live_mode = settings.get('live_mode')
if self.live_mode is None:
self.live_mode = True
self.focus_change_mode = settings.get('focus_change_mode')
if self.focus_change_mode is None:
self.focus_change_mode = True
self.non_blocking = settings.get('non_blocking')
if self.non_blocking is None:
self.non_blocking = True
return True
settings = {}
def plugin_loaded():
global settings
settings = sublime.load_settings('GitGutter.sublime-settings')
if not ST3:
plugin_loaded()
|
noironetworks/aci-integration-module
|
aim/tools/cli/commands/config.py
|
Python
|
apache-2.0
| 1,450
| 0
|
# Copyright (c) 2016 Cisco Systems
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import click
from aim import config as aim_cfg
from aim import context
from aim.db import api
from aim.tools.cli.groups import aimcli
@aimcli.aim.group(name='config')
@click.pass_context
def config(ctx):
aim_ctx = context.AimContext(store=api.get_store(expire_on_commit=True))
ctx.obj['manager'] = aim_cfg.ConfigManager(aim_ctx, '')
@config.command(name='update')
@click.argument('host', required=False)
@click.pass_context
def update(ctx, host):
"""Current database version."""
host = host or ''
ctx.obj['manager'].to_db(ctx.obj['conf'], host=host)
@config.command(name='replace')
@click.argument('host', required=False)
@click.pass_context
def replace(ctx, host):
"""Used for upgrading database."""
host = host or ''
ctx.obj['manager'].replace_all(ctx.obj['conf'], host=host)
|