repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars)
|---|---|---|---|---|---|---|---|---|
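The prefix, middle, and suffix columns split each file's text into three consecutive spans for fill-in-the-middle training; below, the spans of each record are shown re-joined. A minimal sketch of how a row reassembles into its source file (illustrative values, not an actual row):

# Hypothetical row: joining prefix + middle + suffix restores the file.
row = {
    "prefix": "def add(a, b):\n    ",
    "middle": "return a + b",
    "suffix": "\n\nprint(add(1, 2))\n",
}
source = row["prefix"] + row["middle"] + row["suffix"]
assert "return a + b" in source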
cameronobrien/BroadsideBot | app/intel_entry.py | Python | mit | 1,447 | 0.004838
import datetime
import csv
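# Load the list of valid solar-system names once at import time.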
with open('SYSTEMS.csv') as f:
reader = csv.reader(f)
ALLOWED_SYSTEMS = [l[0] for l in reader]
class IntelEntry:
KEYS = ["timer_name", "alliance", "system", "time", "date", "location
|
"]
def __init__(self, timer_name="", alliance="", system="", time="", date="", location=""):
if timer_name != "":
self.timer_name = timer_name
else:
raise ValueError("Provided timer not valid.")
if alliance != "":
self.alliance = alliance.strip()
else:
raise ValueError("Provided alliance not valid.")
system = system.upper()
if system in ALLOWED_SYSTEMS:
self.system = system
else:
raise ValueError("Provided solar system not valid.")
self.location = location
if time != "":
self.time = datetime.datetime.strptime(' '.join([date, time]), '%m/%d/%y %H:%M')
if self.time < datetime.datetime.now():
raise ValueError("Provided date/time not valid. Time must be in the future.")
else:
raise ValueError("Provided date/time not valid. Time must be in format '%m/%d/%y %H:%M'.")
def to_dict(self):
return { "timer_name": self.timer_name,
"alliance": self.alliance,
"system": self.system,
"location": self.location,
"time": self.time }
oblique-labs/pyVM | rpython/memory/gc/generation.py | Python | mit | 30,342 | 0.000857
import sys
from rpython.memory.gc.semispace import SemiSpaceGC
from rpython.memory.gc.semispace import GCFLAG_EXTERNAL, GCFLAG_FORWARDED
from rpython.memory.gc.semispace import GC_HASH_TAKEN_ADDR
from rpython.memory.gc import env
from rpython.rtyper.lltypesystem.llmemory import NULL, raw_malloc_usage
from rpython.rtyper.lltypesystem import lltype, llmemory, llarena
from rpython.rlib.objectmodel import free_non_gc_object
from rpython.rlib.debug import ll_assert
from rpython.rlib.debug import debug_print, debug_start, debug_stop
from rpython.rlib.rarithmetic import intmask, LONG_BIT
from rpython.rtyper.lltypesystem.lloperation import llop
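# Size of a machine word in bytes (LONG_BIT is the platform word size in bits).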
WORD = LONG_BIT // 8
# The following flag is never set on young objects, i.e. the ones living
# in the nursery. It is initially set on all prebuilt and old objects,
# and gets cleared by the write_barrier() when we write in them a
# pointer to a young object.
GCFLAG_NO_YOUNG_PTRS = SemiSpaceGC.first_unused_gcflag << 0
# The following flag is set on some last-generation objects (== prebuilt
# objects for GenerationGC, but see also HybridGC). The flag is set
# unless the object is already listed in 'last_generation_root_objects'.
# When a pointer is written inside an object with GCFLAG_NO_HEAP_PTRS
# set, the write_barrier clears the flag and adds the object to
# 'last_generation_root_objects'.
GCFLAG_NO_HEAP_PTRS = SemiSpaceGC.first_unused_gcflag << 1
class GenerationGC(SemiSpaceGC):
"""A basic generational GC: it's a SemiSpaceGC with an additional
nursery for young objects. A write barrier is used to ensure that
old objects that contain pointers to young objects are recorded in
a list.
"""
inline_simple_malloc = True
inline_simple_malloc_varsize = True
needs_write_barrier = True
prebuilt_gc_objects_are_static_roots = False
first_unused_gcflag = SemiSpaceGC.first_unused_gcflag << 2
# the following values override the default arguments of __init__ when
# translating to a real backend.
TRANSLATION_PARAMS = {'space_size': 8*1024*1024, # 8 MB
'nursery_size': 3*1024*1024, # 3 MB
'min_nursery_size': 48*1024,
'auto_nursery_size': True}
nursery_hash_base = -1
def __init__(self, config,
nursery_size=32*WORD,
min_nursery_size=32*WORD,
auto_nursery_size=False,
space_size=1024*WORD,
max_space_size=sys.maxint//2+1,
**kwds):
SemiSpaceGC.__init__(self, config,
space_size = space_size,
max_space_size = max_space_size,
**kwds)
assert min_nursery_size <= nursery_size <= space_size // 2
self.initial_nursery_size = nursery_size
self.auto_nursery_size = auto_nursery_size
self.min_nursery_size = min_nursery_size
# define nursery fields
self.reset_nursery()
self._setup_wb()
# compute the constant lower bounds for the attributes
# largest_young_fixedsize and largest_young_var_basesize.
# It is expected that most (or all) objects have a fixedsize
# that is much lower anyway.
sz = self.get_young_fixedsize(self.min_nursery_size)
self.lb_young_fixedsize = sz
sz = self.get_young_var_basesize(self.min_nursery_size)
self.lb_young_var_basesize = sz
def setup(self):
self.old_objects_pointing_to_young = self.AddressStack()
# ^^^ a list of addresses inside the old objects space; it
# may contain static prebuilt objects as well. More precisely,
# it lists exactly the old and static objects whose
# GCFLAG_NO_YOUNG_PTRS bit is not set.
self.young_objects_with_weakrefs = self.AddressStack()
self.last_generation_root_objects = self.AddressStack()
self.young_objects_with_id = self.AddressDict()
SemiSpaceGC.setup(self)
self.set_nursery_size(self.initial_nursery_size)
# the GC is fully setup now. The rest can make use of it.
if self.auto_nursery_size:
newsize = nursery_size_from_env()
#if newsize <= 0:
# ---disabled--- just use the default value.
# newsize = env.estimate_best_nursery_size()
if newsize > 0:
self.set_nursery_size(newsize)
self.reset_nursery()
def _teardown(self):
self.collect() # should restore last gen objects flags
SemiSpaceGC._teardown(self)
def reset_nursery(self):
self.nursery = NULL
self.nursery_top = NULL
self.nursery_free = NULL
def set_nursery_size(self, newsize):
debug_start("gc-set-nursery-size")
if newsize < self.min_nursery_size:
newsize = self.min_nursery_size
if newsize > self.space_size // 2:
newsize = self.space_size // 2
# Compute the new bounds for how large young objects can be
# (larger objects are allocated directly old). XXX adjust
self.nursery_size = newsize
self.largest_young_fixedsize = self.get_young_fixedsize(newsize)
self.largest_young_var_basesize = self.get_young_var_basesize(newsize)
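# Find the largest shift 'scale' with (min_nursery_size << scale) <= newsize.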
scale = 0
while (self.min_nursery_size << (scale+1)) <= newsize:
scale += 1
self.nursery_scale = scale
debug_print("nursery_size =", newsize)
debug_print("largest_young_fixedsize =",
self.largest_young_fixedsize)
debug_print("largest_young_var_basesize =",
self.largest_young_var_basesize)
debug_print("nursery_scale =", scale)
# we get the following invariant:
assert self.nursery_size >= (self.min_nursery_size << scale)
# Force a full collect to remove the current nursery whose size
# no longer matches the bounds that we just computed. This must
# be done after changing the bounds, because it might re-create
# a new nursery (e.g. if it invokes finalizers).
self.semispace_collect()
debug_stop("gc-set-nursery-size")
@staticmethod
def get_young_fixedsize(nursery_size):
return nursery_size // 2 - 1
@staticmethod
def get_young_var_basesize(nursery_size):
return nursery_size // 4 - 1
@classmethod
def JIT_max_size_of_young_obj(cls):
min_nurs_size = cls.TRANSLATION_PARAMS['min_nursery_size']
return cls.get_young_fixedsize(min_nurs_size)
def is_in_nursery(self, addr):
ll_assert(llmemory.cast_adr_to_int(addr) & 1 == 0,
"odd-valued (i.e. tagged) pointer unexpected here")
return self.nursery <= addr < self.nursery_top
def appears_to_be_in_nursery(self, addr):
# same as is_in_nursery(), but may return True accidentally if
# 'addr' is a tagged pointer with just the wrong value.
if not self.translated_to_c:
if not self.is_valid_gc_object(addr):
return False
return self.nursery <= addr < self.nursery_top
def malloc_fixedsize_clear(self, typeid, size,
has_finalizer=False,
is_finalizer_light=False,
contains_weakptr=False):
if (has_finalizer or
(raw_malloc_usage(size) > self.lb_young_fixedsize and
raw_malloc_usage(size) > self.largest_young_fixedsize)):
# ^^^ we do two size comparisons; the first one appears redundant,
# but it can be constant-folded if 'size' is a constant; then
# it almost always folds down to False, which kills the
# second comparison as well.
ll_assert(not contains_weakptr, "wrong case for mallocing weakref")
# "non-simple" case or object too big: don't use the nursery
return SemiSpaceGC.malloc_fixedsize_clear(self, typeid, size,
has_finalizer,
is_finalizer_light,
contains_weakptr)
Cinntax/home-assistant | homeassistant/components/roku/remote.py | Python | apache-2.0 | 1,715 | 0.001166
"""Support for the Roku remote."""
import requests.exceptions
from homeassistant.components import remote
from homeassistant.const import CONF_HOST
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Roku remote platform."""
if not discovery_info:
return
host = discovery_info[CONF_HOST]
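# True asks Home Assistant to run update() once before the entity is added.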
async_add_entities([RokuRemote(host)], True)
class RokuRemote(remote.RemoteDevice):
"""Device that sends commands to an Roku."""
def __init__(self, host):
"""Initialize the Roku device."""
from roku import Roku
self.roku = Roku(host)
self._device_info = {}
def update(self):
"""Retrieve latest state."""
try:
self._device_info = self.roku.device_info
except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout):
pass
@property
def name(self):
"""Return the name of the device."""
if self._device_info.user_device_name:
return self._device_info.user_device_name
return f"Roku {self._device_info.serial_num}"
@property
def unique_id(self):
"""Return a unique ID."""
return self._device_info.serial_num
@property
def is_on(self):
"""Return true if device is on."""
return True
@property
def should_poll(self):
"""No polling needed for Roku."""
return False
def send_command(self, command, **kwargs):
"""Send a command to one device."""
for single_command in command:
if not hasattr(self.roku, single_command):
continue
getattr(self.roku, single_command)()
wmvanvliet/mne-python | mne/utils/__init__.py | Python | bsd-3-clause | 4,697 | 0.002768
# # # WARNING # # #
# This list must also be updated in doc/_templates/autosummary/class.rst if it
# is changed here!
_doc_special_members = ('__contains__', '__getitem__', '__iter__', '__len__',
'__add__', '__sub__', '__mul__', '__div__',
'__neg__', '__hash__')
from ._bunch import Bunch, BunchConst, BunchConstNamed
from .check import (check_fname, check_version, check_random_state,
_check_fname, _check_subject, _check_pandas_installed,
_check_pandas_index_arguments,
_check_event_id, _check_ch_locs, _check_compensation_grade,
_check_if_nan, _is_numeric, _ensure_int, _check_preload,
_validate_type, _check_info_inv,
_check_channels_spatial_filter, _check_one_ch_type,
_check_rank, _check_option, _check_depth, _check_combine,
_path_like, _check_src_normal, _check_stc_units,
_check_pyqt5_version, _check_sphere, _check_time_format,
_check_freesurfer_home, _suggest, _require_version,
_on_missing, _check_on_missing, int_like, _safe_input,
_check_all_same_channel_names, path_like, _ensure_events,
_check_eeglabio_installed, _check_dict_keys,
_check_edflib_installed, _to_rgb, _soft_import,
_import_h5py, _import_h5io_funcs,
_import_pymatreader_funcs)
from .config import (set_config, get_config, get_config_path, set_cache_dir,
set_memmap_min_size, get_subjects_dir, _get_stim_channel,
sys_info, _get_extra_data_path, _get_root_dir,
_get_numpy_libs)
from .docs import (copy_function_doc_to_method_doc, copy_doc, linkcode_resolve,
open_docs, deprecated, fill_doc, deprecated_alias,
copy_base_doc_to_subclass_doc, docdict as _docdict)
from .fetching import _url_to_local_path
from ._logging import (verbose, logger, set_log_level, set_log_file,
use_log_level, catch_logging, warn, filter_out_warnings,
wrapped_stdout, _get_call_line, _record_warnings,
ClosingStringIO, _VerboseDep)
from .misc import (run_subprocess, _pl, _clean_names, pformat, _file_like,
_explain_exception, _get_argvalues, sizeof_fmt,
running_subprocess, _DefaultEventParser,
_assert_no_instances, _resource_path)
from .progressbar import ProgressBar
from ._testing import (run_command_if_main, requires_sklearn,
requires_version, requires_nibabel, requires_mne,
requires_good_network, requires_pandas, requires_h5py,
ArgvSetter, SilenceStdout, has_freesurfer, has_mne_c,
_TempDir, has_nibabel, buggy_mkl_svd,
requires_numpydoc, requires_vtk, requires_freesurfer,
requires_nitime, requires_dipy,
requires_neuromag2ft, requires_pylsl,
assert_object_equal, assert_and_remove_boundary_annot,
_raw_annot, assert_dig_allclose, assert_meg_snr,
assert_snr, assert_stcs_equal, modified_env,
_click_ch_name)
from .numerics import (hashfunc, _compute_row_norms,
_reg_pinv, random_permutation, _reject_data_segments,
compute_corr, _get_inst_data, array_split_idx,
sum_squared, split_list, _gen_events, create_slices,
_time_mask, _freq_mask, grand_average, object_diff,
object_hash, object_size, _apply_scaling_cov,
_undo_scaling_cov, _apply_scaling_array,
_undo_scaling_array, _scaled_array, _replace_md5, _PCA,
_mask_to_onsets_offsets, _array_equal_nan,
_julian_to_cal, _cal_to_julian, _dt_to_julian,
_julian_to_dt, _dt_to_stamp, _stamp_to_dt,
_check_dt, _ReuseCycle, _arange_div, _hashable_ndarray,
_custom_lru_cache)
from .mixin import (SizeMixin, GetEpochsMixin, _prepare_read_metadata,
_prepare_write_metadata, _FakeNoPandas, ShiftTimeMixin)
from .linalg import (_svd_lwork, _repeated_svd, _sym_mat_pow, sqrtm_sym, eigh,
_get_blas_funcs)
from .dataframe import (_set_pandas_dtype, _scale_dataframe_data,
_convert_times, _build_data_frame)
okfn-brasil/viralata | viralata/views.py | Python | agpl-3.0 | 12,901 | 0
#!/usr/bin/env python
# coding: utf-8
import re
import bleach
import passlib
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm.exc import NoResultFound
# from flask import redirect, url_for, make_response
from flask.ext.restplus import Resource
from flask.ext.mail import Message
from auths import get_auth_url, get_username
from models import User
from extensions import db, sv
from utils import decode_validate_token
from cutils import ExtraApi
api = ExtraApi(version='1.0',
title='Vira-lata',
description='An authentication microservice.')
api.update_parser_arguments({
'username': {
'location': 'json',
'help': 'The username.',
},
'password': {
'location': 'json',
'help': 'The password.',
},
'new_password': {
'location': 'json',
'help': 'A new password, when changing the current one.',
},
'code': {
'location': 'json',
'help': 'A temporary code used to reset the password.',
},
'email': {
'location': 'json',
'help': 'The email.',
},
'description': {
'location': 'json',
'help': 'The user description.',
},
})
@api.route('/login/external/manual/<string:backend>')
class LoginExtManAPI(Resource):
def get(self, backend):
'''Asks the URL that should be used to login with a specific backend
(like Facebook).'''
return {'redirect': get_auth_url(backend, 'loginextmanapi')}
@api.route('/complete/manual/<string:backend>')
class CompleteLoginExtManAPI(Resource):
def post(self, backend):
'''Completes the login with a specific backend.'''
username = get_username(backend, redirect_uri='/')
return create_tokens(username)
# @api.route('/login/external/automatic/<string:backend>')
# class StartLoginExtAutoAPI(Resource):
# def get(self, backend):
# '''Asks the URL that should be used to login with a specific backend
# (like Facebook).'''
# print('AUTH-GET')
# print(get_auth_url(backend, 'completeloginautoapi'))
# return {'redirect': get_auth_url(backend, 'completeloginautoapi')}
# # return redirect(get_auth_url(backend, 'completeloginautoapi'))
# @api.route('/complete/automatic/<string:backend>')
# class CompleteLoginAutoAPI(Resource):
# def get(self, backend):
# '''Completes the login with a specific backend.'''
# print('COMPLETE-GET')
# username = get_username(backend,
# url_for('completeloginautoapi',
# backend='facebook'))
# tokens = create_tokens(username)
# response = redirect("http://localhost:5001/")
# # import IPython; IPython.embed()
# return response
# # return create_tokens(username)
@api.route('/login/local')
class LoginLocalAPI(Resource):
@api.doc(parser=api.create_parser('username', 'password'))
def post(self):
'''Login using local DB, not a third-party service.'''
args = api.general_parse()
username = args['username']
password = args['password']
try:
if User.verify_user_password(username, password):
return create_tokens(username)
else:
api.abort_with_msg(400, 'Wrong password...', ['password'])
except NoResultFound:
api.abort_with_msg(400,
'Username seems not registered...',
['username'])
@api.route('/renew_micro_token')
class RenewMicroToken(Resource):
@api.doc(parser=api.create_parser('token'))
def post(self):
'''Get a new micro token to be used with the other microservices.'''
args = api.general_parse()
decoded = decode_token(args['token'])
if decoded['type'] != 'main':
# This seems not to be a main token. It must be main for security
# reasons, for only main ones can be invalidated at logout.
# Allowing micro tokens would allow infinite renew by a
# compromised token
api.abort_with_msg(400, 'Must use a main token', ['token'])
token = create_token(decoded['username'])
return {
'microToken': token,
'microTokenValidPeriod': api.app.config[
'MICRO_TOKEN_VALID_PERIOD'],
}
@api.route('/reset_password')
class ResetPassword(Resource):
@api.doc(parser=api.create_parser('username', 'email'))
def post(self):
'''Sends an email to the user with a code to reset password.'''
args = api.general_parse()
user = get_user(args['username'])
check_user_email(user, args['email'])
msg = Message(
api.app.config['MAIL_SUBJECT'],
sender=api.app.config['SENDER_NAME'],
recipients=[user.email])
code = passlib.utils.generate_password(15)
exp = api.app.config['TIME_RESET_PASSWORD']
user.set_temp_password(code, exp)
db.session.commit()
msg.body = (api.app.config['EMAIL_TEMPLATE']
.format(code=code, exp_min=exp/60))
api.mail.send(msg)
return {
'message': 'Check email!',
'exp': exp,
}
@api.doc(parser=api.create_parser('username', 'email', 'code', 'password'))
def put(self):
'''Change the password of a user using a temporary code.'''
args = api.general_parse()
password = args['password']
validate_password(password)
username = args['username']
user = get_user(username)
check_user_email(user, args['email'])
if not user.check_temp_password(args['code']):
api.abort_with_msg(400, 'Invalid code', ['code'])
user.hash_password(password)
# Commit is done by create_tokens
return create_tokens(username)
@api.route('/logout')
class Logout(Resource):
@api.doc(parser=api.create_parser('token'))
def post(self):
'''Invalidates the main token.'''
args = api.general_parse()
decoded = decode_token(args['token'])
# Invalidates all main tokens
get_user(decoded['username']).last_token_exp = 0
db.session.commit()
return {}
@api.route('/users/<string:username>')
class UserAPI(Resource):
@api.doc(parser=api.create_parser('token'))
def get(self, username):
'''Get information about a user.'''
args = api.general_parse()
try:
user = User.get_user(username)
except NoResultFound:
api.abort_with_msg(404, 'User not found', ['username'])
resp = {
'username': user.username,
'description': user.description,
}
# Add email if this is the owner of the account
token = args['token']
if token:
decoded = decode_token(token)
if decoded['username'] == username:
resp['email'] = user.email
return resp
@api.doc(parser=api.create_parser('token', 'description',
'email', 'password', 'new_password'))
def put(self, username):
'''Edit information about a user.'''
args = api.general_parse()
decoded = decode_token(args['token'])
if username == decoded['username']:
user = get_user(decoded['username'])
changed = False
password = args.get('password')
# If is changing password
if password:
new_password = args['new_password']
if user.verify_password(password):
validate_password(new_password, 'new_password')
user.hash_password(new_password)
changed = True
else:
api.abort_with_msg(400, 'Wrong password...', ['password'])
# If is changing description
if args['description']:
user.description = bleach.clean(args['description'],
strip=True)
macmanes-lab/MCBS913 | code/Junhong Chen/concatReads.py | Python | mit | 925 | 0.016216
"""
Author: Junhong Chen
"""
from Bio import SeqIO
import gzip
import sys
import os
pe1 = []
pe2 = []
pname = []
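# Walk the input tree, collecting each paired-end prefix and its
# "<prefix>1.fq.gz" / "<prefix>2.fq.gz" file paths.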
for dirName, subdirList, fileList in os.walk(sys.argv[1]):
for fname in fileList:
tmp = fname.split(".")[0]
tmp = tmp[:len(tmp)-1]
if tmp not in pname:
pname.append(tmp)
pe1.append(dirName+"/"+tmp+"1.fq.gz")
pe2.append(dirName+"/"+tmp+"2.fq.gz")
def concat(name,file_list):
with open(name, 'w') as w_file:
for filen in file_list:
print 'working with',filen
with gzip.open(filen, 'rU') as o_file:
seq_records = SeqIO.parse(o_file, 'fastq')
SeqIO.write(seq_records, w_file, 'fastq')
#print pe1
#print pe2
concat(sys.argv[2]+"-pe1.fq", pe1)
concat(sys.argv[2]+"-pe2.fq", pe2)
novafloss/django-agnocomplete | agnocomplete/core.py | Python | mit | 20,047 | 0
"""
The different agnocomplete classes to be discovered
"""
from copy import copy
from six import with_metaclass
from abc import abstractmethod, ABCMeta
import logging
from django.db.models import Q
from django.core.exceptions import ImproperlyConfigured
from django.utils.encoding import force_text as text
from django.conf import settings
import requests
from .constants import AGNOCOMPLETE_DEFAULT_PAGESIZE
from .constants import AGNOCOMPLETE_MIN_PAGESIZE
from .constants import AGNOCOMPLETE_MAX_PAGESIZE
from .constants import AGNOCOMPLETE_DEFAULT_QUERYSIZE
from .constants import AGNOCOMPLETE_MIN_QUERYSIZE
from .exceptions import AuthenticationRequiredAgnocompleteException
from .exceptions import SkipItem
from .exceptions import ItemNotFound
logger = logging.getLogger(__name__)
class ClassPropertyDescriptor(object):
"""
Toolkit class used to instantiate a class property.
"""
def __init__(self, fget, fset=None):
self.fget = fget
self.fset = fset
def __get__(self, obj, klass=None):
if klass is None:
klass = type(obj)
return self.fget.__get__(obj, klass)()
def __set__(self, obj, value):
if not self.fset:
raise AttributeError("can't set attribute")
type_ = type(obj)
return self.fset.__get__(obj, type_)(value)
def setter(self, func):
"""
Setter: the decorated method will become a class property.
"""
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
self.fset = func
return self
def classproperty(func):
"""
Decorator: the given function will become a class property.
e.g::
class SafeClass(object):
@classproperty
def safe(cls):
return True
class UnsafeClass(object):
@classproperty
def safe(cls):
return False
"""
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
return ClassPropertyDescriptor(func)
def load_settings_sizes():
"""
Load sizes from settings or fallback to the module constants
"""
page_size = AGNOCOMPLETE_DEFAULT_PAGESIZE
settings_page_size = getattr(
settings, 'AGNOCOMPLETE_DEFAULT_PAGESIZE', None)
page_size = settings_page_size or page_size
page_size_min = AGNOCOMPLETE_MIN_PAGESIZE
settings_page_size_min = getattr(
settings, 'AGNOCOMPLETE_MIN_PAGESIZE', None)
page_size_min = settings_page_size_min or page_size_min
page_size_max = AGNOCOMPLETE_MAX_PAGESIZE
settings_page_size_max = getattr(
settings, 'AGNOCOMPLETE_MAX_PAGESIZE', None)
page_size_max = settings_page_size_max or page_size_max
# Query sizes
query_size = AGNOCOMPLETE_DEFAULT_QUERYSIZE
settings_query_size = getattr(
settings, 'AGNOCOMPLETE_DEFAULT_QUERYSIZE', None)
query_size = settings_query_size or query_size
query_size_min = AGNOCOMPLETE_MIN_QUERYSIZE
settings_query_size_min = getattr(
settings, 'AGNOCOMPLETE_MIN_QUERYSIZE', None)
query_size_min = settings_query_size_min or query_size_min
return (
page_size, page_size_min, page_size_max,
query_size, query_size_min,
)
class AgnocompleteBase(with_metaclass(ABCMeta, object)):
"""
Base class for Agnocomplete tools.
"""
# To be overridden by settings, or constructor arguments
page_size = None
page_size_max = None
page_size_min = None
query_size = None
query_size_min = None
url = None
def __init__(self, user=None, page_size=None, url=None):
# Loading the user context
self.user = user
# Load from settings or fallback to constants
settings_page_size, settings_page_size_min, settings_page_size_max, \
query_size, query_size_min = load_settings_sizes()
# Use the class attributes or fallback to settings
self._conf_page_size = self.page_size or settings_page_size
self._conf_page_size_min = self.page_size_min or settings_page_size_min
self._conf_page_size_max = self.page_size_max or settings_page_size_max
# Use instance constructor parameters to eventually override defaults
page_size = page_size or self._conf_page_size
if page_size > self._conf_page_size_max \
or page_size < self._conf_page_size_min:
page_size = self._conf_page_size
# Finally set this as the wanted page_size
self._page_size = page_size
# set query sizes
self._query_size = self.query_size or query_size
self._query_size_min = self.query_size_min or query_size_min
# Eventual custom URL
self._url = url
def set_agnocomplete_field(self, field):
self.agnocomplete_field = field
@classproperty
def slug(cls):
"""
Return the key used in the register, used as a slug for the URL.
You can override this by adding a class property.
"""
return cls.__name__
def get_url(self):
return self._url or self.url
def get_page_size(self):
"""
Return the computed page_size
It takes into account:
* class variables
* constructor arguments,
* settings
* fallback to the module constants if needed.
"""
return self._page_size
def get_query_size(self):
"""
Return the computed default query size
It takes into account:
* class variables
* settings,
* fallback to the module constants
"""
return self._query_size
def get_query_size_min(self):
"""
Return the computed minimum query size
It takes into account:
* class variables
* settings,
* fallback to the module constants
"""
return self._query_size_min
@abstractmethod
def get_choices(self):
pass
@abstractmethod
def items(self, query=None, **kwargs):
pass
@abstractmethod
def selected(self, ids):
"""
Return the values (as a tuple of pairs) for the ids provided
"""
pass
def is_valid_query(self, query):
"""
Return True if the search query is valid.
e.g.:
* not empty,
* not too short,
"""
# No query, no item
if not query:
return False
# Query is too short, no item
if len(query) < self.get_query_size_min():
return False
return True
class AgnocompleteChoices(AgnocompleteBase):
"""
Usage Example::
class AgnocompleteColor(AgnocompleteChoices):
choices = (
('red', 'Red'),
('green', 'Green'),
('blue', 'Blue'),
)
"""
choices = ()
def get_choices(self):
return self.choices
def item(self, current_item):
value, label = current_item
return dict(value=value, label=label)
def items(self, query=None, **kwargs):
if not self.is_valid_query(query):
return []
result = copy(self.choices)
if query:
result = filter(lambda x: x[1].lower().startswith(query), result)
result = tuple(result)
# Slicing before rendering
result = result[:self.get_page_size()]
return [self.item(item) for item in result]
def selected(self, ids):
"""
Return the selected options as a list of tuples
"""
result = copy(self.choices)
result = filter(lambda x: x[0] in ids, result)
# result = ((item, item) for item in result)
return list(result)
class AgnocompleteModelBase(with_metaclass(ABCMeta, AgnocompleteBase)):
model = None
requires_authentication = False
@abstractmethod
def get_queryset(self):
pass
@property
def fields(self):
raise NotImplementedError(
"Integrator: You must have a `fields` property")
def get_model(self):
"""
paulmcquad/Python | 11 - Lists/sort revertdigits.py | Python | gpl-3.0 | 181 | 0.055249
def revertdigits( item ):
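# Sort key that reverses the digits of a three-digit number, e.g. 314 -> 413.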
return (item%10)*100 + (int(item/10)%10)*10 + int(item/100)
numlist = [314, 315, 642, 246, 129, 999]
numlist.sort( key=revertdigits )
print( numlist )
voxie-viewer/voxie | filters/downsample.py | Python | mit | 3,821 | 0.001309
#!/usr/bin/python3
#
# Copyright (c) 2014-2022 The Voxie Authors
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import numpy as np
import voxie
args = voxie.parser.parse_args()
context = voxie.VoxieContext(args)
instance = context.createInstance()
if args.voxie_action != 'RunFilter':
raise Exception('Invalid operation: ' + args.voxie_action)
with context.makeObject(context.bus, context.busName, args.voxie_operation, ['de.uni_stuttgart.Voxie.ExternalOperationRunFilter']).ClaimOperationAndCatch() as op:
filterPath = op.FilterObject
pars = op.Parameters
# print (pars)
properties = pars[filterPath._objectPath]['Properties'].getValue('a{sv}')
# print (properties)
inputPath = properties['de.uni_stuttgart.Voxie.Input'].getValue('o')
inputDataPath = pars[inputPath]['Data'].getValue('o')
inputData = context.makeObject(context.bus, context.busName, inputDataPath, [
'de.uni_stuttgart.Voxie.VolumeDataVoxel'])
outputPath = properties['de.uni_stuttgart.Voxie.Output'].getValue('o')
factor = properties['de.uni_stuttgart.Voxie.Filter.Downsample.Factor'].getValue(
'x')
origin = inputData.VolumeOrigin
sizeOrig = inputData.ArrayShape
spacingOrig = np.array(inputData.GridSpacing)
# print (origin, sizeOrig, spacingOrig)
# TODO: Don't cut away data at the end
# size = ((int(sizeOrig[0]) + factor - 1) // factor,
# (int(sizeOrig[1]) + factor - 1) // factor,
# (int(sizeOrig[2]) + factor - 1) // factor)
size = (int(sizeOrig[0]) // factor,
int(sizeOrig[1]) // factor,
int(sizeOrig[2]) // factor)
spacing = spacingOrig * factor
with inputData.GetBufferReadonly() as bufferOld:
arrayOld = bufferOld.array
arrayOld2 = arrayOld[:size[0] * factor,
:size[1] * factor, :size[2] * factor]
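# View the trimmed volume as (nx, f, ny, f, nz, f) so that np.mean can
# average each f**3 block of voxels.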
arrayOld3 = arrayOld2.view()
arrayOld3.shape = size[0], factor, size[1], factor, size[2], factor
dataType = ('float', 32, 'native') # TODO?
with instance.CreateVolumeDataVoxel(size, dataType, origin, spacing) as data:
with data.CreateUpdate() as update, data.GetBufferWritable(update) as buffer:
buffer[:] = 0
zCount = arrayOld3.shape[4]
for z in range(zCount):
buffer[:, :, z] = np.mean(
arrayOld3[:, :, :, :, z, :], axis=(1, 3, 4))
op.SetProgress((z + 1) / zCount)
version = update.Finish()
result = {}
result[outputPath] = {
'Data': voxie.Variant('o', data._objectPath),
'DataVersion': voxie.Variant('o', version._objectPath),
}
op.Finish(result)
richtermondt/inithub-web | inithub/inithub/context_processors.py | Python | mit | 376 | 0.013298
'''
Created on Jun 6, 2014
@author: rtermondt
'''
from django.conf import settings
def global_settings(request):
invitation_system_setting = getattr(settings, 'INVITATION_SYSTEM', None)
if invitation_system_setting == True:
invite_system = True
else:
invite_system = False
return {
'INVITATION_SYSTEM': invite_system
}
regionbibliotekhalland/digitalasagor | tooltip.py | Python | gpl-3.0 | 8,114 | 0.008134
'''Michael Lange <klappnase (at) freakmail (dot) de>
The ToolTip class provides a flexible tooltip widget for Tkinter; it is based on IDLE's ToolTip
module which unfortunately seems to be broken (at least the version I saw).
INITIALIZATION OPTIONS:
anchor : where the text should be positioned inside the widget, must be one of "n", "s", "e", "w", "nw" and so on;
default is "center"
bd : borderwidth of the widget; default is 1 (NOTE: don't use "borderwidth" here)
bg : background color to use for the widget; default is "lightyellow" (NOTE: don't use "background")
delay : time in ms that it takes for the widget to appear on the screen when the mouse pointer has
entered the parent widget; default is 800
fg : foreground (i.e. text) color to use; default is "black" (NOTE: don't use "foreground")
follow_mouse : if set to 1 the tooltip will follow the mouse pointer instead of being displayed
outside of the parent widget; this may be useful if you want to use tooltips for
large widgets like listboxes or canvases; default is 0
font : font to use for the widget; default is system specific
justify : how multiple lines of text will be aligned, must be "left", "right" or "center"; default is "left"
padx : extra space added to the left and right within the widget; default is 4
pady : extra space above and below the text; default is 2
relief : one of "flat", "ridge", "groove", "raised", "sunken" or "solid"; default is "solid"
state : must be "normal" or "disabled"; if set to "disabled" the tooltip will not appear; default is "normal"
text : the text that is displayed inside the widget
textvariable : if set to an instance of Tkinter.StringVar() the variable's value will be used as text for the widget
width : width of the widget; the default is 0, which means that "wraplength" will be used to limit the widgets width
wraplength : limits the number of characters in each line; default is 150
WIDGET METHODS:
configure(**opts) : change one or more of the widget's options as described above; the changes will take effect the
next time the tooltip shows up; NOTE: follow_mouse cannot be changed after widget initialization
Other widget methods that might be useful if you want to subclass ToolTip:
enter() : callback when the mouse pointer enters the parent widget
leave() : called when the mouse pointer leaves the parent widget
motion() : is called when the mouse pointer moves inside the parent widget if follow_mouse is set to 1 and the
tooltip has shown up to continually update the coordinates of the tooltip window
coords() : calculates the screen coordinates of the tooltip window
create_contents() : creates the contents of the tooltip window (by default a Tkinter.Label)
'''
# Ideas gleaned from PySol
import Tkinter
class ToolTip:
def __init__(self, master, text='Your text here', delay=800, **opts):
self.master = master
self._opts = {'anchor':'center', 'bd':1, 'bg':'lightyellow', 'delay':delay, 'fg':'black',\
'follow_mouse':0, 'font':None, 'justify':'left', 'padx':4, 'pady':2,\
'relief':'solid', 'state':'normal', 'text':text, 'textvariable':None,\
'width':0, 'wraplength':150}
self.configure(**opts)
self._tipwindow = None
self._id = None
self._id1 = self.master.bind("<Enter>", self.enter, '+')
self._id2 = self.master.bind("<Leave>", self.leave, '+')
self._id3 = self.master.bind("<ButtonPress>", self.leave, '+')
self._follow_mouse = 0
if self._opts['follow_mouse']:
self._id4 = self.master.bind("<Motion>", self.motion, '+')
self._follow_mouse = 1
def configure(self, **opts):
for key in opts:
if self._opts.has_key(key):
self._opts[key] = opts[key]
else:
raise KeyError('Unknown option: "%s"' % key)
##----these methods handle the callbacks on "<Enter>", "<Leave>" and "<Motion>"---------------##
##----events on the parent widget; override them if you want to change the widget's behavior--##
def enter(self, event=None):
self._schedule()
def leave(self, event=None):
self._unschedule()
self._hide()
def motion(self, event=None):
if self._tipwindow and self._follow_mouse:
x, y = self.coords()
self._tipwindow.wm_geometry("+%d+%d" % (x, y))
##------the methods that do the work:---------------------------------------------------------##
def _schedule(self):
self._unschedule()
if self._opts['state'] == 'disabled':
return
self._id = self.master.after(self._opts['delay'], self._show)
def _unschedule(self):
id = self._id
self._id = None
if id:
self.master.after_cancel(id)
def _show(self):
if self._opts['state'] == 'disabled':
self._unschedule()
return
if not self._tipwindow:
self._tipwindow = tw = Tkinter.Toplevel(self.master)
# hide the window until we know the geometry
tw.withdraw()
tw.wm_overrideredirect(1)
if tw.tk.call("tk", "windowingsystem") == 'aqua':
tw.tk.call("::tk::unsupported::MacWindowStyle", "style", tw._w, "help", "none")
self.create_contents()
tw.update_idletasks()
x, y = self.coords()
tw.wm_geometry("+%d+%d" % (x, y))
tw.deiconify()
def _hide(self):
tw = self._tipwindow
self._tipwindow = None
if tw:
tw.destroy()
##----these methods might be overridden in derived classes:----------------------------------##
def coords(self):
# The tip window must be completely outside the master widget;
# otherwise when the mouse enters the tip window we get
# a leave event and it disappears, and then we get an enter
# event and it reappears, and so on forever :-(
# or we take care that the mouse pointer is always outside the tipwindow :-)
tw = self._tipwindow
twx, twy = tw.winfo_reqwidth(), tw.winfo_reqheight()
w, h = tw.winfo_screenwidth(), tw.winfo_screenheight()
# calculate the y coordinate:
if self._follow_mouse:
y = tw.winfo_pointery() + 20
# make sure the tipwindow is never outside the screen:
if y + twy > h:
y = y - twy - 30
else:
y = self.master.winfo_rooty() + self.master.winfo_height() + 3
if y + twy > h:
y = self.master.winfo_rooty() - twy - 3
# we can use the same x coord in both cases:
x = tw.winfo_pointerx() - twx / 2
if x < 0:
x = 0
elif x + twx > w:
x = w - twx
return x, y
def create_contents(self):
opts = self._opts.copy()
for opt in ('delay', 'follow_mouse', 'state'):
del opts[opt]
label = Tkinter.Label(self._tipwindow, **opts)
label.pack()
##---------demo code-----------------------------------##
def demo():
root = Tkinter.Tk(className='ToolTip-demo')
l = Tkinter.Listbox(root)
l.insert('end', "I'm a listbox")
l.pack(side='top')
t1 = ToolTip(l, follow_mouse=1, text="I'm a tooltip with follow_mouse set to 1, so I won't be placed outside my parent")
b = Tkinter.Button(root, text='Quit', command=root.quit)
b.pack(side='bottom')
t2 = ToolTip(b, text='Enough of this')
root.mainloop()
if __name__ == '__main__':
demo()
johncburnett/Angelus | src/angelus.py | Python | gpl-2.0 | 953 | 0.008395
#!/usr/bin/env python
# angelus.py - John Burnett & Will Johnson (c)2015
#
# Angelus does the following:
# -FFT analysis
# -Partial tracking
# -Modal analysis
# -Resynthesis
#
# Angelus will eventually do the following:
# -FFT Analysis -> Notation
# -Modal Analysis -> 3D mesh (and reverse?)
from FFT_Analyzer import FFT_Analyzer
from writeRObU import writeRObU
from Synthesizer import Synthesizer
import sys
def main():
fname = sys.argv[1]
title = parse_fname(fname)
infile = "../audio/" + fname
outfile = "../build/" + title + ".ro"
analysis = FFT_Analyzer(infile)
analysis.perform_analysis()
analysis.stft(20)
analysis.get_modal_data(30)
out = writeRObU(outfile, analysis.modal_model)
out.write()
synth = Synthesizer(analysis, title)
synth.write_wav()
#synth.write_residual()
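# Return the file name up to the first '.', e.g. "tone.wav" -> "tone".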
def parse_fname(fname):
s = ""
for l in fname:
if l != '.': s += l
else: return s
main()
tvtsoft/odoo8 | addons/sale_contract/tests/__init__.py | Python | agpl-3.0 | 78 | 0
# -*- coding: utf-8 -*-
import common_sale_contract
import test_sale_contract
beardypig/streamlink | tests/streams/test_stream_wrappers.py | Python | bsd-2-clause | 655 | 0
import unittest
from streamlink.stream import StreamIOIterWrapper
class TestPluginStream(unittest.TestCase):
def test_iter(self):
def generator():
yield b"1" * 8192
yield b"2" * 4096
yield b"3" * 20
|
48
fd = StreamIOIterWrapper(generator())
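# read() sizes need not line up with the generator's chunk boundaries.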
self.assertEqual(fd.read(4096), b"1" * 4096)
self.assertEqual(fd.read(2048), b"1" * 2048)
self.assertEqual(fd.read(2048), b"1" * 2048)
self.assertEqual(fd.read(1), b"2")
self.assertEqual(fd.read(4095), b"2" * 4095)
self.assertEqual(fd.read(1536), b"3" * 1536)
self.assertEqual(fd.read(), b"3" * 512)
garyp/djwed | photologue/urls.py | Python | mit | 3,400 | 0.008529
from django.conf import settings
from django.conf.urls.defaults import *
from models import *
from django.views.generic import date_based, list_detail
from django.contrib.auth.decorators import login_required
# Number of random images from the gallery to display.
SAMPLE_SIZE = ":%s" % getattr(settings, 'GALLERY_SAMPLE_SIZE', 8)
# galleries
gallery_args = {'date_field': 'date_added', 'allow_empty': True, 'queryset': Gallery.objects.filter(is_public=True), 'extra_context':{'sample_size':SAMPLE_SIZE}}
urlpatterns = patterns('django.views.generic.date_based',
url(r'^gallery/(?P<year>\d{4})/(?P<month>[a-z]{3})/(?P<day>\w{1,2})/(?P<slug>[\-\d\w]+)/$', login_required(date_based.object_detail), {'date_field': 'date_added', 'slug_field': 'title_slug', 'queryset': Gallery.objects.filter(is_public=True), 'extra_context':{'sample_size':SAMPLE_SIZE}}, name='pl-gallery-detail'),
url(r'^gallery/(?P<year>\d{4})/(?P<month>[a-z]{3})/(?P<day>\w{1,2})/$', login_required(date_based.archive_day), gallery_args, name='pl-gallery-archive-day'),
url(r'^gallery/(?P<year>\d{4})/(?P<month>[a-z]{3})/$', login_required(date_based.archive_month), gallery_args, name='pl-gallery-archive-month'),
url(r'^gallery/(?P<year>\d{4})/$', login_required(date_based.archive_year), gallery_args, name='pl-gallery-archive-year'),
url(r'^gallery/?$', login_required(date_based.archive_index), gallery_args, name='pl-gallery-archive'),
)
urlpatterns += patterns('django.views.generic.list_detail',
url(r'^gallery/(?P<slug>[\-\d\w]+)/$', login_required(list_detail.object_detail), {'slug_field': 'title_slug', 'queryset': Gallery.objects.filter(is_public=True), 'extra_context':{'sample_size':SAMPLE_SIZE}}, name='pl-gallery'),
url(r'^gallery/page/(?P<page>[0-9]+)/$', login_required(list_detail.object_list), {'queryset': Gallery.objects.filter(is_public=True), 'allow_empty': True, 'paginate_by': 8, 'extra_context':{'sample_size':SAMPLE_SIZE}}, name='pl-gallery-list'),
)
# photographs
photo_args = {'date_field': 'date_added', 'allow_empty': True, 'queryset': Photo.objects.filter(is_public=True)}
urlpatterns += patterns('django.views.generic.date_based',
url(r'^photo/(?P<year>\d{4})/(?P<month>[a-z]{3})/(?P<day>\w{1,2})/(?P<slug>[\-\d\w]+)/$', login_required(date_based.object_detail), {'date_field': 'date_added', 'slug_field': 'title_slug', 'queryset': Photo.objects.filter(is_public=True)}, name='pl-photo-detail'),
url(r'^photo/(?P<year>\d{4})/(?P<month>[a-z]{3})/(?P<day>\w{1,2})/$', login_required(date_based.archive_day), photo_args, name='pl-photo-archive-day'),
url(r'^photo/(?P<year>\d{4})/(?P<month>[a-z]{3})/$', login_required(date_based.archive_month), photo_args, name='pl-photo-archive-month'),
url(r'^photo/(?P<year>\d{4})/$', login_required(date_based.archive_year), photo_args, name='pl-photo-archive-year'),
url(r'^photo/$', login_required(date_based.archive_index), photo_args, name='pl-photo-archive'),
)
urlpatterns += patterns('django.views.generic.list_detail',
url(r'^photo/(?P<slug>[\-\d\w]+)/$', login_required(list_detail.object_detail), {'slug_field': 'title_slug', 'queryset': Photo.objects.filter(is_public=True)}, name='pl-photo'),
url(r'^photo/page/(?P<page>[0-9]+)/$', login_required(list_detail.object_list), {'queryset': Photo.objects.filter(is_public=True), 'allow_empty': True, 'paginate_by': 20}, name='pl-photo-list'),
)
darshan95/Shift-Reduce-Chunk-Expander | src/ssf_reader.py | Python | mit | 4,608 | 0.052951
#!/usr/bin/python -*- coding:utf-8 -*-
__Author__ = "Riyaz Ahmad Bhat"
__Email__ = "[email protected]"
import re
from collections import namedtuple
from sanity_checker import SanityChecker
class DefaultList(list):
"""Equivalent of Default dictionaries for Indexing Errors."""
def __init__(self, default=None):
self.default = default
list.__init__(self)
def __getitem__(self, index):
try: return list.__getitem__(self, index)
except IndexError: return self.default
class SSFReader (SanityChecker):
def __init__ (self, sentence):
super(SSFReader, self).__init__()
self.id_ = int()
self.nodeList = list()
self.chunk_word = dict()
self.sentence = sentence
self.modifierModified = dict()
self.node = namedtuple('node',
('id', 'head', 'children', 'pos', 'poslcat', 'af', 'vpos', 'name','drel','parent',
'chunkId', 'chunkType', 'mtype', 'troot', 'coref', 'stype', 'voicetype', 'posn'))
self.features = namedtuple('features',
('lemma','cat','gen','num','per','case','vib','tam'))
def getAnnotations (self):
children_ = list()
for line in self.sentence.split("\n"):
nodeInfo = line.decode("utf-8").split("\t")
if nodeInfo[0].isdigit():
assert len(nodeInfo) == 4 # no need to process trash! FIXME
attributeValue_pairs = self.FSPairs(nodeInfo[3][4:-1])
attributes = self.updateFSValues(attributeValue_pairs)
h = attributes.get #NOTE h -> head node attributes
elif nodeInfo[0].replace(".",'',1).isdigit():
assert (len(nodeInfo) == 4) and (nodeInfo[1] and nodeInfo[2] != '') # FIXME
self.id_ += 1
pos_ = nodeInfo[2].encode("utf-8").decode("ascii",'ignore').encode("ascii")
wordForm_ = nodeInfo[1]
attributeValue_pairs = self.FSPairs(nodeInfo[3][4:-1])
if attributeValue_pairs['name'] == h('head_'):# NOTE head word of the chunk
self.nodeList.append(self.node(str(self.id_),wordForm_,children_,pos_,h('poslcat_'),
self.features(h('lemma_') if h('lemma_') else wordForm_ ,h('cat_'),h('gen_'), h('num_'),
h('per_'),h('case_'),h('vib_'),h('tam_')),h('vpos_'),h('head_'),h('drel_'),
h('parent_'),h('chunkId_'),":".join(('head',h('chunkId_'))),h('mtype_'),h('troot_'),
h('coref_'),h('stype_'),h('voicetype_'),h('posn_')))
self.modifierModified[h('chunkId_')] = h('parent_')
self.chunk_word[h('chunkId_')] = h('head_')
else:
attributes = self.updateFSValues(attributeValue_pairs)
c = attributes.get #NOTE c -> child node attributes
children_.append(self.node(str(self.id_),wordForm_,[],pos_,c('poslcat_'),self.features(c('lemma_') \
if c('lemma_') else wordForm_ ,c('cat_'),c('gen_'),c('num_'),c('per_'),c('case_'),c('vib_'),
c('tam_')),c('vpos_'),c('name_'),"_","_",None,":".join(('child',h('chunkId_'))),c('mtype_'),
c('troot_'),c('coref_'),None, None, c('posn_')))
else: children_ = list()
return self
def FSPairs (self, FS) :
feats = dict()
for feat in FS.split():
if "=" not in feat:continue
feat = re.sub("af='+","af='",feat.replace("dmrel=",'drel='))
assert len(feat.split("=")) == 2
attribute,value = feat.split("=")
feats[attribute] = value
return feats
def morphFeatures (self, AF):
"LEMMA,CAT,GEN,NUM,PER,CASE,VIB,TAM"
assert len(AF[:-1].split(",")) == 8 # no need to process trash! FIXME
lemma_,cat_,gen_,num_,per_,case_,vib_,tam_ = AF.split(",")
if len(lemma_) > 1: lemma_ = lemma_.strip("'")
return lemma_.strip("'"),cat_,gen_,num_,per_,case_,vib_,tam_.strip("'")
def updateFSValues (self, attributeValue_pairs):
attributes = dict(zip(['head_','poslcat_','af_','vpos_','name_','drel_','parent_','mtype_','troot_','chunkId_',\
'coref_','stype_','voicetype_','posn_'], [None] * 14))
attributes.update(dict(zip(['lemma_','cat_','gen_','num_','per_','case_','vib_','tam_'], [''] * 8)))
for key,value in attributeValue_pairs.items():
if key == "af":
attributes['lemma_'],attributes['cat_'],attributes['gen_'],attributes['num_'],\
attributes['per_'],attributes['case_'],attributes['vib_'],attributes['tam_'] = \
self.morphFeatures (value)
elif key == "drel":
assert len(value.split(":")) == 2 # no need to process trash! FIXME
attributes['drel_'], attributes['parent_'] = re.sub("'|\"",'',value).split(":")
assert attributes['drel_'] and attributes['parent_'] != "" # no need to process trash! FIXME
else:
variable = str(key) + "_"
if variable == "name_": attributes['chunkId_'] = re.sub("'|\"",'',value)
attributes[variable] = re.sub("'|\"",'',value)
return attributes
ahb0327/intellij-community | python/testData/completion/importQualifiedNamespacePackage/a.after.py | Python | apache-2.0 | 18 | 0
import nspkg1.foo
glaudsonml/kurgan-ai | libs/Tree.py | Python | apache-2.0 | 2,507 | 0.001197
'''
Tree from:
http://www.quesucede.com/page/show/id/python-3-tree-implementation
'''
from urllib.parse import urlparse
import os
(_ROOT, _DEPTH, _BREADTH) = range(3)
class Node:
def __init__(self, identifier):
self.__identifier = identifier
self.__children = []
@property
def identifier(self):
return self.__identifier
@property
def children(self):
return self.__children
def add_child(self, identifier):
self.__children.append(identifier)
class Tree:
def __init__(self):
self.__nodes = {}
@property
def nodes(self):
return self.__nodes
def add_node(self, identifier, parent=None):
print("identifier: " + identifier + " parent= " + str(parent))
node = Node(identifier)
self[identifier] = node
if parent is not None:
self[parent].add_child(identifier)
return node
def display(self, identifier, depth=_ROOT):
children = self[identifier].children
if depth == _ROOT:
print("{0}".format(identifier))
else:
print("\t"*depth, "{0}".format(identifier))
depth += 1
for child in children:
print("\t"*depth, "{0}".format(identifier))
self.display(child, depth) # recursive call
def traverse(self, identifier, mode=_DEPTH):
yield identifier
queue = self[identifier].children
while queue:
yield queue[0]
expansion = self[queue[0]].children
if mode == _DEPTH:
queue = expansion + queue[1:] # depth-first
elif mode == _BREADTH:
queue = queue[1:] + expansion # width-first
def __getitem__(self, key):
return self.__nodes[key]
def __setitem__(self, key, item):
self.__nodes[key] = item
'''
tree = Tree()
t = print("{0}".format("palestras"))
tree.add_node("Harry") # root node
tree.add_node("Jane", t)
tree.add_node("Bill", "Harry")
tree.add_node("Joe", "Jane")
tree.add_node("Diane", "Jane")
tree.add_node("George", "Diane")
tree.add_node("Mary", "Diane")
tree.add_node("Jill", "George")
tree.add_node("Carol", "Jill")
tree.add_node("Grace", "Bill")
tree.add_node("Mark", "Jane")
tree.display("Harry")
print("***** DEPTH-FIRST ITERATION *****")
for node in tree.traverse("Harry"):
print(node)
print("***** BREADTH-FIRST ITERATION *****")
for node in tree.traverse("Harry", mode=_BREADTH):
print(node)
'''
yosefk/heapprof | heapprof.py | Python | bsd-2-clause | 3,588 | 0.028428
#!/usr/bin/python
import sys, commands, struct, operator, subprocess, os
if len(sys.argv) != 3:
print 'usage:',sys.argv[0],'<program> <core>'
sys.exit(1)
prog, core = sys.argv[1:]
# finds out the size of void*/size_t. could be hardcoded for speed...
try:
cell = int(commands.getoutput('gdb '+prog+r''' -ex 'printf "cell %d\n", sizeof(void*)' -ex q | grep cell''').split()[1])
except:
print 'gdb failed to open',prog,core,'- assuming a 32b pointer'
cell = 4
fmt = {4:'I',8:'Q'}[cell]
def gdb_sym_info(addrs,exe):
gdb = subprocess.Popen(['gdb',prog,core], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
info = {}
found = 0
for addr in addrs:
if addr:
gdb.stdin.write('info symbol 0x%x\n'%addr)
gdb.stdin.write('list *0x%x\n'%addr)
gdb.stdin.write('printf "\\ndone\\n"\n')
gdb.stdin.flush()
line = ''
lineinfo = None
syminfo = 'UNKNOWN'
while line != 'done':
line = gdb.stdout.readline().strip()
if 'is in' in line: lineinfo = line.split('is in ')[1]
if 'in section' in line: syminfo = line.split('(gdb) ')[1]
if lineinfo:
info[addr] = lineinfo
else:
info[addr] = syminfo
found += int(info[addr] != 'UNKNOWN')
return info, found
def addr2line_sym_info(addrs,exe):
addr2line = subprocess.Popen('addr2line -f -e'.split()+[exe], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
info = {}
for addr in addrs:
if addr:
addr2line.stdin.write('0x%x\n'%addr)
addr2line.stdin.flush()
info[addr] = addr2line.stdout.readline().strip()+' '+addr2line.stdout.readline().strip()
return info
def sym_info(addrs,exe):
if 'HEAPPROF_ADDR2LINE' in os.environ:
gdb_found = 0
else:
syminfo, gdb_found = gdb_sym_info(addrs, prog)
if gdb_found < 1: # gdb didn't manage to find anything - perhaps the core dump is in a custom format
syminfo = addr2line_sym_info(addrs, prog)
return syminfo
# a silly guard against "non-blocks" - occurences of HeaP and ProF
# in code instead of data
def is_block(s,e): return (e-s)%cell == 0 and (e-s)/cell < 100
class Block:
def __init__(self, metadata):
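# Metadata layout: one machine word holding the block size,
# followed by the recorded call-stack addresses.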
self.size = struct.unpack(fmt, metadata[0:cell])[0]
self.stack = struct.unpack('%d'%(len(metadata)/cell - 1)+fmt, metadata[cell:])
def find_blocks(bytes):
blocks = []
end_index = 0
while True:
start_index = bytes.find('HeaP',end_index)
end_index = bytes.find('ProF',start_index)
if not is_block(start_index, end_index):
end_index = start_index + cell # search again
else:
if min(start_index, end_index) < 0:
break
blocks.append(Block(bytes[start_index+cell:end_index])) # this assumes little endian...
return blocks
def code_addrs(blocks):
return list(reduce(operator.or_, [set(block.stack) for block in blocks]))
def report(blocks, syminfo):
stack2sizes = {}
for block in blocks:
stack2sizes.setdefault(block.stack,list()).append(block.size)
total = sorted([(sum(sizes), stack) for stack, sizes in stack2sizes.iteritems()])
heapsize = sum([size for size, stack in total])
for size, stack in reversed(total):
print '%d%% %d %s'%(int(100.*size/heapsize), size, stack2sizes[stack])
for addr in stack:
if addr:
print ' 0x%x'%addr, syminfo[addr]
blocks = find_blocks(open(core,'rb').read())
if not blocks:
print 'no heap blocks found in the core dump (searched for metadata enclosed in the magic string HeaP...ProF)'
sys.exit(1)
syminfo = sym_info(code_addrs(blocks), prog)
report(blocks, syminfo)
lunapocket/powerOverWhelming | project/src/gui/guiSelectCode.py | Python | gpl-3.0 | 6,372 | 0.010233
import sys
from PyQt5 import QtWidgets, QtGui, QtCore
from PyQt5 import uic
from . import guiStart
from . import guiCompileSuccess
# sys.path.insert(1, 'C:/Users/GuSan/Desktop/powerOverWhelming/project/src/comp_exec')
from ..comp_exec import validation
from . import guiErrorCode
class GuiSelectCode(QtWidgets.QMainWindow) :
def setupUi(self, SelectCode):
SelectCode.setObjectName("SelectCode")
SelectCode.resize(1300, 1100)
self.centralwidget = QtWidgets.QWidget(SelectCode)
self.centralwidget.setObjectName("centralwidget")
self.opt_select_code_3 = QtWidgets.QRadioButton(self.centralwidget)
self.opt_select_code_3.setGeometry(QtCore.QRect(970, 100, 21, 22))
self.opt_select_code_3.setText("")
self.opt_select_code_3.setObjectName("opt_select_code_3")
self.txt_select_code_1 = QtWidgets.QPlainTextEdit(self.centralwidget)
self.txt_select_code_1.setGeometry(QtCore.QRect(150, 140, 320, 721))
self.txt_select_code_1.setObjectName("txt_select_code_1")
self.opt_select_code_1 = QtWidgets.QRadioButton(self.centralwidget)
self.opt_select_code_1.setGeometry(QtCore.QRect(310, 100, 21, 22))
self.opt_select_code_1.setText("")
self.opt_select_code_1.setObjectName("opt_select_code_1")
self.txt_select_code_3 = QtWidgets.QPlainTextEdit(self.centralwidget)
self.txt_select_code_3.setGeometry(QtCore.QRect(810, 140, 320, 721))
self.txt_select_code_3.setObjectName("txt_select_code_3")
self.label = QtWidgets.QLabel(self.centralwidget)
self.label.setGeometry(QtCore.QRect(560, 40, 201, 41))
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(True)
font.setWeight(75)
self.label.setFont(font)
self.label.setAlignment(QtCore.Qt.AlignCenter)
self.label.setObjectName("label")
self.btn_compile_start = QtWidgets.QPushButton(self.centralwidget)
self.btn_compile_start.setGeometry(QtCore.QRect(980, 890, 151, 51))
self.btn_compile_start.setObjectName("btn_compile_start")
self.btn_return_search = QtWidgets.QPushButton(self.centralwidget)
self.btn_return_search.setGeometry(QtCore.QRect(980, 970, 151, 51))
self.btn_return_search.setObjectName("btn_return_search")
self.opt_select_code_2 = QtWidgets.QRadioButton(self.centralwidget)
self.opt_select_code_2.setGeometry(QtCore.QRect(640, 100, 21, 22))
self.opt_select_code_2.setText("")
self.opt_select_code_2.setObjectName("opt_select_code_2")
self.txt_select_code_2 = QtWidgets.QPlainTextEdit(self.centralwidget)
self.txt_select_code_2.setGeometry(QtCore.QRect(480, 140, 320, 721))
self.txt_select_code_2.setObjectName("txt_select_code_2")
self.progress = QtWidgets.QProgressBar(self.centralwidget)
self.progress.setGeometry(QtCore.QRect(150, 910, 791, 31))
self.progress.setProperty("value", 0)
self.progress.setObjectName("progress")
SelectCode.setCentralWidget(self.centralwidget)
self.statusbar = QtWidgets.QStatusBar(SelectCode)
self.statusbar.setObjectName("statusbar")
SelectCode.setStatusBar(self.statusbar)
self.retranslateUi(SelectCode)
QtCore.QMetaObject.connectSlotsByName(SelectCode)
def retranslateUi(self, SelectCode):
_translate = QtCore.QCoreApplication.translate
SelectCode.setWindowTitle(_translate("SelectCode", "Select Code"))
self.label.setText(_translate("SelectCode", "Select Code"))
self.btn_compile_start.setText(_translate("SelectCode", "Compile!"))
self.btn_return_search.setText(_translate("SelectCode", "Return to Search"))
def __init__(self):
QtWidgets.QMainWindow.__init__(self)
self.setupUi(self)
self.initUi()
def initUi(self) :
self.btn_compile_start.clicked.connect(self.compile_click)
self.opt_select_code_1.setChecked(True)
self.btn_return_search.clicked.connect(self.return_search)
#window_start = guiStart.GuiStart(self)
#self.txt_select_code_1.setPlainText(window_start.inputOutput(window_start.edit_keyword.toPlainText())[0])
#self.txt_select_code_2.setPlainText(window_start.inputOutput(window_start.edit_keyword.toPlainText())[1])
#self.txt_select_code_3.setPlainText(window_start.inputOutput(window_start.edit_keyword.toPlainText())[2])
def return_search(self) :
global window_search_code
self.close()
window_search_code = guiStart.GuiStart()
window_search_code.show()
def compile_click(self) :
global window_compile_success
global window_compile_fail
window_compile_success = guiCompileSuccess.GuiCompileSuccess()
window_compile_fail = guiErrorCode.GuiErrorCode()
self.completed = 0
while self.completed<100 :
self.completed+=0.001
self.progress.setValue(self.completed)
QtWidgets.QApplication.processEvents()
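        # validation() is expected to return (compiler_output, error_flag);
        # error_flag == 1 marks a failed compile (see the branch below).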
tupleCompile = validation.validation(self.loadText(), 'cpp')
print(tupleCompile[0])
if(tupleCompile[1]==1) :
msg = QtWidgets.QMessageBox()
msg.setText("컴파일 에러")
msg.setWindowTitle("컴파일 에러")
msg.show()
msg.exec_()
window_compile_fail.txt_error_code.setPlainText(self.loadText())
window_compile_fail.txt_error_context.setPlainText(tupleCompile[0])
window_compile_fail.show()
return window_compile_fail
else :
window_compile_success.txt_code_complete.setPlainText(self.loadText())
window_compile_success.txt_output_test.setPlainText(tupleCompile[0])
window_compile_success.show()
return window_compile_success
def loadText(self) :
if(self.opt_select_code_1.isChecked()) :
print("radioButton 1 is toggled")
return self.txt_select_code_1.toPlainText()
elif(self.opt_select_code_2.isChecked()) :
print("radioButton 2 is toggled")
return self.txt_select_code_2.toPlainText()
else :
print("radioButton 3 is toggled")
return self.txt_select_code_3.toPlainText()
|
atvcaptain/enigma2
|
lib/python/Plugins/SystemPlugins/SoftwareManager/SoftwareTools.py
|
Python
|
gpl-2.0
| 9,405
| 0.031154
|
# -*- coding: iso-8859-1 -*-
from enigma import eConsoleAppContainer
from Components.Console import Console
from Components.About import about
from Components.PackageInfo import PackageInfoHandler
from Components.Language import language
from Components.Sources.List import List
from Components.Ipkg import IpkgComponent
from Components.Network import iNetwork
from Tools.Directories import pathExists, fileExists, resolveFilename, SCOPE_METADIR
from Tools.HardwareInfo import HardwareInfo
from time import time
from boxbranding import getImageVersion
class SoftwareTools(PackageInfoHandler):
lastDownloadDate = None
NetworkConnectionAvailable = None
list_updating = False
available_updates = 0
available_updatelist = []
available_packetlist = []
installed_packetlist = {}
def __init__(self):
aboutInfo = getImageVersion()
if aboutInfo.startswith("dev-"):
self.ImageVersion = 'Experimental'
else:
self.ImageVersion = 'Stable'
self.language = language.getLanguage()[:2] # getLanguage returns e.g. "fi_FI" for "language_country"
PackageInfoHandler.__init__(self, self.statusCallback, blocking = False, neededTag = 'ALL_TAGS', neededFlag = self.ImageVersion)
self.directory = resolveFilename(SCOPE_METADIR)
self.hardware_info = HardwareInfo()
self.list = List([])
self.NotifierCallback = None
self.Console = Console()
self.UpdateConsole = Console()
self.cmdList = []
self.unwanted_extensions = ('-dbg', '-dev', '-doc', '-staticdev', '-src')
self.ipkg = IpkgComponent()
self.ipkg.addCallback(self.ipkgCallback)
def statusCallback(self, status, progress):
pass
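    # Rough flow of an update check, as wired by the methods below:
    #   startSoftwareTools -> checkNetworkCB -> getUpdates
    #     -> ipkg CMD_UPDATE (ipkgCallback) -> startIpkgListAvailable
    #     -> startInstallMetaPackage -> startIpkgListInstalled
    #     -> countUpdates -> NotifierCallback(True/False)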
def startSoftwareTools(self, callback = None):
if callback is not None:
self.NotifierCallback = callback
iNetwork.checkNetworkState(self.checkNetworkCB)
def checkNetworkCB(self, data):
if data is not None:
if data <= 2:
self.NetworkConnectionAvailable = True
self.getUpdates()
else:
self.NetworkConnectionAvailable = False
self.getUpdates()
def getUpdates(self, callback = None):
if self.lastDownloadDate is None:
if self.NetworkConnectionAvailable:
self.lastDownloadDate = time()
if self.list_updating is False and callback is None:
self.list_updating = True
self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
elif self.list_updating is False and callback is not None:
self.list_updating = True
self.NotifierCallback = callback
self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
elif self.list_updating is True and callback is not None:
self.NotifierCallback = callback
else:
self.list_updating = False
if callback is not None:
callback(False)
elif self.NotifierCallback is not None:
self.NotifierCallback(False)
else:
if self.NetworkConnectionAvailable:
self.lastDownloadDate = time()
if self.list_updating is False and callback is None:
self.list_updating = True
self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
elif self.list_updating is False and callback is not None:
self.list_updating = True
self.NotifierCallback = callback
self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
elif self.list_updating is True and callback is not None:
self.NotifierCallback = callback
else:
if self.list_updating and callback is not None:
self.NotifierCallback = callback
self.startIpkgListAvailable()
else:
self.list_updating = False
if callback is not None:
callback(False)
elif self.NotifierCallback is not None:
self.NotifierCallback(False)
def ipkgCallback(self, event, param):
if event == IpkgComponent.EVENT_ERROR:
self.list_updating = False
if self.NotifierCallback is not None:
self.NotifierCallback(False)
elif event == IpkgComponent.EVENT_DONE:
if self.list_updating:
self.startIpkgListAvailable()
pass
def startIpkgListAvailable(self, callback = None):
if callback is not None:
self.list_updating = True
if self.list_updating:
if not self.UpdateConsole:
self.UpdateConsole = Console()
cmd = self.ipkg.ipkg + " list"
self.UpdateConsole.ePopen(cmd, self.IpkgListAvailableCB, callback)
def IpkgListAvailableCB(self, result, retval, extra_args = None):
(callback) = extra_args or None
if result:
if self.list_updating:
self.available_packetlist = []
for x in result.splitlines():
tokens = x.split(' - ')
name = tokens[0].strip()
if not any(name.endswith(x) for x in self.unwanted_extensions):
l = len(tokens)
version = l > 1 and tokens[1].strip() or ""
descr = l > 2 and tokens[2].strip() or ""
self.available_packetlist.append([name, version, descr])
if callback is None:
self.startInstallMetaPackage()
else:
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
callback(True)
else:
self.list_updating = False
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
if callback is not None:
callback(False)
def startInstallMetaPackage(self, callback = None):
if callback is not None:
self.list_updating = True
if self.list_updating:
if self.NetworkConnectionAvailable:
if not self.UpdateConsole:
self.UpdateConsole = Console()
cmd = self.ipkg.ipkg + " install enigma2-meta enigma2-plugins-meta enigma2-skins-meta"
self.UpdateConsole.ePopen(cmd, self.InstallMetaPackageCB, callback)
else:
self.InstallMetaPackageCB(True)
def InstallMetaPackageCB(self, result, retval = None, extra_args = None):
(callback) = extra_args or None
if result:
self.fillPackagesIndexList()
if callback is None:
self.startIpkgListInstalled()
else:
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
callback(True)
else:
self.list_updating = False
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
if callback is not None:
callback(False)
def startIpkgListInstalled(self, callback = None):
if callback is not None:
self.list_updating = True
if self.list_updating:
if not self.UpdateConsole:
self.UpdateConsole = Console()
cmd = self.ipkg.ipkg + " list_installed"
self.UpdateConsole.ePopen(cmd, self.IpkgListInstalledCB, callback)
def IpkgListInstalledCB(self, result, retval, extra_args = None):
(callback) = extra_args or None
if result:
self.installed_packetlist = {}
for x in result.splitlines():
tokens = x.split(' - ')
name = tokens[0].strip()
if not any(name.endswith(x) for x in self.unwanted_extensions):
l = len(tokens)
version = l > 1 and tokens[1].strip() or ""
self.installed_packetlist[name] = version
            for package in self.packagesIndexlist[:]:
if not self.verifyPrerequisites(package[0]["prerequisites"]):
self.packagesIndexlist.remove(package)
for package in self.packagesIndexlist[:]:
attributes = package[0]["attributes"]
if "packagetype" in attributes:
if attributes["packagetype"] == "internal":
self.packagesIndexlist.remove(package)
if callback is None:
self.countUpdates()
else:
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
callback(True)
else:
self.list_updating = False
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
if callback is not None:
callback(False)
def countUpdates(self, callback = None):
self.available_updates = 0
self.available_updatelist = []
for package in self.packagesIndexlist[:]:
attributes = package[0]["attributes"]
packagename = attributes["packagename"]
for x in self.available_packetlist:
if x[0] == packagename:
if packagename in self.installed_packetlist:
if self.installed_packetlist[packagename] != x[1]:
self.available_updates +=1
self.available_updatelist.append([packagename])
self.list_updating = False
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
if callback is not None:
callback(True)
callback = None
elif self.NotifierCallback is not None:
self.NotifierCallback(True)
self.NotifierCallback = None
def star
|
couchbaselabs/celery
|
celery/bin/celeryd_detach.py
|
Python
|
bsd-3-clause
| 4,792
| 0.000417
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import with_statement
import os
import sys
from optparse import OptionParser, BadOptionError
from celery import __version__
from celery.platforms import EX_FAILURE, detached
from celery.utils.log import get_logger
from celery.bin.base import daemon_options, Option
logger = get_logger(__name__)
OPTION_LIST = daemon_options(default_pidfile="celeryd.pid") + (
Option("--fake",
default=False, action="store_true", dest="fake",
help="Don't fork (for debugging purposes)"), )
def detach(path, argv, logfile=None, pidfile=None, uid=None,
gid=None, umask=0, working_directory=None, fake=False, ):
with detached(logfile, pidfile, uid, gid, umask, working_directory, fake):
try:
os.execv(path, [path] + argv)
except Exception:
from celery import current_app
current_app.log.setup_logging_subsystem("ERROR", logfile)
logger.critical("Can't exec %r", " ".join([path] + argv),
exc_info=True)
return EX_FAILURE
class PartialOptionParser(OptionParser):
def __init__(self, *args, **kwargs):
self.leftovers = []
OptionParser.__init__(self, *args, **kwargs)
def _process_long_opt(self, rargs, values):
arg = rargs.pop(0)
if "=" in arg:
opt, next_arg = arg.split("=", 1)
rargs.insert(0, next_arg)
had_explicit_value = True
else:
opt = arg
had_explicit_value = False
try:
opt = self._match_long_opt(opt)
option = self._long_opt.get(opt)
except BadOptionError:
option = None
if option:
if option.takes_value():
nargs = option.nargs
if len(rargs) < nargs:
if nargs == 1:
self.error("%s option requires an argument" % opt)
else:
self.error("%s option requires %d arguments" % (
opt, nargs))
elif nargs == 1:
value = rargs.pop(0)
else:
value = tuple(rargs[0:nargs])
del rargs[0:nargs]
elif had_explicit_value:
self.error("%s option does not take a value" % opt)
else:
value = None
option.process(opt, value, values, self)
else:
self.leftovers.append(arg)
def _process_short_opts(self, rargs, values):
arg = rargs[0]
try:
OptionParser._process_short_opts(self, rargs, values)
except BadOptionError:
self.leftovers.append(arg)
if rargs and not rargs[0][0] == "-":
self.leftovers.append(rargs.pop(0))
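# Hedged usage sketch (not part of celery; assumes daemon_options defines
# --pidfile): recognised options are parsed normally, unrecognised ones are
# collected in `leftovers` so they can be forwarded to the celeryd child.
#
#     parser = PartialOptionParser(option_list=OPTION_LIST)
#     opts, _ = parser.parse_args(["--pidfile=/tmp/w.pid", "--no-color"])
#     opts.pidfile        # '/tmp/w.pid'   (recognised daemon option)
#     parser.leftovers    # ['--no-color'] (passed through to celeryd)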
class detached_celeryd(object):
option_list = OPTION_LIST
usage = "%prog [options] [celeryd options]"
version = __version__
description = ("Detaches Celery worker nodes. See `celeryd --help` "
"for the list of supported worker arguments.")
command = sys.executable
execv_path = sys.executable
execv_argv = ["-m",
"celery.bin.celeryd"]
def Parser(self, prog_name):
return PartialOptionParser(prog=prog_name,
option_list=self.option_list,
usage=self.usage,
description=self.description,
version=self.version)
def parse_options(self, prog_name, argv):
parser = self.Parser(prog_name)
options, values = parser.parse_args(argv)
if options.logfile:
parser.leftovers.append("--logfile=%s" % (options.logfile, ))
if options.pidfile:
parser.leftovers.append("--pidfile=%s" % (options.pidfile, ))
return options, values, parser.leftovers
def execute_from_commandline(self, argv=None):
if argv is None:
argv = sys.argv
config = []
seen_cargs = 0
for arg in argv:
if seen_cargs:
config.append(arg)
else:
if arg == "--":
seen_cargs = 1
config.append(arg)
prog_name = os.path.basename(argv[0])
options, values, leftovers = self.parse_options(prog_name, argv[1:])
sys.exit(detach(path=self.execv_path,
argv=self.execv_argv + leftovers + config,
**vars(options)))
def main():
detached_celeryd().execute_from_commandline()
if __name__ == "__main__": # pragma: no cover
main()
|
openstack/zaqar
|
zaqar/storage/sqlalchemy/tables.py
|
Python
|
apache-2.0
| 2,182
| 0
|
# Copyright (c) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import sqlalchemy as sa
metadata = sa.MetaData()
Queues = sa.Table('Queues', metadata,
sa.Column('id', sa.INTEGER, primary_key=True),
sa.Column('project', sa.String(64)),
sa.Column('name', sa.String(64)),
sa.Column('metadata', sa.LargeBinary),
sa.UniqueConstraint('project', 'name'),
)
Pools = sa.Table('Pools', metadata,
sa.Column('name', sa.String(64), primary_key=True),
sa.Column('uri', sa.String(255),
unique=True, nullable=False),
sa.Column('weight', sa.INTEGER, nullable=False),
sa.Column('options', sa.Text()),
sa.Column('flavor', sa.String(64), nullable=True))
# NOTE(gengchc2): Modify pool_group define: turn NOT NULL into DEFAULT NULL:
# [alter table Flavors change column pool_group pool_group varchar(64)
# default null;]
Flavors = sa.Table('Flavors', metadata,
sa.Column('name', sa.String(64), primary_key=True),
sa.Column('project', sa.String(64)),
sa.Column('capabilities', sa.Text()))
Catalogue = sa.Table('Catalogue', metadata,
sa.Column('pool', sa.String(64),
sa.ForeignKey('Pools.name',
ondelete='CASCADE')),
sa.Column('project', sa.String(64)),
sa.Column('queue', sa.String(64), nullable=False),
sa.UniqueConstraint('project', 'queue'))
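# Hedged smoke test (not part of zaqar): the schema above can be exercised
# against an in-memory SQLite engine, using classic pre-1.4 SQLAlchemy idioms
# to match the definitions above.
#
#     engine = sa.create_engine('sqlite://')
#     metadata.create_all(engine)
#     with engine.begin() as conn:
#         conn.execute(Queues.insert().values(project='demo', name='q1'))
#         print(list(conn.execute(Queues.select())))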
|
mrquim/mrquimrepo
|
script.video.F4mProxy/lib/flvlib/astypes.py
|
Python
|
gpl-2.0
| 8,332
| 0.00228
|
import os
import calendar
import datetime
import logging
from primitives import *
from constants import *
from helpers import OrderedAttrDict, utc
"""
The AS types and their FLV representations.
"""
log = logging.getLogger('flvlib.astypes')
class MalformedFLV(Exception):
pass
# Number
def get_number(f, max_offset=None):
return get_double(f)
def make_number(num):
return make_double(num)
# Boolean
def get_boolean(f, max_offset=None):
value = get_ui8(f)
return bool(value)
def make_boolean(value):
return make_ui8((value and 1) or 0)
# String
def get_string(f, max_offset=None):
# First 16 bits are the string's length
length = get_ui16(f)
# Then comes the string itself
ret = f.read(length)
return ret
def make_string(string):
if isinstance(string, unicode):
# We need a blob, not unicode. Arbitrarily choose UTF-8
string = string.encode('UTF-8')
length = make_ui16(len(string))
return length + string
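# Hedged round-trip sketch (Python 2, matching this module): a string is
# serialized as a big-endian UI16 byte length followed by the raw bytes.
#
#     >>> from StringIO import StringIO
#     >>> make_string('spam')
#     '\x00\x04spam'
#     >>> get_string(StringIO(make_string('spam')))
#     'spam'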
# Longstring
def get_longstring(f, max_offset=None):
# First 32 bits are the string's length
length = get_ui32(f)
# Then comes the string itself
ret = f.read(length)
return ret
def make_longstring(string):
if isinstance(string, unicode):
# We need a blob, not unicode. Arbitrarily choose UTF-8
string = string.encode('UTF-8')
length = make_ui32(len(string))
return length + string
# ECMA Array
class ECMAArray(OrderedAttrDict):
pass
def get_ecma_array(f, max_offset=None):
length = get_ui32(f)
log.debug("The ECMA array has approximately %d elements", length)
array = ECMAArray()
while True:
if max_offset and (f.tell() == max_offset):
log.debug("Prematurely terminating reading an ECMA array")
break
marker = get_ui24(f)
if marker == 9:
log.debug("Marker!")
break
else:
f.seek(-3, os.SEEK_CUR)
name, value = get_script_data_variable(f, max_offset=max_offset)
array[name] = value
return array
def make_ecma_array(d):
length = make_ui32(len(d))
rest = ''.join([make_script_data_variable(name, value)
for name, value in d.iteritems()])
marker = make_ui24(9)
return length + rest + marker
# Strict Array
def get_strict_array(f, max_offset=None):
length = get_ui32(f)
log.debug("The length is %d", length)
elements = [get_script_data_value(f, max_offset=max_offset)
for _ in xrange(length)]
return elements
def make_strict_array(l):
ret = make_ui32(len(l))
rest = ''.join([make_script_data_value(value) for value in l])
return ret + rest
# Date
def get_date(f, max_offset=None):
timestamp = get_number(f) / 1000.0
# From the following document:
# http://opensource.adobe.com/wiki/download/
# attachments/1114283/amf0_spec_121207.pdf
#
# Section 2.13 Date Type
#
# (...) While the design of this type reserves room for time zone offset
# information, it should not be filled in, nor used (...)
_ignored = get_si16(f)
return datetime.datetime.fromtimestamp(timestamp, utc)
def make_date(date):
if date.tzinfo:
utc_date = date.astimezone(utc)
else:
# assume it's UTC
utc_date = date.replace(tzinfo=utc)
ret = make_number(calendar.timegm(utc_date.timetuple()) * 1000)
offset = 0
    return ret + make_si16(offset)
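# Hedged round-trip sketch: a date travels as milliseconds since the epoch
# (a Number) plus an SI16 time-zone offset that is written as 0 and ignored.
#
#     >>> from StringIO import StringIO
#     >>> d = datetime.datetime(2012, 6, 27, tzinfo=utc)
#     >>> get_date(StringIO(make_date(d))) == d
#     True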
# Null
def get_null(f, max_offset=None):
return None
def make_null(none):
return ''
# Object
class FLVObject(OrderedAttrDict):
pass
def get_object(f, max_offset=None):
ret = FLVObject()
while True:
if max_offset and (f.tell() == max_offset):
log.debug("Prematurely terminating reading an object")
break
marker = get_ui24(f)
if marker == 9:
log.debug("Marker!")
break
else:
f.seek(-3, os.SEEK_CUR)
name, value = get_script_data_variable(f)
setattr(ret, name, value)
return ret
def make_object(obj):
# If the object is iterable, serialize keys/values. If not, fall
# back on iterating over __dict__.
# This makes sure that make_object(get_object(StringIO(blob))) == blob
try:
iterator = obj.iteritems()
except AttributeError:
iterator = obj.__dict__.iteritems()
ret = ''.join([make_script_data_variable(name, value)
for name, value in iterator])
marker = make_ui24(9)
return ret + marker
# MovieClip
class MovieClip(object):
def __init__(self, path):
self.path = path
def __eq__(self, other):
return isinstance(other, MovieClip) and self.path == other.path
def __repr__(self):
return "<MovieClip at %s>" % self.path
def get_movieclip(f, max_offset=None):
ret = get_string(f)
return MovieClip(ret)
def make_movieclip(clip):
return make_string(clip.path)
# Undefined
class Undefined(object):
def __eq__(self, other):
return isinstance(other, Undefined)
def __repr__(self):
return '<Undefined>'
def get_undefined(f, max_offset=None):
return Undefined()
def make_undefined(undefined):
return ''
# Reference
class Reference(object):
def __init__(self, ref):
self.ref = ref
def __eq__(self, other):
return isinstance(other, Reference) and self.ref == other.ref
def __repr__(self):
return "<Reference to %d>" % self.ref
def get_reference(f, max_offset=None):
ret = get_ui16(f)
return Reference(ret)
def make_reference(reference):
return make_ui16(reference.ref)
as_type_to_getter_and_maker = {
VALUE_TYPE_NUMBER: (get_number, make_number),
VALUE_TYPE_BOOLEAN: (get_boolean, make_boolean),
VALUE_TYPE_STRING: (get_string, make_string),
VALUE_TYPE_OBJECT: (get_object, make_object),
VALUE_TYPE_MOVIECLIP: (get_movieclip, make_movieclip),
VALUE_TYPE_NULL: (get_null, make_null),
VALUE_TYPE_UNDEFINED: (get_undefined, make_undefined),
VALUE_TYPE_REFERENCE: (get_reference, make_reference),
VALUE_TYPE_ECMA_ARRAY: (get_ecma_array, make_ecma_array),
VALUE_TYPE_STRICT_ARRAY: (get_strict_array, make_strict_array),
VALUE_TYPE_DATE: (get_date, make_date),
VALUE_TYPE_LONGSTRING: (get_longstring, make_longstring)
}
type_to_as_type = {
bool: VALUE_TYPE_BOOLEAN,
int: VALUE_TYPE_NUMBER,
long: VALUE_TYPE_NUMBER,
float: VALUE_TYPE_NUMBER,
# WARNING: not supporting Longstrings here.
    # With a max length of 65535 chars, no one will notice.
str: VALUE_TYPE_STRING,
unicode: VALUE_TYPE_STRING,
list: VALUE_TYPE_STRICT_ARRAY,
dict: VALUE_TYPE_ECMA_ARRAY,
ECMAArray: VALUE_TYPE_ECMA_ARRAY,
datetime.datetime: VALUE_TYPE_DATE,
Undefined: VALUE_TYPE_UNDEFINED,
MovieClip: VALUE_TYPE_MOVIECLIP,
Reference: VALUE_TYPE_REFERENCE,
type(None): VALUE_TYPE_NULL
}
# SCRIPTDATAVARIABLE
def get_script_data_variable(f, max_offset=None):
name = get_string(f)
log.debug("The name is %s", name)
value = get_script_data_value(f, max_offset=max_offset)
log.debug("The value is %r", value)
return (name, value)
def make_script_data_variable(name, value):
log.debug("The name is %s", name)
log.debug("The value is %r", value)
ret = make_string(name) + make_script_data_value(value)
return ret
# SCRIPTDATAVALUE
def get_script_data_value(f, max_offset=None):
value_type = get_ui8(f)
log.debug("The value type is %r", value_type)
try:
get_value = as_type_to_getter_and_maker[value_type][0]
except KeyError:
        raise MalformedFLV("Invalid script data value type: %d" % value_type)
log.debug("The getter function is %r", get_value)
value = get_value(f, max_offset=max_offset)
return value
def make_script_data_value(value):
value_type = type_to_as_type.get(value.__class__, VALUE_TYPE_OBJECT)
log.debug("The value type is %r", value_type)
# KeyError can't happen here, because we always fall back on
# VALUE_TYPE_OBJECT when determining value_type
    make_value = as_type_to_getter_and_maker[value_type][1]
|
dgasmith/psi4
|
psi4/driver/procrouting/response/scf_response.py
|
Python
|
lgpl-3.0
| 28,511
| 0.002876
|
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2019 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
from typing import Union, List
try:
from dataclasses import dataclass
except ImportError:
from pydantic.dataclasses import dataclass
import numpy as np
from psi4 import core
from psi4.driver import constants
from psi4.driver.p4util import solvers
from psi4.driver.p4util.exceptions import *
from psi4.driver.procrouting.response.scf_products import (TDRSCFEngine, TDUSCFEngine)
dipole = {
'name': 'Dipole polarizabilities',
'printout_labels': ['X', 'Y', 'Z'],
'mints_function': core.MintsHelper.ao_dipole,
'vector names': ['AO Mux', 'AO Muy', 'AO Muz']
}
quadrupole = {
'name': 'Quadrupole polarizabilities',
'printout_labels': ['XX', 'XY', 'XZ', 'YY', 'YZ', 'ZZ'],
'mints_function': core.MintsHelper.ao_quadrupole,
}
quadrupole['vector names'] = ["AO Quadrupole " + x for x in quadrupole["printout_labels"]]
traceless_quadrupole = {
'name': 'Traceless quadrupole polarizabilities',
'printout_labels': ['XX', 'XY', 'XZ', 'YY', 'YZ', 'ZZ'],
'mints_function': core.MintsHelper.ao_traceless_quadrupole,
}
traceless_quadrupole['vector names'] = [
"AO Traceless Quadrupole " + x for x in traceless_quadrupole["printout_labels"]
]
property_dicts = {
'DIPOLE_POLARIZABILITIES': dipole,
'QUADRUPOLE_POLARIZABILITIES': quadrupole,
'TRACELESS_QUADRUPOLE_POLARIZABILITIES': traceless_quadrupole
}
def cpscf_linear_response(wfn, *args, **kwargs):
"""
Compute the static properties from a reference wavefunction. The currently implemented properties are
- dipole polarizability
- quadrupole polarizability
Parameters
----------
wfn : psi4 wavefunction
The reference wavefunction.
args : list
        The list of arguments. For each argument, such as ``dipole polarizability``, the corresponding
        response is returned. The user may also choose to pass a list or tuple of custom vectors.
kwargs : dict
Options that control how the response is computed. The following options are supported (with default values):
- ``conv_tol``: 1e-5
- ``max_iter``: 10
- ``print_lvl``: 2
Returns
-------
responses : list
The list of responses.
"""
mints = core.MintsHelper(wfn.basisset())
# list of dictionaries to control response calculations, count how many user-supplied vectors we have
complete_dict = []
n_user = 0
for arg in args:
# for each string keyword, append the appropriate dictionary (vide supra) to our list
if isinstance(arg, str):
ret = property_dicts.get(arg)
if ret:
complete_dict.append(ret)
else:
raise ValidationError('Do not understand {}. Abort.'.format(arg))
# the user passed a list of vectors. absorb them into a dictionary
elif isinstance(arg, tuple) or isinstance(arg, list):
complete_dict.append({
'name': 'User Vectors',
'length': len(arg),
'vectors': arg,
'vector names': ['User Vector {}_{}'.format(n_user, i) for i in range(len(arg))]
})
n_user += len(arg)
# single vector passed. stored in a dictionary as a list of length 1 (can be handled as the case above that way)
# note: the length is set to '0' to designate that it was not really passed as a list
else:
complete_dict.append({
'name': 'User Vector',
'length': 0,
'vectors': [arg],
'vector names': ['User Vector {}'.format(n_user)]
})
n_user += 1
# vectors will be passed to the cphf solver, vector_names stores the corresponding names
vectors = []
vector_names = []
# construct the list of vectors. for the keywords, fetch the appropriate tensors from MintsHelper
for prop in complete_dict:
if 'User' in prop['name']:
for name, vec in zip(prop['vector names'], prop['vectors']):
vectors.append(vec)
vector_names.append(name)
else:
tmp_vectors = prop['mints_function'](mints)
for tmp in tmp_vectors:
tmp.scale(-2.0) # RHF only
vectors.append(tmp)
vector_names.append(tmp.name)
# do we have any vectors to work with?
if len(vectors) == 0:
raise ValidationError('I have no vectors to work with. Aborting.')
# print information on module, vectors that will be used
_print_header(complete_dict, n_user)
# fetch wavefunction information
nmo = wfn.nmo()
ndocc = wfn.nalpha()
nvirt = nmo - ndocc
c_occ = wfn.Ca_subset("AO", "OCC")
c_vir = wfn.Ca_subset("AO", "VIR")
nbf = c_occ.shape[0]
# the vectors need to be in the MO basis. if they have the shape nbf x nbf, transform.
for i in range(len(vectors)):
shape = vectors[i].shape
if shape == (nbf, nbf):
vectors[i] = core.triplet(c_occ, vectors[i], c_vir, True, False, False)
# verify that this vector already has the correct shape
elif shape != (ndocc, nvirt):
raise ValidationError('ERROR: "{}" has an unrecognized shape ({}, {}). Must be either ({}, {}) or ({}, {})'.format(
vector_names[i], shape[0], shape[1], nbf, nbf, ndocc, nvirt))
# compute response vectors for each input vector
params = [kwargs.pop("conv_tol", 1.e-5), kwargs.pop("max_iter", 10), kwargs.pop("print_lvl", 2)]
responses = wfn.cphf_solve(vectors, *params)
# zip vectors, responses for easy access
vectors = {k: v for k, v in zip(vector_names, vectors)}
responses = {k: v for k, v in zip(vector_names, responses)}
# compute response values, format output
output = []
for prop in complete_dict:
# try to replicate the data structure of the input
if 'User' in prop['name']:
if prop['length'] == 0:
output.append(responses[prop['vector names'][0]])
else:
buf = []
for name in prop['vector names']:
buf.append(responses[name])
output.append(buf)
else:
names = prop['vector names']
dim = len(names)
buf = np.zeros((dim, dim))
for i, i_name in enumerate(names):
for j, j_name in enumerate(names):
buf[i, j] = -1.0 * vectors[i_name].vector_dot(responses[j_name])
output.append(buf)
_print_output(complete_dict, output)
return output
def _print_header(complete_dict, n_user):
core.print_out('\n\n ---------------------------------------------------------\n'
' {:^57}\n'.format('CPSCF Linear Response Solver') +
' {:^57}\n'.format('by Marvin Lechner and Daniel G. A. Smith') +
' ---------------------------------------------------------\n')
core.print_out('\n ==> Requested Responses <==\n\n')
for prop in complete_dict:
if 'User' not in prop['name']:
core.print_out(' {}\n'.format(prop['name']))
if n_user != 0:
        core.print_out('    {} user-supplied vector(s)\n'.format(n_user))
|
jeremiedecock/snippets
|
python/doctest/numpy_example.py
|
Python
|
mit
| 2,806
| 0.000713
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This is a doctest example with Numpy arrays.
For more information about doctest, see
https://docs.python.org/3/library/doctest.html (reference)
and
www.fil.univ-lille1.fr/~L1S2API/CoursTP/tp_doctest.html (nice examples in
French).
To run doctest, execute this script (thanks to the
`if __name__ == "__main__": import doctest ; doctest.testmod()` directives)
or execute the following command in a terminal::
python3 -m doctest datapipe/io/images.py
"""
import numpy as np
def example1():
"""A very basic doctest example.
Notes
-----
The numpy module is imported at the end of this file, in the test::
if __name__ == "__main__":
import doctest
import numpy
doctest.testmod()
Examples
--------
>>> numpy.array([1, 2, 3])
array([1, 2, 3])
"""
pass
def example2():
"""A very basic doctest example to test values returned by this function.
Examples
--------
>>> example2()
array([1, 2, 3])
"""
return numpy.array([1, 2, 3])
def example3(a):
"""A very basic example.
Examples
--------
>>> a = numpy.array([3, 1, 2])
>>> example3(a)
>>> a
array([1, 2, 3])
"""
a.sort()
def example4(a):
"""Replace *in-plac
|
e* `NaN` values in `a` by zeros.
Replace `NaN` ("Not a Number") values in `a` by zeros.
Parameters
----------
image : array_like
The image to process. `NaN` values are replaced **in-place** thus this
function changes the provided object.
Returns
-------
array_like
Returns a boolean mask array indicating whether values in `a`
        initially contained `NaN` values (`True`) or not (`False`). This array
is defined by the instruction `np.isnan(a)`.
Notes
-----
`NaN` values are replaced **in-place** in the provided `a`
parameter.
Examples
--------
>>> a = numpy.array([1., 2., numpy.nan])
>>> a
array([ 1., 2., nan])
>>> example4(a)
array([False, False, True], dtype=bool)
Be careful with white space! The following will work...
>>> a
array([ 1., 2., 0.])
    but this one wouldn't
# >>> a
# array([ 1., 2., 0.])
As an alternative, the `doctest: +NORMALIZE_WHITESPACE` can be used (see
https://docs.python.org/3/library/doctest.html#doctest.NORMALIZE_WHITESPACE
and http://www.fil.univ-lille1.fr/~L1S2API/CoursTP/tp_doctest.html)
>>> a
... # doctest: +NORMALIZE_WHITESPACE
array([ 1., 2., 0.])
but the space before the '1' is still required...
"""
nan_mask = np.isnan(a)
a[nan_mask] = 0
return nan_mask
if __name__ == "__main__":
import doctest
import numpy
doctest.testmod()
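# The same tests can be run from a shell with the standard doctest CLI:
#   python3 -m doctest numpy_example.py -v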
|
Ginray/my-flask-blog
|
tests/test_user_model.py
|
Python
|
mit
| 5,437
| 0.000368
|
import unittest
import time
from datetime import datetime
from app import create_app, db
from app.models import User, AnonymousUser, Role, Permission
class UserModelTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
Role.insert_roles()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_password_setter(self):
u = User(password='cat')
self.assertTrue(u.password_hash is not None)
def test_no_password_getter(self):
u = User(password='cat')
with self.assertRaises(AttributeError):
u.password
def test_password_verification(self):
u = User(password='cat')
self.assertTrue(u.verify_password('cat'))
self.assertFalse(u.verify_password('dog'))
def test_password_salts_are_random(self):
u = User(password='cat')
u2 = User(password='cat')
self.assertTrue(u.password_hash != u2.password_hash)
def test_valid_confirmation_token(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_confirmation_token()
self.assertTrue(u.confirm(token))
def test_invalid_confirmation_token(self):
u1 = User(password='cat')
u2 = User(password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_confirmation_token()
self.assertFalse(u2.confirm(token))
def test_expired_confirmation_token(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_confirmation_token(1)
time.sleep(2)
self.assertFalse(u.confirm(token))
def test_valid_reset_token(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_reset_token()
self.assertTrue(u.reset_password(token, 'dog'))
self.assertTrue(u.verify_password('dog'))
def test_invalid_reset_token(self):
u1 = User(password='cat')
u2 = User(password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_reset_token()
self.assertFalse(u2.reset_password(token, 'horse'))
self.assertTrue(u2.verify_password('dog'))
def test_valid_email_change_token(self):
u = User(email='[email protected]', password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_email_change_token('[email protected]')
self.assertTrue(u.change_email(token))
self.assertTrue(u.email == '[email protected]')
def test_invalid_email_change_token(self):
u1 = User(email='[email protected]', password='cat')
u2 = User(email='[email protected]', password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_email_change_token('[email protected]')
self.assertFalse(u2.change_email(token))
self.assertTrue(u2.email == '[email protected]')
def test_duplicate_email_change_token(self):
u1 = User(email='[email protected]', password='cat')
u2 = User(email='[email protected]', password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u2.generate_email_change_token('[email protected]')
        self.assertFalse(u2.change_email(token))
self.assertTrue(u2.email == '[email protected]')
def test_roles_and_permissions(self):
        u = User(email='[email protected]', password='cat')
self.assertTrue(u.can(Permission.WRITE_ARTICLES))
self.assertFalse(u.can(Permission.MODERATE_COMMENTS))
def test_anonymous_user(self):
u = AnonymousUser()
self.assertFalse(u.can(Permission.FOLLOW))
def test_timestamps(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
self.assertTrue(
(datetime.utcnow() - u.member_since).total_seconds() < 3)
self.assertTrue(
(datetime.utcnow() - u.last_seen).total_seconds() < 3)
def test_ping(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
time.sleep(2)
last_seen_before = u.last_seen
u.ping()
self.assertTrue(u.last_seen > last_seen_before)
def test_gravatar(self):
u = User(email='[email protected]', password='cat')
with self.app.test_request_context('/'):
gravatar = u.gravatar()
gravatar_256 = u.gravatar(size=256)
gravatar_pg = u.gravatar(rating='pg')
gravatar_retro = u.gravatar(default='retro')
with self.app.test_request_context('/', base_url='https://example.com'):
gravatar_ssl = u.gravatar()
self.assertTrue('http://www.gravatar.com/avatar/' +
'd4c74594d841139328695756648b6bd6'in gravatar)
self.assertTrue('s=256' in gravatar_256)
self.assertTrue('r=pg' in gravatar_pg)
self.assertTrue('d=retro' in gravatar_retro)
self.assertTrue('https://secure.gravatar.com/avatar/' +
'd4c74594d841139328695756648b6bd6' in gravatar_ssl)
|
bodokaiser/piwise
|
main.py
|
Python
|
bsd-3-clause
| 5,437
| 0.001839
|
import numpy as np
import torch
from PIL import Image
from argparse import ArgumentParser
from torch.optim import SGD, Adam
from torch.autograd import Variable
from torch.utils.data import DataLoader
from torchvision.transforms import Compose, CenterCrop, Normalize
from torchvision.transforms import ToTensor, ToPILImage
from piwise.dataset import VOC12
from piwise.network import FCN8, FCN16, FCN32, UNet, PSPNet, SegNet
from piwise.criterion import CrossEntropyLoss2d
from piwise.transform import Relabel, ToLabel, Colorize
from piwise.visualize import Dashboard
NUM_CHANNELS = 3
NUM_CLASSES = 22
color_transform = Colorize()
image_transform = ToPILImage()
input_transform = Compose([
CenterCrop(256),
ToTensor(),
Normalize([.485, .456, .406], [.229, .224, .225]),
])
target_transform = Compose([
CenterCrop(256),
ToLabel(),
Relabel(255, 21),
])
def train(args, model):
model.train()
weight = torch.ones(22)
weight[0] = 0
loader = DataLoader(VOC12(args.datadir, input_transform, target_transform),
num_workers=args.num_workers, batch_size=args.batch_size, shuffle=True)
if args.cuda:
criterion = CrossEntropyLoss2d(weight.cuda())
else:
criterion = CrossEntropyLoss2d(weight)
optimizer = Adam(model.parameters())
if args.model.startswith('FCN'):
optimizer = SGD(model.parameters(), 1e-4, .9, 2e-5)
if args.model.startswith('PSP'):
optimizer = SGD(model.parameters(), 1e-2, .9, 1e-4)
if args.model.startswith('Seg'):
optimizer = SGD(model.parameters(), 1e-3, .9)
if args.steps_plot > 0:
board = Dashboard(args.port)
for epoch in range(1, args.num_epochs+1):
epoch_loss = []
for step, (images, labels) in enumerate(loader):
if args.cuda:
images = images.cuda()
labels = labels.cuda()
inputs = Variable(images)
targets = Variable(labels)
outputs = model(inputs)
optimizer.zero_grad()
loss = criterion(outputs, targets[:, 0])
loss.backward()
optimizer.step()
epoch_loss.append(loss.data[0])
if args.steps_plot > 0 and step % args.steps_plot == 0:
image = inputs[0].cpu().data
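                # undo the input Normalize (x * std + mean per channel) so
                # the image renders with natural colors on the dashboard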
                image[0] = image[0] * .229 + .485
                image[1] = image[1] * .224 + .456
image[2] = image[2] * .225 + .406
board.image(image,
f'input (epoch: {epoch}, step: {step})')
board.image(color_transform(outputs[0].cpu().max(0)[1].data),
f'output (epoch: {epoch}, step: {step})')
board.image(color_transform(targets[0].cpu().data),
f'target (epoch: {epoch}, step: {step})')
if args.steps_loss > 0 and step % args.steps_loss == 0:
average = sum(epoch_loss) / len(epoch_loss)
print(f'loss: {average} (epoch: {epoch}, step: {step})')
if args.steps_save > 0 and step % args.steps_save == 0:
filename = f'{args.model}-{epoch:03}-{step:04}.pth'
torch.save(model.state_dict(), filename)
print(f'save: {filename} (epoch: {epoch}, step: {step})')
def evaluate(args, model):
model.eval()
image = input_transform(Image.open(args.image))
label = model(Variable(image, volatile=True).unsqueeze(0))
label = color_transform(label[0].data.max(0)[1])
image_transform(label).save(args.label)
def main(args):
Net = None
if args.model == 'fcn8':
Net = FCN8
if args.model == 'fcn16':
Net = FCN16
    if args.model == 'fcn32':
        Net = FCN32
if args.model == 'unet':
Net = UNet
if args.model == 'pspnet':
Net = PSPNet
if args.model == 'segnet':
Net = SegNet
assert Net is not None, f'model {args.model} not available'
model = Net(NUM_CLASSES)
if args.cuda:
model = model.cuda()
if args.state:
try:
model.load_state_dict(torch.load(args.state))
except AssertionError:
model.load_state_dict(torch.load(args.state,
map_location=lambda storage, loc: storage))
if args.mode == 'eval':
evaluate(args, model)
if args.mode == 'train':
train(args, model)
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('--cuda', action='store_true')
parser.add_argument('--model', required=True)
parser.add_argument('--state')
subparsers = parser.add_subparsers(dest='mode')
subparsers.required = True
parser_eval = subparsers.add_parser('eval')
parser_eval.add_argument('image')
parser_eval.add_argument('label')
parser_train = subparsers.add_parser('train')
parser_train.add_argument('--port', type=int, default=80)
parser_train.add_argument('--datadir', required=True)
parser_train.add_argument('--num-epochs', type=int, default=32)
parser_train.add_argument('--num-workers', type=int, default=4)
parser_train.add_argument('--batch-size', type=int, default=1)
parser_train.add_argument('--steps-loss', type=int, default=50)
parser_train.add_argument('--steps-plot', type=int, default=0)
parser_train.add_argument('--steps-save', type=int, default=500)
main(parser.parse_args())
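# Example invocations (hedged; file names are placeholders, flags as defined
# above):
#   python main.py --model unet train --datadir VOC2012 --num-epochs 32
#   python main.py --model unet --state unet-001-0500.pth eval in.jpg out.png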
|
rickhenderson/code-samples
|
python-blender/read_hadamard_file.py
|
Python
|
gpl-3.0
| 1,503
| 0.007984
|
# read_hadamard_file.py
# Reads data from a text file to create a 3D
# version of a given Hadamard Matrix.
# Created by Rick Henderson
# Created on June 4, 2015
# Completed June 5, 2015
# Note: A "Hadamard File" is a text file containing rows
# rows of + and - where the + indicates a 1 or a cube
# and the - represents a 0 or a space.
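# For example, an order-4 file would contain one valid H4:
#   ++++
#   +-+-
#   ++--
#   +--+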
import bpy
# Set the order (size) of the matrix
nOrder = 12
# You can also change these values if you want to alter the offset between the cubes
xOffset = 1.0
yOffset = 1.0
zOffset = 0 # You would have to alter the code more if you want a 3D array of cubes
xpos = 0
ypos = 0
char_number = 0
# Open the file to read from
# Modified technique from DiveIntoPython3.net/files.html
line_number = 0
with open('c:/had12.txt', encoding='utf-8') as a_file:
for each_row in a_file:
line_number += 1
# Just print the current row to the console as a test
print(each_row.rstrip())
for a_char in each_row:
char_number += 1
# If the current character is +, generate a cube then position it
if a_char == '+':
bpy.ops.mesh.primitive_cube_add(radius=0.5)
bpy.context.object.location[0] = line_number * xOffset
bpy.context.object.location[1] = char_number * yOffset
        # Now an entire row has been read, so reset char_number to 0
char_number = 0
# Program Ends
|
nealegibson/Infer
|
setup.py
|
Python
|
gpl-3.0
| 518
| 0.027027
|
from numpy.distutils.core import setup, Extension
#from setuptools import setup, Extension
setup(
name = "Infer", version = "1.0",
    description='Python version of MCMC, plus other inference codes under development',
author='Neale Gibson',
author_email='[email protected]',
packages=['Infer'],
package_dir={'Infer':'src'},
    #and extension package for solving toeplitz matrices...
ext_modules = [
Extension("Infer.LevinsonTrenchZoharSolve",sources=["src/LevinsonTrenchZoharSolve.c"],),
]
)
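# Typical build/install (hedged; needs numpy and a C compiler for the
# LevinsonTrenchZoharSolve extension):
#   python setup.py build_ext --inplace
#   python setup.py install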
|
eicher31/compassion-modules
|
partner_communication/models/__init__.py
|
Python
|
agpl-3.0
| 706
| 0.001416
|
# -*- coding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
#    @author: Emanuel Cino <[email protected]>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from . import communication_config
from . import communication_job
from . import communication_attachment
from . import res_partner
from . import email
from . import crm_phonecall
from . import ir_attachment
from . import mail_template
from . import communication_dashboard
from . import report_with_omr
|
jordanemedlock/psychtruths
|
temboo/core/Library/YouTube/Playlists/ListPlaylistsByID.py
|
Python
|
apache-2.0
| 6,403
| 0.00531
|
# -*- coding: utf-8 -*-
###############################################################################
#
# ListPlaylistsByID
# Returns a collection of playlists that match the provided IDs.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class ListPlaylistsByID(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the ListPlaylistsByID Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(ListPlaylistsByID, self).__init__(temboo_session, '/Library/YouTube/Playlists/ListPlaylistsByID')
def new_input_set(self):
return ListPlaylistsByIDInputSet()
def _make_result_set(self, result, path):
return ListPlaylistsByIDResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return ListPlaylistsByIDChoreographyExecution(session, exec_id, path)
class ListPlaylistsByIDInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the ListPlaylistsByID
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_APIKey(self, value):
"""
Set the value of the APIKey input for this Choreo. ((optional, string) The API Key provided by Google for simple API access when you do not need to access user data.)
"""
super(ListPlaylistsByIDInputSet, self)._set_input('APIKey', value)
def set_AccessToken(self, value):
"""
Set the value of the AccessToken input for this Choreo. ((optional, string) A valid access token retrieved during the OAuth process. This is required for OAuth authentication unless you provide the ClientID, ClientSecret, and RefreshToken to generate a new access token.)
"""
super(ListPlaylistsByIDInputSet, self)._set_input('AccessToken', value)
def set_ClientID(self, value):
"""
Set the value of the ClientID input for this Choreo. ((conditional, string) The Client ID provided by Google. Required for OAuth authentication unless providing a valid AccessToken.)
"""
super(ListPlaylistsByIDInputSet, self)._set_input('ClientID', value)
def set_ClientSecret(self, value):
"""
Set the value of the ClientSecret input for this Choreo. ((conditional, string) The Client Secret provided by Google. Required for OAuth authentication unless providing a valid AccessToken.)
"""
        super(ListPlaylistsByIDInputSet, self)._set_input('ClientSecret', value)
    def set_Fields(self, value):
"""
Set the value of the Fields input for this Choreo. ((optional, string) Allows you to specify a subset of fields to include in the response using an xpath-like syntax (i.e. items/snippet/title).)
"""
super(ListPlaylistsByIDInputSet, self)._set_input('Fields', value)
def set_MaxResults(self, value):
"""
Set the value of the MaxResults input for this Choreo. ((optional, integer) The maximum number of results to return.)
"""
super(ListPlaylistsByIDInputSet, self)._set_input('MaxResults', value)
def set_PageToken(self, value):
"""
Set the value of the PageToken input for this Choreo. ((optional, string) The "nextPageToken" found in the response which is used to page through results.)
"""
super(ListPlaylistsByIDInputSet, self)._set_input('PageToken', value)
def set_Part(self, value):
"""
Set the value of the Part input for this Choreo. ((optional, string) Specifies a comma-separated list of playlist resource properties that the API response will include. Part names that you can pass are: id, snippet, and status.)
"""
super(ListPlaylistsByIDInputSet, self)._set_input('Part', value)
def set_PlaylistID(self, value):
"""
Set the value of the PlaylistID input for this Choreo. ((required, string) A comma-separated list of the YouTube playlist ID(s) for the resource(s) that are being retrieved.)
"""
super(ListPlaylistsByIDInputSet, self)._set_input('PlaylistID', value)
def set_RefreshToken(self, value):
"""
Set the value of the RefreshToken input for this Choreo. ((conditional, string) An OAuth refresh token used to generate a new access token when the original token is expired. Required for OAuth authentication unless providing a valid AccessToken.)
"""
super(ListPlaylistsByIDInputSet, self)._set_input('RefreshToken', value)
class ListPlaylistsByIDResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the ListPlaylistsByID Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from YouTube.)
"""
return self._output.get('Response', None)
def get_NewAccessToken(self):
"""
Retrieve the value for the "NewAccessToken" output from this Choreo execution. ((string) Contains a new AccessToken when the RefreshToken is provided.)
"""
return self._output.get('NewAccessToken', None)
class ListPlaylistsByIDChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return ListPlaylistsByIDResultSet(response, path)
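# Hedged usage sketch (credentials and IDs are placeholders; assumes the
# Temboo SDK's execute_with_results entry point):
#
#     choreo = ListPlaylistsByID(temboo_session)
#     inputs = choreo.new_input_set()
#     inputs.set_APIKey('YOUR_GOOGLE_API_KEY')
#     inputs.set_PlaylistID('PLxxxxxxxxxxxxxxxx')
#     results = choreo.execute_with_results(inputs)
#     print(results.get_Response())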
|
scionrep/scioncc
|
src/scripts/manhole.py
|
Python
|
bsd-2-clause
| 3,942
| 0.003298
|
#!/usr/bin/env python
# "manhole" entry point, friendlier ipython startup to remote container
__author__ = 'Dave Foster <[email protected]>'
def main():
import sys, os, re, errno, json, socket
from pkg_resources import load_entry_point
    r = re.compile(r'manhole-(\d+)\.json')
if len(sys.argv) == 2:
mh_file = sys.argv[1]
else:
# find manhole file in local dir
mh_files = [f for f in os.listdir(os.getcwd()) if r.search(f) is not None]
if len(mh_files) == 0:
print >>sys.stderr, "No manhole files detected, specify it manually"
sys.exit(1)
elif len(mh_files) > 1:
def legal_manhole_file(f):
"""
Helper method to check if a process exists and is likely a manhole-able container.
                @return True/False whether this is likely an active container.
"""
mh_pid = int(r.search(f).group(1))
try:
os.getpgid(mh_pid)
except OSError as e:
if e.errno == errno.ESRCH:
return False
raise # unexpected, just re-raise
# the pid seems legal, now check status of sockets - the pid may be reused
with open(f) as ff:
mh_doc = json.load(ff)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind((mh_doc['ip'], mh_doc['shell_port']))
except socket.error as e:
if e.errno == errno.EADDRINUSE:
return True
raise # unexpected, re-raise
finally:
s.close()
return False
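            # A manhole-<pid>.json file is an IPython kernel connection file;
            # the fields used above look roughly like (values invented):
            #   {"ip": "127.0.0.1", "shell_port": 57801, ...}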
# try to see if these are active processes
legal_mh_files = filter(legal_manhole_file, mh_files)
if len(legal_mh_files) > 1:
print >>sys.stderr, "Multiple legal manhole files detected, specify it manually:", legal_mh_files
sys.exit(1)
# we found a single legal file, use it
mh_file = legal_mh_files[0]
# perform cleanup of stale files
dead_mh_files = [x for x in mh_files if x not in legal_mh_files]
for df in dead_mh_files:
print >>sys.stderr, "Cleaning up stale manhole file", df
os.unlink(df)
else:
mh_file = mh_files[0]
if not os.access(mh_file, os.R_OK):
print >>sys.stderr, "Manhole file (%s) does not exist" % mh_file
sys.exit(1)
mhpid = r.search(mh_file).group(1)
# configure branding
manhole_logo = """
__ __ _______ __ _ __ __ _______ ___ _______
| |_| || _ || | | || | | || || | | |
| || |_| || |_| || |_| || _ || | | ___|
| || || || || | | || | | |___
| || || _ || || |_| || |___ | ___|
| ||_|| || _ || | | || _ || || || |___
|_| |_||__| |__||_| |__||__| |__||_______||_______||_______|
"""
# manipulate argv!
sys.argv = [sys.argv[0], "console", "--existing", mh_file,
"--PromptManager.in_template=>o> ",
"--PromptManager.in2_template=... ",
"--PromptManager.out_template=--> ",
"--TerminalInteractiveShell.banner1=%s" % manhole_logo,
"--TerminalInteractiveShell.banner2=SciON Container Manhole, connected to %s\n(press Ctrl-D to detach, quit() to exit container)\n" % mhpid]
# HACK: Mock out client shutdown to avoid default shutdown on Ctrl-D
from mock import patch
with patch("IPython.kernel.client.KernelClient.shutdown"):
ipy_entry = load_entry_point('ipython', 'console_scripts', 'ipython')()
sys.exit(ipy_entry)
if __name__ == '__main__':
main()
|
kiddinn/plaso
|
tests/output/formatting_helper.py
|
Python
|
apache-2.0
| 12,029
| 0.001912
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the output module field formatting helper."""
import unittest
from dfdatetime import semantic_time as dfdatetime_semantic_time
from dfvfs.path import fake_path_spec
from plaso.containers import events
from plaso.lib import definitions
from plaso.output import formatting_helper
from tests.containers import test_lib as containers_test_lib
from tests.output import test_lib
class TestFieldFormattingHelper(formatting_helper.FieldFormattingHelper):
"""Field formatter helper for testing purposes."""
_FIELD_FORMAT_CALLBACKS = {'zone': '_FormatTimeZone'}
class FieldFormattingHelperTest(test_lib.OutputModuleTestCase):
"""Test the output module field formatting helper."""
# pylint: disable=protected-access
_TEST_EVENTS = [
{'data_type': 'test:event',
'filename': 'log/syslog.1',
'hostname': 'ubuntu',
'path_spec': fake_path_spec.FakePathSpec(
location='log/syslog.1'),
'text': (
'Reporter <CRON> PID: 8442 (pam_unix(cron:session): session\n '
'closed for user root)'),
'timestamp': '2012-06-27 18:17:01',
'timestamp_desc': definitions.TIME_DESCRIPTION_CHANGE}]
def testFormatDateTime(self):
"""Tests the _FormatDateTime function with dynamic time."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
date_time_string = test_helper._FormatDateTime(
event, event_data, event_data_stream)
self.assertEqual(date_time_string, '2012-06-27T18:17:01.000000+00:00')
output_mediator.SetTimezone('Europe/Amsterdam')
date_time_string = test_helper._FormatDateTime(
event, event_data, event_data_stream)
self.assertEqual(date_time_string, '2012-06-27T20:17:01.000000+02:00')
output_mediator.SetTimezone('UTC')
event.date_time = dfdatetime_semantic_time.InvalidTime()
date_time_string = test_helper._FormatDateTime(
event, event_data, event_data_stream)
self.assertEqual(date_time_string, 'Invalid')
def testFormatDateTimeWithoutDynamicTime(self):
"""Tests the _FormatDateTime function without dynamic time."""
output_mediator = self._CreateOutputMediator(dynamic_time=False)
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
# Test with event.date_time
date_time_string = test_helper._FormatDateTime(
event, event_data, event_data_stream)
self.assertEqual(date_time_string, '2012-06-27T18:17:01.000000+00:00')
output_mediator.SetTimezone('Europe/Amsterdam')
date_time_string = test_helper._FormatDateTime(
event, event_data, event_data_stream)
self.assertEqual(date_time_string, '2012-06-27T20:17:01.000000+02:00')
output_mediator.SetTimezone('UTC')
event.date_time = dfdatetime_semantic_time.InvalidTime()
date_time_string = test_helper._FormatDateTime(
event, event_data, event_data_stream)
self.assertEqual(date_time_string, '0000-00-00T00:00:00.000000+00:00')
# Test with event.timestamp
event.date_time = None
date_time_string = test_helper._FormatDateTime(
event, event_data, event_data_stream)
self.assertEqual(date_time_string, '2012-06-27T18:17:01.000000+00:00')
event.timestamp = 0
date_time_string = test_helper._FormatDateTime(
event, event_data, event_data_stream)
self.assertEqual(date_time_string, '0000-00-00T00:00:00.000000+00:00')
event.timestamp = -9223372036854775808
date_time_string = test_helper._FormatDateTime(
event, event_data, event_data_stream)
self.assertEqual(date_time_string, '0000-00-00T00:00:00.000000+00:00')
def testFormatDisplayName(self):
"""Tests the _FormatDisplayName function."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
display_name_string = test_helper._FormatDisplayName(
event, event_data, event_data_stream)
self.assertEqual(display_name_string, 'FAKE:log/syslog.1')
def testFormatFilename(self):
"""Tests the _FormatFilename function."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
        containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
filename_string = test_helper._FormatFilename(
event, event_data, event_data_stream)
self.assertEqual(filename_string, 'log/syslog.1')
def testFormatHostname(self):
"""Tests the _FormatHostname function."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
    event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
hostname_string = test_helper._FormatHostname(
event, event_data, event_data_stream)
self.assertEqual(hostname_string, 'ubuntu')
def testFormatInode(self):
"""Tests the _FormatInode function."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
inode_string = test_helper._FormatInode(
event, event_data, event_data_stream)
self.assertEqual(inode_string, '-')
def testFormatMACB(self):
"""Tests the _FormatMACB function."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
macb_string = test_helper._FormatMACB(event, event_data, event_data_stream)
self.assertEqual(macb_string, '..C.')
def testFormatMessage(self):
"""Tests the _FormatMessage function."""
output_mediator = self._CreateOutputMediator()
formatters_directory_path = self._GetTestFilePath(['formatters'])
output_mediator.ReadMessageFormattersFromDirectory(
formatters_directory_path)
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
message_string = test_helper._FormatMessage(
event, event_data, event_data_stream)
expected_message_string = (
'Reporter <CRON> PID: 8442 (pam_unix(cron:session): session closed '
'for user root)')
self.assertEqual(message_string, expected_message_string)
def testFormatMessageShort(self):
"""Tests the _FormatMessageShort function."""
output_mediator = self._CreateOutputMediator()
formatters_directory_path = self._GetTestFilePath(['formatters'])
output_mediator.ReadMessageFormattersFromDirectory(
formatters_directory_path)
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
message_short_string = test_helper._FormatMessageShort(
event, event_data, event_data_stream)
expected_message_short_string = (
'Reporter <CRON> PID: 8442 (pam_unix(cron:session): session closed '
'for user root)')
self.assertEqual(message_short_string, expected_message_short_string)
def testFormatSource(self):
"""Tests the _FormatSource function."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._
|
openstack/horizon
|
openstack_dashboard/dashboards/admin/hypervisors/compute/urls.py
|
Python
|
apache-2.0
| 1,052
| 0
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import url
from openstack_dashboard.dashboards.admin.hypervisors.compute import views
urlpatterns = [
url(r'^(?P<compute_host>[^/]+)/evacuate_host$',
views.EvacuateHostView.as_view(),
name='evacuate_host'),
url(r'^(?P<compute_host>[^/]+)/disable_service$',
views.DisableServiceView.as_view(),
name='disable_service'),
url(r'^(?P<compute_host>[^/]+)/migrate_host$',
views.MigrateHostView.as_view(),
name='migrate_host'),
]
|
CLVsol/oehealth
|
oehealth_professional/__init__.py
|
Python
|
agpl-3.0
| 1,597
| 0.010645
|
# -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify        #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
#                                                                              #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
import res_partner
import oehealth_annotation
import oehealth_professional_category
import oehealth_professional
import oehealth_tag
import oehealth_event_participant
import oehealth_specialty
|
craigcitro/pydatalab
|
google/datalab/contrib/mlworkbench/commands/_ml.py
|
Python
|
apache-2.0
| 33,262
| 0.008689
|
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Platform library - ml cell magic."""
from __future__ import absolute_import
from __future__ import unicode_literals
try:
import IPython
import IPython.core.display
import IPython.core.magic
except ImportError:
raise Exception('This module can only be loaded in ipython.')
import argparse
import json
import os
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import shutil
import six
from skimage.segmentation import mark_boundaries
import subprocess
import tempfile
import textwrap
from tensorflow.python.lib.io import file_io
import urllib
import google.datalab
import google.datalab.bigquery as bq
from google.datalab import Context
import google.datalab.ml as datalab_ml
import google.datalab.utils.commands
import google.datalab.contrib.mlworkbench._local_predict as _local_predict
import google.datalab.contrib.mlworkbench._shell_process as _shell_process
import google.datalab.contrib.mlworkbench._archive as _archive
import google.datalab.contrib.mlworkbench._prediction_explainer as _prediction_explainer
MLTOOLBOX_CODE_PATH = '/datalab/lib/pydatalab/solutionbox/code_free_ml/mltoolbox/code_free_ml/'
@IPython.core.magic.register_line_cell_magic
def ml(line, cell=None):
"""Implements the datalab cell magic for MLWorkbench operations.
Args:
line: the contents of the ml command line.
Returns:
The results of executing the cell.
"""
parser = google.datalab.utils.commands.CommandParser(
prog='%ml',
description=textwrap.dedent("""\
Execute MLWorkbench operations
Use "%ml <command> -h" for help on a specific command.
"""))
analyze_parser = parser.subcommand(
'analyze',
formatter_class=argparse.RawTextHelpFormatter,
help='Analyze training data and generate stats, such as min/max/mean '
'for numeric values, vocabulary for text columns.',
epilog=textwrap.dedent("""\
Example usage:
%%ml analyze [--cloud]
output: path/to/dir
training_data:
csv: path/to/csv
schema:
- name: serialId
type: STRING
- name: num1
type: FLOAT
- name: num2
type: INTEGER
- name: text1
type: STRING
features:
serialId:
transform: key
num1:
transform: scale
value: 1
num2:
transform: identity
text1:
transform: bag_of_words
Also supports in-notebook variables, such as:
%%ml analyze --output path/to/dir
training_data: $my_csv_dataset
features: $features_def"""))
analyze_parser.add_argument('--output', required=True,
help='path of output directory.')
analyze_parser.add_argument('--cloud', action='store_true', default=False,
help='whether to run analysis in cloud or local.')
analyze_parser.add_argument('--package', required=False,
help='A local or GCS tarball path to use as the source. '
'If not set, the default source package will be used.')
analyze_parser.add_cell_argument(
'training_data',
required=True,
help=textwrap.dedent("""\
training data. It is one of the following:
csv (example "csv: file.csv"), or
bigquery_table (example: "bigquery_table: project.dataset.table"), or
bigquery_sql (example: "bigquery_sql: select * from table where num1 > 1.0"), or
a variable defined as google.datalab.ml.CsvDataSet or
google.datalab.ml.BigQueryDataSet."""))
analyze_parser.add_cell_argument(
'features',
required=True,
help=textwrap.dedent("""\
features config indicating how to transform data into features. The
list of supported transforms:
"transform: identity"
does nothing (for numerical columns).
"transform: scale
value: x"
        scale a numerical column to [-x, x]. If value is missing, x
defaults to 1.
"transform: one_hot"
treats the string column as categorical and makes one-hot
encoding of it.
"transform: embedding
embedding_dim: d"
treats the string column as categorical and makes embeddings of
it with specified dimension size.
"transform: bag_of_words"
treats the string column as text and make bag of words
transform of it.
"transform: tfidf"
treats the string column as text and make TFIDF transform of it.
"transform: image_to_vec
checkpoint: gs://b/o"
        from image gs url to embeddings. "checkpoint" is an Inception v3
checkpoint. If absent, a default checkpoint is used.
"transform: target"
denotes the column is the target. If the schema type of this
column is string, a one_hot encoding is automatically applied.
If numerical, an identity transform is automatically applied.
"transform: key"
column contains metadata-like information and will be output
as-is in prediction."""))
analyze_parser.set_defaults(func=_analyze)
transform_parser = parser.subcommand(
'transform',
formatter_class=argparse.RawTextHelpFormatter,
help='Transform the data into tf.example which is more efficient in training.',
epilog=textwrap.dedent("""\
Example usage:
%%ml transform --cloud [--shuffle]
analysis: path/to/analysis_output_folder
output: path/to/dir
prefix: my_filename
batch_size: 100
training_data:
csv: path/to/csv
cloud:
num_workers: 3
worker_machine_type: n1-standard-1
project_id: my_project_id"""))
transform_parser.add_argument('--analysis', required=True,
help='path of analysis output directory.')
transform_parser.add_argument('--output', required=True,
help='path of output directory.')
transform_parser.add_argument(
'--prefix', required=True, metavar='NAME',
help='The prefix of the output file name. The output files will be like '
'NAME_00000_of_00005.tar.gz')
transform_parser.add_argument('--cloud', action='store_true', default=False,
help='whether to run transform in cloud or local.')
transform_parser.add_argument('--shuffle', action='store_true', default=False,
help='whether to shuffle the training data in output.')
transform_parser.add_argument('--batch_size', type=int, default=100,
help='number of instances in a batch to process once. '
'Larger batch is more efficient but may consume more memory.')
transform_parser.add_argument('--package', required=False,
help='A local or GCS tarball path to use as the source. '
'If not set, the default source package will be used.')
transform_parser.add_cell_argument(
'training_data',
requi
|
Freso/listenbrainz-server
|
listenbrainz/webserver/admin/views.py
|
Python
|
gpl-2.0
| 206
| 0
|
from flask_admin import expose
from listenbrainz.webserver.admin import AdminIndexView
class HomeView(AdminIndexView):
@expose('/')
def index(self):
        return self.render('admin/home.html')
|
erik-stephens/zabbix
|
zabbix/__init__.py
|
Python
|
mit
| 266
| 0
|
"""
A pythonic interface to the Zabbix API.
"""
from .api import Api, ApiException
from .objects.host import Host
from .objects.hostgroup import HostGroup
from .objects.item import Item
from .objects.trigger import Trigger
from .objects.itservice import ItService
|
shabinesh/Tabject
|
tabject/types.py
|
Python
|
bsd-3-clause
| 550
| 0.016364
|
from decimal import Decimal
class Integer:
def __init__(self, val=None):
self.val = int(val)
    def __repr__(self):
        return str(self.val)
class Text:
def __init__(self, val=None):
self.val = str(val)
def __repr__(self):
return self.val
class Bool:
def __init__(self, val=None):
self.val = bool(val)
    def __repr__(self):
        return str(self.val)
class Real:
def __init__(self, val=None):
self.val = Decimal(val)
    def __repr__(self):
        return str(self.val)
class Date:
pass
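# Illustrative usage (assuming the str()-returning __repr__ fixes above;
# the literal values here are hypothetical examples):
#   repr(Integer('3')) == '3'
#   repr(Bool(1)) == 'True'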
|
rutsky/aiohttp
|
setup.py
|
Python
|
apache-2.0
| 4,887
| 0
|
import codecs
import pathlib
import re
import sys
from distutils.command.build_ext import build_ext
from distutils.errors import (CCompilerError, DistutilsExecError,
DistutilsPlatformError)
from setuptools import Extension, setup
if sys.version_info < (3, 5, 3):
raise RuntimeError("aiohttp 3.x requires Python 3.5.3+")
try:
from Cython.Build import cythonize
USE_CYTHON = True
except ImportError:
USE_CYTHON = False
ext = '.pyx' if USE_CYTHON else '.c'
extensions = [Extension('aiohttp._websocket', ['aiohttp/_websocket' + ext]),
Extension('aiohttp._http_parser',
['aiohttp/_http_parser' + ext,
'vendor/http-parser/http_parser.c',
'aiohttp/_find_header.c'],
define_macros=[('HTTP_PARSER_STRICT', 0)],
),
Extension('aiohttp._frozenlist',
['aiohttp/_frozenlist' + ext]),
Extension('aiohttp._helpers',
['aiohttp/_helpers' + ext]),
Extension('aiohttp._http_writer',
['aiohttp/_http_writer' + ext])]
if USE_CYTHON:
extensions = cythonize(extensions)
class BuildFailed(Exception):
pass
class ve_build_ext(build_ext):
# This class allows C extension building to fail.
def run(self):
try:
build_ext.run(self)
except (DistutilsPlatformError, FileNotFoundError):
raise BuildFailed()
def build_extension(self, ext):
try:
build_ext.build_extension(self, ext)
except (DistutilsExecError,
DistutilsPlatformError, ValueError):
raise BuildFailed()
here = pathlib.Path(__file__).parent
txt = (here / 'aiohttp' / '__init__.py').read_text('utf-8')
try:
version = re.findall(r"^__version__ = '([^']+)'\r?$",
txt, re.M)[0]
except IndexError:
raise RuntimeError('Unable to determine version.')
install_requires = [
'attrs>=17.3.0',
'chardet>=2.0,<4.0',
'multidict>=4.0,<5.0',
'async_timeout>=3.0,<4.0',
'yarl>=1.0,<2.0',
'idna-ssl>=1.0; python_version<"3.7"',
]
def read(f):
return (here / f).read_text('utf-8').strip()
NEEDS_PYTEST = {'pytest', 'test'}.intersection(sys.argv)
pytest_runner = ['pytest-runner'] if NEEDS_PYTEST else []
tests_require = ['pytest', 'gunicorn',
'pytest-timeout', 'async-generator']
args = dict(
name='aiohttp',
version=version,
description='Async http client/server framework (asyncio)',
long_description='\n\n'.join((read('README.rst'), read('CHANGES.rst'))),
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Development Status :: 5 - Production/Stable',
'Operating System :: POSIX',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Topic :: Internet :: WWW/HTTP',
'Framework :: AsyncIO',
],
author='Nikolay Kim',
author_email='[email protected]',
maintainer=', '.join(('Nikolay Kim <[email protected]>',
'Andrew Svetlov <[email protected]>')),
maintainer_email='[email protected]',
url='https://github.com/aio-libs/aiohttp',
project_urls={
'Chat: Gitter': 'https://gitter.im/aio-libs/Lobby',
'CI: AppVeyor': 'https://ci.appveyor.com/project/aio-libs/aiohttp',
'CI: Circle': 'https://circleci.com/gh/aio-libs/aiohttp',
'CI: Shippable': 'https://app.shippable.com/github/aio-libs/aiohttp',
'CI: Travis': 'https://travis-ci.com/aio-libs/aiohttp',
'Coverage: codecov': 'https://codecov.io/github/aio-libs/aiohttp',
'Docs: RTD': 'https://docs.aiohttp.org',
'GitHub: issues': 'https://github.com/aio-libs/aiohttp/issues',
'GitHub: repo': 'https://github.com/aio-libs/aiohttp',
},
license='Apache 2',
packages=['aiohttp'],
python_requires='>=3.5.3',
install_requires=install_requires,
tests_require=tests_require,
setup_requires=pytest_runner,
include_package_data=True,
    ext_modules=extensions,
cmdclass=dict(build_ext=ve_build_ext),
)
try:
setup(**args)
except BuildFailed:
print("************************************************************")
print("Cannot compile C accelerator module, use pure python version")
print("************************************************************")
del args['ext_modules']
del args['cmdclass']
setup(**args)
|
TshepangRas/tshilo-dikotla
|
td_maternal/models/maternal_clinical_measurements_one.py
|
Python
|
gpl-2.0
| 738
| 0.004065
|
from django.db import models
from django.core.validators import MinValueValidator, MaxValueValidator
from edc_registration.models import RegisteredSubject
from .base_maternal_clinical_measurements import BaseMaternalClinicalMeasurements
class MaternalClinicalMeasurementsOne(BaseMaternalClinicalMeasurements):
height = models.DecimalField(
max_digits=5,
decimal_places=2,
verbose_name="Mother's height? ",
validators=[MinValueValidator(134), MaxValueValidator(195), ],
help_text="Measured in Centimeters (cm)")
class Meta:
app_label = 'td_maternal'
verbose_name = 'Maternal Clinical Measurements One'
verbose_name_plural = 'Maternal Clinical Measurements One'
|
pulsar-chem/Pulsar-Core
|
test/old/Old2/modules/CP.py
|
Python
|
bsd-3-clause
| 2,969
| 0.022903
|
#!/usr/bin/env python3
import os
import sys
import traceback
thispath = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(os.path.dirname(thispath),"helper"))
from MiscFxns import *
from StandardModules import *
import pulsar_psi4
def ApplyBasis(syst,bsname,bslabel="primary"):
return psr.system.apply_single_basis(bslabel,bsname,syst)
def CompareEgy(EgyIn):
return abs(EgyIn+224.89287653924677)<0.00001
def CompareGrad(GradIn):
CorrectGrad=[
-0.000988976949000001, 0.0004443157829999993, 0.05238342271999999,
0.018237358511, -0.002547005771, -0.030731839919000005,
-0.02344281975, -0.0062568701740000005, -0.025360880303,
-0.015409293889000001, -0.047382578540999996, -0.012807191666999996,
0.016869055227000003, 0.024963490952999996, -0.017442968207000004,
0.007207092293000001, 0.025306999363999997, 0.023850402741000004,
0.019786523729999998, 0.04038960502300001, -0.028509120090000006,
-0.026869925129, -0.022975320699000004, 0.005627050168,
0.004610985953999999, -0.011942635934, 0.032991124551000006]
AllGood=True
for i in range(0,len(CorrectGrad)):
        AllGood=AllGood and abs(CorrectGrad[i]-GradIn[i])<0.00001
return AllGood
def Run(mm):
try:
tester = psr.testing.Tester("Testing Boys and Bernardi CP")
tester.print_header()
pulsar_psi4.pulsar_psi4_setup(mm)
LoadDefaultModules(mm)
mm.change_option("PSI4_SCF","BASIS_SET","sto-3g")
mm.change_option("PSR_CP","METHOD","PSI4_SC
|
F")
mm.change_option("PSR_MBE","METHOD","PSI4_SCF")
mm.change_option("PSI4_SCF","PRINT",0)
mol=psr.system.make_system("""
0 1
O 1.2361419 1.0137761 -0.0612424
H 0.5104418 0.8944555 0.5514190
H 1.9926927 1.1973129 0.4956931
O -0.9957202 0.0160415 1.2422556
H -1.4542703 -0.5669741 1.8472817
H -0.9377950 -0.4817912 0.4267562
O -0.2432343 -1.0198566 -1.1953808
H 0.4367536 -0.3759433 -0.9973297
H -0.5031835 -0.8251492 -2.0957959
""")
mol = ApplyBasis(mol,"sto-3g","sto-3g")
wfn=psr.datastore.Wavefunction()
wfn.system=mol
MyMod=mm.get_module("PSR_CP",0)
NewWfn,Egy=MyMod.deriv(0,wfn)
tester.test("Testing CP Energy via Deriv(0)", True, CompareEgy, Egy[0])
NewWfn,Egy=MyMod.energy(wfn)
tester.test("Testing CP Energy via Energy()", True, CompareEgy, Egy)
NewWfn,Egy=MyMod.deriv(1,wfn)
tester.test("Testing CP Gradient via Deriv(1)", True, CompareGrad, Egy)
NewWfn,Egy=MyMod.gradient(wfn)
tester.test("Testing CP Gradient via Gradient()", True, CompareGrad, Egy)
tester.print_results()
except Exception as e:
psr.output.Output("Caught exception in main handler\n")
traceback.print_exc()
with psr.ModuleAdministrator() as mm:
Run(mm)
psr.finalize()
|
sparkslabs/kamaelia
|
Code/Python/Apps/Europython09/App/BB1.py
|
Python
|
apache-2.0
| 1,272
| 0.002358
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import Axon
from Kamaelia.Chassis.ConnectedServer import ServerCore
class RequestResponseComponent(Axon.Component.component):
def main(self):
while not self.dataReady("control"):
for msg in self.Inbox("inbox"):
self.send(msg, "outbo
|
x")
self.pause()
yield 1
self.send(self.recv("control"), "signal")
ServerCore(protocol=RequestResponseComponent,
port=1599).run()
|
deprofundis/deprofundis
|
models/scripts/example_crbm.py
|
Python
|
mit
| 2,313
| 0.006053
|
from models.sampler import DynamicBlockGibbsSampler
from models.distribution import DynamicBernoulli
from models.optimizer import DynamicSGD
from utils.utils import prepare_frames
from scipy import io as matio
from data.gwtaylor.path import *
import ipdb
import numpy as np
SIZE_BATCH = 10
EPOCHS = 100
SIZE_HIDDEN = 50
SIZE_VISIBLE = 150
# CRBM Constants
M_LAG_VISIBLE = 2
N_LAG_HIDDEN = 2
SIZE_LAG = max(M_LAG_VISIBLE, N_LAG_HIDDEN)+1
# load and prepare dataset from .mat
mat = matio.loadmat(MOCAP_SAMPLE)
dataset = mat['batchdatabinary']
# generate batches
batch_idx_list = prepare_frames(len(dataset), SIZE_LAG, SIZE_BATCH)
# load distribution
bernoulli = DynamicBernoulli(SIZE_VISIBLE, SIZE_HIDDEN, m_lag_visible=M_LAG_VISIBLE, n_lag_hidden=N_LAG_HIDDEN)
gibbs_sampler = DynamicBlockGibbsSampler(bernoulli, sampling_steps=1)
sgd = DynamicSGD(bernoulli)
for epoch in range(EPOCHS):
error = 0.0
for chunk_idx_list in batch_idx_list:
# get batch data set
        data = np.zeros(shape=(SIZE_BATCH, SIZE_VISIBLE, SIZE_LAG))
for idx, (start, end) in enumerate(chunk_idx_list):
data[idx, :, :] = dataset[start:end, :].T
hidden_0_probs, hidden_0_states, \
hidden_k_probs, hidden_k_states, \
visible_k_probs, visible_k_states = gibbs_sampler.sample(data[:, :, 0], data[:, :, 1:])
# compute deltas
d_weight_update, d_bias_hidden_update, \
d_bias_visible_update, d_vis_vis, d_vis_hid = sgd.optimize(data[:, :, 0], hidden_0_states, hidden_0_probs, hidden_k_probs,
hidden_k_states, visible_k_probs, visible_k_states, data[:, :, 1:])
# update model values
bernoulli.weights += d_weight_update
bernoulli.bias_hidden += d_bias_hidden_update
bernoulli.bias_visible += d_bias_visible_update
bernoulli.vis_vis_weights += d_vis_vis
bernoulli.vis_hid_weights += d_vis_hid
# compute reconstruction error
_, _, \
_, _, \
_, visible_k_states = gibbs_sampler.sample(data[:, :, 0], data[:, :, 1:])
error += np.mean(np.abs(visible_k_states - data[:, :, 0]))
    error = 1./len(batch_idx_list) * error
print error
|
anhstudios/swganh
|
data/scripts/templates/object/tangible/deed/event_perk/shared_lambda_shuttle_static_deed.py
|
Python
|
mit
| 487
| 0.045175
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/deed/event_perk/shared_lambda_shuttle_static_deed.iff"
result.attribute_template_id = 2
result.stfName("event_perk","lambda_shuttle_static_deed_name")
#### BEGIN MODIFICATIONS ####
	#### END MODIFICATIONS ####
	return result
|
lonnen/socorro
|
socorro/unittest/lib/test_task_manager.py
|
Python
|
mpl-2.0
| 2,418
| 0.000414
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from unittest import mock
from configman.dotdict import DotDict
from socorro.lib.task_manager import TaskManager, default_task_func
class TestTaskManager:
    def test_constructor1(self):
config = DotDict()
config.quit_on_empty_queue = False
tm = TaskManager(config)
assert tm.config == config
assert tm.task_func == default_task_func
assert tm.quit is False
def test_get_iterator(self):
config = DotDict()
config.quit_on_empty_queue = False
tm = TaskManager(config, job_source_iterator=range(1))
assert list(tm._get_iterator()) == [0]
def an_iter(self):
yield from range(5)
tm = TaskManager(config, job_source_iterator=an_iter)
assert list(tm._get_iterator()) == [0, 1, 2, 3, 4]
class X:
def __init__(self, config):
self.config = config
def __iter__(self):
yield from self.config
tm = TaskManager(config, job_source_iterator=X(config))
assert list(tm._get_iterator()) == list(config.keys())
def test_blocking_start(self):
config = DotDict()
config.idle_delay = 1
config.quit_on_empty_queue = False
class MyTaskManager(TaskManager):
def _responsive_sleep(self, seconds, wait_log_interval=0, wait_reason=""):
try:
if self.count >= 2:
raise KeyboardInterrupt
self.count += 1
except AttributeError:
self.count = 0
tm = MyTaskManager(config, task_func=mock.Mock())
waiting_func = mock.Mock()
tm.blocking_start(waiting_func=waiting_func)
assert tm.task_func.call_count == 10
assert waiting_func.call_count == 0
def test_blocking_start_with_quit_on_empty(self):
config = DotDict()
config.idle_delay = 1
config.quit_on_empty_queue = True
tm = TaskManager(config, task_func=mock.Mock())
waiting_func = mock.Mock()
        tm.blocking_start(waiting_func=waiting_func)
assert tm.task_func.call_count == 10
        assert waiting_func.call_count == 0
|
dermoth/gramps
|
gramps/gen/db/generic.py
|
Python
|
gpl-2.0
| 88,300
| 0.000453
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2015-2016 Gramps Development Team
# Copyright (C) 2016 Nick Hall
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#------------------------------------------------------------------------
#
# Python Modules
#
#------------------------------------------------------------------------
import random
import pickle
import time
import re
import os
import logging
import bisect
import ast
import sys
import datetime
import glob
#------------------------------------------------------------------------
#
# Gramps Modules
#
#------------------------------------------------------------------------
from . import (DbReadBase, DbWriteBase, DbUndo, DBLOGNAME, DBUNDOFN,
REFERENCE_KEY, PERSON_KEY, FAMILY_KEY,
CITATION_KEY, SOURCE_KEY, EVENT_KEY, MEDIA_KEY, PLACE_KEY,
REPOSITORY_KEY, NOTE_KEY, TAG_KEY, TXNADD, TXNUPD, TXNDEL,
KEY_TO_NAME_MAP, DBMODE_R, DBMODE_W)
from .utils import write_lock_file, clear_lock_file
from ..errors import HandleError
from ..utils.callback import Callback
from ..updatecallback import UpdateCallback
from .bookmarks import DbBookmarks
from ..utils.id import create_id
from ..lib.researcher import Researcher
from ..lib import (Tag, Media, Person, Family, Source, Citation, Event,
Place, Repository, Note, NameOriginType)
from ..lib.genderstats import GenderStats
from ..config import config
from ..const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
LOG = logging.getLogger(DBLOGNAME)
SIGBASE = ('person', 'family', 'source', 'event', 'media',
'place', 'repository', 'reference', 'note', 'tag', 'citation')
def touch(fname, mode=0o666, dir_fd=None, **kwargs):
## After http://stackoverflow.com/questions/1158076/implement-touch-using-python
if sys.version_info < (3, 3, 0):
with open(fname, 'a'):
os.utime(fname, None) # set to now
else:
flags = os.O_CREAT | os.O_APPEND
with os.fdopen(os.open(fname, flags=flags, mode=mode, dir_fd=dir_fd)) as f:
os.utime(f.fileno() if os.utime in os.supports_fd else fname,
dir_fd=None if os.supports_fd else dir_fd, **kwargs)
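# Illustrative usage of touch() above (the path is a hypothetical example):
# touch('/tmp/gramps.lock') creates the file if it does not exist and
# otherwise updates its modification time to "now".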
class DbGenericUndo(DbUndo):
def __init__(self, grampsdb, path):
super(DbGenericUndo, self).__init__(grampsdb)
self.undodb = []
def open(self, value=None):
"""
Open the backing storage. Needs to be overridden in the derived
class.
"""
pass
def close(self):
"""
Close the backing storage. Needs to be overridden in the derived
class.
"""
pass
def append(self, value):
"""
Add a new entry on the end. Needs to be overridden in the derived
class.
"""
self.undodb.append(value)
def __getitem__(self, index):
"""
Returns an entry by index number. Needs to be overridden in the
derived class.
"""
return self.undodb[index]
def __setitem__(self, index, value):
"""
Set an entry to a value. Needs to be overridden in the derived class.
"""
self.undodb[index] = value
def __len__(self):
"""
Returns the number of entries. Needs to be overridden in the derived
class.
"""
return len(self.undodb)
def _redo(self, update_history):
"""
Access the last undone transaction, and revert the data to the state
before the transaction was undone.
"""
txn = self.redoq.pop()
self.undoq.append(txn)
transaction = txn
db = self.db
subitems = transaction.get_recnos()
# sigs[obj_type][trans_type]
sigs = [[[] for trans_type in range(3)] for key in range(11)]
# Process all records in the transaction
try:
self.db._txn_begin()
for record_id in subitems:
(key, trans_type, handle, old_data, new_data) = \
pickle.loads(self.undodb[record_id])
if key == REFERENCE_KEY:
self.db.undo_reference(new_data, handle)
else:
self.db.undo_data(new_data, handle, key)
sigs[key][trans_type].append(handle)
# now emit the signals
self.undo_sigs(sigs, False)
self.db._txn_commit()
except:
self.db._txn_abort()
raise
# Notify listeners
if db.undo_callback:
db.undo_callback(_("_Undo %s") % transaction.get_description())
if db.redo_callback:
if self.redo_count > 1:
new_transaction = self.redoq[-2]
db.redo_callback(_("_Redo %s")
% new_transaction.get_description())
else:
db.redo_callback(None)
if update_history and db.undo_history_callback:
db.undo_history_callback()
return True
def _undo(self, update_history):
"""
Access the last committed transaction, and revert the data to the
state before the transaction was committed.
"""
txn = self.undoq.pop()
self.redoq.append(txn)
transaction = txn
db = self.db
subitems = transaction.get_recnos(reverse=True)
# sigs[obj_type][trans_type]
sigs = [[[] for trans_type in range(3)] for key in range(11)]
# Process all records in the transaction
try:
self.db._txn_begin()
for record_id in subitems:
(key, trans_type, handle, old_data, new_data) = \
pickle.loads(self.undodb[record_id])
if key == REFERENCE_KEY:
self.db.undo_reference(old_data, handle)
else:
self.db.undo_data(old_data, handle, key)
sigs[key][trans_type].append(handle)
# now emit the signals
self.undo_sigs(sigs, True)
self.db._txn_commit()
except:
self.db._txn_abort()
raise
# Notify listeners
if db.undo_callback:
if self.undo_count > 0:
db.undo_callback(_("_Undo %s")
% self.undoq[-1].get_description())
else:
                db.undo_callback(None)
if db.redo_callback:
db.redo_callback(_("_Redo %s")
% transaction.get_description())
if update_history and db.undo_history_callback:
db.undo_history_callback()
return True
def undo_sigs(self, sigs, undo):
""
|
"
Helper method to undo/redo the signals for changes made
We want to do deletes and adds first
Note that if 'undo' we swap emits
"""
for trans_type in [TXNDEL, TXNADD, TXNUPD]:
for obj_type in range(11):
handles = sigs[obj_type][trans_type]
if handles:
if not undo and trans_type == TXNDEL \
or undo and trans_type == TXNADD:
typ = '-delete'
else:
# don't update a handle if its been deleted, and note
# that 'deleted' handles are in the 'add' list if we
# are undoing
|
reviewboard/reviewboard
|
reviewboard/hostingsvcs/bugtracker.py
|
Python
|
mit
| 1,633
| 0
|
from djblets.cache.backend import cache_memoize
class BugTracker(object):
"""An interface to a bug tracker.
BugTracker subclasses are used to enable interaction with different
bug trackers.
"""
def get_bug_info(self, repository, bug_id):
"""Get the information for the specified bug.
This should return a dictionary with 'summary', 'description', and
'status' keys.
This is cached for 60 seconds to reduce the number of queries to the
bug trackers and make things seem fast after the first infobox load,
but is still a short enough time to give relatively fresh data.
"""
return cache_memoize(self.make_bug_cache_key(repository, bug_id),
lambda: self.get_bug_info_uncached(repository,
bug_id),
expiration=60)
def get_bug_info_uncached(self, repository, bug_id):
"""Get the information for the specified bug (implementation).
This should be implemented by subclasses, and should return a
dictionary with 'summary', 'description', and 'status' keys.
If any of those are unsupported by the given bug tracker, the unknown
values should be given as an empty string.
"""
return {
'summary': '',
'description': '',
            'status': '',
}
def make_bug_cache_key(self, repository, bug_id):
"""Returns a key to use when caching fetched bug information."""
return 'repository-%s-bug-%s' % (repository.pk, bug_id)
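# A minimal sketch of a concrete subclass, to show how the interface above is
# meant to be filled in. The class name and returned values are hypothetical,
# not part of Review Board; a real subclass would query its tracker's HTTP API
# inside get_bug_info_uncached and let the cached get_bug_info wrap it.
class ExampleBugTracker(BugTracker):
    def get_bug_info_uncached(self, repository, bug_id):
        # Fetch from the (hypothetical) tracker here; fall back to empty
        # strings for any field the tracker does not supply.
        return {
            'summary': 'Example summary for bug %s' % bug_id,
            'description': '',
            'status': 'open',
        }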
|
TaskEvolution/Task-Coach-Evolution
|
taskcoach/taskcoachlib/widgets/__init__.py
|
Python
|
gpl-3.0
| 1,717
| 0.000582
|
'''
Task Coach - Your friendly task manager
Copyright (C) 2004-2013 Task Coach developers <[email protected]>
Task Coach is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Task Coach is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from notebook import Notebook, BookPage
from frame import AuiManagedFrameWithDynamicCenterPane
from dialog import Dialog, NotebookDialog, HTMLDialog, AttachmentSelector
from itemctrl import Column
from listctrl import VirtualListCtrl
from checklistbox import CheckListBox
from treectrl import CheckTreeCtrl, TreeListCtrl
from squaremap import SquareMap
from timeline import Timeline
from datectrl import DateTimeCtrl, TimeEntry
from textctrl import SingleLineTextCtrl, MultiLineTextCtrl, StaticTextWithToolTip
from panel import PanelWithBoxSizer, BoxWithFlexGridSizer, BoxWithBoxSizer
from searchctrl import SearchCtrl
from spinctrl import SpinCtrl
from tooltip import ToolTipMixin, SimpleToolTip
from dirchooser import DirectoryChooser
from fontpicker import FontPickerCtrl
from syncmlwarning import SyncMLWarningDialog
from calendarwidget import Calendar
from calendarconfig import CalendarConfigDialog
from password import GetPassword
import masked
from wx.lib import sized_controls
|
hackshel/py-aluminium
|
src/__furture__/simplepool.py
|
Python
|
bsd-3-clause
| 3,289
| 0.023995
|
#!/usr/bin/env python
"""simple thread pool
@author: dn13([email protected])
@author: Fibrizof([email protected])
"""
import threading
import Queue
import new
class WorkerPoolError( Exception ):
    pass
class Task(threading.Thread):
def __init__(self, queue, result_queue):
threading.Thread.__init__(self)
self.queue = queue
self.result_queue = result_queue
self.running = True
def cancel(self):
self.running = False
self.queue.put(None)
def run(self):
while self.running:
call = self.queue.get()
if call:
try:
                    result = call()
                    self.result_queue.put(result)
except:
pass
self.queue.task_done()
class WorkerPool( object ):
def __init__( self, threadnum ):
self.threadnum = threadnum
self.q = Queue.Queue()
self.result_q = Queue.Queue()
self.ts = [ Task(self.q, self.result_q) for i in range(threadnum) ]
self._registfunctions = {}
self.is_in_join = False
for t in self.ts :
t.setDaemon(True)
t.start()
def __del__(self):
try:
            # cancel() is called twice on purpose: the first pass sets every
            # thread's running flag to False, the second makes each thread
            # receive one more queue item so it wakes up and exits.
            # (Lazily kept as one loop pair instead of two separate methods.)
for t in self.ts:
t.cancel()
for t in self.ts:
t.cancel()
except:
pass
def __call__( self, work ):
if not self.is_in_join:
self.q.put( work )
        else:
raise WorkerPoolError, 'Pool has been joined'
def join( self ):
self.is_in_join = True
self.q.join()
self.is_in_join = False
return
def runwithpool( self, _old ):
def _new( *args, **kwargs ):
self.q.put( lambda : _old( *args, **kwargs ) )
return _new
def registtopool( self, _old ):
if _old.__name__ in self._registfunctions :
raise WorkerPoolError, 'function name exists'
self._registfunctions[_old.__name__] = _old
return _old
def get_all_result(self):
result_list = []
while True:
try:
result_list.append(self.result_q.get_nowait())
except Exception as e:
if 0 == self.result_q.qsize():
break
else:
continue
return result_list
def __getattr__( self, name ):
if name in self._registfunctions :
return self._registfunctions[name]
raise AttributeError, '%s not found' % name
if __name__ == '__main__' :
import thread
p = WorkerPool(5)
@p.runwithpool
def foo( a ):
print 'foo>', thread.get_ident(), '>', a
return
@p.registtopool
def bar( b ):
print 'bar>', thread.get_ident(), '>', b
for i in range(10):
foo(i)
p.bar(i+100)
p( lambda : bar(200) )
p.join()
|
piton-package-manager/piton
|
piton/commands/outdated.py
|
Python
|
mit
| 1,511
| 0.031105
|
from ..utils.command import BaseCommand
from ..utils.tabulate import tabulate
from ..utils.info import get_packages, Sources
class Colors:
PURPLE = '\033[95m'
OKBLUE = '\033[94m'
	OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
UNDERLINE = '\033[4m'
ENDC = '\033[0m'
class Command(BaseCommand):
name = "outdated"
@classmethod
def run(cls, args):
cls._run()
@classmethod
def _run(cls):
packages = get_packages((Sources.required, Sources.installed))
packages = list(filter(lambda p: p.wanted_rule, packages)) # filter out ones with no wanted version (not in package.json)
packages_to_display = []
for package in packages:
package.get_wanted_version()
if not package.version or (
package.version != package.latest_version or
package.version != package.wanted_version):
packages_to_display.append(package)
cls.display_outdated(packages_to_display)
@staticmethod
def display_outdated(packages):
if len(packages) == 0:
return
headings = ["package", "current", "wanted", "latest"]
headings = list(map(lambda heading: Colors.UNDERLINE+heading+Colors.ENDC, headings))
table = []
packages = sorted(packages, key=lambda package: package.name)
for package in packages:
table.append([
Colors.OKGREEN+package.name+Colors.ENDC,
package.version or "n/a",
Colors.OKGREEN+(package.wanted_version or "n/a")+Colors.ENDC,
Colors.PURPLE+(package.latest_version or "n/a")+Colors.ENDC
])
print(tabulate(table, headings, tablefmt="plain"))
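# Illustrative output of the table printed above (hypothetical package data):
#   package   current  wanted  latest
#   requests  2.18.0   2.19.1  2.22.0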
|
abhattad4/Digi-Menu
|
tests/migrations/test_commands.py
|
Python
|
bsd-3-clause
| 37,861
| 0.002803
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import codecs
import importlib
import os
import shutil
from django.apps import apps
from django.core.management import CommandError, call_command
from django.db import DatabaseError, connection, models
from django.db.migrations import questioner
from django.test import ignore_warnings, mock, override_settings
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_text
from .models import UnicodeModel, UnserializableModel
from .test_base import MigrationTestBase
class MigrateTests(MigrationTestBase):
"""
Tests running the migrate command.
"""
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate(self):
"""
Tests basic usage of the migrate command.
"""
# Make sure no tables are created
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
# Run the migrations to 0001 only
call_command("migrate", "migrations", "0001", verbosity=0)
# Make sure the right tables exist
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
# Run migrations all the way
call_command("migrate", verbosity=0)
# Make sure the right tables exist
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
# Unmigrate everything
call_command("migrate", "migrations", "zero", verbosity=0)
# Make sure it's all gone
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate_fake_initial(self):
"""
#24184 - Tests that --fake-initial only works if all tables created in
        the initial migration of an app exist
"""
# Make sure no tables are created
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
# Run the migrations to 0001 only
call_command("migrate", "migrations", "0001", verbosity=0)
# Make sure the right tables exist
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
# Fake a roll-back
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
# Make sure the tables still exist
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
# Try to run initial migration
with self.assertRaises(DatabaseError):
call_command("migrate", "migrations", "0001", verbosity=0)
# Run initial migration with an explicit --fake-initial
out = six.StringIO()
with mock.patch('django.core.management.color.supports_color', lambda *args: False):
call_command("migrate", "migrations", "0001", fake_initial=True, stdout=out, verbosity=1)
self.assertIn(
"migrations.0001_initial... faked",
out.getvalue().lower()
)
# Run migrations all the way
call_command("migrate", verbosity=0)
# Make sure the right tables exist
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
# Fake a roll-back
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
# Make sure the tables still exist
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
# Try to run initial migration
with self.assertRaises(DatabaseError):
call_command("migrate", "migrations", verbosity=0)
# Run initial migration with an explicit --fake-initial
with self.assertRaises(DatabaseError):
# Fails because "migrations_tribble" does not exist but needs to in
# order to make --fake-initial work.
call_command("migrate", "migrations", fake_initial=True, verbosity=0)
        # Fake an apply
call_command("migrate", "migrations", fake=True, verbosity=0)
# Unmigrate everything
call_command("migrate", "migrations", "zero", verbosity=0)
# Make sure it's all gone
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"})
def test_migrate_conflict_exit(self):
"""
Makes sure that migrate exits if it detects a conflict.
"""
with self.assertRaisesMessage(CommandError, "Conflicting migrations detected"):
call_command("migrate", "migrations")
@ignore_warnings(category=RemovedInDjango20Warning)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate_list(self):
"""
Tests --list output of migrate command
"""
out = six.StringIO()
with mock.patch('django.core.management.color.supports_color', lambda *args: True):
call_command("migrate", list=True, stdout=out, verbosity=0, no_color=False)
self.assertEqual(
'\x1b[1mmigrations\n\x1b[0m'
' [ ] 0001_initial\n'
' [ ] 0002_second\n',
out.getvalue().lower()
)
call_command("migrate", "migrations", "0001", verbosity=0)
out = six.StringIO()
# Giving the explicit app_label tests for selective `show_migration_list` in the command
call_command("migrate", "migrations", list=True, stdout=out, verbosity=0, no_color=True)
self.assertEqual(
'migrations\n'
' [x] 0001_initial\n'
' [ ] 0002_second\n',
out.getvalue().lower()
)
# Cleanup by unmigrating everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_showmigrations_list(self):
"""
Tests --list output of showmigrations command
"""
out = six.StringIO()
with mock.patch('django.core.management.color.supports_color', lambda *args: True):
call_command("showmigrations", format='list', stdout=out, verbosity=0, no_color=False)
self.assertEqual(
'\x1b[1mmigrations\n\x1b[0m'
' [ ] 0001_initial\n'
' [ ] 0002_second\n',
out.getvalue().lower()
)
call_command("migrate", "migrations", "0001", verbosity=0)
out = six.StringIO()
        # Giving the explicit app_label tests for selective `show_list` in the command
call_command("showmigrations", "migrations", format='list', stdout=out, verbosity=0, no_color=True)
self.assertEqual(
'migrations\n'
' [x] 0001_initial\n'
' [ ] 0002_second\n',
out.getvalue().lower()
)
# Cleanup by unmigrating everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"})
def test_showmigrations_plan(self):
"""
Tests --plan output of showmigrations command
"""
out = six.StringIO()
call_command("showmigrations", format='plan', stdout=out)
self.assertIn(
"[ ] migrations.0001_initial\n"
"[ ] migrations.0003_third\n"
"[ ] migrations.0002_second",
out.getvalue
|
christianurich/VIBe2UrbanSim
|
3rdparty/opus/src/psrc/person/home_grid_id.py
|
Python
|
gpl-2.0
| 2,165
| 0.0194
|
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from opus_core.variables.variable import Variable
from variable_functions import my_attribute_label
class home_grid_id(Variable):
'''The grid_id of a person's residence.'''
def dependencies(self):
return [my_attribute_label('household_id'),
'psrc.household.grid_id']
def compute(self, dataset_pool):
households = dataset_pool.get_dataset('household')
return self.get_dataset().get_join_data(households, name='grid_id')
from opus_core.tests import opus_unittest
from urbansim.variable_test_toolbox import VariableTestToolbox
from numpy import array
from numpy import ma
from psrc.datasets.person_dataset import PersonDataset
from opus_core.storage_factory import StorageFactory
class Tests(opus_unittest.OpusTestCase):
variable_name = 'psrc.person.home_grid_id'
def test_my_inputs(self):
storage = StorageFactory().get_storage('dict_storage')
persons_table_name = 'persons'
storage.write_table(
table_name=persons_table_name,
table_data={
'person_id':array([1, 2, 3, 4, 5]),
'household_id':array([1, 1, 3, 3, 3]),
'member_id':array([1,2,1,2,3])
},
)
persons = PersonDataset(in_storage=storage, in_table_name=persons_table_name)
values = VariableTestToolbox().compute_variable(self.variable_name,
data_dictionary = {
'household':{
'household_id':array([1,2,3]),
'grid_id':array([9, 9, 7])
},
'person':persons
},
dataset = 'person'
)
should_be = array([9, 9, 7, 7, 7])
self.assert_(ma.allclose(values, should_be, rtol=1e-7),
'Error in ' + self.variable_name)
if __name__=='__main__':
opus_unittest.main()
|
abalakh/robottelo
|
tests/foreman/api/test_docker.py
|
Python
|
gpl-3.0
| 47,259
| 0.000042
|
# -*- encoding: utf-8 -*-
"""Unit tests for the Docker feature."""
from fauxfactory import gen_choice, gen_string, gen_url
from nailgun import entities
from random import randint, shuffle
from requests.exceptions import HTTPError
from robottelo.api.utils import promote
from robottelo.constants import DOCKER_REGISTRY_HUB
from robottelo.decorators import run_only_on, skip_if_bug_open, stubbed
from robottelo.helpers import (
get_external_docker_url,
get_internal_docker_url,
valid_data_list,
)
from robottelo.test import APITestCase
DOCKER_PROVIDER = 'Docker'
EXTERNAL_DOCKER_URL = get_external_docker_url()
INTERNAL_DOCKER_URL = get_internal_docker_url()
STRING_TYPES = ['alpha', 'alphanumeric', 'cjk', 'utf8', 'latin1']
def _invalid_names():
"""Return a generator yielding various kinds of invalid strings for
Docker repositories.
"""
return (
# boundaries
gen_string('alphanumeric', 2),
gen_string('alphanumeric', 31),
u'{0}/{1}'.format(
gen_string('alphanumeric', 3),
gen_string('alphanumeric', 3)
),
u'{0}/{1}'.format(
gen_string('alphanumeric', 4),
gen_string('alphanumeric', 2)
),
u'{0}/{1}'.format(
gen_string('alphanumeric', 31),
gen_string('alphanumeric', 30)
),
u'{0}/{1}'.format(
gen_string('alphanumeric', 30),
gen_string('alphanumeric', 31)
),
# not allowed non alphanumeric character
u'{0}+{1}_{2}/{2}-{1}_{0}.{3}'.format(
gen_string('alphanumeric', randint(3, 6)),
gen_string('alphanumeric', randint(3, 6)),
gen_string('alphanumeric', randint(3, 6)),
gen_string('alphanumeric', randint(3, 6)),
),
u'{0}-{1}_{2}/{2}+{1}_{0}.{3}'.format(
gen_string('alphanumeric', randint(3, 6)),
gen_string('alphanumeric', randint(3, 6)),
gen_string('alphanumeric', randint(3, 6)),
gen_string('alphanumeric', randint(3, 6)),
),
)
def _valid_names():
"""Return a generator yielding various kinds of valid strings for
Docker repositories.
"""
return (
# boundaries
gen_string('alphanumeric', 3).lower(),
gen_string('alphanumeric', 30).lower(),
u'{0}/{1}'.format(
gen_string('alphanumeric', 4).lower(),
gen_string('alphanumeric', 3).lower(),
),
u'{0}/{1}'.format(
gen_string('alphanumeric', 30).lower(),
gen_string('alphanumeric', 30).lower(),
),
# allowed non alphanumeric character
u'{0}-{1}_{2}/{2}-{1}_{0}.{3}'.format(
gen_string('alphanumeric', randint(3, 6)).lower(),
gen_string('alphanumeric', randint(3, 6)).lower(),
gen_string('alphanumeric', randint(3, 6)).lower(),
gen_string('alphanumeric', randint(3, 6)).lower(),
),
u'-_-_/-_.',
)
def _create_repository(product, name=None, upstream_name=None):
"""Creates a Docker-based repository.
:param product: A ``Product`` object.
:param str name: Name for the repository. If ``None`` then a random
value will be generated.
:param str upstream_name: A valid name for an existing Docker image.
If ``None`` then defaults to ``busybox``.
:return: A ``Repository`` object.
"""
if name is None:
name = gen_string(gen_choice(STRING_TYPES), 15)
if upstream_name is None:
upstream_name = u'busybox'
return entities.Repository(
content_type=u'docker',
docker_upstream_name=upstream_name,
name=name,
product=product,
url=DOCKER_REGISTRY_HUB,
).create()
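# Illustrative call of the helper above (the product object is assumed to
# already exist; 'alpine' is a hypothetical upstream image name):
#   repo = _create_repository(product, upstream_name='alpine')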
@run_only_on('sat')
class DockerRepositoryTestCase(APITestCase):
"""Tests specific to performing CRUD methods against ``Docker``
repositories.
"""
@classmethod
def setUpClass(cls):
"""Create an organization and product which can be re-used in tests."""
super(DockerRepositoryTestCase, cls).setUpClass()
cls.org = entities.Organization().create()
def test_create_one_docker_repo(self):
"""@Test:
|
Create one Docker-type repo
|
sitory
@Assert: A repository is created with a Docker image.
@Feature: Docker
"""
for name in valid_data_list():
with self.subTest(name):
repo = _create_repository(
entities.Product(organization=self.org).create(),
name,
)
self.assertEqual(repo.name, name)
self.assertEqual(repo.docker_upstream_name, 'busybox')
self.assertEqual(repo.content_type, 'docker')
def test_create_docker_repo_valid_upstream_name(self):
"""@Test: Create a Docker-type repository with a valid docker upstream
name
@Assert: A repository is created with the specified upstream name.
@Feature: Docker
"""
for upstream_name in _valid_names():
with self.subTest(upstream_name):
repo = _create_repository(
entities.Product(organization=self.org).create(),
upstream_name=upstream_name,
)
self.assertEqual(repo.docker_upstream_name, upstream_name)
self.assertEqual(repo.content_type, u'docker')
def test_create_docker_repo_invalid_upstream_name(self):
"""@Test: Create a Docker-type repository with a invalid docker
upstream name.
@Assert: A repository is not created and a proper error is raised.
@Feature: Docker
"""
product = entities.Product(organization=self.org).create()
for upstream_name in _invalid_names():
with self.subTest(upstream_name):
with self.assertRaises(HTTPError):
_create_repository(product, upstream_name=upstream_name)
def test_create_multiple_docker_repo(self):
"""@Test: Create multiple Docker-type repositories
@Assert: Multiple docker repositories are created with a Docker image
and they all belong to the same product.
@Feature: Docker
"""
product = entities.Product(organization=self.org).create()
for _ in range(randint(2, 5)):
repo = _create_repository(product)
product = product.read()
self.assertIn(repo.id, [repo_.id for repo_ in product.repository])
def test_create_multiple_docker_repo_multiple_products(self):
"""@Test: Create multiple Docker-type repositories on multiple products.
@Assert: Multiple docker repositories are created with a Docker image
and they all belong to their respective products.
@Feature: Docker
"""
for _ in range(randint(2, 5)):
product = entities.Product(organization=self.org).create()
for _ in range(randint(2, 3)):
repo = _create_repository(product)
product = product.read()
self.assertIn(
repo.id,
[repo_.id for repo_ in product.repository],
)
def test_sync_docker_repo(self):
"""@Test: Create and sync a Docker-type repository
@Assert: A repository is created with a Docker repository
and it is synchronized.
@Feature: Docker
"""
repo = _create_repository(
entities.Product(organization=self.org).create()
)
repo.sync()
repo = repo.read()
self.assertGreaterEqual(repo.content_counts['docker_image'], 1)
def test_update_docker_repo_name(self):
"""@Test: Create a Docker-type repository and update its name.
@Assert: A repository is created with a Docker image and that its
name can be updated.
@Feature: Docker
"""
repo = _create_repository(
entities.Product(organization=self.org).create())
# Update the repository name to random value
for new_name in valid_data_lis
|
akuster/yali
|
yali/storage/library/__init__.py
|
Python
|
gpl-2.0
| 125
| 0.008
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from yali.storage import StorageError
class LibraryError(StorageError):
    pass
|
mvillalba/codinghyde.ant
|
demos/ant/02-capabilities.py
|
Python
|
mit
| 769
| 0
|
"""
Interrogate stick for supported capabilities.
"""
import sys
from codinghyde.ant import driver
from codinghyde.ant import node
from config import *
# Initialize
stick = driver.USB1Driver(SERIAL, debug=DEBUG)
antnode = node.Node(stick)
antnode.start()
# Interrogate stick
# Note: This method will return immediately, as the stick's capabilities are
# interrogated on node initialization (node.start()) in order to set proper
# internal Node instance state.
capabilities = antnode.getCapabilities()
print 'Maximum channels:', capabilities['max_channels']
print 'Maximum network keys:', capabilities['max_net_keys']
print 'Standard options: %X' % capabilities['std_options']
print 'Advanced options: %X' % capabilities['adv_options']
# Shutdown
antnode.
|
stop()
|
punchagan/zulip
|
zerver/lib/users.py
|
Python
|
apache-2.0
| 21,312
| 0.001548
|
import re
import unicodedata
from collections import defaultdict
from typing import Any, Dict, List, Optional, Sequence, Union
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db.models.query import QuerySet
from django.forms.models import model_to_dict
from django.utils.translation import gettext as _
from typing_extensions import TypedDict
from zulip_bots.custom_exceptions import ConfigValidationError
from zerver.lib.avatar import avatar_url, get_avatar_field
from zerver.lib.cache import (
bulk_cached_fetch,
realm_user_dict_fields,
user_profile_by_id_cache_key,
user_profile_cache_key_id,
)
from zerver.lib.exceptions import OrganizationAdministratorRequired
from zerver.lib.request import JsonableError
from zerver.lib.timezone import canonicalize_ti
|
mezone
from zerver.models import (
CustomProfileField,
CustomProfileFieldValue,
Realm,
Service,
UserProfile,
get_realm_user_dicts,
get_user_profile_by_id_in_realm,
)
def check_full_name(full_name_raw: str) -> str:
full_name = full_name_raw.strip()
if len(full_name) > UserProfile.MAX_NAME_LENGTH:
raise JsonableError(_(
|
"Name too long!"))
if len(full_name) < UserProfile.MIN_NAME_LENGTH:
raise JsonableError(_("Name too short!"))
for character in full_name:
if unicodedata.category(character)[0] == "C" or character in UserProfile.NAME_INVALID_CHARS:
raise JsonableError(_("Invalid characters in name!"))
# Names ending with e.g. `|15` could be ambiguous for
# sloppily-written parsers of our Markdown syntax for mentioning
# users with ambiguous names, and likely have no real use, so we
# ban them.
if re.search(r"\|\d+$", full_name_raw):
raise JsonableError(_("Invalid format!"))
return full_name
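# Illustrative behaviour of check_full_name (the example names below are
# invented for this sketch, not part of the original module):
#
#     check_full_name("  Alice  ")  # -> "Alice" (whitespace stripped)
#     check_full_name("Alice|15")   # raises JsonableError("Invalid format!")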
# NOTE: We don't try to absolutely prevent 2 bots from having the same
# name (e.g. you can get there by reactivating a deactivated bot after
# making a new bot with the same name). This is just a check designed
# to make it unlikely to happen by accident.
def check_bot_name_available(realm_id: int, full_name: str) -> None:
dup_exists = UserProfile.objects.filter(
realm_id=realm_id,
full_name=full_name.strip(),
is_active=True,
).exists()
if dup_exists:
raise JsonableError(_("Name is already in use!"))
def check_short_name(short_name_raw: str) -> str:
short_name = short_name_raw.strip()
if len(short_name) == 0:
raise JsonableError(_("Bad name or username"))
return short_name
def check_valid_bot_config(bot_type: int, service_name: str, config_data: Dict[str, str]) -> None:
if bot_type == UserProfile.INCOMING_WEBHOOK_BOT:
from zerver.lib.integrations import WEBHOOK_INTEGRATIONS
config_options = None
for integration in WEBHOOK_INTEGRATIONS:
if integration.name == service_name:
# key: validator
config_options = {c[1]: c[2] for c in integration.config_options}
break
if not config_options:
raise JsonableError(_("Invalid integration '{}'.").format(service_name))
missing_keys = set(config_options.keys()) - set(config_data.keys())
if missing_keys:
raise JsonableError(
_("Missing configuration parameters: {}").format(
missing_keys,
)
)
for key, validator in config_options.items():
value = config_data[key]
error = validator(key, value)
if error:
raise JsonableError(_("Invalid {} value {} ({})").format(key, value, error))
elif bot_type == UserProfile.EMBEDDED_BOT:
try:
from zerver.lib.bot_lib import get_bot_handler
bot_handler = get_bot_handler(service_name)
if hasattr(bot_handler, "validate_config"):
bot_handler.validate_config(config_data)
except ConfigValidationError:
# The exception provides a specific error message, but that
# message is not tagged translatable, because it is
# triggered in the external zulip_bots package.
# TODO: Think of some clever way to provide a more specific
# error message.
raise JsonableError(_("Invalid configuration data!"))
# Adds an outgoing webhook or embedded bot service.
def add_service(
name: str,
user_profile: UserProfile,
base_url: Optional[str] = None,
interface: Optional[int] = None,
token: Optional[str] = None,
) -> None:
Service.objects.create(
name=name, user_profile=user_profile, base_url=base_url, interface=interface, token=token
)
def check_bot_creation_policy(user_profile: UserProfile, bot_type: int) -> None:
    # Realm administrators can always add bots
if user_profile.is_realm_admin:
return
if user_profile.realm.bot_creation_policy == Realm.BOT_CREATION_EVERYONE:
return
if user_profile.realm.bot_creation_policy == Realm.BOT_CREATION_ADMINS_ONLY:
raise OrganizationAdministratorRequired()
if (
user_profile.realm.bot_creation_policy == Realm.BOT_CREATION_LIMIT_GENERIC_BOTS
and bot_type == UserProfile.DEFAULT_BOT
):
raise OrganizationAdministratorRequired()
def check_valid_bot_type(user_profile: UserProfile, bot_type: int) -> None:
if bot_type not in user_profile.allowed_bot_types:
raise JsonableError(_("Invalid bot type"))
def check_valid_interface_type(interface_type: Optional[int]) -> None:
if interface_type not in Service.ALLOWED_INTERFACE_TYPES:
raise JsonableError(_("Invalid interface type"))
def is_administrator_role(role: int) -> bool:
return role in {UserProfile.ROLE_REALM_ADMINISTRATOR, UserProfile.ROLE_REALM_OWNER}
def bulk_get_users(
    emails: List[str], realm: Optional[Realm], base_query: Optional["QuerySet[UserProfile]"] = None
) -> Dict[str, UserProfile]:
if base_query is None:
assert realm is not None
query = UserProfile.objects.filter(realm=realm, is_active=True)
realm_id = realm.id
else:
# WARNING: Currently, this code path only really supports one
# version of `base_query` being used (because otherwise,
# they'll share the cache, which can screw up the filtering).
# If you're using this flow, you'll need to re-do any filters
# in base_query in the code itself; base_query is just a perf
# optimization.
query = base_query
realm_id = 0
def fetch_users_by_email(emails: List[str]) -> List[UserProfile]:
# This should be just
#
# UserProfile.objects.select_related("realm").filter(email__iexact__in=emails,
# realm=realm)
#
# But chaining __in and __iexact doesn't work with Django's
# ORM, so we have the following hack to construct the relevant where clause
where_clause = "upper(zerver_userprofile.email::text) IN (SELECT upper(email) FROM unnest(%s) AS email)"
return query.select_related("realm").extra(where=[where_clause], params=(emails,))
def user_to_email(user_profile: UserProfile) -> str:
return user_profile.email.lower()
return bulk_cached_fetch(
# Use a separate cache key to protect us from conflicts with
# the get_user cache.
lambda email: "bulk_get_users:" + user_profile_cache_key_id(email, realm_id),
fetch_users_by_email,
[email.lower() for email in emails],
id_fetcher=user_to_email,
)
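# Usage sketch (the email address and realm here are hypothetical, not from
# the original module):
#
#     users = bulk_get_users(["[email protected]"], realm)
#     profile = users.get("[email protected]")  # keys are lower-cased emails,
#                                             # per user_to_email() above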
def get_user_id(user: UserProfile) -> int:
return user.id
def user_ids_to_users(user_ids: Sequence[int], realm: Realm) -> List[UserProfile]:
# TODO: Consider adding a flag to control whether deactivated
# users should be included.
def fetch_users_by_id(user_ids: List[int]) -> List[UserProfile]:
return list(UserProfile.objects.filter(id__in=user_ids).select_related())
user_profiles_by_id: Dict[int, UserProfile] = bulk_cached_fetch(
cache_key_function=user
|
icyflame/batman
|
setup.py
|
Python
|
mit
| 7,976
| 0.000251
|
# -*- coding: utf-8 -*-
"""Installer script for Pywikibot 2.0 framework."""
#
# (C) Pywikibot team, 2009-2015
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
import itertools
import os
import sys
PYTHON_VERSION = sys.version_info[:3]
PY2 = (PYTHON_VERSION[0] == 2)
PY26 = (PYTHON_VERSION < (2, 7))
versions_required_message = """
Pywikibot not available on:
%s
Pywikibot is only supported under Python 2.6.5+, 2.7.2+ or 3.3+
"""
def python_is_supported():
"""Check that Python is supported."""
# Any change to this must be copied to pwb.py
return (PYTHON_VERSION >= (3, 3, 0) or
(PY2 and PYTHON_VERSION >= (2, 7, 2)) or
(PY26 and PYTHON_VERSION >= (2, 6, 5)))
if not python_is_supported():
raise RuntimeError(versions_required_message % sys.version)
test_deps = []
dependencies = ['requests']
# the irc module has not supported Python 2.6 since version 10.0
irc_dep = 'irc==8.9' if sys.version_info < (2, 7) else 'irc'
extra_deps = {
# Core library dependencies
'isbn': ['python-stdnum'],
'Graphviz': ['pydot>=1.0.28'],
'Google': ['google>=1.7'],
'IRC': [irc_dep],
'mwparserfromhell': ['mwparserfromhell>=0.3.3'],
'Tkinter': ['Pillow'],
# 0.6.1 supports socket.io 1.0, but WMF is using 0.9 (T91393 and T85716)
'rcstream': ['socketIO-client<0.6.1'],
'security': ['requests[security]'],
'mwoauth': ['mwoauth>=0.2.4'],
'html': ['BeautifulSoup4'],
}
if PY2:
# Additional core library dependencies which are only available on Python 2
extra_deps.update({
'csv': ['unicodecsv'],
'MySQL': ['oursql'],
'unicode7': ['unicodedata2>=7.0.0-2'],
})
script_deps = {
'flickrripper.py': ['Pillow'],
'states_redirect.py': ['pycountry'],
'weblinkchecker.py': ['memento_client>=0.5.1'],
}
# flickrapi 1.4.4 installs a root logger in verbose mode; 1.4.5 fixes this.
# The problem doesn't exist in flickrapi 2.x.
# pywikibot accepts flickrapi 1.4.5+ on Python 2, as it has been stable for a
# long time, and only depends on python-requests 1.x, whereas flickrapi 2.x
# depends on python-requests 2.x, which is first packaged in Ubuntu 14.04
# and will be first packaged for Fedora Core 21.
# flickrapi 1.4.x does not run on Python 3, and setuptools can only
# select flickrapi 2.x for Python 3 installs.
script_deps['flickrripper.py'].append(
'flickrapi>=1.4.5,<2' if PY26 else 'flickrapi')
# lunatic-python is only available for Linux
if sys.platform.startswith('linux'):
script_deps['script_wui.py'] = [irc_dep, 'lunatic-python', 'crontab']
# The main pywin32 repository contains a Python 2 only setup.py with a small
# wrapper setup3.py for Python 3.
# http://pywin32.hg.sourceforge.net:8000/hgroot/pywin32/pywin32
# The main pywinauto repository doesn't support Python 3.
# The repositories used below have a Python 3 compliant setup.py
dependency_links = [
'git+https://github.com/AlereDevices/lunatic-python.git#egg=lunatic-python',
'hg+https://bitbucket.org/TJG/pywin32#egg=pywin32',
'git+https://github.com/vasily-v-ryabov/pywinauto-64#egg=pywinauto',
'git+https://github.com/nlhepler/pydot#egg=pydot-1.0.29',
]
if PYTHON_VERSION < (2, 7, 3):
# work around distutils hardcoded unittest dependency
# work around T106512
import unittest # noqa
if 'test' in sys.argv:
import unittest2
sys.modules['unittest'] = unittest2
if sys.version_info[0] == 2:
if PY26:
        # requests' security extra includes pyOpenSSL. cryptography is a
        # dependency of pyOpenSSL. 0.8.2 is the newest version compatible
        # with Python 2.6, and it won't raise unexpected DeprecationWarnings.
extra_deps['security'].append('cryptography<=0.8.2')
script_deps['replicate_wiki.py'] = ['argparse']
dependencies.append('future>=0.15.0') # provides collections backports
dependencies += extra_deps['unicode7'] # T102461 workaround
# tools.ip does not have a hard dependency on an IP address module,
# as it falls back to using regexes if one is not available.
# The functional backport of py3 ipaddress is acceptable:
# https://pypi.python.org/pypi/ipaddress
# However the Debian package python-ipaddr is also supported:
# https://pypi.python.org/pypi/ipaddr
# Other backports are likely broken.
# ipaddr 2.1.10+ is distributed with Debian and Fedora. See T105443.
dependencies.append('ipaddr>=2.1.10')
if sys.version_info < (2, 7, 9):
# Python versions before 2.7.9 will cause urllib3 to trigger
# InsecurePlatformWarning warnings for all HTTPS requests. By
# installing with security extras, requests will automatically set
# them up and the warnings will stop. See
# <https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning>
# for more details.
dependencies += extra_deps['security']
script_deps['data_ingestion.py'] = extra_deps['csv']
# mwlib is not available for py3
script_deps['patrol'] = ['mwlib']
# Some of the ui_tests depend on accessing the console window's menu
# to set the console font and copy and paste, achieved using pywinauto
# which depends on pywin
|
32.
# These tests may be disabled because pywin32 depends on VC++, is time
# consuming to build, and the console window can't be accessed during AppVeyor
# builds.
# Microsoft makes available a compiler for Python 2.7
# http://www.microsoft.com/en-au/download/details.aspx?id=44266
# If you set up your own compiler for Python 3, on 3.3 two demo files
# packag
|
ed with pywin32 may fail. Remove com/win32com/demos/ie*.py
if os.name == 'nt' and os.environ.get('PYSETUP_TEST_NO_UI', '0') != '1':
# FIXME: tests/ui_tests.py suggests pywinauto 0.4.2
    # which isn't provided on PyPI.
test_deps += ['pywin32', 'pywinauto>=0.4.0']
extra_deps.update(script_deps)
# Add all dependencies as test dependencies,
# so all scripts can be compiled for script_tests, etc.
if 'PYSETUP_TEST_EXTRAS' in os.environ:
test_deps += list(itertools.chain(*(extra_deps.values())))
# mwlib requires 'pyparsing>=1.4.11,<1.6', which conflicts with
# pydot's requirement for pyparsing>=2.0.1.
if 'mwlib' in test_deps:
test_deps.remove('mwlib')
if 'oursql' in test_deps and os.name == 'nt':
test_deps.remove('oursql') # depends on Cython
if 'requests[security]' in test_deps:
# Bug T105767 on Python 2.7 release 9+
if sys.version_info[:2] == (2, 7) and sys.version_info[2] >= 9:
test_deps.remove('requests[security]')
# These extra dependencies are needed, otherwise unittest fails to load tests.
if sys.version_info[0] == 2:
test_deps += extra_deps['csv']
else:
test_deps += ['six']
from setuptools import setup, find_packages
name = 'pywikibot'
version = '2.0rc1.post2'
github_url = 'https://github.com/wikimedia/pywikibot-core'
setup(
name=name,
version=version,
description='Python MediaWiki Bot Framework',
long_description=open('README.rst').read(),
maintainer='The Pywikibot team',
maintainer_email='[email protected]',
license='MIT License',
packages=['pywikibot'] + [package
for package in find_packages()
if package.startswith('pywikibot.')],
install_requires=dependencies,
dependency_links=dependency_links,
extras_require=extra_deps,
url='https://www.mediawiki.org/wiki/Pywikibot',
test_suite="tests.collector",
tests_require=test_deps,
classifiers=[
'License :: OSI Approved :: MIT License',
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'Environment :: Console',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
],
use_2to3=False
)
|
devopservices/ansible
|
lib/ansible/cache/jsonfile.py
|
Python
|
gpl-3.0
| 4,163
| 0.004324
|
# (c) 2014, Brian Coca, Josh Drake, et al
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or
|
FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org
|
/licenses/>.
import os
import time
import errno
try:
import simplejson as json
except ImportError:
import json
from ansible import constants as C
from ansible import utils
from ansible.cache.base import BaseCacheModule
class CacheModule(BaseCacheModule):
"""
A caching module backed by json files.
"""
def __init__(self, *args, **kwargs):
self._timeout = float(C.CACHE_PLUGIN_TIMEOUT)
self._cache = {}
self._cache_dir = C.CACHE_PLUGIN_CONNECTION # expects a dir path
if not self._cache_dir:
utils.exit("error, fact_caching_connection is not set, cannot use fact cache")
if not os.path.exists(self._cache_dir):
try:
os.makedirs(self._cache_dir)
except (OSError,IOError), e:
utils.warning("error while trying to create cache dir %s : %s" % (self._cache_dir, str(e)))
return None
def get(self, key):
if key in self._cache:
return self._cache.get(key)
if self.has_expired(key):
raise KeyError
cachefile = "%s/%s" % (self._cache_dir, key)
try:
f = open( cachefile, 'r')
except (OSError,IOError), e:
utils.warning("error while trying to write to %s : %s" % (cachefile, str(e)))
else:
value = json.load(f)
self._cache[key] = value
return value
finally:
f.close()
def set(self, key, value):
self._cache[key] = value
cachefile = "%s/%s" % (self._cache_dir, key)
try:
f = open(cachefile, 'w')
except (OSError,IOError), e:
utils.warning("error while trying to read %s : %s" % (cachefile, str(e)))
else:
f.write(utils.jsonify(value))
finally:
f.close()
def has_expired(self, key):
cachefile = "%s/%s" % (self._cache_dir, key)
try:
st = os.stat(cachefile)
except (OSError,IOError), e:
if e.errno == errno.ENOENT:
return False
else:
utils.warning("error while trying to stat %s : %s" % (cachefile, str(e)))
if time.time() - st.st_mtime <= self._timeout:
return False
if key in self._cache:
del self._cache[key]
return True
def keys(self):
keys = []
for k in os.listdir(self._cache_dir):
if not (k.startswith('.') or self.has_expired(k)):
keys.append(k)
return keys
def contains(self, key):
if key in self._cache:
return True
if self.has_expired(key):
return False
        cachefile = "%s/%s" % (self._cache_dir, key)
        try:
            st = os.stat(cachefile)
            return True
        except (OSError,IOError), e:
            if e.errno == errno.ENOENT:
                return False
            else:
                utils.warning("error while trying to stat %s : %s" % (cachefile, str(e)))
def delete(self, key):
del self._cache[key]
try:
os.remove("%s/%s" % (self._cache_dir, key))
except (OSError,IOError), e:
pass #TODO: only pass on non existing?
def flush(self):
self._cache = {}
for key in self.keys():
self.delete(key)
def copy(self):
ret = dict()
for key in self.keys():
ret[key] = self.get(key)
return ret
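# A minimal standalone sketch (not part of Ansible) of the expiry rule that
# has_expired() implements above: an entry is stale once its file mtime lies
# more than CACHE_PLUGIN_TIMEOUT seconds in the past. The helper name is
# illustrative only.
def _demo_is_stale(mtime, now, timeout):
    # mirrors `time.time() - st.st_mtime <= self._timeout`, with the sense inverted
    return now - mtime > timeout

assert _demo_is_stale(mtime=0, now=100, timeout=30)
assert not _demo_is_stale(mtime=90, now=100, timeout=30)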
|
AASHE/iss
|
iss/management/commands/upsert_iss_domains.py
|
Python
|
mit
| 1,085
| 0
|
#!/usr/bin/env python
"""Upserts Domains from Salesforce Domain__c.
"""
import logging
import os
from django.core.management.base import BaseCommand
import iss.models
import iss.salesfo
|
rce
logger = logging.getLogger(os.path.basename(__file__))
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('-m', '--modified-within',
type=int,
metavar='n-days',
default=7,
help='upsert Domains modified within n-days')
def handle(self, *args, **options):
upse
|
rt_domains(options['modified_within'])
def upsert_domains(modified_since=7):
"""Upsert Domains for SF Domain__c modified in last `modified_since` days.
"""
logger.info('upserting domains modified in last {since} days'.
format(since=modified_since))
modified_domains = (iss.salesforce.Domain.get_domains_modified_since(
days_ago=modified_since))
for domain in modified_domains:
iss.models.Domain.upsert(domain)
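# Hypothetical direct invocation (equivalent to the management command above;
# the 30-day window is just an example value):
#
#     upsert_domains(modified_since=30)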
|
nabla-c0d3/sslyze
|
tests/plugins_tests/test_elliptic_curves_plugin.py
|
Python
|
agpl-3.0
| 2,148
| 0.005121
|
from sslyze import ServerNetworkLocation
from sslyze.plugins.elliptic_curves_plugin import (
SupportedEllipticCurvesScanResult,
SupportedEllipticCurvesImplementation,
)
from tests.connectivity_utils import check_connectivity_to_server_and_return_info
from tests.markers import can_only_run_on_linux_64
from tests.openssl_ser
|
ver i
|
mport ModernOpenSslServer
class TestEllipticCurvesPluginWithOnlineServer:
def test_supported_curves(self):
# Given a server to scan that supports ECDH cipher suites
server_location = ServerNetworkLocation("www.cloudflare.com", 443)
server_info = check_connectivity_to_server_and_return_info(server_location)
# When scanning for supported elliptic curves, it succeeds
result: SupportedEllipticCurvesScanResult = SupportedEllipticCurvesImplementation.scan_server(server_info)
# And the result confirms that some curves are supported and some are not
assert result.supports_ecdh_key_exchange
assert result.supported_curves
assert result.rejected_curves
# And a CLI output can be generated
assert SupportedEllipticCurvesImplementation.cli_connector_cls.result_to_console_output(result)
@can_only_run_on_linux_64
class TestEllipticCurvesPluginWithLocalServer:
def test_supported_curves(self):
# Given a server to scan that supports ECDH cipher suites with specific curves
server_curves = ["X25519", "X448", "prime256v1", "secp384r1", "secp521r1"]
with ModernOpenSslServer(groups=":".join(server_curves)) as server:
server_location = ServerNetworkLocation(
hostname=server.hostname, ip_address=server.ip_address, port=server.port
)
server_info = check_connectivity_to_server_and_return_info(server_location)
# When scanning the server for supported curves, it succeeds
result: SupportedEllipticCurvesScanResult = SupportedEllipticCurvesImplementation.scan_server(server_info)
# And the supported curves were detected
assert set(server_curves) == {curve.name for curve in result.supported_curves}
|
ravello/testmill
|
lib/testmill/command_run.py
|
Python
|
apache-2.0
| 4,718
| 0.000848
|
# Copyright 2012-2013 Ravello Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, print_f
|
unction
import argparse
import textwrap
from testmill import (console, manifest, keypair, login, error,
application, tasks, util, inflect)
from testmill.state import env
usage = textwrap.dedent("""\
usage: ravtest [OPTION]... run [-i] [-c] [--new] [--vms <vmlist>]
[--dry-run] <application> [<command>]
ravtest run --help
""")
description = textwrap.dedent("""\
Run automated tasks in a Ravello ap
|
plication.
The application defined by <application> is loaded from the manifest
(.ravello.yml). It is then created if it doesn't exist yet, and the
runbook defined in the manifest is run.
If --new is specified, a new application instance is always created,
even if one exists already.
The available options are:
-i, --interactive
Run in interactive mode. All tasks are run directly
connected to the console. In case of multiple virtual
machines, output will be interleaved and may be hard
to understand.
-c, --continue
Continue running even after an error.
--new
Never re-use existing applications.
--vms <vmlist>
Execute tasks only on these virtual machines, instead of on
all virtual machines in the application. <vmlist> is a
comma-separated list of VMs.
--dry-run
Do not execute any tasks. Useful for starting up an
application without doing anything yet.
""")
def add_args(parser):
parser.usage = usage
parser.description = description
parser.add_argument('-i', '--interactive', action='store_true')
parser.add_argument('-c', '--continue', action='store_true',
dest='continue_')
parser.add_argument('--new', action='store_true')
parser.add_argument('--vms')
parser.add_argument('--dry-run', action='store_true')
parser.add_argument('application')
parser.add_argument('command', nargs='?')
def do_run(args, env):
"""The "ravello run" command."""
login.default_login()
keypair.default_keypair()
manif = manifest.default_manifest()
appname = args.application
for appdef in manif.get('applications', []):
if appdef['name'] == appname:
break
else:
error.raise_error("Unknown application `{0}`.", appname)
vms = set((vm['name'] for vm in appdef.get('vms', [])))
if args.vms:
only = set((name for name in args.vms.split(',')))
if not only <= vms:
unknown = [name for name in only if name not in vms]
what = inflect.plural_noun('virtual machine', len(unknown))
error.raise_error("Unknown {0}: {1}", ', '.join(unknown), what)
vms = [name for name in vms if name in only]
if not vms:
error.raise_error('No virtual machines in application.')
app = application.create_or_reuse_application(appdef, args.new)
app = application.wait_for_application(app, vms)
if args.command:
for vm in appdef['vms']:
for task in vm['tasks']:
if task['name'] == 'execute':
task['commands'] = [args.command]
elif args.dry_run:
for vm in appdef['vms']:
vm['tasks'] = []
ret = tasks.run_all_tasks(app, vms)
console.info('\n== The following services will be available for {0} '
'minutes:\n', appdef['keepalive'])
for vm in app['vms']:
if vm['name'] not in vms:
continue
svcs = vm.get('suppliedServices')
if not svcs:
continue
console.info('On virtual machine `{0}`:', vm['name'])
for svc in svcs:
svc = svc['baseService']
addr = util.format_service(vm, svc)
console.info(' * {0}: {1}', svc['name'], addr)
console.info('')
return error.EX_OK if ret == 0 else error.EX_SOFTWARE
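# Hypothetical command line grounded in the usage text above (the application
# name, VM names and command are placeholders):
#
#     ravtest run --vms web,db myapp "make test"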
|
kiniou/qtile
|
libqtile/state.py
|
Python
|
mit
| 2,415
| 0
|
# Copyright (c) 2012, Tycho Andersen. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
class QtileState(object):
"""
Represents the state of the qtile object. Primarily used for restoring
state across restarts; any additional state which doesn't fit nicely
into X atoms can go here.
"""
def __init__(self, qtile):
# Note: window state is saved and restored via _NET_WM_STATE, so
# the only thing we need to restore here is the layout and screen
# configurations.
self.groups = {}
self.screens = {}
for group in qtile.groups:
self.groups[group.name] = group.layout.name
for index, screen in enumerate(qtile.screens):
self.screens[index] = screen.group.name
def apply(
|
self, qtile):
"""
Rearrange the windows in the specified Qtile object according to
|
this QtileState.
"""
for (group, layout) in self.groups.items():
try:
qtile.groupMap[group].layout = layout
except KeyError:
pass # group missing
for (screen, group) in self.screens.items():
try:
group = qtile.groupMap[group]
qtile.screens[screen].setGroup(group)
except (KeyError, IndexError):
pass # group or screen missing
|
ZmG/trywsk
|
base/admin.py
|
Python
|
apache-2.0
| 1,654
| 0.009069
|
__author__ = 'thatcher'
from django.contrib import admin
# from django.contrib.auth.models import User
# from django.contrib.auth.admin import UserAdmin
# from django.contrib.sessions.
from django.contrib.sessions.models import Session
from .models import *
from base.forms import *
def images_thumbnail(self):
return '<img style="max-height: 80px; width: auto;" src="{}" alt="{}" >'.format(self.uri(
|
), self.alt)
# return self.uri()
images_thumbnail.short_description = 'Thumbnail'
images_thumbnail.allow_tags = True
class TeamMemberAdmin(admin.ModelAdmin):
model = TeamMember
list_display = ['full_name', 'sort_weight', 'show_as_team']
admin.site.register(TeamMember, TeamMemberAdmin)
class NewsItemAdmin(admin.ModelAdmin):
model = NewsItem
list_display = ['id', 'title', 'publication_date', 'show',
|
'author']
admin.site.register(NewsItem, NewsItemAdmin)
class EventAdmin(admin.ModelAdmin):
model = Event
list_display = ['title', 'location', 'date_and_time']
admin.site.register(Event, EventAdmin)
class PostAdmin(admin.ModelAdmin):
model = GenericPost
list_display = ['title', 'category', 'publication_date']
admin.site.register(GenericPost, PostAdmin)
class CategoryAdmin(admin.ModelAdmin):
model = PostCategory
list_display = ['name', 'added_date']
admin.site.register(PostCategory, CategoryAdmin)
class ImageAdmin(admin.ModelAdmin):
model = Image
    list_display = [images_thumbnail, 'alt', 'image_caption', 'image', ]
admin.site.register(Image, ImageAdmin)
class TagAdmin(admin.ModelAdmin):
model = Tag
list_display = ['name', 'added_date']
admin.site.register(Tag, TagAdmin)
|
SamuelDSR/YouCompleteMe-Win7-GVIM
|
third_party/jedi/jedi/imports.py
|
Python
|
gpl-3.0
| 15,994
| 0.000813
|
"""
:mod:`imports` is here to resolve import statements and return the
modules/classes/functions/whatever, which they stand for. However there's not
any actual importing done. This module is about finding modules in the
filesystem. This can be quite tricky sometimes, because Python imports are not
always that simple.
This module uses imp for python up to 3.2 and importlib for python 3.3 on; the
correct implementation is delegated to _compatibility.
This module also supports import autocompletion, which means to complete
statements like ``from datetim`` (cursor at the end would return ``datetime``).
"""
from __future__ import with_statement
import os
import pkgutil
import sys
import itertools
from jedi._compatibility import find_module
from jedi import modules
from jedi import common
from jedi import debug
from jedi.parser import representation as pr
from jedi import cache
import builtin
import evaluate
# for debugging purposes only
imports_processed = 0
class ModuleNotFound(Exception):
pass
class ImportPath(pr.Base):
"""
An ImportPath is the path of a `pr.Import` object.
"""
class GlobalNamespace(object):
def __init__(self):
self.line_offset = 0
GlobalNamespace = GlobalNamespace()
def __init__(self, import_stmt, is_like_search=False, kill_count=0,
direct_resolve=False, is_just_from=False):
self.import_stmt = import_stmt
self.is_like_search = is_like_search
self.direct_resolve = direct_resolve
self.is_just_from = is_just_from
self.is_partial_import = bool(max(0, kill_count))
path = import_stmt.get_parent_until().path
self.file_path = os.path.dirname(path) if path is not None else None
# rest is import_path resolution
self.import_path = []
if import_stmt.from_ns:
self.import_path += import_stmt.from_ns.names
if import_stmt.namespace:
if self._is_nested_import() and not direct_resolve:
self.import_path.append(import_stmt.namespace.names[0])
else:
self.import_path += import_stmt.namespace.names
for i in range(kill_count + int(is_like_search)):
self.import_path.pop()
def __repr__(self):
return '<%s: %s>' % (type(self).__name__, self.import_stmt)
def _is_nested_import(self):
"""
This checks for the special case of nested imports, without aliases and
from statement::
import foo.bar
"""
return not self.import_stmt.alias and not self.import_stmt.from_ns \
and len(self.import_stmt.namespace.names) > 1 \
and not self.direct_resolve
def _get_nested_import(self, parent):
"""
See documentation of `self._is_nested_import`.
Generates an Import statement, that can be used to fake nested imports.
"""
i = self.import_stmt
# This is not an existing Import statement. Therefore, set position to
# 0 (0 is not a valid line number).
zero = (0, 0)
names = i.namespace.names[1:]
n = pr.Name(i._sub_module, names, zero, zero, self.import_stmt)
new = pr.Import(i._sub_module, zero, zero, n)
new.parent = parent
debug.dbg('Generated a nested import: %s' % new)
return new
def get_defined_names(self, on_import_stmt=False):
names = []
for scope in self.follow():
if scope is ImportPath.GlobalNamespace:
if self._is_relative_import() == 0:
names += self._get_module_names()
if self.file_path is not None:
path = os.path.abspath(self.file_path)
for i in range(self.import_stmt.relative_count - 1):
path = os.path.dirname(path)
names += self._get_module_names([path])
if self._is_relative_import():
rel_path = self._get_relative_path() + '/__init__.py'
with common.ignored(IOError):
m = modules.Module(rel_path)
names += m.parser.module.get_defined_names()
else:
if on_import_stmt and isinstance(scope, pr.Module) \
and scope.path.endswith('__init__.py'):
pkg_path = os.path.dirname(scope.path)
paths = self._namespace_packages(pkg_path, self.import_path)
names += self._get_module_names([pkg_path] + paths)
if self.is_just_from:
# In the case of an import like `from x.` we don't need to
# add all the variables.
if ['os'] == self.import_path and not self._is_relative_import():
|
# os.path is a hardcoded exception, because it's a
|
# ``sys.modules`` modification.
p = (0, 0)
names.append(pr.Name(self.GlobalNamespace, [('path', p)],
p, p, self.import_stmt))
continue
for s, scope_names in evaluate.get_names_of_scope(scope,
include_builtin=False):
for n in scope_names:
if self.import_stmt.from_ns is None \
or self.is_partial_import:
# from_ns must be defined to access module
# values plus a partial import means that there
# is something after the import, which
# automatically implies that there must not be
# any non-module scope.
continue
names.append(n)
return names
def _get_module_names(self, search_path=None):
"""
Get the names of all modules in the search_path. This means file names
and not names defined in the files.
"""
def generate_name(name):
return pr.Name(self.GlobalNamespace, [(name, inf_pos)],
inf_pos, inf_pos, self.import_stmt)
names = []
inf_pos = float('inf'), float('inf')
# add builtin module names
if search_path is None:
names += [generate_name(name) for name in sys.builtin_module_names]
if search_path is None:
search_path = self._sys_path_with_modifications()
for module_loader, name, is_pkg in pkgutil.iter_modules(search_path):
names.append(generate_name(name))
return names
def _sys_path_with_modifications(self):
# If you edit e.g. gunicorn, there will be imports like this:
# `from gunicorn import something`. But gunicorn is not in the
# sys.path. Therefore look if gunicorn is a parent directory, #56.
in_path = []
if self.import_path:
parts = self.file_path.split(os.path.sep)
for i, p in enumerate(parts):
if p == self.import_path[0]:
new = os.path.sep.join(parts[:i])
in_path.append(new)
module = self.import_stmt.get_parent_until()
return in_path + modules.sys_path_with_modifications(module)
def follow(self, is_goto=False):
"""
Returns the imported modules.
"""
if evaluate.follow_statement.push_stmt(self.import_stmt):
# check recursion
return []
if self.import_path:
try:
scope, rest = self._follow_file_system()
except ModuleNotFound:
debug.warning('Module not found: ' + str(self.import_stmt))
evaluate.follow_statement.pop_stmt()
return []
scopes = [scope]
scopes += remove_star_imports(scope)
# follow the rest of the import (not FS -> classes, functions)
if len(rest) > 1 or rest and self.is_like_s
|
keon/algorithms
|
algorithms/queues/moving_average.py
|
Python
|
mit
| 749
| 0
|
f
|
rom __future__ import division
from collections import deque
class MovingAverage(object):
def __init__(self, size):
"""
Initialize your data structure here.
:type size: int
"""
self.que
|
ue = deque(maxlen=size)
def next(self, val):
"""
:type val: int
:rtype: float
"""
self.queue.append(val)
return sum(self.queue) / len(self.queue)
# Given a stream of integers and a window size,
# calculate the moving average of all integers in the sliding window.
if __name__ == '__main__':
m = MovingAverage(3)
assert m.next(1) == 1
assert m.next(10) == (1 + 10) / 2
assert m.next(3) == (1 + 10 + 3) / 3
assert m.next(5) == (10 + 3 + 5) / 3
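    # A hedged variant (not in the original file): keeping a running sum makes
    # next() O(1) instead of re-summing the whole window on every call.
    class MovingAverageRunningSum(object):
        def __init__(self, size):
            self.queue = deque(maxlen=size)
            self.total = 0.0

        def next(self, val):
            if len(self.queue) == self.queue.maxlen:
                self.total -= self.queue[0]  # value about to be evicted
            self.queue.append(val)
            self.total += val
            return self.total / len(self.queue)

    m2 = MovingAverageRunningSum(3)
    assert m2.next(1) == 1
    assert m2.next(10) == (1 + 10) / 2
    assert m2.next(3) == (1 + 10 + 3) / 3
    assert m2.next(5) == (10 + 3 + 5) / 3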
|
mtoshi/rancidcmd
|
setup.py
|
Python
|
mit
| 2,154
| 0
|
# -*- coding: utf-8 -*-
"""Racndicmd setup.py."""
|
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import os
import sys
class Tox(TestCommand):
"""Tox."""
user_options = [('tox-args=', 'a', "Arguments to pass to tox")]
def initialize_options(self):
"""Init."""
TestCommand.initialize_options(self)
self.tox_args = None
def finalize_options(self
|
):
"""Finalize."""
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
"""Run."""
import tox
import shlex
if self.tox_args:
errno = tox.cmdline(args=shlex.split(self.tox_args))
else:
errno = tox.cmdline(self.tox_args)
sys.exit(errno)
classifiers = [
"Development Status :: 3 - Alpha",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: System :: Networking",
"Topic :: System :: Networking :: Monitoring",
"Topic :: Utilities",
]
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.rst')) as _file:
README = _file.read()
requires = []
with open('requirements.txt', 'w') as _file:
_file.write('\n'.join(requires))
EXCLUDE_FROM_PACKAGES = []
setup(
name="rancidcmd",
version="0.1.12",
description='RANCID Command Wrapper.',
long_description=README,
author='Toshikatsu Murakoshi',
author_email='[email protected]',
url='https://github.com/mtoshi/rancidcmd',
license='MIT',
classifiers=classifiers,
packages=find_packages(exclude=EXCLUDE_FROM_PACKAGES),
py_modules=['rancidcmd'],
install_requires=requires,
include_package_data=True,
tests_require=['tox'],
cmdclass={'test': Tox},
)
|
rahmonov/agile-crm-python
|
agilecrm/client.py
|
Python
|
mit
| 454
| 0
|
from .company import Co
|
mpany
from .contact import Contact
from .deal import Deal
from .note import Note
from .requester import Requester
class AgileCRM:
def __init__(self, domain, email, api_key):
requester = Requester(domain, email, api_key)
self.contact = Contact(requester=requester)
self.company = Company(requester=requester)
self.deal = Deal(requester=requester)
self.note = No
|
te(requester=requester)
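# Hypothetical usage sketch; the domain, email and API key below are
# placeholders, not real credentials:
#
#     crm = AgileCRM('example', '[email protected]', 'API_KEY')
#     # crm.contact, crm.company, crm.deal and crm.note all share the same
#     # authenticated Requester instance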
|
jtraver/dev
|
python3/matplotlib/plot1.py
|
Python
|
mit
| 220
| 0.004545
|
#!/usr/bin/env py
|
thon3
#!/usr/bin/python
# https://en.wikipedia.org/wiki/Matplotlib
import numpy
import matplotlib.pypl
|
ot as plt
from numpy.random import rand
a = rand(100)
b = rand(100)
plt.scatter(a, b)
plt.show()
|
woozzu/tf_tutorials
|
03_MLP_spiral2D.py
|
Python
|
mit
| 3,767
| 0.009557
|
'''
A MLP algorithm example using TensorFlow library.
This example uses a randomly generated spiral distribution
(http://cs231n.github.io/neural-networks-case-study/)
Code references:
https://github.com/shouvikmani/Tensorflow-Deep-Learning-Tutorial/blob/master/tutorial.i
|
pynb
https://github.com/aymericdamien/TensorFlow-Examples/
http://cs231n.github.io/neural-networks-case-study/
The source code was modified by S.W. Oh.
'''
from __future__ import print_function
import tensorflow as tf
import numpy as np
from matplotlib import pyplot as plt
# import Dense (fully-conne
|
cted) layer
from util.layer import Dense
###### Generate 2D spiral random data and Plot ###################################
N = 200 # number of points per class
D = 2 # dimensionality
K = 4 # number of classes
X_train = np.zeros((N*K,D)) # data matrix (each row = single example)
y_train = np.zeros((N*K,K)) # class labels
yc = np.zeros(N*K, dtype='uint8')
for j in range(K):
ix = range(N*j,N*(j+1))
r = np.linspace(0.0,1,N) # radius
t = np.linspace(j*4.8,(j+1)*4.8,N) + np.random.randn(N)*0.2 # theta
X_train[ix] = np.c_[r*np.sin(t), r*np.cos(t)]
y_train[ix,j] = 1
yc[ix] = j
# lets visualize the data:
plt.scatter(X_train[:, 0], X_train[:, 1], c=yc, s=40, cmap=plt.cm.Spectral)
plt.show()
# Random shuffle
perm = np.random.permutation(len(y_train))
X_train = X_train[perm,:]
y_train = y_train[perm,:]
yc = yc[perm]
# Parameters
learning_rate = 0.01
training_epochs = 500
batch_size = 10
display_step = 1
###### Build graph ######################################################
# Place holders
x = tf.placeholder(tf.float32, [None, 2]) # 2 dimensional input
y = tf.placeholder(tf.float32, [None, 4]) # 4 classes
# Construct MLP with two hidden layer
h = Dense(x, [2,64], 'ih')
h = tf.nn.relu(h)
h = Dense(h, [64,64], 'hh')
h = tf.nn.relu(h)
logit = Dense(h, [64,4], 'hl')
pred = tf.nn.softmax(logit) # Softmax
# Directly compute loss from logit (to ensure stability and avoid overflow)
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=logit, labels=y))
# Define optimizer and train_op
train_op = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)
###### Start Training ###################################################
# Open a Session
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
# Training cycle
for epoch in range(training_epochs):
avg_cost = 0.
total_batch = int(len(y_train)/batch_size)
# Loop over all batches
for i in range(total_batch):
batch_xs = X_train[i:i+batch_size,:]
batch_ys = y_train[i:i+batch_size,:]
# Run optimization op (backprop) and cost op (to get loss value)
_, c = sess.run([train_op, cost], feed_dict={x: batch_xs, y: batch_ys})
# Compute average loss
avg_cost += c / total_batch
# Display logs per epoch step
if (epoch+1) % display_step == 0:
print("Epoch:", '%04d' % (epoch+1), "cost=", "{:.9f}".format(avg_cost))
print("Optimization Finished!")
    # Visualize decision boundary
h = 0.02
x_min, x_max = X_train[:, 0].min() - 1, X_train[:, 0].max() + 1
y_min, y_max = X_train[:, 1].min() - 1, X_train[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
np.arange(y_min, y_max, h))
Z = sess.run(pred, feed_dict={x: np.c_[xx.ravel(), yy.ravel()]})
Z = np.argmax(Z, axis=1)
Z = Z.reshape(xx.shape)
fig = plt.figure()
plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral, alpha=0.8)
plt.scatter(X_train[:, 0], X_train[:, 1], c=yc, s=40, cmap=plt.cm.Spectral)
plt.xlim(xx.min(), xx.max())
plt.ylim(yy.min(), yy.max())
plt.show()
|
PaloAltoNetworks/minemeld-core
|
tests/traced_mock.py
|
Python
|
apache-2.0
| 5,455
| 0.0033
|
# Copyright 2016 Palo Alto Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module implements mock classes for minemeld.traced tests
"""
import gevent
import gevent.event
import logging
from minemeld.traced.storage import TableNotFound
LOG = logging.getLogger(__name__)
CLOCK = -1
def _get_clock():
global CLOCK
CLOCK += 1
return CLOCK
MOCK_TABLES = []
class MockTable(object):
def __init__(self, name, create_if_missing=True):
self.name = name
self.create_if_missing = create_if_missing
self.last_used = None
self.refs = []
self.db_open = True
self.db = {}
self.max_counter = -1
def add_reference(self, refid):
self.refs.append(refid)
def remove_reference(self, refid):
try:
self.refs.remove(refid)
except ValueError:
pass
def ref_count(self):
return len(self.refs)
def put(self, key, value):
self.last_used = _get_clock()
self.max_counter += 1
new_max_counter = '%016x' % self.max_counter
self.db[key+new_max_counter] = value
def backwards_iterator(self, timestamp, counter):
starting_key = '%016x
|
%016x' % (timestamp, counter)
items = [[k, v] for k, v in self.db.iteritems() if k <= starting_key]
items = sorted(items, cmp=lambda x, y: cmp(x[0], y[0]), reverse=True)
|
return items
def close(self):
self.db_open = False
@staticmethod
def oldest_table():
tables = [t.name for t in MOCK_TABLES]
LOG.debug(tables)
if len(tables) == 0:
return None
return sorted(tables)[0]
def table_factory(name, create_if_missing=True):
table = next((t for t in MOCK_TABLES if t.name == name), None)
if table is not None:
return table
if not create_if_missing:
raise TableNotFound()
mt = MockTable(name, create_if_missing=create_if_missing)
MOCK_TABLES.append(mt)
return mt
def table_cleanup():
global MOCK_TABLES
MOCK_TABLES = []
class MockStore(object):
def __init__(self, config=None):
if config is None:
config = {}
self.config = config
self.writes = []
self.db = {}
self.counter = 0
self.release_alls = []
def write(self, timestamp, log):
self.writes.append({
'timestamp': timestamp,
'log': log
})
self.db['%016x%016x' % (timestamp, self.counter)] = log
self.counter += 1
def iterate_backwards(self, ref, timestamp, counter):
starting_key = '%016x%016x' % (timestamp, counter)
items = [[k, v] for k, v in self.db.iteritems() if k <= starting_key]
items = sorted(items, cmp=lambda x, y: cmp(x[0], y[0]), reverse=True)
for c, i in enumerate(items):
if c % 1 == 0:
yield {'msg': 'test message'}
yield {'timestamp': i[0], 'log': i[1]}
def release_all(self, ref):
self.release_alls.append(ref)
def store_factory(config=None):
return MockStore(config=config)
MOCK_QUERIES = []
class MockQuery(gevent.Greenlet):
def __init__(self, store, query, timestamp, counter,
num_lines, uuid, redis_config):
self.store = store
self.query = query
self.timestamp = timestamp
self.counter = counter
self.num_lines = num_lines
self.uuid = uuid
self.redis_config = redis_config
self.finish_event = gevent.event.Event()
super(MockQuery, self).__init__()
def kill(self):
LOG.debug("%s killed", self.uuid)
super(MockQuery, self).kill()
def _run(self):
LOG.debug("%s started", self.uuid)
self.finish_event.wait()
LOG.debug("%s finished", self.uuid)
class MockEQuery(gevent.Greenlet):
def __init__(self, store, query, timestamp, counter,
num_lines, uuid, redis_config):
self.store = store
self.query = query
self.timestamp = timestamp
self.counter = counter
self.num_lines = num_lines
self.uuid = uuid
self.redis_config = redis_config
self.finish_event = gevent.event.Event()
super(MockEQuery, self).__init__()
def kill(self):
LOG.debug("%s killed", self.uuid)
super(MockEQuery, self).kill()
def _run(self):
LOG.debug("%s started", self.uuid)
self.finish_event.wait()
raise RuntimeError("BAD BAD QUERY!")
def query_factory(store, query, timestamp, counter,
num_lines, uuid, redis_config):
if query == "bad":
mqf = MockEQuery
else:
mqf = MockQuery
mq = mqf(store, query, timestamp, counter,
num_lines, uuid, redis_config)
MOCK_QUERIES.append(mq)
return mq
def query_cleanup():
global MOCK_QUERIES
MOCK_QUERIES = []
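# Illustrative check (not part of the original mock module): table_factory()
# returns the same MockTable instance for a repeated name, and raises
# TableNotFound when asked not to create a missing table.
#
#     t1 = table_factory('demo')
#     t2 = table_factory('demo')
#     assert t1 is t2
#     table_factory('missing', create_if_missing=False)  # raises TableNotFound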
|
TheAnosmic/cheetahs_byte
|
tests/test_compile/test_jmp_add.py
|
Python
|
gpl-2.0
| 3,283
| 0
|
from unittest import TestCase
from compile import add_jmp_opcodes, break_to_atoms
from compile.jmp_add import travel, shuffle
from opcode_ import PRT
class TestJMPAdd(TestCase):
def test_added_init_jmp(self):
node_chain = PRT.build_from_string('u', None)
atoms = break_to_atoms(node_chain)
atoms = add_jmp_opcodes(atoms)
self.assertEqual(len(atoms), 2)
self.assertEqual(len(atoms[0]), 2)
self.assertEqual(len(atoms[1]), 2)
    def test_nothing_happened_on_one_and_no_jmp_init(self):
atom = PRT.build_from_string('i', None)
atoms = break_to_atoms(atom)
atoms = add_jmp_opcodes(
atoms,
first_step_is_jmp=False)
self.assertEqual(atoms[0][0], atom[0])
self.assertEqual(atoms[0][1], atom[1])
self.assertEqual(len(atoms), 1)
self.assertEqual(len(atoms[0]), 2)
def test_first_jmp_points_to_first_node(self):
atom = PRT.build_from_string('o', None)
first_node = atom[0]
atoms = break_to_atoms(atom)
atoms = add_jmp_opcodes(atoms)
self.assertEqual(atoms[0][0].target_uuid,
first_node.uuid)
def test_reach_to_end(self):
node_chain = PRT.build_from_string('T', None) + \
PRT.build_from_string('h', None) + \
PRT.build_from_string('e', None) + \
PRT.build_from_string('A', None) + \
PRT.build_from_string('n', None) + \
PRT.build_from_string('o', None) + \
PRT.build_from_string('s', None) + \
PRT
|
.build_from_string('m', None) + \
PRT.build_from_string('i', None) + \
PRT.build_from_string('c', None)
last = node_chain[-1]
atoms = break_to_atoms(node_chain)
atoms = add_jmp_opcodes(atoms)
atom = atoms[0]
for _ in range(len(node_chain) - 1):
next_atom = travel(atoms, atom)
if next_atom:
|
atom = next_atom
else:
self.fail("Chain ended too soon")
self.assertIn(last, atom)
def test_reach_to_end_with_shuffle(self):
# TODO why some are NC of NC and some NC of NODs?
node_chain = PRT.build_from_string('T', None) + \
PRT.build_from_string('h', None) + \
PRT.build_from_string('e', None) + \
PRT.build_from_string('A', None) + \
PRT.build_from_string('n', None) + \
PRT.build_from_string('o', None) + \
PRT.build_from_string('s', None) + \
PRT.build_from_string('m', None) + \
PRT.build_from_string('i', None) + \
PRT.build_from_string('c', None)
last = node_chain[-1]
atoms = break_to_atoms(node_chain)
atoms = add_jmp_opcodes(atoms)
atoms = shuffle(atoms)
atom = atoms[0]
for _ in range(len(node_chain) - 1):
next_atom = travel(atoms, atom)
if next_atom:
atom = next_atom
else:
self.fail("Chain ended too soon")
self.assertIn(last, atom)
|
bunnydev26/django_newboston
|
music/admin.py
|
Python
|
gpl-3.0
| 149
| 0
|
from django.contrib import admin
from .models impo
|
rt Album, Song
# Register your models here.
|
admin.site.register(Album)
admin.site.register(Song)
|
marshallmcdonnell/interactive_plotting
|
Traits/manual/testing_hasstricttraits.py
|
Python
|
mit
| 547
| 0.005484
|
#!/usr/bin/env python
from traits.api import HasStrictTraits, Float
from mock import Mock
class MyClass(HasStrictTraits):
number = Float(2.0)
def add_to_number(self, value):
""" Add th
|
e value to `number`. """
self.number += value
my_class = MyClass()
|
# Using my_class.add_to_number = Mock() will fail.
# But setting the mock on the instance `__dict__` works.
my_class.__dict__['add_to_number'] = Mock()
# We can now use the mock in our tests.
my_class.add_to_number(42)
print my_class.add_to_number.call_args_list
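# Hedged illustration (not in the original script) of why the direct
# assignment fails: HasStrictTraits rejects attributes that were never
# declared as traits, so assignments like these fail with a TraitError:
#
#     my_class.undeclared = 1
#     my_class.add_to_number = Mock()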
|
russellb/gerrymander
|
gerrymander/model.py
|
Python
|
apache-2.0
| 15,825
| 0.000569
|
#
# Copyright (C) 2014 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
class ModelBase(object):
pass
class ModelUser(ModelBase):
def __init__(self, name, email=None, username=None):
self.name = name
self.email = email
self.username = username
def is_in_list(self, users):
if self.name is not None and self.name in users:
return True
if self.username is not None and self.username in users:
return True
return False
@staticmethod
def from_json(data):
return ModelUser(data.get("name", None),
data.get("email", None),
data.get("username", None))
class ModelFile(ModelBase):
ACTION_MODIFIED = "MODIFIED"
ACTION_ADDED = "ADDED"
ACTION_DELETED = "DELETED"
ACTION_RENAMED = "RENAMED"
def __init__(self, path, action):
self.path = path
self.action = action
@staticmethod
def from_json(data):
return ModelFile(data.get("file", None),
data.get("type", None))
class ModelApproval(ModelBase):
ACTION_VERIFIED = "Verified"
ACTION_REVIEWED = "Code-Review"
ACTION_WORKFLOW = "Workflow"
def __init__(self, action, value, description, grantedOn=None, user=None):
self.action = action
self.value = value
self.description = description
if grantedOn is not None:
self.grantedOn = int(grantedOn)
else:
self.grantedOn = None
self.user = user
def is_user_in_list(self, users):
if self.user is None:
return False
return self.user.is_in_list(users)
def is_newer_than(self, then):
if self.grantedOn is None:
return False
if self.grantedOn > then:
return True
return False
def is_nack(self):
if self.value < 0:
return True
return False
def is_reviewer_nack(self):
if self.action != ModelApproval.ACTION_REVIEWED:
return False
if self.value < 0:
return True
return False
def get_age(self, now=None):
if now is None:
now = time.time()
return now - self.grantedOn
@staticmethod
def from_json(data):
user = None
if data.get("by", None):
user = ModelUser.from_json(data["by"])
return ModelApproval(data.get("type", None),
int(data.get("value", 0)),
data.get("description", None),
data.get("grantedOn", None),
user)
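# Illustrative round-trip (the sample JSON below is invented for this sketch):
#
#     approval = ModelApproval.from_json({
#         "type": "Code-Review", "value": "-1",
#         "description": "Code Review", "grantedOn": "1400000000",
#         "by": {"name": "alice"}})
#     approval.is_nack()           # -> True
#     approval.is_reviewer_nack()  # -> True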
class ModelComment(ModelBase):
def __init__(self, message, file, line, reviewer):
self.message = message
self.file = file
self.line = line
self.reviewer = reviewer
def is_reviewer_in_list(self, users):
if self.reviewer is None:
return False
return self.reviewer.is_in_list(users)
@staticmethod
def from_json(data):
user = None
if data.get("reviewer", None):
user = ModelUser.from_json(data["reviewer"])
return ModelComment(data.get("message", ""),
data.get("file", None),
data.get("line", 0),
user)
class ModelPatch(ModelBase):
def __init__(self, number, revision, ref, uploader, createdOn, approvals=[], files=[], comments=[]):
self.number = number
self.revision = revision
self.ref = ref
self.uploader = uploader
self.createdOn = createdOn
self.approvals = approvals
self.files = files
self.comments = comments
@staticmethod
def is_user_in_list(users, user):
if user.username is not None and user.username in users:
return True
if user.email is not None and user.email in users:
return True
return False
def is_reviewer_nacked(self):
for approval in self.approvals:
if approval.is_reviewer_nack():
return True
return False
def is_nacked(self):
for appro
|
val in self.approvals:
if approval.is_nack():
return True
return False
def get_age(self, now):
if len(self.approvals) == 0:
return now - self.createdOn
age = 0
for approval in self.approvals:
thisage = now - approval.grantedOn
if thisage > age:
age = thisage
return age
def has_other_reviewers(self, excludeusers):
'''Determine if the
|
patch has been reviewed by any
users that are not in 'excludeusers'''
hasReviewers = False
for approval in self.approvals:
if not approval.is_user_in_list(excludeusers):
hasReviewers = True
return hasReviewers
def has_reviewers(self, includeusers):
'''Determine if the patch has been reviewed by any
users that are in 'includeusers'''
hasReviewers = False
for approval in self.approvals:
if approval.user is None:
continue
if approval.is_user_in_list(includeusers):
hasReviewers = True
return hasReviewers
@staticmethod
def from_json(data):
files = []
for f in data.get("files", []):
files.append(ModelFile.from_json(f))
approvals = []
for f in data.get("approvals", []):
approvals.append(ModelApproval.from_json(f))
user = None
if "uploader" in data:
user = ModelUser.from_json(data["uploader"])
comments = []
for c in data.get("comments", []):
comments.append(ModelComment.from_json(c))
return ModelPatch(int(data.get("number", 0)),
data.get("revision"),
data.get("ref"),
user,
data.get("createdOn"),
approvals,
files,
comments)
class ModelChange(ModelBase):
def __init__(self, project, branch, topic, id, number, subject, owner, url, createdOn, lastUpdated, status, patches = [], comments = []):
self.project = project
self.branch = branch
self.topic = topic
self.id = id
self.number = number
self.subject = subject
self.owner = owner
self.url = url
if createdOn is not None:
self.createdOn = int(createdOn)
else:
self.createdOn = None
if lastUpdated is not None:
self.lastUpdated = int(lastUpdated)
else:
self.lastUpdated = None
self.status = status
self.patches = patches
self.comments = comments
def get_current_patch(self):
if len(self.patches) == 0:
return None
return self.patches[len(self.patches) - 1]
def get_first_patch(self):
if len(self.patches) == 0:
return None
return self.patches[0]
def get_reviewer_not_nacked_patch(self):
prev = None
for patch in reversed(self.patches):
if patch.is_reviewer_nacked():
break
prev = patch
return prev
def get_current_age(self):
patch = self.get_current_patch()
return patch.get_age(time.time())
def get_first_age(self):
patch = self.get_first_patch()
return patch.get_age(time.
|
fantoms/psychic-octo-spork
|
chipdisable.py
|
Python
|
gpl-3.0
| 1,217
| 0.028759
|
#!/usr/bin/python
import Adafruit_GPIO as GPIO
import time, os
#print "GETTING GPIO OBJECT"
gpio = GPIO.get_platform_gpio()
#print "SETUP CSID1"
#gpio.setup("CSID1", GPIO.OUT)
#print os.path.exists('/sys/class/gpio/gpio133')
#print "SETUP XIO-P1"
#gpio.setup("XIO-P1", GPIO.IN)
#GPIO.setup("U14_13", GPIO.IN)
#print "READING XIO-P1"
#print "HIGH", gpio.input("XIO-P1")
#gpio.output("CSID1", GPIO.LOW)
#time.sleep(1)
#print "LOW", gpio.input("XIO-P1")
#gpio.output("CSID1", GPIO.HIGH)
#print "HIGH", gpio.input("XIO-P1")
#gpio.output("CSID1", GPIO.LOW)
#print "LOW", gpio.input("XIO-P1")
#this example will test out CHIP XIO-P0 in to XIO-P1
#jumper the pins to test
#
#my test required sudo to work, gpio access requires sudo before changing permissions
#gpio.setup("XIO-P0", GPIO.OUT)
#gpio.setup("XIO-P1", GPIO.IN)
#print "LOW", gpio.input("XIO-P0")
#print "LOW", gpio.input("XIO-P1")
#gpio.output("XIO-P0", GPIO.HIGH)
#print "LOW", gpio.input("XIO-P0")
#print "LOW", gpio.input("XIO-P1")
#time.sleep(4)
#gpio.output("XIO-P0", GPIO.LOW)
#print "LOW", gpio.input("XIO-P0")
#print "LOW", gpio.input("XIO-P1")
#print "CLEA
|
NUP"
#gpio.cleanup()
gpio.setup("XIO-P0", GPIO.OUT)
gpio.output("XIO-P0", GPIO.HIGH)
|
i3visio/osrframework
|
osrframework/wrappers/teamtreehouse.py
|
Python
|
agpl-3.0
| 3,948
| 0.00456
|
################################################################################
#
# Copyright 2015-2020 Félix Brezo and Yaiza Rubio
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
__author__ = "Felix Brezo, Yaiza Rubio <[email protected]>"
__version__ = "2.0"
from osrframework.utils.platforms import Platform
class Teamtreehouse(Platform):
""" A <Platform> object for Teamtreehouse"""
def __init__(self):
self.platformName = "Teamtreehouse"
self.tags = ["social", "news"]
########################
# Defining valid modes #
########################
self.isValidMode = {}
self.isValidMode["phonefy"] = False
self.isValidMode["usufy"] = True
self.isValidMode["searchfy"] = False
######################################
# Search URL for the different modes #
######################################
# Strings with the URL for each and every mode
self.url = {}
#self.url["phonefy"] = "http://anyurl.com//phone/" + "<phonefy>"
self.url["usufy"] = "http://teamtreehouse.com/" + "<usufy>"
#self.url["searchfy"] = "http://anyurl.com/search/" + "<searchfy>"
######################################
# Whether the user needs credentials #
######################################
self.needsCredentials = {}
#self.needsCredentials["phonefy"] = False
self.needsCredentials["usufy"] = False
#self.needsCredentials["searchfy"] = False
#################
# Valid queries #
#################
# Strings that will imply that the query number is not appearing
self.validQuery = {}
# The regular expression '.+' will match any query.
#self.validQuery["phonefy"] = ".*"
self.validQuery["usufy"] = ".+"
#self.validQuery["searchfy"] = ".*"
###################
# Not_found clues #
###################
# Strings that will imply that the query number is not appearing
self.notFoundText = {}
#self.notFoundText["phonefy"] = []
self.notFoundText["usufy"] = ["<title>Sorry, we can't find the page you are looking for</title>"]
#self.notFoundText["searchfy"] = []
#########################
# Fields to be searched #
#########################
self.fieldsRegExp = {}
# Definition of regular expressions to be searched in phonefy mode
#self.fieldsRegExp["phonefy"] = {}
# Example of fields:
#self.fieldsRegExp["phonefy"]["i3visio.location"] = ""
# Definition of regular expressions to be searched in usufy mode
self.fieldsRegExp["usufy"] = {}
# Example of fields:
#self.fieldsRegExp["usufy"]["i3visio.location"] = ""
# Definition of regular expressions to be searched in searchfy mode
#self.fieldsRegExp["searchfy"] = {}
# Example of fields:
#self.fieldsRegExp["searchfy"]["i3visio.location"] = ""
################
# Fields found #
################
# This attribute will be feeded when running the program.
self.foundFields = {}
|
caktus/django-opendebates
|
opendebates/views.py
|
Python
|
apache-2.0
| 19,833
| 0.001311
|
import datetime
import json
import logging
from django.contrib import messages
from django.contrib.auth import REDIRECT_FIELD_NAME, get_user_model
from django.contrib.auth.decorators import login_required
from django.contrib.auth.views import logout
from django.contrib.sites.shortcuts import get_current_site
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.db import connections
from django.db.models import DateField, F, Q
from django.db.models.functions import Trunc
from django.utils import timezone
from django.utils.translation import ugettext as _
from django.http import Http404, HttpResponse, HttpResponseServerError
from django.shortcuts import get_object_or_404, redirect
from django.views.decorators.cache import cache_page
from djangohelpers.lib import rendered_with, allow_http
from registration.backends.simple.views import RegistrationView
from .forms import OpenDebatesRegistrationForm, VoterForm, QuestionForm, MergeFlagForm
from .models import (Candidate, Category, Debate, Flag, Submission, Vote, Voter,
TopSubmissionCategory, ZipCode, RECENT_EVENTS_CACHE_ENTRY)
from .router import readonly_db
from .utils import (get_ip_address_from_request, get_headers_from_request, choose_sort, sort_list,
vote_needs_captcha, registration_needs_captcha, get_voter)
from opendebates_emails.models import send_email
def health_check(request):
"""
Health check for the load balancer.
"""
logger = logging.getLogger('opendebates.views.health_check')
db_errors = []
for conn_name in connections:
conn = connections[conn_name]
try:
cursor = conn.cursor()
cursor.execute('SELECT 1')
row = cursor.fetchone()
assert row[0] == 1
except Exception as e:
# note that there doesn't seem to be a way to pass a timeout to
# psycopg2 through Django, so this will likely not raise a timeout
# exception
logger.warning('Caught error checking database connection "{0}"'
''.format(conn_name), exc_info=True)
db_errors.append(e)
if not db_errors:
return HttpResponse('OK')
else:
return HttpResponseServerError('Configuration Error')
def state_from_zip(zip):
try:
return ZipCode.objects.get(zip=zip).state
except ZipCode.DoesNotExist:
return ''
def root_redirect(request):
site = get_current_site(request)
# Look for the *next* debate
debate = Debate.objects.annotate(
debate_day=Trunc('debate_time', 'day', output_field=DateField())
).filter(
site=site,
debate_day__gte=datetime.date.today(),
).order_by('debate_time').first()
if debate is None:
# No next debate? Look for the most recently ended debate.
debate = Debate.objects.filter(
site=site,
).order_by('-debate_time').first()
if debate:
return redirect('/%s/' % debate.prefix)
else:
# If no debates at all, redirect to opendebatecoalition.com
return redirect('https://opendebatecoalition.com')
@cache_page(5) # Cache for 5 seconds after rendering
@allow_http("GET")
@rendered_with("opendebates/snippets/recent_activity.html")
def recent_activity(request):
entries = cache.get(RECENT_EVENTS_CACHE_ENTRY.format(request.debate.id), default=[])
return {
"recent_activity": entries
}
@rendered_with("opendebates/list_ideas.html")
def list_ideas(request):
ideas = Submission.objects.filter(category__debate=request.debate)
citations_only = request.GET.get("citations_only")
sort = choose_sort(request, request.GET.get('sort'))
ideas = sort_list(citations_only, sort, ideas)
return {
'ideas': ideas,
'sort': sort,
'url_name': reverse('list_ideas'),
'stashed_submission': request.session.pop(
"opendebates.stashed_submission", None) if request.user.is_authenticated else None,
}
@rendered_with("opendebates/list_i
|
deas.html")
def list_category(request, cat_id):
category = get_object_or_404(Category, id=cat_id, debate=request.debate)
ideas = Submission.objects.filter(category__debate=request.debate, category=cat_id)
citations_only = request.GET.get("citations_only")
sort = choose_sort(request, request.GET.get('sort'))
ideas = sort_list(citations_only, sort, ideas)
return {
'ideas': ideas,
'sort': sort,
'url_name': reverse("list_category", kwargs={'cat_id': cat_id}),
'category': category
}
@rendered_with("opendebates/list_ideas.html")
@allow_http("GET")
def search_ideas(request):
try:
search_term = [q for q in request.GET.getlist("q") if q][0]
except IndexError:
return redirect(reverse('list_ideas'))
ideas = Submission.objects.filter(category__debate=request.debate)
citations_only = request.GET.get("citations_only")
sort = choose_sort(request, request.GET.get('sort'))
ideas = sort_list(citations_only, sort, ideas)
ideas = ideas.search(search_term.replace("%", ""))
return {
'ideas': ideas,
'search_term': search_term,
'sort': sort,
'url_name': reverse('search_ideas'),
}
@rendered_with("opendebates/list_ideas.html")
def category_search(request, cat_id):
ideas = Submission.objects.filter(category__debate=request.debate, category=cat_id)
citations_only = request.GET.get("citations_only")
search_term = request.GET['q']
sort = choose_sort(request, request.GET.get('sort'))
ideas = sort_list(citations_only, sort, ideas)
ideas = ideas.search(search_term.replace("%", ""))
return {
'ideas': ideas,
'search_term': search_term,
'sort': sort,
'url_name': reverse("list_category", kwargs={'cat_id': cat_id})
}
@rendered_with("opendebates/vote.html")
@allow_http("GET", "POST")
def vote(request, id):
"""Despite the name, this is both the page for voting AND the detail page for submissions"""
try:
with readonly_db():
idea = Submission.objects.get(
id=id, category__debate=request.debate,
)
except Submission.DoesNotExist:
raise Http404
if request.method == "POST" and not idea.approved:
# Don't allow voting on removed submissions, but do allow viewing them
raise Http404
if idea.duplicate_of_id:
if not idea.approved:
# Submissions which have been "unmoderated as duplicates"
# should remain completely inaccessible, and should not redirect
raise Http404
url = reverse("show_idea", kwargs={'id': idea.duplicate_of_id})
url = url + "#i"+str(idea.id)
return redirect(url)
if request.method == "GET":
two_other_approved_ideas = list(Submission.objects.filter(
category=idea.category,
duplicate_of=None,
approved=True).exclude(id=idea.id)[:2]) + [None, None]
related1 = two_other_approved_ideas[0]
related2 = two_other_approved_ideas[1]
return {
'idea': idea,
'show_duplicates': True,
'related1': related1,
'related2': related2,
'duplicates': (Submission.objects.filter(
category__debate=request.debate,
approved=True, duplicate_of=idea)
if idea.has_duplicates else []),
}
if not request.debate.allow_voting_and_submitting_questions:
raise Http404
form = VoterForm(request.POST)
if not vote_needs_captcha(request):
form.ignore_captcha()
if not form.is_valid():
if request.is_ajax():
return HttpResponse(
json.dumps({"status": "400", "errors": form.errors}),
content_type="application/json")
messages.error(request, _('You have some errors in your form'))
return {
'form': form,
'idea': idea,
}
state = state_from_zip(form.cleaned_data['zipcode'])
|
joakim-hove/ert
|
res/enkf/queue_config.py
|
Python
|
gpl-3.0
| 6,283
| 0.001751
|
# Copyright (C) 2017 Equinor ASA, Norway.
#
# The file 'site_config.py' is part of ERT - Ensemble based Reservoir Tool.
#
# ERT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ERT is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html>
# for more details.
from cwrap import BaseCClass
from ecl.util.util import StringList, Hash
from res import ResPrototype
from res.enkf import ConfigKeys
from res.job_queue import JobQueue, ExtJoblist, Driver
class QueueConfig(BaseCClass):
TYPE_NAME = "queue_config"
_free = ResPrototype("void queue_config_free( queue_config )")
_alloc = ResPrototype("void* queue_config_alloc_load(char*)", bind=False)
_alloc_full = ResPrototype(
"void* queue_config_alloc_full(char*, bool, int, int, queue_driver_enum)",
bind=False,
)
_alloc_content = ResPrototype(
"void* queue_config_alloc(config_content)", bind=False
)
_alloc_local_copy = ResPrototype(
"queue_config_obj queue_config_alloc_local_copy( queue_config )"
)
_has_job_script = ResPrototype("bool queue_config_has_job_script( queue_config )")
_get_job_script = ResPrototype("char* queue_config_get_job_script(queue_config)")
_max_submit = ResPrototype("int queue_config_get_max_submit(queue_config)")
_queue_system = ResPrototype("char* queue_config_get_queue_system(queue_config)")
_queue_driver = ResPrototype(
"driver_ref queue_config_get_queue_driver(queue_config, char*)"
)
_get_num_cpu = ResPrototype("int queue_config_get_num_cpu(queue_config)")
_lsf_queue_opt = ResPrototype("char* queue_config_lsf_queue_name()", bind=False)
_lsf_server_opt = ResPrototype("char* queue_config_lsf_server()", bind=False)
_lsf_resource_opt = ResPrototype("char* queue_config_lsf_resource()", bind=False)
_lsf_driver_opt = ResPrototype("char* queue_config_lsf_driver_name()", bind=False)
def __init__(self, user_config_file=None, config_content=None, config_dict=None):
configs = sum(
[
1
for x in [user_config_file, config_content, config_dict]
if x is not None
]
)
if configs > 1:
raise ValueError(
"Attempting to create QueueConfig object with multiple config objects"
)
if configs == 0:
raise ValueError(
"Attempting to create QueueConfig object with no config objects"
)
c_ptr = None
if user_config_file is not None:
c_ptr = self._alloc(user_config_file)
if config_content is not None:
c_ptr = self._alloc_content(config_content)
if config_dict is not None:
c_ptr = self._alloc_full(
config_dict[ConfigKeys.JOB_SCRIPT],
config_dict[ConfigKeys.USER_MODE],
config_dict[ConfigKeys.MAX_SUBMIT],
config_dict[ConfigKeys.NUM_CPU],
config_dict[ConfigKeys.QUEUE_SYSTEM],
)
if not c_ptr:
raise ValueError("Unable to create QueueConfig instance")
super(QueueConfig, self).__init__(c_ptr)
# Need to create
if config_dict is not None:
queue_options = config_dict.get(ConfigKeys.QUEUE_OPTION)
for option in queue_options:
self.driver.set_option(
option[ConfigKeys.NAME], option[ConfigKeys.VALUE]
)
def create_job_queue(self):
queue = JobQueue(self.driver, max_submit=self.max_submit)
return queue
def create_local_copy(self):
return self._alloc_local_copy()
def has_job_script(self):
return self._has_job_script()
def free(self):
self._free()
@property
def max_submit(self):
return self._max_submit()
@property
def queue_name(self):
return self.driver.get_option(QueueConfig.LSF_QUEUE_NAME_KEY)
@property
def queue_system(self):
"""The queue system in use, e.g. LSF or LOCAL"""
return self._queue_system()
@property
def job_script(self):
return self._get_job_script()
@property
def driver(self):
return self._queue_driver(self.queue_system).setParent(self)
def _assert_lsf(self, key="driver"):
sys = self.queue_system
if sys != QueueConfig.LSF_KEY:
fmt = "Cannot fetch LSF {key}, current queue is {system}"
raise ValueError(fmt.format(key=key, system=self.queue_system))
@property
def _lsf_driver(self):
self._assert_lsf()
driver = self._queue_driver(self.LSF_KEY)
return driver.setParent(self)
@property
def lsf_resource(self):
self._assert_lsf(key=QueueConfig.LSF_RESOURCE_KEY)
return self._lsf_driver.get_option(self.LSF_RESOURCE_KEY)
@property
def lsf_server(self):
        self._assert_lsf(key=QueueConfig.LSF_SERVER_KEY)
return self._lsf_driver.get_option(self.LSF_SERVER_KEY)
@property
def num_cpu(self):
return self._get_num_cpu()
def __eq__(self, other):
if self.max_submit != other.max_submit:
return False
if self.queue_system != other.queue_system:
return False
if self.num_cpu != other.num_cpu:
return False
if self.job_script != other.job_script:
return False
if self.queue_system != "LOCAL":
if self.queue_name != other.queue_name:
return False
if self.lsf_resource != other.lsf_resource:
return False
if self.lsf_server != other.lsf_server:
return False
return True
LSF_KEY = _lsf_driver_opt()
LSF_QUEUE_NAME_KEY = _lsf_queue_opt()
LSF_RESOURCE_KEY = _lsf_resource_opt()
LSF_SERVER_KEY = _lsf_server_opt()
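# A hedged usage sketch (added for illustration; the config file name below is
# a placeholder, not part of the original module):
#
#     queue_config = QueueConfig(user_config_file="model.ert")
#     job_queue = queue_config.create_job_queue()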
|
wvangeit/AllenSDK
|
setup.py
|
Python
|
gpl-3.0
| 1,828
| 0.012582
|
from setuptools import setup, find_packages
import os
import allensdk
# http://bugs.python.org/issue8876#msg208792
if hasattr(os, 'link'):
del os.link
def prepend_find_packages(*roots):
''' Recursively traverse nested packages under the root directories
'''
packages = []
for root in roots:
packages += [root]
packages += [root + '.' + s for s in find_packages(root)]
return packages
setup(
version = allensdk.__version__,
name = 'allensdk',
author = 'David Feng',
author_email = '[email protected]',
packages = prepend_find_packages('allensdk'),
package_data={'': ['*.conf', '*.cfg', '*.md', '*.json', '*.dat', '*.env', '*.sh', 'bps', 'Makefile', 'COPYING'] },
description = 'core libraries for the allensdk.',
install_requires = ['h5py>=2.2.1',
'matplotlib>=1.4.2',
'pandas>=0.16.2',
'numpy>=1.8.2',
'six>=1.8.0',
'pynrrd <= 0.2.0.dev'],
dependency_links = [
'git+https://github.com/mhe/pynrrd.git@9e09b24ff1#egg=pynrrd-0.1.999.dev'
],
tests_require=['nose>=1.2.1',
'coverage>=3.7.1',
'mock'],
setup_requires=['setuptools', 'sphinx', 'numpydoc'],
url='http://alleninstitute.github.io/AllenSDK/',
    scripts=['allensdk/model/biophys_sim/scripts/bps'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Natural Language :: English',
        'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Bio-Informatics'
])
|
m00dawg/holland
|
plugins/holland.lib.lvm/tests/xfs/test_snapshot.py
|
Python
|
bsd-3-clause
| 1,824
| 0.004386
|
import shutil
import tempfile
from nose.tools import *
from holland.lib.lvm import LogicalVolume
from holland.lib.lvm.snapshot import *
from tests.constants import *
class TestSnapshot(object):
def setup(self):
self.tmpdir = tempfile.mkdtemp()
def teardown(self):
shutil.rmtree(self.tmpdir)
def test_snapshot_fsm(self):
lv = LogicalVolume.lookup('%s/%s' % (TEST_VG, TEST_LV))
name = lv.lv_name + '_snapshot'
size = 1 # extent
snapshot = Snapshot(name, size, self.tmpdir)
snapshot.start(lv)
def test_snapshot_fsm_with_callbacks(self):
lv = LogicalVolume.lookup('%s/%s' % (TEST_VG, TEST_LV))
name = lv.lv_name + '_snapshot'
size = 1 # extent
snapshot = Snapshot(name, size, self.tmpdir)
def handle_event(event, *args, **kwargs):
pass
snapshot.register('pre-mount', handle_event)
snapshot.register('post-mount', handle_event)
snapshot.start(lv)
def test_snapshot_fsm_with_failures(self):
lv = LogicalVolume.lookup('%s/%s' % (TEST_VG, TEST_LV))
name = lv.lv_name + '_snapshot'
size = 1 # extent
snapshot = Snapshot(name, size, self.tmpdir)
        def bad_callback(event, *args, **kwargs):
raise Exception("Oooh nooo!")
        for evt in ('initialize', 'pre-snapshot', 'post-snapshot',
'pre-mount', 'post-mount', 'pre-unmount', 'post-unmount',
'pre-remove', 'post-remove', 'finish'):
snapshot.register(evt, bad_callback)
assert_raises(CallbackFailuresError, snapshot.start, lv)
snapshot.unregister(evt, bad_callback)
if snapshot.sigmgr._handlers:
raise Exception("WTF. sigmgr handlers still exist when checking event => %r", evt)
|
frenzykryger/hamachi-watchdog
|
hamachi-watchdog.py
|
Python
|
bsd-2-clause
| 745
| 0
|
#!/usr/bin/env python3
import json
import os
import subprocess
def connection_lost(network_id, timeout_seconds):
p = subprocess.Popen(["hamachi", "go-online", network_id])
try:
p.wait(timeout_seconds)
except subprocess.TimeoutExpired:
p.kill()
return True
return False
if __name__ == "__main__":
with open("/etc/hamachi-watchdog/hamachi-watchdog.conf", "r") as f:
config = json.load(f)
network_id = config['network_id']
    timeout_seconds = config['timeout_seconds']
if connection_lost(network_id, timeout_seconds):
print("Hamachi looks down. Rest
|
arting it...")
os.system("systemctl restart logmein-hamachi.service")
print("Hamachi was restarted")
|
drewkett/SU2
|
SU2_PY/merge_solution.py
|
Python
|
lgpl-2.1
| 2,861
| 0.00769
|
#!/usr/bin/env python
## \file merge_solution.py
# \brief Python script for merging of the solution files.
# \author F. Palacios
# \version 6.1.0 "Falcon"
#
# The current SU2 release has been coordinated by the
# SU2 International Developers Society <www.su2devsociety.org>
# with selected contributions from the open-source community.
#
# The main research teams contributing to the current release are:
# - Prof. Juan J. Alonso's group at Stanford University.
# - Prof. Piero Colonna's group at Delft University of Technology.
# - Prof. Nicolas R. Gauger's group at Kaiserslautern University of Technology.
# - Prof. Alberto Guardone's group at Polytechnic University of Milan.
# - Prof. Rafael Palacios' group at Imperial College London.
# - Prof. Vincent Terrapon's group at the University of Liege.
# - Prof. Edwin van der Weide's group at the University of Twente.
# - Lab. of New Concepts in Aeronautics at Tech. Institute of Aeronautics.
#
# Copyright 2012-2018, Francisco D. Palacios, Thomas D. Economon,
# Tim Albring, and the SU2 contributors.
#
# SU2 is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# SU2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SU2. If not, see <http://www.gnu.org/licenses/>.
from optparse import OptionParser
import SU2
# -------------------------------------------------------------------
# Main
# -------------------------------------------------------------------
def main():
parser = OptionParser()
parser.add_option("-f", "--file", dest="filename",
help="read config from FILE", metavar="FILE")
parser.add_option("-n", "--partitions", dest="partitions", default=-1,
help="number of PARTITIONS", metavar="PARTITIONS")
(options, args)=parser.parse_args()
options.partitions = int(options.partitions)
merge_solution( options.filename ,
options.partitions )
# -------------------------------------------------------------------
# MERGE SOLUTION
# -------------------------------------------------------------------
def merge_solution( filename ,
partitions = -1 ):
config = SU2.io.Config(filename)
if partitions > -1 :
config.NUMBER_PART = partitions
SU2.run.merge(config)
#: def merge_solution()
if __name__ == '__main__':
main()
|
sysadminmatmoz/pmis
|
project_progress_measurement/wizard/__init__.py
|
Python
|
agpl-3.0
| 237
| 0
|
# -*- coding: utf-8 -*-
# Copyright 2014-17 Eficent Business and IT Consulting Services S.L.
# <[email protected]>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from . import progress_measurements_entry
|
pawkoz/dyplom
|
blender/doc/python_api/examples/bge.texture.1.py
|
Python
|
gpl-2.0
| 1,051
| 0.001903
|
"""
Texture Replacement
+++++++++++++++++++
Example of how to replace a texture in game with an external image.
``createTexture()`` and ``removeTexture()`` are to be called from a
module Python Controller.
"""
from bge import logic
from bge import texture
def createTexture(cont):
"""Create a new Dyna
|
mic Texture"""
obj = cont.owner
# get the reference pointer (ID) of the internal texture
ID = texture.materialID(obj, 'IMoriginal.png')
# create a texture object
object_texture = texture.Texture(obj, ID)
# create a new source with an external image
url = logic.expandPath("//newtexture.jpg")
new_source = texture.ImageFFmpeg(url)
# the texture has to be stored in a permanent Python object
logic.texture = object_texture
# update/replace the texture
logic.texture.source = new_source
logic.texture.refresh(False)
def removeTexture(cont):
"""Delete the Dynamic Texture, reversing back the final to its original state."""
try:
del logic.texture
except:
pass
|
hakuya/higu
|
lib/hdbfs/ark.py
|
Python
|
bsd-2-clause
| 8,385
| 0.025164
|
import os
import shutil
import tempfile
import zipfile
class ZipVolume:
    def __init__( self, path ):
self.zf = zipfile.ZipFile( path, 'r' )
self.ls = {}
self.__load_ls()
def __load_ls( self ):
ils = self.zf.infolist()
for i in ils:
try:
ids, e = i.filename.split( '.' )
id = int( ids, 16 )
self.ls[id] = i
except:
                print 'WARNING: %s not loaded from zip' % ( i.filename, )
pass
def verify( self ):
return self.zf.testzip() is None
def read( self, id, extension ):
try:
info = self.ls[id]
return self.zf.open( info, 'r' )
except KeyError:
return None
def _debug_write( self, id, extension ):
assert False
def get_state( self ):
return 'clean'
def reset_state( self ):
pass
class FileVolume:
def __init__( self, data_config, vol_id ):
self.data_config = data_config
self.vol_id = vol_id
self.to_commit = []
self.state = 'clean'
self.rm_dir = None
def __get_path( self, id, priority, extension ):
path = self.data_config.get_file_vol_path( self.vol_id, priority )
return os.path.join( path, '%016x.%s' % ( id, extension ) )
def verify( self ):
return True
def read( self, id, priority, extension ):
p = self.__get_path( id, priority, extension )
if( not os.path.isfile( p ) ):
return None
else:
try:
return open( p, 'rb' )
except IndexError:
return None
def _debug_write( self, id, priority, extension ):
p = self.__get_path( id, priority, extension )
try:
return open( p, 'wb' )
except IndexError:
return None
def get_state( self ):
return self.state
def reset_state( self ):
self.to_commit = []
self.state = 'clean'
rm_dir = self.rm_dir
self.rm_dir = None
self.to_commit = []
if( rm_dir is not None ):
shutil.rmtree( rm_dir )
def commit( self ):
completion = 0
try:
for t in self.to_commit:
shutil.move( t[0], t[1] )
completion += 1
except:
# Something went wrong, rollback
for t in self.to_commit[:completion]:
shutil.move( t[1], t[0] )
# Sometimes move() seems to leave files behind
for t in self.to_commit:
try:
if( os.path.isfile( t[1] ) ):
os.remove( t[1] )
except:
pass
raise
# Comitted
self.state = 'committed'
def rollback( self ):
if( self.state == 'dirty' ):
self.to_commit = []
self.state = 'clean'
elif( self.state == 'committed' ):
for t in self.to_commit:
shutil.move( t[1], t[0] )
# Sometimes move() seems to leave files behind
for t in self.to_commit:
try:
if( os.path.isfile( t[1] ) ):
os.remove( t[1] )
except:
pass
self.state = 'dirty'
def load_data( self, path, id, priority, extension ):
if( self.state == 'committed' ):
self.reset_state()
self.state = 'dirty'
new_path = self.data_config.get_file_vol_path( self.vol_id, priority )
if( not os.path.isdir( new_path ) ):
os.makedirs( new_path )
tgt = os.path.join( new_path, '%016x.%s' % ( id, extension ) )
self.to_commit.append( ( path, tgt, ) )
def delete( self, id, priority, extension ):
if( self.state == 'committed' ):
self.reset_state()
self.state = 'dirty'
if( self.rm_dir is None ):
self.rm_dir = tempfile.mkdtemp()
src = self.__get_path( id, priority, extension )
if( not os.path.isfile( src ) ):
return
name = os.path.split( src )[-1]
tgt = os.path.join( self.rm_dir, name )
self.to_commit.append( ( src, tgt, ) )
class StreamDatabase:
def __init__( self, data_config ):
self.volumes = {}
self.data_config = data_config
self.state = 'clean'
def __get_volume( self, vol_id ):
if( self.volumes.has_key( vol_id ) ):
return self.volumes[vol_id]
vol = FileVolume( self.data_config, vol_id )
self.volumes[vol_id] = vol
return vol
def __get_vol_for_id( self, id ):
return self.__get_volume( id >> 12 )
def get_state( self ):
return self.state
def reset_state( self ):
for vol in self.volumes.values():
vol.reset_state()
self.state = 'clean'
def prepare_commit( self ):
if( self.state == 'clean' ):
return
assert self.state != 'prepared'
vols = self.volumes.values()
# Clean things up before we begin. We need to do this so that
# We can determine the volumes that changes as part of this
# commit
for vol in vols:
assert vol.get_state() != 'committed'
try:
# Try to commit all the dirty volumes
for vol in vols:
if( vol.get_state() == 'dirty' ):
vol.commit()
except:
# Something went wrong, rollback
for vol in vols:
if( vol.get_state() == 'committed' ):
vol.rollback()
raise
# Comitted
self.state = 'prepared'
def unprepare_commit( self ):
if( self.state == 'clean' ):
return
assert self.state == 'prepared'
vols = self.volumes.values()
for vol in vols:
assert vol.get_state() != 'dirty'
if( vol.get_state() == 'committed' ):
vol.rollback()
for vol in vols:
assert vol.get_state() != 'committed'
self.state = 'dirty'
def complete_commit( self ):
if( self.state == 'clean' ):
return
assert self.state == 'prepared'
vols = self.volumes.values()
for vol in vols:
if( vol.get_state() == 'committed' ):
vol.reset_state()
self.state = 'clean'
def commit( self ):
self.prepare_commit()
self.complete_commit()
def rollback( self ):
vols = self.volumes.values()
if( self.state == 'clean' ):
for vol in vols:
assert vol.get_state() == 'clean'
return
if( self.state == 'prepared' ):
self.unprepare_commit()
if( self.state == 'dirty' ):
for vol in vols:
assert vol.get_state() != 'committed'
if( vol.get_state() == 'dirty' ):
vol.rollback()
for vol in vols:
assert vol.get_state() == 'clean'
self.state = 'clean'
def load_data( self, path, id, priority, extension ):
if( self.state == 'committed' ):
# Clean things up before we begin. We need to do this so that
# We can determine the volumes that changes as part of this
# commit
self.reset_state()
self.state = 'dirty'
v = self.__get_vol_for_id( id )
v.load_data( path, id, priority, extension )
def delete( self, id, priority, extension ):
if( self.state == 'committed' ):
# Clean things up before we begin. We need to do this so that
# We can determine the volumes that changes as part of this
# commit
self.reset_state()
self.state = 'dirty'
v = self.__get_vol_for_id( id )
v.delete( id, priority, extension )
def read( self, id, priority, extension ):
        v = self.__get_vol_for_id( id )
        return v.read( id, priority, extension )
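# The prepare/complete split above gives StreamDatabase a two-phase commit
# across volumes. A hedged lifecycle sketch (the data_config object, the path
# and the ids are illustrative assumptions, not part of the original file):
#
#     db = StreamDatabase( data_config )
#     db.load_data( '/tmp/incoming.jpg', 0x1234, 1000, 'jpg' )  # state -> 'dirty'
#     try:
#         db.prepare_commit()    # commits every dirty volume
#         db.complete_commit()   # resets committed volumes, state -> 'clean'
#     except:
#         db.rollback()          # rolls back any partially committed volume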
|
arnavd96/Cinemiezer
|
myvenv/lib/python3.4/site-packages/music21/ext/jsonpickle/__init__.py
|
Python
|
mit
| 5,049
| 0.00099
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008 John Paulett (john -at- paulett.org)
# Copyright (C) 2009, 2011, 2013 David Aguilar (davvid -at- gmail.com)
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
"""Python library for serializing any arbitrary object graph into JSON.
jsonpickle can take almost any Python object and turn the object into JSON.
Additionally, it can reconstitute the object back into Python.
The object must be accessible globally via a module and must
inherit from object (AKA new-style classes).
Create an object::
class Thing(object):
def __init__(self, name):
self.name = name
obj = Thing('Awesome')
Use jsonpickle to transform the object into a JSON string::
import jsonpickle
frozen = jsonpickle.encode(obj)
Use jsonpickle to recreate a Python object from a JSON string::
thawed = jsonpickle.decode(frozen)
.. warning::
Loading a JSON string from an untrusted source represents a potential
security vulnerability. jsonpickle makes no attempt to sanitize the input.
The new object has the same type and data, but essentially is now a copy of
the original.
.. code-block:: python
assert obj.name == thawed.name
If you will never need to load (regenerate the Python class from JSON), you can
pass in the keyword unpicklable=False to prevent extra information from being
added to JSON::
oneway = jsonpickle.encode(obj, unpicklable=False)
result = jsonpickle.decode(oneway)
assert obj.name == result['name'] == 'Awesome'
"""
import sys, os
from music21 import common
sys.path.append(common.getSourceFilePath() + os.path.sep + 'ext')
from jsonpickle import pickler
from jsonpickle import unpickler
from jsonpickle.backend import JSONBackend
from jsonpickle.version import VERSION
# ensure built-in handlers are loaded
__import__('jsonpickle.handlers')
__all__ = ('encode', 'decode')
__version__ = VERSION
json = JSONBackend()
# Export specific JSONPluginMgr methods into the jsonpickle namespace
set_preferred_backend = json.set_preferred_backend
set_encoder_options = json.set_encoder_options
load_backend = json.load_backend
remove_backend = json.remove_backend
enable_fallthrough = json.enable_fallthrough
def encode(value,
unpicklable=True,
make_refs=True,
keys=False,
max_depth=None,
backend=None,
warn=False,
max_iter=None):
"""Return a JSON formatted representation of value, a Python object.
:param unpicklable: If set to False then the output will not contain the
information necessary to turn the JSON data back into Python objects,
but a simpler JSON stream is produced.
:param max_depth: If set to a non-negative integer then jsonpickle will
not recurse deeper than 'max_depth' steps into the object. Anything
deeper than 'max_depth' is represented using a Python repr() of the
object.
:param make_refs: If set to False jsonpickle's referencing support is
disabled. Objects that are id()-identical won't be preserved across
encode()/decode(), but the resulting JSON stream will be conceptually
simpler. jsonpickle detects cyclical objects and will break the cycle
by calling repr() instead of recursing when make_refs is set False.
    :param keys: If set to True then jsonpickle will encode non-string
dictionary keys instead of coercing them into strings via `repr()`.
:param warn: If set to True then jsonpickle will warn when it
returns None for an object which it cannot pickle
(e.g. file descriptors).
:param max_iter: If set to a non-negative integer then jsonpickle will
consume at most `max_iter` items when pickling iterators.
>>> encode('my string')
'"my string"'
>>> encode(36)
'36'
>>> encode({'foo': True})
'{"foo": true}'
>>> encode({'foo': True}, max_depth=0)
'"{\\'foo\\': True}"'
>>> encode({'foo': True}, max_depth=1)
'{"foo": "True"}'
"""
if backend is None:
backend = json
return pickler.encode(value,
backend=backend,
unpicklable=unpicklable,
make_refs=make_refs,
keys=keys,
max_depth=max_depth,
warn=warn)
def decode(string, backend=None, keys=False):
"""Convert a JSON string into a Python object.
The keyword argument 'keys' defaults to False.
If set to True then jsonpickle will decode non-string dictionary keys
into python objects via the jsonpickle protocol.
>>> str(decode('"my string"'))
'my string'
>>> decode('36')
36
"""
if backend is None:
backend = json
return unpickler.decode(string, backend=backend, keys=keys)
# json.load(),loads(), dump(), dumps() compatibility
dumps = encode
loads = decode
|
parmarmanojkumar/MITx_Python
|
6002x/week2/lectureCode_intDict.py
|
Python
|
mit
| 1,482
| 0.008097
|
import random
class intDict(object):
"""A dictionary with integer keys"""
    def __init__(self, numBuckets):
"""Create an empty dictionary"""
self.buckets = []
self.numBuckets = numBuckets
for i in range(numBuckets):
self.buckets.append([])
def addEntry(self, dictKey, dictVal):
"""Assumes dictKey an int. Adds an entry."""
hashBucket = self.buckets[dictKey%self.numBuckets]
for i in range(len(hashBucket)):
if hashBucket[i][0] == dictKey:
hashBucket[i] = (dictKey, dictVal)
return
hashBucket.append((dictKey, dictVal))
def getValue(self, dictKey):
"""Assumes dictKey an int. Returns entry associated
with the key dictKey"""
hashBucket = self.buckets[dictKey%self.numBuckets]
for e in hashBucket:
if e[0] == dictKey:
return e[1]
return None
def __str__(self):
res = ''
for b in self.buckets:
for t in b:
res = res + str(t[0]) + ':' + str(t[1]) + ','
return '{' + res[:-1] + '}' #res[:-1] removes the last comma
D = intDict(29)
for i in range(29):
#choose a random int in range(10**5)
key = random.choice(range(10**5))
D.addEntry(key, i)
print '\n', 'The buckets are:'
for hashBucket in D.buckets: #violates abstraction barrier
print ' ', hashBucket
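# A quick added illustration of the bucketing above: keys that are congruent
# modulo numBuckets share a bucket, so getValue only scans that one bucket.
demo = intDict(3)
demo.addEntry(1, 'a') # 1 % 3 == 1, goes into bucket 1
demo.addEntry(4, 'b') # 4 % 3 == 1, collides into the same bucket
print 'getValue(4) ->', demo.getValue(4) # scans bucket 1 only, prints 'b'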
|
Jorisvansteenbrugge/advbioinf
|
rosalind/python/fib.py
|
Python
|
gpl-3.0
| 489
| 0.022495
|
#!/usr/bin/env python
from sys import argv
def calcRabbits(n,k):
pairs = [1, 1]
for i in range(2,n):
#try:
f1 = pairs[i-1]
        f2 = pairs[i-2] * k
pairs.append((f1+f2))
# except IndexError:
# pass
return pairs
if __name__ == "__main__":
try:
n = int(argv[1])
k = int(argv[2])
print(calcRabbits(n,k))
except (IndexError, ValueError):
print("Usage: python fib.py <intN> <i
|
ntK>")
|
gitsimon/tq_website
|
partners/cms_plugins/partners_plugin.py
|
Python
|
gpl-2.0
| 859
| 0.001164
|
from cms.models.pluginmodel import CMSPlugin
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import gettext_lazy as _
from django.utils.translation import get_language
from partners.models import Partner
class PartnersPlugin(CMSPluginBase):
name = _("Partners")
model = CMSPlugin
render_template = "partners/partners_plugin.html"
text_enabled = False
    allow_children = False
def render(self, context, instance, placeholder):
language = get_language()
if language is None:
language = 'en'
partners = Partner.objects.filter(active=True).translated(language).order_by('translations__name').all()
context.update({
'partners': partners,
})
return context
plugin_pool.register_plugin(PartnersPlugin)
|
espenak/pinax-oldversion-backup
|
pinax/apps/signup_codes/views.py
|
Python
|
mit
| 4,496
| 0.004448
|
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.utils.translation import ugettext
from django.contrib import messages
from django.contrib.admin.views.decorators import staff_member_required
from pinax.apps.account.utils import get_default_redirect, user_display
from pinax.apps.signup_codes.models import SignupCode
from pinax.apps.signup_codes.forms import SignupForm, InviteUserForm
def group_and_bridge(request):
"""
Given the request we can depend on the GroupMiddleware to provide the
group and bridge.
"""
# be group aware
group = getattr(request, "group", None)
if group:
bridge = request.bridge
else:
bridge = None
return group, bridge
def group_context(group, bridge):
# @@@ use bridge
ctx = {
"group": group,
}
if group:
ctx["group_base"] = bridge.group_base_template()
return ctx
def signup(request, **kwargs):
form_class = kwargs.pop("form_class", SignupForm)
template_name = kwargs.pop("template_name", "account/signup.html")
template_name_failure = kwargs.pop("template_name_failure", "signup_codes/failure.html")
success_url = kwargs.pop("success_url", None)
group, bridge = group_and_bridge(request)
ctx = group_context(group, bridge)
if success_url is None:
if hasattr(settings, "SIGNUP_REDIRECT_URLNAME"):
fallback_url = reverse(settings.SIGNUP_REDIRECT_URLNAME)
else:
if hasattr(settings, "LOGIN_REDIRECT_URLNAME"):
fallback_url = reverse(settings.LOGIN_REDIRECT_URLNAME)
else:
fallback_url = settings.LOGIN_REDIRECT_URL
success_url = get_default_redirect(request, fallback_url)
code = request.GET.get("code")
if request.method == "POST":
form = form_class(request.POST, group=group)
if form.is_valid():
user = form.save(request=request)
            signup_code = form.cleaned_data["signup_code"]
if signup_code:
signup_code.use(user)
form.login(request, user)
messages.add_message(request, messages.SUCCESS,
ugettext("Successfully logged in as %(username)s.") % {
"username": user_display(user),
}
)
return HttpResponseRedirect(success_url)
else:
signup_code = SignupCode.check(code)
if signup_code:
initial = {
"signup_code": code,
"email": signup_code.email,
}
form = form_class(initial=initial, group=group)
else:
if not settings.ACCOUNT_OPEN_SIGNUP:
ctx.update({
"code": code,
})
ctx = RequestContext(request, ctx)
# if account signup is not open we want to fail when there is
# no sign up code or what was provided failed.
return render_to_response(template_name_failure, ctx)
else:
form = form_class(group=group)
ctx.update({
"code": code,
"form": form,
})
return render_to_response(template_name, RequestContext(request, ctx))
@staff_member_required
def admin_invite_user(request, **kwargs):
"""
This view, by default, works inside the Django admin.
"""
form_class = kwargs.pop("form_class", InviteUserForm)
template_name = kwargs.pop("template_name", "signup_codes/admin_invite_user.html")
group, bridge = group_and_bridge(request)
if request.method == "POST":
form = form_class(request.POST, group=group)
if form.is_valid():
email = form.cleaned_data["email"]
form.send_signup_code()
messages.add_message(request, messages.INFO,
ugettext("An email has been sent to %(email)s.") % {
"email": email
}
)
form = form_class() # reset
else:
form = form_class(group=group)
ctx = group_context(group, bridge)
ctx.update({
"title": ugettext("Invite user"),
"form": form,
})
return render_to_response(template_name, RequestContext(request, ctx))
|
beddit/sleep-musicalization-web
|
manage.py
|
Python
|
bsd-2-clause
| 258
| 0
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sleeptomusicweb.settings")
from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
mattseymour/django
|
tests/view_tests/tests/test_static.py
|
Python
|
bsd-3-clause
| 5,569
| 0.000898
|
import mimetypes
import unittest
from os import path
from django.conf.urls.static import static
from django.http import FileResponse, HttpResponseNotModified
from django.test import SimpleTestCase, override_settings
from django.utils.http import http_date
from django.views.static import was_modified_since
from .. import urls
from ..urls import media_dir
@override_settings(DEBUG=True, ROOT_URLCONF='view_tests.urls')
class StaticTests(SimpleTestCase):
"""Tests django views in django/views/static.py"""
prefix = 'site_media'
def test_serve(self):
"The static view can serve static media"
media_files = ['file.txt', 'file.txt.gz']
for filename in media_files:
response = self.client.get('/%s/%s' % (self.prefix, filename))
response_content = b''.join(response)
file_path = path.join(media_dir, filename)
with open(file_path, 'rb') as fp:
self.assertEqual(fp.read(), response_content)
self.assertEqual(len(response_content), int(response['Content-Length']))
self.assertEqual(mimetypes.guess_type(file_path)[1], response.get('Content-Encoding', None))
def test_chunked(self):
"The static view should stream files in chunks to avoid large memory usage"
response = self.client.get('/%s/%s' % (self.prefix, 'long-line.txt'))
first_chunk = next(response.streaming_content)
self.assertEqual(len(first_chunk), FileResponse.block_size)
second_chunk = next(response.streaming_content)
response.close()
# strip() to prevent OS line endings from causing differences
self.assertEqual(len(second_chunk.strip()), 1449)
def test_unknown_mime_type(self):
response = self.client.get('/%s/file.unknown' % self.prefix)
self.assertEqual('application/octet-stream', response['Content-Type'])
response.close()
def test_copes_with_empty_path_component(self):
file_name = 'file.txt'
response = self.client.get('/%s//%s' % (self.prefix, file_name))
response_content = b''.join(response)
with open(path.join(media_dir, file_name), 'rb') as fp:
self.assertEqual(fp.read(), response_content)
def test_is_modified_since(self):
file_name = 'file.txt'
response = self.client.get(
'/%s/%s' % (self.prefix, file_name),
HTTP_IF_MODIFIED_SINCE='Thu, 1 Jan 1970 00:00:00 GMT'
)
        response_content = b''.join(response)
with open(path.join(media_dir, file_name), 'rb') as fp:
self.assertEqual(fp.read(), response_content)
def test_not_modified_since(self):
file_name = 'file.txt'
response = self.client.get(
'/%s/%s' % (self.prefix, file_name),
HTTP_IF_MODIFIED_SINCE='Mon, 18 Jan 2038 05:14:07 GMT'
# This is 24h before max Unix time. Remember to fix Django and
# update this test well before 2038 :)
)
self.assertIsInstance(response, HttpResponseNotModified)
def test_invalid_if_modified_since(self):
"""Handle bogus If-Modified-Since values gracefully
Assume that a file is modified since an invalid timestamp as per RFC
2616, section 14.25.
"""
file_name = 'file.txt'
invalid_date = 'Mon, 28 May 999999999999 28:25:26 GMT'
response = self.client.get('/%s/%s' % (self.prefix, file_name),
HTTP_IF_MODIFIED_SINCE=invalid_date)
response_content = b''.join(response)
with open(path.join(media_dir, file_name), 'rb') as fp:
self.assertEqual(fp.read(), response_content)
self.assertEqual(len(response_content), int(response['Content-Length']))
def test_invalid_if_modified_since2(self):
"""Handle even more bogus If-Modified-Since values gracefully
Assume that a file is modified since an invalid timestamp as per RFC
2616, section 14.25.
"""
file_name = 'file.txt'
invalid_date = ': 1291108438, Wed, 20 Oct 2010 14:05:00 GMT'
response = self.client.get('/%s/%s' % (self.prefix, file_name),
HTTP_IF_MODIFIED_SINCE=invalid_date)
response_content = b''.join(response)
with open(path.join(media_dir, file_name), 'rb') as fp:
self.assertEqual(fp.read(), response_content)
self.assertEqual(len(response_content), int(response['Content-Length']))
def test_404(self):
response = self.client.get('/%s/non_existing_resource' % self.prefix)
self.assertEqual(404, response.status_code)
def test_index(self):
response = self.client.get('/%s/' % self.prefix)
self.assertContains(response, 'Index of /')
class StaticHelperTest(StaticTests):
"""
Test case to make sure the static URL pattern helper works as expected
"""
def setUp(self):
super(StaticHelperTest, self).setUp()
self._old_views_urlpatterns = urls.urlpatterns[:]
urls.urlpatterns += static('/media/', document_root=media_dir)
def tearDown(self):
super(StaticHelperTest, self).tearDown()
urls.urlpatterns = self._old_views_urlpatterns
class StaticUtilsTests(unittest.TestCase):
def test_was_modified_since_fp(self):
"""
A floating point mtime does not disturb was_modified_since (#18675).
"""
mtime = 1343416141.107817
header = http_date(mtime)
self.assertFalse(was_modified_since(header, mtime))
|
MichaelSEA/python_koans
|
python3/koans/triangle.py
|
Python
|
mit
| 1,017
| 0.003933
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Triangle Project Code.
# Triangle analyzes the lengths of the sides of a triangle
# (represented by a, b and c) and returns the type of triangle.
#
# It returns:
# 'equilateral' if all sides are equal
# 'isosceles' if exactly 2 sides are equal
# 'scalene' if no sides are equal
#
# The tests for this method can be found in
# about_triangle_project.py
# and
# about_triangle_project_2.py
#
def triangle(a, b, c):
    if a <= 0 or b <= 0 or c <= 0:
        raise TriangleError(f"Non-positive value passed for sides:{a},{b},{c}")
sum1 = a + b
sum2 = a + c
sum3 = b + c
if sum1 <= c or sum2 <= b or sum3 <= a:
raise TriangleError("Sum of any two sides must be greater than third one.")
if a == b == c:
return 'equilateral'
if a == b or b == c or a == c:
return 'isosceles'
return 'scalene'
# Error class used in part 2. No need to change this code.
class TriangleError(Exception):
pass
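# Illustrative calls (added; not part of the original koan file):
assert triangle(2, 2, 2) == 'equilateral'
assert triangle(3, 4, 4) == 'isosceles'
assert triangle(3, 4, 5) == 'scalene'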
|
HalfLike/qc-web-server
|
app/models.py
|
Python
|
apache-2.0
| 1,467
| 0.00818
|
#!/usr/bin/python
#! -*- coding:utf-8 -*-
from sqlalchemy import Column, Integer, String
from database import Base
class Message(Base):
__tablename__ = 'message'
MessageId = Column(Integer, primary_key=True)
DeviceId = Column(String(50))
MessageBody = Column(String(1000))
MessageType = Column(Integer)
CreatedTime = Column(String(50))
def __init__(self, json):
self.DeviceId = json["DeviceId"]
self.CreatedTime = json["CreatedTime"]
self.MessageType = json["MessageType"]
self.MessageBody = json["MessageBody"]
def get_json(self):
return {
"MessageId":self.MessageId,
"DeviceId":self.DeviceId,
"CreatedTime":self.CreatedTime,
"MessageType":self.MessageType,
"MessageBody":self.MessageBody
}
def __repr__(self):
return repr(self.get_json())
class UserInfo(Base):
__tablename__ = 'userinfo'
    DeviceId = Column(String(50), primary_key=True)
UseTimes = Column(Integer)
LastUseTime = Column(String(50))
def __init__(self, json):
self.DeviceId = json["DeviceId"]
self.UseTimes = json["UseTimes"]
self.LastUseTime = json["LastUseTime"]
def get_json(self):
return {
"DeviceId":self.DeviceId,
"UseTimes":self.UseTimes,
"LastUseTime":self.LastUseTime
}
def __repr__(self):
return repr(self.get_json())
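# A hedged construction example (added; the field values are placeholders):
#
#     msg = Message({"DeviceId": "dev-001", "CreatedTime": "2017-01-01 00:00:00",
#                    "MessageType": 1, "MessageBody": "hello"})
#     print(msg) # prints the repr of msg.get_json()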
|
nburn42/tensorflow
|
tensorflow/contrib/distribute/python/one_device_strategy.py
|
Python
|
apache-2.0
| 4,931
| 0.009126
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Class OneDeviceStrategy implementing DistributionStrategy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.contrib.distribute.python import values
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.training import distribute as distribute_lib
# TODO(josh11b): Replace asserts in this file with if ...: raise ...
class OneDeviceStrategy(distribute_lib.DistributionStrategy):
"""A distribution strategy for running on a single device."""
# TODO(josh11b): Do we wrap values in types to generate errors if you are
# doing something that won't work with other DistributionStrategy
# implementations?
def __init__(self, device, prefetch_on_device=None):
super(OneDeviceStrategy, self).__init__()
self._device = device
self._prefetch_on_device = prefetch_on_device
self._default_device = device
def _create_variable(self, next_creator, *args, **kwargs):
# No need to distinguish tower-local variables when not mirroring,
# we just enforce that they are not trainable.
if kwargs.pop("tower_local_reduce_method", None) is not None:
kwargs["trainable"] = False
colocate_with = kwargs.pop("colocate_with", None)
if colocate_with is None:
with ops.device(self._device):
return next_creator(*args, **kwargs)
if isinstance(colocate_with, six.string_types):
with ops.device(colocate_with):
return next_creator(*args, **kwargs)
if (isinstance(colocate_with, list) and len(colocate_with) == 1 and
isinstance(colocate_with[0], six.string_types)):
with ops.device(colocate_with[0]):
return next_creator(*args, **kwargs)
with ops.colocate_with(colocate_with):
return next_creator(*args, **kwargs)
def distribute_dataset(self, dataset_fn):
return values.PerDeviceDataset(
self._call_dataset_fn(dataset_fn), [self._device],
self._prefetch_on_device)
def _broadcast(self, tensor, destinations):
return tensor
def _call_for_each_tower(self, fn, *args, **kwargs):
# We don't run `fn` in multiple threads in OneDeviceStrategy.
kwargs.pop("run_concurrently", None)
with ops.device(self._device), _OneDeviceTowerContext(self):
return fn(*args, **kwargs)
def map(self, map_over, fn, *args, **kwargs):
with ops.device(self._device):
return values.MapOutput([fn(m, *args, **kwargs) for m in map_over])
def _reduce(self, method_string, value, destinations):
if not isinstance(value, values.MapOutput):
return value
l = value.get()
assert l
with ops.device(self._device):
if method_string == "sum":
return math_ops.add_n(l)
elif method_string == "mean":
return math_ops.add_n(l) / len(l)
else:
assert False
def _update(self, var, fn, *args, **kwargs):
with ops.device(self._device), distribute_lib.UpdateContext(self._device):
return fn(var, *args, **kwargs)
def _update_non_slot(self, colocate_with, fn, *args, **kwargs):
del colocate_with
with ops.device(self._device), distribute_lib.UpdateContext(self._device):
return fn(*args, **kwargs)
def _fetch(self, val, destination, fn):
"""Return a copy of `val` or `fn(val)` on `destination`."""
with ops.device(self._device):
v = fn(val)
with ops.device(destination):
return array_ops.identity(v)
def _unwrap(self, value):
return [value]
@property
def is_single_tower(self):
return True
@property
def num_towers(self):
return 1
@property
def worker_devices(self):
return [self._device]
@property
def parameter_devices(self):
return [self._device]
def non_slot_devices(self, var_list):
del var_list
return [self._device]
def _worker_device_index(self):
return 0
class _OneDeviceTowerContext(distribute_lib.TowerContext):
def __init__(self, distribution_strategy):
    distribute_lib.TowerContext.__init__(
        self, distribution_strategy, tower_id=0)
@property
def device(self):
return self._distribution_strategy.worker_devices[0]
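# A minimal usage sketch (added for illustration; the device string is an
# example, and scope() is assumed from the DistributionStrategy base class):
#
#     strategy = OneDeviceStrategy("/device:GPU:0")
#     with strategy.scope():
#         ... # variables created here are placed on the single device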
|
farvardin/txt2tags-test
|
targets/html5.py
|
Python
|
gpl-2.0
| 3,582
| 0.019542
|
"""
A HTML5 target.
"""
from targets import _
from html import TYPE
import html
NAME = _('HTML5 page')
EXTENSION = 'html'
HEADER = """\
<!DOCTYPE html>
<html>
<head>
<meta charset="%(ENCODING)s">
<title>%(HEADER1)s</title>
<meta name="generator" content="http://txt2tags.org">
<link rel="stylesheet" href="%(STYLE)s">
<style>
body{background-color:#fff;color:#000;}
hr{background-color:#000;border:0;color:#000;}
hr.heavy{height:5px;}
hr.light{height:1px;}
img{border:0;display:block;}
img.right{margin:0 0 0 auto;}
img.center{border:0;margin:0 auto;}
table th,table td{padding:4px;}
.center,header{text-align:center;}
table.center {margin-left:auto; margin-right:auto;}
.right{text-align:right;}
.left{text-align:left;}
.tableborder,.tableborder td,.tableborder th{border:1px solid #000;}
.underline{text-decoration:underline;}
</style>
</head>
<body>
<header>
<hgroup>
<h1>%(HEADER1)s</h1>
<h2>%(HEADER2)s</h2>
<h3>%(HEADER3)s</h3>
</hgroup>
</header>
<article>
"""
HEADERCSS = """\
<!DOCTYPE html>
<html>
<head>
<meta charset="%(ENCODING)s">
<title>%(HEADER1)s</title>
<meta name="generator" content="http://txt2tags.org">
<link rel="stylesheet" href="%(STYLE)s">
</head>
<body>
<header>
<hgroup>
<h1>%(HEADER1)s</h1>
<h2>%(HEADER2)s</h2>
<h3>%(HEADER3)s</h3>
</hgroup>
</header>
<article>
"""
TAGS = html.TAGS.copy()
for tag in TAGS:
TAGS[tag] = TAGS[tag].lower()
HTML5TAGS = {
'title1Open' : '<section~A~>\n<h1>\a</h1>' ,
'title1Close' : '</section>' ,
'title2Open' : '<section~A~>\n<h2>\a</h2>' ,
'title2Close' : '</section>' ,
'title3Open' : '<section~A~>\n<h3>\a</h3>' ,
'title3Close' : '</section>' ,
'title4Open' : '<section~A~>\n<h4>\a</h4>' ,
'title4Close' : '</section>' ,
'title5Open' : '<section~A~>\n<h5>\a</h5>' ,
'title5Close' : '</section>' ,
'fontBoldOpen' : '<strong>' ,
'fontBoldClose' : '</strong>' ,
'fontItalicOpen' : '<em>' ,
'fontItalicClose' : '</em>' ,
'fontUnderlineOpen' : '<span class="underline">',
'fontUnderlineClose' : '</span>' ,
'fontStrikeOpen' : '<del>' ,
'fontStrikeClose' : '</del>' ,
'listItemClose' : '</li>' ,
'numlistItemClose' : '</li>' ,
'deflistItem2Close' : '</dd>' ,
    'bar1' : '<hr class="light">' ,
'bar2' : '<hr class="heavy">' ,
'img' : '<img~a~ src="\a" alt="">' ,
'imgEmbed' : '<img~a~ src="\a" alt="">' ,
'_imgAlignLeft' : ' class="left"' ,
'_imgAlignCenter' : ' class="center"',
'_imgAlignRight' : ' class="right"' ,
'tableOpen' : '<table~a~~b~>' ,
'_tableBorder' : ' class="tableborder"' ,
'_tableAlignCenter' : ' style="margin-left: auto; margin-right: auto;"',
'_tableCellAlignRight' : ' class="right"' ,
'_tableCellAlignCenter': ' class="center"',
'cssOpen' : '<style>' ,
'tocOpen' : '<nav>' ,
'tocClose' : '</nav>' ,
'EOD' : '</article></body></html>'
}
TAGS.update(HTML5TAGS)
RULES = html.RULES.copy()
#Update the rules to use explicit <section> </section> tags
HTML5RULES = {
'titleblocks' : 1,
}
RULES.update(HTML5RULES)
|
sadad111/leetcodebox
|
Distribute Candies.py
|
Python
|
gpl-3.0
| 338
| 0
|
class Solution(object):
def distributeCandies(self, candies):
"""
:type candies: List[int]
:rtype: int
"""
result = 0
kind = list(set(candies))
if len(kind) > len(candies)/2:
            result = len(candies)/2
else:
result = len(kind)
        return result
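# Worked example (added for illustration): six candies of three kinds, so the
# sister receives min(number of kinds, len(candies)/2) == 3.
#
#     Solution().distributeCandies([1, 1, 2, 2, 3, 3]) # -> 3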
|
nushio3/chainer
|
chainer/functions/local_response_normalization.py
|
Python
|
mit
| 4,265
| 0.003751
|
import numpy
from chainer import cuda, Function
def _cu_conv_sum(y, x, n):
# Convolutional sum
    # TODO(beam2d): Use scan computation
rdim = x.size / (x.shape[0] * x.shape[1])
cuda.elementwise(
'float* y, const float* x, int rdim, int N, int n_',
'''
int half_n = n_ / 2;
int offset = i / rdim * N * rdim + i % rdim;
float* xi = x + offset;
float* yi = y + offset;
float sum_part = 0;
for (int j = 0; j < N + half_n; ++j) {
if (j < N) {
sum_part += xi[j * rdim];
}
if (j >= n_) {
sum_part -= xi[(j - n_) * rdim];
}
if (j >= half_n) {
yi[(j - half_n) * rdim] = sum_part;
}
}
''', 'lrn_conv_sum')(y, x, rdim, x.shape[1], n,
range=slice(0, x.shape[0] * rdim, 1))
class LocalResponseNormalization(Function):
"""Cross-channel normalization function used in AlexNet."""
def __init__(self, n=5, k=2, alpha=1e-4, beta=.75):
self.n = n
self.k = k
self.alpha = alpha
self.beta = beta
def forward_cpu(self, x):
half_n = self.n / 2
x2 = x[0] * x[0]
sum_part = x2.copy()
for i in xrange(1, half_n + 1):
sum_part[:, i: ] += x2[:, :-i]
sum_part[:, :-i] += x2[:, i: ]
self.unit_scale = self.k + self.alpha * sum_part
self.scale = self.unit_scale ** -self.beta
self.y = x[0] * self.scale
return self.y,
def backward_cpu(self, x, gy):
half_n = self.n / 2
summand = self.y * gy[0] / self.unit_scale
sum_part = summand.copy()
for i in xrange(1, half_n + 1):
sum_part[:, i: ] += summand[:, :-i]
sum_part[:, :-i] += summand[:, i: ]
gx = gy[0] * self.scale - 2 * self.alpha * self.beta * x[0] * sum_part
return gx,
def forward_gpu(self, x):
self.y = x[0] * x[0] # temporary
self.scale = cuda.empty_like(self.y)
_cu_conv_sum(self.scale, self.y, self.n)
cuda.elementwise(
'''float* y, float* scale, const float* x,
float k, float alpha, float beta''',
'''scale[i] = k + alpha * scale[i];
y[i] = x[i] * __powf(scale[i], -beta);''',
'lrn_fwd')(self.y, self.scale, x[0], self.k, self.alpha, self.beta)
return self.y,
def backward_gpu(self, x, gy):
summand = cuda.empty_like(x[0])
cuda.elementwise(
'''float* summand, const float* scale, const float* y,
const float* gy''',
'summand[i] = y[i] * gy[i] / scale[i]',
'lrn_bwd_summand')(summand, self.scale, self.y, gy[0])
gx = cuda.empty_like(x[0])
_cu_conv_sum(gx, summand, self.n)
cuda.elementwise(
'''float* gx, const float* x, const float* gy, const float* scale,
float beta, float coeff''',
'gx[i] = __powf(scale[i], -beta) * gy[i] - coeff * x[i] * gx[i]',
'lrn_bwd')(gx, x[0], gy[0], self.scale, self.beta,
2 * self.alpha * self.beta)
return gx,
def local_response_normalization(x, n=5, k=2, alpha=1e-4, beta=.75):
"""Local response normalization across neighboring channels.
This function implements normalization across channels. Let :math:`x` an
input image with :math:`N` channels. Then, this function computes an output
image :math:`y` by following formula:
.. math::
y_i = {x_i \\over \\left( k + \\
\\alpha \\sum_{j=\\max{1, i - n/2}}^{\\min{N, i + n/2}} \\
x_j^2 \\right)^\\beta}.
Args:
x (Variable): Input variable.
n (int): Normalization window width.
k (float): Smoothing parameter.
alpha (float): Normalizer scaling parameter.
beta (float): Normalizer power parameter.
Returns:
Variable: Output variable.
See: SSec. 3.3 of `ImageNet Classification with Deep Convolutional Neural \\
Networks <http://www.cs.toronto.edu/~fritz/absps/imagenet.pdf>`_
"""
return LocalResponseNormalization(n, k, alpha, beta)(x)
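# A minimal NumPy sketch of the formula above, mirroring forward_cpu with the
# default parameters (added for illustration; the input shape is arbitrary):
#
#     import numpy
#     x = numpy.random.randn(2, 8, 4, 4).astype(numpy.float32) # (batch, channel, H, W)
#     x2 = x * x
#     sum_part = x2.copy()
#     for i in xrange(1, 5 / 2 + 1):           # half_n = n / 2 with n = 5
#         sum_part[:, i:] += x2[:, :-i]
#         sum_part[:, :-i] += x2[:, i:]
#     y = x * (2 + 1e-4 * sum_part) ** -0.75   # k = 2, alpha = 1e-4, beta = .75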
|