| repo_name (string, len 5-100) | path (string, len 4-231) | language (1 class) | license (15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, len 0-8.16k) | middle (string, len 3-512) | suffix (string, len 0-8.17k) |
|---|---|---|---|---|---|---|---|---|
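Each record below is a fill-in-the-middle (FIM) sample: the `prefix`, `middle`, and `suffix` columns are consecutive slices of one source file, and the scalar columns describe its origin. As a minimal sketch of how such a table is typically consumed, assuming it is published through the Hugging Face `datasets` library (the dataset identifier used here is a hypothetical placeholder, not given by this dump):

```python
# Sketch only: "example-org/python-fim-corpus" is a placeholder identifier.
from datasets import load_dataset

ds = load_dataset("example-org/python-fim-corpus", split="train")

for row in ds.select(range(3)):
    # prefix + middle + suffix re-assembles the contiguous source excerpt
    snippet = row["prefix"] + row["middle"] + row["suffix"]
    print(row["repo_name"], row["path"], row["license"], row["score"], len(snippet))
```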
nirea/collardata | freebielist.py | Python | gpl-2.0 | 4,787 | 0.005849
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from model import FreebieItem, Distributor, Contributor
import datetime
import logging
head = '''
<html>
<head>
<title>%s</title>
<script src="/static/sorttable.js"></script>
<style>
body {
background-color: #000000;
color: #FFFFFF;
}
input {
background-color: #000000;
color: #FF0000;
outline-color: #000000;
border-color: #FF0000;
}
table.sortable thead {
background-color:#202020;
color:#FFFFFF;
font-weight: bold;
cursor: default;
}
</style>
</head>
<body>
<b><a href="/freebielist/">Freebies</a> | <a href="/freebielist/distributors">Distributors</a> | <a href="/freebielist/contributors">Contributors</a></b><p>
'''
end = '''
</body>
</html>
'''
class Distributors(webapp.RequestHandler):
def get(self):
message = '''<h1>List of Distributors</h1>
<p>This lists all Distributors currently in the distribution system as of %s.</p>
<table class="sortable" border=\"1\">''' % datetime.datetime.utcnow().isoformat(' ')
message += '<tr><th>Row</th><th>Distributor</th><th>Key</th></tr><br />\n'
query = Distributor.gql("")
dists = []
for record in query:
s = '<td>%s</td><td>%s</td>\n' % (record.avname, record.avkey)
if (s in dists) == False:
dists += [s]
for i in range(0,len(dists)):
message += '<tr><td>%d</td>%s' % (i+1, dists[i])
message += "</table>"
self.response.out.write((head % 'Distributor List') + message + end)
class Contributors(webapp.RequestHandler):
def get(self):
message = '''<h1>List of Contributors</h1>
<p>This lists all Contributors currently in the distribution system as of %s.</p>
<table class="sortable" border=\"1\">''' % datetime.datetime.utcnow().isoformat(' ')
message += '<tr><th>Row</th><th>Contributor</th><th>Key</th></tr><br />\n'
query = Contributor.gql("")
dists = []
for record in query:
s = '<td>%s</td><td>%s</td>\n' % (record.avname, record.avkey)
if (s in dists) == False:
dists += [s]
for i in range(0,len(dists)):
message += '<tr><td>%d</td>%s' % (i+1, dists[i])
message += "</table>"
self.response.out.write((head % 'Contributor List') + message + end)
class MainPage(webapp.RequestHandler):
def get(self):
message = '''<h1>List of Freebie items</h1>
<p>This lists all item currently in the distribution system as of %s.</p>
<table class="sortable" border=\"1\">''' % datetime.datetime.utcnow().isoformat(' ')
message += '<tr><th>Row</th><th>Owner</th><th>Giver ID</th><th>Name</th><th>Version</th><th>Update Date</th><th>Distributor Location</th><th>Texture Key</th><th>Texture Server</th><th>Texture Updatetime</th></tr><br />\n'
query = FreebieItem.gql("")
content =[]
for record in query:
owner = record.freebie_owner
if (owner == None):
owner = '***Not assigned***'
if (record.freebie_texture_update == None):
i = -1
else:
i = record.freebie_texture_update
content += ['<td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%d</td>\n' % (owner, record.freebie_giver, record.freebie_name, record.freebie_version, record.freebie_timedate, record.freebie_location, record.freebie_texture_key, record.freebie_texture_serverkey, i)]
content = sorted(content)
for i in range(0,len(content)):
message += '<tr><td>%d</td>%s' % (i+1, content[i])
message += "</table>"
self.response.out.write((head % 'Freebie Items List') + message + end)
application = webapp.WSGIApplication([
(r'/.*?/distributors',Distributors),
(r'/.*?/contributors',Contributors),
('.*', MainPage)
],
debug=True)
def real_main():
run_wsgi_app(application)
def profile_main():
# This is the main function for profiling
# We've renamed our original main() above to real_main()
import cProfile, pstats, StringIO
prof = cProfile.Profile()
prof = prof.runctx("real_main()", globals(), locals())
stream = StringIO.StringIO()
stats = pstats.Stats(prof, stream=stream)
stats.sort_stats("time") # Or cumulative
stats.print_stats(80) # 80 = how many to print
# The rest is optional.
# stats.print_callees()
# stats.print_callers()
logging.info("Profile data:\n%s", stream.getvalue())
if __name__ == "__main__":
profile_main()
ales-erjavec/anyqt | AnyQt/_backport/QtDesigner.py | Python | gpl-3.0 | 30 | 0.033333
from PyQt5.QtDesigner import *
sandan/sqlalchemy | lib/sqlalchemy/orm/persistence.py | Python | mit | 51,786 | 0.000019
# orm/persistence.py
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""private module containing functions used to emit INSERT, UPDATE
and DELETE statements on behalf of a :class:`.Mapper` and its descending
mappers.
The functions here are called only by the unit of work functions
in unitofwork.py.
"""
import operator
from itertools import groupby, chain
from .. import sql, util, exc as sa_exc
from . import attributes, sync, exc as orm_exc, evaluator
from .base import state_str, _attr_as_key, _entity_descriptor
from ..sql import expression
from ..sql.base import _from_objects
from . import loading
def _bulk_insert(
mapper, mappings, session_transaction, isstates, return_defaults):
base_mapper = mapper.base_mapper
cached_connections = _cached_connection_dict(base_mapper)
if session_transaction.session.connection_callable:
raise NotImplementedError(
"connection_callable / per-instance sharding "
"not supported in bulk_insert()")
if isstates:
if return_defaults:
states = [(state, state.dict) for state in mappings]
mappings = [dict_ for (state, dict_) in states]
else:
mappings = [state.dict for state in mappings]
else:
mappings = list(mappings)
connection = session_transaction.connection(base_mapper)
for table, super_mapper in base_mapper._sorted_tables.items():
if not mapper.isa(super_mapper):
continue
records = (
(None, state_dict, params, mapper,
connection, value_params, has_all_pks, has_all_defaults)
for
state, state_dict, params, mp,
conn, value_params, has_all_pks,
has_all_defaults in _collect_insert_commands(table, (
(None, mapping, mapper, connection)
for mapping in mappings),
bulk=True, return_defaults=return_defaults
)
)
_emit_insert_statements(base_mapper, None,
cached_connections,
super_mapper, table, records,
bookkeeping=return_defaults)
if return_defaults and isstates:
identity_cls = mapper._identity_class
identity_props = [p.key for p in mapper._identity_key_props]
for state, dict_ in states:
state.key = (
identity_cls,
tuple([dict_[key] for key in identity_props])
)
def _bulk_update(mapper, mappings, session_transaction,
isstates, update_changed_only):
base_mapper = mapper.base_mapper
cached_connections = _cached_connection_dict(base_mapper)
def _changed_dict(mapper, state):
return dict(
(k, v)
for k, v in state.dict.items() if k in state.committed_state or k
in mapper._primary_key_propkeys
)
if isstates:
if update_changed_only:
mappings = [_changed_dict(mapper, state) for state in mappings]
else:
mappings = [state.dict for state in mappings]
else:
mappings = list(mappings)
if session_transaction.session.connection_callable:
raise NotImplementedError(
"connection_callable / per-instance sharding "
"not supported in bulk_update()")
connection = session_transaction.connection(base_mapper)
for table, super_mapper in base_mapper._sorted_tables.items():
if not mapper.isa(super_mapper):
continue
records = _collect_update_commands(None, table, (
(None, mapping, mapper, connection,
(mapping[mapper._version_id_prop.key]
if mapper._version_id_prop else None))
for mapping in mappings
), bulk=True)
_emit_update_statements(base_mapper, None,
cached_connections,
super_mapper, table, records,
bookkeeping=False)
def save_obj(
base_mapper, states, uowtransaction, single=False):
"""Issue ``INSERT`` and/or ``UPDATE`` statements for a list
of objects.
This is called within the context of a UOWTransaction during a
flush operation, given a list of states to be flushed. The
base mapper in an inheritance hierarchy handles the inserts/
updates for all descendant mappers.
"""
# if batch=false, call _save_obj separately for each object
if not single and not base_mapper.batch:
for state in _sort_states(states):
save_obj(base_mapper, [state], uowtransaction, single=True)
return
states_to_update = []
states_to_insert = []
cached_connections = _cached_connection_dict(base_mapper)
for (state, dict_, mapper, connection,
has_identity,
row_switch, update_version_id) in _organize_states_for_save(
base_mapper, states, uowtransaction
):
if has_identity or row_switch:
states_to_update.append(
(state, dict_, mapper, connection, update_version_id)
)
else:
states_to_insert.append(
(state, dict_, mapper, connection)
)
for table, mapper in base_mapper._sorted_tables.items():
if table not in mapper._pks_by_table:
continue
insert = _collect_insert_commands(table, states_to_insert)
update = _collect_update_commands(
uowtransaction, table, states_to_update)
_emit_update_statements(base_mapper, uowtransaction,
cached_connections,
mapper, table, update)
_emit_insert_statements(base_mapper, uowtransaction,
cached_connections,
mapper, table, insert)
_finalize_insert_update_commands(
base_mapper, uowtransaction,
chain(
(
(state, state_dict, mapper, connection, False)
for state, state_dict, mapper, connection in states_to_insert
),
(
(state, state_dict, mapper, connection, True)
for state, state_dict, mapper, connection,
update_version_id in states_to_update
)
)
)
def post_update(base_mapper, states, uowtransaction, post_update_cols):
"""Issue UPDATE statements on behalf of a relationship() which
specifies post_update.
"""
cached_connections = _cached_connection_dict(base_mapper)
states_to_update = list(_organize_states_for_post_update(
base_mapper,
states, uowtransaction))
for table, mapper in base_mapper._sorted_tables.items():
if table not in mapper._pks_by_table:
continue
update = (
(state, state_dict, sub_mapper, connection)
for
state, state_dict, sub_mapper, connection in states_to_update
if table in sub_mapper._pks_by_table
)
update = _collect_post_update_commands(base_mapper, uowtransaction,
table, update,
post_update_cols)
_emit_post_update_statements(base_mapper, uowtransaction,
cached_connections,
mapper, table, update)
def delete_obj(base_mapper, states, uowtransaction):
"""Issue ``DELETE`` statements for a list of objects.
This is called within the context of a UOWTransaction during a
flush operation.
"""
cached_connections = _cached_connection_dict(base_mapper)
states_to_delete = list(_organize_states_for_delete(
base_mapper,
states,
uowtransaction))
table_to_mapper = base_mapper._sorted_tables
for table in reversed(list(table_to_mapper.keys())):
ma
AnoopAlias/XtendWeb | scripts/hhvm_ghost_hunter.py | Python | gpl-3.0 | 3,290 | 0.004863
#!/usr/bin/env python
import yaml
import pwd
import sys
import subprocess
import json
import os
__author__ = "Anoop P Alias"
__copyright__ = "Copyright Anoop P Alias"
__license__ = "GPL"
__email__ = "[email protected]"
installation_path = "/opt/nDeploy" # Absolute Installation Path
if __name__ == "__main__":
# This script is mostly intended to be called from a cronjob
conf_list = os.listdir("/opt/nDeploy/hhvm.d")
for filename in conf_list:
cpaneluser, extension = filename.split('.')
# if user is not in /etc/passwd we dont proceed any further
try:
pwd.getpwnam(cpaneluser)
except KeyError:
sys.exit(0)
else:
# Update the userdata cache
subprocess.Popen(['/scripts/updateuserdatacache', '--force', cpaneluser], shell=True)
# Try loading the main userdata cache file
cpuserdatajson = "/var/cpanel/userdata/" + cpaneluser + "/main.cache"
with open(cpuserdatajson) as cpaneluser_data_stream:
json_parsed_cpaneluser = json.load(cpaneluser_data_stream)
main_domain = json_parsed_cpaneluser.get('main_domain')
# parked_domains = yaml_parsed_cpaneluser.get('parked_domains') # This data is irrelevant as parked domain list is in ServerAlias
# addon_domains_dict = json_parsed_cpaneluser.get('addon_domains') # So we know which addon is mapped to which sub-domain
sub_domains = json_parsed_cpaneluser.get('sub_domains')
# Since we have all domains now..check XtendWeb domain-data files for HHVM enabled
# Turn off HHVM if no domain using HHVM
hhvm_flag = False
with open(installation_path + "/domain-data/" + main_domain, 'r') as domain_data_stream:
yaml_parsed_domain_data = yaml.safe_load(domain_data_stream)
backend_category = yaml_parsed_domain_data.get('backend_category', None)
if backend_category == 'HHVM':
hhvm_flag = True
for the_sub_domain in sub_domains:
if the_sub_domain.startswith("*"):
subdom_config_dom = "_wildcard_."+the_sub_domain.replace('*.', '')
else:
subdom_config_dom = the_sub_domain
with open(installation_path + "/domain-data/" + subdom_config_dom, 'r') as domain_data_stream:
yaml_parsed_domain_data = yaml.safe_load(domain_data_stream)
backend_category = yaml_parsed_domain_data.get('backend_category', None)
if backend_category == 'HHVM':
hhvm_flag = True
if hhvm_flag is False:
# This means none of the domain has HHVM enabled and we can shut down HHVM for the user
subprocess.call(['systemctl', 'stop', 'ndeploy_hhvm@'+cpaneluser+'.service'])
subprocess.call(['systemctl', 'disable', 'ndeploy_hhvm@'+cpaneluser+'.service'])
if os.path.isfile(installation_path+"/conf/ndeploy_cluster.yaml"):
subprocess.call('ansible -i /opt/nDeploy/conf/nDeploy-cluster/hosts ndeployslaves -m systemd -a "name=ndeploy_hhvm@'+cpaneluser+'.service state=stopped enabled=no"', shell=True)
rain1024/underthesea | underthesea/corpus/corpus.py | Python | gpl-3.0 | 87 | 0
class Corpus:
"""Interface for corpus
"""
def __init__(self):
pass
jptomo/rpython-lang-scheme | rpython/memory/gc/test/test_object_pinning.py | Python | mit | 38,271 | 0.001698
import py
from rpython.rtyper.lltypesystem import lltype, llmemory, llarena
from rpython.memory.gc.incminimark import IncrementalMiniMarkGC, WORD
from test_direct import BaseDirectGCTest
T = lltype.GcForwardReference()
T.become(lltype.GcStruct('pinning_test_struct2',
('someInt', lltype.Signed)))
S = lltype.GcForwardReference()
S.become(lltype.GcStruct('pinning_test_struct1',
('someInt', lltype.Signed),
('next', lltype.Ptr(T)),
('data', lltype.Ptr(T))))
class PinningGCTest(BaseDirectGCTest):
def setup_method(self, meth):
BaseDirectGCTest.setup_method(self, meth)
max = getattr(meth, 'max_number_of_pinned_objects', 20)
self.gc.max_number_of_pinned_objects = max
def test_pin_can_move(self):
# even a pinned object is considered to be movable. Only the caller
# of pin() knows if it is currently movable or not.
ptr = self.malloc(T)
adr = llmemory.cast_ptr_to_adr(ptr)
assert self.gc.can_move(adr)
assert self.gc.pin(adr)
assert self.gc.can_move(adr)
def test_pin_twice(self):
ptr = self.malloc(T)
adr = llmemory.cast_ptr_to_adr(ptr)
assert self.gc.pin(adr)
assert not self.gc.pin(adr)
def test_unpin_not_pinned(self):
# this test checks a requirement of the unpin() interface
ptr = self.malloc(S)
py.test.raises(Exception,
self.gc.unpin, llmemory.cast_ptr_to_adr(ptr))
def test__is_pinned(self):
ptr = self.malloc(T)
adr = llmemory.cast_ptr_to_adr(ptr)
assert not self.gc._is_pinned(adr)
assert self.gc.pin(adr)
assert self.gc._is_pinned(adr)
self.gc.unpin(adr)
assert not self.gc._is_pinned(adr)
def test_prebuilt_not_pinnable(self):
ptr = lltype.malloc(T, immortal=True)
self.consider_constant(ptr)
assert not self.gc.pin(llmemory.cast_ptr_to_adr(ptr))
self.gc.collect()
assert not self.gc.pin(llmemory.cast_ptr_to_adr(ptr))
# XXX test with multiple mallocs, and only part of them is pinned
def test_random(self):
# scenario: create bunch of objects. randomly pin, unpin, add to
# stackroots and remove from stackroots.
import random
for i in xrange(10**3):
obj = self.malloc(T)
obj.someInt = 100
#
if random.random() < 0.5:
self.stackroots.append(obj)
print("+stack")
if random.random() < 0.5:
self.gc.pin(llmemory.cast_ptr_to_adr(obj))
print("+pin")
self.gc.debug_gc_step(random.randint(1, 4))
for o in self.stackroots[:]:
assert o.someInt == 100
o_adr = llmemory.cast_ptr_to_adr(o)
if random.random() < 0.1 and self.gc._is_pinned(o_adr):
print("-pin")
self.gc.unpin(o_adr)
if random.random() < 0.1:
print("-stack")
self.stackroots.remove(o)
class TestIncminimark(PinningGCTest):
from rpython.memory.gc.incminimark import IncrementalMiniMarkGC as GCClass
from rpython.memory.gc.incminimark import STATE_SCANNING, STATE_MARKING
def test_try_pin_gcref_containing_type(self):
# scenario: incminimark's object pinning can't pin objects that may
# contain GC pointers
obj = self.malloc(S)
assert not self.gc.pin(llmemory.cast_ptr_to_adr(obj))
def test_pin_old(self):
# scenario: try pinning an old object. This should be not possible and
# we want to make sure everything stays as it is.
old_ptr = self.malloc(S)
old_ptr.someInt = 900
self.stackroots.append(old_ptr)
assert self.stackroots[0] == old_ptr # test assumption
self.gc.collect()
old_ptr = self.stackroots[0]
# now we try to pin it
old_adr = llmemory.cast_ptr_to_adr(old_ptr)
assert not self.gc.is_in_nursery(old_adr)
assert not self.gc.pin(old_adr)
assert self.gc.pinned_objects_in_nursery == 0
def pin_pin_pinned_object_count(self, collect_func):
# scenario: pin two objects that are referenced from stackroots. Check
# if the pinned objects count is correct, even after an other collection
pinned1_ptr = self.malloc(T)
pinned1_ptr.someInt = 100
self.stackroots.append(pinned1_ptr)
#
pinned2_ptr = self.malloc(T)
pinned2_ptr.someInt = 200
self.stackroots.append(pinned2_ptr)
#
assert self.gc.pin(llmemory.cast_ptr_to_adr(pinned1_ptr))
assert self.gc.pinned_objects_in_nursery == 1
assert self.gc.pin(llmemory.cast_ptr_to_adr(pinned2_ptr))
assert self.gc.pinned_objects_in_nursery == 2
#
collect_func()
#
assert self.gc.pinned_objects_in_nursery == 2
def test_pin_pin_pinned_object_count_minor_collection(self):
self.pin_pin_pinned_object_count(self.gc.minor_collection)
def test_pin_pin_pinned_object_count_major_collection(self):
self.pin_pin_pinned_object_count(self.gc.collect)
def pin_unpin_pinned_object_count(self, collect_func):
# scenario: pin an object and check the pinned object count. Unpin it
# and check the count again.
pinned_ptr = self.malloc(T)
pinned_ptr.someInt = 100
self.stackroots.append(pinned_ptr)
pinned_adr = llmemory.cast_ptr_to_adr(pinned_ptr)
#
assert self.gc.pinned_objects_in_nursery == 0
assert self.gc.pin(pinned_adr)
assert self.gc.pinned_objects_in_nursery == 1
collect_func()
assert self.gc.pinned_objects_in_nursery == 1
self.gc.unpin(pinned_adr)
assert self.gc.pinned_objects_in_nursery == 0
collect_func()
assert self.gc.pinned_objects_in_nursery == 0
def test_pin_unpin_pinned_object_count_minor_collection(self):
self.pin_unpin_pinned_object_count(self.gc.minor_collection)
def test_pin_unpin_pinned_object_count_major_collection(self):
self.pin_unpin_pinned_object_count(self.gc.collect)
def pinned_obj_in_stackroot(self, collect_func):
# scenario: a pinned object that is part of the stack roots. Check if
# it is not moved
#
ptr = self.malloc(T)
ptr.someInt = 100
self.stackroots.append(ptr)
assert self.stackroots[0] == ptr # validate our assumption
adr = llmemory.cast_ptr_to_adr(ptr)
assert self.gc.is_in_nursery(adr) # to be sure
assert self.gc.pin(adr)
#
# the object shouldn't move from now on
collect_func()
#
# check if it is still at the same location as expected
adr_after_collect = llmemory.cast_ptr_to_adr(self.stackroots[0])
assert self.gc.is_in_nursery(adr_after_collect)
assert adr == adr_after_collect
assert self.gc._is_pinned(adr)
assert ptr.someInt == 100
assert self.gc.pinned_objects_in_nursery == 1
def test_pinned_obj_in_stackroot_minor_collection(self):
self.pinned_obj_in_stackroot(self.gc.minor_collection)
def test_pinned_obj_in_stackroot_full_major_collection(self):
self.pinned_obj_in_stackroot(self.gc.collect)
def test_pinned_obj_in_stackroots_stepwise_major_collection(self):
# scenario: same as for 'pinned_obj_in_stackroot' with minor change
# that we do stepwise major collection and check in each step for
# a correct state
#
ptr = self.malloc(T)
ptr.someInt = 100
self.stackroots.append(ptr)
assert self.stackroots[0] == ptr # validate our assumption
adr = llmemory.cast_ptr_to_adr(ptr)
assert self.gc.is_in_nursery(adr)
assert self.gc.pin(adr)
#
# the object shouldn't move from now on. Do a full round of major
# steps and check each time for correct state
#
# check that we sta
coolbombom/CouchPotatoServer | couchpotato/core/plugins/automation/main.py | Python | gpl-3.0 | 1,352 | 0.016272
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.environment import Env
log = CPLog(__name__)
class Automation(Plugin):
def __init__(self):
addEvent('app.load', self.setCrons)
if not Env.get('dev'):
addEvent('app.load', self.addMovies)
addEvent('setting.save.automation.hour.after', self.setCrons)
def setCrons(self):
fireEvent('schedule.interval', 'automation.add_movies', self.addMovies, hours = self.conf('hour', default = 12))
def addMovies(self):
movies = fireEvent('automation.get_movies', merge = True)
movie_ids = []
for imdb_id in movies:
prop_name = 'automation.added.%s' % imdb_id
added = Env.prop(prop_name, default = False)
if not added:
added_movie = fireEvent('movie.add', params = {'identifier': imdb_id}, force_readd = False, search_after = False, update_library = True, single = True)
if added_movie:
movie_ids.append(added_movie['id'])
Env.prop(prop_name, True)
for movie_id in movie_ids:
movie_dict = fireEvent('movie.get', movie_id, single = True)
fireEvent('searcher.single', movie_dict)
GastonLab/ddb-scripts | specialist/scan_multi-gene_annotated_snpEff.py | Python | mit | 1,956 | 0.00409
#!/usr/bin/env python
# Standard packages
import sys
import cyvcf2
import argparse
import re  # needed for the re.split() call below that parses the ANN description
import geneimpacts
from cyvcf2 import VCF
def get_effects(variant, annotation_keys):
effects = []
effects += [geneimpacts.SnpEff(e, annotation_keys) for e in variant.INFO.get("ANN").split(",")]
return effects
def get_top_impact(effects):
top_impact = geneimpacts.Effect.top_severity(effects)
if isinstance(top_impact, list):
top_impact = top_impact[0]
return top_impact
def get_genes(effects):
genes_list = []
for effect in effects:
if effect.gene not in genes_list:
genes_list.append(effect.gene)
return genes_list
def get_transcript_effects(effects):
transcript_effects = dict()
for effect in effects:
if effect.transcript is not None:
transcript_effects[effect.transcript] = "{biotype}|{effect}".format(biotype=effect.biotype,
effect=effect.impact_severity)
return transcript_effects
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-a', '--annotated_vcf', help="snpEff annotated VCF file to scan")
parser.add_argument('-o', '--output', help="File for output information")
args = parser.parse_args()
sys.stdout.write("Parsing VCFAnno VCF with CyVCF2\n")
reader = cyvcf2.VCFReader(args.annotated_vcf)
desc = reader["ANN"]["Description"]
annotation_keys = [x.strip("\"'") for x in re.split("\s*\|\s*", desc.split(":", 1)[1].strip('" '))]
sys.stdout.write("Parsing VCFAnno VCF\n")
vcf = VCF(args.annotated_vcf)
for variant in vcf:
effects = get_effects(variant, annotation_keys)
top_impact = get_top_impact(effects)
gene_effects = dict()
for effect in effects:
if effect.gene not in gene_effects.keys():
if effect.transcript is not None:
MrPablozOne/kaira | ptp/base/analysis.py | Python | gpl-3.0 | 6,178 | 0.003561
#
# Copyright (C) 2013 Stanislav Bohm
#
# This file is part of Kaira.
#
# Kaira is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License, or
# (at your option) any later version.
#
# Kaira is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Kaira. If not, see <http://www.gnu.org/licenses/>.
#
import utils
def all_free_variables(edges):
return utils.unions(edges, lambda edge: edge.get_free_vars())
def get_variable_sources(inscriptions):
sources = {}
for inscription in inscriptions:
if not inscription.is_expr_variable():
continue
if sources.get(inscription.expr):
continue
if inscription.is_bulk():
sources[inscription.expr] = None
else:
sources[inscription.expr] = inscription.uid
return sources
def is_dependant(inscription1, inscription2):
if inscription1.edge is inscription2.edge and \
inscription2.index < inscription1.index:
return True
if not inscription2.is_expr_variable():
return False
return inscription2.expr in inscription1.get_foreign_variables()
def analyze_transition(tr):
variable_sources = {} # string -> uid - which inscriptions carry input variables
reuse_tokens = {} # uid -> uid - identification number of token for output inscpription
fresh_tokens = [] # (uid, type) - what tokens has to be created for output
used_tokens = [] # [uid] - Tokens from input inscriptions that are reused on output
variable_sources_out = {} # string -> uid or None
bulk_overtake = [] # [uid]
overtaken_variables = set()
def inscription_out_weight(inscription):
# Reorder edges, bulk edges first because we want them send first
# Otherwise it can cause problems like in sending results in "workers" example
s = inscription.config.get("seq")
if s is None:
seq = 0
else:
seq = int(s) * 3
if inscription.is_bulk():
return seq
# Unconditional edges has higher priority
if inscription.is_conditioned():
return seq + 2
else:
return seq + 1
def inscription_in_weight(inscription):
if inscription.is_conditioned():
return 1
else:
return 0
inscriptions_in = sum((edge.inscriptions for edge in tr.edges_in), [])
inscriptions_in.sort(key=inscription_in_weight)
inscriptions_out = sum((edge.inscriptions for edge in tr.edges_out), [])
inscriptions_out.sort(key=inscription_out_weight)
variable_sources = get_variable_sources(inscriptions_in)
# Order input inscriptions by variable dependancy
inscriptions_in = utils.topological_ordering(inscriptions_in, is_dependant)
if inscriptions_in is None:
raise utils.PtpException("Circle variable dependancy", tr.get_source())
# Try reuse tokens
for inscription in inscriptions_out:
if inscription.is_bulk() or not inscription.is_local():
continue # Bulk and nonlocal edge cannot use token reusage
if not inscription.is_expr_variable():
continue # Current implementation reuses tokens only for variable expression
if inscription.is_collective():
continue # Collective operations cannot use token reusage
token_uid = variable_sources.get(inscription.expr)
if token_uid is None or token_uid in used_tokens:
# Variable is not taken from input as token
# or token is already reused --> reusage not possible
continue
reuse_tokens[inscription.uid] = token_uid
used_tokens.append(token_uid)
# Setup fresh variables where token was not reused
for inscription in inscriptions_out:
if not inscription.is_expr_variable():
continue # We are interested only in variables
variable = inscription.expr
if variable in variable_sources:
# Variable take from input so we do not have to deal here with it
continue
if variable in variable_sources_out:
# Variable already prepared for output
continue
if inscription.is_bulk():
# No token, just build variable
variable_sources_out[variable] = None
continue
if inscription.is_local():
# Local send, we prepare token
fresh_tokens.append((inscription.uid, inscription.edge.place.type))
variable_sources_out[variable] = inscription.uid
reuse_tokens[inscription.uid] = inscription.uid # Use this fresh new token
else:
# Just create variable
variable_sources_out[variable] = None
for inscription in reversed(inscriptions_out):
# Now we are checking overtake. It has to be in reversed order
# becacase overtake has to be the last operation on variable
if not inscription.is_bulk() or not inscription.is_expr_variable():
continue # We are interested only in variables and bulk inscriptions
if inscription.expr not in overtaken_variables:
overtaken_variables.add(inscription.expr)
bulk_overtake.append(inscription.uid)
for inscription in inscriptions_out:
for variable in inscription.get_other_variables():
if variable not in variable_sources and \
variable not in variable_sources_out:
variable_sources_out[variable] = None
tr.inscriptions_in = inscriptions_in
tr.inscriptions_out = inscriptions_out
tr.variable_sources = variable_sources
tr.reuse_tokens = reuse_tokens
tr.variable_sources_out = variable_sources_out
tr.fresh_tokens = fresh_tokens
tr.bulk_overtake = bulk_overtake
tsl143/addons-server | src/olympia/ratings/tests/test_tasks.py | Python | bsd-3-clause | 3,772 | 0
import mock
from olympia.amo.tests import addon_factory, TestCase, user_factory
from olympia.ratings.models import Rating
from olympia.ratings.tasks import addon_rating_aggregates
class TestAddonRatingAggregates(TestCase):
# Prevent <Rating>.refresh() from being fired when setting up test data,
# since it'd call addon_rating_aggregates too early.
@mock.patch.object(Rating, 'refresh', lambda x, update_denorm=False: None)
def test_addon_rating_aggregates(self):
addon = addon_factory()
addon2 = addon_factory()
# Add a purely unlisted add-on. It should not be considered when
# calculating bayesian rating for the other add-ons.
addon3 = addon_factory(total_ratings=3, average_rating=4)
self.make_addon_unlisted(addon3)
# Create a few ratings with various scores.
user = user_factory()
# Add an old rating that should not be used to calculate the average,
# because the same user posts a new one right after that.
old_rating = Rating.objects.create(
addon=addon, rating=1, user=user, is_latest=False, body=u'old')
new_rating = Rating.objects.create(addon=addon, rating=3, user=user,
body=u'new')
Rating.objects.create(addon=addon, rating=3, user=user_factory(),
body=u'foo')
Rating.objects.create(addon=addon, rating=2, user=user_factory())
Rating.objects.create(addon=addon, rating=1, user=user_factory())
# On another addon as well.
Rating.objects.create(addon=addon2, rating=1, user=user_factory())
Rating.objects.create(addon=addon2, rating=1, user=user_factory(),
body=u'two')
# addon_rating_aggregates should ignore replies, so let's add one.
Rating.objects.create(
addon=addon, rating=5, user=user_factory(), reply_to=new_rating)
# Make sure old_review is considered old, new_review considered new.
old_rating.reload()
new_rating.reload()
assert old_rating.is_latest is False
assert new_rating.is_latest is True
# Make sure total_ratings hasn't been updated yet (because we are
# mocking Rating.refresh()).
addon.reload()
addon2.reload()
assert addon.total_ratings == 0
assert addon2.total_ratings == 0
assert addon.bayesian_rating == 0
assert addon.average_rating == 0
assert addon2.bayesian_rating == 0
assert addon2.average_rating == 0
assert addon.text_ratings_count == 0
assert addon2.text_ratings_count == 0
# Trigger the task and test results.
addon_rating_aggregates([addon.pk, addon2.pk])
addon.reload()
addon2.reload()
assert addon.total_ratings == 4
assert addon2.total_ratings == 2
assert addon.bayesian_rating == 1.9821428571428572
assert addon.average_rating == 2.25
assert addon2.bayesian_rating == 1.375
assert addon2.average_rating == 1.0
assert addon.text_ratings_count == 2
assert addon2.text_ratings_count == 1
# Trigger the task with a single add-on.
Rating.objects.create(addon=addon2, rating=5, user=user_factory(),
body=u'xxx')
addon2.reload()
assert addon2.total_ratings == 2
addon_rating_aggregates(addon2.pk)
addon2.reload()
assert addon2.total_ratings == 3
assert addon2.text_ratings_count == 2
assert addon.bayesian_rating == 1.9821428571428572
assert addon.average_rating == 2.25
assert addon2.bayesian_rating == 1.97915
assert addon2.average_rating == 2.3333
WladimirSidorenko/CGSA | cgsa/dl/base.py | Python | mit | 20,205 | 0.000099
#!/usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
##################################################################
# Documentation
##################################################################
# Imports
from __future__ import absolute_import, unicode_literals, print_function
try:
from cPickle import dump, load
except ImportError:
from _pickle import dump, load
from collections import Counter
from copy import deepcopy
from keras.callbacks import EarlyStopping, ModelCheckpoint, TensorBoard
from keras.layers.embeddings import Embedding
from keras.models import load_model
from keras.preprocessing.sequence import pad_sequences
from keras.regularizers import l2
from keras.utils import to_categorical
from six import iteritems
from sklearn.utils.class_weight import compute_class_weight
from tempfile import mkstemp
import abc
import numpy as np
import os
from cgsa.base import BaseAnalyzer
from cgsa.utils.common import LOGGER, is_relevant, normlex
from .layers import CUSTOM_OBJECTS, DFLT_INITIALIZER, EMPTY_IDX, UNK_IDX
from .layers.word2vec import Word2Vec
from .utils import ModelMGPU, N_GPUS
##################################################################
# Variables and Constants
# default dimensionality for task-specific vectors
DFLT_VDIM = 100
DFLT_N_EPOCHS = 24 # 24
EMPTY_TOK = "%EMPTY%"
UNK_TOK = "%UNK%"
DICT_OFFSET = 1
UNK_PROB = 1e-4
L2_COEFF = 1e-4
EMB_INDICES_NAME = "embedding_indices"
# LBA Results for Different Optimizers:
# sgd: Macro: 10.33%; Micro: 36.2623%;
# rmsprop: Macro: 30.84%; Micro: 44.5902%;
# adagrad: Macro: 35.45%; Micro: 61.5738%;
# adadelta: 30.84%; Micro: 44.5902%;
# adam: Macro: 30.84%; Micro: 44.5902%;
# nadam: 30.84%; Micro: 44.5902%;
DFLT_TRAIN_PARAMS = {"optimizer": "adagrad",
"metrics": ["categorical_accuracy"],
"loss": "categorical_hinge"}
##################################################################
# Methods
##################################################################
# Class
class DLBaseAnalyzer(BaseAnalyzer):
"""Class for DeepLearning-based sentiment analysis.
Attributes:
"""
def __init__(self, w2v=False, lstsq=False, embeddings=None, **kwargs):
"""Class constructor.
Args:
w2v (bool): use word2vec embeddings
lstsq (bool): use the least squares method
embeddings (cgsa.utils.word2vec.Word2Vec or None): pretrained
embeddings
"""
super(DLBaseAnalyzer, self).__init__()
self.name = "DLBaseAnalyzer"
# boolean flags indicating whether to use external embeddings
self._w2v = w2v
self._lstsq = lstsq
# actual external embeddings
self._embeddings = embeddings
# mapping from words to their embedding indices in `self._embs` or
# `self.W_EMB`
self._w2i = {EMPTY_TOK: EMPTY_IDX, UNK_TOK: UNK_IDX}
self._pad_value = EMPTY_IDX
# mapping from words to their embeddings (will be initialized after
# training the network, if `w2v` or `lstsq` are true)
self._embs = None
# least squares matrix (will be initialized after training the network,
# if true)
self._lstsq_mtx = None
self.ndim = -1 # vector dimensionality will be initialized later
self.intm_dim = -1
self._model = None
self._model_path = None
self._trained = False
self._n_epochs = DFLT_N_EPOCHS
# mapping from word to its embedding index
self._aux_keys = set((0, 1))
self._max_seq_len = -1
self._min_width = 0
self._n_y = 0
self._train_params = deepcopy(DFLT_TRAIN_PARAMS)
self._fit_params = {}
# variables needed for training
self._w_stat = self._pred_class = None
self.W_EMB = self._cost = self._dev_cost = None
# initialize functions to None
self._reset_funcs()
# set up functions for obtaining word embeddings at train and test
# times
self._init_wemb_funcs()
def train(self, train_x, train_y, dev_x, dev_y,
a_grid_search, a_multi_gpu):
self._start_training()
self._logger.debug("Training %s...", self.name)
self._logger.debug("Preparing dataset...")
train_x, train_y, dev_x, dev_y = self._prepare_data(
train_x, train_y, dev_x, dev_y
)
self._logger.debug("Dataset ready...")
# initialize the network
self._logger.debug("Initializing the network...")
# self._update_fit_params(train_y)
self._init_nn()
self._logger.debug("Network ready...")
# initialize callbacks
_, ofname = mkstemp(suffix=".hdf5", prefix=self.name + '.')
try:
early_stop = EarlyStopping(patience=3, verbose=1)
chck_point = ModelCheckpoint(
filepath=ofname, monitor="val_categorical_accuracy",
mode="auto", verbose=1,
save_weights_only=True,
save_best_only=True
)
tensorboard = TensorBoard(
log_dir=os.environ.get("TENSORBOARD_DIR", "/tmp"),
histogram_freq=1, batch_size=32,
write_graph=True, write_grads=True
)
if a_multi_gpu:
train_model = ModelMGPU(self._model)
self._fit_params["batch_size"] = 32 * N_GPUS
train_model.compile(**self._train_params)
else:
train_model = self._model
train_model.fit(train_x, train_y,
validation_data=(dev_x, dev_y),
epochs=self._n_epochs,
callbacks=[early_stop, chck_point, tensorboard],
**self._fit_params)
self._model.load_weights(ofname)
self._finish_training()
finally:
os.remove(ofname)
self._logger.debug("%s trained", self.name)
def predict_proba(self, msg, yvec):
wseq = self._tweet2wseq(msg)
embs = np.array(
self._pad(len(wseq), self._pad_value)
+ [self.get_test_w_emb(w) for w in wseq], dtype="int32")
ret = self._model.predict(np.asarray([embs]),
batch_size=1,
verbose=2)
yvec[:] = ret[0]
def predict_proba_raw(self, messages):
yvecs = np.zeros((len(messages), self._n_y))
for i, msg_i in enumerate(messages):
self.predict_proba(msg_i, yvecs[i])
return yvecs
def restore(self, embs):
"""Restore members which could not be serialized.
Args:
embs (cgsa.utils.word2vec.Word2Vec or None): pretrained
embeddings
"""
self._embeddings = embs
self._logger = LOGGER
self._init_wemb_funcs()
def reset(self):
"""Remove members which cannot be serialized.
"""
# set functions to None
self._reset_funcs()
self._embeddings = None
self.W_EMB = None
super(DLBaseAnalyzer, self).reset()
def save(self, path):
"""Dump model to disc.
Args:
a_path (str): file path at which to store the model
Returns:
void:
"""
# set functions to None
model_path = path + ".h5"
self._model.save(model_path)
self._model_path = os.path.basename(model_path)
# all paths are relative
model = self._model
self._model = None
with open(path, "wb") as ofile:
dump(self, ofile)
self._model = model
def _load(self, a_path):
super(DLBaseAnalyzer, self)._load(a_path)
self._model = load_model(
os.path.join(a_path, self._model_path),
custom_objects=CUSTOM_OBJECTS
)
@abc.abstractmethod
def _init_nn(self):
"""Initialize neural network.
"""
raise NotImplementedError
def _extract_feats(self, a_tweet):
pass
def _start_training(self):
"""Prepare for training.
mzdaniel/oh-mainline | vendor/packages/kombu/kombu/transport/pyamqplib.py | Python | agpl-3.0 | 9,517 | 0.002102
"""
kombu.transport.pyamqplib
=========================
amqplib transport.
:copyright: (c) 2009 - 2011 by Ask Solem.
:license: BSD, see LICENSE for more details.
"""
import socket
try:
from ssl import SSLError
except ImportError:
class SSLError(Exception): # noqa
pass
from amqplib import client_0_8 as amqp
from amqplib.client_0_8 import transport
from amqplib.client_0_8.channel import Channel as _Channel
from amqplib.client_0_8.exceptions import AMQPConnectionException
from amqplib.client_0_8.exceptions import AMQPChannelException
from kombu.transport import base
from kombu.utils.encoding import str_to_bytes
DEFAULT_PORT = 5672
# amqplib's handshake mistakenly identifies as protocol version 1191,
# this breaks in RabbitMQ tip, which no longer falls back to
# 0-8 for unknown ids.
transport.AMQP_PROTOCOL_HEADER = str_to_bytes("AMQP\x01\x01\x08\x00")
class Connection(amqp.Connection): # pragma: no cover
def _dispatch_basic_return(self, channel, args, msg):
reply_code = args.read_short()
reply_text = args.read_shortstr()
exchange = args.read_shortstr()
routing_key = args.read_shortstr()
exc = AMQPChannelException(reply_code, reply_text, (50, 60))
if channel.events["basic_return"]:
for callback in channel.events["basic_return"]:
callback(exc, exchange, routing_key, msg)
else:
raise exc
def __init__(self, *args, **kwargs):
super(Connection, self).__init__(*args, **kwargs)
self._method_override = {(60, 50): self._dispatch_basic_return}
def drain_events(self, allowed_methods=None, timeout=None):
"""Wait for an event on any channel."""
return self.wait_multi(self.channels.values(), timeout=timeout)
def wait_multi(self, channels, allowed_methods=None, timeout=None):
"""Wait for an event on a channel."""
chanmap = dict((chan.channel_id, chan) for chan in channels)
chanid, method_sig, args, content = self._wait_multiple(
chanmap.keys(), allowed_methods, timeout=timeout)
channel = chanmap[chanid]
if content \
and channel.auto_decode \
and hasattr(content, 'content_encoding'):
try:
content.body = content.body.decode(content.content_encoding)
except Exception:
pass
amqp_method = self._method_override.get(method_sig) or \
channel._METHOD_MAP.get(method_sig, None)
if amqp_method is None:
raise Exception('Unknown AMQP method (%d, %d)' % method_sig)
if content is None:
return amqp_method(channel, args)
else:
return amqp_method(channel, args, content)
def read_timeout(self, timeout=None):
if timeout is None:
return self.method_reader.read_method()
sock = self.transport.sock
prev = sock.gettimeout()
sock.settimeout(timeout)
try:
try:
return self.method_reader.read_method()
except SSLError, exc:
# http://bugs.python.org/issue10272
if "timed out" in str(exc):
raise socket.timeout()
raise
finally:
sock.settimeout(prev)
def _wait_multiple(self, channel_ids, allowed_methods, timeout=None):
for channel_id in channel_ids:
method_queue = self.channels[channel_id].method_queue
for queued_method in method_queue:
method_sig = queued_method[0]
if (allowed_methods is None) \
or (method_sig in allowed_methods) \
or (method_sig == (20, 40)):
method_queue.remove(queued_method)
method_sig, args, content = queued_method
return channel_id, method_sig, args, content
# Nothing queued, need to wait for a method from the peer
read_timeout = self.read_timeout
channels = self.channels
wait = self.wait
while 1:
channel, method_sig, args, content = read_timeout(timeout)
if (channel in channel_ids) \
and ((allowed_methods is None) \
or (method_sig in allowed_methods) \
or (method_sig == (20, 40))):
return channel, method_sig, args, content
# Not the channel and/or method we were looking for. Queue
# this method for later
channels[channel].method_queue.append((method_sig, args, content))
#
# If we just queued up a method for channel 0 (the Connection
# itself) it's probably a close method in reaction to some
# error, so deal with it right away.
#
if channel == 0:
wait()
def channel(self, channel_id=None):
try:
return self.channels[channel_id]
except KeyError:
return Channel(self, channel_id)
class Message(base.Message):
"""A message received by the broker.
.. attribute:: body
The message body.
.. attribute:: delivery_tag
The message delivery tag, uniquely identifying this message.
.. attribute:: channel
The channel instance the message was received on.
"""
def __init__(self, channel, msg, **kwargs):
props = msg.properties
super(Message, self).__init__(channel,
body=msg.body,
delivery_tag=msg.delivery_tag,
content_type=props.get("content_type"),
content_encoding=props.get("content_encoding"),
delivery_info=msg.delivery_info,
properties=msg.properties,
headers=props.get("application_headers"),
**kwargs)
class Channel(_Channel, base.StdChannel):
Message = Message
events = {"basic_return": []}
def __init__(self, *args, **kwargs):
self.no_ack_consumers = set()
super(Channel, self).__init__(*args, **kwargs)
def prepare_message(self, message_data, priority=None,
content_type=None, content_encoding=None, headers=None,
properties=None):
"""Encapsulate data into a AMQP message."""
return amqp.Message(message_data, priority=priority,
content_type=content_type,
content_encoding=content_encoding,
application_headers=headers,
**properties)
def message_to_python(self, raw_message):
"""Convert encoded message body back to a Python value."""
return self.Message(self, raw_message)
def close(self):
try:
super(Channel, self).close()
finally:
self.connection = None
def basic_consume(self, *args, **kwargs):
consumer_tag = super(Channel, self).basic_consume(*args, **kwargs)
if kwargs["no_ack"]:
self.no_ack_consumers.add(consumer_tag)
return consumer_tag
def basic_cancel(self, consumer_tag, **kwargs):
self.no_ack_consumers.discard(consumer_tag)
return super(Channel, self).basic_cancel(consumer_tag, **kwargs)
class Transport(base.Transport):
Connection = Connection
default_port = DEFAULT_PORT
# it's very annoying that amqplib sometimes raises AttributeError
# if the connection is lost, but nothing we can do about that here.
connection_errors = (AMQPConnectionException,
socket.error,
IOError,
OSError,
AttributeError)
channel_errors = (AMQPChannelException, )
def __init__(self, client, **kwargs):
self.client = client
self.default_port = kwargs.get("default_port") or self.default_port
def create_channel(self, connection):
return connection.channel()
def drain_events(self, connection, **kwargs):
return connection.drain_events(**kwargs)
def establish_connection(self):
"""Establi
MiningTheDisclosures/conflict-minerals-data | conflict_minerals_data/edgar/migrations/0008_edgardocumentcontent_urls.py | Python | mit | 628 | 0.001592
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-14 06:27
from __future__ import unicode_literals
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('edgar', '0007_auto_20170706_2215'),
]
operations = [
migrations.AddField(
model_name='edgardocumentcontent',
name='urls',
field=django.contrib.postgres.fields.ArrayField(base_field=models.TextField(blank=True), blank=True, help_text='URL we parsed out of the content', null=True, size=None),
),
]
hydrogen18/fairywren | tracker.py | Python | mit | 8,937 | 0.053933
import vanilla
import urlparse
import fnmatch
import base64
import bencode
import struct
import socket
import peers
import posixpath
from eventlet.green import zmq
import cPickle as pickle
import eventlet.queue
import fairywren
import itertools
import logging
import array
def sendBencodedWsgiResponse(env,start_response,responseDict):
headers = [('Content-Type','text/plain')]
headers.append(('Cache-Control','no-cache'))
start_response('200 OK',headers)
yield bencode.bencode(responseDict)
def getClientAddress(environ):
try:
return environ['HTTP_X_FORWARDED_FOR'].split(',')[-1].strip()
except KeyError:
return environ['REMOTE_ADDR']
def dottedQuadToInt(dq):
#Change the peer IP into an integer
try:
peerIp = socket.inet_aton(dq)
except socket.error:
raise ValueError('Not a valid IP address:%s' % peerIp)
#Convert from network byte order to integer
try:
peerIp, = struct.unpack('!I',peerIp)
except struct.error:
raise ValueError('Serious wtf, how did this fail')
return peerIp
class Tracker(object):
def __init__(self,auth,peers,pathDepth):
self.auth = auth
self.peers = peers
self.pathDepth = pathDepth
self.announceLog = logging.getLogger('fairywren.announce')
self.trackerLog = logging.getLogger('fairywren.tracker')
self.afterAnnounce = []
self.trackerLog.info('Created')
def addAfterAnnounce(self,callback):
self.afterAnnounce.append(callback)
def getScrape(self,info_hashes):
"""Return a dictionary object that contains a tracker scrape.
@param info_hashes: list on info_hashes to include in the scrape
"""
retval = {}
retval['files'] = {}
for info_hash in info_hashes:
result = {}
result['downloaded'] = 0
result['complete'] = self.peers.getNumberOfSeeds(info_hash)
result['incomplete'] = self.peers.getNumberOfLeeches(info_hash)
retval['files'][info_hash] = result
return retval
def announce(self,env,start_response):
#Extract and normalize the path
#Posix path may not be the best approach here but
#no alternate has been found
pathInfo = posixpath.normpath(env['PATH_INFO'])
#Split the path into components. Drop the first
#since it should always be the empty string
pathComponents = pathInfo.split('/')[1+self.pathDepth:]
#A SHA512 encoded in base64 is 88 characters
#but the last two are always '==' so
#86 is used here
if len(pathComponents) !=2 or len(pathComponents[0]) != 86 or pathComponents[1] != 'announce':
return vanilla.http_error(404,env,start_response)
#Only GET requests are valid
if env['REQUEST_METHOD'] != 'GET':
return vanilla.http_error(405,env,start_response)
#Add the omitted equals signs back in
secretKey = pathComponents[0] + '=='
#base64 decode the secret key
try:
secretKey = base64.urlsafe_b64decode(secretKey)
except TypeError:
return vanilla.http_error(404,env,start_response)
#Extract the IP of the peer
peerIp = getClientAddress(env)
peerIpAsString = peerIp
try:
peerIp = dottedQuadToInt(peerIp)
except ValueError:
return vanilla.http_error(500,env,start_response)
#Parse the query string. Absence indicates error
if 'QUERY_STRING' not in env:
return vanilla.http_error(400,env,start_response)
query = urlparse.parse_qs(env['QUERY_STRING'])
#List of tuples. Each tuple is
#
#Parameter name
#default value (if any)
#type conversion, side-effect free callable
params = []
def validateInfoHash(info_hash):
#Info hashes are a SHA1 hash, and are always 20 bytes
if len(info_hash) != 20:
raise ValueError("Length " + str(len(info_hash)) + ' not acceptable')
return info_hash
params.append(('info_hash',None,validateInfoHash))
def validatePeerId(peer_id):
#Peer IDs are a string chosen by the peer to identify itself
#and are always 20 bytes
if len(peer_id) != 20:
raise ValueError("Improper Length")
return peer_id
params.append(('peer_id',None,validatePeerId))
def validatePort(port):
port = int(port)
#Ipv4 ports should not be higher than this value
if port > 2 ** 16 - 1 or port <= 0:
raise ValueError("Port outside of range")
return port
def validateByteCount(byteCount):
byteCount = int(byteCount)
if byteCount < 0:
raise ValueError('byte count cannot be negative')
return byteCount
params.append(('port',None,validatePort))
params.append(('uploaded',None,validateByteCount))
params.append(('downloaded',None,validateByteCount))
params.append(('left',None,validateByteCount))
#If the client doesn't specify the compact parameter, it is
#safe to assume that compact responses are understood. So a
#default value of 1 is used. Additionally, any non zero
#value provided assumes the client wants a compact response
params.append(('compact',1,int))
def validateEvent(event):
event = event.lower()
if event not in ['started','stopped','completed']:
raise ValueError("Unknown event")
return event
params.append(('event','update',validateEvent))
maxNumWant = 35
def limitNumWant(numwant):
numwant = int(numwant)
if numwant < 0:
raise ValueError('numwant cannot be negative')
numwant = min(numwant,maxNumWant)
return numwant
params.append(('numwant',maxNumWant,limitNumWant))
#Dictionary holding parameters to query
p = dict()
#Use the params to generate the parameters
for param,defaultValue,typeConversion in params:
#If the parameter is in the query, extract the first
#occurence and type convert if requested
if param in query:
p[param] = query[param][0]
if typeConversion:
try:
p[param] = typeConversion(p[param])
except ValueError as e:
return vanilla.http_error(400,env,start_response,msg='bad value for ' + param)
#If the parameter is not in the query, then
#use a default value is present. Otherwise this is an error
else:
if defaultValue == None:
return vanilla.http_error(400,env,start_response,msg='missing ' + param)
p[param] = defaultValue
#Make sure the secret key is valid
userId = self.auth.authenticateSecretKey(secretKey)
if userId == None:
response = {}
response['failure reason'] = 'failed to authenticate secret key'
return sendBencodedWsgiResponse(env,start_response,response)
#Make sure the info hash is allowed
torrentId = self.auth.authorizeInfoHash(p['info_hash'])
if torrentId == None:
response = {}
response['failure reason'] = 'unauthorized info hash'
return sendBencodedWsgiResponse(env,start_response,response)
#Construct the peers entry
peer = peers.Peer(peerIp,p['port'],p['left'])
#This is the basic response format
response = {}
response['interval'] = 5*60
response['complete'] = 0
response['incomplete'] = 0
response['peers'] = []
#This value is set to True if the number of seeds or leeches
#changes in the course of processing this result
change = False
#This value is set to true if the peer is added, false if removed
addPeer = False
#For all 3 cases here just return peers
if p['event'] in ['started','completed','update']:
response['complete'] = self.peers.getNumberOfLeeches(p['info_hash'])
response['incomplete'] = self.peers.getNumberOfSeeds(p['info_hash'])
change = self.peers.updatePeer(p['info_hash'],peer)
if change:
addPeer = True
peersForResponse = self.peers.getPeers(p['info_hash'])
#Return a compact response or a traditional response
#based on what is requested
if p['compact'] != 0:
peerStruct = struct.Struct('!IH')
maxSize = p['numwant'] * peerStruct.size
peersBuffer = array.array('c')
for peer in itertools.islice(peersForResponse,0,p['numwant']):
peersBuffer.fromstring(peerStruct.pack(peer.ip,peer.port))
response['peers'] = peersBuffer.tostring()
else:
for peer in itertools.islice(peersForResponse,0,p['numwant']):
#For non-compact responses, use a bogus peerId. Hardly any client
#uses this type of response anyways. There is no real meaning to the
#peer ID e
daniestevez/gr-satellites | python/components/deframers/yusat_deframer.py | Python | gpl-3.0 | 2,570 | 0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2021-2022 Daniel Estevez <[email protected]>
#
# This file is part of gr-satellites
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
from gnuradio import gr, digital
import pmt
from ...hier.sync_to_pdu_packed import sync_to_pdu_packed
from ...hdlc_deframer import hdlc_crc_check
# HDLC 0x7e flag
_syncword = '01111110'
class crop_and_check_crc(gr.basic_block):
"""
Helper block to crop using the final 0x7e flag and check CRC-16
"""
def __init__(self):
gr.basic_block.__init__(
self,
name='crop_and_check_crc',
in_sig=[],
out_sig=[])
self.crc_check = hdlc_crc_check()
self.message_port_register_in(pmt.intern('in'))
self.set_msg_handler(pmt.intern('in'), self.handle_msg)
self.message_port_register_out(pmt.intern('out'))
def handle_msg(self, msg_pmt):
msg = pmt.cdr(msg_pmt)
if not pmt.is_u8vector(msg):
print('[ERROR] Received invalid message type. Expected u8vector')
return
packet = pmt.u8vector_elements(msg)
start = 0
while True:
try:
idx = packet[start:].index(0x7e)
except ValueError:
return
start += idx + 1
p = packet[:idx]
if self.crc_check.fcs_ok(p):
p = p[:-2]
self.message_port_pub(
pmt.intern('out'),
pmt.cons(pmt.PMT_NIL, pmt.init_u8vector(len(p), p)))
return
class yusat_deframer(gr.hier_block2):
"""
Hierarchical block to deframe YUSAT ad-hoc AX.25-like protocol
The input is a float stream of soft symbols. The output are PDUs
with YUSAT frames.
Args:
options: Options from argparse
"""
def __init__(self, options=None):
gr.hier_block2.__init__(
self,
'yusat_deframer',
gr.io_signature(1, 1, gr.sizeof_float),
gr.io_signature(0, 0, 0))
self.message_port_register_hier_out('out')
self.slicer = digital.binary_slicer_fb()
# We hope that 256 bytes is long enough to contain the full packet
self.deframer = sync_to_pdu_packed(
packlen=256, sync=_syncword, threshold=0)
self.crop = crop_and_check_crc()
self.connect(self, self.slicer, self.deframer)
self.msg_connect((self.deframer, 'out'), (self.crop, 'in'))
self.msg_connect((self.crop, 'out'), (self, 'out'))
ngageoint/geoq | geoq/accounts/migrations/0001_initial.py | Python | mit | 3,881 | 0.004638
# Generated by Django 3.0.5 on 2020-04-17 14:12
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import easy_thumbnails.fields
import userena.models
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Organization',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=250)),
('primary_contact', models.ForeignKey(help_text='Contact for org.', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'unique_together': {('name', 'primary_contact')},
},
),
migrations.CreateModel(
name='UserProfile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mugshot', easy_thumbnails.fields.ThumbnailerImageField(blank=True, help_text='A personal image displayed in your profile.', upload_to=userena.models.upload_to_mugshot, verbose_name='mugshot')),
('privacy', models.CharField(choices=[('open', 'Open'), ('registered', 'Registered'), ('closed', 'Closed')], default='registered', help_text='Designates who can view your profile.', max_length=15, verbose_name='privacy')),
('email', models.CharField(blank=True, max_length=250, null=True)),
('score', models.IntegerField(default=1)),
('last_activity', models.DateTimeField(auto_now_add=True)),
('openbadge_id', models.CharField(blank=True, max_length=250, null=True)),
                ('organization', models.ForeignKey(blank=True, help_text="If '------', no Organization records share the email domain.", null=True, on_delete=django.db.models.deletion.PROTECT, to='accounts.Organization')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='user')),
],
options={
'permissions': (('view_profile', 'Can view profile'),),
'abstract': False,
'default_permissions': ('add', 'change', 'delete'),
},
),
migrations.CreateModel(
name='UserAuthorization',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('authorized', models.BooleanField(help_text='Check this to approve member access.')),
('permission_granted_on', models.DateTimeField(auto_now_add=True)),
('user_accepted_terms_on', models.DateTimeField(blank=True, null=True)),
('permissions_granted_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='permissions_granted_by', to=settings.AUTH_USER_MODEL)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('user_profile', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='accounts.UserProfile')),
],
),
migrations.CreateModel(
name='EmailDomain',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('email_domain', models.CharField(max_length=50)),
('organization', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='accounts.Organization')),
],
),
]
|
barseghyanartur/django-admin-tools-stats
|
admin_tools_stats/migrations/0001_initial.py
|
Python
|
mit
| 3,865
| 0.004398
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-13 11:29
from __future__ import unicode_literals
from django.db import migrations, models
import jsonfield.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='DashboardStats',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('graph_key', models.CharField(help_text='it needs to be one word unique. ex. auth, mygraph', max_length=90, unique=True, verbose_name='graph key')),
('graph_title', models.CharField(db_index=True, help_text='heading title of graph box', max_length=90, verbose_name='graph title')),
('model_app_name', models.CharField(help_text='ex. auth / dialer_cdr', max_length=90, verbose_name='app name')),
('model_name', models.CharField(help_text='ex. User', max_length=90, verbose_name='model name')),
('date_field_name', models.CharField(help_text='ex. date_joined', max_length=90, verbose_name='date field name')),
('operation_field_name', models.CharField(blank=True, help_text='The field you want to aggregate, ex. amount', max_length=90, null=True, verbose_name='Operate field name')),
('type_operation_field_name', models.CharField(blank=True, choices=[(b'Count', b'Count'), (b'Sum', b'Sum'), (b'Avg', b'Avg'), (b'Max', b'Max'), (b'Min', b'Min'), (b'StdDev', b'StdDev'), (b'Variance', b'Variance')], help_text='choose the type operation what you want to aggregate, ex. Sum', max_length=90, null=True, verbose_name='Choose Type operation')),
('is_visible', models.BooleanField(default=True, verbose_name='visible')),
('created_date', models.DateTimeField(auto_now_add=True, verbose_name='date')),
('updated_date', models.DateTimeField(auto_now=True)),
],
options={
'db_table': 'dashboard_stats',
'verbose_name': 'dashboard stats',
'verbose_name_plural': 'dashboard stats',
},
),
migrations.CreateModel(
name='DashboardStatsCriteria',
fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('criteria_name', models.CharField(db_index=True, help_text='it needs to be one word unique. Ex. status, yesno', max_length=90, verbose_name='criteria name')),
('criteria_fix_mapping', jsonfield.fields.JSONField(blank=True, help_text='a JSON dictionary of key-value pairs that will be used for the criteria', null=True, verbose_name='fixed criteria / value')),
('dynamic_criteria_field_name', models.CharField(blank=True, help_text='ex. for call records - disposition', max_length=90, null=True, verbose_name='dynamic criteria field name')),
('criteria_dynamic_mapping', jsonfield.fields.JSONField(blank=True, help_text='a JSON dictionary of key-value pairs that will be used for the criteria', null=True, verbose_name='dynamic criteria / value')),
('created_date', models.DateTimeField(auto_now_add=True, verbose_name='date')),
('updated_date', models.DateTimeField(auto_now=True)),
],
options={
'db_table': 'dash_stats_criteria',
'verbose_name': 'dashboard stats criteria',
'verbose_name_plural': 'dashboard stats criteria',
},
),
migrations.AddField(
model_name='dashboardstats',
name='criteria',
field=models.ManyToManyField(blank=True, to='admin_tools_stats.DashboardStatsCriteria'),
),
]
|
Hybrid-Cloud/Hybrid-Cloud-Patches-For-Tricircle
|
hybrid-cloud/neutron/plugins/openvswitch/common/config.py
|
Python
|
gpl-2.0
| 5,912
| 0.000507
|
# Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from neutron.agent.common import config
from neutron.plugins.common import constants as p_const
from neutron.plugins.openvswitch.common import constants
DEFAULT_BRIDGE_MAPPINGS = []
DEFAULT_VLAN_RANGES = []
DEFAULT_TUNNEL_RANGES = []
DEFAULT_TUNNEL_TYPES = []
ovs_opts = [
cfg.StrOpt('integration_bridge', default='br-int',
help=_("Integration bridge to use.")),
cfg.BoolOpt('enable_tunneling', default=False,
help=_("Enable tunneling support.")),
cfg.StrOpt('tunnel_bridge', default='br-tun',
help=_("Tunnel bridge to use.")),
cfg.StrOpt('int_peer_patch_port', default='patch-tun',
help=_("Peer patch port in integration bridge for tunnel "
"bridge.")),
cfg.StrOpt('tun_peer_patch_port', default='patch-int',
help=_("Peer patch port in tunnel bridge for integration "
"bridge.")),
cfg.StrOpt('local_ip', default='',
help=_("Local IP address of GRE tunnel endpoints.")),
cfg.ListOpt('bridge_mappings',
default=DEFAULT_BRIDGE_MAPPINGS,
help=_("List of <physical_network>:<bridge>. "
"Deprecated for ofagent.")),
cfg.StrOpt('tenant_network_type', default='local',
help=_("Network type for tenant networks "
"(local, vlan, gre, vxlan, or none).")),
cfg.ListOpt('network_vlan_ranges',
default=DEFAULT_VLAN_RANGES,
help=_("List of <physical_network>:<vlan_min>:<vlan_max> "
"or <physical_network>.")),
cfg.ListOpt('tunnel_id_ranges',
default=DEFAULT_TUNNEL_RANGES,
help=_("List of <tun_min>:<tun_max>.")),
cfg.StrOpt('tunnel_type', default='',
help=_("The type of tunnels to use when utilizing tunnels, "
"either 'gre' or 'vxlan'.")),
cfg.BoolOpt('use_veth_interconnection', default=False,
help=_("Use veths instead of patch ports to interconnect the "
"integration bridge to physical bridges.")),
#added by jiahaojie 00209498
cfg.StrOpt('user_interface_driver',
default='neutron.agent.linux.interface.OVSInterfaceDriver',
help='Driver used to create user devices.'),
cfg.StrOpt('vm_interface',
default='eth0',
               help='Virtual machine device used to get user port.'),
cfg.IntOpt('vm_device_mtu', default=1350,
help=_('MTU setting for device.')),
cfg.BoolOpt('enable_vtep',
default=False,
                help='Used to enable the VTEP function.'),
]
agent_opts = [
cfg.IntOpt('polling_interval', default=2,
help=_("The number of seconds the agent will wait between "
"polling for local device changes.")),
cfg.BoolOpt('minimize_polling',
default=True,
                help=_("Minimize polling by monitoring ovsdb for interface "
"changes.")),
cfg.IntOpt('ovsdb_monitor_respawn_interval',
default=constants.DEFAULT_OVSDBMON_RESPAWN,
help=_("The number of seconds to wait before respawning the "
"ovsdb monitor after losing communication with it.")),
cfg.ListOpt('tunnel_types', default=DEFAULT_TUNNEL_TYPES,
help=_("Network types supported by the agent "
"(gre and/or vxlan).")),
cfg.IntOpt('vxlan_udp_port', default=p_const.VXLAN_UDP_PORT,
help=_("The UDP port to use for VXLAN tunnels.")),
cfg.IntOpt('veth_mtu',
help=_("MTU size of veth interfaces")),
cfg.BoolOpt('l2_population', default=False,
help=_("Use ML2 l2population mechanism driver to learn "
"remote MAC and IPs and improve tunnel scalability.")),
cfg.BoolOpt('arp_responder', default=False,
help=_("Enable local ARP responder if it is supported. "
"Requires OVS 2.1 and ML2 l2population driver. "
"Allows the switch (when supporting an overlay) "
"to respond to an ARP request locally without "
"performing a costly ARP broadcast into the overlay.")),
cfg.BoolOpt('dont_fragment', default=True,
help=_("Set or un-set the don't fragment (DF) bit on "
"outgoing IP packet carrying GRE/VXLAN tunnel.")),
cfg.BoolOpt('enable_distributed_routing', default=False,
help=_("Make the l2 agent run in DVR mode.")),
cfg.ListOpt('l2pop_network_types', default=['flat', 'vlan', 'vxlan'],
help=_("L2pop network types supported by the agent.")),
cfg.BoolOpt('enable_port_multi_device', default=False,
help=_("Port has multiple devices on bridge for XenServer.")),
]
qos_opts = [
cfg.BoolOpt('enable_dscp_vlanpcp_mapping', default=False,
help=_("Enable dscp map vlan pcp")),
]
cfg.CONF.register_opts(ovs_opts, "OVS")
cfg.CONF.register_opts(agent_opts, "AGENT")
cfg.CONF.register_opts(qos_opts, "qos")
config.register_agent_state_opts_helper(cfg.CONF)
config.register_root_helper(cfg.CONF)
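A minimal sketch of how an agent typically consumes the options registered above (the config-file path is a placeholder): importing this module creates the OVS, AGENT and qos groups, and parsed values are then available on cfg.CONF.
from oslo.config import cfg
from neutron.plugins.openvswitch.common import config  # noqa: importing registers the option groups

cfg.CONF(['--config-file', '/etc/neutron/plugins/openvswitch/ovs_neutron_plugin.ini'],
         project='neutron')
print(cfg.CONF.OVS.integration_bridge)           # 'br-int' unless overridden
print(cfg.CONF.AGENT.polling_interval)           # 2 by default
print(cfg.CONF.qos.enable_dscp_vlanpcp_mapping)  # False by default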
|
congthuc/androguard-2.0-custom
|
database/DataUtils.py
|
Python
|
apache-2.0
| 4,034
| 0.008676
|
#author CongThuc 12/13/2015
import MySQLdb
from database.DBHelper import DBHelper
from database.DBConnectManager import DBConnectManager
from resourcefactories.AnalysisInitDefaultValue import AnalysisInitDefaultValue
db_helper = DBHelper()
class DataUtils:
def __init__(self):
print "init DataUtils"
def get_ActivitiesFromDB(self, db_connector):
activities = []
if db_connector is not None:
try:
query = "select * from activities"
activities = db_helper.get_Data(db_connector, db_connector.cursor(), query);
except Exception as e:
print e
return activities
def get_ActivitiesFromXML(self, db_connector):
activities = []
if db_connector is not None:
try:
query = "select * from activities_from_xml"
activities = db_helper.get_Data(db_connector, db_connector.cursor(), query);
except Exception as e:
print e
return activities
def get_PermissionFromDB(self, db_connector):
permissions = []
if db_connector is not None:
try:
query = "select * from permissions"
permissions = db_helper.get_Data(db_connector, db_connector.cursor(), query);
except Exception as e:
print e
return permissions
    def get_PermissionFromXML(self, db_connector):
permissions = []
if db_connector is not None:
try:
query = "select * from permissions_from_xml"
permissions = db_helper.get_Data(db_connector, db_connector.cursor(), query);
except Exception as e:
print e
return permissions
def get_PermissionAnalysis(self, db_connector):
permission_detail = []
if db_connector is not None:
try:
query = "select permission_name, srcClass, srcMethod, srcMethodDes, dstClass, dstMethod, dstMethodDes " \
"from permission_analysis P1 INNER JOIN permissions P2 ON P1.permission_id = P2.id;"
permission_detail = db_helper.get_Data(db_connector, db_connector.cursor(), query);
except Exception as e:
print e
return permission_detail
def get_PackageFilter_Activity(self,db_connector, activities):
packages = []
if activities:
for ac in activities:
if db_connector is not None:
try:
select_stmt = "SELECT * FROM package_analysis WHERE srcClass like %(ac_name)s"
cursor = db_connector.cursor()
cursor.execute(select_stmt, { 'ac_name': "%" + ac[1]+ "%"})
rows = cursor.fetchall()
packages.extend(rows)
except Exception as e:
print e
return packages
def get_SensitiveAPIs(self, db_connector, table):
packages = []
if db_connector is not None:
for sen_APIs in AnalysisInitDefaultValue.Sensitive_APIs:
try:
select_stmt = "SELECT package_id, dstClass, dstMethod, dstMethodDes, srcClass, srcMethod, srcMethodDes FROM " + table + " WHERE dstMethod like %(sen_APIs)s"
cursor = db_connector.cursor()
cursor.execute(select_stmt, {'sen_APIs': "%" + sen_APIs + "%"})
rows = cursor.fetchall()
packages.extend(rows)
except Exception as e:
print e
return packages
def get_SensitiveAPIsFromDB(self, db_connector):
sensitive_apis = []
if db_connector is not None:
try:
query = "select * from sensitive_apis"
sensitive_apis = db_helper.get_Data(db_connector, db_connector.cursor(), query);
except Exception as e:
print e
return sensitive_apis
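A minimal usage sketch of DataUtils (host, credentials and database name are placeholders): the methods above only need an object exposing .cursor(), so a plain MySQLdb connection is enough.
import MySQLdb
from database.DataUtils import DataUtils

conn = MySQLdb.connect(host='localhost', user='analysis', passwd='secret', db='androguard')
utils = DataUtils()
for row in utils.get_PermissionFromDB(conn):  # each row is a tuple from the permissions table
    print(row)
conn.close()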
|
miketheman/opencomparison
|
package/migrations/0015_auto__del_repo__del_field_package_repo.py
|
Python
|
mit
| 10,959
| 0.008304
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting model 'Repo'
db.delete_table('package_repo')
# Deleting field 'Package.repo'
db.delete_column('package_package', 'repo_id')
def backwards(self, orm):
# Adding model 'Repo'
db.create_table('package_repo', (
('slug_regex', self.gf('django.db.models.fields.CharField')(max_length='100', blank=True)),
('description', self.gf('django.db.models.fields.TextField')(blank=True)),
('user_url', self.gf('django.db.models.fields.CharField')(max_length='100', blank=True)),
('handler', self.gf('django.db.models.fields.CharField')(default='package.handlers.unsupported', max_length='200')),
('repo_regex', self.gf('django.db.models.fields.CharField')(max_length='100', blank=True)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length='50')),
('url', self.gf('django.db.models.fields.URLField')(max_length=200)),
('created', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, blank=True)),
('modified', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, blank=True)),
('is_supported', self.gf('django.db.models.fields.BooleanField')(default=False)),
('user_regex', self.gf('django.db.models.fields.CharField')(max_length='100', blank=True)),
('is_other', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal('package', ['Repo'])
# Adding field 'Package.repo'
db.add_column('package_package', 'repo', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['package.Repo'], null=True), keep_default=False)
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'package.category': {
'Meta': {'ordering': "['title']", 'object_name': 'Category'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'show_pypi': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': "'50'"}),
'title_plural': ('django.db.models.fields.CharField', [], {'max_length': "'50'", 'blank': 'True'})
},
'package.commit': {
'Meta': {'ordering': "['-commit_date']", 'object_name': 'Commit'},
'commit_date': ('django.db.models.fields.DateTimeField', [], {}),
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['package.Package']"})
},
'package.package': {
'Meta': {'ordering': "['title']", 'object_name': 'Package'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['package.Category']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'creator'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'modifier'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'participants': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'pypi_downloads': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'pypi_home_page': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'pypi_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'related_packages': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_packages_r
|
jennyb/amsDecode
|
processBinFiles.py
|
Python
|
gpl-2.0
| 451
| 0.046563
|
#!/usr/bin/python
import os, subprocess
amsDecode = "/usr/local/bin/amsDecode"
path = "/usr/local/bin"
specDataFile = "specData.csv"
f = open("processFile.log", "w")
if os.path.exists(specDataFile):
os.remove(specDataFile)
for fileName in os.listdir('.'):
if fileName.endswith('.bin'):
#print 'file :' + fileName
cmnd = [amsDecode,
fileName,
"-t -95",
"-b",
"68",
"468" ]
subprocess.call(cmnd,stdout=f)
f.close()
|
Taapat/enigma2-openpli-vuplus
|
lib/python/Components/Converter/PliExtraInfo.py
|
Python
|
gpl-2.0
| 12,757
| 0.031199
|
# shamelessly copied from pliExpertInfo (Vali, Mirakels, Littlesat)
from enigma import iServiceInformation, iPlayableService
from Components.Converter.Converter import Converter
from Components.Element import cached
from Components.config import config
from Tools.Transponder import ConvertToHumanReadable, getChannelNumber
from Tools.GetEcmInfo import GetEcmInfo
from Poll import Poll
def addspace(text):
if text:
text += " "
return text
class PliExtraInfo(Poll, Converter, object):
def __init__(self, type):
Converter.__init__(self, type)
Poll.__init__(self)
self.type = type
self.poll_interval = 1000
self.poll_enabled = True
self.caid_data = (
( "0x100", "0x1ff", "Seca", "S", True ),
( "0x500", "0x5ff", "Via", "V", True ),
( "0x600", "0x6ff", "Irdeto", "I", True ),
( "0x900", "0x9ff", "NDS", "Nd", True ),
( "0xb00", "0xbff", "Conax", "Co", True ),
( "0xd00", "0xdff", "CryptoW", "Cw", True ),
( "0xe00", "0xeff", "PowerVU", "P", False ),
("0x1000", "0x10FF", "Tandberg", "TB", False ),
("0x1700", "0x17ff", "Beta", "B", True ),
("0x1800", "0x18ff", "Nagra", "N", True ),
("0x2600", "0x2600", "Biss", "Bi", False ),
("0x4ae0", "0x4ae1", "Dre", "D", False ),
("0x4aee", "0x4aee", "BulCrypt", "B1", False ),
("0x5581", "0x5581", "BulCrypt", "B2", False )
)
self.ca_table = (
("CryptoCaidSecaAvailable", "S", False),
("CryptoCaidViaAvailable", "V", False),
("CryptoCaidIrdetoAvailable", "I", False),
("CryptoCaidNDSAvailable", "Nd", False),
("CryptoCaidConaxAvailable", "Co", False),
("CryptoCaidCryptoWAvailable", "Cw", False),
("CryptoCaidPowerVUAvailable", "P", False),
("CryptoCaidBetaAvailable", "B", False),
("CryptoCaidNagraAvailable", "N", False),
("CryptoCaidBissAvailable", "Bi", False),
("CryptoCaidDreAvailable", "D", False),
("CryptoCaidBulCrypt1Available","B1", False),
("CryptoCaidBulCrypt2Available","B2", False),
("CryptoCaidTandbergAvailable", "TB", False),
("CryptoCaidSecaSelected", "S", True),
("CryptoCaidViaSelected", "V", True),
("CryptoCaidIrdetoSelected", "I", True),
("CryptoCaidNDSSelected", "Nd", True),
("CryptoCaidConaxSelected", "Co", True),
("CryptoCaidCryptoWSelected", "Cw", True),
("CryptoCaidPowerVUSelected", "P", True),
("CryptoCaidBetaSelected", "B", True),
("CryptoCaidNagraSelected", "N", True),
("CryptoCaidBissSelected", "Bi", True),
("CryptoCaidDreSelected", "D", True),
("CryptoCaidBulCrypt1Selected", "B1", True),
("CryptoCaidBulCrypt2Selected", "B2", True),
("CryptoCaidTandbergSelected", "TB", True),
)
self.ecmdata = GetEcmInfo()
self.feraw = self.fedata = self.updateFEdata = None
def getCryptoInfo(self, info):
if info.getInfo(iServiceInformation.sIsCrypted) == 1:
data = self.ecmdata.getEcmData()
self.current_source = data[0]
self.current_caid = data[1]
self.current_provid = data[2]
self.current_ecmpid = data[3]
else:
self.current_source = ""
self.current_caid = "0"
self.current_provid = "0"
self.current_ecmpid = "0"
def createCryptoBar(self, info):
res = ""
available_caids = info.getInfoObject(iServiceInformation.sCAIDs)
for caid_entry in self.caid_data:
if int(caid_entry[0], 16) <= int(self.current_caid, 16) <= int(caid_entry[1], 16):
color="\c0000??00"
else:
color = "\c007?7?7?"
try:
for caid in available_caids:
if int(caid_entry[0], 16) <= caid <= int(caid_entry[1], 16):
color="\c00????00"
except:
pass
if color != "\c007?7?7?" or caid_entry[4]:
if res: res += " "
res += color + caid_entry[3]
res += "\c00??????"
return res
def createCryptoSpecial(self, info):
caid_name = "FTA"
try:
for caid_entry in self.caid_data:
if int(caid_entry[0], 16) <= int(self.current_caid, 16) <= int(caid_entry[1], 16):
caid_name = caid_entry[2]
break
return caid_name + ":%04x:%04x:%04x:%04x" % (int(self.current_caid,16), int(self.current_provid,16), info.getInfo(iServiceInformation.sSID), int(self.current_ecmpid,16))
except:
pass
return ""
def createResolution(self, info):
xres = info.getInfo(iServiceInformation.sVideoWidth)
if xres == -1:
return ""
yres = info.getInfo(iServiceInformation.sVideoHeight)
mode = ("i", "p", " ")[info.getInfo(iServiceInformation.sProgressive)]
fps = str((info.getInfo(iServiceInformation.sFrameRate) + 500) / 1000)
return str(xres) + "x" + str(yres) + mode + fps
def createVideoCodec(self, info):
return ("MPEG2", "AVC", "MPEG1", "MPEG4-VC", "VC1", "VC1-SM", "HEVC", "")[info.getInfo(iServiceInformation.sVideoType)]
def createPIDInfo(self, info):
vpid = info.getInfo(iServiceInformation.sVideoPID)
apid = info.getInfo(iServiceInformation.sAudioPID)
pcrpid = info.getInfo(iServiceInformation.sPCRPID)
sidpid = info.getInfo(iServiceInformation.sSID)
tsid = info.getInfo(iServiceInformation.sTSID)
onid = info.getInfo(iServiceInformation.sONID)
if vpid < 0 : vpid = 0
if apid < 0 : apid = 0
if pcrpid < 0 : pcrpid = 0
if sidpid < 0 : sidpid = 0
if tsid < 0 : tsid = 0
if onid < 0 : onid = 0
return "%d-%d:%05d:%04d:%04d:%04d" % (onid, tsid, sidpid, vpid, apid, pcrpid)
def createTransponderInfo(self, fedata, feraw):
if not feraw:
return ""
elif "DVB-T" in feraw.get("tuner_type"):
tmp = addspace(self.createChannelNumber(fedata, feraw)) + addspace(self.createFrequency(feraw)) + addspace(self.createPolarization(fedata))
else:
tmp = addspace(self.createFrequency(feraw)) + addspace(self.createPolarization(fedata))
return addspace(self.createTunerSystem(fedata)) + tmp + addspace(self.createSymbolRate(fedata, feraw)) + addspace(self.createFEC(fedata, feraw)) \
+ addspace(self.createModulation(fedata)) + addspace(self.createOrbPos(feraw))
def createFrequency(self, feraw):
frequency = feraw.get("frequency")
if frequency:
if "DVB-T" in feraw.get("tuner_type"):
                return str(int(frequency / 1000000. + 0.5))
else:
return str(int(frequency / 1000 + 0.5))
return ""
def createChannelNumber(self, fedata, feraw):
return "DVB-T" in feraw.get("tuner_type") and fedata.get("channel") or ""
def createSymbolRate(self, fedata, feraw):
        if "DVB-T" in feraw.get("tuner_type"):
bandwidth = fedata.get("bandwidth")
if bandwidth:
return bandwidth
else:
symbolrate = fedata.get("symbol_rate")
if symbolrate:
return str(symbolrate / 1000)
return ""
def createPolarization(self, fedata):
return fedata.get("polarization_abbreviation") or ""
def createFEC(self, fedata, feraw):
if "DVB-T" in feraw.get("tuner_type"):
code_rate_lp = fedata.get("code_rate_lp")
code_rate_hp = fedata.get("code_rate_hp")
if code_rate_lp and code_rate_hp:
return code_rate_lp + "-" + code_rate_hp
else:
fec = fedata.get("fec_inner")
if fec:
return fec
return ""
def createModulation(self, fedata):
if fedata.get("tuner_type") == _("Terrestrial"):
constellation = fedata.get("constellation")
if constellation:
return constellation
else:
modulation = fedata.get("modulation")
if modulation:
return modulation
return ""
def createTunerType(self, feraw):
return feraw.get("tuner_type") or ""
def createTunerSystem(self, fedata):
return fedata.get("system") or ""
def createOrbPos(self, feraw):
orbpos = feraw.get("orbital_position")
if orbpos > 1800:
return str((float(3600 - orbpos)) / 10.0) + "\xc2\xb0 W"
elif orbpos > 0:
return str((float(orbpos)) / 10.0) + "\xc2\xb0 E"
return ""
def createOrbPosOrTunerSystem(self, fedata,feraw):
orbpos = self.createOrbPos(feraw)
        if orbpos != "":
return orbpos
return self.createTunerSystem(fedata)
def createProviderName(self, info):
return info.getInfoString(iServiceInformation.sProvider)
@cached
def getText(self):
service = self.source.service
if service is None:
return ""
info = service and service.info()
if not info:
return ""
if self.type == "CryptoInfo":
self.getCryptoInfo(info)
if config.usage.show_cryptoinfo.value:
return addspace(self.createCryptoBar(info)
|
crate/crate-python
|
src/crate/client/sqlalchemy/tests/bulk_test.py
|
Python
|
apache-2.0
| 2,714
| 0
|
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
from unittest import TestCase
from unittest.mock import patch, MagicMock
import sqlalchemy as sa
from sqlalchemy.orm import Session
from sqlalchemy.ext.declarative import declarative_base
from crate.client.cursor import Cursor
fake_cursor = MagicMock(name='fake_cursor')
FakeCursor = MagicMock(name='FakeCursor', spec=Cursor)
FakeCursor.return_value = fake_cursor
class SqlAlchemyBulkTest(TestCase):
def setUp(self):
self.engine = sa.create_engine('crate://')
Base = declarative_base(bind=self.engine)
class Character(Base):
__tablename__ = 'characters'
name = sa.Column(sa.String, primary_key=True)
age = sa.Column(sa.Integer)
self.character = Character
self.session = Session()
@patch('crate.client.connection.Cursor', FakeCursor)
def test_bulk_save(self):
chars = [
self.character(name='Arthur', age=35),
self.character(name='Banshee', age=26),
self.character(name='Callisto', age=37),
]
fake_cursor.description = ()
fake_cursor.rowcount = len(chars)
fake_cursor.executemany.return_value = [
{'rowcount': 1},
{'rowcount': 1},
{'rowcount': 1},
]
self.session.bulk_save_objects(chars)
(stmt, bulk_args), _kwargs = fake_cursor.executemany.call_args
expected_stmt = "INSERT INTO characters (name, age) VALUES (?, ?)"
self.assertEqual(expected_stmt, stmt)
expected_bulk_args = (
('Arthur', 35),
('Banshee', 26),
('Callisto', 37)
)
self.assertEqual(expected_bulk_args, bulk_args)
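For comparison, a minimal sketch of the same bulk insert against a live server instead of the mocked cursor (the CrateDB URL is a placeholder): bulk_save_objects issues one parameterized INSERT carrying all rows.
import sqlalchemy as sa
from sqlalchemy.orm import Session
from sqlalchemy.ext.declarative import declarative_base

engine = sa.create_engine('crate://localhost:4200')
Base = declarative_base(bind=engine)

class Character(Base):
    __tablename__ = 'characters'
    name = sa.Column(sa.String, primary_key=True)
    age = sa.Column(sa.Integer)

session = Session(bind=engine)
session.bulk_save_objects([Character(name='Arthur', age=35),
                           Character(name='Banshee', age=26)])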
|
danfairs/django-lazysignup
|
lazysignup/utils.py
|
Python
|
bsd-3-clause
| 570
| 0
|
def is_lazy_user(user):
""" Return True if the passed user is a lazy user. """
# Anonymous users are not lazy.
if user.is_anonymous:
        return False
# Check the user backend. If the lazy signup backend
# authenticated them, then the user is lazy.
backend = getattr(user, 'backend', None)
if backend == 'lazysignup.backends.LazySignupBackend':
return True
# Otherwise, we have to fall back to checking the database.
from lazysignup.models import LazyUser
return bool(LazyUser.objects.filter(user=user).count() > 0)
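A minimal usage sketch (the view and template names are hypothetical): a Django view can use is_lazy_user to decide whether to prompt the visitor to convert a lazily created account.
from django.shortcuts import render
from lazysignup.utils import is_lazy_user

def dashboard(request):
    # Show a "complete your registration" banner only to lazy users.
    context = {'show_convert_banner': is_lazy_user(request.user)}
    return render(request, 'dashboard.html', context)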
| |
strogo/djpcms
|
tests/regression/routes/tests.py
|
Python
|
bsd-3-clause
| 741
| 0.026991
|
from djpcms import test
from djpcms.core.exceptions import AlreadyRegistered
import djpcms
class TestSites(test.TestCase):
def testMake(self):
        self.assertRaises(AlreadyRegistered,djpcms.MakeSite,__file__)
site = djpcms.MakeSite(__file__, route = '/extra/')
self.assertEqual(site.route,'/extra/')
def testClenUrl(self):
p = self.makepage(bit = 'test')
self.assertEqual(p.url,'/test/')
res = self.get('/test', status = 302, response = True)
        self.assertEqual(res['location'],'http://testserver/test/')
res = self.get('/test////', status = 302, response = True)
self.assertEqual(res['location'],'http://testserver/test/')
|
laurent-george/weboob
|
modules/prixcarburants/pages.py
|
Python
|
agpl-3.0
| 2,744
| 0.001094
|
# -*- coding: utf-8 -*-
# Copyright(C) 2012 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from decimal import Decimal
from weboob.deprecated.browser import Page
from weboob.capabilities import NotAvailable
from weboob.capabilities.pricecomparison import Product, Shop, Price
class IndexPage(Page):
def get_token(self):
input = self.parser.select(self.document.getroot(), 'div#localisation input#recherche_recherchertype__token', 1)
return input.attrib['value']
def iter_products(self):
        for li in self.parser.select(self.document.getroot(), 'div#choix_carbu ul li'):
input = li.find('input')
label = li.find('label')
product = Product(input.attrib['value'])
product.name = unicode(label.text.strip())
if '&' in product.name:
# "E10 & SP95" produces a non-supported table.
continue
yield product
class ComparisonResultsPage(Page):
def get_product_name(self):
th = self.document.getroot().cssselect('table#tab_resultat tr th')
if th and len(th) == 9:
return u'%s' % th[5].find('a').text
def iter_results(self, product=None):
price = None
product.name = self.get_product_name()
for tr in self.document.getroot().cssselect('table#tab_resultat tr'):
tds = self.parser.select(tr, 'td')
if tds and len(tds) == 9 and product is not None:
price = Price('%s.%s' % (product.id, tr.attrib['id']))
price.product = product
price.cost = Decimal(tds[5].text.replace(',', '.'))
price.currency = u'€'
shop = Shop(price.id)
shop.name = unicode(tds[3].text.strip())
shop.location = unicode(tds[2].text.strip())
price.shop = shop
price.set_empty_fields(NotAvailable)
yield price
class ShopInfoPage(Page):
def get_info(self):
return self.parser.tostring(self.parser.select(self.document.getroot(), 'div.infos', 1))
|
qbuat/rootpy
|
rootpy/memory/deletion.py
|
Python
|
gpl-3.0
| 3,682
| 0.000543
|
# Copyright 2012 the rootpy developers
# distributed under the terms of the GNU General Public License
"""
This module supports monitoring TObject deletions.
.. warning::
This is not recommended for production
"""
from __future__ import absolute_import
from weakref import ref
import ctypes
from ctypes import CFUNCTYPE, py_object, addressof, c_int
from .. import compiled as C
from .. import QROOT, log
from ..utils.cinterface import callback, objectproxy_realaddress
__all__ = [
'monitor_deletion',
'monitor_object_deletion',
]
def monitor_deletion():
"""
Function for checking for correct deletion of weakref-able objects.
Example usage::
monitor, is_alive = monitor_deletion()
obj = set()
monitor(obj, "obj")
        assert is_alive("obj")      # True because `obj` still holds a reference
        del obj
        assert not is_alive("obj")  # True because `obj` has been deleted
"""
monitors = {}
def set_deleted(x):
def _(weakref):
del monitors[x]
return _
def monitor(item, name):
monitors[name] = ref(item, set_deleted(name))
def is_alive(name):
return monitors.get(name, None) is not None
return monitor, is_alive
cleanuplog = log["memory.cleanup"]
cleanuplog.show_stack()
# Add python to the include path
C.add_python_includepath()
C.register_code("""
#ifndef __CINT__
#include <Python.h>
#endif
#include <TObject.h>
#include <TPython.h>
class RootpyObjectCleanup : public TObject {
public:
    typedef void (*CleanupCallback)(PyObject*);
CleanupCallback _callback;
RootpyObjectCleanup(CleanupCallback callback) : _callback(callback) {}
virtual void RecursiveRemove(TObject* object) {
// When arriving here, object->ClassName() will _always_ be TObject
// since we're called by ~TObject, and virtual method calls don't
// work as expected from there.
PyObject* o = TPython::ObjectProxy_FromVoidPtr(object, "TObject");
PyGILState_STATE gstate;
gstate = PyGILState_Ensure();
PyObject *ptype, *pvalue, *ptraceback;
PyErr_Fetch(&ptype, &pvalue, &ptraceback);
_callback(o);
PyErr_Restore(ptype, pvalue, ptraceback);
PyGILState_Release(gstate);
}
ClassDef(RootpyObjectCleanup, 0);
};
ClassImp(RootpyObjectCleanup);
""", ["RootpyObjectCleanup"])
MONITORED = {}
@CFUNCTYPE(None, py_object)
def on_cleanup(tobject):
# Note, when we arrive here, tobject is in its ~TObject, and hence the
# subclass part of the object doesn't exist, in some sense. Hence why we
# store information about the object on the MONITORED dict.
addr = objectproxy_realaddress(tobject)
if addr in MONITORED:
args = MONITORED[addr]
fn, args = args[0], args[1:]
fn(tobject, *args)
del MONITORED[addr]
initialized = False
def init():
global initialized
if initialized: return
initialized = True
cleanup = C.RootpyObjectCleanup(callback(on_cleanup))
cleanups = QROOT.gROOT.GetListOfCleanups()
cleanups.Add(cleanup)
import atexit
@atexit.register
def exit():
# Needed to ensure we don't get called after ROOT has gone away
cleanups.RecursiveRemove(cleanup)
def monitor_object_deletion(o, fn=lambda *args: None):
init()
# Required so that GetListOfCleanups().RecursiveRemove() is called.
o.SetBit(o.kMustCleanup)
args = fn, type(o).__name__, o.GetName(), o.GetTitle(), repr(o)
MONITORED[objectproxy_realaddress(o)] = args
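A minimal sketch of monitor_object_deletion (requires ROOT with rootpy; the Hist import is an assumption about the wider library, and the callback signature follows the args tuple stored above): the callback fires when ROOT recursively removes the object during cleanup.
from rootpy.plotting import Hist
from rootpy.memory.deletion import monitor_object_deletion

def on_deleted(obj, cls_name, name, title, rep):
    # Called from RecursiveRemove; obj is already reduced to its TObject part.
    print("ROOT cleaned up %s %r" % (cls_name, name))

h = Hist(10, 0, 1, name='monitored_hist')
monitor_object_deletion(h, fn=on_deleted)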
|
zznn/futu-openAPI
|
app/mainapp.py
|
Python
|
apache-2.0
| 6,014
| 0.02758
|
# -*- coding: utf-8 -*-
from flask import Flask, jsonify, request, abort, make_response
from futu_server_api import *
from db import save_update_token
from db import delete_tokens
from db import list_cards
import logging
import logging.config
import json
app = Flask(__name__)
logging.config.fileConfig('./conf/log.ini')
no_db_logger = logging.getLogger()
def check_parameters(pjson):
if not pjson or not 'app_account' in pjson or not 'card' in pjson or not 'appid' in pjson:
no_db_logger.info('No Parameter')
abort(400)
cli = {'account':pjson['app_account'], 'card':pjson['card'], 'appid':pjson['appid']}
return client(cli['account'], cli['card'], cli['appid'])
def log_handler(myjson, mytitle):
if 'ClientWarning' in myjson:
return '%s' % myjson['ClientWarning']
elif myjson['result_code'] == 0:
return 'SUCCESS'
else:
        return 'FAIL ,REASON OF FAILURE:%s ,PARAMETER:%s' % (myjson['error_msg'], request.json)
@app.route('/')
def hello_world():
no_db_logger.info('server start#####')
return 'hello 22222222 world!'
@app.route('/api/v1/tradetoken', methods=['POST'])
def trade_token():
trade_pswd = request.json['trade_pswd']
account = request.json['app_account']
card = request.json['card']
appid = request.json['appid']
cc = check_parameters(request.json)
message = cc.get_trade_token(trade_pswd)
if message['result_code'] != 0 and message['error_msg'] == 'didn\'t get accesstoken':
no_db_logger.info('didn\'t get accesstoken')
return json.dumps({'result_code':2,'error_msg':'didn\'t get accesstoken'}, ensure_ascii=False)
if message['result_code'] == 0:
token = message['data']['trade_token']
save_update_token(account, appid, None, token, card, True)
return jsonify(**message)
@app.route('/api/v1/account', methods=['POST'])
def get_account_detail():
cc = check_parameters(request.json)
message = cc.get_account_detail()
logtext = log_handler(message, '获取账户信息')
no_db_logger.info(logtext)
return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/account/cash', methods=['POST'])
def get_account_cash():
cc = check_parameters(request.json)
message = cc.get_account_cash()
logtext = log_handler(message, '获取账户现金')
no_db_logger.info(logtext)
return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/account/portfolio', methods=['POST'])
def get_account_portfolio():
cc = check_parameters(request.json)
message = cc.get_account_portfolio()
logtext = log_handler(message, '获取账户持仓')
no_db_logger.info(logtext)
return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/get_list_orders', methods=['POST'])
def get_list_orders():
date_begin = request.json['date_begin']
date_end = request.json['date_end']
cc = check_parameters(request.json)
message = cc.get_list_orders()
logtext = log_handler(message, '获取订单列表')
no_db_logger.info(logtext)
return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/get_list_trades', methods=['POST'])
def get_list_trades():
cc = check_parameters(request.json)
message = cc.get_list_trades()
logtext = log_handler(message, '获取交易列表')
no_db_logger.info(logtext)
return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/place_order', methods=['POST'])
def place_order():
code = request.json['code']
quantity = request.json['quantity']
price = request.json['price']
side = request.json['side']
ltype = request.json['type']
cc = check_parameters(request.json)
message = cc.place_order(code, quantity, price, side, ltype)
logtext = log_handler(message, '下单')
no_db_logger.info(logtext)
return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/change_order', methods=['POST'])
def change_order():
order_id = request.json['order_id']
quantity = request.json['quantity']
price = request.json['price']
cc = check_parameters(request.json)
message = cc.change_order(order_id, quantity, price)
logtext = log_handler(message, '改单')
no_db_logger.info(logtext)
return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/cancle_order', methods=['POST'])
def cancle_order():
order_id = request.json['order_id']
cc = check_parameters(request.json)
message = cc.cancel_order(order_id)
logtext = log_handler(message, '撤单')
no_db_logger.info(logtext)
return json.dumps(message, ensure_ascii=False)
@app.route('/ap1/v1/save_token', methods=['POST'])
def save_token():
account = request.json['app_account']
appid = request.json['appid']
market = request.json['market']
token = request.json['token']
card = request.json['card']
card_desc = request.json['text']
DB_result = save_update_token(account, appid, market, token, card, False, card_desc)
if DB_result == 'success':
no_db_logger.info('token save success')
return json.dumps({'result_code':0,'error_msg':''}, ensure_ascii=False)
else:
no_db_logger.info('token save fail')
return json.dumps({'result_code':1,'error_msg':'token保存失败'}, ensure_ascii=False)
@app.route('/api/v1/delete_token', methods=['POST'])
def delete_token():
appid = request.json['appid']
account = request.json['app_account']
DB_result = delete_tokens(account, appid)
if DB_result == 'success':
no_db_logger.info('token delete success')
return json.dumps({'result_code':0,'error_msg':''}, ensure_ascii=False)
else:
no_db_logger.info('token delete fail')
return json.dumps({'result_code':1,'error_msg':'token删除失败'}, ensure_ascii=False)
@app.route('/api/v1/list_card', methods=['POST'])
def list_card():
appid = request.json['appid']
account = request.json['app_account']
cards = list_cards(account, appid)
message = dict(cards=cards)
if isinstance(cards, list):
no_db_logger.info('list cards success')
return json.dumps({'result_code':0,'error_msg':'','data':message}, ensure_ascii=False)
else:
no_db_logger.info('list cards fail')
return json.dumps({'result_code':1,'error_msg':'查询账户卡号失败'}, ensure_ascii=False)
if __name__ == '__main__':
app.run()
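A minimal client-side sketch (host, port, account and card values are placeholders): every endpoint above expects app_account, card and appid in the JSON body, so a call looks like this.
import requests

payload = {'app_account': 'demo_account', 'card': '1234567890', 'appid': '100001'}
resp = requests.post('http://127.0.0.1:5000/api/v1/account/cash', json=payload)
print(resp.json())  # typically includes result_code / error_msg fields on this API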
|
LordSprit/Laser
|
main.py
|
Python
|
gpl-2.0
| 2,036
| 0.000983
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Modulos
import sys
import pygame
from pygame.locals import *
# Constantes
venx = 640
veny = 448
# Clases
class Pieza(pygame.sprite.Sprite): # 64x64 px tamaño
def __init__(self, tipo):
pygame.sprite.Sprite.__init__(self)
if tipo == 0:
self.image = load_image("tablero.png", True)
elif tipo == 1:
self.image = load_image("laser.png", True)
elif tipo == 2:
self.image = load_image("diana.png", True)
elif tipo == 3:
self.image = load_image("diana_espejo.png", True)
elif tipo == 4:
self.image = load_image("espejo.png", True)
elif tipo == 5:
self.image = load_image("espejotraves.png", True)
elif tipo == 6:
self.image = load_image("tunel.png", True)
elif tipo == 7:
self.image = load_image("bloqueo.png", True)
elif tipo == 8:
self.image = load_image("bloqueo_g.png", True)
elif tipo == 9:
self.image = load_image("portal.png", True)
else:
tipo = 0
self.image = load_image("tablero.png", True)
# Funciones
def load_image(filename, transparent=False):
try:
image = pygame.image.load(filename)
except pygame.error:
raise SystemExit
image = image.convert()
if transparent:
color = image.get_at((0, 0))
image.set_colorkey(color, RLEACCEL)
return image
#------------------------------------------
def main():
screen = pygame.display.set_mode((venx, veny))
pygame.display.set_caption("Laser Game")
background_image = load_image('fondo.png')
    bola = Bola()  # Note: Bola is not defined in this module; a sprite class is expected here
while True:
for eventos in pygame.event.get():
if eventos.type == QUIT:
sys.exit(0)
screen.blit(background_image, (0, 0))
screen.blit(bola.image, bola.rect)
pygame.display.flip()
return 0
if __name__ == '__main__':
pygame.init()
main()
|
jeroanan/Nes2
|
Tests/OpCodeTests/TestNopOpCode.py
|
Python
|
bsd-3-clause
| 306
| 0.003268
|
from Chip import OpCodeDefinitions
from Tests.OpCodeTests.OpCodeTestBase import OpCodeTestBase
class TestNopOpCode(OpCodeTestBase):
def test_nop_implied_command_calls_nop_method(self):
        self.assert_opcode_execution(OpCodeDefinitions.nop_implied_command, self.target.get_nop_command_executed)
|
lochiiconnectivity/boto
|
tests/integration/dynamodb2/test_cert_verification.py
|
Python
|
mit
| 1,511
| 0
|
# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Check that all of the certs on all service endpoints validate.
"""
import unittest
import boto.dynamodb2
class CertVerificationTest(unittest.TestCase):
dynamodb2 = True
ssl = True
def test_certs(self):
for region in boto.dynamodb2.regions():
c = region.connect()
c.list_tables()
|
HiSPARC/sapphire
|
sapphire/tests/analysis/test_process_events.py
|
Python
|
gpl-3.0
| 15,851
| 0.002713
|
import operator
import os
import shutil
import tempfile
import unittest
import warnings
import tables
from mock import Mock
from numpy import array
from numpy.testing import assert_array_equal
from sapphire.analysis import process_events
TEST_DATA_FILE = 'test_data/process_events.h5'
DATA_GROUP = '/s501'
class ProcessEventsTests(unittest.TestCase):
def setUp(self):
warnings.filterwarnings('ignore')
self.data_path = self.create_tempfile_from_testdata()
self.data = tables.open_file(self.data_path, 'a')
self.proc = process_events.ProcessEvents(self.data, DATA_GROUP, progress=False)
def tearDown(self):
warnings.resetwarnings()
self.data.close()
os.remove(self.data_path)
def test_get_traces_for_event(self):
event = self.proc.source[0]
self.assertEqual(self.proc.get_traces_for_event(event)[12][3], 1334)
def test__find_unique_row_ids(self):
ext_timestamps = self.proc.source.col('ext_timestamp')
enumerated_timestamps = list(enumerate(ext_timestamps))
enumerated_timestamps.sort(key=operator.itemgetter(1))
ids_in = [id for id, _ in enumerated_timestamps]
ids = self.proc._find_unique_row_ids(enumerated_timestamps)
self.assertEqual(ids, ids_in)
enumerated_timestamps = [(0, 1), (1, 1), (3, 2), (2, 2)]
ids = self.proc._find_unique_row_ids(enumerated_timestamps)
self.assertEqual(ids, [0, 3])
        # Must be sorted by timestamp or the result will be different.
enumerated_timestamps = [(0, 1), (3, 2), (1, 1), (2, 2)]
ids = self.proc._find_unique_row_ids(enumerated_timestamps)
self.assertNotEqual(ids, [0, 3])
def test__reconstruct_time_from_traces(self):
event = self.proc.source[10]
times = self.proc._reconstruct_time_from_traces(event)
self.assertEqual(times[0], 162.5)
self.assertEqual(times[2], -999)
event['pulseheights'][0] = -1
times = self.proc._reconstruct_time_from_traces(event)
self.assertEqual(times[0], -1)
def test__reconstruct_time_from_trace(self):
trace = [220, 222, 224, 222, 220]
self.assertEqual(self.proc._reconstruct_time_from_trace(trace, 200), 0)
self.assertEqual(self.proc._reconstruct_time_from_trace(trace, 203), 2)
self.assertEqual(self.proc._reconstruct_time_from_trace(trace, 205), -999)
def test_first_above_threshold(self):
trace = [0, 2, 4, 2, 0]
self.assertEqual(self.proc.first_above_threshold(trace, 1), 1)
self.assertEqual(self.proc.first_above_threshold(trace, 3), 2)
self.assertEqual(self.proc.first_above_threshold(trace, 4), 2)
self.assertEqual(self.proc.first_above_threshold(trace, 5), -999)
# @patch.object(process_events.FindMostProbableValueInSpectrum, 'find_mpv')
def test__process_pulseintegrals(self):
self.proc.limit = 1
# mock_find_mpv.return_value = (-999, False)
# Because of small data sample fit fails for detector 1
self.assertEqual(self.proc._process_pulseintegrals()[0][1], -999.)
self.assertAlmostEqual(self.proc._process_pulseintegrals()[0][3], 3.98951741969)
self.proc.limit = None
def create_tempfile_from_testdata(self):
tmp_path = self.create_tempfile_path()
data_path = self.get_testdata_path()
shutil.copyfile(data_path, tmp_path)
return tmp_path
def create_tempfile_path(self):
fd, path = tempfile.mkstemp('.h5')
os.close(fd)
return path
def get_testdata_path(self):
dir_path = os.path.dirname(__file__)
return os.path.join(dir_path, TEST_DATA_FILE)
class ProcessIndexedEventsTests(ProcessEventsTests):
def setUp(self):
warnings.filterwarnings('ignore')
self.data_path = self.create_tempfile_from_testdata()
self.data = tables.open_file(self.data_path, 'a')
self.proc = process_events.ProcessIndexedEvents(self.data, DATA_GROUP, [0, 10], progress=False)
def test_process_traces(self):
timings = self.proc.process_traces()
self.assertEqual(timings[1][0], 162.5)
self.assertEqual(timings[1][1], -999)
def test_get_traces_for_indexed_event_index(self):
self.assertEqual(self.proc.get_traces_for_indexed_event_index(0)[12][3], 1334)
class ProcessEventsWithLINTTests(ProcessEventsTests):
def setUp(self):
warnings.filterwarnings('ignore')
self.data_path = self.create_tempfile_from_testdata()
self.data = tables.open_file(self.data_path, 'a')
self.proc = process_events.ProcessEventsWithLINT(self.data, DATA_GROUP, progress=False)
def test__reconstruct_time_from_traces(self):
event = self.proc.source[10]
times = self.proc._reconstruct_time_from_traces(event)
self.assertAlmostEqual(times[0], 160.685483871)
self.assertEqual(times[2], -999)
def test__reconstruct_time_from_trace(self):
trace = [200, 220]
self.assertEqual(self.proc._reconstruct_time_from_trace(trace, 180), 0)
self.assertEqual(self.proc._reconstruct_time_from_trace(trace, 190), 0.5)
        self.assertEqual(self.proc._reconstruct_time_from_trace(trace, 200), 1)
self.assertEqual(self.proc._reconstruct_time_from_trace(trace, 210), -999)
class ProcessEventsWithTriggerOffsetTests(ProcessEventsTests):
def setUp(self):
warnings.filterwarnings('ignore')
self.data_path = self.create_tempfile_from_testdata()
self.data = tables.open_file(self.data_path, 'a')
self.proc = process_events.ProcessEventsWithTriggerOffset(self.data, DATA_GROUP, progress=False)
def test__reconstruct_time_from_traces(self):
event = self.proc.source[10]
times = self.proc._reconstruct_time_from_traces(event)
self.assertEqual(times[0], 162.5)
self.assertEqual(times[2], -999)
self.assertEqual(times[4], 165)
def test__reconstruct_time_from_traces_with_external(self):
self.proc.trigger = [0, 0, 0, 1]
event = self.proc.source[10]
times = self.proc._reconstruct_time_from_traces(event)
self.assertEqual(times[0], 162.5)
self.assertEqual(times[2], -999)
self.assertEqual(times[4], -999)
def test__first_above_thresholds(self):
# 2 detectors
self.assertEqual(self.proc._first_above_thresholds((x for x in [200, 200, 900]), [300, 400], 900), [2, 2, -999])
self.assertEqual(self.proc._first_above_thresholds((x for x in [200, 200, 400]), [300, 400], 400), [2, 2, -999])
self.assertEqual(self.proc._first_above_thresholds((x for x in [200, 350, 450, 550]), [300, 400], 550), [1, 2, -999])
# 4 detectors
self.assertEqual(self.proc._first_above_thresholds((x for x in [200, 200, 900]), [300, 400, 500], 900), [2, 2, 2])
self.assertEqual(self.proc._first_above_thresholds((x for x in [200, 200, 400]), [300, 400, 500], 400), [2, 2, -999])
self.assertEqual(self.proc._first_above_thresholds((x for x in [200, 350, 450, 550]), [300, 400, 500], 550), [1, 2, 3])
# No signal
self.assertEqual(self.proc._first_above_thresholds((x for x in [200, 250, 200, 2000]), [300, 400, 500], 250), [-999, -999, -999])
def test__first_value_above_threshold(self):
trace = [200, 200, 300, 200]
self.assertEqual(self.proc._first_value_above_threshold(trace, 200), (0, 200))
self.assertEqual(self.proc._first_value_above_threshold(trace, 250), (2, 300))
self.assertEqual(self.proc._first_value_above_threshold(trace, 250, 4), (6, 300))
self.assertEqual(self.proc._first_value_above_threshold(trace, 500), (-999, 0))
def test__reconstruct_trigger(self):
self.proc.trigger = (0, 0, False, 0)
low_idx = [-999, -999, -999, -999]
high_idx = [-999, -999, -999, -999]
result = -999
self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
self.proc.trigger = (0, 0, True, 0)
self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
# Standard two det
|
nassan/sefaria-embedded
|
constants.py
|
Python
|
gpl-3.0
| 1,909
| 0
|
SEFARIA_API_NODE = "https://www.sefaria.org/api/texts/"
CACHE_MONITOR_LOOP_DELAY_IN_SECONDS = 86400
CACHE_LIFETIME_SECONDS = 604800
category_colors = {
    "Commentary": "#4871bf",
    "Tanakh": "#004e5f",
    "Midrash": "#5d956f",
    "Mishnah": "#5a99b7",
    "Talmud": "#ccb479",
    "Halakhah": "#802f3e",
    "Kabbalah": "#594176",
    "Philosophy": "#7f85a9",
    "Liturgy": "#ab4e66",
    "Tanaitic": "#00827f",
    "Parshanut": "#9ab8cb",
    "Chasidut": "#97b386",
    "Musar": "#7c406f",
    "Responsa": "#cb6158",
    "Apocrypha": "#c7a7b4",
    "Other": "#073570",
    "Quoting Commentary": "#cb6158",
"Sheets": "#7c406f",
"Community": "#7c406f",
"Targum": "#7f85a9",
"Modern Works": "#7c406f",
"Modern Commentary": "#7c406f",
}
platform_settings = {
"twitter": {
"font_size": 29,
"additional_line_spacing_he": 5,
"additional_line_spacing_en": -10,
"image_width": 506,
"image_height": 253,
"margin": 20,
"category_color_line_width": 7,
"sefaria_branding": False,
"branding_height": 0
},
"facebook": {
"font_size": 70,
"additional_line_spacing_he": 12,
"additional_line_spacing_en": -20,
"image_width": 1200,
"image_height": 630,
"margin": 40,
"category_color_line_width": 15,
"sefaria_branding": False,
"branding_height": 0
},
"instagram": {
"font_size": 70,
"additional_line_spacing_he": 12,
"additional_line_spacing_en": 0,
"image_width": 1040,
"image_height": 1040,
"margin": 40,
"category_color_line_width": 13,
"sefaria_branding": True,
"branding_height": 100
}
}
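A minimal sketch of how these constants might be consumed when composing a share image: pick the accent colour for the source's category and the layout block for the target platform.
from constants import category_colors, platform_settings

category = 'Talmud'
platform = 'twitter'
color = category_colors.get(category, category_colors['Other'])
layout = platform_settings[platform]
print(color, layout['image_width'], layout['image_height'], layout['font_size'])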
|
darcyliu/storyboard
|
home/models.py
|
Python
|
mit
| 2,582
| 0.013555
|
#!/usr/bin/env python
# encoding: utf-8
"""
models.py
Created by Darcy Liu on 2012-03-03.
Copyright (c) 2012 Close To U. All rights reserved.
"""
from django.db import models
from django.contrib.auth.models import User
# class Setting(models.Model):
# sid = models.AutoField(primary_key=True)
# option = models.CharField(unique=True,max_length=128,verbose_name='Option')
# value = models.CharField(max_length=256,verbose_name='Value')
class Minisite(models.Model):
key = mod
|
els.AutoField(primary_key=True)
name = models.CharField(max_length=256,verbose_name='name')
slug = models.CharField(unique=True,max_length=128,verbose_name='slug')
meta = models.TextField(blank=True, verbose_name='meta')
description = models.TextField(blank=True, verbose_name='description')
author = models.ForeignKey(User,verbose_name='author')
created = models.DateTimeField(auto_now_add=True,verbose_name='created')
updated = mo
|
dels.DateTimeField(auto_now=True,verbose_name='updated')
def __unicode__(self):
result = self.name
return unicode(result)
class Page(models.Model):
key = models.AutoField(primary_key=True)
name = models.CharField(max_length=256,verbose_name='name')
slug = models.CharField(max_length=128,verbose_name='slug')
    #type=//insite standalone
Mode_Choices = (
('0', 'insite'),
        ('1', 'standalone'),
)
    mode = models.CharField(verbose_name='mode',max_length=1,default=0,choices=Mode_Choices)
#content-type
mime = models.CharField(max_length=64,default='text/html;charset=utf-8',verbose_name='mime')
#format
Format_Choices = (
('0', 'txt'),
('1', 'html'),
('2', 'markdown'),
('3', 'textile'),
)
format = models.CharField(verbose_name='format',max_length=1,default=0,choices=Format_Choices)
text = models.TextField(blank=True, verbose_name='content')
script = models.TextField(blank=True, verbose_name='script')
style = models.TextField(blank=True, verbose_name='style')
text_html = models.TextField(blank=True, verbose_name='html')
minisite = models.ForeignKey(Minisite,verbose_name='minisite')
author = models.ForeignKey(User,verbose_name='author')
created = models.DateTimeField(auto_now_add=True,verbose_name='created')
updated = models.DateTimeField(auto_now=True,verbose_name='updated')
def __unicode__(self):
result = self.name
return unicode(result)
class Meta:
unique_together = (('slug', 'minisite'),)
|
compas-dev/compas
|
src/compas/datastructures/network/complementarity.py
|
Python
|
mit
| 1,442
| 0.001387
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from itertools import combinations
__all__ = [
'network_complement'
]
def network_complement(network, cls=None):
"""Generate the complement network of a network.
The complement of a graph G is the graph H with the same vertices
but whose edges consists of the edges not present in the graph G [1]_.
Parameters
----------
network : :class:`~compas.datastructures.Network`
A network.
Returns
-------
:class:`~compas.datastructures.Network`
The complement network.
References
----------
.. [1] Wolfram MathWorld. *Graph complement*.
Available at: http://mathworld.wolfram.com/GraphComplement.html.
Examples
--------
>>> import compas
>>> from compas.datastructures import Network
>>> from compas.datastructures import network_complement
>>> network = Network.from_obj(compas.get('lines.obj'))
>>> complement = network_complement(network)
>>> any(complement.has_edge(u, v, directed=False) for u, v in network.edges())
False
"""
if not cls:
cls = type(network)
nodes = [network.node_coordinates(key) for key in network.nodes()]
edges = [(u, v) for u, v in combinations(network.nodes(), 2) if
|
not network.has_edge(u, v, directed=False)]
retur
|
n cls.from_nodes_and_edges(nodes, edges)
|
ee08b397/panda3d
|
direct/src/tkpanels/Inspector.py
|
Python
|
bsd-3-clause
| 15,271
| 0.00681
|
"""Inspector
|
s allow you to visually browse through the members of
various python objects. To open an inspector, import this module, and
execute inspector.inspect(anObject). I start IDLE with this command
line: idle.py -c "from insp
|
ector import inspect"
so that I can just type: inspect(anObject) any time."""
__all__ = ['inspect', 'inspectorFor', 'Inspector', 'ModuleInspector', 'ClassInspector', 'InstanceInspector', 'FunctionInspector', 'InstanceMethodInspector', 'CodeInspector', 'ComplexInspector', 'DictionaryInspector', 'SequenceInspector', 'SliceInspector', 'InspectorWindow']
from direct.showbase.TkGlobal import *
from Tkinter import *
import Pmw
### public API
def inspect(anObject):
inspector = inspectorFor(anObject)
inspectorWindow = InspectorWindow(inspector)
inspectorWindow.open()
return inspectorWindow
### private
def inspectorFor(anObject):
typeName = type(anObject).__name__.capitalize() + 'Type'
if typeName in _InspectorMap:
inspectorName = _InspectorMap[typeName]
else:
print("Can't find an inspector for " + typeName)
inspectorName = 'Inspector'
inspector = globals()[inspectorName](anObject)
return inspector
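# For example (illustrative): inspecting a dict gives type({}).__name__ == 'dict',
# which becomes 'DictType' and maps to 'DictionaryInspector' in _InspectorMap below.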
### initializing
def initializeInspectorMap():
global _InspectorMap
notFinishedTypes = ['BufferType', 'EllipsisType', 'FrameType', 'TracebackType', 'XRangeType']
_InspectorMap = {
'Builtin_function_or_methodType': 'FunctionInspector',
'BuiltinFunctionType': 'FunctionInspector',
'BuiltinMethodType': 'FunctionInspector',
'ClassType': 'ClassInspector',
'CodeType': 'CodeInspector',
'ComplexType': 'Inspector',
'DictionaryType': 'DictionaryInspector',
'DictType': 'DictionaryInspector',
'FileType': 'Inspector',
'FloatType': 'Inspector',
'FunctionType': 'FunctionInspector',
'Instance methodType': 'InstanceMethodInspector',
'InstanceType': 'InstanceInspector',
'IntType': 'Inspector',
'LambdaType': 'Inspector',
'ListType': 'SequenceInspector',
'LongType': 'Inspector',
'MethodType': 'FunctionInspector',
'ModuleType': 'ModuleInspector',
'NoneType': 'Inspector',
'SliceType': 'SliceInspector',
'StringType': 'SequenceInspector',
'TupleType': 'SequenceInspector',
'TypeType': 'Inspector',
'UnboundMethodType': 'FunctionInspector'}
for each in notFinishedTypes:
_InspectorMap[each] = 'Inspector'
### Classes
class Inspector:
def __init__(self, anObject):
self.object = anObject
self.lastPartNumber = 0
self.initializePartsList()
self.initializePartNames()
def __str__(self):
return __name__ + '(' + str(self.object) + ')'
def initializePartsList(self):
self._partsList = []
keys = self.namedParts()
keys.sort()
for each in keys:
self._partsList.append(each)
#if not callable(getattr(self.object, each)):
# self._partsList.append(each)
def initializePartNames(self):
self._partNames = ['up'] + [str(each) for each in self._partsList]
def title(self):
"Subclasses may override."
return self.objectType().__name__.capitalize()
def getLastPartNumber(self):
return self.lastPartNumber
def selectedPart(self):
return self.partNumber(self.getLastPartNumber())
def namedParts(self):
return dir(self.object)
def stringForPartNumber(self, partNumber):
object = self.partNumber(partNumber)
doc = None
if callable(object):
try:
doc = object.__doc__
except:
pass
if doc:
return (str(object) + '\n' + str(doc))
else:
return str(object)
def partNumber(self, partNumber):
self.lastPartNumber = partNumber
if partNumber == 0:
return self.object
else:
part = self.privatePartNumber(partNumber)
return getattr(self.object, part)
def inspectorFor(self, part):
return inspectorFor(part)
def privatePartNumber(self, partNumber):
return self._partsList[partNumber - 1]
def partNames(self):
return self._partNames
def objectType(self):
return type(self.object)
###
class ModuleInspector(Inspector):
def namedParts(self):
return ['__dict__']
class ClassInspector(Inspector):
def namedParts(self):
return ['__bases__'] + self.object.__dict__.keys()
def title(self):
return self.object.__name__ + ' Class'
class InstanceInspector(Inspector):
def title(self):
return self.object.__class__.__name__
def namedParts(self):
return ['__class__'] + dir(self.object)
###
class FunctionInspector(Inspector):
def title(self):
return self.object.__name__ + "()"
class InstanceMethodInspector(Inspector):
def title(self):
return str(self.object.im_class) + "." + self.object.__name__ + "()"
class CodeInspector(Inspector):
def title(self):
return str(self.object)
###
class ComplexInspector(Inspector):
def namedParts(self):
return ['real', 'imag']
###
class DictionaryInspector(Inspector):
def initializePartsList(self):
Inspector.initializePartsList(self)
keys = self.object.keys()
keys.sort()
for each in keys:
self._partsList.append(each)
def partNumber(self, partNumber):
self.lastPartNumber = partNumber
if partNumber == 0:
return self.object
key = self.privatePartNumber(partNumber)
if key in self.object:
return self.object[key]
else:
return getattr(self.object, key)
class SequenceInspector(Inspector):
def initializePartsList(self):
Inspector.initializePartsList(self)
for each in range(len(self.object)):
self._partsList.append(each)
def partNumber(self, partNumber):
self.lastPartNumber = partNumber
if partNumber == 0:
return self.object
index = self.privatePartNumber(partNumber)
if type(index) == IntType:
return self.object[index]
else:
return getattr(self.object, index)
class SliceInspector(Inspector):
def namedParts(self):
return ['start', 'stop', 'step']
### Initialization
initializeInspectorMap()
class InspectorWindow:
def __init__(self, inspector):
self.inspectors = [inspector]
def topInspector(self):
return self.inspectors[len(self.inspectors) - 1]
def selectedPart(self):
return self.topInspector().selectedPart()
def inspectedObject(self):
return self.topInspector().object
def open(self):
self.top= Toplevel()
self.top.geometry('650x315')
self.createViews()
self.update()
#Private - view construction
def createViews(self):
self.createMenus()
# Paned widget for dividing two halves
self.framePane = Pmw.PanedWidget(self.top, orient = HORIZONTAL)
self.createListWidget()
self.createTextWidgets()
self.framePane.pack(expand = 1, fill = BOTH)
def setTitle(self):
self.top.title('Inspecting: ' + self.topInspector().title())
def createListWidget(self):
listFrame = self.framePane.add('list')
listWidget = self.listWidget = Pmw.ScrolledListBox(
listFrame, vscrollmode = 'static')
listWidget.pack(side=LEFT, fill=BOTH, expand=1)
# If you click in the list box, take focus so you can navigate
# with the cursor keys
listbox = listWidget.component('listbox')
listbox.bind('<ButtonPress-1>',
lambda e, l = listbox: l.focus_set())
listbox.bind('<ButtonRelease-1>', self.listSelectionChanged)
listbox.bind('<Double-Button-1>', self.popOrDive)
listbox.bind('<ButtonPress-3>', self.popupMenu)
listbox.bind('<KeyR
|
CAES-Python/CAES_Kivy_Garden
|
garden.light_indicator/example.py
|
Python
|
mit
| 229
| 0.026201
|
#example
from kivy.base import runTouchApp
from kivy.lang import Builder
fro
|
m kivy.garden.light_indicator impo
|
rt Light_indicator
from kivy.uix.button import Button
# LOAD KV UIX
runTouchApp(Builder.load_file('example.kv'))
|
brainiak/brainiak
|
brainiak/eventseg/event.py
|
Python
|
apache-2.0
| 26,617
| 0
|
# Copyright 2020 Princeton University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Event segmentation using a Hidden Markov Model
Given an ROI timeseries, this class uses an annealed fitting procedure to
segment the timeseries into events with stable activity patterns. After
learning the signature activity pattern of each event, the model can then be
applied to other datasets to identify a corresponding sequence of events.
Full details are available in:
Christopher Baldassano, Janice Chen, Asieh Zadbood,
Jonathan W Pillow, Uri Hasson, Kenneth A Norman
Discovering event structure in continuous narrative perception and memory
Neuron, Volume 95, Issue 3, 709 - 721.e5
https://doi.org/10.1016/j.neuron.2017.06.041
This class also extends the model described in the Neuron paper:
1) It allows transition matrices that are composed of multiple separate
chains of events rather than a single linear path. This allows a model to
contain patterns for multiple event sequences (e.g. narratives), and
fit probabilities along each of these chains on a new, unlabeled timeseries.
To use this option, pass in an event_chain vector labeling which events
belong to each chain, define event patterns using set_event_patterns(),
then fit to a new dataset with find_events.
2) To obtain better fits when the underlying event structure contains
events that vary substantially in length, the split_merge option allows
the fit() function to re-distribute events during fitting. The number of
merge/split proposals is controlled by split_merge_proposals, which
controls how thorough versus fast the fitting process is.
"""
# Authors: Chris Baldassano and Cătălin Iordan (Princeton University)
import numpy as np
from scipy import stats
import logging
import copy
from sklearn.base import BaseEstimator
from sklearn.utils.validation import check_is_fitted, check_array
from sklearn.exceptions import NotFittedError
import itertools
from . import _utils as utils # type: ignore
logger = logging.getLogger(__name__)
__all__ = [
"EventSegment",
]
class EventSegment(BaseEstimator):
"""Class for event segmentation of continuous fMRI data
Parameters
----------
n_events: int
Number of segments to learn
step_var: Callable[[int], float] : default 4 * (0.98 ** (step - 1))
The Gaussian variance to use during fitting, as a function of the
number of steps. Should decrease slowly over time.
n_iter: int, default: 500
Maximum number of steps to run during fitting
event_chains: ndarray with length = n_events
Array with unique value for each separate chain of events, each linked
in the order they appear in the array
split_merge: bool, default: False
Determines whether merge/split proposals are used during fitting with
fit(). This can improve fitting performance when events are highly
uneven in size, but requires additional time
split_merge_proposals: int, default: 1
Number of merges and splits to consider at each step. Computation time
scales as O(proposals^2) so this should usually be a small value
Attributes
----------
p_start, p_end: length n_events+1 ndarray
initial and final prior distributions over events
P: n_events+1 by n_events+1 ndarray
HMM transition matrix
ll_ : ndarray with length = number of training datasets
Log-likelihood for training datasets over the course of training
segments_: list of (time by event) ndarrays
Learned (soft) segmentation for training datasets
event_var_ : float
Gaussian variance at the end of learning
event_pat_ : voxel by event ndarray
Learned mean patterns for each event
"""
def _default_var_schedule(step):
return 4 * (0.98 ** (step - 1))
def __init__(self, n_events=2,
step_var=_default_var_schedule,
n_iter=500, event_chains=None,
split_merge=False, split_merge_proposals=1):
self.n_events = n_events
self.step_var = step_var
self.n_iter = n_iter
self.split_merge = split_merge
self.split_merge_proposals = split_merge_proposals
if event_chains is None:
self.event_chains = np.zeros(n_events)
else:
self.event_chains = event_chains
def _fit_validate(self, X):
"""Validate input to fit()
Validate data passed to fit(). Includes a transpose operation to
change the row/column order of X and z-scoring in time.
Parameters
----------
X: time by voxel ndarray, or a list of such ndarrays
fMRI data to be segmented
Returns
-------
X: list of voxel by time ndarrays
"""
if len(np.unique(self.event_chains)) > 1:
raise RuntimeError("Cannot fit chains, use set_event_patterns")
# Copy X into a list and transpose
X = copy.deepcopy(X)
if type(X) is not list:
X = [X]
for i in range(len(X)):
X[i] = check_array(X[i])
X[i] = X[i].T
# Check that number of voxels is consistent across datasets
n_dim = X[0].shape[0]
for i in range(len(X)):
assert (X[i].shape[0] == n_dim)
# Double-check that data is z-scored in time
for i in range(len(X)):
X[i] = stats.zscore(X[i], axis=1, ddof=1)
return X
def fit(self, X, y=None):
"""Learn a segmentation on training data
Fits event patterns and a segmentation to training data. After
running this function, the learned event patterns can be used to
segment other datasets using find_events
Parameters
----------
X: time by voxel ndarray, or a list of such ndarrays
fMRI data to be segmented. If a list is given, then all datasets
are segmented simultaneously with the same event patterns
y: not used (added to comply with BaseEstimator definition)
Returns
-------
self: the EventSegment object
"""
X = self._fit_validate(X)
n_train = len(X)
n_dim = X[0].shape[0]
self.classes_ = np.arange(self.n_events)
# Initialize variables for fitting
log_gamma = []
for i in range(n_train):
log_gamma.append(np.zeros((X[i].shape[1], self.n_events)))
step = 1
best_ll = float("-inf")
self.ll_ = np.empty((0, n_train))
while step <= self.n_iter:
iteration_var = self.step_var(step)
# Based on the current segmentation, compute the mean pattern
# for each event
seg_prob = [np.exp(lg) / np.sum(np.exp(lg), axis=0)
for lg in log_gamma]
mean_pat = np.empty((n_train, n_dim, self.n_events))
for i in range(n_train):
|
mean_pat[i, :, :] = X[i].dot(seg_prob[i])
mean_pat = np.mean(mean_pat, axis=0)
# Based on the current mean patterns, compute the event
# segmentation
self.ll_ = np.append(self.ll_, np.empty((1, n_train)), axis=0)
for i in range(n_train):
logprob = self._logprob_obs(X[i], mean_pat, iteration_var)
log_gamma[i], self.ll_[-1, i]
|
= self._forward_backward(logprob)
if step > 1 and self.split_merge:
curr_ll = np.mean(self.ll_[-1, :])
self.ll_[-1, :], log_gamma, mean_pat = \
self._split_merge(X, log_gamma, iteration_var, curr_ll)
# If log-likel
|
JustinAzoff/splunk-scripts
|
bubble.py
|
Python
|
mit
| 1,305
| 0.010728
|
"""bubble - re-emit a log record with superdomain
| bubble [field=hostname] [parts=2]
adds 'superhost' field
"""
import sys,splunk.Intersplunk
import re
ipregex = r"(?P<ip>((25[0-5]|2[0-4]\d|[01]\d\d|\d?\d)\.){3}(25[0-5]|2[0-4]\d|[01]\d\d|\d?\d))"
ip_rex = re.compile(ipregex)
def super_domain(host, output_parts):
parts = host.split(".")
num_parts = len(parts)
if output_parts > num_parts:
return host
if ip_rex.match(host):
host = '.'.join(parts[:-output_parts])
else:
host = '.'.join(parts[-output_parts:])
return host
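# Illustrative behavior (assuming parts=2):
#   super_domain('www.example.com', 2) -> 'example.com'  (hostnames keep the domain suffix)
#   super_domain('10.1.2.3', 2)        -> '10.1'          (IP addresses keep the network prefix)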
def add_superhost(results, field, num_parts):
for r in results:
if field not in r:
continue
d = super_domain(r[field], num_parts)
r['superhost'] = d
yield r
try:
keywords, options = splunk.Intersplunk.getKeywordsAndOptions()
field = options.get('field', 'hostname')
num_parts
|
= int(options.get('parts', 2))
results,dummyresults,settings = splunk.Intersplunk.getOrganizedResults()
results = list(add_superhost(results, field, num_parts))
except:
import traceback
stack = traceback.format_exc()
result
|
s = splunk.Intersplunk.generateErrorResults("Error : Traceback: " + str(stack))
splunk.Intersplunk.outputResults( results )
|
moonfruit/yyfeed
|
lib/yyfeed/util/cache.py
|
Python
|
apache-2.0
| 1,274
| 0.000785
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
import cPickle as pickle
except ImportError:
import pickle
import os.path
class FileCache(dict):
def __init__(self, filename):
self.filename = os.path.abspath(filename)
try:
            self.update(pickle.load(open(self.filename, 'rb')))
except:
pass
def __setitem__(self, key, value):
super(FileCache, self).__setitem__(key, value)
        pickle.dump(self, open(self.filename, 'wb'))
def set(self, key, value):
self.__setitem__(key, value)
def get_stats(self):
pass
try:
import pylibmc as memcache
except ImportError:
import memcache
class Cache(object):
def __init__(self, servers=None, default='.cache', **kargs):
if servers is None:
self.cache = memcache.Client(**kargs)
else:
self.cache = memcache.Client(servers, **kargs)
if not self.cache.get_stats():
self.cache = FileCache(default)
|
def __getitem__(self, key):
return self.cache.get(key)
def __setitem__(self, key, value):
self.cache.set(key, value)
def get(self, key):
return self.cache.get(key)
def set(self, key, value):
self.cach
|
e.set(key, value)
|
prescott66/mypaint
|
gui/inktool.py
|
Python
|
gpl-2.0
| 28,795
| 0.000903
|
# This file is part of MyPaint.
# Copyright (C) 2015 by Andrew Chadwick <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
## Imports
import math
from numpy import isfinite
import collections
import weakref
from logging import getLogger
logger = getLogger(__name__)
from gettext import gettext as _
import gi
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GLib
import gui.mode
import gui.overlays
import gui.style
import gui.drawutils
import lib.helpers
import gui.cursor
## Class defs
class _Phase:
"""Enumeration of the states that an InkingMode can be in"""
CAPTURE = 0
ADJUST = 1
_NODE_FIELDS = ("x", "y", "pressure", "xtilt", "ytilt", "time")
class _Node (collections.namedtuple("_Node", _NODE_FIELDS)):
"""Recorded control point, as a namedtuple.
Node tuples have the following 6 fields, in order
* x, y: model coords, float
* pressure: float in [0.0, 1.0]
* xtilt, ytilt: float in [-1.0, 1.0]
* time: absolute seconds, float
"""
class _EditZone:
"""Enumeration of what the pointer is on in the ADJUST phase"""
EMPTY_CANVAS = 0
CONTROL_NODE = 1 # see target_node_index
REJECT_BUTTON = 2
ACCEPT_BUTTON = 3
class InkingMode (gui.mode.ScrollableModeMixin,
gui.mode.BrushworkModeMixin,
gui.mode.DragMode):
## Metadata properties
ACTION_NAME = "InkingMode"
pointer_behavior = gui.mode.Behavior.PAINT_FREEHAND
scroll_behavior = gui.mode.Behavior.CHANGE_VIEW
permitted_switch_actions = (
set(gui.mode.BUTTON_BINDING_ACTIONS).union([
'RotateViewMode',
'ZoomViewMode',
'PanViewMode',
])
)
## Metadata methods
@classmethod
def get_name(cls):
return _(u"Inking")
def get_usage(self):
return _(u"Draw, and then adjust smooth lines")
@property
def inactive_cursor(self):
return None
@property
def active_cursor(self):
if self.phase == _Phase.ADJUST:
if self.zone == _EditZone.CONTROL_NODE:
return self._crosshair_cursor
elif self.zone != _EditZone.EMPTY_CANVAS: # assume button
return self._arrow_cursor
return None
## Class config vars
# Input node capture settings:
MAX_INTERNODE_DISTANCE_MIDDLE = 30 # display pixels
MAX_INTERNODE_DISTANCE_ENDS = 10 # display pixels
MAX_INTERNODE_TIME = 1/100.0 # seconds
# Captured input nodes are then interpolated with a spline.
# The code tries to make nice smooth input for the brush engine,
# but avoids generating too much work.
INTERPOLATION_MAX_SLICE_TIME = 1/200.0 # seconds
INTERPOLATION_MAX_SLICE_DISTANCE = 20 # model pixels
INTERPOLATION_MAX_SLICES = MAX_INTERNODE_DISTANCE_MIDDLE * 5
# In other words, limit to a set number of interpolation slices
# per display pixel at the time of stroke capture.
## Initialization & lifecycle methods
def __init__(self, **kwargs):
logger.info("Initializing %r", self)
super(InkingMode, self).__init__(**kwargs)
self.phase = _Phase.CAPTURE
self.zone = _EditZone.EMPTY_CANVAS
self._overlays = {} # keyed by tdw
self._reset_nodes()
self._reset_capture_data()
self._reset_adjust_data()
self._task_queue = collections.deque() # (cb, args, kwargs)
self._task_queue_runner_id = None
self._click_in
|
fo = None # (button, zone)
self._current_override_cursor = None
# Button pressed while drawing
# Not every device sends button presses, but evde
|
v ones
# do, and this is used as a workaround for an evdev bug:
# https://github.com/mypaint/mypaint/issues/223
self._button_down = None
self._last_good_raw_pressure = 0.0
self._last_good_raw_xtilt = 0.0
self._last_good_raw_ytilt = 0.0
def _reset_nodes(self):
self.nodes = [] # nodes that met the distance+time criteria
def _reset_capture_data(self):
self._last_event_node = None # node for the last event
self._last_node_evdata = None # (xdisp, ydisp, tmilli) for nodes[-1]
def _reset_adjust_data(self):
self.zone = _EditZone.EMPTY_CANVAS
self.target_node_index = None
self._dragged_node_start_pos = None
def _ensure_overlay_for_tdw(self, tdw):
overlay = self._overlays.get(tdw)
if not overlay:
overlay = InkingModeOverlay(self, tdw)
tdw.display_overlays.append(overlay)
self._overlays[tdw] = overlay
return overlay
def _is_active(self):
for mode in self.doc.modes:
if mode is self:
return True
return False
def _discard_overlays(self):
for tdw, overlay in self._overlays.items():
tdw.display_overlays.remove(overlay)
tdw.queue_draw()
self._overlays.clear()
def enter(self, **kwds):
"""Enters the mode: called by `ModeStack.push()` etc."""
super(InkingMode, self).enter(**kwds)
if not self._is_active():
self._discard_overlays()
self._ensure_overlay_for_tdw(self.doc.tdw)
self._arrow_cursor = self.doc.app.cursors.get_action_cursor(
self.ACTION_NAME,
gui.cursor.Name.ARROW,
)
self._crosshair_cursor = self.doc.app.cursors.get_action_cursor(
self.ACTION_NAME,
gui.cursor.Name.CROSSHAIR_OPEN_PRECISE,
)
def leave(self, **kwds):
"""Leaves the mode: called by `ModeStack.pop()` etc."""
if not self._is_active():
self._discard_overlays()
self._stop_task_queue_runner(complete=True)
super(InkingMode, self).leave(**kwds) # supercall will commit
def checkpoint(self, flush=True, **kwargs):
"""Sync pending changes from (and to) the model
If called with flush==False, this is an override which just
redraws the pending stroke with the current brush settings and
color. This is the behavior our testers expect:
https://github.com/mypaint/mypaint/issues/226
When this mode is left for another mode (see `leave()`), the
pending brushwork is committed properly.
"""
if flush:
# Commit the pending work normally
self._start_new_capture_phase(rollback=False)
super(InkingMode, self).checkpoint(flush=flush, **kwargs)
else:
# Queue a re-rendering with any new brush data
# No supercall
self._stop_task_queue_runner(complete=False)
for tdw in self._overlays.keys():
self._queue_draw_buttons(tdw)
self._queue_redraw_all_nodes(tdw)
self._queue_redraw_curve(tdw)
def _start_new_capture_phase(self, rollback=False):
"""Let the user capture a new ink stroke"""
if rollback:
self._stop_task_queue_runner(complete=False)
self.brushwork_rollback_all()
else:
self._stop_task_queue_runner(complete=True)
self.brushwork_commit_all()
for tdw in self._overlays.keys():
self._queue_draw_buttons(tdw)
self._queue_redraw_all_nodes(tdw)
self._reset_nodes()
self._reset_capture_data()
self._reset_adjust_data()
self.phase = _Phase.CAPTURE
## Raw event handling (prelight & zone selection in adjust phase)
def button_press_cb(self, tdw, event):
current_layer = tdw.doc._layers.current
if not (tdw.is_sensitive and current_layer.get_paintable()):
return False
self._update_zone_and_target(tdw, event.x, event.y)
if self.phase == _Phase.ADJUST:
if self.zone in (_EditZone.REJECT_BUTTON,
_EditZone.ACCEPT
|
cedriclaunay/gaffer
|
python/GafferUI/ApplicationMenu.py
|
Python
|
bsd-3-clause
| 5,283
| 0.038236
|
##########################################################################
#
# Copyright (c) 2011-2012, John Haddon. All rights reserved.
# Copyright (c) 2011, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANT
|
ABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (I
|
NCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
import weakref
import IECore
import Gaffer
import GafferUI
def appendDefinitions( menuDefinition, prefix ) :
menuDefinition.append( prefix + "/About Gaffer...", { "command" : about } )
menuDefinition.append( prefix + "/Preferences...", { "command" : preferences } )
menuDefinition.append( prefix + "/Documentation...", { "command" : IECore.curry( GafferUI.showURL, os.path.expandvars( "$GAFFER_ROOT/doc/gaffer/html/index.html" ) ) } )
menuDefinition.append( prefix + "/Quit", { "command" : quit, "shortCut" : "Ctrl+Q" } )
def quit( menu ) :
scriptWindow = menu.ancestor( GafferUI.ScriptWindow )
application = scriptWindow.scriptNode().ancestor( Gaffer.ApplicationRoot )
unsavedNames = []
for script in application["scripts"].children() :
if script["unsavedChanges"].getValue() :
f = script["fileName"].getValue()
f = f.rpartition( "/" )[2] if f else "untitled"
unsavedNames.append( f )
if unsavedNames :
dialogue = GafferUI.ConfirmationDialogue(
"Discard Unsaved Changes?",
"The following files have unsaved changes : \n\n" +
"\n".join( [ " - " + n for n in unsavedNames ] ) +
"\n\nDo you want to discard the changes and quit?",
confirmLabel = "Discard and Quit"
)
if not dialogue.waitForConfirmation( parentWindow=scriptWindow ) :
return
# Defer the actual removal of scripts till an idle event - removing all
# the scripts will result in the removal of the window our menu item is
# parented to, which would cause a crash as it's deleted away from over us.
GafferUI.EventLoop.addIdleCallback( IECore.curry( __removeAllScripts, application ) )
def __removeAllScripts( application ) :
for script in application["scripts"].children() :
application["scripts"].removeChild( script )
return False # remove idle callback
__aboutWindow = None
def about( menu ) :
global __aboutWindow
if __aboutWindow is not None and __aboutWindow() :
window = __aboutWindow()
else :
window = GafferUI.AboutWindow( Gaffer.About )
__aboutWindow = weakref.ref( window )
scriptWindow = menu.ancestor( GafferUI.ScriptWindow )
scriptWindow.addChildWindow( window )
window.setVisible( True )
__preferencesWindows = weakref.WeakKeyDictionary()
def preferences( menu ) :
scriptWindow = menu.ancestor( GafferUI.ScriptWindow )
application = scriptWindow.scriptNode().ancestor( Gaffer.ApplicationRoot )
global __preferencesWindows
window = __preferencesWindows.get( application, None )
if window is not None and window() :
window = window()
else :
window = GafferUI.Dialogue( "Preferences" )
closeButton = window._addButton( "Close" )
window.__closeButtonConnection = closeButton.clickedSignal().connect( __closePreferences )
saveButton = window._addButton( "Save" )
window.__saveButtonConnection = saveButton.clickedSignal().connect( __savePreferences )
nodeUI = GafferUI.NodeUI.create( application["preferences"] )
window._setWidget( nodeUI )
__preferencesWindows[application] = weakref.ref( window )
scriptWindow.addChildWindow( window )
window.setVisible( True )
def __closePreferences( button ) :
button.ancestor( type=GafferUI.Window ).setVisible( False )
def __savePreferences( button ) :
scriptWindow = button.ancestor( GafferUI.ScriptWindow )
application = scriptWindow.scriptNode().ancestor( Gaffer.ApplicationRoot )
application.savePreferences()
button.ancestor( type=GafferUI.Window ).setVisible( False )
|
Salamek/git-deploy
|
git_deploy/database.py
|
Python
|
gpl-3.0
| 3,246
| 0.021873
|
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func
db = SQLAlchemy()
class BaseTable(db.Model):
__abstract__ = True
updated = db.Column(db.DateTime, default=func.now(), onupdate=func.current_timestamp())
created = db.Column(db.DateTime, default=func.now())
# Server -> Namespace -> Repository -> Branch -> Commit -> Deploy -> Log
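# Given the backrefs defined below, a fully populated chain can be walked as, e.g.
# (illustrative): server.namespace[0].repository[0].branch[0].commit[0].deploy[0].log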
class Server(BaseTable):
__tablename__ = 'server'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255))
namespace = relationship("Namespace", order_by="Namespace.id", backref="server")
class Namespace(BaseTable):
__tablename__ = 'namespace'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255))
server_id = db.Column(db.Integer, db.ForeignKey('server.id'))
user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
repository = relationship("Repository", order_by="Repository.id", backref="namespace")
class Repository(BaseTable):
__tablename__ = 'repository'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255))
namespace_id = db.Column(db.Integer, db.ForeignKey('namespace.id'))
branch = relationship("Branch", order_by="Branch.updated.desc()", backref="repository")
class Branch(BaseTable):
__tablename__ = 'branch'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255))
repository_id = db.Column(db.Integer, db.ForeignKey('repository.id'))
commit = relationship("Commit", order_by="Commit.created.desc()", backref="branch")
class Commit(BaseTable):
__tablename__ = 'commit'
id = db.Column(db.Integer, primary_key=True)
sha = db.Column(db.String(40))
name = db.Column(db.String(255))
description = db.Column(db.String(1024))
status = db.Column(db.Enum('ERROR', 'WARNING', 'OK', 'UNKNOWN', 'RUNNING', name='commit_status_type'))
runtime = db.Column(db.Integer)
branch_id = db.Column(db.Integer, db.ForeignKey('branch.id'))
user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
deploy = relationship("Deploy", order_by="Deploy.id", backref="commit")
class Deploy(BaseTable):
__tablename__ = 'deploy'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255))
uri = db.Column(db.String(1024))
|
status = db.Column(db.Enum('ERROR', 'WARNING', 'OK', 'UNKNOWN', 'RUNNING', name='deploy_status_type'))
runtime = db.Column(db.Integer)
commit_id = db.Column(db.In
|
teger, db.ForeignKey('commit.id'))
log = relationship("Log", order_by="Log.id", backref="deploy")
class Log(BaseTable):
__tablename__ = 'log'
id = db.Column(db.Integer, primary_key=True)
data = db.Column(db.String(1024))
status = db.Column(db.Enum('ERROR', 'WARNING', 'OK', 'UNKNOWN', name='log_status_type'))
deploy_id = db.Column(db.Integer, db.ForeignKey('deploy.id'))
class User(BaseTable):
__tablename__ = 'user'
id = db.Column(db.Integer, primary_key=True)
first_name = db.Column(db.String(255))
last_name = db.Column(db.String(255))
email = db.Column(db.String(255))
password = db.Column(db.String(255))
commit = relationship("Commit", order_by="Commit.id", backref="user")
namespace = relationship("Namespace", order_by="Namespace.id", backref="user")
|
drbitboy/SpiceyPy
|
spiceypy/__init__.py
|
Python
|
mit
| 1,313
| 0
|
"""
The MIT License (MIT)
Copyright (c) [2015-2018] [Andrew Annex]
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS I
|
N THE
SOFTWARE.
"""
from .spiceypy import *
from .utils import support_types
__author__ = 'AndrewAnnex'
# Default setting for error reporting so that programs don't just exit out!
erract("set", 10, "return")
errdev("s
|
et", 10, "null")
|
julienc91/ShFlickr
|
main.py
|
Python
|
mit
| 6,714
| 0.004319
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = "Julien Chaumont"
__copyright__ = "Copyright 2014, Julien Chaumont"
__licence__ = "MIT"
__version__ = "1.0.2"
__contact__ = "julienc91 [at] outlook.fr"
import flickrapi
import os, sys
import re
from config import *
class ShFlickr:
##
# Connexion to Flickr.
#
def __init__(self):
self.flickr = flickrapi.FlickrAPI(API_KEY, API_SECRET)
(token, frob) = self.flickr.get_token_part_one(perms='delete')
if not token:
raw_input("Press ENTER after you authorized this program")
self.flickr.get_token_part_two((token, frob))
##
# Get the list of files to synchronize with Flickr.
# @param folder Path to the main folder
    # @return A tuple (photos_to_sync, photoset_ids) where photos_to_sync
# is the list of files to synchronize for each subfolder, and
# photoset_ids is the list of albums with their respective id on Flickr,
# or None if the album does not exist yet.
#
def synclist(self, folder=PICTURE_FOLDER_PATH):
print "Getting the list of pictures to synchronize..."
subfolders = [lfile for lfile in os.listdir(unicode(folder))
if os.path.isdir(os.path.join(folder, lfile))
and re.match(SUBFOLDERS_REGEXP, lfile)]
photosets = self.flickr.photosets_getList(user_id=USER_ID)
photos_to_sync = {}
photoset_ids = {}
for subfolder in subfolders:
subfolder = subfolder.encode("UTF-8")
# Check if the album already exists on Flickr
photoset_id = None
for photoset in photosets.find('photosets').findall('photoset'):
photoset_title = photoset.find('title').text
if type(photoset_title) == unicode:
photoset_title = photoset_title.encode("UTF-8")
if photoset_title == subfolder:
photoset_id = str(photoset.attrib['id'])
break
photoset_ids[subfolder] = photoset_id
# Get the list of pictures to synchronize within this album
photos_to_sync[subfolder] = self.synclist_subfolder(os.path.join(folder, subfolder), photoset_id)
return photos_to_sync, photoset_ids
##
# Get the list of pictures to synchronize within an album.
# @param subfolder Complete path to the subfolder to synchronize
# @param photoset_id Id of the album on Flickr, or None of the album does not exist yet
# @return The list of the pictures to synchronize.
#
def synclist_subfolder(self, subfolder, photoset_id=None):
files = [lfile for lfile in os.listdir(unicode(subfolder))
if lfile.endswith(PICTURE_EXTENSIONS)]
files_to_sync = []
if photoset_id is not None:
# Find which file were not uploaded
photoset = list(self.flickr.walk_set(photoset_id))
for lfile in files:
lfile = lfile.encode("UTF-8")
found = False
for photo in photoset:
photo = photo.get('title')
if type(photo) == unicode:
photo = photo.encode("UTF-8")
if photo == lfile:
found = True
break
if not found:
files_to_sync.append(lfile)
else:
for lfile in files:
files_to_sync.append(lfile)
return files_to_sync
##
# Performs the upload.
# @param photos_to_sync A dictionary containing the list of
# pictures to upload for each subfolder.
# @param photoset_ids Dict of albums and their Flickr ids.
# @param folder Path to the main folder.
#
    def upload(self, photos_to_sync, photoset_ids={}, folder=PICTURE_FOLDER_PATH):
for subfolder in sorted(photos_to_sync):
count = 1
total = len(photos_to_sync[subfolder])
len_count = len(str(total))
consecutive_errors = 0
print "Album %s: %s photos to synchronize" % (subfolder, total)
for photo in sorted(photos_to_sync[subfolder]):
print "%-*s/%s\t %s" % (len_count, count, total, photo)
nb_errors = 0
done = False
while nb_errors < MAX_RETRIES and not done:
try:
path = os.path.join(folder, subfolder, photo).encode("UTF-8")
photo = photo.encode("UTF-8")
response = self.flickr.upload(filename=path,
title=photo,
is_public=VISIBLE_PUBLIC,
|
is_family=VISIBLE_FAMILY,
is_friend=VISIBLE_FRIEND)
except KeyboardInterrupt:
print "Exit by user request"
return
except:
nb_errors += 1
consecutive_errors += 1
if consecutive_errors >= MAX_CONSECUTIVE_ERRORS:
|
print "5 failed uploads in a row, aborting."
return
else:
print "Error, retrying upload (%s/%s)" % (nb_errors, MAX_RETRIES)
else:
photo_id = response.find('photoid').text
done = True
count += 1
consecutive_errors = 0
if photoset_ids[subfolder] is None:
print "Creating the remote album %s" % subfolder
response = self.flickr.photosets_create(title=subfolder,
primary_photo_id=photo_id)
photoset_ids[subfolder] = response.find('photoset').attrib['id']
else:
self.flickr.photosets_addPhoto(photoset_id=photoset_ids[subfolder],
photo_id=photo_id)
            if nb_errors == MAX_RETRIES:
print "%s failed to upload" % photo
if __name__ == "__main__":
shflickr = ShFlickr()
photos_to_sync, photoset_ids = shflickr.synclist()
shflickr.upload(photos_to_sync, photoset_ids)
|
Ecotrust/PEW-EFH
|
mp/visualize/views.py
|
Python
|
apache-2.0
| 11,227
| 0.004186
|
# Create your views here.
from django.contrib.auth.models import Group
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
from django.http import HttpResponse
from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext
import os
from querystring_parser import parser
import simplejson
from simplejson import dumps
from social.backends.google import GooglePlusAuth
from madrona.features import get_feature_by_uid
import settings
from .models import *
from data_manager.models import *
from mp_settings.models import *
def show_planner(request, project=None, template='planner.html'):
try:
socket_url = settings.SOCKET_URL
except AttributeError:
socket_url = ''
try:
if project:
mp_settings = MarinePlannerSettings.objects.get(slug_name=project)
else:
mp_settings = MarinePlannerSettings.objects.get(active=True)
project_name = mp_settings.project_name
latitude = mp_settings.latitude
longitude = mp_settings.longitude
zoom = mp_settings.zoom
default_hash = mp_settings.default_hash
min_zoom = mp_settings.min_zoom
max_zoom = mp_settings.max_zoom
project_logo = mp_settings.project_logo
try:
if project_logo:
url_validator = URLValidator()
url_validator(project_logo)
except ValidationError, e:
project_logo = os.path.join(settings.MEDIA_URL, project_logo)
project_icon = mp_settings.project_icon
try:
url_validator = URLValidator()
url_validator(project_icon)
except ValidationError, e:
project_icon = os.path.join(settings.MEDIA_URL, project_icon)
project_home_page = mp_settings.project_home_page
enable_drawing = mp_settings.enable_drawing
bitly_registered_domain = mp_settings.bitly_registered_domain
bitly_username = mp_settings.bitly_username
bitly_api_key = mp_settings.bitly_api_key
except:
project_name = project_logo = project_icon = project_home_page = bitly_registered_domain = bitly_username = bitly_api_key = default_hash = ""
latitude = longitude = zoom = min_zoom = max_zoom = None
enable_drawing = False
context = {
'MEDIA_URL': settings.MEDIA_URL, 'SOCKET_URL': socket_url, 'login': 'true',
'project_name': project_name, 'latitude': latitude, 'longitude': longitude, 'zoom': zoom,
'default_hash': default_hash, 'min_zoom': min_zoom, 'max_zoom': max_zoom,
'project_logo': project_logo, 'project_icon': project_icon, 'project_home_page': project_home_page,
'enable_drawing': enable_drawing,
'bitly_registered_domain': bitly_registered_domain, 'bitly_username': bitly_username, 'bitly_api_key': bitly_api_key
}
    if request.user.is_authenticated():
context['session'] = request.session._session_key
if request.user.is_authenticated() and request.user.social_auth.all().count() > 0:
context['picture'] = request.user.social_auth.all()[0].extra_data.get('picture')
if settings.SOCIAL_AUTH_GOOGLE_PLUS_KEY:
context['plus_scope'] = ' '.join(GooglePlusAuth.DEFAULT_SCOPE)
context['plus_id'] = settings.SOCIAL_AUTH_GOOGLE_PLUS_KEY
if settings.UNDER_MAINTENANCE_TEMPLATE:
return render_to_response('under_maintenance.html',
RequestContext(request, context))
return render_to_response(template, RequestContext(request, context))
def show_embedded_map(request, project=None, template='map.html'):
try:
if project:
mp_settings = MarinePlannerSettings.objects.get(slug_name=project)
else:
mp_settings = MarinePlannerSettings.objects.get(active=True)
project_name = mp_settings.project_name
project_logo = mp_settings.project_logo
try:
if project_logo:
url_validator = URLValidator(verify_exists=False)
url_validator(project_logo)
except ValidationError, e:
project_logo = os.path.join(settings.MEDIA_URL, project_logo)
project_home_page = mp_settings.project_home_page
except:
project_name = project_logo = project_home_page = None
context = {
'MEDIA_URL': settings.MEDIA_URL,
'project_name': project_name,
'project_logo': project_logo,
'project_home_page': project_home_page
}
#context = {'MEDIA_URL': settings.MEDIA_URL}
return render_to_response(
|
template, RequestContext(request, context))
def show_mobile_map(request, project=None, template='mobile-map.html'):
try:
if project:
mp_settings = MarinePlannerSettings.objects.get(slug_name=project)
else:
mp_settings = MarinePlannerSettings.objects.get(active=True)
print 'so far so good'
project_name = mp_settings.project_name
projec
|
t_logo = mp_settings.project_logo
print project_name
print project_logo
# try:
# if project_logo:
# url_validator = URLValidator(verify_exists=False)
# url_validator(project_logo)
# except ValidationError, e:
# project_logo = os.path.join(settings.MEDIA_URL, project_logo)
print 'almost there...'
project_home_page = mp_settings.project_home_page
print 'here we go...'
latitude = mp_settings.latitude
print latitude
longitude = mp_settings.longitude
print longitude
zoom = mp_settings.zoom
print zoom
min_zoom = mp_settings.min_zoom
max_zoom = mp_settings.max_zoom
print min_zoom
print max_zoom
except:
        project_name = project_logo = project_home_page = None
        latitude = longitude = zoom = None
context = {
'MEDIA_URL': settings.MEDIA_URL,
# 'project_name': project_name,
# 'project_logo': project_logo,
# 'project_home_page': project_home_page
'latitude': latitude,
'longitude': longitude,
'zoom': zoom
}
#context = {'MEDIA_URL': settings.MEDIA_URL}
return render_to_response(template, RequestContext(request, context))
def get_sharing_groups(request):
from madrona.features import user_sharing_groups
from functools import cmp_to_key
import locale
locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
json = []
sharing_groups = user_sharing_groups(request.user)
for group in sharing_groups:
members = []
for user in group.user_set.all():
if user.first_name.replace(' ', '') != '' and user.last_name.replace(' ', '') != '':
members.append(user.first_name + ' ' + user.last_name)
else:
members.append(user.username)
sorted_members = sorted(members, key=cmp_to_key(locale.strcoll))
json.append({
'group_name': group.name,
'group_slug': slugify(group.name)+'-sharing',
'members': sorted_members
})
return HttpResponse(dumps(json))
'''
'''
def share_bookmark(request):
group_names = request.POST.getlist('groups[]')
bookmark_uid = request.POST['bookmark']
bookmark = get_feature_by_uid(bookmark_uid)
viewable, response = bookmark.is_viewable(request.user)
if not viewable:
return response
#remove previously shared with groups, before sharing with new list
bookmark.share_with(None)
groups = []
for group_name in group_names:
groups.append(Group.objects.get(name=group_name))
bookmark.share_with(groups, append=False)
return HttpResponse("", status=200)
'''
'''
def get_bookmarks(request):
#sync the client-side bookmarks with the server side bookmarks
#update the server-side bookmarks and return the new list
try:
bookmark_dict = parser.parse(request.POST.ur
|
polyanskiy/refractiveindex.info-scripts
|
scripts/Rakic 1998 - Au (LD model).py
|
Python
|
gpl-3.0
| 2,100
| 0.028529
|
# -*- coding: utf-8 -*-
# Author: Mikhail Polyanskiy
# Last modified: 2017-04-02
# Original data: Rakić et al. 1998, https://doi.org/10.1364/AO.37.005271
import numpy as np
import matplotlib.pyplot as plt
# Lorentz-Drude (LD) model parameters
ωp = 9.03 #eV
f0 = 0.760
Γ0 = 0.053 #eV
f1 = 0.024
Γ1 = 0.241 #eV
ω1 = 0.415 #eV
f2 = 0.010
Γ2 = 0.345 #eV
ω2 = 0.830 #eV
f3 = 0.071
Γ3 = 0.870 #eV
ω3 = 2.969 #eV
f4 = 0.601
Γ4 = 2.494 #eV
ω4 = 4.304 #eV
f5 = 4.384
Γ5 = 2.214 #eV
ω5 = 13.32 #eV
Ωp = f0**.5 * ωp #eV
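# The LD() function below evaluates the Lorentz-Drude dielectric function
#   ε(ω) = 1 - Ωp²/(ω(ω + iΓ0)) + Σ_{j=1..5} fj·ωp² / ((ωj² - ω²) - i·ω·Γj)
# i.e. the free-electron (intraband) term plus five interband oscillators, with Ωp² = f0·ωp².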
def LD(ω): #ω: eV
ε = 1-Ωp**2/(ω*(ω+1j*Γ0))
ε += f1*ωp**2 / ((ω1**2-ω**2)-1j*ω*Γ1)
ε += f2*ωp**2 / ((ω2**2-ω**2)-1j*ω*Γ2)
|
ε += f3*ωp**2 / ((ω3**2-ω**2)-1j*ω*Γ3)
ε += f4*ωp**2 / ((ω4**2-ω**2)-1j*ω*Γ4)
ε += f5*ωp**2 / ((ω5**2-ω**2)-1j*ω*Γ5)
return ε
ev_min=0.2
ev_max=5
npoints=200
eV = np.logspace(np.log10(ev_min), np.log10(ev_max), npoints)
μm = 4.13566733e-1*2.99792458/eV
ε = LD(eV)
n = (ε**.5).real
k = (ε**.5).imag
#============================ DATA OUTPUT =================================
file = open('out.txt', 'w')
for i in range(npoints-1, -1, -1):
file.write('\n
|
{:.4e} {:.4e} {:.4e}'.format(μm[i],n[i],k[i]))
file.close()
#=============================== PLOT =====================================
plt.rc('font', family='Arial', size='14')
plt.figure(1)
plt.plot(eV, -ε.real, label="-ε1")
plt.plot(eV, ε.imag, label="ε2")
plt.xlabel('Photon energy (eV)')
plt.ylabel('ε')
plt.xscale('log')
plt.yscale('log')
plt.legend(bbox_to_anchor=(0,1.02,1,0),loc=3,ncol=2,borderaxespad=0)
#plot n,k vs eV
plt.figure(2)
plt.plot(eV, n, label="n")
plt.plot(eV, k, label="k")
plt.xlabel('Photon energy (eV)')
plt.ylabel('n, k')
plt.yscale('log')
plt.legend(bbox_to_anchor=(0,1.02,1,0),loc=3,ncol=2,borderaxespad=0)
#plot n,k vs μm
plt.figure(3)
plt.plot(μm, n, label="n")
plt.plot(μm, k, label="k")
plt.xlabel('Wavelength (μm)')
plt.ylabel('n, k')
plt.xscale('log')
plt.yscale('log')
plt.legend(bbox_to_anchor=(0,1.02,1,0),loc=3,ncol=2,borderaxespad=0)
|
andreaso/ansible
|
lib/ansible/modules/network/nxos/nxos_overlay_global.py
|
Python
|
gpl-3.0
| 8,884
| 0.0009
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your
|
option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTAB
|
ILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: nxos_overlay_global
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Configures anycast gateway MAC of the switch.
description:
- Configures anycast gateway MAC of the switch.
author: Gabriele Gerbino (@GGabriele)
notes:
- Default restores params default value
- Supported MAC address format are "E.E.E", "EE-EE-EE-EE-EE-EE",
"EE:EE:EE:EE:EE:EE" and "EEEE.EEEE.EEEE"
options:
anycast_gateway_mac:
description:
- Anycast gateway mac of the switch.
required: true
default: null
'''
EXAMPLES = '''
- nxos_overlay_global:
anycast_gateway_mac: "b.b.b"
username: "{{ un }}"
password: "{{ pwd }}"
host: "{{ inventory_hostname }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: verbose mode
type: dict
sample: {"asn": "65535", "router_id": "1.1.1.1", "vrf": "test"}
existing:
description: k/v pairs of existing BGP configuration
returned: verbose mode
type: dict
sample: {"asn": "65535", "bestpath_always_compare_med": false,
"bestpath_aspath_multipath_relax": false,
"bestpath_compare_neighborid": false,
"bestpath_compare_routerid": false,
"bestpath_cost_community_ignore": false,
"bestpath_med_confed": false,
"bestpath_med_missing_as_worst": false,
"bestpath_med_non_deterministic": false, "cluster_id": "",
"confederation_id": "", "confederation_peers": "",
"graceful_restart": true, "graceful_restart_helper": false,
"graceful_restart_timers_restart": "120",
"graceful_restart_timers_stalepath_time": "300", "local_as": "",
"log_neighbor_changes": false, "maxas_limit": "",
"neighbor_down_fib_accelerate": false, "reconnect_interval": "60",
"router_id": "11.11.11.11", "suppress_fib_pending": false,
"timer_bestpath_limit": "", "timer_bgp_hold": "180",
"timer_bgp_keepalive": "60", "vrf": "test"}
end_state:
description: k/v pairs of BGP configuration after module execution
returned: verbose mode
type: dict
sample: {"asn": "65535", "bestpath_always_compare_med": false,
"bestpath_aspath_multipath_relax": false,
"bestpath_compare_neighborid": false,
"bestpath_compare_routerid": false,
"bestpath_cost_community_ignore": false,
"bestpath_med_confed": false,
"bestpath_med_missing_as_worst": false,
"bestpath_med_non_deterministic": false, "cluster_id": "",
"confederation_id": "", "confederation_peers": "",
"graceful_restart": true, "graceful_restart_helper": false,
"graceful_restart_timers_restart": "120",
"graceful_restart_timers_stalepath_time": "300", "local_as": "",
"log_neighbor_changes": false, "maxas_limit": "",
"neighbor_down_fib_accelerate": false, "reconnect_interval": "60",
"router_id": "1.1.1.1", "suppress_fib_pending": false,
"timer_bestpath_limit": "", "timer_bgp_hold": "180",
"timer_bgp_keepalive": "60", "vrf": "test"}
updates:
description: commands sent to the device
returned: always
type: list
sample: ["router bgp 65535", "vrf test", "router-id 1.1.1.1"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import re
from ansible.module_utils.nxos import get_config, load_config
from ansible.module_utils.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.netcfg import CustomNetworkConfig
PARAM_TO_COMMAND_KEYMAP = {
'anycast_gateway_mac': 'fabric forwarding anycast-gateway-mac',
}
def invoke(name, *args, **kwargs):
func = globals().get(name)
if func:
return func(*args, **kwargs)
def get_value(arg, config, module):
REGEX = re.compile(r'(?:{0}\s)(?P<value>.*)$'.format(PARAM_TO_COMMAND_KEYMAP[arg]), re.M)
value = ''
if PARAM_TO_COMMAND_KEYMAP[arg] in config:
value = REGEX.search(config).group('value')
return value
def get_existing(module, args):
existing = {}
config = str(get_config(module))
for arg in args:
existing[arg] = get_value(arg, config, module)
return existing
def apply_key_map(key_map, table):
new_dict = {}
for key, value in table.items():
new_key = key_map.get(key)
if new_key:
value = table.get(key)
if value:
new_dict[new_key] = value
else:
new_dict[new_key] = value
return new_dict
def get_commands(module, existing, proposed, candidate):
commands = list()
proposed_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, proposed)
existing_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, existing)
for key, value in proposed_commands.items():
if value == 'default':
existing_value = existing_commands.get(key)
if existing_value:
commands.append('no {0} {1}'.format(key, existing_value))
else:
if 'anycast-gateway-mac' in key:
value = normalize_mac(value, module)
command = '{0} {1}'.format(key, value)
commands.append(command)
if commands:
candidate.add(commands, parents=[])
def normalize_mac(proposed_mac, module):
try:
if '-' in proposed_mac:
splitted_mac = proposed_mac.split('-')
if len(splitted_mac) != 6:
raise ValueError
for octect in splitted_mac:
if len(octect) != 2:
raise ValueError
elif '.' in proposed_mac:
splitted_mac = []
splitted_dot_mac = proposed_mac.split('.')
if len(splitted_dot_mac) != 3:
raise ValueError
for octect in splitted_dot_mac:
if len(octect) > 4:
raise ValueError
else:
octect_len = len(octect)
padding = 4 - octect_len
splitted_mac.append(octect.zfill(padding+1))
elif ':' in proposed_mac:
splitted_mac = proposed_mac.split(':')
if len(splitted_mac) != 6:
raise ValueError
for octect in splitted_mac:
if len(octect) != 2:
raise ValueError
else:
raise ValueError
except ValueError:
module.fail_json(msg='Invalid MAC address format',
proposed_mac=proposed_mac)
joined_mac = ''.join(splitted_mac)
mac = [joined_mac[i:i+4] for i in range(0, len(joined_mac), 4)]
return '.'.join(mac).upper()
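# Illustrative normalization results (derived from the logic above):
#   normalize_mac('EE:EE:EE:EE:EE:EE', module) -> 'EEEE.EEEE.EEEE'
#   normalize_mac('b.b.b', module)             -> '000B.000B.000B'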
def main():
argument_spec = dict(
anycast_gateway_mac=dict(required=True, type='str'),
m_facts=dict(required=False, default=False, type='bool'),
)
argument_spec.update(nxos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
warnings = list()
check_args(module, warnings)
args = [
'anycast_gateway_mac'
|
uglyDwarf/evdevshift
|
scafold2.py
|
Python
|
mit
| 4,502
| 0.026877
|
#!/usr/bin/env python3
#
import re
import evdev
import subprocess
import time
import argparse
def process_test_line(line, controls):
tmp = line.strip()
fields = tmp.split()
operation = fields[1].lower()
if operation == 'receive':
target = fields[2].lower()
if target == 'syn':
return (operation, 0, 0, 0)
elif target == 'axis':
ctrl_type = evdev.ecodes.EV_ABS
else:
ctrl_type = evdev.ecodes.EV_KEY
control = int(fields[3])
value = int(fields[4])
else:
control_str = fields[2]
        if control_str not in controls:
print('Warning: Control {0} unknown.'.format(control_str))
print(line)
return None
(ctrl_type, control) = controls[control_str]
value = int(fields[3])
return (operation, ctrl_type, control, value)
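# The directives parsed above live in config comments matched by '//*'.
# Illustrative shapes (control names and values are assumptions):
#   //* send Trigger 1        -> emit the control named "Trigger" with value 1
#   //* receive button 288 1  -> expect an EV_KEY event, code 288, value 1
#   //* receive axis 0 127    -> expect an EV_ABS event, code 0, value 127
#   //* receive syn           -> expect a SYN event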
def read_config(fname):
sequence = []
devname = ''
controls = {}
f = open(fname)
test_re = re.compile('//\*(.*)$')
dev_re = re.compile('^\s*(grab\s+)?device\s+"([^"]+)"')
def_re = re.compile('^\s*(button|axis)\s+(\S+)\s*=\s*(\S+)')
for line in f:
m = test_re.match(line)
if m:
tst = process_test_line(line, controls)
if tst:
sequence.append(tst)
continue
m = dev_re.match(line)
if m:
devname = m.group(2)
continue
m = def_re.match(line)
if m:
if m.group(1) == 'axis':
controls[m.group(2)] = (evdev.ecodes.EV_ABS, int(m.group(3)));
else:
controls[m.group(2)] = (evdev.ecodes.EV_KEY, int(m.group(3)));
f.close()
return {'seq':sequence, 'devname': devname, 'controls': controls}
def make_cap(config):
axes = []
buttons = []
    # loop over the control names defined in the config dictionary
for ctrl in config['controls']:
(ctrl_type, ctrl_id) = config['controls'][ctrl]
if ctrl_type == evdev.ecodes.EV_KEY
|
:
buttons.append(ctrl_id)
else:
axes.append((ctrl_id, evdev.AbsInfo(0, 255, 0, 15, 0, 0)))
# sort the arrays
axes.sort()
buttons.sort()
cap = {}
if axes:
cap[evdev.ecodes.EV_ABS] = axes;
if buttons:
cap[evd
|
ev.ecodes.EV_KEY] = buttons;
return cap
def find_device(name):
patt = re.compile(name)
devices = [evdev.InputDevice(fn) for fn in evdev.list_devices()]
for device in devices:
if patt.match(device.name):
return device
parser = argparse.ArgumentParser(description = 'Test evdevshift using specially prepared config.')
parser.add_argument('--config', type=str, dest='arg')
args = parser.parse_args()
arg = args.arg
# read the config and prepare the caps of the source device
config = read_config(arg)
cap = make_cap(config)
# create the source device
ui = evdev.UInput(cap, name=config['devname'], vendor = 0xf30, product = 0x110, version=0x110)
eds_templ = subprocess.Popen(['./evdevshift_dbg', '--device={0}'.format(ui.device.fn), '--template=regrestest.conf'])
# start the evdevshift and point it to the config
eds = subprocess.Popen(['./evdevshift_dbg', '--config={0}'.format(arg)])
# temporary, to make sure the evdevshift started and created the device...
time.sleep(1)
# find the newly created device
dev = find_device('evdevshift')
print(dev)
#send the test sequence and check the outputs
buffered = False
problems = 0
for ev in config['seq']:
if ev[0] == 'send':
print('=====================================')
print('Sending (type {0} code {1} val {2})'.format(ev[1], ev[2], ev[3]))
sent = True
ui.write(ev[1], ev[2], ev[3])
else:
if sent:
#print('syn')
ui.syn()
sent = False
# give the stuff some time to pass the events
# not nice, will need to rework to avoid races
time.sleep(0.1)
in_ev = dev.read_one()
if in_ev:
if (in_ev.type == ev[1]) and (in_ev.code == ev[2]) and (in_ev.value == ev[3]):
print('Response OK (type {0} code {1} val {2})'.format(ev[1], ev[2], ev[3]))
else:
problems += 1
print('Error: Expected (type {0} code {1} val {2})'.format(ev[1], ev[2], ev[3]))
print(' Received (type {0} code {1} val {2})'.format(in_ev.type, in_ev.code, in_ev.value))
print('=====================================')
print('Expected error (Read wrong number of bytes (-1)!)')
ui.close()
time.sleep(1)
if problems == 0:
print('\n\nNo problems encountered!')
else:
print('\n\n{0} problems found.'.format(problems))
|
popen2/he_dns
|
update_he_dns.py
|
Python
|
mit
| 4,258
| 0.005636
|
#!/usr/bin/env python
import os
import re
import sys
import socket
import httplib
import urlparse
from urllib import urlencode
from urllib2 import urlopen
from argparse import ArgumentParser
from collections import OrderedDict
def _get_discover_url(given_discover_url, update_type):
if update_type == '4':
return given_discover_url
elif update_type == '6':
parsed_url = urlparse.urlsplit(given_discover_url)
for (family, socktype, proto, canonname, sockaddr) in socket.getaddrinfo(parsed_url.netloc, parsed_url.port, socket.AF_INET6):
address, port, flow_info, scope_id = sockaddr
return urlparse.urlunsplit((parsed_url.scheme, '[' + address + ']', parsed_url.path, parsed_url.query, parsed_url.fragment))
raise ValueError('Cannot find an IPv6 address with the discovery URL {}'.format(given_discover_url))
else:
raise ValueError('Unknown update type {!r}'.format(update_type))
def _discover_own_address(discover_url):
response = urlopen(discover_url)
code, content = response.code, response.read()
if code != httplib.OK:
print >>sys.stderr, 'error: could not discover own address.'
print >>sys.stderr, 'server returned {}, {}'.format(code, content)
raise SystemExit(1)
parsed_response = re.search(r'Your IP address is\s*:\s*(?P<ip_address>(\d+\.\d+\.\d+\.\d+)|([0-9a-fA-F:]+))', content)
if parsed_response is None:
print >>sys.stderr, 'error: could not parse own IP properly'
print >>sys.stderr, 'server returned:', content
raise SystemExit(2)
return parsed_response.groupdict()['ip_address']
def _send_update(hostname, password, update_url, ip_address):
data = urlencode(OrderedDict(hostname=hostname, password=password, myip=ip_address))
response = urlopen(update_url, data)
content = response.read().strip()
if response.code != httplib.OK:
print >>sys.stderr, 'error: update failed. error is {}'.format(response.code)
print >>sys.stderr, content
raise SystemExit(3)
parsed_content = re.match(r'^(?P<key>badauth|nochg|good|noipv6)(\s(?P<value>.*))?$', content)
if parsed_content is None:
print >>sys.stderr, 'error: unknown returned response: {}'.format(content)
raise SystemExit(4)
key, value = parsed_content.groupdict()['key'], parsed_content.groupdict()['value']
if key == 'badauth':
print >>sys.stderr, 'error: the domain name and password do not match'
print >>sys.stderr, 'Make sure you are using a domain name that has been marked for dynamic upd
|
ates,'
print >>sys.stderr, 'and that the password used is the update key (not your account password).'
raise SystemExit(5)
elif key == 'nochg':
print >>sys.stderr, 'no update required (IP is {})'.format(value)
elif key == 'no
|
ipv6':
print >>sys.stderr, 'cannot update ipv6 for this hostname'
elif key == 'good':
print >>sys.stderr, 'update complete: {}'.format(value)
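# Illustrative server replies handled above (addresses are placeholders):
#   'good 192.0.2.1'   -> update applied
#   'nochg 192.0.2.1'  -> address unchanged, nothing to do
#   'badauth'          -> hostname/update-key mismatch (exits with code 5)
#   'noipv6'           -> hostname has no IPv6 record to update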
def main():
parser = ArgumentParser()
parser.add_argument('hostname', help='The hostname (domain name) to be updated. Make sure this domain has been marked for dynamic DNS updating')
parser.add_argument('password', help='Update key for this domain (as generated from the zone management interface)')
parser.add_argument('-u', '--update-url', default='https://dyn.dns.he.net/nic/update',
help='URL to post the update to')
parser.add_argument('-d', '--discover-url', default='http://checkip.dns.he.net',
help='Service for discovery of own address')
parser.add_argument('-t', '--type', default='4',
help='Type of update: either "4" for IPv4 or "6" for IPv6')
parser.add_argument('-i', '--ip-address', default=None,
help='The IP address to be updated for this domain. Leave blank to auto-discover')
args = parser.parse_args()
if args.ip_address is None:
discover_url = _get_discover_url(args.discover_url, args.type)
args.ip_address = _discover_own_address(discover_url)
_send_update(args.hostname, args.password, args.update_url, args.ip_address)
if __name__ == '__main__':
main()
|
390910131/Misago
|
misago/threads/urls/privatethreads.py
|
Python
|
gpl-2.0
| 5,045
| 0.008523
|
from django.conf.urls import patterns, include, url
from misago.threads.views.privatethreads import PrivateThreadsView
urlpatterns = patterns('',
url(r'^private-threads/$', PrivateThreadsView.as_view(), name='private_threads'),
url(r'^private-threads/(?P<page>\d+)/$', PrivateThreadsView.as_view(), name='private_threads'),
url(r'^private-threads/sort-(?P<sort>[\w-]+)/$', PrivateThreadsView.as_view(), name='private_threads'),
url(r'^private-threads/sort-(?P<sort>[\w-]+)/(?P<page>\d+)/$', PrivateThreadsView.as_view(), name='private_threads'),
url(r'^private-threads/show-(?P<show>[\w-]+)/$', PrivateThreadsView.as_view(), name='private_threads'),
url(r'^private-threads/show-(?P<show>[\w-]+)/(?P<page>\d+)/$', PrivateThreadsView.as_view(), name='private_threads'),
url(r'^private-threads/sort-(?P<sort>[\w-]+)/show-(?P<show>[\w-]+)/$', PrivateThreadsView.as_view(), name='private_threads'),
url(r'^private-threads/sort-(?P<sort>[\w-]+)/show-(?P<show>[\w-]+)/(?P<page>\d+)/$', PrivateThreadsView.as_view(), name='private_threads'),
)
# thread view
from misago.threads.views.privatethreads import ThreadView
urlpatterns += patterns('',
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/$', ThreadView.as_view(), name='private_thread'),
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/(?P<page>\d+)/$', ThreadView.as_view(), name='private_thread'),
)
# goto views
from misago.threads.views.privatethreads import (GotoLastView, GotoNewView,
GotoPostView)
urlpatterns += patterns('',
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/last/$', GotoLastView.as_view(), name='private_thread_last'),
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/new/$', GotoNewView.as_view(), name='private_thread_new'),
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/post-(?P<post_id>\d+)/$', GotoPostView.as_view(), name='private_thread_post'),
)
# reported posts views
from misago.threads.views.privatethreads import ReportedPostsListView
urlpatterns += patterns('',
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/reported-posts/$', ReportedPostsListView.as_view(), name='private_thread_reported'),
)
# participants views
from misago.threads.views.privatethreads import (ThreadParticipantsView,
EditThreadParticipantsView,
AddThreadParticipantsView,
RemoveThreadParticipantView,
LeaveThreadView)
urlpatterns += patterns('',
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/participants/$', ThreadParticipantsView.as_view(), name='private_thread_participants'),
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/edit-participants/$', EditThreadParticipantsView.as_view(), name='private_thread_edit_participants'),
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/remove-participant/(?P<user_id>\d+)/$', RemoveThreadParticipantView.as_view(), name='private_thread_remove_participant'),
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/add-participants/$', AddThreadParticipantsView.as_view(), name='private_thread_add_participants'),
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/leave/$', LeaveThreadView.as_view(), name='private_thread_leave'),
)
# post views
from misago.threads.views.privatethreads import (QuotePostView, HidePostView,
UnhidePostView,
DeletePostView,
ReportPostView)
urlpatterns += patterns('',
url(r'^private-post/(?P<post_id>\d+)/quote/$', QuotePostView.as_view(), name='quote_private_post'),
url(r'^private-post/(?P<post_id>\d+)/unhide/$', UnhidePostView.as_view(), name='unhide_private_post'),
url(r'^private-post/(?P<post_id>\
|
d+)/hide/$', HidePostView.as_view(), name='hide_private_post'),
url(r'^private-post/(?P<post_id>\d+)/delete/$', DeletePostView.as_view(), name='delete_private_post'),
url(r'^private-post/(?P<post_id>\d+)/report/$', Repor
|
tPostView.as_view(), name='report_private_post'),
)
# events view
from misago.threads.views.privatethreads import EventsView
urlpatterns += patterns('',
url(r'^edit-private-event/(?P<event_id>\d+)/$', EventsView.as_view(), name='edit_private_event'),
)
# posting views
from misago.threads.views.privatethreads import PostingView
urlpatterns += patterns('',
url(r'^start-private-thread/$', PostingView.as_view(), name='start_private_thread'),
url(r'^reply-private-thread/(?P<thread_id>\d+)/$', PostingView.as_view(), name='reply_private_thread'),
url(r'^edit-private_post/(?P<thread_id>\d+)/(?P<post_id>\d+)/edit/$', PostingView.as_view(), name='edit_private_post'),
)
|
billvsme/videoSpider
|
webs/douban/tasks/down_celebrity_images.py
|
Python
|
mit
| 1,702
| 0
|
# -*- coding: utf-8 -*-
import ast
import os
import requests
import models
from config import config, sqla
from gevent.pool import Pool
from helpers import random_str, down
base_path = config.get('photo', 'path')
base_path = os.path.join(base_path, 'celebrity')
cookies = {
'bid': ''
}
def create_down(str_urls, douban_id, category):
urls = ast.literal_eval(str_urls or "[]")
path = os.path.join(base_path, category)
for url in urls:
filename = str(douban_id) + '_' + url.split('/')[-1].strip('?')
cookies['bid'] = random_str(11)
down(url, cookies, path, filename)
def create_requests_and_save_datas(douban_id):
session = sqla['session']
cookies['bid'] = random_str(11)
celebrity = session.query(models.Celebrity).filter_by(
douban_id=douban_id
).one()
cover_url = celebrity.cover
thumbnail_co
|
ver_url = celebrity.thumbnail_cover
photos_url = celebrity.photos
thumbnail_photos_url = celebrity.thumbnail_photos
down(
cover_url,
cookies,
os.path.join(base_path, 'cover'),
str(douban_id)+'_'+cover_url.split('/')[-1].strip('?')
)
down(
thumbnail_cover_url,
cookies,
os.path.join(base_path, 'thumbnail_cover'),
str(douban_id)+'_'+cover_url.split('/')[-1].strip('?'
|
)
)
create_down(photos_url, douban_id, 'photos')
create_down(thumbnail_photos_url, douban_id, 'thumbnail_photos')
def task(douban_ids, pool_number):
pool = Pool(pool_number)
for douban_id in douban_ids:
pool.spawn(
create_requests_and_save_datas,
douban_id=douban_id
)
pool.join()
|
rhefner1/ghidonations
|
gaesessions/__init__.py
|
Python
|
apache-2.0
| 21,922
| 0.002144
|
"""A fast, lightweight, and secure session WSGI middleware for use with GAE."""
import datetime
import hashlib
import hmac
import logging
import os
import pickle
import threading
import time
from Cookie import CookieError, SimpleCookie
from base64 import b64decode, b64encode
from google.appengine.api import memcache
from google.appengine.ext import db
# Configurable cookie options
COOKIE_NAME_PREFIX = "DgU" # identifies a cookie as being one used by gae-sessions (so you can set cookies too)
COOKIE_PATH = "/"
DEFAULT_COOKIE_ONLY_THRESH = 10240 # 10KB: GAE only allows ~16000B in HTTP header - leave ~6KB for other info
DEFAULT_LIFETIME = datetime.timedelta(days=7)
# constants
SID_LEN = 43 # timestamp (10 chars) + underscore + md5 (32 hex chars)
SIG_LEN = 44 # base 64 encoded HMAC-SHA256
MAX_COOKIE_LEN = 4096
EXPIRE_COOKIE_FMT = ' %s=; expires=Wed, 01-Jan-1970 00:00:00 GMT; Path=' + COOKIE_PATH
COOKIE_FMT = ' ' + COOKIE_NAME_PREFIX + '%02d="%s"; %sPath=' + COOKIE_PATH + '; HttpOnly'
COOKIE_FMT_SECURE = COOKIE_FMT + '; Secure'
COOKIE_DATE_FMT = '%a, %d-%b-%Y %H:%M:%S GMT'
COOKIE_OVERHEAD = len(COOKIE_FMT % (0, '', '')) + len(
'expires=Xxx, xx XXX XXXX XX:XX:XX GMT; ') + 150 # 150=safety margin (e.g., in case browser uses 4000 instead of 4096)
MAX_DATA_PER_COOKIE = MAX_COOKIE_LEN - COOKIE_OVERHEAD
_tls = threading.local()
def get_current_session():
"""Returns the session associated with the current request."""
return _tls.current_session
def set_current_session(session):
"""Sets the session associated with the current request."""
_tls.current_session = session
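# Minimal usage sketch (assumes the session middleware has populated the
# thread-local for the current request and that Session exposes dict-style
# access, which is not shown in this excerpt):
#   session = get_current_session()
#   session['user_id'] = 42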
def is_gaesessions_key(k):
return k.startswith(COOKIE_NAME_PREFIX)
class SessionModel(db.Model):
"""Contains session data. key_name is the session ID and pdump contains a
pickled dictionary which maps session variables to their values."""
pdump = db.BlobProperty()
class Session(object):
"""Manages loading, reading/writing key-value pairs, and saving of a session.
``sid`` - if set, then the session for that sid (if any) is loaded. Otherwise,
sid will be loaded from the HTTP_COOKIE (if any).
"""
DIRTY_BUT_DONT_PERSIST_TO_DB = 1
def __init__(self, sid=None, lifetime=DEFAULT_LIFETIME, no_datastore=False,
cookie_only_threshold=DEFAULT_COOKIE_ONLY_THRESH, cookie_key=None):
self._accessed = False
self.sid = None
self.cookie_keys = []
self.cookie_data = None
self.data = {}
self.dirty = False # has the session been changed?
self.lifetime = lifetime
self.no_datastore = no_datastore
self.cookie_only_thresh = cookie_only_threshold
self.base_key = cookie_key
if sid:
self.__set_sid(sid, False)
self.data = None
else:
self.__read_cookie()
@staticmethod
def __compute_hmac(base_key, sid, text):
"""Computes the signature for text given base_key and sid."""
key = base_key + sid
return b64encode(hmac.new(key, text, hashlib.sha256).digest())
def __read_cookie(self):
"""Reads the HTTP Cookie and loads the sid and data from it (if any)."""
try:
# check the cookie to see if a session has been started
cookie = SimpleCookie(os.environ['HTTP_COOKIE'])
self.cookie_keys = filter(is_gaesessions_key, cookie.keys())
if not self.cookie_keys:
return # no session yet
self.cookie_keys.sort()
data = ''.join(cookie[k].value for k in self.cookie_keys)
i = SIG_LEN + SID_LEN
sig, sid, b64pdump = data[:SIG_LEN], data[SIG_LEN:i], data[i:]
pdump = b64decode(b64pdump)
actual_sig = Session.__compute_hmac(self.base_key, sid, pdump)
if sig == actual_sig:
self.__set_sid(sid, False)
# check for expiration and terminate the session if it has expired
if self.get_expiration() != 0 and time.time() > self.get_expiration():
return self.terminate()
if pdump:
self.data = self.__decode_data(pdump)
else:
self.data = None # data is in memcache/db: load it on-demand
else:
logging.warn('cookie with invalid sig received from %s: %s' % (os.environ.get('REMOTE_ADDR'), b64pdump))
except (CookieError, KeyError, IndexError, TypeError):
# there is no cookie (i.e., no session) or the cookie is invalid
self.terminate(False)
def make_cookie_headers(self):
"""Returns a list of cookie headers to send (if any)."""
# expire all cookies if the session has ended
if not self.sid:
return [EXPIRE_COOKIE_FMT % k for k in self.cookie_keys]
if self.cookie_data is None:
return [] # no cookie headers need to be sent
# build the cookie header(s): includes sig, sid, and cookie_data
if self.is_ssl_only():
m = MAX_DATA_PER_COOKIE - 8
fmt = COOKIE_FMT_SECURE
else:
m = MAX_DATA_PER_COOKIE
fmt = COOKIE_FMT
sig = Session.__compute_hmac(self.base_key, self.sid, self.cookie_data)
cv = sig + self.sid + b64encode(self.cookie_data)
num_cookies = 1 + (len(cv) - 1) / m
if self.get_expiration() > 0:
ed = "expires=%s; " % datetime.datetime.fromtimestamp(self.get_
|
expiration()).strftime(COOKIE_DATE_FMT)
else:
ed = ''
cookies = [fmt % (i, cv[i * m:i * m + m], ed) for i in xrange(num_cookies)]
# expire old cookies which aren't needed anymore
old_cookies = xrange(num_cookies, len(self.cookie_k
|
eys))
key = COOKIE_NAME_PREFIX + '%02d'
cookies_to_ax = [EXPIRE_COOKIE_FMT % (key % i) for i in old_cookies]
return cookies + cookies_to_ax
def is_active(self):
"""Returns True if this session is active (i.e., it has been assigned a
session ID and will be or has been persisted)."""
return self.sid is not None
def is_ssl_only(self):
"""Returns True if cookies set by this session will include the "Secure"
attribute so that the client will only send them over a secure channel
like SSL)."""
return self.sid is not None and self.sid[-33] == 'S'
def is_accessed(self):
"""Returns True if any value of this session has been accessed."""
return self._accessed
def ensure_data_loaded(self):
"""Fetch the session data if it hasn't been retrieved it yet."""
self._accessed = True
if self.data is None and self.sid:
self.__retrieve_data()
def get_expiration(self):
"""Returns the timestamp at which this session will expire."""
try:
return int(self.sid[:-33])
except:
return 0
def __make_sid(self, expire_ts=None, ssl_only=False):
"""Returns a new session ID."""
# make a random ID (random.randrange() is 10x faster but less secure?)
if expire_ts is None:
expire_dt = datetime.datetime.now() + self.lifetime
expire_ts = int(time.mktime((expire_dt).timetuple()))
else:
expire_ts = int(expire_ts)
if ssl_only:
sep = 'S'
else:
sep = '_'
return ('%010d' % expire_ts) + sep + hashlib.md5(os.urandom(16)).hexdigest()
@staticmethod
def __encode_data(d):
"""Returns a "pickled+" encoding of d. d values of type db.Model are
protobuf encoded before pickling to minimize CPU usage & data size."""
# separate protobufs so we'll know how to decode (they are just strings)
eP = {} # for models encoded as protobufs
eO = {} # for everything else
for k, v in d.iteritems():
if isinstance(v, db.Model):
eP[k] = db.model_to_protobuf(v)
else:
eO[k] = v
return pickle.dumps((eP, eO), 2)
@staticmethod
def __decode_data(pdump):
"""Returns a data
|
Jonathanliu92251/watson-conversation
|
wechat/watson-wechat.py
|
Python
|
apache-2.0
| 1,140
| 0.032456
|
import itchat, time, re
from itchat.content import *
import urllib2, urllib
import json
from watson_developer_cloud import ConversationV1
response={'context':{}}
@i
|
tchat.msg_register([TEXT])
def text_reply(msg):
global response
request_text = msg['Text'].encode('UTF-8')
conversation = ConversationV1(
username='9c359fba-0692-4afa-afb1-bd5bf4d7e367',
password='5Id2zfapBV6e',
version='2017-04-21')
# replace with your own worksp
|
ace_id
workspace_id = 'd3e50587-f36a-4bdf-bf3e-38c382e8d63a'
print "request ==>", request_text
try:
type(eval(response))
except:
print "first call"
response = conversation.message(workspace_id=workspace_id, message_input={
'text': request_text}, context=response['context'])
else:
print "continue call"
response = conversation.message(workspace_id=workspace_id, message_input={
'text': request_text}, context=response['context'])
if len( response['output']['text']) >0:
response_text = response['output']['text'][0]
else:
response_text = "No message"
itchat.send( response_text, msg['FromUserName'])
itchat.auto_login()
itchat.run(debug=True)
|
yamateh/robotframework
|
src/robot/running/arguments/argumentvalidator.py
|
Python
|
apache-2.0
| 2,829
| 0.000353
|
# Copyright 2008-2013 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from robot.errors import DataError
from robot.utils import plural_or_not
from robot.variables import is_list_var
class ArgumentValidator(object):
def __init__(self, argspec):
self._argspec = argspec
def validate(self, positional, named, dryrun=False):
if dryrun and any(is_list_var(arg) for arg in positional):
return
self._validate_no_multiple_values(positional, named, self._argspec)
self._validate_limits(positional, named, self._argspec)
self._validate_no_mandatory_missing(positional, named, self._argspec)
def _validate_limits(self, positional, named, spec):
count = len(positional) + self._named_positi
|
onals(named, spec)
if not spec.minargs <= count <= spec.maxargs:
self._ra
|
ise_wrong_count(count, spec)
def _named_positionals(self, named, spec):
if not spec.supports_named:
return 0
return sum(1 for n in named if n in spec.positional)
def _raise_wrong_count(self, count, spec):
minend = plural_or_not(spec.minargs)
if spec.minargs == spec.maxargs:
expected = '%d argument%s' % (spec.minargs, minend)
elif not spec.varargs:
expected = '%d to %d arguments' % (spec.minargs, spec.maxargs)
else:
expected = 'at least %d argument%s' % (spec.minargs, minend)
if spec.kwargs:
expected = expected.replace('argument', 'non-keyword argument')
raise DataError("%s '%s' expected %s, got %d."
% (spec.type, spec.name, expected, count))
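    # Illustrative failure (names are assumptions): a keyword 'My Keyword' with
    # spec.minargs == 1 and spec.maxargs == 2, called with three arguments,
    # would raise: Keyword 'My Keyword' expected 1 to 2 arguments, got 3.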
def _validate_no_multiple_values(self, positional, named, spec):
for name in spec.positional[:len(positional)]:
if name in named and spec.supports_named:
raise DataError("%s '%s' got multiple values for argument '%s'."
% (spec.type, spec.name, name))
def _validate_no_mandatory_missing(self, positional, named, spec):
for name in spec.positional[len(positional):spec.minargs]:
if name not in named:
raise DataError("%s '%s' missing value for argument '%s'."
% (spec.type, spec.name, name))
|
karanjeets/CSCI-544
|
Experimental/classifier/tweet_can_es.py
|
Python
|
apache-2.0
| 5,270
| 0.009298
|
# -*- coding: utf-8 -*-
import nltk
import csv
import random
import codecs
import re
from nltk.corpus import stopwords
stopset = list(set(stopwords.words('spanish')))
hil_tweets = []
trump_tweets = []
bernie_tweets = []
cruz_tweets = []
classes = {}
def transform(temp):
if temp == "imo":
return "opinion"
elif temp == "inches":
return "inch"
elif temp == "including" or temp == "included" or temp == "includes":
return "include"
elif temp == "issued" or temp == "issues":
return "issue"
elif temp == "ppl":
return "people"
elif temp == "pri
|
ces":
return "price"
elif temp == "say":
return "says"
elif temp == "shocked" or temp == "shocker" or temp == "shocking":
return "shock"
#elif temp == "sooooo" or temp == "soooo" or temp == "sooo" or temp == "soo":
# return "so"
return temp
def getPureWord(word):
#if str.startswith(word,'@'):
# return ""
#print word
temp = word.
|
lower()
if str.startswith(temp,"http"):
return ""
temp = ''.join(e for e in temp if e.isalpha())
#if temp not in stop_words and temp !='':
if temp not in stopset and temp !='':
return transform(temp)
else:
return ""
def purifyText(input):
output = input.replace('\r','').replace('\n','')
op = re.sub(r'\w+:\/{2}[\d\w-]+(\.[\d\w-]+)*(?:(?:\/[^\s/]*))*', '', output)
op1 = " ".join(getPureWord(w) for w in op.split())
return op1.strip()
def buildHash():
#Hillary, Bernie, Trump, Cruz, GOP, DEM
classes["trump"] = ["donald","trump","donaldtrump"]
classes["cruz"] = ["tedcruz","cruz","ted"]
classes["hillary"] = ["hillaryclinton","hillary","clinton"]
classes["bernie"] = ["berniesanders","bernie","sanders","bern"]
classes["gop"] = ["gop","gopdebate","republicans"]
classes["dem"] = ["dem","demdebate","democrats","Democratic","democrata","democrat"]
def getEntities(line):
line = line.lower()
op = set()
for key in classes:
temp = classes[key]
#print temp
for t in temp:
#print type(line)
if t.lower() in line:
op.add(key)
if key in op:
break
return list(op)
def unicode_csv_reader(utf8_data, dialect=csv.excel, **kwargs):
csv_reader = csv.reader(utf8_data, dialect=dialect, **kwargs)
for row in csv_reader:
yield [unicode(cell, 'utf-8') for cell in row]
# Process Tweet
def processTweet(tweet):
tweet = tweet.lower()
# Convert www.* or https?://* to URL
tweet = re.sub('((www\.[^\s]+)|(https?://[^\s]+))', 'URL', tweet)
# Convert @username to AT_USER
tweet = re.sub('@[^\s]+', 'AT_USER', tweet)
# Remove additional white spaces
tweet = re.sub('[\s]+', ' ', tweet)
# Replace #word with word
tweet = re.sub(r'#([^\s]+)', r'\1', tweet)
# trim
tweet = tweet.strip('\'"')
return tweet
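# Illustrative transformation (the sample tweet is an assumption):
#   processTweet("Check http://t.co/x #Bernie2016 @someone")
#   -> 'check URL bernie2016 AT_USER'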
def tweet_word(words):
return dict([(word.decode('utf-8'), True) for word in words.split() if word.decode('utf-8') not in stopset])
buildHash()
test_set = []
for x in ['a', 'b', 'c', 'd', 'e']:
#for x in ['annotatedTrump2.csv']:
with codecs.open('../python/Annotated4/annotated.csva' + x, 'rb') as csvfile:
tweets = csv.reader(csvfile, delimiter=',', quotechar='\'')
for tweet in tweets:
if tweet[12] == 'berniePositive':
bernie_tweets.append(purifyText(tweet[13]))
elif tweet[12] == 'hillaryPositive':
hil_tweets.append(purifyText(tweet[13]))
elif tweet[12] == 'cruzPositive':
cruz_tweets.append(purifyText(tweet[13]))
elif tweet[12] == 'trumpPositive':
trump_tweets.append(purifyText(tweet[13]))
elif tweet[12] == 'nuetral':
test_set.append(tweet)
labeled_words = ([(word, 'hillary') for word in hil_tweets] + [(word, 'trump') for word in trump_tweets] + [(word, 'cruz') for word in cruz_tweets] + [(word, 'bernie') for word in bernie_tweets])
random.shuffle(labeled_words)
featuresets = [(tweet_word(n), classify) for (n, classify) in labeled_words]
train_set = featuresets
# Generating Test Set...
'''
for x in ['testTrump.csv']:
with codecs.open('../python/annotated2/' + x, 'rb') as csvfile:
tweets = csv.reader(csvfile, delimiter=',', quotechar='\'')
for tweet in tweets:
if tweet[7] == '0':
test_set.append(tweet)
'''
# Ref - http://www.nltk.org/api/nltk.classify.html
# ALGORITHMS = ['GIS', 'IIS', 'MEGAM', 'TADM']
algorithm = nltk.classify.MaxentClassifier.ALGORITHMS[1]
classifier = nltk.MaxentClassifier.train(train_set, algorithm, max_iter=3)
classifier.show_most_informative_features(10)
#print(nltk.classify.accuracy(classifier, test_set))
i = 1
with open("canoutput.csv", 'wb') as f:
for tweet in test_set:
op1 = purifyText(tweet[13])
op = getEntities(op1)
if "trump" in op or "bernie" in op or "hillary" in op or "cruz" in op:
result = classifier.classify(tweet_word(op1))
print tweet[13]
print result
#else:
# print result + "Positive"
i += 1
if i > 100:
break
|
UManPychron/pychron
|
pychron/media_storage/ftp_storage.py
|
Python
|
apache-2.0
| 2,366
| 0.000423
|
# ===============================================================================
# Copyright 2016 ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
import os
import socket
from ftplib import FTP
import paramiko
from pychron.media_storage.storage import RemoteStorage
class FTPStorage(RemoteStorage):
url_name = 'FTP'
def put(self, src, dest):
client = self._get_client()
self._put(client, src, dest)
self._close_client(client)
def _close_client(self, client):
client.quit()
def _get_client(self):
client = FTP(self.host)
client.login(self.username, self.password)
return client
def _put(self, client, src, dest):
head, ext = os.path.splitext(src)
if ext in ('.jpg', '.png'):
with open(src, 'rb') as rfile:
client.storbinary('STOR {}'.format(dest), rfile, 1024)
else:
with open(src, 'r') as rfile:
client.storlines('STOR {}'.format(dest), rfile)
class SFTPStorage(FTPStorage):
url_name = 'SFTP'
def _get_client(self):
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
try:
ssh.connect(self.host, username=self.username, password=self.password, timeout=2)
except (socket.timeout, paramiko.AuthenticationException):
self.warning_dialog('Could not connect to s
|
erver')
return
return ssh.open_sft
|
p()
def _close_client(self, client):
client.close()
def _put(self, client, src, dest):
client.put(src, dest)
# ============= EOF =============================================
|
pacoqueen/cican
|
utils/gmapcatcher/gmapcatcher/pyGPSD/nmea/serial/win32.py
|
Python
|
gpl-3.0
| 9,044
| 0.006634
|
from ctypes import *
from ctypes.wintypes import HANDLE
from ctypes.wintypes import BOOL
from ctypes.wintypes import LPCWSTR
_stdcall_libraries = {}
_stdcall_libraries['kernel32'] = WinDLL('kernel32')
from ctypes.wintypes import DWORD
from ctypes.wintypes import WORD
from ctypes.wintypes import BYTE
INVALID_HANDLE_VALUE = HANDLE(-1).value
class _SECURITY_ATTRIBUTES(Structure):
pass
LPSECURITY_ATTRIBUTES = POINTER(_SECURITY_ATTRIBUTES)
CreateEventW = _stdcall_libraries['kernel32'].CreateEventW
CreateEventW.restype = HANDLE
CreateEventW.argtypes = [LPSECURITY_ATTRIBUTES, BOOL, BOOL, LPCWSTR]
CreateEvent = CreateEventW # alias
CreateFileW = _stdcall_libraries['kernel32'].CreateFileW
CreateFileW.restype = HANDLE
CreateFileW.argtypes = [LPCWSTR, DWORD, DWORD, LPSECURITY_ATTRIBUTES, DWORD, DWORD, HANDLE]
CreateFile = CreateFileW # alias
class _OVERLAPPED(Structure):
pass
OVERLAPPED = _OVERLAPPED
class _COMSTAT(Structure):
pass
COMSTAT = _COMSTAT
class _DCB(Structure):
pass
DCB = _DCB
class _COMMTIMEOUTS(Structure):
pass
COMMTIMEOUTS = _COMMTIMEOUTS
GetLastError = _stdcall_libraries['kernel32'].GetLastError
GetLastError.restype = DWORD
GetLastError.argtypes = []
LPOVERLAPPED = POINTER(_OVERLAPPED)
LPDWORD = POINTER(DWORD)
GetOverlappedResult = _stdcall_libraries['kernel32'].GetOverlappedResult
GetOverlappedResult.restype = BOOL
GetOverlappedResult.argtypes = [HANDLE, LPOVERLAPPED, LPDWORD, BOOL]
ResetEvent = _stdcall_libraries['kernel32'].ResetEvent
ResetEvent.restype = BOOL
ResetEvent.argtypes = [HANDLE]
LPCVOID = c_void_p
WriteFile = _stdcall_libraries['kernel32'].WriteFile
WriteFile.restype = BOOL
WriteFile.argtypes = [HANDLE, LPCVOID, DWORD, LPDWORD, LPOVERLAPPED]
LPVOID = c_void_p
ReadFile = _stdcall_libraries['kernel32'].ReadFile
ReadFile.restype = BOOL
ReadFile.argtypes = [HANDLE, LPVOID, DWORD, LPDWORD, LPOVERLAPPED]
CloseHandle = _stdcall_libraries['kernel32'].CloseHandle
CloseHandle.restype = BOOL
CloseHandle.argtypes = [HANDLE]
ClearCommBreak = _stdcall_libraries['kernel32'].ClearCommBreak
ClearCommBreak.restype = BOOL
ClearCommBreak.argtypes = [HANDLE]
LPCOMSTAT = POINTER(_COMSTAT)
ClearCommError = _stdcall_libraries['kernel32'].ClearCommE
|
rror
ClearCommError.restype = BOOL
ClearCommError.argtypes = [HANDLE, LP
|
DWORD, LPCOMSTAT]
SetupComm = _stdcall_libraries['kernel32'].SetupComm
SetupComm.restype = BOOL
SetupComm.argtypes = [HANDLE, DWORD, DWORD]
EscapeCommFunction = _stdcall_libraries['kernel32'].EscapeCommFunction
EscapeCommFunction.restype = BOOL
EscapeCommFunction.argtypes = [HANDLE, DWORD]
GetCommModemStatus = _stdcall_libraries['kernel32'].GetCommModemStatus
GetCommModemStatus.restype = BOOL
GetCommModemStatus.argtypes = [HANDLE, LPDWORD]
LPDCB = POINTER(_DCB)
GetCommState = _stdcall_libraries['kernel32'].GetCommState
GetCommState.restype = BOOL
GetCommState.argtypes = [HANDLE, LPDCB]
LPCOMMTIMEOUTS = POINTER(_COMMTIMEOUTS)
GetCommTimeouts = _stdcall_libraries['kernel32'].GetCommTimeouts
GetCommTimeouts.restype = BOOL
GetCommTimeouts.argtypes = [HANDLE, LPCOMMTIMEOUTS]
PurgeComm = _stdcall_libraries['kernel32'].PurgeComm
PurgeComm.restype = BOOL
PurgeComm.argtypes = [HANDLE, DWORD]
SetCommBreak = _stdcall_libraries['kernel32'].SetCommBreak
SetCommBreak.restype = BOOL
SetCommBreak.argtypes = [HANDLE]
SetCommMask = _stdcall_libraries['kernel32'].SetCommMask
SetCommMask.restype = BOOL
SetCommMask.argtypes = [HANDLE, DWORD]
SetCommState = _stdcall_libraries['kernel32'].SetCommState
SetCommState.restype = BOOL
SetCommState.argtypes = [HANDLE, LPDCB]
SetCommTimeouts = _stdcall_libraries['kernel32'].SetCommTimeouts
SetCommTimeouts.restype = BOOL
SetCommTimeouts.argtypes = [HANDLE, LPCOMMTIMEOUTS]
WaitForSingleObject = _stdcall_libraries['kernel32'].WaitForSingleObject
WaitForSingleObject.restype = DWORD
WaitForSingleObject.argtypes = [HANDLE, DWORD]
ONESTOPBIT = 0 # Variable c_int
TWOSTOPBITS = 2 # Variable c_int
ONE5STOPBITS = 1
NOPARITY = 0 # Variable c_int
ODDPARITY = 1 # Variable c_int
EVENPARITY = 2 # Variable c_int
MARKPARITY = 3
SPACEPARITY = 4
RTS_CONTROL_HANDSHAKE = 2 # Variable c_int
RTS_CONTROL_DISABLE = 0 # Variable c_int
RTS_CONTROL_ENABLE = 1 # Variable c_int
SETRTS = 3
CLRRTS = 4
DTR_CONTROL_HANDSHAKE = 2 # Variable c_int
DTR_CONTROL_DISABLE = 0 # Variable c_int
DTR_CONTROL_ENABLE = 1 # Variable c_int
SETDTR = 5
CLRDTR = 6
MS_DSR_ON = 32 # Variable c_ulong
EV_RING = 256 # Variable c_int
EV_PERR = 512 # Variable c_int
EV_ERR = 128 # Variable c_int
SETXOFF = 1 # Variable c_int
EV_RXCHAR = 1 # Variable c_int
GENERIC_WRITE = 1073741824 # Variable c_long
PURGE_TXCLEAR = 4 # Variable c_int
FILE_FLAG_OVERLAPPED = 1073741824 # Variable c_int
EV_DSR = 16 # Variable c_int
MAXDWORD = 4294967295L # Variable c_uint
EV_RLSD = 32 # Variable c_int
ERROR_IO_PENDING = 997 # Variable c_long
MS_CTS_ON = 16 # Variable c_ulong
EV_EVENT1 = 2048 # Variable c_int
EV_RX80FULL = 1024 # Variable c_int
PURGE_RXABORT = 2 # Variable c_int
FILE_ATTRIBUTE_NORMAL = 128 # Variable c_int
PURGE_TXABORT = 1 # Variable c_int
SETXON = 2 # Variable c_int
OPEN_EXISTING = 3 # Variable c_int
MS_RING_ON = 64 # Variable c_ulong
EV_TXEMPTY = 4 # Variable c_int
EV_RXFLAG = 2 # Variable c_int
MS_RLSD_ON = 128 # Variable c_ulong
GENERIC_READ = 2147483648L # Variable c_ulong
EV_EVENT2 = 4096 # Variable c_int
EV_CTS = 8 # Variable c_int
EV_BREAK = 64 # Variable c_int
PURGE_RXCLEAR = 8 # Variable c_int
ULONG_PTR = c_ulong
INFINITE = 0xFFFFFFFFL
class N11_OVERLAPPED4DOLLAR_48E(Union):
pass
class N11_OVERLAPPED4DOLLAR_484DOLLAR_49E(Structure):
pass
N11_OVERLAPPED4DOLLAR_484DOLLAR_49E._fields_ = [
('Offset', DWORD),
('OffsetHigh', DWORD),
]
PVOID = c_void_p
N11_OVERLAPPED4DOLLAR_48E._anonymous_ = ['_0']
N11_OVERLAPPED4DOLLAR_48E._fields_ = [
('_0', N11_OVERLAPPED4DOLLAR_484DOLLAR_49E),
('Pointer', PVOID),
]
_OVERLAPPED._anonymous_ = ['_0']
_OVERLAPPED._fields_ = [
('Internal', ULONG_PTR),
('InternalHigh', ULONG_PTR),
('_0', N11_OVERLAPPED4DOLLAR_48E),
('hEvent', HANDLE),
]
_SECURITY_ATTRIBUTES._fields_ = [
('nLength', DWORD),
('lpSecurityDescriptor', LPVOID),
('bInheritHandle', BOOL),
]
_COMSTAT._fields_ = [
('fCtsHold', DWORD, 1),
('fDsrHold', DWORD, 1),
('fRlsdHold', DWORD, 1),
('fXoffHold', DWORD, 1),
('fXoffSent', DWORD, 1),
('fEof', DWORD, 1),
('fTxim', DWORD, 1),
('fReserved', DWORD, 25),
('cbInQue', DWORD),
('cbOutQue', DWORD),
]
_DCB._fields_ = [
('DCBlength', DWORD),
('BaudRate', DWORD),
('fBinary', DWORD, 1),
('fParity', DWORD, 1),
('fOutxCtsFlow', DWORD, 1),
('fOutxDsrFlow', DWORD, 1),
('fDtrControl', DWORD, 2),
('fDsrSensitivity', DWORD, 1),
('fTXContinueOnXoff', DWORD, 1),
('fOutX', DWORD, 1),
('fInX', DWORD, 1),
('fErrorChar', DWORD, 1),
('fNull', DWORD, 1),
('fRtsControl', DWORD, 2),
('fAbortOnError', DWORD, 1),
('fDummy2', DWORD, 17),
('wReserved', WORD),
('XonLim', WORD),
('XoffLim', WORD),
('ByteSize', BYTE),
('Parity', BYTE),
('StopBits', BYTE),
('XonChar', c_char),
('XoffChar', c_char),
('ErrorChar', c_char),
('EofChar', c_char),
('EvtChar', c_char),
('wReserved1', WORD),
]
_COMMTIMEOUTS._fields_ = [
('ReadIntervalTimeout', DWORD),
('ReadTotalTimeoutMultiplier', DWORD),
('ReadTotalTimeoutConstant', DWORD),
('WriteTotalTimeoutMultiplier', DWORD),
('WriteTotalTimeoutConstant', DWORD),
]
__all__ = ['GetLastError', 'MS_CTS_ON', 'FILE_ATTRIBUTE_NORMAL',
'DTR_CONTROL_ENABLE', '_COMSTAT', 'MS_RLSD_ON',
'GetOverlappedResult', 'SETXON', 'PURGE_TXABORT',
'PurgeComm', 'N11_OVERLAPPED4DOLLAR_48E', 'EV_RING',
'ONESTOPBIT', 'SETXOFF', 'PURGE_RXABORT', 'GetCommState',
'RTS_CONTROL_ENABLE', '_DCB', 'CreateEvent',
'_COMMTIMEOUTS', '_SECURITY_ATTRIBUTES', 'EV_DSR',
'EV_PERR', 'EV_RXFLAG', 'OPEN_EXISTING', 'DCB',
'FILE_FLAG_OVERLAPPED', 'EV_CTS', 'SetupComm',
'LPOVERLAPPED', 'EV_TXEMPTY', 'ClearCommBreak',
'LPSECURITY_ATTRIBUTES', 'SetCommBreak', 'SetCommTimeouts',
|
pjdelport/django-analytical
|
analytical/tests/test_tag_gauges.py
|
Python
|
mit
| 2,359
| 0
|
"""
Tests for the Gauges template tags and filters.
"""
from django.http import HttpRequest
from django.template import Context
from django.test.utils import override_settings
from analytical.templatetags.gauges import GaugesNode
from analytical.tests.utils import TagTestCase
from analytical.utils import AnalyticalException
@override_settings(GAUGES_SITE_ID='1234567890abcdef0123456789')
class GaugesTagTestCase(TagTestCase):
"""
Tests for the ``gauges`` template tag.
"""
def test_tag(self):
self.assertEqual("""
<script type="text/javascript">
var _gauges = _gauges || [];
(function() {
var t = document.createElement('script');
t.type = 'text/javascript';
t.async = true;
t.id = 'ga
|
uges-tracker';
t.setAttribute('data-site-id', '1234567890abcdef0123456789');
t.src = '//secure.gaug.es/track.js';
var s = document.getElementsByTagName('s
|
cript')[0];
s.parentNode.insertBefore(t, s);
})();
</script>
""", self.render_tag('gauges', 'gauges'))
def test_node(self):
self.assertEqual(
"""
<script type="text/javascript">
var _gauges = _gauges || [];
(function() {
var t = document.createElement('script');
t.type = 'text/javascript';
t.async = true;
t.id = 'gauges-tracker';
t.setAttribute('data-site-id', '1234567890abcdef0123456789');
t.src = '//secure.gaug.es/track.js';
var s = document.getElementsByTagName('script')[0];
s.parentNode.insertBefore(t, s);
})();
</script>
""", GaugesNode().render(Context()))
@override_settings(GAUGES_SITE_ID=None)
def test_no_account_number(self):
self.assertRaises(AnalyticalException, GaugesNode)
@override_settings(GAUGES_SITE_ID='123abQ')
def test_wrong_account_number(self):
self.assertRaises(AnalyticalException, GaugesNode)
@override_settings(ANALYTICAL_INTERNAL_IPS=['1.1.1.1'])
def test_render_internal_ip(self):
req = HttpRequest()
req.META['REMOTE_ADDR'] = '1.1.1.1'
context = Context({'request': req})
r = GaugesNode().render(context)
self.assertTrue(r.startswith(
'<!-- Gauges disabled on internal IP address'), r)
self.assertTrue(r.endswith('-->'), r)
|
tosh1ki/pyogi
|
doc/sample_code/get_ki2_list.py
|
Python
|
mit
| 741
| 0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
def get_ki2_list(parser):
parser.add_argument('-p', '--path_2chkifu',
default='~/data/shogi/2chkifu/',
                        help='Directory where 2chkifu.zip has been extracted')
args = parser.parse_args()
path_2chkifu = args.path_2chkifu
sub_dir_list = ['00001', '10000', '20000', '30000', '40000']
path_ki2_list = []
# Extract paths of KI2 files
for sub_dir in sub_dir_list:
path_dir = os.path.expanduser(os.path.join(
|
path_2chkifu, sub_dir))
ki2files = os.listdir(path_dir)
for ki2file in ki2files:
path_ki2_list.append(os.path.join(path_dir, ki2file))
|
return sorted(path_ki2_list)
|
ridindirtyatl/truffle-api
|
routes.py
|
Python
|
agpl-3.0
| 261
| 0
|
from flask import Blueprint, jsonify, request
|
routes_api = Blueprint('ro
|
utes_api', __name__)
@routes_api.route('/v1/routes', methods=['GET'])
def routes_get():
'''
Get a list of routes
It is handler for GET /routes
'''
return jsonify()
|
ewandor/home-assistant
|
homeassistant/components/alarm_control_panel/manual.py
|
Python
|
apache-2.0
| 10,887
| 0
|
"""
Support for manual alarms.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/alarm_control_panel.manual/
"""
import copy
import datetime
import logging
import voluptuous as vol
import homeassistant.components.alarm_control_panel as alarm
import homeassistant.util.dt as dt_util
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_ARMED_CUSTOM_BYPASS, STATE_ALARM_DISARMED, STATE_ALARM_PENDING,
STATE_ALARM_TRIGGERED, CONF_PLATFORM, CONF_NAME, CONF_CODE,
CONF_DELAY_TIME, CONF_PENDING_TIME, CONF_TRIGGER_TIME,
CONF_DISARM_AFTER_TRIGGER)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import track_point_in_time
CONF_CODE_TEMPLATE = 'code_template'
DEFAULT_ALARM_NAME = 'HA Alarm'
DEFAULT_DELAY_TIME = datetime.timedelta(seconds=0)
DEFAULT_PENDING_TIME = datetime.timedelta(seconds=60)
DEFAULT_TRIGGER_TIME = datetime.timedelta(seconds=120)
DEFAULT_DISARM_AFTER_TRIGGER = False
SUPPORTED_STATES = [STATE_ALARM_DISARMED, STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME, STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_ARMED_CUSTOM_BYPASS, STATE_ALARM_TRIGGERED]
SUPPORTED_PRETRIGGER_STATES = [state for state in SUPPORTED_STATES
if state != STATE_ALARM_TRIGGERED]
SUPPORTED_PENDING_STATES = [state for state in SUPPORTED_STATES
if state != STATE_ALARM_DISARMED]
ATTR_PRE_PENDING_STATE = 'pre_pending_state'
ATTR_POST_PENDING_STATE = 'post_pending_state'
def _state_validator(config):
config = copy.deepcopy(config)
for state in SUPPORTED_PRETRIGGER_STATES:
if CONF_DELAY_TIME not in config[state]:
config[state][CONF_DELAY_TIME] = config[CONF_DELAY_TIME]
if CONF_TRIGGER_TIME not in config[state]:
config[state][CONF_TRIGGER_TIME] = config[CONF_TRIGGER_TIME]
for state in SUPPORTED_PENDING_STATES:
if CONF_PENDING_TIME not in config[state]:
config[state][CONF_PENDING_TIME] = config[CONF_PENDING_TIME]
return config
def _state_schema(state):
schema = {}
if state in SUPPORTED_PRETRIGGER_STATES:
schema[vol.Optional(CONF_DELAY_TIME)] = vol.All(
cv.time_period, cv.positive_timedelta)
schema[vol.Optional(CONF_TRIGGER_TIME)] = vol.All(
cv.time_period, cv.positive_timedelta)
if state in SUPPORTED_PENDING_STATES:
schema[vol.Optional(CONF_PENDING_TIME)] = vol.All(
cv.time_period, cv.positive_timedelta)
return vol.Schema(schema)
PLATFORM_SCHEMA = vol.Schema(vol.All({
vol.Required(CONF_PLATFORM): 'manual',
vol.Optional(CONF_NAME, default=DEFAULT_ALARM_NAME): cv.string,
vol.Exclusive(CONF_CODE, 'code validation'): cv.string,
vol.Exclusive(CONF_CODE_TEMPLATE, 'code validation'): cv.template,
vol.Optional(CONF_DELAY_TIME, default=DEFAULT_DELAY_TIME):
vol.All(cv.time_period, cv.positive_timedelta),
vol.Optional(CONF_PENDING_TIME, default=DEFAULT_PENDING_TIME):
vol.All(cv.time_period, cv.positive_timedelta),
vol.Optional(CONF_TRIGGER_TIME, default=DEFAULT_TRIGGER_TIME):
vol.All(cv.time_period, cv.positive_timedelta),
vol.Optional(CONF_DISARM_AFTER_TRIGGER,
default=DEFAULT_DISARM_AFTER_TRIGGER): cv.boolean,
vol.Optional(STATE_ALARM_ARMED_AWAY, default={}):
_state_schema(STATE_ALARM_ARMED_AWAY),
vol.Optional(STATE_ALARM_ARMED_HOME, default={}):
_state_schema(STATE_ALARM_ARMED_HOME),
vol.Optional(STATE_ALARM_ARMED_NIGHT, default={}):
_state_schema(STATE_ALARM_ARMED_NIGHT),
vol.Optional(STATE_ALARM_ARMED_CUSTOM_BYPASS, default={}):
_state_schema(STATE_ALARM_ARMED_CUSTOM_BYPASS),
vol.Optional(STATE_ALARM_DISARMED, default={}):
_state_schema(STATE_ALARM_DISARMED),
vol.Optional(STATE_ALARM_TRIGGERED, default={}):
_state_schema(STATE_ALARM_TRIGGERED),
}, _state_validator))
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the manual alarm platform."""
add_devices([ManualAlarm(
hass,
config[CONF_NAME],
config.get(CONF_CODE),
config.get(CONF_CODE_TEMPLATE),
config.get(CONF_DISARM_AFTER_TRIGGER, DEFAULT_DISARM_AFTER_TRIGGER),
config
)])
class ManualAlarm(alarm.AlarmControlPanel):
"""
Representation of an alarm status.
When armed, will be pending for 'pending_time', after that armed.
When triggered, will be pending for the triggering state's 'delay_time'
plus the triggered state's 'pending_time'.
After that will be triggered for 'trigger_time', after that we return to
the previous state or disarm if `disarm_after_trigger` is true.
A trigger_time of zero disables the alarm_trigger service.
"""
def __init__(self, hass, name, code, code_template,
disarm_after_trigger, config):
"""Init the manual alarm panel."""
self._state = STATE_ALARM_DISARMED
self._hass = hass
self._name = name
if code_template:
self._code = code_template
self._code.hass = hass
else:
self._code = code or None
self._disarm_after_trigger = disarm_after_trigger
self._previous_state = self._state
self._state_ts = None
self._delay_time_by_state = {
state: config[state][CONF_DELAY_TIME]
for state in SUPPORTED_PRETRIGGER_STATES}
self._trigger_time_by_state = {
state: config[state][CONF_TRIGGER_TIME]
for state in SUPPORTED_PRETRIGGER_STATES}
self._pending_time_by_state = {
state: config[state][CONF_PENDING_TIME]
for state in SUPPORTED_PENDING_STATES}
@property
def should_poll(self):
"""Return the plling state."""
return False
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
if self._state == STATE_ALARM_TRIGGERED:
if self._within_pending_time(self._state):
return STATE_ALARM_PENDING
trigger_time = self._trigger_time_by_state[self._previous_state]
if (self._state_ts + self._pending_time(self._state) +
trigger_time) < dt_util.utc
|
now():
if self._disarm_after_trigger:
return STATE_ALARM_DISARMED
else:
self._state = self._previous_state
return self._state
if self._state in SUPPORTED_PENDING_STATES and \
self._within_pending_time(self._state):
return STATE_ALARM_PENDING
return self._state
@property
def _active_state(self):
if self.st
|
ate == STATE_ALARM_PENDING:
return self._previous_state
else:
return self._state
def _pending_time(self, state):
pending_time = self._pending_time_by_state[state]
if state == STATE_ALARM_TRIGGERED:
pending_time += self._delay_time_by_state[self._previous_state]
return pending_time
def _within_pending_time(self, state):
return self._state_ts + self._pending_time(state) > dt_util.utcnow()
@property
def code_format(self):
"""One or more characters."""
return None if self._code is None else '.+'
def alarm_disarm(self, code=None):
"""Send disarm command."""
if not self._validate_code(code, STATE_ALARM_DISARMED):
return
self._state = STATE_ALARM_DISARMED
self._state_ts = dt_util.utcnow()
self.schedule_update_ha_state()
def alarm_arm_home(self, code=None):
"""Send arm home command."""
if not self._validate_code(code, STATE_ALARM_ARMED_HOME):
return
self._update_state(STATE_ALARM_ARMED_HOME)
def alarm_arm_away(self, code=None):
""
|
3dfxsoftware/cbss-addons
|
lunch/__openerp__.py
|
Python
|
gpl-2.0
| 2,636
| 0.006084
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2012 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Lunch Orders',
'author': 'OpenERP SA',
'version': '0.2',
'depends': ['base', 'report'],
'category' : 'Tools',
'summary': 'Lunch Order, Meal, Food',
'description': """
The base module to manage lunch.
================================
Many companies order sandwiches, pizzas and other food from their usual suppliers to offer their employees extra convenience.
However, managing lunches within the company requires proper administration, especially when the number of employees or suppliers is large.
The “Lunch Order” module has been developed to make this management easier and to give employees more tools and better usability.
In addition to full meal and supplier management, this module can display warnings and provides quick order selection based on each employee’s preferences.
If you wan
|
t to save your employees' time and spare them from always having to carry coins in their pockets, this module is essential.
""",
'data': [
'security/lunch_security.xml',
'lunch_view.xml',
'wizard/lunch_order_view.xml',
'wizard/lunch_validation_view.
|
xml',
'wizard/lunch_cancel_view.xml',
'lunch_report.xml',
'report/report_lunch_order_view.xml',
'security/ir.model.access.csv',
'views/report_lunchorder.xml',
'views/lunch.xml',
],
'images': ['images/new_order.jpeg','images/lunch_account.jpeg','images/order_by_supplier_analysis.jpeg','images/alert.jpeg'],
'demo': ['lunch_demo.xml',],
'installable': True,
'application' : True,
'certificate' : '001292377792581874189',
'images': [],
}
|
toopy/django-mon-premier-projet
|
src/mon_premier_projet/apps/vcard/templatetags/vcard.py
|
Python
|
mit
| 901
| 0.006659
|
from django import forms, template
from django.core.cache import cache
from repertoire_telephonique.models import Phone
register = template.Library()
@register.simple_tag
def simple_add(a, b):
return a + b
@register.inclusion_tag('vcard/tags/form_phone.html')
def get_form_phone(contact_id):
#
|
get from cache
cache_key = 'phone_choices_%s' % con
|
tact_id
choices = cache.get(cache_key)
# not in cache generate choices
if not choices:
choices = [(_p.id, '%s %s' % (_p.prefix, _p.value))
for _p in Phone.objects.filter(contact_id=contact_id)]
# cache update
cache.set(cache_key, choices)
# dynamic form to manage dynamic choices
class PhoneForm(forms.Form):
phone = forms.MultipleChoiceField(choices=choices)
return {
'contact_id': contact_id,
'form': PhoneForm()
}
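# Illustrative template usage (template variables are assumptions):
#   {% load vcard %}
#   {% simple_add 40 2 %}            {# renders "42" #}
#   {% get_form_phone contact.id %}  {# renders vcard/tags/form_phone.html #}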
|
martynovp/edx-platform
|
lms/djangoapps/courseware/tests/test_submitting_problems.py
|
Python
|
agpl-3.0
| 51,827
| 0.001777
|
# -*- coding: utf-8 -*-
"""
Integration tests for submitting problem responses and getting grades.
"""
import json
import os
from textwrap import dedent
from django.conf import settings
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.test.client import RequestFactory
from mock import patch
from nose.plugins.attrib import attr
from capa.tests.response_xml_factory import (
OptionResponseXMLFactory, CustomResponseXMLFactory, SchematicResponseXMLFactory,
CodeResponseXMLFactory,
)
from courseware import grades
from courseware.models import StudentModule, StudentModuleHistory
from courseware.tests.helpers import LoginEnrollmentTestCase
from lms.djangoapps.lms_xblock.runtime import quote_slashes
from student.tests.factories import UserFactory
from student.models import anonymous_id_for_user
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.partitions.partitions import Group, UserPartition
from openedx.core.djangoapps.credit.api import (
set_credit_requirements, get_credit_requirement_status
)
from openedx.core.djangoapps.credit.models import CreditCourse, CreditProvider
from openedx.core.djangoapps.user_api.tests.factories import UserCourseTagFactory
from openedx.core.djangoapps.grading_policy.utils import MaxScoresCache
class TestSubmittingProblems(ModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Check that a course gets graded properly.
"""
# arbitrary constant
COURSE_SLUG = "100"
COURSE_NAME = "test_course"
def setUp(self):
super(TestSubmittingProblems, self).setUp(create_user=False)
# Create course
self.course = CourseFactory.create(display_name=self.COURSE_NAME, number=self.COURSE_SLUG)
assert self.course, "Couldn't load course %r" % self.COURSE_NAME
# create a test student
self.student = '[email protected]'
self.password = 'foo'
self.create_account('u1', self.student, self.password)
self.activate_user(self.student)
self.enroll(self.course)
self.student_user = User.objects.get(email=self.student)
self.factory = RequestFactory()
def refresh_course(self):
"""
Re-fetch the course from the database so that the object being dealt with has everything added to it.
"""
self.course = self.store.get_course(self.course.id)
def problem_location(self, problem_url_name):
"""
Returns the url of the problem given the problem's name
"""
return self.course.id.make_usage_key('problem', problem_url_name)
def modx_url(self, problem_location, dispatch):
"""
Return the url needed for the desired action.
problem_location: location of the problem on which we want some action
        dispatch: the action string that gets passed to the view as a kwarg
example: 'check_problem' for having responses processed
"""
return reverse(
'xblock_handler',
kwargs={
'course_id': self.course.id.to_deprecated_string(),
'usage_id': quote_slashes(problem_location.to_deprecated_string()),
'handler': 'xmodule_handler',
'suffix': dispatch,
}
)
def submit_question_answer(self, problem_url_name, responses):
"""
Submit answers to a question.
Responses is a dict mapping problem ids to answers:
{'2_1': 'Correct', '2_2': 'Incorrect'}
"""
problem_location = self.problem_location(problem_url_name)
modx_url = self.modx_url(problem_location, 'problem_check')
answer_key_prefix = 'input_{}_'.format(problem_location.html_id())
# format the response dictionary to be sent in the post request by adding the above prefix to each
|
key
response_dict = {(answer_key_prefix + k): v for k, v in responses.items()}
resp = self.client.post(modx_url, response_dict)
        return resp
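    # Illustrative call (hypothetical problem name): assuming a two-input dropdown problem,
    #   self.submit_question_answer('p1', {'2_1': 'Correct', '2_2': 'Incorrect'})
    # posts {'input_<html_id>_2_1': 'Correct', 'input_<html_id>_2_2': 'Incorrect'} to the
    # problem_check handler and returns the HTTP response.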
def look_at_question(self, problem_url_name):
"""
Create state for a problem, but don't answer it
"""
location = self.problem_location(problem_url_name)
modx_url = self.modx_url(location, "problem_get")
resp = self.client.get(modx_url)
return resp
def reset_question_answer(self, problem_url_name):
"""
Reset specified problem for current user.
"""
problem_location = self.problem_location(problem_url_name)
modx_url = self.modx_url(problem_location, 'problem_reset')
resp = self.client.post(modx_url)
return resp
def show_question_answer(self, problem_url_name):
"""
Shows the answer to the current student.
"""
problem_location = self.problem_location(problem_url_name)
modx_url = self.modx_url(problem_location, 'problem_show')
resp = self.client.post(modx_url)
return resp
def add_dropdown_to_section(self, section_location, name, num_inputs=2):
"""
Create and return a dropdown problem.
section_location: location object of section in which to create the problem
(problems must live in a section to be graded properly)
name: string name of the problem
num_input: the number of input fields to create in the problem
"""
prob_xml = OptionResponseXMLFactory().build_xml(
question_text='The correct answer is Correct',
num_inputs=num_inputs,
weight=num_inputs,
options=['Correct', 'Incorrect', u'ⓤⓝⓘⓒⓞⓓⓔ'],
correct_option='Correct'
)
problem = ItemFactory.create(
parent_location=section_location,
category='problem',
data=prob_xml,
metadata={'rerandomize': 'always'},
display_name=name
)
# re-fetch the course from the database so the object is up to date
self.refresh_course()
return problem
def add_graded_section_to_course(self, name, section_format='Homework', late=False, reset=False, showanswer=False):
"""
Creates a graded homework section within a chapter and returns the section.
"""
# if we don't already have a chapter create a new one
if not(hasattr(self, 'chapter')):
self.chapter = ItemFactory.create(
parent_location=self.course.location,
category='chapter'
)
if late:
section = ItemFactory.create(
parent_location=self.chapter.location,
display_name=name,
category='sequential',
metadata={'graded': True, 'format': section_format, 'due': '2013-05-20T23:30'}
)
elif reset:
section = ItemFactory.create(
parent_location=self.chapter.location,
display_name=name,
category='sequential',
rerandomize='always',
metadata={
'graded': True,
'format': section_format,
}
)
elif showanswer:
section = ItemFactory.create(
parent_location=self.chapter.location,
display_name=name,
category='sequential',
showanswer='never',
metadata={
'graded': True,
'format': section_format,
}
)
else:
section = ItemFactory.create(
parent_location=self.chapter.location,
display_name=name,
category='sequential',
metadata={'graded': True, 'format': section_format}
)
# now that we've added the problem and section to the course
# we fetch the course from the database so the object we are
# dealing with has these additions
self.refresh_course()
return section
|
chiehwen/logbook
|
logbook/testsuite/__init__.py
|
Python
|
bsd-3-clause
| 2,534
| 0.001579
|
# -*- coding: utf-8 -*-
"""
logbook.testsuite
~~~~~~~~~~~~~~~~~
The logbook testsuite.
:copyright: (c) 2010 by Armin Ronacher, Georg Brandl.
:license: BSD, see LICENSE for more details.
"""
import sys
import unittest
import logbook
_skipped_modules = []
_missing = object()
_func_ident = lambda f: f
_func_none = lambda f: None
class LogbookTestSuite(unittest.TestSuite):
def run(self, result):
try:
return unittest.TestSuite.run(self, result)
finally:
sys.stderr.write('\n')
for mod in _skipped_modules:
msg = '*** Failed to import %s, tests skipped.\n' % mod
sys.stderr.write(msg)
class LogbookTestCase(unittest.TestCase):
def setUp(self):
self.log = logbook.Logger('testlogger')
# silence deprecation warning displayed on Py 3.2
LogbookTestCase.assert_ = LogbookTestCase.assertTrue
def make_fake_mail_handler(**kwargs):
class FakeMailHandler(logbook.MailHandler):
mails = []
def get_connection(self):
return self
def close_connection(self, con):
pass
def sendmail(self, fromaddr, recipients, mail):
self.mails.append((fromaddr, recipients, mail))
kwargs.setdefault('level', logbook.ERROR)
return FakeMailHandler('[email protected]', ['[email protected]'], **kwargs)
def skip_if(condition):
if condition:
return _func_ident
else:
return _func_none
def require(name):
    if name in _skipped_modules:
return _func_none
try:
__import__(name)
except ImportError:
_skipped_modules.append(name)
return _func_none
return _func_ident
def missing(name):
def decorate(f):
def wrapper(*args, **kwargs):
old = sys.modules.get(name, _missing)
sys.modules[name] = None
try:
f(*args, **kwargs)
finally:
if old is _missing:
del sys.modules[name]
else:
sys.modules[name] = old
return wrapper
return decorate
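# Illustrative use of the decorator above (hypothetical test method): the named module is
# replaced with None in sys.modules for the duration of the call, so importing it inside
# the decorated function typically raises ImportError; the original entry is restored afterwards.
#   @missing('jinja2')
#   def test_without_jinja2(self):
#       ...  # code under test sees 'import jinja2' fail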
def suite():
loader = unittest.TestLoader()
suite = LogbookTestSuite()
suite.addTests(loader.loadTestsFromName('logbook.testsuite.test_regular'))
if sys.version_info >= (2, 5):
suite.addTests(loader.loadTestsFromName
('logbook.testsuite.test_contextmanager'))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
shafiquejamal/easyframes
|
easyframes/test/test_statamerge.py
|
Python
|
apache-2.0
| 4,184
| 0.032744
|
import unittest
import pandas as pd
from pandas.util.testing import assert_series_equal
import numpy as np
from easyframes.easyframes import hhkit
class TestStataMerge(unittest.TestCase):
def setUp(self):
"""
df_original = pd.read_csv('sample_hh_dataset.csv')
df = df_original.copy()
print(df.to_dict())
"""
self.df_master = pd.DataFrame(
{'educ': {0: 'secondary', 1: 'bachelor', 2: 'primary', 3: 'higher', 4: 'bachelor', 5: 'secondary',
6: 'higher', 7: 'higher', 8: 'primary', 9: 'primary'},
'hh': {0: 1, 1: 1, 2: 1, 3: 2, 4: 3, 5: 3, 6: 4, 7: 4, 8: 4, 9: 4},
'id': {0: 1, 1: 2, 2: 3, 3: 1, 4: 1, 5: 2, 6: 1, 7: 2, 8: 3, 9: 4},
'has_car': {0: 1, 1: 1, 2: 1, 3: 1, 4: 0, 5: 0, 6: 1, 7: 1, 8: 1, 9: 1},
'weighthh': {0: 2, 1: 2, 2: 2, 3: 3, 4: 2, 5: 2, 6: 3, 7: 3, 8: 3, 9: 3},
'house_rooms': {0: 3, 1: 3, 2: 3, 3: 2, 4: 1, 5: 1, 6: 3, 7: 3, 8: 3, 9: 3},
'prov': {0: 'BC', 1: 'BC', 2: 'BC', 3: 'Alberta', 4: 'BC', 5: 'BC', 6: 'Alberta',
7: 'Alberta', 8: 'Alberta', 9: 'Alberta'},
'age': {0: 44, 1: 43, 2: 13, 3: 70, 4: 23, 5: 20, 6: 37, 7: 35, 8: 8, 9: 15},
'fridge': {0: 'yes', 1: 'yes', 2: 'yes', 3: 'no', 4: 'yes', 5: 'yes', 6: 'no',
7: 'no', 8: 'no', 9: 'no'},
'male': {0: 1, 1: 0, 2: 1, 3: 1, 4: 1, 5: 0, 6: 1, 7: 0, 8: 0, 9: 0}})
self.df_using_hh = pd.DataFrame(
{'hh': {0: 2, 1: 4, 2: 5, 3: 6, 4: 7},
'has_fence': {0: 1, 1: 0, 2: 1, 3: 1, 4: 0}
})
self.df_using_ind = pd.DataFrame(
{'empl': {0: 'not employed', 1: 'full-time', 2: 'part-time', 3: 'part-time', 4: 'full-time', 5: 'part-time',
6: 'self-employed', 7: 'full-time', 8: 'self-employed'},
'hh': {0: 1, 1: 1, 2: 1, 3: 2, 4: 5, 5: 5, 6: 4, 7: 4, 8: 4},
        'id': {0: 1, 1: 2, 2: 4, 3: 1, 4: 1, 5: 2, 6: 1, 7: 2, 8: 5}
})
# @unittest.skip("demonstrating skipping")
def test_new_columns_added_merging_hh_level(self):
myhhkit = hhkit(self.df_master)
# myhhkit.from_dict(self.df_master)
myhhkit_using_hh = hhkit(self.df_using_hh)
# myhhkit_using_hh.from_dict(self.df_using_hh)
myhhkit.statamerge(myhhkit_using_hh, on=['hh'])
list_of_columns = myhhkit.df.columns.values.tolist()
self.assertIn('has_fence',list_of_columns)
        # also check that the values are correct
correct_values = pd.Series([np.nan, np.nan, np.nan, 1, np.nan, np.nan, 0, 0, 0, 0, 1, 1, 0])
assert_series_equal(correct_values, myhhkit.df['has_fence'])
# @unittest.skip("demonstrating skipping")
def test_new_columns_added_merging_ind_level(self):
myhhkit = hhkit(self.df_master)
# myhhkit.from_dict(self.df_master)
myhhkit_using_ind = hhkit(self.df_using_ind)
# myhhkit_using_ind.from_dict(self.df_using_ind)
myhhkit.statamerge(myhhkit_using_ind, on=['hh','id'])
list_of_columns = myhhkit.df.columns.values.tolist()
self.assertIn('empl',list_of_columns)
# also check that the values are correct
correct_values = pd.Series(['not employed', 'full-time', np.nan, 'part-time', np.nan, np.nan,
'self-employed', 'full-time', np.nan, np.nan, 'part-time', 'full-time', 'part-time', 'self-employed'])
assert_series_equal(correct_values, myhhkit.df['empl'])
# @unittest.skip("demonstrating skipping")
def test_check_proper_merged_variable_created_and_is_correct_hh_level(self):
myhhkit = hhkit(self.df_master)
# myhhkit.from_dict(self.df_master)
myhhkit_using_hh = hhkit(self.df_using_hh)
# myhhkit_using_hh.from_dict(self.df_using_hh)
correct_values = pd.Series([1, 1, 1, 3, 1, 1, 3, 3, 3, 3, 2, 2, 2])
myhhkit.statamerge(myhhkit_using_hh, on=['hh'], mergevarname='_merge_hh')
assert_series_equal(correct_values, myhhkit.df['_merge_hh'])
def test_check_proper_merged_variable_created_and_is_correct_ind_level(self):
myhhkit = hhkit(self.df_master)
# myhhkit.from_dict(self.df_master)
myhhkit_using_ind = hhkit(self.df_using_ind)
# myhhkit_using_ind.from_dict(self.df_using_ind)
correct_values = pd.Series([3, 3, 1, 3, 1, 1, 3, 3, 1, 1, 2, 2, 2, 2])
myhhkit.statamerge(myhhkit_using_ind, on=['hh','id'], mergevarname='_merge_hh')
assert_series_equal(correct_values, myhhkit.df['_merge_hh'])
if __name__ == '__main__':
unittest.main()
|
slashdd/sos
|
sos/cleaner/archives/insights.py
|
Python
|
gpl-2.0
| 1,284
| 0
|
# Copyright 2021 Red Hat, Inc. Jake Hunsaker <[email protected]>
# This file is part of the sos project: https://github.com/sosreport/sos
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# version 2 of the GNU General Public License.
#
# See the LICENSE file in the source distribution for further information.
from sos.cleaner.archives import SoSObfuscationArchive
import tarfile
class InsightsArchive(SoSObfuscationArchive):
"""This class represents archives generated by the insights-client utility
for
|
RHEL systems.
"""
type_name = 'insights'
description = 'insights-client archive'
prep_files = {
'hostname': 'data/insights_commands/hostname_-f',
'ip': 'data/insights_commands/ip_addr',
'mac': 'data/insights_commands/ip_addr'
}
@classmethod
def check_is_type(cls, arc_path):
try:
return tarfile.is_tarfile(arc_path) and 'insights-' in arc_path
except Exception:
return False
def get_archive_root(self):
top = self.archive_path.split('/')[-1].split('.tar')[0]
if self.tarobj.firstmember.name == '.':
top = './' + top
return top
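    # Rough trace of get_archive_root (made-up path): for
    # '/tmp/insights-example.host-20210101.tar.gz' the basename is split on '.tar',
    # giving 'insights-example.host-20210101' (prefixed with './' when the tar's
    # first member is '.').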
|
project-asap/IReS-Platform
|
asap-tools/imr-code/imr_workflow_spark/operators/imr_tools.py
|
Python
|
apache-2.0
| 5,668
| 0.000706
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
tools for imr datasets
@author: Chris Mantas
@contact: [email protected]
@since: Created on 2016-02-12
@todo: custom formats, break up big lines
@license: http://www.apache.org/licenses/LICENSE-2.0 Apache License
"""
from ast import literal_eval
from collections import defaultdict
def create_label_encoder(labels):
"""
Creates a label encoder from a list of labels
:param labels: a list of integers
:return: a LabelEncoder object
"""
from sklearn.preprocessing import LabelEncoder
encoder = LabelEncoder()
encoder.fit(labels)
return encoder
def get_features_from_line(line):
"""
    Given a text line it returns
    a) only the last element of the tuple if the line is a tuple.
That element we assume to be a list of features.
b) the line's elements if the line is not a tuple
:param line:
:return:
"""
from ast import literal_eval
entry = literal_eval(line)
return entry[-1] if isinstance(entry, tuple) else entry
def parse_line(line):
"""
Parses a string line to a tuple
:param line:
:return:
"""
from ast import literal_eval
try:
entry = literal_eval(line)
if not isinstance(entry, tuple):
raise Exception("Input parsed, but is not a tuple")
except:
raise Exception("Could not evaluate (parse) input into an object")
return entry
def tuple_to_labeled_point(entry, category, l_encoder=None):
"""
Creates a label point from a text line that is formated as a tuple
:param entry: a tuple of format (3, 2, 1, [3,4,4 ..]), where the first
entries in the tuple are labels, and the last entry is
a list of features
:param category: which one of the labels in the tuple to keep for the
labeled point (0 to 2 for imr dataset)
:param l_encoder: the label encoder to encode the label (if any)
:return: a LabeledPoint
"""
from pyspark.mllib.classification import LabeledPoint
label = entry[category]
if l_encoder:
label = l_encoder.transform(label)
features = entry[-1]
return LabeledPoint(label, features) # return a new labelPoint
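# Example (made-up values): tuple_to_labeled_point((3, 2, 1, [0.5, 0.1]), category=0)
# yields LabeledPoint(3.0, [0.5, 0.1]); if an l_encoder fitted on the labels is passed,
# the label is first mapped to its encoded index before the LabeledPoint is built.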
def classify_line(features, model, l_encoder=None):
"""
Classifies the features based on the given model.
If a label encoder is specified, it reverses the encoding of the label
:param features: a vector of features
:param model: a Classification Model
:param l_encoder: a LabelEncoder
:return: a tuple of: label, [feat1, feat2 ... featN]
"""
encoded_prediction = model.predict(features)
prediction = l_encoder.inverse_transform(encoded_prediction) \
if l_encoder else encoded_prediction
return prediction, features
def label_encoders_from_json_file(labels_json_file, category=None):
"""
Loads a mapping of categories->available_labels from a json file.
If category is specified it returns the LabelEncoder for this category.
If not, it returns a dict of category->LabelEncoder
:param labels_json_file:
:param category:
:return:
"""
from json import load
from sklearn.preprocessing import LabelEncoder
with open(labels_json_file) as infile:
all_labels = load(infile)
label_dict = dict(map(
lambda (k, v): (int(k), LabelEncoder().fit(v)),
all_labels.iteritems()
))
return label_dict[category] if category else label_dict
def labels_from_csv_file(csv_file, label_range):
"""
Parses a csv dataset and keeps a set of all the labels in 'label_range'.
Preserves the order in which it sees labels - does not contain duplicates.
:param csv_file:
:param label_range:
:return:
"""
labels = defaultdict(list)
label_sets = defaultdict(set)
with open(csv_file) as infile:
for line in infile:
line_tokens = line.split(';')
for i in range(label_range[0], label_range[1]+1):
label = int(line_tokens[i])
if label not in label_sets[i]:
label_sets[i].add(label)
labels[i].append(label)
# convert to regular dict of lists
return dict(labels.iteritems())
# ======================= MAIN ========================= #
if __name__ == "__main__":
from argparse import ArgumentParser
from json import dump
cli_parser = ArgumentParser(description='tools for imr datasets')
cli_parser.add_argument("operation",
help="the operation to run: 'train' or 'classify'")
cli_parser.add_argument("input",
help="the input dataset (formatted as a csv file"
"separated with ';' character")
cli_parser.add_argument("output", help="the output file")
cli_parser.add_argument("-rs", '--range-start', type=int, default=1,
help="the start of the range of labels")
cli_parser.add_argument("-re", '--range-end', type=int, default=3,
help="the end of the range of labels (inclusive)")
args = cli_parser.parse_args()
if args.operation == "storelabels":
from collections import defaultdict
# get a dict of labels from a csv dataset
labels_dict = labels_from_csv_file(args.input,
(args.range_start, args.range_end))
# dump it to the output file
with open(args.output, 'w+') as outfile:
dump(labels_dict, outfile)
else:
print("I do not know operation:", args.operation)
|
dougbeal/google-apis-client-generator
|
src/googleapis/codegen/utilities/json_expander.py
|
Python
|
apache-2.0
| 1,809
| 0.008292
|
#!/usr/bin/python2.7
# Copyright 2012 Google Inc. All Rights Reserved.
"""Support for simple JSON templates.
A JSON template is a dictionary of JSON data in which string values
may be simple templates in string.Template format (i.e.,
$dollarSignEscaping). By default, the template is expanded against
its own data, optionally updated with additional context.
"""
import json
from string import Template
import sys
__author__ = '[email protected] (Jacob Smullyan)'
def ExpandJsonTemplate(json_data, extra_context=None, use_self=True):
"""Recursively template-expand a json dict against itself or other context.
The context for string expansion is the json dict itself by default, updated
by extra_context, if supplied.
Args:
json_data: (dict) A JSON object where string values may be templates.
extra_context: (dict) Additional context for template expansion.
use_self: (bool) Whether to expand the template against itself, or only use
extra_context.
Returns:
A dict where string template values have been expanded against
the context.
"""
if use_self:
context = dict(json_data)
else:
context = {}
if extra_context:
context.update(extra_context)
def RecursiveExpand(obj):
if isinstance(obj, list):
return [RecursiveExpand(x) for x in obj]
elif isinstance(obj, dict):
return dict((k, RecursiveExpand(v)) for k, v in obj.iteritems())
elif isinstance(obj, (str, unicode)):
return Template(obj).safe_substitute(context)
else:
return obj
return RecursiveExpand(json_data)
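# Example (made-up data): expanding a template against itself --
#   ExpandJsonTemplate({"name": "world", "greeting": "Hello, $name!"})
#   returns {"name": "world", "greeting": "Hello, world!"}
# since string values are run through string.Template.safe_substitute against the dict itself.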
if __name__ == '__main__':
if len(sys.argv) > 1:
json_in = open(sys.argv[1])
else:
json_in = sys.stdin
data = json.load(json_in)
expanded = ExpandJsonTemplate(data)
json.dump(expanded, sys.stdout, indent=2)
|
lptorres/noah-inasafe
|
web_api/third_party/raven/transport/exceptions.py
|
Python
|
gpl-3.0
| 498
| 0
|
"""
raven.transport.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
class InvalidScheme(ValueError):
"""
Raised when a transport is constructed using a URI which is not
handled by the transport
"""
class DuplicateScheme(StandardError):
"""
Raised when registering a handler for a particular scheme which
is already registered
"""
|
wasit7/tutorials
|
arduino_python/02_python_serial/readUno.py
|
Python
|
mit
| 669
| 0.019432
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 07 13:58:49 2015
@author: Wasit
"""
import serial
import re
import datetime
#ser = serial.Serial('/dev/tty.usbserial', 9600)
#ser = serial.Serial('COM7', 9600)
#ser = serial.Serial(0) # open first serial port
ser=None
for i in xrange(10):
try:
ser = serial.Serial(i)
break
except:
print "port COM%d is disabled"%(i+1)
print "Connecting to port: "
|
+ser.name
endTime = datetime.datetime.now() + datetime.timedelta(seconds=5)
while True:
    if datetime.datetime.now() >= endTime:
        break
record=re.split(',',ser.readline())
record = map(int, record)
print record
ser.close()
|
cloudify-cosmo/cloudify-manager
|
rest-service/manager_rest/test/endpoints/test_execution_schedules.py
|
Python
|
apache-2.0
| 8,636
| 0
|
from datetime import datetime, timedelta
from manager_rest.test.base_test import BaseServerTestCase
from cloudify_rest_client.exceptions import CloudifyClientError
class ExecutionSchedulesTestCase(BaseServerTestCase):
DEPLOYMENT_ID = 'deployment'
fmt = '%Y-%m-%dT%H:%M:%S.%fZ'
an_hour_from_now = \
datetime.utcnow().replace(microsecond=0) + timedelta(hours=1)
two_hours_from_now = \
datetime.utcnow().replace(microsecond=0) + timedelta(hours=2)
three_hours_from_now = \
datetime.utcnow().replace(microsecond=0) + timedelta(hours=3)
three_weeks_from_now = \
datetime.utcnow().replace(microsecond=0) + timedelta(weeks=3)
deployment_id = None
def setUp(self):
super(ExecutionSchedulesTestCase, self).setUp()
_, self.deployment_id, _, _ = self.put_deployment(self.DEPLOYMENT_ID)
def test_schedule_create(self):
schedule_id = 'sched-1'
workflow_id = 'install'
schedule = self.client.execution_schedules.create(
schedule_id, self.deployment_id, workflow_id,
since=self.an_hour_from_now, recurrence='1 minutes', count=5)
self.assertEqual(schedule.id, schedule_id)
self.assertEqual(schedule.deployment_id, self.deployment_id)
self.assertEqual(schedule.workflow_id, workflow_id)
self.assertEqual(datetime.strptime(schedule.since, self.fmt),
self.an_hour_from_now)
self.assertEqual(len(schedule['all_next_occurrences']), 5)
self.assertEqual(
datetime.strptime(schedule['next_occurrence'], self.fmt),
self.an_hour_from_now)
self.assertEqual(schedule['slip'], 0)
self.assertEqual(schedule['stop_on_fail'], False)
def test_schedule_create_weekdays(self):
schedule = self.client.execution_schedules.create(
'sched-weekdays', self.deployment_id, 'install',
since=self.an_hour_from_now, until=self.three_weeks_from_now,
recurrence='1 days', weekdays=['mo', 'tu', 'we', 'th'])
self.assertEqual(len(schedule['all_next_occurrences']), 12) # 3w * 4d
def test_schedules_list(self):
schedule_ids = ['sched-1', 'sched-2']
for schedule_id in schedule_ids:
self.client.execution_schedules.create(
schedule_id, self.deployment_id, 'install',
since=self.an_hour_from_now, recurrence='1 minutes', count=5)
schedules = self.client.execution_schedules.list()
self.assertEqual(len(schedules), 2)
self.assertSetEqual({s.id for s in schedules}, set(schedule_ids))
def test_schedule_delete(self):
self.client.execution_schedules.create(
'delete-me', self.deployment_id, 'install',
since=self.an_hour_from_now, recurrence='1 minutes', count=5)
self.assertEqual(len(self.client.execution_schedules.list()), 1)
self.client.execution_schedules.delete('delete-me', self.deployment_id)
self.assertEqual(len(self.client.execution_schedules.list()), 0)
def test_schedule_update(self):
schedule = self.client.execution_schedules.create(
'update-me', self.deployment_id, 'install',
since=self.an_hour_from_now, until=self.two_hours_from_now,
recurrence='1 minutes')
# `until` is inclusive
self.assertEqual(len(schedule['all_next_occurrences']), 61)
self.assertEqual(schedule['rule']['recurrence'], '1 minutes')
self.assertEqual(schedule['slip'], 0)
self.client.execution_schedules.update(
'update-me', self.deployment_id, recurrence='5 minutes', slip=30)
# get the schedule from the DB and not directly from .update endpoint
schedule = self.client.execution_schedules.get('update-me',
self.deployment_id)
self.assertEqual(len(schedule['all_next_occurrences']), 13) # 60/5+1
self.assertEqual(schedule['rule']['recurrence'], '5 minutes')
self.assertEqual(schedule['slip'], 30)
self.client.execution_schedules.update(
'update-me', self.deployment_id, until=self.three_hours_from_now)
schedule = self.client.execution_schedules.get('update-me',
self.deployment_id)
self.assertEqual(len(schedule['all_next_occurrences']), 25) # 2*60/5+1
def test_schedule_get_invalid_id(self):
self.assertRaisesRegex(
CloudifyClientError,
'404: Requested `ExecutionSchedule` .* was not found',
self.client.execution_schedules.get,
'nonsuch',
self.deployment_id
)
def test_schedule_create_no_since(self):
self.assertRaises(
AssertionError,
self.client.execution_schedules.create,
'some_id', self.deployment_id, 'some_workflow',
recurrence='1 minutes', count=5
)
def test_schedule_create_invalid_time_format(self):
self.assertRaisesRegex(
AttributeError,
"'str' object has no attribute 'isoformat'",
self.client.execution_schedules.create,
'some_id', self.deployment_id, 'install',
since='long ago', recurrence='1 minutes', count=5
)
def test_schedule_create_invalid_workflow(self):
self.assertRaisesRegex(
CloudifyClientError,
'400: Workflow some_workflow does not exist',
self.client.execution_schedules.create,
'some_id', self.deployment_id, 'some_workflow',
since=self.an_hour_from_now, recurrence='1 minutes', count=5,
)
def test_schedule_invalid_weekdays(self):
self.assertRaisesRegex(
CloudifyClientError,
'400:.* invalid weekday',
self.client.execution_schedules.create,
'bad-weekdays', self.deployment_id, 'install',
since=self.an_hour_from_now, recurrence='4 hours',
weekdays=['oneday', 'someday']
)
self.client.execution_schedules.create(
'good-weekdays', self.deployment_id, 'install',
since=self.an_hour_from_now, recurrence='4 hours', count=6,
weekdays=['mo', 'tu']
)
self.assertRaisesRegex(
CloudifyClientError,
'400:.* invalid weekday',
self.client.execution_schedules.update,
'good-weekdays', self.deployment_id, weekdays=['oneday', 'someday']
)
def test_schedule_create_invalid_complex_weekdays(self):
self.assertRaisesRegex(
CloudifyClientError,
'400:.* invalid weekday',
self.client.execution_schedules.create,
'bad-complex-wd', self.deployment_id, 'install',
since=self.an_hour_from_now, recurrence='4 hours',
weekdays=['5tu']
)
def test_schedule_create_invalid_recurrence_with_complex_weekdays(self):
self.assertRaisesRegex(
CloudifyClientError,
'400:.* complex weekday expression',
self.client.execution_schedules.create,
'bad-complex-wd', self.deployment_id, 'install',
since=self.an_hour_from_now, recurrence='4 hours',
weekdays=['2mo', 'l-tu']
)
def test_schedule_invalid_repetition_without_recurrence(self):
recurrence_error = \
'400: recurrence must be specified for execution count ' \
'larger than 1'
        self.assertRaisesRegex(
CloudifyClientError,
recurrence_error,
self.client.execution_schedules.create,
            'no-recurrence-no-count', self.deployment_id, 'uninstall',
since=self.an_hour_from_now, weekdays=['su', 'mo', 'tu'],
)
self.client.execution_schedules.create(
'no-recurrence-count-1', self.deployment_id, 'install',
since=self.an_hour_from_now, count=1,
)
self.assertRaisesRegex(
CloudifyClientError,
recurrence_error,
self.client.execution_schedules.upd
|
bardes/sonitus
|
tools/tone_gen.py
|
Python
|
mit
| 1,271
| 0.02203
|
#!/usr/bin/env python
from sys import argv, stderr
usage = \
"""
Usage: {program} <sample rate> <A4 freq.> [octaves=8]
e.g.: {program} 64000 442.0 5
""".format(program=argv[0])
if len(argv) < 3 or len(argv) > 4 :
print(usage, file = stderr)
exit(1)
A4 = 0
sample_rate = 0
octaves = 8
try:
A4 = float(argv[2])
except:
print("Error, invalid argument: Freq. must be a number!", file = stderr)
print(usage, file = stderr)
exit(1)
try:
sample_rate = int(argv[1])
except:
print("Error, invalid argume
|
nt: Sample rate must be an integer!", \
file = stderr)
print(usage, file = stderr)
exit(1)
if len(argv) == 4 :
try:
octaves = int(argv[3])
except:
print("Error, invalid argument: Octaves must be an integer!", \
file = stderr)
print(usage, file = stderr)
exit(1)
freq_ratio = 2**(1/12)
base_freq = A4/(freq_ratio**57)
periods = [round(sample_rate/(2*base_freq*freq_ratio**t)) \
for t in range(0, 12*octaves)]
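# Rough sanity check (approximate values): with sample_rate=64000 and A4=442 Hz,
# base_freq is C0 = 442 / 2**(57/12) ~ 16.4 Hz, so the first half-period is about
# 64000 / (2 * 16.4) ~ 1948 samples.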
print("uint16_t tone_periods[{ntones}] = {{".format(ntones=12*octaves))
for o in range(0, octaves):
print('\t', end='')
for i in range(0, 12):
print("{period}, ".format(period=periods[12*o+i]), end='')
print('')
print("};")
|
BirkbeckCTP/janeway
|
src/press/migrations/0002_auto_20170727_1504.py
|
Python
|
agpl-3.0
| 979
| 0.003064
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2017-07-27 15:04
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('press', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='press',
name='password_length',
field=models.PositiveIntegerField(default=12, validators=[django.core.validators.MinValueValidator(9)]),
),
migrations.AddField(
model_name='press',
name='password_number',
            field=models.BooleanField(default=False, help_text='If set, passwords must include one number.'),
),
migrations.AddField(
model_name='press',
name='password_upper',
            field=models.BooleanField(default=False, help_text='If set, passwords must include one upper case.'),
),
]
|
Exanis/cannelloni
|
backend/filters/count_column.py
|
Python
|
mit
| 662
| 0.003026
|
# -*- coding: utf8 -*-
"CountColumn filter"
from .abstract import AbstractFilter
class CountColumn(AbstractFilter):
"Count a flux's column and put the result in a variable"
name = 'Compter colonnes'
description = "Compte le nombre de col
|
onnes d'un flux et met le résultat dans une variable"
node_in = ['cible']
parameters = [
{
'name': 'Variable',
'key': 'target',
'type': 'integer'
}
]
def run(self):
"Execute the filter"
target = self._model.config('target')
value = len(self._flux_in['cible']['headers'])
        self._registery.set(target, value)
|
BWallet/sx
|
src/obelisk/models.py
|
Python
|
agpl-3.0
| 3,035
| 0.003295
|
import bitcoin
import struct
import serialize
class BlockHeader:
def __init__(self):
self.height = None
@classmethod
def deserialize(cls, raw):
assert len(raw) == 80
self = cls()
self.version = struct.unpack('<I', raw[:4])[0]
self.previous_block_hash = raw[4:36][::-1]
assert len(self.previous_block_hash) == 32
self.merkle = raw[36:68][::-1]
assert len(self.merkle) == 32
self.timestamp, self.bits, self.nonce = struct.unpack('<III', raw[68:])
return self
@property
def hash(self):
data = struct.pack('<I', self.version)
data += self.previous_block_hash[::-1]
data += self.merkle[::-1]
data += struct.pack('<III', self.timestamp, self.bits, self.nonce)
return bitcoin.Hash(data)[::-1]
def __repr__(self):
        return '<BlockHeader %s>' % (self.hash.encode("hex"),)
class OutPoint(object):
def __init__(self):
self.hash = None
self.index = None
def is_null(self):
return (len(self.hash) == 0) and (self.index == 0xffffffff)
def __repr__(self):
return "OutPoint(hash=%s, index=%i)" % (self.hash.encode("hex"), self.index)
    def serialize(self):
return serialize.ser_output_point(self)
@staticmethod
def deserialize(bytes):
return serialize.deser_output_point(bytes)
class TxOut(object):
def __init__(self):
self.value = None
self.script = ""
def __repr__(self):
return "TxOut(value=%i.%08i script=%s)" % (self.value // 100000000, self.value % 100000000, self.script.encode("hex"))
def serialize(self):
return serialize.ser_txout(self)
@staticmethod
def deserialize(bytes):
return serialize.deser_txout(bytes)
class TxIn(object):
def __init__(self):
self.previous_output = OutPoint()
self.script = ""
self.sequence = 0xffffffff
def is_final(self):
return self.sequence == 0xffffffff
def __repr__(self):
return "TxIn(previous_output=%s script=%s sequence=%i)" % (repr(self.previous_output), self.script.encode("hex"), self.sequence)
def serialize(self):
return serialize.ser_txin(self)
@staticmethod
def deserialize(bytes):
return serialize.deser_txin(bytes)
class Transaction:
def __init__(self):
self.version = 1
self.locktime = 0
self.inputs = []
self.outputs = []
def is_final(self):
for tin in self.vin:
if not tin.is_final():
return False
return True
def is_coinbase(self):
return len(self.vin) == 1 and self.vin[0].prevout.is_null()
def __repr__(self):
return "Transaction(version=%i inputs=%s outputs=%s locktime=%i)" % (self.version, repr(self.inputs), repr(self.outputs), self.locktime)
def serialize(self):
return serialize.ser_tx(self)
@staticmethod
def deserialize(bytes):
return serialize.deser_tx(bytes)
|
pmitche/it3105-aiprogramming
|
project1/common/constraintnet.py
|
Python
|
mit
| 304
| 0.009868
|
__author__ = 'sondredyvik'
class ConstraintNet:
def __init__(self):
self.constraints = {}
def add_constraint(self,key,constraint):
if key in self.constraints:
self.constraints[key].append(constraint)
else:
            self.constraints[key] = [constraint]
|
FreedomCoop/valuenetwork
|
valuenetwork/api/schemas/ProcessClassification.py
|
Python
|
agpl-3.0
| 887
| 0.002255
|
#
# Graphene schema for exposing ProcessClassification model
#
import graphene
from valuenetwork.valueaccounting.models import ProcessType
from valuenetwork.api.types.Process import ProcessClassification
from valuenetwork.api.types.EconomicEvent import Action
from django.db.models import Q
class Query(object): #graphene.AbstractType):
    process_classification = graphene.Field(ProcessClassification,
                                            id=graphene.Int())
all_process_classifications = graphene.List(ProcessClassification)
def resolve_process_classification(self, args, *rargs):
id = args.get('id')
if id is not None:
pt = ProcessType.objects.get(pk=id)
if pt:
                return pt
return None
def resolve_all_process_classifications(self, args, context, info):
return ProcessType.objects.all()
|
FlaskGuys/Flask-Imagine
|
flask_imagine/adapters/filesystem.py
|
Python
|
mit
| 4,244
| 0.001178
|
"""
This module implement a filesystem storage adapter.
"""
from __future__ import unicode_literals
import errno
import logging
import os
from flask import current_app
from .interface import ImagineAdapterInterface
from PIL import Image
LOGGER = logging.getLogger(__name__)
class ImagineFilesystemAdapter(ImagineAdapterInterface):
"""
Filesystem storage adapter
"""
source_folder = None
cache_folder = None
def __init__(self, **kwargs):
"""
Init _adapter
:param kwargs: parameters
:return:
"""
self.source_folder = kwargs.get('source_folder', '').strip('/')
self.cache_folder = kwargs.get('cache_folder', 'cache').strip('/')
def get_item(self, path):
"""
Get resource item
:param path: string
:return: PIL.Image
"""
if self.source_folder:
item_path = '%s/%s/%s' % (
current_app.static_folder,
self.source_folder,
path.strip('/')
)
else:
item_path = '%s/%s' % (
current_app.static_folder,
path.strip('/')
)
if os.path.isfile(item_path):
try:
return Image.open(item_path)
except IOError as err:
LOGGER.warning('File not found on path "%s" with error: %s' % (item_path, str(err)))
return False
else:
return False
def create_cached_item(self, path, content):
"""
Create cached resource item
:param path: str
:param content: Image
:return: str
"""
if isinstance(content, Image.Image):
item_path = '%s/%s/%s' % (
current_app.static_folder,
self.cache_folder,
path.strip('/')
)
self.make_dirs(item_path)
content.save(item_path)
if os.path.isfile(item_path):
return '%s/%s/%s' % (current_app.static_url_path, self.cache_folder, path.strip('/'))
else: # pragma: no cover
LOGGER.warning('File is not created on path: %s' % item_path)
return False
else:
return False
def get_cached_item(self, path):
"""
Get cached resource item
:param path: str
:return: PIL.Image
"""
item_path = '%s/%s/%s' % (
current_app.static_folder,
self.cache_folder,
path.strip('/')
)
if os.path.isfile(item_path):
try:
return Image.open(item_path)
except IOError as err: # pragma: no cover
                LOGGER.warning('Cached file not found on path "%s" with error: %s' % (item_path, str(err)))
return False
else:
return False
    def check_cached_item(self, path):
"""
Check for cached resource item exists
:param path: str
:return: bool
"""
item_path = '%s/%s/%s' % (
current_app.static_folder,
self.cache_folder,
path.strip('/')
)
if os.path.isfile(item_path):
return '%s/%s/%s' % (current_app.static_url_path, self.cache_folder, path.strip('/'))
else:
return False
def remove_cached_item(self, path):
"""
Remove cached resource item
:param path: str
:return: bool
"""
item_path = '%s/%s/%s' % (
current_app.static_folder,
self.cache_folder,
path.strip('/')
)
if os.path.isfile(item_path):
os.remove(item_path)
return True
@staticmethod
def make_dirs(path):
"""
Create directories if not exist
:param path: string
:return:
"""
try:
os.makedirs(os.path.dirname(path))
except OSError as err:
if err.errno != errno.EEXIST:
LOGGER.error('Failed to create directory %s with error: %s' % (path, str(err)))
raise
|
picoCTF/picoCTF-Platform-2
|
api/api/app.py
|
Python
|
mit
| 2,943
| 0.008155
|
"""
Flask routing
"""
from flask import Flask, request, session, send_from_directory, render_template
from werkzeug.contrib.fixers import ProxyFix
app = Flask(__name__, static_path="/")
app.wsgi_app = ProxyFix(app.wsgi_app)
import api
import json
import mimetypes
import os.path
from datetime import datetime
from api.common import WebSuccess, WebError
from api.annotations import api_wrapper, require_login, require_teacher, require_admin, check_csrf
from api.annotations import block_before_competition, block_after_competition
from api.annotations import log_action
import api.routes.autogen
import api.routes.user
import api.routes.team
import api.routes.stats
import api.routes.admin
import api.routes.group
import api.routes.problem
import api.routes.achievements
log = api.logger.use(__name__)
session_cookie_domain = "127.0.0.1"
session_cookie_path = "/"
session_cookie_name = "flask"
secret_key = ""
def config_app(*args, **kwargs):
"""
Return the app object configured correctly.
This needed to be done for gunicorn.
"""
app.secret_key = secret_key
app.config["SESSION_COOKIE_DOMAIN"] = session_cookie_domain
app.config["SESSION_COOKIE_PATH"] = session_cookie_path
app.config["SESSION_COOKIE_NAME"] = session_cookie_name
app.register_blueprint(api.routes.autogen.blueprint, url_prefix="/api/autogen")
app.register_blueprint(api.routes.user.blueprint, url_prefix="/api/user")
app.register_blueprint(api.routes.team.blueprint, url_prefix="/api/team")
app.register_blueprint(api.routes.stats.blueprint, url_prefix="/api/stats")
    app.register_blueprint(api.routes.admin.blueprint, url_prefix="/api/admin")
app.register_blueprint(api.routes.group.blueprint, url_prefix="/api/group")
app.register_blueprint(api.routes.problem.blueprint, url_prefix="/api/problems")
app.register_blueprint(api.routes.achievements.blueprint, url_prefix="/api/achievements")
api.logger.setup_logs({"verbose": 2})
return app
@app.after_request
def after_request(response):
response.headers.add('Access-Control-Allow-Methods', 'GET, POST')
response.headers.add('Access-Control-Allow-Credentials', 'true')
response.headers.add('Access-Control-Allow-Headers', 'Content-Type, *')
response.headers.add('Cache-Control', 'no-cache')
response.headers.add('Cache-Control', 'no-store')
if api.auth.is_logged_in():
if 'token' in session:
response.set_cookie('token', session['token'])
else:
csrf_token = api.common.token()
session['token'] = csrf_token
response.set_cookie('token', csrf_token)
# JB: This is a hack. We need a better solution
if request.path[0:19] != "/api/autogen/serve/":
        response.mimetype = 'application/json'
return response
@app.route('/api/time', methods=['GET'])
@api_wrapper
def get_time():
return WebSuccess(data=int(datetime.utcnow().timestamp()))
|
MIT-Model-Open-Data-and-Identity-System/SensibleData-Platform
|
sensible_data_platform/sensible_data_platform/settings.py
|
Python
|
mit
| 6,429
| 0.004977
|
# Django settings for sensible_data_platform project.
import os
import LOCAL_SETTINGS
from utils import SECURE_platform_config
DEBUG = True
TEMPLATE_DEBUG = DEBUG
MAINTENANCE_MODE = False
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
BASE_DIR = LOCAL_SETTINGS.BASE_DIR
ROOT_DIR = LOCAL_SETTINGS.ROOT_DIR
ROOT_URL = LOCAL_SETTINGS.ROOT_URL
DATABASES = LOCAL_SETTINGS.DATABASES
BASE_URL = LOCAL_SETTINGS.BASE_URL
TRUST_ROOTS = LOCAL_SETTINGS.TRUST_ROOTS
PLATFORM_NAME = LOCAL_SETTINGS.PLATFORM_NAME
SUPPORT_EMAIL = LOCAL_SETTINGS.SUPPORT_EMAIL
EMAIL_HOST = LOCAL_SETTINGS.EMAIL_HOST
EMAIL_PORT = LOCAL_SETTINGS.EMAIL_PORT
EMAIL_HOST_USER = SECURE_platform_config.EMAIL_HOST_USER
EMAIL_HOST_PASSWORD = SECURE_platform_config.EMAIL_HOST_PASSWORD
DEFAULT_FROM_EMAIL = LOCAL_SETTINGS.DEFAULT_FROM_EMAIL
EMAIL_USE_TLS = LOCAL_SETTINGS.EMAIL_USE_TLS
MAINTENANCE_IGNORE_URLS = (
r'^.*/admin/$',
)
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
LOGIN_URL = ROOT_URL + 'accounts/login/'
LOGIN_REDIRECT_URL = ROOT_URL
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'da'
LANGUAGES = (
('da', 'Danish'),
('en', 'English'),
)
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ROOT_DIR+'static_root/'
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = ROOT_URL+'static/'
# Additional locations of static files
STATICFILES_DIRS = (
ROOT_DIR+'static/',
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = LOCAL_SETTINGS.SECRET_KEY
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'maintenancemode.middleware.MaintenanceModeMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.static',
'django.core.context_processors.i18n',
'django.contrib.auth.context_processors.auth',
'sensible_data_platform.context_processors.platform',
)
ROOT_URLCONF = 'sensible_data_platform.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'sensible_data_platform.wsgi.application'
TEMPLATE_DIRS = (
ROOT_DIR+'templates',
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'openid_provider',
'accounts',
'render',
'identity_providers',
'utils',
'oauth2app',
'oauth2_authorization_server',
'uni_form',
'service_manager',
'south',
'sensible_platform_documents',
'password_reset',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
import hashlib
SESSION_COOKIE_NAME = str(hashlib.sha1(SECRET_KEY).hexdigest())
LOCALE_PATHS = (
'/home/arks/sensibledtu_DEVEL/SensibleData-Platform/sensible_data_platform/locale',
)
|
SandStormHoldings/ScratchDocs
|
pg.py
|
Python
|
mit
| 12,792
| 0.021029
|
from __future__ import print_function
from builtins import object
import psycopg2
import psycopg2.extras
from gevent.lock import BoundedSemaphore as Semaphore
from gevent.local import local as gevent_local
from config import PG_DSN,DONESTATES
from gevent import sleep
# migration stuff
import json,re
from datetime import datetime,date
import decimal
class ConnectionPool(object):
def __init__(self, dsn, max_con=12, max_idle=3,
connection_factory=psycopg2.extras.RealDictConnection):
self.dsn = dsn
self.max_con = max_con
self.max_idle = max_idle
self.connection_factory = connection_factory
self._sem = Semaphore(max_con)
self._free = []
self._local = gevent_local()
def __enter__(self):
self._sem.acquire()
try:
if getattr(self._local, 'con', None) is not None:
con = self._local.con
print('WARNING: returning existing connection (re-entered connection pool)!')
if self._free:
con = self._free.pop()
else:
con = psycopg2.connect(
dsn=self.dsn, connection_factory=self.connection_factory)
self._local.con = con
return con
except: # StandardError:
#print('releasing')
self._sem.release()
#print('released')
raise
def __exit__(self, exc_type, exc_value, traceback):
try:
if self._local.con is None:
raise RuntimeError("Exit connection pool with no connection?")
if exc_type is not None:
self.rollback()
else:
self.commit()
if len(self._free) < self.max_idle:
self._free.append(self._local.con)
self._local.con = None
finally:
self._sem.release()
#print('released')
def commit(self):
self._local.con.commit()
def rollback(self):
self._local.con.rollback()
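# Minimal usage sketch (assumes PG_DSN points at a reachable database): the pool is a
# context manager that hands out one connection per greenlet, commits on clean exit and
# rolls back when the block raises.
#   pool = ConnectionPool(PG_DSN, max_con=4)
#   with pool as con:
#       cur = con.cursor()
#       cur.execute("select 1")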
def connect():
#raise Exception('from where')
pg = psycopg2.connect(PG_DSN)
pgc = pg.cursor(cursor_factory=psycopg2.extras.DictCursor)
pg.set_client_encoding('utf-8')
return pg,pgc
def json_serial(obj):
"""JSON serializer for objects not serializable by default json code"""
if isinstance(obj, datetime):
serial = obj.isoformat()
elif isinstance(obj,decimal.Decimal):
serial = float(obj)
elif isinstance(obj,set):
serial = list(obj)
elif isinstance(obj,date):
serial = obj.isoformat()
else:
raise Exception(type(obj))
return serial
raise TypeError ("Type not serializable")
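# Example use (made-up payload):
#   json.dumps({'when': datetime.now(), 'price': decimal.Decimal('1.5')}, default=json_serial)
# serializes the datetime as an ISO-format string and the Decimal as a float.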
def get_journals(P,C,assignee=None,metastate_group='merge',archive=False):
qry = "select * from tasks where 1=1" #journal_entries where 1=1"
args=[]
if assignee=='all': assignee=None
if assignee:
qry+=" and contents->>'assignee'=%s"
args.append(assignee)
if metastate_group:
if metastate_group!='production':
if not archive: #and t['status'] in cfg.DONESTATES: continue
qry+=" and contents->>'status' not in %s"
args.append(tuple(DONESTATES))
elif archive: #and t['status'] not in cfg.DONESTATES: continue
qry+=" and contents->>'status' in %s"
args.append(tuple(DONESTATES))
else:
raise Exception('wtf')
args = tuple(args) ;
C.execute(qry,args)
rt=[]
for r in C.fetchall():
rt.append(r)
return rt
def journal_digest(j):
"""prepare a digest of the journal's most recent state."""
rt={}
for i in j:
cat = i['created_at']
jc = i['content']
ja = i['creator']
for k,v in list(i['attrs'].items()):
            if type(cat)!=str:
cat = cat.strftime('%Y-%m-%dT%H:%I:%S')
if k not in rt: rt[k]={'created_at':cat}
#print('about to compare',type(rt[k]['created_at']),'with',type(cat))
if rt[k]['created_at']<=cat:
                rt[k]['created_at']=cat
rt[k]['value']=v
return rt
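# Illustrative trace (made-up entries): when two entries touch the same attr key, the
# newest one (ISO timestamps compare correctly as strings) wins.
#   journal_digest([
#       {'created_at': '2020-01-01T00:00:00', 'creator': 'a', 'content': '', 'attrs': {'status': 'open'}},
#       {'created_at': '2020-02-01T00:00:00', 'creator': 'b', 'content': '', 'attrs': {'status': 'done'}},
#   ])  # -> {'status': {'created_at': '2020-02-01T00:00:00', 'value': 'done'}}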
def validate_save(C,tid,fetch_stamp,exc=True):
C.execute("select changed_at,changed_by from tasks where id=%s",(tid,))
res = C.fetchone()
if res and fetch_stamp and res['changed_at'] and res.get('changed_by')!='notify-trigger':
eq = res['changed_at']==fetch_stamp
if exc:
assert eq,"task %s: fetch stamp!=changed_at by %s (%s , %s)"%(tid,res.get('changed_by'),fetch_stamp,res and res['changed_at']) or None
else:
return eq,res['changed_at'],res['changed_by']
return True,res and res.get('changed_at') or None,res and res.get('changed_by') or None
def migrate_one(t,pgc,fetch_stamp=None,user=None):
td={}
tid = t._id
parid = "/".join(tid.split("/")[0:-1])
if not parid: parid=None
for k in t.__dict__:
if k not in ['_dynamic_properties','_doc']:
if t.__dict__[k] is not None:
assert k not in td,"%s already exists with value %s (!= %s) for %s"%(k,td[k],t.__dict__[k],t._id)
td[k]=t.__dict__[k]
if 'journal' in td and len(td['journal']):
td['journal_digest']=journal_digest(td['journal'])
tdj = json.dumps(td,default=json_serial)
pgc.execute("select * from tasks where id=%s",(tid,))
res = pgc.fetchone()
if not res:
op='ins'
qry = "insert into tasks (contents,parent_id,changed_at,changed_by,id) values(%s,%s,%s,%s,%s)"
chat=datetime.now() ; chatf='now'
suser = user
else:
excont = res['contents']
nwcont = json.loads(tdj)
# exf = open('/tmp/ex.json','w') ; exf.write(json.dumps(excont)) ; exf.close()
# nwf = open('/tmp/nw.json','w') ; nwf.write(json.dumps(nwcont)) ; nwf.close()
if nwcont==excont and user not in ['notify-trigger']:
chat = res['changed_at'] ; chatf='existing'
suser = res['changed_by']
else:
chatf='now'
chat = datetime.now() ;
suser = user
#raise Exception(type(nwcont),type(excont),len(nwcont),len(excont),nwcont==excont)
op='upd'
qry = "update tasks set contents=%s,parent_id=%s,changed_at=%s,changed_by=%s where id=%s"
data = (tdj,parid,chat,suser,t._id)
#print qry,data
print((op,t._id,parid))
pgc.execute(qry,data)
# -- create table tasks (id varchar primary key, parent_id varchar references tasks(id) , contents json);
def get_repos(C):
C.execute("select name from repos")
res = C.fetchall()
return [r['name'] for r in res]
def get_usernames(C):
C.execute("select username from participants where active=true order by username")
res = C.fetchall()
return [r['username'] for r in res]
def hasperm_db(C,perm,user):
qry = "select count(*) cnt from participants where username=%s and %s=any(perms) and active=true"
C.execute(qry,(perm,user))
o = C.fetchone()
rt = o['cnt'] and True or False
return rt
def hasperm(perms,perm):
rt = perm in perms
#print(qry,(user,perm),'=>',rt)
return rt
def get_participants(C,sort=True,disabled=False):
qry = "select * from participants "
if not disabled: qry+=" where active=true "
if sort: qry+=" order by username"
C.execute(qry)
rt = {}
for r in C.fetchall():
if r['username'] not in rt: rt[r['username']]={}
for k in list(r.keys()):
rt[r['username']][k]=r[k]
return rt #dict([(r['username'],dict([(k,r[k]) for k in r.keys()])) for r in C.fetchall()])
def get_all_journals(C,day=None,creator=None):
qry = "select * from journal_entries where 1=1 "
cnd=[]
if day:
qry+=" and created_at::date between %s and %s"
cnd.append(day[0]) ; cnd.append(day[1])
if creator:
qry+=" and creator=%s"
cnd.append(creator)
C.execute(qry,cnd)
jes = C.fetchall()
return [{'creator':je['creator'],
'content':je['cnt'],
'attrs':je['attrs
|
imk1/IMKTFBindingCode
|
makeSequenceInputsKMerMotifCounts.py
|
Python
|
mit
| 12,089
| 0.020928
|
import sys
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from Bio.Alphabet import generic_dna
from Bio import SeqIO
import numpy
import math
import itertools as it
MATERNALFASTAFILENAMELISTFILENAME = sys.argv[1]
PATERNALFASTAFILENAMELISTFILENAME = sys.argv[2]
PEAKHEIGHTFILENAME = sys.argv[3]
VALUEFILENAME = sys.argv[4]
OUTPUTFILENAME = sys.argv[5]
K = int(sys.argv[6])
WINDOWSIZE = int(sys.argv[7])
WINDOWSTRIDE = int(sys.argv[8])
MOTIFLISTFILENAME = sys.argv[9]
def makeSequenceList():
# Make a list of groups of 4 bases
baseList = ["A", "C", "G", "T"]
sequenceList = []
allSameList = []
for baseOne in baseList:
# Iterate through the first base options
for baseTwo in baseList:
# Iterate through the second base options
for baseThree in baseList:
# Iterate through the third base options
for baseFour in baseList:
# Iterate through the fourth base options
currentSequence = baseOne + baseTwo + baseThree + baseFour
sequenceList.append(currentSequence)
if ((baseOne + baseTwo == baseThree + baseFour) or (baseOne + baseTwo == baseFour + baseThree)) or ((baseTwo + baseOne == baseThree + baseFour) or (baseTwo + baseOne == baseFour + baseThree)):
# The individuals are the same at the current base
allSameList.append(1)
else:
# The individuals are not the same at the current base
allSameList.append(0)
return [sequenceList, allSameList]
def filterSequenceList(sequenceList, allSameList, base):
# Filter sequences to include only those that contain the base
sequenceListFiltered = []
allSameListFiltered = []
for i in range(len(sequenceList)):
# Iterate through the sequences and choose only those that contain the base
if base in sequenceList[i]:
# The base is in the current sequence, so use it
sequenceListFiltered.append(sequenceList[i])
allSameListFiltered.append(allSameList[i])
return [sequenceListFiltered, allSameListFiltered]
def makeKMerDictFromMotifFile():
# Make a dictionary that maps K-mers from a motif file to numbers
# MAP ONLY VARIABLE K-mers
motifListFile = open(MOTIFLISTFILENAME)
kMerDict = {}
count = 0
[sequenceList, allSameList] = makeSequenceList()
for line in motifListFile:
# Iterate through the motifs and make an entry in the dictionary for each 4 x K combination of each motif
motif = line.strip()
sequences = []
allSame = []
for base in motif:
# Iterate through the motif bases and select 4-base sequences that contain the base in the motif
[sequenceListFiltered, allSameListFiltered] = filterSequenceList(sequenceList, allSameList, base)
if len(sequences) == 0:
# At the first base, so add each sequence
sequences = sequenceListFiltered
allSame = allSameListFiltered
else:
# At a later base, so find all combinations of it and earlier bases
lastSequences = sequences
lastAllSameList = allSame
sequences = []
allSame = []
for i in range(len(lastSequences)):
# Iterate through the existing sequences and add each new one to it
for j in range(len(sequenceListFiltered)):
# Iterate through the new sequences and append each
sequences.append(lastSequences[i] + sequenceListFiltered[j])
allSame.append(lastAllSameList[i] * allSameListFiltered[j])
for i in range(len(sequences)):
# Iterate through the sequences and add each new one that is not all the same to the dictionary
if (allSame[i] == 0) and (sequences[i] not in kMerDict.keys()):
# The sequence is new and not all the same, so add it to the dictionary
kMerDict[sequences[i]] = count
count = count + 1
motifListFile.close()
return kMerDict
def getFastaList(fastaFileList):
# Get the next fasta from each file in a list
fastaList = []
for fastaFile in fastaFileList:
# Iterate through the fasta files and begin parsing each
fastaName = fastaFile.readline().strip()
if fastaName != "":
# Not at the end of the fasta file
DNASequenceRecord = SeqRecord(Seq(fastaFile.readline().strip(), generic_dna), name = fastaName)
fastaList.append(DNASequenceRecord)
else:
# At the end of the fasta file, so stop
break
return fastaList
def makeValueFileEntries(valueFile, peakHeightLineElements, peakHeightColA, peakHeightColB):
# Make the value entries
# The 1st 8 are the fold change from individual A to individual B, and the next 8 are the fold change from individual B to individual A
FCA = int(peakHeightLineElements[peakHeightColA]) - int(peakHeightLineElements[peakHeightColB])
for i in range(8):
# Record the fold change from individual A to individual B 8 times for the 8 examples
valueFile.write(str(FCA) + "\n")
    FCB = int(peakHeightLineElements[peakHeightColB]) - int(peakHeightLineElements[peakHeightColA])
for i in range(8):
# Record the fold change from individual B to individual A 8 times for the 8 examples
valueFile.write(str(FCB) + "\n")
def makeReverseComplements(seqRecordMaternalA, seqRecordPaternalA, seqRecordMaternalB, seqRecordPaternalB):
# Make the reverse complements of all of the sequences
seqReverseMaternalA = seqRecordMaternalA.seq.reverse_complement().upper()
seqReversePaternalA = seqRecordPaternalA.seq.reverse_complement().upper()
seqReverseMaternalB = seqRecordMaternalB.seq.reverse_complement().upper()
seqReversePaternalB = seqRecordPaternalB.seq.reverse_complement().upper()
return [seqReverseMaternalA, seqReversePaternalA, seqReverseMaternalB, seqReversePaternalB]
def recordKMerCounts(kMerCounts, outputFile):
# Record the k-mer counts in the windows of the sequence to the output file
for count in kMerCounts:
# Iterate through the k-mer counts and record each in the output file
outputFile.write(str(count) + "\t")
outputFile.write("\n")
def getKMerFeatures(sequenceOneA, sequenceTwoA, sequenceOneB, sequenceTwoB, outputFile, kMerDict, WINDOWSIZE, WINDOWSTRIDE):
# Convert the fasta sequence pair to the (4 x 4) x (4 x 4) alphabet and record its k-mer counts
# ASSUMES THAT sequenceOneA sequenceTwoA, sequenceOneB, and sequenceTwoB ARE THE SAME LENGTH (no indels)
# THE END OF THE SEQUENCE WILL GET CUT OFF IF THE WINDOWS DO NOT EXACTLY ENCOMPASS THE SEQUENCE
numWindows = math.trunc(float(len(sequenceOneA) - WINDOWSIZE)/float(WINDOWSTRIDE)) + 1
    kMerCounts = numpy.zeros(numWindows*len(kMerDict))
for l in range(numWindows):
# Iterate through the windows and get the k-mer counts in each
windowStart = l * WINDOWSTRIDE
windowEnd = windowStart + WINDOWSIZE
for i in range(windowStart, windowEnd - K + 1):
# Iterate through the k-mers in the current window mark a 1 in the appropriate k-mer
sequenceToLookUp = ""
for j in range(K):
# Iterate through the bases in the k-mer and make the sequence combination that represents it
                sequenceToLookUp += sequenceOneA[i + j] + sequenceTwoA[i + j] + sequenceOneB[i + j] + sequenceTwoB[i + j]
if sequenceToLookUp not in kMerDict.keys():
# The sequence has a wild card or is not in a motif, so skip it
continue
            kMerCounts[(l * len(kMerDict)) + kMerDict[sequenceToLookUp]] += 1
recordKMerCounts(kMerCounts, outputFile)
def makeFourFeatureCounts(seqRecordMaternalA, seqRecordPaternalA, seqRecordMaternalB, seqRecordPaternalB, outputFile, kMerDict, WINDOWSIZE, WINDOWSTRIDE):
# Make four sets of sequence features for two sequence pairs
# Sequence set 1: (MaternalA, PaternalA), (MaternalB, PaternalB)
# Sequence set 2: (MaternalA, PaternalA), (PaternalB, MaternalB)
# Sequence set 3: (PaternalA, MaternalA), (MaternalB, PaternalB)
# Sequence set 4: (PaternalA, MaternalA), (PaternalB, MaternalB)
getKMerFeatures(seqRecordMaternalA, seqRecordPaternalA, seqRecordMaternalB, seqRecordPaternalB, outputFile, kMerDict, WINDOWSIZE, WINDOWSTRIDE)
getKMerFeatures(seqRecordMaternalA, seqRecordPaternalA, seqRecordPaternalB, seqRecordMaternalB, outputFile, kMerDict, WINDOWSIZE, WINDOWSTRIDE)
    getKMerFeatures(seqRecordPaternalA, seqRecordMaternalA, seqRecordMaternalB, seqRecordPaternalB, outputFile, kMerDict, WINDOWSIZE, WINDOWSTRIDE)
    getKMerFeatures(seqRecordPaternalA, seqRecordMaternalA, seqRecordPaternalB, seqRecordMaternalB, outputFile, kMerDict, WINDOWSIZE, WINDOWSTRIDE)
|
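
The windowed count vector in getKMerFeatures is laid out window-major, so the count for k-mer index m observed in window l lands at offset l * len(kMerDict) + m. A minimal standalone sketch of that layout (sizes are invented, nothing here comes from the repository):

# Sketch only: window-major layout of per-window k-mer counts (invented sizes).
import numpy as np

num_windows = 3
num_kmers = 4            # stands in for len(kMerDict)
counts = np.zeros(num_windows * num_kmers)

def bump(window, kmer_index):
    # Same offset arithmetic as getKMerFeatures uses for kMerCounts.
    counts[window * num_kmers + kmer_index] += 1

bump(0, 2)
bump(2, 1)
print(counts.reshape(num_windows, num_kmers))   # row l holds the counts for window l
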
moble/spherical_functions
|
spherical_functions/SWSH_grids/utilities.py
|
Python
|
mit
| 2,746
| 0.006191
|
# Copyright (c) 2020, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/spherical_functions/blob/master/LICENSE>
### NOTE: The functions in this file are intended purely for inclusion in the Grid class. In
### particular, they assume that the first argument, `self` is an instance of Grid. They should
### probably not be used outside of that class.
def modes(self, ell_max=None, **kwargs):
"""Return mode weights of function decomposed into SWSHs
This method uses `spinsfast` to convert values on an equiangular grid to mode weights.
The output array has one less dimension than this object; rather than the last two axes giving
the values on the two-dimensional grid, the last axis gives the mode weights.
Parameters
==========
ell_max: None or int [defaults to None]
Maximum ell value in the output. If None, the result will have enough ell values to express
the data on the grid without aliasing: (max(n_phi, n_theta) - 1) // 2.
**kwargs: any types
Additional keyword arguments are passed through to the Modes constructor on output
"""
import copy
import numpy as np
import spinsfast
from .. import Modes
    ell_max = ell_max or (max(self.n_phi, self.n_theta) - 1) // 2
    metadata = copy.copy(kwargs)
return Modes(spinsfast.map2salm(self.view(np.ndarray), self.s, ell_max),
spin_weight=self.s, ell_min=0, ell_max=ell_max, **metadata)
def _check_broadcasting(self, array, reverse=False):
"""Test whether or not the given array can broadcast against this object"""
import numpy as np
if isinstance(array, type(self)):
try:
if reverse:
np.broadcast(array, self)
else:
np.broadcast(self, array)
except ValueError:
return False
else:
return True
else:
if np.ndim(array) > np.ndim(self)-2:
raise ValueError(f"Cannot broadcast array of {np.ndim(array)} dimensions against {type(self).__name__} "
f"object of fewer ({np.ndim(self)-2}) non-grid dimensions.\n"
|
"This is to ensure that scalars do not operate on individual "
"grid va
|
lues; they must operate on all simultaneously.\n"
"If that is the case and you still want to broadcast, add more "
"dimensions before this object's first dimension.")
try:
if reverse:
np.broadcast(array, self[..., 0, 0])
else:
np.broadcast(self[..., 0, 0], array)
except ValueError:
return False
else:
return True
|
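
_check_broadcasting leans on numpy raising ValueError from np.broadcast when two shapes are incompatible. A small standalone illustration of that behaviour, using plain numpy rather than spherical_functions objects:

import numpy as np

def shapes_broadcast(a, b):
    # Same try/except pattern as _check_broadcasting.
    try:
        np.broadcast(a, b)
    except ValueError:
        return False
    return True

print(shapes_broadcast(np.zeros((3, 1)), np.zeros((1, 4))))   # True: (3, 1) and (1, 4) broadcast to (3, 4)
print(shapes_broadcast(np.zeros(3), np.zeros(4)))             # False: lengths 3 and 4 are incompatible
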
nijel/weblate
|
weblate/metrics/apps.py
|
Python
|
gpl-3.0
| 906
| 0
|
#
# Copyright © 2012–2022 Michal Čihař <[email protected]>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundat
|
ion, either version 3 of the Licen
|
se, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
from django.apps import AppConfig
class MetricsConfig(AppConfig):
name = "weblate.metrics"
label = "metrics"
verbose_name = "Metrics"
|
RayleighChen/Improve
|
Project/python/PyLDA-Apriori/m3.py
|
Python
|
mit
| 2,169
| 0.033195
|
# -*- coding: utf-8 -*-
import sys
# Generate the candidate set C1
# return: dict whose key is the itemset and whose value is the number of times the item occurs
def getC1(srcdata):
c1 = {}
for transaction in srcdata:
for item in transaction:
            key = frozenset([item])  # only a frozenset can be used as a dict key
            # count the item
if key in c1:
c1[key] = c1[key] + 1
else:
c1[key] = 1
return c1
# return: the candidate itemsets that satisfy the minimum support
def getL(c, supct):
    # Remove items whose count is below the minimum support
for key in [item for item in c if c[item] < supct]:
del c[key]
#if c != {}:
#print c
return c
# Generate the next candidate set C from the previous L
# Scan the source data and count each candidate itemset
def getnextcandi(preL, srcdata):
c = {}
for key1 in preL:
for key2 in preL:
if key1 != key2:
                # Cartesian product of preL with itself
key = key1.union(key2)
c[key] = 0
    # count each candidate itemset against the transactions
for i in srcdata:
for item in c:
if item.issubset(i):
c[item] = c[item] + 1
return c
# The Apriori algorithm
def Apriori(filename, supct):
    # Read the data file
    # File format: one transaction per line; items within a transaction are space-separated
srcdata = [line.strip("\n").split(" ") for line in file(filename)]
c = getC1(srcdata)
L = getL(c, supct)
c = getnextcandi(L, srcdata)
return c
if __name__
|
== "__main__":
if len(sys.argv) == 3:
        # Usage: apri.py filename support
items = Apriori(sys.argv[1], int(sys.argv[2]))
for key in [item for item in items if items[item] < int(sys.argv[2])]:
del items[key]
ap = {}
for itor in items:
#print items[itor]
#print itor
strword = ''
for w
|
ord in itor:
strword += word + " "
ap[strword.strip(' ')] = items[itor]
linelst = sorted(ap.items(), lambda x, y: cmp(x[1], y[1]), reverse=True)
for i in range(len(linelst)):
print "#" + str(linelst[i][1]) + " " + linelst[i][0]
#for (k, v) in ap.items():
#print "#" + str(v) + " " + k
else:
#for example
print "err args"
|
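
The helpers above can be exercised without a data file. A minimal sketch, assuming getC1, getL and getnextcandi are importable from the module; the tiny transaction list is invented for illustration:

# Sketch only: two Apriori passes over an in-memory transaction list.
transactions = [['bread', 'milk'],
                ['bread', 'beer', 'eggs'],
                ['milk', 'beer'],
                ['bread', 'milk', 'beer']]

c1 = getC1(transactions)             # candidate 1-itemsets with their counts
l1 = getL(c1, 2)                     # keep itemsets with support count >= 2
c2 = getnextcandi(l1, transactions)  # candidate 2-itemsets, counted against the data
for itemset, count in getL(c2, 2).items():
    print("%s %d" % (sorted(itemset), count))
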
ringcentral/python-sdk
|
demo_sms.py
|
Python
|
mit
| 609
| 0.00821
|
#!/usr/bin/env python
# encoding: utf-8
import urllib
from config
|
import USERNAME, EXTENSION, PASSWORD, APP_KEY, APP_SECRET, SERVER, MOBILE
from ringcentral import SDK
def main():
sdk = SDK(APP_KEY, APP_SECRET, SERVER)
platform = sdk.platform()
platform.login(USERNAME, EXTENSION, PASSWORD)
    to_number =
|
"1234567890"
params = {'from': {'phoneNumber': USERNAME},'to': [{'phoneNumber': to_number}],'text': "SMS message"}
response = platform.post('/restapi/v1.0/account/~/extension/~/sms', params)
print 'Sent SMS: ' + response.json().uri
if __name__ == '__main__':
main()
|
jemromerol/apasvo
|
apasvo/gui/views/loaddialog.py
|
Python
|
gpl-3.0
| 5,689
| 0.000879
|
# encoding: utf-8
'''
@author: Jose Emilio Romero Lopez
@copyright: Copyright 2013-2014, Jose Emilio Romero Lopez.
@license: GPL
@contact: [email protected]
This file is part of APASVO.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from PySide import QtGui
from apasvo.gui.views.generated import ui_loaddialog
from apasvo.utils.formats import rawfile
FORMATS = {'Autodetect': None,
'Binary': rawfile.format_binary,
'Text': rawfile.format_text,
}
DEFAULT_FORMAT = 'Autodetect'
DTYPES = (rawfile.datatype_int16,
rawfile.datatype_int32,
rawfile.datatype_int64,
rawfile.datatype_float16,
rawfile.datatype_float32,
rawfile.datatype_float64, )
DTYPES_LABELS = ('16 bits, PCM',
'32 bits, PCM',
'64 bits, PCM',
'16 bits, float',
'32 bits, float',
'64 bits, float', )
BYTEORDERS = (rawfile.byteorder_little_endian,
rawfile.byteorder_big_endian)
class LoadDialog(QtGui.QDialog, ui_loaddialog.Ui_LoadDialog):
"""A dialog window to load seismic data stored in a binary or text file.
Allows the user to choose several settings in order to load a seismic
signal, i.e.:
Format: Binary or text format.
    Data-type: Int16, Int32, Int64, Float16, Float32 or Float64,
Endianness: Little-endian or big-endian.
Sample rate.
The class also infers the right parameters for the chosen file and shows
a preview of the loaded data for the selected parameters.
Attributes:
filename: Name of the opened file.
"""
def __init__(self, parent, filename):
super(LoadDialog, self).__init__(parent)
self.setupUi(self)
self.FileFormatComboBox.currentIndexChanged.connect(self.on_format_change)
self.FileFormatComboBox.currentIndexChanged.connect(self.load_preview)
self.DataTypeComboBox.currentIndexChanged.connect(self.load_preview)
self.ByteOrderComboBox.currentIndexChanged.connect(self.load_preview)
# init file format combobox
self.FileFormatComboBox.addItems(FORMATS.keys())
self.FileFormatComboBox.setCurrentIndex(FORMATS.keys().index(DEFAULT_FORMAT))
# init datatype combobox
self.DataTypeComboBox.addItems(DTYPES_LABELS)
self.DataTypeComboBox.setCurrentIndex(DTYPES.index(rawfile.datatype_float64))
self.filename = filename
self.load_preview()
def on_format_change(self, idx):
"""Updates UI after toggling the format value."""
fmt = FORMATS[self.FileFormatComboBox.currentText()]
if fmt == rawfile.format_binary:
self.DataTypeComboBox.setVisible(True)
self.DataTypeLabel.setVisible(True)
self.ByteOrderComboBox.setVisible(True)
self.ByteOrderLabel.setVisible(True)
self.groupBox_2.setVisible(True)
self.SampleFrequencySpinBox.setVisible(True)
self.SampleFrequencyLabel.setVisible(True)
elif fmt == rawfile.format_text:
self.DataTypeComboBox.setVisible(False)
self.DataTypeLabel.setVisible(False)
self.ByteOrderComboBox.setVisible(False)
self.ByteOrderLabel.set
|
Visible(False)
self.groupBox_2.setVisible(True)
self.SampleFrequencySpinBox.setVisible(True)
self.SampleFrequencyLabel.setVisible(True)
else:
self.DataTypeComboBox.setVisible(False)
self.DataTypeLabel.setVisible(False)
self.ByteOrderComboBox.setVisible(False)
sel
|
f.ByteOrderLabel.setVisible(False)
self.groupBox_2.setVisible(False)
self.SampleFrequencySpinBox.setVisible(False)
self.SampleFrequencyLabel.setVisible(False)
self.groupBox.adjustSize()
self.adjustSize()
def load_preview(self):
"""Shows a preview of loaded data using the selected parameters."""
# Load parameters
values = self.get_values()
try:
# Set up a file handler according to the type of raw data (binary or text)
fhandler = rawfile.get_file_handler(self.filename, **values)
# Print data preview
array = fhandler.read_in_blocks().next()
data = ''
for x in array:
data += ("%g\n" % x)
except:
data = '*** There was a problem reading the file content ***'
self.buttonBox.button(QtGui.QDialogButtonBox.Ok).setEnabled(False)
else:
self.buttonBox.button(QtGui.QDialogButtonBox.Ok).setEnabled(True)
self.PreviewTextEdit.clear()
self.PreviewTextEdit.setText(data)
def get_values(self):
"""Gets selected parameters."""
return {'fmt': FORMATS[self.FileFormatComboBox.currentText()],
'dtype': DTYPES[self.DataTypeComboBox.currentIndex()],
'byteorder': BYTEORDERS[self.ByteOrderComboBox.currentIndex()],
'fs': float(self.SampleFrequencySpinBox.value())}
|
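
The format, dtype and endianness choices collected by this dialog describe how raw samples are decoded. Outside of apasvo the same decode is essentially one numpy call; a self-contained sketch with an invented file name and sample rate:

import numpy as np

# Write a little-endian float64 test signal, then read it back the way the dialog would.
np.arange(5, dtype='<f8').tofile('signal.bin')   # file name invented for this sketch
samples = np.fromfile('signal.bin', dtype=np.dtype('<f8'))
fs = 50.0                                        # sample rate, as from SampleFrequencySpinBox
print(samples, len(samples) / fs, 'seconds')
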
mackorone/euler
|
src/025.py
|
Python
|
mit
| 155
| 0.006452
|
from fibonacci import Fibonacci
def ans():
return Fibonacci.index(Fibonacci.after(int('9' * 999)))
|
if __name__ == '__main__':
pr
|
int(ans())
|
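
The fibonacci helper module imported above is not shown. An equivalent self-contained computation of the index of the first 1000-digit Fibonacci term (Project Euler 25) might look like this:

def first_index_with_digits(n_digits):
    # 1-based Fibonacci indexing: F(1) = F(2) = 1.
    a, b, index = 1, 1, 2
    limit = 10 ** (n_digits - 1)          # smallest number with n_digits digits
    while b < limit:
        a, b = b, a + b
        index += 1
    return index

print(first_index_with_digits(1000))      # expected 4782
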
kdani3/searx
|
utils/fetch_currencies.py
|
Python
|
agpl-3.0
| 4,394
| 0.009558
|
# -*- coding: utf-8 -*-
import json
import re
import unicodedata
import string
from urllib import urlencode
|
from requests import get
languages = {'de', 'en', 'es', 'fr', 'hu', 'it', 'nl', 'jp'}
url_template = 'https://www.wikidata.org/w/api.php?action=wbgetentities&format=json&{query}&props=labels%7Cdatatype%7Cclaims%7Caliases&languages=' + '|'.join(languages)
url_wmflabs_template = 'http://wdq.wmflabs.org/api?q='
url_wikidata_search_template='http://www.wikidata.org/w/api.php?action=query&list=search&format=json&srnamespace=0&srprop=sectionti
|
tle&{query}'
wmflabs_queries = [
'CLAIM[31:8142]', # all devise
]
db = {
'iso4217' : {
},
'names' : {
}
}
def remove_accents(data):
return unicodedata.normalize('NFKD', data).lower()
def normalize_name(name):
return re.sub(' +',' ', remove_accents(name.lower()).replace('-', ' '))
def add_currency_name(name, iso4217):
global db
db_names = db['names']
if not isinstance(iso4217, basestring):
print "problem", name, iso4217
return
name = normalize_name(name)
if name == '':
print "name empty", iso4217
return
iso4217_set = db_names.get(name, None)
if iso4217_set is not None and iso4217 not in iso4217_set:
db_names[name].append(iso4217)
else:
db_names[name] = [ iso4217 ]
def add_currency_label(label, iso4217, language):
global db
db['iso4217'][iso4217] = db['iso4217'].get(iso4217, {})
db['iso4217'][iso4217][language] = label
def get_property_value(data, name):
prop = data.get('claims', {}).get(name, {})
if len(prop) == 0:
return None
value = prop[0].get('mainsnak', {}).get('datavalue', {}).get('value', '')
if value == '':
return None
return value
def parse_currency(data):
iso4217 = get_property_value(data, 'P498')
if iso4217 is not None:
unit = get_property_value(data, 'P558')
if unit is not None:
add_currency_name(unit, iso4217)
labels = data.get('labels', {})
for language in languages:
name = labels.get(language, {}).get('value', None)
if name != None:
add_currency_name(name, iso4217)
add_currency_label(name, iso4217, language)
aliases = data.get('aliases', {})
for language in aliases:
for i in range(0, len(aliases[language])):
alias = aliases[language][i].get('value', None)
add_currency_name(alias, iso4217)
def fetch_data(wikidata_ids):
url = url_template.format(query=urlencode({'ids' : '|'.join(wikidata_ids)}))
htmlresponse = get(url)
jsonresponse = json.loads(htmlresponse.content)
entities = jsonresponse.get('entities', {})
for pname in entities:
pvalue = entities.get(pname)
parse_currency(pvalue)
def add_q(i):
return "Q" + str(i)
def fetch_data_batch(wikidata_ids):
while len(wikidata_ids) > 0:
if len(wikidata_ids) > 50:
            fetch_data(wikidata_ids[0:50])
wikidata_ids = wikidata_ids[50:]
else:
fetch_data(wikidata_ids)
wikidata_ids = []
def wdq_query(query):
url = url_wmflabs_template + query
htmlresponse = get(url)
jsonresponse = json.loads(htmlresponse.content)
qlist = map(add_q, jsonresponse.get('items', {}))
error = jsonresponse.get('status', {}).get('error', None)
if error != None and error != 'OK':
print "error for query '" + query + "' :" + error
fetch_data_batch(qlist)
def wd_query(query, offset=0):
qlist = []
url = url_wikidata_search_template.format(query=urlencode({'srsearch': query, 'srlimit': 50, 'sroffset': offset}))
htmlresponse = get(url)
jsonresponse = json.loads(htmlresponse.content)
for r in jsonresponse.get('query', {}).get('search', {}):
qlist.append(r.get('title', ''))
fetch_data_batch(qlist)
## fetch ##
for q in wmflabs_queries:
wdq_query(q)
# static
add_currency_name(u"euro", 'EUR')
add_currency_name(u"euros", 'EUR')
add_currency_name(u"dollar", 'USD')
add_currency_name(u"dollars", 'USD')
add_currency_name(u"peso", 'MXN')
add_currency_name(u"pesos", 'MXN')
# write
f = open("currencies.json", "wb")
json.dump(db, f, indent=4, encoding="utf-8")
f.close()
|
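
fetch_data_batch works through the id list in batches of at most 50 ids, the cap the code assumes for each wbgetentities request. A generic chunking helper makes that kind of batching hard to get wrong; a sketch, not searx code:

def chunks(items, size=50):
    # Yield consecutive, non-overlapping slices of at most `size` elements.
    for start in range(0, len(items), size):
        yield items[start:start + size]

ids = ["Q%d" % i for i in range(1, 124)]
for batch in chunks(ids):
    print(len(batch))   # 50, 50, 23 -- every id ends up in exactly one batch
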
timj/scons
|
test/Variables/ListVariable.py
|
Python
|
mit
| 5,013
| 0.000798
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test the ListVariable canned Variable type.
"""
import os
import TestSCons
test = TestSCons.TestSCons()
SConstruct_path = test.workpath('SConstruct')
def check(expect):
result = test.stdout().split('\n')
r = result[1:len(expect)+1]
assert r == expect, (r, expect)
test.write(SConstruct_path, """\
from SCons.Variables.ListVariable import ListVariable
LV = ListVariable
from SCons.Variables import ListVariable
list_of_libs = Split('x11 gl qt ical')
optsfile = 'scons.variables'
opts = Variables(optsfile, args=ARGUMENTS)
opts.AddVariables(
ListVariable('shared',
'libraries to build as shared libraries',
'all',
names = list_of_libs,
map = {'GL':'gl', 'QT':'qt'}),
LV('listvariable', 'listvariable help', 'all', names=['l1', 'l2', 'l3'])
)
env = Environment(variables=opts)
opts.Save(optsfile, env)
Help(opts.GenerateHelpText(env))
print(env['shared'])
if 'ical' in env['shared']:
print('1')
else:
print('0')
print(" ".join(env['shared']))
print(env.subst('$shared'))
# Test subst_path() because it's used in $CPPDEFINES expansions.
print(env.subst_path('$shared'))
Default(env.Alias('dummy', None))
""")
test.run()
check(['all', '1', 'gl ical qt x11', 'gl ical qt x11',
"['gl ical qt x11']"
|
])
expect = "shared = 'all'"+os.linesep+"listvariable = 'all'"+os.linesep
test.must_match(test.workpath('scons.variables'), expect)
|
check(['all', '1', 'gl ical qt x11', 'gl ical qt x11',
"['gl ical qt x11']"])
test.run(arguments='shared=none')
check(['none', '0', '', '', "['']"])
test.run(arguments='shared=')
check(['none', '0', '', '', "['']"])
test.run(arguments='shared=x11,ical')
check(['ical,x11', '1', 'ical x11', 'ical x11',
"['ical x11']"])
test.run(arguments='shared=x11,,ical,,')
check(['ical,x11', '1', 'ical x11', 'ical x11',
"['ical x11']"])
test.run(arguments='shared=GL')
check(['gl', '0', 'gl', 'gl'])
test.run(arguments='shared=QT,GL')
check(['gl,qt', '0', 'gl qt', 'gl qt', "['gl qt']"])
expect_stderr = """
scons: *** Error converting option: shared
Invalid value(s) for option: foo
""" + test.python_file_line(SConstruct_path, 19)
test.run(arguments='shared=foo', stderr=expect_stderr, status=2)
# be paranoid in testing some more combinations
expect_stderr = """
scons: *** Error converting option: shared
Invalid value(s) for option: foo
""" + test.python_file_line(SConstruct_path, 19)
test.run(arguments='shared=foo,ical', stderr=expect_stderr, status=2)
expect_stderr = """
scons: *** Error converting option: shared
Invalid value(s) for option: foo
""" + test.python_file_line(SConstruct_path, 19)
test.run(arguments='shared=ical,foo', stderr=expect_stderr, status=2)
expect_stderr = """
scons: *** Error converting option: shared
Invalid value(s) for option: foo
""" + test.python_file_line(SConstruct_path, 19)
test.run(arguments='shared=ical,foo,x11', stderr=expect_stderr, status=2)
expect_stderr = """
scons: *** Error converting option: shared
Invalid value(s) for option: foo,bar
""" + test.python_file_line(SConstruct_path, 19)
test.run(arguments='shared=foo,x11,,,bar', stderr=expect_stderr, status=2)
test.write('SConstruct', """
from SCons.Variables import ListVariable
opts = Variables(args=ARGUMENTS)
opts.AddVariables(
ListVariable('gpib',
'comment',
['ENET', 'GPIB'],
names = ['ENET', 'GPIB', 'LINUX_GPIB', 'NO_GPIB']),
)
env = Environment(variables=opts)
Help(opts.GenerateHelpText(env))
print(env['gpib'])
Default(env.Alias('dummy', None))
""")
test.run(stdout=test.wrap_stdout(read_str="ENET,GPIB\n", build_str="""\
scons: Nothing to be done for `dummy'.
"""))
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
mcansky/cabotapp
|
cabot/urls.py
|
Python
|
mit
| 6,860
| 0.008017
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from cabot.cabotapp.views import (
run_status_check, graphite_api_data, checks_run_recently,
duplicate_icmp_check, duplicate_graphite_check, duplicate_http_check, duplicate_jenkins_check,
duplicate_instance, acknowledge_alert, remove_acknowledgement,
GraphiteCheckCreateView, GraphiteCheckUpdateView,
HttpCheckCreateView, HttpCheckUpdateView,
ICMPCheckCreateView, ICMPCheckUpdateView,
JenkinsCheckCreateView, JenkinsCheckUpdateView,
StatusCheckDeleteView, StatusCheckListView, StatusCheckDetailView,
StatusCheckResultDetailView, StatusCheckReportView, UserProfileUpdateAlert)
from cabot.cabotapp.views import (InstanceListView, InstanceDetailView,
InstanceUpdateView, InstanceCreateView, InstanceDeleteView,
ServiceListView, ServiceDetailView,
ServiceUpdateView, ServiceCreateView, Ser
|
viceDeleteView,
UserProfileUpdateView, ShiftListView, subscriptions)
from cabot import rest_urls
from django.contrib import admin
from django.views.generic.base import RedirectView
from django.contrib.auth.views import login, logout, password_rese
|
t, password_reset_done, password_reset_confirm
admin.autodiscover()
from importlib import import_module
import logging
logger = logging.getLogger(__name__)
urlpatterns = patterns('',
url(r'^$', view=RedirectView.as_view(url='services/', permanent=False),
name='dashboard'),
url(r'^subscriptions/', view=subscriptions,
name='subscriptions'),
url(r'^accounts/login/', view=login, name='login'),
url(r'^accounts/logout/', view=logout, name='logout'),
url(r'^accounts/password-reset/',
view=password_reset, name='password-reset'),
url(r'^accounts/password-reset-done/',
view=password_reset_done, name='password-reset-done'),
url(r'^accounts/password-reset-confirm/',
view=password_reset_confirm, name='password-reset-confirm'),
url(r'^status/', view=checks_run_recently,
name='system-status'),
url(r'^services/', view=ServiceListView.as_view(),
name='services'),
url(r'^service/create/', view=ServiceCreateView.as_view(),
name='create-service'),
url(r'^service/update/(?P<pk>\d+)/',
view=ServiceUpdateView.as_view(
), name='update-service'),
url(r'^service/delete/(?P<pk>\d+)/',
view=ServiceDeleteView.as_view(
), name='delete-service'),
url(r'^service/(?P<pk>\d+)/',
view=ServiceDetailView.as_view(), name='service'),
url(r'^service/acknowledge_alert/(?P<pk>\d+)/',
view=acknowledge_alert, name='acknowledge-alert'),
url(r'^service/remove_acknowledgement/(?P<pk>\d+)/',
view=remove_acknowledgement, name='remove-acknowledgement'),
url(r'^instances/', view=InstanceListView.as_view(),
name='instances'),
url(r'^instance/create/', view=InstanceCreateView.as_view(),
name='create-instance'),
url(r'^instance/update/(?P<pk>\d+)/',
view=InstanceUpdateView.as_view(
), name='update-instance'),
url(r'^instance/duplicate/(?P<pk>\d+)/',
view=duplicate_instance, name='duplicate-instance'),
url(r'^instance/delete/(?P<pk>\d+)/',
view=InstanceDeleteView.as_view(
), name='delete-instance'),
url(r'^instance/(?P<pk>\d+)/',
view=InstanceDetailView.as_view(), name='instance'),
url(r'^checks/$', view=StatusCheckListView.as_view(),
name='checks'),
url(r'^check/run/(?P<pk>\d+)/',
view=run_status_check, name='run-check'),
url(r'^check/delete/(?P<pk>\d+)/',
view=StatusCheckDeleteView.as_view(
), name='delete-check'),
url(r'^check/(?P<pk>\d+)/',
view=StatusCheckDetailView.as_view(), name='check'),
url(r'^checks/report/$',
view=StatusCheckReportView.as_view(), name='checks-report'),
url(r'^icmpcheck/create/', view=ICMPCheckCreateView.as_view(),
name='create-icmp-check'),
url(r'^icmpcheck/update/(?P<pk>\d+)/',
view=ICMPCheckUpdateView.as_view(
), name='update-icmp-check'),
url(r'^icmpcheck/duplicate/(?P<pk>\d+)/',
view=duplicate_icmp_check, name='duplicate-icmp-check'),
url(r'^graphitecheck/create/',
view=GraphiteCheckCreateView.as_view(
), name='create-graphite-check'),
url(r'^graphitecheck/update/(?P<pk>\d+)/',
view=GraphiteCheckUpdateView.as_view(
), name='update-graphite-check'),
url(r'^graphitecheck/duplicate/(?P<pk>\d+)/',
view=duplicate_graphite_check, name='duplicate-graphite-check'),
url(r'^httpcheck/create/', view=HttpCheckCreateView.as_view(),
name='create-http-check'),
url(r'^httpcheck/update/(?P<pk>\d+)/',
view=HttpCheckUpdateView.as_view(
), name='update-http-check'),
url(r'^httpcheck/duplicate/(?P<pk>\d+)/',
view=duplicate_http_check, name='duplicate-http-check'),
url(r'^jenkins_check/create/', view=JenkinsCheckCreateView.as_view(),
name='create-jenkins-check'),
url(r'^jenkins_check/update/(?P<pk>\d+)/',
view=JenkinsCheckUpdateView.as_view(
), name='update-jenkins-check'),
url(r'^jenkins_check/duplicate/(?P<pk>\d+)/',
view=duplicate_jenkins_check, name='duplicate-jenkins-check'),
url(r'^result/(?P<pk>\d+)/',
view=StatusCheckResultDetailView.as_view(
), name='result'),
url(r'^shifts/', view=ShiftListView.as_view(),
name='shifts'),
url(r'^graphite/', view=graphite_api_data,
name='graphite-data'),
url(r'^user/(?P<pk>\d+)/profile/$',
view=UserProfileUpdateView.as_view(), name='user-profile'),
url(r'^user/(?P<pk>\d+)/profile/(?P<alerttype>.+)',
view=UserProfileUpdateAlert.as_view(
), name='update-alert-user-data'),
url(r'^admin/', include(admin.site.urls)),
# Comment below line to disable browsable rest api
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api/', include(rest_urls.router.urls)),
)
def append_plugin_urls():
"""
Appends plugin specific URLs to the urlpatterns variable.
"""
global urlpatterns
for plugin in settings.CABOT_PLUGINS_ENABLED_PARSED:
try:
_module = import_module('%s.urls' % plugin)
except Exception as e:
pass
else:
urlpatterns += patterns('',
url(r'^plugins/%s/' % plugin, include('%s.urls' % plugin))
)
append_plugin_urls()
|
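
append_plugin_urls relies on import_module raising for a plugin that ships no urls module, in which case the plugin is silently skipped. A standalone illustration of that pattern with invented module names:

from importlib import import_module

for name in ('os', 'definitely_not_an_installed_plugin'):
    try:
        module = import_module(name)
    except ImportError:
        print('skipping %s: nothing to import' % name)
    else:
        print('loaded %s' % module.__name__)
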
sosuke-k/cornel-movie-dialogs-corpus-storm
|
mdcorpus/tests/test_orm.py
|
Python
|
mit
| 4,351
| 0.002758
|
"""Testing for ORM"""
from unittest import TestCase
import nose
from nose.tools import eq_
from sets import Set
from mdcorpus.orm import *
class ORMTestCase(TestCase):
def setUp(self):
self.store = Store(create_database("sqlite:"))
self.store.execute(MovieTitlesMetadata.CREATE_SQL)
self.store.execute(MovieCharactersMetadata.CREATE_SQL)
self.store.execute(RawScriptUrl.CREATE_SQL)
self.store.execute(MovieConversation.CREATE_SQL)
self.store.execute(MovieLine.CREATE_SQL)
movie = self.store.add(MovieTitlesMetadata(0,
u"10 things i hate about you",
1999,
6.90,
62847))
bianca = self.store.add(MovieCharactersMetadata(0,
"BIANCA",
"f",
4))
bruce = self.store.add(MovieCharactersMetadata(1,
"BRUCE",
"?",
"?"))
cameron = self.store.add(MovieCharactersMetadata(2,
"CAMERON",
"m",
"3"))
url = self.store.add(RawScriptUrl("http://www.dailyscript.com/scripts/10Things.html"))
conversation = self.store.add(MovieConversation(0, 2, 0))
line194 = self.store.add(MovieLine(
194, "Can we make this quick? Roxanne Korrine and Andrew Barrett are having an incredibly horrendous public break- up on the quad. Again."))
line195 = self.store.add(MovieLine(
195, "Well, I thought we'd start with pronunciation, if that's okay with you."))
line196 = self.store.add(MovieLine(
196, "Not the hacking and gagging and spitting part. Please."))
line197 = self.store.add(MovieLine(
197, "Okay... then how 'bout we try out some French cuisine. Saturday? Night?"))
self.store.flush()
|
movie.characters.add(bianca)
movie.characters.add(bruce)
movie.characters.add(cameron)
url.movie = movie
line_id_list = [194, 195, 196, 197]
for (i, line_id) in enumerate(line_id_list):
line = self.store.find(MovieLine, MovieLine.id == line_id).one()
line.number = i + 1
|
conversation.lines.add(line)
self.store.commit()
def tearDown(self):
print "done"
class MovieTitlesMetadataTestCase(ORMTestCase):
@nose.with_setup(ORMTestCase.setUp, ORMTestCase.tearDown)
def test_url(self):
movie = self.store.find(MovieTitlesMetadata, MovieTitlesMetadata.id == 0).one()
eq_(movie.url(), "http://www.dailyscript.com/scripts/10Things.html")
class MovieCharactersMetadataTestCase(ORMTestCase):
@nose.with_setup(ORMTestCase.setUp, ORMTestCase.tearDown)
def test_gender(self):
bianca = self.store.find(MovieCharactersMetadata, MovieCharactersMetadata.id == 0).one()
bruce = self.store.find(MovieCharactersMetadata, MovieCharactersMetadata.id == 1).one()
cameron = self.store.find(MovieCharactersMetadata, MovieCharactersMetadata.id == 2).one()
eq_(bianca.gender(), "f")
eq_(bruce.gender(), "?")
eq_(cameron.gender(), "m")
class MovieConversationTestCase(ORMTestCase):
@nose.with_setup(ORMTestCase.setUp, ORMTestCase.tearDown)
def test_consistency(self):
conversation = self.store.find(MovieConversation, MovieConversation.id == 1).one()
eq_(conversation.first_character.movie.title, conversation.movie.title)
eq_(conversation.second_character.movie.title, conversation.movie.title)
@nose.with_setup(ORMTestCase.setUp, ORMTestCase.tearDown)
def test_line_list(self):
conversation = self.store.find(MovieConversation, MovieConversation.id == 1).one()
line_ids = [line.id for line in conversation.line_list()]
eq_(line_ids, [194, 195, 196, 197])
|
patricklaw/pants
|
src/python/pants/core/register.py
|
Python
|
apache-2.0
| 1,821
| 0.000549
|
# Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
"""Core rules for Pants to operate correctly.
These are always activated and cannot be disabled.
"""
from pants.core.goals import check, fmt, lint, package, publish, repl, run, tailor, test
from pants.core.target_types import (
ArchiveTarget,
|
FilesGeneratorTarget,
FileTarget,
GenericTarget,
RelocatedFiles,
ResourcesGeneratorTarget,
ResourceTarget,
)
from pants.core.target_types import rules as target_type_rules
from pants.core.util_rules import (
archive,
config_files,
distdir,
external_tool,
filter_empty_sources,
pants_bin,
source_files,
stripped_source_files,
subprocess_environment,
)
from pants.goal import anonymous_telemetry, stats_aggregator
from pants.source i
|
mport source_root
def rules():
return [
# goals
*check.rules(),
*fmt.rules(),
*lint.rules(),
*package.rules(),
*publish.rules(),
*repl.rules(),
*run.rules(),
*tailor.rules(),
*test.rules(),
# util_rules
*anonymous_telemetry.rules(),
*archive.rules(),
*config_files.rules(),
*distdir.rules(),
*external_tool.rules(),
*filter_empty_sources.rules(),
*pants_bin.rules(),
*source_files.rules(),
*source_root.rules(),
*stats_aggregator.rules(),
*stripped_source_files.rules(),
*subprocess_environment.rules(),
*target_type_rules(),
]
def target_types():
return [
ArchiveTarget,
FileTarget,
FilesGeneratorTarget,
GenericTarget,
ResourceTarget,
ResourcesGeneratorTarget,
RelocatedFiles,
]
|
botswana-harvard/microbiome
|
microbiome/apps/mb_maternal/managers/maternal_arv_post_mod_manager.py
|
Python
|
gpl-2.0
| 736
| 0.002717
|
from django.db import models
class MaternalArvPostModManager(models.Manager):
def get_by_natural_key(
self, arv_code, report_datetime, visit_instance, appt_status,
visit_definition_code, subject_identifier_as_pk):
MaternalVisit = models.get_model('mb_maternal', 'MaternalVisit')
MaternalArvPost = models.get_model('mb_maternal', 'MaternalArvPost')
maternal_visit = MaternalVisit.objects.get_by_natural_key(
report_datetime, visit_instance, appt_status, visit_definition_code, subje
|
ct_identifier_as_pk)
maternal_arv_post = MaternalArvPost.objects.get(maternal_visit=
|
maternal_visit)
return self.get(arv_code=arv_code, maternal_arv_post=maternal_arv_post)
|
alexryndin/ambari
|
ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hcat.py
|
Python
|
apache-2.0
| 2,284
| 0.011384
|
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions
|
and
limitations under the License.
"""
from resource_management import *
import sys
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
from ambari_commons import OSConst
@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
def hcat():
import params
XmlConfig("hive-site.xml",
conf_dir = params.hive_conf_dir,
|
configurations = params.config['configurations']['hive-site'],
owner=params.hive_user,
configuration_attributes=params.config['configuration_attributes']['hive-site']
)
@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def hcat():
import params
Directory(params.hive_conf_dir,
create_parents = True,
owner=params.hcat_user,
group=params.user_group,
)
Directory(params.hcat_conf_dir,
create_parents = True,
owner=params.hcat_user,
group=params.user_group,
)
Directory(params.hcat_pid_dir,
owner=params.webhcat_user,
create_parents = True
)
XmlConfig("hive-site.xml",
conf_dir=params.hive_client_conf_dir,
configurations=params.hive_site_config,
configuration_attributes=params.config['configuration_attributes']['hive-site'],
owner=params.hive_user,
group=params.user_group,
mode=0644)
File(format("{hcat_conf_dir}/hcat-env.sh"),
owner=params.hcat_user,
group=params.user_group,
content=InlineTemplate(params.hcat_env_sh_template)
)
|
django-id/website
|
app_forum/tests/test_forms.py
|
Python
|
mit
| 685
| 0.008759
|
from django.test import TestCase
from app_forum.models import Forum, Comment
from app_forum.
|
forms import CommentForm, ThreadForm
# test for forms
class CommentFormTest(TestCase):
def test_comment_forms(self):
form_data = {
'comment_content' : 'comment'
}
form = CommentForm(data=for
|
m_data)
self.assertTrue(form.is_valid())
class ThreadFormTest(TestCase):
def test_thread_forms(self):
thread_data = {
'forum_title' : 'title',
'forum_category' : 'category',
'forum_content' : 'content'
}
thread = ThreadForm(data=thread_data)
self.assertFalse(thread.is_valid())
|
olivierverdier/homogint
|
homogint/homogint.py
|
Python
|
mit
| 1,957
| 0.00511
|
#!/usr/bin/env python
# coding: UTF-8
from __future__ import division
import numpy as np
def left_multiplication(g, x):
"""
Multiplication action of a group and a vector.
"""
return np.dot(g, x)
def trans_adjoint(g, x):
return np.dot(np.dot(g,x),g.T)
class RungeKutta(object):
def __init__(self, method):
self.method = method
self.movement = self.method.movement
self.nb_stages = len(self.method.edges) + 1
|
def compute_vectors(self, movement_field, stages):
"""
|
Compute the Lie algebra elements for the stages.
"""
return np.array([movement_field(stage) for stage in stages])
def get_iterate(self, movement_field, action):
def evol(stages):
new_stages = stages.copy()
for (i,j, transition) in self.method.edges:
# inefficient as a) only some vectors are needed b) recomputed for each edge
vects = self.compute_vectors(movement_field, new_stages)
# the order of the edges matters; the goal is that explicit method need only one iteration
new_stages[i] = action(self.movement(transition(vects)), new_stages[j])
return new_stages
return evol
@classmethod
def fix(self, iterate, z):
"""
Find a fixed point to the iterating function `iterate`.
"""
for i in range(30):
new_z = iterate(z)
if np.allclose(z, new_z, atol=1e-10, rtol=1e-16):
break
z = new_z
else:
raise Exception("No convergence after {} steps".format(i))
return z, i
def step(self, movement_field, x0, action=None):
if action is None:
action = left_multiplication
iterate = self.get_iterate(movement_field, action)
z0 = np.array([x0]*self.nb_stages) # initial guess
z, i = self.fix(iterate, z0)
return z[-1]
|
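
The fix classmethod is a plain fixed-point iteration, so it can be sanity-checked on a scalar contraction with no Lie-group machinery at all. A sketch, assuming the RungeKutta class above is importable:

import numpy as np

def newton_sqrt2(z):
    # Newton's update for sqrt(2): a fast contraction, so it settles well inside the 30-iteration cap.
    return 0.5 * (z + 2.0 / z)

root, iterations = RungeKutta.fix(newton_sqrt2, np.float64(1.0))
print(root, iterations)   # ~1.41421356, after a handful of iterations
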
taejoonlab/taejoonlab-toolbox
|
PopGen/phy2bmp.py
|
Python
|
gpl-3.0
| 248
| 0.008065
|
from ete3 import Tree,TreeStyle,TextFace
t = Tree('tagfrog.phy')
for node in t.traverse():
node.img_style['size'] = 3
if node.is_leaf():
|
name_face = TextFace(node.name)
ts = TreeStyle()
ts.show_scale = True
t.render('tag
|
frog.pdf')
|
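
In the snippet above, name_face and ts are created but never attached or applied. A possible completion, assuming the standard ete3 API (add_face and the tree_style argument to render) and the same input file:

from ete3 import Tree, TreeStyle, TextFace

t = Tree('tagfrog.phy')
for node in t.traverse():
    node.img_style['size'] = 3
    if node.is_leaf():
        # Attach the label face so the leaf name is actually drawn.
        node.add_face(TextFace(node.name), column=0, position='branch-right')

ts = TreeStyle()
ts.show_scale = True
t.render('tagfrog.pdf', tree_style=ts)   # pass the style so show_scale takes effect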