| repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (stringlengths 0-8.16k) | middle (stringlengths 3-512) | suffix (stringlengths 0-8.17k) |
|---|---|---|---|---|---|---|---|---|
Kraymer/keroaek | keroaek/vlc.py | Python | mit | 292,112 | 0.005402 |
#! /usr/bin/python
# Python ctypes bindings for VLC
#
# Copyright (C) 2009-2012 the VideoLAN team
# $Id: $
#
# Authors: Olivier Aubert <contact at olivieraubert.net>
# Jean Brouwers <MrJean1 at gmail.com>
# Geoff Salmon <geoff.salmon at gmail.com>
#
# This library is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 2.1 of the
# License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
"""This module provides bindings for the LibVLC public API, see
U{http://wiki.videolan.org/LibVLC}.
You can find the documentation and a README file with some examples
at U{http://www.advene.org/download/python-ctypes/}.
Basically, the most important class is L{Instance}, which is used
to create a libvlc instance. From this instance, you then create
L{MediaPlayer} and L{MediaListPlayer} instances.
Alternatively, you may create instances of the L{MediaPlayer} and
L{MediaListPlayer} class directly and an instance of L{Instance}
will be implicitly created. The latter can be obtained using the
C{get_instance} method of L{MediaPlayer} and L{MediaListPlayer}.
"""
import ctypes
from ctypes.util import find_library
import os
import sys
import functools
# Used by EventManager in override.py
from inspect import getargspec
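# Illustrative sketch (not part of the original file): based on the module
# docstring above, typical use of these bindings might look like the commented
# example below. The media path and the MediaPlayer constructor argument are
# assumptions for illustration only.
#
#   import vlc
#   player = vlc.MediaPlayer('example.mp4')  # an Instance is created implicitly
#   instance = player.get_instance()         # fetch the implicitly created Instance
#   player.play()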
__version__ = "N/A"
build_date = "Wed Apr 1 21:28:00 2015"
if sys.version_info[0] > 2:
str = str
unicode = str
bytes = bytes
basestring = (str, bytes)
PYTHON3 = True
def str_to_bytes(s):
"""Translate string or bytes to bytes.
"""
if isinstance(s, str):
return bytes(s, sys.getfilesystemencoding())
else:
return s
def bytes_to_str(b):
"""Translate bytes to string.
"""
if isinstance(b, bytes):
return b.decode(sys.getfilesystemencoding())
else:
return b
else:
str = str
unicode = unicode
bytes = str
basestring = basestring
PYTHON3 = False
def str_to_bytes(s):
"""Translate string or bytes to bytes.
"""
if isinstance(s, unicode):
return s.encode(sys.getfilesystemencoding())
else:
return s
def bytes_to_str(b):
"""Translate bytes to unicode string.
"""
if isinstance(b, str):
return unicode(b, sys.getfilesystemencoding())
else:
return b
# Internal guard to prevent internal classes from being directly
# instantiated.
_internal_guard = object()
def find_lib():
dll = None
plugin_path = None
if sys.platform.startswith('linux'):
p = find_library('vlc')
try:
dll = ctypes.CDLL(p)
except OSError: # may fail
dll = ctypes.CDLL('libvlc.so.5')
elif sys.platform.startswith('win'):
p = find_library('libvlc.dll')
if p is None:
try: # some registry settings
# leaner than win32api, win32con
if PYTHON3:
import winreg as w
else:
import _winreg as w
for r in w.HKEY_LOCAL_MACHINE, w.HKEY_CURRENT_USER:
try:
r = w.OpenKey(r, 'Software\\VideoLAN\\VLC')
plugin_path, _ = w.QueryValueEx(r, 'InstallDir')
w.CloseKey(r)
break
except w.error:
pass
except ImportError: # no PyWin32
pass
if plugin_path is None:
# try some standard locations.
for p in ('Program Files\\VideoLan\\', 'VideoLan\\',
'Program Files\\', ''):
p = 'C:\\' + p + 'VLC\\libvlc.dll'
if os.path.exists(p):
plugin_path = os.path.dirname(p)
break
if plugin_path is not None: # try loading
p = os.getcwd()
os.chdir(plugin_path)
# if chdir failed, this will raise an exception
dll = ctypes.CDLL('libvlc.dll')
# restore cwd after dll has been loaded
os.chdir(p)
else: # may fail
dll = ctypes.CDLL('libvlc.dll')
else:
plugin_path = os.path.dirname(p)
dll = ctypes.CDLL(p)
elif sys.platform.startswith('darwin'):
# FIXME: should find a means to configure path
d = '/Applications/VLC.app/Contents/MacOS/'
p = d + 'lib/libvlc.dylib'
if os.path.exists(p):
dll = ctypes.CDLL(p)
d += 'modules'
if os.path.isdir(d):
plugin_path = d
else: # hope, some PATH is set...
dll = ctypes.CDLL('libvlc.dylib')
else:
raise NotImplementedError('%s: %s not supported' % (sys.argv[0], sys.platform))
return (dll, plugin_path)
# plugin_path used on win32 and MacOS in override.py
dll, plugin_path = find_lib()
class VLCException(Exception):
"""Exception raised by libvlc methods.
"""
pass
try:
_Ints = (int, long)
except NameError: # no long in Python 3+
_Ints = int
_Seqs = (list, tuple)
# Used for handling *event_manager() methods.
class memoize_parameterless(object):
"""Decorator. Caches a parameterless method's return value each time it is called.
If called later with the same arguments, the cached value is returned
(not reevaluated).
Adapted from https://wiki.python.org/moin/PythonDecoratorLibrary
"""
def __init__(self, func):
self.func = func
self._cache = {}
def __call__(self, obj):
try:
return self._cache[obj]
except KeyError:
v = self._cache[obj] = self.func(obj)
return v
def __repr__(self):
"""Return the function's docstring.
"""
return self.func.__doc__
def __get__(self, obj, objtype):
"""Support instance methods.
"""
return functools.partial(self.__call__, obj)
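# Illustrative sketch (not part of the original file): the decorator above
# caches a parameterless method's return value per instance, e.g. for a
# hypothetical class (both names below are made up):
#
#   class Example(object):
#       @memoize_parameterless
#       def event_manager(self):
#           return build_event_manager(self)  # evaluated once per Example instance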
# Default instance. It is used to instantiate classes directly in the
# OO-wrapper.
_default_instance = None
def get_default_instance():
"""Return the default VLC.Instance.
"""
global _default_instance
if _default_instance is None:
_default_instance = Instance()
return _default_instance
_Cfunctions = {} # from LibVLC __version__
_Globals = globals() # sys.modules[__name__].__dict__
def _Cfunction(name, flags, errcheck, *types):
"""(INTERNAL) New ctypes function binding.
"""
if hasattr(dll, name) and name in _Globals:
p = ctypes.CFUNCTYPE(*types)
f = p((name, dll), flags)
if errcheck is not None:
f.errcheck = errcheck
# replace the Python function
# in this module, but only when
# running as python -O or -OO
if __debug__:
_Cfunctions[name] = f
else:
_Globals[name] = f
return f
raise NameError('no function %r' % (name,))
def _Cobject(cls, ctype):
"""(INTERNAL) New instance from ctypes.
"""
o = object.__new__(cls)
o._as_parameter_ = ctype
return o
def _Constructor(cls, ptr=_internal_guard):
"""(INTERNAL) New wrapper from ctypes.
"""
if ptr == _internal_guard:
raise VLCException("(INTERNAL) ctypes class. You should get references for this class through methods of the LibVLC API.")
if ptr is None or ptr == 0:
return None
return _Cobject(cls, ctypes.c_void_p(ptr))
class _Cstruct(ctypes.Structur
sebastienbarbier/723e_server | seven23/models/accounts/migrations/0002_auto_20161128_1335.py | Python | mit | 1,974 | 0.003546 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-28 13:35
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('accounts', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='AccountRules',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('permissions', models.CharField(choices=[(b'A', b'Administration'), (b'W', b'Read/write'), (b'R', b'Read')], max_length=1)),
],
),
migrations.DeleteModel(
name='InvitationRequest',
),
migrations.AlterModelOptions(
name='account',
options={'ordering': ('create', 'name'), 'verbose_name': 'Account'},
),
migrations.RemoveField(
model_name='account',
name='user',
),
migrations.AlterField(
model_name='account',
name='create',
field=models.DateField(auto_now_add=True, verbose_name='Creation date'),
),
migrations.AddField(
model_name='accountrules',
name='account',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accounts.Account'),
),
migrations.AddField(
model_name='accountrules',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='account',
name='users',
field=models.ManyToManyField(related_name='account', through='accounts.AccountRules', to=settings.AUTH_USER_MODEL),
),
]
antong/ldaptor | ldaptor/test/test_server.py | Python | lgpl-2.1 | 24,613 | 0.008451 |
"""
Test cases for ldaptor.protocols.ldap.ldapserver module.
"""
from twisted.trial import unittest
import sets, base64
from twisted.internet import protocol, address
from twisted.python import components
from ldaptor import inmemory, interfaces, schema, delta, entry
from ldaptor.protocols.ldap import ldapserver, ldapclient, ldaperrors, fetchschema
from ldaptor.protocols import pureldap, pureber
from twisted.test import proto_helpers
from ldaptor.test import util, test_schema
class LDAPServerTest(unittest.TestCase):
def setUp(self):
self.root = inmemory.ReadOnlyInMemoryLDAPEntry(
dn='dc=example,dc=com',
attributes={ 'dc': 'example',
})
self.stuff = self.root.addChild(
rdn='ou=stuff',
attributes={
'objectClass': ['a', 'b'],
'ou': ['stuff'],
})
self.thingie = self.stuff.addChild(
rdn='cn=thingie',
attributes={
'objectClass': ['a', 'b'],
'cn': ['thingie'],
})
self.another = self.stuff.addChild(
rdn='cn=another',
attributes={
'objectClass': ['a', 'b'],
'cn': ['another'],
})
server = ldapserver.LDAPServer()
server.factory = self.root
server.transport = proto_helpers.StringTransport()
server.connectionMade()
self.server = server
def test_bind(self):
self.server.dataReceived(str(pureldap.LDAPMessage(pureldap.LDAPBindRequest(), id=4)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(pureldap.LDAPBindResponse(resultCode=0), id=4)))
def test_bind_success(self):
self.thingie['userPassword'] = ['{SSHA}yVLLj62rFf3kDAbzwEU0zYAVvbWrze8='] # "secret"
self.server.dataReceived(str(pureldap.LDAPMessage(pureldap.LDAPBindRequest(
dn='cn=thingie,ou=stuff,dc=example,dc=com',
auth='secret'), id=4)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(
pureldap.LDAPBindResponse(resultCode=0,
matchedDN='cn=thingie,ou=stuff,dc=example,dc=com'),
id=4)))
def test_bind_invalidCredentials_badPassword(self):
self.server.dataReceived(str(pureldap.LDAPMessage(
pureldap.LDAPBindRequest(dn='cn=thingie,ou=stuff,dc=example,dc=com',
auth='invalid'),
id=734)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(
pureldap.LDAPBindResponse(
resultCode=ldaperrors.LDAPInvalidCredentials.resultCode),
id=734)))
def test_bind_invalidCredentials_nonExisting(self):
self.server.dataReceived(str(pureldap.LDAPMessage(
pureldap.LDAPBindRequest(dn='cn=non-existing,dc=example,dc=com',
auth='invalid'),
id=78)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(
pureldap.LDAPBindResponse(
resultCode=ldaperrors.LDAPInvalidCredentials.resultCode),
id=78)))
def test_bind_badVersion_1_anonymous(self):
self.server.dataReceived(str(pureldap.LDAPMessage(
pureldap.LDAPBindRequest(version=1),
id=32)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(
pureldap.LDAPBindResponse(
resultCode=ldaperrors.LDAPProtocolError.resultCode,
errorMessage='Version 1 not supported'),
id=32)))
def test_bind_badVersion_2_anonymous(self):
self.server.dataReceived(str(pureldap.LDAPMessage(
pureldap.LDAPBindRequest(version=2),
id=32)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(
pureldap.LDAPBindResponse(
resultCode=ldaperrors.LDAPProtocolError.resultCode,
errorMessage='Version 2 not supported'),
id=32)))
def test_bind_badVersion_4_anonymous(self):
self.server.dataReceived(str(pureldap.LDAPMessage(
pureldap.LDAPBindRequest(version=4),
id=32)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(
pureldap.LDAPBindResponse(
resultCode=ldaperrors.LDAPProtocolError.resultCode,
errorMessage='Version 4 not supported'),
id=32)))
def test_bind_badVersion_4_nonExisting(self):
# TODO make a test just like this one that would pass authentication
# if version was correct, to ensure we don't leak that info either.
self.server.dataReceived(str(pureldap.LDAPMessage(
pureldap.LDAPBindRequest(version=4,
dn='cn=non-existing,dc=example,dc=com',
auth='invalid'),
id=11)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(
pureldap.LDAPBindResponse(
resultCode=ldaperrors.LDAPProtocolError.resultCode,
errorMessage='Version 4 not supported'),
id=11)))
def test_unbind(self):
self.server.dataReceived(str(pureldap.LDAPMessage(pureldap.LDAPUnbindRequest(), id=7)))
self.assertEquals(self.server.transport.value(),
'')
def test_search_outOfTree(self):
self.server.dataReceived(str(pureldap.LDAPMessage(
pureldap.LDAPSearchRequest(
baseObject='dc=invalid',
), id=2)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultDone(resultCode=ldaperrors.LDAPNoSuchObject.resultCode),
id=2)),
)
def test_search_matchAll_oneResult(self):
self.server.dataReceived(str(pureldap.LDAPMessage(
pureldap.LDAPSearchRequest(
baseObject='cn=thingie,ou=stuff,dc=example,dc=com',
), id=2)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultEntry(
objectName='cn=thingie,ou=stuff,dc=example,dc=com',
attributes=[ ('objectClass', ['a', 'b']),
('cn', ['thingie']),
]),
id=2))
+ str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultDone(resultCode=0),
id=2)),
)
def test_search_matchAll_manyResults(self):
self.server.dataReceived(str(pureldap.LDAPMessage(
pureldap.LDAPSearchRequest(
baseObject='ou=stuff,dc=example,dc=com',
), id=2)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultEntry(
objectName='ou=stuff,dc=example,dc=com',
attributes=[ ('objectClass', ['a', 'b']),
('ou', ['stuff']),
]),
id=2))
+ str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultEntry(
objectName='cn=another,ou=stuff,dc=example,dc=com',
attributes=[ ('objectClass', ['a', 'b']),
('cn', ['another']),
]),
id=2))
+ str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultEntry(
objectName='cn=thingie,ou=stuff,dc=example,dc=com',
attributes=[ ('objectClass', ['a', 'b']),
('cn', ['thingie']),
]),
id=2))
+ str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultDone(resultCode=0
flgiordano/netcash | +/google-cloud-sdk/lib/surface/logging/sinks/update.py | Python | bsd-3-clause | 7,511 | 0.003595 |
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""'logging sinks update' command."""
from googlecloudsdk.api_lib.logging import util
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.core import list_printer
from googlecloudsdk.core import log
class Update(base.Command):
"""Updates a sink."""
@staticmethod
def Args(parser):
"""Register flags for this command."""
parser.add_argument(
'sink_name', help='The name of the sink to update.')
parser.add_argument(
'destination', nargs='?',
help=('A new destination for the sink. '
'If omitted, the sink\'s existing destination is unchanged.'))
parser.add_argument(
'--log-filter', required=False,
help=('A new filter expression for the sink. '
'If omitted, the sink\'s existing filter (if any) is unchanged.'))
parser.add_argument(
'--output-version-format', required=False,
help=('Format of the log entries being exported. Detailed information: '
'https://cloud.google.com/logging/docs/api/introduction_v2'),
choices=('V1', 'V2'))
def GetLogSink(self):
"""Returns a log sink specified by the arguments."""
client = self.context['logging_client_v1beta3']
return client.projects_logs_sinks.Get(
self.context['sink_reference'].Request())
def GetLogServiceSink(self):
"""Returns a log service sink specified by the arguments."""
client = self.context['logging_client_v1beta3']
return client.projects_logServices_sinks.Get(
self.context['sink_reference'].Request())
def GetProjectSink(self):
"""Returns a project sink specified by the arguments."""
# Use V2 logging API for project sinks.
client = self.context['logging_client_v2beta1']
messages = self.context['logging_messages_v2beta1']
sink_ref = self.context['sink_reference']
return client.projects_sinks.Get(
messages.LoggingProjectsSinksGetRequest(
projectsId=sink_ref.projectsId, sinksId=sink_ref.sinksId))
def UpdateLogSink(self, sink_data):
"""Updates a log sink specified by the arguments."""
client = self.context['logging_client_v1beta3']
messages = self.context['logging_messages_v1beta3']
sink_ref = self.context['sink_reference']
return client.projects_logs_sinks.Update(
messages.LoggingProjectsLogsSinksUpdateRequest(
projectsId=sink_ref.projectsId, logsId=sink_ref.logsId,
sinksId=sink_data['name'], logSink=messages.LogSink(**sink_data)))
def UpdateLogServiceSink(self, sink_data):
"""Updates a log service sink specified by the arguments."""
client = self.context['logging_client_v1beta3']
messages = self.context['logging_messages_v1beta3']
sink_ref = self.context['sink_reference']
return client.projects_logServices_sinks.Update(
messages.LoggingProjectsLogServicesSinksUpdateRequest(
projectsId=sink_ref.projectsId,
logServicesId=sink_ref.logServicesId, sinksId=sink_data['name'],
logSink=messages.LogSink(**sink_data)))
def UpdateProjectSink(self, sink_data):
"""Updates a project sink specified by the arguments."""
# Use V2 logging API for project sinks.
client = self.context['logging_client_v2beta1']
messages = self.context['logging_messages_v2beta1']
sink_ref = self.context['sink_reference']
# Change string value to enum.
sink_data['outputVersionFormat'] = getattr(
messages.LogSink.OutputVersionFormatValueValuesEnum,
sink_data['outputVersionFormat'])
return client.projects_sinks.Update(
messages.LoggingProjectsSinksUpdateRequest(
projectsId=sink_ref.projectsId, sinksId=sink_data['name'],
logSink=messages.LogSink(**sink_data)))
@util.HandleHttpError
def Run(self, args):
"""This is what gets called when the user runs this command.
Args:
args: an argparse namespace. All the arguments that were provided to this
command invocation.
Returns:
The updated sink with its new destination.
"""
util.CheckSinksCommandArguments(args)
# One of the flags is required to update the sink.
# log_filter can be an empty string, so check explicitly for None.
if not (args.destination or args.log_filter is not None or
args.output_version_format):
raise exceptions.ToolException(
'[destination], --log-filter or --output-version-format is required')
# Calling Update on a non-existing sink creates it.
# We need to make sure it exists, otherwise we would create it.
if args.log:
sink = self.GetLogSink()
elif args.service:
sink = self.GetLogServiceSink()
else:
sink = self.GetProjectSink()
# Only update fields that were passed to the command.
if args.destination:
destination = args.destination
else:
destination = sink.destination
if args.log_filter is not None:
log_filter = args.log_filter
else:
log_filter = sink.filter
sink_ref = self.context['sink_reference']
sink_data = {'name': sink_ref.sinksId, 'destination': destination,
'filter': log_filter}
if args.log:
result = util.TypedLogSink(self.UpdateLogSink(sink_data),
log_name=args.log)
elif args.service:
result = util.TypedLogSink(self.UpdateLogServiceSink(sink_data),
service_name=args.service)
else:
if args.output_version_format:
sink_data['outputVersionFormat'] = args.output_version_format
else:
sink_data['outputVersionFormat'] = sink.outputVersionFormat.name
result = util.TypedLogSink(self.UpdateProjectSink(sink_data))
log.UpdatedResource(sink_ref)
return result
def Display(self, unused_args, result):
"""This method is called to print the result of the Run() method.
Args:
unused_args: The arguments that command was run with.
result: The value returned from the Run() method.
"""
list_printer.PrintResourceList('logging.typedSinks', [result])
util.PrintPermissionInstructions(result.destination)
Update.detailed_help = {
'DESCRIPTION': """\
Changes the *[destination]* or *--log-filter* associated with a sink.
If you don't include one of the *--log* or *--log-service* flags,
this command updates a project sink.
The new destination must already exist and Cloud Logging must have
permission to write to it.
Log entries are exported to the new destination immediately.
""",
'EXAMPLES': """\
To only update a project sink filter, run:
$ {command} my-sink --log-filter='metadata.severity>=ERROR'
Detailed information about filters can be found at:
https://cloud.google.com/logging/docs/view/advanced_filters
""",
}
j2sol/ansible | plugins/inventory/yaml.py | Python | gpl-3.0 | 7,145 | 0.003779 |
#!/usr/bin/env python
# Support a YAML file hosts.yml as external inventory in Ansible
# Copyright (C) 2012 Jeroen Hoekx <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
File format:
- <hostname>
or
- host: <hostname>
vars:
- myvar: value
- myvbr: vblue
groups:
- mygroup1
- mygroup2
or
- group: <groupname>
vars:
- groupvar: value
hosts:
- myhost1
- myhost2
groups:
- subgroup1
- subgroup2
Any statement except the first definition is optional.
"""
import json
import os
import sys
from optparse import OptionParser
import yaml
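# Illustrative sketch (not part of the original file): a tiny hosts.yml in the
# format described in the docstring above (host and group names are made up).
# yaml.safe_load() turns it into a list mixing plain strings and dicts, which
# parse_yaml() below converts into Host and Group objects:
#
#   - web1.example.com
#   - host: db1.example.com
#     vars:
#       - dbrole: master
#     groups:
#       - databases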
class Host():
def __init__(self, name):
self.name = name
self.groups = []
self.vars = {}
def __repr__(self):
return "Host('%s')"%(self.name)
def set_variable(self, key, value):
self.vars[key] = value
def get_variables(self):
result = {}
for group in self.groups:
for k,v in group.get_variables().items():
result[k] = v
for k, v in self.vars.items():
result[k] = v
return result
def add_group(self, group):
if group not in self.groups:
self.groups.append(group)
class Group():
def __init__(self, name):
self.name = name
self.hosts = []
self.vars = {}
self.subgroups = []
self.parents = []
def __repr__(self):
return "Group('%s')"%(self.name)
def get_hosts(self):
""" List all hosts in this group, including subgroups """
result = [ host for host in self.hosts ]
for group in self.subgroups:
for host in group.get_hosts():
if host not in result:
result.append(host)
return result
def add_host(self, host):
if host not in self.hosts:
self.hosts.append(host)
host.add_group(self)
def add_subgroup(self, group):
if group not in self.subgroups:
self.subgroups.append(group)
group.add_parent(self)
def add_parent(self, group):
if group not in self.parents:
self.parents.append(group)
def set_variable(self, key, value):
self.vars[key] = value
def get_variables(self):
result = {}
for group in self.parents:
result.update( group.get_variables() )
result.update(self.vars)
return result
def find_group(name, groups):
for group in groups:
if name == group.name:
return group
def parse_vars(vars, obj):
### vars can be a list of dicts or a dictionary
if type(vars) == dict:
for k,v in vars.items():
obj.set_variable(k, v)
elif type(vars) == list:
for var in vars:
k,v = var.items()[0]
obj.set_variable(k, v)
def parse_yaml(yaml_hosts):
groups = []
all_hosts = Group('all')
ungrouped = Group('ungrouped')
groups.append(ungrouped)
### groups first, so hosts can be added to 'ungrouped' if necessary
subgroups = []
for entry in yaml_hosts:
if 'group' in entry and type(entry)==dict:
group = find_group(entry['group'], groups)
if not group:
group = Group(entry['group'])
groups.append(group)
if 'vars' in entry:
parse_vars(entry['vars'], group)
if 'hosts' in entry:
for host_name in entry['hosts']:
host = None
for test_host in all_hosts.get_hosts():
if test_host.name == host_name:
host = test_host
break
else:
host = Host(host_name)
all_hosts.add_host(host)
group.add_host(host)
if 'groups' in entry:
for subgroup in entry['groups']:
subgroups.append((group.name, subgroup))
for name, sub_name in subgroups:
group = find_group(name, groups)
subgroup = find_group(sub_name, groups)
group.add_subgroup(subgroup)
for entry in yaml_hosts:
### a host is either a dict or a single line definition
if type(entry) in [str, unicode]:
for test_host in all_hosts.get_hosts():
if test_host.name == entry:
break
else:
host = Host(entry)
all_hosts.add_host(host)
ungrouped.add_host(host)
elif 'host' in entry:
host = None
no_group = False
for test_host in all_hosts.get_hosts():
### all hosts contains only hosts already in groups
if test_host.name == entry['host']:
host = test_host
break
else:
host = Host(entry['host'])
all_hosts.add_host(host)
no_group = True
if 'vars' in entry:
parse_vars(entry['vars'], host)
if 'groups' in entry:
for test_group in groups:
if test_group.name in entry['groups']:
test_group.add_host(host)
all_hosts.add_host(host)
no_group = False
if no_group:
ungrouped.add_host(host)
return groups, all_hosts
parser = OptionParser()
parser.add_option('-l', '--list', default=False, dest="list_hosts", action="store_true")
parser.add_option('-H', '--host', default=None, dest="host")
parser.add_option('-e', '--extra-vars', default=None, dest="extra")
options, args = parser.parse_args()
base_dir = os.path.dirname(os.path.realpath(__file__))
hosts_file = os.path.join(base_dir, 'hosts.yml')
with open(hosts_file) as f:
yaml_hosts = yaml.safe_load( f.read() )
groups, all_hosts = parse_yaml(yaml_hosts)
if options.list_hosts == True:
result = {}
for group in groups:
result[group.name] = [host.name for host in group.get_hosts()]
print json.dumps(result)
sys.exit(0)
if options.host is not None:
result = {}
host = None
for test_host in all_hosts.get_hosts():
if test_host.name == options.host:
host = test_host
break
result = host.get_variables()
if options.extra:
k,v = options.extra.split("=")
result[k] = v
print json.dumps(result)
sys.exit(0)
parser.print_help()
sys.exit(1)
rzarzynski/tempest | tempest/api/compute/admin/test_aggregates.py | Python | apache-2.0 | 10,395 | 0 |
# Copyright 2013 NEC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib.common.utils import data_utils
from tempest_lib import exceptions as lib_exc
from tempest.api.compute import base
from tempest.common import tempest_fixtures as fixtures
from tempest import test
class AggregatesAdminTestJSON(base.BaseV2ComputeAdminTest):
"""
Tests Aggregates API that require admin privileges
"""
_host_key = 'OS-EXT-SRV-ATTR:host'
@classmethod
def setup_clients(cls):
super(AggregatesAdminTestJSON, cls).setup_clients()
cls.client = cls.os_adm.aggregates_client
@classmethod
def resource_setup(cls):
super(AggregatesAdminTestJSON, cls).resource_setup()
cls.aggregate_name_prefix = 'test_aggregate_'
cls.az_name_prefix = 'test_az_'
hosts_all = cls.os_adm.hosts_client.list_hosts()
hosts = map(lambda x: x['host_name'],
filter(lambda y: y['service'] == 'compute', hosts_all))
cls.host = hosts[0]
def _try_delete_aggregate(self, aggregate_id):
# delete aggregate, if it exists
try:
self.client.delete_aggregate(aggregate_id)
# if the aggregate is not found, it was already deleted in the test
except lib_exc.NotFound:
pass
@test.attr(type='gate')
@test.idempotent_id('0d148aa3-d54c-4317-aa8d-42040a475e20')
def test_aggregate_create_delete(self):
# Create and delete an aggregate.
aggregate_name = data_utils.rand_name(self.aggregate_name_prefix)
aggregate = self.client.create_aggregate(name=aggregate_name)
self.addCleanup(self._try_delete_aggregate, aggregate['id'])
self.assertEqual(aggregate_name, aggregate['name'])
self.assertIsNone(aggregate['availability_zone'])
self.client.delete_aggregate(aggregate['id'])
self.client.wait_for_resource_deletion(aggregate['id'])
@test.attr(type='gate')
@test.idempotent_id('5873a6f8-671a-43ff-8838-7ce430bb6d0b')
def test_aggregate_create_delete_with_az(self):
# Create and delete an aggregate.
aggregate_name = data_utils.rand_name(self.aggregate_name_prefix)
az_name = data_utils.rand_name(self.az_name_prefix)
aggregate = self.client.create_aggregate(
name=aggregate_name, availability_zone=az_name)
self.addCleanup(self._try_delete_aggregate, aggregate['id'])
self.assertEqual(aggregate_name, aggregate['name'])
self.assertEqual(az_name, aggregate['availability_zone'])
self.client.delete_aggregate(aggregate['id'])
self.client.wait_for_resource_deletion(aggregate['id'])
@test.attr(type='gate')
@test.idempotent_id('68089c38-04b1-4758-bdf0-cf0daec4defd')
def test_aggregate_create_verify_entry_in_list(self):
# Create an aggregate and ensure it is listed.
aggregate_name = data_utils.rand_name(self.aggregate_name_prefix)
aggregate = self.client.create_aggregate(name=aggregate_name)
self.addCleanup(self.client.delete_aggregate, aggregate['id'])
aggregates = self.client.list_aggregates()
self.assertIn((aggregate['id'], aggregate['availability_zone']),
map(lambda x: (x['id'], x['availability_zone']),
aggregates))
@test.attr(type='gate')
@test.idempotent_id('36ec92ca-7a73-43bc-b920-7531809e8540')
def test_aggregate_create_update_metadata_get_details(self):
# Create an aggregate and ensure its details are returned.
aggregate_name = data_utils.rand_name(self.aggregate_name_prefix)
aggregate = self.client.create_aggregate(name=aggregate_name)
self.addCleanup(self.client.delete_aggregate, aggregate['id'])
body = self.client.get_aggregate(aggregate['id'])
self.assertEqual(aggregate['name'], body['name'])
self.assertEqual(aggregate['availability_zone'],
body['availability_zone'])
self.assertEqual({}, body["metadata"])
# set the metadata of the aggregate
meta = {"key": "value"}
body = self.client.set_metadata(aggregate['id'], meta)
self.assertEqual(meta, body["metadata"])
# verify the metadata has been set
body = self.client.get_aggregate(aggregate['id'])
self.assertEqual(meta, body["metadata"])
@test.attr(type='gate')
@test.idempotent_id('4d2b2004-40fa-40a1-aab2-66f4dab81beb')
def test_aggregate_create_update_with_az(self):
# Update an aggregate and ensure properties are updated correctly
aggregate_name = data_utils.rand_name(self.aggregate_name_prefix)
az_name = data_utils.rand_name(self.az_name_prefix)
aggregate = self.client.create_aggregate(
name=aggregate_name, availability_zone=az_name)
self.addCleanup(self.client.delete_aggregate, aggregate['id'])
self.assertEqual(aggregate_name, aggregate['name'])
self.assertEqual(az_name, aggregate['availability_zone'])
self.assertIsNotNone(aggregate['id'])
aggregate_id = aggregate['id']
new_aggregate_name = aggregate_name + '_new'
new_az_name = az_name + '_new'
resp_aggregate = self.client.update_aggregate(aggregate_id,
new_aggregate_name,
new_az_name)
self.assertEqual(new_aggregate_name, resp_aggregate['name'])
self.assertEqual(new_az_name, resp_aggregate['availability_zone'])
aggregates = self.client.list_aggregates()
self.assertIn((aggregate_id, new_aggregate_name, new_az_name),
map(lambda x:
(x['id'], x['name'], x['availability_zone']),
aggregates))
@test.attr(type='gate')
@test.idempotent_id('c8e85064-e79b-4906-9931-c11c24294d02')
def test_aggregate_add_remove_host(self):
# Add an host to the given aggregate and remove.
self.useFixture(fixtures.LockFixture('availability_zone'))
aggregate_name = data_utils.rand_name(self.aggregate_name_prefix)
aggregate = self.client.create_aggregate(name=aggregate_name)
self.addCleanup(self.client.delete_aggregate, aggregate['id'])
body = self.client.add_host(aggregate['id'], self.host)
self.assertEqual(aggregate_name, body['name'])
self.assertEqual(aggregate['availability_zone'],
body['availability_zone'])
self.assertIn(self.host, body['hosts'])
body = self.client.remove_host(aggregate['id'], self.host)
self.assertEqual(aggregate_name, body['name'])
self.assertEqual(aggregate['availability_zone'],
body['availability_zone'])
self.assertNotIn(self.host, body['hosts'])
@test.attr(type='gate')
@test.idempotent_id('7f6a1cc5-2446-4cdb-9baa-b6ae0a919b72')
def test_aggregate_add_host_list(self):
# Add an host to the given aggregate and list.
self.useFixture(fixtures.LockFixture('availability_zone'))
aggregate_name = data_utils.rand_name(self.aggregate_name_prefix)
aggregate = self.client.create_aggregate(name=aggregate_name)
self.addCleanup(self.client.delete_aggregate, aggregate['id'])
self.client.add_host(aggregate['id'], self.host)
self.addCleanup(self.client.remove_host, aggregate['id'], self.host)
aggregates = self.client.list_aggregates()
aggs = filter(lambda x: x['id'] == aggregate['id'], aggregate
mitmedialab/MediaCloud-Web-Tools | server/views/topics/__init__.py | Python | apache-2.0 | 3,391 | 0.002359 |
import logging
import datetime
import mediacloud.api
import re
from server import mc
from server.auth import is_user_logged_in
from server.util.csv import SOURCE_LIST_CSV_METADATA_PROPS
logger = logging.getLogger(__name__)
TOPIC_MEDIA_INFO_PROPS = ['media_id', 'name', 'url']
TOPIC_MEDIA_PROPS = ['story_count', 'media_inlink_count', 'inlink_count', 'outlink_count',
'facebook_share_count', 'simple_tweet_count']
TOPIC_MEDIA_URL_SHARING_PROPS = ['sum_post_count', 'sum_channel_count', 'sum_author_count']
TOPIC_MEDIA_CSV_PROPS = TOPIC_MEDIA_INFO_PROPS + TOPIC_MEDIA_PROPS + TOPIC_MEDIA_URL_SHARING_PROPS + \
SOURCE_LIST_CSV_METADATA_PROPS
def _parse_media_ids(args):
media_ids = []
if 'sources[]' in args:
src = args['sources[]']
if isinstance(src, str):
media_ids = src.split(',')
media_ids = " ".join([str(m) for m in media_ids])
src = re.sub(r'\[*\]*', '', str(src))
if len(src) == 0:
media_ids = []
media_ids = src.split(',') if len(src) > 0 else []
else:
media_ids = src
return media_ids
def _parse_collection_ids(args):
collection_ids = []
if 'collections[]' in args:
coll = args['collections[]']
if isinstance(coll, str):
tags_ids = coll.split(',')
tags_ids = " ".join([str(m) for m in tags_ids])
coll = re.sub(r'\[*\]*', '', str(tags_ids))
if len(coll) == 0:
collection_ids = []
else:
collection_ids = coll.split(',') # make a list
else:
collection_ids = coll
return collection_ids
# TODO: Migrate to use mediapicker.concate!
# helper for topic preview queries
def concatenate_query_for_solr(solr_seed_query=None, media_ids=None, tags_ids=None):
query = ''
if solr_seed_query not in [None,'']:
query = '({})'.format(solr_seed_query)
if len(media_ids) > 0 or len(tags_ids) > 0:
if solr_seed_query not in [None,'']:
query += " AND ("
else:
query += "(*) AND ("
# add in the media sources they specified
if len(media_ids) > 0:
media_ids = media_ids.split(',') if isinstance(media_ids, str) else media_ids
query_media_ids = " ".join(map(str, media_ids))
query_media_ids = re.sub(r'\[*\]*', '', str(query_media_ids))
query_media_ids = " media_id:({})".format(query_media_ids)
query += '(' + query_media_ids + ')'
if len(media_ids) > 0 and len(tags_ids) > 0:
query += " OR "
# add in the collections they specified
if len(tags_ids) > 0:
tags_ids = tags_ids.split(',') if isinstance(tags_ids, str) else tags_ids
query_tags_ids = " ".join(map(str, tags_ids))
query_tags_ids = re.sub(r'\[*\]*', '', str(query_tags_ids))
query_tags_ids = " tags_id_media:({})".format(query_tags_ids)
query += '(' + query_tags_ids + ')'
query += ')'
return query
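# Illustrative note (not part of the original file): with made-up ids,
# concatenate_query_for_solr('obama', media_ids=[1, 2], tags_ids=[9]) returns
# roughly "(obama) AND (( media_id:(1 2)) OR ( tags_id_media:(9)))".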
def concatenate_solr_dates(start_date, end_date):
publish_date = mediacloud.api.MediaCloud.dates_as_query_clause(
datetime.datetime.strptime(start_date, '%Y-%m-%d').date(),
datetime.datetime.strptime(end_date, '%Y-%m-%d').date())
return publish_date
izadorozhna/tempest | tempest/tests/cmd/test_javelin.py | Python | apache-2.0 | 18,257 | 0 |
#!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslotest import mockpatch
from tempest_lib import exceptions as lib_exc
from tempest.cmd import javelin
from tempest.tests import base
class JavelinUnitTest(base.TestCase):
def setUp(self):
super(JavelinUnitTest, self).setUp()
javelin.setup_logging()
self.fake_client = mock.MagicMock()
self.fake_object = mock.MagicMock()
def test_load_resources(self):
with mock.patch('six.moves.builtins.open', mock.mock_open(),
create=True) as open_mock:
with mock.patch('yaml.load', mock.MagicMock(),
create=True) as load_mock:
javelin.load_resources(self.fake_object)
load_mock.assert_called_once_with(open_mock(self.fake_object))
def test_keystone_admin(self):
self.useFixture(mockpatch.PatchObject(javelin, "OSClient"))
javelin.OPTS = self.fake_object
javelin.keystone_admin()
javelin.OSClient.assert_called_once_with(
self.fake_object.os_username,
self.fake_object.os_password,
self.fake_object.os_tenant_name)
def test_client_for_user(self):
fake_user = mock.MagicMock()
javelin.USERS = {fake_user['name']: fake_user}
self.useFixture(mockpatch.PatchObject(javelin, "OSClient"))
javelin.client_for_user(fake_user['name'])
javelin.OSClient.assert_called_once_with(
fake_user['name'], fake_user['pass'], fake_user['tenant'])
def test_client_for_non_existing_user(self):
fake_non_existing_user = self.fake_object
fake_user = mock.MagicMock()
javelin.USERS = {fake_user['name']: fake_user}
self.useFixture(mockpatch.PatchObject(javelin, "OSClient"))
javelin.client_for_user(fake_non_existing_user['name'])
self.assertFalse(javelin.OSClient.called)
def test_attach_volumes(self):
self.useFixture(mockpatch.PatchObject(javelin, "client_for_user",
return_value=self.fake_client))
self.useFixture(mockpatch.PatchObject(
javelin, "_get_volume_by_name",
return_value=self.fake_object.volume))
self.useFixture(mockpatch.PatchObject(
javelin, "_get_server_by_name",
return_value=self.fake_object.server))
javelin.attach_volumes([self.fake_object])
mocked_function = self.fake_client.volumes.attach_volume
mocked_function.assert_called_once_with(
self.fake_object.volume['id'],
self.fake_object.server['id'],
self.fake_object['device'])
class TestCreateResources(JavelinUnitTest):
def test_create_tenants(self):
self.fake_client.identity.list_tenants.return_value = {'tenants': []}
self.useFixture(mockpatch.PatchObject(javelin, "keystone_admin",
return_value=self.fake_client))
javelin.create_tenants([self.fake_object['name']])
mocked_function = self.fake_client.identity.create_tenant
mocked_function.assert_called_once_with(self.fake_object['name'])
def test_create_duplicate_tenant(self):
self.fake_client.identity.list_tenants.return_value = {'tenants': [
{'name': self.fake_object['name']}]}
self.useFixture(mockpatch.PatchObject(javelin, "keystone_admin",
return_value=self.fake_client))
javelin.create_tenants([self.fake_object['name']])
mocked_function = self.fake_client.identity.create_tenant
self.assertFalse(mocked_function.called)
def test_create_users(self):
self.fake_client.identity.get_tenant_by_name.return_value = \
self.fake_object['tenant']
self.fake_client.identity.get_user_by_username.side_effect = \
lib_exc.NotFound("user is not found")
self.useFixture(mockpatch.PatchObject(javelin, "keystone_admin",
return_value=self.fake_client))
javelin.create_users([self.fake_object])
fake_tenant_id = self.fake_object['tenant']['id']
fake_email = "%s@%s" % (self.fake_object['user'], fake_tenant_id)
mocked_function = self.fake_client.identity.create_user
mocked_function.assert_called_once_with(self.fake_object['name'],
self.fake_object['password'],
fake_tenant_id,
fake_email,
enabled=True)
def test_create_user_missing_tenant(self):
self.fake_client.identity.get_tenant_by_name.side_effect = \
lib_exc.NotFound("tenant is not found")
self.useFixture(mockpatch.PatchObject(javelin, "keystone_admin",
return_value=self.fake_client))
javelin.create_users([self.fake_object])
mocked_function = self.fake_client.identity.create_user
self.assertFalse(mocked_function.called)
def test_create_objects(self):
self.useFixture(mockpatch.PatchObject(javelin, "client_for_user",
return_value=self.fake_client))
self.useFixture(mockpatch.PatchObject(javelin, "_assign_swift_role"))
self.useFixture(mockpatch.PatchObject(javelin, "_file_contents",
return_value=self.fake_object.content))
javelin.create_objects([self.fake_object])
mocked_function = self.fake_client.containers.create_container
mocked_function.assert_called_once_with(self.fake_object['container'])
mocked_function = self.fake_client.objects.create_object
mocked_function.assert_called_once_with(self.fake_object['container'],
self.fake_object['name'],
self.fake_object.content)
def test_create_images(self):
self.fake_client.images.create_image.return_value = \
self.fake_object['body']
self.useFixture(mockpatch.PatchObject(javelin, "client_for_user",
return_value=self.fake_client))
self.useFixture(mockpatch.PatchObject(javelin, "_get_image_by_name",
return_value=[]))
self.useFixture(mockpatch.PatchObject(javelin, "_resolve_image",
return_value=(None, None)))
with mock.patch('six.moves.builtins.open', mock.mock_open(),
create=True) as open_mock:
javelin.create_images([self.fake_object])
mocked_function = self.fake_client.images.create_image
mocked_function.assert_called_once_with(self.fake_object['name'],
self.fake_object['format'],
self.fake_object['format'])
mocked_function = self.fake_client.images.store_image_file
fake_image_id = self.fake_object['body'].get('id')
mocked_function.assert_called_once_with(fake_image_id, open_mock())
def test_create_networks(self):
self.fake_client.networks.list_networks.return_value = {
'networks': []}
self.useFixture(mockpatch.PatchObject(javelin, "client_for_user",
return_value=self.fake_client))
jave
pycontw/pycontw2016 | src/sponsors/migrations/0013_auto_20180305_1339.py | Python | mit | 624 | 0.001603 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2018-03-05 05:39
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sponsors', '0012_sponsor_level_smallint'),
]
operations = [
migrations.AlterField(
model_name='sponsor',
name='conference',
field=models.SlugField(choices=[('pycontw-2016', 'PyCon Taiwan 2016'), ('pycontw-2017', 'PyCon Taiwan 2017'), ('pycontw-2018', 'PyCon Taiwan 2018')], default='pycontw-2018', verbose_name='conference'),
),
]
nielsbuwen/ilastik | tests/launch_workflow.py | Python | gpl-3.0 | 4,466 | 0.008509 |
###############################################################################
# ilastik: interactive learning and segmentation toolkit
#
# Copyright (C) 2011-2014, the ilastik developers
# <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# In addition, as a special exception, the copyright holders of
# ilastik give you permission to combine ilastik with applets,
# workflows and plugins which are not covered under the GNU
# General Public License.
#
# See the LICENSE file for details. License information is also available
# on the ilastik web site at:
# http://ilastik.org/license.html
###############################################################################
import sys
from functools import partial
from PyQt4.QtGui import QApplication
import threading
# This function was copied from: http://bugs.python.org/issue1230540
# It is necessary because sys.excepthook doesn't work for unhandled exceptions in other threads.
def install_thread_excepthook():
"""
Workaround for sys.excepthook thread bug
(https://sourceforge.net/tracker/?func=detail&atid=105470&aid=1230540&group_id=5470).
Call once from __main__ before creating any threads.
If using psyco, call psycho.cannotcompile(threading.Thread.run)
since this replaces a new-style class method.
"""
import sys
run_old = threading.Thread.run
def run(*args, **kwargs):
try:
run_old(*args, **kwargs)
except (KeyboardInterrupt, SystemExit):
raise
except:
sys.excepthook(*sys.exc_info())
threading.Thread.run = run
#python launch_workflow.py --workflow=PixelClassificationWorkflow --playback_script=$f --playback_speed=2.0 --exit_on_failure --exit_on_success
#sys.argv.append( "/Users/bergs/MyProject.ilp" )
## EXAMPLE PLAYBACK TESTING ARGS
#sys.argv.append( "--playback_script=/Users/bergs/Documents/workspace/ilastik-meta/ilastik/tests/event_based/recording-20130450-2111.py" )
#sys.argv.append( "--playback_speed=3" )
#sys.argv.append( "--exit_on_failure" )
sys.argv.append( "--workflow=PixelClassificationWorkflow" )
import argparse
parser = argparse.ArgumentParser( description="Ilastik Pixel Classification Workflow" )
parser.add_argument('--playback_script', help='An event recording to play back after the main window has opened.', required=False)
parser.add_argument('--playback_speed', help='Speed to play the playback script.', default=0.5, type=float)
parser.add_argument('--exit_on_failure', help='Immediately call exit(1) if an unhandled exception occurs.', action='store_true', default=False)
parser.add_argument('--exit_on_success', help='Quit the app when the playback is complete.', action='store_true', default=False)
parser.add_argument('--project', nargs='?', help='A project file to open on startup.')
parser.add_argument('--workflow', help='A project file to open on startup.')
parsed_args = parser.parse_args()
init_funcs = []
# Start the GUI
if parsed_args.project is not None:
def loadProject(shell):
shell.openProjectFile(parsed_args.project)
init_funcs.append( loadProject )
onfinish = None
if parsed_args.exit_on_success:
onfinish = QApplication.quit
if parsed_args.playback_script is not None:
from ilastik.utility.gui.eventRecorder import EventPlayer
def play_recording(shell):
player = EventPlayer(parsed_args.playback_speed)
player.play_script(parsed_args.playback_script, onfinish)
init_funcs.append( partial(play_recording) )
if parsed_args.exit_on_failure:
old_excepthook = sys.excepthook
def print_exc_and_exit(*args):
old_excepthook(*args)
sys.stderr.write("Exiting early due to an unhandled exception. See error output above.\n")
QApplication.exit(1)
sys.excepthook = print_exc_and_exit
install_thread_excepthook()
# Import all possible workflows so they are registered with the base class
import ilastik.workflows
# Ask the base class to give us the workflow type
from ilastik.workflow import Workflow
workflowClass = Workflow.getSubclass(parsed_args.workflow)
# Launch the GUI
from ilastik.shell.gui.startShellGui import startShellGui
sys.exit( startShellGui( workflowClass, *init_funcs ) )
surajshanbhag/Indoor_SLAM | src/control/piControl/encoderRun.py | Python | gpl-3.0 | 1,623 | 0.021565 |
from __future__ import division
import encoder
import socket_class as socket
import threading
import time
import sys
rightC,leftC = (0,0)
s = None
IP = "10.42.0.1"
host = 50679
class sendData(threading.Thread):
def __init__(self,waitTime):
self.waitTime = waitTime
threading.Thread.__init__(self)
def run(self):
#send info every waitTime
global s
global rightC,leftC
conf = "OK"
while True:
if(conf == "OK"):
s.send(str(rightC)+","+str(leftC))
conf = s.recv(10)
print "sent",str(rightC),",",str(leftC)
time.sleep(self.waitTime)
def right():
global rightC
rightC += 1
print "right: ",rightC,"\t","left :",leftC
def left():
global leftC
leftC += 1
print "right: ",rightC,"\t","left :",leftC
def checkArgs():
global IP,host
if(len(sys.argv)!=1):
IP = sys.argv[1]
host = sys.argv[2]
if __name__ == "__main__":
"""If 2 arguments are passed in, overwrite IP and port number with those values; else use IP = 10.42.0.1 and port 50679."""
encoder.encoderSetup()
if len(sys.argv) in (1,3):
checkArgs()
s = socket.initSocket()
while True:
try:
socket.connect(s,IP,host)
break
except:
pass
#start thread to send info in background
t = sendData(.01)
t.daemon = True
t.start()
#read encoder values
encoder.getEncoder(right,left)
else:
encoder.getEncoder(right,left)
jzbontar/orange-tree | Orange/classification/linear_regression.py | Python | gpl-3.0 | 4,108 | 0.000974 |
import numpy as np
import scipy.sparse as sp
from scipy.optimize import fmin_l_bfgs_b
from Orange.classification import Learner, Model
__all__ = ["LinearRegressionLearner"]
class LinearRegressionLearner(Learner):
def __init__(self, lambda_=1.0, preprocessors=None, **fmin_args):
'''L2 regularized linear regression (a.k.a Ridge regression)
This model uses the L-BFGS algorithm to minimize the linear least
squares penalty with L2 regularization. When using this model you
should:
- Choose a suitable regularization parameter lambda_
- Continuize all discrete attributes
- Consider appending a column of ones to the dataset (intercept term)
- Transform the dataset so that the columns are on a similar scale
:param lambda_: the regularization parameter. Higher values of lambda_
force the coefficients to be small.
:type lambda_: float
Examples
--------
import numpy as np
from Orange.data import Table
from Orange.classification.linear_regression import LinearRegressionLearner
data = Table('housing')
data.X = (data.X - np.mean(data.X, axis=0)) / np.std(data.X, axis=0) # normalize
data.X = np.hstack((data.X, np.ones((data.X.shape[0], 1)))) # append ones
m = LinearRegressionLearner(lambda_=1.0)
c = m(data) # fit
print(c(data)) # predict
'''
super().__init__(preprocessors=preprocessors)
self.lambda_ = lambda_
self.fmin_args = fmin_args
def cost_grad(self, theta, X, y):
t = X.dot(theta) - y
cost = t.dot(t)
cost += self.lambda_ * theta.dot(theta)
cost /= 2.0 * X.shape[0]
grad = X.T.dot(t)
grad += self.lambda_ * theta
grad /= X.shape[0]
return cost, grad
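# Added note (not part of the original file): cost_grad above computes
#   J(theta) = (||X.theta - y||^2 + lambda * ||theta||^2) / (2 * n_samples)
# and its gradient (X.T.(X.theta - y) + lambda * theta) / n_samples.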
def fit(self, X, Y, W):
if Y.shape[1] > 1:
raise ValueError('Linear regression does not support '
'multi-target classification')
if np.isnan(np.sum(X)) or np.isnan(np.sum(Y)):
raise ValueError('Linear regression does not support '
'unknown values')
theta = np.zeros(X.shape[1])
theta, cost, ret = fmin_l_bfgs_b(self.cost_grad, theta,
args=(X, Y.ravel()), **self.fmin_args)
return LinearRegressionModel(theta)
class LinearRegressionModel(Model):
def __init__(self, theta):
self.theta = theta
def predict(self, X):
return X.dot(self.theta)
if __name__ == '__main__':
import Orange.data
import sklearn.cross_validation as skl_cross_validation
np.random.seed(42)
def numerical_grad(f, params, e=1e-4):
grad = np.zeros_like(params)
perturb = np.zeros_like(params)
for i in range(params.size):
perturb[i] = e
j1 = f(params - perturb)
j2 = f(params + perturb)
grad[i] = (j2 - j1) / (2.0 * e)
perturb[i] = 0
return grad
d = Orange.data.Table('housing')
d.X = np.hstack((d.X, np.ones((d.X.shape[0], 1))))
d.shuffle()
# m = LinearRegressionLearner(lambda_=1.0)
# print(m(d)(d))
# # gradient check
# m = LinearRegressionLearner(lambda_=1.0)
# theta = np.random.randn(d.X.shape[1])
#
# ga = m.cost_grad(theta, d.X, d.Y.ravel())[1]
# gm = numerical_grad(lambda t: m.cost_grad(t, d.X, d.Y.ravel())[0], theta)
#
# print(np.sum((ga - gm)**2))
for lambda_ in (0.01, 0.03, 0.1, 0.3, 1, 3):
m = LinearRegressionLearner(lambda_=lambda_)
scores = []
for tr_ind, te_ind in skl_cross_validation.KFold(d.X.shape[0]):
s = np.mean((m(d[tr_ind])(d[te_ind]) - d[te_ind].Y.ravel())**2)
scores.append(s)
print('{:5.2f} {}'.format(lambda_, np.mean(scores)))
m = LinearRegressionLearner(lambda_=0)
print('test data', np.mean((m(d)(d) - d.Y.ravel())**2))
print('majority', np.mean((np.mean(d.Y.ravel()) - d.Y.ravel())**2))
clifforloff/opmservice | opmapp/tests.py | Python | gpl-2.0 | 24,082 | 0.008139 |
"""
Test for opmapp application.
"""
# from python
import datetime
# from selenium
from selenium.webdriver.firefox.webdriver import WebDriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import NoSuchElementException
from django.test import LiveServerTestCase
import unittest, time, re
class LoginLogoutTestCase(LiveServerTestCase):
fixtures = ['user-data.json', 'basic-data.json']
@classmethod
def setUpClass(cls):
cls.selenium = WebDriver()
cls.selenium.implicitly_wait(30)
super(LoginLogoutTestCase, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.selenium.quit()
super(LoginLogoutTestCase, cls).tearDownClass()
def setUp(self):
self.verificationErrors = []
super(LoginLogoutTestCase, self).setUp()
def tearDown(self):
self.assertEqual([], self.verificationErrors)
super(LoginLogoutTestCase, self).tearDown()
def test_login(self):
driver = self.selenium
driver.get(self.live_server_url + "/")
driver.find_element_by_link_text("Login").click()
driver.find_element_by_id("id_username").clear()
driver.find_element_by_id("id_username").send_keys("opmtest")
driver.find_element_by_id("id_password").clear()
driver.find_element_by_id("id_password").send_keys("secretpass")
driver.find_element_by_css_selector("input[type=\"submit\"]").click()
# Warning: assertTextPresent may require manual changes
self.assertRegexpMatches(driver.find_element_by_css_selector("BODY").text, r"^[\s\S]*opmtest[\s\S]*$")
driver.find_element_by_link_text("Logout").click()
# Warning: assertTextPresent may require manual changes
self.assertRegexpMatches(driver.find_element_by_css_selector("BODY").text, r"^[\s\S]*Login[\s\S]*$")
def is_element_present(self, how, what):
try: self.selenium.find_element(by=how, value=what)
except NoSuchElementException, e: return False
return True
def authenticate(username, password, base_url, selenium, client):
"""
Authenticates the selenium driver using the django client driver.
Basically, it passes the sessionid cookie to selenium.
"""
client.login(username="opmtest", password="secretpass")
sess = client.cookies['sessionid']
# Make a first request in order to avoid overlapping of cookies
selenium.get(base_url)
selenium.add_cookie({"name":"sessionid",
"value":sess.value, "path":sess["path"],
"httponly":sess["httponly"], "max-age":sess["max-age"],
"expiry":sess["expires"]})
class BasicTestCase(LiveServerTestCase):
fixtures = ['user-data.json', 'basic-data.json']
@classmethod
def setUpClass(cls):
cls.selenium = WebDriver()
cls.selenium.implicitly_wait(30)
super(BasicTestCase, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.selenium.quit()
super(BasicTestCase, cls).tearDownClass()
def setUpBasic(self, prop, base_url, entity):
self.property = prop
self.entity = entity
self.base_url = base_url
self.verificationErrors = []
# Login test user
authenticate("opmtest", "secretpass", self.live_server_url+"/",
self.selenium, self.client)
driver = self.selenium
driver.get(self.live_server_url + "/")
driver.find_element_by_xpath("//li/a[text()='"+self.property+"']").click()
driver.get(self.live_server_url + self.base_url)
driver.find_element_by_link_text(self.entity).click()
super(BasicTestCase, self).setUp()
def tearDown(self):
self.assertEqual([], self.verificationErrors)
# Logout test user
driver = self.selenium
driver.find_element_by_link_text("Logout").click()
super(BasicTestCase, self).tearDown()
def check_highlight_property(self):
try: self.assertTrue(self.is_element_present(By.XPATH, "//a[contains(@style,'yellow') and .//text()='"+self.entity+"']"))
except AssertionError as e: self.verificationErrors.append(str(e))
def check_highlight_entity(self):
driver = self.selenium
try: self.assertRegexpMatches(driver.title, r"^[\s\S]*"+self.entity+"[\s\S]*$")
except AssertionError as e: self.verificationErrors.append(str(e))
def is_element_present(self, how, what):
try: self.selenium.find_element(by=how, value=what)
except NoSuchElementException: return False
return True
class TenantTestCase(BasicTestCase):
def setUp(self):
super(TenantTestCase, self).setUpBasic('Broad Ripple Trails', "/tenants/", "Tenants")
def test_highlight_property(self):
super(TenantTestCase, self).check_highlight_property()
def test_highlight_entity(self):
super(TenantTestCase, self).check_highlight_entity()
def test_filter_pos(self):
driver = self.selenium
driver.find_element_by_id("id_last_name").clear()
driver.find_element_by_id("id_last_name").send_keys("obam")
driver.find_element_by_xpath("//input[@value='Filter']").click()
try: self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='content']/table[@id='id_list_view']/tbody/tr/td/a[contains(text(),'Obama')]"))
except AssertionError as e: self.verificationErrors.append(str(e))
driver.find_element_by_css_selector("input[type=\"button\"]").click()
Select(driver.find_element_by_id("id_unit")).select_by_visible_text("5209 CV")
driver.find_element_by_xpath("//input[@value='Filter']").click()
try: self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='content']/table[@id='id_list_view']/tbody/tr/td/a[contains(text(),'Obama')]"))
except AssertionError as e: self.verificationErrors.append(str(e))
def test_filter_neg(self):
driver = self.selenium
driver.find_element_by_id("id_last_name").clear()
driver.find_element_by_id("id_last_name").send_keys("obamertrte")
driver.find_element_by_xpath("//input[@value='Filter']").click()
try: self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='content']/table[@id='id_list_view']/tbody[count(tr)=0]"))
except AssertionError as e: self.verificationErrors.append(str(e))
driver.find_element_by_css_selector("input[type=\"button\"]").click()
Select(driver.find_element_by_id("id_unit")).select_by_visible_text("5211 CV")
driver.find_element_by_xpath("//input[@value='Filter']").click()
try: self.assertFalse(self.is_element_present(By.XPATH, "//div[@id='content']/table[@id='id_list_view']/tbody/tr/td/a[contains(text(),'Obama')]"))
except AssertionError as e: self.verificationErrors.append(str(e))
def test_add_pos(self):
driver = self.selenium
driver.find_element_by_id("id_add_item").click()
driver.find_element_by_id("id_first_name").clear()
driver.find_element_by_id("id_first_name").send_keys("Barack")
driver.find_element_by_id("id_last_name").clear()
driver.find_element_by_id("id_last_name").send_keys("Obama")
driver.find_element_by_id("id_start_date").clear()
driver.find_element_by_id("id_start_date").send_keys("2012-12-02")
driver.find_element_by_id("id_end_date").clear()
driver.find_element_by_id("id_end_date").send_keys("2012-12-31")
Select(driver.find_element_by_id("id_unit")).select_by_visible_text("5209 CV")
driver.find_element_by_id("id_permanent_address1").clear()
driver.find_element_by_id("id_permanent_address1").send_keys("1220 Montg
|
omery St.")
driver.find_element_by_id("id_permanent_address2").clear()
driver.find_element_by_id("id_permanent_address2").send_keys("1995 Shattuck St.")
driver.find_element_by_id("id_permanent_city").clear()
driver.find_element_by_id("i
|
igorgue/zeromqlogs
|
docs/conf.py
|
Python
|
mit
| 7,723
| 0.00764
|
# -*- coding: utf-8 -*-
#
# sample documentation build configuration file, created by
# sphinx-quickstart on Mon Apr 16 21:22:43 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'sample'
copyright = u'2012, Kenneth Reitz'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'v0.0.1'
# The full version, including alpha/beta/rc tags.
release = 'v0.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'sampledoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'sample.tex', u'sample Documentation',
u'Kenneth Reitz', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'sample', u'sample Documentation',
[u'Kenneth Reitz'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'sample', u'sample Documentation',
u'Kenneth Reitz', 'sample', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
|
robotii/notemeister
|
src/lib/Note.py
|
Python
|
gpl-2.0
| 439
| 0.029613
|
#!/usr/bin/env python
import gtk
#import NoteBuffer
import notemeister
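# Note is a simple container for a note's title, body, link and wrap setting; the body text is mirrored into a notemeister.NoteBuffer.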
class Note:
def __init__(self, path=None, title='', body='', link='', wrap="1"):
self.path = path
self.title = title
self.body = body
self.link = link
self.wrap = wrap
self.buffer = notemeister.NoteBuffer.NoteBuffer()
self.buffer.set_text(self.body)
def __str__(self):
return 'Note "%s" has body: %s' % (self.title, self.body)
|
orwell-int/agent-server-game-python
|
setup.py
|
Python
|
bsd-3-clause
| 1,239
| 0.000807
|
#!/usr/bin/env python
import setuptools
# Hack to prevent stupid TypeError: 'NoneType' object is not callable error on
# exit of python setup.py test # in multiprocessing/util.py _exit_function when
# running python setup.py test (see
# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
assert multiprocessing
except ImportError:
pass
setuptools.setup(
name='orwell.agent',
version='0.0.1',
description='Agent connecting to the game server.',
author='',
author_email='',
packages=setuptools.find_packages(exclude="test"),
test_suite='nose.collector',
install_requires=['pyzmq', 'cliff'],
tests_require=['nose', 'coverage', 'mock'],
entry_points={
'console_scripts': [
'thought_police = orwell.agent.main:main',
]
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX :: Linux',
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6'],
python_requires='>=3.6.0',
)
|
oilshell/blog-code
|
fd-passing/py_fanos_test.py
|
Python
|
apache-2.0
| 2,191
| 0.01141
|
#!/usr/bin/env python3
"""
py_fanos_test.py: Tests for py_fanos.py
"""
import socket
import sys
import unittest
import py_fanos # module under test
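# FanosTest round-trips length-prefixed messages of the form '3:foo,' (plus optional file descriptors) over a socketpair.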
class FanosTest(unittest.TestCase):
def testSendReceive(self):
left, right = socket.socketpair()
py_fanos.send(left, b'foo')
fd_out = []
msg = py_fanos.recv(right, fd_out=fd_out)
self.assertEqual(b'foo', msg)
self.assertEqual([], fd_out)
py_fanos.send(left, b'spam', [sys.stdin.fileno(), sys.stdout.fileno(), sys.stderr.fileno()])
msg = py_fanos.recv(right, fd_out=fd_out)
self.assertEqual(b'spam', msg)
self.assertEqual(3, len(fd_out))
print(fd_out)
left.close()
msg = py_fanos.recv(right)
self.assertEqual(None, msg) # Valid EOF
right.close()
class InvalidMessageTests(unittest.TestCase):
"""COPIED to native/fanos_test.py."""
def testInvalidColon(self):
left, right = socket.socketpair()
left.send(b':') # Should be 3:foo,
try:
msg = py_fanos.recv(right)
except ValueError as e:
print(type(e))
print(e)
else:
self.fail('Expected failure')
left.close()
right.close()
def testInvalidDigits(self):
left, right = socket.socketpair()
left.send(b'34') # EOF in the middle of length
left.close()
try:
msg = py_fanos.recv(right)
except ValueError as e:
print(type(e))
print(e)
else:
self.fail('Expected failure')
right.close()
def testInvalidMissingColon(self):
left, right = socket.socketpair()
left.send(b'34foo') # missing colon
left.close()
try:
msg = py_fanos.recv(right)
except ValueError as e:
print(type(e))
print(e)
else:
self.fail('Expected failure')
right.close()
def testInvalidMissingComma(self):
left, right = socket.socketpair()
# Short payload BLOCKS indefinitely?
#left.send(b'3:fo')
left.send(b'3:foo') # missing comma
left.close()
try:
msg = py_fanos.recv(right)
except ValueError as e:
print(type(e))
print(e)
else:
self.fail('Expected failure')
right.close()
if __name__ == '__main__':
unittest.main()
|
sniperganso/python-manilaclient
|
manilaclient/v2/share_snapshots.py
|
Python
|
apache-2.0
| 6,363
| 0
|
# Copyright 2012 NetApp
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Interface for shares extension."""
try:
from urllib import urlencode # noqa
except ImportError:
from urllib.parse import urlencode # noqa
from manilaclient import api_versions
from manilaclient import base
from manilaclient.common import constants
from manilaclient.openstack.common.apiclient import base as common_base
class ShareSnapshot(common_base.Resource):
"""Represent a snapshot of a share."""
def __repr__(self):
return "<ShareSnapshot: %s>" % self.id
def update(self, **kwargs):
"""Update this snapshot."""
self.manager.update(self, **kwargs)
def reset_state(self, state):
"""Update the snapshot with the privided state."""
self.manager.reset_state(self, state)
def delete(self):
"""Delete this snapshot."""
self.manager.delete(self)
def force_delete(self):
"""Delete the specified snapshot ignoring its current state."""
self.manager.force_delete(self)
class ShareSnapshotManager(base.ManagerWithFind):
"""Manage :class:`ShareSnapshot` resources."""
resource_class = ShareSnapshot
def create(self, share, force=False, name=None, description=None):
"""Create a snapshot of the given share.
:param share_id: The ID of the share to snapshot.
:param force: If force is True, create a snapshot even if the
share is busy. Default is False.
:param name: Name of the snapshot
:param description: Description of the snapshot
:rtype: :class:`ShareSnapshot`
"""
body = {'snapshot': {'share_id': common_base.getid(share),
'force': force,
'name': name,
'description': description}}
return self._create('/snapshots', body, 'snapshot')
def get(self, snapshot):
"""Get a snapshot.
:param snapshot: The :class:`ShareSnapshot` instance or string with ID
of snapshot to get.
:rtype: :class:`ShareSnapshot`
"""
snapshot_id = common_base.getid(snapshot)
return self._get('/snapshots/%s' % snapshot_id, 'snapshot')
def list(self, detailed=True, search_opts=None, sort_key=None,
sort_dir=None):
"""Get a list of snapshots of shares.
:param search_opts: Search options to filter out shares.
:param sort_key: Key to be sorted.
:param sort_dir: Sort direction, should be 'desc' or 'asc'.
:rtype: list of :class:`ShareSnapshot`
"""
if search_opts is None:
search_opts = {}
if sort_key is not None:
if sort_key in constants.SNAPSHOT_SORT_KEY_VALUES:
search_opts['sort_key'] = sort_key
else:
raise ValueError(
'sort_key must be one of the following: %s.'
% ', '.join(constants.SNAPSHOT_SORT_KEY_VALUES))
if sort_dir is not None:
if sort_dir in constants.SORT_DIR_VALUES:
search_opts['sort_dir'] = sort_dir
else:
raise ValueError(
'sort_dir must be one of the following: %s.'
% ', '.join(constants.SORT_DIR_VALUES))
if search_opts:
query_string = urlencode(
sorted([(k, v) for (k, v) in list(search_opts.items()) if v]))
if query_string:
query_string = "?%s" % (query_string,)
else:
query_string = ''
if detailed:
path = "/snapshots/detail%s" % (query_string,)
else:
path = "/snapshots%s" % (query_string,)
return self._list(path, 'snapshots')
def delete(self, snapshot):
"""Delete a snapshot of a share.
:param snapshot: The :class:`ShareSnapshot` to delete.
"""
self._delete("/snapshots/%s" % common_base.getid(snapshot))
def _do_force_delete(self, snapshot, action_name="force_delete"):
"""Delete the specified snapshot ignoring its current state."""
return self._action(action_name, common_base.getid(snapshot))
@api_versions.wraps("1.0", "2.6")
def force_delete(self, snapshot):
return self._do_force_delete(snapshot, "os-force_delete")
@api_versions.wraps("2.7") # noqa
def force_delete(self, snapshot):
return self._do_force_delete(snapshot, "force_delete")
def update(self, snapshot, **kwargs):
"""Update a snapshot.
:param snapshot: The :class:`ShareSnapshot` instance or string with ID
of snapshot to update.
:rtype: :class:`ShareSnapshot`
"""
if not kwargs:
return
body = {'snapshot': kwargs, }
snapshot_id = common_base.getid(snapshot)
return self._update("/snapshots/%s" % snapshot_id, body)
def _do_reset_state(self, snapshot, state, action_name="reset_status"):
"""Update the specified share snapshot with the provided state."""
return self._action(action_name, snapshot, {"status": state})
@api_versions.wraps("1.0", "2.6")
def reset_state(self, snapshot, state):
return self._do_reset_state(snapshot, state, "os-reset_status")
@api_versions.wraps("2.7") # noqa
def reset_state(self, snapshot, state):
return self._do_reset_state(snapshot, state, "reset_status")
def _action(self, action, snapshot, info=None, **kwargs):
"""Perform a snapshot 'action'."""
body = {action: info}
self.run_hooks('modify_body_for_action', body, **kwargs)
url = '/snapshots/%s/action' % common_base.getid(snapshot)
return self.api.client.post(url, body=body)
|
cmcerove/pyvxl
|
pyvxl/tests/run.py
|
Python
|
mit
| 884
| 0.002262
|
#!/usr/bin/env python
"""Run pytest with coverage and generate an html report."""
from sys import argv
from os import system as run
# To run a specific file with debug logging prints:
# py -3 -m pytest test_can.py --log-cli-format="%(asctime)s.%(msecs)d %(levelname)s: %(message)s (%(filename)s:%(lineno)d)" --log-cli-level=debug
def main(): # noqa
run_str = 'python -m coverage run --include={} --omit=./* -m pytest {} {}'
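# run_str placeholders: the coverage --include pattern, the pytest target, and any extra pytest arguments.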
arg = ''
# All source files included in coverage
includes = '../*'
if len(argv) >= 2:
arg = argv[1]
if ':' in argv[1]:
includes = argv[1].split('::')[0]
other_args = ' '.join(argv[2:])
run(run_str.format(includes, arg, other_args))
# Generate the html coverage report and ignore errors
run('python -m coverage html -i')
if __name__ == '__main__':
main()
|
iovation/launchkey-python
|
features/steps/directory_device_steps.py
|
Python
|
mit
| 7,152
| 0.00028
|
from uuid import uuid4, UUID
from behave import given, when, then
from formencode import Invalid, validators
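# Behave step definitions covering Device linking, listing and unlinking against a Directory.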
@given("I made a Device linking request")
@given("I have made a Device linking request")
@when("I make a Device linking request")
def make_device_linking_request(context):
current_directory = context.entity_manager.get_current_directory()
context.directory_device_manager.create_linking_request(
user_identifier=str(uuid4()),
directory_id=current_directory.id
)
@then("the Device link
|
ing response contains a valid QR Code URL")
def linking_response_contains_
|
valid_qr_code_url(context):
try:
validators.URL().to_python(
context.entity_manager.get_current_linking_response().qrcode
)
except Invalid as e:
raise Exception("Could not parse QR Code as URL: %s" % e)
@then("the Device linking response contains a valid Linking Code")
def linking_response_contains_valid_linking_code(context):
code = context.entity_manager.get_current_linking_response().code
if not code:
raise Exception("Linking code was not valid: %s" % code)
@then("the Device linking response contains a valid Device ID")
def linking_response_contains_valid_device_id(context):
device_id = context.entity_manager.get_current_linking_response().device_id
try:
if not device_id:
raise ValueError
UUID(device_id)
except ValueError:
raise Exception("Device ID was not valid: %s" % device_id)
@given("I retrieve the Devices list for the current User")
@when("I retrieve the Devices list for the current User")
def retrieve_devices_list_for_current_user(context):
current_directory = context.entity_manager.get_current_directory()
current_user_identifier = context.directory_device_manager.\
current_user_identifier
context.directory_device_manager.retrieve_user_devices(
current_user_identifier, current_directory.id)
@when("I retrieve the Devices list for the user \"{user_identifier}\"")
def retrieve_devices_list_for_user(context, user_identifier):
current_directory = context.entity_manager.get_current_directory()
context.directory_device_manager.retrieve_user_devices(
user_identifier,
current_directory.id
)
@then("the Device List has {count:d} Device")
@then("the Device List has {count:d} Devices")
@then("there should be {count:d} Device in the Devices list")
@then("there should be {count:d} Devices in the Devices list")
def verify_device_list_count(context, count):
current_device_list = context.entity_manager.get_current_device_list()
if current_device_list is None or len(current_device_list) != count:
raise Exception("Device list length length is not %s: %s" % (
count, current_device_list))
@then("all of the devices should be inactive")
def verify_all_devices_inactive(context):
current_device_list = context.entity_manager.get_current_device_list()
for device in current_device_list:
if device.status.is_active:
raise Exception("Device was active: %s" % device)
@then("all of the devices should be active")
def verify_all_devices_active(context):
current_device_list = context.entity_manager.get_current_device_list()
for device in current_device_list:
if not device.status.is_active:
raise Exception("Device was not active: %s" % device)
@when("I unlink the Device with the ID \"{device_id}\"")
def unlink_device_with_id(context, device_id):
current_directory = context.entity_manager.get_current_directory()
current_user_identifier = context.directory_device_manager. \
current_user_identifier
context.directory_device_manager.unlink_device(
device_id,
current_user_identifier,
current_directory.id
)
@when("I unlink the current Device")
def unlink_current_device(context):
current_directory = context.entity_manager.get_current_directory()
current_user_identifier = context.directory_device_manager. \
current_user_identifier
current_device = context.entity_manager.get_current_device()
context.directory_device_manager.unlink_device(
current_device.id,
current_user_identifier,
current_directory.id
)
@when("I attempt to unlink the device with the ID \"{device_id}\"")
def attempt_to_unlink_device_with_id(context, device_id):
current_directory = context.entity_manager.get_current_directory()
current_user_identifier = context.directory_device_manager. \
current_user_identifier
try:
context.directory_device_manager.unlink_device(
device_id,
current_user_identifier,
current_directory.id
)
except Exception as e:
context.current_exception = e
@when("I attempt to unlink the device from the User Identifier "
"\"{user_identifier}\"")
def attempt_to_unlink_user_identifier_device(context, user_identifier):
current_directory = context.entity_manager.get_current_directory()
try:
context.directory_device_manager.unlink_device(
str(uuid4()),
user_identifier,
current_directory.id
)
except Exception as e:
context.current_exception = e
# Device manager steps
@given("I have a linked device")
def link_device(context):
context.execute_steps(u'''
Given I made a Device linking request
When I link my device
''')
@when("I link my device")
def link_physical_device(context):
sdk_key = context.entity_manager.get_current_directory_sdk_keys()[0]
context.sample_app_device_manager.set_sdk_key(sdk_key)
linking_code = context.entity_manager.get_current_linking_response().code
context.sample_app_device_manager.link_device(linking_code)
# We should now be on the home page if everything succeeded
context.appium_device_manager.get_scrollable_element_by_text("Auth Methods")
@when("I link my physical device with the name \"{device_name}\"")
def link_device_with_name(context, device_name):
sdk_key = context.entity_manager.get_current_directory_sdk_keys()[0]
linking_code = context.entity_manager.get_current_linking_response().code
context.sample_app_device_manager.link_device(linking_code,
device_name=device_name)
@when("I approve the auth request")
def approve_auth_request(context):
context.sample_app_device_manager.approve_request()
@when("I deny the auth request")
def deny_auth_request(context):
context.sample_app_device_manager.deny_request()
@when("I receive the auth request and acknowledge the failure message")
def acknowledge_auth_failure_message(context):
context.sample_app_device_manager.receive_and_acknowledge_auth_failure()
@when("I make a Device linking request with a TTL of {ttl:d} seconds")
def step_impl(context, ttl):
current_directory = context.entity_manager.get_current_directory()
context.directory_device_manager.create_linking_request(
user_identifier=str(uuid4()),
directory_id=current_directory.id,
ttl=ttl
)
|
handbaggerli/DbInstaller
|
Python/DbInstaller.py
|
Python
|
gpl-3.0
| 6,513
| 0.005236
|
# -*- coding: utf-8 -*-
import sys
from argparse import ArgumentParser
from DatabaseLogin import DatabaseLogin
from GlobalInstaller import GlobalInstaller
from PyQt5 import QtWidgets
from Ui_MainWindow import Ui_MainWindow
# Imports so that the installer works, even though they are not needed here directly.
from PyQt5 import QtCore, QtGui
import cx_Oracle
import json
import base64
import urllib
from Crypto.Cipher import AES
from chardet import UniversalDetector
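# get_parser builds the command-line interface: flags selecting which object types to install, connection details, paths, and options for JSONL-driven unattended runs.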
def get_parser():
parser = ArgumentParser()
# Parameters that control the GUI initialisation.
parser.add_argument('--inst_synonym', action='store_true', default=False,
help=r"Setzt Flag für die Installation von Synonymen.")
parser.add_argument('--inst_sequence', action='store_true', default=False,
help=r"Setzt Flag für die Installation von Sequenzen.")
parser.add_argument('--inst_tab_save', action='store_true', default=False,
help=r"Setzt Flag für die Installation von Tab Save Tabellen.")
parser.add_argument('--inst_tab', action='store_false', default=True,
help=r"Entfernt Flag für die Installation von Tab Tabellen.")
parser.add_argument('--inst_view', action='store_false', default=True,
help=r"Entfernt Flag für die Installation von Views.")
parser.add_argument('--inst_package', action='store_false', default=True,
help=r"Entfernt Flag für die Installation von Packages.")
parser.add_argument('--inst_sql', action='store_false', default=True,
help=r"Entfernt Flag für die Installation von Sqls.")
# Extended parameters that control the GUI initialisation.
parser.add_argument('--username', default=r"", help=r"Benutzername der Datenbank Verbindung.")
parser.add_argument('--password', default=r"", help=r"Passwort der Datenbank Verbindung.")
parser.add_argument('--connection', default=r"", help=r"Connection der Datenbank Verbindung.")
parser.add_argument('--svnBasePath', default=r"", help=r"Schreibt Pfad in SVN Basis Pfad.")
parser.add_argument('--svnKndPath', default=r"", help=r"Schreibt Pfad in SVN Kassen Pfad.")
parser.add_argument('--installationPath', default=r"", help=r"Schreibt Pfad in Installation Pfad.")
parser.add_argument('--global_defines_file', default=r"",
help=r"Pfad zu einem TAB seperierten File wo die Defines vordefiniert sind.")
# jsonl_parameters overrides all other parameters.
parser.add_argument('--jsonl_parameters', type=str, default=r'',
help=(r"Übergabe von allen Parameter in einem JSONL Format."
"Dieses Format überschreibt alle Parameter."))
# Parameters that allow an unattended installation without the GUI. For this, the hideGui parameter must be set.
parser.add_argument('--hideGui', action='store_true', default=False, help=r"Startet DB Installer ohne GUI.")
parser.add_argument('--clean_installation_path', action='store_true', default=False,
help=r"Führt Aktion Installationspfad Bereinigen durch. Nur in Kombi-nation von Parameter –-hideGui oder --json_file_path.")
parser.add_argument('--copy_all_data_to_installation', action='store_true', default=False,
help=r"Führt Aktion Dateien ab Pfade Laden durch. Nur in Kombination von Parameter -–hideGui oder --json_file_path.")
parser.add_argument('--install_objects', action='store_true', default=False,
help=r"Führt Aktion Objekte installieren durch. Nur in Kombination von Parameter –-hideGui oder --json_file_path.")
parser.add_argument('--json_file_path', default=r"",
help=(r"Übergabe eines Parameter Files in Jsonl Format."
"Zusammen mit den Argumenten für die Aktionen kann damit eine ganze Kette von "
"Arbeiten mit einem einzigen Aufruf erledigt werden. "
"Arbeiten in einem Jsonl File sind immer ohne Gui "
"und schreiben Debug Informationen auf die Konsole."))
return parser
#
# Main program. Everything starts from this point.
#
if __name__ == "__main__":
parser = get_parser()
args = parser.parse_args()
dbLogin = DatabaseLogin(userName=args.username, passWord=args.password, connection=args.connection)
dbLogin.testConnection(printInfo=False)
globalInstaller = GlobalInstaller(dbLogin=dbLogin, svnBasePath=args.svnBasePath, svnKndPath=args.svnKndPath,
installationPath=args.installationPath, flag_synonym=args.inst_synonym,
flag_sequence=args.inst_sequence, flag_tab_save=args.inst_tab_save,
flag_tab=args.inst_tab, flag_view=args.inst_view, flag_package=args.inst_package,
flag_sql=args.inst_sql, global_defines_file=args.global_defines_file,
jsonl_parameters=args.jsonl_parameters
)
if len(args.json_file_path) > 0:
globalInstaller.workJsonlFile(json_file_path=args.json_file_path,
cleanInstallationPath=args.clean_installation_path,
copy_all_data_to_installation=args.copy_all_data_to_installation,
install_objects=args.install_objects)
elif args.hideGui:
# Calls function without gui.
# used in command line only.
if args.clean_installation_path:
globalInstaller.cleanInstallationPath()
if args.copy_all_data_to_installation:
globalInstaller.readInstallationObjectFromPath()
globalInstaller.copyAllData2InstallationPath()
if args.install_objects:
globalInstaller.installAllObjects2Database()
else:
# Default option starts the GUI.
app = QtWidgets.QApplication(sys.argv)
MainWindow = QtWidgets.QMainWindow()
ui = Ui_MainWindow()
ui.setupUi(MainWindow)
ui.connect_user_isgnals()
ui.set_user_variables(globalInstaller=globalInstaller)
MainWindow.show()
sys.exit(app.exec_())
|
CongLi/avocado-vt
|
virttest/libvirt_vm.py
|
Python
|
gpl-2.0
| 106,547
| 0.000319
|
"""
Utility classes and functions to handle Virtual Machine creation using libvirt.
:copyright: 2011 Red Hat Inc.
"""
import time
import string
import os
import logging
import fcntl
import re
import shutil
import tempfile
import platform
import aexpect
from avocado.utils import process
from avocado.utils import crypto
from avocado.core import exceptions
from . import error_context
from . import utils_misc
from . import virt_vm
from . import storage
from . import remote
from . import virsh
from . import libvirt_xml
from . import data_dir
from . import xml_utils
from . import utils_selinux
def normalize_connect_uri(connect_uri):
"""
Processes connect_uri Cartesian into something virsh can use
:param connect_uri: Cartesian Params setting
:return: Normalized connect_uri
"""
if connect_uri == "default":
result = virsh.canonical_uri()
else:
result = virsh.canonical_uri(uri=connect_uri)
if not result:
raise ValueError("Normalizing connect_uri '%s' failed, is libvirt "
"running?" % connect_uri)
return result
def complete_uri(ip_address, protocol=None, port=None):
"""
Return a complete URI with the combination of ip_address and local uri.
It is useful when you need to connect remote hypervisor.
:param ip_address: an ip address or a hostname
:param protocol: protocol for uri eg: tcp, spice etc.
:param port: port for the protocol
:return: a complete uri
"""
if protocol and port:
complete_uri = "%s://%s:%s" % (protocol, ip_address, port)
else:
# Allow to raise CmdError if canonical_uri is failed
uri = virsh.canonical_uri(ignore_status=False)
driver = uri.split(":")[0]
# The libvirtd daemon's mode(system or session on qemu)
daemon_mode = uri.split("/")[-1]
complete_uri = "%s+ssh://%s/%s" % (driver, ip_address, daemon_mode)
return complete_uri
def get_uri_with_transport(uri_type='qemu', transport="", dest_ip=""):
"""
Return a URI to connect driver on dest with a specified transport.
:param origin_uri: The URI on dest used to connect itself directly.
:param transport: The transport type connect to dest.
:param dest_ip: The ip of destination.
"""
_type2uri_ = {'qemu': "qemu:///system",
'qemu_system': "qemu:///system",
'qemu_session': "qemu:///session",
'lxc': "lxc:///",
'xen': "xen:///",
'esx': "esx:///"}
try:
origin_uri = _type2uri_[uri_type]
except KeyError:
raise ValueError("Param uri_type = %s is not supported." % (uri_type))
# For example:
# ("qemu:///system")-->("qemu", "system")
# ("lxc:///")-->("lxc", "")
origin_uri_elems = origin_uri.split(":///")
transport_uri_driver = origin_uri_elems[0]
transport_uri_dest = origin_uri_elems[-1]
if transport:
transport_uri_driver = ("%s+%s" % (transport_uri_driver, transport))
transport_uri_dest = ("://%s/%s" % (dest_ip, transport_uri_dest))
return ("%s%s" % (transport_uri_driver, transport_uri_dest))
class VM(virt_vm.BaseVM):
"""
This class handles all basic VM operations for libvirt.
"""
def __init__(self, name, params, root_dir, address_cache, state=None):
"""
Initialize the object and set a few attributes.
:param name: The name of the object
:param params: A dict containing VM params
(see method make_create_command for a full description)
:param root_dir: Base directory for relative filenames
:param address_cache: A dict that maps MAC addresses to IP addresses
:param state: If provided, use this as self.__dict__
"""
if state:
self.__dict__ = state
else:
self.process = None
self.serial_ports = []
self.serial_console_log = None
self.serial_console = None
self.redirs = {}
self.vnc_port = None
self.vnc_autoport = True
self.pci_assignable = None
self.netdev_id = []
self.device_id = []
self.pci_devices = []
self.uuid = None
self.remote_sessions = []
self.spice_port = 8000
self.name = name
self.params = params
self.root_dir = root_dir
self.address_cache = address_cache
self.vnclisten = "0.0.0.0"
self.connect_uri = normalize_connect_uri(params.get("connect_uri",
"default"))
self.driver_type = virsh.driver(uri=self.connect_uri)
self.params['driver_type_' + self.name] = self.driver_type
# virtnet init depends on vm_type/driver_type being set w/in params
super(VM, self).__init__(name, params)
logging.info("Libvirt VM '%s', driver '%s', uri '%s'",
self.name, self.driver_type, self.connect_uri)
def is_lxc(self):
"""
Return True if VM is linux container.
"""
return (self.connect_uri and self.connect_uri.count("lxc"))
def is_qemu(self):
"""
Return True if VM is a qemu guest.
"""
return (self.connect_uri and self.connect_uri.count("qemu"))
def is_xen(self):
"""
Return True if VM is a xen guest.
"""
return (self.connect_uri and self.connect_uri.count("xen"))
def is_esx(self):
"""
Return True if VM is a esx guest.
"""
return (self.connect_uri and self.connect_uri.count("esx"))
def verify_alive(self):
"""
Make sure the VM is alive.
:raise VMDeadError: If the VM is dead
"""
if not self.is_alive():
raise virt_vm.VMDeadError("Domain %s is inactive" % self.name,
self.state())
def is_alive(self):
"""
Return True if VM is alive.
"""
return virsh.is_alive(self.name, uri=self.connect_uri)
def is_dead(self):
"""
Return True if VM is dead.
"""
return virsh.is_dead(self.name, uri=self.connect_uri)
def is_paused(self):
"""
Return True if VM is paused.
"""
return (self.state() == "paused")
def is_persistent(self):
"""
Return True if VM is persistent.
"""
try:
dominfo = (virsh.dominfo(self.name,
uri=self.connect_uri).stdout.strip())
return bool(re.search(r"^Persistent:\s+[Yy]es", dominfo,
re.MULTILINE))
except process.CmdError:
return False
def is_autostart(self):
"""
Return True if VM is autostart.
"""
try:
dominfo = (virsh.dominfo(self.name,
uri=self.connect_uri).stdout.strip())
return bool(re.search(r"^Autostart:\s+enable", dominfo,
re.MULTILINE))
except process.CmdError:
return False
def exists(self):
"""
Return True if VM exists.
"""
return virsh.domain_exists(self.name, uri=self.connect_uri)
def undefine(self, options=None):
"""
Undefine the VM.
"""
try:
virsh.undefine(self.name, options=options, uri=self.connect_uri,
ignore_status=False)
except process.CmdError as detail:
logging.error("Undefined VM %s failed:\n%s", self.name, detail)
return False
return True
def define(self, xml_file):
"""
Define the VM.
"""
if not os.path.exists(xml_file):
logging.error("File %s not found." % xml_file)
return False
try:
virsh.define(xml_file, uri=self.connect_uri,
ignore_status=False)
except process.CmdError as detail:
logging.error("Defined VM from %s failed:\n%s", xml_file, de
|
xHeliotrope/injustice_dropper
|
env/lib/python3.4/site-packages/django_twilio/settings.py
|
Python
|
mit
| 242
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
"""
django_twilio specific settings.
"""
from .utils import discover_twilio_credentials
TWILIO_ACCOUNT_SID, TWILIO_AUTH_TOKEN = discover_twilio_credentials()
|
esc/dask
|
dask/array/tests/test_ghost.py
|
Python
|
bsd-3-clause
| 9,623
| 0.009145
|
import pytest
pytest.importorskip('numpy')
import numpy as np
from numpy.testing import assert_array_almost_equal, assert_array_equal
import dask
import dask.array as da
from dask.array.ghost import (Array, fractional_slice, getitem, trim_internal,
ghost_internal, nearest, constant, boundaries,
reflect, periodic, ghost)
from dask.core import get
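# Test helper: compute any dask Arrays with the synchronous scheduler, compare, and collapse an ndarray result to a single bool.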
def eq(a, b):
if isinstance(a, Array):
a = a.compute(get=dask.get)
if isinstance(b, Array):
b = b.compute(get=dask.get)
c = a == b
if isinstance(c, np.ndarray):
c = c.all()
return c
def test_fractional_slice():
assert fractional_slice(('x', 4.9), {0: 2}) == \
(getitem, ('x', 5), (slice(0, 2),))
assert fractional_slice(('x', 3, 5.1), {0: 2, 1: 3}) == \
(getitem, ('x', 3, 5), (slice(None, None, None), slice(-3, None)))
assert fractional_slice(('x', 2.9, 5.1), {0: 2, 1: 3}) == \
(getitem, ('x', 3, 5), (slice(0, 2), slice(-3, None)))
def test_ghost_internal():
x = np.arange(64).reshape((8, 8))
d = da.from_array(x, chunks=(4, 4))
g = ghost_internal(d, {0: 2, 1: 1})
result = g.compute(get=get)
assert g.chunks == ((6, 6), (5, 5))
expected = np.array([
[ 0, 1, 2, 3, 4, 3, 4, 5, 6, 7],
[ 8, 9, 10, 11, 12, 11, 12, 13, 14, 15],
[16, 17, 18, 19, 20, 19, 20, 21, 22, 23],
[24, 25, 26, 27, 28, 27, 28, 29, 30, 31],
[32, 33, 34, 35, 36, 35, 36, 37, 38, 39],
[40, 41, 42, 43, 44, 43, 44, 45, 46, 47],
[16, 17, 18, 19, 20, 19, 20, 21, 22, 23],
[24, 25, 26, 27, 28, 27, 28, 29, 30, 31],
[32, 33, 34, 35, 36, 35, 36, 37, 38, 39],
[40, 41, 42, 43, 44, 43, 44, 45, 46, 47],
[48, 49, 50, 51, 52, 51, 52, 53, 54, 55],
[56, 57, 58, 59, 60, 59, 60, 61, 62, 63]])
assert eq(result, expected)
def test_trim_internal():
d = da.ones((40, 60), chunks=(10, 10))
e = trim_internal(d, axes={0: 1, 1: 2})
assert e.chunks == ((8, 8, 8, 8), (6, 6, 6, 6, 6, 6))
def test_periodic():
x = np.arange(64).reshape((8, 8))
d = da.from_array(x, chunks=(4, 4))
e = periodic(d, axis=0, depth=2)
assert e.shape[0] == d.shape[0] + 4
assert e.shape[1] == d.shape[1]
assert eq(e[1, :], d[-1, :])
assert eq(e[0, :], d[-2, :])
def test_reflect():
x = np.arange(10)
d = da.from_array(x, chunks=(5, 5))
e = reflect(d, axis=0, depth=2)
expected = np.array([1, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 9, 8])
assert eq(e, expected)
e = reflect(d, axis=0, depth=1)
expected = np.array([0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 9])
assert eq(e, expected)
def test_nearest():
x = np.arange(10)
d = da.from_array(x, chunks=(5, 5))
e = nearest(d, axis=0, depth=2)
expected = np.array([0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 9, 9])
assert eq(e, expected)
e = nearest(d, axis=0, depth=1)
expected = np.array([0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 9])
assert eq(e, expected)
def test_constant():
x = np.arange(64).reshape((8, 8))
d = da.from_array(x, chunks=(4, 4))
e = constant(d, axis=0, depth=2, value=10)
assert e.shape[0] == d.shape[0] + 4
assert e.shape[1] == d.shape[1]
assert eq(e[1, :], 10)
assert eq(e[-1, :], 10)
def test_boundaries():
x = np.arange(64).reshape((8, 8))
d = da.from_array(x, chunks=(4, 4))
e = boundaries(d, {0: 2, 1: 1}, {0: 0, 1: 'periodic'})
expected = np.array(
[[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 7, 0, 1, 2, 3, 4, 5, 6, 7, 0],
[15, 8, 9,10,11,12,13,14,15, 8],
[23,16,17,18,19,20,21,22,23,16],
[31,24,25,26,27,28,29,30,31,24],
[39,32,33,34,35,36,37,38,39,32],
[47,40,41,42,43,44,45,46,47,40],
[55,48,49,50,51,52,53,54,55,48],
[63,56,57,58,59,60,61,62,63,56],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])
assert eq(e, expected)
def test_ghost():
x = np.arange(64).reshape((8, 8))
d = da.from_array(x, chunks=(4, 4))
g = ghost(d, depth={0: 2, 1: 1}, boundary={0: 100, 1: 'reflect'})
assert g.chunks == ((8, 8), (6, 6))
expected = np.array(
[[100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100],
[100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100],
[ 0, 0, 1, 2, 3, 4, 3, 4, 5, 6, 7, 7],
[ 8, 8, 9, 10, 11, 12, 11, 12, 13, 14, 15, 15],
[ 16, 16, 17, 18, 19, 20, 19, 20, 21, 22, 23, 23],
[ 24, 24, 25, 26, 27, 28, 27, 28, 29, 30, 31, 31],
[ 32, 32, 33, 34, 35, 36, 35, 36, 37, 38, 39, 39],
[ 40, 40, 41, 42, 43, 44, 43, 44, 45, 46, 47, 47],
[ 16, 16, 17, 18, 19, 20, 19, 20, 21, 22, 23, 23],
[ 24, 24, 25, 26, 27, 28, 27, 28, 29, 30, 31, 31],
[ 32, 32, 33, 34, 35, 36, 35, 36, 37, 38, 39, 39],
[ 40, 40, 41, 42, 43, 44, 43, 44, 45, 46, 47, 47],
[ 48, 48, 49, 50, 51, 52, 51, 52, 53, 54, 55, 55],
[ 56, 56, 57, 58, 59, 60, 59, 60, 61, 62, 63, 63],
[100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100],
[100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100]])
assert eq(g, expected)
g = ghost(d, depth={0: 2, 1: 1}, boundary={0: 100})
assert g.chunks == ((8, 8), (5, 5))
def test_map_overlap():
x = da.arange(10, chunks=5)
y = x.map_overlap(lambda x: x + len(x), depth=2)
assert eq(y, np.arange(10) + 5 + 2 + 2)
def test_nearest_ghost():
a = np.arange(144).reshape(12, 12).astype(float)
darr = da.from_array(a, chunks=(6, 6))
garr = ghost(darr, depth={0: 5, 1: 5},
boundary={0: 'nearest', 1: 'nearest'})
tarr = trim_internal(garr, {0: 5, 1: 5})
assert_array_almost_equal(tarr, a)
def test_0_depth():
expected = np.arange(100).reshape(10, 10)
darr = da.from_array(expected, chunks=(5, 2))
depth = {0: 0, 1: 0}
reflected = ghost(darr, depth=depth, boundary='reflect')
nearest = ghost(darr, depth=depth, boundary='nearest')
periodic = ghost(darr, depth=depth, boundary='periodic')
constant = ghost(darr, depth=depth, boundary=42)
result = trim_internal(reflected, depth)
assert_array_equal(result, expected)
result = trim_internal(nearest, depth)
assert_array_equal(result, expected)
result = trim_internal(periodic, depth)
assert_array_equal(result, expected)
result = trim_internal(constant, depth)
assert_array_equal(result, expected)
def test_some_0_depth():
expected = np.arange(100).reshape(10, 10)
darr = da.from_array(expected, chunks=(5, 5))
depth = {0: 4, 1: 0}
reflected = ghost(darr, depth=depth, boundary='reflect')
nearest = ghost(darr, depth=depth, boundary='nearest')
periodic = ghost(darr, depth=depth, boundary='periodic')
constant = ghost(darr, depth=depth, boundary=42)
result = trim_internal(reflected, depth)
assert_array_equal(result, expected)
result = trim_internal(nearest, depth)
assert_array_equal(result, expected)
result = trim_internal(periodic, depth)
assert_array_equal(result, expected)
result = trim_internal(constant, depth)
assert_array_equal(result, expected)
def test_one_chunk_along_axis():
a = np.arange(2 * 9).reshape(2, 9)
darr = da.from_array(a, chunks=((2,), (2, 2, 2, 3)))
g = ghost(darr, depth=0, boundary=0)
assert a.shape == g.shape
def test_constant_boundaries():
a = np.arange(1 * 9).reshape(1, 9)
darr = da.from_array(a, chunks=((1,), (2, 2, 2, 3)))
b = boundaries(darr, {0: 0, 1: 0}, {0: 0, 1: 0})
assert b.chunks == darr.chunks
def test_depth_equals_boundary_length():
expected = np.arange(100).reshape(10, 10)
darr = da.from_array(expected, chunks=(5, 5))
depth = {0: 5, 1: 5}
reflected = ghost(darr, depth=depth, boundary='reflect')
nearest = ghost(darr, depth=depth, boundary='nearest')
pe
|
shail2810/nova
|
nova/tests/unit/virt/vmwareapi/test_vmops.py
|
Python
|
apache-2.0
| 114,813
| 0.000575
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import mock
from oslo_serialization import jsonutils
from oslo_utils import units
from oslo_utils import uuidutils
from oslo_vmware import exceptions as vexc
from oslo_vmware.objects import datastore as ds_obj
from oslo_vmware import vim_util as vutil
import six
from nova.compute import power_state
from nova import context
from nova import exception
from nova.network import model as network_model
from nova import objects
from nova import test
from nova.tests.unit import fake_flavor
from nova.tests.unit import fake_instance
import nova.tests.unit.image.fake
from nova.tests.unit.virt.vmwareapi import fake as vmwareapi_fake
from nova.tests.unit.virt.vmwareapi import stubs
from nova import utils
from nova import version
from nova.virt import hardware
from nova.virt.vmwareapi import constants
from nova.virt.vmwareapi import driver
from nova.virt.vmwareapi import ds_util
from nova.virt.vmwareapi import images
from nova.virt.vmwareapi import vif
from nova.virt.vmwareapi import vim_util
from nova.virt.vmwareapi import vm_util
from nova.virt.vmwareapi import vmops
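# Matcher for mock assertions: compares a DatastorePath argument against its expected string form.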
class DsPathMatcher(object):
def __init__(self, expected_ds_path_str):
self.expected_ds_path_str = expected_ds_path_str
def __eq__(self, ds_path_param):
return str(ds_path_param) == self.expected_ds_path_str
class VMwareVMOpsTestCase(test.NoDBTestCase):
def setUp(self):
super(VMwareVMOpsTestCase, self).setUp()
vmwareapi_fake.reset()
stubs.set_stubs(self.stubs)
self.flags(enabled=True, group='vnc')
self.flags(image_cache_subdirectory_name='vmware_base',
my_ip='',
flat_injected=True)
self._context = context.RequestContext('fake_user', 'fake_project')
self._session = driver.VMwareAPISession()
self._virtapi = mock.Mock()
self._image_id = nova.tests.unit.image.fake.get_valid_image_id()
fake_ds_ref = vmwareapi_fake.ManagedObjectReference('fake-ds')
self._ds = ds_obj.Datastore(
ref=fake_ds_ref, name='fake_ds',
capacity=10 * units.Gi,
freespace=10 * units.Gi)
self._dc_info = vmops.DcInfo(
ref='fake_dc_ref', name='fake_dc',
vmFolder='fake_vm_folder')
cluster = vmwareapi_fake.create_cluster('fake_cluster', fake_ds_ref)
self._instance_values = {
'display_name': 'fake_display_name',
'name': 'fake_name',
'uuid': 'fake_uuid',
'vcpus': 1,
'memory_mb': 512,
'image_ref': self._image_id,
'root_gb': 10,
'node': '%s(%s)' % (cluster.mo_id, cluster.name),
'expected_attrs': ['system_metadata'],
}
self._instance = fake_instance.fake_instance_obj(
self._context, **self._instance_values)
self._flavor = objects.Flavor(name='m1.small', memory_mb=512, vcpus=1,
root_gb=10, ephemeral_gb=0, swap=0,
extra_specs={})
self._instance.flavor = self._flavor
self._vmops = vmops.VMwareVMOps(self._session, self._virtapi, None,
cluster=cluster.obj)
self._cluster = cluster
self._image_meta = objects.ImageMeta.from_dict({})
subnet_4 = network_model.Subnet(cidr='192.168.0.1/24',
dns=[network_model.IP('192.168.0.1')],
gateway=
network_model.IP('192.168.0.1'),
ips=[
network_model.IP('192.168.0.100')],
routes=None)
subnet_6 = network_model.Subnet(cidr='dead:beef::1/64',
dns=None,
gateway=
network_model.IP('dead:beef::1'),
ips=[network_model.IP(
'dead:beef::dcad:beff:feef:0')],
routes=None)
network = network_model.Network(id=0,
bridge='fa0',
label='fake',
subnets=[subnet_4, subnet_6],
vlan=None,
bridge_interface=None,
injected=True)
self._network_values = {
'id': None,
'address': 'DE:AD:BE:EF:00:00',
'network': network,
'type': None,
'devname': None,
'ovs_interfaceid': None,
'rxtx_cap': 3
}
self.network_info = network_model.NetworkInfo([
network_model.VIF(**self._network_values)
])
pure_IPv6_network = network_model.Network(id=0,
bridge='fa0',
label='fake',
subnets=[subnet_6],
vlan=None,
bridge_interface=None,
injected=True)
self.pure_IPv6_network_info = network_model.NetworkInfo([
network_model.VIF(id=None,
address='DE:AD:BE:EF:00:00',
network=pure_IPv6_network,
type=None,
devname=None,
ovs_interfaceid=None,
rxtx_cap=3)
])
self._metadata = (
"name:fake_display_name\n"
"userid:fake_user\n"
"username:None\n"
"projectid:fake_project\n"
"projectname:None\n"
"flavor:name:m1.micro\n"
"flavor:memory_mb:6\n"
"flavor:vcpus:28\n"
"flavor:ephemeral_gb:8128\n"
"flavor:root_gb:496\n"
"flavor:swap:33550336\n"
"imageid:70a599e0-31e7-49b7-b260-868f441e862b\n"
"package:%s\n" % version.version_string_with_package())
def test_get_machine_id_str(self):
result = vmops.VMwareVMOps._get_machine_id_str(self.network_info)
self.assertEqual('DE:AD:BE:EF:00:00;192.168.0.100;255.255.255.0;'
'192.168.0.1;192.168.0.255;192.168.0.1#', result)
result = vmops.VMwareVMOps._get_machine_id_str(
self.pure_IPv6_network_info)
self.assertEqual('DE:AD:BE:EF:00:00;;;;;#', result)
def _setup_create_folder_mocks(self):
ops = vmops.VMwareVMOps(mock.Mock(), mock.Mock(), mock.Mock())
base_name = 'folder'
ds_name = "datastore"
ds_ref = mock.Mock()
ds_ref.value = 1
dc_ref = mock.Mock()
ops._datastore_dc_mapping[ds_ref.value] = vmops.DcInfo(
ref=dc_ref,
name='fake-name',
vmFolder='fake-folder')
path = ds_obj.DatastorePath(ds_name, base_name)
return ds_name, ds_ref, ops, path, dc_ref
@mock.patch.object(ds_util, 'mkdir')
def test_create_folder_if_missing(self, mock_mkdir):
ds_name, ds_ref, ops, path, dc = sel
|
ESS-LLP/erpnext
|
erpnext/healthcare/doctype/medication/medication.py
|
Python
|
gpl-3.0
| 2,862
| 0.025507
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe import _
from frappe.model.document import Document
from frappe.model.rename_doc import rename_doc
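# Medication keeps a linked Item (and Item Price) record in sync with its billing details.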
class Medication(Document):
def validate(self):
self.enable_disable_item()
def after_insert(self):
create_item_from_medication(self)
def on_update(self):
if self.change_in_item:
self.update_item_and_item_price()
def enable_disable_item(self):
if self.is_billable:
if self.disabled:
frappe.db.set_value('Item', self.item, 'disabled', 1)
else:
frappe.db.set_value('Item', self.item, 'disabled', 0)
def update_item_and_item_price(self):
if self.is_billable and self.item:
item_doc = frappe.get_doc('Item', {'item_code': self.item})
item_doc.item_name = self.medication_name
item_doc.item_group = self.item_group
item_doc.description = self.description
item_doc.stock_uom = self.stock_uom
item_doc.disabled = 0
item_doc.save(ignore_permissions=True)
if self.rate:
item_price = frappe.get_doc('Item Price', {'item_code': self.item})
item_price.item_name = self.medication_name
item_price.price_list_rate = self.rate
item_price.save()
elif not self.is_billable and self.item:
frappe.db.set_value('Item', self.item, 'disabled', 1)
self.db_set('change_in_item', 0)
def create_item_from_medication(doc):
disabled = doc.disabled
if doc.is_billable and not doc.disabled:
disabled = 0
uom = doc.stock_uom or frappe.db.get_single_value('Stock Settings', 'stock_uom')
item = frappe.get_doc({
'doctype': 'Item',
'item_code': doc.medication_name,
'item_name':doc.medication_name,
'item_group': doc.item_group,
'description':doc.description,
'is_sales_item': 1,
'is_service_item': 1,
'is_purchase_item': 0,
'is_stock_item': 0,
'show_in_website': 0,
'is_pro_applicable': 0,
'disabled': disabled,
'stock_uom': uom
}).insert(ignore_permissions=True, ignore_mandatory=True)
make_item_price(item.name, doc.rate)
doc.db_set('item', item.name)
def make_item_price(item, item_price):
price_list_name = frappe.db.get_value('Price List', {'selling': 1})
frappe.get_doc({
'doctype': 'Item Price',
'price_list': price_list_name,
'item_code': item,
'price_list_rate': item_price
}).insert(ignore_permissions=True, ignore_mandatory=True)
@frappe.whitelist()
def change_item_code_from_medication(item_code, doc):
doc = frappe._dict(json.loads(doc))
if frappe.db.exists('Item', {'item_code': item_code}):
frappe.throw(_('Item with Item Code {0} already exists').format(item_code))
else:
rename_doc('Item', doc.item_code, item_code, ignore_permissions=True)
frappe.db.set_value('Medication', doc.name, 'item_code', item_code)
return
|
thethomaseffect/travers-media-tools
|
traversme/encoder/media_object.py
|
Python
|
mit
| 2,000
| 0
|
""" TODO: Add docstring """
import re
import pexpect
class MediaObject(object):
"""Represents an encodable object"""
def __init__(self, input_filename, output_filename):
self.input_filename = input_filename
self.output_filename = output_filename
self.media_duration = self.get_media_duration()
# INFO: All other media information could potentially be put here too
def get_media_duration(self):
"""
Spawns an avprobe process to get the media duration.
Spawns an avprobe process and saves the output to a list, then uses
regex to find the duration of the media and return it as an integer.
"""
info_process = pexpect.spawn("/usr/bin/avprobe " + self.input_filename)
subprocess_output = info_process.readlines()
info_process.close()
# Non-greedy match on characters 'Duration: ' followed by
# number in form 00:00:00:00
regex_group = re.compile(".*?Duration: .*?(\\d+):(\\d+):(\\d+).(\\d+)",
re.IGNORECASE | re.DOTALL)
# Exits as soon as duration is found
# PERF: Perform some tests to find the min number of lines
# certain not to contain the duration, then operate on a slice
# not containing those lines
for line in subprocess_output:
regex_match = regex_group.search(line)
if regex_match:
# Return the total duration in seconds
return ((int(regex_match.group(1)) * 3600) + # Hours
(int(regex_match.group(2)) * 60) + # Minutes
int(regex_match.group(3)) + # Seconds
# Round the fractional part up to the nearest second
1 if int(regex_match.group(4)) > 50 else 0)
# Not found so it's possible the process terminated early or an update
# broke the regex. Unlikely but we must return something just in case.
return -1
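# Example usage (sketch; assumes /usr/bin/avprobe exists and the input file is readable):
# media = MediaObject('input.mkv', 'output.mp4')  # hypothetical filenames
# print(media.media_duration)  # duration in whole seconds, or -1 if it could not be parsed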
shuang1330/tf-faster-rcnn | lib/model/test.py | Python | mit | 8,856 | 0.015583
# --------------------------------------------------------
# Tensorflow Faster R-CNN
# Licensed under The MIT License [see LICENSE for details]
# Written by Xinlei Chen
# --------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
#import cv2
from scipy.misc import imresize
from scipy.misc import imread
import numpy as np
try:
import cPickle as pickle
except ImportError:
import pickle
import os
import math
import tensorflow as tf
from utils.timer import Timer
from utils.cython_nms import nms, nms_new
from utils.boxes_grid import get_boxes_grid
from utils.blob import im_list_to_blob
from model.config import cfg, get_output_dir
from model.bbox_transform import clip_boxes, bbox_transform_inv
def _get_image_blob(im):
"""Converts an image into a network input.
Arguments:
im (ndarray): a color image in BGR order
Returns:
blob (ndarray): a data blob holding an image pyramid
im_scale_factors (list): list of image scales (relative to im) used
in the image pyramid
"""
im_orig = im.astype(np.float32, copy=True)
im_orig -= cfg.PIXEL_MEANS
im_shape = im_orig.shape
im_size_min = np.min(im_shape[0:2])
im_size_max = np.max(im_shape[0:2])
processed_ims = []
im_scale_factors = []
for target_size in cfg.TEST.SCALES:
im_scale = float(target_size) / float(im_size_min)
# Prevent the biggest axis from being more than MAX_SIZE
if np.round(im_scale * im_size_max) > cfg.TEST.MAX_SIZE:
im_scale = float(cfg.TEST.MAX_SIZE) / float(im_size_max)
im_row,im_col,_ = im.shape
im = imresize(im_orig, (int(im_row*im_scale), int(im_col*im_scale)))
im_scale_factors.append(im_scale)
processed_ims.append(im)
# Create a blob to hold the input images
blob = im_list_to_blob(processed_ims)
return blob, np.array(im_scale_factors)
def _get_blobs(im):
"""Convert an image and RoIs within that image into network inputs."""
blobs = {}
blobs['data'], im_scale_factors = _get_image_blob(im)
return blobs, im_scale_factors
def _clip_boxes(boxes, im_shape):
"""Clip boxes to image boundaries."""
# x1 >= 0
boxes[:, 0::4] = np.maximum(boxes[:, 0::4], 0)
# y1 >= 0
boxes[:, 1::4] = np.maximum(boxes[:, 1::4], 0)
# x2 < im_shape[1]
boxes[:, 2::4] = np.minimum(boxes[:, 2::4], im_shape[1] - 1)
# y2 < im_shape[0]
boxes[:, 3::4] = np.minimum(boxes[:, 3::4], im_shape[0] - 1)
return boxes
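# Illustration (sketch): a box [-5, 10, 120, 90] clipped to a 100x100 image becomes
# [0, 10, 99, 90]: x1/y1 are floored at 0 and x2/y2 are capped at width-1/height-1.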
def _rescale_boxes(boxes, inds, scales):
"""Rescale boxes according to image rescaling."""
for i in range(boxes.shape[0]):
boxes[i,:] = boxes[i,:] / scales[int(inds[i])]
return boxes
def im_detect(sess, net, im):
blobs, im_scales = _get_blobs(im)
assert len(im_scales) == 1, "Only single-image batch implemented"
im_blob = blobs['data']
# seems to have height, width, and image scales
# still not sure about the scale, maybe full image it is 1.
blobs['im_info'] = \
np.array([[im_blob.shape[1], im_blob.shape[2], im_scales[0]]], dtype=np.float32)
_, scores, bbox_pred, rois = \
net.test_image(sess, blobs['data'], blobs['im_info'])
boxes = rois[:, 1:5] / im_scales[0]
# print(scores.shape, bbox_pred.shape, rois.shape, boxes.shape)
scores = np.reshape(scores, [scores.shape[0], -1])
bbox_pred = np.reshape(bbox_pred, [bbox_pred.shape[0], -1])
if cfg.TEST.BBOX_REG:
# Apply bounding-box regression deltas
box_deltas = bbox_pred
pred_boxes = bbox_transform_inv(boxes, box_deltas)
pred_boxes = _clip_boxes(pred_boxes, im.shape)
else:
# Simply repeat the boxes, once for each class
pred_boxes = np.tile(boxes, (1, scores.shape[1]))
return scores, pred_boxes
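# Example call (sketch; requires an active tf.Session, a constructed network and a
# BGR image loaded as an ndarray):
# scores, boxes = im_detect(sess, net, im)
# scores has shape (num_rois, num_classes); boxes has shape (num_rois, 4 * num_classes).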
def apply_nms(all_boxes, thresh):
"""Apply non-maximum suppression to all predicted boxes output by the
test_net method.
"""
num_classes = len(all_boxes)
num_images = len(all_boxes[0])
nms_boxes = [[[] for _ in range(num_images)] for _ in range(num_classes)]
for cls_ind in range(num_classes):
for im_ind in range(num_images):
dets = all_boxes[cls_ind][im_ind]
if dets == []:
continue
x1 = dets[:, 0]
y1 = dets[:, 1]
x2 = dets[:, 2]
y2 = dets[:, 3]
scores = dets[:, 4]
inds = np.where((x2 > x1) & (y2 > y1) & (scores > cfg.TEST.DET_THRESHOLD))[0]
dets = dets[inds,:]
if dets == []:
continue
keep = nms(dets, thresh)
if len(keep) == 0:
continue
nms_boxes[cls_ind][im_ind] = dets[keep, :].copy()
return nms_boxes
def test_net(sess, net, imdb, weights_filename, experiment_setup=None,
max_per_image=100, thresh=0.05):
np.random.seed(cfg.RNG_SEED)
"""Test a Fast R-CNN network on an image database."""
num_images = len(imdb.image_index)
# num_images = 2
# all detections are collected into:
# all_boxes[cls][image] = N x 5 array of detections in
# (x1, y1, x2, y2, score)
all_boxes = [[[] for _ in range(num_images)]
for _ in range(imdb.num_classes)]
output_dir = get_output_dir(imdb, weights_filename)
print('using output_dir: ', output_dir)
# timers
_t = {'im_detect' : Timer(), 'misc' : Timer()}
# define a writer to write the histogram of summaries
# test_tbdir = '/home/shuang/projects/tf-faster-rcnn/tensorboard/'
# if not os.path.exists(test_tbdir):
# print('making directory for test tensorboard result')
# os.mkdir(test_tbdir)
# writer = tf.summary.FileWriter(test_tbdir,sess.graph)
# define a folder for activation results
test_actdir = '../activations_retrained'
if not os.path.exists(test_actdir):
os.mkdir(test_actdir)
# define a folder for zero fractions
test_zerodir = './zero_fractions'
if not os.path.exists(test_zerodir):
os.mkdir(test_zerodir)
for i in range(num_images):
im = imread(imdb.image_path_at(i))
_t['im_detect'].tic()
scores, boxes = im_detect(sess, net, im)
_t['im_detect'].toc()
# write act summaries to tensorboard
# writer.add_summary(act_summaries)
# record the zero fraction -> only for vgg16
# zero_frac = []
# for layer_ind in range(13):
# batch_num,row,col,filter_num = acts[layer_ind].shape
# zero_frac.append([])
# for j in range(filter_num):
# # print(acts[0][:,:,:,i].shape)
# fraction = 1-np.count_nonzero(acts[layer_ind][:,:,:,j])/(batch_num*row*col)
# zero_frac[layer_ind].append(fraction)
_t['misc'].tic()
# skip j = 0, because it's the background class
chosen_classes = []
for j in range(1, imdb.num_classes):
# for j, clas in enumerate(imdb._classes[1:]):
inds = np.where(scores[:, j] > thresh)[0]
cls_scores = scores[inds, j]
cls_boxes = boxes[inds, j*4:(j+1)*4]
cls_dets = np.hstack((cls_boxes, cls_scores[:, np.newaxis])) \
.astype(np.float32, copy=False)
keep = nms(cls_dets, cfg.TEST.NMS)
cls_dets = cls_dets[keep, :]
all_boxes[j][i] = cls_dets
# if len(cls_dets)!=0: # only for recording activations_res
# chosen_classes.append(imdb._classes[j])
# Limit to max_per_image detections *over all classes*
if max_per_image > 0:
image_scores = np.hstack([all_boxes[j][i][:, -1]
for j in range(1, imdb.num_classes)])
if len(image_scores) > max_per_image:
image_thresh = np.sort(image_scores)[-max_per_image]
for j in range(1, imdb.num_classes):
keep = np.where(all_boxes[j][i][:, -1] >= image_thresh)[0]
all_boxes[j][i] = all_boxes[j][i][keep, :]
_t['misc'].toc()
# write acts to a separate text file for each separate image file -> only vgg
# f_name = '{}/{}.txt'.format(test_actdir,i)
# act_file = open(f_name,'w')
# act_file.write('\n'.join(chosen_classes))
# act_file.write('\n')
# sum_act = []
# for arr in acts:
# temp = np.sum(arr,axis = (0,1,2))
# sum_act.append(temp)
# for item in sum_act:
# act_file.write('{}\n'.format(str(item)))
# act_file.close()
# chosen_classes = []
# write zero fractions to text files -> only vgg
# file_name = '{}/{}.txt'.format(test_zerodir,i)
# zero_file =
e-gob/plataforma-kioscos-autoatencion | scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/module_utils/service.py | Python | bsd-3-clause | 7,923 | 0.002398
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c) Ansible Inc, 2016
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import glob
import os
import pickle
import platform
import select
import shlex
import subprocess
import traceback
from ansible.module_utils.six import PY2, b
from ansible.module_utils._text import to_bytes, to_text
def sysv_is_enabled(name):
'''
This function will check if the service name supplied
is enabled in any of the sysv runlevels
:arg name: name of the service to test for
'''
return bool(glob.glob('/etc/rc?.d/S??%s' % name))
def get_sysv_script(name):
'''
This function will return the expected path for an init script
corresponding to the service name supplied.
:arg name: name or path of the service to test for
'''
if name.startswith('/'):
result = name
else:
result = '/etc/init.d/%s' % name
return result
def sysv_exists(name):
'''
This function will return True or False depending on
the existence of an init script corresponding to the service name supplied.
:arg name: name of the service to test for
'''
return os.path.exists(get_sysv_script(name))
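# Example (sketch, outside of a playbook run):
# get_sysv_script('ssh')   -> '/etc/init.d/ssh'
# sysv_exists('ssh')       -> True only if that init script exists on this host
# sysv_is_enabled('ssh')   -> True if any /etc/rc?.d/S??ssh symlink is present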
def fail_if_missing(module, found, service, msg=''):
'''
This function will return an error or exit gracefully depending on check mode status
and if the service is missing or not.
:arg module: is an AnsibleModule object, used for its utility methods
:arg found: boolean indicating if services was found or not
:arg service: name of service
:kw msg: extra info to append to error/success msg when missing
'''
if not found:
if module.check_mode:
module.exit_json(msg="Service %s not found on %s, assuming it will exist on full run" % (service, msg), changed=True)
else:
module.fail_json(msg='Could not find the requested service %s: %s' % (service, msg))
def daemonize(module, cmd):
'''
Execute a command while detaching as a daemon, returns rc, stdout, and stderr.
:arg module: is an AnsibleModule object, used for its utility methods
:arg cmd: is a list or string representing the command and options to run
This is complex because daemonization is hard for people.
What we do is daemonize a part of this module, the daemon runs the command,
picks up the return code and output, and returns it to the main process.
'''
# init some vars
chunk = 4096 # FIXME: pass in as arg?
errors = 'surrogate_or_strict'
# start it!
try:
pipe = os.pipe()
pid = os.fork()
except OSError:
module.fail_json(msg="Error while attempting to fork: %s", exception=traceback.format_exc())
# we don't do any locking as this should be a unique module/process
if pid == 0:
os.close(pipe[0])
# Set stdin/stdout/stderr to /dev/null
fd = os.open(os.devnull, os.O_RDWR)
# clone stdin/out/err
for num in range(3):
if fd != num:
os.dup2(fd, num)
# close otherwise
if fd not in range(3):
os.close(fd)
# Make us a daemon
pid = os.fork()
# end if not in child
if pid > 0:
os._exit(0)
# get new process session and detach
sid = os.setsid()
if sid == -1:
module.fail_json(msg="Unable to detach session while daemonizing")
# avoid possible problems with cwd being removed
os.chdir("/")
pid = os.fork()
if pid > 0:
os._exit(0)
# if command is string deal with py2 vs py3 conversions for shlex
if not isinstance(cmd, list):
if PY2:
cmd = shlex.split(to_bytes(cmd, errors=errors))
else:
cmd = shlex.split(to_text(cmd, errors=errors))
# make sure we always use byte strings
run_cmd = []
for c in cmd:
run_cmd.append(to_bytes(c, errors=errors))
# execute the command in forked process
p = subprocess.Popen(run_cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, preexec_fn=lambda: os.close(pipe[1]))
fds = [p.stdout, p.stderr]
# loop reading output till its done
output = {p.stdout: b(""), p.stderr: b("")}
while fds:
rfd, wfd, efd = select.select(fds, [], fds, 1)
if (rfd + wfd + efd) or p.poll():
for out in fds:
if out in rfd:
data = os.read(out.fileno(), chunk)
if not data:
fds.remove(out)
output[out] += b(data)
# even after fds close, we might want to wait for pid to die
p.wait()
# Return a pickled data of parent
return_data = pickle.dumps([p.returncode, to_text(output[p.stdout]), to_text(output[p.stderr])], protocol=pickle.HIGHEST_PROTOCOL)
os.write(pipe[1], to_bytes(return_data, errors=errors))
# clean up
os.close(pipe[1])
os._exit(0)
elif pid == -1:
module.fail_json(msg="Unable to fork, no exception thrown, probably due to lack of resources, check logs.")
else:
# in parent
os.close(pipe[1])
os.waitpid(pid, 0)
# Grab response data after child finishes
return_data = b("")
while True:
rfd, wfd, efd = select.select([pipe[0]], [], [pipe[0]])
if pipe[0] in rfd:
data = os.read(pipe[0], chunk)
if not data:
break
return_data += b(data)
# Note: no need to specify encoding on py3 as this module sends the
# pickle to itself (thus same python interpreter so we aren't mixing
# py2 and py3)
return pickle.loads(to_bytes(return_data, errors=errors))
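# Example (sketch, from inside a module that already built an AnsibleModule):
# rc, out, err = daemonize(module, '/usr/sbin/myservice --restart')  # hypothetical command
# The command runs in a detached session; rc, out and err are its unpickled results.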
def check_ps(module, pattern):
# Set ps flags
if platform.system() == 'SunOS':
psflags = '-ef'
else:
psflags = 'auxww'
# Find ps binary
psbin = module.get_bin_path('ps', True)
(rc, out, err) = module.run_command('%s %s' % (psbin, psflags))
# If rc is 0, set running as appropriate
if rc == 0:
for line in out.split('\n'):
if pattern in line:
return True
return False
Transkribus/TranskribusDU | TranskribusDU/ObjectModel/XMLDSTEXTClass.py | Python | bsd-3-clause | 12,221 | 0.013504
# -*- coding: utf-8 -*-
"""
XML object class
Hervé Déjean
cpy Xerox 2009
a class for TEXT from a XMLDocument
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .XMLDSObjectClass import XMLDSObjectClass
from .XMLDSTOKENClass import XMLDSTOKENClass
from config import ds_xml_def as ds_xml
class XMLDSTEXTClass(XMLDSObjectClass):
"""
TEXT (chunk) class
"""
name=ds_xml.sTEXT
def __init__(self,domNode = None):
XMLDSObjectClass.__init__(self)
XMLDSObjectClass.id += 1
self._domNode = domNode
self.tagName = ds_xml.sTEXT
self.Obaseline=None
self.setName(ds_xml.sTEXT)
# def getX(self): return float(self.getAttribute('x'))
# def getY(self): return float(self.getAttribute('y'))
# def getX2(self):
# return float(self.getAttribute('x'))+self.getWidth()
# def getY2(self):
# return float(self.getAttribute('y'))+self.getHeight()
# def getHeight(self): return float(self.getAttribute('height'))
# def getWidth(self): return float(self.getAttribute('width'))
def fromDom(self,domNode):
"""
only contains TEXT?
attributes x y id height width (all!)
"""
# self.setName(domNode.atg)
self.setNode(domNode)
# get properties
# for prop in domNode.keys():
# self.addAttribute(prop,domNode.get(prop))
try:
self._id = self.getAttribute('id')
except:pass
for prop in domNode.keys():
self.addAttribute(prop,domNode.get(prop))
if prop =='x': self._x= float(domNode.get(prop))
elif prop =='y': self._y = float(domNode.get(prop))
elif prop =='height': self._h = float(domNode.get(prop))
elif prop =='width': self.setWidth(float(domNode.get(prop)))
self.addAttribute('x2', self.getX()+self.getWidth())
self.addAttribute('y2',self.getY()+self.getHeight() )
if self.hasAttribute('blpoints'):
from ObjectModel.XMLDSBASELINEClass import XMLDSBASELINEClass
b= XMLDSBASELINEClass()
b.fromDom(domNode)
b.setParent(self.getParent())
self.setBaseline(b)
## if no text: add a category: text, graphic, image, whitespace??
for txt in domNode.itertext():
stxt=txt.strip()
if len(stxt) == 0:
continue
if type(txt) != str:
pass
else:
try:txt=txt.decode('utf-8')
except AttributeError as e:
pass
if self.getContent() is not None:
self.addContent(txt)
else:
self.setContent(txt)
ldomElts = domNode.findall('./%s'%(ds_xml.sTOKEN))
for elt in ldomElts:
try:
myObject= XMLDSTOKENClass(elt)
self.addObject(myObject)
myObject.setPage(self.getParent().getPage())
myObject.fromDom(elt)
except: pass #print 'issue with token'
def setBaseline(self,ob): self.Obaseline = ob
def getBaseline(self):
return self.Obaseline
def computeBaseline(self):
if self.getBaseline() is not None:
return self.getBaseline()
# lHisto={}
lY=[]
lX=[]
# test if TOKEN has position (not in GT)!
for token in self.getAllNamedObjects(XMLDSTOKENClass):
try:
lX.append(token.getX())
lX.append(token.getX2())
lY.append(token.getY())
lY.append(token.getY2())
except TypeError:
pass
import numpy as np
if len(lX) > 0:
a,bx = np.polyfit(lX, lY, 1)
lPoints = ','.join(["%d,%d"%(xa,ya) for xa,ya in zip(lX, lY)])
# print 'ANLGE:',math.degrees(math.atan(a))
ymax = a*self.getWidth()+bx
from ObjectModel.XMLDSBASELINEClass import XMLDSBASELINEClass
b= XMLDSBASELINEClass()
b.setNode(self)
# b.addAttribute("points",lPoints)
b.setAngle(a)
b.setBx(bx)
b.setPoints(lPoints)
b.setParent(self)
self.setBaseline(b)
b.computePoints()
def getTokens(self):
"""
if dom tokens: return them
else split content
"""
if self.getAllNamedObjects(XMLDSTOKENClass) != []:
return self.getAllNamedObjects(XMLDSTOKENClass)
else:
for token in self.getContent().split():
oT=XMLDSTOKENClass()
oT.setParent(self)
oT.setPage(self.getPage())
self.addObject(oT)
oT.setContent(token)
return self.getAllNamedObjects(XMLDSTOKENClass)
def getSetOfFeaturesXPos(self,TH,lAttr,myObject):
from spm.feature import featureObject
if self._lBasicFeatures is None:
self._lBasicFeatures = []
ftype= featureObject.NUMERICAL
feature = featureObject()
feature.setName('x')
feature.setTH(TH)
feature.addNode(self)
feature.setObjectName(self)
feature.setValue(round(self.getX()))
feature.setType(ftype)
self.addFeature(feature)
ftype= featureObject.NUMERICAL
feature = featureObject()
feature.setName('x2')
feature.setTH(TH)
feature.addNode(self)
feature.setObjectName(self)
feature.setValue(round(self.getX()+self.getWidth()))
feature.setType(ftype)
self.addFeature(feature)
ftype= featureObject.NUMERICAL
feature = featureObject()
feature.setName('xc')
feature.setTH(TH)
feature.addNode(self)
feature.setObjectName(self)
feature.setValue(round(self.getX()+self.getWidth()/2))
feature.setType(ftype)
self.addFeature(feature)
return self.getSetofFeatures()
def getSetOfListedAttributes(self,TH,lAttributes,myObject):
"""
Generate a set of features: X start of the lines
"""
from spm.feature import featureObject
if self._lBasicFeatures is None:
self._lBasicFeatures = []
# needed to keep canonical values!
elif self.getSetofFeatures() != []:
return self.getSetofFeatures()
lHisto = {}
for elt in self.getAllNamedObjects(myObject):
for attr in lAttributes:
try:lHisto[attr]
except KeyError:lHisto[attr] = {}
if elt.hasAttribute(attr):
# if elt.getWidth() >500:
# print elt.getName(),attr, elt.getAttribute(attr) #, elt.getNode()
try:
try:lHisto[attr][round(float(elt.getAttribute(attr)))].append(elt)
except KeyError: lHisto[attr][round(float(elt.getAttribute(attr)))] = [elt]
except TypeError:pass
for attr in lAttributes:
for value in lHisto[attr]:
# print attr, value, lHisto[attr][value]
if len(lHisto[attr][value]) > 0.1:
ftype= featureObject.NUMERICAL
feature = featureObject()
feature.setName(attr)
# feature.setName('f')
feature.setTH(TH)
feature.addNode(self)
feature.setObjectName(self)
feature.setValue(value)
feature.setType(ftype)
self.addFeature(feature)
if 'text' in lAttributes:
fengbohello/practice | python/pdb/sample.py | Python | lgpl-3.0 | 81 | 0.037037
def add(x, y):
return x + y
x = 0
import pdb; pdb.set_trace()
x = add(1, 2)
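# At the pdb prompt opened by set_trace() you can, for example, step with 'n',
# inspect values with 'p x', and resume execution with 'c'.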
cloudify-cosmo/cloudify-manager-blueprints | components/manager-ip-setter/scripts/create-internal-ssl-certs.py | Python | apache-2.0 | 1,346 | 0
# This script has to run using the Python executable found in:
# /opt/mgmtworker/env/bin/python in order to properly load the manager
# blueprints utils.py module.
import argparse
import logging
import utils
class CtxWithLogger(object):
logger = logging.getLogger('internal-ssl-certs-logger')
utils.ctx = CtxWithLogger()
parser = argparse.ArgumentParser()
parser.add_argument('--metadata', default=utils.CERT_METADATA_FILE_PATH,
help='File containing the cert metadata. It should be a '
'JSON file containing an object with the '
'"internal_rest_host" and "networks" fields.')
parser.add_argument('manager_ip', default=None, nargs='?',
help='The IP of this machine on the default network')
if __name__ == '__main__':
args = parser.parse_args()
cert_metadata = utils.load_cert_metadata(filename=args.metadata)
internal_rest_host = args.manager_ip or cert_metadata['internal_rest_host']
networks = cert_metadata.get('networks', {})
networks['default'] = internal_rest_host
cert_ips = [internal_rest_host] + list(networks.values())
utils.generate_internal_ssl_cert(ips=cert_ips, name=internal_rest_host)
utils.store_cert_metadata(internal_rest_host, networks,
filename=args.metadata)
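# Typical invocation (sketch), using the manager's own interpreter as noted above:
# /opt/mgmtworker/env/bin/python create-internal-ssl-certs.py 10.0.0.5
# where 10.0.0.5 is a hypothetical manager IP; omit it to fall back to the metadata file.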
ekumenlabs/terminus | terminus/geometry/enu.py | Python | apache-2.0 | 1,657 | 0.002414
"""
Copyright (C) 2017 Open Source Robotics Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import math
import numpy as np
import latlon
import ecef
class Enu(object):
def __init__(self, e, n, u):
self.e = e
self.n = n
self.u = u
def __eq__(self, other):
return self.e == other.e and self.n == other.n and self.u == other.u
def __hash__(self):
return hash((self.e, self.n, self.u))
def to_ecef(self, origin):
# this doesn't work at the poles because longitude is not uniquely defined there
sin_lon = origin._sin_lon()
sin_lat = origin._sin_lat()
cos_lon = origin._cos_lon()
cos_lat = origin._cos_lat()
global_to_ecef_matrix = np.array([[-sin_lon, -cos_lon * sin_lat, cos_lon * cos_lat],
[cos_lon, - sin_lon * sin_lat, sin_lon * cos_lat],
[0, cos_lat, sin_lat]])
enu_vector = np.array([[self.e], [self.n], [self.u]])
ecef_vector = np.dot(global_to_ecef_matrix, enu_vector)
return ecef.Ecef(ecef_vector[0][0], ecef_vector[1][0], ecef_vector[2][0])
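# Example (sketch): convert a local ENU offset to ECEF around an origin object that
# provides the _sin_lat/_cos_lat/_sin_lon/_cos_lon helpers used above (e.g. a
# latlon.LatLon instance; its exact constructor signature is assumed here).
# origin = latlon.LatLon(45.0, -122.0)
# point = Enu(10.0, 5.0, 1.0).to_ecef(origin)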
benosteen/pairtree | pairtree/pairtree_revlookup.py | Python | apache-2.0 | 3,270 | 0.015596
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
FS Pairtree storage - Reverse lookup
====================================
Conventions used:
From http://www.cdlib.org/inside/diglib/pairtree/pairtreespec.html version 0.1
This is an implementation of a reverse lookup index, using the pairtree path spec to
record the link between local id and the id's that it corresponds to.
eg to denote issn:1234-1234 as being linked to a global id of "uuid:1e4f..."
--> create a file at ROOT_DIR/pairtree_rl/is/sn/+1/23/4-/12/34/uuid+1e4f...
Note that the id it links to is recorded as a filename encoded as per the pairtree spec.
Usage
=====
>>> from pairtree import PairtreeReverseLookup
>>> rl = PairtreeReverseLookup(storage_dir="ROOT")
>>> rl["issn:1234-1234"].append("uuid:1e4f...")
>>> rl["issn:1234-1234"]
["uuid:1e4f"]
>>> rl["issn:1234-1234"] = ["id:1", "uuid:32fad..."]
>>>
Notes
=====
This was created to avoid certain race conditions I had with a pickled dictionary for this index.
A SQLite or similar lookup would also be effective, but this one relies solely on pairtree.
"""
import os
from pairtree.pairtree_path import id_encode, id_decode, id_to_dirpath
PAIRTREE_RL = "pairtree_rl"
class PairtreeReverseLookup_list(object):
def __init__(self, rl_dir, id):
self._rl_dir = rl_dir
self._id = id
self._dirpath = id_to_dirpath(self._id, self._rl_dir)
def _get_ids(self):
if os.path.isdir(self._dirpath):
ids = []
for f in os.listdir(self._dirpath):
ids.append(id_decode(f))
return ids
else:
return []
def _add_id(self, new_id):
if not os.path.exists(self._dirpath):
os.makedirs(self._dirpath)
enc_id = id_encode(new_id)
if not os.path.isfile(enc_id):
with open(os.path.join(self._dirpath, enc_id), "w") as f:
f.write(new_id)
def _exists(self, id):
if os.path.exists(self._dirpath):
return id_encode(id) in os.listdir(self._dirpath)
else:
return False
def append(self, *args):
[self._add_id(x) for x in args if not self._exists(x)]
def __len__(self):
return len(os.listdir(self._dirpath))
def __repr__(self):
return "ID:'%s' -> ['%s']" % (self._id, "','".join(self._get_ids()))
def __str__(self):
return self.__repr__()
def __iter__(self):
for f in self._get_ids():
yield id_decode(f)
class PairtreeReverseLookup(object):
def __init__(self, storage_dir="data"):
self._storage_dir = storage_dir
self._rl_dir = os.path.join(storage_dir, PAIRTREE_RL)
self._init_store()
def _init_store(self):
if not os.path.isdir(self._storage_dir):
os.makedirs(self._storage_dir)
def __getitem__(self, id):
return PairtreeReverseLookup_list(self._rl_dir, id)
def __setitem__(self, id, value):
id_c = PairtreeReverseLookup_list(self._rl_dir, id)
if isinstance(value, list):
id_c.append(*value)
else:
id_c.append(value)
def __delitem__(self, id):
dirpath = id_to_dirpath(id, self._rl_dir)
if os.path.isdir(dirpath):
for f in os.listdir(dirpath):
os.remove(os.path.join(dirpath, f))
os.removedirs(dirpath) # will throw OSError if the dir cannot be removed.
self._init_store() # just in case
saltstack/libnacl | doc/conf.py | Python | apache-2.0 | 10,276 | 0.007201
# -*- coding: utf-8 -*-
#
# libnacl documentation build configuration file, created by
# sphinx-quickstart on Thu May 29 10:29:25 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
sys.path.insert(0, os.path.abspath('..'))
from libnacl import version
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'libnacl'
copyright = u'2020, Thomas S Hatch'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
#htmlhelp_basename = 'libnacl'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'libnacl.tex', u'libnacl Documentation',
u'Thomas S Hatch', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'libnacl', u'libnacl Documentation',
[u'Thomas S Hatch'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'libnacl', u'libnacl Documentation',
u'Thomas S Hatch', 'libnacl', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the
django-danceschool/django-danceschool | danceschool/discounts/migrations/0011_auto_20210127_2052.py | Python | bsd-3-clause | 525 | 0
# Generated by Django 2.2.17 on 2021-01-28 01:52
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('discounts', '0010_merge_20191028_1925'),
]
operations = [
migrations.AddField(
model_name='registrationdiscount',
name='applied',
field=models.BooleanField(null=True, verbose_name='Use finalized'),
),
migrations.DeleteModel(
name='TemporaryRegistrationDiscount',
),
]
jsachs/infer | infer/lib/python/inferlib/issues.py | Python | bsd-3-clause | 9,001 | 0.000222
# Copyright (c) 2015 - present Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import codecs
import datetime
import itertools
import operator
import os
import re
import sys
try:
from lxml import etree
except ImportError:
etree = None
from . import colorize, config, source, utils
ISSUE_KIND_ERROR = 'ERROR'
ISSUE_KIND_WARNING = 'WARNING'
ISSUE_KIND_INFO = 'INFO'
ISSUE_KIND_ADVICE = 'ADVICE'
# field names in rows of json reports
JSON_INDEX_DOTTY = 'dotty'
JSON_INDEX_FILENAME = 'file'
JSON_INDEX_HASH = 'hash'
JSON_INDEX_INFER_SOURCE_LOC = 'infer_source_loc'
JSON_INDEX_ISL_FILE = 'file'
JSON_INDEX_ISL_LNUM = 'lnum'
JSON_INDEX_ISL_CNUM = 'cnum'
JSON_INDEX_ISL_ENUM = 'enum'
JSON_INDEX_KIND = 'kind'
JSON_INDEX_LINE = 'line'
JSON_INDEX_PROCEDURE = 'procedure'
JSON_INDEX_PROCEDURE_ID = 'procedure_id'
JSON_INDEX_QUALIFIER = 'qualifier'
JSON_INDEX_QUALIFIER_TAGS = 'qualifier_tags'
JSON_INDEX_TYPE = 'bug_type'
JSON_INDEX_TRACE = 'bug_trace'
JSON_INDEX_TRACE_LEVEL = 'level'
JSON_INDEX_TRACE_FILENAME = 'filename'
JSON_INDEX_TRACE_LINE = 'line_number'
JSON_INDEX_TRACE_DESCRIPTION = 'description'
JSON_INDEX_VISIBILITY = 'visibility'
ISSUE_TYPES_URL = 'http://fbinfer.com/docs/infer-issue-types.html#'
def _text_of_infer_loc(loc):
return ' ({}:{}:{}-{}:)'.format(
loc[JSON_INDEX_ISL_FILE],
loc[JSON_INDEX_ISL_LNUM],
loc[JSON_INDEX_ISL_CNUM],
loc[JSON_INDEX_ISL_ENUM],
)
def text_of_report(report):
filename = report[JSON_INDEX_FILENAME]
kind = report[JSON_INDEX_KIND]
line = report[JSON_INDEX_LINE]
error_type = report[JSON_INDEX_TYPE]
msg = report[JSON_INDEX_QUALIFIER]
infer_loc = ''
if JSON_INDEX_INFER_SOURCE_LOC in report:
infer_loc = _text_of_infer_loc(report[JSON_INDEX_INFER_SOURCE_LOC])
return '%s:%d: %s: %s%s\n %s' % (
filename,
line,
kind.lower(),
error_type,
infer_loc,
msg,
)
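# The rendered line follows '%s:%d: %s: %s%s' plus the indented qualifier, e.g. (sketch):
# src/Foo.java:42: error: NULL_DEREFERENCE
#   object `x` could be null and is dereferenced at line 42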
def _text_of_report_list(project_root, reports, bugs_txt_path, limit=None,
formatter=colorize.TERMINAL_FORMATTER):
n_issues = len(reports)
if n_issues == 0:
if formatter == colorize.TERMINAL_FORMATTER:
out = colorize.color(' No issues found ',
colorize.SUCCESS, formatter)
return out + '\n'
else:
return 'No issues found'
text_errors_list = []
for report in reports[:limit]:
filename = report[JSON_INDEX_FILENAME]
line = report[JSON_INDEX_LINE]
source_context = ''
source_context = source.build_source_context(
os.path.join(project_root, filename),
formatter,
line,
)
indenter = source.Indenter() \
.indent_push() \
.add(source_context)
source_context = '\n' + unicode(indenter)
msg = text_of_report(report)
if report[JSON_INDEX_KIND] == ISSUE_KIND_ERROR:
msg = colorize.color(msg, colorize.ERROR, formatter)
elif report[JSON_INDEX_KIND] == ISSUE_KIND_WARNING:
msg = colorize.color(msg, colorize.WARNING, formatter)
elif report[JSON_INDEX_KIND] == ISSUE_KIND_ADVICE:
msg = colorize.color(msg, colorize.ADVICE, formatter)
text = '%s%s' % (msg, source_context)
text_errors_list.append(text)
error_types_count = {}
for report in reports:
t = report[JSON_INDEX_TYPE]
# assert failures are not very informative without knowing
# which assertion failed
if t == 'Assert_failure' and JSON_INDEX_INFER_SOURCE_LOC in report:
t += _text_of_infer_loc(report[JSON_INDEX_INFER_SOURCE_LOC])
if t not in error_types_count:
error_types_count[t] = 1
else:
error_types_count[t] += 1
max_type_length = max(map(len, error_types_count.keys())) + 2
sorted_error_types = error_types_count.items()
sorted_error_types.sort(key=operator.itemgetter(1), reverse=True)
types_text_list = map(lambda (t, count): '%s: %d' % (
t.rjust(max_type_length),
count,
), sorted_error_types)
text_errors = '\n\n'.join(text_errors_list)
if limit >= 0 and n_issues > limit:
text_errors += colorize.color(
('\n\n...too many issues to display (limit=%d exceeded), please ' +
'see %s or run `inferTraceBugs` for the remaining issues.')
% (limit, bugs_txt_path), colorize.HEADER, formatter)
issues_found = 'Found {n_issues}'.format(
n_issues=utils.get_plural('issue', n_issues),
)
msg = '{issues_found}\n\n{issues}\n\n{header}\n\n{summary}'.format(
issues_found=colorize.color(issues_found,
colorize.HEADER,
formatter),
issues=text_errors,
header=colorize.color('Summary of the reports',
colorize.HEADER, formatter),
summary='\n'.join(types_text_list),
)
return msg
def _is_user_visible(project_root, report):
kind = report[JSON_INDEX_KIND]
return kind in [ISSUE_KIND_ERROR, ISSUE_KIND_WARNING, ISSUE_KIND_ADVICE]
def print_and_save_errors(infer_out, project_root, json_report, bugs_out,
pmd_xml):
errors = utils.load_json_from_path(json_report)
errors = [e for e in errors if _is_user_visible(project_root, e)]
console_out = _text_of_report_list(project_root, errors, bugs_out,
limit=10)
utils.stdout('\n' + console_out)
plain_out = _text_of_report_list(project_root, errors, bugs_out,
formatter=colorize.PLAIN_FORMATTER)
with codecs.open(bugs_out, 'w',
encoding=config.CODESET, errors='replace') as file_out:
file_out.write(plain_out)
if pmd_xml:
xml_out = os.path.join(infer_out, config.PMD_XML_FILENAME)
with codecs.open(xml_out, 'w',
encoding=config.CODESET,
errors='replace') as file_out:
file_out.write(_pmd_xml_of_issues(errors))
def merge_reports_from_paths(report_paths):
json_data = []
for json_path in report_paths:
json_data.extend(utils.load_json_from_path(json_path))
return _sort_and_uniq_rows(json_data)
def _pmd_xml_of_issues(issues):
if etree is None:
print('ERROR: "etree" Python package not found.')
print('ERROR: You need to install it to use Infer with --pmd-xml')
sys.exit(1)
root = etree.Element('pmd')
root.attrib['version'] = '5.4.1'
root.attrib['date'] = datetime.datetime.now().isoformat()
for issue in issues:
fully_qualifed_method_name = re.search('(.*)\(.*',
issue[JSON_INDEX_PROCEDURE_ID])
class_name = ''
package = ''
if fully_qualifed_method_name is not None:
# probably Java
info = fully_qualifed_method_name.groups()[0].split('.')
class_name = info[-2:-1][0]
method = info[-1]
package = '.'.join(info[0:-2])
else:
method = issue[JSON_INDEX_PROCEDURE]
file_node = etree.Element('file')
file_node.attrib['name'] = issue[JSON_INDEX_FILENAME]
violation = etree.Element('violation')
violation.attrib['begincolumn'] = '0'
violation.attrib['beginline'] = str(issue[JSON_INDEX_LINE])
violation.attrib['endcolumn'] = '0'
violation.attrib['endline'] = str(issue[JSON_INDEX_LINE] + 1)
violation.attrib['class'] = class_name
violation.attrib['method'] = method
violation.attrib['package'] = package
violation.attrib['prior
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_04_01/aio/operations/_network_security_groups_operations.py | Python | mit | 30,870 | 0.005248
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class NetworkSecurityGroupsOperations:
"""NetworkSecurityGroupsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_04_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
network_security_group_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
network_security_group_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified network security group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security group.
:type network_security_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
network_security_group_name=network_security_group_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'} # type: ignore
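# Example (sketch; assumes an authenticated NetworkManagementClient created for
# api-version 2019-04-01, called from inside an async function):
# poller = await client.network_security_groups.begin_delete('my-rg', 'my-nsg')
# await poller.result()  # completes once the NSG deletion has finished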
async def get(
self,
resource_group_name: str,
network_security_group_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.NetworkSecurityGroup":
"""Gets the specified network security group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security group.
:type network_security_group_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or
arthurdarcet/aiohttp | tools/check_changes.py | Python | apache-2.0 | 1,224 | 0
#!/usr/bin/env python3
import sys
from pathlib import Path
ALLOWED_SUFFIXES = ['.feature',
'.bugfix',
'.doc',
'.removal',
'.misc']
def get_root(script_path):
folder = script_path.absolute().parent
while not (folder / '.git').exists():
folder = folder.parent
if folder == folder.anchor:
raise RuntimeError("git repo not found")
return folder
def main(argv):
print('Check "CHANGES" folder... ', end='', flush=True)
here = Path(argv[0])
root = get_root(here)
changes = root / 'CHANGES'
failed = False
for fname in changes.iterdir():
if fname.name in ('.gitignore', '.TEMPLATE.rst'):
continue
if fname.suffix not in ALLOWED_SUFFIXES:
if not failed:
print('')
print(fname, 'has illegal suffix', file=sys.stderr)
failed = True
if failed:
print('', file=sys.stderr)
print('Allowed suffixes are:', ALLOWED_SUFFIXES, file=sys.stderr)
print('', file=sys.stderr)
else:
print('OK')
return int(failed)
if __name__ == '__main__':
sys.exit(main(sys.argv))
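# Typical run (sketch): invoke from anywhere inside the repository, e.g.
# python tools/check_changes.py
# The exit status is 0 when every file in CHANGES/ uses one of the allowed suffixes.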
aaniin/AliPhysics | PWGMM/MC/aligenqa/aligenqa/roofie/figure.py | Python | bsd-3-clause | 20,665 | 0.002516
import string
import random
import logging
import os
from rootpy import asrootpy, log
from rootpy.plotting import Legend, Canvas, Pad, Graph
from rootpy.plotting.base import Color, MarkerStyle
from rootpy.plotting.utils import get_limits
import ROOT
# from external import husl
# suppress some nonsense logging messages when writing to pdfs.
# Also, setup default logger
log["/ROOT.TCanvas.Print"].setLevel(log.WARNING)
logging.basicConfig(level=logging.DEBUG)
log = log["/roofie"]
def is_plottable(obj):
"""
Check if the given object is considered a plottable.
Currently, TH1 and TGraph are considered plottable.
"""
return isinstance(obj, (ROOT.TH1, ROOT.TGraph))
class Styles(object):
# Define names of plot layouts:
class _Default_Style(object):
pt_per_cm = 28.4527625
titlefont = 43
labelfont = 43
markerSizepx = 4 # number of pixels of the marker
class Presentation_full(_Default_Style):
axisTitleSize = 14
axisLabelSize = 14
legendSize = 14
canvasWidth = 340
canvasHeight = 300
plot_margins = (.13, .05, .13, .1) # left, right, bottom, top
plot_ytitle_offset = 1.15 # factor of the normal offset :P, may lay outside of the canvas
class Presentation_half(_Default_Style):
axisTitleSize = 10
axisLabelSize = 10
legendSize = 10
canvasWidth = 170
canvasHeight = 150
plot_margins = (.3, .08, .2, .1)
plot_ytitle_offset = 1
class Public_full(_Default_Style):
axisTitleSize = 10
axisLabelSize = 8
legendSize = 8
canvasWidth = 340
canvasHeight = 300
plot_margins = (.13, .05, .13, .04)
plot_ytitle_offset = 1.15
def gen_random_name():
"""Generate a random name for temp hists"""
return ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(25))
def get_color_generator(palette='root', ncolors=10):
"""
Returns a generator for n colors.
Parameters
----------
palette : string
name of the color palette which should be used
ncolors : int
number of colors this palette should have, it might be ignored by some palettes!
Returns
-------
generator :
colors which can be digested by _rootpy_
"""
# generated with sns.palplot(sns.color_palette("colorblind", 10))
if palette == 'colorblind':
colors = ([(0.0, 0.4470588235294118, 0.6980392156862745),
(0.0, 0.6196078431372549, 0.45098039215686275),
(0.8352941176470589, 0.3686274509803922, 0.0),
(0.8, 0.4745098039215686, 0.6549019607843137),
(0.9411764705882353, 0.8941176470588236, 0.25882352941176473),
(0.33725490196078434, 0.7058823529411765, 0.9137254901960784)])
if palette == 'set2':
colors = ([(0.40000000596046448, 0.7607843279838562, 0.64705884456634521),
(0.98131487965583808, 0.55538641635109398, 0.38740485135246722),
(0.55432528607985565, 0.62711267120697922, 0.79595541393055635),
(0.90311419262605563, 0.54185316071790801, 0.76495195557089413),
(0.65371782148585622, 0.84708959004458262, 0.32827375098770734),
(0.9986312957370983, 0.85096502233954041, 0.18488274134841617),
(0.89573241682613591, 0.76784315109252932, 0.58182240093455595),
(0.70196080207824707, 0.70196080207824707, 0.70196080207824707)])
if palette == 'husl':
colors = [(0.9677975592919913, 0.44127456009157356, 0.5358103155058701),
(0.8616090647292522, 0.536495730113334, 0.19548899031476086),
(0.6804189127793346, 0.6151497514677574, 0.19405452111445337),
(0.46810256823426105, 0.6699492535792404, 0.1928958739904499),
(0.20125317221201128, 0.6907920815379025, 0.47966761189275336),
(0.21044753832183283, 0.6773105080456748, 0.6433941168468681),
(0.2197995660828324, 0.6625157876850336, 0.7732093159317209),
(0.433280341176423, 0.6065273407962815, 0.9585467098271748),
(0.8004936186423958, 0.47703363533737203, 0.9579547196007522),
(0.962272393509669, 0.3976451968965351, 0.8008274363432775)]
if palette == 'root':
# named colors of the ROOT TColor colorwheel are between 800 and 900, +1 to make them look better
colors = []
for i in range(0, ncolors):
colors.append((800 + int(100.0 / ncolors) * i) + 1)
if colors:
for color in colors:
yield color
else:
raise ValueError("Unknown palette")
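# Example (sketch): draw three colors from the default ROOT colorwheel palette.
# colors = get_color_generator('root', ncolors=3)
# first_color = next(colors)  # an int color index digestible by rootpy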
class Figure(object):
def __init__(self):
# User settable parameters:
self.title = ''
self.xtitle = ''
self.ytitle = ''
self.plot = self.Plot()
self.legend = self.Legend()
# Private:
self._plottables = []
self.style = Styles.Presentation_full
class Plot(object):
logx = False
logy = False
gridx = False
gridy = False
palette = 'root'
palette_ncolors = 10
xmin, xmax, ymin, ymax = None, None, None, None
frame = None
class Legend(object):
title = None
position = 'tl'
def _create_legend(self):
nentries = len([pdic['legend_title'] for pdic in self._plottables if pdic['legend_title'] != ''])
leg = Legend(nentries, leftmargin=0, rightmargin=0, entrysep=0.01,
textsize=self.style.legendSize, textfont=43, margin=0.1, )
if self.legend.title:
leg.SetHeader(self.legend.title)
leg.SetBorderSize(0) # no box
leg.SetFillStyle(0) # transparent background of legend TPave(!)
return leg
def _theme_plottable(self, obj):
try:
axes = obj.GetXaxis(), obj.GetYaxis()
for axis in axes:
axis.SetLabelSize(self.style.axisLabelSize)
axis.SetLabelFont(self.style.labelfont)
axis.SetTitleFont(self.style.titlefont)
axis.SetTitleSize(self.style.axisTitleSize)
# yaxis only settings:
axes[1].SetTitleOffset(self.style.plot_ytitle_offset)
except AttributeError:
# obj might not be of the right type
pass
# apply styles, this might need to get more fine grained
# markers are available in children of TAttMarker
if isinstance(obj, ROOT.TAttMarker):
# marker size 1 == 8 px, and never scales with canvas...
obj.SetMarkerSize(self.style.markerSizepx / 8.0)
def add_plottable(self, obj, legend_title='', markerstyle='circle', color=None, use_as_frame=None):
"""
Add a plottable object to this figure. This function performs a
copy of the passed object and assigns it a random name. Once
committed, these should not be touched any more by the user!!!
Parameters
----------
obj : Hist1D, Graph, None
A root plottable object; If none, this object will only show up in the legend
legend_title : string
Title for this plottable as shown in the legend
"""
# Make a copy if we got a plottable
if obj is not None:
p = asrootpy(obj.Clone(gen_random_name()))
else:
p = ROOT.TLegendEntry()
if isinstance(p, ROOT.TH1):
p.SetDirectory(0) # make sure that the hist is not associated with a file anymore!
self._plottables.append({'p': p,
'legend_title': legend_title,
'markerstyle': markerstyle,
'color': color,
'use_as_frame': use_as_frame,
})
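    # Hedged usage sketch (not part of the original source); the histogram
    # variable and the chosen color are illustrative only, while Figure(),
    # xtitle/ytitle and the add_plottable signature come from the code above:
    #
    #     fig = Figure()
    #     fig.xtitle, fig.ytitle = 'p_T (GeV/c)', 'counts'
    #     fig.add_plottable(my_hist, legend_title='data', color=ROOT.kRed)
    #
    # Because add_plottable copies the object, later changes to ``my_hist`` do
    # not affect the figure.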
def import_plottables_from_canvas(self, canvas):
"""
Import plottables from a canvas which was previously created with roofie
Parameters
---
|
ZeitOnline/zeit.redirect
|
src/zeit/redirect/redirect.py
|
Python
|
bsd-3-clause
| 710
| 0
|
from pyramid.httpexceptions import HTTPMovedPerma
|
nently
from pyramid.view import view_config
from zeit.redirect.db import Redirect
import json
@view_config(route_name='redirect', renderer='string')
def check_redirect(request):
redirect = Redirect.query().filter_by(source=request.path).first()
if redirect:
# XXX Should we be protocol-relative (https e
|
tc.)?
raise HTTPMovedPermanently(
'http://' + request.headers['Host'] + redirect.target)
else:
return ''
@view_config(route_name='add', renderer='string', request_method='POST')
def add_redirect(request):
body = json.loads(request.body)
Redirect.add(body['source'], body['target'])
return '{}'
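# --- Hedged usage sketch (not part of the original module) ------------------
# Registering a redirect through the ``add`` view above. Only the JSON body
# shape ({'source': ..., 'target': ...}) comes from the code; the host, port
# and the ``/add`` URL are assumptions for illustration.
if __name__ == '__main__':
    import requests  # any HTTP client would do; requests is assumed available
    requests.post('http://localhost:6543/add',
                  json={'source': '/old-path', 'target': '/new-path'})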
|
Staffjoy/client_python
|
staffjoy/resources/organization.py
|
Python
|
mit
| 959
| 0
|
from staffjoy.resource import Resource
from staffjoy.resources.location import Location
from staffjoy.resources.admin import Admin
from staffjoy.resources.organization_worker import OrganizationWorker
class Organization(Resource):
PATH = "organizations/{organization_id}"
ID_NAME = "organization_id"
def get_locations(self, **kwargs):
return Location.get_all(parent=self, **kwargs)
def get_location(self, id):
return Locatio
|
n.get(parent=self, id=id)
def create_location(self, **kwargs):
return Location.create(parent=self, **kwargs)
def get_admins(self):
return Admin.get_all(parent=self)
def get_admin(self, id):
return Admin.get(parent=self, id=id)
def create_admin(self, **kwargs):
"""Typically just pass email"""
return Admin.create(parent=self, **kwargs)
def get_workers(self, **kwargs):
return OrganizationWorker.get_all(parent=self, **kwa
|
rgs)
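# --- Hedged usage sketch (not part of the original file) --------------------
# How an Organization resource is typically driven once fetched. The variable
# name ``org`` and the example email are illustrative only; the method names
# all come from the class above ("Typically just pass email").
def _demo_organization_usage(org):
    for location in org.get_locations():
        print(location)
    org.create_admin(email="[email protected]")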
|
pnwairfire/fccsmap
|
setup.py
|
Python
|
gpl-3.0
| 1,292
| 0.001548
|
from setuptools import setup, find_packages
from fccsmap import __version__
test_requirements = []
with open('requirements-test.txt') as f:
test_requirements = [r for r in f.read().splitlines()]
setup(
name='fccsmap',
version=__version__,
author='Joel Dubowy',
license='GPLv3+',
author_email='jdu
|
[email protected]',
packages=find_packages(),
scripts=[
'bin/fccsmap'
],
package_data={
'fccsmap': ['data/*.nc']
},
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
|
"License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
"Programming Language :: Python :: 3.8",
"Operating System :: POSIX",
"Operating System :: MacOS"
],
url='https://github.com/pnwairfire/fccsmap/',
description='supports the look-up of FCCS fuelbed information by lat/lng or vector geo spatial data.',
install_requires=[
"afscripting>=2.0.0",
# Note: numpy and gdal must now be installed manually beforehand
"shapely==1.7.1",
"pyproj==3.0.0.post1",
"rasterstats==0.15.0"
],
dependency_links=[
"https://pypi.airfire.org/simple/afscripting/",
],
tests_require=test_requirements
)
|
davvi/Hardway3
|
ex30.py
|
Python
|
mit
| 492
| 0
|
#!/usr/bin/env python
people = 30
cars = 40
trucks = 15
if cars > people:
print("We should take the cars.")
elif cars < p
|
eople:
print("We should not take the cars")
else:
print("We can't decide.")
if trucks > cars:
print("That's too many trucks.")
elif trucks < cars:
print("Maybe we coudl take the trucks.")
else:
print("We still can't dec
|
ide.")
if people > trucks:
print("Alright, let's just take the trucks.")
else:
print("Fine, let's stay home then.")
|
hkariti/mopidy-youtube
|
mopidy_youtube/backend.py
|
Python
|
apache-2.0
| 5,359
| 0.000187
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
import string
from urlparse import urlparse, parse_qs
from mopidy import backend
from mopidy.models import SearchResult, Track, Album, Artist
import pykka
import pafy
import requests
import unicodedata
from mopidy_youtube import logger
yt_api_endpoint = 'https://www.googleapis.com/youtube/v3/'
yt_key = 'AIzaSyAl1Xq9DwdE_KD4AtPaE4EJl3WZe2zCqg4'
def resolve_track(track, stream=False):
logger.debug("Resolving Youtube for track '%s'", track)
if hasattr(track, 'uri'):
return resolve_url(track.comment, stream)
else:
return resolve_url(track.split('.')[-1], stream)
def safe_url(uri):
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
safe_uri = unicodedata.normalize(
'NFKD',
|
unicode(uri)
).encode('ASCII', 'ignore')
|
return re.sub(
'\s+',
' ',
''.join(c for c in safe_uri if c in valid_chars)
).strip()
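# Hedged worked example (added for illustration, not in the original source):
#   safe_url(u"Café del Mar – Vol. 1")  ->  "Cafe del Mar Vol. 1"
# NFKD folding drops the accent, the en-dash is discarded by the ASCII encode,
# and the run of whitespace left behind is collapsed to a single space.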
def resolve_url(url, stream=False):
video = pafy.new(url)
if not stream:
uri = 'youtube:video/%s.%s' % (
safe_url(video.title), video.videoid
)
else:
uri = video.getbestaudio()
if not uri: # get video url
uri = video.getbest()
logger.debug('%s - %s %s %s' % (
video.title, uri.bitrate, uri.mediatype, uri.extension))
uri = uri.url
if not uri:
return
if '-' in video.title:
title = video.title.split('-')
track = Track(
name=title[1].strip(),
comment=video.videoid,
length=video.length*1000,
artists=[Artist(name=title[0].strip())],
album=Album(
name='Youtube',
images=[video.bigthumb, video.bigthumbhd]
),
uri=uri
)
else:
track = Track(
name=video.title,
comment=video.videoid,
length=video.length*1000,
album=Album(
name='Youtube',
images=[video.bigthumb, video.bigthumbhd]
),
uri=uri
)
return track
def search_youtube(q):
query = {
'part': 'id',
'maxResults': 15,
'type': 'video',
'q': q,
'key': yt_key
}
pl = requests.get(yt_api_endpoint+'search', params=query)
playlist = []
for yt_id in pl.json().get('items'):
try:
track = resolve_url(yt_id.get('id').get('videoId'))
playlist.append(track)
except Exception as e:
logger.info(e.message)
return playlist
def resolve_playlist(url):
logger.info("Resolving Youtube for playlist '%s'", url)
query = {
'part': 'snippet',
'maxResults': 50,
'playlistId': url,
'fields': 'items/snippet/resourceId',
'key': yt_key
}
    pl = requests.get(yt_api_endpoint+'playlistItems', params=query)
playlist = []
for yt_id in pl.json().get('items'):
try:
yt_id = yt_id.get('snippet').get('resourceId').get('videoId')
playlist.append(resolve_url(yt_id))
except Exception as e:
logger.info(e.message)
return playlist
class YoutubeBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(YoutubeBackend, self).__init__()
self.config = config
self.library = YoutubeLibraryProvider(backend=self)
self.playback = YoutubePlaybackProvider(audio=audio, backend=self)
self.uri_schemes = ['youtube', 'yt']
class YoutubeLibraryProvider(backend.LibraryProvider):
def lookup(self, track):
if 'yt:' in track:
track = track.replace('yt:', '')
if 'youtube.com' in track:
url = urlparse(track)
req = parse_qs(url.query)
if 'list' in req:
return resolve_playlist(req.get('list')[0])
else:
return [resolve_url(track)]
else:
return [resolve_url(track)]
def search(self, query=None, uris=None):
if not query:
return
if 'uri' in query:
search_query = ''.join(query['uri'])
url = urlparse(search_query)
if 'youtube.com' in url.netloc:
req = parse_qs(url.query)
if 'list' in req:
return SearchResult(
uri='youtube:search',
tracks=resolve_playlist(req.get('list')[0])
)
else:
logger.info(
"Resolving Youtube for track '%s'", search_query)
return SearchResult(
uri='youtube:search',
tracks=[resolve_url(search_query)]
)
else:
search_query = '|'.join(query.values()[0])
logger.info("Searching Youtube for query '%s'", search_query)
return SearchResult(
uri='youtube:search',
tracks=search_youtube(search_query)
)
class YoutubePlaybackProvider(backend.PlaybackProvider):
def play(self, track):
track = resolve_track(track, True)
return super(YoutubePlaybackProvider, self).play(track)
|
AlanZatarain/pysal
|
pysal/region/tests/test_maxp.py
|
Python
|
bsd-3-clause
| 1,770
| 0
|
import unittest
import pysal
import numpy as np
import random
class Test_Maxp(unittest.TestCase):
def setUp(self):
random.seed(100)
np.random.seed(100)
def test_Maxp(self):
w = pysal.lat2W(10, 10)
z = np.random.random_sample((w.n, 2))
p = np.ones((w.n, 1), float)
floor = 3
solution = pysal.region.Maxp(
w, z, floo
|
r, floor_variable=p, initial=100)
self.assertEquals(solution.p, 29)
self.assertEquals(solution.regions[0], [4, 14, 5, 24, 3, 25, 15, 23])
def test_inference(self):
w = pysal.weights.lat2W(5, 5)
z = np.random.random_sample((w.n, 2))
p = np.ones((w.n, 1), float)
floor = 3
solution = pysal.region.Maxp(
w, z, floor, floor_variable=p, initial=100)
solution.inference(nperm=9)
self.assertAlmostEquals(solution.pvalue, 0.200000
|
00000000001, 10)
def test_cinference(self):
w = pysal.weights.lat2W(5, 5)
z = np.random.random_sample((w.n, 2))
p = np.ones((w.n, 1), float)
floor = 3
solution = pysal.region.Maxp(
w, z, floor, floor_variable=p, initial=100)
solution.cinference(nperm=9, maxiter=100)
self.assertAlmostEquals(solution.cpvalue, 0.10000000000000001, 10)
def test_Maxp_LISA(self):
w = pysal.lat2W(10, 10)
z = np.random.random_sample((w.n, 2))
p = np.ones(w.n)
mpl = pysal.region.Maxp_LISA(w, z, p, floor=3, floor_variable=p)
self.assertEquals(mpl.p, 31)
self.assertEquals(mpl.regions[0], [99, 89, 98, 97])
suite = unittest.TestLoader().loadTestsFromTestCase(Test_Maxp)
if __name__ == '__main__':
runner = unittest.TextTestRunner()
runner.run(suite)
|
helloworldC2/VirtualRobot
|
Node.py
|
Python
|
mit
| 260
| 0.019231
|
class Node(object):
|
def __init__(self,pos,parent,costSoFar,distanceToEnd):
self.pos = pos
self.parent = parent
self.costSoFar = costSoFar
|
self.distanceToEnd = distanceToEnd
self.totalCost = distanceToEnd +costSoFar
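# --- Hedged illustration (not part of the original file) --------------------
# The bookkeeping above is the usual A*-style cost model: totalCost is the
# cost accumulated so far (g) plus the heuristic distance to the goal (h).
if __name__ == "__main__":
    start = Node(pos=(0, 0), parent=None, costSoFar=0, distanceToEnd=10)
    print(start.totalCost)  # 0 + 10 -> 10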
|
Hattivat/hypergolic-django
|
hypergolic/catalog/views/astronaut_views.py
|
Python
|
agpl-3.0
| 1,773
| 0
|
from django.views.generic.detail import DetailView
from django.views.generic.edit import UpdateView, DeleteView
from catalog.views.base import GenericListView, GenericCreateView
from catalog.models import Astronaut, CrewedMission
from catalog.forms import AstronautForm
from catalog.filters import AstronautFilter
from django.core.urlresolvers import reverse_lazy
from django.core.urlresolvers import reverse
from django.http import Http404
class AstronautListView(GenericListView):
model = Astronaut
f = AstronautFilter
display_data = ('organization', 'nationality', 'birth_date')
class AstronautDetailView(DetailView):
model = Astronaut
template_name =
|
"catalog/astronaut_detail.html"
class AstronautCreateView(GenericCreateView):
model = Astronaut
form_class = AstronautForm
success_url = reverse_lazy("astronaut_list")
def form_valid(self, form):
obj = form.save(commit=False)
obj.creator = self.request.user
obj.save()
        return super(AstronautCreateView, self).form_valid(form)
def get_success_url
|
(self):
return reverse("astronaut_detail", args=(self.object.pk,))
class AstronautUpdateView(UpdateView):
model = Astronaut
form_class = AstronautForm
template_name = "catalog/generic_update.html"
initial = {}
def form_valid(self, form):
obj = form.save(commit=False)
obj.modifier = self.request.user
obj.save()
return super(AstronautUpdateView, self).form_valid(form)
def get_success_url(self):
return reverse("astronaut_detail", args=(self.object.pk,))
class AstronautDeleteView(DeleteView):
model = Astronaut
template_name = "catalog/generic_delete.html"
success_url = reverse_lazy("astronaut_list")
|
rudyryk/python-samples
|
hello_tornado/hello_asyncio.py
|
Python
|
cc0-1.0
| 3,463
| 0.001733
|
# hello_asyncio.py
import asyncio
import traceback
import tornado.concurrent
import tornado.ioloop
import tornado.web
import tornado.gen
from tornado.httpclient import AsyncHTTPClient
try:
import aioredis
except ImportError:
print("Please install aioredis: pip install aioredis")
exit(0)
class AsyncRequestHandler(tornado.web.Requ
|
estHandler):
"""Base class for request handlers with `asyncio` coroutines support.
It runs methods on Tornado's ``AsyncIOMainLoop`` instance.
Subclasses have to implement one of `get_async()`, `post_async()`, etc.
Asynchronous method should be decorated with `@asyncio.coroutine`.
Usage example::
class MyAsyncRequestHandler(AsyncRequestHandler):
@asyncio.coroutine
def get_async(self):
html = yield from self.application.http.get('http:/
|
/python.org')
self.write({'html': html})
You may also just re-define `get()` or `post()` methods and they will be simply run
    synchronously. This may be convenient for a draft implementation, e.g. for testing
new libs or concepts.
"""
@tornado.gen.coroutine
def get(self, *args, **kwargs):
"""Handle GET request asyncronously, delegates to
``self.get_async()`` coroutine.
"""
yield self._run_method('get', *args, **kwargs)
@tornado.gen.coroutine
def post(self, *args, **kwargs):
"""Handle POST request asyncronously, delegates to
``self.post_async()`` coroutine.
"""
yield self._run_method('post', *args, **kwargs)
@asyncio.coroutine
def _run_async(self, coroutine, future_, *args, **kwargs):
"""Perform coroutine and set result to ``Future`` object."""
try:
result = yield from coroutine(*args, **kwargs)
future_.set_result(result)
except Exception as e:
future_.set_exception(e)
print(traceback.format_exc())
def _run_method(self, method_, *args, **kwargs):
"""Run ``get_async()`` / ``post_async()`` / etc. coroutine
wrapping result with ``tornado.concurrent.Future`` for
compatibility with ``gen.coroutine``.
"""
coroutine = getattr(self, '%s_async' % method_, None)
if not coroutine:
raise tornado.web.HTTPError(405)
future_ = tornado.concurrent.Future()
asyncio.async(
self._run_async(coroutine, future_, *args, **kwargs)
)
return future_
class MainHandler(AsyncRequestHandler):
@asyncio.coroutine
def get_async(self):
redis = self.application.redis
yield from redis.set('my-key', 'OK')
val = yield from redis.get('my-key')
self.write('Hello asyncio.coroutine: %s' % val)
class Application(tornado.web.Application):
def __init__(self):
# Prepare IOLoop class to run instances on asyncio
tornado.ioloop.IOLoop.configure('tornado.platform.asyncio.AsyncIOMainLoop')
handlers = [
(r"/", MainHandler),
]
super().__init__(handlers, debug=True)
def init_with_loop(self, loop):
self.redis = loop.run_until_complete(
aioredis.create_redis(('localhost', 6379), loop=loop)
)
if __name__ == "__main__":
print("Run hello_asyncio ... http://127.0.0.1:8888")
application = Application()
application.listen(8888)
loop = asyncio.get_event_loop()
application.init_with_loop(loop)
loop.run_forever()
|
teeple/pns_server
|
work/install/Python-2.7.4/Doc/conf.py
|
Python
|
gpl-2.0
| 5,857
| 0.001195
|
# -*- coding: utf-8 -*-
#
# Python documentation build configuration file
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
import sys, os, time
sys.path.append(os.path.abspath('tools/sphinxext'))
# General configuration
# ---------------------
extensions = ['sphinx.ext.refcounting', 'sphinx.ext.coverage',
'sphinx.ext.doctest', 'pyspecific']
templates_path = ['tools/sphinxext']
# General substitutions.
project = 'Python'
copyright = '1990-%s, Python Software Foundation' % time.strftime('%Y')
# The default replacements for |version| and |release|.
#
# The short X.Y version.
# version = '2.6'
# The full version, including alpha/beta/rc tags.
# release = '2.6a0'
# We look for the Include/patchlevel.h file in the current Python source tree
# and replace the values accordingly.
import patchlevel
version, release = patchlevel.get_version_info()
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of files that shouldn't be included in the build.
unused_docs = [
'maclib/scrap',
'library/xmllib',
'library/xml.etree',
]
# Ignore .rst files in Sphinx itself.
exclude_trees = ['tools/sphinx']
# Relative filename of the reference count data file.
refcount_file = 'data/refcounts.dat'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# Options for HTML output
# -----------------------
html_theme = 'default'
html_theme_options = {'collapsiblesidebar': True}
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# Custom sidebar templates, filenames relative to this file.
html_sidebars = {
'index': 'indexsidebar.html',
}
# Additional templates that should be rendered to pages.
html_additional_pages = {
'download': 'download.html',
'index': 'indexcontent.html',
}
# Output an OpenSearch description file.
html_use_opensearch = 'http://docs.python.org
|
/'
# Additional static files.
html_static_path = ['tools/sphinxext/static']
# Output file base name for HTML help builder.
htmlhelp_basename = 'python' + release.replace('.', '')
# Split the index
html_split_index = True
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
latex_paper_size = 'a4'
# The font size ('10pt', '11pt' or '12pt').
latex_font_size = '10pt'
# Grouping the document tree into
|
LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
_stdauthor = r'Guido van Rossum\\Fred L. Drake, Jr., editor'
latex_documents = [
('c-api/index', 'c-api.tex',
'The Python/C API', _stdauthor, 'manual'),
('distutils/index', 'distutils.tex',
'Distributing Python Modules', _stdauthor, 'manual'),
('extending/index', 'extending.tex',
'Extending and Embedding Python', _stdauthor, 'manual'),
('install/index', 'install.tex',
'Installing Python Modules', _stdauthor, 'manual'),
('library/index', 'library.tex',
'The Python Library Reference', _stdauthor, 'manual'),
('reference/index', 'reference.tex',
'The Python Language Reference', _stdauthor, 'manual'),
('tutorial/index', 'tutorial.tex',
'Python Tutorial', _stdauthor, 'manual'),
('using/index', 'using.tex',
'Python Setup and Usage', _stdauthor, 'manual'),
('faq/index', 'faq.tex',
'Python Frequently Asked Questions', _stdauthor, 'manual'),
('whatsnew/' + version, 'whatsnew.tex',
'What\'s New in Python', 'A. M. Kuchling', 'howto'),
]
# Collect all HOWTOs individually
latex_documents.extend(('howto/' + fn[:-4], 'howto-' + fn[:-4] + '.tex',
'', _stdauthor, 'howto')
for fn in os.listdir('howto')
if fn.endswith('.rst') and fn != 'index.rst')
# Additional stuff for the LaTeX preamble.
latex_preamble = r'''
\authoraddress{
\strong{Python Software Foundation}\\
Email: \email{[email protected]}
}
\let\Verbatim=\OriginalVerbatim
\let\endVerbatim=\endOriginalVerbatim
'''
# Documents to append as an appendix to all manuals.
latex_appendices = ['glossary', 'about', 'license', 'copyright']
# Get LaTeX to handle Unicode correctly
latex_elements = {'inputenc': r'\usepackage[utf8x]{inputenc}', 'utf8extra': ''}
# Options for the coverage checker
# --------------------------------
# The coverage checker will ignore all modules/functions/classes whose names
# match any of the following regexes (using re.match).
coverage_ignore_modules = [
r'[T|t][k|K]',
r'Tix',
r'distutils.*',
]
coverage_ignore_functions = [
'test($|_)',
]
coverage_ignore_classes = [
]
# Glob patterns for C source files for C API coverage, relative to this directory.
coverage_c_path = [
'../Include/*.h',
]
# Regexes to find C items in the source files.
coverage_c_regexes = {
'cfunction': (r'^PyAPI_FUNC\(.*\)\s+([^_][\w_]+)'),
'data': (r'^PyAPI_DATA\(.*\)\s+([^_][\w_]+)'),
'macro': (r'^#define ([^_][\w_]+)\(.*\)[\s|\\]'),
}
# The coverage checker will ignore all C items whose names match these regexes
# (using re.match) -- the keys must be the same as in coverage_c_regexes.
coverage_ignore_c_items = {
# 'cfunction': [...]
}
|
fredericojordan/fast-chess
|
scripts/hashfileStats.py
|
Python
|
mit
| 900
| 0.004444
|
import zlib, base64, sys
MAX_DEPTH = 50
if __name__ == "__main__":
try:
hashfile = open("hashfile", "r")
except:
print("ERROR: While opening hash file!")
sys.exit(-1)
line_number = 0
depths
|
= [0 for _ in range(MAX_DEPTH)]
for line in hashfile.readlines():
line_number += 1
l = line.strip().split()
if len(l) < 7:
print(
"Bad entry on line " + str(line_number) + " (ignored): " + line.strip()
)
continue
hash = l[0]
depth = int(l[1])
score = int(l[2
|
])
fen = " ".join(l[3:])
depths[depth] += 1
hashfile.close()
print("-- Depths --")
for i in range(MAX_DEPTH):
if not depths[i]:
continue
print("{:2d}: {:8d}".format(i, depths[i]))
print("------------")
|
r26zhao/django_blog
|
blog/migrations/0012_auto_20170621_1250.py
|
Python
|
mit
| 586
| 0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-21 04:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blog', '0011_auto_20170621_1224'),
]
operations = [
|
migrations.AddField(
model_name='category',
name='slug',
field=models
|
.SlugField(default=''),
),
migrations.AddField(
model_name='tag',
name='slug',
field=models.SlugField(default=''),
),
]
|
chop-dbhi/serrano
|
serrano/resources/exporter.py
|
Python
|
bsd-2-clause
| 3,758
| 0
|
from django.conf.urls import patterns, url
from django.core.urlresolvers import reverse
from django.http import Http404
from modeltree.tree import MODELTREE_DEFAULT_ALIAS, trees
from restlib2.params import Parametizer, IntParam, StrParam
from avocado.export import BaseExporter, registry as exporters
from avocado.query import pipeline, utils
from serrano.resources import API_VERSION
from serrano.resources.base import BaseResource
from serrano.resources.processors import EXPORTER_RESULT_PROCESSOR_NAME, \
process_results
# Single list of all registered exporters
EXPORT_TYPES = zip(*exporters.choices)[0]
class ExporterRootResource(BaseResource):
def get_links(self, request):
uri = request.build_absolute_uri
links = {
'self': uri(reverse('serrano:data:exporter')),
}
for export_type in EXPORT_TYPES:
links[export_type] = {
'link': uri(reverse(
'serrano:data:exporter',
kwargs={'export_type': export_type}
)),
'data': {
'title': exporters.get(export_type).short_name,
'description': exporters.get(export_type).long_name,
}
}
return links
def get(self, request):
resp = {
'title': 'Serrano Exporter Endpoints',
'version': API_VERSION
}
return resp
class ExporterParametizer(Parametizer):
limit = IntParam(50)
processor = StrParam('default', choices=pipeline.query_processors)
reader = StrParam('cached', choices=BaseExporter.readers)
tree = StrParam(MODELTREE_DEFAULT_ALIAS, choices=trees)
class ExporterResource(BaseResource):
cache_max_age = 0
private_cache = True
parametizer = ExporterParametizer
QUERY_NAME_TEMPLATE = '{session_key}:{export_type}'
def _get_query_name(self, request, export_type):
return self.QUERY_NAME_TEMPLATE.format(
session_key=request.session.session_key,
export_type=export_type)
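    # For illustration (not in the original source): with the template above a
    # query name looks like "<session_key>:<export_type>", e.g. "9f3a0c:csv".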
# Resource is dependent on the available export types
def is_not_found(self, request, response, export_type, **kwargs):
return export_type not in EXPORT_TYPES
def get(self, request, export_type, **kwargs):
view = self.get_view(request)
context = self.get_context(request)
params = self.get_params(request)
# Configure the query options used for retrieving the results.
query_options = {
'export_type': export_type,
'query_name': self._get_query_name(request, export_type),
}
query_options.update(**kwargs)
query_options.update(params)
try:
row_data = utils.get_result_rows(context, view, query_options,
request=request)
except ValueError:
raise Http404
return process_results(
request, EXPORTER_RESULT_PROCESSOR_NA
|
ME, row_data)
post = get
def delete(self, request, export_type, **kwargs):
query_name = self._get_query_name(request, export_type)
canceled = utils.cancel_query(query_name)
|
return self.render(request, {'canceled': canceled})
exporter_resource = ExporterResource()
exporter_root_resource = ExporterRootResource()
# Resource endpoints
urlpatterns = patterns(
'',
url(r'^$', exporter_root_resource, name='exporter'),
url(r'^(?P<export_type>\w+)/$', exporter_resource, name='exporter'),
url(r'^(?P<export_type>\w+)/(?P<page>\d+)/$', exporter_resource,
name='exporter'),
url(r'^(?P<export_type>\w+)/(?P<page>\d+)\.\.\.(?P<stop_page>\d+)/$',
exporter_resource, name='exporter'),
)
|
BjoernT/python-openstackclient
|
openstackclient/object/v1/container.py
|
Python
|
apache-2.0
| 7,719
| 0
|
# Copyright 2013 Nebula Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Container v1 action implementations"""
import logging
import six
from cliff import command
from cliff import lister
from cliff import show
from openstackclient.common import parseractions
from openstackclient.common import utils
class CreateContainer(lister.Lister):
"""Create new container"""
log = logging.getLogger(__name__ + '.CreateContainer')
def get_parser(self, prog_name):
parser = super(CreateContainer, self).get_parser(prog_name)
parser.add_argument(
'containers',
metavar='<container-name>',
nargs="+",
help='New container name(s)',
)
return parser
@utils.log_method(log)
def take_action(self, parsed_args):
results = []
for container in parsed_args.containers:
data = self.app.client_manager.object_store.container_create(
container=container,
)
results.append(data)
columns = ("account", "container", "x-trans-id")
return (columns,
(utils.get_dict_properties(
s, columns,
formatters={},
) for s in results))
class DeleteContainer(command.Command):
"""Delete container"""
log = logging.getLogger(__name__ + '.DeleteContainer')
def get_parser(self, prog_name):
parser = super(DeleteContainer, self).get_parser(prog_name)
parser.add_argument(
'containers',
metavar='<container>',
nargs="+",
help='Container(s) to delete',
)
return parser
@utils.log_method(log)
def take_action(self, parsed_args):
for container in parsed_args.containers:
self.app.client_manager.object_store.container_delete(
container=container,
)
class ListContainer(lister.Lister):
"""List containers"""
log = logging.getLogger(__name__ + '.ListContainer')
def get_parser(self, prog_name):
parser = super(ListContainer, self).get_parser(prog_name)
parser.add_argument(
"--prefix",
metavar="<prefix>",
help="Filter list using <prefix>",
)
parser.add_argument(
"--marker",
metavar="<marker>",
help="Anchor for paging",
)
parser.add_argument(
"--end-marker",
metavar="<end-marker>",
help="End anchor for paging",
)
parser.add_argument(
"--limit",
metavar="<limit>",
type=int,
help="Limit the number of containers returned",
)
|
parser.add_argument(
'--long',
action='store_true',
default=False,
help='List additional fields in output',
)
parser.add_argument(
|
'--all',
action='store_true',
default=False,
help='List all containers (default is 10000)',
)
return parser
@utils.log_method(log)
def take_action(self, parsed_args):
if parsed_args.long:
columns = ('Name', 'Bytes', 'Count')
else:
columns = ('Name',)
kwargs = {}
if parsed_args.prefix:
kwargs['prefix'] = parsed_args.prefix
if parsed_args.marker:
kwargs['marker'] = parsed_args.marker
if parsed_args.end_marker:
kwargs['end_marker'] = parsed_args.end_marker
if parsed_args.limit:
kwargs['limit'] = parsed_args.limit
if parsed_args.all:
kwargs['full_listing'] = True
data = self.app.client_manager.object_store.container_list(
**kwargs
)
return (columns,
(utils.get_dict_properties(
s, columns,
formatters={},
) for s in data))
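    # Hedged example (not part of the original source): with the arguments
    # defined above, a typical invocation would be something like
    #
    #     openstack container list --prefix backups- --long --all
    #
    # i.e. filter by prefix, show the Name/Bytes/Count columns and disable the
    # default paging limit.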
class SaveContainer(command.Command):
"""Save container contents locally"""
log = logging.getLogger(__name__ + ".SaveContainer")
def get_parser(self, prog_name):
parser = super(SaveContainer, self).get_parser(prog_name)
parser.add_argument(
'container',
metavar='<container>',
help='Container to save',
)
return parser
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
self.app.client_manager.object_store.container_save(
container=parsed_args.container,
)
class SetContainer(command.Command):
"""Set container properties"""
log = logging.getLogger(__name__ + '.SetContainer')
def get_parser(self, prog_name):
parser = super(SetContainer, self).get_parser(prog_name)
parser.add_argument(
'container',
metavar='<container>',
help='Container to modify',
)
parser.add_argument(
"--property",
metavar="<key=value>",
required=True,
action=parseractions.KeyValueAction,
help="Set a property on this container "
"(repeat option to set multiple properties)"
)
return parser
@utils.log_method(log)
def take_action(self, parsed_args):
self.app.client_manager.object_store.container_set(
parsed_args.container,
properties=parsed_args.property,
)
class ShowContainer(show.ShowOne):
"""Display container details"""
log = logging.getLogger(__name__ + '.ShowContainer')
def get_parser(self, prog_name):
parser = super(ShowContainer, self).get_parser(prog_name)
parser.add_argument(
'container',
metavar='<container>',
help='Container to display',
)
return parser
@utils.log_method(log)
def take_action(self, parsed_args):
data = self.app.client_manager.object_store.container_show(
container=parsed_args.container,
)
if 'properties' in data:
data['properties'] = utils.format_dict(data.pop('properties'))
return zip(*sorted(six.iteritems(data)))
class UnsetContainer(command.Command):
"""Unset container properties"""
log = logging.getLogger(__name__ + '.UnsetContainer')
def get_parser(self, prog_name):
parser = super(UnsetContainer, self).get_parser(prog_name)
parser.add_argument(
'container',
metavar='<container>',
help='Container to modify',
)
parser.add_argument(
'--property',
metavar='<key>',
required=True,
action='append',
default=[],
help='Property to remove from container '
'(repeat option to remove multiple properties)',
)
return parser
@utils.log_method(log)
def take_action(self, parsed_args):
self.app.client_manager.object_store.container_unset(
parsed_args.container,
properties=parsed_args.property,
)
|
palantir/typedjsonrpc
|
tests/test_server.py
|
Python
|
apache-2.0
| 10,750
| 0.001023
|
# coding: utf-8
#
# Copyright 2015 Palantir Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import json
import pytest
import six
import werkzeug.debug
from webtest import TestApp
from werkzeug.exceptions import HTTPException
import typedjsonrpc.errors
from typedjsonrpc.registry import Registry
from typedjsonrpc.server import DebuggedJsonRpcApplication, Response, Server, current_request
if six.PY3:
import unittest.mock as mock
else:
import mock
class TestDebuggedJsonRpcApplication(object):
@staticmethod
def get_app():
registry = Registry()
server = Server(registry)
debugged_app = DebuggedJsonRpcApplication(server)
return registry, server, debugged_app
def test_handle_debug_no_such_traceback(self):
registry, server, debugged_app = TestDebuggedJsonRpcApplication.get_app()
with pytest.raises(HTTPException) as excinfo:
debugged_app.handle_debug(None, None, -1)
assert excinfo.value.code == 404
def test_handle_debug_response_called(self):
registry, server, debugged_app = TestDebuggedJsonRpcApplication.get_app()
mock_traceback = mock.Mock()
mock_traceback.render_full = mock.Mock(return_value="")
mock_traceback.frames = mock.NonCallableMagicMock()
registry.tracebacks[1234] = mock_traceback
start_response = mock.Mock()
environ = {
"SERVER_NAME": "localhost",
"SERVER_PORT": "5060",
"PATH_INFO": "/api",
"REQUEST_METHOD": "POST",
"wsgi.url_scheme": "http",
}
debugged_app.handle_debug(environ, start_response, 1234)
@mock.patch("typedjsonrpc.server.DebuggedJsonRpcApplication.handle_debug",
mock.Mock(return_value=["foo"]))
def test_debug_application_debug_endpoint(self):
registry, server, debugged_app = TestDebuggedJsonRpcApplication.get_app()
environ = {
"SERVER_NAME": "localhost",
"SERVER_PORT": "5060",
"PATH_INFO": "/debug/1234",
"REQUEST_METHOD": "POST",
"wsgi.url_scheme": "http",
}
start_response = mock.Mock()
assert ["foo"] == debugged_app.debug_application(environ, start_response)
assert DebuggedJsonRpcApplication.handle_debug.called
@mock.patch("werkzeug.debug.DebuggedApplication.debug_application",
mock.Mock(return_value=["foo"]))
def test_debug_application_normal_endpoint(self):
registry, server, debugged_app = TestDebuggedJsonRpcApplication.get_app()
environ = {
"SERVER_NAME": "localhost",
"SERVER_PORT": "5060",
"PATH_INFO": "/api",
"REQUEST_METHOD": "POST",
"wsgi.url_scheme": "http",
}
start_response = mock.NonCallableMock()
result = debugged_app.debug_application(environ, start_response)
assert result == ["foo"]
assert werkzeug.debug.DebuggedApplication.debug_application.called
class TestServer(object):
@staticmethod
def _create_mock_registry():
mock_registry = mock.Mock()
mock_registry.json_encoder = json.JSONEncoder()
mock_registry.json_decoder = json.JSONDecoder()
mock_registry.dispatch.return_value = json.dumps({
"jsonrpc": "2.0",
"id": "foo",
"result": "bar"
})
return mock_registry
def test_wsgi_app_invalid_endpoint(self):
environ = {
"SERVER_NAME": "localhost",
"SERVER_PORT": "5060",
"PATH_INFO": "/bogus",
"REQUEST_METHOD": "POST",
"wsgi.url_scheme": "http",
}
mock_registry = self._create_mock
|
_registry()
server = Server(mock_registry, "/foo")
with pytest.raises(HTTPException) as excinfo:
server(environ, None)
assert excinfo.value.code == 404
def test_wsgi_app_dispatch(self):
environ = {
"SERVER_NAME": "localhost",
|
"SERVER_PORT": "5060",
"PATH_INFO": "/foo",
"REQUEST_METHOD": "POST",
"wsgi.url_scheme": "http",
}
mock_registry = self._create_mock_registry()
server = Server(mock_registry, "/foo")
mock_start_response = mock.Mock()
server(environ, mock_start_response)
mock_registry.dispatch.assert_called_once_with(mock.ANY)
def test_before_first_request_funcs(self):
environ = {
"SERVER_NAME": "localhost",
"SERVER_PORT": "5060",
"PATH_INFO": "/foo",
"REQUEST_METHOD": "POST",
"wsgi.url_scheme": "http",
}
mock_registry = self._create_mock_registry()
mock_start = mock.Mock()
mock_start.return_value(None)
server = Server(mock_registry, "/foo")
server.register_before_first_request(mock_start)
mock_start_response = mock.Mock()
server(environ, mock_start_response)
server(environ, mock_start_response)
mock_start.assert_called_once_with()
def test_http_status_code_empty_response(self):
mock_registry = self._create_mock_registry()
mock_registry.dispatch.return_value = None
server = Server(mock_registry, "/foo")
app = TestApp(server)
app.post("/foo", status=204)
def test_http_status_code_success_response(self):
mock_registry = self._create_mock_registry()
server = Server(mock_registry, "/foo")
app = TestApp(server)
app.post("/foo", status=200)
def test_http_status_code_batched_response_half_success(self):
mock_registry = self._create_mock_registry()
server = Server(mock_registry, "/foo")
mock_registry.dispatch.return_value = json.dumps([
{
"jsonrpc": "2.0",
"id": "foo",
"result": "bar"
}, {
"jsonrpc": "2.0",
"id": "bar",
"error": typedjsonrpc.errors.MethodNotFoundError().as_error_object()
}
])
app = TestApp(server)
app.post("/foo", status=200)
def test_http_status_code_batched_response_all_failed(self):
mock_registry = self._create_mock_registry()
server = Server(mock_registry, "/foo")
mock_registry.dispatch.return_value = json.dumps([
{
"jsonrpc": "2.0",
"id": "foo",
"error": typedjsonrpc.errors.MethodNotFoundError().as_error_object()
}, {
"jsonrpc": "2.0",
"id": "bar",
"error": typedjsonrpc.errors.MethodNotFoundError().as_error_object()
}
])
app = TestApp(server)
app.post("/foo", status=200)
def test_http_status_code_method_not_found(self):
mock_registry = self._create_mock_registry()
server = Server(mock_registry, "/foo")
mock_registry.dispatch.return_value = json.dumps({
"jsonrpc": "2.0",
"id": "foo",
"error": typedjsonrpc.errors.MethodNotFoundError().as_error_object()
})
app = TestApp(server)
app.post("/foo", status=404)
def test_http_status_code_parse_error(self):
mock_registry = self._create_mock_registry()
server = Server(mock_registry, "/foo")
mock_registry.dispatch.return_value = json.dumps({
"jsonrpc": "2.0",
"id": "foo",
"error": typedjsonrpc.errors.ParseError().as_error_object
|
greenaddress/pycoin
|
pycoin/test/ecdsa_test.py
|
Python
|
mit
| 1,478
| 0.012855
|
#!/usr/bin/env python
import unittest
from pycoin.ecdsa import generator_secp256k1, sign, verify, public_pair_for_secret_exponent
class ECDSATestCase(unittest.TestCase):
def test_sign_verify(self):
def do_test(secret_exponent, val_list):
public_point = public_pair_for_secret_exponent(generator_secp256k1, secret_exponent)
for v in val_list:
signature = sign(generator_secp256k1, secret_exponent, v)
r = verify(generator_secp256k1, public_point, v, signature)
# Check that the 's' value is 'low', to prevent possible transaction malleability as per
# h
|
ttps://github.com/bitcoin/bips/blob/master/bip-0062.mediawiki#low-s-values-in-signatures
assert signature[1] <= 0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF5D576E7357A4501DDFE92F46681B20A0
assert r == True
signature = signature[0],signature[1]+1
r = verify(generator_secp256k1, public_point, v, signature)
assert r == False
|
val_list = [100,20000,30000000,400000000000,50000000000000000,60000000000000000000000]
do_test(0x1111111111111111111111111111111111111111111111111111111111111111, val_list)
do_test(0xdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd, val_list)
do_test(0x47f7616ea6f9b923076625b4488115de1ef1187f760e65f89eb6f4f7ff04b012, val_list)
if __name__ == '__main__':
unittest.main()
|
acevest/monitor
|
utils.py
|
Python
|
gpl-2.0
| 6,848
| 0.017251
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------
# File Name: utils.py
# Author: Zhao Yanbai
# Thu Oct 30 06:33:24 2014
# Description: none
#
|
----------------------------------------------------
|
--------------------
import logging
import struct
import socket
import web
import MySQLdb
import commands
import json
import time
from mail import SendMail
from weixin import SendWeiXinMsg
def init_logging(path) :
logging.basicConfig(filename=path, level = logging.INFO, format ='%(levelname)s\t%(asctime)s: %(message)s')
def SendMsg(title, msg) :
if not SendWeiXinMsg(msg) :
SendMail(title, msg)
class Storage(dict) :
def __getattr__(self, key) :
try :
return self[key]
except KeyError, k:
raise AttributeError, k
def __setattr__(self, key, value) :
self[key] = value
def __delattr__(self, key) :
try :
del self[key]
except KeyError, k:
raise AttributeError, k
def __repr__(self) :
return '<Storage ' + dict.__repr__(self) + '>'
def ip2int(ip) :
return struct.unpack("!I", socket.inet_aton(ip))[0]
def int2ip(i) :
print i
return str(socket.inet_ntoa(struct.pack("!I", int(i))))
def INET_ATON(ipstr) :
ip = ip2int(ipstr)
return str(ip)
def INET_NTOA(ip) :
ipstr = int2ip(int(ip) & 0xFFFFFFFF)
return ipstr
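# Worked example added for illustration (values checked by hand):
#   ip2int('192.168.0.1')  -> 3232235521
#   int2ip(3232235521)     -> '192.168.0.1'
# INET_ATON / INET_NTOA are the same conversions returned as strings, matching
# the MySQL functions of the same name.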
def CheckIP(s) :
try :
return len([i for i in s.split('.') if (0<= int(i)<= 255)])== 4
except :
return False
def CheckPort(port) :
return port.isdigit() and int(port) > 0 and int(port) < 65536
def CheckLogic(logic) :
if not logic.isdigit() :
return False
logic = int(logic)
return (logic == 0 or logic == 1 or logic == 2)
class PageBase(object):
def __init__(self) :
self.ActionMap = { }
self.action = ''
self.SetActionHandler('New', self.New)
self.SetActionHandler('Add', self.Add)
self.SetActionHandler('Del', self.Del)
self.SetActionHandler('Edit', self.Edit)
self.SetActionHandler('List', self.List)
self.SetActionHandler('Search', self.Search)
self.SetActionHandler('UNIMPLEMENTED', self.UNIMPLEMENTED)
self.Ret = {
'Err' : -1,
'Msg' : 'Unknown'
}
def ErrMsg(self, msg) :
self.Ret['Err'] = 1
self.Ret['Msg'] = msg
return json.dumps(self.Ret, ensure_ascii=False)
return self.Ret
def SucMsg(self, msg) :
self.Ret['Err'] = 0
self.Ret['Msg'] = msg
return json.dumps(self.Ret, ensure_ascii=False)
return self.Ret
def SucJsonData(self, data) :
self.Ret['Err'] = 0
self.Ret['Msg'] = 'success'
self.Ret['Data'] = data
r = json.dumps(self.Ret, ensure_ascii=False)
return r
def AuthorizedUser(self) :
return True
def UNIMPLEMENTED(self) :
if len(self.action) == 0 :
return "UNIMPLEMENTED"
return "UNIMPLEMENTED HANDLER FOR THE ACTION: {0}".format(self.action)
def REQUEST_HANDLER(self) :
self.action = web.input().get('action', '').strip()
return self.ActionMap.get(self.action, self.List)()
def GET(self) :
if not self.AuthorizedUser() :
return "UNAUTHORIZED USER"
return self.REQUEST_HANDLER()
def POST(self) :
if not self.AuthorizedUser() :
return "UNAUTHORIZED USER"
return self.REQUEST_HANDLER()
def SetActionHandler(self, action, handler) :
self.ActionMap[action] = handler
def New(self) :
return "YOU MUST IMPLEMENTED HANDLER FOR THE ACTION: {0}".format(self.action)
def Add(self) :
return "YOU MUST IMPLEMENTED HANDLER FOR THE ACTION: {0}".format(self.action)
def Del(self) :
return "YOU MUST IMPLEMENTED HANDLER FOR THE ACTION: {0}".format(self.action)
def Edit(self) :
return "YOU MUST IMPLEMENTED HANDLER FOR THE ACTION: {0}".format(self.action)
def List(self) :
return "YOU MUST IMPLEMENTED HANDLER FOR THE ACTION: {0}".format(self.action)
def Update(self) :
return "YOU MUST IMPLEMENTED HANDLER FOR THE ACTION: {0}".format(self.action)
def Search(self) :
return "YOU MUST IMPLEMENTED HANDLER FOR THE ACTION: {0}".format(self.action)
class DBBase(object):
def __init__(self, db) :
self.db = db
self.ret = {
"Err" : 0,
"Msg" : "No Error",
}
def SetSuccMsg(self, msg) :
self.ret["Err"] = 0
self.ret["Msg"] = msg
def SetFailMsg(self, msg) :
self.ret["Err"] = 1
self.ret["Msg"] = msg
def IsFail(self) :
return self.ret['Err'] == 1
def Fail(self, msg='UnSetErrReason') :
self.ret['Err'] = 1
self.ret['Msg'] = msg
return self.ret
def Ret(self) :
return self.ret
def GetRetMsg(self) :
return self.ret['Msg']
def Result(self, url='') :
if self.IsFail() :
return self.GetRetMsg()
#return config.render.ErrMsg(msg=self.GetRetMsg())
else :
#return config.render.Msg(msg=self.GetRetMsg(), url = url)
web.seeother(url)
def Read(self, sql, sidx="", sord="") :
if sidx != "" :
sord = sord.upper()
if sord != "ASC" and sord != "DESC" :
sord = "ASC"
sql = sql + " ORDER BY " + sidx + " " + sord
try :
#print sql
records = list(self.db.query(sql))
except MySQLdb.ProgrammingError :
records = []
return records
def Modify(self, sql) :
sqls = sql.split(';')
for sql in sqls :
if len(sql) < 5 :
break
#self.db.query(sql)
#return
try :
#print sql
self.db.query(sql)
                self.SetSuccMsg(u"操作完成")  # i.e. "operation completed"
except MySQLdb.ProgrammingError :
self.SetFailMsg("MySQL Programming Error")
except MySQLdb.IntegrityError :
self.SetFailMsg("Duplicate Record")
except :
self.SetFailMsg("Unknown Error")
if self.IsFail() :
break
return self.ret
def GetSvrOutputLines(cmd) :
lines = []
o = commands.getoutput(cmd)
#print o
for line in o.splitlines() :
if len(line) == 0 :
break
if line[0] != '>' :
continue
line = line[1:]
line = line.strip()
lines.append(line)
return lines
def Ts2TmStr(ts=int(time.time())) :
return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(ts))
|
tpltnt/SimpleCV
|
SimpleCV/MachineLearning/MLTestSuite.py
|
Python
|
bsd-3-clause
| 9,967
| 0.019063
|
from __future__ import print_function
from SimpleCV import *
print("")
print("This program runs a list of test for machine learning on")
print("the SimpleCV library. Not all scores will be high, this")
print("is just to ensure that the libraries are functioning correctly")
print("on your system")
print("")
print("***** WARNING *****")
print("This program is about to download a large data set to run it's test")
inp = raw_input("Do you want to continue [Y/n]")
if not (inp == "" or inp.lower() == "y"):
print("Exiting the program")
sys.exit()
machine_learning_data_set = "https://github.com/downloads/sightmachine/SimpleCV/machine_learning_dataset.zip"
data_path = download_and_extract(machine_learning_data_set)
w = 800
h = 600
n=50
display = Display(resolution = (w,h))
hue = HueHistogramFeatureExtractor(mNBins=16)
edge = EdgeHistogramFeatureExtractor()
bof = BOFFeatureExtractor()
bof.load('../Features/cbdata.txt')
haar = HaarLikeFeatureExtractor(fname="../Features/haar.txt")
morph = MorphologyFeatureExtractor()
spath = data_path + "/data/structured/"
upath = data_path + "/data/unstructured/"
ball_path = spath+"ball/"
basket_path = spath+"basket/"
boat_path = spath+"boat/"
cactus_path = spath +"cactus/"
cup_path = spath+"cup/"
duck_path = spath+"duck/"
gb_path = spath+"greenblock/"
match_path = spath+"matches/"
rb_path = spath+"redblock/"
s1_path = spath+"stuffed/"
s2_path = spath+"stuffed2/"
s3_path = spath+"stuffed3/"
arbor_path = upath+"arborgreens/"
football_path = upath+"football/"
sanjuan_path = upath+"sanjuans/"
print('SVMPoly')
#Set up am SVM with a poly kernel
extractors = [hue]
path = [cactus_path,cup_path,basket_path]
classes = ['cactus','cup','basket']
props ={
'KernelType':'Poly', #default is a RBF Kernel
'SVMType':'C', #default is C
'nu':None, # NU for SVM NU
'c':None, #C for SVM C - the slack variable
'degree':3, #degree for poly kernels - defaults to 3
'coef':None, #coef for Poly/Sigmoid defaults to 0
'gamma':None, #kernel param for poly/rbf/sigma - default is 1/#samples
}
print('Train')
classifierSVMP = SVMClassifier(extractors,props)
data = []
for p in path:
data.append(ImageSet(p))
classifierSVMP.train(data,classes,disp=display,subset=n) #train
print('Test')
[pos,neg,confuse] = classifierSVMP.test(data,classes,disp=display,subset=n)
files = []
for ext in IMAGE_FORMATS:
files.e
|
xtend(glob.glob( os.path.join(path[0], ext)))
for i
|
in range(10):
img = Image(files[i])
cname = classifierSVMP.classify(img)
print(files[i]+' -> '+cname)
classifierSVMP.save('PolySVM.pkl')
print('Reloading from file')
testSVM = SVMClassifier.load('PolySVM.pkl')
#testSVM.setFeatureExtractors(extractors)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = testSVM.classify(img)
print(files[i]+' -> '+cname)
print('###############################################################################')
print('SVMRBF ')
# now try an RBF kernel
extractors = [hue,edge]
path = [cactus_path,cup_path,basket_path]
classes = ['cactus','cup','basket']
props ={
'KernelType':'RBF', #default is a RBF Kernel
'SVMType':'NU', #default is C
'nu':None, # NU for SVM NU
'c':None, #C for SVM C - the slack variable
'degree':None, #degree for poly kernels - defaults to 3
'coef':None, #coef for Poly/Sigmoid defaults to 0
'gamma':None, #kernel param for poly/rbf/sigma
}
print('Train')
classifierSVMRBF = SVMClassifier(extractors,props)
data = []
for p in path:
data.append(ImageSet(p))
classifierSVMRBF.train(data,classes,disp=display,subset=n) #train
print('Test')
[pos,neg,confuse] = classifierSVMRBF.test(data,classes,disp=display,subset=n)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = classifierSVMRBF.classify(img)
print(files[i]+' -> '+cname)
classifierSVMRBF.save('RBFSVM.pkl')
print('Reloading from file')
testSVMRBF = SVMClassifier.load('RBFSVM.pkl')
#testSVMRBF.setFeatureExtractors(extractors)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = testSVMRBF.classify(img)
print(files[i]+' -> '+cname)
print('###############################################################################')
print('Bayes')
extractors = [haar]
classifierBayes = NaiveBayesClassifier(extractors)#
print('Train')
path = [arbor_path,football_path,sanjuan_path]
classes = ['arbor','football','sanjuan']
classifierBayes.train(path,classes,disp=display,subset=n) #train
print('Test')
[pos,neg,confuse] = classifierBayes.test(path,classes,disp=display,subset=n)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = classifierBayes.classify(img)
print(files[i]+' -> '+cname)
classifierBayes.save('Bayes.pkl')
print('Reloading from file')
testBayes = NaiveBayesClassifier.load('Bayes.pkl')
testBayes.setFeatureExtractors(extractors)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = testBayes.classify(img)
print(files[i]+' -> '+cname)
print('###############################################################################')
print('###############################################################################')
print('Forest')
extractors = [morph]
classifierForest = TreeClassifier(extractors,flavor='Forest')#
print('Train')
path = [s1_path,s2_path,s3_path]
classes = ['s1','s2','s3']
classifierForest.train(path,classes,disp=display,subset=n) #train
print('Test')
[pos,neg,confuse] = classifierForest.test(path,classes,disp=display,subset=n)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = classifierForest.classify(img)
print(files[i]+' -> '+cname)
classifierForest.save('forest.pkl')
print('Reloading from file')
testForest = TreeClassifier.load('forest.pkl')
testForest.setFeatureExtractors(extractors)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = testForest.classify(img)
print(files[i]+' -> '+cname)
print('###############################################################################')
print('Bagged Tree')
extractors = [haar]
classifierBagTree = TreeClassifier(extractors,flavor='Bagged')#
print('Train')
path = [s1_path,s2_path,s3_path]
classes = ['s1','s2','s3']
classifierBagTree.train(path,classes,disp=display,subset=n) #train
print('Test')
[pos,neg,confuse] = classifierBagTree.test(path,classes,disp=display,subset=n)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = classifierBagTree.classify(img)
print(files[i]+' -> '+cname)
classifierBagTree.save('bagtree.pkl')
print('Reloading from file')
testBagTree = TreeClassifier.load('bagtree.pkl')
testBagTree.setFeatureExtractors(extractors)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = testBagTree.classify(img)
print(files[i]+' -> '+cname)
print('###############################################################################')
print('Vanilla Tree')
extractors = [haar]
classifierTree = TreeClassifier(featureExtractors=extractors)
print('Train')
path = [s1_path,s2_path,s3_path]
classes = ['s1','s2','s3']
classifierTree.train(path,classes,disp=display,subset=n) #train
print('Test')
[pos,neg,confuse] = classifierTree.test(path,classes,disp=display,subset=n)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = classifierTree.classify(img)
print(files[i]+' -> '+cname)
print('Reloading from file')
classifierTree.save('tree.pkl')
testTree = TreeClassifier.load('tree.pkl')
testTree.setFeatureExtractors(extractors)
for i in range(10):
img = Image(files[i])
cname = testTree.classify(img)
print(files[i]+' -> '+cname)
print('###############################################################################
|
eezee-it/server-tools
|
auto_backup/models/db_backup.py
|
Python
|
agpl-3.0
| 10,250
| 0.000195
|
# -*- coding: utf-8 -*-
# © 2004-2009 Tiny SPRL (<http://tiny.be>).
# © 2015 Agile Business Group <http://www.agilebg.com>
# © 2016 Grupo ESOC Ingeniería de Servicios, S.L.U. - Jairo Llopis
# License AGPL-3.0 or later (http://www.gnu.org/licenses/gpl.html).
import os
import shutil
import tempfile
import traceback
from contextlib import contextmanager
from datetime import datetime, timedelta
from glob import iglob
from openerp import exceptions, models, fields, api, _, tools
from openerp.service import db
import logging
_logger = logging.getLogger(__name__)
try:
import pysftp
except ImportError:
_logger.warning('Cannot import pysftp')
class DbBackup(models.Model):
_name = 'db.backup'
_inherit = "mail.thread"
_sql_constraints = [
("name_unique", "UNIQUE(name)", "Cannot duplicate a configuration."),
("days_to_keep_positive", "CHECK(days_to_keep >= 0)",
"I cannot remove backups from the future. Ask Doc for that."),
]
name = fields.Char(
string="Name",
compute="_compute_name",
store=True,
help="Summary of this backup process",
)
folder = fields.Char(
default=lambda self: self._default_folder(),
oldname="bkp_dir",
help='Absolute path for storing the backups',
required=True
)
days_to_keep = fields.Integer(
oldname="daystokeep",
required=True,
default=0,
help="Backups older than this will be deleted automatically. "
"Set 0 to disable autodeletion.",
)
method = fields.Selection(
selection=[("local", "Local disk"), ("sftp", "Remote SFTP server")],
default="local",
help="Choose the storage method for this backup.",
)
sftp_host = fields.Char(
string='SFTP Server',
oldname="sftpip",
help=(
"The host name or IP address from your remote"
" server. For example 192.168.0.1"
)
)
sftp_port = fields.Integer(
string="SFTP Port",
default=22,
oldname="sftpport",
help="The port on the FTP server that accepts SSH/SFTP calls."
)
sftp_user = fields.Char(
string='Username in the SFTP Server',
oldname="sftpusername",
help=(
"The username where the SFTP connection "
"should be made with. This is the user on the external server."
)
)
sftp_password = fields.Char(
string="SFTP Password",
oldname="sftppassword",
help="The password for the SFTP connection. If you specify a private "
"key file, then this is the password to decrypt it.",
)
sftp_private_key = fields.Char(
string="Private key location",
help="Path to the private key file. Only the Odoo user should have "
"read permissions for that file.",
)
@api.model
def _default_folder(self):
"""Default to ``backups`` folder inside current server datadir."""
return os.path.join(
tools.config["data_dir"],
"backups",
self.env.cr.dbname)
@api.multi
@api.depends("folder", "method", "sftp_host", "sftp_port", "sftp_user")
def _compute_name(self):
"""Get the right summary for this job."""
for rec in self:
if rec.method == "local":
rec.name = "%s @ localhost" % rec.folder
elif rec.method == "sftp":
rec.name = "sftp://%s@%s:%d%s" % (
rec.sftp_user, rec.sftp_host, rec.sftp_port, rec.folder)
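    # Illustrative examples (not from the original source) of the computed name:
    #   local:  "/var/lib/odoo/backups/mydb @ localhost"
    #   sftp:   "sftp://backup@nas.example.com:22/var/backups/odoo"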
@api.constrains("folder", "method")
@api.multi
def _check_folder(self):
"""Do not use the filestore or you will backup your backups."""
for s in self:
if (s.method == "local" and
s.folder.startswith(
tools.config.filestore(self.env.cr.dbname))):
raise exceptions.ValidationError(
_("Do not save backups on your filestore, or you will "
"backup your backups too!"))
@api.multi
def action_sftp_test_connection(self):
"""Check if the SFTP settings are correct."""
try:
# Just open and clos
|
e the connection
with self.sftp_connection():
raise exceptions.Warning(_("Connection Test Succeeded!"))
e
|
xcept (pysftp.CredentialException, pysftp.ConnectionException):
_logger.info("Connection Test Failed!", exc_info=True)
raise exceptions.Warning(_("Connection Test Failed!"))
@api.multi
def action_backup(self):
"""Run selected backups."""
backup = None
filename = self.filename(datetime.now())
successful = self.browse()
# Start with local storage
for rec in self.filtered(lambda r: r.method == "local"):
with rec.backup_log():
# Directory must exist
try:
os.makedirs(rec.folder)
except OSError:
pass
with open(os.path.join(rec.folder, filename),
'wb') as destiny:
# Copy the cached backup
if backup:
with open(backup) as cached:
shutil.copyfileobj(cached, destiny)
# Generate new backup
else:
db.dump_db(self.env.cr.dbname, destiny)
backup = backup or destiny.name
successful |= rec
# Ensure a local backup exists if we are going to write it remotely
sftp = self.filtered(lambda r: r.method == "sftp")
if sftp:
if backup:
cached = open(backup)
else:
cached = tempfile.TemporaryFile()
db.dump_db(self.env.cr.dbname, cached)
with cached:
for rec in sftp:
with rec.backup_log():
with rec.sftp_connection() as remote:
# Directory must exist
try:
remote.makedirs(rec.folder)
except pysftp.ConnectionException:
pass
# Copy cached backup to remote server
with remote.open(
os.path.join(rec.folder, filename),
"wb") as destiny:
shutil.copyfileobj(cached, destiny)
successful |= rec
# Remove old files for successful backups
successful.cleanup()
@api.model
def action_backup_all(self):
"""Run all scheduled backups."""
return self.search([]).action_backup()
@api.multi
@contextmanager
def backup_log(self):
"""Log a backup result."""
try:
_logger.info("Starting database backup: %s", self.name)
yield
except:
_logger.exception("Database backup failed: %s", self.name)
escaped_tb = tools.html_escape(traceback.format_exc())
self.message_post(
"<p>%s</p><pre>%s</pre>" % (
_("Database backup failed."),
escaped_tb),
subtype=self.env.ref("auto_backup.failure"))
else:
_logger.info("Database backup succeeded: %s", self.name)
self.message_post(_("Database backup succeeded."))
@api.multi
def cleanup(self):
"""Clean up old backups."""
now = datetime.now()
for rec in self.filtered("days_to_keep"):
with rec.cleanup_log():
oldest = self.filename(now - timedelta(days=rec.days_to_keep))
if rec.method == "local":
for name in iglob(os.path.join(rec.folder,
"*.dump.zip")):
if os.path.basename(name) < oldest:
os.unlink(name)
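    # --- Editor's note: hypothetical sketch, not part of the original file. ---
    # The methods above call ``self.sftp_connection()`` and ``self.cleanup_log()``,
    # which are not shown in this excerpt.  Assuming pysftp, a minimal
    # ``sftp_connection`` helper could look like this:
    @api.multi
    def sftp_connection(self):
        """Return a pysftp connection, authenticated by key or by password."""
        self.ensure_one()
        params = {
            "host": self.sftp_host,
            "username": self.sftp_user,
            "port": self.sftp_port,
        }
        # Prefer key-based authentication when a private key path is set;
        # in that case the password field is used to decrypt the key.
        if self.sftp_private_key:
            params["private_key"] = self.sftp_private_key
            if self.sftp_password:
                params["private_key_pass"] = self.sftp_password
        else:
            params["password"] = self.sftp_password
        return pysftp.Connection(**params)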
|
Dhandapani/gluster-ovirt
|
backend/manager/tools/engine-image-uploader/src/ovf/ovfenvelope.py
|
Python
|
apache-2.0
| 398,478
| 0.004236
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated Fri Dec 2 15:05:18 2011 by generateDS.py version 2.7b.
#
import sys
import getopt
import re as re_
etree_ = None
Verbose_import_ = False
( XMLParser_import_none, XMLParser_import_lxml,
XMLParser_import_elementtree
) = range(3)
XMLParser_import_library = None
try:
# lxml
from lxml import etree as etree_
XMLParser_import_library = XMLParser_import_lxml
if Verbose_import_:
print("running with lxml.etree")
except ImportError:
try:
# cElementTree from Python 2.5+
import xml.etree.cElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with cElementTree on Python 2.5+")
except ImportError:
try:
# ElementTree from Python 2.5+
import xml.etree.ElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with ElementTree on Python 2.5+")
except ImportError:
try:
# normal cElementTree install
import cElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with cElementTree")
except ImportError:
try:
# normal ElementTree install
import elementtree.ElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with ElementTree")
except ImportError:
raise ImportError("Failed to import ElementTree from any known place")
def parsexml_(*args, **kwargs):
if (XMLParser_import_library == XMLParser_import_lxml and
'parser' not in kwargs):
# Use the lxml ElementTree compatible parser so that, e.g.,
# we ignore comments.
kwargs['parser'] = etree_.ETCompatXMLParser()
doc = etree_.parse(*args, **kwargs)
return doc
#
# User methods
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
try:
from generatedssuper import GeneratedsSuper
except ImportError, exp:
class GeneratedsSuper(object):
def gds_format_string(self, input_data, input_name=''):
return input_data
def gds_validate_string(self, input_data, node, input_name=''):
return input_data
def gds_format_integer(self, input_data, input_name=''):
return '%d' % input_data
def gds_validate_integer(self, input_data, node, input_name=''):
return input_data
def gds_format_integer_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_integer_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
try:
fvalue = float(value)
except (TypeError, ValueError), exp:
raise_parse_error(node, 'Requires sequence of integers')
return input_data
def gds_format_float(self, input_data, input_name=''):
return '%f' % input_data
def gds_validate_float(self, input_data, node, input_name=''):
return input_data
def gds_format_float_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_float_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
try:
fvalue = float(value)
except (TypeError, ValueError), exp:
raise_parse_error(node, 'Requires sequence of floats')
return input_data
def gds_format_double(self, input_data, input_name=''):
return '%e' % input_data
def gds_validate_double(self, input_data, node, input_name=''):
return input_data
def gds_format_double_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_double_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
try:
fvalue = float(value)
except (TypeError, ValueError), exp:
raise_parse_error(node, 'Requires sequence of doubles')
return input_data
def gds_format_boolean(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_boolean(self, input_data, node, input_name=''):
return input_data
def gds_format_boolean_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_boolean_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
if value not in ('true', '1', 'false', '0', ):
raise_parse_error(node, 'Requires sequence of booleans ("true", "1", "false", "0")')
return input_data
def gds_str_lower(self, instring):
return instring.lower()
def get_path_(self, node):
path_list = []
self.get_path_list_(node, path_list)
path_list.reverse()
path = '/'.join(path_list)
return path
Tag_strip_pattern_ = re_.compile(r'\{.*\}')
def get_path_list_(self, node, path_list):
if node is None:
return
tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
if tag:
path_list.append(tag)
self.get_path_list_(node.getparent(), path_list)
def get_class_obj_(self, node, default_class=None):
class_obj1 = default_class
if 'xsi' in node.nsmap:
classname = node.get('{%s}type' % node.nsmap['xsi'])
if classname is not None:
names = classname.split(':')
if len(names) == 2:
classname = names[1]
class_obj2 = globals().get(classname)
if class_obj2 is not None:
class_obj1 = class_obj2
return class_obj1
def gds_build_any(self, node, type_name=None):
return None
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Globals
#
ExternalEncoding = 'ascii'
Tag_pattern_ = re_.compile(r'({.*})?(.*)')
String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
#
# Support/utility functions.
#
def showIndent(outfile, level):
for idx in range(level):
outfile.write(' ')
def quote_xml(inStr):
if not inStr:
return ''
s1 = (isinstance(inStr, basestring) and inStr or
'%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
return s1
def quote_attrib(inStr):
s1 = (isinstance(inStr, basestring) and inStr or
'%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
if '"' in s1:
if "'" in s1:
s1 = '"%s"' % s1.replace('"', """)
else:
s1 = "'%s'" % s1
else:
s1 = '"%s"' % s1
return s1
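# --- Editor's note: hypothetical usage examples, not part of the generated file. ---
# quote_xml only escapes the XML special characters:
#     quote_xml('a < b & c')   -> 'a &lt; b &amp; c'
# quote_attrib additionally wraps the value in quotes, switching quote style
# when the value itself contains double quotes:
#     quote_attrib('plain')    -> '"plain"'
#     quote_attrib('say "hi"') -> the value wrapped in single quotes: 'say "hi"'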
def quote_python(inStr):
s1 = inStr
if s1.find("'") == -1:
if s1.fi
|
valdecar/Murka
|
pyttsx-master/setup.py
|
Python
|
gpl-3.0
| 1,287
| 0.002331
|
'''
pyttsx setup script.
Copyright (c) 2009, 2013 Peter Parente
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED 'AS IS' AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
'''
from setuptools import setup, find_packages
setup(name='pyttsx',
version='1.2',
description='pyttsx - cross platform text-to-speech',
long_description='pyttsx is a Python package supporting common text-to-speech engines on Mac OS X, Windows, and Linux.',
author='Peter Parente',
author_email='[email protected]',
url='https://github.com/parente/pyttsx',
download_url='http://pypi.python.org/pypi/pyttsx',
license='BSD License',
      packages=['pyttsx', 'pyttsx.drivers']
)
|
kevinkirkup/hedge
|
python/hedge/stock.py
|
Python
|
gpl-3.0
| 309
| 0.003236
|
#!/usr/bin/env python
# encoding: utf-8
"""
Generic stock functions
"""
class Stock(object):
"""
Generic Stock information
"""
def __init__(self, symbol, name, sector):
super(Stock, self).__init__()
self.symbol = symbol
self.name = name
self.sector = sector
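# --- Editor's note: hypothetical usage example, not part of the original file. ---
#     aapl = Stock('AAPL', 'Apple Inc.', 'Technology')
#     print(aapl.symbol, aapl.sector)   # -> AAPL Technology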
|
Liuchang0812/slides
|
pycon2015cn/ex6_auto_install/autoinstall.py
|
Python
|
mit
| 590
| 0.00339
|
from __future__ import print_function
import sys
import subprocess
class AutoInstall(object):
_loaded = set()
@classmethod
def find_module(cls, name, path, target=None):
        if path is None and name not in cls._loaded:
cls._loaded.add(name)
print("Installing", name)
try:
out = subprocess.check_output(['sudo', sys.executable, '-m', 'pip', 'install', name])
print(out)
except Exception as e:
print("Failed" + e.message)
return None
sys.meta_path.append(AutoInstall)
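# --- Editor's note: hypothetical sketch, not part of the original file. ---
# The hook registers the class itself, which works because find_module() is a
# classmethod.  find_module() was removed from the import protocol in Python
# 3.12; the same idea written against the modern find_spec() API, assuming pip
# is available, might look like this:
#
#     import importlib
#     import importlib.abc
#     import importlib.util
#
#     class AutoInstallFinder(importlib.abc.MetaPathFinder):
#         _loaded = set()
#
#         def find_spec(self, name, path, target=None):
#             if path is None and name not in self._loaded:
#                 self._loaded.add(name)
#                 subprocess.check_call(
#                     [sys.executable, '-m', 'pip', 'install', name])
#                 importlib.invalidate_caches()
#                 # Let the regular finders locate the freshly installed package.
#                 return importlib.util.find_spec(name)
#             return None
#
#     sys.meta_path.append(AutoInstallFinder())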
|
josenavas/labman
|
labman/db/configuration_manager.py
|
Python
|
bsd-3-clause
| 5,785
| 0
|
# ----------------------------------------------------------------------------
# Copyright (c) 2017-, labman development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from os import environ
from os.path import expanduser, exists
from datetime import datetime
from configparser import ConfigParser
class ConfigurationManager(object):
"""Holds the labman configuration
Parameters
----------
conf_fp: str, optional
Filepath to the configuration file. Default: config_test.cfg
Attributes
----------
test_environment : bool
If true, we are in a test environment.
database : str
The postgres database to connect to
user : str
The postgres user
password : str
The postgres password for the previous user
admin_user : str
The administrator user, which can be used to create/drop environments
admin_password : str
The postgres password for the admin_user
host : str
The host where the database lives
port : int
The port used to connect to the postgres database in the previous host
qiita_server_cert : str
If qiita enabled, the qiita server certificate
Raises
------
RuntimeError
When an option is no longer available.
"""
@staticmethod
def create(config_fp, test_env, db_host, db_port, db_name, db_user,
db_password, db_admin_user, db_admin_password, log_dir,
qiita_server_cert):
"""Creates a new labman configuration file
Parameters
----------
config_fp : str
Path to the configuration file
test_env : bool
If true, a config file for a test environment will be created
db_host : str
The host where the database lives
db_port : int
The port used to connect to the postgres database in the previous
host
db_name : str
The postgres database to connect to
db_user : str
The postgres user
db_password : str
The postgres password for the previous user
db_admin_user : str
The administrator user, which can be used to create/drop
environments
db_admin_password : str
The postgres password for the admin_user
log_dir : str
Path to the log directory
qiita_server_cert : str
The qiita server certificate (for testing)
"""
with open(config_fp, 'w') as f:
f.write(CONFIG_TEMPLATE % {
'test': test_env,
'date': str(datetime.now()),
'user': db_user,
'admin_user': db_admin_user,
'password': db_password,
'admin_password': db_admin_password,
'database': db_name,
'host': db_host,
'port': db_port,
'logdir': log_dir,
'qiita_cert': qiita_server_cert})
def __init__(self):
# If conf_fp is None, we default to the test configuration file
try:
self.conf_fp = environ['LABMAN_CONFIG_FP']
except KeyError:
self.conf_fp = expanduser('~/.labman.cfg')
if not exists(self.conf_fp):
raise RuntimeError(
'Please, configure labman using `labman config`. If the '
'config file is not in `~/.labman.cfg`, please set the '
'`LABMAN_CONFIG_FP` environment variable to the '
'configuration file')
# Parse the configuration file
config = ConfigParser()
with open(self.conf_fp, 'U') as conf_file:
config.readfp(conf_file)
_required_sections = {'postgres'}
if not _required_sections.issubset(set(config.sections())):
missing = _required_sections - set(config.sections())
raise RuntimeError(', '.join(missing))
self._get_main(config)
self._get_postgres(config)
self._get_qiita(config)
def _get_main(self, config):
"""Get the main configuration"""
self.test_environment = config.getboolean('main', 'TEST_ENVIRONMENT')
self.log_dir = config.get('main', 'LOG_DIR')
def _get_postgres(self, config):
"""Get the configuration of the postgres sect
|
ion"""
self.user = config.get('postgres', 'USER')
self.admin_user = config.get('postgres', 'ADMIN_USER') or None
self.password = config.get('postgres', 'PASSWORD')
if not self.password:
self.password = None
self.admin_password = config.get('postgres', 'ADMIN_PASSWORD')
if not self.admin_password:
self.admin_password = None
        self.database = config.get('postgres', 'DATABASE')
self.host = config.get('postgres', 'HOST')
self.port = config.getint('postgres', 'PORT')
def _get_qiita(self, config):
self.qiita_server_cert = config.get('qiita', 'SERVER_CERT')
CONFIG_TEMPLATE = """# Configuration file generated by labman on %(date)s
# ------------------------- MAIN SETTINGS ----------------------------------
[main]
TEST_ENVIRONMENT=%(test)s
LOG_DIR=%(logdir)s
# ----------------------- POSTGRES SETTINGS --------------------------------
[postgres]
USER=%(user)s
PASSWORD=%(password)s
ADMIN_USER=%(admin_user)s
ADMIN_PASSWORD=%(admin_password)s
DATABASE=%(database)s
HOST=%(host)s
PORT=%(port)s
# ------------------------- QIITA SETTINGS ----------------------------------
[qiita]
SERVER_CERT=%(qiita_cert)s
"""
|
CiuffysHub/MITMf
|
mitmflib-0.18.4/mitmflib/argh/decorators.py
|
Python
|
gpl-3.0
| 5,296
| 0.000756
|
# coding: utf-8
#
# Copyright © 2010—2014 Andrey Mikhaylenko and contributors
#
# This file is part of Argh.
#
# Argh is free software under terms of the GNU Lesser
# General Public License version 3 (LGPLv3) as published by the Free
# Software Foundation. See the file README.rst for copying conditions.
#
"""
Command decorators
~~~~~~~~~~~~~~~~~~
"""
from mitmflib.argh.constants import (ATTR_ALIASES, ATTR_ARGS, ATTR_NAME,
ATTR_WRAPPED_EXCEPTIONS,
ATTR_WRAPPED_EXCEPTIONS_PROCESSOR,
ATTR_EXPECTS_NAMESPACE_OBJECT)
__all__ = ['aliases', 'named', 'arg', 'wrap_errors', 'expects_obj']
def named(new_name):
"""
Sets given string as command name instead of the function name.
The string is used verbatim without further processing.
Usage::
@named('load')
def do_load_some_stuff_and_keep_the_original_function_name(args):
...
The resulting command will be available only as ``load``. To add aliases
without renaming the command, check :func:`aliases`.
.. versionadded:: 0.19
"""
def wrapper(func):
setattr(func, ATTR_NAME, new_name)
return func
return wrapper
def aliases(*names):
"""
Defines alternative command name(s) for given function (along with its
original name). Usage::
@aliases('co', 'check')
def checkout(args):
...
The resulting command will be available as ``checkout``, ``check`` and ``co``.
.. note::
This decorator only works with a recent version of argparse (see `Python
issue 9324`_ and `Python rev 4c0426`_). Such version ships with
**Python 3.2+** and may be available in other environments as a separate
package. Argh does not issue warnings and simply ignores aliases if
they are not supported. See :attr:`~argh.assembling.SUPPORTS_ALIASES`.
.. _Python issue 9324: http://bugs.python.org/issue9324
.. _Python rev 4c0426: http://hg.python.org/cpython/rev/4c0426261148/
.. versionadded:: 0.19
"""
def wrapper(func):
setattr(func, ATTR_ALIASES, names)
return func
return wrapper
def arg(*args, **kwargs):
"""
Declares an argument for given function. Does not register the function
anywhere, nor does it modify the function in any way. The signature is
exactly the same as that of :meth:`argparse.ArgumentParser.add_argument`,
only some keywords are not required if they can be easily guessed.
Usage::
@arg('path')
@arg('--format', choices=['yaml','json'], default='json')
@arg('--dry-run', default=False)
@arg('-v', '--verbosity', choices=range(0,3), default=1)
def load(args):
loaders = {'json': json.load, 'yaml': yaml.load}
loader = loaders[args.format]
data = loader(args.path)
if not args.dry_run:
if 1 < verbosity:
print('saving to the database')
put_to_database(data)
Note that:
* you didn't have to specify ``action="store_true"`` for ``--dry-run``;
* you didn't have to specify ``type=int`` for ``--verbosity``.
"""
def wrapper(func):
declared_args = getattr(func, ATTR_ARGS, [])
# The innermost decorator is called first but appears last in the code.
# We need to preserve the expected order of positional arguments, so
        # the outermost decorator inserts its value before the innermost's:
declared_args.insert(0, dict(option_strings=args, **kwargs))
setattr(func, ATTR_ARGS, declared_args)
return func
return wrapper
def wrap_errors(errors=None, processor=None, *args):
"""
Decorator. Wraps given exceptions into
:class:`~argh.exceptions.CommandError`. Usage::
        @wrap_errors([AssertionError])
def foo(x=None, y=None):
assert x or y, 'x or y must be specified'
If the assertion fails, its message will be correctly printed and the
stack hidden. This helps to avoid boilerplate code.
:param errors:
A list of exception classes to catch.
:param processor:
A callable that expects the exception object and returns a string.
For example, this renders all wrapped errors in red colour::
from termcolor import colored
def failure(err):
return colored(str(err), 'red')
@wrap_errors(processor=failure)
def my_command(...):
...
"""
def wrapper(func):
if errors:
setattr(func, ATTR_WRAPPED_EXCEPTIONS, errors)
if processor:
setattr(func, ATTR_WRAPPED_EXCEPTIONS_PROCESSOR, processor)
return func
return wrapper
def expects_obj(func):
"""
Marks given function as expecting a namespace object.
Usage::
@arg('bar')
@arg('--quux', default=123)
@expects_obj
def foo(args):
yield args.bar, args.quux
This is equivalent to::
def foo(bar, quux=123):
yield bar, quux
In most cases you don't need this decorator.
"""
setattr(func, ATTR_EXPECTS_NAMESPACE_OBJECT, True)
return func
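# --- Editor's note: hypothetical usage sketch, not part of the original file. ---
# The decorators combine naturally on a single command function, e.g.:
#
#     @named('ls')
#     @aliases('list')
#     @arg('--long', default=False, help='use a long listing format')
#     def list_items(long=False):
#         yield 'detailed view' if long else 'short view'
#
# argh would expose this command as ``ls`` (with alias ``list``, where
# supported) and add a ``--long`` flag to its parser.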
|
IljaGrebel/OpenWrt-SDK-imx6_HummingBoard
|
staging_dir/host/lib/scons-2.3.5/SCons/Options/EnumOption.py
|
Python
|
gpl-2.0
| 1,980
| 0.001515
|
#
# Copyright (c) 2001 - 2015 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Options/EnumOption.py rel_2.3.5:3329:275e75118ad4 2015/06/20 11:18:26 bdbaddog"
__doc__ = """Place-holder for the old SCons.Options module hierarchy
This is for backwards compatibility. The new equivalent is the Variables/
class hierarchy. These will have deprecation warnings added (some day),
and will then be removed entirely (some day).
"""
import SCons.Variables
import SCons.Warnings
warned = False
def EnumOption(*args, **kw):
global warned
if not warned:
msg = "The EnumOption() function is deprecated; use the EnumVariable() function instead."
SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg)
warned = True
return SCons.Variables.EnumVariable(*args, **kw)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
xuru/pyvisdk
|
pyvisdk/do/extension_fault_type_info.py
|
Python
|
mit
| 1,025
| 0.00878
|
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def ExtensionFaultTypeInfo(vim, *args, **kwargs):
'''This data object type describes fault types defined by the extension.'''
obj = vim.client.factory.create('ns0:ExtensionFaultTypeInfo')
# do some validation checking...
if (len(args) + len(kwargs)) < 1:
raise IndexError('Expected at least 2 arguments got: %d' % len(args))
required = [ 'faultID' ]
optional = [ 'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
        setattr(obj, name, arg)
    for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
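# --- Editor's note: hypothetical usage sketch, not part of the original file. ---
# With a connected ``vim`` service instance the factory accepts the required
# argument positionally or by keyword; unknown names raise InvalidArgumentError:
#
#     info = ExtensionFaultTypeInfo(vim, 'com.example.MyFault')
#     info = ExtensionFaultTypeInfo(vim, faultID='com.example.MyFault')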
|
ViennaRNA/forgi
|
test/forgi/threedee/utilities/test_dssr.py
|
Python
|
gpl-3.0
| 2,377
| 0.000841
|
import unittest
import json
import forgi.threedee.utilities._dssr as ftud
import forgi.threedee.model.coarse_grain as ftmc
import forgi.graph.residue as fgr
class TestHelperFunctions(unittest.TestCase):
def test_dssr_to_pdb_atom_id_validIds(self):
self.assertEqual(ftud.dssr_to_pdb_resid(
"B.C24"), ("B", (" ", 24, " ")))
self.assertEqual(ftud.dssr_to_pdb_resid(
"1:B.C24"), ("B", (" ", 24, " ")))
self.assertEqual(ftud.dssr_to_pdb_resid(
"LYS124"), (None, (" ", 124, " ")))
self.assertEqual(ftud.dssr_to_pdb_resid(
"Z12.U13"), ("Z12", (" ", 13, " ")))
self.assertEqual(ftud.dssr_to_pdb_resid(
"A.5BU36"), ("A", (" ", 36, " ")))
self.assertEqual(ftud.dssr_to_pdb_resid(
"C.C47^M"), ("C", (" ", 47, "M")))
self.assertEqual(ftud.dssr_to_pdb_resid(
"C.5BU47^M"), ("C", (" ", 47, "M")))
self.assertEqual(ftud.dssr_to_pdb_resid(u'A.C1'), ("A", (" ", 1, " ")))
self.assertEqual(ftud.dssr_to_pdb_resid(
u'B.U-1'), ("B", (" ", -1, " ")))
self.assertEqual(ftud.dssr_to_pdb_resid(
u'A.A-2'), ("A", (" ", -2, " ")))
class TestCoaxialStacks(unittest.TestCase):
def setUp(self):
cg = ftmc.CoarseGrainRNA.from_bg_file("test/forgi/threedee/data/1J1U.cg")
with open("test/forgi/threedee/data/1J1U.json") as f:
j = json.load(f)
self.dssr = ftud.DSSRAnnotation(j, cg)
def test_coaxial_stacks(self):
        self.assertEqual(sorted(self.dssr.coaxial_stacks()),
sorted([["s2", "s1"], ["s0", "s3"]]))
@unittest.skip("Currently not working. TODO")
def test_compare_coaxial_stacks(self):
forgi, dssr = self.dssr.compare_coaxial_stack_annotation()
self.assertEqual(len(dssr), 2)
self.assertGreaterEqual(len(forgi), 1)
self.assertGreaterEqual(len(forgi & dssr), 1)
self.assertIn(("s0", "s5"), (x.stems for x in forgi))
for x in forgi:
self.assertEqual(x.forgi, "stacking")
for x in dssr:
self.assertEqual(x.dssr, "stacking")
def test_stacking_nts(self):
stacks = self.dssr.stacking_nts()
self.assertIn((fgr.RESID("B:544"), fgr.RESID("B:545")), stacks)
self.assertNotIn((fgr.RESID("B:549"), fgr.RESID("B:544")), stacks)
|
pointhi/PySplat
|
PySplat/util/__init__.py
|
Python
|
gpl-3.0
| 682
| 0.004399
|
'''
pysplat is free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
pysplat is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with pysplat. If not, see < http://www.gnu.org/licenses/ >.
(C) 2016 by Thomas Pointhuber, <[email protected]>
'''
|
liushuaikobe/GitArchiveUtils
|
gitradar/config.py
|
Python
|
gpl-2.0
| 375
| 0
|
# -*- coding: utf-8 -*-
from tornado.options import define
define('debug', default=False, type=bool)
# The port Tornado listens on
define('port', default=8888, type=int)
# Whoosh Search related settings
define('whoosh_ix_path', default='/Users/liushuai/Desktop/index', type=str)
# MongoDB
define('mongo_addr', default='127.0.0.1', type=str)
define('mongo_port', default=27017, type=int)
|
JakubPetriska/poker-cfr
|
test/sampling_tests.py
|
Python
|
mit
| 2,099
| 0.003335
|
import unittest
import os
import numpy as np
from tools.sampling import read_log_file
from tools.walk_trees import walk_trees_with_data
from tools.game_tree.nodes import ActionNode, BoardCardsNode, HoleCardsNode
LEDUC_POKER_GAME_FILE_PATH = 'games/leduc.limit.2p.game'
class SamplingTests(unittest.TestCase):
def test_log_parsing_to_sample_trees(self):
players = read_log_file(
LEDUC_POKER_GAME_FILE_PATH,
'test/sample_log.log',
['player_1', 'player_2'])
callback_was_called_at_least_once = False
def node_callback(data, node):
nonlocal callback_was_called_at_least_once
if isinstance(node, ActionNode):
callback_was_called_at_least_once = True
if data:
self.assertTrue(np.all(node.action_decision_counts == [0, 1, 0]))
else:
self.assertTrue(np.all(node.action_decision_counts == [0, 0, 0]))
return [data if action == 1 else False for action in node.children]
elif isinstance(node, HoleCardsNode):
return [cards == (43,) or cards == (47,) for cards in node.children]
elif isinstance(node, BoardCardsNode):
return [data if cards == (50,) else False for cards in node.children]
else:
return [data for _ in node.children]
for name in players:
player_tree = players[name]
walk_trees_with_data(node_callback, True, player_tree)
self.assertTrue(callback_was_called_at_least_once)
def test_log_parsing_to_sample_trees_performance(self):
players = read_log_file(
LEDUC_POKER_GAME_FILE_PATH,
'test/sample_log-large.log',
['CFR_trained', 'Random_1'])
visits_sum = 0
for name in players:
player_tree = players[name]
for _, root_action_node in player_tree.children.items():
visits_sum += np.sum(root_action_node.action_decision_counts)
self.assertEqual(visits_sum, 50000)
|
himaaaatti/qtile
|
libqtile/manager.py
|
Python
|
mit
| 59,989
| 0.000267
|
# Copyright (c) 2008, Aldo Cortesi. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import division
try:
import tracemalloc
except ImportError:
tracemalloc = None
from libqtile.dgroups import DGroups
from xcffib.xproto import EventMask, WindowError, AccessError, DrawableError
import logging
import os
import pickle
import shlex
import signal
import sys
import traceback
import xcffib
import xcffib.xinerama
import xcffib.xproto
import six
from . import asyncio
from .config import Drag, Click, Screen, Match, Rule
from .group import _Group
from .log_utils import logger
from .state import QtileState
from .utils import QtileError, get_cache_dir
from .widget.base import _Widget
from . import command
from . import hook
from . import utils
from . import window
from . import xcbq
if sys.version_info >= (3, 3):
def _import_module(module_name, dir_path):
import importlib
file_name = os.path.join(dir_path, module_name) + '.py'
f = importlib.machinery.SourceFileLoader(module_name, file_name)
module = f.load_module()
return module
else:
def _import_module(module_name, dir_path):
import imp
fp = None
try:
fp, pathname, description = imp.find_module(module_name, [dir_path])
module = imp.load_module(module_name, fp, pathname, description)
finally:
if fp:
fp.close()
return module
class Qtile(command.CommandObject):
"""
This object is the __root__ of the command graph.
"""
def __init__(self, config, displayName=None, fname=None, no_spawn=False, state=None):
self.no_spawn = no_spawn
self._eventloop = None
self._finalize = False
if not displayName:
displayName = os.environ.get("DISPLAY")
if not displayName:
raise QtileError("No DISPLAY set.")
if not fname:
# Dots might appear in the host part of the display name
# during remote X sessions. Let's strip the host part first.
displayNum = displayName.partition(":")[2]
if "." not in displayNum:
displayName += ".0"
fname = command.find_sockfile(displayName)
self.conn = xcbq.Connection(displayName)
self.config = config
self.fname = fname
hook.init(self)
self.windowMap = {}
self.widgetMap = {}
self.groupMap = {}
self.groups = []
self.keyMap = {}
# Find the modifier mask for the numlock key, if there is one:
nc = self.conn.keysym_to_keycode(xcbq.keysyms["Num_Lock"])
self.numlockMask = xcbq.ModMasks[self.conn.get_modifier(nc)]
self.validMask = ~(self.numlockMask | xcbq.ModMasks["lock"])
# Because we only do Xinerama multi-screening,
# we can assume that the first
# screen's root is _the_ root.
self.root = self.conn.default_screen.root
self.root.set_attribute(
eventmask=(
EventMask.StructureNotify |
EventMask.SubstructureNotify |
EventMask.SubstructureRedirect |
EventMask.EnterWindow |
EventMask.LeaveWindow
)
)
self.root.set_property(
'_NET_SUPPORTED',
[self.conn.atoms[x] for x in xcbq.SUPPORTED_ATOMS]
)
self.supporting_wm_check_window = self.conn.create_window(-1, -1, 1, 1)
self.root.set_property(
'_NET_SUPPORTING_WM_CHECK',
self.supporting_wm_check_window.wid
)
# setup the default cursor
self.root.set_cursor('left_ptr')
wmname = getattr(self.config, "wmname", "qtile")
self.supporting_wm_check_window.set_property('_NET_WM_NAME', wmname)
self.supporting_wm_check_window.set_property(
'_NET_SUPPORTING_WM_CHECK',
self.supporting_wm_check_window.wid
)
if config.main:
config.main(self)
self.dgroups = None
if self.config.groups:
key_binder = None
if hasattr(self.config, 'dgroups_key_binder'):
key_binder = self.config.dgroups_key_binder
self.dgroups = DGroups(self, self.config.groups, key_binder)
if hasattr(config, "widget_defaults") and config.widget_defaults:
_Widget.global_defaults = config.widget_defaults
else:
_Widget.global_defaults = {}
for i in self.groups:
self.groupMap[i.name] = i
self.setup_eventloop()
self.server = command._Server(self.fname, self, config, self._eventloop)
self.currentScreen = None
self.screens = []
self._process_screens()
self.currentScreen = self.screens[0]
self._drag = None
self.ignoreEvents = set([
xcffib.xproto.KeyReleaseEvent,
xcffib.xproto.ReparentNotifyEvent,
xcffib.xproto.CreateNotifyEvent,
# DWM handles this to help "broken focusing windows".
xcffib.xproto.MapNotifyEvent,
xcffib.xproto.LeaveNotifyEvent,
xcffib.xproto.FocusOutEvent,
xcffib.xproto.FocusInEvent,
xcffib.xproto.NoExposureEvent
])
self.conn.flush()
self.conn.xsync()
self._xpoll()
# Map and Grab keys
for key in self.config.keys:
self.mapKey(key)
# It fixes problems with focus when clicking windows of some specific clients like xterm
def noop(qtile):
pass
self.config.mouse += (Click([], "Button1", command.lazy.function(noop), focus="after"),)
self.mouseMap = {}
for i in self.config.mouse:
if self.mouseMap.get(i.button_code) is None:
self.mouseMap[i.button_code] = []
self.mouseMap[i.button_code].append(i)
self.grabMouse()
# no_spawn is set when we are restarting; we only want to run the
# startup hook once.
if not no_spawn:
hook.fire("startup_once")
hook.fire("startup")
self.scan()
self.update_net_desktops()
hook.subscribe.setgroup(self.update_net_desktops)
if state:
st = pickle.load(six.BytesIO(state.encode()))
try:
st.apply(self)
except:
logger.exception("failed restoring state")
self.selection = {
"PRIMARY": {"owner": None, "selection": ""},
"CLIPBOARD": {"owner": None, "selection": ""}
}
self.setup_selection()
def setup_selection(self):
PRIMARY = self.conn.atoms["PRIMARY"]
CLIPBOARD = self.conn.atoms["CLIPBOARD"]
self.selection_window = self.conn.create_window(-1, -1, 1, 1)
self.selection_window.set_attribute(eventmask=EventMask.PropertyChange)
self.conn.xfixes.select_selection_input(self.selection_window,
"PRIMARY")
self.conn.xfixes.select_selection_input(self.selecti
|
godiard/sugar
|
src/jarabe/frame/frameinvoker.py
|
Python
|
gpl-2.0
| 1,411
| 0
|
# Copyright (C) 2007, Eduardo Silva <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from gi.repository import Gdk
from sugar3.graphics import style
from sugar3.graphics.palette import WidgetInvoker
def _get_screen_area():
frame_thickness = style.GRID_CELL_SIZE
screen_area = Gdk.Rectangle()
screen_area.x = screen_area.y = frame_thickness
    screen_area.width = Gdk.Screen.width() - frame_thickness
screen_area.height = Gdk.Screen.height() - frame_thickness
return screen_area
class FrameWidgetInvoker(WidgetInvoker):
def __init__(self, widget):
WidgetInvoker.__init__(self, widget, widget.get_child())
self._position_hint = self.ANCHORED
self._screen_area = _get_screen_area()
|
thinksabin/wafw00f
|
wafw00f/plugins/f5trafficshield.py
|
Python
|
bsd-3-clause
| 408
| 0
|
#!/usr/bin/env python
NAME = 'F5 Trafficshield'
def is_waf(self):
    for hv in [['cookie', '^ASINFO='], ['server', 'F5-TrafficShield']]:
r = self.matchheader(hv)
if r is None:
return
elif r:
return r
# the following based on nmap's http-waf-fingerprint.nse
if self.matchheader(('server', 'F5-TrafficShield')):
return True
return False
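# --- Editor's note: not part of the original file. ---
# The loop above returns early (with None) as soon as matchheader() yields
# None, so the ['server', 'F5-TrafficShield'] pair and the nmap-based check
# below it are only reached when the earlier checks return something other
# than None.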
|
devs1991/test_edx_docmode
|
venv/lib/python2.7/site-packages/sympy/polys/domains/sympyrealdomain.py
|
Python
|
agpl-3.0
| 1,456
| 0.00206
|
"""Implementation of :class:`SymPyRealDomain` class. """
from sympy.polys.domains.realdomain import RealDomain
from sympy.polys.domains.groundtypes import SymPyRealType
class SymPyRealDomain(RealDomain):
"""Domain for real numbers based on
|
SymPy Float type. """
dtype = SymPyRealType
zero = dtype(0)
one = dtype(1)
alias = 'RR_sympy'
def __init__(self):
pass
def from_ZZ_python(K1, a, K0):
"""Convert a Python `int` object to `dtype`. """
return SymPyRealType(a)
    def from_QQ_python(K1, a, K0):
"""Convert a Python `Fraction` object to `dtype`. """
return SymPyRealType(a.numerator) / a.denominator
def from_ZZ_sympy(K1, a, K0):
"""Convert a SymPy `Integer` object to `dtype`. """
return SymPyRealType(a.p)
def from_QQ_sympy(K1, a, K0):
"""Convert a SymPy `Rational` object to `dtype`. """
return SymPyRealType(a.p) / a.q
def from_ZZ_gmpy(K1, a, K0):
"""Convert a GMPY `mpz` object to `dtype`. """
return SymPyRealType(int(a))
def from_QQ_gmpy(K1, a, K0):
"""Convert a GMPY `mpq` object to `dtype`. """
return SymPyRealType(int(a.numer())) / int(a.denom())
def from_RR_sympy(K1, a, K0):
"""Convert a SymPy `Float` object to `dtype`. """
return a
def from_RR_mpmath(K1, a, K0):
"""Convert a mpmath `mpf` object to `dtype`. """
return SymPyRealType(a)
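# --- Editor's note: hypothetical usage examples, not part of the original file. ---
# Each from_* method coerces a ground type into a SymPy Float, e.g.:
#
#     from sympy import Rational
#     RR = SymPyRealDomain()
#     RR.from_ZZ_python(3, None)               # -> Float(3)
#     RR.from_QQ_sympy(Rational(1, 4), None)   # -> Float(0.25)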
|
adieu/authentic2
|
authentic2/saml/migrations/0022_auto__chg_field_libertysession_django_session_key__chg_field_libertyar.py
|
Python
|
agpl-3.0
| 24,009
| 0.006872
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from authentic2.compat import user_model_label
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'LibertySession.django_session_key'
db.alter_column(u'saml_libertysession', 'django_session_key', self.gf('django.db.models.fields.CharField')(max_length=128))
# Changing field 'LibertyArtifact.provider_id'
db.alter_column(u'saml_libertyartifact', 'provider_id', self.gf('django.db.models.fields.CharField')(max_length=256))
# Changing field 'LibertyArtifact.artifact'
db.alter_column(u'saml_libertyartifact', 'artifact', self.gf('django.db.models.fields.CharField')(max_length=128, primary_key=True))
# Changing field 'LibertyArtifact.django_session_key'
db.alter_column(u'saml_libertyartifact', 'django_session_key', self.gf('django.db.models.fields.CharField')(max_length=128))
# Changing field 'LibertyManageDump.django_session_key'
db.alter_column(u'saml_libertymanagedump', 'django_session_key', self.gf('django.db.models.fields.CharField')(max_length=128))
# Changing field 'LibertySessionSP.django_session_key'
db.alter_column(u'saml_libertysessionsp', 'django_session_key', self.gf('django.db.models.fields.CharField')(max_length=128))
        # Changing field 'LibertyAssertion.provider_id'
db.alter_column(u'saml_libertyassertion', 'provider_id', self.gf('django.db.models.fields.CharField')(max_length=256))
# Changing field 'LibertyAssertion.assertion_id'
db.alter_column(u'saml_libertyassertion', 'assertion_id', self.gf('django.db.models.fields.CharField')(max_length=128))
# Changing field 'LibertyAssertion.session_index'
        db.alter_column(u'saml_libertyassertion', 'session_index', self.gf('django.db.models.fields.CharField')(max_length=128))
# Changing field 'LibertySessionDump.django_session_key'
db.alter_column(u'saml_libertysessiondump', 'django_session_key', self.gf('django.db.models.fields.CharField')(max_length=128))
def backwards(self, orm):
# Changing field 'LibertySession.django_session_key'
db.alter_column(u'saml_libertysession', 'django_session_key', self.gf('django.db.models.fields.CharField')(max_length=40))
# Changing field 'LibertyArtifact.provider_id'
db.alter_column(u'saml_libertyartifact', 'provider_id', self.gf('django.db.models.fields.CharField')(max_length=80))
# Changing field 'LibertyArtifact.artifact'
db.alter_column(u'saml_libertyartifact', 'artifact', self.gf('django.db.models.fields.CharField')(max_length=40, primary_key=True))
# Changing field 'LibertyArtifact.django_session_key'
db.alter_column(u'saml_libertyartifact', 'django_session_key', self.gf('django.db.models.fields.CharField')(max_length=40))
# Changing field 'LibertyManageDump.django_session_key'
db.alter_column(u'saml_libertymanagedump', 'django_session_key', self.gf('django.db.models.fields.CharField')(max_length=40))
# Changing field 'LibertySessionSP.django_session_key'
db.alter_column(u'saml_libertysessionsp', 'django_session_key', self.gf('django.db.models.fields.CharField')(max_length=40))
# Changing field 'LibertyAssertion.provider_id'
db.alter_column(u'saml_libertyassertion', 'provider_id', self.gf('django.db.models.fields.CharField')(max_length=80))
# Changing field 'LibertyAssertion.assertion_id'
db.alter_column(u'saml_libertyassertion', 'assertion_id', self.gf('django.db.models.fields.CharField')(max_length=50))
# Changing field 'LibertyAssertion.session_index'
db.alter_column(u'saml_libertyassertion', 'session_index', self.gf('django.db.models.fields.CharField')(max_length=80))
# Changing field 'LibertySessionDump.django_session_key'
db.alter_column(u'saml_libertysessiondump', 'django_session_key', self.gf('django.db.models.fields.CharField')(max_length=40))
models = {
u'attribute_aggregator.attributesource': {
'Meta': {'object_name': 'AttributeSource'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'namespace': ('django.db.models.fields.CharField', [], {'default': "('Default', 'Default')", 'max_length': '100'})
},
user_model_label: {
'Meta': {'object_name': user_model_label.split('.')[-1]},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
},
u'idp.attributeitem': {
'Meta': {'object_name': 'AttributeItem'},
'attribute_name': ('django.db.models.fields.CharField', [], {'default': "('OpenLDAProotDSE', 'OpenLDAProotDSE')", 'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'output_name_format': ('django.db.models.fields.CharField', [], {'default': "('urn:oasis:names:tc:SAML:2.0:attrname-format:uri', 'SAMLv2 URI')", 'max_length': '100'}),
'output_namespace': ('django.db.models.fields.CharField', [], {'default': "('Default', 'Default')", 'max_length': '100'}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['attribute_aggregator.AttributeSource']", 'null': 'True', 'blank': 'True'})
},
u'idp.attributelist': {
'Meta': {'object_name': 'AttributeList'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'attributes of the list'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['idp.AttributeItem']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
u'idp.attributepolicy': {
'Meta': {'object_name': 'AttributePolicy'},
'allow_attributes_selection': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'ask_consent_attributes': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'attribute_filter_for_sso_from_push_sources': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filter attributes of push sources with list'", 'null': 'True', 'to': u"orm['idp.AttributeList']"}),
'attribute_list_for_sso_from_pull_sources': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes from pull sources'", 'null': 'True', 'to': u"orm['idp.AttributeList']"}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'filter_source_of_filtered_attributes': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'forward_attributes_from_push_sources': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'map_attributes_from_push_sources': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'map_attributes_of_filtered_attributes': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'output_name_format': ('django.db.models.fields.CharField', [], {'default': "('urn:oasis:names:tc:SAML:2.0:attrname-format:uri', 'SAMLv2 URI')", 'max_length': '100'}),
'output_namespace': ('django.db.models.fields.CharField', [], {'default': "('Default', 'Default')", 'max_length': '100'}),
'send_error_and_no_attrs_if_missing_required_attrs': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'source_filt
|
pingiun/keybaseproofbot
|
keybaseproofbot/handlers.py
|
Python
|
mit
| 15,489
| 0.003168
|
import time
import json
import logging
import re
import requests
from telegram import InlineQueryResultArticle, InputTextMessageContent, ParseMode
from telegram.ext import ConversationHandler
from keybaseproofbot.models import Proof
from keybaseproofbot.proof_handler import check_proof_message, lookup_proof, store_proof, check_key
from keybaseproofbot.utils import fix_dashes
from keybaseproofbot.wrapperfilter import filter_group, filter_private
@filter_group
def proof_message_handle(bot, update):
if update.message.from_user.username == '':
logging.info("User (%s) without username sent message.",
update.message.from_user.first_name)
return
entities = [
entity for entity in update.message.entities if entity.type == 'pre' or entity.type == 'code'
]
if len(entities) != 2:
logging.warning(
"Message with message id %s from sender %s does not have two pre blocks.",
update.message.message_id, update.message.from_user.username)
bot.delete_message(chat_id=update.message.chat_id, message_id=update.message.message_id)
return
succes, proof = check_proof_message(bot, update, entities)
if succes:
signed_block = update.message.text[entities[1].offset:entities[1]
.offset + entities[1].length]
store_proof(proof, signed_block, update)
bot.send_message(
chat_id=update.message.chat_id,
text="Your proof was succesfully stored!",
reply_to_message_id=update.message.message_id)
elif proof == 'invalid_sign':
bot.send_message(
chat_id=update.message.chat_id,
text="Your proof is not valid. Paging @pingiun to take a look at it.",
reply_to_message_id=update.message.message_id)
        bot.deleteMessage(chat_id=update.message.chat_id, message_id=update.message.message_id)
elif proof == 'notimplemented':
bot.send_message(
chat_id=update.message.chat_id,
text="Using other hosts than keybase.io is not supported yet.")
        bot.deleteMessage(chat_id=update.message.chat_id, message_id=update.message.message_id)
@filter_group
def other_message_handle(bot, update):
    bot.deleteMessage(
        chat_id=update.message.chat_id, message_id=update.message.message_id)
def inline_handler(bot, update):
query = update.inline_query.query
proofs = Proof.query.filter(
Proof.telegram_username.like("%{}%".format(query))).all()
results = [
InlineQueryResultArticle(
id=proof.telegram_username,
title=proof.telegram_username,
input_message_content=InputTextMessageContent(
"✅ https://keybase.io/{} is @{} on Telegram (cached). You can talk to @KeybaseProofBot for current information, or check out @KeybaseProofs.".format(
proof.keybase_username, proof.telegram_username))) for proof in proofs
]
update.inline_query.answer(results)
@filter_private
def start(bot, update):
bot.send_message(
chat_id=update.message.chat_id,
text="Hello, welcome to the (unofficial) Keybase Telegram Proving Bot. "
"I can help you search for Telegram user proofs.\n"
"*Please keep in mind that this bot is unofficial, which means that your telegram proof "
"is not included in your signature chain. Revocations are also not implemented (yet).*\n\n"
"You can control me with these commands:\n\n"
"/newproof - build a proof message to post in @KeybaseProofs\n"
"/lookup - check if a user has proved their identity on Telegram\n"
"/forwardproof - the bot forwards the proof message for a certain Telegram user\n"
"/cancel - cancel the current command",
parse_mode=ParseMode.MARKDOWN)
@filter_private
def notusername(bot, update):
bot.send_message(
chat_id=update.message.chat_id,
text="Please enter a username like @pingiun, or /cancel to cancel "
"the current command.")
@filter_private
def notkbusername(bot, update):
bot.send_message(
chat_id=update.message.chat_id, text="Please enter a correct input.")
@filter_private
def cancel(bot, update):
bot.send_message(
chat_id=update.message.chat_id, text="Canceled current command.")
return ConversationHandler.END
@filter_private
def newproof(bot, update, args):
if not update.message.from_user.username:
bot.send_message(
chat_id=update.message.chat_id,
text="You need to have a username to prove it!")
if len(args) == 1:
update.message.text = args[0]
return make_json(bot, update)
bot.send_message(
chat_id=update.message.chat_id,
text="Please enter a keybase username to connect to your Telegram account."
)
return 'enter_kbusername'
temp_proof_data = {}
@filter_private
def make_json(bot, update):
match = re.match(r'^(?:(?:(?:https://)?keybase.io/)|@)?([A-Za-z_]+)$',
update.message.text)
if not match:
return notkbusername(bot, update)
username = match.group(0)
r = requests.get(
'https://keybase.io/_/api/1.0/user/lookup.json?usernames={}&fields=basics,public_keys'.format(username))
try:
keybase = r.json()
except json.decoder.JSONDecodeError as e:
logging.exception(e)
bot.send_message(
chat_id=update.message.chat_id,
text="Something went wrong while looking up your username.")
return ConversationHandler.END
try:
fingerprint = keybase['them'][0]['public_keys']['primary'][
'key_fingerprint']
host = 'keybase.io'
key_id = fingerprint[:-16]
kid = keybase['them'][0]['public_keys']['primary']['kid']
uid = keybase['them'][0]['id']
username = keybase['them'][0]['basics']['username']
except KeyError:
bot.send_message(
chat_id=update.message.chat_id,
text="Your username was not found on Keybase!")
return
try:
data = {
'body': {
'key': {
'fingerprint': fingerprint,
'host': host,
'key_id': key_id,
'kid': kid,
'uid': uid,
'username': username,
},
'service': {
'name': 'telegram',
'username': update.message.from_user.username,
},
'type': 'web_service_binding',
'version': 1,
},
'ctime': int(time.time()),
'expire_in': 60 * 60 * 24 * 365 * 1, # Expire in 1 year
'tag': 'signature'
}
temp_proof_data[update.message.chat_id] = data
json_block = json.dumps(data, indent=4)
except Exception as e:
logging.exception(e)
bot.send_message(
chat_id=update.message.chat_id, text="Something went wrong!")
return
bot.send_message(chat_id=update.message.chat_id,
text="Okay, please paste the following into your terminal (where you can use the keybase cli client) and paste the output here.")
bot.send_message(
chat_id=update.message.chat_id,
text="```\nkeybase pgp sign --message \"{}\"\n```".format(json_block.replace(r'"', r'\"')),
parse_mode=ParseMode.MARKDOWN)
bot.send_message(chat_id=update.message.chat_id,
text="If want to use gpg(2) you can copy and paste this command instead:")
bot.send_message(chat_id=update.message.chat_id,
text="```\necho \"{}\" | gpg -a --sign\n```".format(json_block.replace(r'"', r'\"')),
parse_mode=ParseMode.MARKDOWN)
return 'sign_block'
@filter_private
def check_block(bot, update):
if update.message.text.startswith('/cancel'):
return cancel()
lines = update.message.text.split('\n')
if len(lines) > 1 and not ("BEGIN PGP MESSAGE" in line
|
MetrodataTeam/incubator-airflow
|
tests/contrib/operators/test_sftp_operator.py
|
Python
|
apache-2.0
| 9,161
| 0.001092
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import unittest
from base64 import b64encode
from datetime import datetime
from airflow import configuration
from airflow import models
from airflow.contrib.operators.sftp_operator import SFTPOperator, SFTPOperation
from airflow.contrib.operators.ssh_operator import SSHOperator
from airflow.models import DAG, TaskInstance
from airflow.settings import Session
TEST_DAG_ID = 'unit_tests'
DEFAULT_DATE = datetime(2017, 1, 1)
def reset(dag_id=TEST_DAG_ID):
session = Session()
tis = session.query(models.TaskInstance).filter_by(dag_id=dag_id)
tis.delete()
session.commit()
session.close()
reset()
class SFTPOperatorTest(unittest.TestCase):
def setUp(self):
configuration.load_test_config()
from airflow.contrib.hooks.ssh_hook import SSHHook
hook = SSHHook(ssh_conn_id='ssh_default')
hook.no_host_key_check = True
args = {
'owner': 'airflow',
'start_date': DEFAULT_DATE,
'provide_context': True
}
dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
dag.schedule_interval = '@once'
self.hook = hook
self.dag = dag
self.test_dir = "/tmp"
self.test_local_filename = 'test_local_file'
self.test_remote_filename = 'test_remote_file'
self.test_local_filepath = '{0}/{1}'.format(self.test_dir,
self.test_local_filename)
self.test_remote_filepath = '{0}/{1}'.format(self.test_dir,
self.test_remote_filename)
def test_pickle_file_transfer_put(self):
configuration.set("core", "enable_xcom_pickling", "True")
test_local_file_content = \
b"This is local file content \n which is multiline " \
b"continuing....with other character\nanother line here \n this is last line"
# create a test file locally
with open(self.test_local_filepath, 'wb') as f:
f.write(test_local_file_content)
# put test file to remote
put_test_task = SFTPOperator(
task_id="test_sftp",
ssh_hook=self.hook,
local_filepath=self.test_local_filepath,
remote_filepath=self.test_remote_filepath,
operation=SFTPOperation.PUT,
dag=self.dag
)
self.assertIsNotNone(put_test_task)
ti2 = TaskInstance(task=put_test_task, execution_date=datetime.now())
ti2.run()
# check the remote file content
check_file_task = SSHOperator(
task_id="test_check_file",
ssh_hook=self.hook,
command="cat {0}".format(self.test_remote_filepath),
do_xcom_push=True,
dag=self.dag
)
self.assertIsNotNone(check_file_task)
ti3 = TaskInstance(task=check_file_task, execution_date=datetime.now())
ti3.run()
self.assertEqual(
ti3.xcom_pull(task_ids='test_check_file', key='return_value').strip(),
test_local_file_content)
def test_json_file_transfer_put(self):
configuration.set("core", "enable_xcom_pickling", "False")
test_local_file_content = \
b"This is local file content \n which is multiline " \
b"continuing....with other character\nanother line here \n this is last line"
# create a test file locally
with open(self.test_local_filepath, 'wb') as f:
f.write(test_local_file_content)
# put test file to remote
put_test_task = SFTPOperator(
task_id="test_sftp",
ssh_hook=self.hook,
local_filepath=self.test_local_filepath,
remote_filepath=self.test_remote_filepath,
operation=SFTPOperation.PUT,
dag=self.dag
)
self.assertIsNotNone(put_test_task)
ti2 = TaskInstance(task=put_test_task, execution_date=datetime.now())
ti2.run()
# check the remote file content
check_file_task = SSHOperator(
task_id="test_check_file",
ssh_hook=self.hook,
command="cat {0}".format(self.test_remote_filepath),
do_xcom_push=True,
dag=self.dag
)
self.assertIsNotNone(check_file_task)
ti3 = TaskInstance(task=check_file_task, execution_date=datetime.now())
ti3.run()
self.assertEqual(
ti3.xcom_pull(task_ids='test_check_file', key='return_value').strip(),
b64encode(test_local_file_content).decode('utf-8'))
def test_pickle_file_transfer_get(self):
configuration.set("core", "enable_xcom_pickling", "True")
test_remote_file_content = \
"This is remote file content \n which is also multiline " \
"another line here \n this is last line. EOF"
# create a test file remotely
create_file_task = SSHOperator(
task_id="test_create_file",
ssh_hook=self.hook,
command="echo '{0}' > {1}".format(test_remote_file_content,
                                              self.test_remote_filepath),
do_xcom_push=True,
dag=self.dag
)
self.assertIsNotNone(create_file_task)
ti1 = TaskInstance(task=create_file_task, execution_date=datetime.now())
ti1.run()
# get remote file to local
get_test_task = SFTPOperator(
task_id="test_sftp",
ssh_hook=self.hook,
local_filepath=self.test_local_filepath,
remote_filepath=self.test_remote_filepath,
operation=SFTPOperation.GET,
dag=self.dag
)
self.assertIsNotNone(get_test_task)
ti2 = TaskInstance(task=get_test_task, execution_date=datetime.now())
ti2.run()
# test the received content
content_received = None
with open(self.test_local_filepath, 'r') as f:
content_received = f.read()
self.assertEqual(content_received.strip(), test_remote_file_content)
def test_json_file_transfer_get(self):
configuration.set("core", "enable_xcom_pickling", "False")
test_remote_file_content = \
"This is remote file content \n which is also multiline " \
"another line here \n this is last line. EOF"
# create a test file remotely
create_file_task = SSHOperator(
task_id="test_create_file",
ssh_hook=self.hook,
command="echo '{0}' > {1}".format(test_remote_file_content,
self.test_remote_filepath),
do_xcom_push=True,
dag=self.dag
)
self.assertIsNotNone(create_file_task)
ti1 = TaskInstance(task=create_file_task, execution_date=datetime.now())
ti1.run()
# get remote file to local
get_test_task = SFTPOperator(
task_id="test_sftp",
ssh_hook=self.hook,
local_filepath=self.test_local_filepath,
remote_filepath=self.test_remote_filepath,
operation=SFTPOperation.GET,
dag=self.dag
)
self.assertIsNotNone(get_test_task)
ti2 = TaskInstance(task=get_test_task, execution_date=datetime.now())
ti2.run()
# test the received content
        content_received = None
        with open(self.test_local_filepath, 'r') as f:
            content_received = f.read()
        self.assertEqual(content_received.strip(), test_remote_file_content)
|
IntelBUAP/Python3
|
codigo27.py
|
Python
|
gpl-2.0
| 388
| 0.033679
|
#! /usr/bin/python3
# -*- coding:utf-8 -*-
# Functions and arbitrary parameters
def funcion(**nombres):
print (type(nombres))
for alumno in nombres:
print ("%s es alumno y tiene %d años" % (alumno, nombres[alumno]))
    return nombres
#diccionario = {"Adrian":25, "Niño":25, "Roberto":23, "Celina":23}
print (funcion(Adrian = 25, Nino = 25, Roberto = 23, Celina = 23))
|
bram85/topydo
|
topydo/commands/AddCommand.py
|
Python
|
gpl-3.0
| 3,836
| 0.000261
|
# Topydo - A todo.txt client written in Python.
# Copyright (C) 2014 - 2015 Bram Schoenmakers <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" Provides the AddCommand class that implements the 'add' subcommand. """
import codecs
import re
from datetime import date
from os.path import expanduser
from sys import stdin
from topydo.lib.Config import config
from topydo.lib.prettyprinters.Numbers import PrettyPrinterNumbers
from topydo.lib.WriteCommand import WriteCommand
class AddCommand(WriteCommand):
def __init__(self, p_args, p_todolist, # pragma: no branch
p_out=lambda a: None,
p_err=lambda a: None,
p_prompt=lambda a: None):
super().__init__(
p_args, p_todolist, p_out, p_err, p_prompt)
self.text = ' '.join(p_args)
self.from_file = None
def _process_flags(self):
opts, args = self.getopt('f:')
for opt, value in opts:
if opt == '-f':
self.from_file = expanduser(value)
self.args = args
def get_todos_from_file(self):
if self.from_file == '-':
f = stdin
else:
f = codecs.open(self.from_file, 'r', encoding='utf-8')
todos = f.read().splitlines()
return todos
def _add_todo(self, p_todo_text):
def _preprocess_input_todo(p_todo_text):
"""
Pre-processes user input when adding a task.
It detects a priority mid-sentence and puts it at the start.
"""
todo_text = re.sub(r'^(.+) (\([A-Z]\))(.*)$', r'\2 \1\3',
p_todo_text)
return todo_text
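        # Illustrative example (comment added; not in the original source):
        # a line entered as
        #   add "Water flowers (C) tomorrow"
        # is rewritten by _preprocess_input_todo above to
        #   "(C) Water flowers tomorrow"
        # before the item is added to the list.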
todo_text = _preprocess_input_todo(p_todo_text)
todo = self.todolist.add(todo_text)
self.postprocess_input_todo(todo)
if config().auto_creation_date():
todo.set_creation_date(date.today())
self.out(self.printer.print_todo(todo))
def execute(self):
""" Adds a todo item to the list. """
if not super().execute():
return False
self.printer.add_filter(PrettyPrinterNumbers(self.todolist))
self._process_flags()
if self.from_file:
try:
new_todos = self.get_todos_from_file()
for todo in new_todos:
self._add_todo(todo)
except (IOError, OSError):
self.error('File not found: ' + self.from_file)
else:
if self.text:
self._add_todo(self.text)
else:
self.error(self.usage())
def usage(self):
return """Synopsis:
add <TEXT>
add -f <FILE> | -"""
def help(self):
return """\
This subcommand automatically adds the creation date to the added item.
TEXT may contain:
* Priorities mid-sentence. Example: add "Water flowers (C)"
* Dependencies using before, after, partof, parents-of and children-of tags.
These are translated to the corresponding 'id' and 'p' tags. The values of
these tags correspond to the todo number (not the dependency number).
Example: add "Subtask partof:1"
-f : Add todo items from specified FILE or from standard input.\
"""
|
hpcleuven/easybuild-framework
|
easybuild/toolchains/iompi.py
|
Python
|
gpl-2.0
| 1,514
| 0.001982
|
##
# Copyright 2012-2016 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for iompi compiler toolchain (includes Intel compilers (icc, ifort) and OpenMPI.
@author: Stijn De Weirdt (Ghent University)
@author: Kenneth Hoste (Ghent University)
"""
from easybuild.toolchains.iccifort import IccIfort
from easybuild.toolchains.mpi.openmpi import OpenMPI
class Iompi(IccIfort, OpenMPI):
"""
Compiler toolchain with Intel compilers (icc/ifort) and OpenMPI.
"""
NAME = 'iompi'
SUBTOOLCHAIN = IccIfort.NAME
|
wasit7/tutorials
|
django/Pieng/myclass/myclass/settings.py
|
Python
|
mit
| 3,183
| 0.001257
|
"""
Django settings for myclass project.
Generated by 'django-admin startproject' using Django 1.9.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'n^qif^$w3ooxd1m5&6ir7m^fy%3oq@s+d&pxyut32upkgzbg&4'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'myquiz',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'myclass.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'myclass.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
|
solefaucet/sole-server
|
fabfile.py
|
Python
|
mit
| 1,674
| 0.005376
|
from fabric.api import env
from fabric.context_managers import cd
from fabric.operations import run, local, put
env.shell = '/bin/bash -l -c'
env.user = 'd'
env.roledefs.update({
'staging': ['staging.solebtc.com'],
'production': ['solebtc.com']
})
# Heaven will execute fab -R staging deploy:branch_name=master
def deploy(branch_name):
deployProduction(branch_name) if env.roles[0] == 'production' else deployStaging(branch_name)
def deployStaging(branch_name):
printMessage("staging")
codedir = '$GOPATH/src/github.com/solefaucet/solebtc'
run('rm -rf %s' % codedir)
run('mkdir -p %s' % codedir)
local('git archive --format=tar --output=/tmp/archive.tar %s' % branch_name)
local('ls /tmp')
put('/tmp/archive.tar', '~/')
local('rm /tmp/archive.tar')
run('mv archive.tar %s' % codedir)
with cd(codedir):
run('tar xf archive.tar')
run('go build -o solebtc')
# mv doc to nginx root
run('mv apidoc/v1.json /usr/share/nginx/html/doc')
# database version control
run("mysql -e 'create database if not exists sol
|
ebtc_prod';")
run('go get bitbucket.org/liamstask/goose/cmd/goose')
run('goose -env production up')
# restart solebtc service with supervisorctl
run('supervisorctl restart solebtc')
def deployProduction(branch_name):
printMessage("production")
# TODO
# scp executable file from staging to production, database up, restart service
    # mark current timestamp or commit as version number so we can rollback easily
def printMessage(server):
print("Deploying to %s server at %s as %s" % (server, env.host, env.user))
|
sdu14SoftwareEngineering/GameOfLife_WEB
|
game/method/ready_game.py
|
Python
|
apache-2.0
| 3,728
| 0
|
from game import models
from game.method.in_game import thread_fields, Thread_field
from game.tool.room_tool import *
from game.tool.tools import to_json
# Get room info in real time; params: room_id
def get_room_info(request):
room_id = int(request.POST['room_id'])
room = get_room_by_id(room_id)
print(room_id)
print(room.users_status)
users_array = []
    for u_id in room.users:
find_user = models.User.objects.filter(id=u_id)
if find_user:
find_user = find_user[0]
            u_dict = {
'user_id': find_user.id,
'user_name': find_user.username,
'win': find_user.win,
'fail': find_user.fail,
'user_status': room.users_status[u_id]
}
users_array.append(u_dict)
    # Result
response = {
'status': room.status,
'owner': room.owner,
'users': users_array
}
print(response)
return to_json(response)
# Toggle a user's ready status; params: room_id, user_id
def change_user_status(request):
user_id = int(request.POST['user_id'])
room_id = int(request.POST['room_id'])
room = get_room_by_id(room_id)
u_status = room.users_status[user_id]
room.users_status[user_id] = not u_status
return to_json({'response_code': 1, 'user_status': not u_status})
# Room owner starts the game; params: user_id, room_id
def begin_game(request):
user_id = int(request.POST['user_id'])
room_id = int(request.POST['room_id'])
room = get_room_by_id(room_id)
if user_id == room.owner:
f = True
for u_id in room.users:
if u_id != room.owner and not room.users_status[u_id]:
f = False
if f:
room.users_status[user_id] = True
room.status = True
            # Thread that computes the board layout; store the thread
thread_fields[room_id] = Thread_field(room.users, room_id)
thread_fields[room_id].start()
return to_json({'response_code': 1})
else:
return to_json({'response_code': -1})
else:
return to_json({'response_code': -1})
# # User ready; params: user_id, room_id
# def user_ready(request):
# user_id = request.POST.get('user_id')
# room_id = request.POST.get('room_id')
# # Find this room
# room = get_room_by_id(room_id)
# room.users_status[user_id] = True
#
#
# # User cancels ready; params: user_id, room_id
# def user_cancel_ready(request):
# user_id = request.POST.get('user_id')
# room_id = request.POST.get('room_id')
# # Find this room
# room = get_room_by_id(room_id)
# room.users_status[user_id] = False
#
#
# # Start the game; params: owner_id, room_id
# def owner_begin(request):
# owner_id = request.POST.get('owner_id')
# room_id = request.POST.get('room_id')
# # Find this room
# room = get_room_by_id(room_id)
# room.users_status[owner_id] = True
# all_ready = True
# if room_id == room.owner:
# for u in room.users:
# if not room.users_status[u]:
# all_ready = False
# break
# if all_ready:
# # Everyone is ready
# room.status = True
# return 0
# else:
# # Someone is not ready
# return 0
# else:
# # This user is not the room owner
# return 0
#
#
# # Check whether the game has started; params: room_id
# def check_room_status(request):
# room_id = request.POST.get('room_id')
# # Find this room
# room = get_room_by_id(room_id)
# if room.status:
# # Already started
# return 0
# else:
# # Not started yet
# return 0
|
Shouqun/node-gn
|
tools/depot_tools/tests/gclient_test.py
|
Python
|
mit
| 40,396
| 0.003688
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for gclient.py.
See gclient_smoketest.py for integration tests.
"""
import Queue
import copy
import logging
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import gclient
import gclient_utils
import gclient_scm
from testing_support import trial_dir
def write(filename, content):
"""Writes the content of a file and create the directories as needed."""
filename = os.path.abspath(filename)
dirname = os.path.dirname(filename)
if not os.path.isdir(dirname):
os.makedirs(dirname)
with open(filename, 'w') as f:
f.write(content)
class SCMMock(object):
def __init__(self, unit_test, name, url):
self.unit_test = unit_test
self.name = name
self.url = url
def RunCommand(self, command, options, args, file_list):
self.unit_test.assertEquals('None', command)
self.unit_test.processed.put((self.name, self.url))
def FullUrlForRelativeUrl(self, url):
return self.url + url
# pylint: disable=no-self-use
def DoesRemoteURLMatch(self, _):
return True
def GetActualRemoteURL(self, _):
return self.url
class GclientTest(trial_dir.TestCase):
def setUp(self):
super(GclientTest, self).setUp()
self.processed = Queue.Queue()
self.previous_dir = os.getcwd()
os.chdir(self.root_dir)
# Manual mocks.
self._old_createscm = gclient.Dependency.CreateSCM
gclient.Dependency.CreateSCM = self._createscm
self._old_sys_stdout = sys.stdout
sys.stdout = gclient.gclient_utils.MakeFileAutoFlush(sys.stdout)
sys.stdout = gclient.gclient_utils.MakeFileAnnotated(sys.stdout)
def tearDown(self):
self.assertEquals([], self._get_processed())
gclient.Dependency.CreateSCM = self._old_createscm
sys.stdout = self._old_sys_stdout
os.chdir(self.previous_dir)
super(GclientTest, self).tearDown()
def _createscm(self, parsed_url, root_dir, name, out_fh=None, out_cb=None):
self.assertTrue(parsed_url.startswith('svn://example.com/'), parsed_url)
self.assertTrue(root_dir.startswith(self.root_dir), root_dir)
return SCMMock(self, name, parsed_url)
def testDependencies(self):
self._dependencies('1')
def testDependenciesJobs(self):
self._dependencies('1000')
def _dependencies(self, jobs):
"""Verifies that dependencies are processed in the right order.
e.g. if there is a dependency 'src' and another 'src/third_party/bar', that
bar isn't fetched until 'src' is done.
Args:
|jobs| is the number of parallel jobs simulated.
"""
parser = gclient.OptionParser()
options, args = parser.parse_args(['--jobs', jobs])
write(
'.gclient',
'solutions = [\n'
' { "name": "foo", "url": "svn://example.com/foo" },\n'
' { "name": "bar", "url": "svn://example.com/bar" },\n'
' { "name": "bar/empty", "url": "svn://example.com/bar_empty" },\n'
']')
write(
os.path.join('foo', 'DEPS'),
'deps = {\n'
' "foo/dir1": "/dir1",\n'
# This one will depend on dir1/dir2 in bar.
' "foo/dir1/dir2/dir3": "/dir1/dir2/dir3",\n'
' "foo/dir1/dir2/dir3/dir4": "/dir1/dir2/dir3/dir4",\n'
'}')
write(
os.path.join('bar', 'DEPS'),
'deps = {\n'
# There is two foo/dir1/dir2. This one is fetched as bar/dir1/dir2.
' "foo/dir1/dir2": "/dir1/dir2",\n'
'}')
write(
os.path.join('bar/empty', 'DEPS'),
'deps = {\n'
'}')
obj = gclient.GClient.LoadCurrentConfig(options)
self._check_requirements(obj.dependencies[0], {})
self._check_requirements(obj.dependencies[1], {})
obj.RunOnDeps('None', args)
actual = self._get_processed()
first_3 = [
('bar', 'svn://example.com/bar'),
('bar/empty', 'svn://example.com/bar_empty'),
('foo', 'svn://example.com/foo'),
]
if jobs != 1:
# We don't care of the ordering of these items except that bar must be
# before bar/empty.
self.assertTrue(
actual.index(('bar', 'svn://example.com/bar')) <
actual.index(('bar/empty', 'svn://example.com/bar_empty')))
self.assertEquals(first_3, sorted(actual[0:3]))
else:
self.assertEquals(first_3, actual[0:3])
self.assertEquals(
[
('foo/dir1', 'svn://example.com/foo/dir1'),
('foo/dir1/dir2', 'svn://example.com/bar/dir1/dir2'),
('foo/dir1/dir2/dir3', 'svn://example.com/foo/dir1/dir2/dir3'),
('foo/dir1/dir2/dir3/dir4',
'svn://example.com/foo/dir1/dir2/dir3/dir4'),
],
actual[3:])
self.assertEquals(3, len(obj.dependencies))
self.assertEquals('foo', obj.dependencies[0].name)
self.assertEquals('bar', obj.dependencies[1].name)
    self.assertEquals('bar/empty', obj.dependencies[2].name)
self._check_requirements(
obj.dependencies[0],
{
'foo/dir1': ['bar', 'bar/empty', 'foo'],
'foo/dir1/dir2/dir3':
['bar', 'bar/empty', 'foo', 'foo/dir1', 'foo/dir1/dir2'],
'foo/dir1/dir2/dir3/dir4':
[
'bar', 'bar/empty', 'foo', 'foo/dir1', 'foo/dir1/dir2',
'foo/dir1/dir2/dir3'],
})
self._check_requirements(
obj.dependencies[1],
{
'foo/dir1/dir2': ['bar', 'bar/empty', 'foo', 'foo/dir1'],
})
self._check_requirements(
obj.dependencies[2],
{})
self._check_requirements(
obj,
{
'foo': [],
'bar': [],
'bar/empty': ['bar'],
})
def _check_requirements(self, solution, expected):
for dependency in solution.dependencies:
e = expected.pop(dependency.name)
a = sorted(dependency.requirements)
self.assertEquals(e, a, (dependency.name, e, a))
self.assertEquals({}, expected)
def _get_processed(self):
"""Retrieves the item in the order they were processed."""
items = []
try:
while True:
items.append(self.processed.get_nowait())
except Queue.Empty:
pass
return items
def testAutofix(self):
    # Invalid urls cause pain when specifying requirements. Make sure it's
# auto-fixed.
url = 'proto://host/path/@revision'
d = gclient.Dependency(
None, 'name', url, url, None, None, None,
None, '', True, False, None, True)
self.assertEquals('proto://host/path@revision', d.url)
def testStr(self):
parser = gclient.OptionParser()
options, _ = parser.parse_args([])
obj = gclient.GClient('foo', options)
obj.add_dependencies_and_close(
[
gclient.Dependency(
obj, 'foo', 'raw_url', 'url', None, None, None, None, 'DEPS', True,
False, None, True),
gclient.Dependency(
obj, 'bar', 'raw_url', 'url', None, None, None, None, 'DEPS', True,
False, None, True),
],
[])
obj.dependencies[0].add_dependencies_and_close(
[
gclient.Dependency(
obj.dependencies[0], 'foo/dir1', 'raw_url', 'url', None, None, None,
None, 'DEPS', True, False, None, True),
],
[])
# Make sure __str__() works fine.
# pylint: disable=protected-access
obj.dependencies[0]._file_list.append('foo')
str_obj = str(obj)
self.assertEquals(263, len(str_obj), '%d\n%s' % (len(str_obj), str_obj))
def testHooks(self):
topdir = self.root_dir
gclient_fn = os.path.join(topdir, '.gclient')
fh = open(gclient_fn, 'w')
print >> fh, 'solutions = [{"name":"top","url":"svn://example.com/top"}]'
fh.close()
subdir_fn = os.path.join(topdir, 'top')
os.mkdir(subdir_fn)
deps_fn = os.path.join(subdir_fn, 'DEPS')
fh = open(deps_fn, 'w')
hooks = [{'pattern':'.', 'action':['cmd1', 'arg1', 'arg2']}]
print >> fh, 'hooks = %s' % repr(hooks)
fh.close()
fh = open(os.path.join(subdir_fn, 'fake.txt'), 'w')
print >> fh, 'bogus content'
fh.close()
os.ch
|
limingzju/ClearCacheServer
|
server.py
|
Python
|
bsd-2-clause
| 617
| 0.024311
|
from os import system
import xmlrpclib
from SimpleXMLRPCServer import SimpleXMLRPCServer
def clear_buffer_cache():
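    # Note added for clarity (not in the original file): writing 3 to
    # /proc/sys/vm/drop_caches asks the kernel to drop the page cache plus
    # dentries and inodes. The sed commands below flip the stored value to 3
    # (drop) and back to 0, and the surrounding sync/free calls flush dirty
    # pages and show the effect on free memory.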
system('free -g')
system('sync')
system("sudo sed -n 's/0/3/w /proc/sys/vm/drop_caches' /proc/sys/vm/drop_caches")
system('sync')
system("sudo sed -n 's/3/0/w /proc/sys/vm/drop_caches' /proc/sys/vm/drop_caches")
system('free -g')
return True
def is_even(n):
return n%2 == 0
server = SimpleXMLRPCServer(('0.0.0.0', 8888))
print 'Listening on port 8888...'
server.register_function(clear_buffer_cache, 'clear_buffer_cache')
server.register_function(is_even, 'is_even')
server.serve_forever()
|
joshzarrabi/e-mission-server
|
emission/tests/client_tests/TestGamified.py
|
Python
|
bsd-3-clause
| 7,135
| 0.004905
|
# Standard imports
import unittest
import json
import logging
from datetime import datetime, timedelta
# Our imports
from emission.clients.gamified import gamified
from emission.core.get_database import get_db, get_mode_db, get_section_db
from emission.core.wrapper.user import User
from emission.core.wrapper.client import Client
import emission.tests.common
logging.basicConfig(level=logging.DEBUG)
class TestGamified(unittest.TestCase):
def setUp(self):
import emission.tests.common
from copy import copy
self.testUsers = ["[email protected]", "[email protected]", "[email protected]",
"[email protected]", "[email protected]"]
self.serverName = 'localhost'
# Sometimes, we may have entries left behind in the database if one of the tests failed
# or threw an exception, so let us start by cleaning up all entries
emission.tests.common.dropAllCollections(get_db())
self.ModesColl = get_mode_db()
self.assertEquals(self.ModesColl.find().count(), 0)
self.setupUserAndClient()
emission.tests.common.loadTable(self.serverName, "Stage_Modes", "emission/tests/data/modes.json")
        emission.tests.common.loadTable(self.serverName, "Stage_Sections", "emission/tests/data/testCarbonFile")
        self.SectionsColl = get_section_db()
self.walkExpect = 1057.2524056424411
self.busExpect = 2162.668467546699
self.busCarbon = 267.0/1609
self.airCarbon = 217.0/1609
self.driveCarbon = 278.0/1609
self.busOptimalCarbon = 92.0/1609
self.allDriveExpect = (self.busExpect * self.driveCarbon + self.walkExpect * self.driveCarbon)/1000
self.myFootprintExpect = float(self.busExpect * self.busCarbon)/1000
self.sb375GoalExpect = 40.142892/7
self.mineMinusOptimalExpect = 0
self.allDriveMinusMineExpect = float(self.allDriveExpect - self.myFootprintExpect)/self.allDriveExpect
self.sb375DailyGoalMinusMineExpect = float(self.sb375GoalExpect - self.myFootprintExpect)/self.sb375GoalExpect
self.now = datetime.now()
self.twodaysago = self.now - timedelta(days=2)
self.weekago = self.now - timedelta(weeks = 1)
for section in self.SectionsColl.find():
section['section_start_datetime'] = self.twodaysago
section['section_end_datetime'] = self.twodaysago + timedelta(hours = 1)
section['predicted_mode'] = {'walking': 1.0}
if section['user_id'] == '[email protected]':
logging.debug("Setting user_id for section %s, %s = %s" %
(section['trip_id'], section['section_id'], self.user.uuid))
section['user_id'] = self.user.uuid
if section['confirmed_mode'] == 5:
airSection = copy(section)
airSection['confirmed_mode'] = 9
airSection['_id'] = section['_id'] + "_air"
self.SectionsColl.insert(airSection)
airSection['confirmed_mode'] = ''
airSection['_id'] = section['_id'] + "_unconf"
self.SectionsColl.insert(airSection)
# print("Section start = %s, section end = %s" %
# (section['section_start_datetime'], section['section_end_datetime']))
self.SectionsColl.save(section)
def setupUserAndClient(self):
# At this point, the more important test is to execute the query and see
# how well it works
fakeEmail = "[email protected]"
client = Client("gamified")
client.update(createKey = False)
emission.tests.common.makeValid(client)
(resultPre, resultReg) = client.preRegister("this_is_the_super_secret_id", fakeEmail)
studyList = Client.getPendingClientRegs(fakeEmail)
self.assertEqual(studyList, ["gamified"])
user = User.register("[email protected]")
self.assertEqual(user.getFirstStudy(), 'gamified')
self.user = user
def testGetScoreComponents(self):
components = gamified.getScoreComponents(self.user.uuid, self.weekago, self.now)
self.assertEqual(components[0], 0.75)
# bus_short disappears in optimal, air_short disappears as long motorized, so optimal = 0
# self.assertEqual(components[1], (self.busExpect * self.busCarbon) / 1000)
# TODO: Figure out what we should do when optimal == 0. Currently, we
# return 0, which seems sub-optimal (pun intended)
self.assertEqual(components[1], 0.0)
# air_short disappears as long motorized, but we need to consider walking
self.assertAlmostEqual(components[2], self.allDriveMinusMineExpect, places=4)
# air_short disappears as long motorized, so only bus_short is left
self.assertAlmostEqual(components[3], self.sb375DailyGoalMinusMineExpect, places = 4)
# Checks both calcScore and updateScore, since we calculate the score before we update it
def testUpdateScore(self):
self.assertEqual(gamified.getStoredScore(self.user), (0, 0))
components = gamified.updateScore(self.user.uuid)
print "self.allDriveMinusMineExpect = %s, self.sb375DailyGoalMinusMineExpect = %s" % \
(self.allDriveMinusMineExpect, self.sb375DailyGoalMinusMineExpect)
expectedScore = 0.75 * 50 + 30 * self.allDriveMinusMineExpect + 20 * 0.0 + \
10 * self.sb375DailyGoalMinusMineExpect
storedScore = gamified.getStoredScore(self.user)
self.assertEqual(storedScore[0], 0)
self.assertAlmostEqual(storedScore[1], expectedScore, 6)
def testGetLevel(self):
self.assertEqual(gamified.getLevel(0), (1, 1))
self.assertEqual(gamified.getLevel(11.0), (1, 1))
self.assertEqual(gamified.getLevel(21.0), (1, 2))
self.assertEqual(gamified.getLevel(100), (2, 1))
self.assertEqual(gamified.getLevel(199.0), (2, 1))
self.assertEqual(gamified.getLevel(200), (2, 2))
self.assertEqual(gamified.getLevel(201.0), (2, 2))
self.assertEqual(gamified.getLevel(999), (2, 5))
self.assertEqual(gamified.getLevel(1000), (3, 1))
self.assertEqual(gamified.getLevel(9999.0), (3, 5))
self.assertEqual(gamified.getLevel(10000), (3, 5))
self.assertEqual(gamified.getLevel(100000), (3, 5))
def testGetFileName(self):
self.assertEqual(gamified.getFileName(1, 1), "level_1_1.png")
self.assertEqual(gamified.getFileName(1.0, 2.0), "level_1_2.png")
self.assertEqual(gamified.getFileName(1.055, 2), "level_1_2.png")
def testRunBackgroundTasksForDay(self):
self.assertEqual(gamified.getStoredScore(self.user), (0, 0))
components = gamified.runBackgroundTasks(self.user.uuid)
expectedScore = 0.75 * 50 + 30 * self.allDriveMinusMineExpect + 20 * 0.0 + \
10 * self.sb375DailyGoalMinusMineExpect
storedScore = gamified.getStoredScore(self.user)
self.assertEqual(storedScore[0], 0)
self.assertAlmostEqual(storedScore[1], expectedScore, 6)
if __name__ == '__main__':
unittest.main()
|
buckbaskin/Insight
|
service1/app/__init__.py
|
Python
|
apache-2.0
| 125
| 0.008
|
from flask import Flask
server = Flask(__name__)
server.config['SERVER_NAME'] = '127.0.0.1:5001'
from app import endpoints
|
verdimrc/nfldb
|
nfldb/sql.py
|
Python
|
unlicense
| 15,501
| 0.000129
|
from __future__ import absolute_import, division, print_function
from nfldb.db import _upsert
class Entity (object):
"""
This is an abstract base class that handles most of the SQL
plumbing for entities in `nfldb`. Its interface is meant to be
declarative: specify the schema and let the methods defined here
do the SQL generation work. However, it is possible to override
methods (like `nfldb.Entity._sql_field`) when more customization
is desired.
Note that many of the methods defined here take an `aliases`
argument. This should be a dictionary mapping table name (defined
in `nfldb.Entity._sql_tables`) to some custom prefix. If it
isn't provided, then the table name itself is used.
"""
# This class doesn't introduce any instance variables, but we need
# to declare as such, otherwise all subclasses will wind up with a
# `__dict__`. (Thereby negating the benefit of using __slots__.)
__slots__ = []
_sql_tables = {}
"""
A dictionary with four keys: `primary`, `tables`, `managed` and
`derived`.
The `primary` key should map to a list of primary key
fields that correspond to a shared minimal subset of primary keys
in all tables that represent this entity. (i.e., It should be the
foreign key that joins all tables in the representation together.)
The `tables` key should map to an association list of table names
that map to lists of fields for that table. The lists of fields for
every table should be *disjoint*: no two tables may share a field
name in common (outside of the primary key).
The `managed` key should be a list of tables that are managed
directly by `nfldb`. `INSERT`, `UPDATE` and `DELETE` queries
will be generated appropriately. (Tables not in this list are
assumed to be maintained by the database itself, e.g., they are
actually views or materialized views maintained by triggers.)
The `derived` key should map to a list of *computed* fields. These
are fields that aren't directly stored in the table, but can be
computed from combining columns in the table (like `offense_tds` or
`points`). This API will expose such fields as regular SQL columns
in the API, and will handle writing them for you in `WHERE` and
`ORDER BY` statements. The actual implementation of each computed
    field should be in an entity's `_sql_field` method (overriding the
one defined on `nfldb.Entity`). The derived fields must be listed
here so that the SQL generation code is aware of them.
"""
@classmethod
def _sql_columns(cls):
"""
Returns all columns defined for this entity. Every field
corresponds to a single column in a table.
The first `N` columns returned correspond to this entity's
primary key, where `N` is the number of columns in the
primary key.
"""
cols = cls._sql_tables['primary'][:]
for table, table_cols in cls._sql_tables['tables']:
cols += table_cols
return cols
@classmethod
def sql_fields(cls):
"""
Returns a list of all SQL fields across all tables for this
entity, including derived fields. This method can be used
in conjunction with `nfldb.Entity.from_row_tuple` to quickly
        create new `nfldb` objects without ever constructing a dict.
"""
if not hasattr(cls, '_cached_sql_fields'):
cls._cached_sql_fields = cls._sql_columns()
cls._cached_sql_fields += cls._sql_tables['derived']
return cls._cached_sql_fields
@classmethod
def from_row_dict(cls, db, row):
"""
Introduces a new entity object from a full SQL row result from
the entity's tables. (i.e., `row` is a dictionary mapping
column to value.) Note that the column names must be of the
form '{entity_name}_{column_name}'. For example, in the `game`
table, the `gsis_id` column must be named `game_gsis_id` in
`row`.
"""
obj = cls(db)
seta = setattr
prefix = cls._sql_primary_table() + '_'
slice_from = len(prefix)
for k in row:
if k.startswith(prefix):
seta(obj, k[slice_from:], row[k])
return obj
@classmethod
def from_row_tuple(cls, db, t):
"""
Given a tuple `t` corresponding to a result from a SELECT query,
this will construct a new instance for this entity. Note that
the tuple `t` must be in *exact* correspondence with the columns
returned by `nfldb.Entity.sql_fields`.
"""
cols = cls.sql_fields()
seta = setattr
obj = cls(db)
for i, field in enumerate(cols):
seta(obj, field, t[i])
return obj
@classmethod
def _sql_from(cls, aliases=None):
"""
Return a valid SQL `FROM table AS alias [LEFT JOIN extra_table
...]` string for this entity.
"""
# This is a little hokey. Pick the first table as the 'FROM' table.
# Subsequent tables are joined.
from_table = cls._sql_primary_table()
as_from_table = cls._sql_table_alias(from_table, aliases)
extra_tables = ''
for table, _ in cls._sql_tables['tables'][1:]:
extra_tables += cls._sql_join_to(cls,
from_table=from_table,
to_table=table,
from_aliases=aliases,
to_aliases=aliases)
return '''
FROM {from_table} AS {as_from_table}
{extra_tables}
'''.format(from_table=from_table, as_from_table=as_from_table,
extra_tables=extra_tables)
@classmethod
def _sql_select_fields(cls, fields, wrap=None, aliases=None):
"""
Returns correctly qualified SELECT expressions for each
field in `fields` (namely, a field may be a derived field).
If `wrap` is a not `None`, then it is applied to the result
of calling `cls._sql_field` on each element in `fields`.
All resulting fields are aliased with `AS` to correspond to
the name given in `fields`. Namely, this makes table aliases
opaque to the resulting query, but this also disallows
selecting columns of the same name from multiple tables.
"""
if wrap is None:
wrap = lambda x: x
sql = lambda f: wrap(cls._sql_field(f, aliases=aliases))
entity_prefix = cls._sql_primary_table()
return ['%s AS %s_%s' % (sql(f), entity_prefix, f) for f in fields]
@classmethod
def _sql_relation_distance(cls_from, cls_to):
primf = set(cls_from._sql_tables['primary'])
primt = set(cls_to._sql_tables['primary'])
if len(primf.intersection(primt)) == 0:
return None
outsiders = primf.difference(primt).union(primt.difference(primf))
if len(primf) > len(primt):
return -len(outsiders)
else:
return len(outsiders)
@classmethod
def _sql_join_all(cls_from, cls_tos):
"""
Given a list of sub classes `cls_tos` of `nfldb.Entity`,
produce as many SQL `LEFT JOIN` clauses as is necessary so
that all fields in all entity types given are available for
filtering.
Unlike the other join functions, this one has no alias support
or support for controlling particular tables.
The key contribution of this function is that it knows how to
connect a group of tables correctly. e.g., If the group of
tables is `game`, `play` and `play_player`, then `game` and
`play` will be joined and `play` and `play_player` will be
joined. (Instead of `game` and `play_player` or some other
        erroneous combination.)
In essence, each table is joined with the least general table
in the group.
"""
assert cls_from not in cls_tos, \
'cannot join %s with itself with `sql_join_all`' % cls_
|
apatriciu/OpenStackOpenCL
|
computeOpenCL/nova/contrib/OpenCLServer/OpenCLServer.py
|
Python
|
apache-2.0
| 23,459
| 0.01006
|
from queues import queue_opencl_devices
from queues import queue_opencl_contexts
from queues import queue_opencl_buffers
from queues import queue_opencl_programs
from queues import queue_opencl_kernels
from queues import queue_opencl_command_queues
from queues import queue_opencl_notify
import sys
from oslo.config import cfg
import PyOpenCLInterface
from kombu.mixins import ConsumerMixin
import binascii
def DispatchDevices(method, args):
if method == 'ListDevices':
nErr = 0
try:
result = PyOpenCLInterface.ListDevices()
except:
nErr = -128
return result
if method == 'GetDeviceProperties':
nid = int(args['id'])
try:
result = PyOpenCLInterface.GetDeviceProperties(nid)
except:
nErr = -128
DeviceProperties = {}
return (DeviceProperties, nErr)
return result
return -128
def DispatchContexts(method, args):
if method == 'ListContexts':
nErr = 0
try:
result = PyOpenCLInterface.ListContexts()
except:
nErr = -128
return (result, nErr)
if method == 'GetContextProperties':
nid = int(args['id'])
try:
result = PyOpenCLInterface.GetContextProperties(nid)
except:
nErr = -128
ContextProperties = {}
return (ContextProperties, nErr)
return result
if method == 'CreateContext':
try:
listDevices = args['Devices']
properties = args['Properties']
result = PyOpenCLInterface.CreateContext(listDevices, properties)
except:
return -128
return result
if method == 'ReleaseContext':
nid = int(args['id'])
try:
result = PyOpenCLInterface.ReleaseContext(nid)
except:
return -128
return result
if method == 'RetainContext':
nid = int(args['id'])
try:
result = PyOpenCLInterface.RetainContext(nid)
except:
return -128
return result
return -128
def DispatchBuffers(method, args):
if method == 'ListBuffers':
nErr = 0
try:
result = PyOpenCLInterface.ListBuffers()
except:
print "DISPATCHBUFFERS : Exception caught ListBuffers"
nErr = -128
return (result, nErr)
if method == 'GetBufferProperties':
nid = int(args['id'])
try:
result = PyOpenCLInterface.GetBufferProperties(nid)
except:
nErr = -128
BufferProperties = {}
return (BufferProperties, nErr)
return result
if method == 'CreateBuffer':
try:
context = int(args['Context'])
size = int(args['Size'])
properties = args['Properties']
result = PyOpenCLInterface.CreateBuffer(context, size, properties)
except:
print "DISPATCHBUFFERS.CreateBuffer Exception Caught : %s" % sys.exc_info()[0]
return -128
return result
if method == 'ReleaseBuffer':
nid = int(args['id'])
try:
result = PyOpenCLInterface.ReleaseBuffer(nid)
except:
return -128
return result
if method == 'RetainBuffer':
nid = int(args['id'])
try:
result = PyOpenCLInterface.RetainBuffer(nid)
except:
return -128
return result
print "DISPATCHBUFFERS : Unknown Method"
return -128
def DispatchPrograms(method, args):
if method == 'ListPrograms':
nErr = 0
try:
result = PyOpenCLInterface.ListPrograms()
except:
nErr = -128
return (result, nErr)
    if method == 'GetProgramProperties':
try:
nid = int(args['id'])
result = PyOpenCLInterface.GetProgramProperties(nid)
except:
print "Exception caught in DispatchPrograms.%s " % method
print "Exception info %s " % sys.exc_info()[0]
nErr = -128
Properties = {}
return (Properties, nErr)
return result
if method == 'CreateProgram':
try:
context = int(args['Context'])
programStringsList = args['ProgramStrings']
programStrings = []
for stru in programStringsList:
programStrings.append(str(stru))
result = PyOpenCLInterface.CreateProgram(context, programStrings)
except:
print "Exception caught in DispatchPrograms.%s " % method
print "Exception info %s " % sys.exc_info()[0]
return -128
return result
if method == 'ReleaseProgram':
try:
nid = int(args['id'])
result = PyOpenCLInterface.ReleaseProgram(nid)
except:
print "Exception caught in DispatchPrograms.%s " % method
print "Exception info %s " % sys.exc_info()[0]
return -128
return result
if method == 'RetainProgram':
try:
nid = int(args['id'])
result = PyOpenCLInterface.RetainProgram(nid)
except:
print "Exception caught in DispatchPrograms.%s " % method
print "Exception info %s " % sys.exc_info()[0]
return -128
return result
if method == 'BuildProgram':
try:
nid = int(args['id'])
listDevices = args['Devices']
buildOptions = args['Options']
result = PyOpenCLInterface.BuildProgram(nid, listDevices, buildOptions)
except:
print "Exception caught in DispatchPrograms.%s " % method
print "Exception info %s " % sys.exc_info()[0]
return -128
return result
if method == 'GetProgramBuildInfo':
try:
nid = int(args['id'])
device = int(args['Device'])
buildInfo = args['BuildInfo']
result = PyOpenCLInterface.GetProgramBuildInfo(nid, device, buildInfo)
except:
print "Exception caught in DispatchPrograms.%s " % method
print "Exception info %s " % sys.exc_info()[0]
return -128
return result
print "DISPATCHPROGRAMS : Unknown Method"
return -128
def DispatchKernels(method, args):
if method == 'ListKernels':
nErr = 0
try:
result = PyOpenCLInterface.ListKernels()
except:
nErr = -128
return (result, nErr)
if method == 'GetKernelProperties':
try:
nid = int(args['id'])
result = PyOpenCLInterface.GetKernelProperties(nid)
except:
print "Exception caught in DispatchKernels.%s " % method
print "Exception info %s " % sys.exc_info()[0]
nErr = -128
Properties = {}
return (Properties, nErr)
return result
if method == 'CreateKernel':
try:
program = int(args['Program'])
kernel_name = str(args['KernelName'])
result = PyOpenCLInterface.CreateKernel(program, kernel_name)
except:
print "Exception caught in DispatchKernels.%s " % method
print "Exception info %s " % sys.exc_info()[0]
return -128
return result
if method == 'ReleaseKernel':
try:
nid = int(args['id'])
result = PyOpenCLInterface.ReleaseKernel(nid)
except:
print "Exception caught in DispatchKernels.%s " % method
print "Exception info %s " % sys.exc_info()[0]
return -128
return result
if method == 'RetainKernel':
try:
nid = int(args['id'])
result = PyOpenCLInterface.RetainKernel(nid)
except:
print "Exception caught in DispatchKernel.%s " % method
print "Exception info %s " % sys.exc_info()[0]
return -128
return result
if method == 'KernelSetArgument':
try:
nid = int(args['id'])
paramIndex = int(args['ParamIndex'])
|
destenson/bitcoin--bitcoin
|
contrib/devtools/github-merge.py
|
Python
|
mit
| 14,113
| 0.014667
|
#!/usr/bin/env python3
# Copyright (c) 2016-2017 Bitcoin Core Developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# This script will locally construct a merge commit for a pull request on a
# github repository, inspect it, sign it and optionally push it.
# The following temporary branches are created/overwritten and deleted:
# * pull/$PULL/base (the current master we're merging onto)
# * pull/$PULL/head (the current state of the remote pull request)
# * pull/$PULL/merge (github's merge)
# * pull/$PULL/local-merge (our merge)
# In case of a clean merge that is accepted by the user, the local branch with
# name $BRANCH is overwritten with the merged result, and optionally pushed.
from __future__ import division,print_function,unicode_literals
import os
from sys import stdin,stdout,stderr
import argparse
import hashlib
import subprocess
import sys
import json,codecs
try:
from urllib.request import Request,urlopen
except:
from urllib2 import Request,urlopen
# External tools (can be overridden using environment)
GIT = os.getenv('GIT','git')
BASH = os.getenv('BASH','bash')
# OS specific configuration for terminal attributes
ATTR_RESET = ''
ATTR_PR = ''
COMMIT_FORMAT = '%h %s (%an)%d'
if os.name == 'posix': # if posix, assume we can use basic terminal escapes
ATTR_RESET = '\033[0m'
ATTR_PR = '\033[1;36m'
COMMIT_FORMAT = '%C(bold blue)%h%Creset %s %C(cyan)(%an)%Creset%C(green)%d%Creset'
def git_config_get(option, default=None):
'''
Get named configuration option from git repository.
'''
try:
return subprocess.check_output([GIT,'config','--get',option]).rstrip().decode('utf-8')
except subprocess.CalledProcessError as e:
return default
def retrieve_pr_info(repo,pull):
'''
Retrieve pull request information from github.
Return None if no title can be found, or an error happens.
'''
try:
req = Request("https://api.github.com/repos/"+repo+"/pulls/"+pull)
result = urlopen(req)
reader = codecs.getreader('utf-8')
obj = json.load(reader(result))
return obj
except Exception as e:
print('Warning: unable to retrieve pull information from github: %s' % e)
return None
def ask_prompt(text):
print(text,end=" ",file=stderr)
stderr.flush()
reply = stdin.readline().rstrip()
print("",file=stderr)
return reply
def get_symlink_files():
files = sorted(subprocess.check_output([GIT, 'ls-tree', '--full-tree', '-r', 'HEAD']).splitlines())
ret = []
for f in files:
if (int(f.decode('utf-8').split(" ")[0], 8) & 0o170000) == 0o120000:
ret.append(f.decode('utf-8').split("\t")[1])
return ret
def tree_sha512sum(commit='HEAD'):
# request metadata for entire tree, recursively
files = []
blob_by_name = {}
for line in subprocess.check_output([GIT, 'ls-tree', '--full-tree', '-r', commit]).splitlines():
name_sep = line.index(b'\t')
metadata = line[:name_sep].split() # perms, 'blob', blobid
assert(metadata[1] == b'blob')
name = line[name_sep+1:]
files.append(name)
blob_by_name[name] = metadata[2]
files.sort()
# open connection to git-cat-file in batch mode to request data for all blobs
# this is much faster than launching it per file
p = subprocess.Popen([GIT, 'cat-file', '--batch'], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
overall = hashlib.sha512()
for f in files:
blob = blob_by_name[f]
# request blob
p.stdin.write(blob + b'\n')
p.stdin.flush()
# read header: blob, "blob", size
reply = p.stdout.readline().split()
assert(reply[0] == blob and reply[1] == b'blob')
size = int(reply[2])
# hash the blob data
intern = hashlib.sha512()
ptr = 0
while ptr < size:
bs = min(65536, size - ptr)
piece = p.stdout.read(bs)
if len(piece) == bs:
intern.update(piece)
else:
raise IOError('Premature EOF reading git cat-file output')
ptr += bs
dig = intern.hexdigest()
assert(p.stdout.read(1) == b'\n') # ignore LF that follows blob data
# update overall hash with file hash
overall.update(dig.encode("utf-8"))
overall.update(" ".encode("utf-8"))
overall.update(f)
overall.update("\n".encode("utf-8"))
p.stdin.close()
if p.wait():
raise IOError('Non-zero return value executing git cat-file')
return overall.hexdigest()
def print_merge_details(pull, title, branch, base_branch, head_branch):
print('%s#%s%s %s %sinto %s%s' % (ATTR_RESET+ATTR_PR,pull,ATTR_RESET,title,ATTR_RESET+ATTR_PR,branch,ATTR_RESET))
subprocess.check_call([GIT,'log','--graph','--topo-order','--pretty=format:'+COMMIT_FORMAT,base_branch+'..'+head_branch])
def parse_arguments():
epilog = '''
In addition, you can set the following git configuration variables:
githubmerge.repository (mandatory),
user.signingkey (mandatory),
githubmerge.host (default: [email protected]),
githubmerge.branch (no default),
githubmerge.testcmd (default: none).
'''
parser = argparse.ArgumentParser(description='Utility to merge, sign and push github pull requests',
epilog=epilog)
parser.add_argument('pull', metavar='PULL', type=int, nargs=1,
help='Pull request ID to merge')
parser.add_argument('branch', metavar='BRANCH', type=str, nargs='?',
default=None, help='Branch to merge against (default: githubmerge.branch setting, or base branch for pull, or \'master\')')
return parser.parse_args()
def main():
# Extract settings from git repo
repo = git_config_get('githubmerge.repository')
host = git_config_get('githubmerge.host','[email protected]')
opt_branch = git_config_get('githubmerge.branch',None)
testcmd = git_config_get('githubmerge.testcmd')
signingkey = git_config_get('user.signingkey')
if repo is None:
print("ERROR: No repository configured. Use this command to set:", file=stderr)
print("git config githubmerge.repository <owner>/<repo>", file=stderr)
sys.exit(1)
if signingkey is None:
print("ERROR: No GPG signing key set. Set one using:",file=stderr)
print("git config --global user.signingkey <key>",file=stderr)
sys.exit(1)
host_repo = host+":"+repo # shortcut for push/pull target
# Extract settings from command line
args = parse_arguments()
pull = str(args.pull[0])
# Receive pull information from github
info = retrieve_pr_info(repo,pull)
if info is None:
sys.exit(1)
title = info['title'].strip()
body = info['body'].strip()
# precedence order for destination branch argument:
# - command line argument
# - githubmerge.branch setting
# - base branch for pull (as retrieved from github)
# - 'master'
branch = args.branch or opt_branch or info['base']['ref'] or 'master'
# Initialize source branches
head_branch = 'pull/'+pull+'/head'
base_branch = 'pull/'+pull+'/base'
merge_branch = 'pull/'+pull+'/merge'
local_merge_branch = 'pull/'+pull+'/local-merge'
devnull = open(os.devnull,'w')
try:
subprocess.check_call([GIT,'checkout','-q',branch])
except subprocess.CalledProcessError as e:
print("ERROR: Cannot check out branch %s." % (branch), file=stderr)
sys.exit(3)
try:
subprocess.check_call([GIT,'fetch','-q',host_repo,'+refs/pull/'+pull+'/*:refs/heads/pull/'+pull+'/*'])
except subprocess.CalledProcessError as e:
print("ERROR: Cannot find pull request #%s on %s." % (pull,host_repo), file=stderr)
sys.exit(3)
try:
subprocess.check_call([GIT,'log','-q','-1','refs/heads/'+head_branch], stdout=devnull, stderr=stdout)
except subprocess.CalledProcessError as e:
print("ERROR: Cannot find head of pull request #%s on %s." % (pull,host_repo), file=stderr)
sys.
|
aliyun/oss-ftp
|
python27/win32/Lib/site-packages/cryptography/x509/name.py
|
Python
|
mit
| 2,116
| 0
|
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import six
from cryptography import utils
from cryptography.x509.oid import ObjectIdentifier
class NameAttribute(object):
def __init__(self, oid, value):
if not isinstance(oid, ObjectIdentifier):
raise TypeError(
"oid argument must be an ObjectIdentifier instance."
)
if not isinstance(value, six.text_type):
raise TypeError(
"value argument must be a text type."
)
self._oid = oid
self._value = value
oid = utils.read_only_property("_oid")
value = utils.read_only_property("_value")
def __eq__(self, other):
if not isinstance(other, NameAttribute):
return NotImplemented
return (
self.oid == other.oid and
self.value == other.value
)
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((self.oid, self.value))
def __repr__(self):
return "<NameAttribute(oid={0.oid}, value={0.value!r})>".format(self)
class Name(object):
def __init__(self, attributes):
self._attributes = attributes
def get_attributes_for_oid(self, oid):
return [i for i in self if i.oid == oid]
def __eq__(self, other):
if not isinstance(other, Name):
return NotImplemented
return self._attributes == other._attributes
def __ne__(self, other):
return not self == other
def __hash__(self):
# TODO: this is relatively expensive, if this looks like a bottleneck
# for you, consider optimizing!
return hash(tuple(self._attributes))
def __iter__(self):
return iter(self._attributes)
def __len__(self):
return len(self._attributes)
def __repr__(self):
return "<Name({0!r})>".format(self._attributes)
|
y4n9squared/HEtest
|
hetest/python/circuit_generation/ibm/ibm_wire.py
|
Python
|
bsd-2-clause
| 1,345
| 0.003717
|
# *****************************************************************
# Copyright (c) 2013 Massachusetts Institute of Technology
#
# Developed exclusively at US Government expense under US Air Force contract
# FA8721-05-C-002. The rights of the United States Government to use, modify,
# reproduce, release, perform, display or disclose this computer software and
# computer software documentation in whole or in part, in any manner and for
# any purpose whatsoever, and to have or authorize others to do so, are
# Unrestricted and Unlimited.
#
# Licensed for use under the BSD License as described in the BSD-LICENSE.txt
# file in the root directory of this release.
#
# Project: SPAR
# Authors:            SY
# Description: IBM TA2 wire class
#
# Modifications:
# Date Name Modification
# ---- ---- ------------
# 22 Oct 2012 SY Original Version
# *****************************************************************
import ibm_circuit_object as ico
class IBMInputWire(ico.IBMCircuitObject):
"""
This class represents a single IBM input wire.
"""
def __init__(self, displayname, circuit):
"""Initializes the wire with the display name and circuit specified."""
ico.IBMCircuitObject.__init__(self, displayname, 0.0, 0, circuit)
|
david-ragazzi/nupic
|
nupic/encoders/coordinate.py
|
Python
|
gpl-3.0
| 6,560
| 0.00564
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import hashlib
import itertools
import numpy
from nupic.bindings.math import Random
from nupic.encoders.base import Encoder
class CoordinateEncoder(Encoder):
"""
  Given a coordinate in an N-dimensional space, and a radius around
that coordinate, the Coordinate Encoder returns an SDR representation
of that position.
The Coordinate Encoder uses an N-dimensional integer coordinate space.
For example, a valid coordinate in this space is (150, -49, 58), whereas
an invalid coordinate would be (55.4, -5, 85.8475).
It uses the following algorithm:
1. Find all the coordinates around the input coordinate, within the
specified radius.
2. For each coordinate, use a uniform hash function to
deterministically map it to a real number between 0 and 1. This is the
"order" of the coordinate.
3. Of these coordinates, pick the top W by order, where W is the
number of active bits desired in the SDR.
4. For each of these W coordinates, use a uniform hash function to
deterministically map it to one of the bits in the SDR. Make this bit active.
5. This results in a final SDR with exactly W bits active
(barring chance hash collisions).
"""
def __init__(self,
w=21,
n=1000,
name=None,
verbosity=0):
"""
See `nupic.encoders.base.Encoder` for more information.
@param name An optional string which will become part of the description
"""
# Validate inputs
if (w <= 0) or (w % 2 == 0):
raise ValueError("w must be an odd positive integer")
if (n <= 6 * w) or (not isinstance(n, int)):
raise ValueError("n must be an int strictly greater than 6*w. For "
"good results we recommend n be strictly greater "
"than 11*w")
self.w = w
self.n = n
self.verbosity = verbosity
self.encoders = None
if name is None:
name = "[%s:%s]" % (self.n, self.w)
self.name = name
def getWidth(self):
"""See `nupic.encoders.base.Encoder` for more information."""
return self.n
def getDescription(self):
"""See `nupic.encoders.base.Encoder` for more information."""
return [('coordinate', 0), ('radius', 1)]
def getScalars(self, inputData):
"""See `nupic.encoders.base.Encoder` for more information."""
return numpy.array([0]*len(inputData))
def encodeIntoArray(self, inputData, output):
"""
See `nupic.encoders.base.Encoder` for more information.
@param inputData (tuple) Contains coordinate (numpy.array)
and radius (float)
@param output (numpy.array) Stores encoded SDR in this numpy array
"""
(coordinate, radius) = inputData
neighbors = self._neighbors(coordinate, radius)
winners = self._topWCoordinates(neighbors, self.w)
bitFn = lambda coordinate: self._bitForCoordinate(coordinate, self.n)
indices = numpy.array([bitFn(w) for w in winners])
output[:] = 0
output[indices] = 1
@staticmethod
def _neighbors(coordinate, radius):
"""
Returns coordinates around given coordinate, within given radius.
Includes given coordinate.
@param coordinate (numpy.array) Coordinate whose neighbors to find
@param radius (float) Radius around `coordinate`
@return (numpy.array) List of coordinates
"""
ranges = [range(n-radius, n+radius+1) for n in coordinate.tolist()]
return numpy.array(list(itertools.product(*ranges)))
@classmethod
def _topWCoordinates(cls, coordinates, w):
"""
Returns the top W coordinates by order.
@param coordinates (numpy.array) A 2D numpy array, where each element
is a coordinate
@param w (int) Number of top coordinates to return
@return (numpy.array) A subset of `coordinates`, containing only the
top ones by order
"""
orders = numpy.array([cls._orderForCoordinate(c)
for c in coordinates.tolist()])
indices = numpy.argsort(orders)[-w:]
return coordinates[indices]
@staticmethod
def _hashCoordinate(coordinate):
"""Hash a coordinate to a 64 bit integer."""
coordinateStr = ",".join(str(v) for v in coordinate)
# Compute the hash and convert to 64 bit int.
hash = int(int(hashlib.md5(coordinateStr).hexdigest(), 16) % (2 ** 64))
return hash
@classmethod
def _orderForCoordinate(cls, coordinate):
"""
Returns the order for a coordinate.
@param coordinate (numpy.array) Coordinate
@return (float) A value in the interval [0, 1), representing the
order of the coordinate
"""
seed = cls._hashCoordinate(coordinate)
rng = Random(seed)
return rng.getReal64()
@classmethod
def _bitForCoordinate(cls, coordinate, n):
"""
Maps the coordinate to a bit in the SDR.
@param coordinate (numpy.array) Coordinate
@param n (int) The number of available bits in the SDR
@return (int) The index to a bit in the SDR
"""
seed = cls._hashCoordinate(coordinate)
rng = Random(seed)
return rng.getUInt32(n)
def dump(self):
print "CoordinateEncoder:"
print " w: %d" % self.w
print " n: %d" % self.n
@classmethod
def read(cls, proto):
encoder = object.__new__(cls)
encoder.w = proto.w
encoder.n = proto.n
encoder.verbosity = proto.verbosity
encoder.name = proto.name
return encoder
def write(self, proto):
proto.w = self.w
proto.n = self.n
proto.verbosity = self.verbosity
proto.name = self.name
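# --- Illustrative usage sketch (not part of the original encoder source) ---
# Assumes numpy and the nupic dependencies imported by this module are
# available. The particular active bits depend on the hash functions, but
# exactly w bits end up set (barring chance hash collisions).
if __name__ == "__main__":
    encoder = CoordinateEncoder(w=21, n=1000)
    sdr = numpy.zeros(encoder.getWidth(), dtype="uint8")
    # Encode the 2-D coordinate (100, 200) with a neighborhood radius of 5.
    encoder.encodeIntoArray((numpy.array([100, 200]), 5), sdr)
    assert sdr.sum() == encoder.w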
|
tensorflow/agents
|
tf_agents/bandits/policies/loss_utils.py
|
Python
|
apache-2.0
| 3,764
| 0.002125
|
# coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Loss utility code."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from typing import Optional, Text
import tensorflow as tf  # pylint: disable=g-explicit-tensorflow-version-import
from tf_agents.typing import types
def pinball_loss(
y_true: types.Tensor,
y_pred: types.Tensor,
weights: types.Float = 1.0,
scope: Optional[Text] = None,
loss_collection: tf.compat.v1.GraphKeys = tf.compat.v1.GraphKeys.LOSSES,
reduction: tf.compat.v1.losses.Reduction = tf.compat.v1.losses.Reduction
.SUM_BY_NONZERO_WEIGHTS,
quantile: float = 0.5) -> types.Float:
"""Adds a Pinball loss for quantile regression.
```
loss = quantile * (y_true - y_pred) if y_true > y_pred
loss = (quantile - 1) * (y_true - y_pred) otherwise
```
See: https://en.wikipedia.org/wiki/Quantile_regression#Quantiles
`weights` acts as a coefficient for the loss. If a scalar is provided, then
the loss is simply scaled by the given value. If `weights` is a tensor of size
`[batch_size]`, then the total loss for each sample of the batch is rescaled
by the corresponding element in the `weights` vector. If the shape of
`weights` matches the shape of `predictions`, then the loss of each
measurable element of `predictions` is scaled by the corresponding value of
`weights`.
Args:
y_true: tensor of true targets.
y_pred: tensor of predicted targets.
weights: Optional `Tensor` whose rank is either 0, or the same rank as
`labels`, and must be broadcastable to `labels` (i.e., all dimensions must
be either `1`, or the same as the corresponding `losses` dimension).
scope: The scope for the operations performed in computing the loss.
loss_collection: collection to which the loss will be added.
reduction: Type of reduction to apply to loss.
quantile: A float between 0. and 1., the quantile we want to regress.
Returns:
Weighted Pinball loss float `Tensor`. If `reduction` is `NONE`, this has the
same shape as `labels`; otherwise, it is scalar.
Raises:
ValueError: If the shape of `predictions` doesn't match that of `labels` or
if the shape of `weights` is invalid. Also if `labels` or `predictions`
is None.
@compatibility(eager)
The `loss_collection` argument is ignored when executing eagerly. Consider
holding on to the return value or collecting losses via a `tf.keras.Model`.
@end_compatibility
"""
if y_true is None:
raise ValueError('y_true must not be None.')
if y_pred is None:
raise ValueError('y_pred must not be None.')
with tf.compat.v1.name_scope(scope, 'pinball_loss',
(y_pred, y_true, weights)) as scope:
y_pred = tf.cast(y_pred, dtype=tf.float32)
y_true = tf.cast(y_true, dtype=tf.float32)
y_pred.get_shape().assert_is_compatible_with(y_true.get_shape())
error = tf.subtract(y_true, y_pred)
loss_tensor = tf.maximum(quantile * error, (quantile - 1) * error)
return tf.compat.v1.losses.compute_weighted_loss(
loss_tensor, weights, scope, loss_collection, reduction=reduction)
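# --- Illustrative usage sketch (not part of the original module) ---
# With quantile=0.5 the pinball loss is half the absolute error, so for the toy
# tensors below the default SUM_BY_NONZERO_WEIGHTS reduction gives about 0.25.
if __name__ == '__main__':
    y_true = tf.constant([1.0, 2.0, 3.0])
    y_pred = tf.constant([1.5, 2.0, 2.0])
    print(pinball_loss(y_true, y_pred, quantile=0.5))  # ~0.25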
|
ee08b397/dpark
|
examples/jit.py
|
Python
|
bsd-3-clause
| 757
| 0.007926
|
'''
Notice:
1. The function for jit should be located in mfs
2. For the usage of jit types and signatures, please refer to the Numba documentation <http://numba.github.com/numba-doc/0.10/index.html>
'''
from dpark import _ctx as dpark, jit, autojit
import numpy
@jit('f8(f8[:])')
def add1(x):
    sum = 0.0
for i in xrange(x.shape[0]):
sum += i*x[i]
return sum
@autojit
def add2(x):
sum = 0.0
for i in xrange(x.shape[0]):
sum += i*x[i]
return sum
def add3(x):
sum = 0.0
    for i in xrange(x.shape[0]):
sum += i*x[i]
return sum
rdd = dpark.makeRDD(range(0, 10)).map(lambda x: numpy.arange(x*1e7, (x+1)*1e7))
print rdd.map(add1).collect()
print rdd.map(add2).collect()
print rdd.map(add3).collect()
|
MalloyPower/parsing-python
|
front-end/testsuite-python-lib/Python-3.5.0/Lib/nntplib.py
|
Python
|
mit
| 43,081
| 0.000696
|
"""An NNTP client class based on:
- RFC 977: Network News Transfer Protocol
- RFC 2980: Common NNTP Extensions
- RFC 3977: Network News Transfer Protocol (version 2)
Example:
>>> from nntplib import NNTP
>>> s = NNTP('news')
>>> resp, count, first, last, name = s.group('comp.lang.python')
>>> print('Group', name, 'has', count, 'articles, range', first, 'to', last)
Group comp.lang.python has 51 articles, range 5770 to 5821
>>> resp, subs = s.xhdr('subject', '{0}-{1}'.format(first, last))
>>> resp = s.quit()
>>>
Here 'resp' is the server response line.
Error responses are turned into exceptions.
To post an article from a file:
>>> f = open(filename, 'rb') # file containing article, including header
>>> resp = s.post(f)
>>>
For descriptions of all methods, read the comments in the code below.
Note that all arguments and return values representing article numbers
are strings, not numbers, since they are rarely used for calculations.
"""
# RFC 977 by Brian Kantor and Phil Lapsley.
# xover, xgtitle, xpath, date methods by Kevan Heydon
# Incompatible changes from the 2.x nntplib:
# - all commands are encoded as UTF-8 data (using the "surrogateescape"
# error handler), except for raw message data (POST, IHAVE)
# - all responses are decoded as UTF-8 data (using the "surrogateescape"
# error handler), except for raw message data (ARTICLE, HEAD, BODY)
# - the `file` argument to various methods is keyword-only
#
# - NNTP.date() returns a datetime object
# - NNTP.newgroups() and NNTP.newnews() take a datetime (or date) object,
# rather than a pair of (date, time) strings.
# - NNTP.newgroups() and NNTP.list() return a list of GroupInfo named tuples
# - NNTP.descriptions() returns a dict mapping group names to descriptions
# - NNTP.xover() returns a list of dicts mapping field names (header or metadata)
# to field values; each dict representing a message overview.
# - NNTP.article(), NNTP.head() and NNTP.body() return a (response, ArticleInfo)
# tuple.
# - the "internal" methods have been marked private (they now start with
# an underscore)
# Other changes from the 2.x/3.1 nntplib:
# - automatic querying of capabilities at connect
# - New method NNTP.getcapabilities()
# - New method NNTP.over()
# - New helper function decode_header()
# - NNTP.post() and NNTP.ihave() accept file objects, bytes-like objects and
# arbitrary iterables yielding lines.
# - An extensive test suite :-)
# TODO:
# - return structured data (GroupInfo etc.) everywhere
# - support HDR
# Imports
import re
import socket
import collections
import datetime
import warnings
try:
import ssl
except ImportError:
_have_ssl = False
else:
_have_ssl = True
from email.header import decode_header as _email_decode_header
from socket import _GLOBAL_DEFAULT_TIMEOUT
__all__ = ["NNTP",
"NNTPError", "NNTPReplyError", "NNTPTemporaryError",
"NNTPPermanentError", "NNTPProtocolError", "NNTPDataError",
"decode_header",
]
# maximal line length when calling readline(). This is to prevent
# reading arbitrary length lines. RFC 3977 limits NNTP line length to
# 512 characters, including CRLF. We have selected 2048 just to be on
# the safe side.
_MAXLINE = 2048
# Exceptions raised when an error or invalid response is received
class NNTPError(Exception):
"""Base class for all nntplib exceptions"""
def __init__(self, *args):
Exception.__init__(self, *args)
try:
self.response = args[0]
except IndexError:
self.response = 'No response given'
class NNTPReplyError(NNTPError):
"""Unexpected [123]xx reply"""
pass
class NNTPTemporaryError(NNTPError):
"""4xx errors"""
pass
class NNTPPermanentError(NNTPError):
"""5xx errors"""
pass
class NNTPProtocolError(NNTPError):
"""Response does not begin with [1-5]"""
pass
class NNTPDataError(NNTPError):
    """Error in response data"""
pass
# Standard port used by NNTP servers
NNTP_PORT = 119
NNTP_SSL_PORT = 563
# Response numbers that are followed by additional text (e.g. article)
_LONGRESP = {
'100', # HELP
'101', # CAPABILITIES
'211', # LISTGROUP (also not multi-line with GROUP)
'215', # LIST
'220', # ARTICLE
'221', # HEAD, XHDR
'222', # BODY
'224', # OVER, XOVER
'225', # HDR
'230', # NEWNEWS
'231', # NEWGROUPS
'282', # XGTITLE
}
# Default decoded value for LIST OVERVIEW.FMT if not supported
_DEFAULT_OVERVIEW_FMT = [
"subject", "from", "date", "message-id", "references", ":bytes", ":lines"]
# Alternative names allowed in LIST OVERVIEW.FMT response
_OVERVIEW_FMT_ALTERNATIVES = {
'bytes': ':bytes',
'lines': ':lines',
}
# Line terminators (we always output CRLF, but accept any of CRLF, CR, LF)
_CRLF = b'\r\n'
GroupInfo = collections.namedtuple('GroupInfo',
['group', 'last', 'first', 'flag'])
ArticleInfo = collections.namedtuple('ArticleInfo',
['number', 'message_id', 'lines'])
# Helper function(s)
def decode_header(header_str):
    """Takes a unicode string representing a munged header value
and decodes it as a (possibly non-ASCII) readable value."""
parts = []
for v, enc in _email_decode_header(header_str):
if isinstance(v, bytes):
parts.append(v.decode(enc or 'ascii'))
else:
parts.append(v)
return ''.join(parts)
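# Illustrative example (not part of the original module): an RFC 2047
# encoded word decodes to its readable value, e.g.
#     >>> decode_header('=?utf-8?q?caf=C3=A9?=')
#     'café'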
def _parse_overview_fmt(lines):
    """Parse a list of strings representing the response to LIST OVERVIEW.FMT
and return a list of header/metadata names.
Raises NNTPDataError if the response is not compliant
(cf. RFC 3977, section 8.4)."""
fmt = []
for line in lines:
if line[0] == ':':
# Metadata name (e.g. ":bytes")
name, _, suffix = line[1:].partition(':')
name = ':' + name
else:
# Header name (e.g. "Subject:" or "Xref:full")
name, _, suffix = line.partition(':')
name = name.lower()
name = _OVERVIEW_FMT_ALTERNATIVES.get(name, name)
# Should we do something with the suffix?
fmt.append(name)
defaults = _DEFAULT_OVERVIEW_FMT
if len(fmt) < len(defaults):
raise NNTPDataError("LIST OVERVIEW.FMT response too short")
if fmt[:len(defaults)] != defaults:
raise NNTPDataError("LIST OVERVIEW.FMT redefines default fields")
return fmt
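# Illustrative example (not part of the original module): a compliant response
# such as ['Subject:', 'From:', 'Date:', 'Message-ID:', 'References:',
# ':bytes', ':lines', 'Xref:full'] is parsed into
# ['subject', 'from', 'date', 'message-id', 'references', ':bytes', ':lines', 'xref'].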
def _parse_overview(lines, fmt, data_process_func=None):
    """Parse the response to an OVER or XOVER command according to the
overview format `fmt`."""
n_defaults = len(_DEFAULT_OVERVIEW_FMT)
overview = []
for line in lines:
fields = {}
article_number, *tokens = line.split('\t')
article_number = int(article_number)
for i, token in enumerate(tokens):
if i >= len(fmt):
# XXX should we raise an error? Some servers might not
# support LIST OVERVIEW.FMT and still return additional
# headers.
continue
field_name = fmt[i]
is_metadata = field_name.startswith(':')
if i >= n_defaults and not is_metadata:
# Non-default header names are included in full in the response
# (unless the field is totally empty)
h = field_name + ": "
if token and token[:len(h)].lower() != h:
raise NNTPDataError("OVER/XOVER response doesn't include "
"names of additional headers")
token = token[len(h):] if token else None
fields[fmt[i]] = token
overview.append((article_number, fields))
return overview
def _parse_datetime(date_str, time_str=None):
"""Parse a pair of (date, time) strings, and return a datetime object.
If only the date is given, it is assumed to be date and time
concatenated together (e.g. response to the DATE command).
"""
if time_str is None:
time_str = date_str[-6:]
date_str = date_str[:-6]
hours = int(time_str[:2])
    minutes = int(time_str[2:4])
|
AnshulYADAV007/Lean
|
Algorithm.Python/CustomDataIndicatorExtensionsAlgorithm.py
|
Python
|
apache-2.0
| 3,632
| 0.013499
|
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from clr import AddReference
AddReference("System")
AddReference("QuantConnect.Algorithm")
AddReference("QuantConnect.Common")
AddReference("QuantConnect.Indicators")
from System import *
from QuantConnect import *
from QuantConnect.Indicators import *
from QuantConnect.Data import *
from QuantConnect.Data.Market import *
from QuantConnect.Data.Custom import *
from QuantConnect.Algorithm import *
from QuantConnect.Python import PythonQuandl
### <summary>
### The algorithm creates new indicator value with the existing indicator method by Indicator Extensions
### Demonstration of using the external custom datasource Quandl to request the VIX and VXV daily data
### </summary>
### <meta name="tag" content="using data" />
### <meta name="tag" content="using quantconnect" />
### <meta name="tag" content="custom data" />
### <meta name="tag" content="indicators" />
### <meta name="tag" content="indicator classes" />
### <meta name="tag" content="plotting indicators" />
### <meta name="tag" content="charting" />
class CustomDataIndicatorExtensionsAlgorithm(QCAlgorithm):
# Initialize the data and resolution you require for your strategy
def Initialize(self):
self.SetStartDate(2014,1,1)
self.SetEndDate(2018,1,1)
self.SetCash(25000)
self.vix = 'CBOE/VIX'
self.vxv = 'CBOE/VXV'
# Define the symbol and "type" of our generic data
self.AddData(QuandlVix, self.vix, Resolution.Daily)
self.AddData(Quandl, self.vxv, Resolution.Daily)
# Set up default Indicators, these are just 'identities' of the closing price
self.vix_sma = self.SMA(self.vix, 1, Resolution.Daily)
self.vxv_sma = self.SMA(self.vxv, 1, Resolution.Daily)
# This will create a new indicator whose value is smaVXV / smaVIX
self.ratio = IndicatorExtensions.Over(self.vxv_sma, self.vix_sma)
# Plot indicators each time they update using the PlotIndicator function
self.PlotIndicator("Ratio", self.ratio)
self.PlotIndicator("Data", self.vix_sma, self.vxv_sma)
# OnData event is the primary entry point for your algorithm. Each new data point will be pumped in here.
def OnData(self, data):
# Wait for all indicators to fully initialize
if not (self.vix_sma.IsReady and self.vxv_sma.IsReady and self.ratio.IsReady): return
if not self.Portfolio.Invested and self.ratio.Current.Value > 1:
self.MarketOrder(self.vix, 100)
elif self.ratio.Current.Value < 1:
self.Liquidate()
# In CBOE/VIX data, there is a "vix close" column instead of "close", which is the
# default column name in the LEAN Quandl custom data implementation.
# This class assigns a new column name to match the external data source setting.
class QuandlVix(PythonQuandl):
def __init__(self):
self.ValueColumnName = "VIX Close"
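# --- Illustrative sketch (not part of the original algorithm) ---
# The trading rule above reduces to a ratio test: stay long VIX while
# SMA(VXV)/SMA(VIX) > 1 and liquidate once the ratio drops below 1. The helper
# below is a hypothetical, framework-free restatement of that rule; the
# threshold of 1 comes from the algorithm, the sample prices are made up.
def vxv_vix_signal(vix_sma, vxv_sma, threshold=1.0):
    ratio = vxv_sma / vix_sma
    return "long" if ratio > threshold else "flat"

print(vxv_vix_signal(vix_sma=15.0, vxv_sma=16.5))  # 'long' (ratio = 1.1)
print(vxv_vix_signal(vix_sma=20.0, vxv_sma=18.0))  # 'flat' (ratio = 0.9)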
|
electric-cloud/metakit
|
python/setup.py
|
Python
|
mit
| 7,317
| 0.009703
|
from distutils.core import setup, Extension, Command
from distutils.command.build import build
from distutils.command.build_ext import build_ext
from distutils.command.config import config
from distutils.msvccompiler import MSVCCompiler
from distutils import sysconfig
import string
import sys
mkobjs = ['column', 'custom', 'derived', 'fileio', 'field',
'format', 'handler', 'persist', 'remap', 'std',
'store', 'string', 'table', 'univ', 'view', 'viewx']
class config_mk(config):
def run(self):
# work around bug in Python 2.2-supplied check_header, fixed
# in Python 2.3; body needs to be a valid, non-zero-length string
if self.try_cpp(body="/* body */", headers=['unicodeobject.h'],
include_dirs=[sysconfig.get_python_inc()]):
build = self.distribution.reinitialize_command('build_ext')
build.define = 'HAVE_UNICODEOBJECT_H'
# trust that mk4.h provides the correct HAVE_LONG_LONG value,
# since Mk4py doesn't #include "config.h"
class build_mk(build):
def initialize_options(self):
# build in builds directory by default, unless specified otherwise
build.initialize_options(self)
self.build_base = '../builds'
class build_mkext(build_ext):
def finalize_options(self):
self.run_command('config')
# force use of C++ compiler (helps on some platforms)
import os
cc = os.environ.get('CXX', sysconfig.get_config_var('CXX'))
if not cc:
cc = sysconfig.get_config_var('CCC') # Python 1.5.2
if cc:
os.environ['CC'] = cc
build_ext.finalize_options(self)
def build_extension(self, ext):
# work around linker problem with MacPython 2.3
if sys.platform == 'darwin':
try:
self.compiler.linker_so.remove("-Wl,-x")
except: pass
# work around linker problem with Linux, Python 2.2 and earlier:
# despite setting $CC above, still uses Python compiler
if sys.platform == 'linux2':
try:
ext.libraries.append("stdc++")
except: pass
if ext.name == "Mk4py":
if isinstance(self.compiler, MSVCCompiler):
suffix = '.obj'
if self.debug:
prefix = '../builds/msvc60/mklib/Debug/'
else:
prefix = '../builds/msvc60/mklib/Release/'
else:
suffix = '.o'
prefix = '../builds/'
for i in range(len(ext.extra_objects)):
                nm = ext.extra_objects[i]
if nm in mkobjs:
if string.find(nm, '.') == -1:
nm = nm + suffix
nm = prefix + nm
ext.extra_objects[i] = nm
build_ext.build_extension(self, ext)
class test_regrtest(Command):
# Original version of this class posted
# by Berthold Hoellmann to [email protected]
description = "test the distribution prior to install"
user_options = [
('build-base=', 'b',
"base build directory (default: 'build.build-base')"),
('build-purelib=', None,
"build directory for platform-neutral distributions"),
('build-platlib=', None,
"build directory for platform-specific distributions"),
        ('build-lib=', None,
         "build directory for all distributions (defaults to either " +
         "build-purelib or build-platlib)"),
('test-dir=', None,
"directory that contains the test definitions"),
('test-options=', None,
"command-line options to pass to test.regrtest")
]
def initialize_options(self):
self.build_base = None
# these are decided only after 'build_base' has its final value
# (unless overridden by the user or client)
self.build_purelib = None
self.build_platlib = None
self.test_dir = 'test'
self.test_options = None
def finalize_options(self):
build = self.distribution.get_command_obj('build')
build_options = ('build_base', 'build_purelib', 'build_platlib')
for option in build_options:
val = getattr(self, option)
if val:
setattr(build, option, getattr(self, option))
build.ensure_finalized()
for option in build_options:
setattr(self, option, getattr(build, option))
def run(self):
# Invoke the 'build' command to "build" pure Python modules
# (ie. copy 'em into the build tree)
self.run_command('build')
# remember old sys.path to restore it afterwards
old_path = sys.path[:]
# extend sys.path
sys.path.insert(0, self.build_purelib)
sys.path.insert(0, self.build_platlib)
sys.path.insert(0, self.test_dir)
# Use test.regrtest, unlike the original version of this class
import test.regrtest
# jcw 2004-04-26 - why do I need to add these here to find the tests?
#import leaktest - not very portable
import test_inttypes
import test_stringtype
#import test_hash - doesn't work
# jcw end
test.regrtest.STDTESTS = []
test.regrtest.NOTTESTS = []
if self.test_options:
sys.argv[1:] = string.split(self.test_options, ' ')
else:
del sys.argv[1:]
# remove stale modules
del sys.modules['metakit']
try:
del sys.modules['Mk4py']
except:
pass
self.announce("running tests")
test.regrtest.main(testdir=self.test_dir)
# restore sys.path
sys.path = old_path[:]
#try:
# import metakit
#except:
# metakit = sys.modules['metakit']
setup(name = "metakit",
version = "2.4.9.7",
description = "Python bindings to the Metakit database library",
#long_description = metakit.__doc__,
author = "Gordon McMillan / Jean-Claude Wippler",
author_email = "[email protected]",
url = "http://www.equi4.com/metakit/python.html",
maintainer = "Jean-Claude Wippler",
maintainer_email = "[email protected]",
license = "X/MIT style, see: http://www.equi4.com/mklicense.html",
keywords = ['database'],
py_modules = ['metakit'],
cmdclass = {'build': build_mk, 'build_ext': build_mkext,
'test': test_regrtest, 'config': config_mk},
ext_modules = [Extension("Mk4py",
sources=["PyProperty.cpp",
"PyRowRef.cpp",
"PyStorage.cpp",
"PyView.cpp",
"scxx/PWOImp.cpp",
],
include_dirs=["scxx",
"../include"],
extra_objects=mkobjs,
)]
)
## Local Variables:
## compile-command: "python setup.py build -b ../builds"
## End:
|
raphael-group/comet
|
run_comet_full.py
|
Python
|
mit
| 10,224
| 0.010172
|
#!/usr/bin/python
# Load required modules
import sys, os, json, re, time, comet as C, multiprocessing as mp, random
from math import exp
import run_comet_simple as RC
def get_parser():
# Parse arguments
import argparse
description = 'Runs CoMEt on permuted matrices.'
parser = argparse.ArgumentParser(description=description)
# General parameters
parser.add_argument('-o', '--output_directory', required=True,
help='Output directory.')
parser.add_argument('--parallel', default=False, action='store_true',
help='Use multiprocessing to run a job on each core.')
parser.add_argument('-np', '--num_permutations', required=True, type=int,
help='Number of permuted matrices to use.')
# Mutation data
parser.add_argument('-m', '--mutation_matrix', required=True,
help='File name for mutation data.')
parser.add_argument('-mf', '--min_freq', type=int, default=0,
help='Minimum gene mutation frequency.')
parser.add_argument('-pf', '--patient_file', default=None,
help='File of patients to be included (optional).')
parser.add_argument('-gf', '--gene_file', default=None,
help='File of genes to be included (optional).')
# Comet
parser.add_argument('-ks', '--gene_set_sizes', nargs="*", type=int, required=True,
help='Gene set sizes (length must be t). This or -k must be set. ')
parser.add_argument('-N', '--num_iterations', type=int, default=pow(10, 3),
help='Number of iterations of MCMC.')
parser.add_argument('-NStop', '--n_stop', type=int, default=pow(10, 8),
help='Number of iterations of MCMC to stop the pipeline.')
parser.add_argument('-s', '--step_length', type=int, default=100,
help='Number of iterations between samples.')
parser.add_argument('-init', '--initial_soln', nargs="*",
help='Initial solution to use.')
parser.add_argument('-r', '--num_initial', default=1, type=int,
help='Number of different initial starts to use with MCMC.')
parser.add_argument('-tv', '--total_distance_cutoff', type=float, default=0.005,
                        help='stop condition of convergence (total distance).')
# Parameters for determining the test to be applied in CoMEt
parser.add_argument('--exact_cut', default=0.001, type=float,
help='Maximum accumulated table prob. to stop exact test.')
    parser.add_argument('--binom_cut', type=float, default=0.005,
                        help='Minimum p-value cutoff for CoMEt to perform the binomial test.')
    parser.add_argument('-nt', '--nt', default=10, type=int,
                        help='Maximum co-occurrence cutoff to perform the exact test.')
# Files for subtypes/core-events run
parser.add_argument('-sub', '--subtype', default=None,
help='File with a list of subtype for performing subtype-comet.')
parser.add_argument('-ce', '--core_events', default=None,
help='File with a list of core events for performing subtype-comet.')
# Hidden parameters: users can still use these parameters but they won't show in the options
# Parameters for marginal probability graph (optional)
# File mapping genes/events to new names (optional).
parser.add_argument('-e', '--event_names', default=None, help=argparse.SUPPRESS)
# File mapping samples to cancer types.
parser.add_argument('-st', '--sample_types_file', default=None, help=argparse.SUPPRESS)
# Minimum edge weight for showing in the graph
parser.add_argument('-mew', '--minimum_edge_weight', type=float, default=0.001,
help=argparse.SUPPRESS)
# Minimum sampling frequency for a gene set to be included.
parser.add_argument('-msf', '--minimum_sampling_frequency', type=float, default=50,
help=argparse.SUPPRESS)
# Template file (HTML). Change at your own risk.
parser.add_argument('-tf', '--template_file', default="comet/src/html/template.html",
type=str, help=argparse.SUPPRESS)
# Maximum standard error cutoff to consider a line
parser.add_argument('-rmse', '--standard_error_cutoff', default=0.01, type=float,
help=argparse.SUPPRESS)
# Input file with lists of pre-run results.
parser.add_argument('--precomputed_scores', default=None, help=argparse.SUPPRESS)
# Accelerating factor for target weight
parser.add_argument('-acc', '--accelerator', default=1, type=int, help=argparse.SUPPRESS)
# Flag verbose output
parser.add_argument('-v', '--verbose', default=True, action="store_true",
help=argparse.SUPPRESS)
# Set the seed of the PRNG.
parser.add_argument('--seed', default=int(time.time()), type=int,
help=argparse.SUPPRESS)
# Edge swapping parameter.
parser.add_argument('-q', '--Q', type=int, default=100,
help=argparse.SUPPRESS)
# Keep temp files (CoMEt results and permuted matrices).
parser.add_argument('--keep_temp_files', required=False, action='store_true', default=False,
help=argparse.SUPPRESS)
return parser
def runComet(cometArgs):
return RC.run( RC.get_parser().parse_args(cometArgs) )
def run( args ):
# Set up the arguments for a general CoMEt run on real data
realOutputDir = "{}/comet-results".format(args.output_directory)
realCometArgs = []
permuteFlags = ["-np", "--parallel", "--keep_temp_files", "-o"]
for i, arg in enumerate(sys.argv[1:]):
if arg not in permuteFlags and sys.argv[i] not in permuteFlags:
realCometArgs.append( arg )
realCometArgs += [ "-o", realOutputDir, "--noviz"]
# perform simple run without viz first.
results = runComet(realCometArgs)
# Load mutation data using Multi-Dendrix and output as a temporary file
realMutations = C.load_mutation_data(args.mutation_matrix, args.patient_file,
args.gene_file, args.min_freq, args.subtype)
m, n, genes, patients, geneToCases, patientToGenes, subtypes = realMutations
if args.verbose:
print '* Mutation data: %s genes x %s patients' % (m, n)
# Construct bipartite graph from mutation data
if args.verbose: print "* Creating bipartite graph..."
G = C.construct_mutation_graph(geneToCases, patientToGenes)
if args.verbose:
print '\t- Graph has', len( G.edges() ), 'edges among', len( G.nodes() ), 'nodes.'
# reset the arguments for a general CoMEt run on permuted matrices
cometArgs = []
permuteFlags = ["-np", "--parallel", "--keep_temp_files", "-m", "-o"]
for i, arg in enumerate(sys.argv[1:]):
if arg not in permuteFlags and sys.argv[i] not in permuteFlags:
cometArgs.append( arg )
cometArgs.append('--noviz')
# Create a permuted matrix, and then run it through CoMEt
import tempfile
arguments = []
if args.keep_temp_files:
directory = args.output_directory
else:
directory = tempfile.mkdtemp(dir=".", prefix=".tmp")
# Generate random seeds for each permutation
random.seed(args.seed)
seeds = [ random.randint(0, 2**31-1) for _ in range(args.num_permutations) ]
for i, seed in enumerate(seeds):
# Print simple progress bar
sys.stdout.write("* Running CoMEt on permuted matrices... {}/{}\r".format(i+1, args.num_permutations))
sys.stdout.flush()
# Create a permuted dataset and save it a temporary file
mutations = C.permute_mutation_data(G, genes, patients, seed, args.Q)
_, _, _, _, geneToCases, patientToGenes = mutations
adj_list = [ p + "\t" + "\t".join( sorted(patientToGenes[p]) ) for p in patients ]
permutation_file = "{}/permuted-matrix-{}.m2".format(directory, i+1)
        with open(permutation_file, 'w') as outfile: outfile.write('\n'.join(adj_list))
|