repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
sbg/sevenbridges-python | sevenbridges/models/compound/tasks/batch_group.py | Python | apache-2.0 | 454 | 0 |
from sevenbridges.meta.resource import Resource
from sevenbridges.meta.fields import StringField, DictField
|
class BatchGroup(Resource):
"""
Batch group for a batch task.
Represents the group that is assigned to the child task
from the batching criteria that was used when the task was started.
"""
value = StringField(read_only=True)
fields = DictField(read_only=True)
def __str__(self):
return '<Batch group>'
| |
TheCoderNextdoor/DjangoSites | django_tut/website/music/views.py | Python | gpl-3.0 | 3,632 | 0.020099 |
# from django.shortcuts import render, get_object_or_404
# from .models import Album, Song
# def index(request):
# all_albums = Album.objects.all()
# context = {
# 'all_albums':all_albums,
# }
# return render(request, 'music/index.html', context)
# def detail(request, album_id):
# album = get_object_or_404(Album, pk=album_id)
# return render(request, 'music/detail.html', {'album': album})
# def favourite(request, album_id):
# album = get_object_or_404(Album, pk=album_id)
# try:
# selected_song = album.song_set.get(pk=request.POST['song'])
# except(KeyError, Song.DoesNotExist):
# return render(request, 'music/detail.html', {
# 'album': album,
# 'error_message':'Did not select a valid song'
# })
# else:
# selected_song.is_favourite = True
# selected_song.save()
# return render(request, 'music/detail.html', {'album': album})
from django.views import generic
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.core.urlresolvers import reverse_lazy
from django.shortcuts import render, redirect
from django.contrib.auth import authenticate, login
from django.views.generic import View
from .forms import UserForm
from .models import Album
class IndexView(generic.ListView):
template_name = "music/index.html"
def get_queryset(self):
return Album.objects.all()
class DetailView(generic.DetailView):
model = Album
template_name = "music/detail.html"
class AlbumCreate(CreateView):
model = Album
fields = ['artist', 'title', 'genre', 'logo']
class AlbumUpdate(UpdateView):
model = Album
fields = ['artist', 'title', 'genre', 'logo']
class AlbumDelete(DeleteView):
model = Album
success_url = reverse_lazy('music:index')
class UserFormView(View):
form_class = UserForm
template_name = 'music/registration_form.html'
#blank form (GET)
def get(self, request):
form = self.form_class(None)
return render(request, self.template_name, {'form':form})
#process form data (POST)
def post(self, request):
form = self.form_class(request.POST)
if form.is_valid():
user = form.save(commit=False)
#cleaned data
username = form.cleaned_data['username']
password = form.cleaned_data['password']
user.set_password(password)
user.save()
#return user objects if correct credentials
user = authenticate(username=username, password=password)
if user is not None:
if user.is_active:
login(request, user)
#request.user.username
return redirect('music:index')
return render(request, self.template_name, {'form':form})
| ||
StoDevX/cs251-toolkit | cs251tk/specs/load.py | Python | mit | 2,467 | 0.001621 |
import sys
from logging import warning
from glob import iglob
import json
import os
import shutil
from ..common import chdir, run
from .cache import cache_specs
from .dirs import get_specs_dir
def load_all_specs(*, basedir=get_specs_dir(), skip_update_check=True):
os.makedirs(basedir, exist_ok=True)
if not skip_update_check:
with chdir(basedir):
res, _, _ = run(['git', 'fetch', 'origin'])
if res != 'success':
print("Error fetching specs", file=sys.stderr)
_, res, _ = run(['git', 'log', 'HEAD..origin/master'])
if res != '':
print("Spec updates found - Updating", file=sys.stderr)
with chdir(basedir):
run(['git', 'pull', 'origin', 'master'])
# the repo has a /specs folder
basedir = os.path.join(basedir, 'specs')
cache_specs(basedir)
spec_files = iglob(os.path.join(basedir, '_cache', '*.json'))
# load_spec returns a (name, spec) tuple, so we just let the dict() constructor
# turn that into the {name: spec} pairs of a dictionary for us
return dict([load_spec(filename, basedir) for filename in spec_files])
def load_some_specs(idents, *, basedir=get_specs_dir()):
# the repo has a /specs folder
basedir = os.path.join(basedir, 'specs')
cache_specs(basedir)
wanted_spec_files = [os.path.join(basedir, '_cache', '{}.json'.format(ident)) for ident in idents]
all_spec_files = iglob(os.path.join(basedir, '_cache', '*.json'))
loadable_spec_files = set(all_spec_files).intersection(wanted_spec_files)
# load_spec returns a (name, spec) tuple, so we just let the dict() constructor
# turn that into the {name: spec} pairs of a dictionary for us
return dict([load_spec(filename) for filename in loadable_spec_files])
def load_spec(filename, basedir):
with open(filename, 'r', encoding='utf-8') as specfile:
loaded_spec = json.load(specfile)
name = os.path.splitext(os.path.basename(filename))[0]
assignment = loaded_spec['assignment']
# Ask if user wants to re-cache specs to fix discrepancy
if name != assignment:
warning('assignment "{}" does not match the filename {}'.format(assignment, filename))
recache = input("Re-cache specs? (Y/N)")
if recache and recache.lower()[0] == "y":
shutil.rmtree(os.path.join(basedir, '_cache'))
cache_specs(basedir)
return assignment, loaded_spec
|
edisondotme/motoPi | main/apps.py | Python | mit | 80 | 0.0125 |
from django.apps import AppConfig
|
class MainConfig(AppConfig):
name = 'main'
|
jk977/twitch-plays | bot/tests/commands.py | Python | gpl-3.0 | 1,035 | 0.002899 |
import unittest
from chat.commands.commandlist import CommandList
from chat.command import Command
from tests.structs.dummychat import DummyChat
class TestCommands(unittest.TestCase):
|
def setUp(self):
self.chat = DummyChat()
def test_get(self):
command = CommandList.get('help', self.chat, 'message')
self.assertTrue(command and isinstance(command, Command), 'Command get failed')
def test_validate(self):
fail_msg = 'Command validate failed'
self.assertTrue(CommandList.validate('help'), fail_msg)
self.assertTrue(CommandList.validate('!help'), fail_msg)
self.assertTrue(CommandList.validate('song'), fail_msg)
self.assertTrue(CommandList.validate('!song'), fail_msg)
self.assertTrue(CommandList.validate('restart'), fail_msg)
self.assertTrue(CommandList.validate('!restart'), fail_msg)
self.assertFalse(CommandList.validate('not a function'), fail_msg)
self.assertFalse(CommandList.validate('!not a function'), fail_msg)
|
2gis/pytestrail | testrail/testcase.py | Python | mit | 474 | 0 |
class TestRailTestCase:
def __init__(self, title, section, suite, steps):
self.title = title
self.section_name = section
self.suite_name = suite
self.steps = steps
self.type_id = 1
self.priority_id = 4
def to_json_dict(self):
return {
'title': self.title,
'type_id': self.type_id,
'priority_id': self.priority_id,
'custom_steps_separated': self.steps
}
|
ychen820/microblog | y/google-cloud-sdk/platform/google_appengine/lib/django-1.3/django/core/mail/__init__.py | Python | bsd-3-clause | 5,072 | 0.002957 |
"""
Tools for sending email.
"""
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
# Imported for backwards compatibility, and for the sake
# of a cleaner namespace. These symbols used to be in
# django/core/mail.py before the introduction of email
# backends and the subsequent reorganization (See #10355)
from django.core.mail.utils import CachedDnsName, DNS_NAME
from django.core.mail.message import \
EmailMessage, EmailMultiAlternatives, \
SafeMIMEText, SafeMIMEMultipart, \
DEFAULT_ATTACHMENT_MIME_TYPE, make_msgid, \
BadHeaderError, forbid_multi_line_headers
from django.core.mail.backends.smtp import EmailBackend as _SMTPConnection
def get_connection(backend=None, fail_silently=False, **kwds):
"""Load an e-mail backend and return an instance of it.
If backend is None (default) settings.EMAIL_BACKEND is used.
Both fail_silently and other keyword arguments are used in the
constructor of the backend.
"""
path = backend or settings.EMAIL_BACKEND
try:
mod_name, klass_name = path.rsplit('.', 1)
mod = import_module(mod_name)
except ImportError, e:
raise ImproperlyConfigured(('Error importing email backend module %s: "%s"'
% (mod_name, e)))
try:
klass = getattr(mod, klass_name)
except AttributeError:
raise ImproperlyConfigured(('Module "%s" does not define a '
'"%s" class' % (mod_name, klass_name)))
return klass(fail_silently=fail_silently, **kwds)
def send_mail(subject, message, from_email, recipient_list,
fail_silently=False, auth_user=None, auth_password=None,
connection=None):
"""
Easy wrapper for sending a single message to a recipient list. All members
of the recipient list will see the other recipients in the 'To' field.
If auth_user is None, the EMAIL_HOST_USER setting is used.
If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.
Note: The API for this method is frozen. New code wanting to extend the
functionality should use the EmailMessage class directly.
"""
connection = connection or get_connection(username=auth_user,
password=auth_password,
fail_silently=fail_silently)
return EmailMessage(subject, message, from_email, recipient_list,
connection=connection).send()
def send_mass_mail(datatuple, fail_silently=False, auth_user=None,
auth_password=None, connection=None):
"""
Given a datatuple of (subject, message, from_email, recipient_list), sends
each message to each recipient list. Returns the number of e-mails sent.
If from_email is None, the DEFAULT_FROM_EMAIL setting is used.
If auth_user and auth_password are set, they're used to log in.
If auth_user is None, the EMAIL_HOST_USER setting is used.
If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.
Note: The API for this method is frozen. New code wanting to extend the
functionality should use the EmailMessage class directly.
"""
connection = connection or get_connection(username=auth_user,
password=auth_password,
fail_silently=fail_silently)
messages = [EmailMessage(subject, message, sender, recipient)
|
for subject, message, sender, recipient in datatuple]
return connection.send_messages(messages)
def mail_admins(subject, message, fail_silently=False, connection=None,
html_message=None):
"""Sends a message to the admins, as defined by the ADMINS setting."""
if not settings.ADMINS:
return
mail = EmailMultiAlternatives(u'%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject),
message, settings.SERVER_EMAIL, [a[1] for a in settings.ADMINS],
connection=connection)
if html_message:
mail.attach_alternative(html_message, 'text/html')
mail.send(fail_silently=fail_silently)
def mail_managers(subject, message, fail_silently=False, connection=None,
html_message=None):
"""Sends a message to the managers, as defined by the MANAGERS setting."""
if not settings.MANAGERS:
return
mail = EmailMultiAlternatives(u'%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject),
message, settings.SERVER_EMAIL, [a[1] for a in settings.MANAGERS],
connection=connection)
if html_message:
mail.attach_alternative(html_message, 'text/html')
mail.send(fail_silently=fail_silently)
class SMTPConnection(_SMTPConnection):
def __init__(self, *args, **kwds):
import warnings
warnings.warn(
'mail.SMTPConnection is deprecated; use mail.get_connection() instead.',
DeprecationWarning
)
super(SMTPConnection, self).__init__(*args, **kwds)
|
ruamel/ordereddict | test/unit/test_support.py | Python | mit | 17,653 | 0.003852 |
"""Supporting definitions for the Python regression tests."""
if __name__ != 'test_support':
raise ImportError, 'test_support must be imported from the test package'
import sys
class Error(Exception):
"""Base class for regression test exceptions."""
class TestFailed(Error):
"""Test failed."""
class TestSkipped(Error):
"""Test skipped.
This can be raised to indicate that a test was deliberately
skipped, but not because a feature wasn't available. For
example, if some resource can't be used, such as the network
appears to be unavailable, this should be raised instead of
TestFailed.
"""
class ResourceDenied(TestSkipped):
"""Test skipped because it requested a disallowed resource.
This is raised when a test calls requires() for a resource that
has not been enabled. It is used to distinguish between expected
and unexpected skips.
"""
verbose = 1 # Flag set to 0 by regrtest.py
use_resources = None # Flag set to [] by regrtest.py
max_memuse = 0 # Disable bigmem tests (they will still be run with
# small sizes, to make sure they work.)
# _original_stdout is meant to hold stdout at the time regrtest began.
# This may be "the real" stdout, or IDLE's emulation of stdout, or whatever.
# The point is to have some flavor of stdout the user can actually see.
_original_stdout = None
def record_original_stdout(stdout):
global _original_stdout
_original_stdout = stdout
def get_original_stdout():
return _original_stdout or sys.stdout
def unload(name):
try:
del sys.modules[name]
except KeyError:
pass
def unlink(filename):
import os
try:
os.unlink(filename)
except OSError:
pass
def forget(modname):
'''"Forget" a module was ever imported by removing it from sys.modules and
deleting any .pyc and .pyo files.'''
unload(modname)
import os
for dirname in sys.path:
unlink(os.path.join(dirname, modname + os.extsep + 'pyc'))
# Deleting the .pyo file cannot be within the 'try' for the .pyc since
# the chance exists that there is no .pyc (and thus the 'try' statement
# is exited) but there is a .pyo file.
unlink(os.path.join(dirname, modname + os.extsep + 'pyo'))
def is_resource_enabled(resource):
"""Test whether a resource is enabled. Known resources are set by
regrtest.py."""
return use_resources is not None and resource in use_resources
def requires(resource, msg=None):
"""Raise ResourceDenied if the specified resource is not available.
If the caller's module is __main__ then automatically return True. The
possibility of False being returned occurs when regrtest.py is executing."""
# see if the caller's module is __main__ - if so, treat as if
# the resource was set
if sys._getframe().f_back.f_globals.get("__name__") == "__main__":
return
if not is_resource_enabled(resource):
if msg is None:
msg = "Use of the `%s' resource not enabled" % resource
raise ResourceDenied(msg)
def bind_port(sock, host='', preferred_port=54321):
"""Try to bind the sock to a port. If we are runni
|
ng multiple
tests and we don't try multiple ports, the test can fails. This
makes the test more robust."""
import socket, errno
# some random ports that hopefully no one is listening on.
for port in [preferred_port, 9907, 10243, 32999]:
|
try:
sock.bind((host, port))
return port
except socket.error, (err, msg):
if err != errno.EADDRINUSE:
raise
print >>sys.__stderr__, \
' WARNING: failed to listen on port %d, trying another' % port
raise TestFailed, 'unable to find port to listen on'
FUZZ = 1e-6
def fcmp(x, y): # fuzzy comparison function
if type(x) == type(0.0) or type(y) == type(0.0):
try:
x, y = coerce(x, y)
fuzz = (abs(x) + abs(y)) * FUZZ
if abs(x-y) <= fuzz:
return 0
except:
pass
elif type(x) == type(y) and type(x) in (type(()), type([])):
for i in range(min(len(x), len(y))):
outcome = fcmp(x[i], y[i])
if outcome != 0:
return outcome
return cmp(len(x), len(y))
return cmp(x, y)
try:
unicode
have_unicode = 1
except NameError:
have_unicode = 0
is_jython = sys.platform.startswith('java')
import os
# Filename used for testing
if os.name == 'java':
# Jython disallows @ in module names
TESTFN = '$test'
elif os.name == 'riscos':
TESTFN = 'testfile'
else:
TESTFN = '@test'
# Unicode name only used if TEST_FN_ENCODING exists for the platform.
if have_unicode:
# Assuming sys.getfilesystemencoding()!=sys.getdefaultencoding()
# TESTFN_UNICODE is a filename that can be encoded using the
# file system encoding, but *not* with the default (ascii) encoding
if isinstance('', unicode):
# python -U
# XXX perhaps unicode() should accept Unicode strings?
TESTFN_UNICODE = "@test-\xe0\xf2"
else:
# 2 latin characters.
TESTFN_UNICODE = unicode("@test-\xe0\xf2", "latin-1")
TESTFN_ENCODING = sys.getfilesystemencoding()
# TESTFN_UNICODE_UNENCODEABLE is a filename that should *not* be
# able to be encoded by *either* the default or filesystem encoding.
# This test really only makes sense on Windows NT platforms
# which have special Unicode support in posixmodule.
if (not hasattr(sys, "getwindowsversion") or
sys.getwindowsversion()[3] < 2): # 0=win32s or 1=9x/ME
TESTFN_UNICODE_UNENCODEABLE = None
else:
# Japanese characters (I think - from bug 846133)
TESTFN_UNICODE_UNENCODEABLE = eval('u"@test-\u5171\u6709\u3055\u308c\u308b"')
try:
# XXX - Note - should be using TESTFN_ENCODING here - but for
# Windows, "mbcs" currently always operates as if in
# errors=ignore' mode - hence we get '?' characters rather than
# the exception. 'Latin1' operates as we expect - ie, fails.
# See [ 850997 ] mbcs encoding ignores errors
TESTFN_UNICODE_UNENCODEABLE.encode("Latin1")
except UnicodeEncodeError:
pass
else:
print \
'WARNING: The filename %r CAN be encoded by the filesystem. ' \
'Unicode filename tests may not be effective' \
% TESTFN_UNICODE_UNENCODEABLE
# Make sure we can write to TESTFN, try in /tmp if we can't
fp = None
try:
fp = open(TESTFN, 'w+')
except IOError:
TMP_TESTFN = os.path.join('/tmp', TESTFN)
try:
fp = open(TMP_TESTFN, 'w+')
TESTFN = TMP_TESTFN
del TMP_TESTFN
except IOError:
print ('WARNING: tests will fail, unable to write to: %s or %s' %
(TESTFN, TMP_TESTFN))
if fp is not None:
fp.close()
unlink(TESTFN)
del os, fp
def findfile(file, here=__file__):
"""Try to find a file on sys.path and the working directory. If it is not
found the argument passed to the function is returned (this does not
necessarily signal failure; could still be the legitimate path)."""
import os
if os.path.isabs(file):
return file
path = sys.path
path = [os.path.dirname(here)] + path
for dn in path:
fn = os.path.join(dn, file)
if os.path.exists(fn): return fn
return file
def verify(condition, reason='test failed'):
"""Verify that condition is true. If not, raise TestFailed.
The optional argument reason can be given to provide
a better error text.
"""
if not condition:
raise TestFailed(reason)
def vereq(a, b):
"""Raise TestFailed if a == b is false.
This is better than verify(a == b) because, in case of failure, the
error message incorporates repr(a) and repr(b) so you can see the
inputs.
Note that "
|
msabramo/pycobertura | tests/test_cobertura.py | Python | mit | 7,957 | 0.000754 |
import mock
import lxml.etree as ET
from .utils import make_cobertura
def test_parse_path():
from pycobertura import Cobertura
xml_path = 'foo.xml'
with mock.patch('pycobertura.cobertura.os.path.exists', return_value=True):
with mock.patch('pycobertura.cobertura.ET.parse') as mock_parse:
cobertura = Cobertura(xml_path)
assert cobertura.xml is mock_parse.return_value.getroot.return_value
def test_version():
cobertura = make_cobertura()
assert cobertura.version == '1.9'
def test_line_rate():
cobertura = make_cobertura()
assert cobertura.line_rate() == 0.9
def test_line_rate_by_class():
cobertura = make_cobertura()
expected_line_rates = {
'Main': 1.0,
'search.BinarySearch': 0.9166666666666666,
'search.ISortedArraySearch': 1.0,
'search.LinearSearch': 0.7142857142857143,
}
for class_name in cobertura.classes():
assert cobertura.line_rate(class_name) == \
expected_line_rates[class_name]
def test_branch_rate():
cobertura = make_cobertura()
assert cobertura.branch_rate() == 0.75
def test_branch_rate_by_class():
cobertura = make_cobertura()
expected_branch_rates = {
'Main': 1.0,
'search.BinarySearch': 0.8333333333333334,
'search.ISortedArraySearch': 1.0,
'search.LinearSearch': 0.6666666666666666,
}
for class_name in cobertura.classes():
assert cobertura.branch_rate(class_name) == \
expected_branch_rates[class_name]
def test_total_misses():
cobertura = make_cobertura()
assert cobertura.total_misses() == 3
def test_missed_statements_by_class_name():
cobertura = make_cobertura()
expected_missed_statements = {
'Main': [],
'search.BinarySearch': [24],
'search.ISortedArraySearch': [],
'search.LinearSearch': [19, 24],
}
for class_name in cobertura.classes():
assert cobertura.missed_statements(class_name) == \
expected_missed_statements[class_name]
def test_list_packages():
cobertura = make_cobertura()
packages = cobertura.packages()
assert packages == ['', 'search']
def test_list_classes():
cobertura = make_cobertura()
classes = cobertura.classes()
assert classes == [
'Main',
'search.BinarySearch',
'search.ISortedArraySearch',
'search.LinearSearch'
]
def test_hit_lines__by_iterating_over_classes():
cobertura = make_cobertura()
expected_lines = {
'Main': [10, 16, 17, 18, 19, 23, 25, 26, 28, 29, 30],
'search.BinarySearch': [12, 16, 18, 20, 21, 23, 25, 26, 28, 29, 31],
'search.ISortedArraySearch': [],
'search.LinearSearch': [9, 13, 15, 16, 17],
}
for class_name in cobertura.classes():
assert cobertura.hit_statements(class_name) == expected_lines[class_name]
def test_missed_lines():
cobertura = make_cobertura()
expected_lines = {
'Main': [],
'search.BinarySearch': [24],
'search.ISortedArraySearch': [],
'search.LinearSearch': [19, 20, 21, 22, 23, 24],
}
for class_name in cobertura.classes():
assert cobertura.missed_lines(class_name) == expected_lines[class_name]
def test_total_statements():
cobertura = make_cobertura()
assert cobertura.total_statements() == 30
def test_total_statements_by_class():
cobertura = make_cobertura()
expected_total_statements = {
'Main': 11,
'search.BinarySearch': 12,
'search.ISortedArraySearch': 0,
'search.LinearSearch': 7,
}
for class_name in cobertura.classes():
assert cobertura.total_statements(class_name) == \
expected_total_statements[class_name]
def test_total_misses():
cobertura = make_cobertura()
assert cobertura.total_misses() == 3
def test_total_misses_by_class():
cobertura = make_cobertura()
expected_total_misses = {
'Main': 0,
'search.BinarySearch': 1,
'search.ISortedArraySearch': 0,
'search.LinearSearch': 2,
}
for class_name in cobertura.classes():
assert cobertura.total_misses(class_name) == \
expected_total_misses[class_name]
def test_total_hits():
cobertura = make_cobertura()
assert cobertura.total_hits() == 27
def test_total_hits_by_class():
cobertura = make_cobertura()
expected_total_misses = {
'Main': 11,
'search.BinarySearch': 11,
'search.ISortedArraySearch': 0,
'search.LinearSearch': 5,
}
for class_name in cobertura.classes():
assert cobertura.total_hits(class_name) == \
expected_total_misses[class_name]
def test_filename():
cobertura = make_cobertura()
expected_filenames = {
'Main': 'Main.java',
'search.BinarySearch': 'search/BinarySearch.java',
'search.ISortedArraySearch': 'search/ISortedArraySearch.java',
'search.LinearSearch': 'search/LinearSearch.java',
}
for class_name in cobertura.classes():
assert cobertura.filename(class_name) == \
expected_filenames[class_name]
def test_filepath():
base_path = 'foo/bar/baz'
cobertura = make_cobertura(base_path=base_path)
expected_filepaths = {
'Main': 'foo/bar/baz/Main.java',
'search.BinarySearch': 'foo/bar/baz/search/BinarySearch.java',
'search.ISortedArraySearch': 'foo/bar/baz/search/ISortedArraySearch.java',
'search.LinearSearch': 'foo/bar/baz/search/LinearSearch.java',
}
for class_name in cobertura.classes():
assert cobertura.filepath(class_name) == \
expected_filepaths[class_name]
def test_class_source__sources_not_found():
cobertura = make_cobertura('tests/cobertura.xml')
expected_sources = {
'Main': [(0, 'tests/Main.java not found', None)],
'search.BinarySearch': [(0, 'tests/search/BinarySearch.java not found', None)],
'search.ISortedArraySearch': [(0, 'tests/search/ISortedArraySearch.java not found', None)],
'search.LinearSearch': [(0, 'tests/search/LinearSearch.java not found', None)],
}
for class_name in cobertura.classes():
assert cobertura.class_source(class_name) == expected_sources[class_name]
def test_line_statuses():
cobertura = make_cobertura('tests/dummy.source1/coverage.xml')
expected_line_statuses = {
'dummy/__init__': [],
'dummy/dummy': [
(1, True),
(2, True),
(4, True),
(5, False),
(6, False),
],
'dummy/dummy2': [
(1, True),
(2, True),
],
'dummy/dummy4': [
(1, False),
(2, False),
(4, False),
(5, False),
(6, False)
],
}
for class_name in cobertura.classes():
assert cobertura.line_statuses(class_name) == \
expected_line_statuses[class_name]
def test_class_source__sources_found():
cobertura = make_cobertura('tests/dummy.source1/coverage.xml')
expected_sources = {
'dummy/__init__': [],
'dummy/dummy': [
(1, 'def foo():\n', True),
(2, ' pass\n', True),
(3, '\n', None),
(4, 'def bar():\n', True),
(5, " a = 'a'\n", False),
(6, " b = 'b'\n", False),
],
'dummy/dummy2': [
(1, 'def baz():\n', True),
(2, ' pass\n', True)
],
'dummy/dummy4': [
(1, 'def barbaz():\n', False),
(2, ' pass\n', False),
(3, '\n', None),
(4, 'def foobarbaz():\n', False),
(5, ' a = 1 + 3\n', False),
(6, ' pass\n', False)
],
}
for class_name in cobertura.classes():
assert cobertura.class_source(class_name) == \
expected_sources[class_name]
|
thelok/crits_scripts | crits/core/managament/commands/get_indicator_types.py | Python | mit | 4,701 | 0.005956 |
from optparse import make_option
from django.core.management.base import BaseCommand
from crits.core.mongo_tools import mongo_connector
import pprint
class Command(BaseCommand):
"""
Gets a count of indicator types and object types in CRITs
"""
help = "Gets a count of indicator types and object types in CRITs"
option_list = BaseCommand.option_list + (
make_option('--sort_count',
'-s',
dest='sort_count',
default=False,
action="store_true",
help='Sort by count instead of by the type\'s name.'
),
make_option('--agg_obj_by_collection',
'-a',
dest='agg_obj_by_collection',
default=False,
action="store_true",
help='For object types: Aggregate by collection instead of '
'combining all results.'
),
)
all_object_collections = [
"actors",
"backdoors",
"campaigns",
"certificates",
"domains",
"email",
"events",
"exploits",
"indicators",
"ips",
"pcaps",
"raw_data",
"sample",
"screenshots",
"targets",
"yara_rules"
]
def handle(self, *args, **kwargs):
sort_count = kwargs.get('sort_count')
agg_obj_by_collection = kwargs.get('agg_obj_by_collection')
pp = pprint.PrettyPrinter(indent=4)
self.aggregate_indicator_types(sort_count, pp)
self.aggregate_object_types(sort_count, agg_obj_by_collection, pp)
def aggregate_indicator_types(self, sort_count, pp):
collection = "indicators"
pipe = [ { "$group": {"_id":"$type" , "count":{"$sum": 1}}}, {"$sort": {"_id": 1}} ]
if sort_count is True:
pipe.append({"$sort": {"count": 1}})
else:
pipe.append({"$sort": {"_id": 1}})
db = mongo_connector(collection)
results = db.aggregate(pipeline=pipe)
print "INDICATOR TYPES IN COLLECTION [%s]" % collection
pp.pprint(results)
print
def aggregate_object_for_collection(self, collection, sort_count):
pipe = [
{"$unwind": "$objects"},
{"$group" :
{"_id":
{"obj_type":
{"$cond":
{"if":
{"$and":
[{"$gt":["$objects.name", None] },
{"$ne": ["$objects.type", "$objects.name"]}]
},
"then": {"$concat": [ "$objects.type", " - ", "$objects.name" ]},
"else": "$o
|
bjects.type"
}
}
},
"count": {"$sum": 1}
}
}
]
if sort_count is True:
pipe.append({"$sort": {"count": 1}})
else:
pipe.append({"$sort": {"_id": 1}})
db = mongo_connector(collection)
results = db.aggregate(pipeline=pipe)
return results
def aggregate_object_types(self, sort_count, is_agg_per_collection, pp):
results = {}
for collection in self.all_object_collections:
object_types = self.aggregate_object_for_collection(collection, sort_count)
results[collection] = object_types
if is_agg_per_collection:
for collection in self.all_object_collections:
print "OBJECT TYPES FOR COLLECTION: [%s]" % collection.upper()
if len(results[collection]['result']) != 0:
pp.pprint(results[collection]['result'])
else:
print "None found."
print
else:
all_obj_types = {}
for collection in self.all_object_collections:
collection_results = results[collection]
for collection_result in collection_results['result']:
obj_type = collection_result['_id']['obj_type']
all_obj_types[obj_type] = collection_result['count'] + all_obj_types.get(obj_type, 0);
print "OBJECT TYPES FOR ALL COLLECTIONS"
if(sort_count):
import operator
sorted_x = sorted(all_obj_types.items(), key=operator.itemgetter(1))
pp.pprint(sorted_x)
else:
pp.pprint(all_obj_types)
print
print
|
areitz/pants | src/python/pants/backend/jvm/tasks/scala_repl.py | Python | apache-2.0 | 2,743 | 0.008385 |
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
|
from pants.backend.jvm.tasks.jvm_task import JvmTask
from pants.backend.jvm.tasks.jvm_tool_task_mixin import JvmToolTaskMixin
from pants.base.target import Target
from pants.console.stty_utils import preserve_stty_settings
from pants.java.util import execute_java
class ScalaRepl(JvmToolTaskMixin, JvmTask):
@classmethod
def register_options(cls, register):
super(ScalaRepl, cls).register_options(register)
register('--main', default='scala.tools.nsc.MainGenericRunner',
help='The entry point for running the repl.')
cls.register_jvm_tool(register, 'scala-repl', default=['//:scala-repl'])
@classmethod
def prepare(cls, options, round_manager):
super(ScalaRepl, cls).prepare(options, round_manager)
# TODO(John Sirois): these are fake requirements in order to force compile run before this
# goal. Introduce a RuntimeClasspath product for JvmCompile and PrepareResources to populate
# and depend on that.
# See: https://github.com/pantsbuild/pants/issues/310
round_manager.require_data('resources_by_target')
round_manager.require_data('classes_by_target')
def execute(self):
(accept_predicate, reject_predicate) = Target.lang_discriminator('java')
targets = self.require_homogeneous_targets(accept_predicate, reject_predicate)
if targets:
tools_classpath = self.tool_classpath('scala-repl')
self.context.release_lock()
with preserve_stty_settings():
classpath = self.classpath(targets, cp=tools_classpath)
# The scala repl requires -Dscala.usejavacp=true since Scala 2.8 when launching in the way
# we do here (not passing -classpath as a program arg to scala.tools.nsc.MainGenericRunner).
jvm_options = self.jvm_options
if not any(opt.startswith('-Dscala.usejavacp=') for opt in jvm_options):
jvm_options.append('-Dscala.usejavacp=true')
print('') # Start REPL output on a new line.
try:
# NOTE: We execute with no workunit, as capturing REPL output makes it very sluggish.
execute_java(classpath=classpath,
main=self.get_options().main,
jvm_options=jvm_options,
args=self.args)
except KeyboardInterrupt:
# TODO(John Sirois): Confirm with Steve Gury that finally does not work on mac and an
# explicit catch of KeyboardInterrupt is required.
pass
|
odahoda/noisicaa | noisicaa/builtin_nodes/control_track/track_ui.py | Python | gpl-2.0 | 15,641 | 0.000895 |
#!/usr/bin/python3
# @begin:license
#
# Copyright (c) 2015-2019, Benjamin Niemann <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @end:license
import fractions
import logging
from typing import Any, List, Tuple
from PyQt5.QtCore import Qt
from PyQt5 import QtCore
from PyQt5 import QtGui
from noisicaa.core.typing_extra import down_cast
from noisicaa import audioproc
from noisicaa import core
from noisicaa import music
from noisicaa.ui.track_list import base_track_editor
from noisicaa.ui.track_list import time_view_mixin
from noisicaa.ui.track_list import tools
from . import model
logger = logging.getLogger(__name__)
class EditControlPointsTool(tools.ToolBase):
track = None # type: ControlTrackEditor
def __init__(self, **kwargs: Any) -> None:
super().__init__(
type=tools.ToolType.EDIT_CONTROL_POINTS,
group=tools.ToolGroup.EDIT,
**kwargs)
self.__moving_point = None # type: ControlPoint
self.__moving_point_original_pos = None # type: QtCore.QPoint
self.__moving_point_offset = None # type: QtCore.QPoint
self.__move_mode = 'any'
self.__move_range = None # type: Tuple[int, int]
def iconName(self) -> str:
return 'edit-control-points'
def mousePressEvent(self, evt: QtGui.QMouseEvent) -> None:
self.track.updateHighlightedPoint()
if (evt.button() == Qt.LeftButton
and evt.modifiers() == Qt.NoModifier
and self.track.highlightedPoint() is not None):
self.__moving_point = self.track.highlightedPoint()
self.__moving_point_original_pos = self.__moving_point.pos()
self.__moving_point_offset = evt.pos() - self.__moving_point.pos()
self.__move_mode = 'any'
point_index = self.__moving_point.index
if point_index > 0:
range_left = self.track.points[point_index - 1].pos().x() + 1
else:
range_left = self.track.timeToX(audioproc.MusicalTime(0, 1))
if point_index < len(self.track.points) - 1:
range_right = self.track.points[point_index + 1].pos().x() - 1
else:
range_right = self.track.timeToX(self.track.projectEndTime())
self.__move_range = (range_left, range_right)
evt.accept()
return
if (evt.button() == Qt.LeftButton
and evt.modifiers() == Qt.ShiftModifier
and self.track.highlightedPoint() is not None):
with self.project.apply_mutations('%s: Remove control point' % self.track.track.name):
self.track.track.delete_control_point(self.track.highlightedPoint().point)
evt.accept()
return
if evt.button() == Qt.RightButton and self.__moving_point is not None:
self.track.setPointPos(self.__moving_point, self.__moving_point_original_pos)
self.__moving_point = None
evt.accept()
return
super().mousePressEvent(evt)
def mouseMoveEvent(self, evt: QtGui.QMouseEvent) -> None:
if self.__moving_point is not None:
new_pos = evt.pos() - self.__moving_point_offset
if evt.modifiers() == Qt.ControlModifier:
delta = new_pos - self.__moving_point_original_pos
if self.__move_mode == 'any' and delta.manhattanLength() > 5:
if abs(delta.x()) > abs(delta.y()):
self.__move_mode = 'horizontal'
else:
self.__move_mode = 'vertical'
else:
self.__move_mode = 'any'
if self.__move_mode == 'horizontal':
new_pos.setY(self.__moving_point_original_pos.y())
elif self.__move_mode == 'vertical':
new_pos.setX(self.__moving_point_original_pos.x())
range_left, range_right = self.__move_range
if new_pos.x() < range_left:
new_pos.setX(range_left)
elif new_pos.x() > range_right:
new_pos.setX(range_right)
if new_pos.y() < 0:
new_pos.setY(0)
elif new_pos.y() > self.track.height() - 1:
new_pos.setY(self.track.height() - 1)
self.track.setPointPos(self.__moving_point, new_pos)
evt.accept()
return
self.track.updateHighlightedPoint()
super().mouseMoveEvent(evt)
def mouseReleaseEvent(self, evt: QtGui.QMouseEvent) -> None:
if evt.button() == Qt.LeftButton and self.__moving_point is not None:
pos = self.__moving_point.pos()
self.__moving_point = None
if self.__move_mode != 'vertical':
new_time = self.track.xToTime(pos.x())
else:
new_time = None
if self.__move_mode != 'horizontal':
new_value = self.track.yToValue(pos.y())
else:
new_value = None
with self.project.apply_mutations('%s: Change control point' % self.track.track.name):
self.track.highlightedPoint().point.time = new_time
|
self.track.highlightedPoint().point.value = new_value
evt.accept()
return
super().mouseReleaseEvent(evt)
def mouseDoubleClickEvent(self, evt: QtGui.QMouseEvent) -> None:
if evt.button() == Qt.LeftButton and evt.modifiers() == Qt.NoModifier:
# If the first half of the double click initiated a move,
# cancel that move now.
if self.__moving_point is not None:
self.track.setPointPos(self.__moving_point, self.__moving_point_original_pos)
self.__moving_point = None
time = self.track.xToTime(evt.pos().x())
for point in self.track.track.points:
if point.time == time:
with self.project.apply_mutations(
'%s: Change control point' % self.track.track.name):
point.value = self.track.yToValue(evt.pos().y())
break
else:
with self.project.apply_mutations(
'%s: Insert control point' % self.track.track.name):
self.track.track.create_control_point(
self.track.xToTime(evt.pos().x()),
self.track.yToValue(evt.pos().y()))
evt.accept()
return
super().mouseDoubleClickEvent(evt)
class ControlTrackToolBox(tools.ToolBox):
def __init__(self, **kwargs: Any) -> None:
super().__init__(**kwargs)
self.addTool(EditControlPointsTool)
class ControlPoint(core.AutoCleanupMixin, object):
def __init__(self, track_editor: 'ControlTrackEditor', point: model.ControlPoint) -> None:
super().__init__()
self.__track_editor = track_editor
self.__point = point
self.__pos = QtCore.QPoint(
self.__track_editor.timeToX(self.__point.time),
self.__track_editor.valueToY(self.__point.value))
self.__listeners = core.ListenerList()
self.add_cleanup_function(self.__listeners.cleanup)
self.__listeners.add(self.__point.time_changed.add(self.onTimeChanged))
self.__listeners.add(self.__point.value_changed.add(self.onValueChanged))
def onTimeChanged(self, change: music.Pro
|
Eficent/purchase-workflow | procurement_purchase_no_grouping/__manifest__.py | Python | agpl-3.0 | 662 | 0 |
# -*- coding: utf-8 -*-
# Copyright 2015 AvanzOsc (http://www.avanzosc.es)
# Copyright 2015-2017 - Pedro M. Baeza <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)
{
"name": "Procurement Purchase No Grouping",
"version": "10.0.1.0.0",
"author": "AvanzOSC,"
"Tecn
|
ativa,"
"Odoo Community Association (OCA)",
"website": "https://github.com/OCA/purchase-workflow",
"category": "Procurements",
"depends": [
'purchase',
'procurement',
],
"data": [
'views/product_category_view.xml',
],
'installable': True,
|
'license': 'AGPL-3',
}
|
GoogleCloudPlatform/tf-estimator-tutorials | 00_Miscellaneous/model_optimisation/optimize_graph_keras.py | Python | apache-2.0 | 7,518 | 0.009178 |
# Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Extract from notebook for Serving Optimization on Keras """
from __future__ import print_function
from datetime import datetime
import os
import sh
import sys
import tensorflow as tf
from tensorflow import data
from tensorflow.python.saved_model import tag_constants
from tensorflow.python.tools import freeze_graph
from tensorflow.python import ops
from tensorflow.tools.graph_transforms import TransformGraph
from inference_test import inference_test, load_mnist_keras
from optimize_graph import (run_experiment, get_graph_def_from_saved_model,
describe_graph, get_size, get_metagraph, get_graph_def_from_file,
convert_graph_def_to_saved_model, freeze_model, optimize_graph, TRANSFORMS)
NUM_CLASSES = 10
MODELS_LOCATION = 'models/mnist'
MODEL_NAME = 'keras_classifier'
def keras_model_fn(params):
inputs = tf.keras.layers.Input(shape=(28, 28), name='input_image')
input_layer = tf.keras.layers.Reshape(target_shape=(28, 28, 1), name='reshape')(inputs)
# convolutional layers
conv_inputs = input_layer
for i in range(params.num_conv_layers):
filters = params.init_filters * (2**i)
conv = tf.keras.layers.Conv2D(kernel_size=3, filters=filters, strides=1, padding='SAME', activation='relu')(conv_inputs)
max_pool = tf.keras.layers.MaxPool2D(pool_size=2, strides=2, padding='SAME')(conv)
batch_norm = tf.keras.layers.BatchNormalization()(max_pool)
conv_inputs = batch_norm
flatten = tf.keras.layers.Flatten(name='flatten')(conv_inputs)
# fully-connected layers
dense_inputs = flatten
for i in range(len(params.hidden_units)):
dense = tf.keras.layers.Dense(units=params.hidden_units[i], activation='relu')(dense_inputs)
dropout = tf.keras.layers.Dropout(params.dropout)(dense)
dense_inputs = dropout
# softmax classifier
logits = tf.keras.layers.Dense(units=NUM_CLASSES, name='logits')(dense_inputs)
softmax = tf.keras.layers.Activation('softmax', name='softmax')(logits)
# keras model
model = tf.keras.models.Model(inputs, softmax)
return model
def create_estimator_keras(params, run_config):
keras_model = keras_model_fn(params)
print(keras_model.summary())
optimizer = tf.keras.optimizers.Adam(lr=params.learning_rate)
keras_model.compile(loss='sparse_categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
mnist_classifier = tf.keras.estimator.model_to_estimator(
keras_model=keras_model,
config=run_config
)
return mnist_classifier
#### Train and Export Model
def train_and_export_model(train_data, train_labels):
model_dir = os.path.join(MODELS_LOCATION, MODEL_NAME)
hparams = tf.contrib.training.HParams(
batch_size=100,
hidden_units=[512, 512],
num_conv_layers=3,
init_filters=64,
dropout=0.2,
max_training_steps=50,
eval_throttle_secs=10,
learning_rate=1e-3,
debug=True
)
run_config = tf.estimator.RunConfig(
tf_random_seed=19830610,
save_checkpoints_steps=1000,
keep_checkpoint_max=3,
model_dir=model_dir
)
if tf.gfile.Exists(model_dir):
print('Removing previous artifacts...')
tf.gfile.DeleteRecursively(model_dir)
os.makedirs(model_dir)
estimator = run_experiment(hparams, train_data, train_labels, run_config, create_estimator_keras)
def make_serving_input_receiver_fn():
inputs = {'input_image': tf.placeholder(
shape=[None,28,28], dtype=tf.float32, name='serving_input_image')}
return tf.estimator.export.build_raw_serving_input_receiver_fn(inputs)
export_dir = os.path.join(model_dir, 'export')
if tf.gfile.Exists(export_dir):
tf.gfile.DeleteRecursively(export_dir)
estimator.export_savedmodel(
export_dir_base=export_dir,
serving_input_receiver_fn=make_serving_input_receiver_fn()
)
return export_dir
def setup_model():
train_data, train_labels, eval_data, eval_labels = load_mnist_keras()
export_dir = train_and_export_model(train_data, train_labels)
return export_dir, eval_data
NUM_TRIALS = 10
def main(args):
if len(args) > 1 and args[1] == '--inference':
export_dir = args[2]
_, _, eval_data, _ = load_mnist_keras()
total_load_time = 0.0
total_serve_time = 0.0
saved_model_dir = os.path.join(
export_dir, [f for f in os.listdir(export_dir) if f.isdigit()][0])
for i in range(0, NUM_TRIALS):
load_time, serving_time = inference_test(saved_model_dir, eval_data, repeat=10000)
total_load_time += load_time
total_serve_time += serving_time
print("****************************************")
print("*** Load time on original model: {:.2f}".format(total_load_time / NUM_TRIALS))
print("*** Serve time on original model: {:.2f}".format(total_serve_time / NUM_TRIALS))
print("****************************************")
total_load_time = 0.0
total_serve_time = 0.0
optimized_export_dir = os.path.join(export_dir, 'optimized')
for i in range(0, NUM_TRIALS):
load_time, serving_time = inference_test(optimized_export_dir, eval_data,
signature='serving_default',
repeat=10000)
total_load_time += load_time
total_serve_time += serving_time
print("****************************************")
print("*** Load time on optimized model: {:.2f}".format(total_load_time / NUM_TRIALS))
print("*** Serve time on optimized model: {:.2f}".format(total_serve_time / NUM_TRIALS))
print("****************************************")
else:
# generate and output original model
export_dir, eval_data = setup_model()
saved_model_dir = os.path.join(export_dir, os.listdir(export_dir)[-1])
describe_graph(get_graph_def_from_saved_model(saved_model_dir))
get_size(saved_model_dir, 'saved_model.pb')
get_metagraph(saved_model_dir)
# freeze model and describe it
freeze_model(saved_model_dir, 'softmax/Softmax', 'frozen_model.pb')
frozen_filepath = os.path.join(saved_model_dir, 'frozen_model.pb')
describe_graph(get_graph_def_from_file(frozen_filepath))
get_size(saved_model_dir, 'frozen_model.pb', include_vars=False)
# optimize model and describe it
optimize_graph(saved_model_dir, 'frozen_model.pb', TRANSFORMS, 'softmax/Softmax')
optimized_filepath = os.path.join(saved_model_dir, 'optimized_model.pb')
describe_graph(get_graph_def_from_file(optimized_filepath))
get_size(saved_model_dir, 'optimized_model.pb', include_vars=False)
# convert to saved model and output metagraph again
optimized_export_dir = os.path.join(export_dir, 'optimized')
convert_graph_def_to_saved_model(optimized_export_dir, optimized_filepath,
'softmax', 'softmax/Softmax:0')
get_size(optimized_export_dir, 'saved_model.pb')
get_metagraph(optimized_export_dir)
if __name__ == '__main__':
main(sys.argv)
|
saeki-masaki/cinder | cinder/volume/drivers/dell/dell_storagecenter_iscsi.py | Python | apache-2.0 | 7,182 | 0 |
# Copyright 2015 Dell Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'''Volume driver for Dell Storage Center.'''
from oslo_log import log as logging
from oslo_utils import excutils
from cinder import exception
from cinder.i18n import _, _LE, _LI
from cinder.volume.drivers.dell import dell_storagecenter_common
from cinder.volume.drivers import san
LOG = logging.getLogger(__name__)
class DellStorageCenterISCSIDriver(san.SanISCSIDriver,
dell_storagecenter_common.DellCommonDriver):
'''Implements commands for Dell StorageCenter ISCSI management.
To enable the driver add the following line to the cinder configuration:
volume_driver=cinder.volume.drivers.dell.DellStorageCenterISCSIDriver
'''
VERSION = '1.0.2'
def __init__(self, *args, **kwargs):
super(DellStorageCenterISCSIDriver, self).__init__(*args, **kwargs)
self.backend_name = (
self.configuration.safe_get('volume_backend_name')
or 'Dell-iSCSI')
def initialize_connection(self, volume, connector):
# Initialize_connection will find or create a server identified by the
# connector on the Dell backend. It will then map the volume to it
# and return the properties as follows..
# {'driver_volume_type': 'iscsi',
# data = {'target_discovered': False,
# 'target_iqn': preferred iqn,
# 'target_iqns': all iqns,
# 'target_portal': preferred portal,
# 'target_portals': all portals,
# 'target_lun': preferred lun,
# 'target_luns': all luns,
# 'access_mode': access_mode
# }
# We use id to name the volume name as it is a
# known unique name.
volume_name = volume.get('id')
initiator_name = connector.get('initiator')
multipath = connector.get('multipath', False)
LOG.info(_LI('initialize_connection: %(vol)s:%(initiator)s'),
{'vol': volume_name,
'initiator': initiator_name})
with self._client.open_connection() as api:
try:
# Find our server.
server = api.find_server(initiator_name)
# No? Create it.
if server is None:
server = api.create_server(initiator_name)
# Find the volume on the storage center.
scvolume = api.find_volume(volume_name)
# if we have a server and a volume lets bring them together.
if server is not None and scvolume is not None:
mapping = api.map_volume(scvolume,
server)
if mapping is not None:
# Since we just mapped our volume we had best update
# our sc volume object.
scvolume = api.find_volume(volume_name)
# Our return.
iscsiprops = {}
ip = None
port = None
if not multipath:
# We want to make sure we point to the specified
# ip address for our target_portal return. This
# isn't an issue with multipath since it should
# try all the alternate portal.
ip = self.configuration.iscsi_ip_address
port = self.configuration.iscsi_port
# Three cases that should all be satisfied with the
|
# same return of Target_Portal and Target_Portals.
# 1. Nova is calling us so we need to return the
# Target_Portal stuff. It should ignore the
# Target_Portals stuff.
# 2. OS brick is calling us in multipath mode so we
# want to return Target_Portals. It will ignore
# the Target_Portal stuff.
# 3. OS brick is calling us in single path mode so
# we want to return Target_Portal and
# Target_Portals as alternates.
iscsiprops = (api.find_iscsi_properties(scvolume,
ip,
port))
# Return our iscsi properties.
return {'driver_volume_type': 'iscsi',
'data': iscsiprops}
except Exception:
error = (_('Failed to initialize connection '
'%(initiator)s %(vol)s') %
{'initiator': initiator_name,
'vol': volume_name})
LOG.error(error)
raise exception.VolumeBackendAPIException(error)
# We get here because our mapping is none or we have no valid iqn to
# return so blow up.
raise exception.VolumeBackendAPIException(
_('Unable to map volume'))
def terminate_connection(self, volume, connector, force=False, **kwargs):
# Grab some initial info.
initiator_name = connector.get('initiator')
volume_name = volume.get('id')
LOG.debug('Terminate connection: %(vol)s:%(initiator)s',
{'vol': volume_name,
'initiator': initiator_name})
with self._client.open_connection() as api:
try:
scserver = api.find_server(initiator_name)
# Find the volume on the storage center.
scvolume = api.find_volume(volume_name)
# If we have a server and a volume lets pull them apart.
if (scserver is not None and
scvolume is not None and
api.unmap_volume(scvolume, scserver) is True):
LOG.debug('Connection terminated')
return
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_LE('Failed to terminate connection '
'%(initiator)s %(vol)s'),
{'initiator': initiator_name,
'vol': volume_name})
raise exception.VolumeBackendAPIException(
_('Terminate connection failed'))
|
attugit/cxxjson | test/conftest.py | Python | mit | 587 | 0.001704 |
from pytest import fixture
from itertools import combinations
import msgpack as pymsgpack
values = [
42, 7, 3.14, 2.71, 'lorem', 'ipsum', True, False, None, b'lorem', b'ipsum', [], [
'lorem', 42, 3.14, True, None, ['ipsum']], dict(), {
'lorem': 'ipsum', 'dolor': 42, 'sit': 3.14, 'amet': [
|
True, None], 'consectetur':{
'adipisicing': 'elit'}}]
pairs = tuple(combinations(values, 2))
@fixture
def cxxjson():
from cxx import json
return json
@fixture
def cxxmsgpack():
from cxx import msgpack
return msgpack
|
pkill-nine/qutebrowser | tests/conftest.py | Python | gpl-3.0 | 6,876 | 0.000873 |
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2017 Florian Bruhin (The Compiler) <[email protected]>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
# pylint: disable=unused-import,wildcard-import,unused-wildcard-import
"""The qutebrowser test suite conftest file."""
import os
import sys
import warnings
import pytest
import hypothesis
from PyQt5.QtCore import PYQT_VERSION
pytest.register_assert_rewrite('helpers')
from helpers import logfail
from helpers.logfail import fail_on_logging
from helpers.messagemock import message_mock
from helpers.fixtures import *
from qutebrowser.utils import qtutils
# Set hypothesis settings
hypothesis.settings.register_profile('default',
hypothesis.settings(strict=True))
hypothesis.settings.load_profile('default')
def _apply_platform_markers(config, item):
"""Apply a skip marker to a given item."""
markers = [
('posix', os.name != 'posix', "Requires a POSIX os"),
('windows', os.name != 'nt', "Requires Windows"),
('linux', not sys.platform.startswith('linux'), "Requires Linux"),
('mac', sys.platform != 'darwin', "Requires macOS"),
('not_mac', sys.platform == 'darwin', "Skipped on macOS"),
('not_frozen', getattr(sys, 'frozen', False),
"Can't be run when frozen"),
('frozen', not getattr(sys, 'frozen', False),
"Can only run when frozen"),
('ci', 'CI' not in os.environ, "Only runs on CI."),
('issue2478', os.name == 'nt' and config.webengine,
"Bro
|
ken with QtWebEngine on Windows"),
]
for searched_marker, condition, default_reason in markers:
marker = item.get_marker(searched_marker)
if not marker or not condition:
continue
if 'reason' in marker.kwargs:
reason = '{}: {}'.format(default_reason, marker.kwargs['reason'])
del marker.kwargs['reason']
else:
reason = default_reason + '.'
skipif_marker = pytest.mark.skipif(condition, *marker.args,
reason=reason, **marker.kwargs)
item.add_marker(skipif_marker)
def pytest_collection_modifyitems(config, items):
"""Handle custom markers.
pytest hook called after collection has been performed.
Adds a marker named "gui" which can be used to filter gui tests from the
command line.
For example:
pytest -m "not gui" # run all tests except gui tests
pytest -m "gui" # run only gui tests
It also handles the platform specific markers by translating them to skipif
markers.
Args:
items: list of _pytest.main.Node items, where each item represents
a python test that will be executed.
Reference:
http://pytest.org/latest/plugins.html
"""
remaining_items = []
deselected_items = []
for item in items:
deselected = False
if 'qapp' in getattr(item, 'fixturenames', ()):
item.add_marker('gui')
if hasattr(item, 'module'):
module_path = os.path.relpath(
item.module.__file__,
os.path.commonprefix([__file__, item.module.__file__]))
module_root_dir = module_path.split(os.sep)[0]
assert module_root_dir in ['end2end', 'unit', 'helpers',
'test_conftest.py']
if module_root_dir == 'end2end':
item.add_marker(pytest.mark.end2end)
_apply_platform_markers(config, item)
if item.get_marker('xfail_norun'):
item.add_marker(pytest.mark.xfail(run=False))
if item.get_marker('js_prompt'):
if config.webengine:
js_prompt_pyqt_version = 0x050700
else:
js_prompt_pyqt_version = 0x050300
item.add_marker(pytest.mark.skipif(
PYQT_VERSION <= js_prompt_pyqt_version,
reason='JS prompts are not supported with this PyQt version'))
if deselected:
deselected_items.append(item)
else:
remaining_items.append(item)
config.hook.pytest_deselected(items=deselected_items)
items[:] = remaining_items
def pytest_ignore_collect(path):
"""Ignore BDD tests if we're unable to run them."""
skip_bdd = hasattr(sys, 'frozen')
rel_path = path.relto(os.path.dirname(__file__))
return rel_path == os.path.join('end2end', 'features') and skip_bdd
@pytest.fixture(scope='session')
def qapp(qapp):
"""Change the name of the QApplication instance."""
qapp.setApplicationName('qute_test')
return qapp
def pytest_addoption(parser):
parser.addoption('--qute-delay', action='store', default=0, type=int,
help="Delay between qutebrowser commands.")
parser.addoption('--qute-profile-subprocs', action='store_true',
default=False, help="Run cProfile for subprocesses.")
parser.addoption('--qute-bdd-webengine', action='store_true',
help='Use QtWebEngine for BDD tests')
def pytest_configure(config):
webengine_arg = config.getoption('--qute-bdd-webengine')
webengine_env = os.environ.get('QUTE_BDD_WEBENGINE', '')
config.webengine = bool(webengine_arg or webengine_env)
# Fail early if QtWebEngine is not available
# pylint: disable=unused-variable
if config.webengine:
import PyQt5.QtWebEngineWidgets
@pytest.fixture(scope='session', autouse=True)
def check_display(request):
if (not request.config.getoption('--no-xvfb') and
'QUTE_BUILDBOT' in os.environ and
request.config.xvfb is not None):
raise Exception("Xvfb is running on buildbot!")
if sys.platform == 'linux' and not os.environ.get('DISPLAY', ''):
raise Exception("No display and no Xvfb available!")
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
"""Make test information available in fixtures.
See http://pytest.org/latest/example/simple.html#making-test-result-information-available-in-fixtures
"""
outcome = yield
rep = outcome.get_result()
setattr(item, "rep_" + rep.when, rep)
|
airbnb/airflow
|
tests/api_connexion/schemas/test_event_log_schema.py
|
Python
|
apache-2.0
| 4,612
| 0.000867
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from airflow import DAG
from airflow.api_connexion.schemas.event_log_schema import (
EventLogCollection,
event_log_collection_schema,
event_log_schema,
)
from airflow.models import Log, TaskInstance
from airflow.operators.dummy import DummyOperator
from airflow.utils import timezone
from airflow.utils.session import create_session, provide_session
class TestEventLogSchemaBase(unittest.TestCase):
def setUp(self) -> None:
with create_session() as session:
session.query(Log).delete()
self.default_time = "2020-06-09T13:00:00+00:00"
self.default_time2 = '2020-06-11T07:00:00+00:00'
def tearDown(self) -> None:
with create_session() as session:
session.query(Log).delete()
def _create_task_instance(self):
with DAG(
'TEST_DAG_ID',
start_date=timezone.parse(self.default_time),
end_date=timezone.parse(self.default_time),
):
op1 = DummyOperator(task_id="TEST_TASK_ID", owner="airflow")
return TaskInstance(task=op1, execution_date=timezone.parse(self.default_time))
class TestEventLogSchema(TestEventLogSchemaBase):
@provide_session
def test_serialize(self, session):
event_log_model = Log(event="TEST_EVENT", task_instance=self._create_task_instance())
session.add(event_log_model)
session.commit()
event_log_model.dttm = timezone.parse(self.default_time)
log_model = session.query(Log).first()
deserialized_log = event_log_schema.dump(log_model)
self.assertEqual(
deserialized_log,
{
"event_log_id": event_log_model.id,
"event": "TEST_EVENT",
"dag_id": "TEST_DAG_ID",
"task_id": "TEST_TASK_ID",
"execution_date": self.default_time,
"owner": 'airflow',
"when": self.default_time,
"extra": None,
},
)
class TestEventLogCollection(TestEventLogSchemaBase):
@provide_session
def test_serialize(self, session):
event_log_model_1 = Log(event="TEST_EVENT_1", task_instance=self._create_task_instance())
event_log_model_2 = Log(event="TEST_EVENT_2", task_instance=self._create_task_instance())
event_logs = [event_log_model_1, event_log_model_2]
session.add_all(event_logs)
session.commit()
event_log_model_1.dttm = timezone.parse(self.default_time)
event_log_model_2.dttm = timezone.parse(self.default_time2)
instance = EventLogCollection(event_logs=event_logs, total_entries=2)
deserialized_event_logs = event_log_collection_schema.dump(instance)
self.assertEqual(
deserialized_event_logs,
{
"event_logs": [
{
"event_log_id": event_log_model_1.id,
"event": "TEST_EVENT_1",
"dag_id": "TEST_DAG_ID",
"task_id": "TEST_TASK_ID",
"execution_date": self.default_time,
"owner": 'airflow',
"when": self.default_time,
"extra": None,
},
{
"event_log_id": event_log_model_2.id,
|
"event": "TEST_EVENT_2",
"dag_id": "TEST_DAG_ID",
"task_id": "TEST_TASK_ID",
"execution_date": self.default
|
_time,
"owner": 'airflow',
"when": self.default_time2,
"extra": None,
},
],
"total_entries": 2,
},
)
|
melviso/phycpp
|
beatle/activity/models/ui/dlg/cc/Member.py
|
Python
|
gpl-2.0
| 14,333
| 0.001535
|
"""Subclass of NewMember, which is generated by wxFormBuilder."""
import copy
import wx
from beatle import model
from beatle.lib import wxx
from beatle.activity.models.ui import ui as ui
# Implementing NewMember
class MemberDialog(ui.NewMember):
"""
    This dialog allows setting up a data member of a class
    or struct. You can set a default value for use
    in constructors or as the initialization of static
    members.
"""
@wxx.SetInfo(__doc__)
def __init__(self, parent, container):
"""Dialog initialization"""
import beatle.app.resources as rc
super(MemberDialog, self).__init__(parent)
self._container = container
scoped = lambda x: (hasattr(x, 'scoped') and x.scoped) or x.name
self._types = dict([scoped(x), x] for x in container.types)
self._autoname = '' # proposed name
# add types but not on-the-fly template type
self.m_type.AppendItems([x for x in self._types.keys() if x != '@'])
# we need to add types from template nested classes
classes = container.nested_classes
self._nested_template_types = []
for clase in classes:
for x in clase._template_types:
if x not in self._nested_template_types:
self._nested_template_types.append(scoped(x))
if len(self._nested_template_types) > 0:
self.m_type.AppendItems(self._nested_template_types)
self.choiceStr = ""
self.m_type.SetFocus()
icon = wx.EmptyIcon()
icon.CopyFromBitmap(rc.GetBitmap("member"))
self.SetIcon(icon)
self._register_keybindings()
def AutoName(self):
"""Suggest the argument name, based on type"""
iSel = self.m_type.GetCurrentSelection()
if iSel == wx.NOT_FOUND:
return
s = self.m_name.GetValue()
if self._autoname != s and s:
return
kwargs = {
'const': (self.m_const.IsChecked() and 'c') or '',
'reference': (self.m_reference.IsChecked() and 'r') or '',
'ptr': (self.m_ptr.IsChecked() and 'p') or '',
'pptr': (self.m_pptr.IsChecked() and 'p') or '',
'constptr': (self.m_constptr.IsChecked() and 'c') or '',
'array': (self.m_array.IsChecked() and 'a') or '',
'typename': self.m_type.GetString(iSel).replace('::', '_'),
}
#volatile = (self.m_volatile.IsChecked() and 'v') or ''
self._autoname = '{const}{reference}{ptr}{pptr}{constptr}{array}{typename}'.format(
**kwargs)
self.m_name.SetValue(self._autoname)
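    # Illustrative example (values assumed): with "const" and "reference" checked and the
    # type "std::string" selected, AutoName() proposes the member name "crstd_string".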
def _register_keybindings(self):
"""Register accelerators for static labels that must change the focus"""
newId_t = wx.NewId()
newId_n = wx.NewId()
newId_a = wx.NewId()
newId_d = wx.NewId()
newId_o = wx.NewId()
self.Bind(wx.EVT_MENU, self.OnActivateType, id=newId_t)
self.Bind(wx.EVT_MENU, self.OnActivateName, id=newId_n)
self.Bind(wx.EVT_MENU, self.OnActivateAccess, id=newId_a)
self.Bind(wx.EVT_MENU, self.OnActivateDefault, id=newId_d)
self.Bind(wx.EVT_MENU, self.OnActivateNotes, id=newId_o)
aTable = wx.AcceleratorTable([
wx.AcceleratorEntry(wx.ACCEL_ALT, ord('T'), newId_t),
wx.AcceleratorEntry(wx.ACCEL_ALT, ord('N'), newId_n),
wx.AcceleratorEntry(wx.ACCEL_ALT, ord('A'), newId_a),
wx.AcceleratorEntry(wx.ACCEL_ALT, ord('D'), newId_d),
wx.AcceleratorEntry(wx.ACCEL_ALT, ord('O'), newId_o)
])
self.SetAcceleratorTable(aTable)
def OnActivateType(self, event):
"""activate type combo"""
self.m_type.SetFocus()
def OnActivateName(self, event):
"""activate name entry"""
self.m_name.SetFocus()
def OnActivateAccess(self, event):
"""activate acces combo"""
self.m_choice2.SetFocus()
def OnActivateDefault(self, event):
"""activate default value"""
self.m_textCtrl8.SetFocus()
def OnActivateNotes(self, event):
"""Activate notes"""
self.m_richText1.SetFocus()
def OnEnterName(self, event):
"""This event is generated when the enter is pressed in the
name entry"""
self.m_choice2.SetFocus()
def OnTypeChanged(self, event):
"""This event happens when the return type is changed. The main goal
of this callback is handling template types for argument specification"""
iSel = self.m_type.GetCurrentSelection()
_type = self._types.get(self.m_type.GetString(iSel), None)
template_args = False
if _type is not None:
if _type._template is not None:
template_args = True
if template_args is True:
self.m_staticText67.Enable(True)
self.m_template_args.Enable(True)
self.m_staticText68.Enable(True)
else:
self.m_staticText67.Enable(False)
self.m_template_args.Enable(False)
self.m_staticText68.Enable(False)
self.m_template_args.SetValue('')
self.AutoName()
def CopyAttributes(self, member):
"""Get the atributes"""
member._name = self._name
member._typei = copy.copy(self._typei)
member._access = self._access
member._static = self._static
member._default = self._default
member._volatile = self._volatile
member._mutable = self._mutable
member._bitField = self._bitField
if self._bitField:
member._bitFieldSize = self._bitFieldSize
member._note = self._note
member.inner_class.AutoInit()
def SetAttributes(self, member):
"""Set the attributes"""
self.m_name.SetValue(member._name)
ti = member._typei
iSel = self.m_type.FindString(ti.scoped)
self.m_type.SetSelection(iSel)
iSel = self.m_choice2.FindString(member._access)
self.m_choice2.SetSelection(iSel)
self.m_checkBox105.SetValue(member._static)
self.m_textCtrl8.SetValue(member._default)
self.m_checkBox49.SetValue(member._volatile)
self.m_checkBox48.SetValue(member._mutable)
self.m_const.SetValue(ti._const)
self.m_ptr.SetValue(ti._ptr)
self.m_reference.SetValue(ti._ref)
self.m_pptr.SetValue(ti._ptr_to_ptr)
self.m_constptr.SetValue(ti._const_ptr)
self.m_array.SetValue(ti._array)
if ti._array is True:
self.
|
m_textCtrl7.Show(True)
self.m_textCtrl7.Enable(True)
self.m_textCtrl7.SetValue(str(ti._array_size))
else:
self.m_textCtrl7.SetValue('0')
self.m_che
|
ckBox51.SetValue(member._bitField)
if ti._type_args is not None:
self.m_staticText67.Enable(True)
self.m_template_args.Enable(True)
self.m_staticText68.Enable(True)
self.m_template_args.SetValue(ti._type_args)
if member._bitField is True:
self.m_textCtrl39.Show(True)
self.m_textCtrl39.Enable(True)
self.m_textCtrl39.SetValue(str(member._bitFieldSize))
self.m_richText1.SetValue(member._note)
self.SetTitle("Edit member")
def Validate(self):
"""Dialog validation"""
self._name = self.m_name.GetValue()
if len(self._name) == 0:
wx.MessageBox("Member name must not be empty", "Error",
wx.OK | wx.CENTER | wx.ICON_ERROR, self)
return False
iSel = self.m_type.GetCurrentSelection()
if iSel == wx.NOT_FOUND:
wx.MessageBox("Invalid type", "Error",
wx.OK | wx.CENTER | wx.ICON_ERROR, self)
return False
typename = self.m_type.GetString(iSel)
iSel = self.m_choice2.GetCurrentSelection()
if iSel == wx.NOT_FOUND:
wx.MessageBox("Invalid access", "Error",
wx.OK | wx.CENTER | wx.ICON_ERROR, self)
return False
self._static = self.m_checkBox105.IsChecked()
self._access = self.m_choice2.GetString(iSel)
|
sxjscience/tvm
|
python/tvm/micro/session.py
|
Python
|
apache-2.0
| 4,567
| 0.002847
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Defines a top-level glue class that operates the Transport and Flasher classes."""
import logging
import time
from .._ffi import get_global_func
from ..contrib import graph_runtime
from ..rpc import RPCSession
from .transport import TransportLogger
try:
from .base import _rpc_connect
except ImportError:
raise ImportError("micro tvm is not enabled. Set USE_MICRO to ON in config.cmake")
class Session:
"""MicroTVM Device Session
Parameters
----------
config : dict
configuration for this session (as generated by
`tvm.micro.device.host.default_config()`, for example)
Example
--------
.. code-block:: python
c_mod = ... # some module generated with "c" as the target
dev_config = micro.device.arm.stm32f746xx.default_config('127.0.0.1', 6666)
with tvm.micro.Session(dev_config) as sess:
micro_mod = sess.create_micro_mod(c_mod)
"""
def __init__(
self, binary=None, flasher=None, transport_context_manager=None, session_name="micro-rpc"
):
"""Configure a new session.
Parameters
----------
binary : MicroBinary
If given, `flasher` must also be given. During session initialization, this binary will
be flashed to the device before the transport is created.
flasher : Flasher
If given, `binary` must also be given. Used to flash `binary` during session
initialization.
transport_context_manager : ContextManager[transport.Transport]
If given, `flasher` and `binary` should not be given. On entry, this context manager
            should establish a transport between this TVM instance and the device.
session_name : str
Name of the session, used for debugging.
"""
self.binary = binary
self.flasher = flasher
self.transport_context_manager = transport_context_manager
self.session_name = session_name
self._rpc = None
self._graph_runtime = None
def get_system_lib(self):
return self._rpc.get_function("runtime.SystemLib")()
d
|
ef __enter__(self):
"""Initialize this session and establish an RPC session with the on-device RPC server.
Returns
-------
Session :
Returns self.
"""
if self.flasher is not None:
self.transport_context_manager = self.flasher.flash(self.binary)
time.sleep(3.0)
self.transport = TransportLogger(
self.session_name, self.transport_context_manager, level=logging.INFO
).__enter
|
__()
self._rpc = RPCSession(
_rpc_connect(self.session_name, self.transport.write, self.transport.read)
)
self.context = self._rpc.cpu(0)
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
"""Tear down this session and associated RPC session resources."""
self.transport.__exit__(exc_type, exc_value, exc_traceback)
def create_local_graph_runtime(graph_json_str, mod, ctx):
"""Create a local graph runtime driving execution on the remote CPU context given.
Parameters
----------
graph_json_str : str
A string containing the graph representation.
mod : tvm.runtime.Module
The remote module containing functions in graph_json_str.
ctx : tvm.Context
The remote CPU execution context.
Returns
-------
tvm.contrib.GraphRuntime :
A local graph runtime instance that executes on the remote device.
"""
device_type_id = [ctx.device_type, ctx.device_id]
fcreate = get_global_func("tvm.graph_runtime.create")
return graph_runtime.GraphModule(fcreate(graph_json_str, mod, *device_type_id))
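# Illustrative usage (a sketch; `sess` is assumed to be an established Session and
# `graph_json_str` a graph produced by the compiler):
#
#   mod = sess.get_system_lib()
#   runtime = create_local_graph_runtime(graph_json_str, mod, sess.context)
#   runtime.run()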
|
vmayoral/basic_reinforcement_learning
|
tutorial5/tests/theano_mnist_mlp.py
|
Python
|
gpl-3.0
| 14,310
| 0.001048
|
"""
This tutorial introduces the multilayer perceptron using Theano.
A multilayer perceptron is a logistic regressor in which, instead of feeding
the input directly to the logistic regression, you insert an intermediate
layer, called the hidden layer, that has a nonlinear activation function
(usually tanh or sigmoid). One can stack many such hidden layers, making the
architecture deep. The tutorial will also tackle the problem of MNIST digit
classification.
.. math::
f(x) = G( b^{(2)} + W^{(2)}( s( b^{(1)} + W^{(1)} x))),
References:
- textbooks: "Pattern Recognition and Machine Learning" -
Christopher M. Bishop, section 5
"""
from __future__ import print_function
__docformat__ = 'restructedtext en'
import os
import sys
import timeit
import numpy
import theano
import theano.tensor as T
from logistic_sgd import LogisticRegression, load_data
# start-snippet-1
class HiddenLayer(object):
def __init__(self, rng, input, n_in, n_out, W=None, b=None,
activation=T.tanh):
"""
Typical hidden layer of a MLP: units are fully-connected and have
sigmoidal activation function. Weight matrix W is of shape (n_in,n_out)
and the bias vector b is of shape (n_out,).
NOTE : The nonlinearity used here is tanh
Hidden unit activation is given by: tanh(dot(input,W) + b)
:type rng: numpy.random.RandomState
:param rng: a random number generator used to initialize weights
:type input: theano.tensor.dmatrix
:param input: a symbolic tensor of shape (n_examples, n_in)
:type n_in: int
:param n_in: dimensionality of input
:type n_out: int
:param n_out: number of hidden units
:type activation: theano.Op or function
:param activation: Non linearity to be applied in the hidden
layer
"""
self.input = input
# end-snippet-1
        # `W` is initialized with `W_values`, which is uniformly sampled
        # from sqrt(-6./(n_in+n_hidden)) and sqrt(6./(n_in+n_hidden))
        # for the tanh activation function.
        # The output of uniform is converted using asarray to dtype
        # theano.config.floatX so that the code is runnable on GPU.
        # Note : optimal initialization of weights is dependent on the
        # activation function used (among other things).
        # For example, results presented in [Xavier10] suggest that you
        # should use 4 times larger initial weights for sigmoid
        # compared to tanh.
        # We have no info for other functions, so we use the same as
        # tanh.
if W is None:
W_values = numpy.asarray(
rng.uniform(
low=-numpy.sqrt(6. / (n_in + n_out)),
high=numpy.sqrt(6. / (n_in + n_out)),
size=(n_in, n_out)
),
dtype=theano.config.floatX
)
if activation == theano.tensor.nnet.sigmoid:
W_values *= 4
W = theano.shared(value=W_values, name='W', borrow=True)
if b is None:
b_values = numpy.zeros((n_out,), dtype=theano.config.floatX)
b = theano.shared(value=b_values, name='b', borrow=True)
self.W = W
self.b = b
lin_output = T.dot(input, self.W) + self.b
self.output = (
lin_output if activation is None
else activation(lin_output)
)
# parameters of the model
self.params = [self.W, self.b]
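# Illustrative construction (shapes assumed): given rng = numpy.random.RandomState(1234)
# and a symbolic matrix x = T.matrix('x'), HiddenLayer(rng, input=x, n_in=28 * 28,
# n_out=500) builds a tanh layer mapping 784-dimensional inputs to 500 hidden units.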
# start-snippet-2
class MLP(object):
"""Multi-Layer Perceptron Class
    A multilayer perceptron is a feedforward artificial neural network model
    that has one or more layers of hidden units with nonlinear activations.
    Intermediate layers usually use tanh or the sigmoid function as their
    activation (defined here by a ``HiddenLayer`` class), while the
    top layer is a softmax layer (defined here by a ``LogisticRegression``
    class).
"""
def __init__(self, rng, input, n_in, n_hidden, n_out):
"""Initialize the parameters for the multilayer perceptron
:type rng: numpy.random.RandomState
:param rng: a random number generator used to initialize weights
:type input: theano.tensor.TensorType
:param input: symbolic variable that describes the input of the
architecture (one minibatch)
:type n_in: int
:param n_in: number of input units, the dimension of the space in
which the datapoints lie
:type n_hidden: int
:param n_hidden: number of hidden units
:type n_out: int
:param n_out: number of output units, the dimension of the space in
which the labels lie
"""
# Since we are dealing with a one hidden layer MLP, this will translate
# into a HiddenLayer with a tanh activation function connected to the
# LogisticRegression layer; the activation function can be replaced by
# sigmoid or any other nonlinear function
self.hiddenLayer = HiddenLayer(
rng=rng,
input=input,
n_in=n_in,
n_out=n_hidden,
activatio
|
n=T.tanh
)
# The logistic regression layer gets as input the hidden units
# of the hidden lay
|
er
self.logRegressionLayer = LogisticRegression(
input=self.hiddenLayer.output,
n_in=n_hidden,
n_out=n_out
)
# end-snippet-2 start-snippet-3
# L1 norm ; one regularization option is to enforce L1 norm to
# be small
self.L1 = (
abs(self.hiddenLayer.W).sum()
+ abs(self.logRegressionLayer.W).sum()
)
# square of L2 norm ; one regularization option is to enforce
# square of L2 norm to be small
self.L2_sqr = (
(self.hiddenLayer.W ** 2).sum()
+ (self.logRegressionLayer.W ** 2).sum()
)
# negative log likelihood of the MLP is given by the negative
# log likelihood of the output of the model, computed in the
# logistic regression layer
self.negative_log_likelihood = (
self.logRegressionLayer.negative_log_likelihood
)
# same holds for the function computing the number of errors
self.errors = self.logRegressionLayer.errors
        # the parameters of the model are the parameters of the two layers it is
# made out of
self.params = self.hiddenLayer.params + self.logRegressionLayer.params
# end-snippet-3
# keep track of model input
self.input = input
def test_mlp(learning_rate=0.01, L1_reg=0.00, L2_reg=0.0001, n_epochs=1000,
dataset='mnist.pkl.gz', batch_size=200, n_hidden=100):
"""
Demonstrate stochastic gradient descent optimization for a multilayer
perceptron
This is demonstrated on MNIST.
:type learning_rate: float
:param learning_rate: learning rate used (factor for the stochastic
    gradient)
:type L1_reg: float
:param L1_reg: L1-norm's weight when added to the cost (see
regularization)
:type L2_reg: float
:param L2_reg: L2-norm's weight when added to the cost (see
regularization)
:type n_epochs: int
:param n_epochs: maximal number of epochs to run the optimizer
:type dataset: string
:param dataset: the path of the MNIST dataset file from
http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz
"""
datasets = load_data(dataset)
train_set_x, train_set_y = datasets[0]
valid_set_x, valid_set_y = datasets[1]
test_set_x, test_set_y = datasets[2]
# compute number of minibatches for training, validation and testing
n_train_batches = train_set_x.get_value(borrow=True).shape[0] // batch_size
n_valid_batches = valid_set_x.get_value(borrow=True).shape[0] // batch_size
n_test_batches = test_set_x.get_value(borrow=True).shape[0] // batch_size
######################
# BUILD ACTUAL MODEL #
######################
print('... bu
|
cjh1/tomviz
|
acquisition/tests/conftest.py
|
Python
|
bsd-3-clause
| 1,440
| 0
|
import pytest
import requests
import time
from threading import Thread
from bottle import default_app, WSGIRefServer
from tomviz.acquisition import server
class Server(Thread):
def __init__(self, dev=False, port=9999):
super(Server, self).__init__()
self.host = 'localhost'
self.port = port
self.base_url = 'http://%s:%d' % (self.host, self.port)
|
self.url = '%s/acquisition' % self.base_url
self.dev = dev
self._server = WSGIRefServer(host=self.host, port=self.port)
def run(self):
self
|
.setup()
self._server.run(app=default_app())
def start(self):
super(Server, self).start()
# Wait for bottle to start
while True:
try:
requests.get(self.base_url)
break
except requests.ConnectionError:
time.sleep(0.1)
def setup(self, adapter=None):
server.setup(dev=self.dev, adapter=adapter)
def stop(self):
self._server.srv.shutdown()
# Force the socket to close so we can reuse the same port
self._server.srv.socket.close()
@pytest.fixture(scope="module")
def acquisition_server():
srv = Server()
srv.start()
yield srv
srv.stop()
srv.join()
@pytest.fixture(scope="module")
def acquisition_dev_server():
srv = Server(dev=True, port=9998)
srv.start()
yield srv
srv.stop()
srv.join()
|
google-research/language
|
language/compgen/nqg/tasks/compare_predictions.py
|
Python
|
apache-2.0
| 1,653
| 0.008469
|
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Compare a txt file of predictions with gold targets from a TSV file."""
from absl import app
from absl import flags
from language.compgen.nqg.tasks import tsv_utils
from tensorflow.io import gfile
FLAGS = flags.FLAGS
flags.DE
|
FINE_string("gold", "", "tsv file containing gold targets.")
flags.DEFINE_string("predictions", "", "txt file with predicted targets.")
def main(unused_argv):
gold_examples = tsv_utils.read_tsv(FLAGS.gold)
preds = []
with gfile.GFile(FLAGS.predictions, "r") as f:
for line in f:
preds.append(line.rstrip())
correct = 0
incorrect = 0
for pred, gold_example in zip(preds, gold_examples):
|
if pred == gold_example[1]:
correct += 1
else:
incorrect += 1
print("Incorrect for example %s.\nTarget: %s\nPrediction: %s" %
(gold_example[0], gold_example[1], pred))
print("correct: %s" % correct)
print("incorrect: %s" % incorrect)
print("pct: %s" % str(float(correct) / float(correct + incorrect)))
if __name__ == "__main__":
app.run(main)
|
noyeitan/cubes
|
cubes/common.py
|
Python
|
mit
| 9,653
| 0.002695
|
# -*- encoding: utf-8 -*-
"""Utility functions for computing combinations of dimensions and hierarchy
levels"""
from __future__ import absolute_import
import itertools
import sys
import re
import os.path
import decimal
import datetime
import json
from collections import OrderedDict
from .errors import *
from . import compat
__all__ = [
"IgnoringDictionary",
"MissingPackage",
"localize_common",
"localize_attributes",
"get_localizable_attributes",
"decamelize",
"to_identifier",
"assert_instance",
"assert_all_instances",
"read_json_file",
"sorted_dependencies",
]
class IgnoringDictionary(OrderedDict):
"""Simple dictionary extension that will ignore any keys of which values
are empty (None/False)"""
def __setitem__(self, key, value):
if value is not None:
super(IgnoringDictionary, self).__setitem__(key, value)
def set(self, key, value):
"""Sets `value` for `key` even if value is null."""
super(IgnoringDictionary, self).__setitem__(key, value)
def __repr__(self):
items = []
for key, value in self.items():
item = '%s: %s' % (repr(key), repr(value))
items.append(item)
return "{%s}" % ", ".join(items)
def assert_instance(obj, class_, label):
"""Raises ArgumentError when `obj` is not instance of `cls`"""
if not isinstance(obj, class_):
        raise ModelInconsistencyError("%s should be subclass of %s, "
"provided: %s" % (label,
class_.__name__,
type(obj).__name__))
def assert_all_instances(list_, class_, label="object"):
"""Raises ArgumentError when objects in `list_` are not instances of
`cls`"""
for obj in list_ or []:
        assert_instance(obj, class_, label=label)
class MissingPackageError(Exception):
"""Exception raised when encountered a missing package."""
pass
class MissingPackage(object):
"""Bogus class to handle missing optional packages - packages that are not
necessarily required for Cubes, but are needed for certain features."""
def __init__(self, package, feature = None, source = None, comment = None):
self.package = package
self.feature = feature
self.source = source
self.comment = comment
def __call__(self, *args, **kwargs):
self._fail()
def __getattr__(self, name):
self._fail()
def _fail(self):
if self.feature:
use = " to be able to use: %s" % self.feature
else:
use = ""
if self.source:
source = " from %s" % self.source
else:
source = ""
if self.comment:
comment = ". %s" % self.comment
else:
comment = ""
raise MissingPackageError("Optional package '%s' is not installed. "
"Please install the package%s%s%s" %
(self.package, source, use, comment))
def optional_import(name, feature=None, source=None, comment=None):
"""Optionally import package `name`. If package does not exist, import a
placeholder object, that raises an exception with more detailed
description about the missing package."""
try:
return __import__(name)
except ImportError:
return MissingPackage(name, feature, source, comment)
def expand_dictionary(record, separator = '.'):
"""Return expanded dictionary: treat keys are paths separated by
|
`separator`, create sub-dictionaries as necessary"""
result = {}
for key, value in record.items():
current = result
path = key.split(separator)
for part in path[:-1]:
if part not in current:
current[part] = {}
current = current[part]
current[path[-1]] = value
return result
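# Illustrative example (input assumed):
#   expand_dictionary({"geo.country": "US", "amount": 10})
#   returns {"geo": {"country": "US"}, "amount": 10}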
def localize_common(obj, trans):
"""Localize common attributes: label and de
|
scription"""
if "label" in trans:
obj.label = trans["label"]
if "description" in trans:
obj.description = trans["description"]
def localize_attributes(attribs, translations):
"""Localize list of attributes. `translations` should be a dictionary with
keys as attribute names, values are dictionaries with localizable
attribute metadata, such as ``label`` or ``description``."""
for (name, atrans) in translations.items():
attrib = attribs[name]
localize_common(attrib, atrans)
def get_localizable_attributes(obj):
"""Returns a dictionary with localizable attributes of `obj`."""
# FIXME: use some kind of class attribute to get list of localizable attributes
locale = {}
try:
if obj.label:
locale["label"] = obj.label
except:
pass
try:
if obj.description:
locale["description"] = obj.description
except:
pass
return locale
def decamelize(name):
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1 \2', name)
return re.sub('([a-z0-9])([A-Z])', r'\1 \2', s1)
def to_identifier(name):
return re.sub(r' ', r'_', name).lower()
def to_label(name, capitalize=True):
"""Converts `name` into label by replacing underscores by spaces. If
`capitalize` is ``True`` (default) then the first letter of the label is
capitalized."""
label = name.replace("_", " ")
if capitalize:
label = label.capitalize()
return label
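# Illustrative examples (inputs assumed):
#   decamelize("totalSalesAmount")  -> "total Sales Amount"
#   to_identifier("Total Sales")    -> "total_sales"
#   to_label("total_sales")         -> "Total sales"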
def coalesce_option_value(value, value_type, label=None):
"""Convert string into an object value of `value_type`. The type might be:
`string` (no conversion), `integer`, `float`, `list` – comma separated
list of strings.
"""
value_type = value_type.lower()
try:
if value_type in ('string', 'str'):
return_value = str(value)
elif value_type == 'list':
if isinstance(value, compat.string_type):
return_value = value.split(",")
else:
return_value = list(value)
elif value_type == "float":
return_value = float(value)
elif value_type in ["integer", "int"]:
return_value = int(value)
elif value_type in ["bool", "boolean"]:
if not value:
return_value = False
elif isinstance(value, compat.string_type):
return_value = value.lower() in ["1", "true", "yes", "on"]
else:
return_value = bool(value)
else:
raise ArgumentError("Unknown option value type %s" % value_type)
except ValueError:
if label:
label = "parameter %s " % label
else:
label = ""
raise ArgumentError("Unable to convert %svalue '%s' into type %s" %
                            (label, value, value_type))
return return_value
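# Illustrative examples (inputs assumed):
#   coalesce_option_value("10", "integer")  -> 10
#   coalesce_option_value("a,b,c", "list")  -> ["a", "b", "c"]
#   coalesce_option_value("yes", "bool")    -> True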
def coalesce_options(options, types):
"""Coalesce `options` dictionary according to types dictionary. Keys in
`types` refer to keys in `options`, values of `types` are value types:
string, list, float, integer or bool."""
out = {}
for key, value in options.items():
if key in types:
out[key] = coalesce_option_value(value, types[key], key)
else:
out[key] = value
return out
def read_json_file(path, kind=None):
"""Read a JSON from `path`. This is convenience function that provides
more descriptive exception handling."""
kind = "%s " % str(kind) if kind else ""
if not os.path.exists(path):
raise ConfigurationError("Can not find %sfile '%s'"
% (kind, path))
try:
f = compat.open_unicode(path)
except IOError:
raise ConfigurationError("Can not open %sfile '%s'"
% (kind, path))
try:
content = json.load(f)
except ValueError as e:
raise SyntaxError("Syntax error in %sfile %s: %s"
% (kind, path, str(e)))
    finally:
        f.close()
    return content
|
semkiv/heppy_fcc
|
particles/p4.py
|
Python
|
gpl-3.0
| 1,250
| 0.0152
|
import math
class P4(object):
def p4(self):
'''4-momentum, px, py, pz, E'''
return self._tlv
def p3(self):
'''3-momentum px, py, pz'''
return self._tlv.Vect()
def e(self):
'''energy'''
return self._tlv.E()
def pt(self):
'''transverse momentum (magnitude of p3 in transverse plane)'''
return self._tlv.Pt()
def thet
|
a(self):
'''angle w/r to transverse plane'''
return math.pi/2 - self._tlv.Theta()
def eta(self):
'''pseudo-rapidity (-ln(tan self._tlv.Theta()/2)).
theta = 0 -> eta = +inf
theta = pi/2 -> 0
theta = pi -> eta = -inf
'''
return self._tlv.Eta()
def phi(self):
        '''azimuthal angle (
|
from x axis, in the transverse plane)'''
return self._tlv.Phi()
def m(self):
'''mass'''
return self._tlv.M()
def __str__(self):
        return 'pt = {pt:5.1f}, e = {e:5.1f}, eta = {eta:5.2f}, theta = {theta:5.2f}, phi = {phi:5.2f}, mass = {m:5.2f}'.format(
pt = self.pt(),
e = self.e(),
eta = self.eta(),
theta = self.theta(),
phi = self.phi(),
m = self.m()
)
|
hnakamur/django-bootstrap-table-example
|
project/apiv2/urls.py
|
Python
|
mit
| 430
| 0.002326
|
from django.conf.
|
urls import url
from .viewsets import BookmarkViewSet
bookmark_list = BookmarkViewSet.as_view({
'get': 'list',
'post': 'create'
})
bookmark_detail = BookmarkViewSet.as_view({
'get': 'retrieve',
'patch': 'update',
'delete': 'destroy'
})
urlpatterns = [
url(r'^bookmarks/$', bookmark_list, name='bookmarks'),
url(r'^bookmarks/(?P<pk>[0-9]+)/$', bookmark_detail
|
, name='bookmark'),
]
|
dagwieers/ansible
|
lib/ansible/modules/remote_management/redfish/idrac_redfish_command.py
|
Python
|
gpl-3.0
| 5,884
| 0.00119
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2018 Dell EMC Inc.
# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: idrac_redfish_command
version_added: "2.8"
short_description: Manages Out-Of-Band controllers using iDRAC OEM Redfish APIs
description:
- Builds Redfish URIs locally and sends them to remote OOB controllers to
perform an action.
- For use with Dell iDRAC operations that require Redfish OEM extensions
options:
category:
required: true
description:
- Category to execute on OOB controller
command:
required: true
description:
- List of commands to execute on OOB controller
baseuri:
required: true
description:
- Base URI of OOB controller
username:
required: true
description:
- User for authentication with OOB controller
password:
required: true
description:
- Password for authentication with OOB controller
timeout:
description:
- Timeout in seconds for URL requests to OOB controller
default: 10
type: int
version_added: '2.8'
author: "Jose Delarosa (@jose-delaros
|
a)"
|
'''
EXAMPLES = '''
- name: Create BIOS configuration job (schedule BIOS setting update)
idrac_redfish_command:
category: Systems
command: CreateBiosConfigJob
baseuri: "{{ baseuri }}"
username: "{{ username }}"
password: "{{ password }}"
'''
RETURN = '''
msg:
description: Message with action result or error description
returned: always
type: str
sample: "Action was successful"
'''
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.redfish_utils import RedfishUtils, HEADERS
from ansible.module_utils._text import to_native
class IdracRedfishUtils(RedfishUtils):
def create_bios_config_job(self):
result = {}
key = "Bios"
jobs = "Jobs"
# Search for 'key' entry and extract URI from it
response = self.get_request(self.root_uri + self.systems_uris[0])
if response['ret'] is False:
return response
result['ret'] = True
data = response['data']
if key not in data:
return {'ret': False, 'msg': "Key %s not found" % key}
bios_uri = data[key]["@odata.id"]
# Extract proper URI
response = self.get_request(self.root_uri + bios_uri)
if response['ret'] is False:
return response
result['ret'] = True
data = response['data']
set_bios_attr_uri = data["@Redfish.Settings"]["SettingsObject"][
"@odata.id"]
payload = {"TargetSettingsURI": set_bios_attr_uri}
response = self.post_request(
self.root_uri + self.manager_uri + "/" + jobs,
payload, HEADERS)
if response['ret'] is False:
return response
response_output = response['resp'].__dict__
job_id = response_output["headers"]["Location"]
job_id = re.search("JID_.+", job_id).group()
# Currently not passing job_id back to user but patch is coming
return {'ret': True, 'msg': "Config job %s created" % job_id}
CATEGORY_COMMANDS_ALL = {
"Systems": ["CreateBiosConfigJob"],
"Accounts": [],
"Manager": []
}
def main():
result = {}
module = AnsibleModule(
argument_spec=dict(
category=dict(required=True),
command=dict(required=True, type='list'),
baseuri=dict(required=True),
username=dict(required=True),
password=dict(required=True, no_log=True),
timeout=dict(type='int', default=10)
),
supports_check_mode=False
)
category = module.params['category']
command_list = module.params['command']
# admin credentials used for authentication
creds = {'user': module.params['username'],
'pswd': module.params['password']}
# timeout
timeout = module.params['timeout']
# Build root URI
root_uri = "https://" + module.params['baseuri']
rf_uri = "/redfish/v1/"
rf_utils = IdracRedfishUtils(creds, root_uri, timeout)
# Check that Category is valid
if category not in CATEGORY_COMMANDS_ALL:
module.fail_json(msg=to_native("Invalid Category '%s'. Valid Categories = %s" % (category, CATEGORY_COMMANDS_ALL.keys())))
# Check that all commands are valid
for cmd in command_list:
# Fail if even one command given is invalid
if cmd not in CATEGORY_COMMANDS_ALL[category]:
module.fail_json(msg=to_native("Invalid Command '%s'. Valid Commands = %s" % (cmd, CATEGORY_COMMANDS_ALL[category])))
# Organize by Categories / Commands
if category == "Systems":
# execute only if we find a System resource
result = rf_utils._find_systems_resource(rf_uri)
if result['ret'] is False:
module.fail_json(msg=to_native(result['msg']))
for command in command_list:
if command == "CreateBiosConfigJob":
# execute only if we find a Managers resource
result = rf_utils._find_managers_resource(rf_uri)
if result['ret'] is False:
module.fail_json(msg=to_native(result['msg']))
result = rf_utils.create_bios_config_job()
# Return data back or fail with proper message
if result['ret'] is True:
del result['ret']
module.exit_json(changed=True, msg='Action was successful')
else:
module.fail_json(msg=to_native(result['msg']))
if __name__ == '__main__':
main()
|
sysadminmatmoz/pmis
|
analytic_account_open/wizards/analytic_account_open.py
|
Python
|
agpl-3.0
| 1,741
| 0
|
# -*- coding: utf-8 -*-
# Copyright 2015 Eficent - Jordi Ballester Alomar
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
class AnalyticAccountOpen(models.TransientModel):
_name = 'analytic.account.open'
_description = 'Open single analytic account'
analytic_account_id = fields.Many2one(
'account.analytic.account',
'Analytic Account',
required=True
)
include_child = fields.Boolean(
'Include child accounts',
default=True
)
@api.model
def _get_child_analytic_accounts(self, curr_id):
result = {}
|
result[curr_id] = True
# Now
|
add the children
self.env.cr.execute('''
WITH RECURSIVE children AS (
SELECT parent_id, id
FROM account_analytic_account
WHERE parent_id = %s
UNION ALL
SELECT a.parent_id, a.id
FROM account_analytic_account a
JOIN children b ON(a.parent_id = b.id)
)
SELECT * FROM children order by parent_id
''', (curr_id,))
res = self.env.cr.fetchall()
for x, y in res:
result[y] = True
return result
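    # Illustrative result (hierarchy assumed): for an analytic account with id 1 whose
    # children are 2 and 3, and where 3 has a child 4, _get_child_analytic_accounts(1)
    # returns {1: True, 2: True, 3: True, 4: True}.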
@api.multi
def analytic_account_open_window(self):
self.ensure_one()
act_window_id = self.env.ref(
'analytic.action_account_analytic_account_form')
result = act_window_id.read()[0]
acc_id = self.analytic_account_id.id
acc_ids = []
if self.include_child:
acc_ids = self._get_child_analytic_accounts(acc_id)
else:
acc_ids.append(acc_id)
result['domain'] = "[('id','in', ["+','.join(map(str, acc_ids))+"])]"
return result
|
necolt/hudson-notifier
|
hudsonnotifier/AboutHudsonnotifierDialog.py
|
Python
|
gpl-3.0
| 2,628
| 0.012938
|
# -*- coding: utf-8 -*-
### BEGIN LICENSE
# Copyright (C) 2009 Philip Peitsch <[email protected]>
#This program is free software: you can redistribute it and/or modify it
#under the terms of the GNU General Public License version 3, as published
#by the Free Software Foundation.
#
#This program is distributed in the hope that it will be useful, but
#WITHOUT ANY WARRANTY; without even the implied warranties of
#MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
#PURPOSE. See the GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License along
#with this program. If not, see <http://www.gnu.org/licenses/>.
### END LICENSE
import sys
import os
import gtk
from hudsonnotifier.hudsonnotifierconfig import getdatapath
class AboutHudsonnotifierDialog(gtk.AboutDialog):
__gtype_name__ = "AboutHudsonnotifierDialog"
def __init__(self):
"""__init__ - This function is typically not called directly.
        Creation of an AboutHudsonnotifierDialog requires reading the associated ui
        file and parsing the ui definition externally,
and then calling AboutHudsonnotifierDialog.finish_initializing().
Use the convenience function NewAboutHudsonnotifierDialog to create
NewAboutHudsonnotifierDialog objects.
"""
|
pass
def finish_initializing(self, builder):
"""finish_initalizing should be called after parsing the ui definition
and creating a AboutHudsonnotifierDialog object with it in order to finish
initializing the start of the new AboutHudsonnotifierDi
|
alog instance.
"""
#get a reference to the builder and set up the signals
self.builder = builder
self.builder.connect_signals(self)
#code for other initialization actions should be added here
def NewAboutHudsonnotifierDialog():
"""NewAboutHudsonnotifierDialog - returns a fully instantiated
AboutHudsonnotifierDialog object. Use this function rather than
creating a AboutHudsonnotifierDialog instance directly.
"""
#look for the ui file that describes the ui
ui_filename = os.path.join(getdatapath(), 'ui', 'AboutHudsonnotifierDialog.ui')
if not os.path.exists(ui_filename):
ui_filename = None
builder = gtk.Builder()
builder.add_from_file(ui_filename)
dialog = builder.get_object("about_hudsonnotifier_dialog")
dialog.finish_initializing(builder)
return dialog
if __name__ == "__main__":
dialog = NewAboutHudsonnotifierDialog()
dialog.show()
gtk.main()
|
Khroki/MCEdit-Unified
|
pkgutil.py
|
Python
|
isc
| 20,304
| 0.000788
|
"""Utilities to support packages."""
# NOTE: This module must remain compatible with Python 2.3, as it is shared
# by setuptools for distribution with Python 2.3 and up.
import os
import sys
import imp
import os.path
from types import ModuleType
__all__ = [
'get_importer', 'iter_importers', 'get_loader', 'find_loader',
'walk_packages', 'iter_modules', 'get_data',
'ImpImporter', 'ImpLoader', 'read_code', 'extend_path',
]
def read_code(stream):
# This helper is needed in order for the PEP 302 emulation to
# correctly handle compiled files
import marshal
magic = stream.read(4)
if magic != imp.get_magic():
return None
stream.read(4) # Skip timestamp
return marshal.load(stream)
def simplegeneric(func):
"""Make a trivial single-dispatch generic function"""
registry = {}
def wrapper(*args, **kw):
ob = args[0]
try:
cls = ob.__class__
except AttributeError:
cls = type(ob)
try:
mro = cls.__mro__
except AttributeError:
try:
class cls(cls, object):
pass
mro = cls.__mro__[1:]
except TypeError:
mro = object, # must be an ExtensionClass or some such :(
for t in mro:
if t in registry:
return registry[t](*args, **kw)
else:
return func(*args, **kw)
try:
wrapper.__name__ = func.__name__
except (TypeError, AttributeError):
pass # Python 2.3 doesn't allow functions to be renamed
def register(typ, func=None):
if func is None:
return lambda f: register(typ, f)
registry[typ] = func
return func
wrapper.__dict__ = func.__dict__
wrapper.__doc__ = func.__doc__
wrapper.register = register
return wrapper
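# Illustrative usage (example functions assumed): dispatch on the type of the first argument.
#
#   @simplegeneric
#   def pretty(obj):
#       return repr(obj)
#
#   @pretty.register(list)
#   def _pretty_list(obj):
#       return "[%s]" % ", ".join(map(str, obj))
#
#   pretty(3)       # -> '3'
#   pretty([1, 2])  # -> '[1, 2]'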
def walk_packages(path=None, prefix='', onerror=None):
"""Yields (module_loader, name, ispkg) for all modules recursively
on path, or, if path is None, all accessible modules.
'path' should be either None or a list of paths to look for
modules in.
'prefix' is a string to output on the front of every module name
on output.
Note that this function must import all *packages* (NOT all
modules!) on the given path, in order to access the __path__
attribute to find submodules.
'onerror' is a function which gets called with one argument (the
name of the package which was being imported) if any exception
occurs while trying to import a package. If no onerror function is
supplied, ImportErrors are caught and ignored, while all other
exceptions are propagated, terminating the search.
Examples:
# list all modules python can access
walk_packages()
# list all submodules of ctypes
walk_packages(ctypes.__path__, ctypes.__name__+'.')
"""
def seen(p, m={}):
if p in m:
return True
m[p] = True
for importer, name, ispkg in iter_modules(path, prefix):
yield importer, name, ispkg
if ispkg:
try:
__import__(name)
except ImportError:
if onerror is not None:
onerror(name)
except Exception:
if onerror is not None:
onerror(name)
else:
raise
else:
path = getattr(sys.modules[name], '__path__', None) or []
# don't traverse path items we've seen before
path = [p for p in path if not seen(p)]
for item in walk_packages(path, name+'.', onerror):
yield item
def iter_modules(
|
path=None, prefix=''):
"""Yields (module_loader, name, ispkg) for all submodules on path,
or, if path is None, all top-level modules on sys.path.
'path' should be either None or a list of paths to look for
modules in.
'prefix' is a string to output on the front of every module name
on output.
"""
if path is None:
importers = iter_importers()
else:
importers = map(get_importer, path)
|
yielded = {}
for i in importers:
for name, ispkg in iter_importer_modules(i, prefix):
if name not in yielded:
yielded[name] = 1
yield i, name, ispkg
#@simplegeneric
def iter_importer_modules(importer, prefix=''):
if not hasattr(importer, 'iter_modules'):
return []
return importer.iter_modules(prefix)
iter_importer_modules = simplegeneric(iter_importer_modules)
class ImpImporter:
"""PEP 302 Importer that wraps Python's "classic" import algorithm
ImpImporter(dirname) produces a PEP 302 importer that searches that
directory. ImpImporter(None) produces a PEP 302 importer that searches
the current sys.path, plus any modules that are frozen or built-in.
Note that ImpImporter does not currently support being used by placement
on sys.meta_path.
"""
def __init__(self, path=None):
self.path = path
def find_module(self, fullname, path=None):
# Note: we ignore 'path' argument since it is only used via meta_path
subname = fullname.split(".")[-1]
if subname != fullname and self.path is None:
return None
if self.path is None:
path = None
else:
path = [os.path.realpath(self.path)]
try:
file, filename, etc = imp.find_module(subname, path)
except ImportError:
return None
return ImpLoader(fullname, file, filename, etc)
def iter_modules(self, prefix=''):
if self.path is None or not os.path.isdir(self.path):
return
yielded = {}
import inspect
try:
filenames = os.listdir(self.path)
except OSError:
# ignore unreadable directories like import does
filenames = []
filenames.sort() # handle packages before same-named modules
for fn in filenames:
modname = inspect.getmodulename(fn)
if modname=='__init__' or modname in yielded:
continue
path = os.path.join(self.path, fn)
ispkg = False
if not modname and os.path.isdir(path) and '.' not in fn:
modname = fn
try:
dircontents = os.listdir(path)
except OSError:
# ignore unreadable directories like import does
dircontents = []
for fn in dircontents:
subname = inspect.getmodulename(fn)
if subname=='__init__':
ispkg = True
break
else:
continue # not a package
if modname and '.' not in modname:
yielded[modname] = 1
yield prefix + modname, ispkg
class ImpLoader:
"""PEP 302 Loader that wraps Python's "classic" import algorithm
"""
code = source = None
def __init__(self, fullname, file, filename, etc):
self.file = file
self.filename = filename
self.fullname = fullname
self.etc = etc
def load_module(self, fullname):
self._reopen()
try:
mod = imp.load_module(fullname, self.file, self.filename, self.etc)
finally:
if self.file:
self.file.close()
# Note: we don't set __loader__ because we want the module to look
# normal; i.e. this is just a wrapper for standard import machinery
return mod
@staticmethod
def get_data(pathname):
return open(pathname, "rb").read()
def _reopen(self):
if self.file and self.file.closed:
mod_type = self.etc[2]
if mod_type==imp.PY_SOURCE:
self.file = open(self.filename, 'rU')
elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION):
self.file = open(self.filename, 'rb')
def _fix_name(self, fullname):
if fullname is
|
Huyuwei/tvm
|
nnvm/tests/python/frontend/darknet/test_forward.py
|
Python
|
apache-2.0
| 18,555
| 0.00388
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Compile Darknet Models
=====================
This article is a test script to test darknet models with NNVM.
All the required models and libraries will be downloaded from the internet
by the script.
"""
import numpy as np
import tvm
from tvm.contrib import graph_runtime
from tvm.contrib.download import download_testdata
download_testdata.__test__ = False
from nnvm import frontend
from tvm.relay.testing.darknet import LAYERTYPE
from tvm.relay.testing.darknet import __darknetffi__
import nnvm.compiler
DARKNET_LIB = 'libdarknet2.0.so'
DARKNETLIB_URL = 'https://github.com/siju-samuel/darknet/blob/master/lib/' \
+ DARKNET_LIB + '?raw=true'
LIB = __darknetffi__.dlopen(download_testdata(DARKNETLIB_URL, DARKNET_LIB, module='darknet'))
DARKNET_TEST_IMAGE_NAME = 'dog.jpg'
DARKNET_TEST_IMAGE_URL = 'https://github.com/siju-samuel/darknet/blob/master/data/' + DARKNET_TEST_IMAGE_NAME +'?raw=true'
DARKNET_TEST_IMAGE_PATH = download_testdata(DARKNET_TEST_IMAGE_URL, DARKNET_TEST_IMAGE_NAME, module='data')
def _read_memory_buffer(shape, data, dtype='float32'):
length = 1
for x in shape:
length *= x
data_np = np.zeros(length, dty
|
pe=dtype)
for i in range(length):
data_np[i] = data[i]
return data_np.reshape(shape)
def _get_tvm_output(net, data, build_dtype='float32'):
'''Compute TVM output'''
dtype = 'float32'
sym, params = frontend.darknet.from_d
|
arknet(net, dtype)
target = 'llvm'
shape_dict = {'data': data.shape}
graph, library, params = nnvm.compiler.build(sym, target, shape_dict,
build_dtype, params=params)
# Execute on TVM
ctx = tvm.cpu(0)
m = graph_runtime.create(graph, library, ctx)
# set inputs
m.set_input('data', tvm.nd.array(data.astype(dtype)))
m.set_input(**params)
m.run()
# get outputs
tvm_out = []
for i in range(m.get_num_outputs()):
tvm_out.append(m.get_output(i).asnumpy())
return tvm_out
def _load_net(cfg_url, cfg_name, weights_url, weights_name):
cfg_path = download_testdata(cfg_url, cfg_name, module='darknet')
weights_path = download_testdata(weights_url, weights_name, module='darknet')
net = LIB.load_network(cfg_path.encode('utf-8'), weights_path.encode('utf-8'), 0)
return net
def verify_darknet_frontend(net, build_dtype='float32'):
'''Test network with given input image on both darknet and tvm'''
def get_darknet_output(net, img):
LIB.network_predict_image(net, img)
out = []
for i in range(net.n):
layer = net.layers[i]
if layer.type == LAYERTYPE.REGION:
attributes = np.array([layer.n, layer.out_c, layer.out_h,
layer.out_w, layer.classes,
layer.coords, layer.background],
dtype=np.int32)
out.insert(0, attributes)
out.insert(0, _read_memory_buffer((layer.n*2, ), layer.biases))
layer_outshape = (layer.batch, layer.out_c,
layer.out_h, layer.out_w)
out.insert(0, _read_memory_buffer(layer_outshape, layer.output))
elif layer.type == LAYERTYPE.YOLO:
attributes = np.array([layer.n, layer.out_c, layer.out_h,
layer.out_w, layer.classes,
layer.total],
dtype=np.int32)
out.insert(0, attributes)
out.insert(0, _read_memory_buffer((layer.total*2, ), layer.biases))
out.insert(0, _read_memory_buffer((layer.n, ), layer.mask, dtype='int32'))
layer_outshape = (layer.batch, layer.out_c,
layer.out_h, layer.out_w)
out.insert(0, _read_memory_buffer(layer_outshape, layer.output))
elif i == net.n-1:
if layer.type == LAYERTYPE.CONNECTED:
darknet_outshape = (layer.batch, layer.out_c)
elif layer.type in [LAYERTYPE.SOFTMAX]:
darknet_outshape = (layer.batch, layer.outputs)
else:
darknet_outshape = (layer.batch, layer.out_c,
layer.out_h, layer.out_w)
out.insert(0, _read_memory_buffer(darknet_outshape, layer.output))
return out
dtype = 'float32'
img = LIB.letterbox_image(LIB.load_image_color(DARKNET_TEST_IMAGE_PATH.encode('utf-8'), 0, 0), net.w, net.h)
darknet_output = get_darknet_output(net, img)
batch_size = 1
data = np.empty([batch_size, img.c, img.h, img.w], dtype)
i = 0
for c in range(img.c):
for h in range(img.h):
for k in range(img.w):
data[0][c][h][k] = img.data[i]
i = i + 1
tvm_out = _get_tvm_output(net, data, build_dtype)
for tvm_outs, darknet_out in zip(tvm_out, darknet_output):
tvm.testing.assert_allclose(darknet_out, tvm_outs, rtol=1e-3, atol=1e-3)
def verify_rnn_forward(net):
'''Test network with given input data on both darknet and tvm'''
def get_darknet_network_predict(net, data):
return LIB.network_predict(net, data)
from cffi import FFI
ffi = FFI()
np_arr = np.zeros([1, net.inputs], dtype='float32')
np_arr[0, 84] = 1
cffi_arr = ffi.cast('float*', np_arr.ctypes.data)
tvm_out = _get_tvm_output(net, np_arr)[0]
darknet_output = get_darknet_network_predict(net, cffi_arr)
darknet_out = np.zeros(net.outputs, dtype='float32')
for i in range(net.outputs):
darknet_out[i] = darknet_output[i]
last_layer = net.layers[net.n-1]
darknet_outshape = (last_layer.batch, last_layer.outputs)
darknet_out = darknet_out.reshape(darknet_outshape)
tvm.testing.assert_allclose(darknet_out, tvm_out, rtol=1e-4, atol=1e-4)
def test_forward_extraction():
'''test extraction model'''
model_name = 'extraction'
cfg_name = model_name + '.cfg'
weights_name = model_name + '.weights'
cfg_url = 'https://github.com/pjreddie/darknet/blob/master/cfg/' + cfg_name + '?raw=true'
weights_url = 'http://pjreddie.com/media/files/' + weights_name + '?raw=true'
net = _load_net(cfg_url, cfg_name, weights_url, weights_name)
verify_darknet_frontend(net)
LIB.free_network(net)
def test_forward_alexnet():
'''test alexnet model'''
model_name = 'alexnet'
cfg_name = model_name + '.cfg'
weights_name = model_name + '.weights'
cfg_url = 'https://github.com/pjreddie/darknet/blob/master/cfg/' + cfg_name + '?raw=true'
weights_url = 'http://pjreddie.com/media/files/' + weights_name + '?raw=true'
net = _load_net(cfg_url, cfg_name, weights_url, weights_name)
verify_darknet_frontend(net)
LIB.free_network(net)
def test_forward_resnet50():
'''test resnet50 model'''
model_name = 'resnet50'
cfg_name = model_name + '.cfg'
weights_name = model_name + '.weights'
cfg_url = 'https://github.com/pjreddie/darknet/blob/master/cfg/' + cfg_name + '?raw=true'
weights_url = 'http://pjreddie.com/media/files/' + weights_name + '?raw=true'
net = _load_net(cfg_url, cfg_name, weights_url, weights_name)
verify_darknet_frontend(net)
LIB.free_network(net)
|
waneric/PyMapLib
|
src/gabbs/layers/Layer.py
|
Python
|
mit
| 2,213
| 0.001808
|
# -*- coding: utf-8 -*-
"""
Layer.py - base layer for gabbs maps
======================================================================
AUTHOR: Wei Wan, Purdue University
EMAIL: [email protected]
Copyright (c) 2016 Purdue University
See the file "license.terms" for information on usage and
redistribution of this file, and for a DISCLAIMER OF ALL WARRANTIES.
======================================================================
"""
from os.path import isfile
from PyQt4.QtGui import QAction, QIcon
from qgis.gui import *
from gabbs.layers.LayerProperty import *
from gabbs.MapUtils import iface, debug_trace
import math
class Layer(object):
"""Base class for layers"""
layerName = None
"""Layer type name in menu"""
layerIcon = None
"""Group icon in menu"""
layerTypeName = None
"""Layer type identificator used to store in project"""
layerTypeId = None
"""Numerical ID used in versions < 2.3"""
layerId = None
"""Store 2 qgis objects"""
layer = None
layerAction = None
layerAttribution = None
def __init__(self):
object.__init__(self)
def getLayer(self):
return self.layer
def getLayerId(self):
return self.layerId
def setAddLayerCallback(self, addLayerCallback):
"""Set post processing in add layer method in canvas class
"""
self.addLayerCallback = addLayerCallback
def loadStyleFile(self, symPath):
if isfile(symPath):
res = self.layer.loadNamedStyle(symPath)
if res[1]:
return True
else:
return False
else:
return False
def getScale(self, zoomlevel):
dpi = iface.mainWindow.physicalDpiX()
inchesPerMeter = 39.37
maxScalePerPixel = 156543.04
try:
            zoomlevel = int(zoomlevel)
            scale = (dpi * inchesPerMeter * maxScalePerPixel) / (math.pow(2, zoomlevel))
            scale = int(scale)
return scale
except TypeError:
raise
#pass
except Exception as e:
raise e
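# Editor's sketch, not part of the upstream module: the zoom-to-scale
# conversion used in getScale() above as a standalone function, with a
# hypothetical 96 DPI display. 156543.04 is metres per pixel at zoom 0 for
# 256-pixel web-mercator tiles and 39.37 converts metres to inches, so the
# result is a map scale denominator.
def _zoom_to_scale(zoomlevel, dpi=96):
    inches_per_meter = 39.37
    max_scale_per_pixel = 156543.04
    # e.g. _zoom_to_scale(10) gives a scale denominator of roughly 5.8e5
    return int((dpi * inches_per_meter * max_scale_per_pixel) / math.pow(2, zoomlevel))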
|
JonSteinn/Kattis-Solutions
|
src/Slatkisi/Python 3/main.py
|
Python
|
gpl-3.0
| 71
| 0.014085
|
cost, zeros = map(int, input().split())
print(int(round(cost, -zeros)))
|
VTabolin/networking-vsphere
|
networking_vsphere/common/vmware_conf.py
|
Python
|
apache-2.0
| 3,247
| 0.002772
|
# Copyright 2015 Mirantis, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
|
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from networking_vsphere._i18n import _
from neutron.agent.common import config
DEFAULT_BRIDGE_MAPPINGS = []
DEFAULT_UPLINK_MAPPINGS = []
DEFAULT_VLAN_RANGES = []
DEFAULT_TUNNEL_RANGES = []
DEFAULT_TUNNEL_TYPES = []
agent_opts = [
cfg.IntOpt('polling_interval', default=2,
help=_("The number of seconds the agent will wait between "
"polling for local device changes.")),
cfg.IntOpt('quitting_rpc_timeout', default=10,
help=_("Set new timeout in seconds for new rpc calls after "
"agent receives SIGTERM. If value is set to 0, rpc "
"timeout won't be changed")),
cfg.BoolOpt('log_agent_heartbeats', default=False,
help=_("Log agent heartbeats")),
cfg.IntOpt('report_interval',
default=30,
help='Seconds between nodes reporting state to server.'),
]
vmware_opts = [
cfg.FloatOpt(
'task_poll_interval',
default=2,
help=_('The interval of task polling in seconds.')),
cfg.IntOpt(
'api_retry_count',
default=10,
help=_('number of times an API must be retried upon '
'session/connection related errors')),
cfg.IntOpt(
'connections_pool_size',
default=100,
help=_('number of vsphere connections pool '
'must be higher for intensive operations')),
cfg.StrOpt('vsphere_login', default='administrator',
help=_("Vsphere login.")),
cfg.ListOpt('network_maps',
default=DEFAULT_BRIDGE_MAPPINGS,
help=_("List of <physical_network>:<bridge>.")),
cfg.ListOpt('uplink_maps',
default=DEFAULT_UPLINK_MAPPINGS,
help=_("List of <physical_network>:<active uplinks>:"
"<failover uplinks>."
"Use semicolon between uplink names")),
cfg.StrOpt('vsphere_hostname', default='vsphere',
help=_("Vsphere host name or IP.")),
cfg.StrOpt('vsphere_password', default='',
help=_("Vsphere password.")),
]
dvs_opts = [
cfg.BoolOpt('clean_on_restart',
default=True,
help=_("Run DVS cleaning procedure on agent restart.")),
cfg.BoolOpt('precreate_networks',
default=False,
help=_("Precreate networks on DVS")),
]
cfg.CONF.register_opts(dvs_opts, "DVS")
cfg.CONF.register_opts(agent_opts, "DVS_AGENT")
cfg.CONF.register_opts(vmware_opts, "ML2_VMWARE")
config.register_agent_state_opts_helper(cfg.CONF)
CONF = cfg.CONF
|
doctormo/python-crontab
|
tests/test_usage.py
|
Python
|
lgpl-3.0
| 7,441
| 0.000672
|
#!/usr/bin/env python
#
# Copyright (C) 2012 Jay Sigbrandt <[email protected]>
# Martin Owens <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
#
"""
Test crontab usage.
"""
import os
import sys
import unittest
import crontab
from datetime import date, time, datetime, timedelta
try:
from test import test_support
except ImportError:
from test import support as test_support
crontab.LOG.setLevel(crontab.logging.ERROR)
TEST_DIR = os.path.dirname(__file__)
class DummyStdout(object):
def write(self, text):
pass
BASIC = '@hourly firstcommand\n\n'
USER = '\n*/4 * * * * user_command # user_comment\n\n\n'
crontab.CRONCMD = "%s %s" % (sys.executable, os.path.join(TEST_DIR, 'data', 'crontest'))
def flush():
pass
class Attribute(object):
def __init__(self, obj, attr, value):
self.obj = obj
self.attr = attr
self.value = value
def __enter__(self, *args, **kw):
if hasattr(self.obj, self.attr):
self.previous = getattr(self.obj, self.attr)
setattr(self.obj, self.attr, self.value)
def __exit__(self, *args, **kw):
if hasattr(self, 'previous'):
setattr(self.obj, self.attr, self.previous)
else:
delattr(self.obj, self.attr)
class UseTestCase(unittest.TestCase):
"""Test use documentation in crontab."""
def setUp(self):
        self.filenames = []
def test_01_empty(self):
"""Open system crontab"""
cron = crontab.CronTab()
self.assertEqual(cron.render(), "")
self.assertEqual(cron.__unicode__(), "")
self.assertEqual(repr(cron), "<Unattached CronTab>")
def test_02_user(self):
"""Open a user's crontab"""
        cron = crontab.CronTab(user='basic')
self.assertEqual(cron.render(), BASIC)
self.assertEqual(repr(cron), "<User CronTab 'basic'>")
def test_03_usage(self):
"""Dont modify crontab"""
cron = crontab.CronTab(tab='')
sys.stdout = DummyStdout()
sys.stdout.flush = flush
try:
exec(crontab.__doc__)
except ImportError:
pass
sys.stdout = sys.__stdout__
self.assertEqual(cron.render(), '')
def test_04_username(self):
"""Username is True"""
cron = crontab.CronTab(user=True)
self.assertNotEqual(cron.user, True)
self.assertEqual(cron.render(), USER)
self.assertEqual(repr(cron), "<My CronTab>")
def test_05_nouser(self):
"""Username doesn't exist"""
cron = crontab.CronTab(user='nouser')
self.assertEqual(cron.render(), '')
def test_06_touser(self):
"""Write to use API"""
cron = crontab.CronTab(tab=USER)
self.assertEqual(repr(cron), "<Unattached CronTab>")
cron.write_to_user('bob')
filename = os.path.join(TEST_DIR, 'data', 'spool', 'bob')
self.filenames.append(filename)
self.assertTrue(os.path.exists(filename))
self.assertEqual(repr(cron), "<User CronTab 'bob'>")
def test_07_ioerror_read(self):
"""No filename ioerror"""
with self.assertRaises(IOError):
cron = crontab.CronTab(user='error')
cron.read()
def test_07_ioerror_write(self):
"""User not specified, nowhere to write to"""
cron = crontab.CronTab()
with self.assertRaises(IOError):
cron.write()
def test_08_cronitem(self):
"""CronItem Standalone"""
item = crontab.CronItem(line='noline')
self.assertTrue(item.is_enabled())
with self.assertRaises(UnboundLocalError):
item.delete()
item.command = str('nothing')
self.assertEqual(item.render(), '* * * * * nothing')
def test_10_time_object(self):
"""Set slices using time object"""
item = crontab.CronItem(command='cmd')
self.assertEqual(str(item.slices), '* * * * *')
item.setall(time(1, 2))
self.assertEqual(str(item.slices), '2 1 * * *')
self.assertTrue(item.is_valid())
item.setall(time(0, 30, 0, 0))
self.assertEqual(str(item.slices), '30 0 * * *')
self.assertTrue(item.is_valid())
self.assertEqual(str(item), '30 0 * * * cmd')
def test_11_date_object(self):
"""Set slices using date object"""
item = crontab.CronItem(command='cmd')
self.assertEqual(str(item.slices), '* * * * *')
item.setall(date(2010, 6, 7))
self.assertEqual(str(item.slices), '0 0 7 6 *')
self.assertTrue(item.is_valid())
def test_12_datetime_object(self):
"""Set slices using datetime object"""
item = crontab.CronItem(command='cmd')
self.assertEqual(str(item.slices), '* * * * *')
item.setall(datetime(2009, 8, 9, 3, 4))
self.assertTrue(item.is_valid())
self.assertEqual(str(item.slices), '4 3 9 8 *')
def test_20_slice_validation(self):
"""CronSlices class and objects can validate"""
CronSlices = crontab.CronSlices
self.assertTrue(CronSlices('* * * * *').is_valid())
self.assertTrue(CronSlices.is_valid('* * * * *'))
self.assertTrue(CronSlices.is_valid('*/2 * * * *'))
self.assertTrue(CronSlices.is_valid('* 1,2 * * *'))
self.assertTrue(CronSlices.is_valid('* * 1-5 * *'))
self.assertTrue(CronSlices.is_valid('* * * * MON-WED'))
self.assertTrue(CronSlices.is_valid('@reboot'))
sliced = CronSlices('* * * * *')
sliced[0].parts = [300]
self.assertEqual(str(sliced), '300 * * * *')
self.assertFalse(sliced.is_valid())
self.assertFalse(CronSlices.is_valid('P'))
self.assertFalse(CronSlices.is_valid('*/61 * * * *'))
self.assertFalse(CronSlices.is_valid('* 1,300 * * *'))
self.assertFalse(CronSlices.is_valid('* * 50-1 * *'))
self.assertFalse(CronSlices.is_valid('* * * * FRO-TOO'))
self.assertFalse(CronSlices.is_valid('@retool'))
def test_25_open_pipe(self):
"""Test opening pipes"""
from crontab import open_pipe, CRONCMD
pipe = open_pipe(CRONCMD, h=None, a='one', abc='two')
(out, err) = pipe.communicate()
self.assertEqual(err, b'')
self.assertEqual(out, b'--abc=two|-a|-h|one\n')
def test_07_zero_padding(self):
"""Can we get zero padded output"""
cron = crontab.CronTab(tab="02 3-5 2,4 */2 01 cmd")
self.assertEqual(str(cron), '2 3-5 2,4 */2 1 cmd\n')
with Attribute(crontab, 'ZERO_PAD', True):
self.assertEqual(str(cron), '02 03-05 02,04 */2 01 cmd\n')
def tearDown(self):
for filename in self.filenames:
if os.path.exists(filename):
os.unlink(filename)
if __name__ == '__main__':
test_support.run_unittest(
UseTestCase,
)
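# Editor's sketch, not part of the upstream test suite: a minimal usage
# example of the CronTab API exercised above, kept in memory so nothing
# touches a real crontab.
def _example_crontab_usage():
    cron = crontab.CronTab(tab='')            # in-memory tab, as in test_03_usage
    job = cron.new(command='echo hello', comment='example')
    job.setall('30 0 * * *')                  # same slice syntax as test_10_time_object
    assert job.is_valid()
    return cron.render()                      # something like '30 0 * * * echo hello # example'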
|
friendly-of-python/flask-online-store
|
flask_online_store/forms/admin/category.py
|
Python
|
mit
| 461
| 0
|
from .. import BaseForm
from wtforms import StringField, TextAreaField
from wtforms.validators import DataRequired
class CategoryForm(BaseForm):
name = StringField('name',
validators=[
DataRequired()
])
description = TextAreaField('description',
validators=[
                           DataRequired()
])
|
cvandeplas/plaso
|
plaso/parsers/plist.py
|
Python
|
apache-2.0
| 5,390
| 0.004824
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2013 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file contains the Property List (Plist) Parser.
Plaso's engine calls PlistParser when it encounters Plist files to be processed.
"""
import binascii
import logging
from binplist import binplist
from plaso.lib import errors
from plaso.lib import utils
from plaso.parsers import interface
from plaso.parsers import manager
class PlistParser(interface.BasePluginsParser):
"""De-serializes and parses plists the event objects are generated by plist.
The Plaso engine calls parsers by their Parse() method. This parser's
Parse() has GetTopLevel() which deserializes plist files using the binplist
library and calls plugins (PlistPlugin) registered through the
interface by their Process() to produce event objects.
Plugins are how this parser understands the content inside a plist file,
each plugin holds logic specific to a particular plist file. See the
interface and plist_plugins/ directory for examples of how plist plugins are
implemented.
"""
NAME = 'plist'
DESCRIPTION = u'Parser for binary and text plist files.'
_plugin_classes = {}
def __init__(self):
"""Initializes a parser object."""
super(PlistParser, self).__init__()
self._plugins = PlistParser.GetPluginObjects()
def GetTopLevel(self, file_object, file_name=''):
"""Returns the deserialized content of a plist as a dictionary object.
Args:
file_object: A file-like object to parse.
file_name: The name of the file-like object.
Returns:
A dictionary object representing the contents of the plist.
"""
try:
top_level_object = binplist.readPlist(file_object)
except binplist.FormatError as exception:
raise errors.UnableToParseFile(
u'[{0:s}] File is not a plist file: {1:s}'.format(
self.NAME, utils.GetUnicodeString(exception)))
except (
LookupError, binascii.Error, ValueError, AttributeError) as exception:
raise errors.UnableToParseFile(
u'[{0:s}] Unable to parse XML file, reason: {1:s}'.format(
self.NAME, exception))
except OverflowError as exception:
raise errors.UnableToParseFile(
u'[{0:s}] Unable to parse: {1:s} with error: {2:s}'.format(
self.NAME, file_name, exception))
if not top_level_object:
raise errors.UnableToParseFile(
u'[{0:s}] File is not a plist: {1:s}'.format(
self.NAME, utils.GetUnicodeString(file_name)))
# Since we are using readPlist from binplist now instead of manually
    # opening up the BinaryPlist file we lose this option. Keep it commented
    # out for now but this needs to be tested a bit more.
# TODO: Re-evaluate if we can delete this or still require it.
#if bpl.is_corrupt:
# logging.warning(
# u'[{0:s}] corruption detected in binary plist: {1:s}'.format(
|
# self.NAME, file_name))
return top_level_object
def Parse(self, parser_context, file_entry):
"""Parse and extract values from a plist file.
Args:
parser_context: A parser context object (instance of ParserContext).
file_entry: A file entry object (instance of dfvfs.FileEntry).
"""
# TODO: Should we rather query the stats object to get the size here?
file_object = file_entry.GetFileObject()
file_size = file_object.get_size()
if file_size <= 0:
file_object.close()
raise errors.UnableToParseFile(
u'[{0:s}] file size: {1:d} bytes is less equal 0.'.format(
self.NAME, file_size))
# 50MB is 10x larger than any plist seen to date.
if file_size > 50000000:
file_object.close()
raise errors.UnableToParseFile(
u'[{0:s}] file size: {1:d} bytes is larger than 50 MB.'.format(
self.NAME, file_size))
top_level_object = None
try:
top_level_object = self.GetTopLevel(file_object, file_entry.name)
except errors.UnableToParseFile:
file_object.close()
raise
if not top_level_object:
file_object.close()
raise errors.UnableToParseFile(
u'[{0:s}] unable to parse: {1:s} skipping.'.format(
self.NAME, file_entry.name))
file_system = file_entry.GetFileSystem()
plist_name = file_system.BasenamePath(file_entry.name)
for plugin_object in self._plugins:
try:
plugin_object.Process(
parser_context, plist_name=plist_name, top_level=top_level_object)
except errors.WrongPlistPlugin as exception:
logging.debug(u'[{0:s}] Wrong plugin: {1:s} for: {2:s}'.format(
self.NAME, exception[0], exception[1]))
file_object.close()
manager.ParsersManager.RegisterParser(PlistParser)
|
pyblub/pyload
|
pyload/utils/purge.py
|
Python
|
agpl-3.0
| 1,743
| 0
|
# -*- coding: utf-8 -*-
# @author: vuolter
from __future__ import absolute_import, unicode_literals
import os
import re
import sys
from future import standard_library
standard_library.install_aliases()
def char(text, chars, repl=''):
return re.sub(r'[{0}]+'.format(chars), repl, text)
_UNIXBADCHARS = ('\0', '/', '\\')
_MACBADCHARS = _UNIXBADCHARS + (':',)
_WINBADCHARS = _MACBADCHARS + ('<', '>', '"', '|', '?', '*')
_WINBADWORDS = (
'com1', 'com2', 'com3', 'com4', 'com5', 'com6', 'com7', 'com8', 'com9',
'lpt1', 'lpt2', 'lpt3', 'lpt4', 'lpt5', 'lpt6', 'lpt7', 'lpt8', 'lpt9',
'con', 'prn')
def name(text, sep='_', allow_whitespaces=False):
"""Remove invalid characters."""
if os.name == 'nt':
bc = _WINBADCHARS
elif sys.platform == 'darwin':
bc = _MACBADCHARS
else:
bc = _UNIXBADCHARS
repl = r''.join(bc)
if not allow_whitespaces:
repl += ' '
res = char(text, repl, sep).strip()
if os.name == 'nt' and res.lower() in _WINBADWORDS:
res = sep + res
return res
def patt
|
ern(text, rules):
for rule in rules:
try:
pattr, repl, flags = rule
except ValueError:
pattr, repl = rule
flags = 0
text = re.sub(pattr, repl, text, flags)
return text
def truncate(text, offset):
maxtrunc = len(text) // 2
if offset > maxtrunc:
raise ValueError('String too short to tr
|
uncate')
trunc = (len(text) - offset) // 3
return '{0}~{1}'.format(text[:trunc * 2], text[-trunc:])
def uniquify(seq):
"""Remove duplicates from list preserving order."""
seen = set()
seen_add = seen.add
return type(seq)(x for x in seq if x not in seen and not seen_add(x))
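# Editor's sketch, not part of the upstream module: quick illustrations of
# the helpers above (the output of name() depends on the platform).
if __name__ == '__main__':
    print(name('bad name?.txt'))        # whitespace and platform-specific chars become '_'
    print(truncate('averylongfilename.bin', 8))
    print(uniquify([3, 1, 3, 2, 1]))    # -> [3, 1, 2]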
|
eukaryote/dotfiles
|
sublime3/.config/sublime-text-3/Packages/SublimeREPL/repls/killableprocess/__init__.py
|
Python
|
mit
| 118
| 0.016949
|
from .killabl
|
eprocess import Popen, mswindows
if mswindows:
from .winprocess import START
|
UPINFO, STARTF_USESHOWWINDOW
|
arnomoonens/DeepRL
|
yarll/scripts/run_model.py
|
Python
|
mit
| 2,858
| 0.004899
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
import os
import argparse
import tensorflow as tf
from gym import wrappers
from yarll.environment.registration import make
class ModelRunner(object):
"""
Run an already learned model.
Currently only supports one variation of an environment.
"""
def __init__(self, env, model_directory: str, save_directory: str, **usercfg) -> None:
super(ModelRunner, self).__init__()
self.env = env
self.model_directory = model_directory
self.save_directory = save_directory
self.config = dict(
episode_max_length=self.env.spec.tags.get('wrapper_config.TimeLimit.max_episode_steps'),
repeat_n_actions=1
)
self.config.update(usercfg)
self.session = tf.Session()
self.saver = tf.train.import_meta_graph(os.path.join(self.model_directory, "model.meta"))
self.saver.restore(self.session, os.path.join(self.model_directory, "model"))
self.action = tf.get_collection("action")[0]
self.states = tf.get_collection("states")[0]
def choose_action(self, state):
"""Choose an action."""
return self.session.run([self.action], feed_dict={self.states: [state]})[0]
def get_trajectory(self, render: bool = False):
"""
Run agent-environment loop for one whole episode (trajectory)
        Return dictionary of results
"""
state = self.env.reset()
for _ in range(self.config["episode_max_length"]):
action = self.choose_action(state)
for _ in range(self.config["repeat_n_actions"]):
_, _, done, _ = self.env.step(action)
if done: # Don't continue if episode has already ended
break
if done:
break
if render:
self.env.render()
return
|
def run(self):
for _ in range(self.config["n_iter"]):
self.get_trajectory()
parser = argparse.ArgumentParser()
parser.add_argument("environment", metavar="env", type=str, help="Gym environment to execute the model on.")
parser.add_argument("model_directory", type=str, help="Directory from where model files are loaded.")
parser.add_argument("save_directory", type=str, help="Directory where results of running the model are saved")
parser.add_argument("--iterations", default=100, type=int, help="Number of iterations to run the algorithm.")
def main():
args = parser.parse_args()
env = make(args.environment)
runner = ModelRunner(env, args.model_directory, args.save_directory, n_iter=args.iterations)
try:
runner.env = wrappers.Monitor(runner.env, args.save_directory, video_callable=False, force=True)
runner.run()
except KeyboardInterrupt:
pass
if __name__ == "__main__":
main()
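# Example invocation (hypothetical environment name and paths), matching the
# argparse arguments defined above:
#   python run_model.py CartPole-v0 ./trained_model ./results --iterations 10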
|
google/jax
|
tests/linalg_test.py
|
Python
|
apache-2.0
| 64,375
| 0.007518
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the LAPAX linear algebra module."""
from functools import partial
import unittest
import numpy as np
import scipy
import scipy as osp
from absl.testing import absltest
from absl.testing import parameterized
import jax
from jax import jit, grad, jvp, vmap
from jax import lax
from jax import numpy as jnp
from jax import scipy as jsp
from jax._src import test_util as jtu
from jax.config import config
config.parse_flags_with_absl()
FLAGS = config.FLAGS
T = lambda x: np.swapaxes(x, -1, -2)
float_types = jtu.dtypes.floating
complex_types = jtu.dtypes.complex
class NumpyLinalgTest(jtu.JaxTestCase):
def testNotImplemented(self):
for name in jnp.linalg._NOT_IMPLEMENTED:
func = getattr(jnp.linalg, name)
with self.assertRaises(NotImplementedError):
func()
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name":
"_shape={}".format(jtu.format_shape_dtype_string(shape, dtype)),
"shape": shape, "dtype": dtype}
for shape in [(1, 1), (4, 4), (2, 5, 5), (200, 200), (1000, 0, 0)]
for dtype in float_types + complex_types))
def testCholesky(self, shape, dtype):
rng = jtu.rand_default(self.rng())
def args_maker():
factor_shape = shape[:-1] + (2 * shape[-1],)
a = rng(factor_shape, dtype)
return [np.matmul(a, jnp.conj(T(a)))]
self._CheckAgainstNumpy(np.linalg.cholesky, jnp.linalg.cholesky, args_maker,
tol=1e-3)
self._CompileAndCheck(jnp.linalg.cholesky, args_maker)
if jnp.finfo(dtype).bits == 64:
jtu.check_grads(jnp.linalg.cholesky, args_maker(), order=2)
def testCholeskyGradPrecision(self):
rng = jtu.rand_default(self.rng())
a = rng((3, 3), np.float32)
a = np.dot(a, a.T)
jtu.assert_dot_precision(
lax.Precision.HIGHEST, partial(jvp, jnp.linalg.cholesky), (a,), (a,))
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name":
"_n={}".format(jtu.format_shape_dtype_string((n,n), dtype)),
"n": n, "dtype": dtype}
for n in [0, 2, 3, 4, 5, 25] # TODO(mattjj): complex64 unstable on large sizes?
for dtype in float_types + complex_types))
def testDet(self, n, dtype):
rng = jtu.rand_default(self.rng())
args_maker = lambda: [rng((n, n), dtype)]
self._CheckAgainstNumpy(np.linalg.det, jnp.linalg.det, args_maker, tol=1e-3)
self._CompileAndCheck(jnp.linalg.det, args_maker,
rtol={np.float64: 1e-13, np.complex128: 1e-13})
def testDetOfSingularMatrix(self):
x = jnp.array([[-1., 3./2], [2./3, -1.]], dtype=np.float32)
self.assertAllClose(np.float32(0), jsp.linalg.det(x))
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name":
"_shape={}".format(jtu.format_shape_dtype_string(shape, dtype)),
"shape": shape, "dtype": dtype}
for shape in [(1, 1), (3, 3), (2, 4, 4)]
for dtype in float_types))
@jtu.skip_on_devices("tpu")
@jtu.skip_on_flag("jax_skip_slow_tests", True)
def testDetGrad(self, shape, dtype):
rng = jtu.rand_default(self.rng())
a = rng(shape, dtype)
jtu.check_grads(jnp.linalg.det, (a,), 2, atol=1e-1, rtol=1e-1)
# make sure there are no NaNs when a matrix is zero
if len(shape) == 2:
pass
jtu.check_grads(
jnp.linalg.det, (jnp.zeros_like(a),), 1, atol=1e-1, rtol=1e-1)
else:
a[0] = 0
jtu.check_grads(jnp.linalg.det, (a,), 1, atol=1e-1, rtol=1e-1)
def testDetGradIssue6121(self):
f = lambda x: jnp.linalg.det(x).sum()
x = jnp.ones((16, 1, 1))
jax.grad(f)(x)
jtu.check_grads(f, (x,), 2, atol=1e-1, rtol=1e-1)
def testDetGradOfSingularMatrixCorank1(self):
# Rank 2 matrix with nonzero gradient
a = jnp.array([[ 50, -30, 45],
[-30, 90, -81],
[ 45, -81, 81]], dtype=jnp.float32)
jtu.check_grads(jnp.linalg.det, (a,), 1, atol=1e-1, rtol=1e-1)
def testDetGradOfSingularMatrixCorank2(self):
# Rank 1 matrix with zero gradient
b = jnp.array([[ 36, -42, 18],
[-42, 49, -21],
[ 18, -21, 9]], dtype=jnp.float32)
jtu.check_grads(jnp.linalg.det, (b,), 1, atol=1e-1, rtol=1e-1, eps=1e-1)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name":
"_m={}_n={}_q={}".format(
jtu.format_shape_dtype_string((m,), dtype),
jtu.format_shape_dtype_string((nq[0],), dtype),
jtu.format_shape_dtype_string(nq[1], dtype)),
"m": m, "nq": nq, "dtype": dtype}
for m in [1, 5, 7, 23]
for nq in zip([2, 4, 6, 36], [(1, 2), (2, 2), (1, 2, 3), (3, 3, 1, 4)])
for dtype in float_types))
def testTensorsolve(self, m, nq, dtype):
rng = jtu.rand_default(self.rng())
# According to numpy docs the shapes are as follows:
# Coefficient tensor (a), of shape b.shape + Q.
# And prod(Q) == prod(b.shape)
# Therefore, n = prod(q)
n, q = nq
b_shape = (n, m)
# To accomplish prod(Q) == prod(b.shape) we append the m extra dim
# to Q shape
Q = q + (m,)
args_maker = lambda: [
rng(b_shape + Q, dtype), # = a
rng(b_shape, dtype)] # = b
a, b = args_maker()
result = jnp.linalg.tensorsolve(*args_maker())
self.assertEqual(result.shape, Q)
self._CheckAgainstNumpy(np.linalg.tensorsolve,
jnp.linalg.tensorsolve, args_maker,
tol={np.float32: 1e-2, np.float64: 1e-3})
self._CompileAndCheck(jnp.linalg.tensorsolve,
args_maker,
rtol={np.float64: 1e-13})
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name":
"_shape={}".format(jtu.format_shape_dtype_string(shape, dtype)),
"shape": shape, "dtype": dtype}
for shape in [(0, 0), (1, 1), (3, 3), (4, 4), (10, 10), (200, 200),
(2, 2, 2), (2, 3, 3), (3, 2, 2)]
|
for dtype in float_types + complex_types))
def testSlogdet(self, shape, dtype):
rng = jtu.rand_default(self.rng())
|
args_maker = lambda: [rng(shape, dtype)]
self._CheckAgainstNumpy(np.linalg.slogdet, jnp.linalg.slogdet, args_maker,
tol=1e-3)
self._CompileAndCheck(jnp.linalg.slogdet, args_maker)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name":
"_shape={}".format(jtu.format_shape_dtype_string(shape, dtype)),
"shape": shape, "dtype": dtype}
for shape in [(1, 1), (4, 4), (5, 5), (2, 7, 7)]
for dtype in float_types + complex_types))
@jtu.skip_on_devices("tpu")
@jtu.skip_on_flag("jax_skip_slow_tests", True)
def testSlogdetGrad(self, shape, dtype):
rng = jtu.rand_default(self.rng())
a = rng(shape, dtype)
jtu.check_grads(jnp.linalg.slogdet, (a,), 2, atol=1e-1, rtol=2e-1)
def testIssue1213(self):
for n in range(5):
mat = jnp.array([np.diag(np.ones([5], dtype=np.float32))*(-.01)] * 2)
args_maker = lambda: [mat]
self._CheckAgainstNumpy(np.linalg.slogdet, jnp.linalg.slogdet, args_maker,
tol=1e-3)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_shape={}_leftvectors={}_rightvectors={}".format(
jtu.format_shape_dtype_string(shape, dtype),
compute_left_eigenvectors, compute_right_eigenvectors),
"shape": shape, "dtype": dtype,
"compute_left_eigenvectors": compute_left_eigenvectors,
"compute_right_eigenvectors": compute_right_eigenvectors}
for shape in [(0, 0), (4, 4), (5, 5), (50
|
repotvsupertuga/tvsupertuga.repository
|
script.module.resolveurl/lib/resolveurl/plugins/videozoo.py
|
Python
|
gpl-2.0
| 3,993
| 0.008264
|
"""
Kodi resolveurl plugin
Copyright (C) 2014 smokdpi
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urllib
import urllib2
from lib import jsunpack
from urlparse import urlparse
from resolveurl import common
from resolveurl.resolver import ResolveUrl, ResolverError
from resolveurl.hmf import HostedMediaFile
class VideoZooResolver(ResolveUrl):
name = "videozoo"
domains = ["byzoo.org", "playpanda.net", "videozoo.me", "videowing.me", "easyvideo.me", "play44.net", "playbb.me", "video44.net"]
pattern = 'http://((?:www\.)*(?:play44|playbb|video44|byzoo|playpanda|videozoo|videowing|easyvideo)\.(?:me|org|net|eu)/(?:embed[/0-9a-zA-Z]*?|gplus|picasa|gogo/)(?:\.php)*)\?.*?((?:vid|video|id|file)=[%0-9a-zA-Z_\-\./]+|.*)[\?&]*.*'
def __init__(self):
self.net = common.Net()
def get_url(self, host, media_id):
return self._default_get_url(host, media_id, 'http://{host}?vid={media_id}')
def get_media_url(self, host, media_id):
web_url = self.get_url(host, media_id)
headers = {
'User-Agent': common.IOS_USER_AGENT,
'Referer': web_url
}
stream_url = ''
new_host = urlparse(web_url).netloc
html = self.net.http_GET(web_url, headers=headers).content
if 'videozoo' not in new_host:
r = re.search('(?:playlist:|timer\s*=\s*null;).+?url\s*[:=]+\s*[\'"]+(.+?)[\'"]+', html, re.DOTALL)
else:
r = re.search('\*/\s+?(eval\(function\(p,a,c,k,e,d\).+)\s+?/\*', html)
if r:
try:
r = jsunpack.unpack(r.group(1))
if r:
r = re.search('\[{"url":"(.+?)"', r.replace('\\', ''))
except:
if r:
re_src = re.search('urlResolvers\|2F(.+?)\|', r.group(1))
re_url = re.search('php\|3D(.+?)\|', r.group(1))
if re_src and re_url:
stream_url = 'http://%s/%s.php?url=%s' % (new_host, re_src.group(1), re_url.group(1))
stream_url = self._redirect_test(stream_url)
else:
raise ResolverError('File not found')
if r:
stream_url = urllib.unquote_plus(r.group(1))
if 'http' not in stream_url:
                    stream_url = 'http://' + host + '/' + stream_url.replace('/gplus.php', 'gplus.php').replace('/picasa.php', 'picasa.php')
stream_url = self._redirect_test(stream_url)
if stream_url:
                if 'google' in stream_url:
return HostedMediaFile(url=stream_url).resolve()
else:
return stream_url
else:
raise ResolverError('File not found')
def _redirect_test(self, url):
opener = urllib2.build_opener()
opener.addheaders = [('User-agent', common.IOS_USER_AGENT)]
opener.addheaders = [('Referer', urlparse(url).netloc)]
try:
resp = opener.open(url)
if url != resp.geturl():
return resp.geturl()
else:
return url
except urllib2.HTTPError, e:
if e.code == 403:
if url != e.geturl():
return e.geturl()
raise ResolverError('File not found')
|
dgaston/ddb-ngsflow-scripts
|
workflow-vcfanno_somatic_amplicon.py
|
Python
|
mit
| 2,541
| 0.002361
|
#!/usr/bin/env python
# Standard packages
import os
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import gatk
from ddb_ngsflow import annotation
from ddb_ngsflow import pipeline
from ddb_ngsflow.align import bwa
from ddb_ngsflow.utils import utilities
from ddb_ngsflow.qc import qc
from ddb_ngsflow.coverage import sambamba
from ddb_ngsflow.variation import variation
from ddb_ngsflow.variation import freebayes
from ddb_ngsflow.variation import mutect
from ddb_ngsflow.variation import platypus
from ddb_ngsflow.variation import vardict
from ddb_ngsflow.variation import scalpel
from ddb_ngsflow.variation.sv import pindel
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
args.logLevel = "INFO"
sys.stdout.write("Setting up analysis directory\n")
if not os.path.exists("Logs"):
os.makedirs("Logs")
if not os.path.exists("FinalVCFs"):
os.makedirs("FinalVCFs")
if not os.path.exists("FinalBAMs"):
os.makedirs("FinalBAMs")
if not os.path.exists("Intermediates"):
os.makedirs("Intermediates")
if not os.path.exists("Coverage"):
os.makedirs("Coverage")
if not os.path.exists("Reports"):
os.makedirs("Reports")
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
# Workflow Graph definition. The following workflow definition should create a valid Directed Acyclic Graph (DAG)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs, cores=1)
# Per sample jobs
for sample in samples:
vcfanno_job = Job.wrapJobFn(annotation.vcfanno, config, sample, samples,
"{}.snpEff.{}.vcf".format(sample, config['snpeff']['reference']),
cores=int(config['vcfanno']['num_cores']),
memory="{}G".format(config['vcfanno']['max_mem']))
# Create workflow from created jobs
root_job.addChild(vcfanno_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
|
brianz/servant
|
servant/transport/local.py
|
Python
|
lgpl-3.0
| 2,255
| 0.002661
|
import importlib
from .base import BaseTransport
from ..service import Service
class LocalTransport(BaseTransport):
def __init__(self):
super(LocalTransport, self).__init__()
self.__service = None
def __repr__(self):
return self.__class__.__name__
def configure(self, service_name='', service_version='', service_meta=None, **kwargs):
instance = self._import_service_and_instantiate_service(service_name, service_version)
self.service = instance
@property
def service(self):
raise AttributeError("Cannot access service property directly")
@service.setter
def service(self, service_instance):
self.__service = service_instance
def _import_service_and_instantiate_service(self, service_name, service_version):
if not service_name and service_version:
raise Exception(
'service_name and service_version are required '
'arguments for local transport')
module = importlib.import_module('%s.service' % (service_name,))
for name in dir(module):
if name.startswith('_'):
continue
obj = getattr(module, name)
if not self._looks_like_service_class(obj, service_name,
service_version):
continue
instance = obj()
# uber-safe final check to make sure we have the correct service class
if not isinstance(instance, Service):
continue
return instance
raise Exception(
'Could not find appropriate Service class. Services '
|
'must subclass servant.Service and define an action_map, '
'name and version.'
)
def _looks_like_service_class(self, obj, service_name, service_version):
return (
getattr(obj, 'name', '') == service_name and
getattr(obj, 'version', -1) == service_version and
isinstance(getattr(obj, 'action_map', None), dict) and
            hasattr(obj, 'run_actions')
)
def is_connected(self):
return True
def send(self, request):
return self.__service.handle_request(request)
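# Editor's sketch, not part of the upstream package: how a caller might wire
# up LocalTransport. 'calculator_service' is a hypothetical package that would
# need to provide a servant.Service subclass in calculator_service/service.py.
def _example_local_transport():
    transport = LocalTransport()
    transport.configure(service_name='calculator_service', service_version=1)
    # transport.send(request) then dispatches the request in-process via the
    # resolved service's handle_request().
    return transport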
|
eek6/squeakspace
|
www/proxy/scripts/local/node_addr.py
|
Python
|
gpl-3.0
| 2,510
| 0.004382
|
import squeakspace.common.util as ut
import squeakspace.common.util_http as ht
import squeakspace.proxy.server.db_sqlite3 as db
import squeakspace.common.squeak_ex as ex
import config
def post_handler(environ):
query = ht.parse_post_request(environ)
cookies = ht.parse_cookies(environ)
user_id = ht.get_required_cookie(cookies, 'user_id')
session_id = ht.get_required_cookie(cookies, 'session_id')
node_name = ht.get_required(query, 'node_name')
url = ht.get_required(query, 'url')
real_node_name = ht.get_required(query, 'real_node_name')
fingerprint = ht.get_optional(query, 'fingerprint')
conn = db.connect(config.db_path)
try:
c = db.cursor(conn)
db.set_node_addr(c, user_id, session_id, node_name, url, real_node_name, fingerprint)
db.commit(conn)
raise ht.ok_json({'status' : 'ok'})
except ex.SqueakException as e:
raise ht.convert_squeak_exception(e)
finally:
db.close(conn)
def get_handler(environ):
query = ht.parse_get_request(environ)
cookies = ht.parse_cookies(environ)
user_id = ht.get_required_cookie(cookies, 'user_id')
session_id = ht.get_required_cookie(cookies, 'session_id')
node_name = ht.get_required(query, 'node_name')
conn = db.connect(config.db_path)
try:
c = db.cursor(conn)
addr = db.read_node_addr(c, user_id, session_id, node_name)
raise ht.ok_json({'status' : 'ok', 'addr' : addr})
except ex.SqueakException as e:
raise ht.convert_squeak_exception(e)
finally:
db.close(conn)
def delete_handler(environ):
query = ht.parse_post_request(environ)
cookies = ht.parse_cookies(environ)
user_id = ht.get_required_cookie(cookies, 'user_id')
session_id = ht.get_required_cookie(cookies, 'session_id')
node_name = ht.get_required(query, 'node_name')
conn = db.connect(config.db_path)
try:
c = db.cursor(conn)
db.delete_node_addr(c, user_id, session_id, node_name)
db.commit(conn)
raise ht.ok_json({'status' : 'ok'})
except ex.SqueakException as e:
raise ht.convert_squeak_exception(e)
finally:
db.close(conn)
def main_handler(environ):
    ht.dispatch_on_method(environ, {
        'POST' : post_handler,
        'GET' : get_handler,
'DELETE' : delete_handler})
def application(environ, start_response):
return ht.respond_with_handler(environ, start_response, main_handler)
|
blitzrk/sublime_libsass
|
lib/deps.py
|
Python
|
mit
| 1,567
| 0.003191
|
from .pathutils import grep_r
from . import project
import os
import re
def is_partial(path):
'''Check if file is a Sass partial'''
return os.path.basename(path).startswith('_')
def partial_import_regex(partial):
'''Get name of Sass partial file as would be used for @import'''
def from_curdir(cwd):
relpath = os.path.relpath(partial, cwd)
dirname, basename = os.path.split(relpath)
name = os.path.splitext(basename)[0][1:]
partial_import = os.path.join(dirname, name).replace("\\","/")
import_stmt = re.compile('''@import\s+['"]{0}['"]'''.format(partial_import))
return import_stmt
return from_curdir
def get_rec(file_path, start, files=None, partials=None):
'''
Recursively find files importing `partial` in `start` and if any are partials
themselves, find those importing them.
'''
if files is None:
files = []
if partials is None:
partials = []
if not is_partial(file_path):
files.append(file_path)
return (files, partials)
else:
partials.append(file_path)
partial_fn = partial_import_regex(os.path.join(start, file_path))
        for f in grep_r(partial_fn, start, exts=['.sass','.scss']):
if f not in files and f not in partials:
files, partials = get_rec(f, start, files, partials)
return (files, partials)
def get(path):
    '''Get files affected by change in contents of `path`'''
rel, root = project.splitpath(path)
deps, _ = get_rec(rel, root)
return (deps, root)
|
e-sensing/wtss.py
|
src/wtss/__init__.py
|
Python
|
lgpl-3.0
| 1,040
| 0.000962
|
#
# Copyright (C) 2014 National Institute For Space Research (INPE) - Brazil.
#
# This file is part of Python Client API for Web Time Series Service.
#
# Web Time Series Service for Python is free software: you can
# redistribute it and/or modify it under the terms of the
# GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.
#
# Web Time Series Service for Python is distributed in the hope that
# it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Web Time Series Service for Python. See LICENSE. If not, write to
# e-sensing team at <[email protected]>.
#
"""Python Client API for Web Time Series Services (WTSS)."""
from .wtss import wtss
from .wtss import time_series
|
artminster/artminster
|
contrib/billing/migrations/0002_auto__add_field_subscription_frequency.py
|
Python
|
mit
| 5,583
| 0.008239
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Subscription.frequency'
db.add_column('billing_subscription', 'frequency',
self.gf('django.db.models.fields.CharField')(default='MONTHLY', max_length=10),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Subscription.frequency'
db.delete_column('billing_subscription', 'frequency')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'billing.subscription': {
'Meta': {'object_name': 'Subscription'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'MONTHLY'", 'max_length': '10'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
|
'price': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '2'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'billing.usersubscription': {
'Meta': {'object_name': 'UserSubscription'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'subscription': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['billing.Subscription']"}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['billing']
|
AlanJAS/iknowEditor
|
activity.py
|
Python
|
gpl-3.0
| 4,743
| 0.004217
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import gi
gi.require_version('Gtk', '3.0')
import sys
import pygame
from gi.repository import Gtk
from sugar3.activity.activity import Activity
from sugar3.graphics.toolbarbox import ToolbarBox
from sugar3.activity.widgets import ActivityToolbarButton
from sugar3.graphics.toolbutton import ToolButton
from sugar3.activity.widgets import StopButton
from sugar3.graphics.objectchooser import ObjectChooser
from gettext import gettext as _
import sugargame.canvas
import conozco
from points_list import Data
from save_util import save, fixValues
class IknowEditor(Activity):
def __init__(self, handle):
Activity.__init__(self, handle)
self.init_vars()
self.build_toolbar()
self.actividad = conozco.Conozco(self)
self.build_canvas()
self.run_canvas()
self.show_all()
def init_vars(self):
self._image = None
def build_toolbar(self):
self.max_participants = 1
toolbar_box = ToolbarBox()
self.set_toolbar_box(toolbar_box)
toolbar_box.show()
activity_button = ActivityToolbarButton(self)
toolbar_box.toolbar.insert(activity_button, -1)
activity_button.show()
# new pic button
new_pic = ToolButton('new-pic')
new_pic.connect('clicked', self._new_picture)
new_pic.set_tooltip(_('New picture'))
toolbar_box.toolbar.insert(new_pic, -1)
# add / remove point buttons
add_point = ToolButton("row-insert")
add_point.connect("clicked", self._add_point)
add_point.set_tooltip(_("Add a point"))
toolbar_box.toolbar.insert(add_point, -1)
rem_point = ToolButton("row-remove")
rem_point.connect("clicked", self._remove_point)
rem_point.set_tooltip(_("Remove the selected point"))
toolbar_box.toolbar.insert(rem_point, -1)
# save list button
save = ToolButton('filesave')
save.connect('clicked', self._save)
save.set_tooltip(_('Save data'))
toolbar_box.toolbar.insert(save, -1)
# separator and stop button
separator = Gtk.SeparatorToolItem()
separator.props.draw = False
separator.set_expand(True)
toolbar_box.toolbar.insert(separator, -1)
separator.show()
stop_button = StopButton(self)
toolbar_box.toolbar.insert(stop_button, -1)
stop_button.show()
def build_canvas(self):
self.table = Gtk.Table(1, 2, False)
self.box1 = Gtk.HBox()
|
self.box1.set_size_request(350, 350)
self.box1.show()
self.box2 = Gtk.HBox()
self.box2.set_size_request(50, 200)
self.box2.show()
self.table.attach(self.box1, 0, 1, 0, 1)
self.table.attach(self.box2, 1, 2, 0, 1)
self.labels_and_values = Data(self)
self.labels_and_values.connect("some-changed", self._some_changed)
        self.box2.add(self.labels_and_values)
self.set_canvas(self.table)
def run_canvas(self):
self.actividad.canvas = sugargame.canvas.PygameCanvas(self,
main=self.actividad.run,
modules=[pygame.display, pygame.font])
self.box1.add(self.actividad.canvas)
self.actividad.canvas.grab_focus()
def _save(self, widget):
l = self.labels_and_values.get_info()
scale = self.actividad.getScale()
shiftx = self.actividad.getShiftX()
shifty = self.actividad.getShiftY()
ready = fixValues(l, scale, shiftx, shifty)
save(ready)
def _new_picture(self, widget):
try:
chooser = ObjectChooser(parent=self)
except:
chooser = None
f = None
if chooser is not None:
result = chooser.run()
if result == Gtk.ResponseType.ACCEPT:
dsobject = chooser.get_selected_object()
f = dsobject.file_path
if f is not None:
self._image = pygame.image.load(f)
self.actividad.set_background(self._image)
def _add_point(self, widget, label="", value="City", dx='0', dy='-14'):
pos = self.labels_and_values.add_value(label, value, dx, dy)
def _remove_point(self, widget):
path = self.labels_and_values.remove_selected_value()
self._update_points()
def _add_coor(self, pos):
if self._image is not None:
self.labels_and_values.update_selected_value(pos)
def _some_changed(self, treeview, path, new_label):
self._update_points()
def _update_points(self):
l = self.labels_and_values.get_info()
self.actividad.update_points(l)
|
FlintHill/SUAS-Competition
|
tests/unit_tests/test_suassystem_utils_data_functions.py
|
Python
|
mit
| 1,006
| 0.001988
|
import unittest
import os
from PIL import Image
from SUASSystem.utils import crop_target
class SUASSystemUtilsDataFunctionsTestCase(unittest.TestCase):
def test_crop_image(self):
"""
Test the crop image method.
"""
input_image_path = "tests/images/image2_test_image_bounder.jpg"
        output_crop_image_path = "tests/images/test_crop.jpg"
top_left_coords = [250.0, 200.0]
bottom_right_coords = [350.0, 300.0]
crop_target(input_image_path, output_crop_image_path, top_left_coords, bottom_right_coords)
saved_crop = Image.open(output_crop_image_path).load()
input_image = Image.open(input_image_path).load()
self.assertEqual(saved_crop[0, 0], input_image[250, 200])
self.assertEqual(saved_crop[1, 1], input_image[251, 201])
|
self.assertEqual(saved_crop[50, 50], input_image[300, 250])
self.assertEqual(saved_crop[99, 99], input_image[349, 299])
os.remove("tests/images/test_crop.jpg")
|
leohmoraes/weblate
|
weblate/trans/scripts.py
|
Python
|
gpl-3.0
| 2,637
| 0
|
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2015 Michal Čihař <[email protected]>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Hook scripts handling"""
import os.path
import subprocess
from weblate.trans.util import get_clean_env
def get_script_name(name):
'''
Returns script name from string possibly containing full path and
parameters.
'''
return os.path.basename(name).split()[0]
def run_post_push_script(component):
"""Run post push hook"""
run_hook(component, component.post_push_script)
def run_post_update_script(component):
"""Run post update hook"""
run_hook(component, component.post_update_script)
def run_pre_commit_script(component, filename):
"""
Pre commit hook
"""
    run_hook(component, component.pre_commit_script, filename)
def run_post_commit_script(component, filename):
"""
Post commit hook
"""
run_hook(component, component.post_commit_script, filename)
def run_hook(component, script, *args):
"""
Generic script hook executor.
"""
if script:
command = [script]
if args:
command.extend(args)
        environment = get_clean_env()
if component.is_repo_link:
target = component.linked_subproject
else:
target = component
environment['WL_VCS'] = target.vcs
environment['WL_REPO'] = target.repo
environment['WL_PATH'] = target.get_path()
environment['WL_FILEMASK'] = component.filemask
environment['WL_FILE_FORMAT'] = component.file_format
try:
subprocess.check_call(
command,
env=environment,
cwd=component.get_path(),
)
return True
except (OSError, subprocess.CalledProcessError) as err:
component.log_error(
'failed to run hook script %s: %s',
script,
err
)
return False
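# Editor's sketch, not part of Weblate: a minimal hook script invoked by
# run_hook() above would read the WL_* variables exported in the environment
# block; the script below is illustrative only.
EXAMPLE_HOOK_SCRIPT = '''#!/bin/sh
echo "repo: $WL_REPO path: $WL_PATH format: $WL_FILE_FORMAT"
'''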
|
BizShuk/code_sandbox
|
python/raw_input_test.py
|
Python
|
mit
| 120
| 0.016667
|
import sys
|
#line = sys.stdin.read()
#print line
datas = []
for line in sys.stdin:
datas.append(line)
print datas
| |
pybursa/homeworks
|
a_lusher/hw3/Lusher_Alexander_home_work_3_.py
|
Python
|
gpl-2.0
| 2,380
| 0.059714
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
def add(x, y):
a=1
while a>0:
a = x & y
b = x ^ y
x = b
y = a << 1
return b
def vowel_count(word):
vowels_counter = 0
for letter in word:
if letter.isalpha():
if letter.upper() in 'AEIOUY':
vowels_counter += 1
return vowels_counter
if __name__ == '__main__':
# Assignment N 1
text="Proin eget tortor risus. Cras ultricies ligula sed magna dictum porta. Proin eget tortor risus. Curabitur non nulla sit amet nisl tempus convallis quis ac lectus. Donec rutrum congue leo eget malesuada."
list=text.split()
max_vowel_number=0
for i in range(0,len(list)-1):
print "word=",list[i]," number of vowels",vowel_count(list[i])
if vowel_count(list[i])>max_vowel_number:
max_vowel_number=vowel_count(list[i])
print "Maximum number of vowels is",max_vowel_number
# Assignment N 2
text="Proin eget tortor risus. Cras ultricies ligula sed magna dictum porta. Proin eget tortor risus. Curabitur non nulla sit amet nisl tempus convallis quis ac lectus. Donec rutrum congue leo eget malesuada."
list=text.split()
length=len(list[0])
words=[]
words.append(list[0])
for i in range(1,len(list)-1):
if length<len(list[i]):
length=len(list[i])
words[:] = []
words.append(list[i])
elif length==len(list[i]):
words.append(list[i])
print "maximum length=",length,"words are",words
# Assignment N 3
text="Lorem ipsum dolor sit amet, consectetur
|
adipiscing elit. Nulla quis lorem ut libero malesuada feugiat. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec rutrum congue leo eget malesuada. Cras ultricies ligula sed magna dic
|
tum porta."
list=text.split()
i=len(text)-1
mirrored_text=''
while i>=0:
mirrored_text=mirrored_text+(text[i])
i-=1
print mirrored_text
# Assignment N 4
import os
content=dir(os)
content_len=len(content)
for k in range(0,content_len-1):
s="os"+"."+content[k]+".__doc__"
print(eval(s))
import sys
content=dir(sys)
content_len=len(content)
for k in range(0,content_len-1):
s="sys"+"."+content[k]+".__doc__"
print(eval(s))
# Assignment N 5
input=12345
a=str(input)
str_len=len(a)
i=0
total=int(a[i])
while i<str_len-1:
total=add(total,int(a[add(i,1)]))
i=add(i,1)
print total
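    # Editor's note, not part of the original homework: the add() helper above
    # builds addition from XOR (the partial sum) and AND shifted left (the
    # carry); a couple of quick checks make that behaviour concrete.
    assert add(5, 3) == 8
    assert add(0, 7) == 7
    assert vowel_count("Donec") == 2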
|
abramhindle/UnnaturalCodeFork
|
python/testdata/launchpad/lib/lp/services/temporaryblobstorage/webservice.py
|
Python
|
agpl-3.0
| 959
| 0
|
# Copyright 2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""All the interfaces that are exposed through the webservice.
There is a declaration in ZCML somewhere that looks like:
<webservice:register module="lp.patchwebservice" />
which tells `lazr.restful` that it should look for webservice exports here.
"""
__metaclass__ = type
__all__ = [
'ITemporaryBlobStorage',
'ITemporaryStorageManager',
]
from lp.services.temporaryblobstorage.interfaces import (
ITemporaryBlobStorage,
ITemporaryStorageManager,
)
from lp.services.webservice.apihelpers import (
patch_operations_explicit_version,
)
# ITemporaryBlobStorage
patch_operations_explicit_version(
ITemporaryBlobStorage, 'beta', "getProcessedData", "hasBeenProcessed")
# ITemporaryStorageManager
patch_operations_explicit_version(
    ITemporaryStorageManager, 'beta', "fetch")
|
simgunz/anki
|
qt/aqt/preferences.py
|
Python
|
agpl-3.0
| 10,181
| 0.000982
|
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import anki.lang
import aqt
from aqt import AnkiQt
from aqt.profiles import RecordingDriver, VideoDriver
from aqt.qt import *
from aqt.utils import (
TR,
HelpPage,
disable_help_button,
openHelp,
showInfo,
showWarning,
tr,
)
def video_driver_name_for_platform(driver: VideoDriver) -> str:
if driver == VideoDriver.ANGLE:
return tr(TR.PREFERENCES_VIDEO_DRIVER_ANGLE)
elif driver == VideoDriver.Software:
if isMac:
return tr(TR.PREFERENCES_VIDEO_DRIVER_SOFTWARE_MAC)
else:
return tr(TR.PREFERENCES_VIDEO_DRIVER_SOFTWARE_OTHER)
else:
if isMac:
return tr(TR.PREFERENCES_VIDEO_DRIVER_OPENGL_MAC)
else:
return tr(TR.PREFERENCES_VIDEO_DRIVER_OPENGL_OTHER)
class Preferences(QDialog):
def __init__(self, mw: AnkiQt) -> None:
QDialog.__init__(self, mw, Qt.Window)
self.mw = mw
self.prof = self.mw.pm.profile
self.form = aqt.forms.preferences.Ui_Preferences()
self.form.setupUi(self)
disable_help_button(self)
self.form.buttonBox.button(QDialogButtonBox.Help).setAutoDefault(False)
self.form.buttonBox.button(QDialogButtonBox.Close).setAutoDefault(False)
qconnect(
self.form.buttonBox.helpRequested, lambda: openHelp(HelpPage.PREFERENCES)
)
self.silentlyClose = True
self.prefs = self.mw.col.get_preferences()
self.setupLang()
self.setupCollection()
self.setupNetwork()
self.setupBackup()
self.setupOptions()
self.show()
def accept(self) -> None:
# avoid exception if main window is already closed
if not self.mw.col:
return
self.updateCollection()
self.updateNetwork()
self.updateBackup()
self.updateOptions()
self.mw.pm.save()
self.mw.reset()
self.done(0)
aqt.dialogs.markClosed("Preferences")
def reject(self) -> None:
self.accept()
# Language
######################################################################
def setupLang(self) -> None:
f = self.form
f.lang.addItems([x[0] for x in anki.lang.langs])
f.lang.setCurrentIndex(self.langIdx())
qconnect(f.lang.currentIndexChanged, self.onLangIdxChanged)
def langIdx(self) -> int:
codes = [x[1] for x in anki.lang.langs]
lang = anki.lang.currentLang
if lang in anki.lang.compatMap:
lang = anki.lang.compatMap[lang]
else:
lang = lang.replace("-", "_")
try:
return codes.index(lang)
except:
return codes.index("en_US")
def onLangIdxChanged(self, idx: int) -> None:
code = anki.lang.langs[idx][1]
self.mw.pm.setLang(code)
showInfo(
tr(TR.PREFERENCES_PLEASE_RESTART_ANKI_TO_COMPLETE_LANGUAGE), parent=self
)
# Collection options
######################################################################
def setupCollection(self) -> None:
import anki.consts as c
f = self.form
qc = self.mw.col.conf
self.setup_video_driver()
f.newSpread.addItems(list(c.newCardSchedulingLabels(self.mw.col).values()))
f.useCurrent.setCurrentIndex(int(not qc.get("addToCur", True)))
s = self.prefs.sched
f.lrnCutoff.setValue(int(s.learn_ahead_secs / 60.0))
f.timeLimit.setValue(int(s.time_limit_secs / 60.0))
f.showEstimates.setChecked(s.show_intervals_on_buttons)
f.showProgress.setChecked(s.show_remaining_due_counts)
f.newSpread.setCurrentIndex(s.new_review_mix)
f.dayLearnFirst.setChecked(s.day_learn_first)
f.dayOffset.setValue(s.rollover)
if s.scheduler_version < 2:
f.dayLearnFirst.setVisible(False)
f.legacy_timezone.setVisible(False)
else:
f.legacy_timezone.setChecked(not s.new_timezone)
def setup_video_driver(self) -> None:
self.video_drivers = VideoDriver.all_for_platform()
names = [
tr(TR.PREFERENCES_VIDEO_DRIVER, driver=video_driver_name_for_platform(d))
for d in self.video_drivers
]
self.form.video_driver.addItems(names)
self.form.video_driver.setCurrentIndex(
self.video_drivers.index(self.mw.pm.video_driver())
)
def update_video_driver(self) -> None:
new_driver = self.video_drivers[self.form.video_driver.currentIndex()]
if new_driver != self.mw.pm.video_driver():
self.mw.pm.set_video_driver(new_driver)
showInfo(tr(TR.PREFERENCES_CHANGES_WILL_TAKE_EFFECT_WHEN_YOU))
def updateCollection(self) -> None:
f = self.form
d = self.mw.col
self.update_video_driver()
qc = d.conf
qc["addToCur"] = not f.useCurrent.currentIndex()
s = self.prefs.sched
s.show_remaining_due_counts = f.showProgress.isChecked()
s.show_intervals_on_buttons = f.showEstimates.isChecked()
s.new_review_mix = f.newSpread.currentIndex()
s.time_limit_secs = f.timeLimit.value() * 60
s.learn_ahead_secs = f.lrnCutoff.value() * 60
s.day_learn_first = f.dayLearnFirst.isChecked()
s.rollover = f.dayOffset.value()
s.new_timezone = not f.legacy_timezone.isChecked()
self.mw.col.set_preferences(self.prefs)
d.setMod()
# Network
######################################################################
def setupNetwork(self) -> None:
self.form.media_log.setText(tr(TR.SYNC_MEDIA_LOG_BUTTON))
qconnect(self.form.media_log.clicked, self.on_media_log)
self.form.syncOnProgramOpen.setChecked(self.prof["autoSync"])
self.form.syncMedia.setChecked(self.prof["syncMedia"])
self.form.autoSyncMedia.setChecked(self.mw.pm.auto_sync_media_minutes() != 0)
if not self.prof["syncKey"]:
self._hideAuth()
else:
self.form.syncUser.setText(self.prof.get("syncUser", ""))
qconnect(self.form.syncDeauth.clicked, self.onSyncDeauth)
self.form.syncDeauth.setText(tr(TR.SYNC_LOG_OUT_BUTTON))
def on_media_log(self) -> None:
self.mw.media_syncer.show_sync_log()
def _hideAuth(self) -> None:
self.form.syncDeauth.setVisible(False)
self.form.syncUser.setText("")
self.form.syncLabel.setText(
tr(TR.PREFERENCES_SYNCHRONIZATIONNOT_CURRENTLY_ENABLED_
|
CLICK_THE_SYNC)
)
def onSyncDeauth(self) -> None:
if self.mw.media_syncer.is_syncing():
showWarning("Can't log out while sync in progress.")
return
self.prof["syncKey"] = None
self.mw.col.media.force_resync()
self._hideAuth()
def updateNetwork(self
|
) -> None:
self.prof["autoSync"] = self.form.syncOnProgramOpen.isChecked()
self.prof["syncMedia"] = self.form.syncMedia.isChecked()
self.mw.pm.set_auto_sync_media_minutes(
self.form.autoSyncMedia.isChecked() and 15 or 0
)
if self.form.fullSync.isChecked():
self.mw.col.modSchema(check=False)
self.mw.col.setMod()
# Backup
######################################################################
def setupBackup(self) -> None:
self.form.numBackups.setValue(self.prof["numBackups"])
def updateBackup(self) -> None:
self.prof["numBackups"] = self.form.numBackups.value()
# Basic & Advanced Options
######################################################################
def setupOptions(self) -> None:
self.form.pastePNG.setChecked(self.prof.get("pastePNG", False))
self.form.uiScale.setValue(int(self.mw.pm.uiScale() * 100))
self.form.pasteInvert.setChecked(self.prof.get("pasteInvert", False))
self.form.showPlayButtons.setChecked(self.prof.get("showPlayButtons", True))
self.form.nightMode.setChecked(self.mw.pm.night_mode())
|
bitmazk/django-registration-email
|
registration_email/backends/default/urls.py
|
Python
|
unlicense
| 1,615
| 0
|
"""Custom urls.py for django-registration."""
from django.conf import settings
from django.conf.urls import include, url
from django.views.generic import TemplateView
from registration.backends.default.views import (
ActivationView,
|
RegistrationView,
)
from registration_email.forms import EmailRegistrationForm
ur
|
lpatterns = [
# django-registration views
url(r'^activate/complete/$',
TemplateView.as_view(
template_name='registration/activation_complete.html'),
name='registration_activation_complete'),
url(r'^activate/(?P<activation_key>\w+)/$',
ActivationView.as_view(
template_name='registration/activate.html',
get_success_url=getattr(
settings, 'REGISTRATION_EMAIL_ACTIVATE_SUCCESS_URL',
lambda request, user: '/'),
),
name='registration_activate'),
url(r'^register/$',
RegistrationView.as_view(
form_class=EmailRegistrationForm,
get_success_url=getattr(
settings, 'REGISTRATION_EMAIL_REGISTER_SUCCESS_URL',
lambda request, user: '/'),
),
name='registration_register'),
url(r'^register/complete/$',
TemplateView.as_view(
template_name='registration/registration_complete.html'),
name='registration_complete'),
url(r'^register/closed/$',
TemplateView.as_view(
template_name='registration/registration_closed.html'),
name='registration_disallowed'),
# django auth urls
url(r'', include('registration_email.auth_urls')),
]
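# Hedged sketch (illustrative, not part of this urls.py): the two getattr()
# lookups above fall back to a callable returning '/' unless the project's
# settings define their own success-URL callables, e.g. in settings.py:
#
#   REGISTRATION_EMAIL_ACTIVATE_SUCCESS_URL = lambda request, user: '/welcome/'
#   REGISTRATION_EMAIL_REGISTER_SUCCESS_URL = lambda request, user: '/check-your-inbox/'
#
# (the URLs shown here are examples only)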
|
gtrdp/twitter-clustering
|
crawling/crawl.py
|
Python
|
mit
| 1,795
| 0.007242
|
#!/usr/bin/env python
import json
DEBUG = False
import sys
import tweepy
import time
#consumer_key = 'HcMP89vDDumRhHeQBYbE3Asnp'
#consumer_secret = 'kcXfsNyBl7tan1u2DgV7E10MpsVxhbwTjmbjp3YL9XfDdMJiYt'
#access_key = '67882386-IXbLKaQEtTbZF9yotuLTjgitqjwBkouIstmlW4ecG'
#access_secret = 'SyVrXlIDkidYr3JlNiTQ8tjZ973gIKy5mfpEwFpQWN3Gy'
consumer_key = 'Mcof8aJtJVDqQwz4OMDn2AyZu'
consumer_secret = 'mjsHber2Gj79uc2unbzSRdwGyNyZGjEPBEn4ZHXQZW8FeGeSkv'
access_key = '833745600743079936-hK2K3umAtnfYYuLGLDwD7uzj9ssPCDU'
access_secret = '2Odz7Cky2gb3dZJsO1E65zNL8i84ZnoxLrM9uihSEDb6M'
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_key, access_secret)
api
|
= tweepy.API(auth)
class CustomStreamListener(tweepy.StreamListener):
def __init__(self, data_dir):
# query_fname = format_filename(query)
time_now = time.strftime("%Y-%m-%d_%H.%M.%S")
self.outfile = "%s/stream_%s.json" % (data_dir, time_now)
|
def on_data(self, data):
try:
with open(self.outfile, 'a') as f:
f.write(data)
print(data)
return True
except BaseException as e:
print("Error on_data: %s" % str(e))
time.sleep(5)
return True
def on_error(self, status_code):
print >> sys.stderr, 'Encountered error with status code:', status_code
return True # Don't kill the stream
def on_timeout(self):
print >> sys.stderr, 'Timeout...'
return True # Don't kill the stream
# run the code with try to handle the exception
try:
sapi = tweepy.streaming.Stream(auth, CustomStreamListener('twitter-data'))
sapi.filter(track=["transjakarta", "trans jakarta", "bus way", "busway"], languages=["in"])
except:
pass
|
wmarshall484/streamsx.topology
|
com.ibm.streamsx.topology/opt/python/packages/streamsx/topology/mqtt.py
|
Python
|
apache-2.0
| 7,558
| 0.003705
|
# coding=utf-8
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2016
"""
Publish and subscribe to MQTT messages.
Additional information at http://mqtt.org and
http://ibmstreams.github.io/streamsx.messaging
"""
from future.builtins import *
from streamsx.topology.topology import *
from streamsx.topology import schema
class MqttStreams(object):
"""
A simple connector to a MQTT broker for publishing
string tuples to MQTT topics, and
subscribing to MQTT topics and creating streams.
A connector is for a specific MQTT Broker as specified in
    the configuration object config. Any number of publish() and subscribe()
connections may be created from a single mqtt_streams connector.
Sample use:
::
topo = Topology("An MQTT application")
# define configuration information
config = {}
config['clientID'] = "test_MQTTpublishClient"
config['qos'] = int("1") #(needs to be int vs long)
        config['keepAliveInterval'] = int(20) #(needs to be int vs long)
        config['commandTimeout'] = 30000 #(needs to be int vs long)
        config['period'] = 5000 #(needs to be int vs long)
        config['messageQueueSize'] = 10 #(needs to be int vs long)
config['reconnectionBound'] = int(20)
config['retain'] = True
config['password'] = "foobar"
config['trustStore'] = "/tmp/no-such-trustStore"
config['trustStorePassword'] = "woohoo"
config['keyStore'] = "/tmp/no-such-keyStore"
config['keyStorePassword'] = "woohoo"
# create the connector's configuration property map
config['serverURI'] = "tcp://localhost:1883"
config['userID'] = "user1id"
config[' password'] = "user1passwrd"
# create the connector
mqstream = MqttStreams(topo, config)
# publish a python source
|
stream to the topic "python.topic1"
topic = "python.topic1"
src = topo.source(test_functions.mqtt_publish)
mqs = mqstream.publish(src, topic)
|
# subscribe to the topic "python.topic1"
topic = ["python.topic1", ]
mqs = mqstream.subscribe(topic)
mqs.print()
Configuration properties apply to publish and
subscribe unless stated otherwise.
serverURI
Required String. URI to the MQTT server, either
tcp://<hostid>[:<port>]}
or ssl://<hostid>[:<port>]}.
The port defaults to 1883 for "tcp:" and 8883 for "ssl:" URIs.
clientID
Optional String. A unique identifier for a connection
to the MQTT server.
        The MQTT broker only allows a single
        connection for a particular clientID.
By default a unique client ID is automatically
generated for each use of publish() and subscribe().
The specified clientID is used for the first
publish() or subscribe() use and
        a suffix is added for each subsequent use.
keepAliveInterval
Optional Integer. Automatically generate a MQTT
ping message to the server if a message or ping hasn't been
        sent or received in the last keepAliveInterval seconds.
Enables the client to detect if the server is no longer available
without having to wait for the TCP/IP timeout.
A value of 0 disables keepalive processing.
The default is 60.
commandTimeout
Optional Long. The maximum time in milliseconds
to wait for a MQTT connect or publish action to complete.
A value of 0 causes the client to wait indefinitely.
The default is 0.
period
Optional Long. The time in milliseconds before
attempting to reconnect to the server following a connection failure.
The default is 60000.
userID
Optional String. The identifier to use when authenticating
with a server configured to require that form of authentication.
password
Optional String. The identifier to use when authenticating
with server configured to require that form of authentication.
trustStore
Optional String. The pathname to a file containing the
public certificate of trusted MQTT servers. If a relative path
is specified, the path is relative to the application directory.
Required when connecting to a MQTT server with an
ssl:/... serverURI.
trustStorePassword
Required String when trustStore is used.
The password needed to access the encrypted trustStore file.
keyStore
Optional String. The pathname to a file containing the
MQTT client's public private key certificates.
If a relative path is specified, the path is relative to the
application directory.
Required when an MQTT server is configured to use SSL client authentication.
keyStorePassword
Required String when keyStore is used.
The password needed to access the encrypted keyStore file.
messageQueueSize
[subscribe] Optional Integer. The size, in number
of messages, of the subscriber's internal receive buffer. Received
messages are added to the buffer prior to being converted to a
stream tuple. The receiver blocks when the buffer is full.
The default is 50.
retain
[publish] Optional Boolean. Indicates if messages should be
retained on the MQTT server. Default is false.
qos
Optional Integer. The default
MQTT quality of service used for message handling.
The default is 0.
"""
def __init__(self, topology, config):
self.topology = topology
self.config = config.copy()
self.opCnt = 0
def publish(self, pub_stream, topic):
parms = self.config.copy()
parms['topic'] = topic
parms['dataAttributeName'] = "string"
if (++self.opCnt > 1):
# each op requires its own clientID
clientId = parms['clientID']
if (clientId is not None and len(clientId) > 0):
parms['clientID'] = clientId + "-" + str(id(self)) + "-" + str(self.opCnt)
# convert pub_stream outputport schema from spl po to spl rstring type
forOp = pub_stream._map(streamsx.topology.functions.identity, schema.CommonSchema.String)
op = self.topology.graph.addOperator(kind="com.ibm.streamsx.messaging.mqtt::MQTTSink")
op.addInputPort(outputPort=forOp.oport)
op.setParameters(parms)
return None
def subscribe(self, topic):
parms = self.config.copy()
if (parms['retain'] is not None):
del parms['retain']
parms['topics'] = topic
parms['topicOutAttrName'] = "topic"
parms['dataAttributeName'] = "string"
if (++self.opCnt > 1):
# each op requires its own clientID
clientId = parms['clientID']
if (clientId is not None and len(clientId) > 0):
parms['clientID'] = clientId + "-" + str(id(self)) + "-" + str(self.opCnt)
op = self.topology.graph.addOperator(kind="com.ibm.streamsx.messaging.mqtt::MQTTSource")
oport = op.addOutputPort(schema=schema.StreamSchema("tuple<rstring topic, rstring string>"))
op.setParameters(parms)
pop = self.topology.graph.addPassThruOperator()
pop.addInputPort(outputPort=oport)
pOport = pop.addOutputPort(schema=schema.CommonSchema.String)
return Stream(self.topology, pOport)
|
Gigers/data-struct
|
algoritimos/Python/fatorial-while.py
|
Python
|
bsd-2-clause
| 158
| 0.012658
|
def fat(n):
result = 1
while n > 0:
result = result * n
n = n -
|
1
return resu
|
lt
# tests
print("Factorial of 3: ", fat(3))
|
Dani4kor/Checkio
|
days-diff.py
|
Python
|
mit
| 504
| 0.003968
|
fro
|
m datetime import datetime
def days_diff(date1, date2):
"""
Find absolute diff in days between dates
"""
days = datetime(*date1) - datetime(*date2)
print abs(days)
return abs(days.days)
if __name__ == '__main__':
# These "asserts" using only for self-checking and not necessary for auto-testing
assert days_diff((1982, 4, 19), (1982, 4, 22)) == 3
assert days_diff((2014, 1, 1), (2014, 8, 27)) == 238
assert days_diff((2014, 8, 27), (2014, 1, 1)) ==
|
238
|
bgarrels/sky
|
sky/legacy/comparison.py
|
Python
|
bsd-3-clause
| 685
| 0.014599
|
import sys
import requests
try:
from .helper import *
except SystemError:
from helper import *
def compareRequestsAndSelenium(url):
html1 = str(requests.get(url).text)
try:
driver = webdriver.Firefox()
driver.maximize_window()
driver.get(url)
html2 = str(driver.page_source)
finally:
driver.close()
view_diff(url, html1, html2)
# url = 'http://www.healthgrades.com/physician/dr-jeannine-villella-y4jts'
# compareRequestsAndSelenium(url)
# url = 'https://www.betterdoctor.com/wendy-tcheng'
# co
|
mpareRequestsAndSelenium(url)
if __name__ == '__main__':
compareReque
|
stsAndSelenium(sys.argv[1])
|
ecdpalma/napscheduler
|
napscheduler/util.py
|
Python
|
mit
| 6,708
| 0.001044
|
"""
This module contains several handy functions primarily meant for internal use.
"""
from datetime import date, datetime, timedelta
from time import mktime
import re
import sys
from types import MethodType
__all__ = ('asint', 'asbool', 'convert_to_datetime', 'timedelta_seconds',
'time_difference', 'datetime_ceil', 'combine_opts',
'get_callable_name', 'obj_to_ref', 'ref_to_obj', 'maybe_ref',
'to_unicode', 'iteritems', 'itervalues', 'xrange')
def asint(text):
"""
Safely converts a string to an integer, returning None if the string
is None.
:type text: str
:rtype: int
"""
if text is not None:
return int(text)
def asbool(obj):
"""
Interprets an object as a boolean value.
:rtype: bool
"""
|
if isinstance(obj, str):
obj = obj.strip().lower()
if obj in ('true', 'yes', 'on', 'y', 't', '1'):
return True
if obj in ('false', 'no', '
|
off', 'n', 'f', '0'):
return False
raise ValueError('Unable to interpret value "%s" as boolean' % obj)
return bool(obj)
_DATE_REGEX = re.compile(
r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
r'(?: (?P<hour>\d{1,2}):(?P<minute>\d{1,2}):(?P<second>\d{1,2})'
r'(?:\.(?P<microsecond>\d{1,6}))?)?')
def convert_to_datetime(input):
"""
Converts the given object to a datetime object, if possible.
If an actual datetime object is passed, it is returned unmodified.
If the input is a string, it is parsed as a datetime.
Date strings are accepted in three different forms: date only (Y-m-d),
date with time (Y-m-d H:M:S) or with date+time with microseconds
(Y-m-d H:M:S.micro).
:rtype: datetime
"""
if isinstance(input, datetime):
return input
elif isinstance(input, date):
return datetime.fromordinal(input.toordinal())
elif isinstance(input, str):
m = _DATE_REGEX.match(input)
if not m:
raise ValueError('Invalid date string')
values = [(k, int(v or 0)) for k, v in m.groupdict().items()]
values = dict(values)
return datetime(**values)
raise TypeError('Unsupported input type: %s' % type(input))
def timedelta_seconds(delta):
"""
Converts the given timedelta to seconds.
:type delta: timedelta
:rtype: float
"""
return delta.days * 24 * 60 * 60 + delta.seconds + \
delta.microseconds / 1000000.0
def time_difference(date1, date2):
"""
Returns the time difference in seconds between the given two
datetime objects. The difference is calculated as: date1 - date2.
:param date1: the later datetime
:type date1: datetime
:param date2: the earlier datetime
:type date2: datetime
:rtype: float
"""
later = mktime(date1.timetuple()) + date1.microsecond / 1000000.0
earlier = mktime(date2.timetuple()) + date2.microsecond / 1000000.0
return later - earlier
def datetime_ceil(dateval):
"""
Rounds the given datetime object upwards.
:type dateval: datetime
"""
if dateval.microsecond > 0:
return dateval + timedelta(seconds=1,
microseconds= -dateval.microsecond)
return dateval
def combine_opts(global_config, prefix, local_config={}):
"""
Returns a subdictionary from keys and values of ``global_config`` where
the key starts with the given prefix, combined with options from
local_config. The keys in the subdictionary have the prefix removed.
:type global_config: dict
:type prefix: str
:type local_config: dict
:rtype: dict
"""
prefixlen = len(prefix)
subconf = {}
for key, value in global_config.items():
if key.startswith(prefix):
key = key[prefixlen:]
subconf[key] = value
subconf.update(local_config)
return subconf
def get_callable_name(func):
"""
Returns the best available display name for the given function/callable.
"""
f_self = getattr(func, '__self__', None) or getattr(func, 'im_self', None)
if f_self and hasattr(func, '__name__'):
if isinstance(f_self, type):
# class method
return '%s.%s' % (f_self.__name__, func.__name__)
# bound method
return '%s.%s' % (f_self.__class__.__name__, func.__name__)
if hasattr(func, '__call__'):
if hasattr(func, '__name__'):
# function, unbound method or a class with a __call__ method
return func.__name__
# instance of a class with a __call__ method
return func.__class__.__name__
raise TypeError('Unable to determine a name for %s -- '
'maybe it is not a callable?' % repr(func))
def obj_to_ref(obj):
"""
Returns the path to the given object.
"""
ref = '%s:%s' % (obj.__module__, get_callable_name(obj))
try:
obj2 = ref_to_obj(ref)
if obj != obj2:
raise ValueError
except Exception:
raise ValueError('Cannot determine the reference to %s' % repr(obj))
return ref
def ref_to_obj(ref):
"""
Returns the object pointed to by ``ref``.
"""
if not isinstance(ref, basestring):
raise TypeError('References must be strings')
if not ':' in ref:
raise ValueError('Invalid reference')
modulename, rest = ref.split(':', 1)
try:
obj = __import__(modulename)
except ImportError:
raise LookupError('Error resolving reference %s: '
'could not import module' % ref)
try:
for name in modulename.split('.')[1:] + rest.split('.'):
obj = getattr(obj, name)
return obj
except Exception:
raise LookupError('Error resolving reference %s: '
'error looking up object' % ref)
def maybe_ref(ref):
"""
Returns the object that the given reference points to, if it is indeed
a reference. If it is not a reference, the object is returned as-is.
"""
if not isinstance(ref, str):
return ref
return ref_to_obj(ref)
def to_unicode(string, encoding='ascii'):
"""
Safely converts a string to a unicode representation on any
Python version.
"""
if hasattr(string, 'decode'):
return string.decode(encoding, 'ignore')
return string # pragma: nocover
if sys.version_info < (3, 0): # pragma: nocover
iteritems = lambda d: d.iteritems()
itervalues = lambda d: d.itervalues()
xrange = xrange
basestring = basestring
else: # pragma: nocover
iteritems = lambda d: d.items()
itervalues = lambda d: d.values()
xrange = range
basestring = str
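# Hedged usage sketch (illustrative, not from the original module): a quick
# demonstration of the helpers documented above; the values are arbitrary examples.
if __name__ == '__main__':  # pragma: nocover
    assert asint('42') == 42
    assert asbool('yes') is True
    dt = convert_to_datetime('2014-08-27 10:30:00.000001')
    assert dt == datetime(2014, 8, 27, 10, 30, 0, 1)
    # rounding up bumps the stray microsecond to the next whole second
    assert datetime_ceil(dt) == datetime(2014, 8, 27, 10, 30, 1)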
|
ClearCorp/odoo-clearcorp
|
exchange_rate_calculated/models/__init__.py
|
Python
|
agpl-3.0
| 144
| 0
|
# -*- coding: utf-
|
8 -*-
# © <YEAR(S)> ClearCorp
# License AGPL-3.0 or later (http:/
|
/www.gnu.org/licenses/agpl.html).
import account_move_line
|
markokr/cc
|
cc/daemon/infosender.py
|
Python
|
bsd-2-clause
| 5,065
| 0.0077
|
#! /usr/bin/env python
"""Read infofiles.
"""
import glob
import os, os.path
import sys
import threading
import time
import skytools
import cc.util
from cc import json
from cc.daemon import CCDaemon
from cc.message import is_msg_req_valid
from cc.reqs import InfofileMessage
class InfoStamp:
def __init__(self, fn, st):
self.filename = fn
self.filestat = st
self.modified = 1
def check_send(self, st):
if (st.st_mtime != self.filestat.st_mtime
or st.st_size != self.filestat.st_size):
# st changed, new mod
self.modified = 1
self.filestat = st
return 0
elif self.modified:
return 1
else:
return 0
class InfofileCollector(CCDaemon):
log = skytools.getLogger('d:InfofileCollector')
def reload(self):
super(InfofileCollector, self).reload()
self.infodir = self.cf.getfile('infodir')
self.infomask = self.cf.get('infomask')
self.compression = self.cf.get ('compression', 'none')
if self.compression not in (None, '', 'none', 'gzip', 'bzip2'):
self.log.error ("unknown compression: %s", self.compression)
self.compression_level = self.cf.getint ('compression-level', '')
self.maint_period = self.cf.getint ('maint-period', 60 * 60)
self.stats_period = self.cf.getint ('stats-period', 30)
self.msg_suffix = self.cf.get ('msg-suffix', '')
if self.msg_suffix and not is_msg_req_valid (self.msg_suffix):
self.log.error ("invalid msg-suffix: %s", self.msg_suffix)
self.msg_suffix = None
self.use_blob = self.cf.getbool ('use-blob', True)
def startup(self):
super(InfofileCollector, self).startup()
# fn -> stamp
self.infomap = {}
# activate periodic maintenance
self.do_maint()
def process_file(self, fs):
f = open(fs.filename, 'rb')
st = os.fstat(f.fileno())
if fs.check_send(st):
body = f.read()
if len(body) != st.st_size:
return
fs.modified = 0
self.log.debug('Sending: %s', fs.filename)
self.send_file(fs, body)
self.sta
|
t_inc('count')
f.close()
def send_file(self, fs, body):
cfb = cc.util.compress (body, self.compression, {'level': self.compression_level})
self.log.debug ("file compressed from %i to %i", len(body), len(cfb))
if self.use_blob:
data = ''
blob = cfb
else:
data = cfb.encode('base64')
|
blob = None
msg = InfofileMessage(
filename = fs.filename.replace('\\', '/'),
mtime = fs.filestat.st_mtime,
comp = self.compression,
data = data)
if self.msg_suffix:
msg.req += '.' + self.msg_suffix
self.ccpublish (msg, blob)
self.stat_inc ('infosender.bytes.read', len(body))
self.stat_inc ('infosender.bytes.sent', len(cfb))
def find_new(self):
fnlist = glob.glob (os.path.join (self.infodir, self.infomask))
newlist = []
for fn in fnlist:
try:
st = os.stat(fn)
except OSError, e:
self.log.info('%s: %s', fn, e)
continue
if fn not in self.infomap:
fstamp = InfoStamp(fn, st)
self.infomap[fn] = fstamp
else:
old = self.infomap[fn]
if old.check_send(st):
newlist.append(old)
self.log.debug ("files found - all: %i, new: %i", len(fnlist), len(newlist))
return newlist
def _work (self):
self.connect_cc()
newlist = self.find_new()
for fs in newlist:
try:
self.process_file(fs)
except (OSError, IOError), e:
self.log.info('%s: %s', fs.filename, e)
self.stat_inc('changes', len(newlist))
def work (self):
t = time.time()
while self.looping and self.stats_period > time.time() - t:
self._work()
self.sleep(1)
return 1
def stop (self):
""" Called from signal handler """
super(InfofileCollector, self).stop()
self.log.info ("stopping")
self.maint_timer.cancel()
def do_maint (self):
""" Drop removed files from our cache """
self.log.info ("cleanup")
current = glob.glob (os.path.join (self.infodir, self.infomask))
removed = set(self.infomap) - set(current)
for fn in removed:
self.log.debug ("forgetting file %s", fn)
del self.infomap[fn]
self.log.info ("current: %i, removed: %i", len(current), len(removed))
self.maint_timer = threading.Timer (self.maint_period, self.do_maint)
self.maint_timer.start()
if __name__ == '__main__':
s = InfofileCollector('infofile_collector', sys.argv[1:])
s.start()
|
hashview/hashview
|
migrations/versions/ded3fd1d7f9d_.py
|
Python
|
gpl-3.0
| 850
| 0.002353
|
"""empty message
Revision ID: ded3fd1d7f9d
Revises: b70e85abec53
Create Date: 2020-12-30 22:46:59.418950
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = 'ded3fd1d7f9d'
down_revision = 'b70e85abec53'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('hashfiles', sa.Column('ch
|
ecksum', sa.String(length=256), nullable=False))
op.drop_column('hashfiles', 'hash_str')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('hashfiles', sa.Column('hash_str', mysql.VARCHAR(l
|
ength=256), nullable=False))
op.drop_column('hashfiles', 'checksum')
# ### end Alembic commands ###
|
runt18/nupic
|
src/nupic/regions/TestRegion.py
|
Python
|
agpl-3.0
| 15,152
| 0.011022
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
# This is a PyRegion-based python test regions for exploring/testing CLA Network
# mechanisms
from abc import ABCMeta, abstractmethod
from nupic.bindings.regions.PyRegion import PyRegion
from nupic.data.dictutils import DictObj
cla
|
ss RegionIdentityPolicyBase(object):
""" A base class that must be subclassed by users in order to define the
TestRegion instance's specialization. See also setIdentityPolicyInstance().
"""
__metaclass__ = ABCMeta
@abstractmethod
def initialize(self, testRegionObj):
""" Called from the scope of the region's PyRegion.initialize() method.
testRegionObj:
|
TestRegion instance with which this policy is
associated.
"""
@abstractmethod
def compute(self, inputs, outputs):
"""Perform the main computation
This method is called in each iteration for each phase the node supports.
Called from the scope of the region's PyRegion.compute() method.
inputs: dict of numpy arrays (one per input)
outputs: dict of numpy arrays (one per output)
"""
@abstractmethod
def getOutputElementCount(self, name):
"""Return the number of elements in the given output of the region
Called from the scope of the region's PyRegion.getOutputElementCount() method.
name: the name of the output
"""
@abstractmethod
def getName(self):
""" Return the name of the region
"""
class TestRegion(PyRegion):
"""
TestRegion is designed for testing and exploration of CLA Network
mechanisms. Each TestRegion instance takes on a specific role via
the associated TestRegionRole policy (TBD).
"""
def __init__(self,
**kwargs):
super(PyRegion, self).__init__(**kwargs)
# Learning, inference, and other parameters.
# By default we start out in stage learn with inference disabled
# The specialization policy is what gives this region instance its identity.
# Users set this via setIdentityPolicyInstance() before running the network
self.identityPolicy = None
# Debugging support, used in _conditionalBreak
self.breakPdb = False
self.breakKomodo = False
# Construct ephemeral variables (those that aren't serialized)
self.__constructEphemeralInstanceVars()
# Variables set up in initialize()
#self._sfdr = None # FDRCSpatial instance
return
def __constructEphemeralInstanceVars(self):
""" Initialize ephemeral instance variables (those that aren't serialized)
"""
assert not hasattr(self, 'ephemeral')
self.ephemeral = DictObj()
self.ephemeral.logPathInput = ''
self.ephemeral.logPathOutput = ''
self.ephemeral.logPathOutputDense = ''
self.ephemeral._fpLogInput = None
self.ephemeral._fpLogOutput = None
self.ephemeral._fpLogOutputDense = None
return
#############################################################################
#
# Initialization code
#
#############################################################################
def initialize(self, dims, splitterMaps):
""" Called by network after all links have been set up
dims, splitterMaps: Unused legacy args
"""
self.identityPolicy.initialize(self)
_debugOut(self.identityPolicy.getName())
return
#############################################################################
#
# Core compute methods: learning, inference, and prediction
#
#############################################################################
def compute(self, inputs, outputs):
"""
Run one iteration of the region's compute.
The guts of the compute are contained in the _compute() call so that
we can profile it if requested.
"""
# Uncomment this to find out who is generating divide by 0, or other numpy warnings
# numpy.seterr(divide='raise', invalid='raise', over='raise')
self.identityPolicy.compute(inputs, outputs)
_debugOut(("%s: inputs=%s; outputs=%s") % \
(self.identityPolicy.getName(),inputs, outputs))
return
#############################################################################
#
# NuPIC 2 Support
# These methods are required by NuPIC 2
#
#############################################################################
def getOutputElementCount(self, name):
nOutputElements = self.identityPolicy.getOutputElementCount(name)
return nOutputElements
# TODO: as a temporary hack, getParameterArrayCount checks to see if there's a
# variable, private or not, with that name. If so, it attempts to return the
# length of that variable.
def getParameterArrayCount(self, name, index):
p = self.getParameter(name)
if (not hasattr(p, '__len__')):
raise Exception("Attempt to access parameter '{0!s}' as an array but it is not an array".format(name))
return len(p)
# TODO: as a temporary hack, getParameterArray checks to see if there's a
# variable, private or not, with that name. If so, it returns the value of the
# variable.
def getParameterArray(self, name, index, a):
p = self.getParameter(name)
if (not hasattr(p, '__len__')):
raise Exception("Attempt to access parameter '{0!s}' as an array but it is not an array".format(name))
if len(p) > 0:
a[:] = p[:]
return
#############################################################################
#
# Region API support methods: getSpec, getParameter, and setParameter
#
#############################################################################
@classmethod
def getSpec(cls):
"""Return the base Spec for TestRegion.
"""
spec = dict(
description="TestRegion",
singleNodeOnly=True,
inputs=dict(
bottomUpIn=dict(
description="""The input vector.""",
dataType='Real32',
count=0,
required=False,
regionLevel=True,
isDefaultInput=True,
requireSplitterMap=False),
topDownIn=dict(
description="""The top-down input signal, generated from
feedback from upper levels""",
dataType='Real32',
count=0,
required = False,
regionLevel=True,
isDefaultInput=False,
requireSplitterMap=False),
),
outputs=dict(
bottomUpOut=dict(
description="""The output signal generated from the bottom-up inputs
from lower levels.""",
dataType='Real32',
count=0,
regionLevel=True,
isDefaultOutput=True),
topDownOut=dict(
description="""The top-down output signal, generated from
feedback from upper levels""",
dataType='Real32',
count=0,
regionLevel=True,
isDefaultOutput=False),
),
parameters=dict(
logPathInput=dict(
description='Optional name of input log file. If set, every input vector'
' will be logged to this file.',
accessMode='ReadWrite',
dataType='Byte',
count=0,
constraints=''),
lo
|
UdK-VPT/Open_eQuarter
|
mole3/tests/plugin_interaction_test.py
|
Python
|
gpl-2.0
| 4,712
| 0.002971
|
import unittest
import os, sys, imp
from qgis import utils
from qgis.core import QgsVectorLayer, QgsField, QgsProject, QGis
from qgis.PyQt.QtCore import QVariant
from .qgis_models import set_up_interface
from mole3.qgisinteraction import layer_interaction as li
from mole3.qgisinteraction import plugin_interaction as pi
from mole3.tests.qgis_models import HybridLayer
class PstPluginInteractionTest(unittest.TestCase):
def create_layer_with_features(self, name, type='Polygon'):
v_layer_name = li.biuniquify_layer_name(name)
if type == 'Point':
v_layer = QgsVectorLayer('{}?crs=EPSG:3857'.format(type), v_layer_name, 'memory', False)
else:
v_layer = HybridLayer(type, v_layer_name)
provider = v_layer.dataProvider()
v_layer.startEditing()
attributes = [QgsField('COLOR_RED', QVariant.String),
QgsField('COLOR_GRE', QVariant.String),
QgsField('COLOR_BLU', QVariant.String),
QgsField('COLOR_ALP', QVariant.String)]
provider.addAttributes(attributes)
v_layer.commitChanges()
return v_layer
def add_pointsamplingtool_to_plugins(self):
plugin_folder = os.path.join(utils.plugin_paths[0], 'pointsamplingtool', '__init__.py')
self.assertTrue(os.path.exists(str(plugin_folder)), 'Path to plugin not found. ({})'.format(str(plugin_folder)))
sys.modules['pointsamplingtool'] = imp.load_source('pointsamplingtool', plugin_folder)
def setUp(self):
self.qgis_app, self.canvas, self.iface = set_up_interface()
utils.plugin_paths = [os.path.expanduser('~/.qgis2/python/plugins')]
utils.updateAvailablePlugins()
utils.loadPlugin('pointsamplingtool')
utils.iface = self.iface
utils.startPlugin('pointsamplingtool')
def tearDown(self):
if self.qgis_app is not None:
del(self.qgis_app)
def test_if_plugin_is_available(self):
self.assertNotEqual(utils.available_plugins, [], 'No plugins were loaded.')
self.assertIn('pointsamplingtool', utils.available_plugins)
def test_if_plugin_is_accessible(self):
self.add_pointsamplingtool_to_plugins()
psti = pi.PstInteraction(utils.iface)
self.assertIsNotNone(psti)
def test_if_all_fields_are_selected(self):
self.add_pointsamplingtool_to_plugins()
registry = QgsProject.instance()
point_layer = self.create_layer_with_features('point', 'Point')
poly_layer1 = self.create_layer_with_features('poly1')
poly_layer2 = self.create_layer_with_features('poly2')
registry.addMapLayer(point_layer)
registry.addMapLayer(poly_layer1)
registry.addMapLayer(poly_layer2)
psti = pi.PstInteraction(utils.iface)
psti.set_input_layer(point_layer.name())
selected_fields = psti.pst_dialog.fieldsTable
psti.select_and_rename_files_for_sampling()
fields_point = point_layer.dataProvider().fields()
fields_poly1 = poly_layer1.dataProvider().fields()
fields_poly2 = poly_layer2.dataProvider().fields()
rows_expected = fields_point.count() + fields_poly1.count() + fields_poly2.count()
self.assertEqual(selected_fields.rowCount(), rows_expected)
def test_if_field_names_are_unique(self):
self.add_pointsamplingtool_to_plugins()
registry = QgsProject.instance()
point_layer = self.create_layer_with_features('test_pointlayer', 'Point')
poly_layer1 = self.create_layer_with_features('test_polygonlayer1')
poly_layer2 = self.create_layer_with_features('test
|
_polygonlayer2')
registry.addMapLayer(point_layer)
registry.addMapLayer(poly_layer1)
registry.addMapLayer(poly_layer2)
psti = pi.PstInteraction(util
|
s.iface)
psti.set_input_layer(point_layer.name())
map = psti.select_and_rename_files_for_sampling()
appendix = ['R', 'G', 'B', 'a']
poly_fields = psti.pst_dialog.rastItems[poly_layer1.name()]
for i in range(1, len(poly_fields)):
self.assertEqual(poly_fields[i][1], '01{}_{}'.format(poly_layer1.name()[:6], appendix[i-1]))
poly_fields = psti.pst_dialog.rastItems[poly_layer2.name()]
for i in range(1, len(poly_fields)):
self.assertEqual(poly_fields[i][1], '02{}_{}'.format(poly_layer1.name()[:6], appendix[i-1]))
self.assertEqual(map[poly_layer1.name()], '01{}'.format(poly_layer1.name()[:6]))
self.assertEqual(map[poly_layer2.name()], '02{}'.format(poly_layer2.name()[:6]))
if __name__ == '__main__':
unittest.main()
|
robwarm/gpaw-symm
|
gpaw/test/big/scf/b256H2O/b256H2O.py
|
Python
|
gpl-3.0
| 4,905
| 0.031804
|
# the problem described below was fixed in 9758!
# keep_htpsit=False fails since 9473,
# on some installations (?) with:
# case A (see below in the code):
# RuntimeError: Could not locate the Fermi level!
# or the energies from the 2nd one behave strange, no convergence:
# iter: 1 18:21:49 +1.7 -3608.512512 0 19
# iter: 2 18:22:31 +1.9 -3148.936317 0
# iter: 3 18:23:13 +2.1 -2375.137532 0
# iter: 4 18:23:58 +2.4 -0.9 -1040.851545 216 11
# iter: 5 18:24:43 +2.6 -1.0 822.569589 597 14
# case B (see below in the code):
# No convergence when starting from a converged (keep_htpsit=True) run!
# WFS error grows to positive values!
# Is it an extreme case of https://trac.fysik.dtu.dk/projects/gpaw/ticket/51 ?
import os
import sys
from ase import Atoms
from gpaw import GPAW
from gpaw import ConvergenceError
from gpaw.mpi import rank
from gpaw.eigensolvers.rmm_diis_old import RMM_DIIS
from gpaw import setup_paths
if len(sys.argv) == 1:
run = 'A'
else:
run = sys.argv[1]
assert run in ['A', 'B']
# Use setups from the $PWD and $PWD/.. first
setup_paths.insert(0, '.')
setup_paths.insert(0, '../')
positions=[
(-0.069, 0.824,-1.295), ( 0.786, 0.943,-0.752), (-0.414,-0.001,-0.865),
(-0.282,-0.674,-3.822), ( 0.018,-0.147,-4.624), (-0.113,-0.080,-3.034),
( 2.253, 1.261, 0.151), ( 2.606, 0.638,-0.539), ( 2.455, 0.790, 1.019),
( 3.106,-0.276,-1.795), ( 2.914, 0.459,-2.386), ( 2.447,-1.053,-1.919),
( 6.257,-0.625,-0.626), ( 7.107,-1.002,-0.317), ( 5.526,-1.129,-0.131),
( 5.451,-1.261,-2.937), ( 4.585,-0.957,-2.503), ( 6.079,-0.919,-2.200),
(-0.515, 3.689, 0.482), (-0.218, 3.020,-0.189), ( 0.046, 3.568, 1.382),
(-0.205, 2.640,-3.337), (-1.083, 2.576,-3.771), (-0.213, 1.885,-2.680),
( 0.132, 6.301,-0.278), ( 1.104, 6.366,-0.068), (-0.148, 5.363,-0.112),
(-0.505, 6.680,-3.285), (-0.674, 7.677,-3.447), (-0.965, 6.278,-2.517),
( 4.063, 3.342,-0.474), ( 4.950, 2.912,-0.663), ( 3.484, 2.619,-0.125),
( 2.575, 2.404,-3.170), ( 1.694, 2.841,-3.296), ( 3.049, 2.956,-2.503),
( 6.666, 2.030,-0.815), ( 7.476, 2.277,-0.316), ( 6.473, 1.064,-0.651),
( 6.860, 2.591,-3.584), ( 6.928, 3.530,-3.176), ( 6.978, 2.097,-2.754),
( 2.931, 6.022,-0.243), ( 3.732, 6.562,-0.004), ( 3.226, 5.115,-0.404),
( 2.291, 7.140,-2.455), ( 1.317, 6.937,-2.532), ( 2.586, 6.574,-1.669),
( 6.843, 5.460, 1.065), ( 7.803, 5.290, 0.852), ( 6.727, 5.424, 2.062),
( 6.896, 4.784,-2.130), ( 6.191, 5.238,-2.702), ( 6.463, 4.665,-1.259),
( 0.398, 0.691, 4.098), ( 0.047, 1.567, 3.807), ( 1.268, 0.490, 3.632),
( 2.687, 0.272, 2.641), ( 3.078, 1.126, 3.027), (
|
3.376,-0.501, 2.793),
( 6.002,-0.525, 4.002), ( 6.152, 0.405, 3.660), ( 5.987,-0.447, 4.980),
( 0.649, 3.541, 2.897), ( 0.245, 4.301, 3.459), ( 1.638, 3.457, 3.084),
(-0.075,
|
5.662, 4.233), (-0.182, 6.512, 3.776), (-0.241, 5.961, 5.212),
( 3.243, 2.585, 3.878), ( 3.110, 2.343, 4.817), ( 4.262, 2.718, 3.780),
( 5.942, 2.582, 3.712), ( 6.250, 3.500, 3.566), ( 6.379, 2.564, 4.636),
( 2.686, 5.638, 5.164), ( 1.781, 5.472, 4.698), ( 2.454, 6.286, 5.887),
( 6.744, 5.276, 3.826), ( 6.238, 5.608, 4.632), ( 7.707, 5.258, 4.110),
( 8.573, 8.472, 0.407), ( 9.069, 7.656, 0.067), ( 8.472, 8.425, 1.397),
( 8.758, 8.245, 2.989), ( 9.294, 9.091, 3.172), ( 7.906, 8.527, 3.373),
( 4.006, 7.734, 3.021), ( 4.685, 8.238, 3.547), ( 3.468, 7.158, 3.624),
( 5.281, 6.089, 6.035), ( 5.131, 7.033, 6.378), ( 4.428, 5.704, 5.720),
( 5.067, 7.323, 0.662), ( 5.785, 6.667, 0.703), ( 4.718, 7.252, 1.585)]
prefix = 'b256H2O'
L = 9.8553729
atoms = Atoms('32(OH2)',
positions=positions)
atoms.set_cell((L,L,L),scale_atoms=False)
atoms.set_pbc(1)
r = [1, 1, 2]
atoms = atoms.repeat(r)
n = [56 * ri for ri in r]
# nbands (>=128) is the number of bands per 32 water molecules
nbands = 2*6*11 # 132
for ri in r: nbands = nbands*ri
# the next line decreases memory usage
es = RMM_DIIS(keep_htpsit=False)
calc = GPAW(nbands=nbands,
# uncomment next two lines to use lcao/sz
#mode='lcao',
#basis='sz',
gpts=tuple(n),
#maxiter=5,
width = 0.01,
eigensolver = es,
txt=prefix + '.txt',
)
if run == 'A':
atoms.set_calculator(calc)
pot = atoms.get_potential_energy()
elif run == 'B':
# converge first with keep_htpsit=True
calc.set(eigensolver='rmm-diis')
calc.set(txt=prefix + '_True.txt')
atoms.set_calculator(calc)
pot = atoms.get_potential_energy()
# fails to converge with keep_htpsit=False
calc.set(eigensolver=es)
calc.set(maxiter=200)
calc.set(txt=prefix + '_False.txt')
atoms.set_calculator(calc)
pot = atoms.get_potential_energy()
|
simonqiang/gftest
|
app/__init__.py
|
Python
|
mit
| 888
| 0.003378
|
from flask import Flask
from flask.ext.bootstrap import Bootstrap
from flask.ext.mail import Mail
from flask.ext.moment import Moment
from flask.ext.sqlalchemy i
|
mport SQLAlchemy
from config import config
from flask.ext.redis import Redis
bootstrap = Bootstrap()
mail = Mail()
moment = Moment()
db = SQLAlchemy()
redis1 = Redis()
def create_app(config_name):
|
app = Flask(__name__)
app.config.from_object(config[config_name])
config[config_name].init_app(app)
app.config['REDIS_HOST'] = 'localhost'
app.config['REDIS_PORT'] = 6379
app.config['REDIS_DB'] = 0
bootstrap.init_app(app)
mail.init_app(app)
moment.init_app(app)
db.init_app(app)
redis1.init_app(app)
from .main import main as main_blueprint
# from .main.common import common
app.register_blueprint(main_blueprint)
# app.register_blueprint(common)
return app
|
carolFrohlich/nipype
|
nipype/interfaces/mne/__init__.py
|
Python
|
bsd-3-clause
| 55
| 0
|
# -*- coding: utf-8 -*-
|
from .base import WatershedBE
|
M
|
fkaa/iceball
|
tools/kv62pmf.py
|
Python
|
gpl-3.0
| 3,453
| 0.019983
|
"""
A tool for converting kv6 models into pmf.
GreaseMonkey, 2013 - Public Domain
WARNING: I haven't checked to ensure that X,Y are around the right way.
If you find your models have been flipped inadvertently, let me know! --GM
"""
from __future__ import print_function
import sys, struct
# Backwards compatibility - make new code work on old version, not v
|
ice-versa
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
if PY2:
# This script didn't use range() anyway, so no problem overwriting it in Py2
import __builtin__
range = getattr(__builtin__, "xrange")
_ord = ord
else:
_ord = lambda x: x
USAGE_MSG = """
usage:
python2 kv62pmf.py in.kv6 out.pmf ptsize ptspacing bonename
"""
if len(sys.argv) <= 5:
print(USAGE_MSG)
exit()
if not sys.argv[3].isdigit():
raise Exception("expected
|
a number for the 3rd argument")
if not sys.argv[4].isdigit():
raise Exception("expected a number for the 4th argument")
ptsize = int(sys.argv[3])
ptspacing = int(sys.argv[4])
if ptsize < 1 or ptsize > 65535:
raise Exception("point size out of range (1..65535)")
bonename = sys.argv[5]
if PY3:
bonename = bonename.encode()
if len(bonename) > 15:
raise Exception("bone name too large")
infp = open(sys.argv[1],"rb")
if infp.read(4) != b"Kvxl":
raise Exception("not a KV6 file")
xsiz, ysiz, zsiz, xpivot, ypivot, zpivot, blklen = struct.unpack("<IIIfffI", infp.read(28))
print(xsiz, ysiz, zsiz, xpivot, ypivot, zpivot)
xpivot = int(xpivot*ptspacing+0.5)
ypivot = int(ypivot*ptspacing+0.5)
zpivot = int(zpivot*ptspacing+0.5)
# yeah i know this is basically worst case assuming x,y,z pivot is within the model bounds
if max(max(xsiz,ysiz),zsiz)*ptspacing > 65535:
raise Exception("point size a bit TOO large to fit into a pmf")
if blklen > 4096:
raise Exception("kv6 has too many blocks to fit into a pmf")
def parseblk(s):
return struct.unpack("<BBBBHBB",s)
blkdata = [parseblk(infp.read(8)) for i in range(blklen)]
xoffset = [struct.unpack("<I", infp.read(4))[0] for i in range(xsiz)]
xyoffset = [struct.unpack("<H", infp.read(2))[0] for i in range(xsiz*ysiz)]
assert blklen == sum(xoffset)
assert blklen == sum(xyoffset)
# Corollary: sum(xoffset) == sum(xyoffset)
# Proof: Left as an exercise to the reader.
magic_spal = infp.read(4)
palette = None
if magic_spal == b"":
pass # no palette
elif magic_spal == b"SPal":
palette = [[_ord(v) for v in infp.read(3)] for i in range(256)]
else:
raise Exception("expected palette at end of file")
infp.close()
#
#
#
# pretty simple really
outfp = open(sys.argv[2], "wb")
# start with the header of "PMF",0x1A,1,0,0,0
outfp.write(b"PMF\x1A\x01\x00\x00\x00")
# then there's a uint32_t denoting how many body parts there are
outfp.write(struct.pack("<I",1))
# then, for each body part,
# there's a null-terminated 16-byte string (max 15 chars) denoting the part
outfp.write(bonename + b"\x00"*(16-len(bonename)))
# then there's a uint32_t denoting how many points there are in this body part
outfp.write(struct.pack("<I",blklen))
# then there's a whole bunch of this:
# uint16_t radius;
# int16_t x,y,z;
# uint8_t b,g,r,reserved;
bi = 0
oi = 0
for cx in range(xsiz):
for cy in range(ysiz):
for i in range(xyoffset[oi]):
b,g,r,l,ypos,vis,unk1 = blkdata[bi]
outfp.write(struct.pack("<HhhhBBBB"
,ptsize
,cx*ptspacing-xpivot
,ypos*ptspacing-zpivot
,cy*ptspacing-ypivot
,b,g,r,0))
bi += 1
oi += 1
# rinse, lather, repeat
outfp.close()
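# Hedged sketch (illustrative, not part of the original tool): a reader for the
# layout written above, mirroring the struct comments; the function name is an
# assumption.
def read_pmf(fname):
    with open(fname, "rb") as fp:
        if fp.read(8) != b"PMF\x1A\x01\x00\x00\x00":
            raise Exception("not a PMF v1 file")
        nparts, = struct.unpack("<I", fp.read(4))
        parts = []
        for _ in range(nparts):
            name = fp.read(16).rstrip(b"\x00")
            npts, = struct.unpack("<I", fp.read(4))
            # uint16_t radius; int16_t x,y,z; uint8_t b,g,r,reserved => 12 bytes
            pts = [struct.unpack("<HhhhBBBB", fp.read(12)) for _ in range(npts)]
            parts.append((name, pts))
    return parts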
|
tokamstud/enron-analysis
|
src/complex/hive_prep.py
|
Python
|
gpl-3.0
| 2,895
| 0.071157
|
from mrjob.job import MRJob
from mrjob.step import MRStep
def get_id_from_line(line):
if line.find('.","Message-ID: <') > 0:
start = line.find("Message-ID")+13
i=0
for char in line[start:]:
i=i+1
if (not (char.isdigit() or (char == '.'))):
stop = i+start-2
break
return line[start:stop]
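# Hedged example (illustrative, not from the original job): the loop above keeps
# the digits-and-dots run after '<' and drops the trailing dot, e.g.
assert get_id_from_line(
    'allen-p/_sent_mail/1.","Message-ID: <18782981.1075855378110.JavaMail.evans@thyme>'
) == '18782981.1075855378110'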
class MRMultilineInput(MRJob):
def steps(self):
return [
|
MRStep(mapper_init=self.mapper_init_count,
mapper=self.mapper_count),
                MRStep(mapper=self.mapper_child)]
# STEP 1
def mapper_init_count(self):
self.message_id = ''
self.in_body = False
self.body = []
self.after_key = False
self.beginning = False
self.key = False
|
def mapper_count(self, _, line):
line = line.strip()
if (line.find('.","Message-ID: <') > 0) and self.in_body and not self.beginning:
yield self.message_id, self.body
self.message_id = ''
self.body = []
self.in_body = False
self.after_key = False
self.beginning = False
self.key = False
if self.in_body and not self.after_key:
self.beginning = False
self.body.append(line)
if line.find('.","Message-ID: <') > 0 and not self.key:
if not self.in_body:
self.in_body = True
self.beginning = True
self.after_key = True
self.key = True
start = line.find("Message-ID")+13
i=0
for char in line[start:]:
i=i+1
if (not (char.isdigit() or (char == '.'))):
stop = i+start-2
break
self.message_id = line[start:stop]
self.after_key = False
# STEP 2
def mapper_child(self, message_id, values):
clean_body = ''
clean_date = ''
clean_from = ''
clean_to = ''
clean_values = []
start = 0
for idx, line in enumerate(values):
if "Date:" in line:
clean_date = line[5:].strip()
if line.find("From:") == 0:
clean_from = line[5:].strip()
if line.find("To:") == 0:
clean_to = line[3:].strip()
if "X-FileName:" in line:
start = idx+1
break
for i in range(start,len(values)):
if "-Original Message-" in values[i]:
break
clean_body=clean_body + values[i] + " "
clean_values.append(clean_date)
clean_values.append(clean_from)
#clean_values.append(clean_to)
#clean_values.append(clean_body.strip())
clean_values.append("TEST BODY")
newval = values
for element in values:
if "subject:" in element.lower():
subject = element
break
if "re:" in subject.lower():
newval.append("child")
elif "fw:" not in subject.lower():
newval.append("parent")
for element in newval:
if "Subject:" in element:
subject = element
break
relation = values[-1]
i = 0
colon = 0
if "<" not in subject:
for char in subject:
i=i+1
if char == ":":
colon = i
sub = subject[colon+1:].strip()
sub_relation = []
sub_relation.append(sub)
sub_relation.append(relation)
yield sub_relation, (message_id,clean_values)
if __name__ == '__main__':
MRMultilineInput.run()
|
fishtown-analytics/dbt
|
scripts/build-dbt.py
|
Python
|
apache-2.0
| 29,183
| 0.000069
|
import json
import os
import re
import shutil
import subprocess
import sys
import tempfile
import textwrap
import time
import venv # type: ignore
import zipfile
from typing import Dict
from argparse import ArgumentParser
from dataclasses import dataclass
from pathlib import Path
from urllib.request import urlopen
from typing import Optional, Iterator, Tuple, List, Iterable
HOMEBREW_PYTHON = (3, 8)
# This should match the pattern in .bumpversion.cfg
VERSION_PATTERN = re.compile(
r'(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)'
r'((?P<prerelease>[a-z]+)(?P<num>\d+))?'
)
class Version:
def __init__(self, raw: str) -> None:
self.raw = raw
match = VERSION_PATTERN.match(self.raw)
assert match is not None, f'Invalid version: {self.raw}'
groups = match.groupdict()
self.major: int = int(groups['major'])
self.minor: int = int(groups['minor'])
self.patch: int = int(groups['patch'])
self.prerelease: Optional[str] = None
self.num: Optional[int] = None
if groups['num'] is not None:
self.prerelease = groups['prerelease']
self.num = int(groups['num'])
def __str__(self):
return self.raw
def homebrew_class_name(self) -> str:
name = f'DbtAT{self.major}{self.minor}{self.patch}'
if self.prerelease is not None and self.num is not None:
name = f'{name}{self.prerelease.title()}{self.num}'
return name
def homebrew_filename(self):
version_str = f'{self.major}.{self.minor}.{self.patch}'
if self.prerelease is not None and self.num is not None:
version_str = f'{version_str}-{self.prerelease}{self.num}'
return f'dbt@{version_str}.rb'
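# Hedged sketch (illustrative, not part of the original script): how the Version
# helper above parses a prerelease tag and names the homebrew artifacts.
_example = Version('0.19.0rc1')
assert (_example.major, _example.minor, _example.patch) == (0, 19, 0)
assert (_example.prerelease, _example.num) == ('rc', 1)
assert _example.homebrew_class_name() == 'DbtAT0190Rc1'
assert _example.homebrew_filename() == 'dbt@0.19.0-rc1.rb'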
@dataclass
class Arguments:
version: Version
part: str
path: Path
homebrew_path: Path
homebrew_set_default: bool
set_version: bool
build_pypi: bool
upload_pypi: bool
test_upload: bool
build_homebrew: bool
build_docker: bool
upload_docker: bool
write_requirements: bool
write_dockerfile: bool
@classmethod
def parse(cls) -> 'Arguments':
parser = ArgumentParser(
prog="Bump dbt's version, build packages"
)
parser.add_argument(
'version',
type=Version,
help="The version to set",
)
parser.add_argument(
'part',
type=str,
help="The part of the version to update",
)
parser.add_argument(
'--path',
type=Path,
help='The path to the dbt repository',
default=Path.cwd(),
)
parser.add_argument(
'--homebrew-path',
type=Path,
help='The path to the dbt homebrew install',
default=(Path.cwd() / '../homebrew-dbt'),
)
parser.add_argument(
'--homebrew-set-default',
action='store_true',
help='If set, make this homebrew version the default',
)
parser.add_argument(
'--no-set-version',
dest='set_version',
action='store_false',
help='Skip bumping the version',
)
parser.add_argument(
'--no-build-pypi',
dest='build_pypi',
action='store_false',
help='skip building pypi',
)
parser.add_argument(
'--no-build-docker',
dest='build_docker',
action='store_false',
help='skip building docker images',
)
parser.add_argument(
'--no-upload-docker',
dest='upload_docker',
action='store_false',
help='skip uploading docker images',
)
uploading = parser.add_mutually_exclusive_group()
uploading.add_argument(
'--upload-pypi',
dest='force_upload_pypi',
action='store_true',
help='upload to pypi even if building is disabled'
)
uploading.add_argument(
'--no-upload-pypi',
dest='no_upload_pypi',
action='store_true',
help='skip uploading to pypi',
)
parser.add_argument(
'--no-upload',
dest='test_upload',
action='store_false',
help='Skip uploading to pypitest',
)
parser.add_argument(
'--no-build-homebrew',
dest='build_homebrew',
action='store_false',
help='Skip building homebrew packages',
)
parser.add_argument(
'--no-write-requirements',
dest='write_requirements',
action='store_false',
help='Skip writing the requirements file. It must exist.'
)
parser.add_argument(
'--no-write-dockerfile',
dest='write_dockerfile',
action='store_false',
help='Skip writing the dockerfile. It must exist.'
)
parsed = parser.parse_args()
upload_pypi = parsed.build_pypi
if parsed.force_upload_pypi:
upload_pypi = True
elif parsed.no_upload_pypi:
upload_pypi = False
return cls(
version=parsed.version,
part=parsed.part,
path=parsed.path,
homebrew_path=parsed.homebrew_path,
homebrew_set_default=parsed.homebrew_set_default,
set_version=parsed.set_version,
build_pypi=parsed.build_pypi,
upload_pypi=upload_pypi,
test_upload=parsed.test_upload,
build_homebrew=parsed.build_homebrew,
build_docker=parsed.build_docker,
upload_docker=parsed.upload_docker,
write_requirements=parsed.write_requirements,
write_dockerfile=parsed.write_dockerfile,
)
def collect_output(cmd, cwd=None, stderr=subprocess.PIPE) -> str:
try:
result = subprocess.run(
cmd, cwd=cwd, check=True, stdout=subprocess.PIPE, stderr=stderr
)
except subprocess.CalledProcessError as exc:
print(f'Command {exc.cmd} failed')
if exc.output:
print(exc.output.decode('utf-8'))
if exc.stderr:
print(exc.stderr.decode('utf-8'), file=sys.stderr)
raise
return result.s
|
tdout.decode('utf-8')
def run_command(cmd, cwd=None) -> None:
result = collect_output(cmd, stderr=subprocess.STDOUT, cwd=cwd)
print(result)
def set_version(path: Path, version: Version, part: str):
# bumpversio
|
n --commit --no-tag --new-version "${version}" "${port}"
cmd = [
'bumpversion', '--commit', '--no-tag', '--new-version',
str(version), part
]
print(f'bumping version to {version}')
run_command(cmd, cwd=path)
print(f'bumped version to {version}')
class PypiBuilder:
_SUBPACKAGES = (
'core',
'plugins/postgres',
'plugins/redshift',
'plugins/bigquery',
'plugins/snowflake',
)
def __init__(self, dbt_path: Path):
self.dbt_path = dbt_path
@staticmethod
def _dist_for(path: Path, make=False) -> Path:
dist_path = path / 'dist'
if dist_path.exists():
shutil.rmtree(dist_path)
if make:
os.makedirs(dist_path)
build_path = path / 'build'
if build_path.exists():
shutil.rmtree(build_path)
return dist_path
@staticmethod
def _build_pypi_package(path: Path):
print(f'building package in {path}')
cmd = ['python', 'setup.py', 'sdist', 'bdist_wheel']
run_command(cmd, cwd=path)
print(f'finished building package in {path}')
@staticmethod
def _all_packages_in(path: Path) -> Iterator[Path]:
path = path / 'dist'
for pattern in ('*.tar.gz', '*.whl'):
yield from path.glob(pattern)
def _build_subpackage(self, name: str) -> Iterator[Path]:
subpath = self.dbt_path / name
self._dist_for(subpath)
self._build_pypi_package(subpath)
return self._all_packages_in(s
|
NTUTVisualScript/Visual_Script
|
static/javascript/blockly/i18n/create_messages.py
|
Python
|
mit
| 6,374
| 0.010041
|
#!/usr/bin/python
# Generate .js files defining Blockly core and language messages.
#
# Copyright 2013 Google Inc.
# https://developers.google.com/blockly/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import codecs
import os
import re
import sys
from common import read_json_file
_NEWLINE_PATTERN = re.compile('[\n\r]')
def string_is_ascii(s):
try:
s.decode('ascii')
return True
except UnicodeEncodeError:
return False
def load_constants(filename):
"""Read in constants file, which must be output in every language."""
constant_defs = read_json_file(filename);
constants_text = '\n'
for key in constant_defs:
value = constant_defs[key]
value = value.replace('"', '\\"')
constants_text += u'\nBlockly.Msg["{0}"] = \"{1}\";'.format(
key, value)
return constants_text
def main():
"""Generate .js files defining Blockly core and language messages."""
# Process command-line arguments.
parser = argparse.ArgumentParser(description='Convert JSON files to JS.')
parser.add_argument('--source_lang', default='en',
help='ISO 639-1 source language code')
parser.add_argument('--source_lang_file',
default=os.path.join('json', 'en.json'),
help='Path to .json file for source language')
  parser.add_argument('--source_synonym_file',
default=os.path.join('json', 'synonyms.json'),
help='Path to .json file with synonym definitions')
parser.add_argument('--source_constants_file',
default=os.path.join('json', 'constants.json'),
help='Path to .json file with constant definitions')
parser.add_argument('--output_dir', default='js/',
help='relative directory for output files')
parser.add_argument('--key_file', default='keys.json',
help='relative path to input keys file')
parser.add_argument('--quiet', action='store_true', default=False,
help='do not write anything to standard output')
parser.add_argument('files', nargs='+', help='input files')
args = parser.parse_args()
if not args.output_dir.endswith(os.path.sep):
args.output_dir += os.path.sep
# Read in source language .json file, which provides any values missing
# in target languages' .json files.
source_defs = read_json_file(os.path.join(os.curdir, args.source_lang_file))
# Make sure the source file doesn't contain a newline or carriage return.
for key, value in source_defs.items():
if _NEWLINE_PATTERN.search(value):
print('ERROR: definition of {0} in {1} contained a newline character.'.
format(key, args.source_lang_file))
sys.exit(1)
sorted_keys = source_defs.keys()
sorted_keys.sort()
# Read in synonyms file, which must be output in every language.
synonym_defs = read_json_file(os.path.join(
os.curdir, args.source_synonym_file))
synonym_text = '\n'.join([u'Blockly.Msg["{0}"] = Blockly.Msg["{1}"];'
.format(key, synonym_defs[key]) for key in synonym_defs])
# Read in constants file, which must be output in every language.
constants_text = load_constants(os.path.join(os.curdir, args.source_constants_file))
# Create each output file.
for arg_file in args.files:
(_, filename) = os.path.split(arg_file)
target_lang = filename[:filename.index('.')]
if target_lang not in ('qqq', 'keys', 'synonyms', 'constants'):
target_defs = read_json_file(os.path.join(os.curdir, arg_file))
# Verify that keys are 'ascii'
bad_keys = [key for key in target_defs if not string_is_ascii(key)]
if bad_keys:
print(u'These keys in {0} contain non ascii characters: {1}'.format(
filename, ', '.join(bad_keys)))
# If there's a '\n' or '\r', remove it and print a warning.
for key, value in target_defs.items():
if _NEWLINE_PATTERN.search(value):
print(u'WARNING: definition of {0} in {1} contained '
'a newline character.'.
format(key, arg_file))
target_defs[key] = _NEWLINE_PATTERN.sub(' ', value)
# Output file.
outname = os.path.join(os.curdir, args.output_dir, target_lang + '.js')
with codecs.open(outname, 'w', 'utf-8') as outfile:
outfile.write(
"""// This file was automatically generated. Do not modify.
'use strict';
goog.provide('Blockly.Msg.{0}');
goog.require('Blockly.Msg');
""".format(target_lang.replace('-', '.')))
# For each key in the source language file, output the target value
# if present; otherwise, output the source language value with a
# warning comment.
for key in sorted_keys:
if key in target_defs:
value = target_defs[key]
comment = ''
del target_defs[key]
else:
value = source_defs[key]
comment = ' // untranslated'
value = value.replace('"', '\\"')
outfile.write(u'Blockly.Msg["{0}"] = "{1}";{2}\n'
.format(key, value, comment))
# Announce any keys defined only for target language.
if target_defs:
extra_keys = [key for key in target_defs if key not in synonym_defs]
synonym_keys = [key for key in target_defs if key in synonym_defs]
if not args.quiet:
if extra_keys:
print(u'These extra keys appeared in {0}: {1}'.format(
filename, ', '.join(extra_keys)))
if synonym_keys:
print(u'These synonym keys appeared in {0}: {1}'.format(
filename, ', '.join(synonym_keys)))
outfile.write(synonym_text)
outfile.write(constants_text)
if not args.quiet:
print('Created {0}.'.format(outname))
if __name__ == '__main__':
main()
|
yohn89/pythoner.net
|
pythoner/accounts/admin.py
|
Python
|
gpl-3.0
| 915
| 0.006557
|
# -*- coding: utf-8 -*-
"""
pythoner.net
Copyright (C) 2013 PYTHONER.ORG
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.contrib import admin
from models import *
class ProfileAdmin(admin.ModelAdmin):
list_display = ('screen_name','city','introduction')
admin.site.register(UserProfile,ProfileAdmin)
|
stackArmor/security_monkey
|
security_monkey/watchers/elasticsearch_service.py
|
Python
|
apache-2.0
| 4,759
| 0.003572
|
# Copyright 2015 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module: security_monkey.watchers.keypair
:platform: Unix
.. version:: $$VERSION$$
.. moduleauthor:: Mike Grima <[email protected]>
"""
import json
from security_monkey.decorators import record_exception
from security_monkey.decorators import iter_account_region
from security_monkey.watcher import Watcher, ChangeItem
from security_monkey.datastore import Account
from security_monkey import app, ARN_PREFIX
class ElasticSearchService(Watcher):
index = 'elasticsearchservice'
i_am_singular = 'ElasticSearch Service Access Policy'
i_am_plural = 'ElasticSearch Service Access Policies'
def __init__(self, accounts=None, debug=False):
super(ElasticSearchService, self).__init__(accounts=accounts, debug=debug)
def slurp(self):
"""
:returns: item_list - list of ElasticSearchService Items
:return: exception_map - A dict where the keys are a tuple containing the
location of the exception and the value is the actual exception
"""
        self.prep_for_slurp()
@iter_account_region(index=self.index, accounts=self.accounts, service_name='es')
def slurp_items(**kwargs):
item_list = []
exception_map = {}
kwargs['exception_map'] = exception_map
account_db = Account.query.filter(Account.name == kwargs['account_name']).first()
account_num = account_db.identifier
es_info = self.get_all_es_domains_in_region(**kwargs)
            if es_info is None:
return item_list, exception_map
(client, domains) = es_info
app.logger.debug("Found {} {}".format(len(domains), ElasticSearchService.i_am_plural))
for domain in domains:
if self.check_ignore_list(domain["DomainName"]):
continue
# Fetch the policy:
item = self.build_item(domain["DomainName"], client, account_num, **kwargs)
if item:
item_list.append(item)
return item_list, exception_map
return slurp_items()
@record_exception(source='{index}-watcher'.format(index=index), pop_exception_fields=False)
def get_all_es_domains_in_region(self, **kwargs):
from security_monkey.common.sts_connect import connect
client = connect(kwargs['account_name'], "boto3.es.client", region=kwargs['region'])
app.logger.debug("Checking {}/{}/{}".format(ElasticSearchService.index, kwargs['account_name'], kwargs['region']))
# No need to paginate according to: client.can_paginate("list_domain_names")
domains = self.wrap_aws_rate_limited_call(client.list_domain_names)["DomainNames"]
return client, domains
@record_exception(source='{index}-watcher'.format(index=index), pop_exception_fields=False)
def build_item(self, domain, client, account_num, **kwargs):
arn = ARN_PREFIX + ':es:{region}:{account_number}:domain/{domain_name}'.format(
region=kwargs['region'],
account_number=account_num,
domain_name=domain)
config = {
'arn': arn
}
domain_config = self.wrap_aws_rate_limited_call(client.describe_elasticsearch_domain_config,
DomainName=domain)
# Does the cluster have a policy?
if domain_config["DomainConfig"]["AccessPolicies"]["Options"] == "":
config['policy'] = {}
else:
config['policy'] = json.loads(domain_config["DomainConfig"]["AccessPolicies"]["Options"])
config['name'] = domain
return ElasticSearchServiceItem(region=kwargs['region'], account=kwargs['account_name'], name=domain, arn=arn, config=config)
class ElasticSearchServiceItem(ChangeItem):
def __init__(self, region=None, account=None, name=None, arn=None, config={}):
super(ElasticSearchServiceItem, self).__init__(
index=ElasticSearchService.index,
region=region,
account=account,
name=name,
arn=arn,
new_config=config)
|
helloTC/LearnPython
|
fluent_python/array_of_sequences/tuple_as_record.py
|
Python
|
mit
| 1,025
| 0.00878
|
outdata1 = divmod(20,8)
# prefix an argument with a star when calling a function to unpack tuple
t = (20,8)
outdata2 = divmod(*t)
import os
# Note that filename = hh.grad
_, filename = os.path.split('/nfs/j3/hh.grad')
# Using * to grab excess items
# Can be used in python3, but not in python2
# a, b, *rest = range(5)
# a, b, *rest = range(3)
# a, b, *rest = range(2)
# a, *body, c, d = range(5)
# *head, b, c, d = range(5)
# Nested tuple unpacking
a = [('good', (334,213)),
('bad', (231,234))]
for cond, (x, y) in a:
print('x = {0}, y = {1}'.format(x, y))
# Namedtuple
from collections import namedtuple
place = namedtuple('place', 'condition coordinate')
tokyo = place('good', (334,213))
print(tokyo)
# _fields class attribute, _make(iterable) class method, _asdict() instance method
print(place._fields)
LatLong = namedtuple('LatLong', 'lat long')
delhi_data = ('Delhi NCR', LatLong(28.61, 77.21))
delhi = place._make(delhi_data)
for key, value in delhi._asdict().items():
print(key + ':', value)
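# (extra sketch, not in the original notes) _replace builds a modified copy of a namedtuple
delhi_moved = delhi._replace(coordinate=LatLong(28.70, 77.10))
print(delhi_moved)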
|
rimbalinux/MSISDNArea
|
django/db/backends/util.py
|
Python
|
bsd-3-clause
| 4,684
| 0.007472
|
import datetime
import decimal
from time import time
from django.utils.hashcompat import md5_constructor
from django.utils.log import getLogger
logger = getLogger('django.db.backends')
class CursorDebugWrapper(object):
def __init__(self, cursor, db):
self.cursor = cursor
self.db = db # Instance of a BaseDatabaseWrapper subclass
def execute(self, sql, params=()):
start = time()
try:
return self.cursor.execute(sql, params)
finally:
stop = time()
duration = stop - start
sql = self.db.ops.last_executed_query(self.cursor, sql, params)
self.db.queries.append({
'sql': sql,
'time': "%.3f" % duration,
})
logger.debug('(%.3f) %s; args=%s' % (duration, sql, params),
extra={'duration':duration, 'sql':sql, 'params':params}
)
def executemany(self, sql, param_list):
start = time()
try:
return self.cursor.executemany(sql, param_list)
finally:
stop = time()
duration = stop - start
self.db.queries.append({
'sql': '%s times: %s' % (len(param_list), sql),
'time': "%.3f" % duration,
})
logger.debug('(%.3f) %s; args=%s' % (duration, sql, param_list),
extra={'duration':duration, 'sql':sql, 'params':param_list}
)
def __getattr__(self, attr):
if attr in self.__dict__:
return self.__dict__[attr]
else:
return getattr(self.cursor, attr)
def __iter__(self):
return iter(self.cursor)
###############################################
# Converters from database (string) to Python #
###############################################
def typecast_date(s):
return s and datetime.date(*map(int, s.split('-'))) or None # returns None if s is null
def typecast_time(s): # does NOT store time zone information
if not s: return None
hour, minutes, seconds = s.split(':')
if '.' in seconds: # check whether seconds have a fractional part
seconds, microseconds = seconds.split('.')
else:
microseconds = '0'
return datetime.time(int(hour), int(minutes), int(seconds), int(float('.'+microseconds) * 1000000))
def typecast_timestamp(s): # does NOT store time zone information
# "2005-07-29 15:48:00.590358-05"
# "2005-07-29 09:56:00-05"
if not s: return None
if not ' ' in s: return typecast_date(s)
d, t = s.split()
# Extract timezone information, if it exists. Currently we just throw
# it away, but in the future we may make use of it.
if '-' in t:
t, tz = t.split('-', 1)
tz = '-' + tz
elif '+' in t:
t, tz = t.split('+', 1)
tz = '+' + tz
else:
tz = ''
dates = d.split('-')
times = t.split(':')
seconds = times[2]
if '.' in seconds: # check whether seconds have a fractional part
seconds, microseconds = seconds.split('.')
else:
microseconds = '0'
return datetime.datetime(int(dates[0]), int(dates[1]), int(dates[2]),
int(times[0]), int(times[1]), int(seconds), int((microseconds + '000000')[:6]))
def typecast_boolean(s):
if s is None: return None
if not s: return False
return str(s)[0].lower() == 't'
def typecast_decimal(s):
if s is None or s == '':
return None
return decimal.Decimal(s)
###############################################
# Converters from Python to database (string) #
###############################################
def rev_typecast_boolean(obj, d):
return obj and '1' or '0'
def rev_typecast_decimal(d):
if d is None:
return None
return str(d)
def truncate_name(name, length=None, hash_len=4):
"""Shortens a string to a repeatable mangled version with the given length.
"""
if length is None or len(name) <= length:
return name
hash = md5_constructor(name).hexdigest()[:hash_len]
    return '%s%s' % (name[:length-hash_len], hash)
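# For example, truncate_name('some_very_long_identifier', 10) keeps the first 6 characters
# and appends a 4-character md5 fragment, so the result is always exactly 10 characters.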
def format_number(value, max_digits, decimal_places):
"""
Formats a number into a string with the requisite number of digits and
decimal places.
"""
if isinstance(value, decimal.Decimal):
context = decimal.getcontext().copy()
context.prec = max_digits
        return u'%s' % str(value.quantize(decimal.Decimal(".1") ** decimal_places, context=context))
else:
return u"%.*f" % (decimal_places, value)
|
ramseyboy/tabletop-scanner
|
tabletopscanner/boardgamegeekapi/search.py
|
Python
|
apache-2.0
| 674
| 0.001484
|
import xml.etree.cElementTree as et
from collections import OrderedDict
from tabletopscanner.boardgamegeekapi.parsers import Deserializer
class SearchParser(Deserializer):
def deserialize(self, xml):
tree = et.fromstring(xml)
return [SearchParser.__make_search_result(el) for el in tree.findall('item')]
@staticmethod
    def __make_search_result(el):
        geekid = el.attrib['id']
name = el.find('name').attrib['value']
yearpublished = el.find('yearpublished').attrib['value']
return OrderedDict({
'geekid': geekid,
'name': name,
'yearpublished': yearpublished
})
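# Usage sketch with made-up sample data (not from the original project):
#
#   xml = ('<items><item id="13"><name value="Catan"/>'
#          '<yearpublished value="1995"/></item></items>')
#   SearchParser().deserialize(xml)
#   # -> [OrderedDict with 'geekid': '13', 'name': 'Catan', 'yearpublished': '1995']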
|
abo-abo/edx-platform
|
cms/djangoapps/contentstore/features/common.py
|
Python
|
agpl-3.0
| 12,319
| 0.000812
|
# pylint: disable=C0111
# pylint: disable=W0621
from lettuce import world, step
from nose.tools import assert_true, assert_in, assert_false # pylint: disable=E0611
from auth.authz import get_user_by_email, get_course_groupname_for_role
from django.conf import settings
from selenium.webdriver.common.keys import Keys
import time
import os
from django.contrib.auth.models import Group
from logging import getLogger
logger = getLogger(__name__)
from terrain.browser import reset_data
TEST_ROOT = settings.COMMON_TEST_DATA_ROOT
@step('I (?:visit|access|open) the Studio homepage$')
def i_visit_the_studio_homepage(_step):
# To make this go to port 8001, put
# LETTUCE_SERVER_PORT = 8001
# in your settings.py file.
world.visit('/')
signin_css = 'a.action-signin'
assert world.is_css_present(signin_css)
@step('I am logged into Studio$')
def i_am_logged_into_studio(_step):
log_into_studio()
@step('I confirm the alert$')
def i_confirm_with_ok(_step):
world.browser.get_alert().accept()
@step(u'I press the "([^"]*)" delete icon$')
def i_press_the_category_delete_icon(_step, category):
if category == 'section':
css = 'a.delete-button.delete-section-button span.delete-icon'
elif category == 'subsection':
css = 'a.delete-button.delete-subsection-button span.delete-icon'
else:
assert False, 'Invalid category: %s' % category
world.css_click(css)
@step('I have opened a new course in Studio$')
def i_have_opened_a_new_course(_step):
open_new_course()
@step('(I select|s?he selects) the new course')
def select_new_course(_step, whom):
course_link_css = 'a.course-link'
world.css_click(course_link_css)
@step(u'I press the "([^"]*)" notification button$')
def press_the_notification_button(_step, name):
# Because the notification uses a CSS transition,
# Selenium will always report it as being visible.
# This makes it very difficult to successfully click
# the "Save" button at the UI level.
# Instead, we use JavaScript to reliably click
# the button.
btn_css = 'div#page-notification a.action-%s' % name.lower()
world.trigger_event(btn_css, event='focus')
world.browser.execute_script("$('{}').click()".format(btn_css))
world.wait_for_ajax_complete()
@step('I change the "(.*)" field to "(.*)"$')
def i_change_field_to_value(_step, field, value):
field_css = '#%s' % '-'.join([s.lower() for s in field.split()])
ele = world.css_find(field_css).first
ele.fill(value)
ele._element.send_keys(Keys.ENTER)
@step('I reset the database')
def reset_the_db(_step):
"""
When running Lettuce tests using examples (i.e. "Confirmation is
shown on save" in course-settings.feature), the normal hooks
aren't called between examples. reset_data should run before each
scenario to flush the test database. When this doesn't happen we
get errors due to trying to insert a non-unique entry. So instead,
we delete the database manually. This has the effect of removing
any users and courses that have been created during the test run.
"""
reset_data(None)
@step('I see a confirmation that my changes have been saved')
def i_see_a_confirmation(step):
confirmation_css = '#alert-confirmation'
assert world.is_css_present(confirmation_css)
def open_new_course():
world.clear_courses()
create_studio_user()
log_into_studio()
create_a_course()
def create_studio_user(
uname='robot',
email='[email protected]',
password='test',
is_staff=False):
studio_user = world.UserFactory(
username=uname,
email=email,
password=password,
is_staff=is_staff)
registration = world.RegistrationFactory(user=studio_user)
registration.register(studio_user)
registration.activate()
return studio_user
def fill_in_course_info(
name='Robot Super Course',
org='MITx',
num='101',
run='2013_Spring'):
world.css_fill('.new-course-name', name)
world.css_fill('.new-course-org', org)
world.css_fill('.new-course-number', num)
world.css_fill('.new-course-run', run)
def log_into_studio(
uname='robot',
email='[email protected]',
password='test',
name='Robot Studio'):
world.log_in(username=uname, password=password, email=email, name=name)
# Navigate to the studio dashboard
world.visit('/')
assert_in(uname, world.css_text('h2.title', timeout=10))
def add_course_author(user, course):
"""
Add the user to the instructor group of the course
so they will have the permissions to see it in studio
"""
    for role in ("staff", "instructor"):
groupname = get_course_groupname_for_role(course.location, role)
group, __ = Group.objects.get_or_create(name=groupname)
user.groups.add(group)
user.save()
def create_a_course():
course = world.CourseFactory.create(org='MITx', course='999', display_name='Robot Super Course')
world.scenario_dict['COURSE'] = course
user = world.scenario_dict.get("USER")
if not user:
user = get_user_by_email('[email protected]')
add_course_author(user, course)
# Navigate to the studio dashboard
world.visit('/')
course_link_css = 'a.course-link'
world.css_click(course_link_css)
course_title_css = 'span.course-title'
assert_true(world.is_css_present(course_title_css))
def add_section(name='My Section'):
link_css = 'a.new-courseware-section-button'
world.css_click(link_css)
name_css = 'input.new-section-name'
save_css = 'input.new-section-name-save'
world.css_fill(name_css, name)
world.css_click(save_css)
span_css = 'span.section-name-span'
assert_true(world.is_css_present(span_css))
def add_subsection(name='Subsection One'):
css = 'a.new-subsection-item'
world.css_click(css)
name_css = 'input.new-subsection-name-input'
save_css = 'input.new-subsection-name-save'
world.css_fill(name_css, name)
world.css_click(save_css)
def set_date_and_time(date_css, desired_date, time_css, desired_time):
world.css_fill(date_css, desired_date)
# hit TAB to get to the time field
e = world.css_find(date_css).first
# pylint: disable=W0212
e._element.send_keys(Keys.TAB)
world.css_fill(time_css, desired_time)
e = world.css_find(time_css).first
e._element.send_keys(Keys.TAB)
time.sleep(float(1))
@step('I have enabled the (.*) advanced module$')
def i_enabled_the_advanced_module(step, module):
step.given('I have opened a new course section in Studio')
world.css_click('.nav-course-settings')
world.css_click('.nav-course-settings-advanced a')
type_in_codemirror(0, '["%s"]' % module)
press_the_notification_button(step, 'Save')
@world.absorb
def create_course_with_unit():
"""
Prepare for tests by creating a course with a section, subsection, and unit.
Performs the following:
Clear out all courseware
Create a course with a section, subsection, and unit
Create a user and make that user a course author
Log the user into studio
Open the course from the dashboard
Expand the section and click on the New Unit link
The end result is the page where the user is editing the new unit
"""
world.clear_courses()
course = world.CourseFactory.create()
world.scenario_dict['COURSE'] = course
section = world.ItemFactory.create(parent_location=course.location)
world.ItemFactory.create(
parent_location=section.location,
category='sequential',
display_name='Subsection One',
)
user = create_studio_user(is_staff=False)
add_course_author(user, course)
log_into_studio()
world.css_click('a.course-link')
world.wait_for_js_to_load()
css_selectors = [
'div.section-item a.expand-collapse-icon', 'a.new-unit-item'
]
for selector in css_selectors:
world.css_click(selector)
world.wait_for_mathjax()
world.wait_for_xmodule()
assert world.is_css_present('ul.new-component-type')
@step('I
|
briehl/wjr_sdk_test
|
test/MyContigFilter_server_test.py
|
Python
|
mit
| 4,171
| 0.007672
|
import unittest
import os
import json
import time
from os import environ
from ConfigParser import ConfigParser
from pprint import pprint
from biokbase.workspace.client import Workspace as workspaceService
from MyContigFilter.MyContigFilterImpl import MyContigFilter
class MyContigFilterTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
token = environ.get('KB_AUTH_TOKEN', None)
cls.ctx = {'token': token, 'provenance': [{'service': 'MyContigFilter',
'method': 'please_never_use_it_in_production', 'method_params': []}],
'authenticated': 1}
config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
cls.cfg = {}
config = ConfigParser()
config.read(config_file)
for nameval in config.items('MyContigFilter'):
cls.cfg[nameval[0]] = nameval[1]
cls.wsURL = cls.cfg['workspace-url']
cls.wsClient = workspaceService(cls.wsURL, token=token)
cls.serviceImpl = MyContigFilter(cls.cfg)
@classmethod
def tearDownClass(cls):
if hasattr(cls, 'wsName'):
cls.wsClient.delete_workspace({'workspace': cls.wsName})
print('Test workspace was deleted')
def getWsClient(self):
return self.__class__.wsClient
def getWsName(self):
if hasattr(self.__class__, 'wsName'):
return self.__class__.wsName
suffix = int(time.time() * 1000)
wsName = "test_MyContigFilter_" + str(suffix)
        ret = self.getWsClient().create_workspace({'workspace': wsName})
self.__class__.wsName = wsName
return wsName
def getImpl(self):
return self.__class__.serviceImpl
def getContext(self):
return self.__class__.ctx
def test_filter_contigs_ok(self):
obj_name = "contigset.1"
        contig1 = {'id': '1', 'length': 10, 'md5': 'md5', 'sequence': 'agcttttcat'}
contig2 = {'id': '2', 'length': 5, 'md5': 'md5', 'sequence': 'agctt'}
contig3 = {'id': '3', 'length': 12, 'md5': 'md5', 'sequence': 'agcttttcatgg'}
obj1 = {'contigs': [contig1, contig2, contig3], 'id': 'id', 'md5': 'md5', 'name': 'name',
'source': 'source', 'source_id': 'source_id', 'type': 'type'}
self.getWsClient().save_objects({'workspace': self.getWsName(), 'objects':
[{'type': 'KBaseGenomes.ContigSet', 'name': obj_name, 'data': obj1}]})
ret = self.getImpl().filter_contigs(self.getContext(), {'workspace': self.getWsName(),
'contigset_id': obj_name, 'min_length': '10', 'output_name': 'my_output'})
obj2 = self.getWsClient().get_objects([{'ref': self.getWsName()+'/'+'my_output'}])[0]['data']
self.assertEqual(len(obj2['contigs']), 2)
self.assertTrue(len(obj2['contigs'][0]['sequence']) >= 10)
self.assertTrue(len(obj2['contigs'][1]['sequence']) >= 10)
self.assertEqual(ret[0]['n_initial_contigs'], 3)
self.assertEqual(ret[0]['n_contigs_removed'], 1)
self.assertEqual(ret[0]['n_contigs_remaining'], 2)
def test_filter_contigs_err1(self):
with self.assertRaises(ValueError) as context:
self.getImpl().filter_contigs(self.getContext(), {'workspace': self.getWsName(),
'contigset_id': 'fake', 'min_length': 10, 'output_name': 'fake'})
self.assertTrue('Error loading original ContigSet object' in str(context.exception))
def test_filter_contigs_err2(self):
with self.assertRaises(ValueError) as context:
self.getImpl().filter_contigs(self.getContext(), {'workspace': self.getWsName(),
'contigset_id': 'fake', 'min_length': '-10', 'output_name': 'fake'})
self.assertTrue('min_length parameter shouldn\'t be negative' in str(context.exception))
def test_filter_contigs_err3(self):
with self.assertRaises(ValueError) as context:
self.getImpl().filter_contigs(self.getContext(), {'workspace': self.getWsName(),
'contigset_id': 'fake', 'min_length': 'ten', 'output_name': 'fake'})
self.assertTrue('Cannot parse integer from min_length parameter' in str(context.exception))
|
piotroxp/scibibscan
|
scib/lib/python3.5/site-packages/astropy/modeling/tests/example_models.py
|
Python
|
mit
| 8,459
| 0.000236
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Here are all the test parameters and values for the each
`~astropy.modeling.FittableModel` defined. There is a dictionary for 1D and a
dictionary for 2D models.
Explanation of keywords of the dictionaries:
"parameters" : list or dict
Model parameters, the model is tested with. Make sure you keep the right
order. For polynomials you can also use a dict to specify the
coefficients. See examples below.
"x_values" : list
x values where the model is evaluated.
"y_values" : list
Reference y values for the in x_values given positions.
"z_values" : list
Reference z values for the in x_values and y_values given positions.
(2D model option)
"x_lim" : list
x test range for the model fitter. Depending on the model this can differ
e.g. the PowerLaw model should be tested over a few magnitudes.
"y_lim" : list
y test range for the model fitter. Depending on the model this can differ
e.g. the PowerLaw model should be tested over a few magnitudes. (2D model
option)
"log_fit" : bool
PowerLaw models should be tested over a few magnitudes. So log_fit should
be true.
"requires_scipy" : bool
If a model requires scipy (Bessel functions etc.) set this flag.
"integral" : float
Approximate value of the integral in the range x_lim (and y_lim).
"deriv_parameters" : list
If given the test of the derivative will use these parameters to create a
model (optional)
"deriv_initial" : list
If given the test of the derivative will use these parameters as initial
values for the fit (optional)
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from ..functional_models import (
Gaussian1D, Sine1D, Box1D, Linear1D, Lorentz1D,
MexicanHat1D, Trapezoid1D, Const1D, Moffat1D,
Gaussian2D, Const2D, Box2D, MexicanHat2D,
    TrapezoidDisk2D, AiryDisk2D, Moffat2D, Disk2D,
Ring2D)
from ..polynomial import Polynomial1D, Polynomial2D
from ..powerlaws import (
PowerLaw1D, BrokenPowerLaw1D, ExponentialCutoffPowerLaw1D,
LogParabola1D)
import numpy as np
#1D Models
models_1D = {
Gaussian1D: {
'parameters': [1, 0, 1],
'x_values': [0, np.sqrt(2), -np.sqrt(2)],
'y_values': [1.0, 0.367879, 0.367879],
'x_lim': [-10, 10],
'integral': np.sqrt(2 * np.pi)
},
Sine1D: {
'parameters': [1, 0.1],
'x_values': [0, 2.5],
'y_values': [0, 1],
'x_lim': [-10, 10],
'integral': 0
},
Box1D: {
'parameters': [1, 0, 10],
'x_values': [-5, 5, 0, -10, 10],
'y_values': [1, 1, 1, 0, 0],
'x_lim': [-10, 10],
'integral': 10
},
Linear1D: {
'parameters': [1, 0],
'x_values': [0, np.pi, 42, -1],
'y_values': [0, np.pi, 42, -1],
'x_lim': [-10, 10],
'integral': 0
},
Lorentz1D: {
'parameters': [1, 0, 1],
'x_values': [0, -1, 1, 0.5, -0.5],
'y_values': [1., 0.2, 0.2, 0.5, 0.5],
'x_lim': [-10, 10],
'integral': 1
},
MexicanHat1D: {
'parameters': [1, 0, 1],
'x_values': [0, 1, -1, 3, -3],
'y_values': [1.0, 0.0, 0.0, -0.088872, -0.088872],
'x_lim': [-20, 20],
'integral': 0
},
Trapezoid1D: {
'parameters': [1, 0, 2, 1],
'x_values': [0, 1, -1, 1.5, -1.5, 2, 2],
'y_values': [1, 1, 1, 0.5, 0.5, 0, 0],
'x_lim': [-10, 10],
'integral': 3
},
Const1D: {
'parameters': [1],
'x_values': [-1, 1, np.pi, -42., 0],
'y_values': [1, 1, 1, 1, 1],
'x_lim': [-10, 10],
'integral': 20
},
Moffat1D: {
'parameters': [1, 0, 1, 2],
'x_values': [0, 1, -1, 3, -3],
'y_values': [1.0, 0.25, 0.25, 0.01, 0.01],
'x_lim': [-10, 10],
'integral': 1,
'deriv_parameters': [23.4, 1.2, 2.1, 2.3],
'deriv_initial': [10, 1, 1, 1]
},
PowerLaw1D: {
'parameters': [1, 1, 2],
'constraints': {'fixed': {'x_0': True}},
'x_values': [1, 10, 100],
'y_values': [1.0, 0.01, 0.0001],
'x_lim': [1, 10],
'log_fit': True,
'integral': 0.99
},
BrokenPowerLaw1D: {
'parameters': [1, 1, 2, 3],
'constraints': {'fixed': {'x_break': True}},
'x_values': [0.1, 1, 10, 100],
'y_values': [1e2, 1.0, 1e-3, 1e-6],
'x_lim': [0.1, 100],
'log_fit': True
},
ExponentialCutoffPowerLaw1D: {
'parameters': [1, 1, 2, 3],
'constraints': {'fixed': {'x_0': True}},
'x_values': [0.1, 1, 10, 100],
'y_values': [9.67216100e+01, 7.16531311e-01, 3.56739933e-04,
3.33823780e-19],
'x_lim': [0.01, 100],
'log_fit': True
},
LogParabola1D: {
'parameters': [1, 2, 3, 0.1],
'constraints': {'fixed': {'x_0': True}},
'x_values': [0.1, 1, 10, 100],
'y_values': [3.26089063e+03, 7.62472488e+00, 6.17440488e-03,
1.73160572e-06],
'x_lim': [0.1, 100],
'log_fit': True
},
Polynomial1D: {
'parameters': {'degree': 2, 'c0': 1., 'c1': 1., 'c2': 1.},
'x_values': [1, 10, 100],
'y_values': [3, 111, 10101],
'x_lim': [-3, 3]
}
}
#2D Models
models_2D = {
Gaussian2D: {
'parameters': [1, 0, 0, 1, 1],
'constraints': {'fixed': {'theta': True}},
'x_values': [0, np.sqrt(2), -np.sqrt(2)],
'y_values': [0, np.sqrt(2), -np.sqrt(2)],
'z_values': [1, 1. / np.exp(1) ** 2, 1. / np.exp(1) ** 2],
'x_lim': [-10, 10],
'y_lim': [-10, 10],
'integral': 2 * np.pi,
'deriv_parameters': [137., 5.1, 5.4, 1.5, 2., np.pi/4],
'deriv_initial': [10, 5, 5, 4, 4, .5]
},
Const2D: {
'parameters': [1],
'x_values': [-1, 1, np.pi, -42., 0],
'y_values': [0, 1, 42, np.pi, -1],
'z_values': [1, 1, 1, 1, 1],
'x_lim': [-10, 10],
'y_lim': [-10, 10],
'integral': 400
},
Box2D: {
'parameters': [1, 0, 0, 10, 10],
'x_values': [-5, 5, -5, 5, 0, -10, 10],
'y_values': [-5, 5, 0, 0, 0, -10, 10],
'z_values': [1, 1, 1, 1, 1, 0, 0],
'x_lim': [-10, 10],
'y_lim': [-10, 10],
'integral': 100
},
MexicanHat2D: {
'parameters': [1, 0, 0, 1],
'x_values': [0, 0, 0, 0, 0, 1, -1, 3, -3],
'y_values': [0, 1, -1, 3, -3, 0, 0, 0, 0],
'z_values': [1.0, 0.303265, 0.303265, -0.038881, -0.038881,
0.303265, 0.303265, -0.038881, -0.038881],
'x_lim': [-10, 11],
'y_lim': [-10, 11],
'integral': 0
},
TrapezoidDisk2D: {
'parameters': [1, 0, 0, 1, 1],
'x_values': [0, 0.5, 0, 1.5],
'y_values': [0, 0.5, 1.5, 0],
'z_values': [1, 1, 0.5, 0.5],
'x_lim': [-3, 3],
'y_lim': [-3, 3]
},
AiryDisk2D: {
'parameters': [7, 0, 0, 10],
'x_values': [0, 1, -1, -0.5, -0.5],
'y_values': [0, -1, 0.5, 0.5, -0.5],
'z_values': [7., 6.50158267, 6.68490643, 6.87251093, 6.87251093],
'x_lim': [-10, 10],
'y_lim': [-10, 10],
'requires_scipy': True
},
Moffat2D: {
'parameters': [1, 0, 0, 1, 2],
'x_values': [0, 1, -1, 3, -3],
'y_values': [0, -1, 3, 1, -3],
'z_values': [1.0, 0.111111, 0.008264, 0.008264, 0.00277],
'x_lim': [-3, 3],
'y_lim': [-3, 3]
},
Polynomial2D: {
'parameters': {'degree': 1, 'c0_0': 1., 'c1_0': 1., 'c0_1': 1.},
'x_values': [1, 2, 3],
'y_values': [1, 3, 2],
'z_values': [3, 6, 6],
'x_lim': [1, 100],
'y_lim': [1, 100]
},
Disk2D: {
'parameters': [1, 0, 0, 5],
'x_values': [-5, 5, -5, 5, 0, -10, 10],
'y_values': [-5, 5, 0, 0, 0, -10, 10],
'z_values': [0, 0, 1, 1, 1, 0, 0],
'x_lim': [-10, 10],
'y_lim': [-10, 10],
'integral': np.pi * 5 ** 2
},
Ring2D: {
'
|
danzelmo/dstl-competition
|
global_vars.py
|
Python
|
mit
| 32
| 0.03125
|
DATA_DIR = '/media/d/ssd2/dstl/'
|
uclastudentmedia/django-massmedia
|
massmedia/models.py
|
Python
|
apache-2.0
| 14,915
| 0.007643
|
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.template.defaultfilters import slugify
from django.conf import settings
from django.core.files.base import ContentFile
from django.template.loader import get_template
from django.template import TemplateDoesNotExist,Template,Context
from massmedia import settings as appsettings
from cStringIO import StringIO
import mimetypes
import os
import zipfile
from django_extensions.db.fields import AutoSlugField
# Patch mimetypes w/ any extra types
mimetypes.types_map.update(appsettings.EXTRA_MIME_TYPES)
try:
    import cPickle as pickle
except ImportError:
import pickle
try:
from iptcinfo import IPTCInfo
iptc = 1
except ImportError:
iptc = 0
# Try to load a user-defined category model
if appsettings.CATEGORIES_MODULE:
CATEGORIES_MODULE = appsettings.CATEGORIES_MODULE
else:
# Otherwise use dummy category
CATEGORIES_MODULE = 'Category'
class Category(models.Model):
name = models.CharField(max_length=150)
def __unicode__(self): return self.name
try:
import Image as PilImage
except ImportError:
try:
from PIL import Image as PilImage
except ImportError:
PilImage = 0
try:
from hachoir_core.error import HachoirError
from hachoir_core.stream import InputStreamError
from hachoir_parser import createParser
from hachoir_metadata import extractMetadata
except ImportError:
extractMetadata = None
class upload_to(object):
"""
This tricky little bugger allows us to use all lowercase urls and stuff.
"""
def __init__(self, format, field='file'):
self.format = format
self.field = field
def __call__(self, instance, filename):
get_filename = instance._meta.get_field(self.field).get_filename
return os.path.join(self.get_directory_name(), get_filename(filename))
def get_directory_name(self):
import datetime
return os.path.normpath(datetime.datetime.now().strftime(self.format)).lower()
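# For instance (illustrative, not from the original project), upload_to('img/%Y/%b/%d')
# called with filename 'Photo.JPG' yields a path like 'img/2013/jan/05/Photo.JPG':
# the date-based directory is lower-cased, the file name is left to the field's get_filename().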
def parse_metadata(path):
try:
parser = createParser(unicode(path))
except InputStreamError:
return
if not parser:
return
try:
metadata = extractMetadata(parser, appsettings.INFO_QUALITY)
except HachoirError:
return
if not metadata:
return
data = {}
text = metadata.exportPlaintext(priority=None, human=False)
for line in text:
if not line.strip().startswith('-'):
key = line.strip().lower().split(':')[0]
value = []
else:
key = line.strip().split('- ')[1].split(': ')[0]
value = line.split(key)[1][2:]
if key in data:
if hasattr(data[key],'__iter__'):
value = data[key] + [value]
else:
value = [data[key],value]
if value:
data[key] = value
return data
class PickledObjectField(models.Field):
""" Django snippet - http://www.djangosnippets.org/snippets/513/ """
__metaclass__ = models.SubfieldBase
def to_python(self, value):
try:
return pickle.loads(str(value))
except:
# If an error was raised, just return the plain value
return value
def get_db_prep_save(self, value):
if value is not None:
value = pickle.dumps(value)
return str(value)
def get_internal_type(self):
return 'TextField'
def get_db_prep_lookup(self, lookup_type, value):
if lookup_type == 'exact':
value = self.get_db_prep_save(value)
return super(PickledObjectField, self).get_db_prep_lookup(lookup_type, value)
elif lookup_type == 'in':
value = [self.get_db_prep_save(v) for v in value]
return super(PickledObjectField, self).get_db_prep_lookup(lookup_type, value)
else:
raise TypeError('Lookup type %s is not supported.' % lookup_type)
class Media(models.Model):
title = models.CharField(max_length=255)
slug = AutoSlugField(max_length=50, overwrite=True, populate_from=("title",))
creation_date = models.DateTimeField(auto_now_add=True)
author = models.ForeignKey(User, blank=True, null=True, limit_choices_to={'is_staff':True})
one_off_author = models.CharField('one-off author', max_length=100, blank=True)
credit = models.CharField(max_length=150, blank=True)
caption = models.TextField(blank=True)
metadata = PickledObjectField(blank=True)
sites = models.ManyToManyField(Site,related_name='%(class)s_sites')
categories = models.ManyToManyField(CATEGORIES_MODULE, blank=True)
reproduction_allowed = models.BooleanField("we have reproduction rights for this media", default=True)
public = models.BooleanField(help_text="this media is publicly available", default=True)
external_url = models.URLField(blank=True,null=True,help_text="If this URLField is set, the media will be pulled externally")
mime_type = models.CharField(max_length=150,blank=True,null=True)
width = models.IntegerField(blank=True, null=True)
height = models.IntegerField(blank=True, null=True)
widget_template = models.CharField(max_length=255,blank=True,null=True,
help_text='The template name used to generate the widget (defaults to mime_type layout)')
class Meta:
ordering = ('-creation_date',)
abstract = True
unique_together = (('slug', 'creation_date'),)
def __unicode__(self):
return self.title
def get_absolute_url(self):
if self.external_url:
return self.external_url
if hasattr(self,'file') and getattr(self,'file',None):
return self.absolute_url((
settings.MEDIA_URL,
'/'.join([self.creation_date.strftime("%Y"), self.creation_date.strftime("%b").lower(), self.creation_date.strftime("%d")]),
os.path.basename(self.file.path)))
return ''
def absolute_url(self, format):
raise NotImplementedError
def save(self, *args, **kwargs):
if self.file and not self.mime_type:
self.mime_type = mimetypes.guess_type(self.file.path)[0]
if not(self.metadata) and self.file and extractMetadata:
self.metadata = parse_metadata(self.file.path) or ''
super(Media, self).save(*args, **kwargs)
def get_mime_type(self):
if self.mime_type:
return self.mime_type
if self.metadata and 'mime_type' in self.metadata:
return self.metadata['mime_type']
return
def get_template(self):
mime_type = self.get_mime_type()
if self.widget_template:
if appsettings.TEMPLATE_MODE == appsettings.FILE_SYSTEM:
return get_template(self.widget_template)
else:
return MediaTemplate.objects.get(name=self.widget_template).template()
elif mime_type is None:
if appsettings.TEMPLATE_MODE == appsettings.FILE_SYSTEM:
if appsettings.USE_VOXANT and isinstance(self, VoxantVideo):
return get_template('massmedia/voxant.html')
else:
return get_template('massmedia/generic.html')
else:
                return MediaTemplate.objects.get(mimetype='').template()
else:
if appsettings.TEMPLATE_MODE == appsettings.FILE_SYSTEM:
try:
return get_template('massmedia/%s.html'%mime_type)
except TemplateDoesNotExist:
try:
return get_template('massmedia/%s/generic.html'%mime_type.split('/')[0])
except TemplateDoesNotExist:
return get_template('massmedia/generic.html')
else:
try:
return MediaTemplate.objects.get(mimetype=mime_type)
except MediaTemp
|
tgymartin/green-fingers-2d
|
DW/part2_and_part3/Cohort_6_Team_6/part3_code/prototype.py
|
Python
|
gpl-3.0
| 3,524
| 0.018729
|
#!/usr/bin/env python
'''
2D Group Members:
> Charlotte Phang
> Lau Wenkie
> Mok Jun Neng
> Martin Tan
> Dicson Candra
'''
#Import relevant modules
import RPi.GPIO as GPIO
import os
import glob
import time
from PIDsm import PID_ControllerSM
### PIN NUMBERS ###
tempPin = 4
motorPin = 12
fanPin = 13
### PARAMETERS ###
pwmFreq = 100
#Code to read temperature from the ####################### sensor
class tempSensor:
#Location of file to read from for temperature: /sys/bus/w1/devices/28-000008ae29b8/w1_slave
#to manually read, "cat /sys/bus/w1/devices/28-000008ae29b8/w1_slave" in terminal
def __init__(self):
        os.system('modprobe w1-gpio')
os.system('modprobe w1-therm')
#define directory of the temperature data in the linux filesystem
self.base_dir = '/sys/bus/w1/devices/'
self.device_folder = glob.glob(self.base_dir + '28*')[0]
self.device_file = self.device_folder + '/w1_slave'
def read_temp_raw(self): #reading raw output of the 1 wire bus
        f = open(self.device_file, 'r') #open file defined in self.device_file
lines = f.readlines()
f.close() #close file to reset the file pointer
return lines
def __call__(self): #function to extract temperature data from the raw data in string
lines = self.read_temp_raw()
while lines[0].strip()[-3:] != 'YES':
time.sleep(0.2)
lines = self.read_temp_raw()
equals_pos = lines[1].find('t=')
if equals_pos != -1:
temp_string = lines[1][equals_pos+2:]
temp_c = float(temp_string) / 1000.0
return temp_c
#Set up global variables
GPIO.setmode(GPIO.BCM) #use BCM pin numbering system
GPIO.setup(tempPin, GPIO.IN, GPIO.PUD_UP) #set up the 1 wire interface
GPIO.setup(motorPin, GPIO.OUT) #setup the motor pin
GPIO.setup(fanPin, GPIO.OUT) #setup the fan pin
#define the fan and pump pins as PWM pins and initialise them at 0% PWM (off)
pump = GPIO.PWM(motorPin, pwmFreq)
pump.start(0.0)
fan = GPIO.PWM(fanPin, pwmFreq)
fan.start(0.0)
#create controller objects from the PID_ControllerSM class
targetTemperature = raw_input('Please key in your desired target temperature: ')
motorController = PID_ControllerSM(float(targetTemperature),30,0,10)
motorController.start()
fanController = PID_ControllerSM(float(targetTemperature),50,0,5)
fanController.start()
#create sensor object
temp = tempSensor()
def main(): #main code to loop indefinitely here
#check current temperature
currentTemp = temp()
print 'Current temp: %.3f' %(currentTemp) #for monitoring in the terminal
motorOutput = motorController.step(currentTemp) #get the amount of PWM to output to fan and pump from the state machine
fanOutput = fanController.step(currentTemp)
pump.ChangeDutyCycle(motorOutput) #output the pump PWM. ChangeDutyCycle takes a value from 0 to 100%
fan.ChangeDutyCycle(fanOutput) #output the fan PWM
#####################################################################################
### Run the main code unless user terminates using Ctrl+C. ###
### Before exiting, code will reset and release GPIO control to deactivate motor. ###
#####################################################################################
while True:
try:
main() #execute main()
except KeyboardInterrupt:
print 'Cleaning and Exiting...'
GPIO.cleanup() #clean up the pins and exit the program
print 'Done'
exit()
|
Pyco7/django-ajax-views
|
ajaxviews/conf.py
|
Python
|
mit
| 1,622
| 0.001233
|
from django.conf import settings as django_settings
# noinspection PyPep8Naming
class LazySettings:
@property
def REQUIRE_MAIN_NAME(self):
return getattr(django_settings, 'REQUIRE_MAIN_NAME', 'main')
@property
def DEFAULT_PAGINATE_BY(self):
return getattr(django_settings, 'DEFAULT_PAGINATE_BY', 30)
@property
def FILTER_SEARCH_INPUT_BY(self):
return getattr(django_settings, 'FILTER_SEARCH_INPUT_BY', 10)
@property
def AUTO_PAGE_SIZE(self):
return getattr(django_settings, 'AUTO_PAGE_SIZE', True)
@property
def AUTO_FORM_HEADLINE(self):
return getattr(django_settings, 'AUTO_FORM_HEADLINE', True)
@property
def CREATE_FORM_HEADLINE_PREFIX(self):
return getattr(django_settings, 'CREATE_FORM_HEADLINE_PREFIX', 'Add')
@property
def UPDATE_FORM_HEADLINE_PREFIX(self):
return getattr(django_settings, 'UPDATE_FORM_HEADLINE_PREFIX', 'Edit')
@property
def FORM_RELATED_OBJECT_IDS(self):
return getattr(django_settings, 'FORM_RELATED_OBJECT_IDS', True)
@property
def GENERIC_FORM_BASE_TEMPLATE(self):
return getattr(django_settings, 'GENERIC_FORM_BASE_TEMPLATE', 'ajaxviews/generic_form.html')
@property
def AUTO_DELETE_URL(self):
return getattr(django_settings, 'AUTO_DELETE_URL', True)
@property
    def FORM_DELETE_CONFIRMATION(self):
return getattr(django_settings, 'FORM_DELETE_CONFIRMATION', True)
@property
def AUTO_SUCCESS_URL(self):
return getattr(django_settings, 'AUTO_SUCCESS_URL', True)
settings = LazySettings()
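# Usage sketch (assumption, not from the original package docs): each property above falls
# back to a default, so a project can override any of them in its Django settings module,
# e.g. DEFAULT_PAGINATE_BY = 50, and read the value via `from ajaxviews.conf import settings`.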
|
francois/pycounters
|
counters/base_counters.py
|
Python
|
mit
| 499
| 0.022044
|
import re
import time
class BaseCounters:
def __init__(self):
        self.keyre = re.compile('\A[\w.]+\Z')
def ping(self, key):
self.validate_key(key)
self.do_ping(key, int(time.time()))
def hit(self, key, n=1):
self.validate_key(key)
self.do_hit(key, n)
def validate_key(self, key):
if re.match(self.keyre, key):
pass
else:
raise ValueError("Counters keys must only contain letters, numbers, the underscore (_) and fullstop (.), received \"%s
|
\"" % key)
|
vinodkc/spark
|
python/pyspark/rdd.py
|
Python
|
apache-2.0
| 126,212
| 0.001965
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
import sys
import os
import operator
import shlex
import warnings
import heapq
import bisect
import random
from subprocess import Popen, PIPE
from threading import Thread
from collections import defaultdict
from itertools import chain
from functools import reduce
from math import sqrt, log, isinf, isnan, pow, ceil
from typing import (
Any,
Callable,
Dict,
Generic,
Hashable,
Iterable,
Iterator,
IO,
List,
NoReturn,
Optional,
Sequence,
Tuple,
Union,
TypeVar,
cast,
overload,
TYPE_CHECKING,
)
from pyspark.java_gateway import local_connect_and_auth
from pyspark.serializers import (
AutoBatchedSerializer,
BatchedSerializer,
NoOpSerializer,
CartesianDeserializer,
CloudPickleSerializer,
PairDeserializer,
CPickleSerializer,
Serializer,
pack_long,
read_int,
write_int,
)
from pyspark.join import (
python_join,
python_left_outer_join,
python_right_outer_join,
python_full_outer_join,
python_cogroup,
)
from pyspark.statcounter import StatCounter
from pyspark.rddsampler import RDDSampler, RDDRangeSampler, RDDStratifiedSampler
from pyspark.storagelevel import StorageLevel
from pyspark.resource.requests import ExecutorResourceRequests, TaskResourceRequests
from pyspark.resource.profile import ResourceProfile
from pyspark.resultiterable import ResultIterable
from pyspark.shuffle import (
Aggregator,
ExternalMerger,
get_used_memory,
ExternalSorter,
ExternalGroupBy,
)
from pyspark.traceback_utils import SCCallSiteSync
from pyspark.util import fail_on_stopiteration, _parse_memory
if TYPE_CHECKING:
import socket
import io
from pyspark._typing import NonUDFType
from pyspark._typing import S, NumberOrArray
from pyspark.context import SparkContext
from pyspark.sql.pandas._typing import (
PandasScalarUDFType,
PandasGroupedMapUDFType,
PandasGroupedAggUDFType,
PandasWindowAggUDFType,
PandasScalarIterUDFType,
PandasMapIterUDFType,
PandasCogroupedMapUDFType,
ArrowMapIterUDFType,
)
    from pyspark.sql.dataframe import DataFrame
from pyspark.sql.types import AtomicType, StructType
from pyspark.sql._typing import AtomicValue, RowLike, SQLBatchedUDFType
from py4j.java_gateway import JavaObject # type: ignore[import]
from py4j.java_collections import JavaArray # type: ignore[import]
T = TypeVar("T")
T_co = TypeVar("T_co", covariant=True)
U = TypeVar("U")
K = TypeVar("K", bound=Hashable)
V = TypeVar("V")
V1 = TypeVar("V1")
V2 = TypeVar("V2")
V3 = TypeVar("V3")
__all__ = ["RDD"]
class PythonEvalType:
"""
Evaluation type of python rdd.
These values are internal to PySpark.
These values should match values in org.apache.spark.api.python.PythonEvalType.
"""
NON_UDF: "NonUDFType" = 0
SQL_BATCHED_UDF: "SQLBatchedUDFType" = 100
SQL_SCALAR_PANDAS_UDF: "PandasScalarUDFType" = 200
SQL_GROUPED_MAP_PANDAS_UDF: "PandasGroupedMapUDFType" = 201
SQL_GROUPED_AGG_PANDAS_UDF: "PandasGroupedAggUDFType" = 202
SQL_WINDOW_AGG_PANDAS_UDF: "PandasWindowAggUDFType" = 203
SQL_SCALAR_PANDAS_ITER_UDF: "PandasScalarIterUDFType" = 204
SQL_MAP_PANDAS_ITER_UDF: "PandasMapIterUDFType" = 205
SQL_COGROUPED_MAP_PANDAS_UDF: "PandasCogroupedMapUDFType" = 206
SQL_MAP_ARROW_ITER_UDF: "ArrowMapIterUDFType" = 207
def portable_hash(x: Hashable) -> int:
"""
This function returns consistent hash code for builtin types, especially
for None and tuple with None.
The algorithm is similar to that one used by CPython 2.7
Examples
--------
>>> portable_hash(None)
0
>>> portable_hash((None, 1)) & 0xffffffff
219750521
"""
if "PYTHONHASHSEED" not in os.environ:
raise RuntimeError("Randomness of hash of string should be disabled via PYTHONHASHSEED")
if x is None:
return 0
if isinstance(x, tuple):
h = 0x345678
for i in x:
h ^= portable_hash(i)
h *= 1000003
h &= sys.maxsize
h ^= len(x)
if h == -1:
h = -2
return int(h)
return hash(x)
class BoundedFloat(float):
"""
Bounded value is generated by approximate job, with confidence and low
bound and high bound.
Examples
--------
>>> BoundedFloat(100.0, 0.95, 95.0, 105.0)
100.0
"""
confidence: float
low: float
high: float
def __new__(cls, mean: float, confidence: float, low: float, high: float) -> "BoundedFloat":
obj = float.__new__(cls, mean)
obj.confidence = confidence
obj.low = low
obj.high = high
return obj
def _create_local_socket(sock_info: "JavaArray") -> "io.BufferedRWPair":
"""
Create a local socket that can be used to load deserialized data from the JVM
Parameters
----------
sock_info : tuple
Tuple containing port number and authentication secret for a local socket.
Returns
-------
sockfile file descriptor of the local socket
"""
sockfile: "io.BufferedRWPair"
sock: "socket.socket"
port: int = sock_info[0]
auth_secret: str = sock_info[1]
sockfile, sock = local_connect_and_auth(port, auth_secret)
# The RDD materialization time is unpredictable, if we set a timeout for socket reading
# operation, it will very possibly fail. See SPARK-18281.
sock.settimeout(None)
return sockfile
def _load_from_socket(sock_info: "JavaArray", serializer: Serializer) -> Iterator[Any]:
"""
Connect to a local socket described by sock_info and use the given serializer to yield data
Parameters
----------
sock_info : tuple
Tuple containing port number and authentication secret for a local socket.
serializer : :py:class:`Serializer`
The PySpark serializer to use
Returns
-------
result of :py:meth:`Serializer.load_stream`,
usually a generator that yields deserialized data
"""
sockfile = _create_local_socket(sock_info)
# The socket will be automatically closed when garbage-collected.
return serializer.load_stream(sockfile)
def _local_iterator_from_socket(sock_info: "JavaArray", serializer: Serializer) -> Iterator[Any]:
class PyLocalIterable:
"""Create a synchronous local iterable over a socket"""
def __init__(self, _sock_info: "JavaArray", _serializer: Serializer):
port: int
auth_secret: str
jsocket_auth_server: "JavaObject"
port, auth_secret, self.jsocket_auth_server = _sock_info
self._sockfile = _create_local_socket((port, auth_secret))
self._serializer = _serializer
self._read_iter: Iterator[Any] = iter([]) # Initialize as empty iterator
self._read_status = 1
def __iter__(self) -> Iterator[Any]:
while self._read_status == 1:
# Request next partition data from Java
write_int(1, self._sockfile)
self._sockfile.flush()
# If response is 1 then there is a partition to read, if 0 then fully consumed
self._read_status = read_int(self._sockfile)
if self._read_status == 1:
# Load the partition data as a stream and read
|
panoplyio/panoply-python-sdk
|
panoply/errors/exceptions.py
|
Python
|
mit
| 1,017
| 0
|
from datetime import datetime
class PanoplyException(Exception):
def __init__(self, args=None, retryable=True):
super(PanoplyException, self).__init__(args)
self.retryable = retryable
class IncorrectParamError(Exception):
def __init__(self, msg: str = "Incorrect input parametr"):
super().__init__(msg)
class DataSourceException(Exception):
def __init__(self, message, code, exception_cls,
phase, source_type, source_id, database_id):
        super().__init__(message)
self.message = message
self.code = code
self.phase = phase
self.source_type = source_type
self.source_id = source_id
self.database_id = database_id
self.exception_cls = exception_cls
self.created_at = datetime.utcnow()
class TokenValidationException(PanoplyException):
def __init__(self, original_error, args=None, retryable=True):
super().__init__(args, retryable)
self.original_error = original_error
|
Tong-Chen/scikit-learn
|
examples/linear_model/plot_ransac.py
|
Python
|
bsd-3-clause
| 1,671
| 0
|
"""
===========================================
Robust linear model estimation using RANSAC
===========================================
In this example we see how to robustly fit a linear model to faulty data using
the RANSAC algorithm.
"""
import numpy as np
from matplotlib import pyplot as plt
from sklearn import linear_model, datasets
n_samples = 1000
n_outliers = 50
X, y, coef = datasets.make_regression(
n_samples=n_samples, n_features=1,
n_informative=1, noise=10,
coef=True, random_state=0)
# Add outlier data
np.random.seed(0)
X[:n_outliers] = 3 + 0.5 * np.random.normal(size=(n_outliers, 1))
y[:n_outliers] = -3 + 10 * np.random.normal(size=n_outliers)
# Fit line using all data
model = linear_model.LinearRegression()
model.fit(X, y)
# Robustly fit linear model with RANSAC algorithm
model_ransac = linear_model.RANSACRegressor(linear_model.LinearRegression())
model_ransac.fit(X, y)
inlier_mask = model_ransac.inlier_mask_
outlier_mask = np.logical_not(inlier_mask)
# Predict data of estimated models
line_X = np.arange(-5, 5)
line_y = model.predict(line_X[:, np.newaxis])
line_y_ransac = model_ransac.predict(line_X[:, np.newaxis])
# Compare estimated coefficients
print "Estimated coefficients (true, normal, RANSAC):"
print coef, model.coef_, model_ransac.estimator_.coef_
plt.plot(X[inlier_mask], y[inlier_mask], '.g', label='Inliers')
plt.plot(X[outlier_mask], y[outlier_mask], '.r', label='Outliers')
plt.plot(line_X, line_y, '-k', label='Linear regressor')
plt.plot(line_X, line_y_ransac, '-b', label='RANSAC regressor')
plt.legend(loc='lower right')
plt.show()
|
oscarforri/ambrosio
|
ambrosio/channels/TelegramChannel.py
|
Python
|
gpl-3.0
| 1,405
| 0.003559
|
from Channel import Channel
import telepot
class AmbrosioBot(telepot.Bot):
"""AmbrosioBot is my telgram bot"""
def __init__(self, token):
super(AmbrosioBot, self).__init__(token)
self.clist = None
self.chat_id = None
def set_list(self,clist):
self.clist = clist
def on_chat_message(self, msg):
content_type, chat_type, chat_id, = telepot.glance(msg)
if content_type == 'text':
command =msg['text']
if self.clist is not None:
self.clist.append(command)
self.chat_id = chat_id
def respond(self, response):
if self.chat_id is not None:
self.sendMessage(self.chat_id, response)
class TelegramChannel(Channel):
"""channel class received commands from telegram"""
def __init__(self, name="TelegramChannel"):
super(TelegramChannel, self).__init__(name)
self.bot = AmbrosioBot("1898
|
84221:AAHls9d0EkCDfU0wgQ-acs5Z39aibA7BZmc")
self.messages = []
self.bot.set_list(self.messages)
self.bot.notifyOnMessage()
def get_msg(self):
if self.msg_avail():
return self.messages.pop(0)
def msg_avail(self):
return len(self.messages) > 0
def respond(self, response):
if response is None:
response = "Command not understand"
self.bot.respond(response)
|
geminipy/geminipy
|
geminipy/__init__.py
|
Python
|
gpl-3.0
| 10,473
| 0.000095
|
"""
This module contains a class to make requests to the Gemini API.
Author: Mike Marzigliano
"""
import time
import json
import hmac
import base64
import hashlib
import requests
class Geminipy(object):
"""
A class to make requests to the Gemini API.
Make public or authenticated requests according to the API documentation:
https://docs.gemini.com/
"""
live_url = 'https://api.gemini.com'
sandbox_url = 'https://api.sandbox.gemini.com'
base_url = sandbox_url
api_key = ''
secret_key = ''
def __init__(self, api_key='', secret_key='', live=False):
"""
Initialize the class.
Arguments:
api_key -- your Gemini API key
secret_key -- your Gemini API secret key for signatures
live -- use the live API? otherwise, use the sandbox (default False)
"""
self.api_key = api_key
self.secret_key = secret_key
if live:
self.base_url = self.live_url
# public requests
def symbols(self):
"""Send a request for all trading symbols, return the response."""
url = self.base_url + '/v1/symbols'
return requests.get(url)
def pubticker(self, symbol='btcusd'):
"""Send a request for latest ticker info, return the response."""
url = self.base_url + '/v1/pubticker/' + symbol
return requests.get(url)
def book(self, symbol='btcusd', limit_bids=0, limit_asks=0):
"""
Send a request to get the public order book, return the response.
Arguments:
symbol -- currency symbol (default 'btcusd')
limit_bids -- limit the number of bids returned (default 0)
limit_asks -- limit the number of asks returned (default 0)
"""
url = self.base_url + '/v1/book/' + symbol
params = {
'limit_bids': limit_bids,
'limit_asks': limit_asks
}
return requests.get(url, params)
def trades(self, symbol='btcusd', since=0, limit_trades=50,
include_breaks=0):
"""
Send a request to get all public trades, return the response.
Arguments:
symbol -- currency symbol (default 'btcusd')
since -- only return trades after this unix timestamp (default 0)
limit_trades -- maximum number of trades to return (default 50).
include_breaks -- whether to display broken trades (default False)
"""
url = self.base_url + '/v1/trades/' + symbol
params = {
'since': since,
'limit_trades': limit_trades,
'include_breaks': include_breaks
}
return requests.get(url, params)
def auction(self, symbol='btcusd'):
"""Send a request for latest auction info, return the response."""
url = self.base_url + '/v1/auction/' + symbol
return requests.get(url)
def auction_history(self, symbol='btcusd', since=0,
limit_auction_results=50, include_indicative=1):
"""
Send a request for auction history info, return the response.
Arguments:
symbol -- currency symbol (default 'btcusd')
since -- only return auction events after this timestamp (default 0)
limit_auction_results -- maximum number of auction events to return
(default 50).
include_indicative -- whether to include publication of indicative
prices and quantities. (default True)
"""
url = self.base_url + '/v1/auction/' + symbol + '/history'
params = {
'since': since,
'limit_auction_results': limit_auction_results,
'include_indicative': include_indicative
}
return requests.get(url, params)
# authenticated requests
def new_order(self, amount, price, side, client_order_id=None,
symbol='btcusd', type='exchange limit', options=None):
"""
Send a request to place an order, return the response.
Arguments:
amount -- quoted decimal amount of BTC to purchase
price -- quoted decimal amount of USD to spend per BTC
side -- 'buy' or 'sell'
client_order_id -- an optional client-specified order id (default None)
symbol -- currency symbol (default 'btcusd')
type -- the order type (default 'exchange limit')
"""
request = '/v1/order/new'
url = self.base_url + request
params = {
'request': request,
'nonce': self.get_nonce(),
'symbol': symbol,
'amount': amount,
'price': price,
'side': side,
'type': type
}
if client_order_id is not None:
params['client_order_id'] = client_order_id
if options is not None:
params['options'] = options
return requests.post(url, headers=self.prepare(params))
def cancel_order(self, order_id):
"""
Send a request to cancel an order, return the response.
Arguments:
order_id - the order id to cancel
"""
request = '/v1/order/cancel'
url = self.base_url + request
params = {
'request': request,
'nonce': self.get_nonce(),
'order_id': order_id
}
return requests.post(url, headers=self.prepare(params))
def cancel_session(self):
"""Send a request to cancel all session orders, return the response."""
request = '/v1/order/cancel/session'
url = self.base_url + request
params = {
'request': request,
'nonce': self.get_nonce()
}
return requests.post(url, headers=self.prepare(params))
def cancel_all(self):
"""Send a request to cancel all orders, return the response."""
request = '/v1/order/cancel/all'
url = self.base_url + request
params = {
'request': request,
'nonce': self.get_nonce()
}
return requests.post(url, headers=self.prepare(params))
def order_status(self, order_id):
"""
Send a request to get an order status, return the response.
Arguments:
order_id -- the order id to get information on
"""
request = '/v1/order/status'
url = self.base_url + request
params = {
'request': request,
'nonce': self.get_nonce(),
'order_id': order_id
}
return requests.post(url, headers=self.prepare(params))
def active_orders(self):
"""Send a request to get active orders, return the response."""
request = '/v1/orders'
        url = self.base_url + request
params = {
'request': request,
'nonce': self.get_nonce()
}
return requests.post(url, headers=self.prepare(params))
def past_trades(self, symbol='btcusd', limit_trades=50, timestamp=0):
"""
Send a trade history request, return the response.
        Arguments:
symbol -- currency symbol (default 'btcusd')
limit_trades -- maximum number of trades to return (default 50)
timestamp -- only return trades after this unix timestamp (default 0)
"""
request = '/v1/mytrades'
url = self.base_url + request
params = {
'request': request,
'nonce': self.get_nonce(),
'symbol': symbol,
'limit_trades': limit_trades,
'timestamp': timestamp
}
return requests.post(url, headers=self.prepare(params))
def tradevolume(self):
"""Send a request to get your trade volume, return the response."""
request = '/v1/tradevolume'
url = self.base_url + request
params = {
'request': request,
'nonce': self.get_nonce()
}
return requests.post(url, headers=self.prepare(params))
def balances(self):
"""Send an account balance request, return the response."""
request = '/v1/balances'
        url = self.base_url + request
        params = {
            'request': request,
            'nonce': self.get_nonce()
        }
        return requests.post(url, headers=self.prepare(params))
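    # The authenticated requests above all rely on get_nonce() and prepare(params),
    # which fall outside this excerpt. Below is a plausible sketch of both helpers:
    # Gemini's private endpoints are typically signed by base64-encoding the JSON
    # payload and HMAC-SHA384-signing it with the secret key. The header names and
    # hashing scheme are assumptions to be checked against the Gemini API docs.
    def get_nonce(self):
        # A strictly increasing integer; the millisecond timestamp is a common choice.
        return int(round(time.time() * 1000))

    def prepare(self, params):
        # Base64-encode the JSON payload, sign it with HMAC-SHA384, and build the
        # headers expected on private endpoints.
        payload = base64.b64encode(json.dumps(params).encode())
        signature = hmac.new(self.secret_key.encode(), payload,
                             hashlib.sha384).hexdigest()
        return {
            'Content-Type': 'text/plain',
            'X-GEMINI-APIKEY': self.api_key,
            'X-GEMINI-PAYLOAD': payload,
            'X-GEMINI-SIGNATURE': signature,
        }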
|
staranjeet/fjord
|
vendor/packages/click/examples/complex/complex/commands/cmd_status.py
|
Python
|
bsd-3-clause
| 277
| 0
|
import click
from complex.cli import pass_context
@click.command('status', short_help='Shows file changes.')
@pass_context
def cli(ctx):
"""Shows file changes in the current working directory."""
ctx.log('Changed files: none')
    ctx.vlog('bla bla bla, debug info')
|
NSLS-II/replay
|
replay/search/__init__.py
|
Python
|
bsd-3-clause
| 527
| 0
|
from __future__ import absolute_import
import six
import logging
from .. import py3_errmsg
logger = logging.getLogger(__name__)
try:
import enaml
except ImportError:
if six.PY3:
logger.exception(py3_errmsg)
else:
raise
else:
from .model import (GetLastModel, DisplayHeaderModel, WatchForHeadersModel,
ScanIDSearchModel)
with enaml.imports():
from .view import (GetLastView, GetLastWindow, WatchForHeadersView,
ScanIDSearchView)
|
rapydo/do
|
controller/commands/swarm/join.py
|
Python
|
mit
| 1,350
| 0.000741
|
import typer
from controller import log
from controller.app import Application
from controller.deploy.docker import Docker
@Application.app.command(help="Provide instructions to j
|
oin new nodes")
def join(
manager: bool = typer.Option(
False, "--manager", show_default=False, help="join new node with manager role"
)
) -> None:
Application.print_command(
        Application.serialize_parameter("--manager", manager, IF=manager),
)
Application.get_controller().controller_init()
docker = Docker()
manager_address = "N/A"
# Search for the manager address
for node in docker.client.node.list():
role = node.spec.role
state = node.status.state
availability = node.spec.availability
if (
role == "manager"
and state == "ready"
and availability == "active"
and node.manager_status
):
manager_address = node.manager_status.addr
if manager:
log.info("To add a manager to this swarm, run the following command:")
token = docker.swarm.get_token("manager")
else:
log.info("To add a worker to this swarm, run the following command:")
token = docker.swarm.get_token("worker")
print("")
print(f"docker swarm join --token {token} {manager_address}")
print("")
|
impactlab/jps-handoff
|
webapp/viewer/migrations/0007_auto_20150408_1402.py
|
Python
|
mit
| 935
| 0.00107
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('viewer', '0006_meter_on_auditlist'),
]
operations = [
migrations.CreateModel(
name='Group',
fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=64)),
],
options={
},
bases=(models.Model,),
),
migrations.RenameField(
model_name='profiledatapoint',
old_name='kwh',
new_name='kw',
),
migrations.AddField(
model_name='meter',
name='groups',
            field=models.ManyToManyField(to='viewer.Group'),
preserve_default=True,
),
]
|
jwlawson/tensorflow
|
tensorflow/contrib/py2tf/pyct/parser.py
|
Python
|
apache-2.0
| 1,152
| 0.003472
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Converting code to AST.
Adapted from Tangent.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import textwrap
import gast
from tensorflow.python.util import tf_inspect
def parse_object(obj):
"""Return the AST of given object."""
return parse_str(tf_inspect.getsource(obj))
def parse_str(src):
"""Return the AST of given piece of code."""
return gast.parse(textwrap.dedent(src))
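# A short usage sketch (the sample source string is made up for illustration);
# both helpers return a gast AST, which mirrors the standard ast module.
if __name__ == '__main__':
    node = parse_str("def add(a, b):\n  return a + b")
    print(node.body[0].name)  # 'add' -- the parsed FunctionDef node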
|
juliarizza/certificate_generator
|
models/mail.py
|
Python
|
gpl-3.0
| 2,605
| 0.001153
|
# -*- coding: utf-8 -*-
import os
import ConfigParser
import smtplib
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
from email.MIMEBase import MIMEBase
from email import encoders
from global_functions import app_dir
class Mailer():
"""
Instance to manage the mailing.
"""
def __init__(self):
"""
Setup all needed info.
"""
# Gets all the connection info from the .ini file
self.Config = ConfigParser.ConfigParser()
self.Config.read(os.path.join(app_dir, "institution.ini"))
self.server = unicode(self.Config.get("Mail", "server"))
self.port = int(self.Config.get("Mail", "port"))
self.email = unicode(self.Config.get("Mail", "email"))
self.password = unicode(self.Config.get("Mail", "password"))
def connect(self):
"""
Connects to the mail server using the .ini info.
"""
self.smtp_server = smtplib.SMTP(self.server, self.port)
self.smtp_server.ehlo()
self.smtp_server.starttls()
try:
self.smtp_server.login(self.email, self.password)
return 1
except:
return 0
def send_certificate(self, path, send_to):
"""
Send each certificate from the configured email.
"""
# Email info
msg = MIMEMultipart()
msg["From"] = self.email
msg["To"] = send_to
msg["Subject"] = u"Certificado"
body = u"""Em anexo a este e-mail encontra-se o seu certificado de participação de um de nossos eventos.
Qualquer problema, entre em contato respondendo a este e-mail ou procure-nos em:
{address}
Fone: {phone}
""".format(
address=unicode(self.Config.get("Contact", "address")),
phone=unicode(self.Config.get("Contact", "phone"))
)
        msg.attach(MIMEText(unicode(body), 'plain', 'utf-8'))
# Add the certificate file
attachment = open(unicode(path), "rb")
filename = os.path.basename(unicode(path))
part = MIMEBase('application', 'octet-stream')
part.set_payload(attachment.read())
encoders.encode_base64(part)
part.add_header(u'Content-Disposition',
"attachment; filename= %s" % filename)
msg.attach(part)
text = msg.as_string()
# Send the email
self.smtp_server.sendmail(self.email, send_to, text)
def quit(self):
# Quits the connection
self.smtp_server.quit()
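# A hypothetical caller for the class above; the certificate path and recipient
# address are examples only. The intended flow is connect -> send -> quit.
if __name__ == '__main__':
    mailer = Mailer()
    if mailer.connect():      # returns 1 on a successful SMTP login, 0 otherwise
        mailer.send_certificate("certificados/participante_001.pdf",
                                "[email protected]")
        mailer.quit()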
|
bpaniagua/SPHARM-PDM
|
Modules/Scripted/ShapeAnalysisModule/ShapeAnalysisModule.py
|
Python
|
apache-2.0
| 88,012
| 0.010192
|
import os, sys
import unittest
import vtk, qt, ctk, slicer
from slicer.ScriptedLoadableModule import *
import logging
import csv
from slicer.util import VTKObservationMixin
import platform
import time
import urllib
import shutil
from CommonUtilities import *
from packaging import version
def _setSectionResizeMode(header, *args, **kwargs):
if version.parse(qt.Qt.qVersion()) < version.parse("5.0.0"):
header.setResizeMode(*args, **kwargs)
else:
header.setSectionResizeMode(*args, **kwargs)
#
# ShapeAnalysisModule
#
class ShapeAnalysisModule(ScriptedLoadableModule):
"""Uses ScriptedLoadableModule base class, available at:
https://github.com/Slicer/Slicer/blob/master/Base/Python/slicer/ScriptedLoadableModule.py
"""
def __init__(self, parent):
ScriptedLoadableModule.__init__(self, parent)
self.parent.title = "Shape Analysis Module"
self.parent.categories = ["SPHARM"]
self.parent.dependencies = []
self.parent.contributors = ["Laura Pascal (Kitware Inc.), Beatriz Paniagua (Kitware Inc.), Hina Shah (Kitware Inc.)"]
self.parent.helpText = """
SPHARM-PDM is a tool that computes point-based models using a parametric
boundary description for the computing of Shape Analysis.
"""
self.parent.acknowledgementText = """
This work was supported by NIH NIBIB R01EB021391
(Shape Analysis Toolbox for Medical Image Computing Projects).
"""
#
# ShapeAnalysisModuleWidget
#
class ShapeAnalysisModuleWidget(ScriptedLoadableModuleWidget):
"""Uses ScriptedLoadableModuleWidget base class, available at:
https://github.com/Slicer/Slicer/blob/master/Base/Python/slicer/ScriptedLoadableModule.py
"""
def setup(self):
ScriptedLoadableModuleWidget.setup(self)
#
# Global variables
#
self.Logic = ShapeAnalysisModuleLogic()
self.progressbars_layout = None
#
# Interface
#
loader = qt.QUiLoader()
self.moduleName = 'ShapeAnalysisModule'
scriptedModulesPath = eval('slicer.modules.%s.path' % self.moduleName.lower())
scriptedModulesPath = os.path.dirname(scriptedModulesPath)
path = os.path.join(scriptedModulesPath, 'Resources', 'UI', '%s.ui' % self.moduleName)
qfile = qt.QFile(path)
qfile.open(qt.QFile.ReadOnly)
widget = loader.load(qfile, self.parent)
self.layout = self.parent.layout()
self.widget = widget
self.layout.addWidget(widget)
# Global variables of the Interface
# Group Project IO
self.CollapsibleButton_GroupProjectIO = self.getWidget('CollapsibleButton_GroupProjectIO')
self.GroupProjectInputDirectory = self.getWidget('DirectoryButton_GroupProjectInputDirectory')
self.GroupProjectOutputDirectory = self.getWidget('DirectoryButton_GroupProjectOutputDirectory')
self.Debug = self.getWidget('checkBox_Debug')
# Post Processed Segmentation
self.CollapsibleButton_SegPostProcess = self.getWidget('CollapsibleButton_SegPostProcess')
self.OverwriteSegPostProcess = self.getWidget('checkBox_OverwriteSegPostProcess')
    self.label_RescaleSegPostProcess = self.getWidget('label_RescaleSegPostProcess')
self.RescaleSegPostProcess = self.getWidget('checkBox_RescaleSegPostProcess')
self.sx = self.getWidget('SliderWidget_sx')
self.sy = self.getWidget('SliderWidget_sy')
self.sz = self.getWidget('SliderWidget_sz')
self.label_sx = self.getWidget('label_sx')
self.label_sy = self.getWidget('label_sy')
self.label_sz = self.getWidget('label_sz')
self.LabelState = self.getWidget('checkBox_LabelState')
self.label_ValueLabelNumber = self.getWidget('label_ValueLabelNumber')
self.ValueLabelNumber = self.getWidget('SliderWidget_ValueLabelNumber')
# Generate Mesh Parameters
self.CollapsibleButton_GenParaMesh = self.getWidget('CollapsibleButton_GenParaMesh')
self.OverwriteGenParaMesh = self.getWidget('checkBox_OverwriteGenParaMesh')
self.NumberofIterations = self.getWidget('SliderWidget_NumberofIterations')
# Parameters to SPHARM Mesh
self.CollapsibleButton_ParaToSPHARMMesh = self.getWidget('CollapsibleButton_ParaToSPHARMMesh')
self.OverwriteParaToSPHARMMesh = self.getWidget('checkBox_OverwriteParaToSPHARMMesh')
self.SubdivLevelValue = self.getWidget('SliderWidget_SubdivLevelValue')
self.SPHARMDegreeValue = self.getWidget('SliderWidget_SPHARMDegreeValue')
self.thetaIterationValue = self.getWidget('spinBox_thetaIterationValue')
self.phiIterationValue = self.getWidget('spinBox_phiIterationValue')
self.medialMesh = self.getWidget('checkBox_medialMesh')
# Advanced Post Processed Segmentation
self.CollapsibleButton_AdvancedPostProcessedSegmentation = self.getWidget('CollapsibleButton_AdvancedPostProcessedSegmentation')
self.GaussianFiltering = self.getWidget('checkBox_GaussianFiltering')
self.label_VarianceX = self.getWidget('label_VarianceX')
self.VarianceX = self.getWidget('SliderWidget_VarianceX')
self.label_VarianceY = self.getWidget('label_VarianceY')
self.VarianceY = self.getWidget('SliderWidget_VarianceY')
self.label_VarianceZ = self.getWidget('label_VarianceZ')
self.VarianceZ = self.getWidget('SliderWidget_VarianceZ')
# Advanced Parameters to SPHARM Mesh
self.CollapsibleButton_AdvancedParametersToSPHARMMesh = self.getWidget('CollapsibleButton_AdvancedParametersToSPHARMMesh')
self.useRegTemplate = self.getWidget('checkBox_useRegTemplate')
self.label_regTemplate = self.getWidget('label_regTemplate')
self.regTemplate = self.getWidget('PathLineEdit_regTemplate')
self.useFlipTemplate = self.getWidget('checkBox_useFlipTemplate')
self.label_flipTemplate = self.getWidget('label_flipTemplate')
self.flipTemplate = self.getWidget('PathLineEdit_flipTemplate')
self.choiceOfFlip = self.getWidget('comboBox_choiceOfFlip')
self.sameFlipForAll = self.getWidget('checkBox_sameFlipForAll')
self.tableWidget_ChoiceOfFlip = self.getWidget('tableWidget_ChoiceOfFlip')
# Visualization
self.CollapsibleButton_Visualization = self.getWidget('CollapsibleButton_Visualization')
self.visualizationInSPV = self.getWidget('pushButton_visualizationInSPV')
self.CheckableComboBox_visualization = self.getWidget('CheckableComboBox_visualization')
self.tableWidget_visualization = self.getWidget('tableWidget_visualization')
# Apply CLIs
self.ApplyButton = self.getWidget('applyButton')
self.progress_layout = self.getWidget('progress_layout')
# Connections
# Group Project IO
self.CollapsibleButton_GroupProjectIO.connect('clicked()',
lambda: self.onSelectedCollapsibleButtonOpen(
self.CollapsibleButton_GroupProjectIO))
self.GroupProjectInputDirectory.connect('directoryChanged(const QString &)', self.onInputDirectoryChanged)
self.GroupProjectOutputDirectory.connect('directoryChanged(const QString &)', self.onOutputDirectoryChanged)
self.Debug.connect('clicked(bool)', self.onDebug)
# Post Processed Segmentation
self.CollapsibleButton_SegPostProcess.connect('clicked()',
lambda: self.onSelectedCollapsibleButtonOpen(
self.CollapsibleButton_SegPostProcess))
self.OverwriteSegPostProcess.connect('clicked(bool)', self.onOverwriteFilesSegPostProcess)
self.RescaleSegPostProcess.connect('stateChanged(int)', self.onSelectSpacing)
self.sx.connect('valueChanged(double)', self.onSxValueChanged)
self.sy.connect('valueChanged(double)', self.onSyValueChanged)
self.sz.connect('valueChanged(double)', self.onSzValueChanged)
self.LabelState.connect('clicked(bool)', self.onSelectValueLabelNumber)
self.ValueLabelNumber.connect('valueChanged(double)', self.onLabelNumberValueChanged)
# Generate Mesh Parameters
self.CollapsibleButton_GenParaMesh.connect('clicked()',
lambda: self.onSelectedCollapsibleButtonOpen(
self.CollapsibleButton_GenParaMesh))
self.OverwriteGenP
|
fregaham/DISP
|
formencode/validators.py
|
Python
|
gpl-2.0
| 71,779
| 0.001936
|
## FormEncode, a Form processor
## Copyright (C) 2003, Ian Bicking <[email protected]>
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation; either
## version 2.1 of the License, or (at your option) any later version.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public
## License along with this library; if not, write to the Free Software
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##
## NOTE: In the context of the Python environment, I interpret "dynamic
## linking" as importing -- thus the LGPL applies to the contents of
## the modules, but make no requirements on code importing these
## modules.
"""
Validator/Converters for use with FormEncode.
"""
import re
DateTime = None
mxlookup = None
httplib = None
urlparse = None
socket = None
from interfaces import *
from api import *
sha = random = None
try:
import sets
except ImportError:
sets = None
import cgi
import fieldstorage
True, False = (1==1), (0==1)
############################################################
## Utility methods
############################################################
# These all deal with accepting both mxDateTime and datetime
# modules and types
datetime_module = None
mxDateTime_module = None
def import_datetime(module_type):
global datetime_module, mxDateTime_module
if module_type is None:
try:
if datetime_module is None:
import datetime as datetime_module
return datetime_module
except ImportError:
if mxDateTime_module is None:
from mx import DateTime as mxDateTime_module
return mxDateTime_module
module_type = module_type.lower()
assert module_type in ('datetime', 'mxdatetime')
if module_type == 'datetime':
if datetime_module is None:
import datetime as datetime_module
return datetime_module
else:
if mxDateTime_module is None:
from mx import DateTime as mxDateTime_module
return mxDateTime_module
def datetime_now(module):
if module.__name__ == 'datetime':
return module.datetime.now()
else:
return module.now()
def datetime_makedate(module, year, month, day):
if module.__name__ == 'datetime':
return module.date(year, month, day)
else:
try:
return module.DateTime(year, month, day)
except module.RangeError, e:
raise ValueError(str(e))
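# Usage sketch for the helpers above (illustrative only): pick an implementation
# once, then build values through the module-agnostic wrappers.
#
#     mod = import_datetime(None)          # prefers datetime, falls back to mx.DateTime
#     now = datetime_now(mod)
#     first = datetime_makedate(mod, 2005, 1, 31)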
############################################################
## Wrapper Validators
############################################################
class ConfirmType(FancyValidator):
"""
Confirms that the input/output is of the proper type.
Uses the parameters:
subclass:
The class or a tuple of classes; the item must be an instance
of the class or a subclass.
type:
A type or tuple of types (or classes); the item must be of
        the exact class or type. Subclasses are not allowed.
Examples::
>>> cint = ConfirmType(subclass=int)
>>> cint.to_python(True)
True
>>> cint.to_python('1')
Traceback (most recent call last):
...
Invalid: '1' is not a subclass of <type 'int'>
>>> cintfloat = ConfirmType(subclass=(float, int))
>>> cintfloat.to_python(1.0), cintfloat.from_python(1.0)
(1.0, 1.0)
>>> cintfloat.to_python(1), cintfl
|
oat.from_python(1)
(1, 1)
>>> cintfloat.to_python(None)
Traceback (most recent call last):
...
Invalid: None is not a subclass of one of the types <type 'float'>, <type 'int'>
>>> cint2 = ConfirmType(type=int)
>>> cint2(accept_python=False).from_python(True)
Traceback (most recent call last):
...
Invalid: True must be of the type <type 'int'>
"""
subclass = None
type = None
messages = {
'subclass': "%(object)r is not a subclass of %(subclass)s",
'inSubclass': "%(object)r is not a subclass of one of the types %(subclassList)s",
'inType': "%(object)r must be one of the types %(typeList)s",
'type': "%(object)r must be of the type %(type)s",
}
def __init__(self, *args, **kw):
FancyValidator.__init__(self, *args, **kw)
if self.subclass:
if isinstance(self.subclass, list):
self.subclass = tuple(self.subclass)
elif not isinstance(self.subclass, tuple):
self.subclass = (self.subclass,)
self.validate_python = self.confirm_subclass
if self.type:
if isinstance(self.type, list):
self.type = tuple(self.type)
elif not isinstance(self.type, tuple):
self.type = (self.type,)
self.validate_python = self.confirm_type
def confirm_subclass(self, value, state):
if not isinstance(value, self.subclass):
if len(self.subclass) == 1:
msg = self.message('subclass', state, object=value,
subclass=self.subclass[0])
else:
subclass_list = ', '.join(map(str, self.subclass))
msg = self.message('inSubclass', state, object=value,
subclassList=subclass_list)
raise Invalid(msg, value, state)
def confirm_type(self, value, state):
for t in self.type:
if type(value) is t:
break
else:
if len(self.type) == 1:
msg = self.message('type', state, object=value,
type=self.type[0])
else:
msg = self.message('inType', state, object=value,
typeList=', '.join(map(str, self.type)))
raise Invalid(msg, value, state)
return value
class Wrapper(FancyValidator):
"""
Used to convert functions to validator/converters.
You can give a simple function for `to_python`, `from_python`,
`validate_python` or `validate_other`. If that function raises an
exception, the value is considered invalid. Whatever value the
function returns is considered the converted value.
Unlike validators, the `state` argument is not used. Functions
like `int` can be used here, that take a single argument.
Examples::
>>> def downcase(v):
... return v.lower()
>>> wrap = Wrapper(to_python=downcase)
>>> wrap.to_python('This')
'this'
>>> wrap.from_python('This')
'This'
>>> wrap2 = Wrapper(from_python=downcase)
>>> wrap2.from_python('This')
'this'
>>> wrap2.from_python(1)
Traceback (most recent call last):
...
Invalid: 'int' object has no attribute 'lower'
>>> wrap3 = Wrapper(validate_python=int)
>>> wrap3.to_python('1')
'1'
>>> wrap3.to_python('a')
Traceback (most recent call last):
...
Invalid: invalid literal for int(): a
"""
func_to_python = None
func_from_python = None
func_validate_python = None
func_validate_other = None
def __init__(self, *args, **kw):
for n in ['to_python', 'from_python', 'validate_python',
'validate_other']:
if kw.has_key(n):
kw['func_%s' % n] = kw[n]
del kw[n]
FancyValidator.__init__(self, *args, **kw)
self._to_python = self.wrap(self.func_to_python)
self._from_python = self.wrap(self.func_from_python)
self.validate_python = self.wrap(self.func_validate_python)
        self.validate_other = self.wrap(self.func_validate_other)
|