| repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 6-947k) | score (float64 0-0.34) | prefix (stringlengths 0-8.16k) | middle (stringlengths 3-512) | suffix (stringlengths 0-8.17k) |
|---|---|---|---|---|---|---|---|---|
sinotradition/sinoera | sinoera/ganzhi/guimao33.py | Python | apache-2.0 | 231 | 0.031111 |
#!/usr/bin/python
#coding=utf-8
'''
@author: sheng
@contact: [email protected]
@copyright: License according to the project license.
'''
NAME='guimao33'
SPELL='guǐmǎo'
CN='癸卯'
SEQ='40'
if __name__=='__main__':
pass
| |
lexotero/python-redsys | redsys/__init__.py | Python | mit | 68 | 0 |
from .Commerce import Commerce
from .Transaction import Transaction
|
frc1418/2015-robot | robot/common/distance_sensors.py | Python | apache-2.0 | 1,861 | 0.014508 |
import wpilib
import math
class SharpIR2Y0A02:
'''
Sharp IR sensor GP2Y0A02YK0F
Long distance sensor: 20cm to 150cm
Output is in centimeters
Distance can be calculated using 62.28*x ^ -1.092
'''
def __init__(self,num):
self.distance = wpilib.AnalogInput(num)
def getDistance(self):
'''Returns distance in centimeters'''
# Don't allow zero/negative values
v = max(self.distance.getVoltage(), 0.00001)
d = 62.28*math.pow(v, -1.092)
# Constrain output
return max(min(d, 145.0), 22.5)
def getVoltage(self):
return self.distance.getVoltage()
class SharpIRGP2Y0A41SK0F:
'''
Sharp IR sensor GP2Y0A41SK0F
Short distance sensor: 4cm to 40cm
Output is in centimeters
'''
#short Distance
def __init__(self,num):
self.distance = wpilib.AnalogInput(num)
def getDistance(self):
'''Returns distance in centimeters'''
# Don't allow zero/negative values
v = max(self.distance.getVoltage(), 0.00001)
d = 12.84*math.pow(v, -0.9824)
# Constrain output
return max(min(d, 25), 4.0)
def getVoltage(self):
return self.distance.getVoltage()
class CombinedSensor:
def __init__(self, longDist, longOff, shortDist, shortOff):
self.longDistance = longDist
self.shortDistance = shortDist
self.longOff = longOff
self.shortOff = shortOff
def getDistance(self):
long = self.longDistance.getDistance()
short = self.shortDistance.getDistance()
#if short < 25:
# return short - self.shortOff
#else:
return max(long - self.longOff, 0)
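The docstrings above give power-law fits for converting the sensor's analog voltage to a distance. As a minimal numerical sketch of the long-range fit (the sample voltages are made up for illustration, not readings from the 2015-robot code):

```python
import math

def long_range_cm(voltage):
    # Same shape as SharpIR2Y0A02.getDistance: guard against zero/negative
    # readings, apply the 62.28 * v ** -1.092 fit, then clamp to the
    # sensor's usable 22.5-145 cm window.
    v = max(voltage, 0.00001)
    d = 62.28 * math.pow(v, -1.092)
    return max(min(d, 145.0), 22.5)

print(round(long_range_cm(0.4), 1))  # raw fit is ~169 cm, clamped to 145.0
print(round(long_range_cm(2.5), 1))  # ~22.9 cm, near the short end of the range
```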
|
SymbiFlow/pycapnp | buildutils/detect.py | Python | bsd-2-clause | 4,766 | 0.000629 |
"""Detect zmq version"""
#
# Copyright (C) PyZMQ Developers
#
# This file is part of pyzmq, copied and adapted from h5py.
# h5py source used under the New BSD license
#
# h5py: <http://code.google.com/p/h5py/>
#
# Distributed under the terms of the New BSD License. The full license is in
# the file COPYING.BSD, distributed as part of this software.
#
#
# Adapted for use in pycapnp from pyzmq. See https://github.com/zeromq/pyzmq
# for original project.
import shutil
import sys
import os
import logging
import platform
from distutils import ccompiler
from distutils.ccompiler import get_default_compiler
import tempfile
from .misc import get_compiler, get_output_error
from .patch import patch_lib_paths
pjoin = os.path.join
#
# Utility functions (adapted from h5py: http://h5py.googlecode.com)
#
def test_compilation(cfile, compiler=None, **compiler_attrs):
"""Test simple compilation with given settings"""
cc = get_compiler(compiler, **compiler_attrs)
efile, _ = os.path.splitext(cfile)
cpreargs = lpreargs = []
if sys.platform == 'darwin':
# use appropriate arch for compiler
if platform.architecture()[0] == '32bit':
if platform.processor() == 'powerpc':
cpu = 'ppc'
else:
cpu = 'i386'
cpreargs = ['-arch', cpu]
lpreargs = ['-arch', cpu, '-undefined', 'dynamic_lookup']
else:
# allow for missing UB arch, since it will still work:
lpreargs = ['-undefined', 'dynamic_lookup']
if sys.platform == 'sunos5':
if platform.architecture()[0] == '32bit':
lpreargs = ['-m32']
else:
lpreargs = ['-m64']
extra_compile_args = compiler_attrs.get('extra_compile_args', [])
if os.name != 'nt':
extra_compile_args += ['--std=c++14']
extra_link_args = compiler_attrs.get('extra_link_args', [])
if cc.compiler_type == 'msvc':
extra_link_args += ['/MANIFEST']
objs = cc.compile([cfile], extra_preargs=cpreargs, extra_postargs=extra_compile_args)
cc.link_executable(objs, efile, extra_preargs=lpreargs, extra_postargs=extra_link_args)
return efile
def detect_version(basedir, compiler=None, **compiler_attrs):
"""Compile, link & execute a test program, in empty directory `basedir`.
The C compiler will be updated with any keywords given via setattr.
Parameters
----------
basedir : path
The location where the test program will be compiled and run
compiler : str
The distutils compiler key (e.g. 'unix', 'msvc', or 'mingw32')
**compiler_attrs : dict
Any extra compiler attributes, which will be set via ``setattr(cc)``.
Returns
-------
A dict of properties for zmq compilation, with the following two keys:
vers : tuple
The ZMQ version as a tuple of ints, e.g. (2,2,0)
settings : dict
The compiler options used to compile the test function, e.g. `include_dirs`,
`library_dirs`, `libs`, etc.
"""
if compiler is None:
compiler = get_default_compiler()
cfile = pjoin(basedir, 'vers.cpp')
shutil.copy(pjoin(os.path.dirname(__file__), 'vers.cpp'), cfile)
# check if we need to link against Realtime Extensions library
if sys.platform.startswith('linux'):
cc = ccompiler.new_compiler(compiler=compiler)
cc.output_dir = basedir
if not cc.has_function('timer_create'):
if 'libraries' not in compiler_attrs:
compiler_attrs['libraries'] = []
compiler_attrs['libraries'].append('rt')
cc = get_compiler(compiler=compiler, **compiler_attrs)
efile = test_compilation(cfile, compiler=cc)
patch_lib_paths(efile, cc.library_dirs)
rc, so, se = get_output_error([efile])
if rc:
msg = "Error running version detection script:\n%s\n%s" % (so, se)
logging.error(msg)
raise IOError(msg)
handlers = {'vers': lambda val: tuple(int(v) for v in val.split('.'))}
props = {}
for line in (x for x in so.split('\n') if x):
key, val = line.split(':')
props[key] = handlers[key](val)
return props
def test_build(**compiler_attrs):
"""do a test build of libcapnp"""
tmp_dir = tempfile.mkdtemp()
# line()
# info("Configure: Autodetecting Cap'n Proto settings...")
# info(" Custom Cap'n Proto dir: %s" % prefix)
try:
detected = detect_version(tmp_dir, None, **compiler_attrs)
finally:
erase_dir(tmp_dir)
# info(" Cap'n Proto version detected: %s" % v_str(detected['vers']))
return detected
def erase_dir(path):
"""Erase directory"""
try:
shutil.rmtree(path)
except Exception:
pass
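As a small illustration of the parsing step at the end of detect_version, each "key: value" line printed by the compiled test program is pushed through the handlers table; the version string below is hypothetical, not actual pycapnp output:

```python
# Hypothetical stdout captured from the compiled vers.cpp test program.
so = "vers: 0.8.0\n"

handlers = {'vers': lambda val: tuple(int(v) for v in val.split('.'))}
props = {}
for line in (x for x in so.split('\n') if x):
    key, val = line.split(':')
    props[key] = handlers[key](val)  # int() tolerates the leading space in val

print(props)  # {'vers': (0, 8, 0)}
```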
|
incredible-vision/show-and-tell | data_loader.py | Python | mit | 3,497 | 0.003432 |
import torch
import torchvision.transforms as transforms
import torch.utils.data as data
import os
import json
import pickle
import argparse
from PIL import Image
import numpy as np
from utils import Vocabulary
class CocoDataset(data.Dataset):
def __init__(self, root, anns, vocab, mode='train',transform=None):
self.root = root
self.anns = json.load(open(anns))
self.vocab = pickle.load(open(vocab, 'rb'))
self.transform = transform
self.data = [ann for ann in self.anns if ann['split'] == mode]
def __getitem__(self, index):
data = self.data
vocab = self.vocab
# load image
path = os.path.join(self.root, data[index]['file_path'])
img = Image.open(path).convert('RGB')
if self.transform is not None:
img = self.transform(img)
# load caption
cap = data[index]['final_caption']
caption = []
caption.append(vocab('<start>'))
caption.extend([vocab(word) for word in cap])
caption.append(vocab('<end>'))
target = torch.IntTensor(caption)
return img, target, data[index]['imgid']
def __len__(self):
return len(self.data)
def collate_fn(data):
# sort the data in descending order
data.sort(key=lambda x: len(x[1]), reverse=True)
images, captions, imgids = zip(*data)
# merge images (from tuple of 3D tensor to 4D tensor).
images = torch.stack(images, 0)
# merge captions (from tuple of 1D tensor to 2D tensor).
lengths = [len(cap) for cap in captions]
targets = torch.zeros(len(captions), max(lengths)).long()
for i, cap in enumerate(captions):
end = lengths[i]
targets[i, :end] = cap[:end]
return images, targets, lengths, imgids
def get_loader(opt, mode='train', shuffle=True, num_workers=1, transform=None):
coco = CocoDataset(root=opt.root_dir,
anns=opt.data_json,
vocab=opt.vocab_path,
mode=mode,
transform=transform)
data_loader = torch.utils.data.DataLoader(dataset=coco,
batch_size=opt.batch_size,
shuffle=shuffle,
num_workers=num_workers,
collate_fn=collate_fn)
return data_loader
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--root_dir', type=str, default='/home/myunggi/Research/show-and-tell', help="root directory of the project")
parser.add_argument('--data_json', type=str, default='data/data.json', help='input data list which includes captions and image information')
parser.add_argument('--vocab_path', type=str, default='data/vocab.pkl', help='vocabulary wrapper')
parser.add_argument('--crop_size', type=int, default=224, help='image crop size')
parser.add_argument('--batch_size', type=int, default=128, help='batch size')
args = parser.parse_args()
transform = transforms.Compose([
transforms.RandomCrop(args.crop_size),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225))
])
data_loader = get_loader(args, transform=transform)
total_iter = len(data_loader)
for i, (img, target, length, imgid) in enumerate(data_loader):
print('done')
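A toy run of the padding performed by collate_fn, with tensors invented for illustration rather than drawn from the COCO data: captions of unequal length are copied into a zero-filled 2D target so they can be batched together:

```python
import torch

captions = [torch.IntTensor([1, 5, 7, 2]), torch.IntTensor([1, 9, 2])]
lengths = [len(cap) for cap in captions]
targets = torch.zeros(len(captions), max(lengths)).long()
for i, cap in enumerate(captions):
    targets[i, :lengths[i]] = cap  # shorter captions stay zero-padded on the right

print(targets)
# tensor([[1, 5, 7, 2],
#         [1, 9, 2, 0]])
```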
|
norayr/unisubs | apps/auth/migrations/0032_remove_thumb_options.py | Python | agpl-3.0 | 21,604 | 0.008193 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
pass
def backwards(self, orm):
pass
models = {
'accountlinker.thirdpartyaccount': {
'Meta': {'unique_together': "(('type', 'username'),)", 'object_name': 'ThirdPartyAccount'},
'full_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'oauth_access_token': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'oauth_refresh_token': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'auth.announcement': {
'Meta': {'ordering': "['-created']", 'object_name': 'Announcement'},
'content': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'created': ('django.db.models.fields.DateTimeField', [], {}),
'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'auth.awards': {
'Meta': {'object_name': 'Awards'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'points': ('django.db.models.fields.IntegerField', [], {}),
'type': ('django.db.models.fields.IntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True'})
},
'auth.customuser': {
'Meta': {'object_name': 'CustomUser', '_ormbases': ['auth.User']},
'autoplay_preferences': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'award_points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'biography': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'can_send_messages': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '63', 'blank': 'True'}),
'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'is_partner': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'notify_by_email': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'notify_by_message': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Partner']", 'null': 'True', 'blank': 'True'}),
'picture': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'blank': 'True'}),
'preferred_language': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'third_party_accounts': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'users'", 'symmetrical': 'False', 'to': "orm['accountlinker.ThirdPartyAccount']"}),
'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}),
'valid_email': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'videos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.Video']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.emailconfirmation': {
'Meta': {'object_name': 'EmailConfirmation'},
'confirmation_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sent': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.logintoken': {
'Meta': {'object_name': 'LoginToken'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'login_token'", 'unique': 'True', 'to': "orm['auth.CustomUser']"})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'auth.userlanguage': {
'Meta': {'unique_together': "(['user', 'language'],)", 'object_name': 'UserLanguage'},
'follow_requests': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'proficiency': ('dj
|
johan--/Geotrek | geotrek/common/tests/test_parsers.py | Python | bsd-2-clause | 5,465 | 0.004762 |
# -*- encoding: utf-8 -*-
import mock
import os
from shutil import rmtree
from tempfile import mkdtemp
from django.test import TestCase
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test.utils import override_settings
from django.template.base import TemplateDoesNotExist
from paperclip.models import Attachment
from geotrek.common.models import Organism, FileType
from geotrek.common.parsers import ExcelParser, AttachmentParserMixin
class OrganismParser(ExcelParser):
model = Organism
fields = {'organism': 'nOm'}
class OrganismEidParser(ExcelParser):
model = Organism
fields = {'organism': 'nOm'}
eid = 'organism'
class AttachmentParser(AttachmentParserMixin, OrganismEidParser):
non_fields = {'attachments': 'photo'}
class ParserTests(TestCase):
def test_bad_parser_class(self):
with self.assertRaises(CommandError) as cm:
call_command('import', 'geotrek.common.DoesNotExist', '', verbosity=0)
self.assertEqual(unicode(cm.exception), u"Failed to import parser class 'geotrek.common.DoesNotExist'")
def test_bad_filename(self):
with self.assertRaises(CommandError) as cm:
call_command('import', 'geotrek.common.tests.test_parsers.OrganismParser', 'find_me/I_am_not_there.shp', verbosity=0)
self.assertEqual(unicode(cm.exception), u"File does not exists at: find_me/I_am_not_there.shp")
def test_create(self):
filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
call_command('import', 'geotrek.common.tests.test_parsers.OrganismParser', filename, verbosity=0)
self.assertEqual(Organism.objects.count(), 1)
organism = Organism.objects.get()
self.assertEqual(organism.organism, u"Comité Théodule")
def test_duplicate_without_eid(self):
filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
call_command('import', 'geotrek.common.tests.test_parsers.OrganismParser', filename, verbosity=0)
call_command('import', 'geotrek.common.tests.test_parsers.OrganismParser', filename, verbosity=0)
self.assertEqual(Organism.objects.count(), 2)
def test_unmodified_with_eid(self):
filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
call_command('import', 'geotrek.common.tests.test_parsers.OrganismEidParser', filename, verbosity=0)
call_command('import', 'geotrek.common.tests.test_parsers.OrganismEidParser', filename, verbosity=0)
self.assertEqual(Organism.objects.count(), 1)
def test_updated_with_eid(self):
filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
filename2 = os.path.join(os.path.dirname(__file__), 'data', 'organism2.xls')
call_command('import', 'geotrek.common.tests.test_parsers.OrganismEidParser', filename, verbosity=0)
call_command('import', 'geotrek.common.tests.test_parsers.OrganismEidParser', filename2, verbosity=0)
self.assertEqual(Organism.objects.count(), 2)
organisms = Organism.objects.order_by('pk')
self.assertEqual(organisms[0].organism, u"Comité Théodule")
self.assertEqual(organisms[1].organism, u"Comité Hippolyte")
def test_report_format_text(self):
parser = OrganismParser()
self.assertRegexpMatches(parser.report(), '0/0 lines imported.')
self.assertNotRegexpMatches(parser.report(), '<div id=\"collapse-\$celery_id\" class=\"collapse\">')
def test_report_format_html(self):
parser = OrganismParser()
self.assertRegexpMatches(parser.report(output_format='html'), '<div id=\"collapse-\$celery_id\" class=\"collapse\">')
def test_report_format_bad(self):
parser = OrganismParser()
with self.assertRaises(TemplateDoesNotExist):
parser.report(output_format='toto')
@override_settings(MEDIA_ROOT=mkdtemp('geotrek_test'))
class AttachmentParserTests(TestCase):
def setUp(self):
self.filetype = FileType.objects.create(type=u"Photographie")
def tearDown(self):
rmtree(settings.MEDIA_ROOT)
@mock.patch('requests.get')
def test_attachment(self, mocked):
mocked.return_value.status_code = 200
mocked.return_value.content = ''
filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
call_command('import', 'geotrek.common.tests.test_parsers.AttachmentParser', filename, verbosity=0)
organism = Organism.objects.get()
attachment = Attachment.objects.get()
self.assertEqual(attachment.content_object, organism)
self.assertEqual(attachment.attachment_file.name, 'paperclip/common_organism/{pk}/titi.png'.format(pk=organism.pk))
self.assertEqual(attachment.filetype, self.filetype)
@mock.patch('requests.get')
def test_attachment_not_updated(self, mocked):
mocked.return_value.status_code = 200
mocked.return_value.content = ''
filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
call_command('import', 'geotrek.common.tests.test_parsers.AttachmentParser', filename, verbosity=0)
call_command('import', 'geotrek.common.tests.test_parsers.AttachmentParser', filename, verbosity=0)
self.assertEqual(mocked.call_count, 1)
self.assertEqual(Attachment.objects.count(), 1)
|
bskari/sparkfun-avc | main.py | Python | mit | 13,115 | 0.001067 |
"""Main command module that starts the different threads."""
from ws4py.server.cherrypyserver import WebSocketPlugin
from ws4py.server.cherrypyserver import WebSocketTool
import argparse
import cherrypy
import datetime
import logging
import os
import serial
import signal
import subprocess
import sys
import threading
import time
from control.command import Command
from control.driver import Driver, STEERING_GPIO_PIN, STEERING_NEUTRAL_US, THROTTLE_GPIO_PIN, THROTTLE_NEUTRAL_US
from control.simple_waypoint_generator import SimpleWaypointGenerator
from control.chase_waypoint_generator import ChaseWaypointGenerator
from control.extension_waypoint_generator import ExtensionWaypointGenerator
from control.sup800f import switch_to_nmea_mode
from control.sup800f_telemetry import Sup800fTelemetry
from control.telemetry import Telemetry
from control.telemetry_dumper import TelemetryDumper
from control.web_telemetry.status_app import StatusApp as WebTelemetryStatusApp
from messaging import config
from messaging.async_logger import AsyncLogger, AsyncLoggerReceiver
from messaging.message_consumer import consume_messages
from messaging.message_producer import MessageProducer
from monitor.status_app import StatusApp as MonitorApp
from monitor.web_socket_logging_handler import WebSocketLoggingHandler
# pylint: disable=global-statement
# pylint: disable=broad-except
def override_imports_for_non_rpi():
"""Overrides modules that only work on the Raspberry Pi. Importing RPIO
(used in button) on a non Raspberry Pi raises a SystemError, so for testing
on other systems, just ignore it.
"""
class Dummy(object): # pylint: disable=missing-docstring,too-few-public-methods
def __getattr__(self, attr):
return lambda *arg, **kwarg: time.sleep(0.01)
# pylint: disable=invalid-name
global Button
Button = lambda *arg: Dummy()
serial.Serial = lambda *arg: Dummy()
global Driver
Driver = lambda *arg: Dummy()
global Sup800fTelemetry
Sup800fTelemetry = lambda *arg: Dummy()
global switch_to_nmea_mode
switch_to_nmea_mode = lambda *arg: Dummy()
# Ignore messages
drop = lambda message: None
drop2 = lambda: consume_messages(config.COMMAND_FORWARDED_EXCHANGE, drop)
thread = threading.Thread(target=drop2)
thread.name = config.COMMAND_FORWARDED_EXCHANGE
thread.start()
try:
from control.button import Button
except SystemError:
print('Disabling button because not running on Raspberry Pi')
override_imports_for_non_rpi()
THREADS = []
POPEN = None
DRIVER = None
EMIT_INITIALIZED = False
class CherryPyServer(threading.Thread):
"""Runs the various web apps in a thread."""
def __init__(self, port, address, telemetry, waypoint_generator):
super(CherryPyServer, self).__init__()
self.name = self.__class__.__name__
# Web monitor
config = MonitorApp.get_config(os.path.abspath(os.getcwd()))
status_app = cherrypy.tree.mount(
MonitorApp(telemetry, waypoint_generator, port),
'/',
config
)
cherrypy.config.update({
'server.socket_host': address,
'server.socket_port': port,
'server.ssl_module': 'builtin',
'server.ssl_certificate': 'control/web_telemetry/cert.pem',
'server.ssl_private_key': 'control/web_telemetry/key.pem',
'engine.autoreload.on': False,
})
WebSocketPlugin(cherrypy.engine).subscribe()
cherrypy.tools.websocket = WebSocketTool()
# Web telemetry
config = WebTelemetryStatusApp.get_config(os.path.abspath(os.getcwd()))
web_telemetry_app = cherrypy.tree.mount(
WebTelemetryStatusApp(telemetry, port),
'/telemetry',
config
)
# OMG, shut up CherryPy, nobody cares about your problems
for app in (status_app, web_telemetry_app, cherrypy):
app.log.access_log.setLevel(logging.ERROR)
app.log.error_log.setLevel(logging.ERROR)
def run(self):
"""Runs the thread and server in a thread."""
cherrypy.engine.start()
@staticmethod
def kill():
"""Stops the thread and server."""
cherrypy.engine.exit()
def terminate(signal_number, stack_frame): # pylint: disable=unused-argument
"""Terminates the program. Used when a signal is received."""
print(
'Received signal {signal_number}, quitting'.format(
signal_number=signal_number
)
)
if POPEN is not None and POPEN.poll() is None:
print('Killing image capture')
try:
POPEN.kill()
except OSError:
pass
DRIVER.drive(0.0, 0.0)
time.sleep(0.2)
try:
with open('/dev/pi-blaster', 'w') as blaster:
time.sleep(0.1)
blaster.write(
'{pin}={throttle}\n'.format(
pin=THROTTLE_GPIO_PIN,
throttle=THROTTLE_NEUTRAL_US
)
)
time.sleep(0.1)
blaster.write(
'{pin}={steering}\n'.format(
pin=STEERING_GPIO_PIN,
steering=STEERING_NEUTRAL_US
)
)
time.sleep(0.1)
except IOError:
pass
for socket in os.listdir(os.sep.join(('.', 'messaging', 'sockets'))):
MessageProducer(socket).kill()
time.sleep(0.1)
for thread in THREADS:
thread.kill()
thread.join()
# Some threads should still be active
expected = set(('MainThread', '_TimeoutMonitor'))
actives = set((thread.name for thread in threading.enumerate()))
if not (actives <= expected):
print('Trying to exit while {} threads are still active!'.format(
threading.active_count()
))
for thread in threading.enumerate():
print(thread.name)
sys.exit(0)
def get_configuration(value, default):
"""Returns a system configuration value."""
if value in os.environ:
return os.environ[value]
return default
def start_threads(
waypoint_generator,
logger,
web_socket_handler,
max_throttle,
kml_file_name,
):
"""Runs everything."""
logger.info('Creating Telemetry')
telemetry = Telemetry(kml_file_name)
telemetry_dumper = TelemetryDumper(
telemetry,
waypoint_generator,
web_socket_handler
)
logger.info('Done creating Telemetry')
global DRIVER
DRIVER = Driver(telemetry)
DRIVER.set_max_throttle(max_throttle)
logger.info('Setting SUP800F to NMEA mode')
serial_ = serial.Serial('/dev/ttyAMA0', 115200)
serial_.setTimeout(1.0)
for _ in range(10):
serial_.readline()
try:
switch_to_nmea_mode(serial_)
except: # pylint: disable=W0702
logger.error('Unable to set mode')
for _ in range(10):
serial_.readline()
logger.info('Done')
# The following objects must be created in order, because of message
# exchange dependencies:
# sup800f_telemetry: reads from command forwarded
# command: reads from command, writes to command forwarded
# button: writes to command
# cherry_py_server: writes to command
# TODO(2016-08-21) Have something better than sleeps to work around race
# conditions
logger.info('Creating threads')
sup800f_telemetry = Sup800fTelemetry(serial_)
time.sleep(0.5)
command = Command(telemetry, DRIVER, waypoint_generator)
time.sleep(0.5)
button = Button()
port = int(get_configuration('PORT', 8080))
address = get_configuration('ADDRESS', '0.0.0.0')
cherry_py_server = CherryPyServer(
port,
address,
telemetry,
waypoint_generator
)
time.sleep(0.5)
global THREADS
THREADS += (
button,
cherry_py_server,
command,
sup800f_telemetry,
telemetry_dumper,
)
for thread in THREADS:
thread.start()
logger.info('Started all threads')
# Use a fake timeout so that the main thread can still receive signals
sup800f_teleme
|
stefco/dotfiles | winscripts/plexmovie.py | Python | mit | 4,386 | 0.003648 |
"Add a movie to Plex."
import sys
from pathlib import Path
from argparse import ArgumentParser
from tkinter import filedialog, messagebox, simpledialog, Tk, Frame, Label
from tkinter.ttk import Combobox, Button
class FeaturettePicker:
FEATURETTES = {
"Behind the Scenes": "behindthescenes",
"Deleted Scenes": "deleted",
"Featurette": "featurette",
"Interview": "interview",
"Scene": "scene",
"Short": "short",
"Trailer": "trailer",
"Other": "other",
}
def __init__(self, parent, file):
self.file = Path(file)
self.label = Label(parent, text=self.file.name, justify='left')
# can also position using "grid" instead of "pack", but not both
self.label.pack(fill="x", padx=5, pady=5)
self.parent = parent
self.parent.bind("<Return>", self.ok)
self.parent.bind("<Escape>", self.cancel)
self.box = Frame(parent)
self.ok_button = Button(self.box, text="Add", command=self.ok, default='active')
self.ok_button.pack(padx=5, pady=5, side='right')
self.cancel_button = Button(self.box, text="Cancel", command=self.cancel)
self.cancel_button.pack(padx=5, pady=5, side='right')
self.combo = Combobox(parent, values=[*self.FEATURETTES])
self.combo.pack(fill="x", padx=5, pady=5)
self.box.pack()
self.result = None
def run(self):
self.parent.mainloop()
return self.result
def cancel(self):
try:
self.parent.withdraw()
finally:
self.parent.quit()
def ok(self):
self.result = self.combo.get()
self.cancel()
def get_parser():
parser = ArgumentParser()
parser.add_argument('infile')
return parser
def main(infile):
try:
tkr = Tk()
tkr.withdraw()
src = Path(infile)
root = Path(src.anchor)
outname = simpledialog\
.askstring("Movie Name", f"Selected: {src}.\n\nName of movie (and year in parens):",
initialvalue=src.name, parent=tkr) or sys.exit()
dst = root/'media-library'/'movies'/outname
ext = simpledialog\
.askstring("Extension",
"File extension (If you want to specify a variant, do so here by prepending "
"it to the extension, e.g. ` - [OldVersion].mp4`):",
initialvalue=src.suffix, parent=tkr)
out = dst/(outname+ext)
files = []
while messagebox.askyesno("Special Features", "Add more files as special features?"):
for f in filedialog.askopenfilenames(parent=tkr, initialdir="."):
fsrc = Path(f)
ftype = FeaturettePicker(Tk(), fsrc).run()
if ftype is None:
return
name = simpledialog.askstring("Featurette Name", "Featurette Name:",
initialvalue=fsrc.name[:-len(fsrc.suffix)], parent=tkr)
if name is None:
return
fext = simpledialog.askstring("Extension",
f"File extension for {fsrc.name}:",
initialvalue=fsrc.suffix, parent=tkr)
fdst = dst/f"{name}-{FeaturettePic
|
ker.FEATURETTES[ftype]}{fext}"
files.append((fsrc, fdst))
msg = ("\n\nFeatures:\n\n"+"\n".join(f"{s} -> {d.name}" for (s, d) in files)+"\n\n") if files else ""
if not messagebox.askokcancel("Proceed?",
f"Ready to link {src.name} -> {out}. {msg}Proceed?",
parent=tkr):
return
dst.mkdir(exist_ok=True)
for s, d in [(src, out)] + files:
if d.exists():
overwrite = messagebox.askyesnocancel("Overwrite?", f"{d} exists. Overwrite?")
if overwrite is None:
return
if overwrite:
d.unlink()
else:
continue
s.link_to(d)
messagebox.showinfo("Success", "Success.")
except Exception as e:
messagebox.showerror("Fatal Error", f"Fatal error: {e}")
raise
if __name__ == "__main__":
main(get_parser().parse_args().infile)
|
clu8/RainbowTable | crack.py | Python | mit | 2,202 | 0.024069 |
#!/usr/bin/env python3
import rainbow
import hashlib
import string
import time
import random
"""SHA-256 hash function
Precondition: Input plaintext as string
Postcondition: Returns hash as string
"""
def sha256(plaintext):
return hashlib.sha256(bytes(plaintext, 'utf-8')).hexdigest()
"""Returns a reduction function which generates an n-digit lowercase password from a hash
"""
def reduce_lower(n):
"""Reduction function
Precondition: hash is H(previousPlaintext)
Postcondition: returns randomly distributed n-digit lowercase plaintext password
"""
def result(hash, col):
plaintextKey = (int(hash[:9], 16) ^ col) % (26 ** n)
plaintext = ""
for _ in range(n):
plaintext += string.ascii_lowercase[plaintextKey % 26]
plaintextKey //= 26
return plaintext
return result
"""Returns a function which generates a random n-digit lowercase password
"""
def gen_lower(n):
def result():
password = ""
for _ in range(n):
password += random.choice(string.ascii_lowercase)
return password
return result
"""Precondition: Input a function which generates a random password, or input no arguments to generate a random password
Postcondition: Cracks H(password) and prints elapsed time
"""
def test(table, hash_function, gen_password_function, password=""):
if password == "":
password = gen_password_function()
print("Cracking password: {0}\nH(password): {1}".format(password, hash_function(password)))
cracked = table.crack(hash_function(password))
if cracked:
print("Success! Password: {0}".format(cracked))
return True
else:
print("Unsuccessful :(")
return False
# Tests random passwords multiple times and prints success rate and average crack time.
def bulk_test(table, hash_function, gen_password_function, numTests):
start = time.time()
numSuccess = 0
for i in range(numTests):
print("\nTest {0} of {1}".format(i + 1, numTests))
numSuccess += test(table, hash_function, gen_password_function)
print("""\n{0} out of {1} random hashes were successful!\n
Average time per hash (including failures): {2} secs.""" \
.format(numSuccess, numTests, (time.time() - start) / numTests))
table = rainbow.RainbowTable(sha256, reduce_lower(4), gen_lower(4))
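To make the hash/reduce chaining concrete, here is a short sketch of how reduce_lower's output feeds back into sha256 when a chain is walked; the starting password and chain length are arbitrary choices for illustration, not part of crack.py:

```python
import hashlib
import string

def sha256(plaintext):
    return hashlib.sha256(bytes(plaintext, 'utf-8')).hexdigest()

def reduce_lower4(digest, col):
    # 4-character lowercase reduction, the same recipe as reduce_lower(4) above
    key = (int(digest[:9], 16) ^ col) % (26 ** 4)
    out = ""
    for _ in range(4):
        out += string.ascii_lowercase[key % 26]
        key //= 26
    return out

pw = "abcd"
for col in range(3):  # walk three links of a chain starting from "abcd"
    pw = reduce_lower4(sha256(pw), col)
    print(col, pw)
```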
|
xe1gyq/GiekIs | examples/au.py | Python | apache-2.0 | 1,636 | 0.011002 |
#!/usr/bin/env python3
# NOTE: this example requires PyAudio because it uses the Microphone class
import speech_recognition as sr
# this is called from the background thread
def callback(recognizer, audio):
# received audio data, now we'll recognize it using Google Speech Recognition
try:
# for testing purposes, we're just using the default API key
# to use another API key, use `r.recognize_google(audio, key="GOOGLE_SPEECH_RECOGNITION_API_KEY")`
# instead of `r.recognize_google(audio)`
# print("Google Speech Recognition thinks you said " + recognizer.recognize_google(audio))
r.recognize_google(audio, key="")
except sr.UnknownValueError:
print("Google Speech Recognition could not understand audio")
except sr.RequestError as e:
print("Could not request results from Google Speech Recognition service; {0}".format(e))
r = sr.Recognizer()
m = sr.Microphone()
with m as source:
r.adjust_for_ambient_noise(source) # we only need to calibrate once, before we start listening
# start listening in the background (note that we don't have to do this inside a `with` statement)
stop_listening = r.listen_in_background(m, callback)
# `stop_listening` is now a function that, when called, stops background listening
# do some other computation for 5 seconds, then stop listening and keep doing other computations
import time
for _ in range(50): time.sleep(0.1) # we're still listening even though the main thread is doing other things
stop_listening() # calling this function requests that the background listener stop listening
while True: time.sleep(0.1)
|
BaluDontu/docker-volume-vsphere | esx_service/utils/auth.py | Python | apache-2.0 | 13,215 | 0.003405 |
# Copyright 2016 VMware, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Module to provide APIs for authorization checking for VMDK ops.
"""
import logging
import auth_data
import sqlite3
import convert
import auth_data_const
import volume_kv as kv
import threadutils
# All supported vmdk commands
CMD_CREATE = 'create'
CMD_REMOVE = 'remove'
CMD_ATTACH = 'attach'
CMD_DETACH = 'detach'
SIZE = 'size'
# thread local storage in this module namespace
thread_local = threadutils.get_local_storage()
def get_auth_mgr():
""" Get a connection to auth DB. """
global thread_local
if not hasattr(thread_local, '_auth_mgr'):
thread_local._auth_mgr = auth_data.AuthorizationDataManager()
thread_local._auth_mgr.connect()
return thread_local._auth_mgr
def get_tenant(vm_uuid):
""" Get tenant which owns this VM by querying the auth DB. """
_auth_mgr = get_auth_mgr()
try:
cur = _auth_mgr.conn.execute(
"SELECT tenant_id FROM vms WHERE vm_id = ?",
(vm_uuid, )
)
result = cur.fetchone()
except sqlite3.Error as e:
logging.error("Error %s when querying from vms table for vm_id %s", e, vm_uuid)
return str(e), None, None
if result:
logging.debug("get tenant vm_uuid=%s tenant_id=%s", vm_uuid, result[0])
tenant_uuid = None
tenant_name = None
if result:
tenant_uuid = result[0]
try:
cur = _auth_mgr.conn.execute(
"SELECT name FROM tenants WHERE id = ?",
(tenant_uuid, )
)
result = cur.fetchone()
except sqlite3.Error as e:
logging.error("Error %s when querying from tenants table for tenant_id %s",
e, tenant_uuid)
return str(e), None, None
if result:
tenant_name = result[0]
logging.debug("Found tenant_uuid %s, tenant_name %s", tenant_uuid, tenant_name)
return None, tenant_uuid, tenant_name
def get_privileges(tenant_uuid, datastore):
""" Return privileges for given (tenant_uuid, datastore) pair by
querying the auth DB.
"""
_auth_mgr = get_auth_mgr()
privileges = []
logging.debug("get_privileges tenant_uuid=%s datastore=%s", tenant_uuid, datastore)
try:
cur = _auth_mgr.conn.execute(
"SELECT * FROM privileges WHERE tenant_id = ? and datastore = ?",
(tenant_uuid, datastore)
)
privileges = cur.fetchone()
except sqlite3.Error as e:
logging.error("Error %s when querying privileges table for tenant_id %s and datastore %s",
e, tenant_uuid, datastore)
return str(e), None
return None, privileges
def has_privilege(privileges, type):
""" Check the privileges has the specific type of privilege set. """
if not privileges:
return False
logging.debug("%s=%d", type, privileges[type])
return privileges[type]
def get_vol_size(opts):
""" get volume size. """
if not opts or not opts.has_key(SIZE):
logging.warning("Volume size not specified")
return kv.DEFAULT_DISK_SIZE
return opts[SIZE].upper()
def check_max_volume_size(opts, privileges):
""" Check whether the size of the volume to be created exceeds
the max volume size specified in the privileges.
"""
if privileges:
vol_size_in_MB = convert.convert_to_MB(get_vol_size(opts))
max_vol_size_in_MB = privileges[auth_data_const.COL_MAX_VOLUME_SIZE]
logging.debug("vol_size_in_MB=%d max_vol_size_in_MB=%d",
vol_size_in_MB, max_vol_size_in_MB)
# if the max_vol_size_in_MB read from the DB is 0, there is no
# max volume size limit, so the function should return True
if max_vol_size_in_MB == 0:
return True
return vol_size_in_MB <= max_vol_size_in_MB
else:
# no privileges
return True
def get_total_storage_used(tenant_uuid, datastore):
""" Return total storage used by (tenant_uuid, datastore)
by querying auth DB.
"""
_auth_mgr = get_auth_mgr()
total_storage_used = 0
try:
cur = _auth_mgr.conn.execute(
"SELECT SUM(volume_size) FROM volumes WHERE tenant_id = ? and datastore = ?",
(tenant_uuid, datastore)
)
except sqlite3.Error as e:
logging.error("Error %s when querying storage table for tenant_id %s and datastore %s",
e, tenant_uuid, datastore)
return str(e), total_storage_used
result = cur.fetchone()
if result:
if result[0]:
total_storage_used = result[0]
logging.debug("total storage used for (tenant %s datastore %s) is %s MB", tenant_uuid,
datastore, total_storage_used)
return None, total_storage_used
def check_usage_quota(opts, tenant_uuid, datastore, privileges):
""" Check if the volume can be created without violating the quota. """
if privileges:
vol_size_in_MB = convert.convert_to_MB(get_vol_size(opts))
error_info, total_storage_used = get_total_storage_used(tenant_uuid, datastore)
if error_info:
# cannot get the total_storage_used, to be safe, return False
return False
usage_quota = privileges[auth_data_const.COL_USAGE_QUOTA]
# if the usage_quota read from the DB is 0, there is no usage
# quota, so the function should return True
if usage_quota == 0:
return True
return vol_size_in_MB + total_storage_used <= usage_quota
else:
# no privileges
return True
def check_privileges_for_command(cmd, opts, tenant_uuid, datastore, privileges):
"""
Check whether the (tenant_uuid, datastore) has the privileges to run
the given command.
"""
result = None
cmd_need_mount_privilege = [CMD_ATTACH, CMD_DETACH]
if cmd in cmd_need_mount_privilege:
if not has_privilege(privileges, auth_data_const.COL_MOUNT_VOLUME):
result = "No mount privilege"
if cmd == CMD_CREATE:
if not has_privilege(privileges, auth_data_const.COL_CREATE_VOLUME):
result = "No create privilege"
if not check_max_volume_size(opts, privileges):
result = "volume size exceeds the max volume size limit"
if not check_usage_quota(opts, tenant_uuid, datastore, privileges):
result = "The total volume size exceeds the usage quota"
if cmd == CMD_REMOVE:
if not has_privilege(privileges, auth_data_const.COL_DELETE_VOLUME):
result = "No delete privilege"
return result
def tables_exist():
""" Check tables needed for authorization exist or not. """
_auth_mgr = get_auth_mgr()
try:
cur = _auth_mgr.conn.execute("SELECT name FROM sqlite_master WHERE type = 'table' and name = 'tenants';")
result = cur.fetchall()
except sqlite3.Error as e:
logging.error("Error %s when checking whether table tenants exists or not", e)
return str(e), False
if not result:
error_info = "table tenants does not exist"
logging.error(error_info)
return error_info, False
try:
cur = _auth_mgr.conn.execute("SELECT name FROM sqlite_master WHERE type = 'table' and name = 'vms';")
result = cur.fetchall()
except sqlite3.Error as e:
logging.error("Error %s when checking whether table vms exists or not", e)
return str(e), False
if not result:
error_info = "table vms does not exist"
logging.error(error_inf
|
google-code/abc2esac | abcesac/tests/music.py | Python | gpl-3.0 | 4,909 | 0.017315 |
import unittest
from fractions import Fraction as F
from abcesac.music import *
class KeyTestCase(unittest.TestCase):
def test_get_notes(self):
got = Key('C').get_notes()
want = ['C','D','E','F','G','A','B']
self.assertEquals(got, want)
got = Key('D').get_notes()
want = ['D','E','F#','G','A','B','C#']
self.assertEquals(got, want)
got = Key('E').get_notes()
want = ['E','F#','G#','A','B','C#','D#']
self.assertEquals(got, want)
got = Key('Eb').get_notes()
want = ['Eb','F','G','Ab','Bb','C','D']
self.assertEquals(got, want)
def test_interval(self):
got = Key('C').interval('C', 5)
self.assertEquals(got, 'A')
got = Key('C').interval('B', 5)
self.assertEquals(got, 'G')
got = Key('C').interval('B', 6)
self.assertEquals(got, 'A')
got = Key('G#').interval('B', 6)
self.assertEquals(got, 'A#')
def test_length_value(self):
got = length_value(F(1,8))
self.assertEquals(got, (F(1,8),0))
got = length_value(F(2,8))
self.assertEquals(got, (F(1,4),0))
got = length_value(F(3,8))
self.assertEquals(got, (F(1,4),1))
got = length_value(F(4,8))
self.assertEquals(got, (F(1,2),0))
got = length_value(F(5,8))
self.assertEquals(got, (F(1,2),1))
got = length_value(F(6,8))
self.assertEquals(got, (F(1,2),1))
got = length_value(F(7,8))
self.assertEquals(got, (F(1,2),2))
got = length_value(F(8,8))
self.assertEquals(got, (F(1,1),0))
got = length_value(F(9,8))
self.assertEquals(got, (F(1,1),1))
got = length_value(F(10,8))
self.assertEquals(got, (F(1,1),1))
got = length_value(F(16,8))
self.assertEquals(got, (F(2,1),0))
def test_tuplets(self):
tuplet = Tuplet(3, 2)
tuplet.add_note(Note(name='C', length=F(1,8)))
tuplet.add_note(Note(name='C', length=F(1,8)))
tuplet.add_note(Note(name='C', length=F(1,8)))
self.assertEquals(tuplet.length, F(2,8))
tuplet = Tuplet(3, 2)
tuplet.add_note(Note(name='C', length=F(1,8)))
tuplet.add_note(Note(name='C', length=F(1,8)))
tuplet.add_note(Note(name='C', length=F(1,16)))
tuplet.add_note(Note(name='C', length=F(1,16)))
self.assertEquals(tuplet.length, F(2,8))
tuplet = Tuplet(5, 3)
tuplet.add_note(Note(name='C', length=F(1,8)))
tuplet.add_note(Note(name='C', length=F(1,8)))
tuplet.add_note(Note(name='C', length=F(1,8)))
tuplet.add_note(Note(name='C', length=F(1,8)))
tuplet.add_note(Note(name='C', length=F(1,16)))
tuplet.add_note(Note(name='C', length=F(1,16)))
self.assertEquals(tuplet.length, F(3,8))
tuplet = Tuplet(7, 3)
tuplet.add_note(Note(name='C', length=F(1,16)))
tuplet.add_note(Note(name='C', length=F(1,16)))
tuplet.add_note(Note(name='C', length=F(1,16)))
tuplet.add_note(Note(name='C', length=F(1,16)))
tuplet.add_note(Note(name='C', length=F(1,16)))
tuplet.add_note(Note(name='C', length=F(1,16)))
tuplet.add_note(Note(name='C', length=F(1,16)))
self.assertEquals(tuplet.length, F(3,16))
def test_modes(self):
got = Key('C').mode_scale('major')
want = ['C', 'D', 'E', 'F', 'G', 'A', 'B']
self.assertEquals(got, want)
got = Key('C').mode_scale('ionian')
want = ['C', 'D', 'E', 'F', 'G', 'A', 'B']
self.assertEquals(got, want)
got = Key('C').mode_scale('minor')
want = ['C', 'D', 'Eb', 'F', 'G', 'Ab', 'Bb']
self.assertEquals(got, want)
got = Key('E').mode_scale('minor')
want = ['E', 'F#', 'G', 'A', 'B', 'C', 'D']
self.assertEquals(got, want)
got = Key('C').mode_scale('dorian')
want = ['C', 'D', 'Eb', 'F', 'G', 'A', 'Bb']
self.assertEquals(got, want)
got = Key('C').mode_scale('phrygian')
want = ['C', 'Db', 'Eb', 'F', 'G', 'Ab', 'Bb']
self.assertEquals(got, want)
got = Key('C').mode_scale('lydian')
want = ['C', 'D', 'E', 'F#', 'G', 'A', 'B']
self.assertEquals(got, want)
got = Key('C').mode_scale('mixolydian')
want = ['C', 'D', 'E', 'F', 'G', 'A', 'Bb']
self.assertEquals(got, want)
got = Key('C').mode_scale('aeolian')
want = ['C', 'D', 'Eb', 'F', 'G', 'Ab', 'Bb']
self.assertEquals(got, want)
got = Key('C').mode_scale('locrian')
want = ['C', 'Db', 'Eb', 'F', 'Gb', 'Ab', 'Bb']
self.assertEquals(got, want)
got = Key('D').mode_scale('major')
want = ['D', 'E', 'F#', 'G', 'A', 'B', 'C#']
self.assertEquals(got, want)
if __name__ == '__main__':
unittest.main()
|
tjduigna/exa | exa/core/container.py | Python | apache-2.0 | 24,987 | 0.002441 |
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2019, Exa Analytics Development Team
# Distributed under the terms of the Apache License 2.0
"""
Container
########################
The :class:`~exa.core.container.Container` class is the primary object for
data processing, analysis, and visualization. In brief, containers are composed
of data objects whose contents are used for 2D and 3D visualization. Containers
also provide some content management and data relationship features.
See Also:
For a description of data objects see :mod:`~exa.core.numerical`.
"""
import os
import numpy as np
import pandas as pd
import networkx as nx
from sys import getsizeof
from copy import deepcopy
from collections import defaultdict
from .numerical import check_key, Field, Series, DataFrame
from exa.util.utility import convert_bytes
from exa.util import mpl
import matplotlib.pyplot as plt
class Container(object):
"""
Container class responsible for all features related to data management.
"""
_getter_prefix = 'compute'
_cardinal = None # Name of the cardinal data table
def copy(self, name=None, description=None, meta=None):
"""
Create a copy of the current object (may alter the container's name,
description, and update the metadata if needed).
"""
cls = self.__class__
kwargs = self._rel(copy=True)
kwargs.update(self._data(copy=True))
if name is not None:
kwargs['name'] = name
if description is not None:
kwargs['description'] = description
if meta is not None:
kwargs['meta'] = meta
return cls(**kwargs)
def concat(self, *args, **kwargs):
"""
Concatenate any number of container objects with the current object into
a single container object.
See Also:
For argument description, see :func:`~exa.core.container.concat`.
"""
raise NotImplementedError()
def slice_naive(self, key):
"""
Naively slice each data object in the container by the object's index.
Args:
key: Int, slice, or list by which to extra "sub"-container
Returns:
sub: Sub container of the same format with a view of the data
Warning:
To ensure that a new container is created, use the copy method.
.. code-block:: Python
mycontainer[slice].copy()
"""
kwargs = {'name': self.name, 'description': self.description, 'meta': self.meta}
for name, data in self._data().items():
k = name[1:] if name.startswith('_') else name
kwargs[k] = data.slice_naive(key)
return self.__class__(**kwargs)
def slice_cardinal(self, key):
"""
Slice the container according to its (primary) cardinal axis.
The "cardinal" axis can have any name so long as the name matches a
data object attached to the container. The index name for this object
should also match the value of the cardinal axis.
The algorithm builds a network graph representing the data relationships
(including information about the type of relationship) and then traverses
the edge tree (starting from the cardinal table). Each subsequent child
object in the tree is sliced based on its relationship with its parent.
Note:
Breadth first traversal is performed.
Warning:
This function does not make a copy (if possible): to ensure a new
object is created (a copy) use :func:`~exa.core.container.Container.copy`
after slicing.
.. code-block:: Python
myslice = mycontainer[::2].copy()
See Also:
For data network generation, see :func:`~exa.core.container.Container.network`.
For information about relationships between data objects see
:mod:`~exa.core.numerical`.
"""
if self._cardinal:
cls = self.__class__
key = check_key(self[self._cardinal], key, cardinal=True)
g = self.network(fig=False)
kwargs = {self._cardinal: self[self._cardinal].ix[key], 'name': self.name,
'description': self.description, 'meta': self.meta}
# Next traverse, breadth first, all data objects
for parent, child in nx.bfs_edges(g, self._cardinal):
if child in kwargs:
continue
typ = g.edge_types[(parent, child)]
if self._cardinal in self[child].columns and hasattr(self[child], 'slice_cardinal'):
kwargs[child] = self[child].slice_cardinal(key)
elif typ == 'index-index':
# Select from the child on the parent's index (the parent is
# in the kwargs already).
kwargs[child] = self[child].ix[kwargs[parent].index.values]
elif typ == 'index-column':
# Select from the child where the column (of the same name as
# the parent) is in the parent's index values
cdf = self[child]
kwargs[child] = cdf[cdf[parent].isin(kwargs[parent].index.values)]
elif typ == 'column-index':
# Select from the child where the child's index is in the
# column of the parent. Note that this relationship
cdf = self[child]
cin = cdf.index.name
cols = [col for col in kwargs[parent] if cin == col or (cin == col[:-1] and col[-1].isdigit())]
index = kwargs[parent][cols].stack().astype(np.int64).values
kwargs[child] = cdf[cdf.index.isin(index)]
return cls(**kwargs)
def cardinal_groupby(self):
"""
Create an instance of this class for every step in the cardinal dimension.
"""
if self._cardinal:
g = self.network(fig=False)
cardinal_indexes = self[self._cardinal].index.values
selfs = {}
cls = self.__class__
for cardinal_index in cardinal_indexes:
kwargs = {self._cardinal: self[self._cardinal].ix[[cardinal_index]]}
for parent, child in nx.bfs_edges(g):
if child in kwargs:
continue
typ = g.edge_types[(parent, child)]
if self._cardinal in self[child].columns and hasattr(self[child], 'slice_cardinal'):
kwargs[child] = self[child].slice_cardinal(key)
elif typ == 'index-index':
# Select from the child on the parent's index (the parent is
# in the kwargs already).
kwargs[child] = self[child].ix[kwargs[parent].index.values]
elif typ == 'index-column':
# Select from the child where the column (of the same name as
# the parent) is in the parent's index values
cdf = self[child]
kwargs[child] = cdf[cdf[parent].isin(kwargs[parent].index.values)]
elif typ == 'column-index':
# Select from the child where the child's index is in the
# column of the parent. Note that this relationship
cdf = self[child]
cin = cdf.index.name
cols = [col for col in kwargs[parent] if cin == col or (cin == col[:-1] and col[-1].isdigit())]
index = kwargs[parent][cols].stack().astype(np.int64).values
kwargs[child] = cdf[cdf.index.isin(index)]
selfs[cardinal_index] = cls(**kwargs)
return selfs
def info(self):
"""
Display information about the container's data objects (note that info
on metadata and visualization objects is also provided).
Note:
Sizes are reported in bytes.
|
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_07_01/aio/operations/_service_endpoint_policy_definitions_operations.py | Python | mit | 24,059 | 0.005362 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ServiceEndpointPolicyDefinitionsOperations:
"""ServiceEndpointPolicyDefinitionsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
service_endpoint_policy_name: str,
service_endpoint_policy_definition_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
# Construct URL
url = self._delete_initial.metadata['url']  # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceEndpointPolicyName': self._serialize.url("service_endpoint_policy_name", service_endpoint_policy_name, 'str'),
'serviceEndpointPolicyDefinitionName': self._serialize.url("service_endpoint_policy_definition_name", service_endpoint_policy_definition_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies/{serviceEndpointPolicyName}/serviceEndpointPolicyDefinitions/{serviceEndpointPolicyDefinitionName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
service_endpoint_policy_name: str,
service_endpoint_policy_definition_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified ServiceEndpoint policy definitions.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param service_endpoint_policy_name: The name of the Service Endpoint Policy.
:type service_endpoint_policy_name: str
:param service_endpoint_policy_definition_name: The name of the service endpoint policy
definition.
:type service_endpoint_policy_definition_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
service_endpoint_policy_name=service_endpoint_policy_name,
service_endpoint_policy_definition_name=service_endpoint_policy_definition_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceEndpointPolicyName': self._serialize.url("service_endpoint_policy_name", service_endpoint_policy_name, 'str'),
'serviceEndpointPolicyDefinitionName': self._serialize.url("service_endpoint_policy_definition_name", service_endpoint_policy_definition_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
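    # --- Illustrative usage sketch (not part of the generated file). The client
    # construction below is an assumption for the example; begin_delete returns an
    # AsyncLROPoller, and awaiting poller.result() blocks until the LRO completes.
    #
    # from azure.identity.aio import DefaultAzureCredential
    # from azure.mgmt.network.v2019_07_01.aio import NetworkManagementClient
    #
    # async def delete_definition() -> None:
    #     async with NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>") as client:
    #         poller = await client.service_endpoint_policy_definitions.begin_delete(
    #             resource_group_name="my-rg",
    #             service_endpoint_policy_name="my-policy",
    #             service_endpoint_policy_definition_name="my-definition",
    #         )
    #         await poller.result()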
|
stencila/hub
|
worker/jobs/pull/http_test.py
|
Python
|
apache-2.0
| 1,623
| 0.000616
|
from contextlib import ContextDecorator
from unittest import mock
import httpx
import pytest
from util.working_directory import working_directory
from .http import pull_http
class MockedHttpxStreamResponse(ContextDecorator):
"""
VCR does not like recording HTTPX stream requests so mock it.
"""
def __init__(self, method, url, **kwargs):
self.response = httpx.get(url)
    def __getattr__(self, attr):
        return getattr(self.response, attr)
def __enter__(self, *args, **kwargs):
return self
def __exit__(self, *args, **kwargs):
return self
@pytest.mark.vcr
@mock.patch("httpx.stream", MockedHttpxStreamResponse)
def test_extension_from_mimetype(tempdir):
with working_directory(tempdir.path):
files = pull_http({"url": "https://httpbin.org/get"})
assert files["get.json"]["mimetype"] == "application/json"
files = pull_http({"url": "https://httpbin.org/image/png"}, path="image")
assert files["image.png"]["mimetype"] == "image/png"
files = pull_http({"url": "https://httpbin.org/html"}, path="content")
assert files["content.html"]["mimetype"] == "text/html"
files = pull_http({"url": "https://httpbin.org/html"}, path="foo.bar")
assert files["foo.bar"]["mimetype"] is None
# For some reason the status code does not work with VCR record
def test_status_codes(tempdir):
with pytest.raises(RuntimeError) as excinfo:
pull_http({"url": "https://httpbin.org/status/404"})
assert "Error when fetching https://httpbin.org/status/404: 404" in str(
excinfo.value
)
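# Hedged illustration (not part of the test module) of why the class above can
# stand in for httpx.stream: mock.patch swaps the attribute for the class, so a
# call like httpx.stream("GET", url) constructs a MockedHttpxStreamResponse,
# which issues a plain GET and proxies attribute access to the real response.
#
# with mock.patch("httpx.stream", MockedHttpxStreamResponse):
#     with httpx.stream("GET", "https://httpbin.org/get") as response:
#         assert response.status_code == 200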
|
jtraver/dev
|
python3/graphics/modulefinder1.py
|
Python
|
mit
| 416
| 0
|
#!/usr/bin/env python3
# https://docs.python.org/3/library/modulefinder.html
from modulefinder import ModuleFinder
finder = ModuleFinder()
finder.run_script('graph1.py')
print('Loaded modules:')
for name, mod in finder.modules.items():
print('%s: ' % name, end='')
print(','.join(list(mod.globalnames.keys())[:3]))
print('-'*50)
print('Modules not imported:')
print('\n'.join(finder.badmodules.keys()))
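# Optional extension (assumption, not in the original script): ModuleFinder also
# provides report(), which prints a formatted listing of the loaded and missing
# modules and can replace the manual loops above.
#
# finder.report()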
|
akretion/bank-statement-reconcile
|
__unported__/statement_voucher_killer/voucher.py
|
Python
|
agpl-3.0
| 6,659
| 0.00015
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2013 Camptocamp SA (http://www.camptocamp.com)
# @author Nicolas Bessi
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT
import time
class AccountStatementFromInvoiceLines(orm.TransientModel):
_inherit = "account.statement.from.invoice.lines"
def populate_statement(self, cr, uid, ids, context=None):
"""Taken from account voucher as no hook is available. No function
no refactoring, just trimming the part that generates voucher"""
|
if context is None:
context = {}
statement_id = context.get('statement_id', False)
if not statement_id:
return {'type': 'ir.actions.act_window_close'}
data = self.read(cr, uid, ids, context=context)[0]
line_ids = data['line_ids']
if not line_ids:
            return {'type': 'ir.actions.act_window_close'}
line_obj = self.pool['account.move.line']
statement_obj = self.pool['account.bank.statement']
statement_line_obj = self.pool['account.bank.statement.line']
currency_obj = self.pool['res.currency']
line_date = time.strftime(DEFAULT_SERVER_DATE_FORMAT)
statement = statement_obj.browse(
cr, uid, statement_id, context=context)
# for each selected move lines
for line in line_obj.browse(cr, uid, line_ids, context=context):
ctx = context.copy()
# take the date for computation of currency => use payment date
ctx['date'] = line_date
amount = 0.0
if line.debit > 0:
amount = line.debit
elif line.credit > 0:
amount = -line.credit
if line.amount_currency:
amount = currency_obj.compute(
cr, uid, line.currency_id.id, statement.currency.id,
line.amount_currency, context=ctx)
elif (line.invoice and
line.invoice.currency_id.id != statement.currency.id):
amount = currency_obj.compute(
cr, uid, line.invoice.currency_id.id, statement.currency.id,
amount, context=ctx)
context.update({'move_line_ids': [line.id],
'invoice_id': line.invoice.id})
s_type = 'general'
if line.journal_id.type in ('sale', 'sale_refund'):
s_type = 'customer'
            elif line.journal_id.type in ('purchase', 'purchase_refund'):
s_type = 'supplier'
vals = self._prepare_statement_line_vals(
cr, uid, line, s_type, statement_id, amount, context=context)
statement_line_obj.create(cr, uid, vals, context=context)
return {'type': 'ir.actions.act_window_close'}
def _prepare_statement_line_vals(self, cr, uid, move_line, s_type,
statement_id, amount, context=None):
return {'name': move_line.name or '?',
'amount': amount,
'type': s_type,
'partner_id': move_line.partner_id.id,
'account_id': move_line.account_id.id,
'statement_id': statement_id,
'ref': move_line.ref,
'voucher_id': False,
'date': time.strftime('%Y-%m-%d'),
}
class AccountPaymentPopulateStatement(orm.TransientModel):
_inherit = "account.payment.populate.statement"
def populate_statement(self, cr, uid, ids, context=None):
"""Taken from payment addon as no hook is vailable. No function
no refactoring, just trimming the part that generates voucher"""
line_obj = self.pool['payment.line']
statement_obj = self.pool['account.bank.statement']
statement_line_obj = self.pool['account.bank.statement.line']
currency_obj = self.pool['res.currency']
if context is None:
context = {}
data = self.read(cr, uid, ids, [], context=context)[0]
line_ids = data['lines']
if not line_ids:
return {'type': 'ir.actions.act_window_close'}
statement = statement_obj.browse(
cr, uid, context['active_id'], context=context)
for line in line_obj.browse(cr, uid, line_ids, context=context):
ctx = context.copy()
            # This used to be value_date earlier, but that field no longer exists
ctx['date'] = line.ml_maturity_date
amount = currency_obj.compute(
cr, uid, line.currency.id, statement.currency.id,
line.amount_currency, context=ctx)
if not line.move_line_id.id:
continue
context.update({'move_line_ids': [line.move_line_id.id]})
vals = self._prepare_statement_line_vals(
cr, uid, line, -amount, statement, context=context)
st_line_id = statement_line_obj.create(cr, uid, vals,
context=context)
line_obj.write(
cr, uid, [line.id], {'bank_statement_line_id': st_line_id})
return {'type': 'ir.actions.act_window_close'}
def _prepare_statement_line_vals(self, cr, uid, payment_line, amount,
statement, context=None):
return {
'name': payment_line.order_id.reference or '?',
'amount': amount,
'type': 'supplier',
'partner_id': payment_line.partner_id.id,
'account_id': payment_line.move_line_id.account_id.id,
'statement_id': statement.id,
'ref': payment_line.communication,
'date': (payment_line.date or payment_line.ml_maturity_date or
statement.date)
}
|
zenieldanaku/DyDCreature_Editor
|
azoe/widgets/basewidget.py
|
Python
|
mit
| 2,314
| 0
|
from pygame.sprite import DirtySprite
from pygame import draw
class BaseWidget(DirtySprite):
"""clase base para todos los widgets"""
focusable = True
# si no es focusable, no se le llaman focusin y focusout
# (por ejemplo, un contenedor, una etiqueta de texto)
hasFocus = False
# indica si el widget está en foco o no.
enabled = True
# un widget con enabled==False no recibe ningun evento
nombre = ''
# identifica al widget en el renderer
hasMouseOver = False
# indica si el widget tuvo el mouse encima o no, por el onMouseOut
opciones = None
# las opciones con las que se inicializo
setFocus_onIn = False
# if True: Renderer.setFocus se dispara onMouseIn también.
KeyCombination = ''
layer = 0
rect = None
x, y = 0, 0
def __init__(self, parent=None, **opciones):
if parent is not None:
self.parent = parent
self.layer = self.parent.layer + 1
self.opciones = opciones
super().__init__()
def on_focus_in(self):
self.hasFocus = True
def on_focus_out(self):
self.hasFocus = False
def on_mouse_down(self, mousedata):
pass
def on_mouse_up(self, mousedata):
pass
def on_mouse_over(self):
pass
def on_mouse_in(self):
self.hasMouseOver = True
def on_mouse_out(self):
self.hasMouseOver = False
def on_key_down(self, keydata):
pass
def on_key_up(self, keydata):
pass
def on_destruction(self):
        # this function is called when the widget is removed from the renderer.
pass
@staticmethod
def _biselar(imagen, color_luz, color_sombra):
w, h = imagen.get_size()
        draw.line(imagen, color_sombra, (0, h - 2), (w - 1, h - 2), 2)
draw.line(imagen, color_sombra, (w - 2, h - 2), (w - 2, 0), 2)
draw.lines(imagen, color_luz, 0, [(w - 2, 0), (0, 0), (0, h - 4)], 2)
return imagen
def reubicar_en_ventana(self, dx=0, dy=0):
self.rect.move_ip(dx, dy)
self.x += dx
self.y += dy
self.dirty = 1
    def __repr__(self):
return self.nombre
def is_visible(self):
return self._visible
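    # Illustrative sketch (not part of the original module): a minimal subclass
    # showing how the hook methods above are meant to be overridden. The widget
    # name and the print call are made up for the example.
    #
    # class DemoButton(BaseWidget):
    #     def __init__(self, parent=None, **opciones):
    #         super().__init__(parent, **opciones)
    #         self.nombre = 'demo_button'
    #
    #     def on_mouse_down(self, mousedata):
    #         if self.enabled:
    #             print('click on', self.nombre)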
|
rochacbruno/dynaconf
|
example/django_pure/polls/views.py
|
Python
|
mit
| 125
| 0
|
from django.conf import settings
from django.http import HttpResponse
def index(request):
    return HttpResponse("Hello")
|
suutari/shoop
|
shuup_tests/browser/admin/test_refunds.py
|
Python
|
agpl-3.0
| 4,879
| 0.003689
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import decimal
import os
import time
import pytest
from django.core.urlresolvers import reverse
from shuup.testing.browser_utils import (
click_element, wait_until_appeared, wait_until_condition
)
from shuup.testing.factories import (
create_order_with_product, get_default_product, get_default_shop,
get_default_supplier
)
from shuup.testing.utils import initialize_admin_browser_test
from shuup.utils.i18n import format_money
pytestmark = pytest.mark.skipif(os.environ.get("SHUUP_BROWSER_TESTS", "0") != "1", reason="No browser tests run.")
@pytest.mark.browser
@pytest.mark.django_db
def test_refunds(browser, admin_user, live_server, settings):
order = create_order_with_product(
get_default_product(), get_default_supplier(), 10, decimal.Decimal("10"), n_lines=10,
shop=get_default_shop())
order2 = create_order_with_product(
get_default_product(), get_default_supplier(), 10, decimal.Decimal("10"), n_lines=10,
shop=get_default_shop())
order2.create_payment(order2.taxful_total_price)
initialize_admin_browser_test(browser, live_server, settings)
_test_toolbar_visibility(browser, live_server, order)
_test_create_full_refund(browser, live_server, order)
_test_refund_view(browser, live_server, order2)
def _check_create_refund_link(browser, order, present):
url = reverse("shuup_admin:order.create-refund", kwargs={"pk": order.pk})
wait_until_condition(browser, lambda x: (len(x.find_by_css("a[href='%s']" % url)) > 0) == present)
def _test_toolbar_visibility(browser, live_server, order):
url = reverse("shuup_admin:order.detail", kwargs={"pk": order.pk})
browser.visit("%s%s" % (live_server, url))
wait_until_appeared(browser, "#order_details")
_check_create_refund_link(browser, order, False)
order.create_payment(order.taxful_total_price)
browser.reload()
wait_until_appeared(browser, "#order_details")
_check_create_refund_link(browser, order, True)
def _test_create_full_refund(browser, live_server, order):
url = reverse("shuup_admin:order.create-refund", kwargs={"pk": order.pk})
browser.visit("%s%s" % (live_server, url))
wait_until_condition(browser, lambda x: x.is_text_present("Refunded: %s" % format_money(order.shop.create_price("0.00"))))
wait_until_condition(browser, lambda x: x.is_text_present("Remaining: %s" % format_money(order.taxful_total_price)))
url = reverse("shuup_admin:order.create-full-refund", kwargs={"pk": order.pk})
click_element(browser, "a[href='%s']" % url)
wait_until_condition(browser, lambda x: x.is_text_present("Refund Amount: %s" % format_money(order.taxful_total_price)))
click_element(browser, "#create-full-refund")
wait_until_appeared(browser, "#order_details")
_check_create_refund_link(browser, order, False)
order.refresh_from_db()
assert not order.taxful_total_price
assert order.is_paid()
assert order.is_fully_shipped()
def _test_refund_view(browser, live_server, order):
url = reverse("shuup_admin:order.create-refund", kwargs={"pk": order.pk})
browser.visit("%s%s" % (live_server, url))
wait_until_condition(browser, lambda x: x.is_text_present("Refunded: %s" % format_money(order.shop.create_price(
|
"0.00"))))
assert len(browser.find_by_css("#id_
|
form-0-line_number option")) == 12 # blank + arbitrary amount + num lines
click_element(browser, "#select2-id_form-0-line_number-container")
wait_until_appeared(browser, "input.select2-search__field")
browser.execute_script('$($(".select2-results__option")[1]).trigger({type: "mouseup"})') # select arbitrary amount
wait_until_condition(browser, lambda x: len(x.find_by_css("#id_form-0-text")))
wait_until_condition(browser, lambda x: len(x.find_by_css("#id_form-0-amount")))
browser.find_by_css("#id_form-0-text").first.value = "test"
browser.find_by_css("#id_form-0-amount").first.value = "900"
click_element(browser, "#add-refund")
click_element(browser, "#select2-id_form-1-line_number-container")
wait_until_appeared(browser, "input.select2-search__field")
browser.execute_script('$($(".select2-results__option")[2]).trigger({type: "mouseup"})') # select first line
browser.find_by_css("#id_form-1-amount").first.value == "100"
browser.find_by_css("#id_form-1-quantity").first.value == "10"
click_element(browser, "button[form='create_refund']")
_check_create_refund_link(browser, order, True) # can still refund quantity
order.refresh_from_db()
assert not order.taxful_total_price
assert order.is_paid()
assert not order.is_fully_shipped()
|
datapythonista/datapythonista.github.io
|
docs/cart_talk.py
|
Python
|
apache-2.0
| 5,718
| 0.003498
|
"""Source code used for the talk:
http://www.slideshare.net/MarcGarcia11/cart-not-only-classification-and-regression-trees
"""
# data
import pandas as pd
data = {'age': [38, 49, 27, 19, 54, 29, 19, 42, 34, 64,
19, 62, 27, 77, 55, 41, 56, 32, 59, 35],
'distance': [6169.98, 7598.87, 3276.07, 1570.43, 951.76, 139.97, 4476.89,
8958.77, 1336.44, 6138.85, 2298.68, 1167.92, 676.30, 736.85,
1326.52, 712.13, 3083.07, 1382.64, 2267.55, 2844.18],
'attended': [False, False, False, True, True, True, False, True, True, True,
                     False, True, True, True, False, True, True, True, True, False]}
df = pd.DataFrame(data)
# base_plot
from bokeh.plotting import figure, show
def base_plot(df):
p = figure(title='Event attendance',
plot_width=900,
plot_height=400)
p.xaxis.axis_label = 'Distance'
p.yaxis.axis_label = 'Age'
p.circle(df[df.attended]['distance'],
df[df.attended]['age'],
color='red',
legend='Attended',
fill_alpha=0.2,
size=10)
p.circle(df[~df.attended]['distance'],
df[~df.attended]['age'],
color='blue',
legend="Didn't attend",
fill_alpha=0.2,
size=10)
return p
_ = show(base_plot(df))
# tree_to_nodes
from collections import namedtuple
from itertools import starmap
def tree_to_nodes(dtree):
nodes = starmap(namedtuple('Node', 'feature,threshold,left,right'),
zip(map(lambda x: {0: 'age', 1: 'distance'}.get(x),
dtree.tree_.feature),
dtree.tree_.threshold,
dtree.tree_.children_left,
dtree.tree_.children_right))
return list(nodes)
# cart_plot
from collections import namedtuple, deque
from functools import partial
class NodeRanges(namedtuple('NodeRanges', 'node,max_x,min_x,max_y,min_y')):
pass
def cart_plot(df, dtree, nodes, limit=None):
nodes = tree_to_nodes(dtree)
    plot = base_plot(df)
add_line = partial(plot.line, line_color='black', line_width=2)
stack = deque()
stack.append(NodeRanges(node=nodes[0],
max_x=df['distance'].max(),
min_x=df['distance'].min(),
max_y=df['age'].max(),
min_y=df['age'].min()))
count = 1
while len(stack):
node, max_x, min_x, max_y, min_y = stack.pop()
feature, threshold, left, right = node
if feature == 'distance':
add_line(x=[threshold, threshold],
y=[min_y, max_y])
elif feature == 'age':
add_line(x=[min_x, max_x],
y=[threshold, threshold])
else:
continue
stack.append(NodeRanges(node=nodes[left],
max_x=threshold if feature == 'distance' else max_x,
min_x=min_x,
max_y=threshold if feature == 'age' else max_y,
min_y=min_y))
stack.append(NodeRanges(node=nodes[right],
max_x=max_x,
min_x=threshold if feature == 'distance' else min_x,
max_y=max_y,
min_y=threshold if feature == 'age' else min_y))
if limit is not None and count >= limit:
break
else:
count += 1
show(plot)
# decision_tree_model
def decision_tree_model(age, distance):
if distance >= 2283.11:
if age >= 40.00:
if distance >= 6868.86:
if distance >= 8278.82:
return True
else:
return False
else:
return True
else:
return False
else:
if age >= 54.50:
if age >= 57.00:
return True
else:
return False
else:
return True
# entropy
import math
def entropy(a, b):
total = a + b
prob_a = a / total
prob_b = b / total
return - prob_a * math.log(prob_a, 2) - prob_b * math.log(prob_b, 2)
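# Quick sanity checks added for illustration (not in the original talk source):
# a 50/50 split is maximally impure (1 bit), while a 9/1 split is far purer.
assert abs(entropy(5, 5) - 1.0) < 1e-9
assert abs(entropy(9, 1) - 0.469) < 1e-3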
# get_best_split
def get_best_split(x, y):
best_split = None
best_entropy = 1.
for feature in x.columns.values:
column = x[feature]
for value in column.iterrows():
a = y[column < value] == class_a_value
b = y[column < value] == class_b_value
left_weight = (a + b) / len(y.index)
left_entropy = entropy(a, b)
a = y[column >= value] == class_a_value
b = y[column >= value] == class_b_value
right_weight = (a + b) / len(y.index)
right_entropy = entropy(a, b)
split_entropy = left_weight * left_entropy + right_weight * right_entropy
if split_entropy < best_entropy:
best_split = (feature, value)
best_entropy = split_entropy
return best_split
# train_decision_tree
def train_decision_tree(x, y):
feature, value = get_best_split(x, y)
x_left, y_left = x[x[feature] < value], y[x[feature] < value]
if len(y_left.unique()) > 1:
left_node = train_decision_tree(x_left, y_left)
else:
left_node = None
x_right, y_right = x[x[feature] >= value], y[x[feature] >= value]
if len(y_right.unique()) > 1:
right_node = train_decision_tree(x_right, y_right)
else:
right_node = None
return (feature, value, left_node, right_node)
|
nullishzero/Portage
|
pym/_emerge/JobStatusDisplay.py
|
Python
|
gpl-2.0
| 7,763
| 0.031302
|
# Copyright 1999-2013 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from __future__ import unicode_literals
import formatter
import io
import sys
import time
import portage
from portage import os
from portage import _encodings
from portage import _unicode_encode
from portage.output import xtermTitle
from _emerge.getloadavg import getloadavg
if sys.hexversion >= 0x3000000:
basestring = str
class JobStatusDisplay(object):
_bound_properties = ("curval", "failed", "running")
# Don't update the display unless at least this much
# time has passed, in units of seconds.
_min_display_latency = 2
_default_term_codes = {
'cr' : '\r',
'el' : '\x1b[K',
'nel' : '\n',
}
_termcap_name_map = {
'carriage_return' : 'cr',
'clr_eol' : 'el',
'newline' : 'nel',
}
def __init__(self, quiet=False, xterm_titles=True):
object.__setattr__(self, "quiet", quiet)
object.__setattr__(self, "xterm_titles", xterm_titles)
object.__setattr__(self, "maxval", 0)
object.__setattr__(self, "merges", 0)
object.__setattr__(self, "_changed", False)
object.__setattr__(self, "_displayed", False)
object.__setattr__(self, "_last_display_time", 0)
self.reset()
isatty = os.environ.get('TERM') != 'dumb' and \
hasattr(self.out, 'isatty') and \
self.out.isatty()
object.__setattr__(self, "_isatty", isatty)
if not isatty or not self._init_term():
term_codes = {}
for k, capname in self._termcap_name_map.items():
term_codes[k] = self._default_term_codes[capname]
object.__setattr__(self, "_term_codes", term_codes)
encoding = sys.getdefaultencoding()
for k, v in self._term_codes.items():
if not isinstance(v, basestring):
self._term_codes[k] = v.decode(encoding, 'replace')
if self._isatty:
width = portage.output.get_term_size()[1]
else:
width = 80
self._set_width(width)
def _set_width(self, width):
if width == getattr(self, 'width', None):
return
if width <= 0 or width > 80:
width = 80
object.__setattr__(self, "width", width)
object.__setattr__(self, "_jobs_column_width", width - 32)
@property
def out(self):
"""Use a lazy reference to sys.stdout, in case the API consumer has
temporarily overridden stdout."""
return sys.stdout
def _write(self, s):
# avoid potential UnicodeEncodeError
s = _unicode_encode(s,
encoding=_encodings['stdio'], errors='backslashreplace')
out = self.out
if sys.hexversion >= 0x3000000:
out = out.buffer
out.write(s)
out.flush()
def _init_term(self):
"""
Initialize term control codes.
@rtype: bool
@return: True if term codes were successfully initialized,
False otherwise.
"""
term_type = os.environ.get("TERM", "").strip()
if not term_type:
return False
tigetstr = None
try:
import curses
try:
curses.setupterm(term_type, self.out.fileno())
tigetstr = curses.tigetstr
except curses.error:
pass
except ImportError:
pass
if tigetstr is None:
return False
term_codes = {}
for k, capname in self._termcap_name_map.items():
# Use _native_string for PyPy compat (bug #470258).
code = tigetstr(portage._native_string(capname))
if code is None:
code = self._default_term_codes[capname]
term_codes[k] = code
object.__setattr__(self, "_term_codes", term_codes)
return True
def _format_msg(self, msg):
return ">>> %s" % msg
def _erase(self):
self._write(
self._term_codes['carriage_return'] + \
self._term_codes['clr_eol'])
self._displayed = False
def _display(self, line):
self._write(line)
self._displayed = True
def _update(self, msg):
if not self._isatty:
self._write(self._format_msg(msg) + self._term_codes['newline'])
self._displayed = True
return
if self._displayed:
self._erase()
self._display(self._format_msg(msg))
def displayMessage(self, msg):
was_displayed = self._displayed
if self._isatty and self._displayed:
self._erase()
self._write(self._format_msg(msg) + self._term_codes['newline'])
self._displayed = False
if was_displayed:
self._changed = True
self.display()
def reset(self):
self.maxval = 0
self.merges = 0
for name in self._bound_properties:
object.__setattr__(self, name, 0)
if self._displayed:
self._write(self._term_codes['newline'])
self._displayed = False
def __setattr__(self, name, value):
old_value = getattr(self, name)
if value == old_value:
return
object.__setattr__(self, name, value)
if name in self._bound_properties:
self._property_change(name, old_value, value)
def _property_change(self, name, old_value, new_value):
self._changed = True
self.display()
def _load_avg_str(self):
try:
avg = getloadavg()
except OSError:
return 'unknown'
max_avg = max(avg)
if max_avg < 10:
digits = 2
elif max_avg < 100:
digits = 1
else:
digits = 0
return ", ".join(("%%.%df" % digits ) % x for x in avg)
def display(self):
"""
Display status on stdout, but only if something has
changed since the last call. This always returns True,
for continuous scheduling via timeout_add.
"""
if self.quiet:
return True
current_time = time.time()
time_delta = current_time - self._last_display_time
if self._displayed and \
not self._changed:
if not self._isatty:
return True
if time_delta < self._min_display_latency:
return True
self._last_display_time = current_time
self._changed = False
self._display_status()
return True
def _display_status(self):
# Don't use len(self._completed_tasks) here since that also
# can include uninstall tasks.
curval_str = "%s" % (self.curval,)
maxval_str = "%s" % (self.maxval,)
running_str = "%s" % (self.running,)
failed_str = "%s" % (self.failed,)
load_avg_str = self._load_avg_str()
color_output = io.StringIO()
plain_output = io.StringIO()
style_file = portage.output.ConsoleStyleFile(color_output)
style_file.write_listener = plain_output
style_writer = portage.output.StyleWriter(file=style_file, maxcol=9999)
style_writer.style_listener = style_file.new_styles
f = formatter.AbstractFormatter(style_writer)
number_style = "INFORM"
f.add_literal_data("Jobs: ")
f.push_style(number_style)
f.add_literal_data(curval_str)
f.pop_style()
f.add_literal_data(" of ")
f.push_style(number_style)
f.add_literal_data(maxval_str)
f.pop_style()
f.add_literal_data(" complete")
if self.running:
f.add_literal_data(", ")
f.push_style(number_style)
f.add_literal_data(running_str)
f.pop_style()
f.add_literal_data(" running")
if self.failed:
f.add_literal_data(", ")
f.push_style(number_style)
f.add_literal_data(failed_str)
f.pop_style()
f.add_literal_data(" failed")
padding = self._jobs_column_width - len(plain_output.getvalue())
if padding > 0:
f.add_literal_data(padding * " ")
f.add_literal_data("Load avg: ")
f.add_literal_data(load_avg_str)
# Truncate to fit width, to avoid making the terminal scroll if the
# line overflows (happens when the load average is large).
plain_output = plain_output.getvalue()
if self._isatty and len(plain_output) > self.width:
# Use plain_output here since it's easier to truncate
# properly than the color output which contains console
# color codes.
self._update(plain_output[:self.width])
else:
self._update(color_output.getvalue())
if self.xterm_titles:
            # If the HOSTNAME variable is exported, include it
# in the xterm title, just like emergelog() does.
# See bug #390699.
title_str = " ".join(plain_output.split())
hostname = os.environ.get("HOSTNAME")
if hostname is not None:
title_str = "%s: %s" % (hostname, title_str)
xtermTitle(title_str)
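# Illustrative usage sketch (assumption, not part of the original module):
# d = JobStatusDisplay(quiet=False, xterm_titles=False)
# d.maxval = 10
# d.curval = 3   # bound property: the assignment triggers a redraw via display()
# d.display()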
|
lehmacdj/.dotfiles
|
bin/bitrate.py
|
Python
|
gpl-3.0
| 314
| 0
|
#!/usr/bin/env python3
from mutagen.mp3 import MP3
import sys
if len(sys.argv) < 2:
print('error: didn\'t pass enough arguments')
    print('usage: ./bitrate.py <file name>')
print('usage: find the bitrate of an mp3 file')
exit(1)
f = MP3(sys.argv[1])
print('bitrate: %s' % (f.info.bitrate / 1000))
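# Possible extension (assumption, not part of the original script): mutagen's
# MP3 info object also exposes length and sample_rate, which could be reported
# the same way.
#
# print('length: %.1f s' % f.info.length)
# print('sample rate: %s Hz' % f.info.sample_rate)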
|
joshsmith2/superlists
|
lists/migrations/0001_initial.py
|
Python
|
gpl-2.0
| 492
| 0.002033
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Item',
fields=[
                ('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
],
options={
},
bases=(models.Model,),
),
]
|
scottsilverlabs/raspberrystem
|
rstem/projects/led_matrix_games/aspirin.py
|
Python
|
apache-2.0
| 9,366
| 0.009075
|
from rstem import led_matrix, accel
import RPi.GPIO as GPIO
import random
import time
import sys
# notify of progress
print("P50")
sys.stdout.flush()
# set up led matrix
#led_matrix.init_grid(2,2)
led_matrix.init_matrices([(0,8),(8,8),(8,0),(0,0)])
# set up accelerometer
accel.init(1)
# notify of progress
print("P60")
sys.stdout.flush()
# set up buttons
A = 4
B = 17
UP = 25
DOWN = 24
LEFT = 23
RIGHT = 18
START = 27
SELECT = 22
# accelerometer threshold
THRESHOLD = 3
class State(object):
PLAYING, IDLE, SCORE, EXIT = range(4)
# starting variables
state = State.IDLE
field = None
title = led_matrix.LEDText("ASPIRIN - Press A to use accelometer or B to use buttons")
# notify of progress
print("P90")
sys.stdout.flush()
class Direction(object):
LEFT, RIGHT, UP, DOWN = range(4)
class Apple(object):
def __init__(self, position):
self.position = position
def draw(self):
led_matrix.point(*self.position)
class Striker(object):
def __init__(self, start_pos, direction):
self.position = start_pos # starting position of the striker
self.direction = direction
def draw(self):
led_matrix.point(*self.position, color=3)
def move(self):
        # check if the striker hit the wall and needs to bounce back
if self.direction == Direction.LEFT and self.position[0] == 0:
self.direction = Direction.RIGHT
        elif self.direction == Direction.RIGHT and self.position[0] == led_matrix.width()-1:
self.direction = Direction.LEFT
elif self.direction == Direction.DOWN and self.position[1] == 0:
self.direction = Direction.UP
elif self.direction == Direction.UP and self.position[1] == led_matrix.height()-1:
self.direction = Direction.DOWN
if self.direction == Direction.LEFT:
self.position = (self.position[0]-1, self.position[1])
elif self.direction == Direction.RIGHT:
self.position = (self.position[0]+1, self.position[1])
elif self.direction == Direction.DOWN:
self.position = (self.position[0], self.position[1]-1)
elif self.direction == Direction.UP:
self.position = (self.position[0], self.position[1]+1)
class Player(object):
def __init__(self, position=None, accel=False):
# set position to be center of screen if position is not given
if position is None:
self.position = (int(led_matrix.width()/2), int(led_matrix.height()/2))
else:
self.position = position
        self.accel = accel # True if controls are the accelerometer, False if controls are buttons
def draw(self):
led_matrix.point(*self.position, color=8)
def move(self, direction):
if direction == Direction.UP:
if self.position[1] < led_matrix.height()-1:
self.position = (self.position[0], self.position[1]+1)
elif direction == Direction.DOWN:
if self.position[1] > 0:
self.position = (self.position[0], self.position[1]-1)
elif direction == Direction.LEFT:
if self.position[0] > 0:
self.position = (self.position[0]-1, self.position[1])
elif direction == Direction.RIGHT:
if self.position[0] < led_matrix.width()-1:
self.position = (self.position[0]+1, self.position[1])
else:
raise ValueError("Invalid direction given.")
class Field(object):
def __init__(self, player):
self.player = player
empty_strikers = set()
# initialize empty strikers
for x_pos in range(led_matrix.width()):
empty_strikers.add(Striker((x_pos, 0), Direction.UP))
for y_pos in range(led_matrix.height()):
empty_strikers.add(Striker((0, y_pos), Direction.RIGHT))
self.empty_strikers = empty_strikers # strikers not used yet
self.strikers = set() # active strikers
self.apple = None
def draw(self):
self.player.draw()
self.apple.draw()
# strikers = self.horizontal_strikers.union(self.vertical_strikers)
for striker in self.strikers:
striker.draw()
def player_collided_with_apple(self):
return self.player.position == self.apple.position
def player_collided_with_striker(self):
# strikers = self.horizontal_strikers.union(self.vertical_strikers)
for striker in self.strikers:
if self.player.position == striker.position:
return True
return False
def new_apple(self):
# set up list of x and y choices
x_pos = list(range(led_matrix.width()))
y_pos = list(range(led_matrix.height()))
# remove the position that player is currently in
del x_pos[self.player.position[0]]
del y_pos[self.player.position[1]]
self.apple = Apple((random.choice(x_pos), random.choice(y_pos)))
def add_striker(self):
if len(self.empty_strikers) == 0:
return False # no more strikers to make, you win!!
new_striker = random.choice(list(self.empty_strikers))
self.strikers.add(new_striker)
self.empty_strikers.remove(new_striker)
return True
# set up buttons
GPIO.setmode(GPIO.BCM)
def button_handler(channel):
global state
global field
if channel in [START, SELECT]:
state = State.EXIT
elif state in [State.IDLE, State.SCORE] and channel in [A, B]:
# Reset field and player to start a new game
player = Player(accel=(channel == A))
field = None
field = Field(player)
field.new_apple() # add the first apple
state = State.PLAYING
# elif state == State.PLAYING and (not field.player.accel) and channel in [UP, DOWN, LEFT, RIGHT]:
# if channel == UP:
# field.player.move(Direction.UP)
# elif channel == DOWN:
# field.player.move(Direction.DOWN)
# elif channel == LEFT:
# field.player.move(Direction.LEFT)
# elif channel == RIGHT:
# field.player.move(Direction.RIGHT)
for button in [UP, DOWN, LEFT, RIGHT, START, A, B, SELECT]:
GPIO.setup(button, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.add_event_detect(button, GPIO.FALLING, callback=button_handler, bouncetime=100)
# notify of progress
print("P100")
sys.stdout.flush()
# notify menu we are ready for the led matrix
print("READY")
sys.stdout.flush()
# FSM =======
while True:
if state == State.PLAYING:
led_matrix.erase()
        # move player with accelerometer, otherwise poll the buttons
if field.player.accel:
angles = accel.angles()
# "Simple" lowpass filter for velocity data
x = angles[0]
y = angles[1]
# alpha = 0.2
# velocity = 0.0
# x_diff = velocity*alpha + (angles[0]*2*8/90)*(1 - alpha)
# y_diff = velocity*alpha + (angles[1]*2*8/90)*(1 - alpha)
if x > THRESHOLD:
field.player.move(Direction.RIGHT)
elif x < -THRESHOLD:
field.player.move(Direction.LEFT)
if y > THRESHOLD:
field.player.move(Direction.DOWN)
elif y < -THRESHOLD:
field.player.move(Direction.UP)
else:
if GPIO.input(UP) == 0:
field.player.move(Direction.UP)
if GPIO.input(DOWN) == 0:
field.player.move(Direction.DOWN)
if GPIO.input(LEFT) == 0:
field.player.move(Direction.LEFT)
if GPIO.input(RIGHT) == 0:
field.player.move(Direction.RIGHT)
# move the strikers
for striker in field.strikers:
striker.move()
# draw all the objects on the field
field.draw()
led_matrix.show()
# check for collisions
if field.player_collid
|
carlitos26/RESTful-Web-service
|
browser-version/app/modules.py
|
Python
|
gpl-3.0
| 1,874
| 0.012807
|
import sqlite3 as sql
from flask.json import jsonify
from flask import current_app
def total_entries():
with sql.connect("names.db") as con:
cur = con.cursor()
entries = cur.execute("SELECT count(*) FROM names").fetchone()
con.commit()
return '{}\n'.format('{}\n'.format(entries)[1:-3])
def select_entries_by_name(name):
with sql.connect("names.db") as con:
cur = con.cursor()
query = cur.execute("SELECT id, year, gender, count FROM names WHERE name = '{}';".format(name))
con.commit()
cached = current_app.cache.get('a_key')
if cached:
return cached #"The value is cached: {}\n".format(cached)
result = [dict({'id': row[0], 'year': row[1], 'gender': row[2], 'count': row[3]}) for row in query.fetchall()]
current_app.cache.set('a_key', result, timeout=180)
return result
#return jsonify({'Entries for %s' % name: entries})
def insert_name(name,year,gender,count):
with sql.connect("names.db") as con:
cur = con.cursor()
try:
cur.execute("INSERT INTO names (name,year,gender,count) VALUES ('{}',{},'{}',{})".format(name,year,gender,count) )
con.commit()
new_id = cur.lastrowid
return str(new_id)
except Exception as e:
            print(e)
return 'The baby is already present in the DataBase.'
def first_and_last(name):
with sql.connect("names.db") as con:
cur = con.cursor()
last = cur.execute("select MAX(year) from names where name='{}';".format(name) ).fetchone()
first = cur.execute("select MIN(year) from names where name='{}';".format(name) ).fetchone()
con.commit()
return 'Last year is: %s \nFirst year is: %s' % ( \
'{}'.format('{}\n'.format(last)[1:-3]), \
'{}'.format('{}\n'.format(first)[1:-3]))
|
|
airbnb/airflow
|
tests/dags/test_task_view_type_check.py
|
Python
|
apache-2.0
| 1,768
| 0
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use
|
this file except in compliance
# with the License. You ma
|
y obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
DAG designed to test a PythonOperator that calls a functools.partial
"""
import functools
import logging
from datetime import datetime
from airflow.models import DAG
from airflow.operators.python import PythonOperator
DEFAULT_DATE = datetime(2016, 1, 1)
default_args = dict(start_date=DEFAULT_DATE, owner='airflow')
class CallableClass:
"""
Class that is callable.
"""
def __call__(self):
"""A __call__ method """
def a_function(_, __):
"""A function with two args """
partial_function = functools.partial(a_function, arg_x=1)
class_instance = CallableClass()
logging.info('class_instance type: %s', type(class_instance))
dag = DAG(dag_id='test_task_view_type_check', default_args=default_args)
dag_task1 = PythonOperator(
task_id='test_dagrun_functool_partial',
dag=dag,
python_callable=partial_function,
)
dag_task2 = PythonOperator(
task_id='test_dagrun_instance',
dag=dag,
python_callable=class_instance,
)
|
ooovector/qtlab_replacement
|
single_shot_readout.py
|
Python
|
gpl-3.0
| 8,239
| 0.028766
|
from . import data_reduce
import numpy as np
from . import readout_classifier
class single_shot_readout:
"""
Single shot readout class
Args:
adc (Instrument): a device that measures a complex vector for each readout trigger (an ADC)
        prepare_seqs (list of pulses.sequence): a dict of sequences of control pulses. The keys are used for state identification.
        ro_seq (pulses.sequence): a sequence of control pulses that is used to generate the readout pulse of the DAC.
pulse_generator (pulses.pulse_generator): pulse generator used to concatenate and set waveform sequences on the DAC.
ro_delay_seq (pulses.sequence): Sequence used to align the DAC and ADC (readout delay compensation)
adc_measurement_name (str): name of measurement on ADC
"""
def __init__(self, adc, prepare_seqs, ro_seq, pulse_generator, ro_delay_seq = None, _readout_classifier = None, adc_measurement_name='Voltage'):
self.adc = adc
self.ro_seq = ro_seq
self.prepare_seqs = prepare_seqs
self.ro_delay_seq = ro_delay_seq
self.pulse_generator = pulse_generator
self.repeat_samples = 2
self.save_last_samples = False
self.train_test_split = 0.8
self.measurement_name = ''
# self.dump_measured_samples = False
self.measure_avg_samples = True
#self.measure_cov_samples = False
self.measure_hists = True
self.measure_feature_w_threshold = True
#self.measure_features = True
#self.cutoff_start = 0
if not _readout_classifier:
self.readout_classifier = readout_classifier.linear_classifier()
else:
self.readout_classifier = _readout_classifier
self.adc_measurement_name = adc_measurement_name
self.filter_binary = {'get_points':lambda: (self.adc.get_points()[adc_measurement_name][0],),
'get_dtype': lambda: int,
'get_opts': lambda: {},
'filter': self.filter_binary_func}
# def measure_delay(self, ro_channel):
# import matplotlib.pyplot as plt
# from scipy.signal import resample
# self.pulse_generator.set_seq(self.ro_delay_seq)
# first_nonzero = int(np.nonzero(np.abs(self.pulse_generator.channels[ro_channel].get_waveform()))[0][0]/self.pulse_generator.channels[ro_channel].get_clock()*self.adc.get_clock())
# ro_dac_waveform = self.pulse_generator.channels[ro_channel].awg_I.get_waveform(channel=self.pulse_generator.channels[ro_channel].awg_ch_I)+\
# 1j*self.pulse_generator.channels[ro_channel].awg_Q.get_waveform(channel=self.pulse_generator.channels[ro_channel].awg_ch_Q)
# ro_dac_waveform = resample(ro_dac_waveform, num=int(len(ro_dac_waveform)/self.pulse_generator.channels[ro_channel].get_clock()*self.adc.get_clock()))
# ro_adc_waveform = np.mean(self.adc.measure()['Voltage'], axis=0)
# ro_dac_waveform = ro_dac_waveform - np.mean(ro_dac_waveform)
# ro_adc_waveform = ro_adc_waveform - np.mean(ro_adc_waveform)
# xc = np.abs(np.correlate(ro_dac_waveform, ro_adc_waveform, 'same'))
# xc_max = np.argmax(xc)
# delay = int((xc_max - first_nonzero)/2)
# #plt.figure('delay')
# #plt.plot(ro_dac_waveform[first_nonzero:])
# #plt.plot(ro_adc_waveform[delay:])
# #plt.plot(ro_adc_waveform)
# #print ('Measured delay is {} samples'.format(delay), first_nonzero, xc_max)
# return delay
def calibrate(self):
X = []
y = []
for class_id, prepare_seq in enumerate(self.prepare_seqs):
for i in range(self.repeat_samples):
# pulse sequence to prepare state
self.pulse_generator.set_seq(prepare_seq+self.ro_seq)
measurement = self.adc.measure()
if type(self.adc_measurement_name) is list:
                raise ValueError('Multiqubit readout not implemented') #need multiqubit readout implementation
else:
X.append(measurement[self.adc_measurement_name])
y.extend([class_id]*len(self.adc.get_points()[self.adc_measurement_name][0][1]))
X = np.reshape(X, (-1, len(self.adc.get_points()[self.adc_measurement_name][-1][1]))) # last dimension is the feature dimension
y = np.asarray(y)
# if self.dump_measured_samples or self.save_last_samples:
# self.calib_X = X#np.reshape(X, (len(self.prepare_seqs), -1, len(self.adc.get_points()[self.adc_measurement_name][-1][1])))
# self.calib_y = y
scores = readout_classifier.evaluate_classifier(self.readout_classifier, X, y)
self.readout_classifier.fit(X, y)
self.scores = scores
self.confusion_matrix = readout_classifier.confusion_matrix(y, self.readout_classifier.predict(X))
def get_opts(self):
opts = {}
scores = {score_name:{'log':False} for score_name in readout_classifier.readout_classifier_scores}
opts.update(scores)
if self.measure_avg_samples:
avg_samples = {'avg_sample'+str(_class):{'log':False} for _class in self.readout_classifier.class_list}
#features = {'feature'+str(_class):{'log':False} for _class in self.readout_classifier.class_list}
opts.update(avg_samples)
#meas.update(features)
if self.measure_hists:
#hists = {'hists':{'log':Fas}}
opts['hists'] = {'log':False}
opts['proba_points'] = {'log':False}
if self.measure_feature_w_threshold:
opts['feature'] = {'log':False}
opts['threshold'] = {'log':False}
return opts
def measure(self):
self.calibrate()
meas = {}
# if self.dump_measured_samples:
# self.dump_samples(name=self.measurement_name)
meas.update(self.scores)
if self.measure_avg_samples:
avg_samples = {'avg_sample'+str(_class):self.readout_classifier.class_averages[_class] for _class in self.readout_classifier.class_list}
#features = {'feature'+str(_class):self.readout_classifier.class_features[_class] for _class in self.readout_classifier.class_list}
meas.update(avg_samples)
#meas.update(features)
if self.measure_hists:
meas['hists'] = self.readout_classifier.hists
meas['proba_points'] = self.readout_classifier.proba_points
if self.measure_feature_w_threshold:
meas['feature'] = self.readout_classifier.feature
meas['threshold'] = self.readout_classifier.threshold
return meas
def get_points(self):
points = {}
scores = {score_name:[] for score_name in readout_classifier.readout_classifier_scores}
points.update(scores)
if self.measure_avg_samples:
avg_samples = {'avg_sample'+str(_class):[('Time',np.arange(self.adc.get_nop())/self.adc.get_clock(), 's')] for _class in self.readout_classifier.class_list}
#features = {'feature'+str(_class):[('Time',np.arange(self.adc.get_nop())/self.adc.get_clock(), 's')] for _class in self.readout_classifier.class_list}
points.update(avg_samples)
#points.update(features)
if self.measure_hists:
points['hists'] = [('class', self.readout_classifier.class_list, ''), ('bin', np.arange(self.readout_classifier.nbins), '')]
points['proba_points'] = [('bin', np.arange(self.readout_classifier.nbins), '')]
if self.measure_feature_w_threshold:
            points['feature'] = [('Time',np.arange(self.adc.get_nop())/self.adc.get_clock(), 's')]
points['threshold'] = []
return points
def get_dtype(self):
dtypes = {}
scores = {score_name:float for score_name in readout_classifier.readout_classifier_scores}
dtypes.update(scores)
if self.measure_avg_samples:
avg_samples = {'avg_sample'+str(_class):self.adc.get_dtype()[self.adc_measurement_name] for _class in self.readout_classifier.class_list}
            features = {'feature'+str(_class):self.adc.get_dtype()[self.adc_measurement_name] for _class in self.readout_classifier.class_list}
dtypes.update(avg_samples)
dtypes.update(features)
if self.measure_hists:
dtypes['hists'] = float
dtypes['proba_points'] = float
if self.measure_feature_w_threshold:
dtypes['feature'] = np.complex
dtypes['threshold'] = float
return dtypes
# def dump_samples(self, name):
# from .save_pkl import save_pkl
# header = {'type':'Readout classification X', 'name':name}
# measurement = {'Readout classification X':(['Sample ID', 'time'],
# [np.arange(self.calib_X.shape[0]), np.arange(self.calib_X.shape[1])/self.adc.get_clock()],
# self.calib_X),
# 'Readout classification y':(['Sample ID'],
# [np.arange(self.calib_X.shape[0])],
# self.calib_y)}
# save_pkl(header, measurement, plot=False)
def filter_binary_func(self, x):
return sel
|
jeffhammond/spaghetty
|
branches/old/python/archive/build_executables_basic.py
|
Python
|
bsd-2-clause
| 11,373
| 0.006419
|
#!/usr/bin/python
import fileinput
import string
import sys
import os
ar = 'ar'
fortran_compiler = 'ftn'
fortran_opt_flags = '-O3'
fortran_link_flags = '-O1'
c_compiler = 'cc'
c_opt_flags = '-O3'
src_dir = './src/'
obj_dir = './obj/'
exe_dir = './exe/'
lib_name = 'tce_sort_f77_basic.a'
count = '100'
rank = '30'
ranks = [rank,rank,rank,rank]
size = int(ranks[0])*int(ranks[1])*int(ranks[2])*int(ranks[3])
sizechar = str(size)
def perm(l):
sz = len(l)
if sz <= 1:
return [l]
return [p[:i]+[l[0]]+p[i:] for i in xrange(sz) for p in perm(l[1:])]
indices = ['4','3','2','1']
#all_permutations = [indices]
#transpose_list = [indices]
#loop_list = [indices]
all_permutations = perm(indices)
transpose_list = perm(indices)
loop_list = perm(indices)
print fortran_compiler+' '+fortran_opt_flags+' -c tce_sort_hirata.F'
os.system(fortran_compiler+' '+fortran_opt_flags+' -c tce_sort_hirata.F')
os.system('ar -r '+lib_name+' tce_sort_hirata.o')
print fortran_compiler+' '+fortran_opt_flags+' -c glass_correct.F'
os.system(fortran_compiler+' '+fortran_opt_flags+' -c glass_correct.F')
os.system('ar -r '+lib_name+' glass_correct.o')
print c_compiler+' '+c_opt_flags+' -c tce_sort_4kg.c'
os.system(c_compiler+' '+c_opt_flags+' -c tce_sort_4kg.c')
os.system('ar -r '+lib_name+' tce_sort_4kg.o')
print c_compiler+' '+c_opt_flags+' -c tce_sort_4kg_4321.c'
os.system(c_compiler+' '+c_opt_flags+' -c tce_sort_4kg_4321.c')
os.system('ar -r '+lib_name+' tce_sort_4kg_4321.o')
for transpose_order in transpose_list:
dummy = 0
A = transpose_order[0]
B = transpose_order[1]
C = transpose_order[2]
D = transpose_order[3]
    driver_name = 'transpose_'+A+B+C+D
print driver_name
source_name = driver_name+'_driver.F'
lst_name = driver_name+'_driver.lst'
source_file = open(source_name,'w')
    source_file.write('      PROGRAM ARRAYTEST\n')
source_file.write('#include "mpif.h"\n')
source_file.write(' REAL*8 before('+ranks[0]+','+ranks[0]+','+ranks[0]+','+ranks[0]+')\n')
source_file.write(' REAL*8 after_jeff('+sizechar+')\n')
source_file.write(' REAL*8 after_hirata('+sizechar+')\n')
source_file.write(' REAL*8 after_glass('+sizechar+')\n')
source_file.write(' REAL*8 factor\n')
source_file.write(' REAL*8 Tstart,Tfinish,Thirata,Tglass,Tjeff\n')
source_file.write(' REAL*8 Tspeedup,Tbest\n')
source_file.write(' INTEGER*4 i,j,k,l\n')
source_file.write(' INTEGER*4 aSize(4)\n')
source_file.write(' INTEGER*4 perm(4)\n')
source_file.write(' INTEGER*4 fastest(4)\n')
source_file.write(' INTEGER ierror\n')
source_file.write(' LOGICAL glass_correct\n')
source_file.write(' EXTERNAL glass_correct\n')
source_file.write(' call mpi_init(ierror)\n')
source_file.write(' aSize(1) = '+ranks[0]+'\n')
source_file.write(' aSize(2) = '+ranks[1]+'\n')
source_file.write(' aSize(3) = '+ranks[2]+'\n')
source_file.write(' aSize(4) = '+ranks[3]+'\n')
source_file.write(' perm(1) = '+A+'\n')
source_file.write(' perm(2) = '+B+'\n')
source_file.write(' perm(3) = '+C+'\n')
source_file.write(' perm(4) = '+D+'\n')
source_file.write(' DO 70 i = 1, '+ranks[0]+'\n')
source_file.write(' DO 60 j = 1, '+ranks[1]+'\n')
source_file.write(' DO 50 k = 1, '+ranks[2]+'\n')
source_file.write(' DO 40 l = 1, '+ranks[3]+'\n')
source_file.write(' before(i,j,k,l) = l + k*10 + j*100 + i*1000\n')
source_file.write('40 CONTINUE\n')
source_file.write('50 CONTINUE\n')
source_file.write('60 CONTINUE\n')
source_file.write('70 CONTINUE\n')
source_file.write(' factor = 1.0\n')
source_file.write(' Tbest=999999.0\n')
source_file.write(' Tstart=0.0\n')
source_file.write(' Tfinish=0.0\n')
source_file.write(' CALL CPU_TIME(Tstart)\n')
source_file.write(' DO 30 i = 1, '+count+'\n')
source_file.write(' CALL tce_sort_4(before, after_hirata,\n')
source_file.write(' & aSize(1), aSize(2), aSize(3), aSize(4),\n')
source_file.write(' & perm(1), perm(2), perm(3), perm(4), factor)\n')
source_file.write('30 CONTINUE\n')
source_file.write(' CALL CPU_TIME(Tfinish)\n')
source_file.write(' Thirata=(Tfinish-Tstart)\n')
source_file.write(' Tstart=0.0\n')
source_file.write(' Tfinish=0.0\n')
source_file.write(' Tstart=rtc()\n')
source_file.write(' IF( ((perm(1).eq.4).and.(perm(2).eq.3)).and.\n')
source_file.write(' & ((perm(3).eq.2).and.(perm(4).eq.1)) ) THEN\n')
source_file.write(' CALL CPU_TIME(Tstart)\n')
source_file.write(' DO 31 i = 1, '+count+'\n')
source_file.write(' CALL tce_sort_4kg_4321_(before, after_glass,\n')
source_file.write(' & aSize(1), aSize(2), aSize(3), aSize(4),\n')
source_file.write(' & factor)\n')
source_file.write('31 CONTINUE\n')
source_file.write(' CALL CPU_TIME(Tfinish)\n')
source_file.write(' ELSEIF(glass_correct(perm(1), perm(2), perm(3), perm(4))) THEN\n')
source_file.write(' CALL CPU_TIME(Tstart)\n')
source_file.write(' DO 32 i = 1, '+count+'\n')
source_file.write(' CALL tce_sort_4kg_(before, after_glass,\n')
source_file.write(' & aSize(1), aSize(2), aSize(3), aSize(4),\n')
source_file.write(' & perm(1), perm(2), perm(3), perm(4), factor)\n')
source_file.write('32 CONTINUE\n')
source_file.write(' CALL CPU_TIME(Tfinish)\n')
source_file.write(' ENDIF\n')
#source_file.write(' Tfinish=rtc()\n')
source_file.write(' Tglass=(Tfinish-Tstart)\n')
source_file.write(' IF(glass_correct(perm(1), perm(2), perm(3), perm(4))) THEN\n')
#source_file.write(' PRINT*," i after_glass(i)\n')
#source_file.write(' & after_hirata(i)"\n')
source_file.write(' DO 33 i = 1, '+sizechar+'\n')
source_file.write(' IF (after_glass(i).ne.after_hirata(i)) THEN\n')
source_file.write(' PRINT*,"glass error ",i,after_glass(i),after_hirata(i)\n')
source_file.write(' ENDIF\n')
source_file.write('33 CONTINUE\n')
source_file.write(' ENDIF\n')
source_file.write(' write(6,*) "TESTING TRANPOSE TYPE '+A+B+C+D+'"\n')
source_file.write(' write(6,*) "==================="\n')
source_file.write(' write(6,*) "The compilation flags were:"\n')
for option in range(0,len(fortran_opt_flags.split())):
source_file.write(' write(6,*) "'+fortran_opt_flags.split()[option]+'"\n')
source_file.write(' write(6,*) "==================="\n')
source_file.write(' write(6,*) "Hirata Reference = ",Thirata,"seconds"\n')
source_file.write(' IF(glass_correct(perm(1), perm(2), perm(3), perm(4))) THEN\n')
source_file.write(' write(6,*) "KGlass Reference = ",Tglass,"seconds"\n')
source_file.write(' ENDIF\n')
source_file.write(' write(6,1001) "Algorithm","Jeff","Speedup","Best","Best Speedup"\n')
for loop_order in loop_list:
dummy = dummy+1
a = loop_order[0]
b = loop_order[1]
c = loop_order[2]
d = loop_order[3]
subroutine_name = 'trans_'+A+B+C+D+'_loop_'+a+b+c+d+'_'
source_file.write(' Tstart=0.0\n')
source_file.write(' Tfinish=0.0\n')
source_file.write(' CALL CPU_TIME(Tstart)\n')
source_file.write(' DO '+str(100+dummy)+' i = 1, '+count+'\n')
source_file.write(' CALL '+subroutine_name+'(before, after_jeff,\n')
source_file.write(' & aSize(1), aSize(2), aSize(3), aSize(4)
|
ahMarrone/solar_radiation_model
|
tests/performance_test.py
|
Python
|
mit
| 1,278
| 0
|
import unittest
from models import heliosat
import numpy as np
from netcdf import netcdf as nc
from datetime import datetime
import os
import glob
class TestPerformance(unittest.TestCase):
def setUp(self):
# os.system('rm -rf static.nc temporal_cache products')
os.system('rm -rf temporal_cache products/estimated')
os.system('rm -rf temporal_cache')
os.system('cp -rf data_argentina mock_data')
self.files = glob.glob('mock_data/goes13.*.BAND_01.nc')
def tearDown(self):
os.system('rm -rf mock_data')
def test_main(self):
begin = datetime.now()
heliosat.workwith('mock_data/goes13.2015.*.BAND_01.nc', 32)
end = datetime.now()
elapsed = (end - begin).total_seconds()
first, last = min(self.files), max(self.files)
to_dt = heliosat.to_datetime
processed = (to_dt(last) - to_dt(first)).total_seconds()
processed_days = processed / 3600. / 24
scale_shapes = (2245. / 86) * (3515. / 180) * (30. / processed_days)
estimated = elapsed * scale_shapes / 3600.
print "Scaling total time to %.2f hours." % estimated
print "Efficiency achieved: %.2
|
f%%" % (3.5 / estimated * 100.)
if __name__ == '__main__':
    unittest.main()
|
the-blue-alliance/the-blue-alliance
|
src/backend/common/sitevars/apistatus.py
|
Python
|
mit
| 1,216
| 0
|
import datetime
from typing import Optional, TypedDict
from backend.common.sitevars.sitevar import Sitevar
class WebConfig(TypedDict):
travis_job: str
tbaClient_endpoints_sha: str
current_commit: str
deploy_time: str
endpoints_sha: str
commit_time: str
class AndroidConfig(TypedDict):
min_app_version: int
latest_app_version: int
class IOSConfig(TypedDict):
    min_app_version: int
latest_app_version: int
class ContentType(TypedDict):
current_season: int
max_season: int
web: Optional[WebConfig]
android: Optional[AndroidConfig]
ios: Optional[IOSConfig]
class ApiStatus(Sitevar[ContentType]):
@staticmethod
def key() -> str:
return "apistatus"
@staticmethod
def description() -> str:
return "For setting max year, min app versions, etc."
@staticmethod
def default_value() -> ContentType:
current_year = datetime.datetime.now().year
return ContentType(
current_season=current_year,
max_season=current_year,
web=None,
android=None,
ios=None,
)
@classmethod
def status(cls) -> ContentType:
return cls.get()
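# Illustrative sketch (added, not part of the original sitevar): a ContentType
# value with the same shape that default_value() produces, using hypothetical
# season numbers to show where the optional per-platform configs slot in.
_EXAMPLE_STATUS: ContentType = ContentType(
    current_season=2024,  # hypothetical example year
    max_season=2025,      # hypothetical example year
    web=None,
    android=AndroidConfig(min_app_version=1, latest_app_version=42),
    ios=None,
)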
|
jackfirth/pyramda
|
pyramda/relation/max_test.py
|
Python
|
mit
| 127
| 0
|
from .max import max
from pyramda.private.asserts import assert_equal
def max_test():
    assert_equal(max([1, 3, 4, 2]), 4)
|
srmagura/potential
|
scripts/optimize_basis.py
|
Python
|
gpl-3.0
| 1,662
| 0.006619
|
"""
Script for selecting a good number of basis functions.
Too many or too few basis functions will introduce numerical error.
True solution must be known.
Run the program several times, varying the value of the -N option.
There may be a way to improve on this brute force method.
"""
# To allow __main__ in subdirectory
import sys
sys.path.append(sys.path[0] + '/..')
import argparse
import numpy as np
import ps.ps
import io_util
import problems
import problems.boundary
import copy
from multiprocessing import Pool
parser = argparse.ArgumentParser()
io_util.add_arguments(parser, ('problem', 'N'))
args = parser.parse_args()
problem = problems.problem_dict[args.problem]()
boundary = problems.boundary.OuterSine(problem.R)
problem.boundary = boundary
# Options to pass to the solver
options = {
'problem': problem,
'N': args.N,
'scheme_order': 4,
}
meta_options = {
'procedure_name': 'optimize_basis',
}
io_util.print_options(options, meta_options)
def my_print(t):
print('n_circle={} n_radius={} error={}'.format(*t))
def worker(t):
options['n_circle'] = t[0]
options['n_radius'] = t[1]
my_solver = ps.ps.PizzaSolver(options)
result = my_solver.run()
t = (t[0], t[1], result.error)
my_print(t)
return t
all_options = []
# Tweak the following ranges as needed
for n_circle in range(30, 100, 5):
for n_radius in range(17, n_circle, 4):
all_options.append((n_circle, n_radius))
with Pool(4) as p:
results = p.map(worker, all_options)
min_error = float('inf')
for t in results:
if t[2] < min_error:
min_error = t[2]
min_t = t
print()
my_print(min_t)
|
ljwolf/spvcm
|
spvcm/svc/__init__.py
|
Python
|
mit
| 23
| 0
|
from .model import SVC
|
vipul-tm/DAG
|
dags-ttpl/sync.py
|
Python
|
bsd-3-clause
| 19,607
| 0.033304
|
import os
import socket
from airflow import DAG
from airflow.contrib.hooks import SSHHook
from airflow.operators import PythonOperator
from airflow.operators import BashOperator
from airflow.operators import BranchPythonOperator
from airflow.hooks.mysql_hook import MySqlHook
from airflow.hooks import RedisHook
from airflow.hooks.mysql_hook import MySqlHook
from datetime import datetime, timedelta
from airflow.models import Variable
from airflow.operators import TriggerDagRunOperator
from airflow.operators.subdag_operator import SubDagOperator
from pprint import pprint
import itertools
import socket
import sys
import time
import re
import random
import logging
import traceback
import os
import json
#################################################################DAG CONFIG####################################################################################
default_args = {
'owner': 'wireless',
'depends_on_past': False,
'start_date': datetime(2017, 03, 30,13,00),
'email': ['[email protected]'],
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
'retry_delay': timedelta(minutes=1),
'provide_context': True,
# 'queue': 'bash_queue',
# 'pool': 'backfill',
# 'priority_weight': 10,
# 'end_date': datetime(2016, 1, 1),
}
PARENT_DAG_NAME = "SYNC"
main_etl_dag=DAG(dag_id=PARENT_DAG_NAME, default_args=default_args, schedule_interval='@once')
SQLhook=MySqlHook(mysql_conn_id='application_db')
redis_hook_2 = RedisHook(redis_conn_id="redis_hook_2")
#################################################################FUCTIONS####################################################################################
def get_host_ip_mapping():
path = Variable.get("hosts_mk_path")
try:
host_var = load_file(path)
ipaddresses = host_var.get('ipaddresses')
return ipaddresses
except IOError:
logging.error("File Name not correct")
return None
except Exception:
logging.error("Please check the HostMK file exists on the path provided ")
return None
def load_file(file_path):
#Reset the global vars
host_vars = {
"all_hosts": [],
"ipaddresses": {},
"host_attributes": {},
"host_contactgroups": [],
}
try:
execfile(file_path, host_vars, host_vars)
del host_vars['__builtins__']
except IOError, e:
pass
return host_vars
def process_host_mk():
path = Variable.get("hosts_mk_path")
hosts = {}
site_mapping = {}
all_site_mapping =[]
all_list = []
device_dict = {}
start = 0
tech_wise_device_site_mapping = {}
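    # Added note: the mapping built below has the shape
    # {device_type: {site: [{"hostname": ..., "ip_address": ...}, ...]}}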
try:
text_file = open(path, "r")
except IOError:
logging.error("File Name not correct")
return "notify"
except Exception:
logging.error("Please check the HostMK file exists on the path provided ")
return "notify"
lines = text_file.readlines()
host_ip_mapping = get_host_ip_mapping()
for line in lines:
if "all_hosts" in line:
start = 1
if start == 1:
hosts["hostname"] = line.split("|")[0]
hosts["device_type"] = line.split("|")[1]
site_mapping["hostname"] = line.split("|")[0].strip().strip("'")
site_mapping['site'] = line.split("site:")[1].split("|")[0].strip()
site_mapping['device_type'] = line.split("|")[1].strip()
all_list.append(hosts.copy())
all_site_mapping.append(site_mapping.copy())
if ']\n' in line:
start = 0
all_list[0]['hostname'] = all_list[0].get("hostname").strip('all_hosts += [\'')
all_site_mapping[0] ['hostname'] = all_site_mapping[0].get("hostname").strip('all_hosts += [\'')
break
print "LEN of ALL LIST is %s"%(len(all_list))
if len(all_list) > 1:
for device in all_list:
device_dict[device.get("hostname").strip().strip("'")] = device.get("device_type").strip()
Variable.set("hostmk.dict",str(device_dict))
for site_mapping in all_site_mapping:
if site_mapping.get('device_type') not in tech_wise_device_site_mapping.keys():
tech_wise_device_site_mapping[site_mapping.get('device_type')] = {site_mapping.get('site'):[{"hostname":site_mapping.get('hostname'),"ip_address":host_ip_mapping.get(site_mapping.get('hostname'))}]}
else:
if site_mapping.get('site') not in tech_wise_device_site_mapping.get(site_mapping.get('device_type')).keys():
tech_wise_device_site_mapping.get(site_mapping.get('device_type'))[site_mapping.get('site')] = [{"hostname":site_mapping.get('hostname'),"ip_address":host_ip_mapping.get(site_mapping.get('hostname'))}]
else:
tech_wise_device_site_mapping.get(site_mapping.get('device_type')).get(site_mapping.get('site')).append({"hostname":site_mapping.get('hostname'),"ip_address":host_ip_mapping.get(site_mapping.get('hostname'))})
Variable.set("hostmk.dict.site_mapping",str(tech_wise_device_site_mapping))
count = 0
for x in tech_wise_device_site_mapping:
for y in tech_wise_device_site_mapping.get(x):
                count = count+len(tech_wise_device_site_mapping.get(x).get(y))
print "COUNT : %s"%(count)
return 0
else:
return -4
def dict_rows(cur):
desc = cur.description
return [
dict(zip([col[0] for col in desc], row))
for row in cur.fetchall()
]
def execute_query(query):
conn = SQLhook.get_conn()
cursor = conn.cursor()
cursor.execute(query)
data = dict_rows(cursor)
cursor.close()
return data
def createDict(data):
    # TODO: There are 3 levels of criticality; handle all of them (service_critical, critical, dtype_critical)
rules = {}
ping_rule_dict = {}
    operator_name_with_operator_in = eval(Variable.get("special_operator_services"))  # operator names for which we wish to apply the IN operator
service_name_with_operator_in = []
for operator_name in operator_name_with_operator_in:
service_name = "_".join(operator_name.split("_")[:-1])
service_name_with_operator_in.append(service_name)
for device in data:
service_name = device.get('service')
device_type = device.get('devicetype')
if device.get('dtype_ds_warning') and device.get('dtype_ds_critical'):
device['critical'] = device.get('dtype_ds_critical')
device['warning'] = device.get('dtype_ds_warning')
elif device.get('service_warning') and device.get('service_critical'):
device['critical'] = device.get('service_critical')
device['warning'] = device.get('service_warning')
if service_name == 'radwin_uas' and device['critical'] == "":
continue
if service_name:
name = str(service_name)
rules[name] = {}
if device.get('critical'):
rules[name]={"Severity1":["critical",{'name': str(name)+"_critical", 'operator': 'greater_than' if ("_rssi" not in name) and ("_uas" not in name) else "less_than_equal_to", 'value': device.get('critical') or device.get('dtype_ds_critical')}]}
else:
rules[name]={"Severity1":["critical",{'name': str(name)+"_critical", 'operator': 'greater_than', 'value': ''}]}
if device.get('warning'):
rules[name].update({"Severity2":["warning",{'name': str(name)+"_warning", 'operator': 'greater_than' if ("_rssi" not in name) and ("_uas" not in name) else "less_than_equal_to" , 'valu
|
e': device.get('warning') or device.get('dtype_ds_warning')}]})
else:
rules[name].update({"Severity2":["warning",{'name': str(name)+"_warning", 'operator': 'greater_than', 'value': ''}]})
if device_type not in ping_rule_dict:
            if device.get('ping_pl_critical') and device.get('ping_pl_warning') and device.get('ping_rta_critical') and device.get('ping_rta_warning'):
ping_rule_dict[device_type] = {
'ping_pl_critical' : device.get('ping_pl_critical'),
'ping_pl_warning': device.get('ping_pl_warning') ,
'ping_rta_critical': device.get('ping_rta_critical'),
'ping_rta_warning': device.get('ping_rta_warning')
}
for device_type in ping_rule_dict:
if ping_rule_dict.get(device_type).get('ping_pl_critical'):
rules[device_type+"_pl"]={}
rules[device_type+"_pl"].update({"Severity1":["critical",{'name': device_type+"_pl_critical", 'operator': 'greater_than', 'value': float(ping_rule_dict.get(device_type).get('ping_pl_critical')) or ''}]})
if ping_rule_dict.get(device_type).get('ping_pl_warning'):
rules[device_type+"_pl"].update({"Severity2":["warning",{'name': device_type+"_pl_warning", 'operator': 'greater_than', 'value': float(ping_rule_dict.get(device_type).get('ping_p
|
samabhi/pstHealth
|
venv/bin/createfontdatachunk.py
|
Python
|
mit
| 600
| 0
|
#!/Users/abhisheksamdaria/GitHub/pstHealth/venv/bin/python2.7
from __future__ import print_function
import base64
import os
import sys
if __name__ == "__main__":
# create font data chunk for embedding
font = "Tests/images/courB08"
print(" f._load_pilfont_data(")
print(" # %s" % os.path.basename(font))
print(" BytesIO(base64.decodestring(b'''")
base64.encode(open(font + ".pil", "rb"), sys.stdout)
print("''')), Image.open(BytesIO(b
|
ase64.decodestring(b'''")
base64.encode(open(font + ".pbm", "rb"), sys.stdout)
print("'''))))")
# End of file
|
e-Luminate/eluminate_web
|
eluminate_web/apps/events/migrations/0006_auto__add_field_event_photo.py
|
Python
|
gpl-3.0
| 7,343
| 0.007899
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Event.photo'
db.add_column('events_event', 'photo',
self.gf('django.db.models.fields.files.ImageField')(default='', max_length=200, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Event.photo'
db.delete_column('events_event', 'photo')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
            'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'events.day': {
'Meta': {'object_name': 'Day'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'events.event': {
'Meta': {'object_name': 'Event'},
'collaborators': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'collaboration_events'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['participant.Participant']"}),
'days': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['events.Day']", 'symmetrical': 'False'}),
'description': ('django.db.models.fields.TextField', [], {}),
'end_time': ('django.db.models.fields.TimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['maps.Location']", 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'participant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'own_events'", 'to': "orm['participant.Participant']"}),
'photo': ('django.db.models.fields.files.ImageField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'start_time': ('django.db.models.fields.TimeField', [], {})
},
'maps.location': {
'Meta': {'unique_together': "(('user', 'name'),)", 'object_name': 'Location'},
'area': ('django.contrib.gis.db.models.fields.PolygonField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'marker': ('django.contrib.gis.db.models.fields.PointField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'location_set'", 'to': "orm['auth.User']"})
},
'participant.category': {
'Meta': {'object_name': 'Category'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'})
},
'participant.participant': {
'Meta': {'object_name': 'Participant'},
'approved_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['participant.Category']", 'symmetrical': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '200'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '200', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'}),
'website': ('django.db.models.fields.CharField', [], {'max_length': '2000'})
}
}
complete_apps = ['events']
|
hwangsyin/cbrc-devteam-blog
|
service/service.py
|
Python
|
apache-2.0
| 5,715
| 0.006211
|
import settings
import mysql.connector
from domain.domain import Article
from domain.domain import Project
from domain.domain import User
from domain.domain import Tag
import service.database as db
# Article management
class ArticleService:
    # Query the most recently published articles
def query_most_published_article(self):
conn = db.get_connection()
sql = "".join(["select a.id as id,a.author_id as author_id,",
"u.name as author_name,a.title as title,a.content as content,a.create_time as create_time,",
"a.publish_time as publish_time,a.last_update_time as last_update_time",
" from article as a left join user as u on a.author_id=u.id",
" order by a.publish_time desc limit 0,%(page_size)s"])
cursor = conn.cursor()
cursor.execute(sql, {"page_size": settings.app_settings["page_size"]})
articles = None
for (id, author_id, author_name, title,
content, create_time, publish_time, last_update_time) in cursor:
if (not articles):
articles = []
article = Article()
articles.append(article)
article.id = id
if (author_id):
u = User()
article.author = u
u.id = author_id
u.name = author_name
article.title = title
article.content = content
article.create_time = create_time
article.publish_time = publish_time
article.last_update_time = last_update_time
cursor.close()
conn.close()
return articles
    # Query the article list by tag
def query_article_by_tag(self, tag_id):
if (not tag_id):
|
return None
_tag_id = None
try:
_tag_id = int(tag_id)
except ValueError:
return None
sql = "".join(["select a.id as id,a.author_id as author_id,u.name as author_name",
",a.title as title,a.create_time as create_time,a.publish_time as publish_time",
",a.last_update_time as last_update_time",
" from article as a left join user as u
|
on a.author_id=u.id",
" where a.publish_time is not null and a.id in (select article_id from article_tag where tag_id=%(tag_id)s)"])
conn = db.get_connection()
cursor = conn.cursor()
cursor.execute(sql, {"tag_id": _tag_id})
articles = None
for (id, author_id, author_name, title, create_time, publish_time, last_update_time) in cursor:
if (not articles):
articles = []
a = Article()
articles.append(a)
a.id = id
a.title = title
a.create_time = create_time
a.publish_time = publish_time
a.last_update_time = last_update_time
if (author_id):
u = User()
a.author = u
u.id = author_id
u.name = author_name
cursor.close()
conn.close()
return articles
    # Find an article by its ID
def find(self, article_id):
conn = db.get_connection()
sql = "".join(["select a.id as id,a.author_id as author_id,",
"u.name as author_name,a.title as title,a.content as content,a.create_time as create_time,",
"a.publish_time as publish_time,a.last_update_time as last_update_time",
" from article as a left join user as u on a.author_id=u.id",
" where a.id=%(article_id)s"])
cursor = conn.cursor()
cursor.execute(sql, {"article_id": article_id})
article = None
for (id, author_id, author_name, title, content, create_time, publish_time, last_update_time) in cursor:
if (not article):
article = Article()
article.id = id
article.title = title
article.content = content
article.create_time = create_time
article.publish_time = publish_time
article.last_update_time = last_update_time
if (author_id):
u = User()
article.author = u
u.id = author_id
u.name = author_name
cursor.close()
conn.close()
return article
    # Add a new article
def add(self, article):
# implement
return 1
# Tag management
class TagService:
def list_all(self):
conn = db.get_connection()
if (not conn):
return None
sql = "".join(["select t.id as id, t.name as name, t.author_id as author_id, u.name as author_name",
",t.create_time as create_time,t.last_update_time as last_update_time",
" from tag as t left join user as u on t.author_id=u.id order by t.create_time desc"])
cursor = conn.cursor()
cursor.execute(sql)
tags = None
for (id, name, author_id, author_name, create_time, last_update_time) in cursor:
if (not tags):
tags = []
t = Tag()
tags.append(t)
t.id = id
t.name = name
t.create_time = create_time
t.last_update_time = last_update_time
if (author_id):
u = User()
t.author = u
u.id = author_id
u.name = author_name
cursor.close()
conn.close()
return tags
article_service = ArticleService()
tag_service = TagService()
|
mpetyx/palmdrop
|
venv/lib/python2.7/site-packages/cms/tests/plugins.py
|
Python
|
apache-2.0
| 46,414
| 0.001982
|
# -*- coding: utf-8 -*-
from __future__ import with_statement
import datetime
from cms.api import create_page, publish_page, add_plugin
from cms.exceptions import PluginAlreadyRegistered, PluginNotRegistered
from cms.models import Page, Placeholder
from cms.models.pluginmodel import CMSPlugin, PluginModelBase
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cms.plugins.utils import get_plugins_for_page
from cms.plugins.file.models import File
from cms.plugins.inherit.models import InheritPagePlaceholder
from cms.plugins.link.forms import LinkForm
from cms.plugins.link.models import Link
from cms.plugins.picture.models import Picture
from cms.plugins.text.models import Text
from cms.plugins.text.utils import (plugin_tags_to_id_list, plugin_tags_to_admin_html)
from cms.plugins.twitter.models import TwitterRecentEntries
from cms.test_utils.project.pluginapp.models import Article, Section
from cms.test_utils.project.pluginapp.plugins.manytomany_rel.models import (
ArticlePluginModel)
from cms.test_utils.testcases import CMSTestCase, URL_CMS_PAGE, URL_CMS_PLUGIN_MOVE, \
URL_CMS_PAGE_ADD, URL_CMS_PLUGIN_ADD, URL_CMS_PLUGIN_EDIT, URL_CMS_PAGE_CHANGE, URL_CMS_PLUGIN_REMOVE, \
URL_CMS_PLUGIN_HISTORY_EDIT
from cms.sitemaps.cms_sitemap import CMSSitemap
from cms.test_utils.util.context_managers import SettingsOverride
from cms.utils.copy_plugins import copy_plugins_to
from django.utils import timezone
from django.conf import settings
from django.contrib import admin
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.management import call_command
from django.forms.widgets import Media
from django.test.testcases import TestCase
import os
class DumbFixturePlugin(CMSPluginBase):
model = CMSPlugin
name = "Dumb Test Plugin. It does nothing."
render_template = ""
admin_preview = False
allow_children = True
def render(self, context, instance, placeholder):
return context
class PluginsTestBaseCase(CMSTestCase):
def setUp(self):
self.super_user = User(username="test", is_staff=True, is_active=True, is_superuser=True)
self.super_user.set_password("test")
self.super_user.save()
self.slave = User(username="slave", is_staff=True, is_active=True, is_superuser=False)
self.slave.set_password("slave")
self.slave.save()
self.FIRST_LANG = settings.LANGUAGES[0][0]
self.SECOND_LANG = settings.LANGUAGES[1][0]
self._login_context = self.login_user_context(self.super_user)
self._login_context.__enter__()
def tearDown(self):
self._login_context.__exit__(None, None, None)
def approve_page(self, page):
response = self.client.get(URL_CMS_PAGE + "%d/approve/" % page.pk)
self.assertRedirects(response, URL_CMS_PAGE)
# reload page
return self.reload_page(page)
def get_request(self, *args, **kwargs):
request = super(PluginsTestBaseCase, self).get_request(*args, **kwargs)
request.placeholder_media = Media()
return request
class PluginsTestCase(PluginsTestBaseCase):
def _create_text_plugin_on_page(self, page):
        plugin_data = {
'plugin_type': "TextPlugin",
'language': settings.LANGUAGES[0][0],
'placeholder': page.placeholders.get(slot="body").pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEquals(response.status_code, 200)
created_plugin_id = int(response.content)
self.assertEquals(created_plugin_id, CMSPlugin.objects.all()[0].pk)
return created_plugin_id
def _edit_text_plugin(self, plugin_id, text):
edit_url = "%s%s/" % (URL_CMS_PLUGIN_EDIT, plugin_id)
response = self.client.get(edit_url)
self.assertEquals(response.status_code, 200)
data = {
"body": text
}
response = self.client.post(edit_url, data)
self.assertEquals(response.status_code, 200)
txt = Text.objects.get(pk=plugin_id)
return txt
def test_add_edit_plugin(self):
"""
Test that you can add a text plugin
"""
# add a new text plugin
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
created_plugin_id = self._create_text_plugin_on_page(page)
# now edit the plugin
txt = self._edit_text_plugin(created_plugin_id, "Hello World")
self.assertEquals("Hello World", txt.body)
# edit body, but click cancel button
data = {
"body": "Hello World!!",
"_cancel": True,
}
edit_url = '%s%d/' % (URL_CMS_PLUGIN_EDIT, created_plugin_id)
response = self.client.post(edit_url, data)
self.assertEquals(response.status_code, 200)
txt = Text.objects.all()[0]
self.assertEquals("Hello World", txt.body)
def test_plugin_history_view(self):
"""
Test plugin history view
"""
import reversion
page_data = self.get_new_page_data()
# two versions created by simply creating the page
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
page_id = int(page.pk)
# page version 3
created_plugin_id = self._create_text_plugin_on_page(page)
# page version 4
txt = self._edit_text_plugin(created_plugin_id, "Hello Foo")
self.assertEquals("Hello Foo", txt.body)
# page version 5
txt = self._edit_text_plugin(created_plugin_id, "Hello Bar")
self.assertEquals("Hello Bar", txt.body)
versions = [v.pk for v in reversed(reversion.get_for_object(page))]
history_url = '%s%d/' % (
URL_CMS_PLUGIN_HISTORY_EDIT % (page_id, versions[-2]),
created_plugin_id)
response = self.client.get(history_url)
self.assertEquals(response.status_code, 200)
self.assertIn('Hello Foo', response.content)
def test_plugin_order(self):
"""
Test that plugin position is saved after creation
"""
page_en = create_page("PluginOrderPage", "col_two.html", "en",
slug="page1", published=True, in_navigation=True)
ph_en = page_en.placeholders.get(slot="col_left")
# We check created objects and objects from the DB to be sure the position value
# has been saved correctly
text_plugin_1 = add_plugin(ph_en, "TextPlugin", "en", body="I'm the first")
text_plugin_2 = add_plugin(ph_en, "TextPlugin", "en", body="I'm the second")
db_plugin_1 = CMSPlugin.objects.get(pk=text_plugin_1.pk)
db_plugin_2 = CMSPlugin.objects.get(pk=text_plugin_2.pk)
with SettingsOverride(CMS_PERMISSION=False):
self.assertEqual(text_plugin_1.position, 1)
self.assertEqual(db_plugin_1.position, 1)
self.assertEqual(text_plugin_2.position, 2)
self.assertEqual(db_plugin_2.position, 2)
## Finally we render the placeholder to test the actual content
rendered_placeholder = ph_en.render(self.get_context(page_en.get_absolute_url()), None)
self.assertEquals(rendered_placeholder, "I'm the firstI'm the second")
def test_add_cancel_plugin(self):
"""
Test that you can cancel a new plugin before editing and
that the plugin is removed.
"""
# add a new text plugin
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
plugin_data = {
'plugin_type': "TextPlugin",
'language': settings.LANGUAGES[0][0],
'placeholder': page.placeholders.get(slot="body").pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEquals(response.status_code, 200)
self.assertEquals(int(re
|
orbitfp7/nova
|
nova/tests/unit/scheduler/filters/test_numa_topology_filters.py
|
Python
|
apache-2.0
| 7,379
| 0.000678
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_serialization import jsonutils
from nova import objects
from nova.objects import base as obj_base
from nova.scheduler.filters import numa_topology_filter
from nova import test
from nova.tests.unit import fake_instance
from nova.tests.unit.scheduler import fakes
from nova.virt import hardware
class TestNUMATopologyFilter(test.NoDBTestCase):
def setUp(self):
super(TestNUMATopologyFilter, self).setUp()
self.filt_cls = numa_topology_filter.NUMATopologyFilter()
def test_numa_topology_filter_pass(self):
instance_topology = objects.InstanceNUMATopology(
cells=[objects.InstanceNUMACell(id=0, cpuset=set([1]), memory=512),
objects.InstanceNUMACell(id=1, cpuset=set([3]), memory=512)
])
instance = fake_instance.fake_instance_obj(mock.sentinel.ctx)
instance.numa_topology = instance_topology
filter_properties = {
'request_spec': {
'instance_properties': jsonutils.to_primitive(
obj_base.obj_to_primitive(instance))}}
host = fakes.FakeHostState('host1', 'node1',
{'numa_topology': fakes.NUMA_TOPOLOGY,
'pci_stats': None})
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
def test_numa_topology_filter_numa_instance_no_numa_host_fail(self):
instance_topology = objects.InstanceNUMATopology(
cells=[objects.InstanceNUMACell(id=0, cpuset=set([1]), memory=512),
objects.InstanceNUMACell(id=1, cpuset=set([3]), memory=512)
])
instance = fake_instance.fake_instance_obj(mock.sentinel.ctx)
instance.numa_topology = instance_topology
filter_properties = {
'request_spec': {
'instance_properties': jsonutils.to_primitive(
obj_base.obj_to_primitive(instance))}}
host = fakes.FakeHostState('host1', 'node1', {'pci_stats': None})
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
def test_numa_topology_filter_numa_host_no_numa_instance_pass(self):
instance = fake_instance.fake_instance_obj(mock.sentinel.ctx)
instance.numa_topology = None
filter_properties = {
'request_spec': {
'instance_properties': jsonutils.to_primitive(
obj_base.obj_to_primitive(instance))}}
host = fakes.FakeHostState('host1', 'node1',
{'numa_topology': fakes.NUMA_TOPOLOGY})
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
def test_numa_topology_filter_fail_fit(self):
instance_topology = objects.InstanceNUMATopology(
cells=[objects.InstanceNUMACell(id=0, cpuset=set([1]), memory=512),
objects.InstanceNUMACell(id=1, cpuset=set([2]), memory=512),
objects.InstanceNUMACell(id=2, cpuset=set([3]), memory=512)
])
instance = fake_instance.fake_instance_obj(mock.sentinel.ctx)
instance.numa_topology = instance_topology
filter_properties = {
'request_spec': {
'instance_properties': jsonutils.to_primitive(
obj_base.obj_to_primitive(instance))}}
host = fakes.FakeHostState('host1', 'node1',
{'numa_topology': fakes.NUMA_TOPOLOGY,
'pci_stats': None})
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
def test_numa_topology_filter_fail_memory(self):
self.flags(ram_allocation_ratio=1)
instance_topology = objects.InstanceNUMATopology(
cells=[objects.InstanceNUMACell(id=0, cpuset=set([1]),
memory=1024),
objects.InstanceNUMACell(id=1, cpuset=set([3]), memory=512)
])
instance = fake_instance.fake_instance_obj(mock.sentinel.ctx)
instance.numa_topology = instance_topology
filter_properties = {
'request_spec': {
'instance_properties': jsonutils.to_primitive(
obj_base.obj_to_primitive(instance))}}
host = fakes.FakeHostState('host1', 'node1',
{'numa_topology': fakes.NUMA_TOPOLOGY,
'pci_stats': None})
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
def test_numa_topology_filter_fail_cpu(self):
self.flags(cpu_allocation_ratio=1)
instance_topology = objects.InstanceNUMATopology(
cells=[objects.InstanceNUMACell(id=0, cpuset=set([1]), memory=512),
objects.InstanceNUMACell(id=1, cpuset=set([3, 4, 5]),
memory=512)])
instance = fake_instance.fake_instance_obj(mock.sentinel.ctx)
instance.numa_topology = instance_topology
filter_properties = {
'request_spec': {
'instance_properties': jsonutils.to_primitive(
obj_base.obj_to_primitive(instance))}}
host = fakes.FakeHostState('host1', 'node1',
{'numa_topology': fakes.NUMA_TOPOLOGY,
'pci_stats': None})
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
def test_numa_topology_filter_pass_set_limit(self):
self.flags(cpu_allocation_ratio=21)
self.flags(ram_allocation_ratio=1.3)
instance_topology = objects.InstanceNUMATopology(
cells=[objects.InstanceNUMACell(id=0, cpuset=set([1]), memory=512),
objects.InstanceNUMACell(id=1, cpuset=set([3]), memory=512)
])
instance = fake_instance.fake_instance_obj(mock.sentinel.ctx)
instance.numa_topology = instance_topology
filter_properties = {
'request_spec': {
'instance_properties': jsonutils.to_primitive(
obj_base.obj_to_primitive(instance))}}
host = fakes.FakeHostState('host1', 'node1',
{'numa_topology': fakes.NUMA_TOPOLOGY,
'pci_stats': None})
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
        limits_topology = hardware.VirtNUMALimitTopology.from_json(
host.limits['numa_topology'])
self.assertEqual(limits_topology.cells[0].cpu_limit, 42)
self.assertEqual(limits_topology.cells[1].cpu_limit, 42)
        self.assertEqual(limits_topology.cells[0].memory_limit, 665)
self.assertEqual(limits_topology.cells[1].memory_limit, 665)
|
inclement/plyer
|
setup.py
|
Python
|
mit
| 1,374
| 0
|
#!/usr/bin/env python
from os.path import dirname, join
import plyer
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
curdir = dirname(__file__)
packages = [
'plyer',
'plyer.platforms',
'plyer.platforms.linux',
'plyer.platforms.android',
'plyer.platforms.win',
'plyer.platforms.win.libs',
'plyer.platforms.ios',
'plyer.platforms.macosx',
]
setup(
name='plyer',
version=plyer.__version__,
    description='Platform-independent wrapper for platform-dependent APIs',
long_description=open(join(curdir, 'README.rst')).read(),
author='Kivy team',
author_email='[email protected]',
url='https://plyer.readthedocs.org/en/latest/',
packages=packages,
package_data={'': ['LICENSE', 'README.rst']},
package_dir={'plyer': 'plyer'},
include_package_data=True,
license=open(join(curdir, 'LICENSE')).read(),
zip_safe=False,
classifiers=(
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
),
)
|
tomato42/fsresck
|
fsresck/nbd/request.py
|
Python
|
gpl-2.0
| 3,965
| 0
|
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Description: File system resilience testing application
# Author: Hubert Kario <[email protected]>
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Copyright (c) 2015 Hubert Kario. All rights reserved.
#
# This copyrighted material is made available to anyone wishing
# to use, modify, copy, or redistribute it subject to the terms
# and conditions of the GNU General Public License version 2.
#
# This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program; if not, write to the Free
# Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301, USA.
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""Handling of NBD requests."""
import struct
from .constants import Magic, RequestType
from ..compat import compat_str
class Error(Exception):
"""Exception describing what went wrong."""
def __repr__(self):
"""Format exception."""
return "request.{0}".format(super(Error, self).__repr__())
class NBDRequest(object):
"""Representation of single NBD protocol request."""
def __init__(self, req_type, handle, data_from, data_length, data=None):
"""Make a NBD protocol request object."""
self.req_type = req_type
self.handle = handle
self.data_from = data_from
self.data_length = data_length
self.data = data
def __eq__(self, other):
"""Check if the other object is equal to this object."""
return (isinstance(other, self.__class__) and
self.__dict__ == other.__dict__)
def __ne__(self, other):
"""Check if the other object is different from this object."""
return not self.__eq__(other)
def recvexactly(sock, size, flags=0):
"""recv exactly size bytes from socket."""
buff = bytearray(size)
view = memoryview(buff)
pos = 0
while pos < size:
read = sock.recv_into(view[pos:], size - pos, flags)
if not read:
raise Error("Incomplete read, expected {0}, read {1}"
                        .format(size, pos))
pos += read
return buff
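# Illustrative usage (added, not part of the original module): recvexactly
# keeps calling recv_into until exactly `size` bytes have arrived, so with a
# local socketpair the call below returns precisely the six bytes sent:
#
#     import socket
#     a, b = socket.socketpair()
#     a.sendall(b"abcdef")
#     assert bytes(recvexactly(b, 6)) == b"abcdef"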
class NBDRequestSocket(object):
"""Handle requests on NBD socket."""
request_fmt = ">IIQQI"
request_length = struct.calcsize(request_fmt)
def __init__(self, sock):
"""Initialize the socket wrapper."""
self.sock = sock
def recv(self):
"""Receive a single request from socket and return it."""
data = recvexactly(self.sock, self.request_length)
assert len(data) == self.request_length
data = compat_str(data)
result_tuple = struct.unpack(self.request_fmt, data)
magic, req_type, handle, data_from, data_length = result_tuple
if magic != Magic.NBD_REQUEST_MAGIC:
raise Error("Request magic invalid: {0}".format(magic))
if req_type != RequestType.NBD_CMD_WRITE:
return NBDRequest(req_type, handle, data_from, data_length)
        payload = recvexactly(self.sock, data_length)
return NBDRequest(req_type, handle, data_from, data_length, payload)
def send(self, request):
"""Send a single request through socket."""
data = struct.pack(self.request_fmt,
Magic.NBD_REQUEST_MAGIC,
request.req_type,
|
request.handle,
request.data_from,
request.data_length)
if request.req_type == RequestType.NBD_CMD_WRITE:
data = data + request.data
self.sock.sendall(data)
|
metomi/rose
|
metomi/rosie/svn_pre_commit.py
|
Python
|
gpl-3.0
| 13,444
| 0.000149
|
#!/usr/bin/env python3
# -----------------------------------------------------------------------------
# Copyright (C) British Crown (Met Office) & Contributors.
#
# This file is part of Rose, a framework for meteorological suites.
#
# Rose is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Rose is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Rose. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
"""A pre-commit hook on a Rosie Subversion repository.
Ensure that commits conform to the rules of Rosie.
"""
from fnmatch import fnmatch
import re
import shlex
import sys
import traceback
import metomi.rose
from metomi.rose.config import ConfigSyntaxError
from metomi.rose.macro import (
add_meta_paths,
get_reports_as_text,
load_meta_config,
)
from metomi.rose.macros import DefaultValidators
from metomi.rose.opt_parse import RoseOptionParser
from metomi.rose.popen import RosePopenError
from metomi.rose.reporter import Reporter
from metomi.rose.resource import ResourceLocator
from metomi.rose.scheme_handler import SchemeHandlersManager
from metomi.rosie.svn_hook import (
BadChange,
BadChanges,
InfoFileError,
RosieSvnHook,
)
class RosieSvnPreCommitHook(RosieSvnHook):
"""A pre-commit hook on a Rosie Subversion repository.
Ensure that commits conform to the rules of Rosie.
"""
IGNORES = "svnperms.conf"
RE_ID_NAMES = [r"[Ra-z]", r"[Oa-z]", r"[S\d]", r"[I\d]", r"[E\d]"]
TRUNK_KNOWN_KEYS_FILE = "trunk/rosie-keys"
def __init__(self, event_handler=None, popen=None):
super(RosieSvnPreCommitHook, self).__init__(event_handler, popen)
self.usertools_manager = SchemeHandlersManager(
[self.path], "rosie.usertools", ["verify_users"]
)
def _get_access_info(self, info_node):
"""Return (owner, access_list) from "info_node"."""
owner = info_node.get_value(["owner"])
access_list = info_node.get_value(["access-list"], "").split()
access_list.sort()
return owner, access_list
def _verify_users(
self, status, path, txn_owner, txn_access_list, bad_changes
):
"""Check txn_owner and txn_access_list.
For any invalid users, append to bad_changes and return True.
"""
# The owner and names in access list must be real users
conf = ResourceLocator.default().get_conf()
user_tool_name = conf.get_value(["rosa-svn", "user-tool"])
if not user_tool_name:
return False
user_tool = self.usertools_manager.get_handler(user_tool_name)
txn_users = set([txn_owner] + txn_access_list)
txn_users.discard("*")
bad_users = user_tool.verify_users(txn_users)
for bad_user in bad_users:
if txn_owner == bad_user:
bad_change = BadChange(
status, path, BadChange.USER, "owner=" + bad_user
)
bad_changes.append(bad_change)
if bad_user in txn_access_list:
bad_change = BadChange(
status, path, BadChange.USER, "access-list=" + bad_user
)
bad_changes.append(bad_change)
return bool(bad_users)
def run(self, repos, txn):
"""Apply the rule engine on transaction "txn" to repository "repos"."""
changes = set() # set([(status, path), ...])
for line in self._svnlook("changed", "-t", txn, repos).splitlines():
status, path = line.split(None, 1)
changes.add((status, path))
bad_changes = []
author = None
super_users = None
rev_info_map = {}
txn_info_map = {}
conf = ResourceLocator.default().get_conf()
ignores_str = conf.get_value(["rosa-svn", "ignores"], self.IGNORES)
ignores = shlex.split(ignores_str)
for status, path in sorted(changes):
if any(fnmatch(path, ignore) for ignore in ignores):
continue
names = path.split("/", self.LEN_ID + 1)
tail = None
if not names[-1]:
tail = names.pop()
# Directories above the suites must match the ID patterns
is_bad = False
for name, pattern in zip(names, self.RE_ID_NAMES):
if not re.compile(r"\A" + pattern + r"\Z").match(name):
is_bad = True
break
if is_bad:
msg = "Directories above the suites must match the ID patterns"
bad_changes.append(BadChange(status, path, content=msg))
continue
# At levels above the suites, can only add directories
if len(names) < self.LEN_ID:
if status[0] != self.ST_ADDED:
msg = (
"At levels above the suites, "
"can only add directories"
)
bad_changes.append(BadChange(status, path, content=msg))
continue
# Cannot have a file at the branch level
if len(names) == self.LEN_ID + 1 and tail is None:
msg = "Cannot have a file at the branch level"
bad_changes.append(BadChange(status, path, content=msg))
continue
# New suite should have an info file
if len(names) == self.LEN_ID and status == self.ST_ADDED:
if (self.ST_ADDED, path + "trunk/") not in changes:
bad_changes.append(
BadChange(status, path, BadChange.NO_TRUNK)
)
continue
path_trunk_info_file = path + self.TRUNK_INFO_FILE
if (self.ST_ADDED, path_trunk_info_file) not in changes and (
self.ST_UPDATED,
path_trunk_info_file,
) not in changes:
bad_changes.append(
BadChange(status, path, BadChange.NO_INFO)
)
continue
sid = "".join(names[0 : self.LEN_ID])
branch = names[self.LEN_ID] if len(names) > self.LEN_ID else None
path_head = "/".join(sid) + "/"
path_tail = path[len(path_head) :]
is_meta_suite = sid == "ROSIE"
if status != self.ST_DELETED:
# Check info file
if sid not in txn_info_map:
try:
txn_info_map[sid] = self._load_info(
repos, sid, branch=branch, transaction=txn
)
err = None
except ConfigSyntaxError as exc:
err = InfoFileError(InfoFileError.VALUE, exc)
                    except RosePopenError as exc:
err = InfoFileError(InfoFileError.NO_INFO, exc.stderr)
if err:
bad_changes.append(err)
                        txn_info_map[sid] = err
continue
# Suite must have an owner
txn_owner, txn_access_list = self._get_access_info(
txn_info_map[sid]
)
if not txn_owner:
bad_changes.append(
InfoFileError(InfoFileError.NO_OWNER)
)
continue
# No need to check other non-trunk changes
if branch and branch != "trunk":
continue
# For meta suite, make sure keys in keys file can be
|
redhat-imaging/imagefactory
|
imagefactory_plugins/Rackspace/__init__.py
|
Python
|
apache-2.0
| 668
| 0
|
# encoding: utf-8
# Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .Rackspace import Rackspace as delegate_class
|
dpdani/tBB
|
tBB/async_stdio.py
|
Python
|
gpl-3.0
| 1,064
| 0.00094
|
# From: https://gist.github.com/nathan-hoad/8966377
import os
import asyncio
import sys
from asyncio.streams import StreamWriter, FlowControlMixin
reader, writer = None, None
@asyncio.coroutine
def stdio(loop=None):
if loop is None:
loop = asyncio.get_event_loop()
|
reader = asyncio.StreamReader()
reader_protocol = asyncio.StreamReaderProtocol(reader)
writer_transport, writer_protocol = yield from loop.connect_write_pipe(FlowControlMixin, os.fdopen(0, 'wb'))
writer = StreamWriter(writer_transport, writer_protocol, None, loop)
yield from loop.connect_read_pipe(lambda: reader_protocol, sys.stdin)
return reader, writer
@asyncio.coroutine
def async_input(message):
if isinstance(message, str):
message = message.encode('utf8')
global reader, writer
if (reader, writer) == (None, None):
reader, writer = yield from stdio()
writer.write(message)
yield from writer.drain()
line = yield from reader.readline()
return line.decode('utf8').replace('\r', '').replace('\n', '')
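# Illustrative usage (added, not part of the original module): inside an
# old-style coroutine the helper replaces the blocking built-in input(), e.g.
#
#     @asyncio.coroutine
#     def main():
#         name = yield from async_input("name> ")
#         print(name)
#
#     asyncio.get_event_loop().run_until_complete(main())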
|
ingenieroariel/geonode
|
geonode/groups/forms.py
|
Python
|
gpl-3.0
| 5,314
| 0.000188
|
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from django import forms
from django.core.validators import validate_email, ValidationError
from slugify import slugify
from django.utils.translation import ugettext as _
from modeltranslation.forms import TranslationModelForm
from django.contrib.auth import get_user_model
from geonode.groups.models import GroupProfile
class GroupForm(TranslationModelForm):
slug = forms.SlugField(
max_length=20,
help_text=_("a short version of the name consisting only of letters, numbers, underscores and hyphens."),
widget=forms.HiddenInput,
required=False)
def clean_slug(self):
if GroupProfile.objects.filter(
slug__iexact=self.cleaned_data["slug"]).count() > 0:
raise forms.ValidationError(
_("A group already exists with that slug."))
return self.cleaned_data["slug"].lower()
def clean_title(self):
if GroupProfile.objects.filter(
title__iexact=self.cleaned_data["title"]).count() > 0:
raise forms.ValidationError(
_("A group already exists with that name."))
return self.cleaned_data["title"]
def clean(self):
cleaned_data = self.cleaned_data
name = cleaned_data.get("title")
slug = slugify(name)
cleaned_data["slug"] = slug
return cleaned_data
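    # Added note (illustrative): clean() derives the slug from the title, so a
    # title such as "Field Mappers" would be stored with the slug
    # "field-mappers".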
class Meta:
model = GroupProfile
exclude = ['group']
class GroupUpdateForm(forms.ModelForm):
def clean_name(self):
if GroupProfile.objects.filter(
name__iexact=self.cleaned_data["title"]).count() > 0:
if self.cleaned_data["title"] == self.instance.name:
pass # same instance
else:
raise forms.ValidationError(
_("A group already exists with that name."))
return self.cleaned_data["title"]
class Meta:
model = GroupProfile
exclude = ['group']
class GroupMemberForm(forms.Form):
role = forms.ChoiceField(choices=[
("member", "Member"),
("manager", "Manager"),
])
user_identifiers = forms.CharField(
widget=forms.TextInput(
attrs={
'class': 'user-select'}))
def clean_user_identifiers(self):
value = self.cleaned_data["user_identifiers"]
new_members, errors = [], []
for ui in value.split(","):
ui = ui.strip()
try:
validate_email(ui)
try:
new_members.append(get_user_model().objects.get(email=ui))
except get_user_model().DoesNotExist:
new_members.append(ui)
except ValidationError:
try:
new_members.append(
get_user_model().objects.get(
username=ui))
except get_user_model().DoesNotExist:
errors.append(ui)
if errors:
message = (
"The following are not valid email addresses or "
"usernames: %s; not added to the group" %
", ".join(errors))
raise forms.ValidationError(message)
return new_members
class GroupInviteForm(forms.Form):
invite_role = forms.ChoiceField(label="Role", choices=[
("member", "Member"),
("manager", "Manager"),
])
invite_user_identifiers = forms.CharField(
label="E-mail addresses list",
widget=forms.Textarea)
def clean_user_identifiers(self):
value = self.cleaned_data["invite_user_identifiers"]
invitees, errors = [], []
|
for ui in value.split(","):
ui = ui.strip()
try:
validate_email(ui)
try:
invitees.append(get_user_model().objects.get(email=ui))
except get_user_model().DoesNotExist:
invitees.append(ui)
except ValidationError:
try:
invitees.append(get_user_model().objects.get(username=ui))
except get_user_model().DoesNotExist:
|
errors.append(ui)
if errors:
message = (
"The following are not valid email addresses or "
"usernames: %s; no invitations sent" %
", ".join(errors))
raise forms.ValidationError(message)
return invitees
|
sim0629/irc
|
irc/server.py
|
Python
|
lgpl-2.1
| 17,734
| 0.002425
|
# -*- coding: utf-8 -*-
"""
irc/server.py
Copyright © 2009 Ferry Boender
Copyright © 2012 Jason R. Coombs
This server has basic support for:
* Connecting
* Channels
* Nicknames
* Public/private messages
It is MISSING support for notably:
* Server linking
* Modes (user and channel)
* Proper error reporting
* Basically everything else
It is mostly useful as a testing tool or perhaps for building something like a
private proxy on. Do NOT use it in any kind of production code or anything that
will ever be connected to by the public.
"""
#
# Very simple hacky ugly IRC server.
#
# Todo:
# - Encode format for each message and reply with events.codes['needmoreparams']
# - starting server when already started doesn't work properly. PID file is not changed, no error message is displayed.
# - Delete channel if last user leaves.
# - [ERROR] <socket.error instance at 0x7f9f203dfb90> (better error msg required)
# - Empty channels are left behind
# - No Op assigned when new channel is created.
# - User can /join multiple times (doesn't add more to channel, does say 'joined')
# - PING timeouts
# - Allow all numerical commands.
# - Users can send commands to channels they are not in (PART)
# Not Todo (Won't be supported)
# - Server linking.
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import print_function, absolute_import
import argparse
import logging
import socket
import select
import re
from . import client
from . import _py2_compat
from . import logging as log_util
from . import events
from . import buffer
SRV_WELCOME = "Welcome to %s v%s, the ugliest IRC server in the world." % (
__name__, client.VERSION)
log = logging.getLogger(__name__)
class IRCError(Exception):
"""
Exception thrown by IRC command handlers to notify client of a server/client error.
"""
def __init__(self, code, value):
self.code = code
self.value = value
def __str__(self):
return repr(self.value)
@classmethod
def from_name(cls, name, value):
return cls(events.codes[name], value)
class IRCChannel(object):
"""
Object representing an IRC channel.
"""
def __init__(self, name, topic='No topic'):
self.name = name
self.topic_by = 'Unknown'
self.topic = topic
self.clients = set()
class IRCClient(_py2_compat.socketserver.BaseRequestHandler):
"""
IRC client connect and command handling. Client connection is handled by
the `handle` method which sets up a two-way communication with the client.
It then handles commands sent by the client by dispatching them to the
handle_ methods.
"""
class Disconnect(BaseException): pass
def __init__(self, request, client_address, server):
self.user = None
self.host = client_address # Client's hostname / ip.
self.realname = None # Client's real name
self.nick = None # Client's currently registered nickname
self.send_queue = [] # Messages to send to client (strings)
self.channels = {} # Channels the client is in
_py2_compat.socketserver.BaseRequestHandler.__init__(self, request,
client_address, server)
def handle(self):
log.info('Client connected: %s', self.client_ident())
self.buffer = buffer.LineBuffer()
try:
while True:
self._handle_one()
except self.Disconnect:
self.request.close()
def _handle_one(self):
"""
Handle one read/write cycle.
"""
ready_to_read, ready_to_write, in_error = select.select(
[self.request], [self.request], [self.request], 0.1)
if in_error:
raise self.Disconnect()
# Write any commands to the client
while self.send_queue and ready_to_write:
msg = self.send_queue.pop(0)
self._send(msg)
# See if the client has any commands for us.
if ready_to_read:
self._handle_incoming()
def _handle_incoming(self):
try:
data = self.request.recv(1024)
except Exception:
raise self.Disconnect()
if not data:
raise self.Disconnect()
self.buffer.feed(data)
for line in self.buffer:
self._handle_line(line)
def _handle_line(self, line):
try:
log.debug('from %s: %s' % (self.client_ident(), line))
command, sep, params = line.partition(' ')
handler = getattr(self, 'handle_%s' % command.lower(), None)
if not handler:
log.info('No handler for command: %s. '
'Full line: %s' % (command, line))
raise IRCError.from_name('unknowncommand',
'%s :Unknown command' % command)
response = handler(params)
except AttributeError as e:
log.error(_py2_compat.str(e))
raise
except IRCError as e:
response = ':%s %s %s' % (self.server.servername, e.code, e.value)
log.error(response)
except Exception as e:
response = ':%s ERROR %r' % (self.server.servername, e)
log.error(response)
raise
if response:
self._send(response)
def _send(self, msg):
log.debug('to %s: %s', self.client_ident(), msg)
self.request.send(msg + '\r\n')
def handle_nick(self, params):
"""
Handle the initial setting of the user's nickname and nick changes.
"""
nick = params
# Valid nickname?
if re.search('[^a-zA-Z0-9\-\[\]\'`^{}_]', nick):
raise IRCError.from_name('erroneusnickname', ':%s' % nick)
if self.server.clients.get(nick, None) == self:
            # Already registered to user
            return
        if nick in self.server.clients:
# Someone else is using the nick
raise IRCError.from_name('nicknameinuse', 'NICK :%s' % (nick))
if not self.nick:
# New connection and nick is available; register and send welcome
# and MOTD.
self.nick = nick
self.server.clients[nick] = self
response = ':%s %s %s :%s' % (self.server.servername,
events.codes['welcome'], self.nick, SRV_WELCOME)
self.send_queue.append(response)
response = ':%s 376 %s :End of MOTD command.' % (
self.server.servername, self.nick)
self.send_queue.append(response)
return
# Nick is available. Change the nick.
message = ':%s NICK :%s' % (self.client_ident(), nick)
self.server.clients.pop(self.nick)
self.nick = nick
self.server.clients[self.nick] = self
# Send a notification of the nick change to all the clients in the
# channels the client is in.
for channel in self.channels.values():
self._send_to_others(message, channel)
# Send a notification of the nick change to the client itself
return message
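    # A hedged sketch of the two paths above (the exact ident format depends on
    # client_ident(), which is defined elsewhere in this module):
    #
    #   * first registration: "NICK alice" queues the welcome numeric
    #     (events.codes['welcome']) and a 376 "End of MOTD" reply for alice.
    #   * later nick change:  "NICK bob" broadcasts ":<client_ident> NICK :bob"
    #     to every channel the client is in and returns the same line to the client.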
def han
|
oyiadin/Songs-Distributor
|
utils.py
|
Python
|
mit
| 1,548
| 0.00646
|
import random
import time
import datetime
from consts import *
__all__ = ['gen_valid_id', 'gen_list_page', 'log']
def gen_valid_id(collection):
def gen_id():
_id = ''
for i in range(4):
_id += random.choice('0123456789')
return _id
id = gen_id()
while collection.find_one({'id': id}):
id = gen_id()
return id
def gen_list_page(collection, status, page=1):
page = int(page)
left = (page - 1) * 15
right = left + 15
all = collection.find({'status': status}).sort([('id', 1)])
    max_page = int((all.count()-1) / 15) + 1 if all.count() else 0
if page > max_page:
return PAGE_NOT_EXIST
elif page < 1:
return ARGS_INCORRECT
header = '===== {0}/{1} =====\n'.format(page, max_page)
selected = all[left:right]
return header + '\n'.join([
'{id} {title} ({comment})'.format(**i) for i in selected])
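# Worked example of the paging arithmetic above (illustrative values only):
# with 31 matching documents, max_page = int((31-1)/15)+1 = 3; page 2 selects
# the slice all[15:30]; page 4 returns PAGE_NOT_EXIST and page 0 ARGS_INCORRECT.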
def log(m):
with open('log', 'a') as f:
if m.type == 'text': exp=m.content
elif m.type == 'image': exp=m.img
elif m.type == 'link': exp=';'.join([m.title, m.description, m.url])
else: exp=str(dict(m))
f.write(LOG.format(datetime.datetime.fromtimestamp(
time.time()).strftime('%Y-%m-%d %H:%M:%S'), m.source, m.type, exp))
def add_key(key, value):
from pymongo import MongoClient
collection = MongoClient()['SongsDistributor']['collection']
for i in ('checked', 'pending'):
collection.update_many({'status': i}, {'$set': {key: value}})
print('ok')
|
corystreet/pyOdbcToTde
|
Static/TableauSDK-9100.15.0828.1711/tableausdk/Exceptions.py
|
Python
|
gpl-2.0
| 1,040
| 0.003846
|
# -----------------------------------------------------------------------------
#
# This file is the copyrighted property of Tableau Software and is protected
# by registered patents and other applicable U.S. and international laws and
# regulations.
#
# Unlicensed use of the contents of this file is prohibited. Please refer to
# the NOTICES.txt file for further details.
#
# -----------------------------------------------------------------------------
from ctypes import *
from . import Libs
class TableauException(Exception):
def __init__(self, errorCode, message):
Exception.__init__(self, message)
self.errorCode = errorCode
self.message = message
def __str__(self):
return 'TableauException ({0}): {1}'.format(self.errorCode, self.message)
def GetLastErrorMessage():
common_lib = Libs.LoadLibs().load_lib('Common')
common_lib.TabGetLastErrorMessage.argtypes = []
common_lib.TabGetLastErrorMessage.restype = c_wchar_p
return wstring_at(common_lib.TabGetLastErrorMessage())
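# Illustrative usage sketch (not part of the SDK file): surface the engine's
# last error alongside a TableauException raised by some SDK wrapper call.
# `create_extract()` below is a hypothetical caller, not an API of this module.
#
#     try:
#         create_extract()
#     except TableauException as e:
#         print('Tableau error {0}: {1}'.format(e.errorCode, e.message))
#         print('Engine detail: {0}'.format(GetLastErrorMessage()))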
|
gusDuarte/sugar
|
src/jarabe/model/speech.py
|
Python
|
gpl-2.0
| 7,214
| 0.000832
|
# Copyright (C) 2011 One Laptop Per Child
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import os
import logging
from gi.repository import GConf
from gi.repository import Gst
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GObject
DEFAULT_PITCH = 0
DEFAULT_RATE = 0
_speech_manager = None
class SpeechManager(GObject.GObject):
__gtype_name__ = 'SpeechManager'
__gsignals__ = {
'play': (GObject.SignalFlags.RUN_FIRST, None, []),
'pause': (GObject.SignalFlags.RUN_FIRST, None, []),
'stop': (GObject.SignalFlags.RUN_FIRST, None, [])
}
MIN_PITCH = -100
MAX_PITCH = 100
MIN_RATE = -100
MAX_RATE = 100
def __init__(self, **kwargs):
GObject.GObject.__init__(self, **kwargs)
self._player = _GstSpeechPlayer()
self._player.connect('play', self._update_state, 'play')
self._player.connect('stop', self._update_state, 'stop')
self._player.connect('pause', self._update_state, 'pause')
self._voice_name = self._player.get_default_voice()
self._pitch = DEFAULT_PITCH
self._rate = DEFAULT_RATE
self._is_playing = False
self._is_paused = False
self.restore()
def _update_state(self, player, signal):
self._is_playing = (signal == 'play')
self._is_paused = (signal == 'pause')
self.emit(signal)
def get_is_playing(self):
return self._is_playing
is_playing = GObject.property(type=bool, getter=get_is_playing,
setter=None, default=False)
def get_is_paused(self):
return self._is_paused
is_paused = GObject.property(type=bool, getter=get_is_paused,
setter=None, default=False)
def get_pitch(self):
return self._pitch
def get_rate(self):
return self._rate
def set_pitch(self, pitch):
self._pitch = pitch
self.save()
def set_rate(self, rate):
self._rate = rate
self.save()
def say_text(self, text):
if text:
self._player.speak(self._pitch, self._rate, self._voice_name, text)
def say_selected_text(self):
clipboard = Gtk.Clipboard.get(Gdk.SELECTION_PRIMARY)
clipboard.request_text(self.__primary_selection_cb, None)
def pause(self):
self._player.pause_sound_device()
def restart(self):
self._player.restart_sound_device()
def stop(self):
self._player.stop_sound_device()
def __primary_selection_cb(self, clipboard, text, user_data):
self.say_text(text)
def save(self):
client = GConf.Client.get_default()
client.set_int('/desktop/sugar/speech/pitch', self._pitch)
client.set_int('/desktop/sugar/speech/rate', self._rate)
logging.debug('saving speech configuration pitch %s rate %s',
self._pitch, self._rate)
def restore(self):
client = GConf.Client.get_default()
self._pitch = client.get_int('/desktop/sugar/speech/pitch')
self._rate = client.get_int('/desktop/sugar/speech/rate')
logging.debug('loading speech configuration pitch %s rate %s',
self._pitch, self._rate)
class _GstSpeechPlayer(GObject.GObject):
__gsignals__ = {
'play': (GObject.SignalFlags.RUN_FIRST, None, []),
'pause': (GObject.SignalFlags.RUN_FIRST, None, []),
'stop': (GObject.SignalFlags.RUN_FIRST, None, [])
}
def __init__(self):
GObject.GObject.__init__(self)
self._pipeline = None
def restart_sound_device(self):
if self._pipeline is None:
logging.debug('Trying to restart not initialized sound device')
return
self._pipeline.set_state(Gst.State.PLAYING)
self.emit('play')
def pause_sound_device(self):
if self._pipeline is None:
return
self._pipeline.set_state(Gst.State.PAUSED)
self.emit('pause')
def stop_sound_device(self):
if self._pipeline is None:
return
self._pipeline.set_state(Gst.State.NULL)
self.emit('stop')
def make_pipeline(self, command):
if self._pipeline is not None:
self.stop_sound_device()
del self._pipeline
self._pipeline = Gst.parse_launch(command)
bus = self._pipeline.get_bus()
bus.add_signal_watch()
bus.connect('message', self.__pipe_message_cb)
def __pipe_message_cb(self, bus, message):
if message.type == Gst.MessageType.EOS:
self._pipeline.set_state(Gst.State.NULL)
self.emit('stop')
elif message.type == Gst.MessageType.ERROR:
self._pipeline.set_state(Gst.State.NULL)
self.emit('stop')
def speak(self, pitch, rate, voice_name, text):
# TODO workaround for http://bugs.sugarlabs.org/ticket/1801
if not [i for i in text if i.isalnum()]:
return
self.make_pipeline('espeak name=espeak ! autoaudiosink')
src = self._pipeline.get_by_name('espeak')
src.props.text = text
src.props.pitch = pitch
src.props.rate = rate
src.props.voice = voice_name
src.props.track = 2 # track for marks
self.restart_sound_device()
def get_all_voices(self):
all_voices = {}
for voice in Gst.ElementFactory.make('espeak', None).props.voices:
name, language, dialect = voice
if dialect != 'none':
all_voices[language + '_' + dialect] = name
else:
all_voices[language] = name
return all_voices
def get_default_voice(self):
"""Try to figure out the default voice, from the current locale ($LANG)
Fall back to espeak's voice called Default."""
voices = self.get_all_voices()
locale = os.environ.get('LANG', '')
language_location = locale.split('.', 1)[0].lower()
language = language_location.split('_')[0]
# if the language is es but not es_es default to es_la (latin voice)
if language == 'es' and language_location != 'es_es':
language_location = 'es_la'
best = voices.get(language_location) or voices.get(language) \
or 'default'
logging.debug('Best voice for LANG %s seems to be %s',
locale, best)
return best
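        # Worked example (assumed environment, not part of the original method):
        # LANG='es_MX.UTF-8' -> language_location='es_mx', language='es';
        # since the language is 'es' but not 'es_es', the lookup becomes
        # voices.get('es_la') or voices.get('es') or 'default'.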
def get_speech_manager():
global _speech_manager
if _speech_manager is None:
_speech_manager = SpeechManager()
return _speech_manager
|
LEMS/pylems
|
lems/api.py
|
Python
|
lgpl-3.0
| 328
| 0
|
"""
PyLEMS API module.
:author: Gautham Ganapathy
:organization: LEMS (https://github.com/organizations/LEMS)
"""
from lems.model.fundamental import *
from lems.model.structure import *
from lems.model.dynamics import *
from lems.model.simulation import *
from lems.model.component import *
from lems.model.model import Model
|
pmaconi/RedditGoggles
|
reddit-goggles.py
|
Python
|
mit
| 11,792
| 0.037907
|
import argparse, collections, configparser, json, math, mysql.connector as sql, praw, os, requests, sys, time
from datetime import datetime
from pprint import pprint
from mysql.connector import errorcode
from requests import HTTPError
from requests import ConnectionError
from fcntl import flock, LOCK_EX, LOCK_NB
# Print strings in verbose mode
def verbose(info) :
if args.verbose:
printUTF8(info)
def printUTF8(info) :
print(info.encode('ascii', 'replace').decode())
# Connect to MySQL using config entries
def connect() :
db_params = {
'user' : config["MySQL"]["user"],
'password' : config["MySQL"]["password"],
'host' : config["MySQL"]["host"],
'port' : int(config["MySQL"]["port"]),
'database' : config["MySQL"]['database'],
'charset' : 'utf8',
'collation' : 'utf8_general_ci',
'buffered' : True
}
return sql.connect(**db_params)
# Get all jobs from the database
def getJobs(conn) :
cursor = conn.cursor()
query = ("SELECT job_id, zombie_head, state, query, description, submission_cooldown_seconds \
FROM job \
WHERE job.state > 0 AND zombie_head = %s \
ORDER BY job_id")
cursor.execute(query,[args.head])
return cursor
# Perform search
def search(r, query) :
# Attempt to reach Reddit
attempt = 1
while attempt <= 3 :
try :
submissions = list(r.search(query, limit=None))
return submissions
except (ConnectionError, HTTPError) as err :
sleep_time = 2**(attempt - 1)
verbose("Connection attempt " + str(attempt) + " failed. "
"Sleeping for " + str(sleep_time) + " second(s).")
time.sleep(sleep_time)
attempt = attempt + 1
print("***** Error: Unable to query Reddit. Terminating.")
sys.exit(1)
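# Backoff arithmetic for the retry loop above: attempts 1-3 sleep
# 2**(attempt-1) = 1s, 2s and 4s respectively before the script gives up,
# so a completely unreachable Reddit costs roughly 7 seconds of waiting.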
# Expand a praw 'MoreComments' placeholder into its actual comments
def getComments(comment) :
attempt = 1
while attempt <= 3 :
try :
comments = comment.comments(update=False)
return comments
except (ConnectionError, HTTPError) as err :
sleep_time = 2**(attempt - 1)
verbose("Connection attempt " + str(attempt) + " failed. "
"Sleeping for " + str(sleep_time) + " second(s).")
time.sleep(sleep_time)
attempt = attempt + 1
except (AttributeError, TypeError) :
return None
return None
# Add a submission to the DB
def addSubmission(conn, job_id, submission) :
cursor = conn.cursor()
query = "REPLACE INTO submission (job_id, submission_id, subreddit_id, " \
"subreddit, title, author, url, permalink, thumbnail, name, selftext, " \
"over_18, is_self, created_utc, num_comments, ups, downs, score) VALUES " \
"(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) "
values = [
job_id,
submission.id,
submission.subreddit_id,
submission.subreddit.display_name,
submission.title,
submission.author.name,
submission.url,
submission.permalink,
submission.thumbnail,
submission.name,
submission.selftext,
submission.over_18,
submission.is_self,
datetime.fromtimestamp(submission.created_utc).strftime('%Y-%m-%d %H:%M:%S'),
submission.num_comments,
submission.ups,
submission.downs,
submission.score
]
try :
cursor.execute(query, values)
conn.commit()
return True
except sql.Error as err :
verbose("")
verbose(">>>> Warning: Could not add Submission: " + str(err))
verbose(" Query: " + cursor.statement)
return False
finally :
cursor.close()
# Add an entry to the submission score history
def addSubmissionScoreHistory(conn, job_id, submission) :
cursor = conn.cursor()
query = "INSERT INTO submission_score_history (job_id, submission_id, timestamp, ups, " \
"downs, score) VALUES (%s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE job_id=job_id"
values = [
job_id,
submission.id,
datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
submission.ups,
submission.downs,
submission.score
]
try :
cursor.execute(query, values)
conn.commit()
except sql.Error as err :
verbose("")
verbose(">>>> Warning: Could not add Submission score history: " + str(err))
verbose(" Query: " + cursor.statement)
finally :
cursor.close()
# Get the submission's last run time
def getSubmissionRunTime(conn, job_id, submission_id) :
cursor = conn.cursor()
query = "SELECT last_run FROM submission WHERE job_id=%s AND submission_id=%s LIMIT 1"
values = [
job_id,
submission_id
]
try :
cursor.execute(query, values)
for(last_run) in cursor :
if (last_run[0] is not None) :
return last_run[0]
return -1
except sql.Error as err :
verbose(">>>> Warning: Could not get the submission last run time: " + str(err))
verbose(" Query: " + cursor.statement)
finally:
cursor.close()
# Update the submission's last run time
def updateSubmissionRunTime(conn, job_id, submission_id) :
cursor = conn.cursor()
query = "UPDATE submission SET last_run=%s WHERE job_id=%s AND submission_id=%s"
values = [
datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
job_id,
submission_id
]
try :
cursor.execute(query, values)
conn.commit()
except sql.Error as err :
verbose(">>>> Warning: Could not update submission run time: " + str(err))
verbose(" Query: " + cursor.statement)
finally:
cursor.close()
# Add a comment to the DB
def addComment(conn, job_id, submission_id, comment) :
cursor = conn.cursor()
query = "REPLACE INTO comment (job_id, submission_id, comment_id, " \
"parent_id, author, body, created_utc, ups, downs) VALUES " \
"(%s, %s, %s, %s, %s, %s, %s, %s, %s) "
values = [
job_id,
submission_id,
comment.id,
comment.parent_id,
None if comment.author is None else comment.author.name,
comment.body,
        datetime.fromtimestamp(comment.created_utc).strftime('%Y-%m-%d %H:%M:%S'),
comment.ups,
comment.downs
]
try :
cursor.execute(query, values)
conn.commit()
return True
except sql.Error as err :
verbose("")
verbose(">>>> Warning: Could not add Comment: " + str(err))
verbose(" Query: " + cursor.statement)
return False
finally :
cursor.close()
# Add an entry to the comment score history
def addCommentScoreHistory(conn, job_id, comment) :
cursor = conn.cursor()
query = "INSERT INTO comment_score_history (job_id, comment_id, timestamp, ups, " \
"downs) VALUES (%s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE job_id=job_id"
values = [
job_id,
comment.id,
datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
comment.ups,
comment.downs
]
try :
cursor.execute(query, values)
conn.commit()
except sql.Error as err :
verbose("")
verbose(">>>> Warning: Could not add Submission score history: " + str(err))
verbose(" Query: " + cursor.statement)
finally :
cursor.close()
# Add an entry into the job history table
def addJobHistory(conn, job_id, success, total_results = 0) :
return
cursor = conn.cursor()
query = "INSERT INTO job_history (job_id, timestamp, status, total_results) " \
"VALUES(%s, %s, %s, %s, %s)"
values = [
job_id,
datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
"success" if success else "failure",
total_results
]
try :
cursor.execute(query, values)
conn.commit()
except sql.Error as err :
verbose(">>>> Warning: Could not add job_history entry: " + str(err))
verbose(" Query: " + cursor.statement)
finally:
cursor.close()
# Update the stored job's last run time and total results
def updateJobStats(conn, job_id, total_results) :
cursor = conn.cursor()
query = "UPDATE job SET last_count=%s, last_run=%s WHERE job_id=%s"
values = [
total_results,
datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
job_id
]
try :
cursor.execute(query, values)
conn.commit()
except sql.Error as err :
verbose(">>>> Warning: Could not update job: " + str(err))
verbose(" Query: " + cursor.statement)
finally:
cursor.close()
# Iteratively walk the comment tree using a deque, expanding MoreComments placeholders
def parseCommentTree(conn, job_id, submission_id, comment) :
global submission_count, submission_total, comment_count, comment_total
queue = collections.deque()
queue.append(comment)
while len(queue) > 0:
next = queue.popleft();
if isinstance(next, praw.objects.MoreComments) :
more_comments = getComments(next)
if more_comments is not None :
queue.extendleft(more_comments)
else :
success = addComment(conn, job_id, submission_
|
psych0der/resumizr
|
resumizr/urls.py
|
Python
|
mit
| 3,010
| 0.01495
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
from django.core.urlresolvers import reverse_lazy
from django.views.generic import RedirectView
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'', include('social.apps.django_app.urls', namespace='social')),
# API
url(r'^usernames/(?P<username>\w+)/$','api.views.username_availability',name='username_availability'),
url(r'^users/social-data/(?P<backend>\w+)/$','api.views.fetch_social_data',name='fetch_social_data'),
url(r'^users/refresh-social-data/(?P<backend>\w+)/$','api.views.refresh_social_data',name='refresh_social_data'),
url(r'^users/save-data/(?P<resumeId>\d+)/$','api.views.save_data'),
url(r'^users/get-data/(?P<resumeId>\d+)/$','api.views.get_resume_data'),
url(r'^user/get-all-cv/$','dashboard.views.get_all_resumes'),
url(r'^user/dashboard/$','dashboard.views.show_dashboard'),
url(r'^user/create-new-cv/$','dashboard.views.create_new_resume'),
# test api
url(r'^fb-graph-test/$','api.views.fb_graph_test'),
url(r'^github-api-test/$','api.views.github_api_test'),
url(r'^linkedin-api-test/$','api.views.linkedin_api_test'),
# forget password implementation
url(r'^forgot-password/$','api.views.password_reset_middleware', name='forgot_password'),
url(r'^users/password/reset/$', 'django.contrib.auth.views.password_reset',
{'post_reset_redirect' : '/users/password/reset/done/'}),
url(r'^users/password/reset/done/$', 'django.contrib.auth.views.password_reset_done'),
    url(r'^users/password/reset/(?P<uidb64>[0-9A-Za-z]+)-(?P<token>.+)/$', 'django.contrib.auth.views.password_reset_confirm',
{'post_reset_redirect' : '/users/password/done/'}),
url(r'^users/password/done/$', 'django.contrib.auth.views.password_reset_complete'),
url(r'^$', 'api.views.home',name='home'),
url(r'^signup/(?P<backend>[^/]+)/$', 'api.views.signup', name='signup'),
url(r'^signup/$' , RedirectView.as_view(url='/signup/username/')),
    url(r'^email-sent/', 'api.views.validation_sent'),
url(r'^resumizr-login/(?P<backend>[^/]+)/$', 'api.views.username_login', name='username_login'),
url(r'^login/$','api.views.login', name='login'),
url(r'^logout/$','api.views.logout', name='logout'),
url(r'^app/$','api.views.app',name='app'),
url(r'^admin/', include(admin.site.urls)),
url(r'^generate/cvform/(?P<resumeNum>\d+)/$','api.views.generateForm', name='generateform'),
url(r'^write/cv_to_pdf/$','pdfconvertor.views.writepdf', name='writepdf'),
url(r'^preview/cv/$','api.views.previewCv', name='preview'),
url(r'^landing_page/','api.views.landing_page', name='landing_page'),
)
#development media server
if settings.DEBUG:
urlpatterns += patterns(
'django.views.static',
(r'media/(?P<path>.*)',
'serve',
{'document_root': settings.MEDIA_ROOT}), )
|
hradec/gaffer
|
python/GafferUI/Slider.py
|
Python
|
bsd-3-clause
| 18,399
| 0.051796
|
##########################################################################
#
# Copyright (c) 2011-2012, John Haddon. All rights reserved.
# Copyright (c) 2011-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import math
import six
import IECore
import Gaffer
import GafferUI
from Qt import QtCore
from Qt import QtGui
from Qt import QtWidgets
class Slider( GafferUI.Widget ) :
ValueChangedReason = IECore.Enum.create( "Invalid", "SetValues", "Click", "IndexAdded", "IndexRemoved", "DragBegin", "DragMove", "DragEnd", "Increment" )
# The min and max arguments define the numeric values at the ends of the slider.
# By default, values outside this range will be clamped, but hardMin and hardMax
# may be specified to move the point at which the clamping happens outside of the
# slider itself.
#
# A single slider may show more than one value. Multiple values may be specified
# by passing a list to the `values` argument, or calling `setValues()` after
# construction.
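	# A minimal usage sketch (an illustration, not taken from this file):
	#
	#     slider = Slider( values = [ 0.25, 0.75 ], min = 0, max = 1, hardMax = 2 )
	#     slider.setValues( [ 0.25, 1.5 ] )  # 1.5 is clamped against hardMax (2), not max (1)
	#     slider.valueChangedSignal().connect( lambda widget, reason : None )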
def __init__( self, values=0.5, min=0, max=1, hardMin=None, hardMax=None, **kw ) :
if "value" in kw :
# Backwards compatibility with old `value` argument
assert( values == 0.5 )
values = kw["value"]
del kw["value"]
GafferUI.Widget.__init__( self, _Widget(), **kw )
self.__min = min
self.__max = max
self.__hardMin = hardMin if hardMin is not None else self.__min
self.__hardMax = hardMax if hardMax is not None else self.__max
self.__selectedIndex = None
self.__sizeEditable = False
self.__minimumSize = 1
self.__increment = None
self.__snapIncrement = None
self.__hoverPositionVisible = False
self.__hoverEvent = None # The mouseMove event that gives us hover status
self.leaveSignal().connect( Gaffer.WeakMethod( self.__leave ), scoped = False )
self.mouseMoveSignal().connect( Gaffer.WeakMethod( self.__mouseMove ), scoped = False )
self.buttonPressSignal().connect( Gaffer.WeakMethod( self.__buttonPress ), scoped = False )
self.dragBeginSignal().connect( Gaffer.WeakMethod( self.__dragBegin ), scoped = False )
self.dragEnterSignal().connect( Gaffer.WeakMethod( self.__dragEnter ), scoped = False )
self.dragMoveSignal().connect( Gaffer.WeakMethod( self.__dragMove ), scoped = False )
self.dragEndSignal().connect( Gaffer.WeakMethod( self.__dragEnd ), scoped = False )
self.keyPressSignal().connect( Gaffer.WeakMethod( self.__keyPress ), scoped = False )
self.__values = []
if isinstance( values, ( six.integer_types, float ) ) :
self.__setValuesInternal( [ values ], self.ValueChangedReason.SetValues )
else :
self.__setValuesInternal( values, self.ValueChangedReason.SetValues )
## Convenience function to call setValues( [ value ] )
def setValue( self, value ) :
self.setValues( [ value ] )
## Convenience function returning getValues()[0] if there
# is only one value, and raising ValueError if not.
def getValue( self ) :
if len( self.__values ) != 1 :
raise ValueError
return self.__values[0]
def setValues( self, values ) :
self.__setValuesInternal( values, self.ValueChangedReason.SetValues )
def getValues( self ) :
return self.__values
## A signal emitted whenever a value has been changed. Slots should
# have the signature slot( Slider, ValueChangedReason ).
def valueChangedSignal( self ) :
try :
return self.__valueChangedSignal
except :
self.__valueChangedSignal = Gaffer.Signals.Signal2()
return self.__valueChangedSignal
## Returns True if a user would expect the specified sequence
# of changes to be merged into one undoable event.
@classmethod
def changesShouldBeMerged( cls, firstReason, secondReason ) :
if type( firstReason ) != type( secondReason ) :
return False
return ( firstReason, secondReason ) in (
# click and drag
( cls.ValueChangedReason.Click, cls.ValueChangedReason.DragBegin ),
( cls.ValueChangedReason.DragBegin, cls.ValueChangedReason.DragMove ),
( cls.ValueChangedReason.DragMove, cls.ValueChangedReason.DragMove ),
( cls.ValueChangedReason.DragMove, cls.ValueChangedReason.DragEnd ),
# increment
( cls.ValueChangedReason.Increment, cls.ValueChangedReason.Increment ),
)
def setRange( self, min, max, hardMin=None, hardMax=None ) :
if hardMin is None :
hardMin = min
if hardMax is None :
hardMax = max
if min==self.__min and max==self.__max and hardMin==self.__hardMin and hardMax==self.__hardMax :
return
self.__min = min
self.__max = max
self.__hardMin = hardMin
self.__hardMax = hardMax
self.__setValuesInternal( self.__values, self.ValueChangedReason.Invalid ) # reclamps the values to the range if necessary
self._qtWidget().update()
def getRange( self ) :
return self.__min, self.__max, self.__hardMin, self.__hardMax
def indexRemovedSignal( self ) :
signal = getattr( self, "_indexRemovedSignal", None )
if signal is None :
signal = GafferUI.WidgetEventSignal()
self._indexRemovedSignal = signal
return signal
def setSelectedIndex( self, index ) :
if self.__selectedIndex == index :
return
if index is not None :
			if not len( self.__values ) or index < 0 or index >= len( self.__values ) :
raise IndexError
self.__selectedIndex = index
self._qtWidget().update()
signal = getattr( self, "_selectedIndexChangedSignal", None )
if signal is not None :
signal( self )
## May return None to indicate that no index is selected.
def getSelectedIndex( self ) :
return self.__selectedIndex
def selectedIndexChangedSignal( self ) :
signal = getattr( self, "_selectedIndexChangedSignal", None )
if signal is None :
signal = GafferUI.WidgetSignal()
self._selectedIndexChangedSignal = signal
return signal
## Determines whether or not values may be added/removed
def setSizeEditable( self, editable ) :
self.__sizeEditable = editable
def getSizeEditable( self ) :
return self.__sizeEditable
## Sets a size after which no more values can
# be removed.
def setMinimumSize( self, minimumSize ) :
self.__minimumSize = minimumSize
def getMinimumSize( self ) :
return self.__minimumSize
## Sets the value increment added/subtracted
# when using the cursor keys. The default value of None
# uses an increment equivalent to the size of one pixel at
# the current slider size. An increment of 0 can be specified
# to disable the behaviour entirely.
def setIncrement( self, increment ) :
self.__increment = increment
def getIncrement( self ) :
return self.__increment
## Sets the increment used for snapp
|
Fizzadar/pyinfra
|
tests/test_api/test_api.py
|
Python
|
mit
| 2,193
| 0
|
from unittest import TestCase
from paramiko import SSHException
from pyinfra.api import Config, State
from pyinfra.api.connect import connect_all
from pyinfra.api.exceptions import NoGroupError, NoHostError, PyinfraError
from ..paramiko_util import PatchSSHTestCase
from ..util import make_inventory
class TestInventoryApi(TestCase):
def test_inventory_creation(self):
inventory = make_inventory()
# Check length
assert len(inventory.hosts) == 2
# Get a host
host = inventory.get_host('somehost')
assert host.data.ssh_user == 'vagrant'
        # Check our group data
assert inventory.get_group_data('test_group') == {
'group_data': 'hello world',
}
def test_tuple_host_group_inventory_creation(self):
inventory = make_inventory(
hosts=[
('somehost', {'some_data': 'hello'}),
],
tuple_group=([
('somehost', {'another_data': 'world'}),
], {
'tuple_group_data': 'word',
}),
)
# Check host data
host = inventory.get_host('somehost')
assert host.data.some_data == 'hello'
assert host.data.another_data == 'world'
# Check group data
assert host.data.tuple_group_data == 'word'
def test_host_and_group_errors(self):
inventory = make_inventory()
with self.assertRaises(NoHostError):
inventory.get_host('i-dont-exist')
with self.assertRaises(NoGroupError):
inventory.get_group('i-dont-exist')
class TestStateApi(PatchSSHTestCase):
def test_fail_percent(self):
inventory = make_inventory((
'somehost',
('thinghost', {'ssh_hostname': SSHException}),
'anotherhost',
))
state = State(inventory, Config(FAIL_PERCENT=1))
# Ensure we would fail at this point
with self.assertRaises(PyinfraError) as context:
connect_all(state)
assert context.exception.args[0] == 'Over 1% of hosts failed (33%)'
# Ensure the other two did connect
assert len(state.active_hosts) == 2
|
rlrs/deep-rl
|
run_double.py
|
Python
|
mit
| 1,928
| 0.001556
|
"""
Copyright 2016 Rasmus Larsen
This software may be modified and distributed under the terms
of the MIT license. See the LICENSE.txt file for details.
"""
import sys
import time
from sacred import Experiment
from core.ALEEmulator import ALEEmulator
from dqn.Agent import Agent
from dqn.DoubleDQN import DoubleDQN
ex = Experiment('double-dqn')
@ex.config
def net_config():
conv_layers = 3
conv_units = [32, 64, 64]
filter_sizes = [8, 4, 3]
strides = [4, 2, 1]
state_frames = 4
fc_layers = 1
fc_units = [512]
in_width = 84
in_height = 84
discount = 0.99
device = '/gpu:0'
lr = 0.00025
opt_decay = 0.95
momentum = 0.0
opt_eps = 0.01
target_sync = 1e4
clip_delta = 1.0
tensorboard = False
tensorboard_freq = 50
@ex.config
def emu_config():
rom_path = '../ale-git/roms/'
rom_name = 'breakout'
display_screen = True
frame_skip = 4
repeat_prob = 0.0
color_avg = True
random_seed = 42
random_start = 30
@ex.config
def agent_config():
hist_size = 1e5
eps = 1.0
eps_min = 0.1
    eps_decay = (eps - eps_min) / 1e6
    batch_size = 32
    train_start = 5e3
train_frames = 5e6
test_freq = 5e4
test_frames = 5e3
update_freq = 4
@ex.command
def test(_config):
emu = ALEEmulator(_config)
_config['num_actions'] = emu.num_actions
net = DoubleDQN(_config)
net.load(_config['rom_name'])
agent = Agent(emu, net, _config)
agent.next(0) # put a frame into the replay memory, TODO: should not be necessary
agent.test()
@ex.automain
def main(_config, _log):
sys.stdout = open('log_' + _config['rom_name'] + time.strftime('%H%M%d%m', time.gmtime()), 'w', buffering=True)
print "#{}".format(_config)
emu = ALEEmulator(_config)
_config['num_actions'] = emu.num_actions
net = DoubleDQN(_config)
agent = Agent(emu, net, _config)
agent.train()
|
Hammer2900/SunflowerX
|
application/plugins/find_file_extensions/size.py
|
Python
|
gpl-3.0
| 2,732
| 0.00183
|
import gtk
from plugin_base.find_extension import FindExtension
class SizeFindFiles(FindExtension):
"""Size extension for find files tool"""
def __init__(self, parent):
FindExtension.__init__(self, parent)
# create container
table = gtk.Table(2, 4, False)
table.set_border_width(5)
table.set_col_spacings(5)
# create interface
self._adjustment_max = gtk.Adjustment(value=50.0, lower=0.0, upper=100000.0, step_incr=0.1, page_incr=10.0)
self._adjustment_min = gtk.Adjustment(value=0.0, lower=0.0, upper=10.0, step_incr=0.1, page_incr=10.0)
label = gtk.Label('<b>{0}</b>'.format(_('Match file size')))
label.set_alignment(0.0, 0.5)
label.set_use_markup(True)
label_min = gtk.Label(_('Minimum:'))
label_min.set_alignment(0, 0.5)
label_min_unit = gtk.Label(_('MB'))
label_max = gtk.Label(_('Maximum:'))
label_max.set_alignment(0, 0.5)
label_max_unit = gtk.Label(_('MB'))
self._entry_max = gtk.SpinButton(adjustment=self._adjustment_max, digits=2)
self._entry_min = gtk.SpinButton(adjustment=self._adjustment_min, digits=2)
		self._entry_max.connect('value-changed', self._max_value_changed)
		self._entry_min.connect('value-changed', self._min_value_changed)
self._entry_max.connect('activate', self._parent.find_files)
self._entry_min.connect('activate', lambda entry: self._entry_max.grab_focus())
# pack interface
table.attach(label, 0, 3, 0, 1, xoptions=gtk.FILL)
table.attach(label_min, 0, 1, 1, 2, xoptions=gtk.FILL)
table.attach(self._entry_min, 1, 2, 1, 2, xoptions=gtk.FILL)
table.attach(label_min_unit, 2, 3, 1, 2, xoptions=gtk.FILL)
table.attach(label_max, 0, 1, 2, 3, xoptions=gtk.FILL)
table.attach(self._entry_max, 1, 2, 2, 3, xoptions=gtk.FILL)
table.attach(label_max_unit, 2, 3, 2, 3, xoptions=gtk.FILL)
self.vbox.pack_start(table, False, False, 0)
def _max_value_changed(self, entry):
"""Assign value to adjustment handler"""
self._adjustment_min.set_upper(entry.get_value())
def _min_value_changed(self, entry):
"""Assign value to adjustment handler"""
self._adjustment_max.set_lower(entry.get_value())
def get_title(self):
"""Return i18n title for extension"""
return _('Size')
def is_path_ok(self, path):
"""Check is specified path fits the cirteria"""
size = self._parent._provider.get_stat(path).size
size_max = self._entry_max.get_value() * 1048576
size_min = self._entry_min.get_value() * 1048576
return size_min < size < size_max
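		# Unit arithmetic above, illustrated: with minimum 0.5 MB and maximum 2 MB the
		# bounds become 524288 and 2097152 bytes, so a 1 MB (1048576 byte) file matches.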
|
naggie/dsblog
|
dsblog/environment.py
|
Python
|
mit
| 1,659
| 0.010247
|
import yaml
from os import makedirs
from os.path import join,dirname,realpath,isdir
script_dir = dirname(realpath(__file__))
default_yml_filepath = join(script_dir,'defaults.yml')
defaults = {
"output_dir": 'output',
"header_img_dir": 'imgs/headers/',
"scaled_img_dir": 'imgs/scaled/',
"original_img_dir": 'imgs/original/',
"header_img_url": 'imgs/headers/',
"scaled_img_url": 'imgs/scaled/',
"original_img_url": 'imgs/original/',
"template_dir": join(script_dir,'templates'),
"max_article_img_width": 710,
"max_avatar_width": 710,
"database_file": "database.yml",
"static_dir": join(script_dir,'static'),
"copyright_msg": None,
"extra_links": [],
"import_to_discourse": False,
"strapline": None,
}
config = dict()
def getConfig():
if not config:
raise RuntimeError('config not loaded yet')
return config
def loadConfig(yml_filepath):
config.update(defaults)
with open(yml_filepath) as f:
patch = yaml.load(f.read())
config.update(patch)
# make paths absolute
config['header_img_dir'] = join(config['output_dir'],config['header_img_dir'])
config['scaled_img_dir'] = join(config['output_dir'],config['scaled_img_dir'])
    config['original_img_dir'] = join(config['output_dir'],config['original_img_dir'])
    config['database_file'] = join(config['output_dir'],config['database_file'])
def makeDirs():
if not config:
raise RuntimeError('config not loaded yet')
for key in ['header_img_dir','scaled_img_dir','original_img_dir']:
path = config[key]
if not isdir(path):
makedirs(path)
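# Typical call order (a sketch; 'site.yml' is a placeholder file name):
#
#     loadConfig('site.yml')   # must come first, otherwise getConfig()/makeDirs() raise
#     makeDirs()
#     cfg = getConfig()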
|
CLVsol/odoo_addons
|
clv_medicament_template/history/clv_medicament_template_history.py
|
Python
|
agpl-3.0
| 4,727
| 0.008462
|
# -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
from openerp import models, fields, api
from datetime import *
class clv_medicament_template_history(models.Model):
_name = 'clv_medicament.template.history'
medicament_template_id = fields.Many2one('clv_medicament.template', 'Medicament Template', required=True)
user_id = fields.Many2one ('res.users', 'User', required=True)
date = fields.Datetime("Date", required=True)
state = fields.Selection([('draft','Draft'),
('revised','Revised'),
('waiting','Waiting'),
('done','Done'),
('canceled','Canceled'),
], string='Status', default='draft', readonly=True, required=True, help="")
notes = fields.Text(string='Notes')
_order = "date desc"
_defaults = {
'user_id': lambda obj,cr,uid,context: uid,
'date': lambda *a: datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
}
class clv_medicament_template(models.Model):
_inherit = 'clv_medicament.template'
history_ids = fields.One2many('clv_medicament.template.history', 'medicament_template_id', 'Medicament Template History', readonly=True)
active_history = fields.Boolean('Active History',
                                    help="If unchecked, it will allow you to disable the history without removing it.",
default=True)
@api.one
def insert_clv_medicament_template_history(self, medicament_template_id, state, notes):
if self.active_history:
values = {
'medicament_template_id': medicament_template_id,
's
|
tate': state,
'notes': notes,
}
self.pool.get('clv_medicament.template.history').create(self._cr, self._uid, values)
@api.multi
def write(self, values):
if (not 'state' in values) and (not 'date' in values):
notes = values.keys()
self.insert_clv_medicament_template_history(self.id, self.state, notes)
return super(clv_medicament_template, self).write(values)
@api.one
def button_draft(self):
self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.state = 'draft'
self.insert_clv_medicament_template_history(self.id, 'draft', '')
@api.one
def button_revised(self):
self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.state = 'revised'
self.insert_clv_medicament_template_history(self.id, 'revised', '')
@api.one
def button_waiting(self):
self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.state = 'waiting'
self.insert_clv_medicament_template_history(self.id, 'waiting', '')
@api.one
def button_done(self):
self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.state = 'done'
self.insert_clv_medicament_template_history(self.id, 'done', '')
@api.one
def button_cancel(self):
self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.state = 'canceled'
self.insert_clv_medicament_template_history(self.id, 'canceled', '')
@api.one
def set_to_draft(self, *args):
self.state = 'draft'
self.create_workflow()
return True
|
bonno800/pynet
|
week9exercise9-a-b.py
|
Python
|
apache-2.0
| 275
| 0
|
import mytest
print '----This is func1----'
mytest.world.func1()
print '----This is func2----'
mytest.simple.func2()
print '----This is func3----'
mytest.whatever.func3()
print '----This is myobj using MyClass----'
myobj = mytest.MyClass('nick', 'florida')
myobj.hello()
|
cirruspath/python-oauth2-middleware
|
pom/server/pomserver.py
|
Python
|
apache-2.0
| 7,922
| 0.012497
|
# Copyright 2014 Cirruspath, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: James Horey
# Email: [email protected]
#
from flask import Flask, request, redirect, url_for
import json
import os
from pom.triggers.poster import Poster
from pom.triggers.github import GitHub
from pom.triggers.salesforce import Salesforce
from pom.clients.oauth2 import OAuth2
import requests
from requests.exceptions import ConnectionError
import sys
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
import urllib
from uuid import uuid4
import yaml
app = Flask(__name__)
#
# Read in all the known oauth providers.
#
CONFIG_DIR = os.path.dirname(os.path.dirname(__file__)) + "/config"
providers = {}
for f in os.listdir(CONFIG_DIR + "/providers"):
n, e = os.path.splitext(f)
print "Source: " + n
providers[n] = OAuth2(n, CONFIG_DIR + "/providers/" + f, os.environ['POM_APPS'])
#
# Instantiate all the triggers.
#
triggers = []
yaml_file = open(CONFIG_DIR + "/pom.yaml", 'r')
config = yaml.load(yaml_file)
redirect_uri = config["callback"]
if 'triggers' in config:
trigger_list = config["triggers"].split(",")
for t in trigger_list:
if t == "github":
triggers.append(GitHub())
elif t == "salesforce":
triggers.append(Salesforce())
elif t == "poster":
triggers.append(Poster())
else:
trigger_list = []
#
# Responses store the oauth state machine.
#
responses = {}
#
# The default page redirects the user to the source OAuth page.
#
@app.route('/', methods=['GET'])
def authorize():
state = str(uuid4())
if 'session' in request.args:
session = request.args['session']
else:
session = state
if 'source' in request.args:
source = providers[request.args['source']]
print "Using the %s OAuth server" % request.args['source']
else:
print "Using the Salesforce OAuth server"
source = providers["salesforce"]
payload = { 'scope' : source.scopes,
'state' : state,
'redirect_uri' : redirect_uri + '/' + source.name,
'response_type' : 'code',
'client_id' : source.consumer_key,
                'access_type' : 'offline'}
url = source.authorize_url + "?" + urllib.urlencode(payload)
responses[state] = { 'stage' : 'authorize',
'session' : session }
if 'redirect' in request.args:
responses[state]['redirect'] = request.args['redirect']
print "Using the %s
|
user redirect" % responses[state]['redirect']
return redirect(url)
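# A hedged trace of the flow above, assuming a provider config named 'github'
# exists under config/providers and 'https://app.example.com/done' is only an
# illustrative redirect target:
#
#     GET /?source=github&redirect=https://app.example.com/done
#       -> state = uuid4(); responses[state] = {'stage': 'authorize', ...}
#       -> 302 to source.authorize_url with scope, state, response_type=code,
#          client_id, redirect_uri=<callback>/github and access_type=offline
#     The provider later hits /callback/github?code=...&state=..., which
#     exchanges the code for tokens in _get_access_token().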
#
# Fetch a new access token using a refresh token.
#
@app.route('/refresh', methods=['DELETE'])
def revoke_access_token():
refresh_token = request.args['refresh']
source = providers[request.args['source']]
payload = { 'token' : refresh_token }
resp = requests.post(source.revoke_url, params = payload)
return resp.text
#
# Fetch a new access token using a refresh token.
#
@app.route('/refresh', methods=['GET'])
def refresh_access_token():
refresh_token = request.args['refresh']
source = providers[request.args['source']]
print "refreshing with " + refresh_token
payload = { 'client_id' : source.consumer_key,
'client_secret' : source.consumer_secret,
'grant_type' : 'refresh_token',
'refresh_token' : refresh_token }
resp = requests.post(source.token_url, params = payload)
return resp.text
def _get_access_token(source, auth_code, state, session, redirect=None):
try:
payload = { 'client_id' : source.consumer_key,
'client_secret' : source.consumer_secret,
'grant_type' : 'authorization_code',
'code' : auth_code,
'redirect_uri' : redirect_uri + '/' + source.name}
headers = {'Accept' : 'application/json'}
# headers = {'content-type': 'application/x-www-form-urlencoded',
# 'content-length' : 256}
res = requests.post(source.access_token_url,
data = payload,
headers = headers)
if res.status_code == requests.codes.ok:
resp_json = res.json()
print "JSON response: " + str(resp_json)
if 'access_token' in resp_json:
resp = None
resp_json['source'] = source.name
if redirect:
resp_json['_user_redirect'] = redirect
for t in triggers:
resp = t.consume_access_key(resp_json)
responses[state] = { 'stage' : 'authorized',
'resp' : resp_json }
if resp:
return resp.text
else:
return json.dumps( {'status' : 'authorized',
'session' : session } )
else:
error_msg = "unauthorized"
else:
error_msg = "unreachable"
return json.dumps( {"status" : "failed",
"error" : error_msg,
"session" : session } )
except ConnectionError as e:
print str(e)
#
# The generic callback method. Should be supplemented with the provider source
# name so that we know what to do.
#
@app.route('/callback/<source_name>', methods=['GET'])
def callback(source_name):
source = providers[source_name]
if 'code' in request.args:
auth_code = request.args["code"]
state = request.args["state"]
session = responses[state]['session']
if 'redirect' in responses[state]:
redirect = responses[state]['redirect']
else:
redirect = None
responses[state]['stage'] = 'callback'
return _get_access_token(source, auth_code, state, session, redirect)
else:
return json.dumps( {'status' : 'failed',
'error' : 'authentication' } )
#
# Retrieve the access & refresh keys.
#
@app.route('/key', methods=['GET'])
def key():
if 'session' in request.args and request.args['session'] in responses:
resp = responses[request.args['session']]
if resp['stage'] == 'authorized':
return resp['resp']['access_key']
return json.dumps( {'status' : 'failed',
'error' : 'could not find access key' } )
def main():
if 'POM_SSL' in os.environ:
key_dir = os.environ['POM_SSL']
else:
key_dir = os.path.dirname(os.path.dirname(__file__)) + "/keys"
if not 'POM_APPS' in os.environ:
print "POM_APPS should be set to a directory with our application OAuth credentials"
exit(1)
print "Using SSL certificate in " + key_dir
try:
http_server = HTTPServer(WSGIContainer(app),
ssl_options={
"certfile": key_dir + "/server.crt",
"keyfile": key_dir + "/server.key",
})
http_server.listen(port=int(sys.argv[2]),
address=sys.argv[1])
IOLoop.instance().start()
except:
pass
|
has2k1/plotnine
|
plotnine/tests/test_geom_ribbon_area.py
|
Python
|
gpl-2.0
| 2,328
| 0
|
import numpy as np
import pandas as pd
from plotnine import (ggplot, aes, geom_area, geom_ribbon,
facet_wrap, scale_x_continuous, theme)
n = 4 # No. of ribbions in a vertical stack
m = 100 # Points
width = 2*np.pi # width of each ribbon
x = np.linspace(0, width, m)
df = pd.DataFrame({
'x': np.tile(x, n),
'ymin': np.hstack([np.sin(x)+2*i for i in range(n)]),
'ymax': np.hstack([np.sin(x)+2*i+1 for i in range(n)]),
'z': np.repeat(range(n), m)
})
_theme = theme(subplots_adjust={'right': 0.85})
def test_ribbon_aesthetics():
p = (ggplot(df, aes('x', ymin='ymin', ymax='ymax',
group='factor(z)')) +
geom_ribbon() +
geom_ribbon(aes('x+width', alpha='z')) +
geom_ribbon(aes('x+2*width', linetype='factor(z)'),
color='black', fill=None, size=2) +
         geom_ribbon(aes('x+3*width', color='z'),
fill=None, size=2) +
geom_ribbon(aes('x+4*width', fill='factor(z)')) +
geom_ribbon(aes('x+5*width', size='z'),
color='black', fill=None) +
scale_x_continuous(
breaks=[i*2*np.pi for i in range(7)],
labels=['0'] + [r'${}\pi$'.format(2*i) for i in range(1, 7)])
)
assert p + _theme == 'ribbon_aesthetics'
def test_area_aesthetics():
p = (ggplot(df, aes('x', 'ymax+2', group='factor(z)')) +
geom_area() +
geom_area(aes('x+width', alpha='z')) +
geom_area(aes('x+2*width', linetype='factor(z)'),
color='black', fill=None, size=2) +
geom_area(aes('x+3*width', color='z'),
fill=None, size=2) +
geom_area(aes('x+4*width', fill='factor(z)')) +
geom_area(aes('x+5*width', size='z'),
color='black', fill=None) +
scale_x_continuous(
breaks=[i*2*np.pi for i in range(7)],
labels=['0'] + [r'${}\pi$'.format(2*i) for i in range(1, 7)])
)
assert p + _theme == 'area_aesthetics'
def test_ribbon_facetting():
p = (ggplot(df, aes('x', ymin='ymin', ymax='ymax',
fill='factor(z)')) +
geom_ribbon() +
facet_wrap('~ z')
)
assert p + _theme == 'ribbon_facetting'
|
anaran/olympia
|
lib/es/utils.py
|
Python
|
bsd-3-clause
| 1,403
| 0
|
import os
import amo.search
from .models import Reindexing
from django.core.management.base import CommandError
# shortcut functions
is_reindexing_amo = Reindexing.objects.is_reindexing_amo
flag_reindexing_amo = Reindexing.objects.flag_reindexing_amo
unflag_reindexing_amo = Reindexing.objects.unflag_reindexing_amo
get_indices = Reindexing.objects.get_indices
def index_objects(ids, model, search, index=None, transforms=None):
if index is None:
index = model._get_index()
indices = Reindexing.objects.get_indices(index)
if transforms is None:
transforms = []
qs = model.objects.no_cache().filter(id__in=ids)
for t in transforms:
qs = qs.transform(t)
for ob in qs:
data = search.extract(ob)
for index in indices:
model.index(data, bulk=True, id=ob.id, index=index)
    amo.search.get_es().flush_bulk(forced=True)
def raise_if_reindex_in_progress(site):
"""Checks if the database indexation flag is on for the given site.
If it's on, and if no "FORCE_INDEXING" variable is present in the env,
raises a CommandError.
"""
already_reindexing = Reindexing.objects._is_reindexing(site)
if already_reindexing and 'FORCE_INDEXING' not in os.environ:
raise CommandError("Indexation already occuring. Add a FORCE_INDEXING "
|
"variable in the environ to force it")
|
Cinemair/cinemair-server
|
cinemair/shows/migrations/0002_auto_20150712_2126.py
|
Python
|
mit
| 430
| 0.002326
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('shows', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='show',
            options={'verbose_name_plural': 'shows', 'ordering': ['datetime', 'cinema', 'id'], 'verbose_name': 'show'},
),
]
|
alehaa/james
|
jamesci/status.py
|
Python
|
gpl-3.0
| 1,897
| 0.000527
|
# This file is part of James CI.
#
# James CI is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# James CI is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY
|
or FITNESS FOR
#
|
A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with James CI. If not, see <http://www.gnu.org/licenses/>.
#
#
# Copyright (C)
# 2017 Alexander Haase <[email protected]>
#
import enum
@enum.unique
class Status(enum.IntEnum):
"""
This enum class will be used for defining the status of a
:py:class:`~.Pipeline` or :py:class:`~.Job`. Multiple statuses may be
compared by their value.
.. note::
The minimum of a list of statuses will be the *worst* status of the list.
However, if the list has a status of :py:attr:`created`,
:py:attr:`pending` or :py:attr:`running`, these will have priority,
indicating not all jobs have finished yet.
"""
created = enum.auto()
pending = enum.auto()
running = enum.auto()
canceled = enum.auto()
errored = enum.auto()
failed = enum.auto()
success = enum.auto()
def __str__(self):
"""
Return the status name as string. This function is required to remove
the enum's class name prefix when string representation is required.
"""
return self.name
def final(self):
"""
:return: If the status is a final state or not.
:rtype: bool
"""
return (self in [self.canceled, self.errored, self.failed,
self.success])
|
nanditav/15712-TensorFlow
|
tensorflow/contrib/learn/python/learn/estimators/linear.py
|
Python
|
apache-2.0
| 31,382
| 0.004716
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Linear Estimators."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import re
import six
from tensorflow.contrib import layers
from tensorflow.contrib.framework import deprecated
from tensorflow.contrib.framework import deprecated_arg_values
from tensorflow.contrib.framework.python.ops import variables as contrib_variables
from tensorflow.contrib.learn.python.learn import evaluable
from tensorflow.contrib.learn.python.learn import trainable
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.estimators import head as head_lib
from tensorflow.contrib.learn.python.learn.estimators import prediction_key
from tensorflow.contrib.learn.python.learn.utils import export
from tensorflow.contrib.linear_optimizer.python import sdca_optimizer
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import gradients
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import session_run_hook
from tensorflow.python.training import training as train
# The default learning rate of 0.2 is a historical artifact of the initial
# implementation, but seems a reasonable choice.
_LEARNING_RATE = 0.2
def _get_optimizer(spec):
if isinstance(spec, six.string_types):
return layers.OPTIMIZER_CLS_NAMES[spec](
learning_rate=_LEARNING_RATE)
elif callable(spec):
return spec()
return spec
# TODO(ispir): Remove this function by fixing '_infer_model' with single outputs
# and as_iterable case.
def _as_iterable(preds, output):
for pred in preds:
yield pred[output]
def _add_bias_column(feature_columns, columns_to_tensors, bias_variable,
labels, columns_to_variables):
# TODO(b/31008490): Move definition to a common constants place.
bias_column_name = "tf_virtual_bias_column"
  if any(col.name == bias_column_name for col in feature_columns):
raise ValueError("%s is a reserved column name." % bias_column_name)
bias_column = layers.real_valued_column(bias_column_name)
columns_to_tensors[bias_column] = array_ops.ones_like(labels,
dtype=dtypes.float32)
columns_to_variables[bias_column] = [bias_variable]
def _linear_model_fn(features, labels, mode, params):
"""A model_fn for linear models that use a gradient-based optimizer.
Args:
features: `Tensor` or dict of `Tensor` (depends on data passed to `fit`).
labels: `Tensor` of shape [batch_size, 1] or [batch_size] labels of
dtype `int32` or `int64` in the range `[0, n_classes)`.
mode: Defines whether this is training, evaluation or prediction.
See `ModeKeys`.
params: A dict of hyperparameters.
The following hyperparameters are expected:
* head: A `Head` instance.
* feature_columns: An iterable containing all the feature columns used by
the model.
* optimizer: string, `Optimizer` object, or callable that defines the
optimizer to use for training.
* gradient_clip_norm: A float > 0. If provided, gradients are
clipped to their global norm with this clipping ratio.
* num_ps_replicas: The number of parameter server replicas.
* joint_weights: If True, the weights for all columns will be stored in a
single (possibly partitioned) variable. It's more efficient, but it's
incompatible with SDCAOptimizer, and requires all feature columns are
sparse and use the 'sum' combiner.
Returns:
An `estimator.ModelFnOps` instance.
Raises:
ValueError: If mode is not any of the `ModeKeys`.
"""
head = params["head"]
feature_columns = params["feature_columns"]
optimizer = params["optimizer"]
gradient_clip_norm = params.get("gradient_clip_norm", None)
num_ps_replicas = params.get("num_ps_replicas", 0)
joint_weights = params.get("joint_weights", False)
if not isinstance(features, dict):
features = {"": features}
parent_scope = "linear"
partitioner = partitioned_variables.min_max_variable_partitioner(
max_partitions=num_ps_replicas,
min_slice_size=64 << 20)
with variable_scope.variable_scope(
parent_scope, values=features.values(), partitioner=partitioner) as scope:
if joint_weights:
logits, _, _ = (
layers.joint_weighted_sum_from_feature_columns(
columns_to_tensors=features,
feature_columns=feature_columns,
num_outputs=head.logits_dimension,
weight_collections=[parent_scope],
scope=scope))
else:
logits, _, _ = (
layers.weighted_sum_from_feature_columns(
columns_to_tensors=features,
feature_columns=feature_columns,
num_outputs=head.logits_dimension,
weight_collections=[parent_scope],
scope=scope))
def _train_op_fn(loss):
global_step = contrib_variables.get_global_step()
my_vars = ops.get_collection("linear")
grads = gradients.gradients(loss, my_vars)
if gradient_clip_norm:
grads, _ = clip_ops.clip_by_global_norm(grads, gradient_clip_norm)
return (optimizer.apply_gradients(
zip(grads, my_vars), global_step=global_step))
return head.head_ops(features, labels, mode, _train_op_fn, logits)
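# Illustrative sketch (not part of the original file): the params dict that
# _linear_model_fn above consumes would look roughly like the following; the head
# and feature column constructors shown are assumptions about typical usage.
#
#   example_params = {
#       "head": head_lib._multi_class_head(n_classes=2),        # hypothetical head
#       "feature_columns": [layers.real_valued_column("x")],
#       "optimizer": _get_optimizer("Ftrl"),
#       "gradient_clip_norm": 5.0,
#       "num_ps_replicas": 0,
#       "joint_weights": False,
#   }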
def sdca_model_fn(features, labels, mode, params):
"""A model_fn for linear models that use the SDCA optimizer.
Args:
features: A dict of `Tensor` keyed by column name.
labels: `Tensor` of shape [batch_size, 1] or [batch_size] labels of
dtype `int32` or `int64` in the range `[0, n_classes)`.
mode: Defines whether this is training, evaluation or prediction.
See `ModeKeys`.
params: A dict of hyperparameters.
The following hyperparameters are expected:
* head: A `Head` instance. Type must be one of `_BinarySvmHead`,
`_RegressionHead` or `_MultiClassHead`.
* feature_columns: An iterable containing all the feature columns used by
the model.
* optimizer: An `SDCAOptimizer` instance.
* weight_column_name: A string defining the weight feature column, or
None if there are no weights.
* update_weights_hook: A `SessionRunHook` object or None. Used to update
model weights.
Returns:
An `estimator.ModelFnOps` instance.
Raises:
ValueError: If `optimizer` is not an `SDCAOptimizer` instance.
ValueError: If the type of head is neither `_BinarySvmHead`, nor
`_RegressionHead` nor `_MultiClassHead`.
ValueError: If mode is not any of the `ModeKeys`.
"""
head = params["head"]
feature_columns = params["feature_columns"]
optimizer = params["optimizer"]
weight_column_name = params["weight_column_name"]
update_weights_hook = params.get("update_weights_hook", None)
if not isinstance(optimizer, sdca_optimizer.SDCAOptimizer):
raise ValueError("Optimizer must be of type SDCAOptimizer")
if isinstance(head, head_lib._BinarySvmHead): # pylint: disable=protected-access
loss_type = "hinge_loss"
elif isinstance(head, head_lib._MultiClassHead): # pylint: disable=protected-access
loss_type = "logistic_loss"
|
i3visio/osrframework
|
osrframework/wrappers/papaly.py
|
Python
|
agpl-3.0
| 3,867
| 0.004397
|
################################################################################
#
# Copyright 2015-2020 Félix Brezo and Yaiza Rubio
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
__author__ = "Felix Brezo, Yaiza Rubio <[email protected]>"
__version__ = "2.0"
from osrframework.utils.platforms import Platform
class Papaly(Platform):
"""A <Platform> object for Papaly."""
def __init__(self):
self.platformName = "Papaly"
self.tags = ["social"]
########################
# Defining valid modes #
########################
self.isValidMode = {}
self.isValidMode["phonefy"] = False
self.isValidMode["usufy"] = True
self.isValidMode["searchfy"] = False
######################################
# Search URL for the different modes #
######################################
# Strings with the URL for each and every mode
self.url = {}
#self.url["phonefy"] = "http://anyurl.com//phone/" + "<phonefy>"
self.url["usufy"] = "https://papaly.com/<usufy>"
#self.url["searchfy"] = "http://anyurl.com/search/" + "<searchfy>"
######################################
# Whether the user needs credentials #
######################################
self.needsCredentials = {}
#self.needsCredentials["phonefy"] = False
self.needsCredentials["usufy"] = False
#self.needsCredentials["searchfy"] = False
#################
# Valid queries #
#################
# Regular expressions that a query must match to be considered valid for this platform
self.validQuery = {}
# The regular expression '.+' will match any query
#self.validQuery["phonefy"] = ".*"
self.validQuery["usufy"] = ".+"
#self.validQuery["searchfy"] = ".*"
###################
# Not_found clues #
###################
# Strings that will imply that the query number is not appearing
self.notFoundText = {}
#self.notFoundText["phonefy"] = []
self.notFoundText["usufy"] = ["<title>Page not found</title>"]
#self.notFoundText["searchfy"] = []
#########################
# Fields to be searched #
#########################
self.fieldsRegExp = {}
# Definition of regular expressions to be searched in phonefy mode
#self.fieldsRegExp["phonefy"] = {}
# Example of fields:
#self.fieldsRegExp["phonefy"]["i3visio.location"] = ""
# Definition of regular expressions to be searched in usufy mode
self.fieldsRegExp["usufy"] = {}
# Example of fields:
#self.fieldsRegExp["usufy"]["i3visio.location"] = ""
# Definition of regular expressions to be searched in searchfy mode
#self.fieldsRegExp["searchfy"] = {}
# Example of fields:
#self.fieldsRegExp["searchfy"]["i3visio.location"] = ""
################
# Fields found #
################
# This attribute will be populated when running the program.
self.foundFields = {}
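# Minimal usage sketch (illustration only, not part of the original wrapper); it
# shows how the usufy URL template defined above expands for a given nickname.
#
#   example_platform = Papaly()
#   example_url = example_platform.url["usufy"].replace("<usufy>", "example_user")
#   # -> "https://papaly.com/example_user"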
|
silveregg/moto
|
moto/dynamodb/models.py
|
Python
|
apache-2.0
| 9,875
| 0.00081
|
from __future__ import unicode_literals
from collections import defaultdict
import datetime
import json
from moto.compat import OrderedDict
from moto.core import BaseBackend
from moto.core.utils import unix_time
from .comparisons import get_comparison_func
class DynamoJsonEncoder(json.JSONEncoder):
def default(self, obj):
if hasattr(obj, 'to_json'):
return obj.to_json()
def dynamo_json_dump(dynamo_object):
return json.dumps(dynamo_object, cls=DynamoJsonEncoder)
class DynamoType(object):
"""
http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataModel.html#DataModelDataTypes
"""
def __init__(self, type_as_dict):
self.type = list(type_as_dict.keys())[0]
self.value = list(type_as_dict.values())[0]
def __hash__(self):
return hash((self.type, self.value))
def __eq__(self, other):
return (
self.type == other.type and
self.value == other.value
)
def __repr__(self):
return "DynamoType: {0}".format(self.to_json())
def to_json(self):
return {self.type: self.value}
def compare(self, range_comparison, range_objs):
"""
Compares this type against comparison filters
"""
range_values = [obj.value for obj in range_objs]
comparison_func = get_comparison_func(range_comparison)
return comparison_func(self.value, *range_values)
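# Illustrative sketch (not in the original module): DynamoType wraps the
# single-entry {"type": value} dicts used by the DynamoDB wire format, e.g.
#
#   key = DynamoType({"S": "the-key"})
#   key.type    # -> "S"
#   key.value   # -> "the-key"
#   key.compare("EQ", [DynamoType({"S": "the-key"})])   # -> True, via get_comparison_func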
class Item(object):
def __init__(self, hash_key, hash_key_type, range_key, range_key_type, attrs):
self.hash_key = hash_key
self.hash_key_type = hash_key_type
self.range_key = range_key
self.range_key_type = range_key_type
self.attrs = {}
for key, value in attrs.items():
self.attrs[key] = DynamoType(value)
def __repr__(self):
return "Item: {0}".format(self.to_json())
def to_json(self):
attributes = {}
for attribute_key, attribute in self.attrs.items():
attributes[attribute_key] = attribute.value
return {
"Attributes": attributes
}
def describe_attrs(self, attributes):
if attributes:
included = {}
for key, value in self.attrs.items():
if key in attributes:
included[key] = value
else:
included = self.attrs
return {
"Item": included
}
class Table(object):
def __init__(self, name, hash_key_attr, hash_key_type,
range_key_attr=None, range_key_type=None, read_capacity=None,
write_capacity=None):
self.name = name
self.hash_key_attr = hash_key_attr
self.hash_key_type = hash_key_type
self.range_key_attr = range_key_attr
self.range_key_type = range_key_type
self.read_capacity = read_capacity
self.write_capacity = write_capacity
self.created_at = datetime.datetime.utcnow()
self.items = defaultdict(dict)
@property
def has_range_key(self):
return self.range_key_attr is not None
@property
def describe(self):
results = {
"Table": {
"CreationDateTime": unix_time(self.created_at),
"KeySchema": {
"HashKeyElement": {
"AttributeName": self.hash_key_attr,
"AttributeType": self.hash_key_type
},
},
"ProvisionedThroughput": {
"ReadCapacityUnits": self.read_capacity,
"WriteCapacityUnits": self.write_capacity
},
"TableName": self.name,
"TableStatus": "ACTIVE",
"ItemCount": len(self),
"TableSizeBytes": 0,
}
}
if self.has_range_key:
results["Table"]["KeySchema"]["RangeKeyElement"] = {
"AttributeName": self.range_key_attr,
"AttributeType": self.range_key_type
}
return results
def __len__(self):
count = 0
for key, value in self.items.items():
if self.has_range_key:
count += len(value)
else:
count += 1
return count
def __nonzero__(self):
return True
def __bool__(self):
return self.__nonzero__()
def put_item(self, item_attrs):
hash_value = DynamoType(item_attrs.get(self.hash_key_attr))
if self.has_range_key:
range_value = DynamoType(item_attrs.get(self.range_key_attr))
else:
range_value = None
item = Item(hash_value, self.hash_key_type, range_value, self.range_key_type, item_attrs)
if range_value:
self.items[hash_value][range_value] = item
else:
self.items[hash_value] = item
return item
def get_item(self, hash_key, range_key):
if self.has_range_key and not range_key:
raise ValueError("Table has a range key, but no range key was passed into get_item")
try:
if range_key:
return self.items[hash_key][range_key]
else:
return self.items[hash_key]
except KeyError:
return None
def query(self, hash_key, range_comparison, range_objs):
results = []
last_page = True # Once pagination is implemented, change this
if self.range_key_attr:
possible_results = self.items[hash_key].values()
else:
possible_results = list(self.all_items())
if range_comparison:
for result in possible_results:
if result.range_key.compare(range_comparison, range_objs):
results.append(result)
else:
# If we're not filtering on range key, return all values
results = possible_results
return results, last_page
def all_items(self):
for hash_set in self.items.values():
if self.range_key_attr:
for item in hash_set.values():
yield item
else:
yield hash_set
def scan(self, filters):
results = []
scanned_count = 0
last_page = True # Once pagination is implemented, change this
for result in self.all_items():
scanned_count += 1
passes_all_conditions = True
for attribute_name, (comparison_operator, comparison_objs) in filters.items():
attribute = result.attrs.get(attribute_name)
if attribute:
# Attribute found
if not attribute.compare(comparison_operator, comparison_objs):
passes_all_conditions = False
break
elif comparison_operator == 'NULL':
# Comparison is NULL and we don't have the attribute
continue
else:
# No attribute found and comparison is not NULL. This item fails
passes_all_conditions = False
break
if passes_all_conditions:
results.append(result)
return results, scanned_count, last_page
def delete_item(self, hash_key, range_key):
try:
if range_key:
return self.items[hash_key].pop(range_key)
else:
return self.items.pop(hash_key)
except KeyError:
return None
class DynamoDBBackend(BaseBackend):
def __init__(self):
self.tables = OrderedDict()
def create_table(self, name, **params):
table = Table(name, **params)
self.tables[name] = table
return table
def delete_table(self, name):
return self.tables.pop(name, None)
def update_table_throughput(self, name, new_read_units, new_write_units):
table = self.tables[name]
table.read_capacity = new_read_units
table.write_capacity = new_write_units
return table
def put_i
|
jordanemedlock/psychtruths
|
temboo/core/Library/Genability/TariffData/GetTariff.py
|
Python
|
apache-2.0
| 4,018
| 0.004978
|
# -*- coding: utf-8 -*-
###############################################################################
#
# GetTariff
# Returns an individual Tariff object with a given id.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class GetTariff(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the GetTariff Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(GetTariff, self).__init__(temboo_session, '/Library/Genability/TariffData/GetTariff')
def new_input_set(self):
return GetTariffInputSet()
def _make_result_set(self, result, path):
return GetTariffResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return GetTariffChoreographyExecution(session, exec_id, path)
class GetTariffInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the GetTariff
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AppID(self, value):
"""
Set the value of the AppID input for this Choreo. ((conditional, string) The App ID provided by Genability.)
"""
super(GetTariffInputSet, self)._set_input('AppID', value)
def set_AppKey(self, value):
"""
Set the value of the AppKey input for this Choreo. ((required, string) The App Key provided by Genability.)
"""
super(GetTariffInputSet, self)._set_input('AppKey', value)
def set_MasterTariffID(self, value):
"""
Set the value of the MasterTariffID input for this Choreo. ((required, integer) The master tariff id. This can be retrieved in the output of the GetTariffs Choreo.)
"""
super(GetTariffInputSet, self)._set_input('MasterTariffID', value)
def set_PopulateProperties(self, value):
"""
Set the value of the PopulateProperties input for this Choreo. ((optional, boolean) Set to "true" to populate the properties for the returned Tariffs.)
"""
super(GetTariffInputSet, self)._set_input('PopulateProperties', value)
def set_PopulateRates(self, value):
"""
Set the value of the PopulateRates input for this Choreo. ((optional, boolean) Set to "true" to populate the rate details for the returned Tariffs.)
"""
super(GetTariffInputSet, self)._set_input('PopulateRates', value)
class GetTariffResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the GetTariff Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Genability.)
"""
return self._output.get('Response', None)
class GetTariffChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return GetTariffResultSet(response, path)
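# Minimal usage sketch (illustration only, not part of the original Choreo file);
# the session constructor follows the generic Temboo SDK pattern and every
# credential value below is a placeholder.
#
#   from temboo.core.session import TembooSession
#   session = TembooSession("ACCOUNT_NAME", "APP_KEY_NAME", "APP_KEY_VALUE")
#   choreo = GetTariff(session)
#   inputs = choreo.new_input_set()
#   inputs.set_AppID("GENABILITY_APP_ID")
#   inputs.set_AppKey("GENABILITY_APP_KEY")
#   inputs.set_MasterTariffID("522")
#   results = choreo.execute_with_results(inputs)
#   print(results.get_Response())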
|
esthermm/odoo-addons
|
procurement_service_project/models/sale_order.py
|
Python
|
agpl-3.0
| 1,663
| 0
|
# -*- coding: utf-8 -*-
# (c) 2016 Alfredo de la Fuente - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import models, fields, api
class SaleOrder(models.Model):
_inherit = 'sale.order'
@api.multi
def action_button_confirm(self):
procurement_obj = self.env['procurement.order']
procurement_group_obj = self.env['procurement.group']
res = super(SaleOrder, self).action_button_confirm()
for line in self.order_line:
valid = self._validate_service_project_for_procurement(
line.product_id)
if valid:
if not self.procurement_group_id:
vals = self._prepare_procurement_group(self)
group = procurement_group_obj.create(vals)
self.write({'procurement_group_id': group.id})
vals = self._prepare_order_line_procurement(
self, line, group_id=self.procurement_group_id.id)
vals['name'] = self.name + ' - ' + line.product_id.name
procurement = procurement_obj.create(vals)
procurement.run()
return res
def _validate_service_project_for_procurement(self, product):
routes = product.route_ids.filtered(lambda r: r.id in [
self.env.ref('procurement_service_project.route_serv_project').id])
return product.type == 'service' and routes
class SaleOrderLine(models.Model):
_inherit = 'sale.order.line'
service_project_task = fields.Many2one(
comodel_name='project.task', string='Generated task from procurement',
copy=False)
|
VigTech/Vigtech-Services
|
principal/views.py
|
Python
|
lgpl-3.0
| 45,303
| 0.014399
|
# -*- encoding: utf-8 -*-
# from django.shortcuts import render, render_to_response, redirect, get_object_or_404, get_list_or_404, Http404
from django.core.cache import cache
from django.shortcuts import *
from django.views.generic import TemplateView, FormView
from django.http import HttpResponseRedirect, HttpResponse
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse_lazy
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.template import RequestContext
from django import template
from models import proyecto
from .forms import *
#from Logica.ConexionBD import adminBD
import funciones
import sys
#~ from administradorConsultas import AdministradorConsultas # I commented this out - JAPeTo
#~ from manejadorArchivos import obtener_autores # I commented this out - JAPeTo
#~ from red import Red # I commented this out - JAPeTo
from Logica import ConsumirServicios, procesamientoScopusXml, procesamientoArxiv
# import igraph
import traceback
import json
import django.utils
from Logica.ConexionBD.adminBD import AdminBD
from principal.parameters import *
from principal.permisos import *
# sys.setdefaultencoding is cancelled by site.py
reload(sys) # to re-enable sys.setdefaultencoding()
sys.setdefaultencoding('utf-8')
# Create your views here.
# @login_required
#ruta = "/home/administrador/ManejoVigtech/ArchivosProyectos/"
sesion_proyecto=None
proyectos_list =None
model_proyecto =None
id_proyecto = None
##nombre_proyecto = None
class home(TemplateView):
template_name = "home.html"
def get_context_data(self, **kwargs):
global proyectos_list
global model_proyecto
try:
existe_proyecto = False
proyectos_list = get_list_or_404(proyecto, idUsuario=self.request.user)
for project in proyectos_list:
if project == model_proyecto:
existe_proyecto = True
if not (existe_proyecto):
model_proyecto = None
except:
# print traceback.format_exc()
proyectos_list = None
model_proyecto = None
return {'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}
class RegistrarUsuario(FormView):
template_name = "registrarUsuario.html"
form_class = FormularioRegistrarUsuario
success_url = reverse_lazy('RegistrarUsuarios')
def form_valid(self, form):
user = form.save()
messages.success(self.request, "Se ha creado exitosamente el usuario")
return redirect('login')
def cambia_mensaje(crfsession,proyecto,usuario,borrar, mensaje,valor):
# print ">>>> AQUI ESTOY"+str(borrar)+" & "+str(mensaje)
try:
cache_key = "%s_%s_%s" % (crfsession,proyecto.replace(" ",""),usuario)
data = cache.get(cache_key)
if data:
data['estado'] = valor
data['mensaje'] += mensaje
if borrar :
data['mensaje'] = mensaje
cache.set(cache_key, data)
else:
cache.set(cache_key, {
'estado': 0,
'mensaje' : mensaje
})
except:
pass
@login_required
def nuevo_proyecto(request):
global id_proyecto
global model_proyecto
global proyectos_list
if request.method == 'POST':
form = FormularioCrearProyecto(request.POST)
fraseB = request.POST.get('fraseB')
fraseA = request.POST.get('fraseA')
autor = request.POST.get('autor')
words = request.POST.get('words')
before = request.POST.get('before')
after = request.POST.get('after')
limArxiv = request.POST.get('limArxiv')
limSco = request.POST.get('limSco')
cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"",0)
busqueda = fraseB + "," + words + "," + fraseA + "," + autor + "," + before + "," + after
# print "busca "+busqueda+", by japeto"
if form.is_valid():
nombreDirectorio = form.cleaned_data['nombre']
articulos = {}
modelo_proyecto = form.save(commit=False)
modelo_proyecto.idUsuario = request.user
# print "formulario valido, by japeto"
# print "2"
# proyectos_list = get_list_or_404(proyecto, idUsuario=request.user)
# proyectos_list = get_list_or_404(proyecto, idUsuario=request.user)
#modelo_proyecto.calificacion=5
modelo_proyecto.fraseBusqueda = busqueda
modelo_proyecto.save()
proyectos_list = get_list_or_404(proyecto, idUsuario=request.user)
model_proyecto = get_object_or_404(proyecto, id_proyecto=modelo_proyecto.id_proyecto)
id_proyecto = model_proyecto.id_proyecto
# Creation of the directory where the documents for the newly created project will be stored.
mensajes_pantalla="<p class='text-primary'><span class='fa fa-send fa-fw'></span>Se ha creado el Directorio para el proyecto</p>"
funciones.CrearDirectorioProyecto(modelo_proyecto.id_proyecto, request.user)
cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,mensajes_pantalla,6)
# print "se crea directorio, by japeto"
if fraseB != "":
try:
"""
Download of documents from Arxiv
"""
# print "descarga de documentos, by japeto"
cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-primary'><span class='fa fa-send fa-fw'></span>Descarga de documentos de Arxiv</p>",12)
articulos_arxiv= ConsumirServicios.consumir_arxiv(fraseB, request.user.username, str(modelo_proyecto.id_proyecto), limArxiv)
cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-success'><span class='fa fa-check fa-fw'></span>Descarga de documentos terminada</p>",18)
except:
cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-danger'><span class='fa fa-times fa-fw'></span><b>PROBLEMA: </b>Descarga de documentos de Arxiv</p>",12)
cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"STOP",0)
print traceback.format_exc()
try:
"""
Download of documents from Scopus
"""
cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-primary'><span class='fa fa-send fa-fw'></span>Descarga de documentos de Scopus</p>",24)
articulos_scopus = ConsumirServicios.consumir_scopus(fraseB, request.user.username, str(modelo_proyecto.id_proyecto), limSco)
cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-success'><span class='fa fa-check fa-fw'></span>Descarga de documentos terminada</p>",30)
except:
cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-danger'><span class='fa fa-times fa-fw'></span><b>PROBLEMA: </b>Descarga de documentos de Scopus</p>",24)
cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"STOP",0)
print traceback.format_exc()
try:
"""
Insertion of Arxiv metadata
"""
cambia_mensaje(request.POST.get('csrfmiddleware
|
cernops/keystone
|
keystone/tests/unit/common/test_manager.py
|
Python
|
apache-2.0
| 1,531
| 0
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from keystone import catalog
from keystone.common import manager
from keystone.tests import unit
class TestCreateLegacyDriver(unit.BaseTestCase):
@mock.patch('oslo_log.versionutils.report_deprecated_feature')
def test_class_is_properly_deprecated(self, mock_reporter):
Driver = manager.create_legacy_driver(catalog.CatalogDriverV8)
# NOTE(dstanek): I want to subvert the requirement for this
# class to implement all of the abstract methods.
Driver.__abstractmethods__ = set()
impl = Driver()
details = {
'as_of': 'Liberty',
'what': 'keystone.catalog.core.Driver',
'in_favor_of': 'keystone.catalog.core.CatalogDriverV8',
'remove_in': mock.ANY,
}
mock_reporter.assert_called_with(mock.ANY, mock.ANY, details)
self.assertEqual('N', mock_reporter.call_args[0][2]['remove_in'][0])
self.assertIsInstance(impl, catalog.CatalogDriverV8)
|
mozilla/addons-server
|
src/olympia/users/tests/test_admin.py
|
Python
|
bsd-3-clause
| 38,889
| 0.001363
|
from django.conf import settings
from django.contrib import admin
from django.contrib.auth.models import AnonymousUser
from django.contrib.messages.storage import default_storage as default_messages_storage
from django.db import connection
from django.test import RequestFactory
from django.test.utils import CaptureQueriesContext
from django.urls import reverse
from django.utils.dateformat import DateFormat
from unittest import mock
from pyquery import PyQuery as pq
from olympia import amo, core
from olympia.abuse.models import AbuseReport
from olympia.activity.models import ActivityLog
from olympia.addons.models import AddonUser
from olympia.amo.tests import (
addon_factory,
collection_factory,
TestCase,
user_factory,
version_factory,
)
from olympia.api.models import APIKey, APIKeyConfirmation
from olympia.bandwagon.models import Collection
from olympia.ratings.models import Rating
from olympia.reviewers.models import ReviewerScore
from olympia.users.admin import UserAdmin
from olympia.users.models import (
EmailUserRestriction,
IPNetworkUserRestriction,
UserProfile,
UserRestrictionHistory,
)
class TestUserAdmin(TestCase):
def setUp(self):
self.user = user_factory()
self.list_url = reverse('admin:users_userprofile_changelist')
self.detail_url = reverse(
'admin:users_userprofile_change', args=(self.user.pk,)
)
self.delete_url = reverse(
'admin:users_userprofile_delete', args=(self.user.pk,)
)
def test_search_for_multiple_users(self):
user = user_factory(email='[email protected]')
self.grant_permission(user, 'Users:Edit')
self.client.login(email=user.email)
another_user = user_factory()
response = self.client.get(
self.list_url,
{'q': f'{self.user.pk},{another_user.pk},foobaa'},
follow=True,
)
assert response.status_code == 200
doc = pq(response.content)
assert str(self.user.pk) in doc('#result_list').text()
assert str(another_user.pk) in doc('#result_list').text()
def test_search_for_multiple_user_ids(self):
"""Test the optimization when just searching for matching ids."""
user = user_factory(email='[email protected]')
self.grant_permission(user, 'Users:Edit')
self.client.login(email=user.email)
another_user = user_factory()
with CaptureQueriesContext(connection) as queries:
response = self.client.get(
self.list_url,
{'q': f'{self.user.pk},{another_user.pk}'},
follow=True,
)
queries_str = '; '.join(q['sql'] for q in queries.captured_queries)
in_sql = f'`users`.`id` IN ({self.user.pk}, {another_user.pk})'
assert in_sql in queries_str
assert len(queries.captured_queries) == 6
assert response.status_code == 200
doc = pq(response.content)
assert str(self.user.pk) in doc('#result_list').text()
assert str(another_user.pk) in doc('#result_list').text()
def test_search_ip_as_int_isnt_considered_an_ip(self):
user = user_factory(email='[email protected]')
self.grant_permission(user, 'Users:Edit')
self.client.login(email=user.email)
self.user.update(last_login_ip='127.0.0.1')
response = self.client.get(self.list_url, {'q': '2130706433'}, follow=True)
assert response.status_code == 200
doc = pq(response.content)
assert not doc('#result_list tbody tr')
assert not doc('.column-_ratings_all__ip_address')
def test_search_for_single_ip(self):
user = user_factory(email='[email protected]')
self.grant_permission(user, 'Users:Edit')
self.client.login(email=user.email)
user_factory(last_login_ip='127.0.0.1') # Extra user that shouldn't match
self.user.update(email='[email protected]', last_login_ip='127.0.0.2') # Will match
response = self.client.get(self.list_url, {'q': '127.0.0.2'}, follow=True)
assert response.status_code == 200
doc = pq(response.content)
# Make sure it's the right user.
assert doc('.field-email').text() == self.user.email
# Make sure last login is now displayed, and has the right value.
assert doc('.field-last_login_ip').text() == '127.0.0.2'
def test_search_for_single_ip_multiple_results_for_different_reasons(self):
user = user_factory(email='[email protected]')
self.grant_permission(user, 'Users:Edit')
self.client.login(email=user.email)
extra_user = user_factory(
email='[email protected]', last_login_ip='127.0.0.1'
) # Extra user that matches but not thanks to their last_login_ip...
UserRestrictionHistory.objects.create(user=extra_user, ip_address='127.0.0.2')
extra_extra_user = user_factory(email='[email protected]')
UserRestrictionHistory.objects.create(
user=extra_extra_user, last_login_ip='127.0.0.2'
)
self.user.update(email='[email protected]', last_login_ip='127.0.0.2')
response = self.client.get(self.list_url, {'q': '127.0.0.2'}, follow=True)
assert response.status_code == 200
doc = pq(response.content)
# Make sure it's the right users.
assert doc('.field-email').text() == ' '.join(
[
extra_extra_user.email,
extra_user.email,
self.user.email,
]
)
# Make sure last login is now displayed, and has the right values.
assert doc('.field-last_login_ip').text() == '127.0.0.1 127.0.0.1 127.0.0.2'
# Same for the others that match
assert doc('.field-restriction_history__ip_address').text() == '- 127.0.0.2 -'
assert (
doc('.field-restriction_history__last_login_ip').text() == '127.0.0.2 - -'
)
def test_search_for_multiple_ips(self):
user = user_factory(email='[email protected]')
self.grant_permission(user, 'Users:Edit')
self.client.login(email=user.email)
self.user.update(email='[email protected]', last_login_ip='127.0.0.2')
response = self.client.get(
self.list_url, {'q': '127.0.0.2,127.0.0.3'}, follow=True
)
assert response.status_code == 200
doc = pq(response.content)
# Make sure it's the right user.
assert doc('.field-email').text() == self.user.email
# Make sure last login is now displayed, and has the right value.
assert doc('.field-last_login_ip').text() == '127.0.0.2'
def test_search_for_multiple_ips_with_deduplication(self):
user = user_factory(email='[email protected]')
self.grant_permission(user, 'Users:Edit')
self.client.login(email=user.email)
# Will match once with the last_login
self.user.update(email='[email protected]', last_login_ip='127.0.0.2')
# Will match twice: once with the last login, once with the restriction history
# ip_address. Only one result will be shown since the 2 rows would be the same.
extra_user = user_factory(email='[email protected]', last_login_ip='127.0.0.2')
UserRestrictionHistory.objects.create(user=extra_user, ip_address='127.0.0.2')
# Will match 4 times: last_login, restriction history (last_login_ip and
# ip_address), ratings ip_address. There will be 2 results shown because of the
# 2 different user restriction history matching.
extra_extra_user = user_factory(
email='[email protected]', last_login_ip='127.0.0.3'
)
UserRestrictionHistory.objects.create(
user=extra_extra_user, last_login_ip='127.0.0.2', ip_address='10.0.0.42'
)
UserRestrictionHistory.objects.create(
user=extra_extra_user, ip_address='127.0.0.2', last_login_ip='10.0.0.36'
)
addon = addon_factory()
Rating.objects.create(
user=extra_extra_user,
rating=4,
ip_address='127.0.0.3',
addon
|
ir-lab/intprim
|
intprim/__init__.py
|
Python
|
mit
| 231
| 0
|
from intprim.bayesian_interaction_primitives import *
import intprim.basis
import intprim.constants
import intprim.examples
import intprim.filter
import intprim.filter.align
import intprim.filter.spatiotemporal
import intprim.util
| |
Halfish/lstm-ctc-ocr
|
1_generateImage.py
|
Python
|
apache-2.0
| 1,086
| 0.003683
|
# coding: utf-8
import random
from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont
import sys
import os
# how many pictures to generate
num = 10
if len(sys.argv) > 1:
num = int(sys.argv[1])
def genline(text, font, filename):
'''
generate one line
'''
w, h = font.getsize(text)
image = Image.new('RGB', (w + 15, h + 15), 'white')
brush = ImageDraw.Draw(image)
brush.text((8, 5), text, font=font, fill=(0, 0, 0))
image.save(filename + '.jpg')
with open(filename + '.txt', 'w') as f:
f.write(text)
f.close()
if __name__ == '__main__':
if not os.path.isdir('./lines/'):
os.mkdir('./lines/')
for i in range(num):
fontname = './fonts/simkai.ttf'
fontsize = 24
font = ImageFont.truetype(fontname, fontsize)
text = str(random.randint(1000000000, 9999999999))
text = text + str(random.randint(1000000000, 9999999999))
#text = str(random.randint(1000, 9999))
filename = './lines/' + str(i + 1)
genline(text, font, filename)
pass
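# Usage note (added for clarity, not in the original script): the optional first
# CLI argument overrides the number of generated images, e.g.
#
#   python 1_generateImage.py 100
#
# writes ./lines/1.jpg ... ./lines/100.jpg plus matching .txt label files,
# assuming ./fonts/simkai.ttf is available.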
|
chosenone75/Neural-Networks
|
tf/CNN-Sentence-Classification/train_CNN4Text.py
|
Python
|
gpl-3.0
| 10,181
| 0.021904
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 20 23:53:19 2017
@author: chosenone
Train CNN for Text Classification
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
import os
import time
import datetime
import data_util
from CNN4Text import CNN4Text
from tensorflow.contrib import learn
#==============================================================================
# parameters
#==============================================================================
#==============================================================================
# data parameters
#==============================================================================
tf.flags.DEFINE_float("validation_set_percentage",0.1,
"the percentage of training examples that will be used for validation set")
tf.flags.DEFINE_string("data_postive_path","./data/rt-polaritydata/rt-polarity.pos",
"file path for postive data")
tf.flags.DEFINE_string("data_negative_path","./data/rt-polaritydata/rt-polarity.neg",
"file path for negative data")
#==============================================================================
# model hyperparameters
#==============================================================================
tf.flags.DEFINE_float("learning_rate",0.001,"learning rate(default 0.001)")
tf.flags.DEFINE_integer("embedding_size",128,"the size of word embeeding (default 128)")
tf.flags.DEFINE_integer("num_filters",128,"the number of filters for each filter size(default 128)")
tf.flags.DEFINE_string("filter_sizes","3,4,5","comma-separated filter sizes(default 3,4,5)")
tf.flags.DEFINE_float("keep_prob",0.5,"the probability used for dropout(default 0.5)")
tf.flags.DEFINE_float("l2_reg_lambda",0.0,"the l2 regularization lambda(default 0)")
#==============================================================================
# train parameters
#==============================================================================
tf.flags.DEFINE_integer("batch_size",64,"Batch size (default size 64)")
tf.flags.DEFINE_integer("num_epochs",200,"Epoch sizes(default size 200)")
tf.flags.DEFINE_integer("evaluate_interval",100,"Evaluate
|
model interval(default 100)")
tf.flags.DEFINE_integer("checkpoint_interval",100,"Save Checkpoint Interval(default 100)")
tf.flags.DEFINE_integer("num_checkpoints",5,"number of checkpoints to save(default 5)")
#==============================================================================
# misc parameters
#==============================================================================
tf.flags.DEFINE_bool("allow_soft_parameters",True,"allow soft device placement(default true)")
tf.flags.DEFINE_bool("log_device_placement",False,"Log placement of ops on devices(default false)")
FLAGS = tf.flags.FLAGS
FLAGS._parse_flags()
print("\nParameters:")
for attr,value in sorted(FLAGS.__flags.items()):
print("%s:%s" % (attr.upper(),value))
print("\n")
#==============================================================================
# Data Preparation
#==============================================================================
# Load data
print("Loading Data...\n")
x_data,y = data_util.load_data_and_labels(FLAGS.data_postive_path,FLAGS.data_negative_path)
# construct vocabulary
max_sentence_length = max([len(sent.split(" ")) for sent in x_data])
vocab_processor = learn.preprocessing.VocabularyProcessor(max_sentence_length)
x = np.array(list(vocab_processor.fit_transform(x_data)))
print(max_sentence_length)
# shuffle data
np.random.seed(10)
shuffled_indices = np.random.permutation(np.arange(len(y)))
x_shuffled = x[shuffled_indices]
y_shuffled = y[shuffled_indices]
# split train-test set
# have a try with k-fold cross-validation later.
validation_set_index = -1 * int(FLAGS.validation_set_percentage * float(len(y)))
x_train,x_val = x_shuffled[:validation_set_index],x_shuffled[validation_set_index:]
y_train,y_val = y_shuffled[:validation_set_index],y_shuffled[validation_set_index:]
print("Vocabulary Size: %s" % len(vocab_processor.vocabulary_._mapping))
print("Length of train/validation set: %d , %d ." % (len(y_train),len(y_val)))
#==============================================================================
# Training
#==============================================================================
with tf.Graph().as_default():
session_config = tf.ConfigProto(allow_soft_placement=FLAGS.allow_soft_parameters,
log_device_placement=FLAGS.log_device_placement)
sess = tf.Session(config=session_config)
with sess.as_default():
cnn = CNN4Text(sequence_length=x_train.shape[1],
num_classes=y_train.shape[1],
vocab_size=len(vocab_processor.vocabulary_),
embedding_size=FLAGS.embedding_size,
filter_sizes=list(map(int,FLAGS.filter_sizes.split(","))),
num_filters=FLAGS.num_filters,
l2_reg_lambda=FLAGS.l2_reg_lambda)
# the detail of train procedure
global_step = tf.Variable(0,name="global_step",trainable=False)
optimizer = tf.train.AdamOptimizer(learning_rate=FLAGS.learning_rate)
grads_and_vars = optimizer.compute_gradients(cnn._loss)
train_op = optimizer.apply_gradients(grads_and_vars,global_step=global_step)
grad_summaries = []
for g,v in grads_and_vars:
if g is not None:
grad_hist_summary = tf.summary.histogram("%s/grad/hist" % v.name,g)
sparsity_summary = tf.summary.scalar("%s/grad/hist" % v.name,tf.nn.zero_fraction(g))
grad_summaries.append(grad_hist_summary)
grad_summaries.append(sparsity_summary)
grad_summayies_merged = tf.summary.merge(grad_summaries)
# output path of summary
timestamp = str(int(time.time()))
output_path = os.path.abspath(os.path.join(os.path.curdir,"runs",timestamp))
print("Writing into Output Path: %s ..." % output_path)
# summary for loss and accuracy
loss_summary = tf.summary.scalar("loss",cnn._loss)
acc_summary = tf.summary.scalar("accuracy",cnn._accuracy)
# train summaries
train_summary_op = tf.summary.merge([loss_summary,acc_summary,grad_summayies_merged])
train_summary_path = os.path.join(output_path,"summary","train")
train_summary_writer = tf.summary.FileWriter(train_summary_path,sess.graph)
#validation summaries
validation_summary_op = tf.summary.merge([loss_summary,acc_summary])
validation_summary_path = os.path.join(output_path,"summary","validation")
validation_summary_writer = tf.summary.FileWriter(validation_summary_path,sess.graph)
checkpoint_path = os.path.abspath(os.path.join(output_path,"checkpoints"))
checkpoint_prefix = os.path.join(checkpoint_path,"model")
if not os.path.exists(checkpoint_path):
os.makedirs(checkpoint_path)
saver = tf.train.Saver(tf.global_variables(),max_to_keep=FLAGS.num_checkpoints)
# save vocabulary
vocab_processor.save(os.path.join(output_path,"vocab"))
sess.run(tf.global_variables_initializer())
# a single training step
def train_step(x_batch,y_batch,writer=None):
'''
a single training step
'''
feed_dict = {cnn._input_x:x_batch,
cnn._input_y:y_batch,
cnn._keep_prob:FLAGS.keep_prob}
_,step,summaries,loss,accuracy = sess.run([train_op,global_step,train_summary_op
,cnn._loss,cnn._accuracy],feed_dict)
time_str = datetime.datetime.now().isoformat()
print("%s: Step: %d,Loss: %.4f,Accuracy: %.4f" % (time_str,step,loss,accuracy))
|
flyhung/CMSIS-DAP
|
tools/get_binary.py
|
Python
|
apache-2.0
| 1,166
| 0.002573
|
"""
CMSIS-DAP Interface Firmware
Copyright (c) 2009-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Extract and patch the interface without bootloader
"""
from options import get_options
from paths import get_interface_path, TMP_DIR
from utils import gen_binary, is_lpc, split_path
from os.path import join
if __name__ == '__main__':
options = get_options()
in_path = get_interface_path(options.interface, options.target, bootloader=False)
_, name, _ = split_path(in_path)
out_path = join(TMP_DIR, name + '.bin')
print '\nELF: %s' % in_path
gen_binary(in_path, out_path, is_lpc(options.interface))
print "\nBINARY: %s" % out_path
|
github/codeql
|
python/ql/test/library-tests/ControlFlow/except/test.py
|
Python
|
mit
| 1,251
| 0.02558
|
#Ensure there is an exceptional edge from the following case
def f2():
b, d = Base, Derived
try:
class MyNewClass(b, d):
pass
except:
e2
def f3():
sequence_of_four = a_global
try:
a, b, c = sequence_of_four
except:
e3
#Always treat locals as non-raising to keep DB size down.
def f4():
if cond:
local = 1
try:
local
except:
e4
def f5():
try:
a_global
except:
e5
def f6():
local = a_global
try:
local()
except:
e6
#Literals can't raise
def f7():
try:
4
except:
e7
def f8():
try:
a + b
except:
e8
#OK assignments
def f9():
try:
a, b = 1, 2
except:
e9
def fa():
seq = a_global
try:
a = seq
except:
ea
def fb():
a, b, c = a_global
try:
seq = a, b, c
except:
eb
#Ensure that a.b and c[d] can raise
def fc():
a, b = a_global
try:
return a[b]
except:
ec
def fd():
a = a_global
try:
return a.b
except:
ed
def fe():
try:
call()
except:
ee
else:
ef
|
mahabs/nitro
|
nssrc/com/citrix/netscaler/nitro/resource/config/ns/nspbr6_args.py
|
Python
|
apache-2.0
| 1,047
| 0.025788
|
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class nspbr6_args :
""" Provides additional arguments required for fetching the nspbr6 resource.
"""
def __init__(self) :
self._detail = False
@property
def detail(self) :
"""To get a detailed view.
"""
try :
return self._detail
except Exception as e:
raise e
@detail.setter
def detail(self, detail) :
"""To get a detailed view.
"""
try :
self._detail = detail
except Exception as e:
raise e
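# Illustrative sketch (not part of the original file): the args object simply
# carries the optional "detail" flag when fetching nspbr6 resources, e.g.
#
#   args = nspbr6_args()
#   args.detail = True   # request the detailed view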
|
vodik/plumbum
|
tests/test_color.py
|
Python
|
mit
| 2,444
| 0.020458
|
import pytest
from plumbum.colorlib.styles import ANSIStyle, Color, AttributeNotFound, ColorNotFound
from plumbum.colorlib.names import color_html, FindNearest
class TestNearestColor:
def test_exact(self):
assert FindNearest(0,0,0).all_fast() == 0
for n,color in enumerate(color_html):
# Ignoring duplicates
if n not in (16, 21, 46, 51, 196, 201, 226, 231, 244):
rgb = (int(color[1:3],16), int(color[3:5],16), int(color[5:7],16))
assert FindNearest(*rgb).all_fast() == n
def test_nearby(self):
assert FindNearest(1,2,2).all_fast() == 0
assert FindNearest(7,7,9).all_fast() == 232
def test_simplecolor(self):
assert FindNearest(1,2,4).only_basic() == 0
assert FindNearest(0,255,0).only_basic() == 2
assert FindNearest(100,100,0).only_basic() == 3
assert FindNearest(140,140,140).only_basic() == 7
class TestColorLoad:
def test_rgb(self):
blue = Color(0,0,255) # Red, Green, Blue
assert blue.rgb == (0,0,255)
def test_simple_name(self):
green = Color.from_simple('green')
assert green.number == 2
def test_different_names(self):
assert Color('Dark Blue') == Color('Dark_Blue')
assert Color('Dark_blue') == Color('Dark_Blue')
assert Color('DARKBLUE') == Color('Dark_Blue')
assert Color('DarkBlue') == Color('Dark_Blue')
assert Color('Dark Green') == Color('Dark_Green')
def test_loading_methods(self):
assert Color("Yellow") == Color.from_full("Yellow")
assert (Color.from_full("yellow").representation !=
Color.from_simple("yellow").representation)
class TestANSIColor:
@classmethod
def setup_class(cls):
ANSIStyle.use_color = True
def test_ansi(self):
assert str(ANSIStyle(fgcolor=Color('reset'))) == '\033[39m'
assert str(ANSIStyle(fgcolor=Color.from_full('green'))) == '\033[38;5;2m'
assert str(ANSIStyle(fgcolor=Color.from_simple('red'))) == '\033[31m'
class TestNearestColor:
def test_allcolors(self):
myrange = (0,1,2,5,17,39,48,73,82,140,193,210,240,244,250,254,255)
for r in myrange:
for g in myrange:
for b in myrange:
near = FindNearest(r,g,b)
assert near.all_slow() == near.all_fast(), 'Tested: {0}, {1}, {2}'.format(r,g,b)
|
sebastien/wwwclient
|
tests/site-google.py
|
Python
|
lgpl-3.0
| 902
| 0.012195
|
#!/usr/bin/env python
# vim: tw=80 ts=4 sw=4 noet
from os.path import join, basename, dirname, abspath
import _import
from wwwclient import browse, scrape
HTML = scrape.HTML
s = browse.Session("http://www.google.com")
f = s.form().fill(q="python web scraping")
s.submit(f, action="btnG", method="GET")
tree = scrape.HTML.tree(s.page())
nodes = tree.cut(below=3)
nodes = nodes.filter(accept=lambda n:n.name.lower() in ("table","p"))
for node in nodes.children:
print HTML.text(node)
if node.name == "p":
link = node.find(withName="a")[0]
print "-->", link.attribute("href")
print HTML.links(link)
else:
print "---------"
# Google results are not properly closed, so we had to identify patterns where
# a closing tag should be inserted
# close_on = ("td", "a", "img", "br", "a")
# scrape.do(scrape.HTML.iterate, session.last().data(), closeOn=close_on, write=sys.stdout)
# EOF
|
psi4/psi4
|
tests/psi4numpy/cphf/input.py
|
Python
|
lgpl-3.0
| 2,679
| 0.005972
|
#! Tests out the CG solver with CPHF Polarizabilities
import time
import numpy as np
import psi4
psi4.set_output_file("output.dat")
# Benzene
mol = psi4.geometry("""
0 1
O 0.000000000000 0.000000000000 -0.075791843589
H 0.000000000000 -0.866811828967 0.601435779270
H 0.000000000000 0.866811828967 0.601435779270
symmetry c1
""")
psi4.set_options({"basis": "aug-cc-pVDZ",
"scf_type": "df",
"e_convergence": 1e-8,
"save_jk": True,
})
scf_e, scf_wfn = psi4.energy("SCF", return_wfn=True)
# Orbitals
Co = scf_wfn.Ca_subset("AO", "OCC")
Cv = scf_wfn.Ca_subset("AO", "VIR")
# Mints object
mints = psi4.core.MintsHelper(scf_wfn.basisset())
# RHS Dipoles
dipoles_xyz = []
for dip in mints.ao_dipole():
Fia = psi4.core.triplet(Co, dip, Cv, True, False, False)
Fia.scale(-2.0)
dipoles_xyz.append(Fia)
# Build up the preconditioner
precon = psi4.core.Matrix(Co.shape[1], Cv.shape[1])
occ = np.array(scf_wfn.epsilon_a_subset("AO", "OCC"))
vir = np.array(scf_wfn.epsilon_a_subset("AO", "VIR"))
precon.np[:] = (-occ.reshape(-1, 1) + vir)
# Build a preconditioner function
def precon_func(matrices, active_mask):
ret = []
for act, mat in zip(active_mask, matrices):
if act:
p = mat.clone()
p.apply_denominator(precon)
ret.append(p)
else:
ret.append(False)
return ret
def wrap_Hx(matrices, active_mask):
x_vec = [mat for act, mat in zip(active_mask, matrices) if act]
Hx_vec = scf_wfn.cphf_Hx(x_vec)
ret = []
cnt = 0
for act, mat in zip(active_mask, matrices):
if act:
ret.append(Hx_vec[cnt])
cnt += 1
else:
ret.append(False)
return ret
# Solve
ret, resid = psi4.p4util.solvers.cg_solver(dipoles_xyz, wrap_Hx, precon_func, rcond=1.e-6)
polar = np.empty((3, 3))
for numx in range(3):
for numf in range(3):
polar[numx, numf] = -1 * ret[numx].vector_dot(dipoles_xyz[numf])
psi4.core.print_out("\n " + "CPHF Dipole Polarizability:".center(44) + "\n")
tops = ("X", "Y", "Z")
psi4.core.print_out(" %12s %12s %12s\n" % tops)
for n, p in enumerate(tops):
psi4.core.print_out(" %3s %12.4f %12.4f %12.4f\n" % (p, polar[n][0], polar[n][1], polar[n][2]))
psi4.core.print_out("\n")
psi4.compare_values(8.01554, polar[0][0], 3, 'Dipole XX Polarizability') # TEST
psi4.compare_values(12.50363, polar[1][1], 3, 'Dipole YY Polarizability') # TEST
psi4.compare_values(10.04161, polar[2][2], 3, 'Dipole ZZ Polarizability') # TEST
|
imperial-genomics-facility/data-management-python
|
igf_data/illumina/runparameters_xml.py
|
Python
|
apache-2.0
| 3,007
| 0.018291
|
from bs4 import BeautifulSoup
class RunParameter_xml:
'''
A class for reading runparameters xml file from Illumina sequencing runs
:param xml_file: A runparameters xml file
'''
def __init__(self, xml_file):
self.xml_file = xml_file
self._read_xml()
def _read_xml(self):
'''
Internal function for reading the xml file using BS4
'''
try:
xml_file = self.xml_file
with open(xml_file, 'r') as fp:
soup = BeautifulSoup(fp, "html5lib")
self._soup = soup
except Exception as e:
raise ValueError(
'Failed to parse xml file {0}, error {1}'.\
format(self.xml_file, e))
def get_nova_workflow_type(self):
try:
soup = self._soup
workflowtype = None
if soup.workflowtype:
workflowtype = \
soup.workflowtype.contents[0]
return workflowtype
except Exception as e:
raise ValueError('Failed to get NovaSeq workflow type')
def get_novaseq_flowcell(self):
try:
soup = self._soup
flowcell_id = None
workflowtype = self.get_nova_workflow_type()
if workflowtype is None or \
workflowtype != 'NovaSeqXp':
raise ValueError(
'Missing NovaSeq workflow type: {0}'.\
format(workflowtype))
if soup.rfidsinfo and \
soup.rfidsinfo.flowcellserialbarcode:
flowcell_id = \
soup.rfidsinfo.flowcellserialbarcode.contents[0]
if flowcell_id is None:
raise ValueError(
'Missing NovaSeq flowcell id, file: {0}'.\
format(self.xml_file))
return flowcell_id
except Exception as e:
raise ValueError(
'Failed to get NovaSeq flowcell id, error: {0}'.format(e))
def get_novaseq_flowcell_mode(self):
try:
soup = self._soup
flowcell_mode = None
workflowtype = self.get_nova_workflow_type()
if workflowtype is None or \
workflowtype != 'NovaSeqXp':
raise ValueError(
'Missing NovaSeq workflow type: {0}'.\
format(workflowtype))
if soup.rfidsinfo and \
soup.rfidsinfo.flowcellmode:
flowcell_mode = \
soup.rfidsinfo.flowcellmode.contents[0]
if flowcell_mode is None:
raise ValueError(
'Missing NovaSeq flowcell mode, file: {0}'.\
format(self.xml_file))
return flowcell_mode
except Exception as e:
raise ValueError(
'Failed to get NovaSeq flowcell mode, error: {0}'.format(e))
def get_hiseq_flowcell(self):
'''
A method for fetching flowcell details for hiseq run
:returns: Flowcell info or None (for MiSeq, NextSeq or NovaSeq runs)
'''
try:
soup = self._soup
if soup.flowcell:
flowcell = soup.flowcell.contents[0]
else:
flowcell = None
return flowcell
except Exception as e:
raise ValueError(
'Failed to get flowcell for hiseq, error: {0}'.\
format(e))
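# Minimal usage sketch (illustration only, not part of the original class); the
# file path below is a placeholder.
#
#   params = RunParameter_xml(xml_file="/path/to/runParameters.xml")
#   if params.get_nova_workflow_type() == "NovaSeqXp":
#       flowcell_mode = params.get_novaseq_flowcell_mode()
#   else:
#       flowcell = params.get_hiseq_flowcell()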
|
odoo-arg/odoo_l10n_ar
|
l10n_ar_reject_checks/models/account_own_check.py
|
Python
|
agpl-3.0
| 2,297
| 0.002612
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
from openerp.exceptions import ValidationError
class AccountOwnCheck(models.Model):
_inherit = 'account.own.check'
@api.multi
def cancel_check(self):
""" Lo que deberia pasar con el cheque cuando se cancela """
if any(check.state != 'draft' for check in self):
raise ValidationError("Solo se puede cancelar un cheque en estado borrador")
self.next_state('draft_canceled')
@api.multi
def revert_canceled_check(self):
""" Lo que deberia pasar con el cheque cuando se revierte una cancelacion """
if any(check.state != 'canceled' for check in self):
raise ValidationError("Funcionalidad unica para cheques cancelados")
self.cancel_state('canceled')
@api.multi
def reject_check(self):
""" Lo que deberia pasar con el cheque cuando se rechaza """
if any(check.state != 'handed' for check in self):
raise ValidationError("No se puede rechazar un cheque que no esta entregado")
self.next_state('handed')
@api.multi
def revert_reject(self):
""" Lo que deberia pasar con el cheque cuando se revierte un rechazo """
if any(check.state != 'rejected' for check in self):
raise ValidationError("Funcionalidad unica para cheques rechazados")
self.cancel_state('rejected')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Gknoblau/gladitude
|
twitter/twitter_tweepy.py
|
Python
|
mit
| 3,076
| 0.002276
|
import datetime
import tweepy
from geopy.geocoders import Nominatim
import json
from secret import *
import boto3
import re
import preprocessor as p
import time
p.set_options(p.OPT.URL, p.OPT.EMOJI)
# Get the service resource.
dynamodb = boto3.resource('dynamodb', region_name='us-west-2')
table = dynamodb.Table('fuck')
geolocator = Nominatim()
epoch = datetime.datetime.utcfromtimestamp(0)
with open('zip2fips.json') as data_file:
zip2fips = json.load(data_file)
def get_fips(coords):
location = geolocator.reverse('{:f}, {:f}'.format(coords[0], coords[1]))
zipcode = None
fips = None
if 'address' in location.raw:
if 'country_code' in location.raw['address']:
if location.raw['address']['country_code'] == 'us':
if 'postcode' in location.raw['address']:
zipcode = location.raw['address']['postcode']
else:
print("postcode not in location address")
try:
fips = zip2fips[location.raw['address']['postcode']]
                except KeyError:
print("FIPS could not be found")
return fips, zipcode
else:
print("Not in the US")
else:
print("No Country code is in the address")
else:
print("No address")
class TwitterStreamListener(tweepy.StreamListener):
def on_status(self, status):
try:
if status.geo != None:
fips, zipcode = get_fips(status.geo['coordinates'])
if fips is None:
print("Fips is None")
raise Exception
if zipcode is None:
print("Zipcode is None")
raise Exception
txt = re.sub('[!@#$]', '', status.text)
txt = p.clean(txt)
try:
table.update_item(
Key={
'fips': int(fips)
},
UpdateExpression='ADD tweet :val1',
ExpressionAttributeValues={
':val1': set([txt])
}
)
except:
print("it crashed")
print("FIPS:" + fips)
print("TXT:" + txt)
except Exception as e:
print(e)
def on_error(self, status):
print(status)
if __name__ == "__main__":
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
# for i in range(4):
# t = threading.Thread(target=worker)
# t.daemon = True # thread dies when main thread (only non-daemon thread) exits.
# t.start()
stream = tweepy.Stream(auth, TwitterStreamListener())
while True:
try:
stream.filter(locations=[-125.0011, 24.9493, -66.9326, 49.5904])
except:
continue
        time.sleep(10)
#stream.sample(1)
|
grapesmoker/nba
|
game/Season.py
|
Python
|
gpl-2.0
| 12,250
| 0.002286
|
from __future__ import division
__author__ = 'jerry'
from utils.settings import pbp, seasons
from Game import Game
class NoCollectionError(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class SeasonDataError(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class Season:
# This is a helper class to manage a full season's worth of data
_coll = pbp
def __init__(self, season=None, collection=None):
self._games = []
self._data = []
self._start_date = None
self._end_date = None
self._season = season
self._asg = None
self._index = 0
if collection is not None:
self.__class__._coll = collection
self._coll = collection
elif self._coll is None:
if self.__class__._coll is None:
raise NoCollectionError('Must have a collection in MongoDB!')
else:
self._coll = self.__class__._coll
if season is not None:
self.get_by_season(season)
def get_by_season(self, season):
season_data = seasons.find_one({'season': season})
self._start_date = season_data['start']
self._end_date = season_data['end']
self._asg = season_data['allStarGame']
data = self._coll.find({'game_date': {'$gte': self._start_date, '$lte': self._end_date}})
self._data = data
self.set_data(self._data)
def set_data(self, data):
self._games = []
self._games = sorted([Game(event_id=game['id']) for game in data])
# for game_json in data:
# event_id = game_json['id']
# game = Game(event_id=event_id)
# self._games.append(game)
#self._games = sorted(self._games)
#self._start_date = self._games[0].date
#self._end_date = self._games[-1].date
def __str__(self):
return '{}-{} NBA Season'.format(self.season, self.season + 1)
def __iter__(self):
self._index = 0
return self
def next(self):
try:
game = self.games[self._index]
except IndexError:
raise StopIteration
self._index += 1
return game
@property
def games(self):
return self._games
@property
def start_date(self):
return self._start_date
@property
def end_date(self):
return self._end_date
@property
def season(self):
return self._season
def __len__(self):
return len(self.season)
@property
def regular_season_games(self):
return [game for game in self._games if game.game_type == 'Regular Season']
@property
def postseason_games(self):
return [game for game in self._games if game.game_type == 'Postseason']
def get_all_games_in_range(self, start_date=None, end_date=None):
if start_date is not None and end_date is None:
games = [game for game in self.games if start_date <= game.date]
elif start_date is None and end_date is not None:
games = [game for game in self.games if game.date <= end_date]
elif start_date is not None and end_date is not None:
games = [game for game in self.games if start_date <= game.date <= end_date]
else:
games = self.games
return games
def get_team_games_in_range(self, team, start_date=None, end_date=None):
games = [game for game in self.get_all_games_in_range(start_date, end_date)
if game.is_away(team) or game.is_home(team)]
return games
def get_player_games_in_range(self, player, start_date=None, end_date=None):
games = [game for game in self.get_all_games_in_range(start_date, end_date)
if game.player_in_game(player)]
return games
def drtg(self, team, start_date=None, end_date=None):
games = self.get_team_games_in_range(team, start_date, end_date)
pts_against = 0
possessions = 0
for game in games:
opponent = game.opponent(team)
pts_against += game.score(opponent)
possessions += game.possessions(opponent)
drtg = 100 * pts_against / possessions
return drtg
def ortg(self, team, start_date=None, end_date=None):
games = self.get_team_games_in_range(team, start_date, end_date)
pts_scored = 0
possessions = 0
for game in games:
pts_scored += game.score(team)
possessions += game.possessions(team)
ortg = 100 * pts_scored / possessions
return ortg
def player_ortg(self, player, start_date=None, end_date=None):
games_played = self.get_player_games_in_range(player, start_date, end_date)
ast = 0
fgm = 0
fga = 0
fta = 0
ftm = 0
tov = 0
threes = 0
orb = 0
pts = 0
mp = 0
team_fgm = 0
team_fga = 0
team_ast = 0
team_mp = 0
team_ftm = 0
team_fta = 0
team_orb = 0
team_pts = 0
team_3pm = 0
team_tov = 0
opp_dreb = 0
for game in games_played:
player_data = game.player_boxscore(player)
team = game.player_team(player)
opponent = game.opponent(team)
team_data = game.team_boxscore(team)['teamStats']
opponent_data = game.team_boxscore(opponent)['teamStats']
ast += player_data['assists']
fgm += player_data['fieldGoals']['made']
fga += player_data['fieldGoals']['attempted']
fta += player_data['freeThrows']['attempted']
ftm += player_data['freeThrows']['made']
tov += player_data['turnovers']
threes += player_data['threePointFieldGoals']['made']
orb += player_data['rebounds']['offensive']
pts += player_data['points']
mp += player_data['totalSecondsPlayed'] / 60.0
team_fgm += team_data['fieldGoals']['made']
team_fga += team_data['fieldGoals']['attempted']
team_ast += team_data['assists']
team_mp += team_data['minutes']
team_ftm += team_data['freeThrows']['made']
team_fta += team_data['freeThrows']['attempted']
team_orb += team_data['rebounds']['offensive']
team_pts += team_data['points']
team_3pm += team_data['threePointFieldGoals']['made']
team_tov += team_data['turnovers']['total']
opp_dreb += opponent_data['rebounds']['defensive']
team_orb_pct = team_orb / (opp_dreb + team_orb)
ft_part = (1 - (1 - (ftm / fta))**2) * 0.4 * fta
ast_part = 0.5 * (((team_pts - team_ftm) - (pts - ftm)) / (2 * (team_fga - fga))) * ast
q_ast = ((mp / (team_mp / 5)) * (1.14 * ((team_ast - ast) / team_fgm))) + ((((team
|
_ast / team_mp) * mp * 5 - ast) / ((team_fgm / team_mp) * mp * 5 - fgm)) * (1 - (mp / (team_mp / 5))))
fg_part = fgm * (1 - 0.5 * ((pts - ftm
|
) / (2 * fga)) * q_ast)
team_scoring_poss = team_fgm + (1 - (1 - (team_ftm / team_fta))**2) * team_fta * 0.4
team_play_pct = team_scoring_poss / (team_fga + team_fta * 0.4 + team_tov)
team_orb_weight = ((1 - team_orb_pct) * team_play_pct) / ((1 - team_orb_pct) * team_play_pct + team_orb_pct * (1 - team_play_pct))
orb_part = orb * team_orb_weight * team_play_pct
scr_poss = (fg_part + ast_part + ft_part) * (1 - (team_orb / team_scoring_poss) * team_orb_weight * team_play_pct) + orb_part
fg_x_poss = (fga - fgm) * (1 - 1.07 * team_orb_pct)
ft_x_poss = ((1 - (ftm / fta))**2) * 0.4 * fta
tot_poss = scr_poss + fg_x_poss + ft_x_poss + tov
pprod_fg_part = 2 * (fgm + 0.5 * threes) * (1 - 0.5 * ((pts - ftm) / (2 * fga)) * q_ast)
pprod_ast_part = 2 * ((team_fgm - fgm + 0.5 * (team_3pm - threes)) / (team_fgm - fgm)) * 0.5 * (((team_pts - team_ftm) - (pts - ftm)) / (2 * (team_fga - fga))) * ast
|
bobisme/hello
|
python/hello3.py
|
Python
|
mit
| 50
| 0
|
#!
|
/usr/bin/env python3
print('hel
|
lo hello hello')
|
pythonvietnam/pbc082015
|
VuQuangThang/21082015_B2/bai4.py
|
Python
|
gpl-2.0
| 433
| 0.080831
|
#def binh_phuong()
try:
a=int(raw_input("Nhap so n>=0 \n"))
while a<=0:
a=int(raw_input("Nhap lai so n>=0\n "))
print "%d" %(a)
b=pow(a,2)
c=int(raw_input("Doan so binh phuong cua ban\n"))
while c!=b:
if c<b:
|
print"chua dung, cao len 1 chut\n"
c=input()
else:
print"qua rui giam xuong ti\n"
c=input()
print "Chinh xac ket qua la %d" %
|
(c)
except:
print "Ban nhap khong dung kieu Integer"
|
planBrk/domaincrawler
|
test/test_link_aggregator.py
|
Python
|
apache-2.0
| 3,054
| 0.007531
|
import unittest
import logging
from domaincrawl.link_aggregator import LinkAggregator
from domaincrawl.link_filters import DomainFilter, is_acceptable_url_scheme
from domaincrawl.site_graph import SiteGraph
from domaincrawl.util import URLNormalizer, extract_domain_port
class LinkAggregatorTest(unittest.TestCase):
logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=logging.DEBUG, datefmt='%m/%d/%Y %I:%M:%S %p')
def test_link_dedup(self):
base_url = "acme.com:8999"
base_domain, port = extract_domain_port(base_url)
logger = logging.getLogger()
url_norm = URLNormalizer(base_domain, port)
normalized_url = url_norm.normalize_with_domain(base_url)
logger.debug("Constructed normalized base url : %s"%normalized_url)
domain_filter = DomainFilter(base_domain, logger)
site_graph = SiteGraph(logger)
link_aggregator = LinkAggregator(logger, site_graph, link_mappers=[url_norm.normalize_with_domain], link_filters=[domain_filter.passes, is_acceptable_url_scheme])
valid_links = ["/a/b","/a/b/./","http://acme.com:8002/a","https://acme.com:8002/b?q=asd#frag"]
expected_links = ["http://acme.com:8999/a/b","http://acme.com:8002/a","https://acme.com:8002/b"]
# Th
|
is time, we also specify a referrer page
filtered_links = link_aggregator.filter_update_links(valid_links, normalized_url)
self.assertListEqual(expected_links,filtered_links)
self.assertSetEqual(set(expected_links),link_aggregator._links)
|
# Second invocation should result in deduplication
filtered_links = link_aggregator.filter_update_links(valid_links, None)
self.assertTrue(len(filtered_links) == 0)
self.assertSetEqual(set(expected_links),link_aggregator._links)
# None of the invalid links should pass
invalid_links = ["mailto://[email protected]","code.acme.com","code.acme.com/b","https://127.122.9.1"]
filtered_links = link_aggregator.filter_update_links(invalid_links, None)
self.assertTrue(len(filtered_links) == 0)
self.assertSetEqual(set(expected_links),link_aggregator._links)
# A new valid link should pass
new_valid_links = ["http://acme.com:8999/"]
filtered_links = link_aggregator.filter_update_links(new_valid_links, None)
expected_result = ["http://acme.com:8999"]
self.assertListEqual(expected_result,filtered_links)
expected_result_set = set(expected_links)
expected_result_set.update(set(expected_result))
self.assertSetEqual(expected_result_set,link_aggregator._links)
self.assertEqual(len(expected_result_set), site_graph.num_nodes())
for link in expected_result_set:
self.assertTrue(site_graph.has_vertex(link))
self.assertEqual(len(expected_links), site_graph.num_edges())
for link in expected_links:
self.assertTrue(site_graph.has_edge(normalized_url, link))
|
JackMc/CourseScraper
|
web/main.py
|
Python
|
mit
| 311
| 0
|
from flask import Flask, request, session, g, redirect, url_for, \
abort, flash
import db
import routes
DATABASE = 'test.db'
DEBUG = True
SECRET_KEY = 'key'
USERNAME = 'ad
|
min'
P
|
ASSWORD = 'password'
app = Flask(__name__)
app.config.from_object(__name__)
if __name__ == '__main__':
app.run()
|
jenshnielsen/hemelb
|
Tools/hemeTools/parsers/snapshot/__init__.py
|
Python
|
lgpl-3.0
| 11,074
| 0.00587
|
#
# Copyright (C) University College London, 2007-2012, all rights reserved.
#
# This file is part of HemeLB and is provided to you under the terms of
# the GNU LGPL. Please see LICENSE in the top level directory for full
# details.
#
import numpy as np
import xdrlib
import warnings
from .. import HemeLbMagicNumber
SnapshotMagicNumber = 0x736e7004
def HemeLbSnapshot(filename):
"""Guess which file format we were given and use the correct class
to open it.
We have to handle a number of cases:
- the original text format;
- the XDR copy thereof, and
- the updated (August 2011) version with format magic and version
numbers and more metadata.
"""
start = file(filename).read(8)
reader = xdrlib.Unpacker(start)
firstInt = reader.unpack_uint()
if firstInt == HemeLbMagicNumber:
assert reader.unpack_uint() == SnapshotMagicNumber
cls = VersionedXdrSnapshot
elif firstInt == 0 or firstInt == 1 or firstInt == 2:
# It is the basic Xdr format that starts with the stablity flag
cls = XdrSnapshotVersionOne
# Maybe text? If so, the first character should be a '0', '1' or '2', followed by a newline
    elif (start[0] == '0' or start[0] == '1' or start[0] == '2') and start[1] == '\n':
cls = TextSnapshot
else:
raise ValueError('Cannot determine version of snapshot file "%s"' % filename)
return cls(filename)
class BaseSnapshot(np.recarray):
"""Base class wrapping a HemeLB snapshot.
Snap is basically a numpy record array with the following fields:
- id (int) -- an id number (basically the index of the point in the
file
- position (3x float) -- the position in input space (m)
- grid (3x int) -- the (x, y, z) coordinates in lattice units
- pressure (float) -- the pressure in physical units (mmHg)
- velocity (3x float) -- (x,y,z) components of the velocity field
in physical units (m/s)
- stress (float) -- the von Mises stress in physical units (Pa)
It has a number of additional properties (see __readHeader for full details)
"""
_raw_row = [('id', int),
('position', float, (3,)),
('grid', int, (3,)),
('pressure', float),
('velocity', float, (3,)),
('stress', float)]
_readable_row = np.dtype(_raw_row[2:])
row = np.dtype(_raw_row)
_attrs = {'stable': None,
'voxel_size': None,
'origin': np.array([np.nan, np.nan, np.nan]),
'bb_min': None,
'bb_max': None,
'bb_len': None,
'voxel_count': None}
# header = len(_attrs)
def __new__(cls, filename):
"""Create a new instance. Numpy array subclasses use this
method instead of __init__ for initialization.
"""
headerDict = cls._readHeader(filename)
noindex = cls._load(filename, headerDict)
index = np.recarray(shape=noindex.shape, dtype=cls.row)
for el in cls._raw_row[2:]:
key = el[0]
index.__setattr__(key, noindex.__getattribute__(key))
continue
index.id = np.arange(len(noindex))
try:
index.position = cls._computePosition(index.grid, headerDict)
except:
index.position = np.nan
pass
obj = index.view(cls)
# Set the attributes on the snapshot
for headerField in headerDict:
setattr(obj, headerField, headerDict[headerField])
continue
return obj
def __array_finalize__(self, parent):
"""Numpy special method."""
if parent is None:
return
for a in self._attrs:
setattr(self, a, getattr(parent, a, self._attrs[a]))
continue
return
pass
class PositionlessSnapshot(BaseSnapshot):
"""Base class for the original text snapshots and the XDR
equivalent. These lack the data required to compute the positions
of grid points. It is supplied through the coords.asc file
generated by the old setuptool.
"""
def computePosition(self, coordsFile):
"""Given the coordinate file from the segtool, calculate all
the lattice positions' coordinates.
"""
from os.path import exists
if exists (coordsFile):
from ...coordinates import Transformer
trans = Transformer(coordsFile)
self.position = 1e-3 * trans.siteToStl(self.grid + self.bb_min)
return
else:
# The coords file is missing!
warnings.warn('Missing coordinates file "%s", assuming origin at [0,0,0]' % coordsFile, stacklevel=2)
self.position = (self.grid + self.bb_min) * self.voxel_size # + origin, but we'll just assume it's zero here.
pass
class TextSnapshot(PositionlessSnapshot):
"""Read a text snapshot.
"""
nHeaderLines = 6
@classmethod
def _readHeader(cls, filename):
"""Read the header lines, according to:
0- Flag for simulation stability, 0 or 1
1- Voxel size in physical units (units of m)
2- vertex coords of the minimum bounding box with minimum values (x, y and z values)
3- vertex coords of the minimum bounding box with maximum values (x, y and z values)
4- #voxels within the minimum bounding box along the x, y, z axes (3 values)
5- total number of fluid voxels
"""
f = file(filename)
stable = int(f.readline())
voxel_size = float(f.readline())
bb_min = np.array([int(x) for x in f.readline().split()])
bb_max = np.array([int(x) for x in f.readline().split()])
bb_len = np.array([int(x) for x in f.readline().split()])
voxel_count = int(f.readline())
return {'stable': stable,
'voxel_size': voxel_size,
'bb_min': bb_min,
'bb_max': bb_max,
'bb_len': bb_len,
'voxel_
|
count': voxel_count}
@classmethod
def _load(cls, filename, header):
return np.loadtxt(filename,
skiprows=cls.nHeaderLines,
dtype=cls._readable_row).view(np.recarray)
pass
class XdrVoxelFormatOneSnapshot(object):
@classmethod
def _load(cls, filename, header):
# Skip past t
|
he header, slurp data, create XDR object
f = file(filename)
f.seek(cls._headerLengthBytes)
reader = xdrlib.Unpacker(f.read())
ans = np.recarray((header['voxel_count'],), dtype=cls._readable_row)
# Read all the voxels.
for i in xrange(header['voxel_count']):
ans[i] = ((reader.unpack_int(),
reader.unpack_int(),
reader.unpack_int()),
reader.unpack_float(),
(reader.unpack_float(),
reader.unpack_float(),
reader.unpack_float()),
reader.unpack_float())
continue
reader.done()
return ans
pass
class XdrSnapshotVersionOne(PositionlessSnapshot, XdrVoxelFormatOneSnapshot):
"""Read an old-style XDR snapshot.
"""
# int float 3x int 3x int 3x int int
_headerLengthBytes = 4 + 8 + 3*4 + 3*4 + 3*4 + 4
@classmethod
def _readHeader(cls, filename):
"""Read the header lines, according to:
0- Flag for simulation stability, 0 or 1
1- Voxel size in physical units (units of m)
2- vertex coords of the minimum bounding box with minimum values (x, y and z values)
3- vertex coords of the minimum bounding box with maximum values (x, y and z values)
4- #voxels within the minimum bounding box along the x, y, z axes (3 values)
5- total number of fluid voxels
"""
reader = xdrlib.Unpacker(file(filename).read(cls._headerLengthBytes))
header = {}
header['stable'] = r
|
popazerty/EG-2
|
lib/python/Components/Task.py
|
Python
|
gpl-2.0
| 15,627
| 0.03398
|
# A Job consists of many "Tasks".
# A task is the run of an external tool, with proper methods for failure handling
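# A minimal usage sketch (the tool path, arguments and callback below are
# assumptions for illustration, not taken from this module's callers):
#
#   job = Job("Copy recording")
#   task = Task(job, "copying")
#   task.setTool("cp")            # adds ToolExistsPrecondition / ReturncodePostcondition
#   task.args += ["-r", "/media/src", "/media/dst"]
#   job.start(lambda job, task, problems: None)   # or hand the job to an external scheduler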
from Tools.CList import CList
class Job(object):
NOT_STARTED, IN_PROGRESS, FINISHED, FAILED = range(4)
def __init__(self, name):
self.tasks = [ ]
self.resident_tasks = [ ]
self.workspace = "/tmp"
self.current_task = 0
self.callback = None
self.name = name
self.finished = False
self.end = 100
self.__progress = 0
self.weightScale = 1
self.afterEvent = None
self.state_changed = CList()
self.status = self.NOT_STARTED
self.onSuccess = None
# description is a dict
def fromDescription(self, description):
pass
def createDescription(self):
return None
def getProgress(self):
if self.current_task == len(self.tasks):
return self.end
t = self.tasks[self.current_task]
jobprogress = t.weighting * t.progress / float(t.end) + sum([task.weighting for task in self.tasks[:self.current_task]])
return int(jobprogress*self.weightScale)
progress = property(getProgress)
def getStatustext(self):
return { self.NOT_STARTED: _("Waiting"), self.IN_PROGRESS: _("In progress"), self.FINISHED: _("Finished"), self.FAILED: _("Failed") }[self.status]
def task_progress_changed_CB(self):
self.state_changed()
def addTask(self, task):
task.job = self
task.task_progress_changed = self.task_progress_changed_CB
self.tasks.append(task)
def start(self, callback):
assert self.callback is None
self.callback = callback
self.restart()
def restart(self):
self.status = self.IN_PROGRESS
self.state_changed()
self.runNext()
sumTaskWeightings = sum([t.weighting for t in self.tasks]) or 1
self.weightScale = self.end / float(sumTaskWeightings)
def runNext(self):
if self.current_task == len(self.tasks):
if len(self.resident_tasks) == 0:
self.status = self.FINISHED
self.state_changed()
self.callback(self, None, [])
self.callback = None
else:
print "still waiting for %d resident task(s) %s to finish" % (len(self.resident_tasks), str(self.resident_tasks))
else:
self.tasks[self.current_task].run(self.taskCallback)
self.state_changed()
def taskCallback(self, task, res, stay_resident = False):
cb_idx = self.tasks.index(task)
if stay_resident:
if cb_idx not in self.resident_tasks:
self.resident_tasks.append(self.current_task)
print "task going resident:", task
else:
print "task keeps staying resident:", task
return
if len(res):
print ">>> Error:", res
self.status = self.FAILED
self.state_changed()
self.callback(self, task, res)
if cb_idx != self.current_task:
if cb_idx in self.resident_tasks:
print "resident task finished:", task
self.resident_tasks.remove(cb_idx)
if not res:
self.state_changed()
self.current_task += 1
self.runNext()
def retry(self):
assert self.status == self.FAILED
self.restart()
def abort(self):
if self.current_task < len(self.tasks):
self.tasks[self.current_task].abort()
for i in self.resident_tasks:
self.tasks[i].abort()
def cancel(self):
self.abort()
def __str__(self):
return "Components.Task.Job name=%s #tasks=%s" % (self.name, len(self.tasks))
class Task(object):
def __init__(self, job, name):
self.name = name
self.immediate_preconditions = [ ]
self.global_preconditions = [ ]
self.postconditions = [ ]
self.returncode = None
self.initial_input = None
self.job = None
self.end = 100
self.weighting = 100
self.__progress = 0
self.cmd = None
self.cwd = "/tmp"
self.args = [ ]
self.cmdline = None
self.task_progress_changed = None
self.output_line = ""
job.addTask(self)
self.container = None
def setCommandline(self, cmd, args):
self.cmd = cmd
self.args = args
def setTool(self, tool):
self.cmd = tool
self.args = [tool]
self.global_preconditions.append(ToolExistsPrecondition())
self.postconditions.append(ReturncodePostcondition())
def setCmdline(self, cmdline):
self.cmdline = cmdline
def checkPreconditions(self, immediate = False):
not_met = [ ]
if immediate:
preconditions = self.immediate_preconditions
else:
preconditions = self.global_preconditions
for precondition in preconditions:
if not precondition.check(self):
not_met.append(precondition)
return not_met
def _run(self):
if (self.cmd is None) and (self.cmdline is None):
self.finish()
return
from enigma import eConsoleAppContainer
self.container = eConsoleAppContainer()
self.container.appClosed.append(self.processFinished)
self.container.stdoutAvail.append(self.processStdout)
self.container.stderrAvail.append(self.processStderr)
if self.cwd is not None:
self.container.setCWD(self.cwd)
if not self.cmd and self.cmdline:
print "execute:", self.container.execute(self.cmdline), self.cmdline
else:
assert self.cmd is not None
assert len(self.args) >= 1
print "execute:", self.container.execute(self.cmd, *self.args), ' '.join(self.args)
if self.initial_input:
self.writeInput(self.initial_input)
def run(self, callback):
failed_preconditions = self.checkPreconditions(True) + self.checkPreconditions(False)
if failed_preconditions:
print "[Task] preconditions failed"
callback(self, failed_preconditions)
return
self.callback = callback
try:
self.prepare()
self._run()
except Exception, ex:
print "[Task] exception:", ex
self.postconditions = [FailedPostcondition(ex)]
self.finish()
def prepare(self):
pass
def cleanup(self, failed):
pass
def processStdout(self, data):
self.processOutput(data)
def processStderr(self, data):
self.processOutput(data)
def processOutput(self, data):
self.output_line += data
while True:
i = self.output_line.find('\n')
if i == -1:
break
self.processOutputLine(self.output_line[:i+1])
self.output_line = self.output_line[i+1:]
def processOutputLine(self, line):
print "[Task %s]" % self.name, line[:-1]
pass
def processFinished(self, returncode):
self.returncode = returncode
self.finish()
def abort(self):
if self.container:
self.container.kill()
self.finish(aborted = True)
def finish(self, aborted = False):
self.afterRun()
not_met = [ ]
if a
|
borted:
not_met.append(AbortedPostcondition())
else:
for postcondition in self.postconditions:
if not postcondition.check(self):
not_met.append(postcondition)
self.cleanup(not_met)
self.callback(self, not_met)
def afterRun(self):
pass
def writeInput(sel
|
f, input):
self.container.write(input)
def getProgress(self):
return self.__progress
def setProgress(self, progress):
if progress > self.end:
progress = self.end
if progress < 0:
progress = 0
self.__progress = progress
if self.task_progress_changed:
self.task_progress_changed()
progress = property(getProgress, setProgress)
def __str__(self):
return "Components.Task.Task name=%s" % self.name
class LoggingTask(Task):
def __init__(self, job, name):
Task.__init__(self, job, name)
self.log = []
def processOutput(self, data):
print "[%s]" % self.name, data,
self.log.append(data)
class PythonTask(Task):
def _run(self):
from twisted.internet import threads
from enigma import eTimer
self.aborted = False
self.pos = 0
threads.deferToThread(self.work).addBoth(self.onComplete)
self.timer = eTimer()
self.timer.callback.append(self.onTimer)
self.timer.start(5)
def work(self):
        raise NotImplementedError("work")
def abort(self):
self.aborted = True
if self.callback is None:
self.finish(aborted = True)
def onTimer(self):
self.setProgress(self.pos)
def onComplete(self, result):
self.postconditions.append(FailedPostcondition(result))
self.timer.stop()
del self.timer
self.finish()
class ConditionTask(Task):
"""
Reactor-driven pthread_condition.
Wait for something to happen. Call trigger when something occurs that
is likely to make check() return true. Raise exception in check() to
signal error.
Default is to call trigger() once per second, override prepare/cleanup
to do something else (like waiting for hotplug)...
"""
def __init__(self, job, name, timeoutCount=None):
Task.__init__
|
ntt-sic/heat
|
heat/tests/test_neutron_loadbalancer.py
|
Python
|
apache-2.0
| 34,298
| 0.000087
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from testtools import skipIf
from heat.common import exception
from heat.common import template_format
from heat.engine import clients
from heat.engine import scheduler
from heat.engine.resources.neutron import loadbalancer
from heat.openstack.common.importutils import try_import
from heat.tests import fakes
from heat.tests import utils
from heat.tests.common import HeatTestCase
from heat.tests.v1_1 import fakes as nova_fakes
neutronclient = try_import('neutronclient.v2_0.client')
health_monitor_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template to test load balancer resources",
"Parameters" : {},
"Resources" : {
"monitor": {
"Type": "OS::Neutron::HealthMonitor",
"Properties": {
"type": "HTTP",
"delay": 3,
"max_retries": 5,
"timeout": 10
}
}
}
}
'''
pool_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template to test load balancer resources",
"Parameters" : {},
"Resources" : {
"pool": {
"Type": "OS::Neutron::Pool",
"Properties": {
"protocol": "HTTP",
"subnet_id": "sub123",
"lb_method": "ROUND_ROBIN",
"vip": {
"protocol_port": 80
}
}
}
}
}
'''
member_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template to test load balancer member",
"Resources" : {
"member": {
"Type": "OS::Neutron::PoolMember",
"Properties": {
"protocol_port": 8080,
"pool_id": "pool123",
"address": "1.2.3.4"
}
}
}
}
'''
lb_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template to test load balancer resources",
"Parameters" : {},
"Resources" : {
"lb": {
"Type": "OS::Neutron::LoadBalancer",
"Properties": {
"protocol_port": 8080,
"pool_id": "pool123",
"members": ["1234"]
}
}
}
}
'''
pool_with_session_persistence_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template to test load balancer resources wit",
"Parameters" : {},
"Resources" : {
"pool": {
"Type": "OS::Neutron::Pool",
"Properties": {
"protocol": "HTTP",
"subnet_id": "sub123",
"lb_method": "ROUND_ROBIN",
"vip": {
"protocol_port": 80,
"session_persistence": {
"type": "APP_COOKIE",
"cookie_name": "cookie"
}
}
}
}
}
}
'''
@skipIf(neutronclient is None, 'neutronclient unavailable')
class HealthMonitorTest(HeatTestCase):
def setUp(self):
super(HealthMonitorTest, self).setUp()
self.m.StubOutWithMock(neutronclient.Client, 'create_health_monitor')
self.m.StubOutWithMock(neutronclient.Client, 'delete_health_monitor')
self.m.StubOutWithMock(neutronclient.Client, 'show_health_monitor')
self.m.StubOutWithMock(neutronclient.Client, 'update_health_monitor')
self.m.StubOutWithMock(clients.OpenStackClients, 'keystone')
utils.setup_dummy_db()
def create_health_monitor(self):
clients.OpenStackClients.keystone().AndReturn(
fakes.FakeKeystoneClient())
neutronclient.Client.create_health_monitor({
'health_monitor': {
'delay': 3, 'max_retries': 5, 'type': u'HTTP',
'timeout': 10, 'admin_state_up': True}}
).AndReturn({'health_monitor': {'id': '5678'}})
snippet = template_format.parse(health_monitor_template)
stack = utils.parse_stack(snippet)
return loadbalancer.HealthMonitor(
'monitor', snippet['Resources']['monitor'], stack)
def test_create(self):
rsrc = self.create_health_monitor()
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_create_failed(self):
clients.OpenStackClients.keystone().AndReturn(
fakes.FakeKeystoneClient())
neutronclient.Client.create_health_monitor({
'health_monitor': {
'delay': 3, 'max_retries': 5, 'type': u'HTTP',
'timeout': 10, 'admin_state_up': True}}
).AndRaise(loadbalancer.NeutronClientException())
self.m.ReplayAll()
snippet = template_format.parse(health_monitor_template)
stack = utils.parse_stack(snippet)
rsrc = loadbalancer.HealthMonitor(
'monitor', snippet['Resources']['monitor'], stack)
error = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(rsrc.create))
self.assertEqual(
'NeutronClientException: An unknown exception occurred.',
str(error))
self.assertEqual((rsrc.CREATE, rsrc.FAILED), rsrc.state)
self.m.VerifyAll()
def test_delete(self):
neutronclient.Client.delete_health_monitor('5678')
neutronclient.Client.show_health_monitor('5678').AndRaise(
loadbalancer.NeutronClientException(status_code=404))
rsrc = self.create_health_monitor()
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
scheduler.TaskRunner(rsrc.delete)()
self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_delete_already_gone(self):
neutronclient.Client.delete_health_monitor('5678').AndRaise(
loadbalancer.NeutronClientException(status_code=404))
rsrc = self.create_health_monitor()
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
scheduler.TaskRunner(rsrc.delete)()
self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_delete_failed(self):
neutronclient.Client.delete_health_monitor('5678').AndRaise(
loadbalancer.NeutronClientException(status_code=400))
rsrc = self.create_health_monitor()
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
error = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(rsrc.delete))
self.assertEqual(
'NeutronClientException: An unknown exception occurred.',
str(error))
self.assertEqual((rsrc.DELETE, rsrc.FAILED), rsrc.state)
self.m.VerifyAll()
def test_attribute(self):
rsrc = self.create_health_monitor()
neutronclient.Client.show_health_monitor('5678').MultipleTimes(
).AndReturn(
{'health_monitor': {'admin_state_up': True, 'delay': 3}})
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.assertIs(True, rsrc.FnGetAtt('admin_state_up'))
self.assertEqual(3, rsrc.FnGetAtt('delay'))
self.m.VerifyAll()
def test_attribute_failed(self):
rsrc = self.create_health_monitor()
self.m.ReplayAll()
|
scheduler.TaskRunner(rsrc.create)()
error = self.assertRaises(exception.InvalidTemplateAttribute,
|
rsrc.FnGetAtt, 'subnet_id')
self.assertEqual(
'The Referenced Attribute (monitor subnet_id) is incorrect.',
str(error))
self.m.VerifyAll()
def test_update(self):
rsrc = self.create_health_monitor()
neutronclient.Client.update_health_monitor(
'5678', {'health_monitor': {'delay': 10}})
s
|
adrianholovaty/django
|
tests/regressiontests/csrf_tests/tests.py
|
Python
|
bsd-3-clause
| 13,454
| 0.002155
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.context_processors import csrf
from django.http import HttpRequest, HttpResponse
from django.middleware.csrf import CsrfViewMiddleware, CSRF_KEY_LENGTH
from django.template import RequestContext, Template
from django.test import TestCase
from django.views.decorators.csrf import csrf_exempt, requires_csrf_token, ensure_csrf_cookie
# Response/views used for CsrfResponseMiddleware and CsrfViewMiddleware tests
def post_form_response():
resp = HttpResponse(content=u"""
<html><body><h1>\u00a1Unicode!<form method="post"><input type="text" /></form></body></html>
""", mimetype="text/html")
return resp
def post_form_view(request):
"""A view that returns a POST form (without a token)"""
return post_form_response()
# Response/views used for template tag tests
def token_view(request):
"""A view that uses {% csrf_token %}"""
context = RequestContext(request, processors=[csrf])
template = Template("{% csrf_token %}")
return HttpResponse(template.render(context))
def non_token_view_using_request_processor(request):
"""
A view that doesn't use the token, but does use the csrf view processor.
"""
context = RequestContext(request, processors=[csrf])
template = Template("")
return HttpResponse(template.render(context))
class TestingHttpRequest(HttpRequest):
"""
A version of HttpRequest that allows us to change some things
more easily
"""
def is_secure(self):
return getattr(self, '_is_secure_override', False)
class CsrfViewMiddlewareTest(TestCase):
# The csrf token is potentially from an untrusted source, so could have
# characters that need dealing with.
_csrf_id_cookie = "<1>\xc2\xa1"
_csrf_id = "1"
def _get_GET_no_csrf_cookie_request(self):
return TestingHttpRequest()
def _get_GET_csrf_cookie_request(self):
req = TestingHttpRequest()
req.COOKIES[settings.CSRF_COOKIE_NAME] = self._csrf_id_cookie
return req
def _get_POST_csrf_cookie_request(self):
req = self._get_GET_csrf_cookie_request()
req.method = "POST"
return req
def _get_POST_no_csrf_cookie_request(self):
req = self._get_GET_no_csrf_cookie_request()
req.method = "POST"
return req
def _get_POST_request_with_token(self):
req = self._get_POST_csrf_cookie_request()
req.POST['csrfmiddlewaretoken'] = self._csrf_id
return req
def _check_token_present(self, response, csrf_id=None):
self.assertContains(response, "name='csrfmiddlewaretoken' value='%s'" % (csrf_id or self._csrf_id))
def test_process_view_token_too_long(self):
"""
Check that if the token is longer than expected, it is ignored and
a new token is created.
"""
req = self._get_GET_no_csrf_cookie_request()
req.COOKIES[settings.CSRF_COOKIE_NAME] = 'x' * 10000000
CsrfViewMiddleware().process_view(req, token_view, (), {})
resp = token_view(req)
resp2 = CsrfViewMiddleware().process_response(req, resp)
csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, False)
self.assertEqual(len(csrf_cookie.value), CSRF_KEY_LENGTH)
def test_process_response_get_token_used(self):
"""
When get_token is used, check that the cookie is created and headers
patched.
"""
req = self._get_GET_no_csrf_cookie_request()
# Put tests for CSRF_COOKIE_* settings here
with self.settings(CSRF_COOKIE_NAME='myname',
CSRF_COOKIE_DOMAIN='.example.com',
CSRF_COOKIE_PATH='/test/',
CSRF_COOKIE_SECURE=True):
# token_view calls get_token() indirectly
CsrfViewMiddleware().process_view(req, token_view, (), {})
resp = token_view(req)
resp2 = CsrfViewMiddleware(
|
).process_response(req, res
|
p)
csrf_cookie = resp2.cookies.get('myname', False)
self.assertNotEqual(csrf_cookie, False)
self.assertEqual(csrf_cookie['domain'], '.example.com')
self.assertEqual(csrf_cookie['secure'], True)
self.assertEqual(csrf_cookie['path'], '/test/')
self.assertTrue('Cookie' in resp2.get('Vary',''))
def test_process_response_get_token_not_used(self):
"""
Check that if get_token() is not called, the view middleware does not
add a cookie.
"""
# This is important to make pages cacheable. Pages which do call
# get_token(), assuming they use the token, are not cacheable because
# the token is specific to the user
req = self._get_GET_no_csrf_cookie_request()
# non_token_view_using_request_processor does not call get_token(), but
# does use the csrf request processor. By using this, we are testing
# that the view processor is properly lazy and doesn't call get_token()
# until needed.
CsrfViewMiddleware().process_view(req, non_token_view_using_request_processor, (), {})
resp = non_token_view_using_request_processor(req)
resp2 = CsrfViewMiddleware().process_response(req, resp)
csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, False)
self.assertEqual(csrf_cookie, False)
# Check the request processing
def test_process_request_no_csrf_cookie(self):
"""
Check that if no CSRF cookies is present, the middleware rejects the
incoming request. This will stop login CSRF.
"""
req = self._get_POST_no_csrf_cookie_request()
req2 = CsrfViewMiddleware().process_view(req, post_form_view, (), {})
self.assertEqual(403, req2.status_code)
def test_process_request_csrf_cookie_no_token(self):
"""
Check that if a CSRF cookie is present but no token, the middleware
rejects the incoming request.
"""
req = self._get_POST_csrf_cookie_request()
req2 = CsrfViewMiddleware().process_view(req, post_form_view, (), {})
self.assertEqual(403, req2.status_code)
def test_process_request_csrf_cookie_and_token(self):
"""
Check that if both a cookie and a token is present, the middleware lets it through.
"""
req = self._get_POST_request_with_token()
req2 = CsrfViewMiddleware().process_view(req, post_form_view, (), {})
self.assertEqual(None, req2)
def test_process_request_csrf_cookie_no_token_exempt_view(self):
"""
Check that if a CSRF cookie is present and no token, but the csrf_exempt
decorator has been applied to the view, the middleware lets it through
"""
req = self._get_POST_csrf_cookie_request()
req2 = CsrfViewMiddleware().process_view(req, csrf_exempt(post_form_view), (), {})
self.assertEqual(None, req2)
def test_csrf_token_in_header(self):
"""
Check that we can pass in the token in a header instead of in the form
"""
req = self._get_POST_csrf_cookie_request()
req.META['HTTP_X_CSRFTOKEN'] = self._csrf_id
req2 = CsrfViewMiddleware().process_view(req, post_form_view, (), {})
self.assertEqual(None, req2)
def test_put_and_delete_rejected(self):
"""
Tests that HTTP PUT and DELETE methods have protection
"""
req = TestingHttpRequest()
req.method = 'PUT'
req2 = CsrfViewMiddleware().process_view(req, post_form_view, (), {})
self.assertEqual(403, req2.status_code)
req = TestingHttpRequest()
req.method = 'DELETE'
req2 = CsrfViewMiddleware().process_view(req, post_form_view, (), {})
self.assertEqual(403, req2.status_code)
def test_put_and_delete_allowed(self):
"""
Tests that HTTP PUT and DELETE methods can get through with
X-CSRFToken and a cookie
"""
req = self._get_GET_csrf_cookie_request()
req.method = 'PUT'
req.META['HTTP_X_CSRFTOKEN'] = self._csrf_id
req2 = CsrfViewMiddleware().process_view(re
|
sunrin92/LearnPython
|
1-lpthw/ex32.py
|
Python
|
mit
| 812
| 0.004926
|
the_count = [1, 2, 3, 4, 5]
fruits = ['apple', 'oranges', 'pears', 'apricots',]
change = [1, 'pennies', 2, 'dimes', 3, 'quarters',]
#this first kind of for-loop goes through a list
for number in the_count:
print("This is count %d" % number)
# same as above
for fruit in fruits:
print("A fruit of type: %s" % fruit)
# also we ca
|
n go through mixed lists too
# notice we have to use %r since we don't know what's in it
for i in change:
print("I got %r " % i)
# we can also build lists, first start with an empty one
elements = []
# then use the range function to do 0 to 5 counts
for i in range(0,6):
print("Adding %d to the list." % i)
# append is a function that lists understand
elements.append(i)
# now we can print them out too
for i in elements:
print("Element was: %d"
|
% i)
|
gyimothilaszlo/interval-music-maker
|
AudioMerger.py
|
Python
|
mit
| 1,005
| 0.034826
|
from pydub import *
class AudioMerger:
voice_tags = ["one", "two", "three", "four", "five", "ten", "RUN", "relax", "completed"]
def __init__(
|
self, music):
self.music = music
self.additionalGain = 8
self.voices={}
for voice in self.voice_tags:
sound = AudioSegment.from_file('voices/' + voice + '.wav')
sound += self.additionalGain
self.voices[voice] = sound
def addCountdown(self, startTime, isRun = True):
for i in range(1, 6):
voice = self.voices[self.voice_tags[i - 1]]
self.music = self.music.overlay(voice, position = (startTime - i) * 1000)
|
self.music = self.music.overlay(self.voices["ten"], position = (startTime - 10) * 1000)
voice = self.voices["RUN" if isRun else "relax"]
self.music = self.music.overlay(voice, position = startTime * 1000)
def addCompleted(self, startTimeSec):
self.music = self.music.overlay(self.voices["completed"], position = (startTimeSec * 1000))
def exportMusic(self, fname):
self.music.export(fname + ".mp3", format="mp3")
|
pankajlal/prabandh
|
books/migrations/0004_auto_20160703_2143.py
|
Python
|
apache-2.0
| 384
| 0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-03 16:13
from __future__ import unicod
|
e_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('books', '0003_book_owner'),
]
operations = [
migrations.RenameModel(
old_name='Book',
new_name='BookItem',
),
|
]
|
akash1808/tempest
|
tempest/api/compute/volumes/test_volumes_get.py
|
Python
|
apache-2.0
| 3,025
| 0
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "L
|
icense"); you may
# not use this file except i
|
n compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib.common.utils import data_utils
from testtools import matchers
from tempest.api.compute import base
from tempest import config
from tempest import test
CONF = config.CONF
class VolumesGetTestJSON(base.BaseV2ComputeTest):
@classmethod
def skip_checks(cls):
super(VolumesGetTestJSON, cls).skip_checks()
if not CONF.service_available.cinder:
skip_msg = ("%s skipped as Cinder is not available" % cls.__name__)
raise cls.skipException(skip_msg)
@classmethod
def setup_clients(cls):
super(VolumesGetTestJSON, cls).setup_clients()
cls.client = cls.volumes_extensions_client
@test.idempotent_id('f10f25eb-9775-4d9d-9cbe-1cf54dae9d5f')
def test_volume_create_get_delete(self):
# CREATE, GET, DELETE Volume
volume = None
v_name = data_utils.rand_name('Volume')
metadata = {'Type': 'work'}
# Create volume
volume = self.client.create_volume(display_name=v_name,
metadata=metadata)
self.addCleanup(self.delete_volume, volume['id'])
self.assertIn('id', volume)
self.assertIn('displayName', volume)
self.assertEqual(volume['displayName'], v_name,
"The created volume name is not equal "
"to the requested name")
self.assertTrue(volume['id'] is not None,
"Field volume id is empty or not found.")
# Wait for Volume status to become ACTIVE
self.client.wait_for_volume_status(volume['id'], 'available')
# GET Volume
fetched_volume = self.client.show_volume(volume['id'])
# Verification of details of fetched Volume
self.assertEqual(v_name,
fetched_volume['displayName'],
'The fetched Volume is different '
'from the created Volume')
self.assertEqual(volume['id'],
fetched_volume['id'],
'The fetched Volume is different '
'from the created Volume')
self.assertThat(fetched_volume['metadata'].items(),
matchers.ContainsAll(metadata.items()),
'The fetched Volume metadata misses data '
'from the created Volume')
|
virtualelephant/openstack-heat-bde-plugin
|
plugin/BigDataExtensions.py
|
Python
|
apache-2.0
| 17,510
| 0.003198
|
#!/usr/bin/python
#
# OpenStack Heat Plugin for interfacing with VMware Big Data Extensions
#
# Chris Mutchler - [email protected]
# http://www.VirtualElephant.com
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
import json
import base64
import requests
import subprocess
import pyVmomi
from pyVim import connect
from pyVim.connect import SmartConnect, Disconnect
from pyVmomi import vmodl, vim
from heat.engine import constraints, properties, resource
from heat.openstack.common import log as logging
from neutronclient.neutron import client
logger = logging.getLogger(__name__)
class BigDataExtensions(resource.Resource):
PROPERTIES = (
BDE_ENDPOINT, VCM_SERVER, USERNAME, PASSWORD,
CLUSTER_
|
NAME, CLUSTER_TYPE, NETWORK, CLUSTER_PASSWORD, CLUSTER_RP,
VIO_CONFIG, BDE_CONFIG, SECURITY_GROUP, SUBNET
) = (
'bde_endpoint', 'vcm_server', 'username', 'password',
'cluster_name', 'cluster_type', 'network', 'cluster_password', 'cluster_rp',
'vio_config', 'bde_config', 'security_group', 'subnet'
)
properties_schema = {
BDE_ENDPOINT: properties.Schem
|
a(
properties.Schema.STRING,
required=True,
default='bde.localdomain'
),
VCM_SERVER: properties.Schema(
properties.Schema.STRING,
required=True,
default='vcenter.localdomain'
),
USERNAME: properties.Schema(
properties.Schema.STRING,
required=True,
default='[email protected]'
),
PASSWORD: properties.Schema(
properties.Schema.STRING,
required=True,
default='password'
),
CLUSTER_NAME: properties.Schema(
properties.Schema.STRING,
required=True
),
CLUSTER_TYPE: properties.Schema(
properties.Schema.STRING,
required=True
),
NETWORK: properties.Schema(
properties.Schema.STRING,
required=True
),
CLUSTER_PASSWORD: properties.Schema(
properties.Schema.STRING,
required=False
),
CLUSTER_RP: properties.Schema(
properties.Schema.STRING,
required=True,
default='openstackRP'
),
VIO_CONFIG: properties.Schema(
properties.Schema.STRING,
required=True,
default='/usr/local/bin/etc/vio.config'
),
BDE_CONFIG: properties.Schema(
properties.Schema.STRING,
required=False,
default='/usr/local/bin/etc/bde.config'
),
SECURITY_GROUP: properties.Schema(
properties.Schema.STRING,
required=False,
default='9d3ecec8-e0e3-4088-8c71-8c35cd67dd8b'
),
SUBNET: properties.Schema(
properties.Schema.STRING,
required=True
)
}
def _open_connection(self):
bde_server = self.properties.get(self.BDE_ENDPOINT)
bde_user = self.properties.get(self.USERNAME)
bde_pass = self.properties.get(self.PASSWORD)
header = {'content-type': 'application/x-www-form-urlencoded'}
prefix = 'https://'
port = ':8443'
auth_string = "/serengeti/j_spring_security_check"
data = 'j_username=' + bde_user + '&j_password=' + bde_pass
s = requests.session()
url = prefix + bde_server + port + auth_string
r = s.post(url, data, headers=header, verify=False)
logger.info(_("VirtualElephant::VMware::BDE - Authentication status code %s") % r.json)
return s
def _close_connection(self):
bde_server = self.properties.get(self.BDE_ENDPOINT)
header = {'content-type': 'application/x-www-form-urlencoded'}
url = 'https://' + bde_server + ':8443/serengeti/j_spring_security_logout'
s = requests.session()
r = s.post(url, headers=header, verify=False)
logger.info(_("VirtualElephant::VMware::BDE - Log out status code %s") % r.json)
return
def _create_nsx_ports(self):
# Load VIO environment variables from /usr/local/etc/vio.config
in_file = "/usr/local/etc/vio.config"
f = open(in_file, "ro")
for line in f:
if "OS_AUTH_URL" in line:
trash, os_auth_url = map(str, line.split("="))
os_auth_url = os_auth_url.rstrip('\n')
logger.info(_("VirtualElephant::VMware::BDE - DEBUG os_auth_url %s") % os_auth_url)
elif "OS_TENANT_ID" in line:
trash, os_tenant_id = map(str,line.split("="))
os_tenant_id = os_tenant_id.rstrip('\n')
elif "OS_TENANT_NAME" in line:
trash, os_tenant_name = map(str, line.split("="))
os_tenant_name = os_tenant_name.rstrip('\n')
elif "OS_USERNAME" in line:
trash, os_username = map(str, line.split("="))
os_username = os_username.rstrip('\n')
elif "OS_PASSWORD" in line:
trash, os_password = map(str, line.split("="))
os_password = os_password.rstrip('\n')
elif "OS_URL" in line:
trash, os_url = map(str, line.split("="))
os_url = os_url.rstrip('\n')
elif "OS_TOKEN" in line:
trash, os_token = map(str, line.split("="))
os_token = os_token.rstrip('\n')
d = {}
d['username'] = os_username
d['password'] = os_password
d['auth_url'] = os_auth_url
d['tenant_name'] = os_tenant_name
d['token'] = os_token
d['url'] = os_url
logger.info(_("VirtualElephant::VMware::BDE - Loaded VIO credentials - %s") % d)
# Using BDE API and vSphere API return the MAC address
# for the virtual machines created by BDE.
bde_server = self.properties.get(self.BDE_ENDPOINT)
vcm_server = self.properties.get(self.VCM_SERVER)
admin_user = self.properties.get(self.USERNAME)
admin_pass = self.properties.get(self.PASSWORD)
cluster_name = self.properties.get(self.CLUSTER_NAME)
network_id = self.properties.get(self.NETWORK)
security_group = self.properties.get(self.SECURITY_GROUP)
prefix = 'https://'
port = ':8443'
logger.info(_("VirtualElephant::VMware::BDE - Creating NSX ports for network %s") % network_id)
# Get the node names for the cluster from BDE
curr = self._open_connection()
header = {'content-type': 'application/json'}
api_call = '/serengeti/api/cluster/' + cluster_name
url = prefix + bde_server + port + api_call
r = curr.get(url, headers=header, verify=False)
raw_json = json.loads(r.text)
cluster_data = raw_json["nodeGroups"]
# Open connect to the vSphere API
si = SmartConnect(host=vcm_server, user=admin_user, pwd=admin_pass, port=443)
search_index = si.content.searchIndex
root_folder = si.content.rootFolder
for ng in cluster_data:
nodes = ng["instances"]
for node in nodes:
logger.info(_("VirtualElephant::VMware::BDE - Creating NSX port for %s") % node.get("name"))
vm_name = node.get("name")
vm_moId = node.get("moId")
port_name = vm_name + "-port0"
# moId is not in format we need to match
(x,y,z) = vm_moId.split(":")
vm_moId = "'vim." + y + ":" + z + "'"
|