repo_name (string, 5–100 chars) | path (string, 4–231 chars) | language (1 class) | license (15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, 0–8.16k chars) | middle (string, 3–512 chars) | suffix (string, 0–8.17k chars)
|---|---|---|---|---|---|---|---|---|
pseudonym117/Riot-Watcher | src/riotwatcher/_apis/team_fight_tactics/urls/TftEndpoint.py | Python | mit | 307 | 0 |
from ... import Endpoint, UrlConfig


class TftEndpoint:
    def __init__(self, url: str, **kwargs):
        self._url = f"/tft{url}"

    def __call__(self, **kwargs):
        final_url = f"{UrlConfig.tft_url}{self._url}"
        endpoint = Endpoint(final_url, **kwargs)
        return endpoint(**kwargs)
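# Illustrative usage sketch (hypothetical path; Endpoint and UrlConfig come from
# the package's own modules, and the match route below is made up for the example):
#   ep = TftEndpoint("/match/v1/matches/{match_id}")
#   result = ep(match_id="EUW1_12345")  # resolves against UrlConfig.tft_url + "/tft/..."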
|
factorlibre/l10n-spain | l10n_es_ticketbai_api_batuz/models/lroe_operation_response.py | Python | agpl-3.0 | 16,313 | 0.001533 |
# Copyright (2021) Binovo IT Human Project SL
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
import base64
from enum import Enum
from odoo import models, fields, api, _
from ..lroe.lroe_xml_schema import LROEXMLSchema,\
LROEXMLSchemaModeNotSupported,\
LROEOperationTypeEnum
from .lroe_operation import LROEOperationEnum, LROEModelEnum
from odoo.addons.l10n_es_ticketbai_api.models.ticketbai_response\
import TicketBaiResponseState, TicketBaiInvoiceResponseCode, \
TicketBaiCancellationResponseCode
from odoo.addons.l10n_es_ticketbai_api.utils import utils as tbai_utils
from odoo.exceptions import ValidationError
class LROEOperationResponseState(Enum):
BUILD_ERROR = '-2'
REQUEST_ERROR = '-1'
CORRECT = 'Correcto'
PARTIALLY_CORRECT = 'Parcialmente correcto'
INCORRECT = 'Incorrecto'
class LROEOperationResponseLineState(Enum):
CORRECT = 'Correcto'
CORRECT_WITH_ERRORS = 'Aceptado con errores'
INCORRECT = 'Incorrecto'
class LROEOperationResponseLineCode(tbai_utils.EnumValues):
DUPLICATED_RECORD = 'B4_2000003'
ALREADY_CANCELLED_RECORD = 'B4_2000006'
class LROEOperationResponse(models.Model):
_name = 'lroe.operation.response'
_description = "LROE Operation Response"
lroe_operation_id = fields.Many2one(
comodel_name='lroe.operation',
required=True,
ondelete='cascade')
response_line_ids = fields.One2many(
comodel_name='lroe.operation.response.line',
inverse_name='lroe_response_id',
string="Response Line")
xml = fields.Binary(string='XML Response')
xml_fname = fields.Char('XML File Name')
state = fields.Selection(selection=[
(LROEOperationResponseState.BUILD_ERROR.value, 'Build error'),
(LROEOperationResponseState.REQUEST_ERROR.value, 'Request error'),
(LROEOperationResponseState.CORRECT.value, 'Correct'),
(LROEOperationResponseState.PARTIALLY_CORRECT.value, 'Partially correct'),
(LROEOperationResponseState.INCORRECT.value, 'Incorrect')], required=True)
code = fields.Char()
description = fields.Char()
lroe_record_id = fields.Char()
lroe_record_number = fields.Char()
lroe_record_date = fields.Char()
@staticmethod
def get_tbai_state(lroe_response_operation):
if lroe_response_operation == LROEOperationResponseState.BUILD_ERROR.value or\
lroe_response_operation == LROEOperationResponseState.REQUEST_ERROR.value:
return lroe_response_operation
if lroe_response_operation == LROEOperationResponseState.CORRECT.value:
return TicketBaiResponseState.RECEIVED.value
if lroe_response_operation == LROEOperationResponseState.INCORRECT.value:
return TicketBaiResponseState.REJECTED.value
if lroe_response_operation\
== LROEOperationResponseState.PARTIALLY_CORRECT.value\
or lroe_response_operation\
== LROEOperationResponseLineState.CORRECT_WITH_ERRORS.value:
# TODO LROE: in the case of a single-file submission, could we receive this
# response? What should we do then?
return TicketBaiResponseState.RECEIVED.value
return None
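# Illustrative mapping (per the branches above): 'Correcto' -> RECEIVED,
# 'Incorrecto' -> REJECTED, while build/request error states pass through unchanged.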
@api.model
def prepare_lroe_error_values(self, lroe_operation, msg, **kwargs):
values = kwargs
tbai_response_model = self.env['tbai.response']
tbai_response_dict = {
'tbai_invoice_id': lroe_operation.tbai_invoice_ids[0].id,
'state': LROEOperationResponse.get_tbai_state(
LROEOperationResponseState.REQUEST_ERROR.value
)
}
tbai_response_obj = tbai_response_model.create(tbai_response_dict)
values.update({
'lroe_operation_id': lroe_operation.id,
'state': LROEOperationResponseState.BUILD_ERROR.value,
'description': _("Internal API or Operation error") + msg,
'response_line_ids': [(0, 0, {
'state': LROEOperationResponseLineState.INCORRECT.value,
'tbai_response_id': tbai_response_obj.id
})]
})
return values
@api.model
def prepare_lroe_response_values(self, lroe_srv_response, lroe_operation, **kwargs):
def validate_response_line_state(response_line_record_state):
if response_line_record_state not in [
LROEOperationResponseLineState.CORRECT.value,
LROEOperationResponseLineState.CORRECT_WITH_ERRORS.value,
LROEOperationResponseLineState.INCORRECT.value
]:
raise ValidationError(_('LROEOperationResponseLineState not VALID !'))
def get_lroe_response_xml_header():
return xml_root.get('Cabecera')
def get_lroe_response_xml_presenter():
return xml_root.get('DatosPresentacion')
def get_lroe_response_xml_records():
xml_lroe_records = xml_root.get('Registros').get('Registro')
len_lroe_records = 0
if isinstance(xml_lroe_records, dict):
len_lroe_records = 1
elif isinstance(xml_lroe_records, list):
len_lroe_records = len(xml_lroe_records)
return len_lroe_records, xml_lroe_records
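# Example (per the isinstance checks above): a response with a single record
# arrives as a dict and yields (1, {...}); several records arrive as a list
# and yield (len(records), [...]).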
def get_lroe_xml_schema():
if not lroe_operation:
raise ValidationError(_('LROE Operation required!'))
operation_type = None
lroe_operation_model = (
"pj_240"
if LROEModelEnum.model_pj_240.value == lroe_operation.model
else "pf_140"
)
if lroe_operation.type in (
LROEOperationEnum.create.value,
LROEOperationEnum.update.value,
):
lroe_operation_type = "resp_alta"
elif lroe_operation.type == LROEOperationEnum.cancel.value:
lroe_operation_type = 'resp_cancel'
if lroe_operation.lroe_chapter_id.code == '1':
lroe_operation_chapter = 'sg_invoice'
elif lroe_operation.lroe_chapter_id.code == '2':
lroe_operation_chapter = 'invoice_in'
if hasattr(
LROEOperationTypeEnum,
"%s_%s_%s"
% (
lroe_operation_type,
lroe_operation_chapter,
lroe_operation_model,
),
):
operation_type = getattr(
LROEOperationTypeEnum,
"%s_%s_%s"
% (
lroe_operation_type,
lroe_operation_chapter,
lroe_operation_model,
),
).value
xml_schema = LROEXMLSchema(operation_type)
else:
raise LROEXMLSchemaModeNotSupported(
"Batuz LROE XML model not supported!")
return operation_type, xml_schema
def set_tbai_response_lroe_line():
response_line_record_data = response_line_record.get('SituacionRegistro')
response_line_record_state = response_line_record_data.get('EstadoRegistro')
validate_response_line_state(response_line_record_state)
response_line_record_code = ''
response_line_record_message = ''
if not response_line_record_state\
== LROEOperationResponseLineState.CORRECT.value:
response_line_record_code =\
response_line_record_data.get('CodigoErrorRegistro')
response_line_record_message = '(ES): '\
+ response_line_record_data.get('DescripcionErrorRegistroES') \
+ '(EU): '\
+ response_line_record_data.get('DescripcionErrorRegistroEU')
tbai_response_model = tbai_response_obj = self.env['tbai.response']
if lroe_operation.tbai_invoice_ids:
tbai_msg_description = response_line_record_message
tbai_msg_code =\
TicketBaiInvoiceResponseCode.INVOICE_ALREADY_REGISTERED.value \
|
ferrants/qball-python | setup.py | Python | mit | 547 | 0.003656 |
"""
(c) Copyright 2014. All Rights Reserved.
qball module setup and package.
"""
from setuptools import setup
setup(
name='qball',
author='Matt Ferrante',
author_email='[email protected]',
description='
|
Python integration for qball',
license='(c) Copyright 2014. All Rights Reserved.',
packages=['qball'],
install_requires=['httpli
|
b2 >= 0.8'],
setup_requires=['httplib2'],
version='1.1.0',
url="https://github.com/ferrants/qball-python",
keywords = ['locking', 'resource locking', 'webservice'],
)
|
allancarlos123/Solfege | solfege/fpeditor.py | Python | gpl-3.0 | 34,015 | 0.003381 |
# vim: set fileencoding=utf-8 :
# GNU Solfege - free ear training software
# Copyright (C) 2009, 2011 Tom Cato Amundsen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import logging
import os
import StringIO
import subprocess
import gtk
from solfege.esel import SearchView
if __name__ == '__main__':
from solfege import i18n
i18n.setup(".", "C")
import solfege.statistics
solfege.db = solfege.statistics.DB()
import solfege
from solfege import cfg
from solfege import filesystem
from solfege import gu
from solfege import frontpage as pd
from solfege import lessonfile
from solfege import osutils
class LessonFilePreviewWidget(gtk.VBox):
def __init__(self, model):
gtk.VBox.__init__(self)
self.m_model = model
self.set_size_request(200, 200)
l = gtk.Label()
l.set_alignment(0.0, 0.5)
l.set_markup("<b>Title:</b>")
self.pack_start(l, False)
self.g_title = gtk.Label()
self.g_title.set_alignment(0.0, 0.5)
self.pack_start(self.g_title, False)
l = gtk.Label()
l.set_alignment(0.0, 0.5)
l.set_markup("<b>Module:</b>")
self.pack_start(l, False)
self.g_module = gtk.Label()
self.g_module.set_alignment(0.0, 0.5)
self.pack_start(self.g_module, False)
l = gtk.Label()
l.set_alignment(0.0, 0.5)
l.set_markup("<b>Used in topcis:</b>")
self.pack_start(l, False)
self.g_topic_box = gtk.VBox()
self.pack_start(self.g_topic_box, False)
self.show_all()
def update(self, dlg):
fn = dlg.get_preview_filename()
if fn:
fn = gu.decode_filename(fn)
for child in self.g_topic_box.get_children():
child.destroy()
fn = lessonfile.mk_uri(fn)
try:
self.set_sensitive(True)
self.g_title.set_text(lessonfile.infocache.get(fn, 'title'))
self.g_module.set_text(lessonfile.infocache.get(fn, 'module'))
self.g_ok_button.set_sensitive(True)
for x in self.m_model.iterate_topics_for_file(fn):
l = gtk.Label(x)
l.set_alignment(0.0, 0.5)
self.g_topic_box.pack_start(l, False)
if not self.g_topic_box.get_children():
l = gtk.Label(u"-")
l.set_alignment(0.0, 0.5)
self.g_topic_box.pack_start(l, False)
except (lessonfile.InfoCache.FileNotFound,
lessonfile.InfoCache.FileNotLessonfile), e:
self.g_title.set_text(u'')
self.g_module.set_text(u'')
self.g_ok_button.set_sensitive(False)
self.set_sensitive(False)
self.show_all()
return True
class SelectLessonFileDialog(gtk.FileChooserDialog):
def __init__(self, parent):
gtk.FileChooserDialog.__init__(self, _("Select lesson file"),
parent=parent,
buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,))
self.set_select_multiple(True)
pv = LessonFilePreviewWidget(parent.m_model)
pv.g_ok_button = self.add_button("gtk-ok", gtk.RESPONSE_OK)
pv.g_ok_button.set_sensitive(False)
pv.show()
self.set_preview_widget(pv)
self.connect('selection-changed', pv.update)
class SelectLessonfileBySearchDialog(gtk.Dialog):
def __init__(self):
gtk.Dialog.__init__(self, buttons=(gtk.STOCK_CLOSE, gtk.RESPONSE_ACCEPT))
view = SearchView(_('Search for exercises. Each exercise you click will be added to the section of the front page.'),
fields=['link-with-filename-tooltip', 'module'])
view.on_link_clicked = self.on_link_clicked
self.vbox.pack_start(view)
self.show_all()
def on_link_clicked(self, widget, filename):
self.m_filename = filename
self.response(gtk.RESPONSE_OK)
def editor_of(obj):
"""
Return the toplevel page, the one that is a Editor object.
"""
p = obj
while not isinstance(p, Editor):
p = p.m_parent
return p
def parent_page(obj):
"""
Return the parent page of obj. Return None if this is the toplevel page.
"""
p = obj
while True:
try:
p = p.m_parent
except AttributeError:
return None
if isinstance(p, Page):
return p
if p is None:
return None
class Section(gtk.VBox):
"""
A section consists of a heading and a list of links.
self.g_link_box is a vbox that contains the links.
"""
def __init__(self, model, parent):
gtk.VBox.__init__(self)
self.m_model = model
self.m_parent = parent
assert isinstance(model, pd.LinkList)
hbox = gtk.HBox()
hbox.set_spacing(6)
self.pack_start(hbox, False)
# This is displayed and used when we edit the heading
self.g_heading_entry = gtk.Entry()
self.g_heading_entry.set_no_show_all(True)
hbox.pack_start(self.g_heading_entry)
self.g_heading = gtk.Label()
self.g_heading.set_alignment(0.0, 0.5)
# FIXME escape m_name
self.g_heading.set_markup("<b>%s</b>" % model.m_name)
hbox.pack_start(self.g_heading, False)
#
button_hbox = gtk.HBox()
button_hbox.set_spacing(0)
hbox.pack_start(button_hbox, False)
im = gtk.Image()
im.set_from_stock(gtk.STOCK_EDIT, gtk.ICON_SIZE_MENU)
button = gtk.Button()
button.add(im)
button.connect('clicked', self.on_edit_heading)
button_hbox.pack_start(button, False)
#
im = gtk.Image()
im.set_from_stock(gtk.STOCK_ADD, gtk.ICON_SIZE_MENU)
button = gtk.Button()
button.add(im)
button.connect('button-release-event', self.on_add)
button_hbox.pack_start(button, False)
#
im = gtk.Image()
im.set_from_stock(gtk.STOCK_REMOVE, gtk.ICON_SIZE_MENU)
button = gtk.Button()
button.add(im)
button.connect('button-release-event', self.on_remove)
button_hbox.pack_start(button, False)
#
im = gtk.Image()
im.set_from_stock(gtk.STOCK_CUT, gtk.ICON_SIZE_MENU)
b = gtk.Button()
b.add(im)
b.connect('clicked', self.on_cut)
button_hbox.pack_start(b, False)
#
im = gtk.Image()
im.set_from_stock(gtk.STOCK_PASTE, gtk.ICON_SIZE_MENU)
b = gtk.Button()
b.add(im)
b.connect('clicked', self.on_paste, -1)
Editor.clipboard.register_paste_button(b, (pd.LinkList, pd.Page, unicode))
button_hbox.pack_start(b, False)
#
im = gtk.Image()
im.set_from_stock(gtk.STOCK_GO_DOWN, gtk.ICON_SIZE_MENU)
self.g_move_down_btn = gtk.Button()
self.g_move_down_btn.add(im)
self.g_move_down_btn.connect('clicked',
self.m_parent.move_section_down, self)
button_hbox.pack_start(self.g_move_down_btn, False)
#
im = gtk.Image()
im.set_from_stock(gtk.STOCK_GO_UP, gtk.ICON_SIZE_MENU)
self.g_move_up_btn = gtk.Button()
self.g_move_up_btn.add(im)
self.g_move_up_btn.connect('clicked',
self.m_parent.move_section_up, self)
button_hbox.pack_start(self.g_move_up_btn, False)
#
im = gtk.Image()
im.set_from_stock(gtk.STOCK_GO_BACK, gtk.ICON_SIZE_MENU)
self.g_move_left_btn = g
|
johnyf/gr1experiments | examples/jcss12/amba_generator.py | Python | bsd-3-clause | 15,381 | 0.00013 |
#!/usr/bin/env python
"""Generate AMBA AHB specifications for given number of masters.
Translated and adapted from Perl original distributed with Anzu.
https://www.iaik.tugraz.at/content/research/opensource/anzu/#download
"""
import argparse
import math
from omega.logic.syntax import conj
def build_state_str(state_name, op, num_states, value,
padd_value='0', add_next=''):
result = ''
binary = bin(value).lstrip('-0b').zfill(1)[::-1]
for j in xrange(num_states):
if result != '':
result += op
bin_val = padd_value
if j < len(binary):
bin_val = binary[j]
result += '{add_next}({state_name}{j} = {bin_val})'.format(
add_next=add_next, state_name=state_name, j=j, bin_val=bin_val)
return result
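# Worked example (using only the function above): build_state_str('hmaster', ' & ', 2, 2)
# encodes 2 as the little-endian bit string '01' over two state variables, yielding
# "(hmaster0 = 0) & (hmaster1 = 1)".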
def build_hmaster_str(master_bits, value):
return build_state_str('hmaster', ' & ', master_bits, value)
def generate_spec(num_masters, use_ba):
# init
master_bits = int(math.ceil(math.log(num_masters) / math.log(2.0)))
master_bits_plus_one = math.ceil(math.log(num_masters + 1) / math.log(2))
assert master_bits > 0, master_bits
assert master_bits_plus_one > 0, master_bits_plus_one
env_initial = list()
sys_initial = ''
env_transitions = ''
sys_transitions = list()
env_fairness = ''
sys_fairness = ''
input_vars = list()
output_vars = list()
###############################################
# ENV_INITIAL and INPUT_VARIABLES
###############################################
env_initial.append('hready = 0')
input_vars += ['hready', 'hburst0', 'hburst1']
for i in xrange(num_masters):
s = 'hbusreq{i} = 0'.format(i=i)
env_initial.append(s)
s = 'hlock{i} = 0'.format(i=i)
env_initial.append(s)
s = 'hbusreq{i}'.format(i=i)
input_vars.append(s)
s = 'hlock{i}'.format(i=i)
input_vars.append(s)
env_initial.append('hburst0 = 0')
env_initial.append('hburst1 = 0')
###############################################
# ENV_TRANSITION
###############################################
for i in xrange(num_masters):
# env_transitions += "#Assumption 3:\n"
env_transitions += "[]( hlock{i} = 1 -> hbusreq{i} = 1 ) & \n".format(i=i)
###############################################
# ENV_FAIRNESS
###############################################
env_fairness += (
# "# Assumption 1: \n"
"[](<>(stateA1_1 = 0)) & \n"
# "\n# Assumption 2:\n"
"[](<>(hready = 1))\n")
###############################################
# SYS_INITIAL + OUTPUT_VARIABLES
###############################################
for i in xrange(master_bits):
sys_initial += 'hmaster{i} = 0 & \n'.format(i=i)
output_vars.append('hmaster{i}'.format(i=i))
output_vars += [
"hmastlock", "start", "locked", "decide", 'hgrant0',
"busreq", "stateA1_0", "stateA1_1", "stateG2",
"stateG3_0", "stateG3_1", "stateG3_2"]
c = [
"hmastlock = 0",
"start = 1",
"decide = 1",
"locked = 0",
"hgrant0 = 1"]
sys_initial += '&\n'.join(c) + '&\n'
for i in xrange(1, num_masters):
sys_initial += "hgrant{i} = 0 & \n".format(i=i)
var = 'hgrant{i}'.format(i=i)
output_vars.append(var)
# busreq = hbusreq[hmaster]
sys_initial += (
"busreq=0 & \n"
# Assumption 1:
"stateA1_0 = 0 & \n"
"stateA1_1 = 0 & \n"
# Guarantee 2:
"stateG2 = 0 & \n"
# Guarantee 3:
"stateG3_0 = 0 & \n"
"stateG3_1 = 0 & \n"
"stateG3_2 = 0 & \n")
# Guarantee 10:
for i in xrange(1, num_masters):
sys_initial += "stateG10_{i} = 0 & \n".format(i=i)
var = 'stateG10_{i}'.format(i=i)
output_vars.append(var)
###############################################
# SYS_TRANSITION
###############################################
# busreq = hbusreq[hmaster]
for i in xrange(num_masters):
hmaster = build_hmaster_str(master_bits, i)
hmaster_X = build_state_str("hmaster", " & ", master_bits, i, 0, 'X')
sys_transitions.append((
"[]({hmaster} -> (hbusreq{i} = 0 <-> busreq=0))").format(
i=i, hmaster=hmaster))
# Assumption 1:
# state 00
sys_transitions.append(
# "# Assumption 1:\n"
"[](((stateA1_1 = 0) & (stateA1_0 = 0) & "
"((hmastlock = 0) | (hburst0 = 1) | (hburst1 = 1))) ->\n"
" X((stateA1_1 = 0) & (stateA1_0 = 0))) & \n"
"[](((stateA1_1 = 0) & (stateA1_0 = 0) & "
" (hmastlock = 1) & (hburst0 = 0) & (hburst1 = 0)) ->\n"
" X((stateA1_1 = 1) & (stateA1_0 = 0))) & \n"
# state 10
"[](((stateA1_1 = 1) & (stateA1_0 = 0) & (busreq = 1)) ->\n"
" X((stateA1_1 = 1) & (stateA1_0 = 0))) & \n"
"[](((stateA1_1 = 1) & (stateA1_0 = 0) & (busreq = 0) & "
"((hmastlock = 0) | (hburst0 = 1) | (hburst1 = 1))) ->\n"
" X((stateA1_1 = 0) & (stateA1_0 = 0))) & \n"
"[](((stateA1_1 = 1) & (stateA1_0 = 0) & (busreq = 0) & "
" (hmastlock = 1) & (hburst0 = 0) & (hburst1 = 0)) ->\n"
" X((stateA1_1 = 0) & (stateA1_0 = 1))) & \n"
# state 01
"[](((stateA1_1 = 0) & (stateA1_0 = 1) & (busreq = 1)) ->\n"
" X((stateA1_1 = 1) & (stateA1_0 = 0))) & \n"
"[](((stateA1_1 = 0) & (stateA1_0 = 1) & "
" (hmastlock = 1) & (hburst0 = 0) & (hburst1 = 0)) ->\n"
" X((stateA1_1 = 1) & (stateA1_0 = 0))) & \n"
"[](((stateA1_1 = 0) & (stateA1_0 = 1) & (busreq = 0) & "
"((hmastlock = 0) | (hburst0 = 1) | (hburst1 = 1))) ->\n"
" X((stateA1_1 = 0) & (stateA1_0 = 0))) & \n"
# Guarantee 1:
# sys_transitions += "\n# Guarantee 1:\n"
"[]((hready = 0) -> X(start = 0)) & \n"
# Guarantee 2:
# sys_transitions += "\n# Guarantee 2:\n"
"[](((stateG2 = 0) & "
"((hmastlock = 0) | (start = 0) | "
"(hburst0 = 1) | (hburst1 = 1))) -> "
"X(stateG2 = 0)) & \n"
"[](((stateG2 = 0) & "
" (hmastlock = 1) & (start = 1) & "
"(hburst0 = 0) & (hburst1 = 0)) -> "
"X(stateG2 = 1)) & \n"
"[](((stateG2 = 1) & (start = 0) & (busreq = 1)) -> "
"X(stateG2 = 1)) & \n"
"[](((stateG2 = 1) & (start = 1)) -> false) & \n"
"[](((stateG2 = 1) & (start = 0) & (busreq = 0)) -> "
"X(stateG2 = 0)) & \n"
# Guarantee 3:
# sys_transitions += "\n# Guarantee 3:\n"
'[](((stateG3_0 = 0) & (stateG3_1 = 0) & (stateG3_2 = 0) & \n'
' ((hmastlock = 0) | (start = 0) | ((hburst0 = 1) | (hburst1 = 0)))) ->\n'
' (X(stateG3_0 = 0) & X(stateG3_1 = 0) & X(stateG3_2 = 0))) &\n'
'[](((stateG3_0 = 0) & (stateG3_1 = 0) & (stateG3_2 = 0) & \n'
' ((hmastlock = 1) & (start = 1) & '
'((hburst0 = 0) & (hburst1 = 1)) & (hready = 0))) -> \n'
' (X(stateG3_0 = 1) & X(stateG3_1 = 0) & X(stateG3_2 = 0))) &\n'
'[](((stateG3_0 = 0) & (stateG3_1 = 0) & (stateG3_2 = 0) & \n'
' ((hmastlock = 1) & (start = 1) & '
'((hburst0 = 0) & (hburst1 = 1)) & (hready = 1))) -> \n'
' (X(stateG3_0 = 0) & X(stateG3_1 = 1) & X(stateG3_2 = 0))) &\n'
' \n'
'[](((stateG3_0 = 1) & (stateG3_1 = 0) & '
'(stateG3_2 = 0) & ((start = 0) & (hready = 0))) -> \n'
' (X(stateG3_0 = 1) & X(stateG3_1 = 0) & X(stateG3_2 = 0))) &\n'
'[](((stateG3_0 = 1) & (stateG3_1 = 0) & '
'(stateG3_2 = 0) & ((start = 0) & (hready = 1))) -> \n'
' (X(stateG3_0 = 0) & X(stateG3_1 = 1) & X(stateG3_2 = 0))) &\n'
'\n'
'[](((stateG3_0 = 1) & (stateG3_1 = 0) & '
'(stateG3_2 = 0) & ((start = 1))) -> false) &\n'
'\n'
' \n'
'[](((stateG3_0 = 0) & (stateG3_1 = 1) & '
'(stateG3_2 = 0) & ((start = 0) & (hready = 0))) -> \n'
' (X(stateG3_0 = 0) & X(stateG3_1 = 1) & X(stateG3_2 = 0))) &\n'
'[](((stateG3_0 = 0) & (stateG3_1 = 1) & '
'(stateG3_2 = 0) & ((start =
|
cmac4603/Home-Utilities-App | wx_str_test.py | Python | gpl-2.0 | 253 | 0.003953 |
import pyowm

owm = pyowm.OWM('fa7813518ed203b759f116a3bac9bcce')
observation = owm.weather_at_place('London,uk')
w = observation.get_weather()
wtemp = str(w.get_temperature('celsius'))
print(wtemp.strip('{}'))
wtemp_list = list(wtemp)
print(wtemp_list)
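# Note: with this legacy pyowm API, get_temperature('celsius') typically returns a
# dict such as {'temp': 18.5, 'temp_max': ..., 'temp_min': ...}; strip('{}') only
# removes the braces from its string form, so working with the dict directly
# would be more robust than string manipulation.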
|
mahabs/nitro | nssrc/com/citrix/netscaler/nitro/resource/config/network/bridgegroup_nsip6_binding.py | Python | apache-2.0 | 7,494 | 0.037764 |
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class bridgegroup_nsip6_binding(base_resource) :
""" Binding class showing the nsip6 that can be bound to bridgegroup.
"""
def __init__(self) :
self._ipaddress = ""
self._td = 0
self._netmask = ""
self._rnat = False
self._id = 0
self.___count = 0
@property
def id(self) :
"""The integer that uniquely identifies the bridge group.<br/>Minimum value = 1<br/>Maximum value = 1000.
"""
try :
return self._id
except Exception as e:
raise e
@id.setter
def id(self, id) :
"""The integer that uniquely identifies the bridge group.<br/>Minimum value = 1<br/>Maximum value = 1000
"""
try :
self._id = id
except Exception as e:
raise e
@property
def td(self) :
"""Integer value that uniquely identifies the traffic domain in which you want to configure the entity. If you do not specify an ID, the entity becomes part of the default traffic domain, which has an ID of 0.<br/>Minimum value = 0<br/>Maximum value = 4094.
"""
try :
return self._td
except Exception as e:
raise e
@td.setter
def td(self, td) :
"""Integer value that uniquely identifies the traffic domain in which you want to configure the entity. If you do not specify an ID, the entity becomes part of the default traffic domain, which has an ID of 0.<br/>Minimum value = 0<br/>Maximum value = 4094
"""
try :
self._td = td
except Exception as e:
raise e
@property
def netmask(self) :
"""The network mask for the subnet defined for the bridge group.
"""
try :
return self._netmask
except Exception as e:
raise e
@netmask.setter
def netmask(self, netmask) :
"""The network mask for the subnet defined for the bridge group.
"""
try :
self._netmask = netmask
except Exception as e:
raise e
@property
def ipaddress(self) :
"""The IP address assigned to the bridge group.
"""
try :
return self._ipaddress
except Exception as e:
raise e
@ipaddress.setter
def ipaddress(self, ipaddress) :
"""The IP address assigned to the bridge group.
"""
try :
self._ipaddress = ipaddress
except Exception as e:
raise e
@property
def rnat(self) :
"""Temporary flag used for internal purpose.
"""
try :
return self._rnat
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(bridgegroup_nsip6_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.bridgegroup_nsip6_binding
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.id) :
return str(self.id)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
try :
if resource and type(resource) is not list :
updateresource = bridgegroup_nsip6_binding()
updateresource.ipaddress = resource.ipaddress
updateresource.netmask = resource.netmask
return updateresource.update_resource(client)
else :
if resource and len(resource) > 0 :
updateresources = [bridgegroup_nsip6_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].ipaddress = resource[i].ipaddress
updateresources[i].netmask = resource[i].netmask
return cls.update_bulk_request(client, updateresources)
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
try :
if resource and type(resource) is not list :
deleteresource = bridgegroup_nsip6_binding()
deleteresource.ipaddress = resource.ipaddress
deleteresource.netmask = resource.netmask
return deleteresource.delete_resource(client)
else :
if resource and len(resource) > 0 :
deleteresources = [bridgegroup_nsip6_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].ipaddress = resource[i].ipaddress
deleteresources[i].netmask = resource[i].netmask
return cls.delete_bulk_request(client, deleteresources)
except Exception as e :
raise e
@classmethod
def get(cls, service, id) :
""" Use this API to fetch bridgegroup_nsip6_binding resources.
"""
try :
obj = bridgegroup_nsip6_binding()
obj.id = id
response = obj.get_resources(service)
return response
except Exception as e:
raise e
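# Hypothetical usage sketch (assumes an authenticated nitro service session,
# e.g. the SDK's nitro_service; the bridge group id below is illustrative):
#   bindings = bridgegroup_nsip6_binding.get(service, 2)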
@classmethod
def get_filtered(cls, service, id, filter_) :
""" Use this API to fetch filtered set of bridgegroup_nsip6_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = bridgegroup_nsip6_binding()
obj.id = id
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, id) :
""" Use this API to count bridgegroup_nsip6_binding resources configued on NetScaler.
"""
try :
obj = bridgegroup_nsip6_binding()
obj.id = id
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, id, filter_) :
""" Use this API to count the filtered set of bridgegroup_nsip6_binding resources.
Filter string should be in JSON format, e.g.: "port:80,servicetype:HTTP".
"""
try :
obj = bridgegroup_nsip6_binding()
obj.id = id
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class bridgegroup_nsip6_binding_response(base_response) :
def __init__(self, length=1) :
self.bridgegroup_nsip6_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.bridgegroup_nsip6_binding = [bridgegroup_nsip6_binding() for _ in range(length)]
|
kohnle-lernmodule/KITexe201based | exe/engine/config.py | Python | gpl-2.0 | 25,533 | 0.005953 |
# ===========================================================================
# eXe config
# Copyright 2004-2006, University of Auckland
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# ===========================================================================
"""
Config settings loaded from exe.conf
Is responsible for the system-wide settings we use
O/S specific config classes are derived from here
"""
from exe.engine.configparser import ConfigParser
from exe.engine.path import Path
from exe.engine.locales import chooseDefaultLocale
from exe.engine import version
import logging
from logging.handlers import RotatingFileHandler
import sys
import os
import gettext
import tempfile
import twisted
import shutil
from exe import globals as G
from exe.engine.stylestore import StyleStore
from exe.webui import common
x_ = lambda s: s
class Config(object):
"""
The Config class contains the configuration information for eXe.
"""
# To build link to git revision
baseGitWebURL = 'https://forja.cenatic.es/plugins/scmgit/cgi-bin/gitweb.cgi?p=iteexe/iteexe.git'
# Class attributes
optionNames = {
'system': ('webDir', 'jsDir', 'port', 'dataDir',
'configDir', 'localeDir', 'browser', 'mediaProfilePath',
'videoMediaConverter_ogv', 'videoMediaConverter_3gp',
'videoMediaConverter_mpg',
'videoMediaConverter_avi', 'audioMediaConverter_ogg',
'audioMediaConverter_au', 'audioMediaConverter_mp3',
'audioMediaConverter_wav', 'ffmpegPath'),
'user': ('locale', 'lastDir', 'showPreferencesOnStart','defaultStyle', 'showIdevicesGrouped','docType','editorMode'),
}
idevicesCategories = {
'activity': [x_('Non-Interactive Activities')],
'reading activity': [x_('Non-Interactive Activities')],
'dropdown activity': [x_('Interactive Activities')],
'java applet': [x_('Non-Textual Information')],
'wiki article': [x_('Non-Textual Information')],
'case study': [x_('Non-Interactive Activities')],
'preknowledge': [x_('Textual Information')],
'scorm quiz': [x_('Interactive Activities')],
'fpd - multi choice activity': [x_('FPD')],
'fpd - cloze activity': [x_('FPD')],
'fpd - cloze activity (modified)': [x_('FPD')],
'fpd - multi select activity': [x_('FPD')],
'fpd - true/false activity': [x_('FPD')],
'fpd - situation': [x_('FPD')],
'fpd - quotation': [x_('FPD')],
'fpd - you should know': [x_('FPD')],
'fpd - highlighted': [x_('FPD')],
'fpd - translation': [x_('FPD')],
'fpd - guidelines students': [x_('FPD')],
'fpd - guidelines teacher': [x_('FPD')],
'fpd - a step ahead': [x_('FPD')],
'fpd - a piece of advice': [x_('FPD')],
'fpd - think about it (with feedback)': [x_('FPD')],
'fpd - think about it (without feedback)': [x_('FPD')],
'fpd - free text': [x_('FPD')],
'image gallery': [x_('Non-Textual Information')],
'image magnifier': [x_('Non-Textual Information')],
'note': [x_('Textual Information')],
'objectives': [x_('Textual Information')],
'multi-choice': [x_('Interactive Activities')],
'multi-select': [x_('Interactive Activities')],
'true-false question': [x_('Interactive Activities')],
'reflection': [x_('Non-Interactive Activities')],
'cloze activity': [x_('Interactive Activities')],
'rss': [x_('Non-Textual Information')],
'external web site': [x_('Non-Textual Information')],
'free text': [x_('Textual Information')],
'click in order game': [x_('Experimental')],
'hangman game': [x_('Experimental')],
'place the objects': [x_('Interactive Activities')],
'memory match game': [x_('Experimental')],
'file attachments': [x_('Non-Textual Information')],
'sort items': [x_('Experimental')],
# NOTE: duplicate 'sort items' key; this later entry wins at runtime
'sort items': [x_('Interactive Activities')],
'scorm test cloze': [x_('Interactive Activities')],
'scorm test cloze (multiple options)': [x_('Interactive Activities')],
'scorm test dropdown': [x_('Interactive Activities')],
'scorm test multiple choice': [x_('Interactive Activities')]
}
@classmethod
def getConfigPath(cls):
obj = cls.__new__(cls)
obj.configParser = ConfigParser()
obj._overrideDefaultVals()
obj.__setConfigPath()
return obj.configPath
def __init__(self):
"""
Initialise
"""
self.configPath = None
self.configParser = ConfigParser(self.onWrite)
# Set default values
# exePath is the whole path and filename of the exe executable
self.exePath = Path(sys.argv[0]).abspath()
# webDir is the parent directory for styles,scripts and templates
self.webDir = self.exePath.dirname()
self.jsDir = self.exePath.dirname()
# localeDir is the base directory where all the locales are stored
self.localeDir = self.exePath.dirname()/"locale"
# port is the port the exe webserver will listen on
# (previous default, which earlier users might still use, was 8081)
self.port = 51235
# dataDir is the default directory that is shown to the user
# to save packages and exports in
self.dataDir = Path(".")
# configDir is the dir for storing user profiles
# and user made idevices and the config file
self.configDir = Path(".")
# FM: New Styles Directory path
self.stylesDir = Path(self.configDir/'style').abspath()
# FM: Default Style name
self.defaultStyle = u"KIC-IE"
# browser is the name of a predefined browser specified at http://docs.python.org/library/webbrowser.html.
# None for system default
self.browser = None
# docType is the HTML export format
self.docType = 'XHTML'
# locale is the language of the user
self.locale = chooseDefaultLocale(self.localeDir)
# internalAnchors indicate which exe_tmp_anchor tags to generate for each tinyMCE field
# available values = "enable_all", "disable_autotop", or "disable_all"
self.internalAnchors = "enable_all"
self.lastDir = None
self.showPreferencesOnStart = "1"
self.showIdevicesGrouped = "1"
# tinymce option
self.editorMode = 'permissive'
# styleSecureMode : if this [user] key is = 0 , exelearning can run python files in styles
# as websitepage.py , ... ( deactivate secure mode )
self.styleSecureMode="1"
# styles is the list of style names available for loading
self.styles = []
# The documents that we've recently looked at
self.recentProjects = []
# canonical (English) names of iDevices not to show in the iDevice pane
self.hiddeniDevices = []
#Media conversion programs used for XML export system
self.videoMediaConverter_ogv = ""
self.videoMediaConverter_3gp = ""
self.videoMediaConverter_avi = ""
self.videoMediaConverter_mpg = ""
self.audioMediaConverter_ogg = ""
|
kdheepak89/pypdevs | test/test_realtime.py | Python | apache-2.0 | 1,022 | 0.001957 |
# Copyright 2014 Modelling, Simulation and Design Lab (MSDL) at
# McGill University and the University of Antwerp (http://msdl.cs.mcgill.ca/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import subprocess
import os
import sys
from testRealtime import TestRealtime
if __name__ == '__main__':
    realtime = unittest.TestLoader().loadTestsFromTestCase(TestRealtime)
    allTests = unittest.TestSuite()
    allTests.addTest(realtime)
    unittest.TextTestRunner(verbosity=2, failfast=True).run(allTests)
|
hoaaoh/Audio2Vec | src/trans_len.py | Python | apache-2.0 | 1,528 | 0.013089 |
#!/usr/bin/env python3
import csv
import argparse
FLAG = None
def write_file(feats,lab_list, fn):
with open(fn,'w') as f:
for num, i in enumerate(feats):
for j in range(len(i)):
f.write(str(i[j]) + ',')
f.write(str([len(i)-1]) + '\n')
return
def transform(feats, lens):
dim = FLAG.feat_dim
trans_feats = []
for i in range(len(feats)):
# NOTE: 'single_len' is undefined in the original; the apparent intent is to
# truncate each utterance to its true length times the feature dimension:
trans_feats.append(feats[i][:lens[i]*dim])
return trans_feats
def read_feat(fn):
feats = []
labs = []
with open(fn,'r') as f:
reader = csv.reader(f)
for row in reader:
feats.append(list(map(float,row[:-1])))
labs.append(float(row[-1]))
return feats, labs
def read_len(fn):
len_list = []
with open(fn,'r') as f:
for line in f:
len_list.append(int(line.rstrip()))
return len_list
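# Example: a len_file containing the lines "7" and "12" yields [7, 12].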
def main():
len_list = read_len(FLAG.len_file)
ark_list, lab_list = read_feat(FLAG.ark_file)
return
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Transform the zero-filled features back to their true lengths')
parser.add_argument('--feat_dim',type=int, default=39,
help='each frame feat dimension')
parser.add_argument('ark_file',
help='the transforming ark file')
parser.add_argument('len_file',
help='meaning the length of each utterance')
parser.add_argument('out_ark',
help='the output file')
FLAG = parser.parse_args()
main()
|
shaileshgoogler/pyglet | tools/ddsview.py | Python | bsd-3-clause | 4,680 | 0.002564 |
#!/usr/bin/env python
'''Simple viewer for DDS texture files.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
from ctypes import *
import getopt
import sys
import textwrap
from SDL import *
from pyglet.gl.VERSION_1_1 import *
import pyglet.dds
import pyglet.event
import pyglet.image
import pyglet.sprite
import pyglet.window
from OpenGL.GLU import *
def usage():
print textwrap.dedent('''
Usage: ddsview.py [--header] texture1.dds texture2.dds ...
--header Dump the header of each file instead of displaying.
Within the program, press:
left/right keys Flip between loaded textures
up/down keys Increase/decrease mipmap level for a texture
space Toggle flat or sphere view
Click and drag with mouse to reposition texture with wrapping.
''')
texture_index = 0
textures = []
mipmap_level = 0
last_pos = None
texture_offset = [0, 0]
view = 'flat'
sphere_angle = 0
def keydown(character, symbol, modifiers):
global mipmap_level, texture_index
if symbol == SDLK_DOWN:
mipmap_level = max(0, mipmap_level - 1)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, mipmap_level)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, mipmap_level)
elif symbol == SDLK_UP:
mipmap_level = mipmap_level + 1
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, mipmap_level)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, mipmap_level)
elif symbol == SDLK_LEFT:
texture_index = max(0, texture_index - 1)
elif symbol == SDLK_RIGHT:
texture_index = min(len(textures) - 1, texture_index + 1)
elif symbol == SDLK_SPACE:
toggle_view()
return True
def mousemotion(x, y):
global last_pos
state, x, y = SDL_GetMouseState()
if state & SDL_BUTTON(1):
texture_offset[0] += x - last_pos[0]
texture_offset[1] += y - last_pos[1]
update_texture_matrix()
last_pos = x, y
def update_texture_matrix():
glMatrixMode(GL_TEXTURE)
glLoadIdentity()
glTranslatef(-texture_offset[0] / float(textures[texture_index].size[0]),
-texture_offset[1] / float(textures[texture_index].size[1]),
0)
glMatrixMode(GL_MODELVIEW)
def toggle_view():
global view
if view != 'flat':
pyglet.event.pop()
pyglet.window.set_2d()
view = 'flat'
else:
pyglet.event.push()
pyglet.event.on_mousemotion(sphere_mousemotion)
pyglet.window.set_3d()
glEnable(GL_LIGHT0)
glLightfv(GL_LIGHT0, GL_POSITION, (c_float * 4)(0.5, 0.5, 1, 0))
view = 'sphere'
def sphere_mousemotion(x, y):
# TODO: virtual trackball
return True
def draw_sphere():
global sphere_angle
glPushMatrix()
glTranslatef(0., 0., -4)
glRotatef(sphere_angle, 0, 1, 0)
glRotatef(90, 1, 0, 0)
sphere_angle += 0.01
glPushAttrib(GL_ENABLE_BIT)
glEnable(GL_DEPTH_TEST)
glEnable(GL_LIGHTING)
glEnable(GL_TEXTURE_2D)
glBindTexture(GL_TEXTURE_2D, textures[texture_index].id)
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE)
sphere = gluNewQuadric()
gluQuadricTexture(sphere, True)
gluSphere(sphere, 1.0, 100, 100)
gluDeleteQuadric(sphere)
glPopAttrib()
glPopMatrix()
def main(args):
header = False
options, args = getopt.getopt(args[1:], 'h', ['help', 'header'])
for option, value in options:
if option in ('-h', '--help'):
usage()
sys.exit()
elif option == '--header':
header = True
if len(args) < 1:
usage()
sys.exit()
if header:
for arg in args:
print pyglet.dds.DDSURFACEDESC2(open(arg,
'r').read(pyglet.dds.DDSURFACEDESC2.get_size()))
else:
pyglet.window.set_window(resizable=True)
global textures, texture_index
textures = [pyglet.dds.load_dds(arg) for arg in args]
texture_index = 0
pyglet.window.resize(*textures[0].size)
pyglet.event.push()
pyglet.event.on_keydown(keydown)
pyglet.event.on_mousemotion(mousemotion)
global last_pos
state, x, y = SDL_GetMouseState()
last_pos = x, y
glClearColor(0, 0, 0, 0)
while not pyglet.event.is_quit():
pyglet.event.pump()
pyglet.window.clear()
if view == 'flat':
textures[texture_index].draw()
elif view == 'sphere':
draw_sphere()
pyglet.window.flip()
if __name__ == '__main__':
main(sys.argv)
|
lrq3000/pyFileFixity | pyFileFixity/lib/gooey/python_bindings/argparse_to_json.py | Python | mit | 5,242 | 0.013163 |
"""
Converts argparse parser actions into json "Build Specs"
"""
import argparse
from argparse import (
_CountAction,
_HelpAction,
_StoreConstAction,
_StoreFalseAction,
_StoreTrueAction,
ArgumentParser, _SubParsersAction)
from collections import OrderedDict
from functools import partial
VALID_WIDGETS = (
'FileChooser',
'MultiFileChooser',
'FileSaver',
'DirChooser',
'DateChooser',
'TextField',
'Dropdown',
'Counter',
'RadioGroup',
'CheckBox',
'MultiDirChooser'
)
class UnknownWidgetType(Exception):
pass
class UnsupportedConfiguration(Exception):
pass
def convert(parser):
widget_dict = getattr(parser, 'widgets', {})
actions = parser._actions
if has_subparsers(actions):
if has_required(actions):
raise UnsupportedConfiguration("Gooey doesn't currently support required arguments when subparsers are present.")
layout_type = 'column'
layout_data = {name.lower(): process(sub_parser, widget_dict) for name, sub_parser in get_subparser(actions).choices.iteritems()}
else:
layout_type = 'standard'
layout_data = process(parser, widget_dict)
return {
'layout_type': layout_type,
'widgets': layout_data
}
def process(parser, widget_dict):
mutually_exclusive_group = [
mutex_action
for group_actions in parser._mutually_exclusive_groups
for mutex_action in group_actions._group_actions]
base_actions = [action for action in parser._actions
if action not in mutually_exclusive_group
and action.dest != 'help']
required_actions = filter(is_required, base_actions)
optional_actions = filter(is_optional, base_actions)
return list(categorize(required_actions, widget_dict, required=True)) + \
list(categorize(optional_actions, widget_dict)) + \
build_radio_group(mutually_exclusive_group)
def categorize(actions, widget_dict, required=False):
_get_widget = partial(get_widget, widgets=widget_dict)
for action in actions:
if is_standard(action):
yield as_json(action, _get_widget(action) or 'TextField', required)
elif is_choice(action):
yield as_json(action, _get_widget(action) or 'Dropdown', required)
elif is_flag(action):
yield as_json(action, _get_widget(action) or 'CheckBox', required)
elif is_counter(action):
_json = as_json(action, _get_widget(action) or 'Dropdown', required)
# pre-fill the 'counter' dropdown
_json['data']['choices'] = map(str, range(1, 11))
yield _json
else:
raise UnknownWidgetType(action)
def get_widget(action, widgets):
supplied_widget = widgets.get(action.dest, None)
type_arg_widget = 'FileChooser' if action.type == argparse.FileType else None
return supplied_widget or type_arg_widget or None
def is_required(action):
'''_actions which are positional or possessing the `required` flag '''
return not action.option_strings and not isinstance(action, _SubParsersAction) or action.required == True
def has_required(actions):
return filter(None, filter(is_required, actions))
def is_subparser(action):
return isinstance(action,_SubParsersAction)
def has_subparsers(actions):
return filter(is_subparser, actions)
def get_subparser(actions):
return filter(is_subparser, actions)[0]
def is_optional(action):
'''_actions not positional or possessing the `required` flag'''
return action.option_strings and not action.required
def is_choice(action):
''' action with choices supplied '''
return action.choices
def is_standard(action):
""" actions which are general "store" instructions.
e.g. anything which has an argument style like:
$ script.py -f myfilename.txt
"""
boolean_actions = (
_StoreConstAction, _StoreFalseAction,
_StoreTrueAction
)
return (not action.choices
and not isinstance(action, _CountAction)
and not isinstance(action, _HelpAction)
and type(action) not in boolean_actions)
def is_flag(action):
""" _actions which are either storeconst, store_bool, etc.. """
action_types = [_StoreTrueAction, _StoreFalseAction, _StoreConstAction]
return any(map(lambda Action: isinstance(action, Action), action_types))
def is_counter(action):
""" _actions which are of type _CountAction """
return isinstance(action, _CountAction)
def build_radio_group(mutex_group):
if not mutex_group:
return []
options = [
{
'display_name': mutex_arg.dest,
'help': mutex_arg.help,
'nargs': mutex_arg.nargs or '',
'commands': mutex_arg.option_strings,
'choices': mutex_arg.choices,
} for mutex_arg in mutex_group
]
return [{
'type': 'RadioGroup',
'group_name': 'Choose Option',
'required': False,
'data': options
}]
def as_json(action, widget, required):
if widget not in VALID_WIDGETS:
raise UnknownWidgetType('Widget Type {0} is unrecognized'.format(widget))
return {
'type': widget,
'required': required,
'data': {
'display_name': action.dest,
'help': action.help,
'nargs': action.nargs or '',
'commands': action.option_strings,
'choices': action.choices or [],
'default': action.default
}
}
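# Illustrative shape of convert()'s result for a parser without subparsers
# (field names follow as_json above; the values here are placeholders):
#   {'layout_type': 'standard',
#    'widgets': [{'type': 'TextField', 'required': True, 'data': {...}}]}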
|
google/jax | jax/interpreters/ad.py | Python | apache-2.0 | 32,505 | 0.012121 |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import functools
from functools import partial
import itertools as it
from typing import Any, Callable, Dict
import jax
from jax.interpreters import partial_eval as pe
from jax.config import config
from jax import core
from jax._src.dtypes import dtype, float0
from jax.core import (Trace, Tracer, get_aval, call_p, Primitive, Literal,
raise_to_shaped)
from jax._src.ad_util import (add_jaxvals, add_jaxvals_p, zeros_like_jaxval,
zeros_like_aval, zeros_like_p, Zero)
from jax._src.util import (unzip2, safe_map, safe_zip, split_list, wrap_name,
as_hashable_function, cache)
from jax.tree_util import register_pytree_node
from jax import linear_util as lu
from jax._src.api_util import flatten_fun, flatten_fun_nokwargs
from jax.tree_util import tree_flatten, tree_unflatten, Partial
from jax._src import source_info_util
zip = safe_zip
map = safe_map
def identity(x): return x
def jvp(fun: lu.WrappedFun, has_aux=False, instantiate=True,
transform_stack=True) -> Any:
if not has_aux:
return jvpfun(jvp_subtrace(fun), instantiate, transform_stack)
else:
fun, aux = jvp_subtrace_aux(fun)
return jvpfun(fun, instantiate, transform_stack), aux
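# Minimal illustration of the transform this machinery implements, via the
# public wrapper jax.jvp:
#   primals_out, tangents_out = jax.jvp(lambda x: x * x, (3.0,), (1.0,))
#   # primals_out == 9.0, tangents_out == 6.0 (derivative of x**2 at 3.0)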
@lu.transformation
def jvpfun(instantiate, transform_stack, primals, tangents):
tangents = [Zero.from_value(t) if not isinstance(t, Zero)
and dtype(t) is float0 else t for t in tangents]
ctx = (source_info_util.transform_name_stack('jvp') if transform_stack
else contextlib.nullcontext())
with core.new_main(JVPTrace) as main, ctx:
out_primals, out_tangents = yield (main, primals, tangents), {}
del main
if type(instantiate) is bool:
instantiate = [instantiate] * len(out_tangents)
out_tangents = [instantiate_zeros(t) if inst else t for t, inst
in zip(out_tangents, instantiate)]
yield out_primals, out_tangents
@lu.transformation
def jvp_subtrace(main, primals, tangents):
trace = JVPTrace(main, core.cur_sublevel())
for x in list(primals) + list(tangents):
if isinstance(x, Tracer):
assert x._trace.level < trace.level
in_tracers = [JVPTracer(trace, x, t) if type(t) is not Zero else x
for x, t in zip(primals, tangents)]
ans = yield in_tracers, {}
out_tracers = map(trace.full_raise, ans)
yield unzip2([(out_tracer.primal, out_tracer.tangent)
for out_tracer in out_tracers])
@lu.transformation_with_aux
def jvp_subtrace_aux(main, primals, tangents):
trace = JVPTrace(main, core.cur_sublevel())
for x in list(primals) + list(tangents):
if isinstance(x, Tracer):
assert x._trace.level < trace.level
ans, aux = yield map(partial(JVPTracer, trace), primals, tangents), {}
ans_tracers = map(trace.full_raise, ans)
out_primals, out_tangents = unzip2((t.primal, t.tangent) for t in ans_tracers)
aux_primals = [core.full_lower(x.primal)
if isinstance(x, JVPTracer) and x._trace.level == trace.level
else x for x in aux]
yield (out_primals, out_tangents), aux_primals
def linearize(traceable, *primals, **kwargs):
has_aux = kwargs.pop('has_aux', False)
if not has_aux:
jvpfun = jvp(traceable)
else:
jvpfun, aux = jvp(traceable, has_aux=True)
in_pvals = (tuple(pe.PartialVal.known(p) for p in primals)
+ tuple(pe.PartialVal.unknown(get_aval(p).at_least_vspace())
for p in primals))
_, in_tree = tree_flatten(((primals, primals), {}))
jvpfun_flat, out_tree = flatten_fun(jvpfun, in_tree)
jaxpr, out_pvals, consts = pe.trace_to_jaxpr(jvpfun_flat, in_pvals)
out_primals_pvals, out_tangents_pvals = tree_unflatten(out_tree(), out_pvals)
assert all(out_primal_pval.is_known() for out_primal_pval in out_primals_pvals)
_, out_primals_consts = unzip2(out_primals_pvals)
jaxpr.invars = jaxpr.invars[len(primals):]
jaxpr.outvars = jaxpr.outvars[len(out_primals_pvals):]
if not has_aux:
return out_primals_consts, out_tangents_pvals, jaxpr, consts
else:
return out_primals_consts, out_tangents_pvals, jaxpr, consts, aux()
def vjp(traceable, primals, has_aux=False, reduce_axes=()):
if not has_aux:
out_primals, pvals, jaxpr, consts = linearize(traceable, *primals)
else:
out_primals, pvals, jaxpr, consts, aux = linearize(traceable, *primals, has_aux=True)
def unbound_vjp(pvals, jaxpr, consts, *cts):
cts = tuple(map(ignore_consts, cts, pvals))
dummy_args = [UndefinedPrimal(v.aval) for v in jaxpr.invars]
arg_cts = backward_pass(jaxpr, reduce_axes, True, consts, dummy_args, cts)
return map(instantiate_zeros, arg_cts)
# Ensure that vjp_ is a PyTree so that we can pass it from the forward to the backward
# pass in a custom VJP.
vjp_ = Partial(partial(unbound_vjp, pvals, jaxpr), consts)
if not has_aux:
return out_primals, vjp_
else:
return out_primals, vjp_, aux
def ignore_consts(ct, pval):
aval, const = pval
if isinstance(aval, core.AbstractValue):
return ct
elif aval is None:
return core.unit
else:
raise TypeError(aval)
def unpair_pval(pval):
aval, const = pval
const_1, const_2 = const
if aval is None:
return (None, const_1), (None, const_2)
else:
aval_1, aval_2 = aval
return (aval_1, const_1), (aval_2, const_2)
def replace_float0s(primal, tangent):
if dtype(tangent) is float0:
return zeros_like_jaxval(primal)
else:
return tangent
def recast_to_float0(primal, tangent):
if core.primal_dtype_to_tangent_dtype(dtype(primal)) == float0:
return Zero(get_aval(primal).at_least_vspace())
else:
return tangent
# NOTE: The FIXMEs below are caused by primal/tangent mixups (type errors if you will)
def backward_pass(jaxpr: core.Jaxpr, reduce_axes, transform_stack, consts, primals_in, cotangents_in):
if all(type(ct) is Zero for ct in cotangents_in):
return map(lambda v: Zero(v.aval), jaxpr.invars)
def write_cotangent(prim, v, ct):
# assert v not in primal_env
assert ct is not Zero, (prim, v.aval) # check for an old harmless type error
if ct is None or type(v) is Literal:
return
if type(ct) is Zero:
# FIXME: This triggers a lot of failures!
# assert v.aval == ct.aval, (prim, v.aval, ct.aval)
return
axes_to_reduce = tuple(axis_name for axis_name in reduce_axes
if axis_name in core.get_aval(ct).named_shape
and axis_name not in v.aval.named_shape)
if axes_to_reduce:
ct = jax.lax.psum(ct, axis_name=axes_to_reduce)
ct_env[v] = add_tangents(ct_env[v], ct) if v in ct_env else ct
if config.jax_enable_checks:
ct_aval = core.get_aval(ct_env[v])
joined_aval = core.lattice_join(v.aval, ct_aval).strip_weak_type().strip_named_shape()
assert v.aval.strip_weak_type().strip_named_shape() == joined_aval, (prim, v.aval, ct_aval)
def read_cotangent(v):
return ct_env.pop(v, Zero(v.aval))
def read_primal(v):
if type(v) is Literal:
return v.val
else:
return primal_env.get(v, UndefinedPrimal(v.aval))
def write_primal(v, val):
if not is_undefined_primal(val):
primal_env[v] = val
primal_env: Dict[Any, Any] = {}
write_primal(core.unitvar, core.unit)
map(write_primal, jaxpr.constvars, consts)
# FIXME: invars can contain both primal and tangent values, and this line
# forces primal_in to contain UndefinedPrimals for tangent values!
map(write_primal, jaxpr.invars, primals_in)
ct_env: Dict[Any, Any] = {}
ctx = (source_info_util.transform_name_stack('transpos
|
zcbenz/cefode-chromium | third_party/mesa/MesaLib/src/gallium/drivers/llvmpipe/lp_tile_shuffle_mask.py | Python | bsd-3-clause | 716 | 0.111732 |
tile = [[0,1,4,5],
        [2,3,6,7],
        [8,9,12,13],
        [10,11,14,15]]

shift = 0
align = 1
value = 0L
holder = []

import sys

basemask = [0x
fd = sys.stdout
indent = " "*9
for c in range(4):
    fd.write(indent + "*pdst++ = \n");
    for l,line in enumerate(tile):
        fd.write(indent + "  %s_mm_shuffle_epi8(line%d, (__m128i){"%(l and '+' or ' ',l))
        for i,pos in enumerate(line):
            mask = 0x00ffffffff & (~(0xffL << shift))
            value = mask | ((pos) << shift)
            holder.append(value)
            if holder and (i + 1)%2 == 0:
                fd.write("0x%8.0x"%(holder[0] + (holder[1] << 32)))
                holder = []
            if (i)%4 == 1:
                fd.write(',')
        fd.write("})%s\n"%((l == 3) and ';' or ''))
    print
    shift += 8
|
aangert/PiParty | common.py | Python | mit | 5,854 | 0.01247 |
import asyncio
import colorsys
import enum
import functools
import psmove
import time
import traceback
import random
SETTINGSFILE = 'joustsettings.yaml'
#Human speeds[slow, mid, fast]
#SLOW_WARNING = [0.1, 0.15, 0.28]
#SLOW_MAX = [0.25, 0.8, 1]
#FAST_WARNING = [0.5, 0.6, 0.8]
#FAST_MAX = [1, 1.4, 1.8]
SLOW_WARNING = [1.2, 1.3, 1.6, 2.0, 2.5]
SLOW_MAX = [1.3, 1.5, 1.8, 2.5, 3.2]
FAST_WARNING = [1.4, 1.6, 1.9, 2.7, 2.8]
FAST_MAX = [1.6, 1.8, 2.8, 3.2, 3.5]
#WERE_SLOW_WARNING = [0.2, 0.3, 0.4]
#WERE_SLOW_MAX = [0.7, 0.9, 1.1]
#WERE_FAST_WARNING = [0.6, 0.7, 0.9]
#WERE_FAST_MAX = [1.1, 1.5, 2.0]
WERE_SLOW_WARNING = [1.2, 1.4, 1.7, 2.1, 2.9]
WERE_SLOW_MAX = [1.3, 1.6, 1.9, 2.6, 3.9]
WERE_FAST_WARNING = [1.4, 1.7, 2.0, 2.8, 3.5]
WERE_FAST_MAX = [1.6, 1.9, 2.9, 3.3, 4.9]
#ZOMBIE_WARNING = [0.5, 0.6, 0.8]
#ZOMBIE_MAX = [0.8, 1, 1.4]
ZOMBIE_WARNING = [1.2, 1.5, 1.8, 2.6, 2.7]
ZOMBIE_MAX = [1.4, 1.7, 2.7, 3.1, 3.4]
def get_move(serial, move_num):
time.sleep(0.02)
move = psmove.PSMove(move_num)
time.sleep(0.05)
if move.get_serial() != serial:
for move_num in range(psmove.count_connected()):
move = psmove.PSMove(move_num)
if move.get_serial() == serial:
print("returning " +str(move.get_serial()))
return move
return None
else:
return move
def lerp(a, b, p):
return a*(1 - p) + b*p
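# Example: lerp(0, 10, 0.25) == 2.5.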
class Games(enum.Enum):
JoustFFA = (0, 'Joust Free-for-All', 2)
JoustTeams = (1, 'Joust Teams', 3)
JoustRandomTeams = (2, 'Joust Random Teams', 3)
Traitor = (3, 'Traitors', 6)
WereJoust = (4, 'Werewolves', 3)
Zombies = (5, 'Zombies', 4)
Commander = (6, 'Commander', 4)
Swapper = (7, 'Swapper', 3)
FightClub = (8, 'Fight Club', 2)
Tournament = (9, 'Tournament', 3)
NonStop = (10, 'Non Stop Joust', 2)
Ninja = (11, 'Ninja Bomb', 2)
Random = (12, 'Random', 2)
def __new__(cls, value, pretty_name, min_players):
"""This odd constructor lets us keep Foo.value as an integer, but also
add some extra properties to each option."""
obj = object.__new__(cls)
obj._value_ = value
obj.pretty_name = pretty_name
obj.minimum_players = min_players
return obj
def next(self):
"""Return the next game mode after this one in the list. Wraps around after hitting bottom."""
return Games((self.value + 1) % len(Games))
def previous(self):
"""Return the previous game mode after this one in the list. Wraps around after hitting bottom."""
return Games((self.value - 1) % len(Games))
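    # Illustrative note (not in the original source): cycling wraps in both
    # directions, e.g. Games.JoustFFA.previous() is Games.Random and
    # Games.Random.next() is Games.JoustFFA.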
#These buttons are based off of
#The mapping of PS Move controllers
class Button(enum.Flag):
NONE = 0
TRIANGLE = psmove.Btn_TRIANGLE
CIRCLE = psmove.Btn_CIRCLE
CROSS = psmove.Btn_CROSS
SQUARE = psmove.Btn_SQUARE
SELECT = psmove.Btn_SELECT
START = psmove.Btn_START
SYNC = psmove.Btn_PS
MIDDLE = psmove.Btn_MOVE
TRIGGER = psmove.Btn_T
SHAPES = TRIANGLE | CIRCLE | CROSS | SQUARE
UPDATE = SELECT | START
all_shapes = [Button.TRIANGLE, Button.CIRCLE, Button.CROSS, Button.SQUARE]
battery_levels = {
psmove.Batt_MIN: "Low",
psmove.Batt_20Percent: "20%",
psmove.Batt_40Percent: "40%",
psmove.Batt_60Percent: "60%",
psmove.Batt_80Percent: "80%",
psmove.Batt_MAX: "100%",
psmove.Batt_CHARGING: "Charging",
psmove.Batt_CHARGING_DONE: "Charged",
}
# Common colors lifted from https://xkcd.com/color/rgb/
# TODO: Add more colors -- probably need to have 14 player colors at least.
class Color(enum.Enum):
BLACK = 0x000000
WHITE = 0xffffff
RED = 0xff0000
GREEN = 0x00ff00
BLUE = 0x0000ff
YELLOW = 0xffff14
PURPLE = 0x7e1e9c
ORANGE = 0xf97306
PINK = 0xff81c0
TURQUOISE = 0x06c2ac
BROWN = 0x653700
def rgb_bytes(self):
v = self.value
return v >> 16, (v >> 8) & 0xff, v & 0xff
# Red is reserved for warnings/knockouts.
PLAYER_COLORS = [ c for c in Color if c not in (Color.RED, Color.WHITE, Color.BLACK) ]
def async_print_exceptions(f):
"""Wraps a coroutine to print exceptions (other than cancellations)."""
@functools.wraps(f)
async def wrapper(*args, **kwargs):
try:
await f(*args, **kwargs)
except asyncio.CancelledError:
raise
except:
traceback.print_exc()
raise
return wrapper
# Represents a pace the game is played at, encapsulating the tempo of the music as well
# as controller sensitivity.
class GamePace:
__slots__ = ['tempo', 'warn_threshold', 'death_threshold']
def __init__(self, tempo, warn_threshold, death_threshold):
self.tempo = tempo
self.warn_threshold = warn_threshold
self.death_threshold = death_threshold
def __str__(self):
return '<GamePace tempo=%s, warn=%s, death=%s>' % (self.tempo, self.warn_threshold, self.death_threshold)
# TODO: These are placeholder values.
# We can't take the values from joust.py, since those are compared to the sum of the
# three accelerometer dimensions, whereas we compute the magnitude of the acceleration
# vector.
SLOW_PACE = GamePace(tempo=0.4, warn_threshold=2, death_threshold=4)
MEDIUM_PACE = GamePace(tempo=1.0, warn_threshold=3, death_threshold=5)
FAST_PACE = GamePace(tempo=1.5, warn_threshold=5, death_threshold=9)
FREEZE_PACE = GamePace(tempo=0, warn_threshold=1.1, death_threshold=1.2)
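# Illustrative sketch (not part of the original module): per the note above,
# a pace's thresholds would be compared against the magnitude of the
# acceleration vector rather than the sum of its components, e.g.
# def exceeds_death_threshold(ax, ay, az, pace=MEDIUM_PACE):
#     return (ax * ax + ay * ay + az * az) ** 0.5 > pace.death_threshold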
REQUIRED_SETTINGS = [
'play_audio',
'move_can_be_admin',
'current_game',
'enforce_minimum',
'sensitivity',
'play_instructions',
'random_modes',
'color_lock',
'color_lock_choices',
'red_on_kill',
'random_teams',
'menu_voice',
'random_team_size',
'force_all_start',
]
|
ressu/SickGear
|
sickbeard/search.py
|
Python
|
gpl-3.0
| 30,487
| 0.004395
|
# Author: Nic Wolfe <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import os
import re
import threading
import datetime
import traceback
import sickbeard
from common import SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, Quality, SEASON_RESULT, MULTI_EP_RESULT
from sickbeard import logger, db, show_name_helpers, exceptions, helpers
from sickbeard import sab
from sickbeard import nzbget
from sickbeard import clients
from sickbeard import history
from sickbeard import notifiers
from sickbeard import nzbSplitter
from sickbeard import ui
from sickbeard import encodingKludge as ek
from sickbeard import failed_history
from sickbeard.exceptions import ex
from sickbeard.providers.generic import GenericProvider
from sickbeard.blackandwhitelist import BlackAndWhiteList
from sickbeard import common
def _downloadResult(result):
"""
Downloads a result to the appropriate black hole folder.
Returns a bool representing success.
result: SearchResult instance to download.
"""
resProvider = result.provider
if resProvider == None:
logger.log(u"Invalid provider name - this is a coding error, report it please", logger.ERROR)
return False
# nzbs with an URL can just be downloaded from the provider
if result.resultType == "nzb":
newResult = resProvider.downloadResult(result)
# if it's an nzb data result
elif result.resultType == "nzbdata":
# get the final file path to the nzb
        fileName = ek.ek(os.path.join, sickbeard.NZB_DIR, result.name + ".nzb")
logger.log(u"Saving NZB to " + fileName)
newResult = True
# save the data to disk
try:
with ek.ek(open, fileName, 'w') as fileOut:
fileOut.write(result.extraInfo[0])
helpers.chmodAsParent(fileName)
except EnvironmentError, e:
logger.log(u"Error trying to save NZB to black hole: " + ex(e), l
|
ogger.ERROR)
newResult = False
elif resProvider.providerType == "torrent":
newResult = resProvider.downloadResult(result)
else:
logger.log(u"Invalid provider type - this is a coding error, report it please", logger.ERROR)
newResult = False
return newResult
def snatchEpisode(result, endStatus=SNATCHED):
"""
Contains the internal logic necessary to actually "snatch" a result that
has been found.
Returns a bool representing success.
result: SearchResult instance to be snatched.
endStatus: the episode status that should be used for the episode object once it's snatched.
"""
if result is None:
return False
result.priority = 0 # -1 = low, 0 = normal, 1 = high
if sickbeard.ALLOW_HIGH_PRIORITY:
# if it aired recently make it high priority
for curEp in result.episodes:
if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
result.priority = 1
if re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I) != None:
endStatus = SNATCHED_PROPER
# NZBs can be sent straight to SAB or saved to disk
if result.resultType in ("nzb", "nzbdata"):
if sickbeard.NZB_METHOD == "blackhole":
dlResult = _downloadResult(result)
elif sickbeard.NZB_METHOD == "sabnzbd":
dlResult = sab.sendNZB(result)
elif sickbeard.NZB_METHOD == "nzbget":
is_proper = True if endStatus == SNATCHED_PROPER else False
dlResult = nzbget.sendNZB(result, is_proper)
else:
logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
dlResult = False
# TORRENTs can be sent to clients or saved to disk
elif result.resultType == "torrent":
# torrents are saved to disk when blackhole mode
if sickbeard.TORRENT_METHOD == "blackhole":
dlResult = _downloadResult(result)
else:
# make sure we have the torrent file content
if not result.content:
if not result.url.startswith('magnet'):
result.content = result.provider.getURL(result.url)
if not result.content:
logger.log(
u"Torrent content failed to download from " + result.url, logger.ERROR
)
# Snatches torrent with client
client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
dlResult = client.sendTORRENT(result)
else:
logger.log(u"Unknown result type, unable to download it", logger.ERROR)
dlResult = False
if not dlResult:
return False
if sickbeard.USE_FAILED_DOWNLOADS:
failed_history.logSnatch(result)
ui.notifications.message('Episode snatched', result.name)
history.logSnatch(result)
# don't notify when we re-download an episode
sql_l = []
for curEpObj in result.episodes:
with curEpObj.lock:
if isFirstBestMatch(result):
curEpObj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
else:
curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
sql_l.append(curEpObj.get_sql())
if curEpObj.status not in Quality.DOWNLOADED:
notifiers.notify_snatch(curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'))
if len(sql_l) > 0:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
return True
def filter_release_name(name, filter_words):
"""
Filters out results based on filter_words
name: name to check
filter_words : Words to filter on, separated by comma
Returns: False if the release name is OK, True if it contains one of the filter_words
"""
if filter_words:
filters = [re.compile('.*%s.*' % filter.strip(), re.I) for filter in filter_words.split(',')]
for regfilter in filters:
if regfilter.search(name):
logger.log(u"" + name + " contains pattern: " + regfilter.pattern, logger.DEBUG)
return True
return False
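# Illustrative usage (hypothetical release names):
# filter_release_name("Show.S01E01.GERMAN.720p", "german,french") # -> True
# filter_release_name("Show.S01E01.720p", "german,french") # -> False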
def pickBestResult(results, show, quality_list=None):
logger.log(u"Picking the best result out of " + str([x.name for x in results]), logger.DEBUG)
# build the black And white list
bwl = None
if show:
if show.is_anime:
bwl = BlackAndWhiteList(show.indexerid)
else:
logger.log("Could not create black and white list no show was given", logger.DEBUG)
# find the best result for the current episode
bestResult = None
for cur_result in results:
logger.log("Quality of " + cur_result.name + " is " + Quality.qualityStrings[cur_result.quality])
if bwl:
if not bwl.is_valid(cur_result):
logger.log(cur_result.name+" does not match the blacklist or the whitelist, rejecting it. Result: " + bwl.get_last_result_msg(), logger.MESSAGE)
continue
if quality_list and cur_result.quality not in quality_list:
logger.log(cur_result.name + " is a quality we know we don't want, rejecting it", logger.DEBUG)
continue
if show.rls_ignore_words and filter_release_name(cur_result.name, show.rls_ignore_words):
logger.log(u"Ignoring " + cur_result.name + " based on ignored words filter: " + show.rls_ignore_words,
logger.MESSAGE)
|
rogerscristo/BotFWD
|
env/lib/python3.6/site-packages/telegram/ext/typehandler.py
|
Python
|
mit
| 4,005
| 0.003745
|
#!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2017
# Leandro Toledo de Souza <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains the TypeHandler class."""
from .handler import Handler
class TypeHandler(Handler):
"""Handler class to handle updates of custom types.
Attributes:
type (:obj:`type`): The ``type`` of updates this handler should process.
callback (:obj:`callable`): The callback function for this handler.
strict (:obj:`bool`): Optional. Use ``type`` instead of ``isinstance``.
Default is ``False``
pass_update_queue (:obj:`bool`): Optional. Determines whether ``update_queue`` will be
passed to the callback function.
pass_job_queue (:obj:`bool`): Optional. Determines whether ``job_queue`` will be passed to
the callback function.
Args:
type (:obj:`type`): The ``type`` of updates this handler should process, as
determined by ``isinstance``
callback (:obj:`callable`): A function that takes ``bot, update`` as positional arguments.
It will be called when the :attr:`check_update` has determined that an update should be
processed by this handler.
strict (:obj:`bool`, optional): Use ``type`` instead of ``isinstance``.
Default is ``False``
pass_update_queue (:obj:`bool`, optional): If set to ``True``, a keyword argument called
``update_queue`` will be passed to the callback function. It will be the ``Queue``
instance used by the :class:`telegram.ext.Updater` and :class:`telegram.ext.Dispatcher`
that contains new updates which can be used to insert updates. Default is ``False``.
pass_job_queue (:obj:`bool`, optional): If set to ``True``, a keyword argument called
``job_queue`` will be passed to the callback function. It will be a
:class:`telegram.ext.JobQueue` instance created by the :class:`telegram.ext.Updater`
which can be used to schedule new jobs. Default is ``False``.
"""
def __init__(self, type, callback, strict=False, pass_update_queue=False,
pass_job_queue=False):
super(TypeHandler, self).__init__(
callback, pass_update_queue=pass_update_queue, pass_job_queue=pass_job_queue)
self.type = type
self.strict = strict
def check_update(self, update):
"""Determines whether an update should be passed to this handlers :attr:`callback`.
Args:
update (:class:`telegram.Update`): Incoming telegram update.
Returns:
:obj:`bool`
"""
if not self.strict:
return isinstance(update, self.type)
else:
return type(update) is self.type
def handle_update(self, update, dispatcher):
"""Send the update to the :attr:`callback`.
Args:
update (:class:`telegram.Update`): Incoming telegram update.
dispatcher (:class:`telegram.ext.Dispatcher`): Dispatcher that originated the Update.
"""
optional_args = self.collect_optional_args(dispatcher)
return self.callback(dispatcher.bot, update, **optional_args)
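
# Illustrative usage sketch (not from the original module; assumes the
# pre-v12 python-telegram-bot API documented above, with "TOKEN" as a
# placeholder bot token):
#
# from telegram.ext import Updater, TypeHandler
#
# def handle_dict(bot, update):
#     print(update)  # process raw dict updates fed into the dispatcher
#
# updater = Updater("TOKEN")
# updater.dispatcher.add_handler(TypeHandler(dict, handle_dict))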
|
noah/riddim
|
lib/data.py
|
Python
|
mit
| 674
| 0.014837
|
# -*- coding: utf-8 -*-
import sys
import time
from config import Config
from multiprocessing import managers, connection
def _new_init_timeout():
return time.time() + 0.2
sys.modules['multiprocessing'].__dict__['managers'].__dict__['connection']._init_timeout = _new_init_timeout
from multiprocessing.managers import BaseManager
class DataManager(BaseManager): pass
def set_data(port, k, v):
    # create a shared Data object
DataManager.register('get_data')
manager = DataManager(address=(Config.hostname, port + 1),
authkey=Config.authkey)
manager.connect()
    data = manager.get_data()
data[k] = v
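# Illustrative usage (hypothetical port and key): publish a value to the
# DataManager listening on Config.hostname at port + 1.
# set_data(8000, 'nowplaying', 'Artist - Title')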
|
Otterpaw/Python-Roguelike
|
floor_object.py
|
Python
|
mit
| 920
| 0.029348
|
class Floor_Object(object):
"""docstring for Floor_Object"""
def __init__(self, coordinates, icon, name, interactions, description):
super(Floor_Object, self).__init__()
self.coordinates = coordinates
self.icon = icon
self.name = name
self.interactions = interactions
self.description = description
class Chest(Floor_Object):
"""A container holding items"""
def __init__(self, coordinates, icon, name, interactions, description, item_list, is_locked, key_name):
super(Chest, self).__init__(coordinates, icon, name, interactions, description)
self.item_list = item_list
self.is_locked = is_locked
        self.key_name = key_name
class Item_Pile(Floor_Object):
"""A list of items present on a tile"""
def __init__(self, coordinates, icon, name, interactions, description, item_list):
super(Item_Pile, self).__init__(coordinates, icon, name, interactions, description)
self.item_list = item_list
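# Illustrative construction (hypothetical values):
# chest = Chest((4, 7), '#', 'oak chest', ['open'], 'A sturdy chest.',
#               item_list=['potion'], is_locked=True, key_name='brass key')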
|
hethune/tutorials
|
pymongo/openweathermap.py
|
Python
|
mit
| 1,579
| 0.008233
|
import json
import urllib2
import time
import math
from pymongo import MongoClient
from pymongo import ASCENDING, DESCENDING
def debug(info):
print info
def log(info):
print info
def parseJson(url):
try:
data = json.load(urllib2.urlopen(url))
return data
except ValueError as e:
log(e)
exit()
except:
log("Url Error: " + url)
exit()
def openDBCollection(database, collectionName):
client = MongoClient()
db = client[database]
collection = db[collectionName]
# In case we need to make results unique
    # collection.ensure_index([("name", ASCENDING), ("start", ASCENDING)], unique=True, dropDups=True)
return collection
def validateData(raw):
    data = []
    for key in raw:
value = raw[key]
if isinstance(value, basestring) and value.lower() == "error":
log("Failed retrieve latency for " + key)
else:
value["name"] = key
data.append(value)
return data
def write(collection, posts):
for post in posts:
try:
post_id = collection.insert(post)
debug(post_id)
except Exception:
log("Insertion failed for" + post["name"])
return True
def main(url):
# url = "http://stackoverflow.com/questions/1479776/too-many-values-to-unpack-exception"
data = parseJson(url)
posts = validateData(data)
collection = openDBCollection('latency', 'dmos')
write(collection, posts)
url = "http://api.openweathermap.org/data/2.5/weather?q=London,uk"
main(url)
|
frappe/frappe
|
frappe/tests/test_defaults.py
|
Python
|
mit
| 2,355
| 0.022505
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import frappe, unittest
from frappe.defaults import *
class TestDefaults(unittest.TestCase):
def test_global(self):
clear_user_default("key1")
set_global_default("key1", "value1")
self.assertEqual(get_global_default("key1"), "value1")
set_global_default("key1", "value2")
self.assertEqual(get_global_default("key1"), "value2")
add_global_default("key1", "value3")
self.assertEqual(get_global_default("key1"), "value2")
self.assertEqual(get_defaults()["key1"], ["value2", "value3"])
self.assertEqual(get_user_default_as_list("key1"), ["value2", "value3"])
def test_user(self):
set_user_default("key1", "2value1")
		self.assertEqual(get_user_default_as_list("key1"), ["2value1"])
set_user_default("key1", "2value2")
self.assertEqual(get_user_default("key1"), "2value2")
		add_user_default("key1", "3value3")
self.assertEqual(get_user_default("key1"), "2value2")
self.assertEqual(get_user_default_as_list("key1"), ["2value2", "3value3"])
def test_global_if_not_user(self):
set_global_default("key4", "value4")
self.assertEqual(get_user_default("key4"), "value4")
def test_clear(self):
set_user_default("key5", "value5")
self.assertEqual(get_user_default("key5"), "value5")
clear_user_default("key5")
self.assertEqual(get_user_default("key5"), None)
def test_clear_global(self):
set_global_default("key6", "value6")
self.assertEqual(get_user_default("key6"), "value6")
clear_default("key6", value="value6")
self.assertEqual(get_user_default("key6"), None)
def test_user_permission_on_defaults(self):
self.assertEqual(get_global_default("language"), "en")
self.assertEqual(get_user_default("language"), "en")
self.assertEqual(get_user_default_as_list("language"), ["en"])
old_user = frappe.session.user
user = '[email protected]'
frappe.set_user(user)
perm_doc = frappe.get_doc(dict(
doctype='User Permission',
user=frappe.session.user,
allow="Language",
for_value="en-GB",
)).insert(ignore_permissions = True)
self.assertEqual(get_global_default("language"), None)
self.assertEqual(get_user_default("language"), None)
self.assertEqual(get_user_default_as_list("language"), [])
frappe.delete_doc('User Permission', perm_doc.name)
frappe.set_user(old_user)
|
tannishk/airmozilla
|
airmozilla/base/tests/test_helpers.py
|
Python
|
bsd-3-clause
| 3,664
| 0
|
from nose.tools import eq_
from django.test.client import RequestFactory
from airmozilla.base.tests.testbase import DjangoTestCase
from airmozilla.base.helpers import abs_static, show_duration
class TestAbsStaticHelpers(DjangoTestCase):
def tearDown(self):
super(TestAbsStaticHelpers, self).tearDown()
# This is necessary because funfactory (where we use the static()
# helper function) uses staticfiles_storage which gets lazy loaded
# and remembered once in memory.
# By overriding it like this it means we can change settings
# and have it reflected immediately
from funfactory import helpers
from django.contrib.staticfiles.storage import ConfiguredStorage
helpers.staticfiles_storage = ConfiguredStorage()
# cache.clear()
def test_abs_static(self):
context = {}
context['request'] = RequestFactory().get('/')
result = abs_static(context, 'foo.png')
eq_(result, 'http://testserver/static/foo.png')
def test_abs_static_already(self):
context = {}
context['request'] = RequestFactory().get('/')
result = abs_static(context, '/media/foo.png')
eq_(result, 'http://testserver/media/foo.png')
result = abs_static(context, '//my.cdn.com/media/foo.png')
eq_(result, 'http://my.cdn.com/media/foo.png')
def test_abs_static_with_STATIC_URL(self):
context = {}
context['request'] = RequestFactory().get('/')
with self.settings(STATIC_URL='//my.cdn.com/static/'):
            result = abs_static(context, 'foo.png')
            eq_(result, 'http://my.cdn.com/static/foo.png')
def test_abs_static_with_already_STATIC_URL(self):
context = {}
context['request'] = RequestFactory().get('/')
with self.settings(STATIC_URL='//my.cdn.com/static/'):
result = abs_static(context, '//my.cdn.com/static/foo.png')
eq_(result, 'http://my.cdn.com/static/foo.png')
def test_abs_static_with_HTTPS_STATIC_URL(self):
context = {}
context['request'] = RequestFactory().get('/')
with self.settings(STATIC_URL='https://my.cdn.com/static/'):
result = abs_static(context, 'foo.png')
eq_(result, 'https://my.cdn.com/static/foo.png')
def test_abs_static_with_STATIC_URL_with_https(self):
context = {}
context['request'] = RequestFactory().get('/')
context['request']._is_secure = lambda: True
assert context['request'].is_secure()
with self.settings(STATIC_URL='//my.cdn.com/static/'):
result = abs_static(context, 'foo.png')
eq_(result, 'https://my.cdn.com/static/foo.png')
class TestDuration(DjangoTestCase):
def test_show_duration_long_format(self):
result = show_duration(60 * 60)
eq_(result, "1 hour")
result = show_duration(60)
eq_(result, "1 minute")
result = show_duration(2 * 60 * 60 + 10 * 60)
eq_(result, "2 hours 10 minutes")
result = show_duration(1 * 60 * 60 + 1 * 60)
eq_(result, "1 hour 1 minute")
result = show_duration(1 * 60 * 60 + 1 * 60 + 1)
eq_(result, "1 hour 1 minute")
result = show_duration(2 * 60 * 60 + 2 * 60)
eq_(result, "2 hours 2 minutes")
result = show_duration(1 * 60 * 60 + 1 * 60 + 1, include_seconds=True)
eq_(result, "1 hour 1 minute 1 second")
result = show_duration(1 * 60 * 60 + 1 * 60 + 2, include_seconds=True)
eq_(result, "1 hour 1 minute 2 seconds")
result = show_duration(49)
eq_(result, "49 seconds")
|
youtube/cobalt
|
cobalt/bindings/path_generator.py
|
Python
|
bsd-3-clause
| 7,751
| 0.004774
|
# Copyright 2017 The Cobalt Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper class for getting names and paths related to interfaces."""
import os
from cobalt.build.path_conversion import ConvertPath
def _NormalizeSlashes(path):
if os.path.sep == '\\':
return path.replace('\\', '/')
else:
return path
class PathBuilder(object):
"""Provides helper functions for getting paths related to an interface."""
def __init__(self, engine_prefix, info_provider, interfaces_root,
generated_root_directory):
self.interfaces_root = _NormalizeSlashes(interfaces_root)
self.generated_root = _NormalizeSlashes(generated_root_directory)
self.engine_prefix = engine_prefix
self.info_provider = info_provider
self.interfaces_info = info_provider.interfaces_info
@property
def generated_conversion_header_path(self):
return os.path.join(self.generated_root,
'%s_gen_type_conversion.h' % self.engine_prefix)
@property
def generated_conversion_include_path(self):
return os.path.relpath(self.generated_conversion_header_path,
self.generated_root)
def NamespaceComponents(self, interface_name):
"""Get the interface's namespace as a list of namespace components."""
# Get the IDL filename relative to the cobalt directory, and split the
# directory to get the list of namespace components.
if interface_name in self.interfaces_info:
interface_info = self.interfaces_info[interface_name]
idl_path = interface_info['full_path']
elif interface_name in self.info_provider.enumerations:
enum_info = self.info_provider.enumerations[interface_name]
idl_path = enum_info['full_path']
else:
      raise KeyError('Unknown interface name %s' % interface_name)
rel_idl_path = os.path.relpath(idl_path, self.interfaces_root)
components = os.path.dirname(rel_idl_path).split(os.sep)
# Check if this IDL's path lies in our interfaces root. If it does not,
# we treat it as an extension IDL.
real_interfaces_root = os.path.realpath(self.interfaces_root)
real_idl_path = os.path.realpath(os.path.dirname(idl_path))
interfaces_root_is_in_components_path = (os.path.commonprefix(
[real_interfaces_root, real_idl_path]) == real_interfaces_root)
if interfaces_root_is_in_components_path:
return [os.path.basename(self.interfaces_root)] + components
else:
# If our IDL path lies outside of the cobalt/ directory, assume it is
# an externally defined web extension and assign it the 'webapi_extension'
# namespace.
return [os.path.basename(self.interfaces_root), 'webapi_extension']
def Namespace(self, interface_name):
"""Get the interface's namespace."""
return '::'.join(self.NamespaceComponents(interface_name))
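  # Illustration (hypothetical paths): with an interfaces_root ending in
  # 'cobalt' and an IDL at cobalt/dom/document.idl, NamespaceComponents
  # returns ['cobalt', 'dom'], so Namespace('Document') is 'cobalt::dom'.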
def BindingsClass(self, interface_name):
"""Get the name of the generated bindings class."""
return self.engine_prefix.capitalize() + interface_name
def FullBindingsClassName(self, interface_name):
"""Get the fully qualified name of the generated bindings class."""
return '%s::%s' % (self.Namespace(interface_name),
self.BindingsClass(interface_name))
def FullClassName(self, interface_name):
"""Get the fully qualified name of the implementation class."""
components = self.NamespaceComponents(interface_name)
return '::'.join(components + [interface_name])
def ImplementationHeaderPath(self, interface_name):
"""Get an #include path to the interface's implementation .h file."""
interface_info = self.interfaces_info[interface_name]
path = ConvertPath(
interface_info['full_path'], forward_slashes=True, output_extension='h')
return os.path.relpath(path, os.path.dirname(self.interfaces_root))
def BindingsHeaderIncludePath(self, interface_name):
"""Get an #include path to the interface's generated .h file."""
path = self.BindingsHeaderFullPath(interface_name)
return os.path.relpath(path, self.generated_root)
def BindingsHeaderFullPath(self, interface_name):
"""Get the full path to the interface's implementation .h file."""
interface_info = self.interfaces_info[interface_name]
return ConvertPath(
interface_info['full_path'],
forward_slashes=True,
output_directory=self.generated_root,
output_prefix='%s_' % self.engine_prefix,
output_extension='h',
base_directory=os.path.dirname(self.interfaces_root))
def BindingsImplementationPath(self, interface_name):
"""Get the full path to the interface's implementation .cc file."""
interface_info = self.interfaces_info[interface_name]
return ConvertPath(
interface_info['full_path'],
forward_slashes=True,
output_directory=self.generated_root,
output_prefix='%s_' % self.engine_prefix,
output_extension='cc',
base_directory=os.path.dirname(self.interfaces_root))
def DictionaryHeaderIncludePath(self, dictionary_name):
"""Get the #include path to the dictionary's header."""
path = self.DictionaryHeaderFullPath(dictionary_name)
return os.path.relpath(path, self.generated_root)
def DictionaryHeaderFullPath(self, dictionary_name):
"""Get the full path to the dictionary's generated implementation header."""
interface_info = self.interfaces_info[dictionary_name]
return ConvertPath(
interface_info['full_path'],
forward_slashes=True,
output_directory=self.generated_root,
output_extension='h',
base_directory=os.path.dirname(self.interfaces_root))
def DictionaryConversionImplementationPath(self, dictionary_name):
"""Get the full path to the dictionary's conversion header."""
interface_info = self.interfaces_info[dictionary_name]
return ConvertPath(
interface_info['full_path'],
forward_slashes=True,
output_directory=self.generated_root,
output_prefix='%s_' % self.engine_prefix,
output_extension='cc',
base_directory=os.path.dirname(self.interfaces_root))
def EnumHeaderIncludePath(self, enum_name):
"""Get the #include path to the dictionary's header."""
path = self.EnumHeaderFullPath(enum_name)
return os.path.relpath(path, self.generated_root)
def EnumHeaderFullPath(self, enum_name):
"""Get the full path to the dictionary's generated implementation header.""
|
"
in
|
terface_info = self.info_provider.enumerations[enum_name]
return ConvertPath(
interface_info['full_path'],
forward_slashes=True,
output_directory=self.generated_root,
output_extension='h',
base_directory=os.path.dirname(self.interfaces_root))
def EnumConversionImplementationFullPath(self, enum_name):
"""Get the full path to the dictionary's conversion header."""
interface_info = self.info_provider.enumerations[enum_name]
return ConvertPath(
interface_info['full_path'],
forward_slashes=True,
output_directory=self.generated_root,
output_prefix='%s_' % self.engine_prefix,
output_extension='cc',
base_directory=os.path.dirname(self.interfaces_root))
|
NUPT-Pig/python_test
|
tkinter_gui.py
|
Python
|
gpl-2.0
| 1,385
| 0.012274
|
from Tkinter import *
root = Tk()
root.title('first test window')
#root.geometry('300x200')
frm = Frame(root)
frm_l = Frame(frm)
Label(frm_l, text='left_top').pack(side=TOP)
Label(frm_l, text='left_bottom').pack(side=BOTTOM)
frm_l.pack(side=LEFT)
frm_r = Frame(frm)
Label(frm_r, text='right_top').pack(side=TOP)
Label(frm_r, text='right_bottom').pack(side=BOTTOM)
frm_r.pack(side=RIGHT)
frm.pack(side=TOP)
##########################################################
frm1 = Frame(root)
var = StringVar()
Entry(frm1, textvariable=var).pack(side=TOP)
var.set('entry text')
t = Text(frm1)
t.pack(side=TOP)
def print_entry():
t.insert(END, var.get())
Button(frm1, text='copy', command=print_entry).pack(side=TOP)
frm1.pack(side=TOP)
##########################################################
frm2 = Frame(root)
redbutton = Button(frm2, text="Red", fg="red")
redbutton.pack( side = LEFT)
greenbutton = Button(frm2, text="Brown", fg="brown")
greenbutton.pack( side = LEFT )
bluebutton = Button(frm2, text="Blue", fg="blue")
bluebutton.pack( side = LEFT )
blackbutton = Button(frm2, text="Black", fg="black")
blackbutton.pack( side = BOTTOM)
frm2.pack(side=TOP)
######################################################
frm3 = Frame(root)
b = Button(frm3, text='move')
b.place(bordermode=OUTSIDE, height=100, width=100, x=50, y=50)
b.pack()
frm3.pack(side=TOP)
root.mainloop()
|
anusornc/vitess
|
test/queryservice_tests/stream_tests.py
|
Python
|
bsd-3-clause
| 6,435
| 0.014452
|
import json
import threading
import time
import traceback
import urllib
from vtdb import cursor
from vtdb import dbexceptions
import environment
import framework
class TestStream(framework.TestCase):
def tearDown(self):
self.env.conn.begin()
self.env.execute("delete from vtocc_big")
self.env.conn.commit()
# UNION queries like this used to crash vtocc, only straight SELECT
# would go through. This is a unit test to show it is fixed.
def test_union(self):
cu = self.env.execute("select 1 from dual union select 1 from dual",
cursorclass=cursor.StreamCursor)
count = 0
while True:
row = cu.fetchone()
if row is None:
break
count += 1
self.assertEqual(count, 1)
def test_customrules(self):
bv = {'asdfg': 1}
try:
self.env.execute("select * from vtocc_test where intval=:asdfg", bv,
cursorclass=cursor.StreamCursor)
self.fail("Bindvar asdfg should not be allowed by custom rule")
except dbexceptions.DatabaseError as e:
self.assertContains(str(e), "error: Query disallowed")
# Test dynamic custom rule for vttablet
if self.env.env == "vttablet":
if environment.topo_server().flavor() == 'zookeeper':
# Make a change to the rule
self.env.change_customrules()
time.sleep(3)
try:
self.env.execute("select * from vtocc_test where intval=:asdfg", bv,
cursorclass=cursor.StreamCursor)
except dbexceptions.DatabaseError as e:
self.fail("Bindvar asdfg should be allowed after a change of custom rule, Err=" + str(e))
self.env.restore_customrules()
time.sleep(3)
try:
self.env.execute("select * from vtocc_test where intval=:asdfg", bv,
cursorclass=cursor.StreamCursor)
self.fail("Bindvar asdfg should not be allowed by custom rule")
except dbexceptions.DatabaseError as e:
self.assertContains(str(e), "error: Query disallowed")
def test_basic_stream(self):
self._populate_vtocc_big_table(100)
loop_count = 1
# select lots of data using a non-streaming query
if True:
for i in xrange(loop_count):
cu = self.env.execute("select * from vtocc_big b1, vtocc_big b2")
rows = cu.fetchall()
self.assertEqual(len(rows), 10000)
self.check_row_10(rows[10])
# select lots of data using a streaming query
if True:
for i in xrange(loop_count):
        cu = cursor.StreamCursor(self.env.conn)
cu.execute("select * from vtocc_big b1, vtocc_big b2", {})
count = 0
while True:
row = cu.fetchone()
if row is None:
break
if count == 10:
self.check_row_10(row)
count += 1
self.assertEqual(count, 10000)
def test_streaming_error(self):
    with self.assertRaises(dbexceptions.DatabaseError):
cu = self.env.execute("select count(abcd) from vtocc_big b1",
cursorclass=cursor.StreamCursor)
def check_row_10(self, row):
# null the dates so they match
row = list(row)
row[6] = None
row[11] = None
row[20] = None
row[25] = None
self.assertEqual(row, [10L, 'AAAAAAAAAAAAAAAAAA 10', 'BBBBBBBBBBBBBBBBBB 10', 'C', 'DDDDDDDDDDDDDDDDDD 10', 'EEEEEEEEEEEEEEEEEE 10', None, 'FF 10', 'GGGGGGGGGGGGGGGGGG 10', 10L, 10L, None, 10L, 10, 0L, 'AAAAAAAAAAAAAAAAAA 0', 'BBBBBBBBBBBBBBBBBB 0', 'C', 'DDDDDDDDDDDDDDDDDD 0', 'EEEEEEEEEEEEEEEEEE 0', None, 'FF 0', 'GGGGGGGGGGGGGGGGGG 0', 0L, 0L, None, 0L, 0])
def test_streaming_terminate(self):
try:
self._populate_vtocc_big_table(100)
query = 'select * from vtocc_big b1, vtocc_big b2, vtocc_big b3'
cu = cursor.StreamCursor(self.env.conn)
thd = threading.Thread(target=self._stream_exec, args=(cu,query))
thd.start()
tablet_addr = "http://" + self.env.conn.addr
connId = self._get_conn_id(tablet_addr)
self._terminate_query(tablet_addr, connId)
thd.join()
with self.assertRaises(dbexceptions.DatabaseError) as cm:
cu.fetchall()
      errMsg1 = "error: the query was killed either because it timed out or was canceled: Lost connection to MySQL server during query (errno 2013)"
errMsg2 = "error: Query execution was interrupted (errno 1317)"
self.assertTrue(cm.exception not in (errMsg1, errMsg2), "did not raise interruption error: %s" % str(cm.exception))
cu.close()
except Exception, e:
self.fail("Failed with error %s %s" % (str(e), traceback.print_exc()))
def _populate_vtocc_big_table(self, num_rows):
self.env.conn.begin()
for i in xrange(num_rows):
self.env.execute("insert into vtocc_big values " +
"(" + str(i) + ", " +
"'AAAAAAAAAAAAAAAAAA " + str(i) + "', " +
"'BBBBBBBBBBBBBBBBBB " + str(i) + "', " +
"'C', " +
"'DDDDDDDDDDDDDDDDDD " + str(i) + "', " +
"'EEEEEEEEEEEEEEEEEE " + str(i) + "', " +
"now()," +
"'FF " + str(i) + "', " +
"'GGGGGGGGGGGGGGGGGG " + str(i) + "', " +
str(i) + ", " +
str(i) + ", " +
"now()," +
str(i) + ", " +
str(i%100) + ")")
self.env.conn.commit()
# Initiate a slow stream query
def _stream_exec(self, cu, query):
cu.execute(query, {})
# Get the connection id from status page
def _get_conn_id(self, tablet_addr):
streamqueryz_url = tablet_addr + "/streamqueryz?format=json"
retries = 3
streaming_queries = []
while len(streaming_queries) == 0:
content = urllib.urlopen(streamqueryz_url).read()
streaming_queries = json.loads(content)
retries -= 1
if retries == 0:
self.fail("unable to fetch streaming queries from %s" % streamqueryz_url)
else:
time.sleep(1)
connId = streaming_queries[0]['ConnID']
return connId
# Terminate the query via streamqueryz admin page
def _terminate_query(self, tablet_addr, connId):
terminate_url = tablet_addr + "/streamqueryz/terminate?format=json&connID=" + str(connId)
urllib.urlopen(terminate_url).read()
|
devs1991/test_edx_docmode
|
venv/lib/python2.7/site-packages/django/db/migrations/questioner.py
|
Python
|
agpl-3.0
| 7,694
| 0.002339
|
from __future__ import print_function, unicode_literals
import importlib
import os
import sys
from django.apps import apps
from django.db.models.fields import NOT_PROVIDED
from django.utils import datetime_safe, six, timezone
from django.utils.six.moves import input
from .loader import MIGRATIONS_MODULE_NAME
class MigrationQuestioner(object):
"""
Gives the autodetector responses to questions it might have.
This base class has a built-in noninteractive mode, but the
interactive subclass is what the command-line arguments will use.
"""
def __init__(self, defaults=None, specified_apps=None, dry_run=None):
self.defaults = defaults or {}
self.specified_apps = specified_apps or set()
self.dry_run = dry_run
def ask_initial(self, app_label):
"Should we create an initial migration for the app?"
# If it was specified on the command line, definitely true
if app_label in self.specified_apps:
return True
# Otherwise, we look to see if it has a migrations module
# without any Python files in it, apart from __init__.py.
# Apps from the new app template will have these; the python
# file check will ensure we skip South ones.
try:
app_config = apps.get_app_config(app_label)
except LookupError: # It's a fake app.
return self.defaults.get("ask_initial", False)
migrations_import_path = "%s.%s" % (app_config.name, MIGRATIONS_MODULE_NAME)
try:
migrations_module = importlib.import_module(migrations_import_path)
except ImportError:
return self.defaults.get("ask_initial", False)
else:
if hasattr(migrations_module, "__file__"):
filenames = os.listdir(os.path.dirname(migrations_module.__file__))
elif hasattr(migrations_module, "__path__"):
if len(migrations_module.__path__) > 1:
return False
filenames = os.listdir(list(migrations_module.__path__)[0])
return not any(x.endswith(".py") for x in filenames if x != "__init__.py")
def ask_not_null_addition(self, field_name, model_name):
"Adding a NOT NULL field to a model"
# None means quit
return None
def ask_not_null_alteration(self, field_name, model_name):
"Changing a NULL field to NOT NULL"
# None means quit
return None
def ask_rename(self, model_name, old_name, new_name, field_instance):
"Was this field really renamed?"
return self.defaults.get("ask_rename", False)
def ask_rename_model(self, old_model_state, new_model_state):
"Was this model really renamed?"
return self.defaults.get("ask_rename_model", False)
def ask_merge(self, app_label):
"Do you really
|
want to merge these migrations?"
return self.defaults.get("ask_merge", False)
class InteractiveMigrationQuestioner(MigrationQuestioner):
def _boolean_input(self, question, default=None):
result = input("%
|
s " % question)
if not result and default is not None:
return default
while len(result) < 1 or result[0].lower() not in "yn":
result = input("Please answer yes or no: ")
return result[0].lower() == "y"
def _choice_input(self, question, choices):
print(question)
for i, choice in enumerate(choices):
print(" %s) %s" % (i + 1, choice))
result = input("Select an option: ")
while True:
try:
value = int(result)
if 0 < value <= len(choices):
return value
except ValueError:
pass
result = input("Please select a valid option: ")
def _ask_default(self):
print("Please enter the default value now, as valid Python")
print("The datetime and django.utils.timezone modules are available, so you can do e.g. timezone.now()")
while True:
if six.PY3:
# Six does not correctly abstract over the fact that
# py3 input returns a unicode string, while py2 raw_input
# returns a bytestring.
code = input(">>> ")
else:
code = input(">>> ").decode(sys.stdin.encoding)
if not code:
print("Please enter some code, or 'exit' (with no quotes) to exit.")
elif code == "exit":
sys.exit(1)
else:
try:
return eval(code, {}, {"datetime": datetime_safe, "timezone": timezone})
except (SyntaxError, NameError) as e:
print("Invalid input: %s" % e)
def ask_not_null_addition(self, field_name, model_name):
"Adding a NOT NULL field to a model"
if not self.dry_run:
choice = self._choice_input(
"You are trying to add a non-nullable field '%s' to %s without a default; "
"we can't do that (the database needs something to populate existing rows).\n"
"Please select a fix:" % (field_name, model_name),
[
"Provide a one-off default now (will be set on all existing rows)",
"Quit, and let me add a default in models.py",
]
)
if choice == 2:
sys.exit(3)
else:
return self._ask_default()
return None
def ask_not_null_alteration(self, field_name, model_name):
"Changing a NULL field to NOT NULL"
if not self.dry_run:
choice = self._choice_input(
"You are trying to change the nullable field '%s' on %s to non-nullable "
"without a default; we can't do that (the database needs something to "
"populate existing rows).\n"
"Please select a fix:" % (field_name, model_name),
[
"Provide a one-off default now (will be set on all existing rows)",
("Ignore for now, and let me handle existing rows with NULL myself "
"(e.g. because you added a RunPython or RunSQL operation to handle "
"NULL values in a previous data migration)"),
"Quit, and let me add a default in models.py",
]
)
if choice == 2:
return NOT_PROVIDED
elif choice == 3:
sys.exit(3)
else:
return self._ask_default()
return None
def ask_rename(self, model_name, old_name, new_name, field_instance):
"Was this field really renamed?"
msg = "Did you rename %s.%s to %s.%s (a %s)? [y/N]"
return self._boolean_input(msg % (model_name, old_name, model_name, new_name,
field_instance.__class__.__name__), False)
def ask_rename_model(self, old_model_state, new_model_state):
"Was this model really renamed?"
msg = "Did you rename the %s.%s model to %s? [y/N]"
return self._boolean_input(msg % (old_model_state.app_label, old_model_state.name,
new_model_state.name), False)
def ask_merge(self, app_label):
return self._boolean_input(
"\nMerging will only work if the operations printed above do not conflict\n" +
"with each other (working on different fields or models)\n" +
"Do you want to merge these migration branches? [y/N]",
False,
)
|
antoinecarme/sklearn2sql_heroku
|
tests/classification/BinaryClass_10/ws_BinaryClass_10_DecisionTreeClassifier_db2_code_gen.py
|
Python
|
bsd-3-clause
| 149
| 0.013423
|
from sklearn2sql_heroku.tests.classification import generic as class_gen
class_gen.test_model("DecisionTreeClassifier", "BinaryClass_10", "db2")
|
thumbor/thumbor
|
tests/filters/test_watermark.py
|
Python
|
mit
| 10,644
| 0.000188
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com [email protected]
from preggy import expect
from tornado.testing import gen_test
from tests.fixtures.watermark_fixtures import (
POSITIONS,
RATIOS,
SOURCE_IMAGE_SIZES,
WATERMARK_IMAGE_SIZES,
)
from thumbor.filters import watermark
from thumbor.testing import FilterTestCase
class WatermarkFilterTestCase(FilterTestCase):
@gen_test
async def test_watermark_filter_centered(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,center,center,60)",
)
expected = self.get_fixture("watermarkCenter.jpg")
        ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_centered_x(self):
        image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,center,40,20)",
)
expected = self.get_fixture("watermarkCenterX.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_centered_y(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,80,center,50)",
)
expected = self.get_fixture("watermarkCenterY.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_repeated(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,repeat,repeat,70)",
)
expected = self.get_fixture("watermarkRepeat.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_repeated_x(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,repeat,center,70)",
)
expected = self.get_fixture("watermarkRepeatX.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_repeated_y(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,30,repeat,30)",
)
expected = self.get_fixture("watermarkRepeatY.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_detect_extension_simple(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark,30,-50,60)",
)
expected = self.get_fixture("watermarkSimple.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_simple(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,30,-50,60)",
)
expected = self.get_fixture("watermarkSimple.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_calculated(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,4p,-30p,60)",
)
expected = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,32,-160,60)",
)
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_calculated_center(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,4p,center,60)",
)
expected = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,32,center,60)",
)
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_calculated_repeat(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,repeat,30p,60)",
)
expected = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,repeat,160,60)",
)
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_calculated_position(self):
watermark.Filter.pre_compile()
filter_instance = watermark.Filter("http://dummy,0,0,0", self.context)
for length, pos, expected in POSITIONS:
test = {
"length": length,
"pos": pos,
}
expect(
filter_instance.detect_and_get_ratio_position(pos, length)
).to_be_equal_with_additional_info(expected, **test)
@gen_test
async def test_watermark_filter_simple_big(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermarkBig.png,-10,-100,50)",
)
expected = self.get_fixture("watermarkSimpleBig.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_simple_50p_width(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,30,-50,20,50)",
)
expected = self.get_fixture("watermarkResize50pWidth.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_simple_70p_height(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,30,-50,20,none,70)",
)
expected = self.get_fixture("watermarkResize70pHeight.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_simple_60p_80p(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,-30,-200,20,60,80)",
)
expected = self.get_fixture("watermarkResize60p80p.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_calculated_resizing(self):
watermark.Filter.pre_compile()
filter_instance = watermark.Filter("http://dummy,0,0,0", self.context)
for source_image_width, source_image_height in SOURCE_IMAGE_SIZES:
for (
watermark_source_image_width,
watermark_source_image_height,
) in WATERMARK_IMAGE_SIZES:
for w_ratio, h_ratio in RATIOS:
max_width = (
source_image_width * (float(w_ratio) / 100)
if w_ratio
else float("inf")
)
max_height = (
source_image_height * (float(h_ratio) / 100)
if h_ratio
|
Erotemic/ibeis
|
ibeis/gui/id_review_api.py
|
Python
|
apache-2.0
| 32,750
| 0.003908
|
# -*- coding: utf-8 -*-
"""
CommandLine:
python -m ibeis.gui.inspect_gui --test-test_review_widget --show
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from functools import partial
from ibeis.viz import viz_helpers as vh
import guitool_ibeis as gt
import numpy as np
import utool as ut
(print, rrr, profile) = ut.inject2(__name__, '[id_review_api]')
MATCHED_STATUS_TEXT = 'Matched'
REVIEWED_STATUS_TEXT = 'Reviewed'
REVIEW_CFG_DEFAULTS = {
'ranks_top': 5,
'directed': False,
'name_scoring': True,
'filter_reviewed': True,
'filter_photobombs': True,
'filter_true_matches': True,
'show_chips': True,
'filter_duplicate_true_matches': False,
}
@profile
def get_review_edges(cm_list, ibs=None, review_cfg={}):
r"""
Needs to be moved to a better file. Maybe something to do with
identification.
Returns a list of matches that should be inspected
This function is more lightweight than orgres or allres.
Used in id_review_api and interact_qres2
Args:
cm_list (list): list of chip match objects
ranks_top (int): put all ranks less than this number into the graph
directed (bool):
Returns:
tuple: review_edges = (qaid_arr, daid_arr, score_arr, rank_arr)
CommandLine:
python -m ibeis.gui.id_review_api get_review_edges:0
Example0:
>>> # ENABLE_DOCTEST
>>> from ibeis.gui.id_review_api import * # NOQA
>>> import ibeis
>>> ibs = ibeis.opendb('PZ_MTEST')
>>> qreq_ = ibeis.main_helpers.testdata_qreq_()
>>> cm_list = qreq_.execute()
>>> review_cfg = dict(ranks_top=5, directed=True, name_scoring=False,
>>> filter_true_matches=True)
>>> review_edges = get_review_edges(cm_list, ibs=ibs, review_cfg=review_cfg)
>>> print(review_edges)
Example1:
>>> # UNSTABLE_DOCTEST
>>> from ibeis.gui.id_review_api import * # NOQA
>>> import ibeis
>>> cm_list, qreq_ = ibeis.testdata_cmlist('PZ_MTEST', a='default:qsize=5,dsize=20')
>>> review_cfg = dict(ranks_top=5, directed=True, name_scoring=False,
>>> filter_reviewed=False, filter_true_matches=True)
>>> review_edges = get_review_edges(cm_list, review_cfg=review_cfg, ibs=ibs)
>>> print(review_edges)
Example3:
>>> # UNSTABLE_DOCTEST
>>> from ibeis.gui.id_review_api import * # NOQA
>>> import ibeis
>>> cm_list, qreq_ = ibeis.testdata_cmlist('PZ_MTEST', a='default:qsize=1,dsize=100')
>>> review_cfg = dict(ranks_top=1, directed=False, name_scoring=False,
>>> filter_reviewed=False, filter_true_matches=True)
>>> review_edges = get_review_edges(cm_list, review_cfg=review_cfg, ibs=ibs)
>>> print(review_edges)
Example4:
>>> # UNSTABLE_DOCTEST
>>> from ibeis.gui.id_review_api import * # NOQA
>>> import ibeis
>>> cm_list, qreq_ = ibeis.testdata_cmlist('PZ_MTEST', a='default:qsize=10,dsize=10')
>>> ranks_top = 3
>>> review_cfg = dict(ranks_top=3, directed=False, name_scoring=False,
>>> filter_reviewed=False, filter_true_matches=True)
>>> review_edges = get_review_edges(cm_list, review_cfg=review_cfg, ibs=ibs)
>>> print(review_edges)
"""
import vtool_ibeis as vt
from ibeis.algo.hots import chip_match
automatch_kw = REVIEW_CFG_DEFAULTS.copy()
automatch_kw = ut.update_existing(automatch_kw, review_cfg)
print('[resorg] get_review_edges(%s)' % (ut.repr2(automatch_kw)))
print('[resorg] len(cm_list) = %d' % (len(cm_list)))
qaids_stack = []
daids_stack = []
ranks_stack = []
scores_stack = []
# For each QueryResult, Extract inspectable candidate matches
if isinstance(cm_list, dict):
cm_list = list(cm_list.values())
if len(cm_list) == 0:
return ([], [], [], [])
for cm in cm_list:
if isinstance(cm, chip_match.ChipMatch):
daids = cm.get_top_aids(ntop=automatch_kw['ranks_top'])
scores = cm.get_top_scores(ntop=automatch_kw['ranks_top'])
ranks = np.arange(len(daids))
qaids = np.full(daids.shape, cm.qaid, dtype=daids.dtype)
else:
(qaids, daids, scores, ranks) = cm.get_match_tbldata(
ranks_top=automatch_kw['ranks_top'],
name_scoring=automatch_kw['name_scoring'],
ibs=ibs)
qaids_stack.append(qaids)
daids_stack.append(daids)
scores_stack.append(scores)
ranks_stack.append(ranks)
# Stack them into a giant array
qaid_arr = np.hstack(qaids_stack)
daid_arr = np.hstack(daids_stack)
score_arr = np.hstack(scores_stack)
rank_arr = np.hstack(ranks_stack)
# Sort by scores
sortx = score_arr.argsort()[::-1]
qaid_arr = qaid_arr[sortx]
daid_arr = daid_arr[sortx]
score_arr = score_arr[sortx]
rank_arr = rank_arr[sortx]
# IS_REVIEWED DOES NOT WORK
if automatch_kw['filter_reviewed']:
_is_reviewed = ibs.get_annot_pair_is_reviewed(qaid_arr.tolist(),
daid_arr.tolist())
is_unreviewed = ~np.array(_is_reviewed, dtype=np.bool)
qaid_arr = qaid_arr.compress(is_unreviewed)
daid_arr = daid_arr.compress(is_unreviewed)
score_arr = score_arr.compress(is_unreviewed)
rank_arr = rank_arr.compress(is_unreviewed)
# Remove directed edges
if not automatch_kw['directed']:
#nodes = np.unique(directed_edges.flatten())
directed_edges = np.vstack((qaid_arr, daid_arr)).T
#idx1, idx2 = vt.intersect2d_indices(directed_edges, directed_edges[:, ::-1])
unique_rowx = vt.find_best_undirected_edge_indexes(directed_edges,
score_arr)
qaid_arr = qaid_arr.take(unique_rowx)
daid_arr = daid_arr.take(unique_rowx)
score_arr = score_arr.take(unique_rowx)
rank_arr = rank_arr.take(unique_rowx)
# Filter Double Name Matches
if automatch_kw['filter_duplicate_true_matches']:
# filter_dup_namepairs
qnid_arr = ibs.get_annot_nids(qaid_arr)
dnid_arr = ibs.get_annot_nids(daid_arr)
if not automatch_kw['directed']:
directed_name_edges = np.vstack((qnid_arr, dnid_arr)).T
unique_rowx2 = vt.find_best_undirected_edge_indexes(
directed_name_edges, score_arr)
else:
namepair_id_list = np.array(vt.compute_unique_data_ids_(
list(zip(qnid_arr, dnid_arr))))
unique_namepair_ids, namepair_groupxs = vt.group_indices(namepair_id_list)
score_namepair_groups = vt.apply_grouping(score_arr, namepair_groupxs)
unique_rowx2 = np.array(sorted([
groupx[score_group.argmax()]
for groupx, score_group in zip(namepair_groupxs, score_namepair_groups)
]), dtype=np.int32)
qaid_arr = qaid_arr.take(unique_rowx2)
daid_arr = daid_arr.take(unique_rowx2)
score_arr = score_arr.take(unique_rowx2)
rank_arr = rank_arr.take(unique_rowx2)
# Filter all true matches
if automatch_kw['filter_true_matches']:
qnid_arr = ibs.get_annot_nids(qaid_arr)
dnid_arr = ibs.get_annot_nids(daid_arr)
valid_flags = qnid_arr != dnid_arr
qaid_arr = qaid_arr.compress(valid_flags)
daid_arr = daid_arr.compress(valid_flags)
score_arr = score_arr.compress(valid_flags)
rank_arr = rank_arr.compress(valid_flags)
    if automatch_kw['filter_photobombs']:
        unique_aids = ut.unique(ut.flatten([qaid_arr, daid_arr]))
#grouped_aids, unique_nids = ibs.group_annots_by_name(unique_aids)
invalid_nid_map = get_photobomber_map(ibs, qaid_arr)
        nid2_aids = ut.group_items(unique_aids, ibs.get_annot_nids(unique_aids))
expanded_aid_map = ut.ddict(set)
for nid1, other_nids in invalid_nid_map.items():
for a
|
breuderink/psychic
|
psychic/plots.py
|
Python
|
bsd-3-clause
| 1,878
| 0.014377
|
import matplotlib.pyplot as plt
import numpy as np
import scalpplot
from scalpplot import plot_scalp
from positions import POS_10
|
_5
from scipy import signal
def plot_timeseries(frames, time=None, offset=None, color='k', linestyle='-'):
frames = np.asarray(frames)
if
|
offset is None:
offset = np.max(np.std(frames, axis=0)) * 3
  if time is None:
time = np.arange(frames.shape[0])
plt.plot(time, frames - np.mean(frames, axis=0) +
np.arange(frames.shape[1]) * offset, color=color, ls=linestyle)
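# Quick illustration (synthetic data, shapes assumed for the example): plot
# 5 channels of 100 samples, each channel vertically offset from the last.
#   frames = np.random.randn(100, 5)
#   plot_timeseries(frames)
#   plt.show()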
def plot_scalpgrid(scalps, sensors, locs=POS_10_5, width=None,
clim=None, cmap=None, titles=None):
'''
Plots a grid with scalpplots. Scalps contains the different scalps in the
rows, sensors contains the names for the columns of scalps, locs is a dict
that maps the sensor-names to locations.
Width determines the width of the grid that contains the plots. Cmap selects
a colormap, for example plt.cm.RdBu_r is very useful for AUC-ROC plots.
  Clim is a list containing the minimum and maximum value mapped to a color.
Titles is an optional list with titles for each subplot.
Returns a list with subplots for further manipulation.
'''
scalps = np.asarray(scalps)
assert scalps.ndim == 2
nscalps = scalps.shape[0]
subplots = []
if not width:
width = int(min(8, np.ceil(np.sqrt(nscalps))))
height = int(np.ceil(nscalps/float(width)))
if not clim:
clim = [np.min(scalps), np.max(scalps)]
plt.clf()
for i in range(nscalps):
subplots.append(plt.subplot(height, width, i + 1))
plot_scalp(scalps[i], sensors, locs, clim=clim, cmap=cmap)
if titles:
plt.title(titles[i])
# plot colorbar next to last scalp
bb = plt.gca().get_position()
plt.colorbar(cax=plt.axes([bb.xmax + bb.width/10, bb.ymin, bb.width/10,
bb.height]), ticks=np.linspace(clim[0], clim[1], 5).round(2))
return subplots
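# Minimal usage sketch (hypothetical data; the sensor names below are
# assumptions for illustration and must exist as keys in POS_10_5):
#   sensors = ['Fz', 'Cz', 'Pz', 'Oz']
#   scalps = np.random.randn(4, len(sensors))
#   plot_scalpgrid(scalps, sensors, width=2, cmap=plt.cm.RdBu_r,
#                  titles=['scalp %d' % i for i in range(4)])
#   plt.show()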
|
minesense/VisTrails
|
vistrails/packages/spreadsheet/init.py
|
Python
|
bsd-3-clause
| 8,965
| 0.004016
|
###############################################################################
##
## Copyright (C) 2014-2016, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: [email protected]
##
## This file is part of VisTrails.
##
## "Re
|
distribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditio
|
ns are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
"""Spreadsheet Package for VisTrails
"""
from __future__ import division
import copy
import os
from PyQt4 import QtCore, QtGui
import sys
from vistrails.core import debug
from vistrails.core.modules import basic_modules
from vistrails.core.modules.module_registry import get_module_registry
from vistrails.core.modules.utils import create_descriptor_string
from vistrails.core.system import vistrails_root_directory
from vistrails.core.upgradeworkflow import UpgradeWorkflowHandler, \
UpgradePackageRemap, UpgradeModuleRemap
from .spreadsheet_controller import spreadsheetController
from .spreadsheet_registry import spreadsheetRegistry
# This must be here because of VisTrails protocol
basicWidgets = None
def importReturnLast(name):
""" importReturnLast(name: str) -> package
    Import a package whose name is specified in name and return the
    right-most module of the dotted package name
"""
mod = __import__(name)
components = name.split('.')
for comp in components[1:]:
mod = getattr(mod, comp)
return mod
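# For instance (standard-library example, not part of the original file):
# importReturnLast('os.path') imports 'os' and returns the os.path module,
# so importReturnLast('os.path').join('a', 'b') yields a joined path.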
def addWidget(packagePath):
""" addWidget(packagePath: str) -> package
Add a new widget type to the spreadsheet registry supplying a
basic set of spreadsheet widgets
"""
try:
registry = get_module_registry()
widget = importReturnLast(packagePath)
if hasattr(widget, 'widgetName'):
widgetName = widget.widgetName()
else:
widgetName = packagePath
widget.registerWidget(registry, basic_modules, basicWidgets)
spreadsheetRegistry.registerPackage(widget, packagePath)
        debug.log(' ==> Successfully imported <%s>' % widgetName)
except Exception, e:
debug.log(' ==> Ignored package <%s>' % packagePath, e)
widget = None
return widget
def importWidgetModules(basicWidgets):
""" importWidgetModules(basicWidgets: widget) -> None
    Find all widget packages under ./widgets/* and add them to the spreadsheet registry
"""
packageName = __name__.lower().endswith('.init') and \
__name__[:-5] or __name__
widgetDir = os.path.join(
os.path.join(os.path.dirname(vistrails_root_directory()),
*packageName.split('.')),
'widgets')
candidates = os.listdir(widgetDir)
for folder in candidates:
if os.path.isdir(os.path.join(widgetDir, folder)) and folder != '.svn':
addWidget('.'.join([packageName, 'widgets', folder]))
def initialize(*args, **keywords):
""" initialize() -> None
Package-entry to initialize the package
"""
import vistrails.core.application
if not vistrails.core.application.is_running_gui():
raise RuntimeError, "GUI is not running. The Spreadsheet package requires the GUI"
# initialize widgets
debug.log('Loading Spreadsheet widgets...')
global basicWidgets
    if basicWidgets is None:
basicWidgets = addWidget('vistrails.packages.spreadsheet.basic_widgets')
importWidgetModules(basicWidgets)
def menu_items():
"""menu_items() -> tuple of (str,function)
It returns a list of pairs containing text for the menu and a
callback function that will be executed when that menu item is selected.
"""
def show_spreadsheet():
spreadsheetWindow = spreadsheetController.findSpreadsheetWindow()
spreadsheetWindow.show()
spreadsheetWindow.activateWindow()
spreadsheetWindow.raise_()
lst = []
lst.append(("Show Spreadsheet", show_spreadsheet))
return tuple(lst)
def finalize():
spreadsheetWindow = spreadsheetController.findSpreadsheetWindow(
show=False, create=False)
if spreadsheetWindow is not None:
### DO NOT ADD BACK spreadsheetWindow.destroy()
### That will crash VisTrails on Mac.
### It is not supposed to be called directly
spreadsheetWindow.cleanup()
spreadsheetWindow.deleteLater()
def upgrade_cell_to_output(module_remap, module_id, pipeline,
old_name, new_module,
end_version, input_port_name,
start_version=None, output_version=None):
"""This function upgrades a *Cell module to a *Output module.
The upgrade only happens if the original module doesn't have any connection
on the cell input ports that can't be translated.
This is to ease the transition to *Output modules, but we don't want (or
need) to break anything; the *Cell modules still exist, so they can stay.
"""
if not isinstance(module_remap, UpgradePackageRemap):
module_remap = UpgradePackageRemap.from_dict(module_remap)
old_module = pipeline.modules[module_id]
old_module_name = create_descriptor_string(old_module.package,
old_module.name,
old_module.namespace,
False)
if old_module_name != old_name:
return module_remap
used_input_ports = set(old_module.connected_input_ports.keys())
for func in old_module.functions:
used_input_ports.add(func.name)
if used_input_ports != set([input_port_name]):
return module_remap
_old_remap = module_remap
module_remap = copy.copy(module_remap)
assert _old_remap.remaps is not module_remap.remaps
remap = UpgradeModuleRemap(start_version, end_version, output_version,
module_name=old_name,
new_module=new_module)
remap.add_remap('dst_port_remap', input_port_name, 'value')
remap.add_remap('function_remap', input_port_name, 'value')
module_remap.add_module_remap(remap)
return module_remap
def handle_module_upgrade_request(controller, module_id, pipeline):
module_remap = {
'CellLocation': [
(None, '0.9.3', None, {
'src_port_remap': {
'self': 'value'},
}),
],
'SheetReference': [
(None, '0.9.3', None, {
'src_port_remap': {
'self': 'value'},
}),
],
'SingleCellSheetReference': [
|
KL-WLCR/incubator-airflow
|
tests/operators/sensors.py
|
Python
|
apache-2.0
| 11,787
| 0.000339
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import sys
import time
import unittest
from datetime import timedelta
from mock import patch
from airflow import DAG, configuration, settings
from airflow.exceptions import (AirflowException,
AirflowSensorTimeout,
AirflowSkipException)
from airflow.models import TaskInstance
from airflow.operators.bash_operator import BashOperator
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.sensors import HttpSensor, BaseSensorOperator, HdfsSensor, ExternalTaskSensor
from airflow.utils.decorators import apply_defaults
from airflow.utils.state import State
from airflow.utils import timezone
from airflow.utils.timezone import datetime
try:
from unittest import mock
except ImportError:
try:
import mock
except ImportError:
mock = None
configuration.load_test_config()
DEFAULT_DATE = datetime(2015, 1, 1)
TEST_DAG_ID = 'unit_test_dag'
class TimeoutTestSensor(BaseSensorOperator):
"""
Sensor that always returns the return_value provided
:param return_value: Set to true to mark the task as SKIPPED on failure
:type return_value: any
"""
@apply_defaults
def __init__(
self,
return_value=False,
*args,
**kwargs):
self.return_value = return_value
super(TimeoutTestSensor, self).__init__(*args, **kwargs)
def poke(self, context):
return self.return_value
def execute(self, context):
started_at = timezone.utcnow()
time_jump = self.params.get('time_jump')
while not self.poke(context):
if time_jump:
started_at -= time_jump
if (timezone.utcnow() - started_at).total_seconds() > self.timeout:
if self.soft_fail:
raise AirflowSkipException('Snap. Time is OUT.')
else:
raise AirflowSensorTimeout('Snap. Time is OUT.')
time.sleep(self.poke_interval)
self.log.info("Success criteria met. Exiting.")
class SensorTimeoutTest(unittest.TestCase):
def setUp(self):
configuration.load_test_config()
args = {
'owner': 'airflow',
'start_date': DEFAULT_DATE
}
dag = DAG(TEST_DAG_ID, default_args=args)
self.dag = dag
def test_timeout(self):
t = TimeoutTestSensor(
task_id='test_timeout',
execution_timeout=timedelta(days=2),
return_value=False,
poke_interval=5,
params={'time_jump': timedelta(days=2, seconds=1)},
dag=self.dag
)
self.assertRaises(
AirflowSensorTimeout,
t.run,
start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
class HttpSensorTests(unittest.TestCase):
def setUp(self):
configuration.load_test_config()
args = {
'owner': 'airflow',
'start_date': DEFAULT_DATE
}
dag = DAG(TEST_DAG_ID, default_args=args)
self.dag = dag
def test_poke_exception(self):
"""
Exception occurs in poke function should not be ignored.
"""
def resp_check(resp):
raise AirflowException('AirflowException raised here!')
task = HttpSensor(
task_id='http_sensor_poke_exception',
http_conn_id='http_default',
endpoint='',
request_params={},
response_check=resp_check,
poke_interval=5)
with self.assertRaisesRegexp(AirflowException, 'AirflowException raised here!'):
task.execute(None)
@patch("airflow.hooks.http_hook.requests.Session.send")
def test_head_method(self, mock_session_send):
def resp_check(resp):
return True
task = HttpSensor(
dag=self.dag,
task_id='http_sensor_head_method',
http_conn_id='http_default',
endpoint='',
request_params={},
method='HEAD',
response_check=resp_check,
timeout=5,
poke_interval=1)
import requests
task.execute(None)
args, kwargs = mock_session_send.call_args
received_request = args[0]
prep_request = requests.Request(
'HEAD',
'https://www.google.com',
{}).prepare()
self.assertEqual(prep_request.url, received_request.url)
        self.assertEqual(prep_request.method, received_request.method)
@patch("airflow.hooks.http_hook.requests.Session.send")
def test_logging_head_error_request(
self,
mock_session_send
):
def resp_check(resp):
return True
import requests
response = requests.Response()
response.status_code = 404
response.reason = 'Not Found'
mock_session_send.return_value = response
task = HttpSensor(
dag=self.dag,
task_id='http_sensor_head_method',
http_conn_id='http_default',
endpoint='',
request_params={},
method='HEAD',
response_check=resp_check,
timeout=5,
poke_interval=1
)
with mock.patch.object(task.hook.log, 'error') as mock_errors:
with self.assertRaises(AirflowSensorTimeout):
task.execute(None)
self.assertTrue(mock_errors.called)
mock_errors.assert_called_with('HTTP error: %s', 'Not Found')
class HdfsSensorTests(unittest.TestCase):
def setUp(self):
from tests.core import Fa
|
keHDFSHook
self.hook = FakeHDFSHook
def test_legacy_file_exist(self):
"""
Test the legacy behaviour
:return:
"""
# Given
logging.info("Test for existing file with the legacy behaviour")
# When
task = HdfsSens
|
or(task_id='Should_be_file_legacy',
filepath='/datadirectory/datafile',
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
task.execute(None)
# Then
        # Nothing happens; nothing is raised, so execution is OK
def test_legacy_file_exist_but_filesize(self):
"""
Test the legacy behaviour with the filesize
:return:
"""
# Given
logging.info("Test for existing file with the legacy behaviour")
# When
task = HdfsSensor(task_id='Should_be_file_legacy',
filepath='/datadirectory/datafile',
timeout=1,
file_size=20,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
# When
# Then
with self.assertRaises(AirflowSensorTimeout):
task.execute(None)
def test_legacy_file_does_not_exists(self):
"""
Test the legacy behaviour
:return:
"""
# Given
logging.info("Test for non existing file with the legacy behaviour")
task = HdfsSensor(task_id='Should_not_be_file_legacy',
filepath='/datadirectory/not_existing_file_or_directory',
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
|
BladeSmithJohn/nixysa
|
nixysa/cpp_utils_unittest.py
|
Python
|
apache-2.0
| 1,879
| 0.002661
|
#!/usr/bin/python2.4
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy o
|
f the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for cpp_uti
|
ls."""
import unittest
import cpp_utils
template = """test1
${#Test}
test2"""
template_reuse = """test1
${#Test}
test2
${#Test}
test3"""
class CppFileWriterUnitTest(unittest.TestCase):
def setUp(self):
self.writer = cpp_utils.CppFileWriter('a.cc', False)
def tearDown(self):
pass
def testSectionTemplate(self):
section = self.writer.CreateSection('test')
section.EmitTemplate(template)
self.assertNotEquals(section.GetSection('Test'), None)
test_section = section.GetSection('Test')
test_section.EmitCode('test3')
lines = section.GetLines()
self.assertTrue(lines[0] == 'test1')
self.assertTrue(lines[1] == 'test3')
self.assertTrue(lines[2] == 'test2')
def testSectionTemplateReuse(self):
section = self.writer.CreateSection('test')
section.EmitTemplate(template_reuse)
self.assertNotEquals(section.GetSection('Test'), None)
test_section = section.GetSection('Test')
test_section.EmitCode('test4')
lines = section.GetLines()
self.assertTrue(lines[0] == 'test1')
self.assertTrue(lines[1] == 'test4')
self.assertTrue(lines[2] == 'test2')
self.assertTrue(lines[3] == 'test4')
self.assertTrue(lines[4] == 'test3')
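# Sketch of the section-template mechanism the tests above exercise (method
# names taken from the tests; behaviour inferred from the assertions):
#   section.EmitTemplate('a\n${#S}\nb')   # ${#S} creates named sub-section S
#   section.GetSection('S').EmitCode('x') # emitted code lands at each marker
#   section.GetLines()                    # -> ['a', 'x', 'b']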
if __name__ == '__main__':
unittest.main()
|
nop33/indico
|
indico/modules/networks/util.py
|
Python
|
gpl-3.0
| 1,020
| 0
|
# This file is part of Indico.
# Copyright (C) 2002 - 2017 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your
|
option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
def serialize
|
_ip_network_group(group):
"""Serialize group to JSON-like object"""
return {
'id': group.id,
'name': group.name,
'identifier': 'IPNetworkGroup:{}'.format(group.id),
'_type': 'IPNetworkGroup'
}
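# Example of the resulting shape for a hypothetical group with id=7 and
# name='CERN':
#   {'id': 7, 'name': 'CERN', 'identifier': 'IPNetworkGroup:7',
#    '_type': 'IPNetworkGroup'}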
|
sxjscience/tvm
|
python/tvm/relay/op/contrib/arm_compute_lib.py
|
Python
|
apache-2.0
| 12,300
| 0.000976
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, unused-argument
"""Arm Compute Library supported operators."""
import tvm
from tvm.relay.expr import const
from tvm.relay import transform
from tvm.relay.build_module import bind_params_by_name
from ...dataflow_pattern import wildcard, is_op, is_constant, is_expr
from .register import register_pattern_table
def is_arm_compute_runtime_enabled():
"""Check if the ACL graph runtime is present.
Returns
-------
ret: bool
True if present, False if not.
"""
check_enabled = tvm.get_global_func("relay.op.is_arm_compute_runtime_enabled", True)
if check_enabled:
return check_enabled()
return False
def partition_for_arm_compute_lib(mod, params=None):
"""Partition the graph greedily offloading supported
operators to Arm Compute Library.
Parameters
----------
mod : Module
The module to run passes on.
params : Optional[Dict[str, NDArray]]
Constant input parameters.
Returns
-------
ret : annotated and partitioned module.
"""
if params:
mod["main"] = bind_params_by_name(mod["main"], params)
seq = tvm.transform.Sequential(
[
transform.InferType(),
transform.MergeComposite(arm_compute_lib_pattern_table()),
transform.AnnotateTarget("arm_compute_lib"),
transform.PartitionGraph(),
]
)
return seq(mod)
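# Typical use (sketch; assumes a Relay module `mod` and `params` produced by
# a frontend importer such as relay.frontend.from_tflite):
#   mod = partition_for_arm_compute_lib(mod, params)
#   lib = tvm.relay.build(mod, target="llvm -mtriple=aarch64-linux-gnu")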
@register_pattern_table("arm_compute_lib")
def arm_compute_lib_pattern_table():
"""Get the ACL pattern table."""
def conv_pattern():
"""Create a convolution pattern.
Returns
-------
pattern : dataflow_pattern.AltPattern
Denotes the convolution pattern.
"""
pattern = is_op("nn.pad")(wildcard()) | wildcard()
pattern = is_op("nn.conv2d")(pattern, is_constant())
pattern = pattern.optional(lambda x: is_op("nn.bias_add")(x, is_constant()))
pattern = pattern.optional(is_op("nn.relu"))
return pattern
def qnn_conv_pattern():
"""Create a quantized convolution pattern.
Returns
-------
pattern : dataflow_pattern.AltPattern
Denotes the convolution pattern.
"""
pattern = is_op("nn.pad")(wildcard()) | wildcard()
pattern = is_op("qnn.conv2d")(
pattern, is_constant(), is_constant(), is_constant(), is_constant(), is_constant()
)
pattern = pattern.optional(lambda x: is_op("nn.bias_add")(x, is_constant()))
pattern = pattern.optional(is_op("nn.relu"))
pattern = is_op("qnn.requantize")(
pattern, wildcard(), wildcard(), is_constant(), is_constant()
)
return pattern
def dense_pattern():
"""Create a dense (fully-connected) pattern.
Returns
-------
pattern : dataflow_pattern.AltPattern
            Denotes the dense pattern.
"""
pattern = is_op("nn.dense")(wildcard(), is_constant())
pattern = pattern.optional(lambda x: is_op("nn.bias_add")(x, is_constant()))
return pattern
def qnn_dense_pattern():
"""Create a quantized dense (fully-connected) pattern.
Returns
-------
pattern : dataflow_pattern.AltPattern
            Denotes the quantized dense pattern.
"""
pattern = is_op("qnn.dense")(
wildcard(), is_constant(), is_constant(), is_constant(), is_constant(), is_constant()
)
pattern = pattern.optional(lambda x: is_op("nn.bias_add")(x, is_constant()))
pattern = is_op("qnn.requantize")(
pattern, wildcard(), wildcard(), is_constant(), is_constant()
)
return pattern
def avg_pool2d_pattern():
"""Creates a pattern that matches either quantized
avg_pool2d or quantized global_avg_pool2d.
Returns
-------
pattern : dataflow_pattern.AltPattern
            Denotes the average pooling pattern.
"""
pattern = is_op("cast")(wildcard())
pattern = is_op("nn.avg_pool2d")(pattern) | is_op("nn.global_avg_pool2d")(pattern)
pattern = is_op("cast")(pattern)
return pattern
def l2_pool2d_pattern():
"""Create an l2 pooling pattern from equivalent relay operators.
Returns
-------
pattern : dataflow_pattern.AltPattern
            Denotes the l2 pooling pattern.
"""
pattern = is_op("power")(wildcard(), is_expr(const(2.0)))
pattern = is_op("nn.avg_pool2d")(pattern)
pattern = is_op("sqrt")(pattern)
return pattern
def check_conv(extract):
"""Check conv pattern is supported by ACL."""
call = extract
while call.op.name != "nn.conv2d":
call = call.args[0]
return
|
conv2d(call.attrs, call.args)
def check_qnn_conv(extract):
"""Check qnn conv pattern is supported by ACL."""
if extract.attrs.out_dtype != "uint8":
return False
call = extract
while call.op.name != "qnn.conv2d":
call = call.args[0]
return qnn_conv2d(call.at
|
trs, call.args)
def check_dense(extract):
"""Check conv pattern is supported by ACL."""
call = extract
while call.op.name != "nn.dense":
call = call.args[0]
return dense(call.attrs, call.args)
def check_qnn_dense(extract):
"""Check qnn conv pattern is supported by ACL."""
if extract.attrs.out_dtype != "uint8":
return False
call = extract
while call.op.name != "qnn.dense":
call = call.args[0]
return qnn_dense(call.attrs, call.args)
def check_avg_pool2d(extract):
"""Check average pool2d pattern is supported by ACL."""
if extract.attrs.dtype != "uint8":
return False
pool = extract.args[0]
if pool.args[0].attrs.dtype != "int32":
return False
return avg_pool2d(pool.attrs, pool.args, from_quantized_composite=True)
def check_l2_pool2d(extract):
"""Check l2 pool2d pattern is supported by ACL."""
pool = extract.args[0]
return avg_pool2d(pool.attrs, pool.args)
return [
("arm_compute_lib.conv2d", conv_pattern(), check_conv),
("arm_compute_lib.qnn_conv2d", qnn_conv_pattern(), check_qnn_conv),
("arm_compute_lib.dense", dense_pattern(), check_dense),
("arm_compute_lib.qnn_dense", qnn_dense_pattern(), check_qnn_dense),
("arm_compute_lib.qnn_conv2d", qnn_conv_pattern(), check_qnn_conv),
("arm_compute_lib.avg_pool2d", avg_pool2d_pattern(), check_avg_pool2d),
("arm_compute_lib.l2_pool2d", l2_pool2d_pattern(), check_l2_pool2d),
]
def _register_external_op_helper(op_name, supported=True):
@tvm.ir.register_op_attr(op_name, "target.arm_compute_lib")
def _func_wrapper(attrs, args):
return supported
return _func_wrapper
_register_external_op_helper("reshape")
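# The helper above marks an op as unconditionally supported by attaching the
# "target.arm_compute_lib" attribute; another always-supported op would be
# registered the same way, e.g. _register_external_op_helper("nn.relu").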
@tvm.ir.register_op_attr("nn.conv2d", "target.arm_compute_lib")
def conv2d(attrs, args):
"""Check if the external ACL codegen for conv2d should be used."""
if attrs.groups != 1:
return False
if attrs.data_layout != "NHWC":
return False
if attrs.out_dtype != "float32" and attrs.out_dtype != "":
return False
data_typ = args[0].
|
joe-jordan/minimalisp
|
setup.py
|
Python
|
mit
| 377
| 0.023873
|
#!/usr/bin/env python
from distu
|
tils.core import setup
setup(name='minimalisp',
version='1.0',
description='An implementation of a small lisp language',
author='Joe Jordan',
author_email='t
|
[email protected]',
url='https://github.com/joe-jordan/minimalisp',
packages=['minimalisp'],
scripts=['scripts/minimalisp'],
include_package_data=True
)
|
unbracketed/snowbird
|
snowbird/analyzer.py
|
Python
|
mit
| 112
| 0.017857
|
def
|
get_related_fields(model):
pass
def get_table_size(model):
pass
def get_row_size(model):
|
pass
|
xuender/test
|
testAdmin/itest/migrations/0006_auto__chg_field_test_content.py
|
Python
|
apache-2.0
| 1,755
| 0.006838
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Test.content'
db.alter_column(u'itest_test', 'content', self.gf('django.db.models.fields.CharField')(max_length=1850, null=True))
def backwards(self, orm):
# Changing field 'Test.content'
db.alter_column(u'itest_test', 'content', self.gf('django.db.models.fields.CharField')(max_length=850, null=True))
models = {
'itest.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'word': ('django.db.models.fields.CharField', [], {'max_length'
|
: '35'})
},
'itest.test': {
'Meta': {'object_name': 'Test'},
'content':
|
('django.db.models.fields.CharField', [], {'max_length': '1850', 'null': 'True', 'blank': 'True'}),
'create_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'num': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '450', 'null': 'True', 'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'tests'", 'symmetrical': 'False', 'to': "orm['itest.Tag']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '150'})
}
}
complete_apps = ['itest']
|
jopohl/urh
|
src/urh/ui/views/ZoomAndDropableGraphicView.py
|
Python
|
gpl-3.0
| 2,692
| 0.000743
|
from PyQt5.QtCore import pyqtSignal
from PyQt5.QtGui import QDragEnterEvent, QDropEvent
from urh.signalprocessing.IQArray import IQArray
from urh.cythonext import util
from urh.signalprocessing.ProtocolAnalyzer import ProtocolAnalyzer
from urh.signalprocessing.Signal import Signal
from urh.ui.painting.SignalSceneManager import SignalSceneManager
from urh.ui.views.ZoomableGraphicView import ZoomableGraphicView
class ZoomAndDropableGraphicView(ZoomableGraphicView):
signal_loaded = pyqtSignal(ProtocolAnalyzer)
def __init__(self, parent=None):
self.signal_tree_root = None # type: ProtocolTreeItem
self.scene_manager = None
self.signal = None # type: Signal
self.proto_analyzer = None # type: ProtocolAnalyzer
super().__init__(parent)
def dragEnterEvent(self, event: QDragEnterEvent):
event.acceptProposedAction()
def dropEvent(self, event: QDropEvent):
mime_data = event.mimeData()
data_str = str(mime_data.text())
indexes = list(data_str.split("/")[:-1])
signal = None
proto_analyzer = None
for index in indexes:
row, column, parent = map(int, index.split(","))
if parent == -1:
parent = self.signal_tree_root
else:
parent = self.signal_tree_root.child(parent)
node = parent.child(row)
if node.protocol is not None and node.protocol.signal is not None:
signal = node.protocol.signal
proto_analyzer = node.protocol
break
        if signal is None:
            return
self.signal = signal # type: Signal
self.proto_analyzer = proto_analyzer # type: ProtocolAnalyzer
self.scene_manager = Sig
|
nalSceneManager(signal, self)
self.plot_data(self.signal.real_plot_data)
self.show_full_scene()
self.auto_fit_view()
self.signal_loaded.emit(self.proto_analyzer)
def auto_fit_view(self):
super().auto_fit_view()
plot_min, plot_max = util
|
.minmax(self.signal.real_plot_data)
data_min, data_max = IQArray.min_max_for_dtype(self.signal.real_plot_data.dtype)
self.scale(1, (data_max - data_min) / (plot_max-plot_min))
self.centerOn(self.view_rect().x() + self.view_rect().width() / 2, self.y_center)
def eliminate(self):
# Do _not_ call eliminate() for self.signal and self.proto_analyzer
# as these are references to the original data!
self.signal = None
self.proto_analyzer = None
self.signal_tree_root = None
super().eliminate()
|
How2Compute/SmartHome
|
hub/Models.py
|
Python
|
mit
| 2,593
| 0.008484
|
"""
Database Models Library
"""
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
# Model to store information about devices
class Device(db.Model):
__tablename__ = 'clients'
id = db.Column(db.Integer, primary_key = True)
name = db.Column(db.Text)
api_key = db.Column(db.Text)
active = db.Column(db.Boolean, default=False)
access_level = db.Column(db.Integer)
status = db.Column(db.Integer)
def __init__(self, name, permission_level):
self.name = name
self.access_level = permission_level
self.api_key = generate_api_token()
# Model to store notifications
class Notification(db.Model):
__tablename__ = 'notifications'
|
id = db.Column(db.Integer, primary_key = True)
user_id = db.Column(db.Integer)
category = db.Column(db.Text)
title = db.Column(db.Text)
body = db.Column(db.Text)
callback_url = db.Column(db.Text)
dismissed = db.Column(db.Boolean, default=0)
timestamp = db.Column(db.DateTime)
# NOTE -120 -> all admins (also TODO when implementing GUI)
# NOTE -121 -> all users
def __init__(self, user_id, category, title, body, callback_ur
|
l):
self.user_id = user_id
self.category = category
self.title = title
self.body = body
self.callback_url = callback_url
# Down here to avoid issues with circular dependencies
from helpers import generate_api_token
class Preference(db.Model):
__tablename__ = 'preferences'
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
device_id = db.Column(db.Integer, db.ForeignKey('clients.id'))
key = db.Column(db.Text)
value = db.Column(db.Text)
access_required = db.Column(db.Integer)
def __init__(self, user_id, device_id, key, value, access_required):
self.user_id = user_id
self.device_id = device_id
self.key = key
self.value = value
self.access_required = access_required
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key = True)
username = db.Column(db.Text)
password = db.Column(db.Text)
last_login = db.Column(db.DateTime)
create_date = db.Column(db.DateTime)
access_level = db.Column(db.Integer)
preferences = db.relationship('Preference', backref='user', lazy='joined')
def __init__(self, username, password, access_level):
self.username = username
self.password = password
self.access_level = access_level
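# Minimal setup sketch (assumes a Flask app object `app` configured with
# SQLALCHEMY_DATABASE_URI; the names below are illustrative only):
#   db.init_app(app)
#   with app.app_context():
#       db.create_all()
#       db.session.add(Device('thermostat', permission_level=1))
#       db.session.commit()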
|
prculley/gramps
|
gramps/gui/editors/__init__.py
|
Python
|
gpl-2.0
| 4,028
| 0.001986
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2006 Donald N. Allingham
# Copyright (C) 2011 Tim G L Lyons
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# gui/editors/__init__.py
from .editaddress import EditAddress
from .editattribute import EditAttribute, EditSrcAttribute
from .editchildref import EditChildRef
from .editcitation import EditCitation, DeleteCitationQuery
from .editdate import EditDate
from .editevent import EditEvent, DeleteEventQuery
from .editeventref import EditEventRef
from .editfamily import EditFamily
from .editldsord import EditLdsOrd, EditFamilyLdsOrd
from .editlocation import EditLocation
from .editmedia import EditMedia, DeleteMediaQuery
from .editmediaref import EditMediaRef
from .editname import EditName
from .editnote import EditNote, DeleteNoteQuery
from .editperson import EditPerson
from .editpersonref import EditPersonRef
from .editplace import EditPlace, DeletePlaceQuery
from .editplacename import EditPlaceName
from .editplaceref import EditPlaceRef
from .editrepository import EditRepository, DeleteRepositoryQuery
from .editreporef import EditRepoRef
from .editsource import EditSource, DeleteSrcQuery
from .edittaglist import EditTagList
from .editurl import EditUrl
from .editlink import EditLink
from .filtereditor import FilterEditor, EditFilter
from gramps.gen.lib import (Person, Family, Event, Place, Repository, Source,
Citation, Media, Note)
# Map from gramps.gen.lib name to Editor:
EDITORS = {
'Person': EditPerson,
'Event': EditEvent,
'Family': EditFamily,
'Media': EditMedia,
'Source': EditSource,
'Citation': EditCitation,
'Place': EditPlace,
'Repository': EditRepository,
'Note': EditNote,
}
CLASSES = {
'Person': Person,
'Event': Event,
'Family': Family,
'Media': Media,
'Source': Source,
'Citation': Citation,
'Place': Place,
'Repository': Repository,
'Note': Note,
}
def EditObject(dbstate, uistate, track, obj_class, prop=None, value=None, callback=None):
"""
Generic Object Editor.
obj_class is Person, Source, Repository, etc.
prop is 'handle', 'gramps_id', or None (for new object)
value is string handle, string gramps_id, or None (for new object)
"""
import logging
LOG = logging.getLogger(".Edit")
if obj_class in EDITORS.keys():
if value is None:
            obj = CLASSES[obj_class]()
try:
EDITORS[obj_class](dbstate, uistate, track, obj, callback=callback)
except Exception as msg:
LOG.warning(str(msg))
elif prop in ("gramps_id", "handle"):
obj = dbstate.db.method('get_%s_from_%s', obj_class, prop)(value)
|
if obj:
try:
EDITORS[obj_class](dbstate, uistate, track, obj, callback=callback)
except Exception as msg:
LOG.warning(str(msg))
else:
LOG.warning("gramps://%s/%s/%s not found" %
(obj_class, prop, value))
else:
LOG.warning("unknown property to edit '%s'; "
"should be 'gramps_id' or 'handle'" % prop)
else:
LOG.warning("unknown o
|
bject to edit '%s'; "
"should be one of %s" % (obj_class, list(EDITORS.keys())))
|
peragro/peragro-rest
|
manage.py
|
Python
|
bsd-3-clause
| 289
| 0
|
#!/usr/bin/env python
from __future__ import absolute_import
import os
import sys
if __name__ == "__main__":
os.environ.setdefa
|
ult("DJANGO_SETTINGS_MODULE", "service.settings")
fr
|
om django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
brefsdal/sherpa
|
sherpa/models/tests/test_basic.py
|
Python
|
gpl-2.0
| 1,765
| 0.005099
|
#_PYTHON_INSERT_SAO_COPYRIGHT_HERE_(2007)_
#_PYTHON_INSERT_GPL_LICENSE_HERE_
from numpy import arange
import sherpa.models.basic as basic
from sherpa.utils import SherpaFloat, SherpaTestCase
from sherpa.models.model import ArithmeticModel
def userfunc(pars, x, *args, **kwargs):
return x
class test_basic(Sherp
|
aTestCase):
def test_create_and_evaluate(self):
x = arange(1.0, 5.0)
count = 0
for cls in dir(basic):
clsobj = getattr(basic, cls)
if ((not isinstance(clsobj, type)) or
(not issubclass(clsobj, ArithmeticModel)) or
(clsobj is ArithmeticModel)):
continue
# These have very dif
|
ferent interfaces than the others
if cls == 'Integrator1D' or cls == 'Integrate1D':
continue
m = clsobj()
if isinstance(m, basic.TableModel):
m.load(x,x)
if isinstance(m, basic.UserModel):
m.calc = userfunc
self.assertEqual(type(m).__name__.lower(), m.name)
count += 1
try:
if m.name.count('2d'):
pt_out = m(x, x)
int_out = m(x, x, x, x)
else:
if m.name in ('log', 'log10'):
xx = -x
else:
xx = x
pt_out = m(xx)
int_out = m(xx, xx)
except ValueError:
self.fail("evaluation of model '%s' failed" % cls)
for out in (pt_out, int_out):
self.assert_(out.dtype.type is SherpaFloat)
self.assertEqual(out.shape, x.shape)
self.assertEqual(count, 31)
|
phracek/devassistant
|
test/fixtures/files/crt/commands/a.py
|
Python
|
gpl-2.0
| 609
| 0.001642
|
from devassistant.command_runners import CommandRunner
from devassistant.logger import logger
class CR1(CommandRunner):
@classmethod
def matches(cls, c):
return c.comm_type == 'barbarbar'
@classmethod
def run(cls, c):
logger.info('CR1: Doing something ...')
x = c.input_res + 'bar'
return (True, x)
class CR2(CommandR
|
unner):
@classmethod
def matches(cls, c):
return c.comm_type == 'spamspamspam'
@classmethod
def run(cls, c):
logger.info('C
|
R2: Doing something ...')
x = c.input_res + 'spam'
return (True, x)
|
hagberg/nx3k
|
test_edges.py
|
Python
|
bsd-3-clause
| 4,105
| 0.031425
|
#
# TESTS
#
from nose.tools import assert_true, assert_equal, assert_raises
from mixedges import Edges, EdgeKeys, EdgeData, EdgeItems
class BaseEdgeTests(object):
def setup_edges(self):
self.edlist = [{1:"one"}, {1:"two"}, {1:"three"}, {1:"four"}]
ed1, ed2, ed3, ed4 = self.edlist
Ge = self.Ge
Ge.add(0,1,ed1)
Ge.add(0,0,ed2)
Ge.update([(1,0,ed3), (2,3,ed4)])
def test_iter_items(self):
Ge = self.Ge
ed1, ed2, ed3, ed4 = self.edlist
|
if Ge.directed:
ans = [(0,1), (0,0), (1,0), (2,3)]
else:
ans = [(0,1), (0,0), (2,3)]
assert_equal( sorted(Ge), sorted(ans))
if Ge.directed:
ans = [((0,1),ed1), ((0,0),ed2), ((1,0),ed3), ((2,3),ed4)]
else:
|
ans = [((0,1),ed3), ((0,0),ed2), ((2,3),ed4)]
print("succ:",Ge._succ)
print("pred:",Ge._pred)
print("items",list(Ge._items()))
assert_equal( sorted(Ge._items()), sorted(ans))
def test_view_data_keys(self):
Ge = self.Ge
ed1, ed2, ed3, ed4 = self.edlist
if Ge.directed:
ans = [((0,1),ed1), ((0,0),ed2), ((1,0),ed3), ((2,3),ed4)]
else:
ans = [((0,1),ed3), ((0,0),ed2), ((2,3),ed4)]
# iter
assert_equal( sorted(Ge.items()), sorted(ans))
assert_equal( sorted(Ge.data()), sorted(d for k,d in ans))
assert_equal( sorted(Ge.keys()), sorted(k for k,d in ans))
# contains
assert_true( (0,1) in Ge.keys() )
assert_true( (0,3) not in Ge.keys() )
assert_true( (0,8) not in Ge.keys() )
extras = [((0,1),{1:"none"}), ((2,3),ed4), ((0,8),ed3)]
assert_true( ed2 in Ge.data() )
assert_true( extras[0][1] not in Ge.data() )
assert_true( ((0,0),ed2) in Ge.items() )
assert_true( extras[0] not in Ge.items() )
assert_true( extras[1] in Ge.items() )
assert_true( extras[2] not in Ge.items() )
def test_len(self):
Ge = self.Ge
assert_equal(len(Ge), 4 if Ge.directed else 3)
assert_equal(len(Ge.items()), len(Ge))
assert_equal(len(Ge.data()), len(Ge))
assert_equal(len(Ge.keys()), len(Ge))
def test_contains_get(self):
Ge = self.Ge
ed1, ed2, ed3, ed4 = self.edlist
assert_true((0,1) in Ge)
assert_true((1,0) in Ge)
assert_true((2,3) in Ge)
assert_true((0,0) in Ge)
if Ge.directed:
assert_true((3,2) not in Ge)
else:
assert_true((3,2) in Ge)
assert_true((4,5) not in Ge)
assert_true((4,4) not in Ge)
# getitem
assert_true(Ge[(0,1)] == (ed1 if Ge.directed else ed3))
assert_true(Ge[(1,0)] == ed3)
assert_true(Ge[(2,3)] == ed4)
assert_true(Ge[(0,0)] == ed2)
def test_remove_clear(self):
Ge = self.Ge
Ge.remove(0,1)
assert_true((0,1) not in Ge)
if Ge.directed:
assert_true((1,0) in Ge)
else:
assert_true((1,0) not in Ge)
Ge.clear()
assert_equal(len(Ge._node), 5)
assert_equal(len(Ge), 0)
def test_set_ops(self):
Ge = self.Ge
extras = [(1,2), (0,1), (3,4)]
if Ge.directed:
edgs = [(0,1), (0,0), (1,0), (2,3)]
else:
edgs = [(0,1), (0,0), (2,3)]
assert_equal(Ge | extras, set(edgs) | set(extras) )
assert_equal(Ge & extras, set(edgs) & set(extras) )
assert_equal(Ge ^ extras, set(edgs) ^ set(extras) )
assert_equal(Ge - extras, set(edgs) - set(extras) )
assert_equal(extras - Ge, set(extras) - set(edgs) )
class TestDiEdges(BaseEdgeTests):
    def setUp(self):
        node = {4: {}}
        succ = {}
        pred = {}
        self.Ge = Edges(node, succ, pred, directed=True)
self.setup_edges()
class TestUndiEdges(BaseEdgeTests):
    def setUp(self):
        node = {4: {}}
        succ = {}
        pred = {}
        self.Ge = Edges(node, succ, pred, directed=False)
        self.setup_edges()
|
rapidhere/snake_game
|
snake_game.py
|
Python
|
gpl-3.0
| 1,496
| 0.012032
|
#!/usr/bin/python
# Copyright (C) 2013 rapidhere
#
# Author: rapidhere <[email protected]>
# Maintainer: rapidhere <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import skapp
from optparse import OptionParser
import sys
parser = OptionParser(
usage = "%prog [options]",
    description = """A simple snake game. Suggest that you resize your terminal window to a proper size before playing!""",
epilog = "[email protected]",
version = "0.1"
)
parser.add_option(
"","--key-help",
action = "store_true",default = False,
help = "show game keys"
)
opts,args = parser.parse_args()
parser.destroy()
if opts.key_help:
print "'w' or 'W' or UP-Arrow up"
print "'a' or
|
'A' or LF-Arrow left"
print "'s' or 'S' or DW-Arrow down"
print "'d' or 'D' or RG-Arrpw right"
print "'q' or 'Q' quit"
s
|
ys.exit(0)
else:
app = skapp.SKApp()
app.run()
|
xianghuzhao/VMDIRAC
|
VMDIRAC/Security/VmProperties.py
|
Python
|
gpl-3.0
| 268
| 0.011194
|
# $HeadURL$
__RCS
|
ID__ = "$Id$"
#
#
VM_WEB_OPERATION = "VmWebOperation"
#
VM_RPC_OPERATION = "VmRpcOperation"
#...............................................................................
#E
|
OF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF
|
jonathan-s/happy
|
happy/test/test_lists.py
|
Python
|
apache-2.0
| 11,513
| 0.001042
|
import unittest2
import helper
import simplejson as json
from nose.plugins.attrib import attr
# assumed import path for the client under test, based on this repo's layout
from happy.lists import ListsClient
PORTAL_ID = 62515
class ListsClientTest(unittest2.TestCase):
"""
Unit tests for the HubSpot List API Python wrapper (hapipy) client.
This file contains some unittest tests for the List API.
Questions, comments, etc: http://developers.hubspot.com
"""
def setUp(self):
self.client = ListsClient(**helper.get_options())
def tearDown(self):
pass
@attr('api')
def test_get_list(self):
|
# create a list to get
dummy_data = json.dumps(dict(
name='try_and_get_me',
dynamic=False,
portalId=PORTAL_ID
))
created_list = self.client.create_list(dummy_data)
# make sure it was created
        self.assertTrue(len(created_list['lists'
|
]))
# the id number of the list the test is trying to get
        id_to_get = created_list['listId']
        # try and get it
        received_lists = self.client.get_list(id_to_get)
        # see if the test got the right list
        self.assertEqual(received_lists['lists'][0]['listId'], created_list['listId'])
        print "Got this list: %s" % json.dumps(received_lists['lists'][0])
# clean up
self.client.delete_list(id_to_get)
@attr('api')
def test_get_batch_lists(self):
        # holds the ids of the lists being retrieved
        list_ids = []
        # make a list to get
        dummy_data = json.dumps(dict(
            name='first_test_list',
            dynamic=False,
            portalId=PORTAL_ID
        ))
        created_list = self.client.create_list(dummy_data)
        # make sure it was actually made
        self.assertTrue(created_list['listId'])
        # put the id of the newly made list in list_ids
        list_ids.append(created_list['listId'])
        # change the data a little and make another list
        dummy_data = json.dumps(dict(
            name='second_test_list',
            dynamic=False,
            portalId=PORTAL_ID
        ))
        created_list = self.client.create_list(dummy_data)
        # make sure it was actually made
        self.assertTrue(created_list['listId'])
        # put the id number in list_ids
        list_ids.append(created_list['listId'])
# try and get them
batch_lists = self.client.get_batch_lists(list_ids)
# make sure you got as many lists as you were searching for
self.assertEqual(len(list_ids), len(batch_lists['lists']))
# clean up
self.client.delete_list(list_ids[0])
self.client.delete_list(list_ids[1])
@attr('api')
def test_get_lists(self):
        # try and get lists
        received_lists = self.client.get_lists()
        # see if the test got at least one
        if len(received_lists['lists']) == 0:
            self.fail("Unable to retrieve any lists")
        else:
            print "Got these lists %s" % json.dumps(received_lists)
@attr('api')
def test_get_static_lists(self):
# create a static list to get
dummy_data = json.dumps(dict(
name='static_test_list',
dynamic=False,
portalId=PORTAL_ID
))
created_list = self.client.create_list(dummy_data)
# make sure it was actually made
        self.assertTrue(created_list['listId'])
# this call will return 20 lists if not given another value
static_lists = self.client.get_static_lists()
if len(static_lists['lists']) == 0:
self.fail("Unable to retrieve any static lists")
else:
print "Found these static lists: %s" % json.dumps(static_lists)
# clean up
self.client.delete_list(created_list['listId'])
@attr('api')
def test_get_dynamic_lists(self):
# make a dynamic list to get
dummy_data = json.dumps(dict(
name='test_dynamic_list',
dynamic=True,
portalId=PORTAL_ID
))
created_list = self.client.create_list(dummy_data)
# make sure the dynamic list was made
self.assertTrue(created_list['listId'])
dynamic_lists = self.client.get_dynamic_lists()
if len(dynamic_lists['lists']) == 0:
self.fail("Unable to retrieve any dynamic lists")
else:
print "Found these dynamic lists: %s" % json.dumps(dynamic_lists)
# clean up
self.client.delete_list(created_list['listId'])
@attr('api')
def test_get_list_contacts(self):
        # the id number of the list you want the contacts of
        # (placeholder value; set to a real list id before running)
        which_list = 0
        # try and get the contacts
        contacts = self.client.get_list_contacts(which_list)
        # make sure you get at least one
        self.assertTrue(len(contacts['contacts']))
        print "Got these contacts: %s from this list: %s" % (json.dumps(contacts), which_list)
@attr('api')
def test_get_list_contacts_recent(self):
        # the id number of the list you want the recent contacts of
        # (placeholder value; set to a real list id before running)
        which_list = 0
        recent_contacts = self.client.get_list_contacts_recent(which_list)
        if len(recent_contacts['lists']) == 0:
            self.fail("Did not find any recent contacts")
        else:
            print "Found these recent contacts: %s" % json.dumps(recent_contacts)
@attr('api')
def test_create_list(self):
# the data for the list the test is making
dummy_data = json.dumps(dict(
list_name='test_list',
dynamic=False,
portalId=PORTAL_ID
))
# try and make the list
created_list = self.client.create_list(dummy_data)
# make sure it was created
        if len(created_list['lists']) == 0:
            self.fail("Did not create the list")
        else:
            print "Created this list: %s" % json.dumps(created_list)
        # clean up
        self.client.delete_list(created_list['lists'][0]['listId'])
@attr('api')
def test_update_list(self):
# make a list to update
dummy_data = json.dumps(dict(
name='delete_me',
dynamic=False,
portalId=PORTAL_ID
))
created_list = self.client.create_list(dummy_data)
# make sure it was actually made
        self.assertTrue(created_list['listId'])
# get the id number of the list
update_list_id = created_list['listId']
# this is the data updating the list
update_data = json.dumps(dict(
list_name='really_delete_me',
))
# try and do the update
http_response = self.client.update_list(update_list_id, update_data)
if http_response >= 400:
self.fail("Unable to update list!")
else:
print("Updated a list!")
# clean up
self.client.delete_list(update_list_id)
@attr('api')
def test_add_contacts_to_list_from_emails(self):
# make a list to add contacts to
dummy_data = json.dumps(dict(
name='give_me_contact_emails',
dynamic=False,
portalId=PORTAL_ID
))
created_list = self.client.create_list(dummy_data)
# make sure it was actually made
self.assertTrue(len(created_list['lists']))
# the id number of the list being added to
which_list = created_list['listId']
        # the emails of the contacts being added
        # (placeholder addresses; replace with real contact emails)
        emails = json.dumps(dict(
            emails=['[email protected]']
        ))
# try and add the contacts
self.client.add_contacts_to_list_from_emails(which_list, emails)
@attr('api')
def test_add_contact_to_list(self):
# make a list to add a contact to
dummy_data = json.dumps(dict(
name='add_a_contact',
dynamic=False,
portalId=PORTAL_ID
))
created_list = self.client.create_list(dummy_data)
# make sure it was actually made
self.assertTrue(created_list['listId'])
# the id number of the list the contact is being added to
which_list = created_list['listId']
        # the id number of the contact being added to the list
        # (placeholder value; set to a real contact id before running)
        which_contact = 0
added = self.client.a
|
s0lst1c3/eaphammer
|
local/hostapd-eaphammer/tests/remote/config.py
|
Python
|
gpl-3.0
| 3,543
| 0.011572
|
# Environment configuration
# Copyright (c) 2016, Tieto Corporation
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
#
# Currently static definition, in the future this could be a config file,
# or even common database with host management.
#
import logging
logger = logging.getLogger()
#
# You can put your settings in cfg.py file with setup_params, devices
# definitions in the format as below. In other case HWSIM cfg will be used.
#
setup_params = {"setup_hw" : "./tests/setup_hw.sh",
"hostapd" : "./tests/hostapd",
"wpa_supplicant" : "./tests/wpa_supplicant",
"iperf" : "iperf",
"wlantest" : "./tests/wlantest",
"wlantest_cli" : "./tests/wlantest_cli",
"country" : "US",
"log_dir" : "/tmp/",
"ipv4_test_net" : "192.168.12.0",
"trace_start" : "./tests/trace_start.sh",
"trace_stop" : "./tests/trace_stop.sh",
"perf_start" : "./tests/perf_start.sh",
"perf_stop" : "./tests/perf_stop.sh"}
#
#devices = [{"hostname": "192.168.254.58", "ifname" : "wlan0", "port": "9877", "name" : "t2-ath9k", "flags" : "AP_HT40 STA_HT40"},
# {"hostname": "192.168.254.58", "ifname" : "wlan1", "port": "9877", "name" : "t2-ath10k", "flags" : "AP_VHT80"},
# {"hostname": "192.168.254.58", "ifname" : "wlan3", "port": "9877", "name" : "t2-intel7260", "flags" : "STA_VHT80"},
# {"hostname": "192.168.254.55", "ifname" : "wlan0, wlan1, wlan2", "port": "", "name" : "t3-monitor"},
# {"hostname": "192.168.254.50", "ifname" : "wlan0", "port": "9877", "name" : "t1-ath9k"},
# {"hostname": "192.168.254.50", "ifname" : "wlan1", "port": "9877", "name" : "t1-ath10k"}]
#
# HWSIM - ifaces available
|
after modprobe mac80211_hwsim
#
devices = [{"hostname": "localhost", "ifname": "wlan0", "port": "9868", "
|
name": "hwsim0", "flags": "AP_VHT80 STA_VHT80"},
{"hostname": "localhost", "ifname": "wlan1", "port": "9878", "name": "hwsim1", "flags": "AP_VHT80 STA_VHT80"},
{"hostname": "localhost", "ifname": "wlan2", "port": "9888", "name": "hwsim2", "flags": "AP_VHT80 STA_VHT80"},
{"hostname": "localhost", "ifname": "wlan3", "port": "9898", "name": "hwsim3", "flags": "AP_VHT80 STA_VHT80"},
{"hostname": "localhost", "ifname": "wlan4", "port": "9908", "name": "hwsim4", "flags": "AP_VHT80 STA_VHT80"}]
def get_setup_params(filename="cfg.py"):
try:
mod = __import__(filename.split(".")[0])
return mod.setup_params
except:
logger.debug("__import__(" + filename + ") failed, using static settings")
pass
return setup_params
def get_devices(filename="cfg.py"):
try:
mod = __import__(filename.split(".")[0])
return mod.devices
except:
logger.debug("__import__(" + filename + ") failed, using static settings")
pass
return devices
def get_device(devices, name=None, flags=None, lock=False):
if name is None and flags is None:
raise Exception("Failed to get device")
for device in devices:
if device['name'] == name:
return device
for device in devices:
try:
device_flags = device['flags']
if device_flags.find(flags) != -1:
return device
except:
pass
raise Exception("Failed to get device " + name)
def put_device(devices, name):
pass
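# Example: grab any AP-capable VHT80 device from the table above (raises an
# exception if nothing matches):
#   dev = get_device(get_devices(), flags="AP_VHT80")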
|
jboning/python-dogma
|
test_dogma_extra.py
|
Python
|
agpl-3.0
| 1,477
| 0.00677
|
from unittest import TestCase
import dogma
from test_dogma_values import *
class TestDogmaExtra(TestCase):
def test(self):
ctx = dogma.Context()
slot = ctx.add_module(TYPE_125mmGatlingAutoCannonII)
loc = dogma.Location.module(slot)
affectors = ctx.get_affectors(loc)
ctx.set_ship(TYPE_Rifter)
a
|
ffectors_with_ship = ctx.get_affectors(loc)
self.assertTrue(dogma.type_has_effect(TYPE_125mmGatlingAutoCannonII, dogma.State.ONLINE, EFFECT_HiPower))
self.assertTrue(dogma.type_has_active_effects(TYPE_125mmGatlingAutoCannonII))
self.assertTrue(dogma.type_has_overload_effects(TYPE_125mmGatlingAutoCannonII))
self.asse
|
rtTrue(dogma.type_has_projectable_effects(TYPE_StasisWebifierI))
self.assertEqual(dogma.type_base_attribute(TYPE_Rifter, ATT_LauncherSlotsLeft), 2)
ctx.add_charge(slot, TYPE_BarrageS)
self.assertEqual(ctx.get_number_of_module_cycles_before_reload(slot), 200)
effect = dogma.get_nth_type_effect_with_attributes(TYPE_125mmGatlingAutoCannonII, 0)
(duration, tracking, discharge, att_range, falloff, usagechance,
) = ctx.get_location_effect_attributes(loc, effect)
self.assertEqual(falloff, 7500)
self.assertEqual(att_range, 1200)
self.assertEqual(discharge, 0)
capacitors = ctx.get_capacitor_all(False)
self.assertEqual(len(capacitors), 1)
self.assertIn(ctx, capacitors)
|
mysociety/polipop
|
polipop/popit/models/positions.py
|
Python
|
agpl-3.0
| 7,783
| 0.010022
|
import datetime
from django.contrib.gis.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.core import exceptions
from django.db.models import Q
from django.conf import settings
from django_date_extensions.fields import ApproximateDateField, ApproximateDate
from markitup.fields import MarkupField
from popit.models import ModelBase, date_help_text, Person, Organisation, DataKey, Data
class PositionCategory(ModelBase):
#category_choices = (
# ('political', 'Political'),
# ('education', 'Education (as a learner)'),
# ('other', 'Anything else'),
#)
category = models.CharField(max_length=100)
class Meta:
ordering = [ 'category' ]
app_label = 'popit'
def __unicode__(self):
return self.category
class PositionType(ModelBase):
name = models.CharField(max_length=100)
slug = models.SlugField()
summary = MarkupField(blank=True, default='')
requires_place = models.BooleanField(default=False, help_text="Does this job type require a place to complete the position?")
organisation = models.ForeignKey(Organisation, null=True, blank=True)
category = models.ForeignKey(PositionCategory, null=True, blank=True, help_text="What sort of position is this?")
class Meta:
ordering = [ "name" ]
app_label = 'popit'
def __unicode__(self):
if self.organisation:
return u'%s (%s)' % (self.name, self.organisation)
return self.name
# @models.permalink
# def get_absolute_url(self):
# return ( 'position', [ self.slug ] )
#
# def organisations(self):
# """
# Return a qs of organisations, with the most frequently related first.
#
# Each organisation is also annotated with 'position_count' which might be
# useful.
#
# This is intended as an alternative to assigning a org to each
# position_title. Instead we can deduce it from the postions.
# """
#
# orgs = (
# Organisation
# .objects
# .filter(position__title=self)
# .annotate( position_count=models.Count('position') )
# .order_by( '-position_count' )
# )
#
# return orgs
class Position(ModelBase):
person = models.ForeignKey(Person)
organisation = models.ForeignKey(Organisation, null=True, blank=True)
type = models.ForeignKey(PositionType, null=True, blank=True)
title = models.CharField(max_length=200, blank=Tru
|
e, default='')
# XXX: Working with South here presumably, umm, tricky
if 'mapit' in settings.INSTALLED_APPS:
place = models.ForeignKey('Place', null=True, blank=True, help_text="use if needed to identify the position - eg add constituency for an 'MP'" )
else:
place = models.CharField(max_length=100, blank=True, help_text="use if needed to identify the position - eg add constituency for an 'MP'")
|
note = models.CharField(max_length=300, blank=True, default='')
start_date = ApproximateDateField(blank=True, help_text=date_help_text)
end_date = ApproximateDateField(blank=True, help_text=date_help_text, default="future")
    # Two hidden fields that are only used to do sorting. Filled in by code.
    sorting_start_date = models.CharField(editable=False, default='', max_length=10)
    sorting_end_date = models.CharField(editable=False, default='', max_length=10)
def __unicode__(self):
if self.organisation:
organisation = self.organisation.name
elif self.type and self.type.organisation:
organisation = self.type.organisation.name
else:
organisation = 'Unknown'
if self.title and self.type:
title = u'%s (%s)' % (self.title, self.type)
elif self.type:
title = self.type
else:
title = self.title or 'Unknown'
if self.place:
place = '(%s)' % self.place
else:
place = ''
out = "%s's position as %s %s at %s (%s-%s)" % ( self.person.name, title, self.place, organisation, self.start_date, self.end_date)
return out
class Meta:
app_label = 'popit'
ordering = ['-sorting_end_date', '-sorting_start_date']
def clean(self):
if not (self.organisation or self.title or self.type):
raise exceptions.ValidationError('Must have at least one of organisation, title or type.')
if self.type and self.type.requires_place and not self.place:
raise exceptions.ValidationError( "The job type '%s' requires a place to be set" % self.type.name )
def display_dates(self):
"""Nice HTML for the display of dates"""
# no dates
if not (self.start_date or self.end_date):
return ''
# start but no end
if self.start_date and not self.end_date:
return "Started %s" % self.start_date
# both dates
if self.start_date and self.end_date:
if self.end_date.future:
return "Started %s" % ( self.start_date )
else:
return "%s → %s" % ( self.start_date, self.end_date )
# end but no start
if not self.start_date and self.end_date:
return 'ongoing'
def display_start_date(self):
"""Return text that represents the start date"""
if self.start_date:
return str(self.start_date)
return '?'
def display_end_date(self):
"""Return text that represents the end date"""
if self.end_date:
return str(self.end_date)
return '?'
def is_ongoing(self):
"""Return True or False for whether the position is currently ongoing"""
if not self.end_date:
return False
elif self.end_date.future:
return True
else:
# turn today's date into an ApproximateDate object and cmp to that
now = datetime.date.today()
now_approx = ApproximateDate(year=now.year, month=now.month, day=now.day )
return now_approx <= self.end_date
def has_known_dates(self):
"""Is there at least one known (not future) date?"""
return (self.start_date and not self.start_date.future) or (self.end_date and not self.end_date.future)
def _set_sorting_dates(self):
"""Set the sorting dates from the actual dates (does not call save())"""
# value can be yyyy-mm-dd, future or None
start = repr( self.start_date ) if self.start_date else ''
end = repr( self.end_date ) if self.end_date else ''
# set the value or default to something sane
sorting_start_date = start or '0000-00-00'
sorting_end_date = end or start or '0000-00-00'
# To make the sorting consistent special case some parts
if not end and start == 'future':
sorting_start_date = 'a-future' # come after 'future'
self.sorting_start_date = sorting_start_date
self.sorting_end_date = sorting_end_date
return True
def save(self, *args, **kwargs):
self._set_sorting_dates()
super(Position, self).save(*args, **kwargs)
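# --- Illustrative sketch (not part of the original model) ------------------
# A plain-Python rendering of the sorting-key scheme implemented by
# Position._set_sorting_dates() above: dates serialise to 'yyyy-mm-dd' or
# 'future', missing dates default to '0000-00-00', and a lone 'future' start
# becomes 'a-future', which compares lower than 'future' and therefore lands
# after it under the model's descending ordering.
def _sorting_keys_sketch(start, end):
    sorting_start = start or '0000-00-00'
    sorting_end = end or start or '0000-00-00'
    if not end and start == 'future':
        sorting_start = 'a-future'
    return sorting_start, sorting_end
# _sorting_keys_sketch('2010-01-01', 'future') -> ('2010-01-01', 'future')
# _sorting_keys_sketch('future', '')           -> ('a-future', 'future')
# _sorting_keys_sketch('', '')                 -> ('0000-00-00', '0000-00-00')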
class PositionDataKey(DataKey):
class Meta:
app_label = 'popit'
class PositionData(Data):
person = models.ForeignKey(Position, related_name='data')
key = models.ForeignKey(PositionDataKey, related_name='values')
class Meta:
app_label = 'popit'
verbose_name_plural = 'position data'
|
alekseik1/python_mipt_study_1-2
|
1sem/lesson_1/1.py
|
Python
|
gpl-3.0
| 138
| 0.043478
|
a = int(input())
s = "odd"
s1 = "even"
for i in range(1, a):
    if i % 2 == 0:
        print(str(i) + " " + s1)
    else:
        print(str(i) + " " + s)
|
choltz95/story-understanding-amt
|
simpleamt.py
|
Python
|
mit
| 2,932
| 0.011596
|
import argparse, json
import boto3
from boto.mturk.connection import MTurkConnection
from boto.mturk.qualification import *
from jinja2 import Environment, FileSystemLoader
"""
A bunch of free functions that we use in all scripts.
"""
def get_jinja_env(config):
"""
Get a jinja2 Environment object that we can use to find templates.
"""
return Environment(loader=FileSystemLoader(config['template_directories']))
def json_file(filename):
with open(filename, 'r') as f:
return json.load(f)
def get_parent_parser():
"""
Get an argparse parser with arguments that are always needed
"""
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument('--prod', action='store_false', dest='sandbox',
default=True,
help="Whether to run on the production AMT site.")
  parser.add_argument('--hit_ids_file')
parser.add_argument('--config', default='config.json',
type=json_file)
return parser
def get_mturk_connection_from_args(args):
"""
Utility method to get an MTurkConnection from argparse args.
"""
aws_access_key = args.config.get('aws_access_key')
aws_secret_key = args.config.get('aws_secret_key')
return get_mturk_connection(sandbox=args.sandbox,
aws_access_key=aws_access_key,
                              aws_secret_key=aws_secret_key)
def get_mturk_connection(sandbox=True, aws_access_key=None,
aws_secret_key=None):
"""
Get a boto mturk connection. This is a thin wrapper over the
MTurkConnection constructor; the only difference is a boolean
flag to indicate sandbox or not.
"""
kwargs = {}
if aws_access_key is not None:
kwargs['aws_access_key_id'] = aws_access_key
if aws_secret_key is not None:
kwargs['aws_secret_access_key'] = aws_secret_key
if sandbox:
host = 'mechanicalturk.sandbox.amazonaws.com'
else:
host='mechanicalturk.amazonaws.com'
return MTurkConnection(host=host, **kwargs)
def setup_qualifications(hit_properties):
"""
Replace some of the human-readable keys from the raw HIT properties
JSON data structure with boto-specific objects.
"""
qual = Qualifications()
if 'country' in hit_properties:
qual.add(LocaleRequirement('In', hit_properties['country']))
del hit_properties['country']
if 'hits_approved' in hit_properties:
qual.add(NumberHitsApprovedRequirement('GreaterThan',
hit_properties['hits_approved']))
del hit_properties['hits_approved']
if 'percent_approved' in hit_properties:
qual.add(PercentAssignmentsApprovedRequirement('GreaterThan',
hit_properties['percent_approved']))
del hit_properties['percent_approved']
# qual.add(Requirement(qualification_type_id="3TDQPWMDS877YXAXCWP6LHT0FJRANT",comparator='GreaterThan',integer_value=9))
# 3TDQPWMDS877YXAXCWP6LHT0FJRANT
hit_properties['qualifications'] = qual
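# --- Illustrative usage sketch (not part of the original module) -----------
# Hypothetical driver tying the helpers above together; the HIT properties
# dict and the keys expected in config.json are assumptions.
if __name__ == '__main__':
  args = get_parent_parser().parse_args()
  conn = get_mturk_connection_from_args(args)
  print(conn)
  hit_properties = {'country': 'US', 'hits_approved': 100,
                    'percent_approved': 95}
  setup_qualifications(hit_properties)  # swaps keys for a Qualifications object
  print(hit_properties['qualifications'])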
|
leighpauls/k2cro4
|
third_party/python_26/Lib/idlelib/AutoComplete.py
|
Python
|
bsd-3-clause
| 9,041
| 0.000553
|
"""AutoComplete.py - An IDLE extension for automatically completing names.
This extension can complete either attribute names or file names. It can pop
up a window with all available names for the user to select from.
"""
import os
import sys
import string
from configHandler import idleConf
import AutoCompleteWindow
from HyperParser import HyperParser
import __main__
# This string includes all chars that may be in a file name (without a path
# separator)
FILENAME_CHARS = string.ascii_letters + string.digits + os.curdir + "._~#$:-"
# This string includes all chars that may be in an identifier
ID_CHARS = string.ascii_letters + string.digits + "_"
# These constants represent the two different types of completions
COMPLETE_ATTRIBUTES, COMPLETE_FILES = range(1, 2+1)
SEPS = os.sep
if os.altsep: # e.g. '/' on Windows...
SEPS += os.altsep
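# --- Illustrative sketch (not part of IDLE) --------------------------------
# The backward scan that open_completions() below performs inline: starting
# at cursor column i, walk left while characters belong to the given charset
# and return the fragment that should be completed.
def _scan_left_sketch(curline, i, charset):
    j = i
    while i and curline[i-1] in charset:
        i -= 1
    return curline[i:j]
# e.g. _scan_left_sketch("x = os.pa", 9, ID_CHARS) == "pa"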
class AutoComplete:
menudefs = [
('edit', [
("Show Completions", "<<force-open-completions>>"),
])
]
popupwait = idleConf.GetOption("extensions", "AutoComplete",
"popupwait", type="int", default=0)
def __init__(self, editwin=None):
self.editwin = editwin
if editwin is None: # subprocess and test
return
self.text = editwin.text
self.autocompletewindow = None
# id of delayed call, and the index of the text insert when the delayed
# call was issued. If _delayed_completion_id is None, there is no
# delayed call.
self._delayed_completion_id = None
self._delayed_completion_index = None
def _make_autocomplete_window(self):
return AutoCompleteWindow.AutoCompleteWindow(self.text)
def _remove_autocomplete_window(self, event=None):
if self.autocompletewindow:
self.autocompletewindow.hide_window()
self.autocompletewindow = None
def force_open_completions_event(self, event):
"""Happens when the user really wants to open a completion list, even
if a function call is needed.
"""
self.open_completions(True, False, True)
def try_open_completions_event(self, event):
"""Happens when it would be nice to open a completion list, but not
        really necessary, for example after a dot, so no function
        calls will be made.
"""
lastchar = self.text.get("insert-1c")
if lastchar == ".":
self._open_completions_later(False, False, False,
COMPLETE_ATTRIBUTES)
elif lastchar in SEPS:
self._open_completions_later(False, False, False,
COMPLETE_FILES)
def autocomplete_event(self, event):
"""Happens when the user wants to complete his word, and if neccesary,
open a completion list after that (if there is more than one
completion)
"""
if hasattr(event, "mc_state") and event.mc_state:
# A modifier was pressed along with the tab, continue as usual.
return
if self.autocompletewindow and self.autocompletewindow.is_active():
self.autocompletewindow.complete()
return "break"
else:
opened = self.open_completions(False, True, True)
if opened:
return "break"
def _open_completions_later(self, *args):
self._delayed_completion_index = self.text.index("insert")
if self._delayed_completion_id is not None:
self.text.after_cancel(self._delayed_completion_id)
self._delayed_completion_id = \
self.text.after(self.popupwait, self._delayed_open_completions,
*args)
def _delayed_open_completions(self, *args):
self._delayed_completion_id = None
if self.text.index("insert") != self._delayed_completion_index:
return
self.open_completions(*args)
def open_completions(self, evalfuncs, complete, userWantsWin, mode=None):
"""Find the completions and create the AutoCompleteWindow.
        Return True if successful (no syntax error or similar was found).
        If complete is True and there is nothing to complete and no
        start of completion, no completion list is opened and False is returned.
        If mode is given, a completion list is opened only in this mode.
"""
# Cancel another delayed call, if it exists.
if self._delayed_completion_id is not None:
self.text.after_cancel(self._delayed_completion_id)
self._delayed_completion_id = None
hp = HyperParser(self.editwin, "insert")
curline = self.text.get("insert linestart", "insert")
i = j = len(curline)
if hp.is_in_string() and (not mode or mode==COMPLETE_FILES):
self._remove_autocomplete_window()
mode = COMPLETE_FILES
while i and curline[i-1] in FILENAME_CHARS:
i -= 1
comp_start = curline[i:j]
j = i
while i and curline[i-1] in FILENAME_CHARS + SEPS:
i -= 1
comp_what = curline[i:j]
elif hp.is_in_code() and (not mode or mode==COMPLETE_ATTRIBUTES):
self._remove_autocomplete_window()
mode = COMPLETE_ATTRIBUTES
while i and curline[i-1] in ID_CHARS:
i -= 1
comp_start = curline[i:j]
if i and curline[i-1] == '.':
hp.set_index("insert-%dc" % (len(curline)-(i-1)))
comp_what = hp.get_expression()
if not comp_what or \
(not evalfuncs and comp_what.find('(') != -1):
return
else:
comp_what = ""
else:
return
if complete and not comp_what and not comp_start:
return
        comp_lists = self.fetch_completions(comp_what, mode)
if not comp_lists[0]:
return
self.autocompletewindow = self._make_autocomplete_window()
self.autocompletewindow.show_window(comp_lists,
"insert-%dc" % len(comp_start),
complete,
mode,
userWantsWin)
return True
def fetch_completions(self, what, mode):
"""Return a pair of lists of completions for something. The first list
is a sublist of the second. Both are sorted.
If there is a Python subprocess, get the comp. list there. Otherwise,
either fetch_completions() is running in the subprocess itself or it
was called in an IDLE EditorWindow before any script had been run.
The subprocess environment is that of the most recently run script. If
two unrelated modules are being edited some calltips in the current
module may be inoperative if the module was not the last to run.
"""
try:
rpcclt = self.editwin.flist.pyshell.interp.rpcclt
except:
rpcclt = None
if rpcclt:
return rpcclt.remotecall("exec", "get_the_completion_list",
(what, mode), {})
else:
if mode == COMPLETE_ATTRIBUTES:
if what == "":
namespace = __main__.__dict__.copy()
namespace.update(__main__.__builtins__.__dict__)
bigl = eval("dir()", namespace)
bigl.sort()
if "__all__" in bigl:
smalll = eval("__all__", namespace)
smalll.sort()
else:
smalll = filter(lambda s: s[:1] != '_', bigl)
else:
try:
entity = self.get_entity(what)
bigl = dir(entity)
bigl.sort()
if "__all__" in bigl:
smalll = entity.__all__
smalll.sort()
else:
|
blckshrk/Weboob
|
weboob/tools/application/qt/qt.py
|
Python
|
agpl-3.0
| 13,100
| 0.001221
|
# -*- coding: utf-8 -*-
# Copyright(C) 2010-2011 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import sys
import logging
import re
from threading import Event
from copy import copy
from PyQt4.QtCore import QTimer, SIGNAL, QObject, QString, QSize, QVariant, QMutex, Qt
from PyQt4.QtGui import QMainWindow, QApplication, QStyledItemDelegate, \
QStyleOptionViewItemV4, QTextDocument, QStyle, \
QAbstractTextDocumentLayout, QPalette, QMessageBox, \
QSpinBox, QLineEdit, QComboBox, QCheckBox, QInputDialog
from weboob.core.ouiboube import Weboob, VersionsMismatchError
from weboob.core.scheduler import IScheduler
from weboob.core.repositories import ModuleInstallError
from weboob.tools.config.iconfig import ConfigError
from weboob.tools.browser import BrowserUnavailable, BrowserIncorrectPassword, BrowserForbidden
from weboob.tools.value import ValueInt, ValueBool, ValueBackendPassword
from weboob.tools.misc import to_unicode
from weboob.capabilities import UserError
from ..base import BaseApplication, MoreResultsAvailable
__all__ = ['QtApplication', 'QtMainWindow', 'QtDo', 'HTMLDelegate']
class QtScheduler(IScheduler):
def __init__(self, app):
self.app = app
self.count = 0
self.timers = {}
def schedule(self, interval, function, *args):
timer = QTimer()
timer.setInterval(interval * 1000)
timer.setSingleShot(True)
count = self.count
self.count += 1
timer.start()
self.app.connect(timer, SIGNAL("timeout()"), lambda: self.timeout(count, None, function, *args))
self.timers[count] = timer
def repeat(self, interval, function, *args):
timer = QTimer()
timer.setSingleShot(False)
count = self.count
self.count += 1
timer.start(0)
self.app.connect(timer, SIGNAL("timeout()"), lambda: self.timeout(count, interval, function, *args))
self.timers[count] = timer
def timeout(self, _id, interval, function, *args):
function(*args)
if interval is None:
self.timers.pop(_id)
else:
self.timers[_id].setInterval(interval * 1000)
def want_stop(self):
self.app.quit()
def run(self):
self.app.exec_()
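# --- Illustrative usage sketch (callback name hypothetical) ----------------
#
#     def tick():
#         logging.info('tick')
#
#     scheduler = QtScheduler(app)
#     scheduler.repeat(5, tick)         # fires immediately, then every 5 s
#     scheduler.schedule(60, app.quit)  # fires once, after a minute
#     scheduler.run()                   # enters the Qt event loop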
class QCallbacksManager(QObject):
class Request(object):
def __init__(self):
self.event = Event()
self.answer = None
def __call__(self):
raise NotImplementedError()
class LoginRequest(Request):
def __init__(self, backend_name, value):
QCallbacksManager.Request.__init__(self)
self.backend_name = backend_name
self.value = value
def __call__(self):
password, ok = QInputDialog.getText(None,
'%s request' % self.value.label,
'Please enter %s for %s' % (self.value.label,
self.backend_name),
QLineEdit.Password)
return password
def __init__(self, weboob, parent=None):
QObject.__init__(self, parent)
self.weboob = weboob
self.weboob.callbacks['login'] = self.callback(self.LoginRequest)
self.mutex = QMutex()
self.requests = []
self.connect(self, SIGNAL('new_request'), self.do_request)
def callback(self, klass):
def cb(*args, **kwargs):
return self.add_request(klass(*args, **kwargs))
return cb
def do_request(self):
self.mutex.lock()
request = self.requests.pop()
request.answer = request()
request.event.set()
self.mutex.unlock()
def add_request(self, request):
self.mutex.lock()
self.requests.append(request)
self.mutex.unlock()
self.emit(SIGNAL('new_request'))
request.event.wait()
return request.answer
class QtApplication(QApplication, BaseApplication):
def __init__(self):
QApplication.__init__(self, sys.argv)
self.setApplicationName(self.APPNAME)
BaseApplication.__init__(self)
self.cbmanager = QCallbacksManager(self.weboob, self)
def create_weboob(self):
return Weboob(scheduler=QtScheduler(self))
def load_backends(self, *args, **kwargs):
while True:
try:
return BaseApplication.load_backends(self, *args, **kwargs)
except VersionsMismatchError as e:
                msg = 'Versions of modules do not match the version of weboob.'
except ConfigError as e:
msg = unicode(e)
res = QMessageBox.question(None, 'Configuration error', u'%s\n\nDo you want to update repositories?' % msg, QMessageBox.Yes|QMessageBox.No)
if res == QMessageBox.No:
raise e
# Do not import it globally, it causes circular imports
from .backendcfg import ProgressDialog
pd = ProgressDialog('Update of repositories', "Cancel", 0, 100)
pd.setWindowModality(Qt.WindowModal)
try:
self.weboob.update(pd)
except ModuleInstallError as err:
QMessageBox.critical(None, self.tr('Update error'),
unicode(self.tr('Unable to update repositories: %s' % err)),
QMessageBox.Ok)
pd.setValue(100)
QMessageBox.information(None, self.tr('Update of repositories'),
self.tr('Repositories updated!'), QMessageBox.Ok)
class QtMainWindow(QMainWindow):
def __init__(self, parent=None):
QMainWindow.__init__(self, parent)
class QtDo(QObject):
def __init__(self, weboob, cb, eb=None):
QObject.__init__(self)
if not eb:
eb = self.default_eb
self.weboob = weboob
self.process = None
self.cb = cb
self.eb = eb
self.connect(self, SIGNAL('cb'), self.local_cb)
self.connect(self, SIGNAL('eb'), self.local_eb)
def do(self, *args, **kwargs):
        self.process = self.weboob.do(*args, **kwargs)
        self.process.callback_thread(self.thread_cb, self.thread_eb)
def default_eb(self, backend, error, backtrace):
if isinstance(error, MoreResultsAvailable):
# This is not an error, ignore.
return
msg = unicode(error)
if isinstance(error, BrowserIncorrectPassword):
if not msg:
msg = 'Invalid login/password.'
elif isinstance(error, BrowserUnavailable):
if not msg:
msg = 'Website is unavailable.'
elif isinstance(error, BrowserForbidden):
if not msg:
msg = 'This action is forbidden.'
elif isinstance(error, NotImplementedError):
msg = u'This feature is not supported by this backend.\n\n' \
u'To help the maintainer of this backend implement this feature, please contact: %s <%s>' % (backend.MAINTAINER, backend.EMAIL)
elif isinstance(error, UserError):
if not msg:
msg = type(error).__name__
elif logging.root.level == logging.DEBUG:
msg += u'<br />'
ul_opened = False
for line in backtrace.split('\n'):
m = re.match(' File (.*)', line)
if m:
if not ul_opened:
msg +=
|
timj/scons
|
test/QT/installed.py
|
Python
|
mit
| 5,726
| 0.001921
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Look if qt is installed, and try out all builders.
"""
import os
import sys
import TestSCons
test = TestSCons.TestSCons()
if not os.environ.get('QTDIR', None):
x ="External environment variable $QTDIR not set; skipping test(s).\n"
test.skip_test(x)
test.Qt_dummy_installation()
QTDIR=os.environ['QTDIR']
test.write('SConstruct', """\
import os
dummy_env = Environment()
ENV = dummy_env['ENV']
try:
PATH=ARGUMENTS['PATH']
if 'PATH' in ENV:
ENV_PATH = PATH + os.pathsep + ENV['PATH']
else:
Exit(0) # this is certainly a weird system :-)
except KeyError:
ENV_PATH=ENV.get('PATH', '')
env = Environment(tools=['default','qt'],
                  ENV={'PATH':ENV_PATH,
'PATHEXT':os.environ.get('PATHEXT'),
'HOME':os.getcwd(),
'SystemRoot':ENV.get('SystemRoot')},
# moc / uic want to write stuff in ~/.qt
CXXFILESUFFIX=".cpp")
conf = env.Configure()
if not conf.CheckLib(env.subst("$QT_LIB"), autoadd=0):
conf.env['QT_LIB'] = 'qt-mt'
if not conf.CheckLib(env.subst("$QT_LIB"), autoadd=0):
Exit(0)
env = conf.Finish()
VariantDir('bld', '.')
env.Program('bld/test_realqt', ['bld/mocFromCpp.cpp',
'bld/mocFromH.cpp',
'bld/anUiFile.ui',
'bld/main.cpp'])
""")
test.write('mocFromCpp.h', """\
void mocFromCpp();
""")
test.write('mocFromCpp.cpp', """\
#include <qobject.h>
#include "mocFromCpp.h"
class MyClass1 : public QObject {
Q_OBJECT
public:
MyClass1() : QObject() {};
public slots:
void myslot() {};
};
void mocFromCpp() {
MyClass1 myclass;
}
#include "mocFromCpp.moc"
""")
test.write('mocFromH.h', """\
#include <qobject.h>
class MyClass2 : public QObject {
Q_OBJECT;
public:
MyClass2();
public slots:
void myslot();
};
void mocFromH();
""")
test.write('mocFromH.cpp', """\
#include "mocFromH.h"
MyClass2::MyClass2() : QObject() {}
void MyClass2::myslot() {}
void mocFromH() {
MyClass2 myclass;
}
""")
test.write('anUiFile.ui', """\
<!DOCTYPE UI><UI>
<class>MyWidget</class>
<widget>
<class>QWidget</class>
<property name="name">
<cstring>MyWidget</cstring>
</property>
<property name="caption">
<string>MyWidget</string>
</property>
</widget>
<includes>
<include location="local" impldecl="in implementation">anUiFile.ui.h</include>
</includes>
<slots>
<slot>testSlot()</slot>
</slots>
<layoutdefaults spacing="6" margin="11"/>
</UI>
""")
test.write('anUiFile.ui.h', r"""
#include <stdio.h>
#if QT_VERSION >= 0x030100
void MyWidget::testSlot()
{
printf("Hello World\n");
}
#endif
""")
test.write('main.cpp', r"""
#include <qapp.h>
#include "mocFromCpp.h"
#include "mocFromH.h"
#include "anUiFile.h"
#include <stdio.h>
int main(int argc, char **argv) {
QApplication app(argc, argv);
mocFromCpp();
mocFromH();
MyWidget mywidget;
#if QT_VERSION >= 0x030100
mywidget.testSlot();
#else
printf("Hello World\n");
#endif
return 0;
}
""")
test.run(arguments="bld/test_realqt" + TestSCons._exe)
test.run(program=test.workpath("bld", "test_realqt"),
stdout=None,
status=None,
stderr=None)
if test.stdout() != "Hello World\n" or test.stderr() != '' or test.status:
sys.stdout.write(test.stdout())
sys.stderr.write(test.stderr())
# The test might be run on a system that doesn't have an X server
# running, or may be run by an ID that can't connect to the server.
# If so, then print whatever it showed us (which is in and of itself
# an indication that it built correctly) but don't fail the test.
expect = 'cannot connect to X server'
test.fail_test(test.stdout())
test.fail_test(test.stderr().find(expect) == -1)
if test.status != 1 and (test.status>>8) != 1:
sys.stdout.write('test_realqt returned status %s\n' % test.status)
test.fail_test()
QTDIR = os.environ['QTDIR']
PATH = os.environ['PATH']
os.environ['QTDIR']=''
os.environ['PATH']='.'
test.run(stderr=None, arguments="-c bld/test_realqt" + TestSCons._exe)
expect1 = "scons: warning: Could not detect qt, using empty QTDIR"
expect2 = "scons: warning: Could not detect qt, using moc executable as a hint"
test.fail_test(test.stderr().find(expect1) == -1 and
test.stderr().find(expect2) == -1)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
adriansuhov/lis-test
|
WS2012R2/lisa/Infrastructure/lisa-parser/lisa_parser/file_parser.py
|
Python
|
apache-2.0
| 38,829
| 0.00376
|
"""
Linux on Hyper-V and Azure Test Code, ver. 1.0.0
Copyright (c) Microsoft Corporation
All rights reserved
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
See the Apache Version 2.0 License for specific language governing
permissions and limitations under the License.
"""
from __future__ import print_function
import logging
import re
import os
import sys
import csv
import fileinput
import zipfile
import shutil
import decimal
try:
import xml.etree.cElementTree as ElementTree
except ImportError:
import xml.etree.ElementTree as ElementTree
logger = logging.getLogger(__name__)
class ParseXML(object):
"""Class used to parse a specific xml test suite file
"""
def __init__(self, file_path):
self.tree = ElementTree.ElementTree(file=file_path)
self.root = self.tree.getroot()
def get_tests_suite(self):
return self.root.find('testSuites').getchildren()[0]\
.find('suiteName').text
def get_tests(self):
"""Iterates through the xml file looking for <test> sections
and initializes a dict for every test case returning them in
the end
Dict structure:
{ 'testName' : {} }
"""
tests_dict = dict()
for test in self.root.iter('suiteTest'):
tests_dict[test.text.lower()] = dict()
for test_case in self.root.iter('test'):
# Check if testCase was not commented out
if test_case.find('testName').text.lower() == \
test.text.lower():
logger.debug('Getting test details for - %s', test.text)
tests_dict[test.text.lower()] = \
self.get_test_details(test_case)
return tests_dict
@staticmethod
def get_test_details(test_root):
"""Gets and an XML object and iterates through it
parsing the test details into a dictionary
Dict structure:
{ 'testProperty' : [ value(s) ] }
"""
test_dict = dict()
for test_property in test_root.getchildren():
if test_property.tag == 'testName':
continue
elif not test_property.getchildren() and test_property.text:
test_dict[test_property.tag.lower()] = \
test_property.text.strip().split()
else:
test_dict[test_property.tag.lower()] = list()
for item in test_property.getchildren():
if test_property.tag.lower() == 'testparams':
parameter = item.text.split('=')
test_dict[test_property.tag.lower()].append(
(parameter[0], parameter[1])
)
else:
test_dict[test_property.tag.lower()].append(item.text)
return test_dict
def get_vms(self):
"""Method searches for the 'vm' sections in the XML file
saving a dict for each vm found.
Dict structure:
{
vm_name: { vm_details }
}
"""
vm_dict = dict()
for machine in self.root.iter('vm'):
vm_dict[machine.find('vmName').text.lower()] = {
'hvServer': machine.find('hvServer').text.lower(),
'os': machine.find('os').text.lower()
}
return vm_dict
# TODO(bogdancarpusor): Narrow exception field
@staticmethod
def parse_from_string(xml_string):
"""Static method that parses xml content from a string
The method is used to parse the output of the PS command
that is sent to the vm in order to get more details
It returns a dict with the following structure:
{
vm_property: value
}
"""
try:
logger.debug('Converting XML string from KVP Command')
root = ElementTree.fromstring(xml_string.strip())
prop_name = ''
prop_value = ''
for child in root:
if child.attrib['NAME'] == 'Name':
prop_name = child[0].text
elif child.attrib['NAME'] == 'Data':
prop_value = child[0].text
return prop_name, prop_value
except RuntimeError:
            logger.error('Failed to parse XML string', exc_info=True)
logger.info('Terminating execution')
sys.exit(0)
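# --- Illustrative input (structure assumed from Hyper-V KVP output) --------
# parse_from_string() expects a fragment such as:
#
#     <INSTANCE CLASSNAME="Msvm_KvpExchangeDataItem">
#       <PROPERTY NAME="Name"><VALUE>FullyQualifiedDomainName</VALUE></PROPERTY>
#       <PROPERTY NAME="Data"><VALUE>vm.example.com</VALUE></PROPERTY>
#     </INSTANCE>
#
# and would return ('FullyQualifiedDomainName', 'vm.example.com').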
def parse_ica_log(log_path):
""" Parser for the generated log file after a lisa run - ica.log
The method iterates until the start of the test outcome section. After that
it searches, using regex, for predefined fields and saves them in a
dict structure.
:param log_path:
:return:
"""
logger.debug(
'Iterating through %s file until the test results part', log_path
)
parsed_ica = dict()
parsed_ica['vms'] = dict()
parsed_ica['tests'] = dict()
with open(log_path, 'r') as log_file:
for line in log_file:
if line.strip() == 'Test Results Summary':
break
# Get timestamp
parsed_ica['timestamp'] = re.search('([0-9/]+) ([0-9:]+)',
log_file.next()).group(0)
vm_name = ""
for line in log_file:
line = line.strip().lower()
logger.debug('Parsing line %s', line)
if re.search("^vm:", line) and len(line.split()) == 2:
vm_name = line.split()[1]
parsed_ica['vms'][vm_name] = dict()
# Check if there are any details about the VM
try:
parsed_ica['vms'][vm_name]['TestLocation'] = 'Hyper-V'
except KeyError:
parsed_ica['vms'][vm_name] = dict()
parsed_ica['vms'][vm_name]['TestLocation'] = 'Azure'
elif re.search('^test', line) and \
re.search('(passed$|failed$|aborted$|skipped$)', line):
test = line.split()
try:
parsed_ica['tests'][test[1].lower()] = (vm_name, test[3])
except KeyError:
                    logging.debug('Test %s was not listed in the Test Suites '
                                  'section. It will be ignored from the '
                                  'final results', test)
elif re.search('^os', line):
parsed_ica['vms'][vm_name]['hostOS'] = line.split(':')[1]\
.strip()
elif re.search('^server', line):
parsed_ica['vms'][vm_name]['hvServer'] = line
|
.split(':')[1]\
.strip()
elif re.search('^logs can be found at', line):
parsed_ica['logPath'] = line.split()[-1]
elif re.search('^lis version', line):
parsed_ica['lisVersion'] = line.split(':')[1].strip()
return parsed_ica
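# --- Illustrative input (log lines assumed; the parser lowercases them) ----
# Given a summary section such as:
#
#     VM: myvm
#     Test NET-COPY-LARGE-FILE : Passed
#     OS : centos 7.3
#     Server : hyperv-host-01
#     Logs can be found at /path/to/logs
#
# parse_ica_log() would produce parsed_ica['vms']['myvm'] ==
# {'TestLocation': 'Hyper-V', 'hostOS': 'centos 7.3',
#  'hvServer': 'hyperv-host-01'}, parsed_ica['tests']['net-copy-large-file']
# == ('myvm', 'passed') and parsed_ica['logPath'] == '/path/to/logs'.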
def parse_from_csv(csv_path):
"""
    Strip and read csv file into a dict data type.
:param csv_path: csv file path
:return: <list of dict> e.g. [{'t_col1': 'val1',
't_col2': 'val2',
...
},
...]
None - on error
"""
# python [2.7.10, 3.0) does not support context manager for fileinput
# strip csv of empty spaces or tabs
f_csv = fileinput.input(csv_path, inplace=True)
for line in f_csv:
# redirect std to file write
print(' '.join(line.split()))
f_csv.close()
list_csv_dict = []
with open(csv_path, 'rb') as fl:
t
|
GoogleCloudPlatform/healthcare
|
fhir/immunizations_demo/inference/main.py
|
Python
|
apache-2.0
| 10,155
| 0.008075
|
#!/usr/bin/python3
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Cloud Functions implementation which takes a patient bundle from a FHIR
Store whenever a questionnaire gets answered, runs prediction against a
pre-trained model and writes the results back to the same FHIR Store.
"""
import base64
import datetime
import googleapiclient.discovery
import google.auth
import json
import logging
import os
from google.auth.transport.urllib3 import AuthorizedHttp
from utils import *
# These should be passed in through deployment.
MODEL = os.environ.get('MODEL')
VERSION = os.environ.get('VERSION')
FHIR_STORE_ENDPOINT_PREFIX = 'https://healthcare.googleapis.com/v1beta1'
CREATE_RESOURCE_ACTION = 'CreateResource'
UPDATE_RESOURCE_ACTION = 'UpdateResource'
RISKS = ['negligible', 'low', 'moderate', 'high', 'certain']
LOGGER = logging.getLogger('main')
def get_resource(http, resource_name):
"""Fetches a resource from the FHIR Store.
Args:
resource_name (str): the name of the resource, e.g. 'projects/my-project
/locations/us-central1/datasets/my-dataset/fhirStores/my-store
/fhir/Patient/patient-id'
Returns:
Object: the resource loaded from the FHIR Store.
"""
response = http.request('GET', format_url(resource_name))
if response.status > 299:
LOGGER.critical("Failed to retrieve resource %s, response: %s" % (
resource_name, response.data))
return None
return json.loads(response.data)
def build_risk_assessment(pid, qid, disease, risk, rid=None):
"""Builds a risk assessment JSON object.
Returns:
Str: JSON representation of a RiskAssessment resource.
"""
risk_assessment = {
'resourceType': RISKASSESSMENT_TYPE,
'basis': [{'reference': pid}, {'reference': qid}],
'status': 'final',
'subject': {'reference': pid},
'occurrenceDateTime':
datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
'prediction': [{
'outcome': {
'coding': [{'display': disease}],
},
'qualitativeRisk': {
'coding': [{
'system': "http://hl7.org/fhir/risk-probability",
'code': risk
}]
}
}]
}
if rid is not None:
risk_assessment['id'] = rid
return json.dumps(risk_assessment)
def get_action(data):
"""Reads operation action (e.g. Create or Update) from pubsub message."""
if data['attributes'] is not None:
return data['attributes']['action']
return None
def format_url(path, query=None):
"""Formats request URL with path and query string."""
if query is None:
return "%s/%s" % (FHIR_STORE_ENDPOINT_PREFIX, path)
else:
return "%s/%s?%s" % (FHIR_STORE_ENDPOINT_PREFIX, path, query)
def create_or_update_resource(http, path, payload):
"""Writes a resource to the FHIR Store.
Args:
path (str): path to the endpoint, e.g. 'projects/my-project
      /locations/us-central1/datasets/my-dataset/fhirStores/my-store
/fhir/Patient' for create requests and 'projects/my-project
/locations/us-central1/datasets/my-dataset/fhirStores/my-store
/fhir/Patient/patient-id' for update requests.
    payload (str): resource to be written to the FHIR Store.
Returns:
Object: the resource from the server, usually this is an
OperationOutcome resource if there is anything wrong.
"""
# Determine which HTTP method we need to use: POST for create, and PUT for
  # update. The path of update requests has one more component than create
# requests.
method = 'POST' if path.count('/') == 9 else 'PUT'
response = http.request(method, format_url(path), body=payload,
headers={'Content-Type': 'application/fhir+json;charset=utf-8'})
if response.status > 299:
LOGGER.error("Failed to create or update resource %s, response: %s" % (
payload, response.data))
return None
return json.loads(response.data)
def search_resource(http, path, query):
"""Searches a resource in the FHIR Store.
Args:
path (str): path to the search endpoint, e.g. 'projects/my-project
/locations/us-central1/datasets/my-dataset/fhirStores/my-store
/fhir/Patient'
query (str): query parameter, e.g. 'age=gt30'
Returns:
List[dict]: a list of resources matching the search criteria.
"""
response = http.request('GET', format_url(path, query=query))
if response.status > 299:
LOGGER.error("Failed to search resource %s, response: %s" % (query,
response.data))
return None
bundle = json.loads(response.data)
return list(map(lambda r: r['resource'], bundle['entry']))
def filter_resource(resources, qid, disease):
"""Finds a RiskAssessment.
The target references a certain QuestionnaireResponse and is about the
specified disease
"""
def match(res):
return extract_qid(res) == qid and extract_disease(res) == disease
return next(filter(match, resources), None)
def build_examples(patient, questionnaire_response):
"""Builds examples to be sent for prediction.
Two examples are created for the two diseases we are targeting at.
"""
def map_example(disease):
return {
'age': calculate_age(patient['birthDate']),
'gender': 1 if patient['gender'] == 'male' else 0,
'country': COUNTRY_MAP[extract_country(questionnaire_response)],
'duration': calculate_duration(
*extract_start_end_date(questionnaire_response)),
'disease': disease
}
return list(map(map_example, range(len(DISEASE_MAP))))
def predict(examples):
"""Sends features to Cloud ML Engine for online prediction.
Args:
examples (list): features to be fed into the model for prediction.
Returns:
Mapping[str: any]: dictionary of prediction results defined by the model.
"""
service = googleapiclient.discovery.build('ml', 'v1', cache_discovery=False)
name = "projects/%s/models/%s/versions/%s" % (
os.environ.get('GCP_PROJECT'), MODEL, VERSION)
response = service.projects().predict(name=name,
body={'instances': examples}).execute()
if 'error' in response:
LOGGER.error("Prediction failed: %s" % response['error'])
return None
return response['predictions']
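# --- Illustrative glue (hypothetical; the score-to-bucket mapping and the
# target path are assumptions, not part of this module) ---------------------
#
#     scores = predict(build_examples(patient, questionnaire_response))
#     for disease, score in zip(DISEASE_MAP, scores):
#         risk = RISKS[...]  # bucket the model's score into the five levels
#         payload = build_risk_assessment(patient_id, resource_name,
#                                         disease, risk)
#         create_or_update_resource(http, risk_assessment_path, payload)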
def main(data, context):
"""Extracts features from a patient bundle for online prediction.
This process is broken down into a few steps:
1. Fetch the QuestionnaireResponse we get triggered on (note that we
only react to this resource type), and extract the patient that
answered it.
2. Fetch everything for the patient from step 1, and extract the
features we are interested in.
3. Send the features to Cloud ML for online prediction, and write the
results back to the FHIR store.
Args:
data (dict): Cloud PubSub payload. The `data` field is what we are
looking for.
context (google.cloud.functions.Context): Metadata for the event.
"""
if 'data' not in data:
LOGGER.info('`data` field is not present, skipping...')
return
resource_name = base64.b64decode(data['data']).decode('utf-8')
if QUESTIONNAIRERESPONSE_TYPE not in resource_name:
LOGGER.info("Skipping resource %s which is irrelevant for prediction." %
resource_name)
return
credentials, _ = google.auth.default()
http = AuthorizedHttp(credentials)
questionnaire_response = get_resource(http, resource_name)
if questionnaire_response is None:
return
patient_id = questionnaire_response['subject']['reference']
project_id, location, dataset_id, fhir_store_id, _ = _parse_resource_name(
resource_name)
patient = get_resource(http, _construct_resource_name(project_id, location,
dataset_id, fhir_store_id, patient_id))
|
eile/ITK
|
Modules/ThirdParty/pygccxml/src/pygccxml/declarations/type_traits.py
|
Python
|
apache-2.0
| 44,926
| 0.02838
|
# Copyright 2004-2008 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
"""
defines a few algorithms that deal with different properties of C++ types
Are you aware of the boost::type_traits library? pygccxml has functionality
similar to it. Using the functions defined in this module you can
- find out some properties of a type
- modify a type
Those functions are very valuable for code generation. Almost all functions
within this module work on the L{type_t} class hierarchy and/or L{class_t}.
"""
import os
import types
import matchers
import typedef
import calldef
import cpptypes
import variable
import algorithm
import namespace
import templates
import enumeration
import class_declaration
from pygccxml import utils
import types as build_in_types
def __remove_alias(type_):
"""implementation details"""
if isinstance( type_, typedef.typedef_t ):
return __remove_alias( type_.type )
if isinstance( type_, cpptypes.declarated_t ) and isinstance( type_.declaration, typedef.typedef_t ):
return __remove_alias( type_.declaration.type )
if isinstance( type_, cpptypes.compound_t ):
type_.base = __remove_alias( type_.base )
return type_
return type_
def remove_alias(type_):
"""returns type without typedefs"""
type_ref = None
if isinstance( type_, cpptypes.type_t ):
type_ref = type_
elif isinstance( type_, typedef.typedef_t ):
type_ref = type_.type
else:
pass #not a valid input, just return it
if not type_ref:
return type_
if type_ref.cache.remove_alias:
return type_ref.cache.remove_alias
no_alias = __remove_alias( type_ref.clone() )
type_ref.cache.remove_alias = no_alias
return no_alias
def create_cv_types( base ):
"""implementation details"""
return [ base
, cpptypes.const_t( base )
, cpptypes.volatile_t( base )
, cpptypes.volatile_t( cpptypes.const_t( base ) ) ]
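# For example, create_cv_types( cpptypes.int_t() ) yields the four
# cv-qualified variants that is_integral() and friends test membership
# against: int, const int, volatile int, const volatile int.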
def decompose_type(tp):
"""implementation details"""
#implementation of this function is important
if isinstance( tp, cpptypes.compound_t ):
return [tp] + decompose_type( tp.base )
elif isinstance( tp, typedef.typedef_t ):
return decompose_type( tp.type )
elif isinstance( tp, cpptypes.declarated_t ) and isinstance( tp.declaration, typedef.typedef_t ):
return decompose_type( tp.declaration.type )
else:
return [tp]
def decompose_class(type):
"""implementation details"""
types = decompose_type( type )
return [ tp.__class__ for tp in types ]
def base_type(type):
"""returns base type.
For C{const int} will return C{int}
"""
types = decompose_type( type )
return types[-1]
def does_match_definition(given, main, secondary ):
"""implementation details"""
assert isinstance( secondary, build_in_types.TupleType )
assert 2 == len( secondary ) #general solution could be provided
types = decompose_type( given )
if isinstance( types[0], main ):
return True
elif 2 <= len( types ) and \
( ( isinstance( types[0], main ) and isinstance( types[1], secondary ) ) \
or ( isinstance( types[1], main ) and isinstance( types[0], secondary ) ) ):
return True
elif 3 <= len( types ):
classes = set( [tp.__class__ for tp in types[:3]] )
desired = set( [main] + list( secondary ) )
return classes == desired
else:
return False
def is_bool( type_ ):
"""returns True, if type represents C{bool}, False otherwise"""
return remove_alias( type_ ) in create_cv_types( cpptypes.bool_t() )
def is_void( type ):
"""returns True, if type represents C{void}, False otherwise"""
return remove_alias( type ) in create_cv_types( cpptypes.void_t() )
def is_void_pointer( type ):
"""returns True, if type represents C{void*}, False otherwise"""
return is_same( type, cpptypes.pointer_t( cpptypes.void_t() ) )
def is_integral( type ):
"""returns True, if type represents C++ integral type, False otherwise"""
integral_def = create_cv_types( cpptypes.char_t() ) \
+ create_cv_types( cpptypes.unsigned_char_t() ) \
+ create_cv_types( cpptypes.signed_char_t() ) \
+ create_cv_types( cpptypes.wchar_t() ) \
+ create_cv_types( cpptypes.short_int_t() ) \
+ create_cv_types( cpptypes.short_unsigned_int_t() ) \
+ create_cv_types( cpptypes.bool_t() ) \
+ create_cv_types( cpptypes.int_t() ) \
+ create_cv_types( cpptypes.unsigned_int_t() ) \
+ create_cv_types( cpptypes.long_int_t() ) \
+ create_cv_types( cpptypes.long_unsigned_int_t() ) \
+ create_cv_types( cpptypes.long_long_int_t() ) \
+ create_cv_types( cpptypes.long_long_unsigned_int_t() )
return remove_alias( type ) in integral_def
def is_floating_point( type ):
"""returns True, if type represents C++ floating point type, False otherwise"""
float_def = create_cv_types( cpptypes.float_t() ) \
+ create_cv_types( cpptypes.double_t() ) \
+ create_cv_types( cpptypes.long_double_t() )
return remove_alias( type ) in float_def
def is_arithmetic( type ):
"""returns True, if type represents C++ integral or floating point type, False otherwise"""
return is_integral( type ) or is_floating_point( type )
def is_pointer(type):
"""returns True, if type represents C++ pointer type, False otherwise"""
return does_match_definition( type
, cpptypes.pointer_t
, (cpptypes.const_t, cpptypes.volatile_t) )
def is_calldef_pointer(type):
"""returns True, if t
|
ype represents pointer to free/member function, False otherwise"""
if not is_pointer(type):
return False
nake_type = remove_alias( type )
nake_type = remove_const( nake_type )
nake_type = remove_volatile( nake_type )
return isinstance( nake_type, cpptypes.compound_t ) \
and isinstance( nake_type.base, cpptypes.calldef_type_t )
def remove_pointer(type):
"""removes pointer from the type definition
If type is not pointer type, it will be returned as is.
"""
nake_type = remove_alias( type )
if not is_pointer( nake_type ):
return type
elif isinstance( nake_type, cpptypes.volatile_t ) and isinstance( nake_type.base, cpptypes.pointer_t ):
return cpptypes.volatile_t( nake_type.base.base )
elif isinstance( nake_type, cpptypes.const_t ) and isinstance( nake_type.base, cpptypes.pointer_t ):
return cpptypes.const_t( nake_type.base.base )
elif isinstance( nake_type.base, cpptypes.calldef_type_t ):
return type
else:
return nake_type.base
def is_reference(type):
"""returns True, if type represents C++ reference type, False otherwise"""
nake_type = remove_alias( type )
return isinstance( nake_type, cpptypes.reference_t )
def is_array(type):
"""returns True, if type represents C++ array type, False otherwise"""
nake_type = remove_alias( type )
nake_type = remove_reference( nake_type )
nake_type = remove_cv( nake_type )
return isinstance( nake_type, cpptypes.array_t )
def array_size(type):
"""returns array size"""
nake_type = remove_alias( type )
nake_type = remove_reference( nake_type )
nake_type = remove_cv( nake_type )
assert isinstance( nake_type, cpptypes.array_t )
return nake_type.size
def array_item_type(type_):
"""returns array item type"""
if is_array(type_):
type_ = remove_alias( type_ )
type_ = remove_cv( type_ )
return type_.base
elif is_pointer( type_ ):
return remove_pointer( type_ )
else:
assert 0
def r
|
erc7as/cs3240-labdemo
|
check.py
|
Python
|
mit
| 94
| 0.06383
|
def check(msg):
if msg == 'hello':
        print('hello')
    else:
        print('goodbye')
check('greetings')
|
mcmcplotlib/mcmcplotlib
|
api/generated/arviz-plot_dist-5.py
|
Python
|
apache-2.0
| 69
| 0
|
az.plot_dist(b, rug=True, quantiles=[.25, .5, .75], cumulative=True)
|
gurunars/state_machine_crawler
|
setup.py
|
Python
|
apache-2.0
| 812
| 0
|
#!/usr/bin/python
import os
from setuptools import setup, find_packages
SRC_DIR = os.path.dirname(__file__)
CHANGES_FILE = os.path.join(SRC_DIR, "CHANGES")
with open(CHANGES_FILE) as fil:
version = fil.readline().split()[0]
setup(
name="state-machine-crawler",
description="A library for following automata based programming model.",
version=version,
packages=find_packages(),
setup_requires=["nose"],
tests_require=["moc
|
k==1.0.1", "coverage"],
install_requires=["werkzeug", "pydot2", "pyparsing==1.5.2"],
test_suite='nose.collector',
author="Anton Berezin",
author_email="[email protected]",
entry_points={
"console_scripts": [
'state-machine-crawler = state_machine_crawler:entry_point'
]
},
    include_package_data=True
)
|
jpeddicord/jobservice
|
JobService/policy.py
|
Python
|
gpl-3.0
| 2,319
| 0.004743
|
# This file is part of jobservice.
# Copyright 2010 Jacob Peddicord <[email protected]>
#
# jobservice is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# jobservice is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with jobservice. If not, see <http://www.gnu.org/licenses/>.
import logging
from dbus import SystemBus, DBusException, Interface, UInt64
log = logging.getLogger('policy')
class DeniedByPolicy(DBusException):
_dbus_error_name = 'com.ubuntu.JobService.DeniedByPolicy'
class Policy:
    def __init__(self, enforce=True):
self.enforce = enforce
self.bus = SystemBus()
self.dbus_iface = None
        self.pk = Interface(self.bus.get_object('org.freedesktop.PolicyKit1',
'/org/freedesktop/PolicyKit1/Authority'),
'org.freedesktop.PolicyKit1.Authority')
if not enforce:
log.warn('Not enforcing PolicyKit privileges!')
def check(self, sender, conn, priv='com.ubuntu.jobservice.manage'):
"""
Check or ask for authentication for job management.
"""
if not self.enforce: return
log.debug('Asking for PolicyKit authorization')
# get the PID of the sender
if not self.dbus_iface:
self.dbus_iface = Interface(conn.get_object('org.freedesktop.DBus',
'/org/freedesktop/DBus/Bus'), 'org.freedesktop.DBus')
pid = self.dbus_iface.GetConnectionUnixProcessID(sender)
# ask PolicyKit
auth, challenge, details = self.pk.CheckAuthorization(
('unix-process', {'pid': pid, 'start-time': UInt64(0)}),
priv, {'': ''}, 1, '', timeout=500)
if not auth:
log.info('Authorization failed')
raise DeniedByPolicy('Not authorized to manage jobs.')
log.debug('Authorization passed')
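# --- Illustrative usage sketch (interface and method names hypothetical) ---
# A D-Bus service method guarded by the Policy class above:
#
#     @dbus.service.method('com.ubuntu.JobService',
#                          sender_keyword='sender',
#                          connection_keyword='conn')
#     def StartJob(self, name, sender=None, conn=None):
#         self.policy.check(sender, conn)  # raises DeniedByPolicy if refused
#         ...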
|
IrinaZI/Python_training
|
model/contact.py
|
Python
|
apache-2.0
| 1,808
| 0.005531
|
from sys import maxsize
class Contact:
def __init__(self, Firstname=None, Middlename=None, Lastname=None, Nickname=None, Title=None, Company=None, Address=None, Home=None, Mobile=None, Work=None,
                 Fax=None, Email=None, Email2=None, Email3=None, Homepage=None, Bday=None, Bmonth=None, Byear=None, Aday=None, Amonth=None, Ayear=None, Address2=None, Phone2=None,
                 Notes=None, id=None, all_phones_from_home_page=None, all_address_from_home_page=None, all_emails=None):
self.Firstname = Firstname
self.Middlename = Middlename
self.Lastname = Lastname
self.Nickname = Nickname
self.Title = Title
self.Company = Company
self.Address = Address
self.Home = Home
self.Mobile = Mobile
self.Work = Work
self.Fax = Fax
self.Email = Email
self.Email2 = Email2
self.Email3 = Email3
self.Homepage = Homepage
self.Bday = Bday
self.Bmonth = Bmonth
self.Byear = Byear
self.Aday = Aday
self.Amonth = Amonth
self.Ayear = Ayear
self.Address2 = Address2
self.Phone2 = Phone2
self.Notes = Notes
self.id = id
self.all_phones_from_home_page = all_phones_from_home_page
self.all_address_from_home_page = all_address_from_home_page
self.all_emails=all_emails
def __eq__(self, other):
return (self.id is None or other.id is None or self.id == other.id) and self.Firstname == other.Firstname and self.Lastname == other.Lastname
def __repr__(self):
return "%s:%s;%s" % (self.Firstname, self.Lastname, self.Middlename)
def id_or_max(self):
if self.id:
return int(self.id)
else:
return maxsize
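# --- Illustrative usage sketch (list names hypothetical) -------------------
# id_or_max() gives a stable sort key that pushes contacts without a
# database id to the end, e.g. when comparing UI and DB contact lists:
#
#     ui_list.sort(key=Contact.id_or_max)
#     db_list.sort(key=Contact.id_or_max)
#     assert ui_list == db_list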
|
eusoubrasileiro/fatiando
|
fatiando/gravmag/magdir.py
|
Python
|
bsd-3-clause
| 7,336
| 0
|
"""
Estimation of the total magnetization vector of homogeneous bodies.
It estimates parameters related to the magnetization vector of homogeneous
bodies.
**Algorithms**
* :class:`~fatiando.gravmag.magdir.DipoleMagDir`: This class estimates
the Cartesian components of the magnetization vector of homogeneous
dipolar bodies with known center. The estimated magnetization vector
is converted to dipole moment, inclination (positive down) and declination
(with respect to x, North).
----
"""
from __future__ import division
import numpy
from ..inversion.base import Misfit
from .. import mesher
from ..utils import ang2vec, vec2ang, safe_dot
from . import sphere
from ..constants import G, CM, T2NT, SI2EOTVOS
class DipoleMagDir(Misfit):
"""
Estimate the magnetization vector of a set of dipoles from magnetic
total field anomaly.
By using the well-known first-order approximation of the total field
    anomaly (Blakely, 1996, p. 179) produced by a set of dipoles, the
    estimation of the Cartesian components of the magnetization vectors is
    formulated as a linear inverse problem. After estimating the magnetization
vectors, they are converted to dipole moment, inclination (positive down)
and declination (with respect to x, North).
Reference
Blakely, R. (1996), Potential theory in gravity and magnetic applications:
CUP
.. note:: Assumes x = North, y = East, z = Down.
Parameters:
* x, y, z : 1d-arrays
The x, y, z coordinates of each data point.
* data : 1d-array
The total field magnetic anomaly data at each point.
* inc, dec : floats
The inclination and declination of the inducing field
* points : list of points [x, y, z]
Each point [x, y, z] is the center of a dipole. Will invert for
the Cartesian components of the magnetization vector of each
dipole. Subsequently, the estimated magnetization vectors are
converted to dipole moment, inclination and declination.
.. note:: Inclination is positive down and declination is measured with
respect to x (North).
Examples:
Estimation of the total magnetization vector of dipoles with known centers
>>> import numpy
>>> from fatiando import gridder, utils
>>> from fatiando.gravmag import sphere
>>> from fatiando.mesher import Sphere, Prism
>>> # Produce some synthetic data
>>> area = (0, 10000, 0, 10000)
>>> x, y, z = gridder.scatter(area, 500, z=-150, seed=0)
>>> model = [Sphere(3000, 3000, 1000, 1000,
... {'magnetization': utils.ang2vec(6.0, -20.0, -10.0)}),
... Sphere(7000, 7000, 1000, 1000,
... {'magnetization': utils.ang2vec(6.0, 30.0, -40.0)})]
>>> inc, dec = -9.5, -13
>>> tf = sphere.tf(x, y, z, model, inc, dec)
>>> # Give the coordinates of the dipoles
>>> points = [[3000.0, 3000.0, 1000.0], [7000.0, 7000.0, 1000.0]]
>>> p_true = numpy.hstack((ang2vec(CM*(4.*numpy.pi/3.)*6.0*1000**3,
... -20.0, -10.0),
... ang2vec(CM*(4.*numpy.pi/3.)*6.0*1000**3,
... 30.0, -40.0)))
>>> estimate_true = [utils.vec2ang(p_true[3*i : 3*i + 3]) for i
... in range(len(points))]
>>> # Make a solver and fit it to the data
>>> solver = DipoleMagDir(x, y, z, tf, inc, dec, points).fit()
>>> # Check the fit
>>> numpy.allclose(tf, solver.predicted(), rtol=0.001, atol=0.001)
True
>>> # solver.p_ returns the Cartesian components of the
>>> # estimated magnetization vectors
>>> for p in solver.p_: print "%.10f" % p
2325.8255393651
-410.1057950109
-859.5903757213
1667.3411086852
-1399.0653093445
1256.6370614359
>>> # Check the estimated parameter vector
>>> numpy.allclose(p_true, solver.p_, rtol=0.001, atol=0.001)
True
>>> # The parameter vector is not that useful so use solver.estimate_
>>> # to convert the estimated magnetization vectors in dipole moment,
>>> # inclination and declination.
>>> for e in solver.estimate_:
... print "%.10f %.10f %.10f" % (e[0], e[1], e[2])
2513.2741228718 -20.0000000000 -10.0000000000
2513.2741228718 30.0000000000 -40.0000000000
>>> # Check the converted estimate
>>> numpy.allclose(estimate_true, solver.estimate_, rtol=0.001,
... atol=0.001)
True
"""
def __init__(self, x, y, z, data, inc, dec, points):
super(DipoleMagDir, self).__init__(
data=data,
positional={'x': x, 'y': y, 'z': z},
model={'inc': inc, 'dec': dec, 'points': points},
nparams=3 * len(points),
islinear=True)
# Constants
self.ndipoles = len(points)
self.cte = 1.0 / ((4.0 * numpy.pi / 3.0) * G * SI2EOTVOS)
# Geomagnetic Field versor
self.F_versor = ang2vec(1.0, self.model['inc'], self.model['dec'])
def _get_predicted(self, p):
return safe_dot(self.jacobian(p), p)
def _get_jacobian(self, p):
x = self.positional['x']
y = self.positional['y']
z = self.positional['z']
dipoles = [mesher.Sphere(xp, yp, zp, 1.) for xp, yp, zp in
self.model['points']]
jac = numpy.empty((self.ndata, self.nparams), dtype=float)
for i, dipole in enumerate(dipoles):
k = 3 * i
derivative_gxx = sphere.gxx(x, y, z, [dipole], dens=self.cte)
derivative_gxy = sphere.gxy(x, y, z, [dipole], dens=self.cte)
derivative_gxz = sphere.gxz(x, y, z, [dipole], dens=self.cte)
derivative_gyy = sphere.gyy(x, y, z, [dipole], dens=self.cte)
derivative_gyz = sphere.gyz(x, y, z, [dipole], dens=self.cte)
derivative_gzz = sphere.gzz(x, y, z, [dipole], dens=self.cte)
jac[:, k] = T2NT * ((self.F_versor[0] * derivative_gxx) +
(self.F_versor[1] * derivative_gxy) +
(self.F_versor[2] * derivative_gxz))
jac[:, k + 1] = T2NT * ((self.F_versor[0] * derivative_gxy) +
(self.F_versor[1] * derivative_gyy) +
(self.F_versor[2] * derivative_gyz))
jac[:, k + 2] = T2NT * ((self.F_versor[0] * derivative_gxz) +
(self.F_versor[1] * derivative_gyz) +
(self.F_versor[2] * derivative_gzz))
return jac
def fit(self):
"""
Solve for the magnetization direction of a set of dipoles.
After solving, use the ``estimate_`` attribute to get the
estimated magnetization vectors in dipole moment, inclination
and declination.
The estimated magnetization vectors in Cartesian coordinates can
be accessed through the ``p_`` attribute.
See the the docstring of :class:`~fatiando.gravmag.magdir.DipoleMagDir`
for examples.
"""
super(DipoleMagDir, self).fit()
self._estimate = [vec2ang(self.p_[3 * i: 3 * i + 3]) for i in
range(len(self.model['points']))]
return self
|
weinrank/wireshark
|
tools/asn2wrs.py
|
Python
|
gpl-2.0
| 308,942
| 0.01106
|
#!/usr/bin/env python
#
# asn2wrs.py
# ASN.1 to Wireshark dissector compiler
# Copyright 2004 Tomas Kukosa
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, and/or sell copies of the Software, and to permit persons
# to whom the Software is furnished to do so, provided that the above
# copyright notice(s) and this permission notice appear in all copies of
# the Software and that both the above copyright notice(s) and this
# permission notice appear in supporting documentation.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
# OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL
# INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING
# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
# Except as contained in this notice, the name of a copyright holder
# shall not be used in advertising or otherwise to promote the sale, use
# or other dealings in this Software without prior written authorization
# of the copyright holder.
"""ASN.1 to Wireshark dissector compiler"""
#
# Compiler from ASN.1 specification to the Wireshark dissector
#
# Based on ASN.1 to Python compiler from Aaron S. Lav's PyZ3950 package licensed under the X Consortium license
# http://www.pobox.com/~asl2/software/PyZ3950/
# (ASN.1 to Python compiler functionality is broken but not removed, it could be revived if necessary)
#
# It requires Dave Beazley's PLY parsing package licensed under the LGPL (tested with version 2.3)
# http://www.dabeaz.com/ply/
#
#
# ITU-T Recommendation X.680 (07/2002),
# Information technology - Abstract Syntax Notation One (ASN.1): Specification of basic notation
#
# ITU-T Recommendation X.681 (07/2002),
# Information technology - Abstract Syntax Notation One (ASN.1): Information object specification
#
# ITU-T Recommendation X.682 (07/2002),
# Information technology - Abstract Syntax Notation One (ASN.1): Constraint specification
#
# ITU-T Recommendation X.683 (07/2002),
# Information technology - Abstract Syntax Notation One (ASN.1): Parameterization of ASN.1 specifications
#
# ITU-T Recommendation X.880 (07/1994),
# Information technology - Remote Operations: Concepts, model and notation
#
import warnings
import re
import sys
import os
import os.path
import time
import getopt
import traceback
import lex
import yacc
if sys.version_info[0] < 3:
from string import maketrans
# OID name -> number conversion table
oid_names = {
'/itu-t' : 0,
'/itu' : 0,
'/ccitt' : 0,
'/itu-r' : 0,
'0/recommendation' : 0,
'0.0/a' : 1,
'0.0/b' : 2,
'0.0/c' : 3,
'0.0/d' : 4,
'0.0/e' : 5,
'0.0/f' : 6,
'0.0/g' : 7,
'0.0/h' : 8,
'0.0/i' : 9,
'0.0/j' : 10,
'0.0/k' : 11,
'0.0/l' : 12,
'0.0/m' : 13,
'0.0/n' : 14,
'0.0/o' : 15,
'0.0/p' : 16,
'0.0/q' : 17,
'0.0/r' : 18,
'0.0/s' : 19,
'0.0/t' : 20,
'0.0/tseries' : 20,
'0.0/u' : 21,
'0.0/v' : 22,
'0.0/w' : 23,
'0.0/x' : 24,
'0.0/y' : 25,
'0.0/z' : 26,
'0/question' : 1,
'0/administration' : 2,
'0/network-operator' : 3,
'0/identified-organization' : 4,
'0/r-recommendation' : 5,
'0/data' : 9,
'/iso' : 1,
'1/standard' : 0,
'1/registration-authority' : 1,
'1/member-body' : 2,
'1/identified-organization' : 3,
'/joint-iso-itu-t' : 2,
'/joint-iso-ccitt' : 2,
'2/presentation' : 0,
'2/asn1' : 1,
'2/association-control' : 2,
'2/reliable-transfer' : 3,
'2/remote-operations' : 4,
'2/ds' : 5,
'2/directory' : 5,
'2/mhs' : 6,
'2/mhs-motis' : 6,
'2/ccr' : 7,
'2/oda' : 8,
'2/ms' : 9,
'2/osi-management' : 9,
'2/transaction-processing' : 10,
'2/dor' : 11,
'2/distinguished-object-reference' : 11,
'2/reference-data-transfe' : 12,
'2/network-layer' : 13,
'2/network-layer-management' : 13,
'2/transport-layer' : 14,
'2/transport-layer-management' : 14,
'2/datalink-layer' : 15,
'2/datalink-layer-managemen' : 15,
'2/datalink-layer-management-information' : 15,
'2/country' : 16,
'2/registration-procedures' : 17,
'2/registration-procedure' : 17,
'2/physical-layer' : 18,
'2/physical-layer-management' : 18,
'2/mheg' : 19,
'2/genericULS' : 20,
'2/generic-upper-layers-security' : 20,
'2/guls' : 20,
'2/transport-layer-security-protocol' : 21,
'2/network-layer-security-protocol' : 22,
'2/international-organizations' : 23,
'2/internationalRA' : 23,
'2/sios' : 24,
'2/uuid' : 25,
'2/odp' : 26,
'2/upu' : 40,
}
ITEM_FIELD_NAME = '_item'
UNTAG_TYPE_NAME = '_untag'
def asn2c(id):
return id.replace('-', '_').replace('.', '_').replace('&', '_')
input_file = None
g_conform = None
lexer = None
in_oid = False
class LexError(Exception):
def __init__(self, tok, filename=None):
self.tok = tok
self.filename = filename
self.msg = "Unexpected character %r" % (self.tok.value[0])
Exception.__init__(self, self.msg)
def __repr__(self):
return "%s:%d: %s" % (self.filename, self.tok.lineno, self.msg)
__str__ = __repr__
class ParseError(Exception):
def __init__(self, tok, filename=None):
self.tok = tok
self.filename = filename
self.msg = "Unexpected token %s(%r)" % (self.tok.type, self.tok.value)
Exception.__init__(self, self.msg)
def __repr__(self):
return "%s:%d: %s" % (self.filename, self.tok.lineno, self.msg)
__str__ = __repr__
class DuplicateError(Exception):
def __init__(self, type, ident):
self.type = type
self.ident = ident
self.msg = "Duplicate %s for %s" % (self.type, self.ident)
Exception.__init__(self, self.msg)
def __repr__(self):
return self.msg
__str__ = __repr__
class CompError(Exception):
def __init__(self, msg):
self.msg = msg
Exception.__init__(self, self.msg)
def __repr__(self):
return self.msg
__str__ = __repr__
states = (
('braceignore','exclusive'),
)
precedence = (
('left', 'UNION', 'BAR'),
('left', 'INTERSECTION', 'CIRCUMFLEX'),
)
# 11 ASN.1 lexical items
static_tokens = {
r'::=' : 'ASSIGNMENT', # 11.16 Assignment lexical item
r'\.\.' : 'RANGE', # 11.17 Range separator
r'\.\.\.' : 'ELLIPSIS', # 11.18 Ellipsis
r'\[\[' : 'LVERBRACK', # 11.19 Left version brackets
r'\]\]' : 'RVERBRACK', # 11.20 Right version brackets
# 11.26 Single character lexical items
r'\{' : 'LBRACE',
r'\}' : 'RBRACE',
r'<' : 'LT',
#r'>' : 'GT',
r',' : 'COMMA',
r'\.' : 'DOT',
r'\(' : 'LPAREN',
r'\)' : 'RPAREN',
r'\[' : 'LBRACK',
r'\]' : 'RBRACK',
r'-' : 'MINUS',
r':' : 'COLON',
#r'=' : 'EQ',
#r'"' : 'QUOTATION',
#r"'" : 'APOSTROPHE',
r';' : 'SEMICOLON',
r'@' : 'AT',
r'\!' : 'EXCLAMATION',
r'\^' : 'CIRCUMFLEX',
r'\&' : 'AMPERSAND',
r'\|' : 'BAR'
}
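# Sketch (an assumption, not code from this file): PLY typically consumes a
# table like static_tokens by exposing one module-level string rule per
# token, e.g.:
#
#   tokens = list(static_tokens.values())
#   for regex, name in static_tokens.items():
#       globals()['t_' + name] = regex
#
# The real registration presumably happens further down in the full source.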
# 11.27 Reserved words
# all keys in reserved_words must start w/ upper case
reserved_words = {
'ABSENT' : 'ABSENT',
'ABSTRACT-SYNTAX' : 'ABSTRACT_SYNTAX',
'ALL' : 'ALL',
'APPLICATION' : 'APPLICATION',
'AUTOMATIC' : 'AUTOMATIC',
'BEGIN' : 'BEGIN',
'BIT' : 'BIT',
'BOOLEAN' : 'BOOLEAN',
'BY' : 'BY',
'CHARACTER' : 'CHARACTER',
'CHOICE' : 'CHOICE',
'CLASS' : 'CLASS',
'COMPONENT' : 'COMPONENT',
'COMPONENTS' : 'COMPONENTS',
'CONSTRAINED' : 'CONSTRAINED',
'CONTAINING' : 'CONTAINING',
|
keelerm84/powerline
|
setup.py
|
Python
|
mit
| 931
| 0.026853
|
#!/usr/bin/env python
# vim:fileencoding=utf-8:noet
from __future__ import unicode_literals
import os
import sys
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
try:
README = open(os.path.join(here, 'README.rst'), 'rb').read().decode('utf-8')
except IOError:
README = ''
old_python = sys.version_info < (2, 7)
setup(
name='Powerline',
version='beta',
description='The ultimate statusline/prompt utility.',
long_description=README,
classifiers=[],
author='Kim Silkebaekken',
author_email='[email protected]',
url='https://github.com/Lokaltog/powerline',
scripts=[
'scripts/powerline',
'scripts/powerline-lint',
],
keywords='',
packages=find_packages(exclude=('tests', 'tests.*')),
include_package_data=True,
zip_safe=False,
install_requires=[],
extras_require={
'docs': [
'Sphinx',
],
},
test_suite='tests' if not old_python else None,
)
|
uclouvain/osis_louvain
|
base/migrations/0208_create_role_executive.py
|
Python
|
agpl-3.0
| 935
| 0.003209
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-11-22 07:11
from __future__ import unicode_literals
from django.core.management.sql import emit_post_migrate_signal
from django.db import migrations
def add_executive_group(apps, schema_editor):
# create group
db_alias = schema_editor.connection.alias
emit_post_migrate_signal(2, False, db_alias)
Group = apps.get_model('auth', 'Group')
Permission = apps.get_model('auth', 'Permission')
executive_group, created = Group.objects.get_or_create(name='executive')
if created:
# Learning unit
can_access_learningunit = Permission.objects.get(codename='can_access_learningunit')
executive_group.permissions.add(can_access_learningunit)
class Migration(migrations.Migration):
dependencies = [
('base', '0207_auto_20171220_1035'),
]
operations = [
migrations.RunPython(add_executive_group),
]
|
mhielscher/wasabiscripts
|
twitterpic.py
|
Python
|
bsd-2-clause
| 1,695
| 0.020649
|
# Twitter profile image updater
# http://twitter.com/account/update_profile_image.json
# image = [imagefile]
import sys
import os
import random
import re
import urllib
import json
import urllib2
import oauth2 as oauth
import time
import wcommon
def encode_file_data(image):
boundary = hex(int(time.time()))[2:]
headers = {}
headers['Content-Type'] = 'multipart/form-data; boundary="%s"' % (boundary)
data = [] #to be joined later (faster)
data.append("--"+boundary)
data.append("\r\n")
data.append('Content-Disposition: form-data; name="image"; filename="%s"\r\n' % image.name)
if image.name.endswith("jpg") or image.name.endswith("jpeg"):
data.append("Content-Type: image/jpeg\r\n\r\n")
elif image.name.endswith("png"):
data.append("Content-Type: image/png\r\n\r\n")
elif image.name.endswith("gif"):
data.append("Content-Type: image/gif\r\n\r\n")
else:
data.append("Content-Type: application/octet-stream\r\n\r\n")
data.append(image.read())
data.append("\r\n--")
data.append(boundary)
data.append("--\r\n\r\n")
body = ''.join(data)
headers['Content-Length'] = str(len(body))
return (headers, body)
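# For reference, the body assembled above has this shape (illustrative
# boundary and filename):
#   --4f2a3c\r\n
#   Content-Disposition: form-data; name="image"; filename="pic.png"\r\n
#   Content-Type: image/png\r\n\r\n
#   <raw image bytes>\r\n--4f2a3c--\r\n\r\n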
os.chdir("./pics/")
files = os.listdir(os.getcwd())
images = []
for filename in files:
if filename.endswith(".jpg") or filename.endswith(".png") or filename.endswith(".gif"):
images.append(filename)
imagefile = random.choice(images)
image = open(imagefile, 'rb')
url = 'http://api.twitter.com/1/account/update_profile_image.json'
headers, postdata = encode_file_data(image)
r, c = wcommon.oauth_req(url, http_method="POST", post_body=postdata, http_headers=headers)
if r.status != 200:
print "Updating profile image did not succeed: Status %d" % (r.status)
|
gentooza/Freedom-Fighters-of-Might-Magic
|
src/gamelib/gummworld2/pygametext.py
|
Python
|
gpl-3.0
| 21,402
| 0.00271
|
# ptext module: place this in your import directory.
# ptext.draw(text, pos=None, **options)
# Please see README.md for explanation of options.
# https://github.com/cosmologicon/pygame-text
from __future__ import division
from math import ceil, sin, cos, radians, exp
import pygame
DEFAULT_FONT_SIZE = 24
REFERENCE_FONT_SIZE = 100
DEFAULT_LINE_HEIGHT = 1.0
DEFAULT_PARAGRAPH_SPACE = 0.0
DEFAULT_FONT_NAME = None
FONT_NAME_TEMPLATE = "%s"
DEFAULT_COLOR = "white"
DEFAULT_BACKGROUND = None
DEFAULT_SHADE = 0
DEFAULT_OUTLINE_COLOR = "black"
DEFAULT_SHADOW_COLOR = "black"
OUTLINE_UNIT = 1 / 24
SHADOW_UNIT = 1 / 18
DEFAULT_ALIGN = "left" # left, center, or right
DEFAULT_ANCHOR = 0, 0 # 0, 0 = top left ; 1, 1 = bottom right
DEFAULT_STRIP = True
ALPHA_RESOLUTION = 16
ANGLE_RESOLUTION_DEGREES = 3
AUTO_CLEAN = True
MEMORY_LIMIT_MB = 64
MEMORY_REDUCTION_FACTOR = 0.5
pygame.font.init()
_font_cache = {}
def getfont(fontname=None, fontsize=None, sysfontname=None,
bold=None, italic=None, underline=None):
if fontname is not None and sysfontname is not None:
raise ValueError("Can't set both fontname and sysfontname")
if fontname is None and sysfontname is None:
fontname = DEFAULT_FONT_NAME
if fontsize is None:
fontsize = DEFAULT_FONT_SIZE
key = fontname, fontsize, sysfontname, bold, italic, underline
if key in _font_cache:
return _font_cache[key]
if sysfontname is not None:
font = pygame.font.SysFont(sysfontname, fontsize, bold or False, italic or False)
else:
if fontname is not None:
fontname = FONT_NAME_TEMPLATE % fontname
try:
font = pygame.font.Font(fontname, fontsize)
except IOError:
raise IOError("unable to read font filename: %s" % fontname)
if bold is not None:
font.set_bold(bold)
if italic is not None:
font.set_italic(italic)
if underline is not None:
font.set_underline(underline)
_font_cache[key] = font
return font
def wrap(text, fontname=None, fontsize=None, sysfontname=None,
bold=None, italic=None, underline=None, width=None, widthem=None, strip=None):
if widthem is None:
font = getfont(fontname, fontsize, sysfontname, bold, italic, underline)
elif width is not None:
raise ValueError("Can't set both width and widthem")
else:
font = getfont(fontname, REFERENCE_FONT_SIZE, sysfontname, bold, italic, underline)
width = widthem * REFERENCE_FONT_SIZE
if strip is None:
strip = DEFAULT_STRIP
paras = text.replace("\t", " ").split("\n")
lines = []
for jpara, para in enumerate(paras):
if strip:
para = para.rstrip(" ")
if width is None:
lines.append((para, jpara))
continue
if not para:
lines.append(("", jpara))
continue
# Preserve leading spaces in all cases.
a = len(para) - len(para.lstrip(" "))
# At any time, a is the rightmost known index you can legally split a line. I.e. it's legal
# to add para[:a] to lines, and line is what will be added to lines if para is split at a.
a = para.index(" ", a) if " " in para else len(para)
line = para[:a]
while a + 1 < len(para):
# b is the next legal place to break the line, with bline the corresponding line to add.
if " " not in para[a + 1:]:
b = len(para)
bline = para
elif strip:
# Lines may be split at any space character that immediately follows a non-space
# character.
b = para.index(" ", a + 1)
while para[b - 1] == " ":
if " " in para[b + 1:]:
b = para.index(" ", b + 1)
else:
b = len(para)
break
bline = para[:b]
else:
# Lines may be split at any space character, or any character immediately following
# a space character.
b = a + 1 if para[a] == " " else para.index(" ", a + 1)
bline = para[:b]
if font.size(bline)[0] <= width:
a, line = b, bline
else:
lines.append((line, jpara))
para = para[a:].lstrip(" ") if strip else para[a:]
a = para.index(" ", 1) if " " in para[1:] else len(para)
line = para[:a]
if para:
lines.append((line, jpara))
return lines
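# Example (added illustration): wrapping a sentence to a 200-pixel column
# with the default font; the exact split points depend on the font metrics:
#
#   lines = wrap("the quick brown fox jumps over the lazy dog", width=200)
#   # -> e.g. [("the quick brown fox", 0), ("jumps over the lazy dog", 0)]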
_fit_cache = {}
def _fitsize(text, fontname, sysfontname, bold, italic, underline, width, height, lineheight, pspace, strip):
key = text, fontname, sysfontname, bold, italic, underline, width, height, lineheight, pspace, strip
if key in _fit_cache:
return _fit_cache[key]
def fits(fontsize):
texts = wrap(text, fontname, fontsize, sysfontname, bold, italic, underline, width=width, strip=strip)
font = getfont(fontname, fontsize, sysfontname, bold, italic, underline)
w = max(font.size(line)[0] for line, jpara in texts)
linesize = font.get_linesize() * lineheight
paraspace = font.get_linesize() * pspace
h = int(round((len(texts) - 1) * linesize + texts[-1][1] * paraspace)) + font.get_height()
return w <= width and h <= height
a, b = 1, 256
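# Bisection invariant (comment added for clarity): once the two guards below
# fall through, fits(a) is True and fits(b) is False, so the loop narrows in
# on the largest font size that still fits.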
if not fits(a):
fontsize = a
elif fits(b):
fontsize = b
else:
while b - a > 1:
c = (a + b) // 2
if fits(c):
a = c
else:
b = c
fontsize = a
_fit_cache[key] = fontsize
return fontsize
def _resolvecolor(color, default):
if color is None:
color = default
if color is None:
return None
try:
return tuple(pygame.Color(color))
except ValueError:
return tuple(color)
def _applyshade(color, shade):
f = exp(-0.4 * shade)
r, g, b = [
min(max(int(round((c + 50) * f - 50)), 0), 255)
for c in color[:3]
]
return (r, g, b) + tuple(color[3:])
def _resolvealpha(alpha):
if alpha >= 1:
return 1
return max(int(round(alpha * ALPHA_RESOLUTION)) / ALPHA_RESOLUTION, 0)
def _resolveangle(angle):
if not angle:
return 0
angle %= 360
return int(round(angle / ANGLE_RESOLUTION_DEGREES)) * ANGLE_RESOLUTION_DEGREES
# Return the set of points in the circle radius r, using Bresenham's circle algorithm
_circle_cache = {}
def _circlepoints(r):
r = int(round(r))
if r in _circle_cache:
return _circle_cache[r]
x, y, e = r, 0, 1 - r
_circle_cache[r] = points = []
while x >= y:
points.append((x, y))
y += 1
if e < 0:
e += 2 * y - 1
else:
x -= 1
e += 2 * (y - x) - 1
points += [(y, x) for x, y in points if x > y]
points += [(-x, y) for x, y in points if x]
points += [(x, -y) for x, y in points if y]
points.sort()
return points
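# Illustration (added, not from the original source): _circlepoints(2)
# returns the integer offsets of a radius-2 ring such as (-2, 0), (0, 2),
# (2, 0), ...; getsurf presumably blits the glyph surface once per offset
# to draw outlines.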
_surf_cache = {}
_surf_tick_usage = {}
_surf_size_total = 0
_unrotated_size = {}
_tick = 0
def getsurf(text, fontname=None, fontsize=None, sysfontname=None, bold=None, italic=None,
underline=None, width=None, widthem=None, strip=None, color=None,
background=None, antialias=True, ocolor=None, owidth=None, scolor=None, shadow=None,
gcolor=None, shade=None, alpha=1.0, align=None, lineheight=None, pspace=None, angle=0,
cache=True):
global _tick, _surf_size_total
if fontname is None:
fontname = DEFAULT_FONT_NAME
if fontsize is None:
fontsize = DEFAULT_FONT_SIZE
fontsize = int(round(fontsize))
if align is None:
align = DEFAULT_ALIGN
if align in ["left", "center", "right"]:
align = [0, 0.5, 1][["left", "center", "right"].index(align)]
|
neuroo/equip
|
equip/analysis/flow.py
|
Python
|
apache-2.0
| 18,228
| 0.012783
|
# -*- coding: utf-8 -*-
"""
equip.analysis.flow
~~~~~~~~~~~~~~~~~~~
Extract the control flow graphs from the bytecode.
:copyright: (c) 2014 by Romain Gaucher (@rgaucher)
:license: Apache 2, see LICENSE for more details.
"""
import opcode
from operator import itemgetter, attrgetter
from itertools import tee, izip
from ..utils.log import logger
from ..utils.structures import intervalmap
from ..bytecode.utils import show_bytecode
from .graph import DiGraph, Edge, Node, Walker, EdgeVisitor, Tree, TreeNode
from .graph import DominatorTree, ControlDependence
from .block import BasicBlock
from .ast import Statement, Expression
from .constraint import Constraint
from .python.effects import get_stack_effect
from .python.opcodes import *
class ControlFlow(object):
"""
Performs the control-flow analysis on a ``Declaration`` object. It iterates
over its bytecode and builds the basic blocks. The final representation
leverages the ``DiGraph`` structure, and contains an instance of the
``DominatorTree``.
"""
E_TRUE = 'TRUE'
E_FALSE = 'FALSE'
E_UNCOND = 'UNCOND'
E_COND = 'COND'
E_EXCEPT = 'EXCEPT'
E_FINALLY = 'FINALLY'
E_RETURN = 'RETURN'
E_RAISE = 'RAISE'
E_END_LOOP = 'END_LOOP'
N_ENTRY = 'ENTRY'
N_IMPLICIT_RETURN = 'IMPLICIT_RETURN'
N_UNKNOWN = 'UNKNOWN'
N_LOOP = 'LOOP'
N_IF = 'IF'
N_EXCEPT = 'EXCEPT'
N_CONDITION = 'CONDITION'
CFG_TMP_RETURN = -1
CFG_TMP_BREAK = -2
CFG_TMP_RAISE = -3
CFG_TMP_CONTINUE = -4
def __init__(self, decl):
self._decl = decl
self._blocks = None
self._block_idx_map = {}
self._block_nodes = {}
self._block_intervals = None
self._conds = None
self._frames = None
self._graph = None
self._entry = None
self._exit = None
self._entry_node = None
self._exit_node = None
self._dom = None
self._cdg = None
self.analyze()
@property
def decl(self):
return self._decl
@decl.setter
def decl(self, value):
self._decl = value
@property
def entry(self):
return self._entry
@entry.setter
def entry(self, value):
self._entry = value
@property
def entry_node(self):
return self._entry_node
@entry_node.setter
def entry_node(self, value):
self._entry_node = value
@property
def exit(self):
return self._exit
@exit.setter
def exit(self, value):
self._exit = value
@property
def exit_node(self):
return self._exit_node
@exit_node.setter
def exit_node(self, value):
self._exit_node = value
@property
def blocks(self):
"""
Returns the basic blocks created during the control flow analysis.
"""
return self._blocks
@property
def block_indices_dict(self):
"""
Returns the mapping of bytecode indices to basic blocks.
"""
return self._block_idx_map
@property
def block_nodes_dict(self):
"""
Returns the mapping of basic blocks to CFG nodes.
"""
return self._block_nodes
@property
def blocks_intervals(self):
if self._block_intervals is None:
self._block_intervals = intervalmap()
for block in self.blocks:
self._block_intervals[block.index:block.index + block.length] = block
return self._block_intervals
@property
def block_constraints(self):
"""
Returns the constraints associated with each ``N_CONDITION`` node
in the CFG. This is lazily computed.
"""
if self._conds is None:
self.compute_conditions()
return self._conds
@property
def frames(self):
return self._frames
@property
def graph(self):
"""
Returns the underlying graph that holds the CFG.
"""
return self._graph
@property
def dominators(self):
"""
Returns the ``DominatorTree`` that contains:
- Dominator/Post-dominator tree (dict of IDom/PIDom)
- Dominance/Post-dominance frontier (dict of CFG node -> set CFG nodes)
This is lazily computed.
"""
if self._dom is None:
self._dom = DominatorTree(self)
return self._dom
@property
def control_dependence(self):
"""
Returns the ``ControlDependence`` graph. This is lazily computed.
"""
if self._cdg is None:
self._cdg = ControlDependence(self)
return self._cdg
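# Usage sketch (added for illustration; `decl` stands for any Declaration
# carrying bytecode):
#
#   cflow = ControlFlow(decl)       # analyze() runs in __init__
#   dom = cflow.dominators          # DominatorTree, computed lazily
#   cdg = cflow.control_dependence  # ControlDependence graph, also lazy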
def analyze(self):
"""
Performs the CFA and stores the resulting CFG.
"""
bytecode = self.decl.bytecode
self.entry = BasicBlock(BasicBlock.ENTRY, self.decl, -1)
self.exit = BasicBlock(BasicBlock.IMPLICIT_RETURN, self.decl, -1)
self._blocks = ControlFlow.make_blocks(self.decl, bytecode)
self.__build_flowgraph(bytecode)
# logger.debug("CFG(%s) :=\n%s", self.decl, self.graph.to_dot())
def __build_flowgraph(self, bytecode):
g = DiGraph(multiple_edges=False)
self.entry_node = g.make_add_node(kind=ControlFlow.N_ENTRY, data=self._entry)
self.exit_node = g.make_add_node(kind=ControlFlow.N_IMPLICIT_RETURN, data=self._exit)
self._block_idx_map = {}
self._block_nodes = {}
# Connect entry/implicit return blocks
last_block_index, last_block = -1, None
for block in self.blocks:
self._block_idx_map[block.index] = block
node_kind = ControlFlow.get_kind_from_block(block)
block_node = g.make_add_node(kind=node_kind, data=block)
self._block_nodes[block] = block_node
if block.index == 0:
g.make_add_edge(self.entry_node, self._block_nodes[block], kind=ControlFlow.E_UNCOND)
if block.index >= last_block_index:
last_block = block
last_block_index = block.index
g.make_add_edge(self._block_nodes[last_block], self.exit_node, kind=ControlFlow.E_UNCOND)
sorted_blocks = sorted(self.blocks, key=attrgetter('_index'))
i, length = 0, len(sorted_blocks)
while i < length:
cur_block = sorted_blocks[i]
if cur_block.jumps:
# Connect the current block to its jump targets
for (jump_index, branch_kind) in cur_block.jumps:
if jump_index <= ControlFlow.CFG_TMP_RETURN:
continue
target_block = self._block_idx_map[jump_index]
g.make_add_edge(
self._block_nodes[cur_block], self._block_nodes[target_block], kind=branch_kind)
i += 1
self._graph = g
self.__finalize()
self._graph.freeze()
logger.debug("CFG :=\n%s", self._graph.to_dot())
def __finalize(self):
def has_true_false_branches(list_edges):
has_true, has_false = False, False
for edge in list_edges:
if edge.kind == ControlFlow.E_TRUE: has_true = True
elif edge.kind == ControlFlow.E_FALSE: has_false = True
return has_true and has_false
def get_cfg_tmp_values(node):
values = set()
for (jump_index, branch_kind) in node.data.jumps:
if jump_index <= ControlFlow.CFG_TMP_RETURN:
values.add(jump_index)
return values
def get_parent_loop(node):
class BwdEdges(EdgeVisitor):
def __init__(self):
EdgeVisitor.__init__(self)
self.edges = []
def visit(self, edge):
self.edges.append(edge)
visitor = BwdEdges()
walker = Walker(self.graph, visitor, backwards=True)
walker.traverse(node)
parents = visitor.edges
node_bc_index = node.data.index
for parent_edge in parents:
parent = parent_edge.source
if parent.kind != ControlFlow.N_LOOP:
continue
# Find the loop in which the break/current node is nested in
if parent.data.index < node_bc_index and parent.data.end_target > node_bc_index:
return parent
return None
# Burn N_CONDITION nodes
for node in self.graph.nodes:
out_edges = self.graph.out_edges(node)
if len(out_edges) < 2 or not has_true_false_branches(out_edges):
continue
node.kind = ControlFlow.N_CONDITION
# Handle continue/return/break statements:
# - blocks with continue are simply connected to the parent loop
# - blocks with returns are simply connected to the IMPLICIT_RETURN
# and previous out edges removed
# - blocks with breaks are connected to the end of the current loop
# and previous out edges removed
|
Tribler/decentralized-mortgage-market
|
market/community/blockchain/community.py
|
Python
|
gpl-3.0
| 30,409
| 0.00342
|
import os
import time
import hashlib
import logging
from base64 import b64encode
from collections import OrderedDict, defaultdict
from twisted.internet.task import LoopingCall
from twisted.internet.defer import Deferred
from dispersy.authentication import MemberAuthentication
from dispersy.candidate import Candidate
from dispersy.community import Community
from dispersy.conversion import DefaultConversion
from dispersy.destination import CandidateDestination
from dispersy.distribution import DirectDistribution
from dispersy.message import Message
from dispersy.resolution import PublicResolution
from dispersy.requestcache import RandomNumberCache
from market.community.blockchain.conversion import BlockchainConversion
from market.community.payload import ProtobufPayload
from market.database.datamanager import BlockchainDataManager
from market.models.block import Block
from market.models.block_index import BlockIndex
from market.models.contract import Contract
from market.util.misc import median
from market.util.uint256 import full_to_uint256, compact_to_uint256, uint256_to_compact
from market.models import ObjectType
COMMIT_INTERVAL = 60
BLOCK_CREATION_INTERNAL = 1
BLOCK_TARGET_SPACING = 30 # 10 * 60
BLOCK_TARGET_TIMESPAN = 300 # 14 * 24 * 60 * 60
BLOCK_TARGET_BLOCKSPAN = BLOCK_TARGET_TIMESPAN / BLOCK_TARGET_SPACING
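# i.e. with the values above the difficulty is retargeted every 300 / 30 = 10 blocks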
BLOCK_DIFFICULTY_INIT = 0x05ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
BLOCK_DIFFICULTY_MIN = 0x05ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
BLOCK_GENESIS_HASH = '\00' * 32
MAX_CLOCK_DRIFT = 15 * 60
MAX_PACKET_SIZE = 1500
class SignatureRequestCache(RandomNumberCache):
def __init__(self, community):
super(SignatureRequestCache, self).__init__(community.request_cache, u'signature-request')
def on_timeout(self):
pass
class BlockRequestCache(RandomNumberCache):
def __init__(self, community, block_id):
super(BlockRequestCache, self).__init__(community.request_cache, u'block-request')
self.community = community
self.block_id = block_id
def on_timeout(self):
# Retry to download block
self.community.send_block_request(self.block_id)
class TraversalRequestCache(RandomNumberCache):
def __init__(self, community, contract_id, contract_type, deferred, min_responses, max_responses):
super(TraversalRequestCache, self).__init__(community.request_cache, u'traversal-request')
self.logger = community.logger
self.contract_id = contract_id
self.contract_type = contract_type
self.deferred = deferred
self.min_responses = min_responses
self.max_responses = max_responses
self.responses = {}
self.public_keys = []
def callback(self):
responses_sorted = sorted(self.responses.items(), key=lambda item: item[1])
if responses_sorted and responses_sorted[-1][1] >= self.min_responses:
self.deferred.callback(responses_sorted[-1][0])
else:
self.logger.warning('Not enough similar responses to traversal-request')
self.deferred.errback()
def add_response(self, public_key, response_tuple):
# Only allow 1 response per peer
if public_key in self.public_keys:
return False
self.public_keys.append(public_key)
self.responses[response_tuple] = self.responses.get(response_tuple, 0) + 1
# If we already have all responses there is no need to wait for the timeout
if sum(self.responses.values()) >= self.max_responses:
self.callback()
return True
return False
def on_timeout(self):
self.callback()
class BlockchainCommunity(Community):
def __init__(self, dispersy, master, my_member):
super(BlockchainCommunity, self).__init__(dispersy, master, my_member)
self.logger = logging.getLogger('BlockchainLogger')
self.incoming_contracts = OrderedDict()
self.incoming_blocks = {}
self.data_manager = None
def initialize(self, verifier=True, **db_kwargs):
super(BlockchainCommunity, self).initialize()
self.initialize_database(**db_kwargs)
if verifier:
self.register_task('create_block', LoopingCall(self.create_block)).start(BLOCK_CREATION_INTERNAL)
self.register_task('commit', LoopingCall(self.data_manager.commit)).start(COMMIT_INTERVAL)
self.logger.info('BlockchainCommunity initialized')
def initialize_database(self, database_fn=''):
if database_fn:
database_fn = os.path.join(self.dispersy.working_directory, database_fn)
self.data_manager = BlockchainDataManager(database_fn)
self.data_manager.initialize()
@classmethod
def get_master_members(cls, dispersy):
# generated: Fri Feb 24 11:22:22 2017
# curve: None
# len: 571 bits ~ 144 bytes signature
# pub: 170 3081a7301006072a8648ce3d020106052b81040027038192000407b
# acf5ae4d3fe94d49a7f94b7239e9c2d878b29f0fbdb7374d5b6a09d9d6fba80d
# 3807affd0ba45ba1ac1c278ca59bec422d8a44b5fefaabcdd62c2778414c01da
# 4578b304b104b00eec74de98dcda803b79fd1783d76cc1bd7aab75cfd8fff982
# 7a9647ae3c59423c2a9a984700e7cb43b881a6455574032cc11dba806dba9699
# f54f2d30b10eed5c7c0381a0915a5
# pub-sha1 56553661e30b342b2fc39f1a425eb612ef8b8c33
# -----BEGIN PUBLIC KEY-----
# MIGnMBAGByqGSM49AgEGBSuBBAAnA4GSAAQHus9a5NP+lNSaf5S3I56cLYeLKfD7
# 23N01bagnZ1vuoDTgHr/0LpFuhrBwnjKWb7EItikS1/vqrzdYsJ3hBTAHaRXizBL
# EEsA7sdN6Y3NqAO3n9F4PXbMG9eqt1z9j/+YJ6lkeuPFlCPCqamEcA58tDuIGmRV
# V0AyzBHbqAbbqWmfVPLTCxDu1cfAOBoJFaU=
# -----END PUBLIC KEY-----
master_key = '3081a7301006072a8648ce3d0201066052b81040027038192000407bacf5ae4d3fe94d49a7f94b7239e9c2d878b29' + \
'f0fbdb7374d5b6a09d9d6fba80d3807affd0ba45ba1ac1c278ca59bec422d8a44b5fefaabcdd62c2778414c01da4' + \
'578b304b104b00eec74de98dcda803b79fd1783d76cc1bd7aab75cfd8fff9827a9647ae3c59423c2a9a984700e7c' + \
'b43b881a6455574032cc11dba806dba9699f54f2d30b10eed5c7c0381a0915a5'
master = dispersy.get_member(public_key=master_key.decode('hex'))
return [master]
def initiate_meta_messages(self):
meta_messages = super(BlockchainCommunity, self).initiate_meta_messages()
return meta_messages + [
Message(self, u"signature-request",
MemberAuthentication(),
PublicResolution(),
DirectDistribution(),
CandidateDestination(),
ProtobufPayload(),
self._generic_timeline_check,
self.on_signature_request),
Message(self, u"signature-response",
MemberAuthentication(),
PublicResolution(),
DirectDistribution(),
CandidateDestination(),
ProtobufPayload(),
self._generic_timeline_check,
self.on_signature_response),
Message(self, u"contract",
MemberAuthentication(),
PublicResolution(),
DirectDistribution(),
CandidateDestination(),
ProtobufPayload(),
self._generic_timeline_check,
self.on_contract),
Message(self, u"block-request",
MemberAuthentication(),
PublicResolution(),
DirectDistribution(),
CandidateDestination(),
ProtobufPayload(),
self._generic_timeline_check,
self.on_block_request),
Message(self, u"block",
MemberAuthentication(),
PublicResolution(),
DirectDistribution(),
CandidateDestination(),
ProtobufPayload(),
self._generic_timeline_check,
self.on_block)
]
|
windreamer/dpark
|
dpark/conf.py
|
Python
|
bsd-3-clause
| 991
| 0.001009
|
from __future__ import absolute_import
import os.path
from dpark.util import get_logger
logger = get_logger(__name__)
# workdir used in slaves for internal files
#
DPARK_WORK_DIR = '/tmp/dpark'
if os.path.exists('/dev/shm'):
DPARK_WORK_DIR = '/dev/shm,/tmp/dpark'
# uri of mesos master, host[:5050] or zk://...
MESOS_MASTER = 'localhost'
# mount points of MooseFS, must be available on all slaves
# for example: '/mfs' : 'mfsmaster',
MOOSEFS_MOUNT_POINTS = {
}
# consistent dir cache in client, needs patched mfsmaster
MOOSEFS_DIR_CACHE = False
# memory used per task, like -M (--m) option in context.
MEM_PER_TASK = 200.0
def load_conf(path):
if not os.path.exists(path):
logger.error("conf %s do not exists", path)
raise Exception("conf %s do not exists" % path)
try:
data = open(path).read()
exec(data, globals(), globals())
except Exception as e:
logger.error("error while load conf from %s: %s", path, e)
raise
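# Usage sketch (added; the path is only an example): a conf file is plain
# Python executed into this module's globals, so a line such as
# MESOS_MASTER = 'zk://...' overrides the default above:
#
#   load_conf('/etc/dpark/dpark.conf')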
|
Impavidity/SearchEngine
|
WebSite/engine/query.py
|
Python
|
mit
| 3,939
| 0.001269
|
import cPickle as pickle
import numpy as np
import re
from math import log
from dic import Info
from config.config import config
from tools import tokenlize, comp_tuple, weights
class query_voc(object):
def __init__(self, tokens, dic):
self.tokens = tokens
self.dic = dic
class query_entry(object):
def __init__(self, doc_id, tokens, voc):
self.docID = doc_id
self.tokens = tokens
self.vector = np.zeros([1, len(voc.tokens)])
for token in self.tokens:
if config.WEIGHT_TYPE == 'wf-idf':
self.vector[0, voc.dic[token]['index']] = (1 + log(self.tokens[token])) * voc.dic[token]['idf']
elif config.WEIGHT_TYPE == 'tf-idf':
self.vector[0, voc.dic[token]['index']] = self.tokens[token] * voc.dic[token]['idf']
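# (added note) 'wf-idf' applies the sublinear weight (1 + log tf) * idf,
# whereas 'tf-idf' uses the raw term frequency, tf * idf.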
class query_index(object):
def __init__(self, tiered_index):
self.tiered_index = tiered_index
def load_and_calc(info):
# assert isinstance(pkl_path, str), "pkl_path is not the instance of string.\n"
#
# pkl_file = open(pkl_path, 'r')
# info = pickle.load(pkl_file)
voc = query_voc(info.voc_tokens, info.voc_dic)
tiered_index = query_index(info.tiered_index)
entries = {}
for item in info.entry_tokens:
entries[item] = query_entry(item, info.entry_tokens[item], voc)
return tiered_index, voc, entries
def construct_query_vector(tokens, voc):
query_vector = np.zeros([1, len(voc.tokens)])
print tokens
for token in tokens:
if token in voc.tokens:
query_vector[0, voc.dic[token]['index']] = tokens[token]
return query_vector
def topK_get_result(index, voc, entries, tokens):
result = []
query_vector = construct_query_vector(tokens, voc)
for level in index.tiered_index:
for token in tokens:
if token not in voc.tokens:
continue
docs = level[voc.dic[token]['index']]
for doc_id in docs:
if doc_id not in result:
weight = weights(query_vector, entries[doc_id].vector)
result.append((doc_id, weight))
if len(result) >= config.PARA_TOP_K:
return result[:config.PARA_TOP_K]
# if config.DEBUG:
# print '----------------query result--------------------'
# print result
# print '------------------------------------------------'
return result
def topK_query(index, voc, entries, query, index_type='tiered'):
result = []
if index_type == 'tiered':
result = topK_get_result(index, voc, entries, tokenlize(query))
result.sort(comp_tuple)
return result
def wildcard_query(index, voc, entries, query, index_type='tiered'):
tokens = tokenlize(query)
query_match = [[]]
for token in tokens:
match_tokens = []
if '*' in token:
regex = re.compile(token)
match_tokens = [string for string in voc.tokens if re.match(regex, string)]
else:
match_tokens.append(token)
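# (added note) the '*' above is treated as a regex quantifier, not a
# shell-style glob: a pattern like 'ab*' matches 'a', 'ab', 'abb', ...;
# re.match also anchors only at the start of each candidate token.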
tmp = []
if len(match_tokens) > 0:
for t1 in match_tokens:
for t2 in query_match:
tmp.append(t2 + [t1])
query_match = tmp
tmp = []
for item in query_match:
q = {}
for token in item:
if token in q:
q[token] += 1
else:
q[token] = 1
tmp.append(q)
query_match = tmp
result = []
if index_type == 'tiered':
for match in query_match:
result += topK_get_result(index, voc, entries, match)
result.sort(comp_tuple)
match = []
match_id = []
for doc in result:
if doc[0] in match_id:
continue
else:
match_id.append(doc[0])
match.append(doc)
if len(match_id) > config.PARA_TOP_K:
return match
return match
|
javert/tftpudGui
|
src/tftpudgui/qt4/TftpudSettings.py
|
Python
|
mit
| 2,975
| 0.005714
|
'''
Created on 21 Dec 2013
@author: huw
'''
from ConfigParser import ConfigParser
class TftpudSettings:
'''
A class to hold the settings for the TftpudServerGui application.
'''
def __init__(self):
'''
Constructor
'''
self.saveLastUsed = False
self.defaultDirectory = ''
self.defaultIpAddress = ''
self.defaultPort = 69
self.ephemeralPorts = [2048, 65535]
self.tftpTimeout = 6.0
self.tftpRetries = 3
def write(self, f):
'''Write these TFTPUD settings to the given file handle.'''
cfg = ConfigParser()
netSection = 'Network'
cfg.add_section(netSection)
cfg.set(netSection, 'defaultIpAddress', self.defaultIpAddress)
cfg.set(netSection, 'defaultPort', self.defaultPort)
cfg.set(netSection, 'ephemeralPortsFrom', self.ephemeralPorts[0])
cfg.set(netSection, 'ephemeralPortsTo', self.ephemeralPorts[1])
tftpSection = 'TFTP'
cfg.add_section(tftpSection)
cfg.set(tftpSection, 'timeout', self.tftpTimeout)
cfg.set(tftpSection, 'retries', self.tftpRetries)
serverSection = 'Server'
cfg.add_section(serverSection)
cfg.set(serverSection, 'defaultDirectory', self.defaultDirectory)
cfg.set(serverSection, 'saveLastUsed', self.saveLastUsed)
cfg.write(f)
def read(self, f):
'''Read the settings from the given file handle.'''
cfg = ConfigParser()
cfg.readfp(f)
netSection = 'Network'
if cfg.has_section(netSection):
if cfg.has_option(netSection, 'defaultIpAddress'):
self.defaultIpAddress = cfg.get(netSection, 'defaultIpAddress')
if cfg.has_option(netSection, 'defaultPort'):
self.defaultPort = cfg.getint(netSection, 'defaultPort')
if cfg.has_option(netSection, 'ephemeralPortsFrom'):
self.ephemeralPorts[0] = cfg.getint(netSection, 'ephemeralPortsFrom')
if cfg.has_option(netSection, 'ephemeralPortsTo'):
self.ephemeralPorts[1] = cfg.getint(netSection, 'ephemeralPortsTo')
tftpSection = 'TFTP'
if cfg.has_section(tftpSection):
if cfg.has_option(tftpSection, 'timeout'):
self.tftpTimeout = cfg.getfloat(tftpSection, 'timeout')
if cfg.has_option(tftpSection, 'retries'):
self.tftpRetries = cfg.getint(tftpSection, 'retries')
serverSection = 'Server'
if cfg.has_section(serverSection):
if cfg.has_option(serverSection, 'defaultDirectory'):
self.defaultDirectory = cfg.get(serverSection, 'defaultDirectory')
if cfg.has_option(serverSection, 'saveLastUsed'):
self.saveLastUsed = cfg.getboolean(serverSection, 'saveLastUsed')
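# Round-trip sketch (added illustration, not from the original file):
#
#   settings = TftpudSettings()
#   with open('tftpud.cfg', 'w') as f:
#       settings.write(f)
#   with open('tftpud.cfg') as f:
#       settings.read(f)  # restores the same values via ConfigParser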
| |
centaurialpha/edis
|
src/ui/editor/lexer.py
|
Python
|
gpl-3.0
| 2,435
| 0.000411
|
# -*- coding: utf-8 -*-
# EDIS - a simple cross-platform IDE for C
#
# This file is part of Edis
# Copyright 2014-2015 - Gabriel Acosta <acostadariogabriel at gmail>
# License: GPLv3 (see http://www.gnu.org/licenses/gpl.html)
from PyQt4.Qsci import QsciLexerCPP
from PyQt4.QtGui import QColor
from src import editor_scheme
from src.core import settings
class Lexer(QsciLexerCPP):
""" Lexer class """
def __init__(self, *args, **kwargs):
super(Lexer, self).__init__(*args, **kwargs)
# Configuration
self.setStylePreprocessor(True)
self.setFoldComments(True)
self.setFoldPreprocessor(True)
self.setFoldCompact(False)
self.load_highlighter()
def load_highlighter(self):
""" Método público: carga el resaltado de sintáxis """
scheme = editor_scheme.get_scheme(
settings.get_setting('editor/scheme'))
self.setDefaultPaper(QColor(scheme['BackgroundEditor']))
self.setPaper(self.defaultPaper(0))
self.setColor(QColor(scheme['Color']))
types = dir(self)
for _type in types:
if _type in scheme:
atr = getattr(self, _type)
self.setColor(QColor(scheme[_type]), atr)
def keywords(self, kset):
super(Lexer, self).keywords(kset)
if kset == 1:
# Reserved words
return ('auto break case const continue default do else enum '
'extern for goto if register return short sizeof static '
'struct switch typedef union unsigned void volatile while '
'char float int long double')
elif kset == 2:
# Functions defined in stdio.h and stdlib.h
return ('fprintf fscanf printf scanf sprintf sscanf vfprintf '
'vprintf vsprintf fclose fflush fopen freopen remove '
'rename setbuf tmpfile tmpnam fgetc fgets fputc fputs '
'getc getchar gets putc putchar puts ungetc fread fseek '
'fsetpos ftell rewind clearerr feof ferror perror '
'abort atexit exit getenv system abs div labs ldiv '
'rand srand atof atoi atol strtod strtod strtoll '
'strtoul bsearch qsort calloc realloc malloc free '
'mblen mbtowc wctomb mbstowcs wcstombs')
return super(Lexer, self).keywords(kset)
|
ZoranPavlovic/kombu
|
t/unit/utils/test_amq_manager.py
|
Python
|
bsd-3-clause
| 1,196
| 0
|
import pytest
from unittest.mock import patch
from case import mock
from kombu import Connection
class test_get_manager:
@mock.mask_modules('pyrabbit')
def test_without_pyrabbit(self):
with pytest.raises(ImportError):
Connection('amqp://').get_manager()
@mock.module_exists('pyrabbit')
def test_with_pyrabbit(self):
with patch('pyrabbit.Client', create=True) as Client:
manager = Connection('amqp://').get_manager()
assert manager is not None
Client.assert_called_with(
'localhost:15672', 'guest', 'guest',
)
@mock.module_exists('pyrabbit')
def test_transport_options(self):
with patch('pyrabbit.Client', create=True) as Client:
manager = Connection('amqp://', transport_options={
'manager_hostname': 'admin.mq.vandelay.com',
'manager_port': 808,
'manager_userid': 'george',
'manager_password': 'bosco',
}).get_manager()
assert manager is not None
Client.assert_called_with(
'admin.mq.vandelay.com:808', 'george', 'bosco',
)
|
teheavy/AMA3D
|
Nh3D/1_AMA3D_start.py
|
Python
|
gpl-2.0
| 1,285
| 0.035798
|
# Script Version: 1.0
# Author: Te Chen
# Project: AMA3D
# Task Step: 1
import sys
import urllib2
import time
VERSION = '4.0.0'
def prepare_cath():
ver = VERSION.replace('.', '_')
download_file(ver, 'CathDomainList')
download_file(ver, 'CathNames')
download_file(ver, 'CathDomainDescriptionFile')
def download_file(ver, file_name):
url = "ftp://ftp.biochem.ucl.ac.uk/pub/c
|
ath/v%s/%s" % (ver, file_name)
file_name = url.split('/')[-1]
u = urllib2.urlopen(url)
f = open('C:/AMA3D/Nh3D/' + file_name, 'wb')
meta = u.info()
file_size = int(meta.getheaders("Content-Length")[0])
print "Downloading: %s Bytes: %s" % (file_name, file_size)
file_size_dl = 0
block_sz = 8192
while True:
buffer = u.read(block_sz)
if not buffer:
break
file_size_dl += len(buffer)
f.write(buffer)
f.close()
print "Downloaded file" + file_name
if __name__ == '__main__':
# Download necessary files when start
prepare_cath()
# This part triggers all the tasks afterwards.
print "trigger\t%s\t%d\t%d"%('', 2, 1)
sys.stdout.flush()
print "trigger\t%s\t%d\t%d"%('', 3, 1)
sys.stdout.flush()
# Write result to a file as well just for testing
with open("Domain_Result", "w") as f:
f.write("Topology\tPDB ID\tR\tResolution\tChain Length\tScore\n")
|
Michael-Tu/tools
|
stock_scraping/stock_price_scraping_to_local.py
|
Python
|
mit
| 2,169
| 0.003688
|
'''
This script helps you scrape stock data available on Bloomberg Finance
and store it locally.
Please obey applicable local and federal laws and applicable API term of use
when using this scripts. I, the creater of this script, will not be responsible
for any legal issues resulting from the use of this script.
@author Gan Tu
@version python 2 or python 3
[HOW TO CHANGE PYTHON VERSION]
This script by default should be run by Python 2.
To use this in Python 3, change the followings:
1) change ALL occurrences of "urllib" to "urllib.request".
'''
import urllib
import re
import json
import os
# Stock Symbols Initialization
# Feel free to modify the file source to contain stock symbols you plan to scrape from
stocks = open("nasdaq_symbols.txt", "r").read().split("\n")
# URL Initialization
urlPrefix = "http://www.bloomberg.com/markets/api/bulk-time-series/price/"
urlAffix = "%3AUS?timeFrame="
# Only four of these are valid options for now
# 1_DAY will scrape minute-by-minute data for one day, while the others will be daily close prices
# Feel free to modify them for your own need
options = ["1_DAY", "1_MONTH", "1_YEAR", "5_YEAR"]
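# (added example) the URL assembled in scrap() below looks like, e.g. for AAPL:
#   http://www.bloomberg.com/markets/api/bulk-time-series/price/AAPL%3AUS?timeFrame=1_DAY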
def setup():
try:
os.mkdir("data")
except Exception as e:
pass
for option in options:
try:
os.mkdir("data/" + option + "/")
except Exception as e:
pass
def scrap():
i = 0
while i < len(stocks):
for option in options:
file = open("data/" + option + "/" + stocks[i] + ".txt", "w")
file.close()
htmltext = urllib.urlopen(urlPrefix + stocks[i] + urlAffix + option)
try:
data = json.load(htmltext)[0]["price"]
key = "date"
if option == "1_DAY":
key = "dateTime"
file = open("data/" + option + "/" + stocks[i] + ".txt", "a")
for price in data:
file.write(stocks[i] + "," + price[key] + "," + str(price["value"]) + "\n")
file.close()
except Exception as e:
pass
i += 1
if __name__ == "__main__":
setup()
scrap()
|
mauimuc/gptt
|
src/animation.py
|
Python
|
gpl-3.0
| 2,920
| 0.012671
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = "Stefan Mauerberger"
__copyright__ = "Copyright (C) 2017 Stefan Mauerberger"
__license__ = "GPLv3"
import numpy as np
from sys import stdout
from matplotlib import pyplot as plt
from matplotlib import animation
from plotting import prepare_map, lllat, lllon, urlat, urlon, cmap_mu, cmap_sd
from reference import c_act, dt_latlon
import h5py
dpi=150
fh = h5py.File('../dat/example.hdf5', 'r')
points = fh['points']
stations = fh['stations']
mu_C = fh['mu']
sd_C = fh['sd']
# Ratio 16:9
fig = plt.figure(figsize=(8,4.5))
fig.subplots_adjust(left=0.06, right=0.97, top=0.95, wspace=0.02, bottom=0.05)
ax_mu = fig.add_subplot(121)
ax_sd = fig.add_subplot(122)
# Subplot on the left
mu_delta = max(c_act._v0 - c_act.min, c_act.max - c_act._v0)
mu_vmax = (c_act._v0 + mu_delta).round(0)
mu_vmin = (c_act._v0 - mu_delta).round(0)
m = prepare_map(ax_mu)
x, y = m(points['lon'], points['lat'])
tpc_mu = ax_mu.tripcolor(x, y, mu_C[0,:], \
vmin=mu_vmin, vmax=mu_vmax, cmap=cmap_mu, shading='gouraud')
cbar = m.colorbar(tpc_mu, location='bottom')
cbar.set_ticks( range(mu_vmin.astype(np.int), mu_vmax.astype(np.int), 40)[1:])
#cbar.set_label('mean')
m.scatter(stations['lon'], stations['lat'], latlon=True, lw=0, color='g')
# Make a lat, lon grid with extent of the map
N = 60j
grid = np.rec.fromarrays(np.mgrid[lllat:urlat:N, lllon:urlon:N], dtype=dt_latlon)
c = c_act(grid) # Actual velocity model
# Contour lines
cnt = m.contour(grid['lon'], grid['lat'], c, levels=c_act.levels(20), latlon=True, colors='k', linewidths=0.5)
# Subplot right
m = prepare_map(ax_sd, pls=[0,0,0,0])
tpc_sd = ax_sd.tripcolor(x, y, sd_C[0,:], \
vmin=np.min(sd_C), vmax=np.max(sd_C), cmap=cmap_sd, shading='gouraud')
cbar = m.colorbar(tpc_sd, location='bottom')
vmin_sd = np.min(sd_C).round().astype(np.integer)
vmax_sd = np.max(sd_C).round().astype(np.integer)
cbar.set_ticks(range(vmin_sd, vmax_sd, 5))
#cbar.set_label('standard deviation')
m.scatter(stations['lon'], stations['lat'], latlon=True, lw=0, color='g')
# First frame; Necessary for LaTeX beamer
plt.savefig('../animation_pri.png', dpi=dpi)
def animate(i):
global mu_C, cov_CC
tpc_mu.set_array(mu_C[i,:])
tpc_sd.set_array(sd_C[i,:])
# Screen output; a very basic progress bar
p = int(100.*(i+1)/mu_C.shape[0]) # Progress
stdout.write('\r[' + p*'#' + (100-p)*'-' + '] %3i' % p + '%' )
if (i+1) == mu_C.shape[0]:
stdout.write('\n')
return tpc_mu, tpc_sd
frames = mu_C.shape[0]
duration = 30. # s
interval = 1000.*duration/frames # ms
anim = animation.FuncAnimation(fig, animate, save_count=0, \
frames=frames, interval=interval, blit=False)
# Save video
anim.save('../animation.avi', dpi=dpi, extra_args=['-vcodec', 'msmpeg4v2'])
# Last frame; Necessary for LaTeX beamer
plt.savefig('../animation_pst.png', dpi=dpi)
#plt.close()
|
daicang/Leetcode-solutions
|
064-minimum-path-sum.py
|
Python
|
mit
| 877
| 0.002281
|
class Solution(object):
def minPathSum(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
if not grid or not grid[0]:
return 0
m = len(grid)
n = len(grid[0])
dp = []
for _ in range(m):
dp.append([None] * (n))
dp[m-1][n-1] = grid[m-1][n-1]
def solve(row, col):
if dp[row][col] is not None:
return dp[row][col]
if row == m-1:
cost = grid[row][col] + solve(row, col+1)
elif col == n-1:
cost = grid[row][col] + solve(row+1, col)
else:
cost = grid[row][col] + min(solve(row, col+1), solve(row+1, col))
dp[row][col] = cost
# print 'dp(%s,%s) is %s' % (row, col, cost)
return cost
return solve(0, 0)
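# Example (classic test case): in [[1, 3, 1], [1, 5, 1], [4, 2, 1]] the
# cheapest path 1 -> 3 -> 1 -> 1 -> 1 sums to 7:
#
#   assert Solution().minPathSum([[1, 3, 1], [1, 5, 1], [4, 2, 1]]) == 7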
|
dryobates/testing_django
|
todo/tasks/tests/test_models.py
|
Python
|
mit
| 601
| 0
|
from django.test import TestCase
from morelia.decorators import tags
from smarttest.decorators import no_db_testcase
from tasks.factories import TaskFactory, UserFactory
@no_db_testcase
@tags(['unit'])
class TaskGetAbsoluteUrlTest(TestCase):
''' :py:meth:`tasks.models.Task.get_absolute_url` '''
def test_should_return_task_absolute_url(self):
# Arrange
owner = UserFactory.build(pk=1)
task = TaskFactory.build(owner=owner, author=owner)
# Act
url = task.get_absolute_url()
# Assert
self.assertEqual(url, '/%s/' % owner.username)
|
gnowgi/gnowsys-studio
|
gstudio/xmlrpc/dispatcher.py
|
Python
|
agpl-3.0
| 3,078
| 0.00065
|
"""Offers a simple XML-RPC dispatcher for django_xmlrpc
Author::
Graham Binns
Credit must go to Brendan W. McAdams <[email protected]>, who
posted the original SimpleXMLRPCDispatcher to the Django wiki:
http://code.djangoproject.com/wiki/XML-RPC
New BSD License
===============
Copyright (c) 2007, Graham Binns http://launchpad.net/~codedragon
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the <ORGANIZATION> nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
# This file is needed to run XMLRPC
from inspect import getargspec
from SimpleXMLRPCServer import SimpleXMLRPCDispatcher
from django.conf import settings
# If we need to debug, now we know
DEBUG = hasattr(settings, 'XMLRPC_DEBUG') and settings.XMLRPC_DEBUG
class DjangoXMLRPCDispatcher(SimpleXMLRPCDispatcher):
"""A simple XML-RPC dispatcher for Django.
Subclasses SimpleXMLRPCServer.SimpleXMLRPCDispatcher for the purpose of
overriding certain built-in methods (it's nicer than monkey-patching them,
that's for sure).
"""
def system_methodSignature(self, method):
"""Returns the signature details for a specified method
method
The name of the XML-RPC method to get the details for
"""
# See if we can find the method in our funcs dict
# TODO: Handle this better: We really should return something more
# formal than an AttributeError
func = self.funcs[method]
try:
sig = func._xmlrpc_signature
except AttributeError:
sig = {
'returns': 'string',
'args': ['string' for arg in getargspec(func)[0]],
}
return [sig['returns']] + sig['args']
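# Shape sketch (inferred from the lookup above): a registered function may
# carry, e.g.,
#
#   func._xmlrpc_signature = {'returns': 'string', 'args': ['string', 'int']}
#
# which system_methodSignature flattens to ['string', 'string', 'int'].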
|
tmerrick1/spack
|
var/spack/repos/builtin/packages/gmp/package.py
|
Python
|
lgpl-2.1
| 2,575
| 0.000388
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Gmp(AutotoolsPackage):
"""GMP is a free library for arbitrary precision arithmetic, operating
on signed integers, rational numbers, and floating-point numbers."""
homepage = "https://gmplib.org"
url = "https://ftp.gnu.org/gnu/gmp/gmp-6.1.2.tar.bz2"
version('6.1.2', '8ddbb26dc3bd4e2302984debba1406a5')
version('6.1.1', '4c175f86e11eb32d8bf9872ca3a8e11d')
version('6.1.0', '86ee6e54ebfc4a90b643a65e402c4048')
version('6.0.0a', 'b7ff2d88cae7f8085bd5006096eed470')
version('6.0.0', '6ef5869ae735db9995619135bd856b84')
version('5.1.3', 'a082867cbca5e898371a97bb27b31fea')
# Old version needed for a binary package in ghc-bootstrap
version('4.3.2', 'dd60683d7057917e34630b4a787932e8')
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('m4', type='build')
# gmp's configure script seems to be broken; it sometimes misdetects
# shared library support. Regenerating it fixes the issue.
force_autoreconf = True
def configure_args(self):
args = ['--enable-cxx']
# This flag is necessary for the Intel build to pass `make check`
if self.spec.compiler.name == 'intel':
args.append('CXXFLAGS=-no-ftz')
return args
|
openstack/python-tripleoclient
|
tripleoclient/tests/workflows/test_baremetal.py
|
Python
|
apache-2.0
| 19,069
| 0
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from unittest import mock
import ironic_inspector_client
from oslo_concurrency import processutils
from oslo_utils import units
from tripleoclient import exceptions
from tripleoclient.tests import fakes
from tripleoclient.workflows import baremetal
class TestBaremetalWorkflows(fakes.FakePlaybookExecution):
def setUp(self):
super(TestBaremetalWorkflows, self).setUp()
self.app.client_manager.workflow_engine = self.workflow = mock.Mock()
self.glance = self.app.client_manager.image = mock.Mock()
self.tripleoclient = mock.Mock()
self.app.client_manager.tripleoclient = self.tripleoclient
self.mock_playbook = mock.patch(
'tripleoclient.utils.run_ansible_playbook',
autospec=True
)
self.mock_playbook.start()
self.addCleanup(self.mock_playbook.stop)
self.node_update = [{'op': 'add',
'path': '/properties/capabilities',
'value': 'boot_option:local'},
{'op': 'add',
'path': '/driver_info/deploy_ramdisk',
'value': None},
{'op': 'add',
'path': '/driver_info/deploy_kernel',
'value': None},
{'op': 'add',
'path': '/driver_info/rescue_ramdisk',
'value': None},
{'op': 'add',
'path': '/driver_info/rescue_kernel',
'value': None}]
# Mock data
self.disks = [
{'name': '/dev/sda', 'size': 11 * units.Gi},
{'name': '/dev/sdb', 'size': 2 * units.Gi},
{'name': '/dev/sdc', 'size': 5 * units.Gi},
{'name': '/dev/sdd', 'size': 21 * units.Gi},
{'name': '/dev/sde', 'size': 13 * units.Gi},
]
for i, disk in enumerate(self.disks):
disk['wwn'] = 'wwn%d' % i
disk['serial'] = 'serial%d' % i
self.baremetal.node.list.return_value = [
mock.Mock(uuid="ABCDEFGH"),
]
self.node = mock.Mock(uuid="ABCDEFGH", properties={})
self.baremetal.node.get.return_value = self.node
self.inspector.get_data.return_value = {
'inventory': {'disks': self.disks}
}
self.existing_nodes = [
{'uuid': '1', 'driver': 'ipmi',
'driver_info': {'ipmi_address': '10.0.0.1'}},
{'uuid': '2', 'driver': 'pxe_ipmitool',
'driver_info': {'ipmi_address': '10.0.0.1', 'ipmi_port': 6235}},
{'uuid': '3', 'driver': 'foobar', 'driver_info': {}},
{'uuid': '4', 'driver': 'fake',
'driver_info': {'fake_address': 42}},
{'uuid': '5', 'driver': 'ipmi', 'driver_info': {}},
{'uuid': '6', 'driver': 'pxe_drac',
'driver_info': {'drac_address': '10.0.0.2'}},
{'uuid': '7', 'driver': 'pxe_drac',
'driver_info': {'drac_address': '10.0.0.3', 'drac_port': 6230}},
]
def test_register_or_update_success(self):
self.assertEqual(baremetal.register_or_update(
self.app.client_manager,
nodes_json=[],
instance_boot_option='local'
), [mock.ANY])
def test_introspect_success(self):
baremetal.introspect(self.app.client_manager, node_uuids=[],
run_validations=True, concurrency=20,
node_timeout=1200, max_retries=1,
retry_timeout=120)
def test_introspect_manageable_nodes_success(self):
baremetal.introspect_manageable_nodes(
self.app.client_manager, run_validations=False, concurrency=20,
node_timeout=1200, max_retries=1, retry_timeout=120,
)
def test_run_instance_boot_option(self):
result = baremetal._configure_boot(
self.app.client_manager,
node_uuid='MOCK_UUID',
instance_boot_option='netboot')
self.assertIsNone(result)
self.node_update[0].update({'value': 'boot_option:netboot'})
self.baremetal.node.update.assert_called_once_with(
mock.ANY, self.node_update)
def test_run_instance_boot_option_not_set(self):
result = baremetal._configure_boot(
self.app.client_manager,
node_uuid='MOCK_UUID')
self.assertIsNone(result)
self.node_update[0].update({'value': ''})
self.baremetal.node.update.assert_called_once_with(
mock.ANY, self.node_update)
def test_run_instance_boot_option_already_set_no_overwrite(self):
node_mock = mock.MagicMock()
node_mock.properties.get.return_value = ({'boot_option': 'netboot'})
self.app.client_manager.baremetal.node.get.return_value = node_mock
result = baremetal._configure_boot(
self.app.client_manager,
node_uuid='MOCK_UUID')
self.assertIsNone(result)
self.node_update[0].update({'value': 'boot_option:netboot'})
self.baremetal.node.update.assert_called_once_with(
mock.ANY, self.node_update)
def test_run_instance_boot_option_already_set_do_overwrite(self):
node_mock = mock.MagicMock()
node_mock.properties.get.return_value = ({'boot_option': 'netboot'})
self.app.client_manager.baremetal.node.get.return_value = node_mock
result = baremetal._configure_boot(
self.app.client_manager,
node_uuid='MOCK_UUID',
instance_boot_option='local')
self.assertIsNone(result)
self.node_update[0].update({'value': 'boot_option:local'})
self.baremetal.node.update.assert_called_once_with(
mock.ANY, self.node_update)
def test_run_exception_on_node_update(self):
self.baremetal.node.update.side_effect = Exception("Update error")
self.assertRaises(
Exception,
baremetal._configure_boot,
self.app.client_manager,
node_uuid='MOCK_UUID')
self.inspector.get_data.return_value = {
'inventory': {'disks': self.disks}
}
def test_smallest(self):
baremetal._apply_root_device_strategy(
self.app.client_manager,
node_uuid='MOCK_UUID',
strategy='smallest')
self.assertEqual(self.baremetal.node.update.call_count, 1)
root_device_args = self.baremetal.node.update.call_args_list[0]
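        # The mock disks above make the expectation concrete: /dev/sdb (2 GiB)
        # is evidently too small to be eligible, so /dev/sdc (5 GiB, wwn2) is
        # picked as the smallest candidate, and local_gb is reported as one
        # GiB less than the disk size (5 - 1 = 4).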
expected_patch = [{'op': 'add', 'path': '/properties/root_device',
'value': {'wwn': 'wwn2'}},
{'op': 'add', 'path': '/properties/local_gb',
'value': 4}]
self.assertEqual(mock.call('ABCDEFGH', expected_patch),
root_device_args)
def test_smallest_with_ext(self):
self.disks[2]['wwn_with_extension'] = 'wwnext'
baremetal._apply_root_device_strategy(
self.app.client_manager,
node_uuid='MOCK_UUID',
strategy='smallest')
self.assertEqual(self.baremetal.node.update.call_count, 1)
root_device_args = self.baremetal.node.update.call_args_list[0]
expected_patch = [{'op': 'add', 'path': '/properties/root_device',
'value': {'wwn_with_extension': 'wwnext'}},
{'op': 'add', 'path': '/properties/local_gb',
'
|
daobilige-su/SSM_LinearArray
|
ROS/SSM_LinearArray/scripts/ps3_driver.py
|
Python
|
gpl-3.0
| 1,741
| 0.013211
|
#!/usr/bin/env python
#
# This file is part of the SSM_LinearArray (Sound Sources Mapping
# using a Linear Microphone Array)
# developed by Daobilige Su <daobilige DOT su AT student DOT uts DOT edu DOT au>
#
# This file is under the GPLv3 licence.
#
import rospy
from std_msgs.msg import String
from std_msgs.msg import Int32MultiArray
#sudo apt-get install python-pyaudio
import pyaudio
from rospy.numpy_msg import numpy_msg
import numpy as np
import time
import signal
import os
import sys
CHUNK = 3200
FORMAT = pyaudio.paInt16
CHANNELS = 4
RATE = 16000
DEV_IDX = 5
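# Buffer-size sanity check: CHUNK=3200 frames at RATE=16000 Hz means each
# callback fires roughly every 200 ms, carrying 3200 * 4 = 12800 interleaved
# int16 samples across the four channels.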
p = pyaudio.PyAudio()
|
pub_mic_array = rospy.Publisher("/microphone_array_raw", numpy_msg(Int32MultiArray), queue_size=1)
def callback(in_data, frame_count, time_info, status):
global np,pub_mic_array
numpydata = np.fromstring(in_data, dtype=np.int16)
print('sending...')
numpydata_msg = Int32MultiArray()
numpydata_msg.data = numpydata
pub_mic_array.publish(numpydata_msg)
return (in_data, pyaudio.paContinue)
stream = p.open(format=FORMAT,
channels=CHANNELS,
rate=RATE,
|
input=True,
frames_per_buffer=CHUNK,
input_device_index=DEV_IDX,
stream_callback=callback)
def signal_handler(signal, frame):
print('---stopping---')
stream.close()
p.terminate()
sys.exit()
signal.signal(signal.SIGINT, signal_handler)
def talker():
rospy.init_node('microphone_array_driver', anonymous=True)
print("---recording---")
stream.start_stream()
while stream.is_active():
time.sleep(0.1)
stream.close()
p.terminate()
if __name__ == '__main__':
try:
talker()
except rospy.ROSInterruptException:
pass
|
AASHE/hub
|
hub/apps/api/views.py
|
Python
|
mit
| 3,271
| 0
|
from __future__ import unicode_literals
from logging import getLogger
from django.conf import settings
from django.core.cache import cache
from django.views.generic import View
from django.http import JsonResponse, HttpResponseBadRequest
|
from django.template.defaultfilters import slugify
from ratelimit.mixins import RatelimitMixin
from ..metadata.models import Organization
from ..content.models import ContentType
logger = getLogger(__name__)
class BaseApiView(RatelimitMixin, View):
cache = False
cache_timeout = 60 * 60
# Rate-limiting
ratelimit_key = 'ip'
ratelimit_rate = settings.BROWSE_RATE_LIMIT
ratelimit_block = True
ratelimit_method = 'GET'
    def get(self, request, *args, **kwargs):
|
"""
Respond the content of `self.get_data` as JSON. Cache it, if enabled.
"""
if self.cache:
data = cache.get(self.get_cache_key())
if data:
logger.debug('API response: cache hit :: {}'.format(
self.get_cache_key()))
return data
data = JsonResponse(self.get_data(), safe=False)
if self.cache:
logger.debug('API response: cache set :: {}'.format(
self.get_cache_key()))
cache.set(self.get_cache_key(), data, self.cache_timeout)
return data
class AutoCompleteView(BaseApiView):
def get(self, request, *args, **kwargs):
"""
Store the `q` keyword in the class namespace.
"""
if not self.request.GET.get('q'):
return HttpResponseBadRequest('No search term given')
self.q = self.request.GET['q']
if len(self.q) < self.min_keyword_length:
error_str = 'Search term must be at least {} characters long.'
return HttpResponseBadRequest(
error_str.format(self.min_keyword_length))
return super(AutoCompleteView, self).get(request, *args, **kwargs)
class OrganizationsApiView(AutoCompleteView):
"""
Returns a list of organizations matching a given `q` keyword.
"""
cache = True
# API view specific
max_num_results = 50
min_keyword_length = 2
def get_cache_key(self):
return 'api_organizations_{}'.format(slugify(self.q))
def get_data(self):
data = Organization.objects.values('pk', 'org_name', 'state')
data = data.filter(org_name__icontains=self.q)
data = data.order_by('org_name')
data = list(data)
for x in data:
x['org_name'] = '{}, {}'.format(x['org_name'], x['state'])
return data
class TagsApiView(AutoCompleteView):
"""
Returns a list of tags matching a given `q` keyword.
"""
cache = True
# API view specific
max_num_results = 50
min_keyword_length = 2
def get_cache_key(self):
return 'api_tags_{}'.format(slugify(self.q))
def get_data(self):
# @todo: should we limit this to only tags on published contenttypes?
# I think this will be too heavy a query... :(
qs = ContentType.keywords.tag_model.objects.values(
'pk', 'name', 'slug').distinct('name')
qs = qs.filter(name__icontains=self.q)
qs = qs.exclude(count=0)
return list(qs)
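# A minimal sketch (hypothetical, not part of this module) of what a new
# autocomplete endpoint needs: a cache key derived from the query and a
# serializable get_data() result.
#
# class CountriesApiView(AutoCompleteView):   # hypothetical model/endpoint
#     cache = True
#     min_keyword_length = 2
#
#     def get_cache_key(self):
#         return 'api_countries_{}'.format(slugify(self.q))
#
#     def get_data(self):
#         return list(Country.objects.values('pk', 'name')
#                     .filter(name__icontains=self.q))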
|
rbtcollins/lmirror
|
l_mirror/tests/test_logging_support.py
|
Python
|
gpl-3.0
| 2,180
| 0.004128
|
#
# LMirror is Copyright (C) 2010 Robert Collins <[email protected]>
#
# LMirror is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
#
# In the LMirror source tree the file COPYING.txt contains the GNU General Public
# License version 3.
#
"""Tests for logging support code."""
from StringIO import StringIO
import logging
import os.path
import time
from l_mirror import logging_support
from l_mirror.tests import ResourcedTestCase
from l_mirror.tests.logging_resource import LoggingResourceManager
from l_mirror.tests.stubpackage import TempDirResource
class TestLoggingSetup(ResourcedTestCase):
resources = [('logging', LoggingResourceManager())]
def test_configure_logging_sets_converter(self):
out = StringIO()
c_log, f_log, formatter = logging_support.configure_logging(out)
self.assertEqual(c_log, logging.root.handlers[0])
self.assertEqual(f_log, logging.root.handlers[1])
self.assertEqual(None, c_log.formatter)
self.assertEqual(formatter, f_log.formatter)
|
        self.assertEqual(time.gmtime, formatter.converter)
|
self.assertEqual("%Y-%m-%d %H:%M:%SZ", formatter.datefmt)
self.assertEqual(logging.StreamHandler, c_log.__class__)
self.assertEqual(out, c_log.stream)
self.assertEqual(logging.FileHandler, f_log.__class__)
self.assertEqual(os.path.expanduser("~/.cache/lmirror/log"), f_log.baseFilename)
def test_can_supply_filename_None(self):
out = StringIO()
c_log, f_log, formatter = logging_support.configure_logging(out, None)
self.assertEqual(None, f_log)
|
laurentb/weboob
|
modules/cragr/netfinca_browser.py
|
Python
|
lgpl-3.0
| 251
| 0
|
# -*- coding: utf-8 -*-
|
# Copyright (C) 2012-2019 Budget Insight
# yapf-compatible
from weboob.browser import AbstractBrowser
class NetfincaBrowser(AbstractBrowser):
|
PARENT = 'netfinca'
BASEURL = 'https://www.cabourse.credit-agricole.fr'
|
itskewpie/tempest
|
tempest/api/identity/base.py
|
Python
|
apache-2.0
| 6,216
| 0.000161
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import clients
from tempest.common.utils.data_utils import rand_name
import tempest.test
class BaseIdentityAdminTest(tempest.test.BaseTestCase):
@classmethod
def setUpClass(cls):
super(BaseIdentityAdminTest, cls).setUpClass()
os = clients.AdminManager(interface=cls._interface)
cls.client = os.identity_client
cls.token_client = os.token_client
cls.endpoints_client = os.endpoints_client
cls.v3_client = os.identity_v3_client
cls.service_client = os.service_client
cls.policy_client = os.policy_client
cls.v3_token = os.token_v3_client
cls.creds_client = os.credentials_client
if not cls.client.has_admin_extensions():
raise cls.skipException("Admin extensions disabled")
cls.data = DataGenerator(cls.client)
cls.v3data = DataGenerator(cls.v3_client)
os = clients.Manager(interface=cls._interface)
cls.non_admin_client = os.identity_client
cls.v3_non_admin_client = os.identity_v3_client
@classmethod
def tearDownClass(cls):
cls.data.teardown_all()
cls.v3data.teardown_all()
super(BaseIdentityAdminTest, cls).tearDownClass()
def disable_user(self, user_name):
user = self.get_user_by_name(user_name)
self.client.enable_disable_user(user['id'], False)
def disable_tenant(self, tenant_name):
tenant = self.get_tenant_by_name(tenant_name)
self.client.update_tenant(tenant['id'], enabled=False)
def get_user_by_name(self, name):
_, users = self.client.get_users()
user = [u for u in users if u['name'] == name]
if len(user) > 0:
return user[0]
def get_tenant_by_name(self, name):
_, tenants = self.client.list_tenants()
tenant = [t for t in tenants if t['name'] == name]
if len(tenant) > 0:
return tenant[0]
def get_role_by_name(self, name):
_, roles = self.client.list_roles()
role = [r for r in roles if r['name'] == name]
if len(role) > 0:
return role[0]
class DataGenerator(object):
def __init__(self, client):
self.client = client
self.users = []
self.tenants = []
self.roles = []
self.role_name = None
self.v3_users = []
self.projects = []
self.v3_roles = []
def setup_test_user(self):
"""Set up a test user."""
self.setup_test_tenant()
self.test_user = rand_name('test_user_')
self.test_password = rand_name('pass_')
self.test_email = self.test_user + '@testmail.tm'
resp, self.user = self.client.create_user(self.test_user,
self.test_password,
self.tenant['id'],
self.test_email)
self.users.append(self.user)
def setup_test_tenant(self):
"""Set up a test tenant."""
self.test_tenant = rand_name('test_tenant_')
self.test_description = rand_name('desc_')
resp, self.tenant = self.client.create_tenant(
name=self.test_tenant,
description=self.test_description)
self.tenants.append(self.tenant)
def setup_test_role(self):
"""Set up a test role."""
self.test_role = rand_name('role')
resp, self.role = self.client.create_role(self.test_role)
self.roles.append(self.role)
def setup_test_v3_user(self):
"""Set up a test v3 user."""
self.setup_test_project()
self.test_user = rand_name('test_user_')
self.test_password = rand_name('pass_')
self.test_email = self.test_user + '@testmail.tm'
resp, self.v3_user = self.client.create_user(self.test_user,
self.test_password,
self.project['id'],
self.test_email)
self.v3_users.append(self.v3_user)
def setup_test_project(self):
"""Set up a test project."""
self.test_project = rand_name('test_project_')
self.test_description = rand_name('desc_')
|
resp, self.project = self.client.create_project(
name=self.test_project,
description=self.test_description)
self.projects.append(self.project)
def setup_test_v3_role(self):
"""Set up a test v3 role.
|
"""
self.test_role = rand_name('role')
resp, self.v3_role = self.client.create_role(self.test_role)
self.v3_roles.append(self.v3_role)
def teardown_all(self):
for user in self.users:
self.client.delete_user(user['id'])
for tenant in self.tenants:
self.client.delete_tenant(tenant['id'])
for role in self.roles:
self.client.delete_role(role['id'])
for v3_user in self.v3_users:
self.client.delete_user(v3_user['id'])
for v3_project in self.projects:
self.client.delete_project(v3_project['id'])
for v3_role in self.v3_roles:
self.client.delete_role(v3_role['id'])
|
tylertian/Openstack
|
openstack F/cinder/cinder/tests/test_skip_examples.py
|
Python
|
apache-2.0
| 1,837
| 0
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
|
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cinder import test
class ExampleSkipTestCase(test.TestCase):
test_counter = 0
@test.skip_test("Example usage of @test.skip_test()")
def test_skip_test_example(self):
self.fail("skip_test failed to work properly.")
@test.skip_if(True, "Example usage of @test.skip_if()")
def test_skip_if_example(self):
self.fail("skip_if failed to work properly.")
@test.skip_unless(False, "Example usage of @test.skip_unless()")
def test_skip_unless_example(self):
self.fail("skip_unless failed to work properly.")
@test.skip_if(False, "This test case should never be skipped.")
def test_001_increase_test_counter(self):
ExampleSkipTestCase.test_counter += 1
@test.skip_unless(True, "This test case should never be skipped.")
def test_002_increase_test_counter(self):
ExampleSkipTestCase.test_counter += 1
def test_003_verify_test_counter(self):
self.assertEquals(ExampleSkipTestCase.test_counter, 2,
"Tests were not skipped appropriately")
|
spoqa/flask-s3
|
flask_s3.py
|
Python
|
mit
| 12,801
| 0.002734
|
from concurrent.futures import ThreadPoolExecutor
import os
import re
import gzip
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import logging
import mimetypes
from collections import defaultdict
from flask import url_for as flask_url_for
from flask import current_app, request
from boto.s3.connection import S3Connection
from boto.exception import S3CreateError, S3ResponseError
from boto.s3.key import Key
logger = logging.getLogger('flask_s3')
mimetypes.add_type('text/css', '.less')
def url_for(endpoint, **values):
"""
Generates a URL to the given endpoint.
If the endpoint is for a static resource then an Amazon S3 URL is
generated, otherwise the call is passed on to `flask.url_for`.
Because this function is set as a jinja environment variable when
`FlaskS3.init_app` is invoked, this function replaces
`flask.url_for` in templates automatically. It is unlikely that this
function will need to be directly called from within your
application code, unless you need to refer to static assets outside
of your templates.
"""
app = current_app
if 'S3_BUCKET_NAME' not in app.config:
raise ValueError("S3_BUCKET_NAME not found in app configuration.")
if app.debug and not app.config['USE_S3_DEBUG']:
return flask_url_for(endpoint, **values)
if endpoint == 'static' or endpoint.endswith('.static'):
        scheme = app.config['S3_URL_SCHEME'] or 'https'
bucket_path = '%s.%s' % (app.config['S3_BUCKET_NAME'],
app.config['S3_BUCKET_DOMAIN'])
if app.config['S3_CDN_DOMAIN']:
bucket_path = '%s' % app.config['S3_CDN_DOMAIN']
if app.config['S3_PREFIX']:
bucket_path = "/".join((bucket_path, app.config['S3_PREFIX']))
urls = app.url_map.bind(bucket_path, url_scheme=scheme)
try:
mimetype = mimetypes.guess_type(values['filename'])[0]
except KeyError:
mimetype = None
if app.config['USE_GZIP']:
accept_encoding = request.headers.get('Accept-Encoding', '')
if (mimetype in app.config['S3_GZIP_CONTENT_TYPES'] and
'gzip' in accept_encoding.lower()):
values['filename'] += '.gz'
url = urls.build(endpoint, values=values, force_external=True)
if app.config['S3_URL_SCHEME'] is None:
url = re.sub(r'^https://', '//', url)
return url
return flask_url_for(endpoint, **values)
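# For example (config values hypothetical): with S3_BUCKET_NAME='assets',
# S3_BUCKET_DOMAIN='s3.amazonaws.com', no CDN domain or prefix, and
# S3_URL_SCHEME left unset, url_for('static', filename='css/app.css')
# resolves to '//assets.s3.amazonaws.com/static/css/app.css' -- the https
# scheme is stripped by the re.sub above whenever S3_URL_SCHEME is None.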
def _bp_static_url(blueprint):
""" builds the absolute url path for a blueprint's static folder """
u = u'%s%s' % (blueprint.url_prefix or '', blueprint.static_url_path or '')
return u
def _gather_files(app, hidden):
""" Gets all files in static folders and returns in dict."""
dirs = [(unicode(app.static_folder), app.static_url_path)]
if hasattr(app, 'blueprints'):
blueprints = app.blueprints.values()
bp_details = lambda x: (x.static_folder, _bp_static_url(x))
dirs.extend([bp_details(x) for x in blueprints if x.static_folder])
valid_files = defaultdict(list)
for static_folder, static_url_loc in dirs:
if not os.path.isdir(static_folder):
logger.warning("WARNING - [%s does not exist]" % static_folder)
else:
logger.debug("Checking static folder: %s" % static_folder)
for root, _, files in os.walk(static_folder):
files = [os.path.join(root, x) \
for x in files if hidden or x[0] != '.']
if files:
valid_files[(static_folder, static_url_loc)].extend(files)
return valid_files
def _path_to_relative_url(path):
""" Converts a folder and filename into a ralative url path """
return os.path.splitdrive(path)[1].replace('\\', '/')
def _static_folder_path(static_url, static_folder, static_asset, prefix=''):
"""
Returns a path to a file based on the static folder, and not on the
filesystem holding the file.
Returns a path relative to static_url for static_asset
"""
# first get the asset path relative to the static folder.
# static_asset is not simply a filename because it could be
# sub-directory then file etc.
if not static_asset.startswith(static_folder):
raise ValueError("%s startic asset mus
|
t be under %s static folder" %
(static_asset, static_folder))
rel_asset = static_asset[len(sta
|
tic_folder):]
# Now bolt the static url path and the relative asset location together
key = u'%s/%s' % (static_url.rstrip('/'), rel_asset.lstrip('/'))
if prefix:
key = u'%s/%s' % (prefix, key)
return key
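# Worked example (paths hypothetical): static_url='/static',
# static_folder='/app/static', static_asset='/app/static/css/site.less'
# gives rel_asset='/css/site.less' and, with no prefix, the key
# '/static/css/site.less'.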
def _write_files(app, static_url_loc, static_folder, files, bucket,
ex_keys=None):
""" Writes all the files inside a static folder to S3. """
with ThreadPoolExecutor(app.config['S3_UPLOAD_COCURRENCY']) as executor:
for file_path in files:
asset_loc = _path_to_relative_url(file_path)
key_name = _static_folder_path(static_url_loc, static_folder,
asset_loc, app.config['S3_PREFIX'])
mimetype = mimetypes.guess_type(key_name)[0]
is_gzippable = mimetype in app.config['S3_GZIP_CONTENT_TYPES']
headers = app.config['S3_HEADERS']
msg = "Uploading %s to %s as %s" % (file_path, bucket, key_name)
logger.debug(msg)
if ex_keys and key_name in ex_keys:
logger.debug("%s excluded from upload" % key_name)
else:
do_gzip = app.config['USE_GZIP'] and is_gzippable
# upload origin file
executor.submit(_upload_file, file_path, bucket, key_name, headers)
# upload gzipped file (if enabled)
if do_gzip:
gzip_key_name = "%s.gz" % key_name
executor.submit(_upload_file, file_path, bucket, gzip_key_name, headers, True)
def _upload_file(file_path, bucket, key_name, headers={}, do_gzip=False):
k = Key(bucket=bucket, name=key_name)
for header, value in headers.items():
if (header, value) != ('Content-Encoding', 'gzip'):
k.set_metadata(header, value)
mimetype = mimetypes.guess_type(file_path)[0]
if mimetype:
k.set_metadata('Content-Type', mimetype)
with open(file_path) as f:
content = f.read()
if do_gzip:
k.set_metadata('Content-Encoding', 'gzip')
gzipped = StringIO()
with gzip.GzipFile(fileobj=gzipped, mode='w') as _gzip:
_gzip.write(content)
content = gzipped.getvalue()
try:
k.set_contents_from_string(content)
except S3ResponseError:
if not do_gzip:
k.set_contents_from_filename(file_path)
else:
raise
k.make_public()
return k
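# Note the fallback above: a plain upload that hits S3ResponseError is
# retried via set_contents_from_filename, while a failed gzip upload
# re-raises, since the ungzipped variant was already submitted separately
# by _write_files.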
def _upload_files(app, files_, bucket):
for (static_folder, static_url), names in files_.iteritems():
_write_files(app, static_url, static_folder, names, bucket)
def create_all(app, user=None, password=None, bucket_name=None,
location='', include_hidden=False, force_refresh=False):
"""
    Uploads all of the static assets associated with a Flask application to
Amazon S3.
All static assets are identified on the local filesystem, including
any static assets associated with *registered* blueprints. In turn,
each asset is uploaded to the bucket described by `bucket_name`. If
the bucket does not exist then it is created.
Flask-S3 creates the same relative static asset folder structure on
S3 as can be found within your Flask application.
Many of the optional arguments to `create_all` can be specified
instead in your application's configuration using the Flask-S3
`configuration`_ variables.
:param app: a :class:`flask.Flask` application object.
:param user: an AWS Access Key ID. You can find this key in the
Security Credentials section of your AWS account.
:type user: `ba
|
parksandwildlife/ibms
|
ibms_project/ibms/migrations/0007_auto_20180813_1604.py
|
Python
|
apache-2.0
| 391
| 0
|
# Generated by Django 2.1 on 2018-08-13 08:04
from django.db import migrations
class Migration(migrations.Migration):
|
    dependencies = [
('ibms', '0006_auto_20180813_1603'),
]
operations = [
migrations.RenameField(
model_name='serviceprioritymappings',
old_name='costcentreName',
|
            new_name='costCentreName',
),
]
|
eblur/newdust
|
newdust/graindist/composition/cmsilicate.py
|
Python
|
bsd-2-clause
| 3,341
| 0.00419
|
import numpy as np
from scipy.interpolate import interp1d
from astropy.io import ascii
from astropy import units as u
from newdust import constants as c
from newdust.graindist.composition import _find_cmfile
__all__ = ['CmSilicate']
RHO_SIL = 3.8 # g cm^-3
class CmSilicate(object):
"""
| **ATTRIBUTES**
| cmtype : 'Silicate'
| rho : grain material density (g cm^-3)
| citation : A string containing citation to the original work
| interps : A tuple containing scipy.interp1d objects (rp, ip)
|
| *functions*
| rp(lam, unit='kev') : Returns real part (unit='kev'|'angs')
| ip(lam, unit='kev') : Returns imaginary part (unit='kev'|'angs')
| cm(lam, unit='kev') : Complex index of refraction of dtype='complex'
| plot(lam=None, unit='kev') : Plots Re(m-1) and Im(m)
| if lam is *None*, plots the original interp objects
| otherwise, plots with user defined wavelength (lam)
"""
def __init__(self, rho=RHO_SIL):
self.cmtype = 'Silicate'
self.rho = rho
self.citation = "Using optical constants for astrosilicate,\nDraine, B. T. 2003, ApJ, 598, 1026\nhttp://adsabs.harvard.edu/abs/2003ApJ...598.1026D"
D03file = _find_cmfile('callindex.out_sil.D03')
D03dat = ascii.read(D03file, header_start=4, data_start=5)
wavel = D03dat['wave(um)'] * u.micron
rp = interp1d(wavel.to(u.cm).value, 1.0 + D03dat['Re(n)-1']) # wavelength (cm), rp
ip = interp1d(wavel.to(u.cm).value, D03dat['Im(n)']) # wavelength (cm), ip
self.interps = (rp, ip)
def _interp_helper(self, lam_cm, interp, rp=False):
# Returns zero for wavelengths not covered by the interpolation object
# If the real part is needed, returns 1 (consistent with vacuum)
result = np.zeros(np.size(lam_cm))
if rp: result += 1
if np.size(lam_cm) == 1:
if (lam_cm >= np.min(interp.x)) & (lam_cm <= np.max(interp.x)):
result = interp(lam_cm)
else:
ii = (lam_cm >= np.min(interp.x)) & (lam_cm <= np.max(interp.x))
result[ii] = interp(lam_cm[ii])
return result
def rp(self, lam, unit='kev'):
lam_cm = c._lam_cm(lam, unit)
return self._interp_helper(lam_cm, self.interps[0], rp=True)
def ip(self, lam, unit='kev'):
lam_cm = c._lam_cm(lam, unit)
return self._interp_helper(lam_cm, self.interps[1])
def cm(self, lam, unit='kev'):
return self.rp(lam, unit=unit) + 1j * self.ip(lam, unit=unit)
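    # e.g. cm(1.0, unit='kev') gives the complex refractive index at 1 keV;
    # outside the tabulated Draine (2003) range, _interp_helper above falls
    # back to 1 + 0j, i.e. vacuum.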
def plot(self, ax, lam=None, unit='kev', rppart=True, impart=True):
if lam is None:
rp_m1 = np.abs(self.interps[0].y - 1.0)
ip = self.interps[1].y
x = self.interps[0].x / c.micron2cm # cm / (cm/um)
xlabel = "Wavelength (um)"
else:
rp_m1 = np.abs(self.rp(lam, unit=unit)-1.0)
|
            ip = self.ip(lam, unit)
x = lam
assert unit in c.ALLOWED_LAM_UNITS
if unit == 'kev': xlabel = "Energy (keV)"
if unit == 'angs': xlabel = "Wavelength (Angstroms)"
if rppart:
ax.plot(x, rp_m1, ls='-', label='|Re(m-1)|')
if impart:
ax.plot(x, ip, ls='--', label='Im(m)')
ax.set_xlabel(xlabel)
|
ax.legend()
|
supunkamburugamuve/mooc2
|
controllers/lessons.py
|
Python
|
apache-2.0
| 12,062
| 0.00257
|
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers for generating various frontend pages."""
__author__ = 'Saifu Angto ([email protected])'
import json
from models import models
from models.config import ConfigProperty
from models.counters import PerfCounter
from utils import BaseHandler
from utils import BaseRESTHandler
from utils import XsrfTokenManager
from models.models import Student
# Whether to record events in a database.
CAN_PERSIST_ACTIVITY_EVENTS = ConfigProperty(
'gcb_can_persist_activity_events', bool, (
'Whether or not to record student activity interactions in a '
'datastore. Without event recording, you cannot analyze student '
'activity interactions. On the other hand, no event recording reduces '
'the number of datastore operations and minimizes the use of Google '
'App Engine quota. Turn event recording on if you want to analyze '
'this data.'),
False)
COURSE_EVENTS_RECEIVED = PerfCounter(
'gcb-course-events-received',
'A number of activity/assessment events received by the server.')
COURSE_EVENTS_RECORDED = PerfCounter(
'gcb-course-events-recorded',
'A number of activity/assessment events recorded in a datastore.')
def extract_unit_and_lesson_id(handler):
"""Extracts unit and lesson id from the request."""
c = handler.request.get('unit')
if not c:
unit_id = 1
else:
unit_id = int(c)
l = handler.request.get('lesson')
if not l:
lesson_id = 1
else:
lesson_id = int(l)
return unit_id, lesson_id
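# e.g. a request for 'unit?unit=2&lesson=3' yields (2, 3); either parameter
# missing from the query string defaults to 1.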
class CourseHandler(BaseHandler):
"""Handler for generating course page."""
@classmethod
def get_child_routes(cls):
"""Add child handlers for REST."""
return [('/rest/events', EventsRESTHandler)]
def get(self):
"""Handles GET requests."""
|
user = self.personalize_page_and_get_user()
if not user:
self.redirect('/preview')
|
return None
student = Student.get_by_email(user.email())
playlist = student.playlist
playlist_urls = student.playlist_urls
if not self.personalize_page_and_get_enrolled():
return
self.template_value['units'] = self.get_units()
self.template_value['playlist'] = playlist
self.template_value['playlist_urls'] = playlist_urls
self.template_value['navbar'] = {'course': True}
self.render('course.html')
class PlaylistViewerHandler(BaseHandler):
def get(self):
"""Handles GET requests."""
if not self.personalize_page_and_get_enrolled():
return
user = self.personalize_page_and_get_user()
student = Student.get_by_email(user.email())
playlist = student.playlist
# Extract incoming args
unit_id, lesson_id = extract_unit_and_lesson_id(self)
self.template_value['unit_id'] = unit_id
self.template_value['lesson_id'] = lesson_id
# Set template values for a unit and its lesson entities
for unit in self.get_units():
if unit.unit_id == str(unit_id):
self.template_value['units'] = unit
lessons = self.get_lessons(unit_id)
self.template_value['lessons'] = lessons
# Set template values for nav bar
self.template_value['navbar'] = {'course': True}
# Set template values for back and next nav buttons
if lesson_id == 1:
self.template_value['back_button_url'] = ''
elif lessons[lesson_id - 2].activity:
self.template_value['back_button_url'] = (
'activity?unit=%s&lesson=%s' % (unit_id, lesson_id - 1))
else:
self.template_value['back_button_url'] = (
'unit?unit=%s&lesson=%s' % (unit_id, lesson_id - 1))
if lessons[lesson_id - 1].activity:
self.template_value['playlist_button_url'] = (
'activity?unit=%s&lessons=%s' % (unit_id, lesson_id))
elif playlist[0] != (str(unit_id) + '.' + str(lesson_id)):
self.template_value['playlist_button_url'] = ''
else:
self.template_value['playlist_button_url'] = (
'unit?unit=%s&lesson=%s' % (unit_id, lesson_id + 1))
if lessons[lesson_id - 1].activity:
self.template_value['next_button_url'] = (
'activity?unit=%s&lesson=%s' % (unit_id, lesson_id))
elif lesson_id == len(lessons):
self.template_value['next_button_url'] = ''
else:
self.template_value['next_button_url'] = (
'unit?unit=%s&lesson=%s' % (unit_id, lesson_id + 1))
self.response.out.write(unit_id)
self.response.out.write(lesson_id)
#self.render('unit.html')
class UnitHandler(BaseHandler):
"""Handler for generating unit page."""
def get(self):
"""Handles GET requests."""
if not self.personalize_page_and_get_enrolled():
return
user = self.personalize_page_and_get_user()
student = Student.get_by_email(user.email())
playlist = filter(lambda x: x != "", student.playlist)
# Extract incoming args
unit_id, lesson_id = extract_unit_and_lesson_id(self)
self.template_value['unit_id'] = unit_id
self.template_value['lesson_id'] = lesson_id
# Set template values for a unit and its lesson entities
for unit in self.get_units():
if unit.unit_id == str(unit_id):
self.template_value['units'] = unit
lessons = self.get_lessons(unit_id)
self.template_value['lessons'] = lessons
# Set template values for nav bar
self.template_value['navbar'] = {'course': True}
# Set template values for back and next nav buttons
if lesson_id == 1:
self.template_value['back_button_url'] = ''
elif lessons[lesson_id - 2].activity:
self.template_value['back_button_url'] = (
'activity?unit=%s&lesson=%s' % (unit_id, lesson_id - 1))
else:
self.template_value['back_button_url'] = (
'unit?unit=%s&lesson=%s' % (unit_id, lesson_id - 1))
if lessons[lesson_id - 1].activity:
self.template_value['playlist_button_url'] = (
'activity?unit=%s&lesson=%s' % (unit_id, lesson_id))
elif str(unit_id) + '.' + str(lesson_id) in playlist:
for i in range (len(playlist)):
if playlist[i] == str(unit_id) + '.' + str(lesson_id):
if i != len(playlist) - 1:
next_playlist = playlist[i + 1] #will go out of bounds if at last item in playlist
self.template_value['playlist_button_url'] = (
'unit?unit=%s&lesson=%s' % (next_playlist[0], next_playlist[2]))
break
# if lessons[lesson_id - 1].activity:
# self.template_value['playlist_button_url'] = (
# 'activity?unit=%s&lessons=%s' % (unit_id, lesson_id))
# else:
# self.template_value['playlist_button_url'] = (
# 'unit?unit=%s&lesson=%s' % (unit_id, lesson_id +
# 1))
if lessons[lesson_id - 1].activity:
self.template_value['next_button_url'] = (
'activity?unit=%s&lesson=%s' % (unit_id, lesson_id))
elif lesson_id == len(lessons):
self.template_value['next_button_
|
ddico/odoo
|
addons/l10n_in/models/res_config_settings.py
|
Python
|
agpl-3.0
| 355
| 0.002817
|
# -*- coding: utf-8 -*-
|
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class ResConfigSettings(models.TransientModel):
_inherit = 'res.config.settings'
    group_l10n_in_reseller = fields.Boolean(implied_group='l10n_in.group_l10n_in_reseller', string="Manage Reseller(E-Commerce)")
|
|
phobson/bokeh
|
sphinx/docserver.py
|
Python
|
bsd-3-clause
| 1,749
| 0.005146
|
from __future__ import print_function
import flask
import os
import threading
import time
import webbrowser
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
_basedir = os.path.join("..", os.path.dirname(__file__))
app = flask.Flask(__name__, static_path="/unused")
PORT=5009
http_server = HTTPServer(WSGIContainer(app))
"""this is a simple s
|
erver to facilitate developing the docs. by
serving up static files from this server, we avoid the need to use a
symlink.
"""
@app.route('/')
def welcome():
return """
<h1>Welcome to the Bokeh documentation server</h1>
You probably want to go to <a href="/en/latest/index.html"> Index</a>
"""
@app.route('/en/latest/<path:filename>')
def send_pic(filename):
return flask.send_from_directory(
os.path.join(_basedir,"sphinx/_build/html/"), filename)
|
def open_browser():
# Child process
time.sleep(0.5)
webbrowser.open("http://localhost:%d/en/latest/index.html" % PORT, new="tab")
def serve_http():
http_server.listen(PORT)
IOLoop.instance().start()
def shutdown_server():
ioloop = IOLoop.instance()
ioloop.add_callback(ioloop.stop)
print("Asked Server to shut down.")
def ui():
time.sleep(0.5)
input("Press <ENTER> to exit...\n")
if __name__ == "__main__":
print("\nStarting Bokeh plot server on port %d..." % PORT)
print("Visit http://localhost:%d/en/latest/index.html to see plots\n" % PORT)
t_server = threading.Thread(target=serve_http)
t_server.start()
t_browser = threading.Thread(target=open_browser)
t_browser.start()
ui()
shutdown_server()
t_server.join()
t_browser.join()
print("Server shut down.")
|
fcrozat/telepathy-haze
|
tests/twisted/roster/removed-from-rp-subscribe.py
|
Python
|
gpl-2.0
| 6,175
| 0.002591
|
"""
Regression tests for rescinding outstanding subscription requests.
"""
from twisted.words.protocols.jabber.client import IQ
from servicetest import (EventPattern, wrap_channel, assertLength,
assertEquals, call_async, sync_dbus)
from hazetest import exec_test
import constants as cs
import ns
jid = '[email protected]'
def test(q, bus, conn, stream, remove, local):
call_async(q, conn.Requests, 'EnsureChannel',{
cs.CHANNEL_TYPE: cs.CHANNEL_TYPE_CONTACT_LIST,
cs.TARGET_HANDLE_TYPE: cs.HT_LIST,
cs.TARGET_ID: 'subscribe',
})
    e = q.expect('dbus-return', method='EnsureChannel')
|
    subscribe = wrap_channel(bus.get_object(conn.bus_name, e.value[1]),
|
cs.CHANNEL_TYPE_CONTACT_LIST)
call_async(q, conn.Requests, 'EnsureChannel',{
cs.CHANNEL_TYPE: cs.CHANNEL_TYPE_CONTACT_LIST,
cs.TARGET_HANDLE_TYPE: cs.HT_LIST,
cs.TARGET_ID: 'stored',
})
e = q.expect('dbus-return', method='EnsureChannel')
stored = wrap_channel(bus.get_object(conn.bus_name, e.value[1]),
cs.CHANNEL_TYPE_CONTACT_LIST)
call_async(q, conn.Requests, 'EnsureChannel',{
cs.CHANNEL_TYPE: cs.CHANNEL_TYPE_CONTACT_LIST,
cs.TARGET_HANDLE_TYPE: cs.HT_LIST,
cs.TARGET_ID: 'publish',
})
e = q.expect('dbus-return', method='EnsureChannel')
publish = wrap_channel(bus.get_object(conn.bus_name, e.value[1]),
cs.CHANNEL_TYPE_CONTACT_LIST)
h = conn.RequestHandles(cs.HT_CONTACT, [jid])[0]
# Another client logged into our account (Gajim, say) wants to subscribe to
# Marco's presence. First, per RFC 3921 it 'SHOULD perform a "roster set"
# for the new roster item':
#
# <iq type='set'>
# <query xmlns='jabber:iq:roster'>
# <item jid='[email protected]'/>
# </query>
# </iq>
#
# 'As a result, the user's server (1) MUST initiate a roster push for the
# new roster item to all available resources associated with this user that
# have requested the roster, setting the 'subscription' attribute to a
# value of "none"':
iq = IQ(stream, "set")
item = iq.addElement((ns.ROSTER, 'query')).addElement('item')
item['jid'] = jid
item['subscription'] = 'none'
stream.send(iq)
# In response, Haze adds Marco to the roster, which we guess (wrongly,
# in this case) also means subscribe
q.expect_many(
EventPattern('dbus-signal', signal='MembersChanged',
args=['', [h], [], [], [], h, 0], path=subscribe.object_path),
EventPattern('dbus-signal', signal='MembersChanged',
args=['', [h], [], [], [], 0, 0], path=stored.object_path),
)
# Gajim sends a <presence type='subscribe'/> to Marco. 'As a result, the
# user's server MUST initiate a second roster push to all of the user's
# available resources that have requested the roster, setting [...]
# ask='subscribe' attribute in the roster item [for Marco]:
iq = IQ(stream, "set")
item = iq.addElement((ns.ROSTER, 'query')).addElement('item')
item['jid'] = jid
item['subscription'] = 'none'
item['ask'] = 'subscribe'
stream.send(iq)
# In response, Haze should add Marco to subscribe:remote-pending,
# but libpurple has no such concept, so nothing much happens.
# The user decides that they don't care what Marco's baking after all
# (maybe they read his blog instead?) and:
if remove:
# ...removes him from the roster...
if local:
# ...by telling Haze to remove him from stored
stored.Group.RemoveMembers([h], '')
event = q.expect('stream-iq', iq_type='set', query_ns=ns.ROSTER)
item = event.query.firstChildElement()
assertEquals(jid, item['jid'])
assertEquals('remove', item['subscription'])
else:
# ...using the other client.
pass
# The server must 'inform all of the user's available resources that
# have requested the roster of the roster item removal':
iq = IQ(stream, "set")
item = iq.addElement((ns.ROSTER, 'query')).addElement('item')
item['jid'] = jid
item['subscription'] = 'remove'
# When Marco found this bug, this roster update included:
item['ask'] = 'subscribe'
# which is a bit weird: I don't think the server should send that when
# the contact's being removed. I think CMs should ignore it, so I'm
# including it in the test.
stream.send(iq)
# In response, Haze should announce that Marco has been removed from
# subscribe:remote-pending and stored:members
q.expect_many(
EventPattern('dbus-signal', signal='MembersChanged',
args=['', [], [h], [], [], 0, 0],
path=subscribe.object_path),
EventPattern('dbus-signal', signal='MembersChanged',
args=['', [], [h], [], [], 0, 0],
path=stored.object_path),
)
else:
# ...rescinds the subscription request...
if local:
raise AssertionError("Haze can't do this ")
else:
# ...in the other client.
pass
# In response, the server sends a roster update:
iq = IQ(stream, "set")
item = iq.addElement((ns.ROSTER, 'query')).addElement('item')
item['jid'] = jid
item['subscription'] = 'none'
# no ask='subscribe' any more.
stream.send(iq)
# In response, Haze should announce that Marco has been removed from
# subscribe:remote-pending; but it can't know that, so nothing happens.
def test_remove_local(q, bus, conn, stream):
test(q, bus, conn, stream, remove=True, local=True)
def test_remove_remote(q, bus, conn, stream):
test(q, bus, conn, stream, remove=True, local=False)
def test_unsubscribe_remote(q, bus, conn, stream):
test(q, bus, conn, stream, remove=False, local=False)
if __name__ == '__main__':
exec_test(test_remove_local)
exec_test(test_remove_remote)
exec_test(test_unsubscribe_remote)
|
tungvx/deploy
|
Django-0.90/django/contrib/comments/views/comments.py
|
Python
|
apache-2.0
| 16,510
| 0.006481
|
from django.core import formfields, validators
from django.core.mail import mail_admins, mail_managers
from django.core.exceptions import Http404, ObjectDoesNotExist
from django.core.extensions import DjangoContext, render_to_response
from django.models.auth import users
from django.models.comments import comments, freecomments
from django.models.core import contenttypes
from django.parts.auth.formfields import AuthenticationForm
from django.utils.httpwrappers import HttpResponseRedirect
from django.utils.text import normalize_newlines
from django.conf.settings import BANNED_IPS, COMMENTS_ALLOW_PROFANITIES, COMMENTS_SKETCHY_USERS_GROUP, COMMENTS_FIRST_FEW, SITE_ID
import base64, datetime
COMMENTS_PER_PAGE = 20
class PublicCommentManipulator(AuthenticationForm):
"Manipulator that handles public registered comments"
def __init__(self, user, ratings_required, ratings_range, num_rating_choices):
AuthenticationForm.__init__(self)
|
self.ratings_range, self.num_rating_choices = ratings_range, num_rating_choices
choices = [(c, c) for c in ratings_range]
def get_validator_list(rating_num):
if rating_num <= num_rating_choices:
|
                return [validators.RequiredIfOtherFieldsGiven(['rating%d' % i for i in range(1, 9) if i != rating_num], "This rating is required because you've entered at least one other rating.")]
else:
return []
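        # In other words: each rating field up to num_rating_choices becomes
        # required as soon as any other rating field is filled in; fields
        # beyond that count carry no validators at all.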
self.fields.extend([
formfields.LargeTextField(field_name="comment", maxlength=3000, is_required=True,
validator_list=[self.hasNoProfanities]),
formfields.RadioSelectField(field_name="rating1", choices=choices,
is_required=ratings_required and num_rating_choices > 0,
validator_list=get_validator_list(1),
),
formfields.RadioSelectField(field_name="rating2", choices=choices,
is_required=ratings_required and num_rating_choices > 1,
validator_list=get_validator_list(2),
),
formfields.RadioSelectField(field_name="rating3", choices=choices,
is_required=ratings_required and num_rating_choices > 2,
validator_list=get_validator_list(3),
),
formfields.RadioSelectField(field_name="rating4", choices=choices,
is_required=ratings_required and num_rating_choices > 3,
validator_list=get_validator_list(4),
),
formfields.RadioSelectField(field_name="rating5", choices=choices,
is_required=ratings_required and num_rating_choices > 4,
validator_list=get_validator_list(5),
),
formfields.RadioSelectField(field_name="rating6", choices=choices,
is_required=ratings_required and num_rating_choices > 5,
validator_list=get_validator_list(6),
),
formfields.RadioSelectField(field_name="rating7", choices=choices,
is_required=ratings_required and num_rating_choices > 6,
validator_list=get_validator_list(7),
),
formfields.RadioSelectField(field_name="rating8", choices=choices,
is_required=ratings_required and num_rating_choices > 7,
validator_list=get_validator_list(8),
),
])
if not user.is_anonymous():
self["username"].is_required = False
self["username"].validator_list = []
self["password"].is_required = False
self["password"].validator_list = []
self.user_cache = user
def hasNoProfanities(self, field_data, all_data):
if COMMENTS_ALLOW_PROFANITIES:
return
return validators.hasNoProfanities(field_data, all_data)
def get_comment(self, new_data):
"Helper function"
return comments.Comment(None, self.get_user_id(), new_data["content_type_id"],
new_data["object_id"], new_data.get("headline", "").strip(),
new_data["comment"].strip(), new_data.get("rating1", None),
new_data.get("rating2", None), new_data.get("rating3", None),
new_data.get("rating4", None), new_data.get("rating5", None),
new_data.get("rating6", None), new_data.get("rating7", None),
new_data.get("rating8", None), new_data.get("rating1", None) is not None,
datetime.datetime.now(), new_data["is_public"], new_data["ip_address"], False, SITE_ID)
def save(self, new_data):
today = datetime.date.today()
c = self.get_comment(new_data)
for old in comments.get_list(content_type__id__exact=new_data["content_type_id"],
object_id__exact=new_data["object_id"], user__id__exact=self.get_user_id()):
# Check that this comment isn't duplicate. (Sometimes people post
# comments twice by mistake.) If it is, fail silently by pretending
# the comment was posted successfully.
if old.submit_date.date() == today and old.comment == c.comment \
and old.rating1 == c.rating1 and old.rating2 == c.rating2 \
and old.rating3 == c.rating3 and old.rating4 == c.rating4 \
and old.rating5 == c.rating5 and old.rating6 == c.rating6 \
and old.rating7 == c.rating7 and old.rating8 == c.rating8:
return old
# If the user is leaving a rating, invalidate all old ratings.
if c.rating1 is not None:
old.valid_rating = False
old.save()
c.save()
# If the commentor has posted fewer than COMMENTS_FIRST_FEW comments,
# send the comment to the managers.
if self.user_cache.get_comments_comment_count() <= COMMENTS_FIRST_FEW:
message = 'This comment was posted by a user who has posted fewer than %s comments:\n\n%s' % \
(COMMENTS_FIRST_FEW, c.get_as_text())
mail_managers("Comment posted by rookie user", message)
if COMMENTS_SKETCHY_USERS_GROUP and COMMENTS_SKETCHY_USERS_GROUP in [g.id for g in self.user_cache.get_group_list()]:
message = 'This comment was posted by a sketchy user:\n\n%s' % c.get_as_text()
mail_managers("Comment posted by sketchy user (%s)" % self.user_cache.username, c.get_as_text())
return c
class PublicFreeCommentManipulator(formfields.Manipulator):
"Manipulator that handles public free (unregistered) comments"
def __init__(self):
self.fields = (
formfields.TextField(field_name="person_name", maxlength=50, is_required=True,
validator_list=[self.hasNoProfanities]),
formfields.LargeTextField(field_name="comment", maxlength=3000, is_required=True,
validator_list=[self.hasNoProfanities]),
)
def hasNoProfanities(self, field_data, all_data):
if COMMENTS_ALLOW_PROFANITIES:
return
return validators.hasNoProfanities(field_data, all_data)
def get_comment(self, new_data):
"Helper function"
return freecomments.FreeComment(None, new_data["content_type_id"],
new_data["object_id"], new_data["comment"].strip(),
new_data["person_name"].strip(), datetime.datetime.now(), new_data["is_public"],
new_data["ip_address"], False, SITE_ID)
def save(self, new_data):
today = datetime.date.today()
c = self.get_comment(new_data)
# Check that this comment isn't duplicate. (Sometimes people post
# comments twice by mistake.) If it is, fail silently by pretending
# the comment was posted successfully.
for old_comment in freecomments.get_list(content_type__id__exact=new_data["content_type_id"],
object_id__exact=new_data["object_id"], person_name__exact=new_data["person_name"],
submit_date__year=today.year, submit_date__month=today.month,
submit_date__day=today.day):
if old_comment.comment == c.comment:
return old_comment
c.save()
return c
def post_comment(re
|
Benzhaomin/TwitchCancer
|
twitchcancer/api/tests/test_pubsubmanager.py
|
Python
|
gpl-3.0
| 4,912
| 0.000611
|
import unittest
from unittest.mock import patch, MagicMock
from twitchcancer.api.pubsubmanager import PubSubManager
# PubSubManager.instance()
class TestPubSubManagerInstance(unittest.TestCase):
# check that we only store one instance of any topic
@patch('twitchcancer.api.pubsubmanager.PubSubManager.__new__', side_effect=PubSubManager.__new__)
def test_all(self, new):
PubSubManager.instance()
PubSubManager.instance()
self.assertEqual(new.call_count, 1)
# PubSubManager.subscribe()
class TestPubSubManagerSubscribe(unittest.TestCase):
# subscribe to a new topic
def test_subscribe_new(self):
p = PubSubManager()
p.subscribe("client", "topic")
# check that the topic was created
self.assertEqual(len(p.subscriptions.keys()), 1)
# check that we are subbed
self.assertTrue("client" in p.subscriptions["topic"])
self.assertTrue(len(p.subscriptions["topic"]), 1)
# subscribe to an existing topic
def test_subscribe_existing(self):
p = PubSubManager()
p.subscriptions["topic"] = {"other client"}
p.subscribe("client", "topic")
# check that the topic was reused
self.assertEqual(len(p.subscriptions.keys()), 1)
# check that we are subbed
self.assertTrue("client" in p.subscriptions["topic"])
self.assertTrue(len(p.subscriptions["topic"]), 2)
# PubSubManager.unsubscribe()
class TestPubSubManagerUnsubscribe(unittest.TestCase):
# unsubscribe from an existing topic
def test_unsubscribe_existing(self):
p = PubSubManager()
p.subscriptions["topic"] = {"client", "other client"}
p.unsubscribe("client", "topic")
# check that we are not subbed anymore
self.assertTrue("client" not in p.subscriptions["topic"])
# unsubscribe from an existing topic as the last client
def test_unsubscribe_existing_last(self):
p = PubSubManager()
p.subscriptions["topic"] = {"client"}
p.unsubscribe("client", "topic")
# check that the topic was garbage collected
self.assertTrue("topic" not in p.subscriptions)
# unsubscribe from an unknown topic
def test_unsubscribe_not_existing(self):
p = PubSubManager()
p.unsubscribe("client", "topic")
# check that the topic wasn't created
self.assertTrue("topic" not in p.subscriptions)
# PubSubManager.unsubscribe_all()
class TestPubSubManagerUnsubscribeAll(unittest.TestCase):
# check that unsubcribe is called for all topics
@patch('twitchcancer.api.pubsubmanager.PubSubManager.unsubscribe')
def test_unsubscribe_all(self, unsubscribe):
p = PubSubManager()
p.subscriptions["topic"] = {"client"}
p.subscriptions["topic 2"] = {"client"}
p.unsubscribe_all("client")
# check the number of calls
# TODO: check the actual arguments of each call
self.assertEqual(unsubscribe.call_count, 2)
# PubSubManager.publish()
class TestPubSubManagerPublish(unittest.TestCase):
    # check that a client subscribed to a topic gets data on publish()
|
    def test_publish_subscribed(self):
        # subscribe a client to a topic
|
client = MagicMock()
p = PubSubManager()
p.subscriptions["topic"] = {client}
# publish data for that topic
topic = MagicMock()
topic.payload = MagicMock(return_value="payload")
p.publish(topic)
# make sure the client got data
client.send.assert_called_once_with("topic", "payload")
# check that a client not subscribed to a topic doesn't get data on publish()
def test_publish_not_subscribed(self):
# subscribe a client to a topic
client = MagicMock()
p = PubSubManager()
p.subscriptions["topic"] = {client}
# publish data for another topic
topic = MagicMock()
topic.match = MagicMock(return_value=False)
p.publish(topic)
# make sure the client didn't get called
self.assertFalse(client.send.called)
# PubSubManager.publish_one()
class TestPubSubManagerPublishOne(unittest.TestCase):
def test_publish_one_existing(self):
client = MagicMock()
topic = MagicMock()
topic.payload = MagicMock(return_value="payload")
with patch('twitchcancer.api.pubsubtopic.PubSubTopic.find', return_value=topic):
PubSubManager().publish_one(client, "topic")
# make sure the client got data
client.send.assert_called_once_with("topic", "payload")
@patch('twitchcancer.api.pubsubtopic.PubSubTopic.find', return_value=None)
def test_publish_one_not_existing(self, find):
client = MagicMock()
PubSubManager().publish_one(client, "topic")
# make sure the client didn't get called
self.assertFalse(client.send.called)
|