code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1
value | license stringclasses 15
values | size int64 3 1.05M |
|---|---|---|---|---|---|
import inspect
from django.db import models
from graphene.core.options import Options
from graphene.core.types import BaseObjectType
from graphene.relay.utils import is_node
VALID_ATTRS = ('model', 'only_fields', 'exclude_fields')
def is_base(cls):
    """Return True when *cls* is a direct subclass of DjangoObjectType."""
    # Imported lazily to avoid a circular import at module load time.
    from graphene.contrib.django.types import DjangoObjectType
    direct_bases = cls.__bases__
    return DjangoObjectType in direct_bases
class DjangoOptions(Options):
    """Meta options for Django-backed graphene ObjectTypes.

    Extends the base graphene ``Options`` with the Django-specific Meta
    attributes (``model``, ``only_fields``, ``exclude_fields``) and
    validates that a real Django model was supplied.
    """

    def __init__(self, *args, **kwargs):
        # ``model`` must exist before the base __init__ runs, since the base
        # class may read Meta attributes during initialization.
        self.model = None
        super(DjangoOptions, self).__init__(*args, **kwargs)
        self.valid_attrs += VALID_ATTRS
        # Field filtering defaults: expose everything unless restricted.
        self.only_fields = None
        self.exclude_fields = []

    def contribute_to_class(self, cls, name):
        # Attach options to *cls*, then validate the Meta model.  Validation
        # only applies to relay nodes and direct DjangoObjectType subclasses;
        # abstract/intermediate base classes are skipped.
        super(DjangoOptions, self).contribute_to_class(cls, name)
        if not is_node(cls) and not is_base(cls):
            return
        if not self.model:
            raise Exception(
                'Django ObjectType %s must have a model in the Meta class attr' % cls)
        elif not inspect.isclass(self.model) or not issubclass(self.model, models.Model):
            raise Exception('Provided model in %s is not a Django model' % cls)
| jhgg/graphene | graphene/contrib/django/options.py | Python | mit | 1,122 |
# -*- coding: utf-8 -*-
# PROJECT : picopico
# TIME : 17-3-16 下午4:30
# AUTHOR : youngershen <[email protected]>
from picopico.utils.captcha.captcha import get_captcha_code, send_captcha
__all__ = ['get_captcha_code', 'send_captcha']
| universalyouth/picopico | picopico/utils/captcha/__init__.py | Python | mit | 249 |
from functools import partial
from sympy.strategies import chain, minimize
import sympy.strategies.branch as branch
from sympy.strategies.branch import yieldify
def identity(x):
    """Return *x* unchanged; the default leaf/objective function."""
    # Was ``identity = lambda x: x`` -- PEP 8 (E731) prefers a named def,
    # which also gives the function a useful __name__ in tracebacks.
    return x


def treeapply(tree, join, leaf=identity):
    """ Apply functions onto recursive containers (tree)

    join - a dictionary mapping container types to functions
      e.g. ``{list: minimize, tuple: chain}``

    Keys are containers/iterables.  Values are functions [a] -> a.

    Examples
    --------

    >>> from sympy.strategies.tree import treeapply
    >>> tree = [(3, 2), (4, 1)]
    >>> treeapply(tree, {list: max, tuple: min})
    2

    >>> add = lambda *args: sum(args)
    >>> def mul(*args):
    ...     total = 1
    ...     for arg in args:
    ...         total *= arg
    ...     return total
    >>> treeapply(tree, {list: mul, tuple: add})
    25
    """
    # First matching container type wins; recurse into its children.
    for typ in join:
        if isinstance(tree, typ):
            recurse = partial(treeapply, join=join, leaf=leaf)
            return join[typ](*map(recurse, tree))
    # Not a known container: treat as a leaf value.
    return leaf(tree)
def greedy(tree, objective=identity, **kwargs):
    """ Execute a strategic tree.  Select alternatives greedily

    Trees
    -----

    Nodes in a tree can be either

    function - a leaf
    list     - a selection among operations
    tuple    - a sequence of chained operations

    Textual examples
    ----------------

    Text: Run f, then run g, e.g. ``lambda x: g(f(x))``
    Code: ``(f, g)``

    Text: Run either f or g, whichever minimizes the objective
    Code: ``[f, g]``

    Text: Run either f or g, whichever is better, then run h
    Code: ``([f, g], h)``

    Text: Either expand then simplify or try factor then foosimp. Finally print
    Code: ``([(expand, simplify), (factor, foosimp)], print)``

    Objective
    ---------

    "Better" is determined by the objective keyword.  This function makes
    choices to minimize the objective.  It defaults to the identity.

    Example
    -------

    >>> from sympy.strategies.tree import greedy
    >>> inc    = lambda x: x + 1
    >>> dec    = lambda x: x - 1
    >>> double = lambda x: 2*x

    >>> tree = [inc, (dec, double)] # either inc or dec-then-double
    >>> fn = greedy(tree)
    >>> fn(4)  # lowest value comes from the inc
    5
    >>> fn(1)  # lowest value comes from dec then double
    0

    This function selects between options in a tuple.  The result is chosen
    that minimizes the objective function.

    >>> fn = greedy(tree, objective=lambda x: -x)  # maximize
    >>> fn(4)  # highest value comes from the dec then double
    6
    >>> fn(1)  # highest value comes from the inc
    2

    Greediness
    ----------

    This is a greedy algorithm.  In the example

        ([a, b], c)  # do either a or b, then do c

    the choice between running ``a`` or ``b`` is made without foresight to c
    """
    # Lists become greedy selections, tuples become chained pipelines.
    select_best = partial(minimize, objective=objective)
    join = {list: select_best, tuple: chain}
    return treeapply(tree, join, **kwargs)
def allresults(tree, leaf=yieldify):
    """ Execute a strategic tree.  Return all possibilities.

    Returns a lazy iterator of all possible results.

    Exhaustiveness
    --------------

    This is an exhaustive algorithm.  In the example

        ([a, b], [c, d])

    all of the results from

        (a, c), (b, c), (a, d), (b, d)

    are returned.  This can lead to combinatorial blowup.

    See sympy.strategies.greedy for details on the tree input format.
    """
    # Lists multiplex over every branch; tuples chain branching strategies.
    join = {list: branch.multiplex, tuple: branch.chain}
    return treeapply(tree, join, leaf=leaf)
def brute(tree, objective=identity, **kwargs):
    """Exhaustively evaluate a strategic tree; return the objective-minimal result."""
    def strategy(expr):
        # Materialize every possible outcome, then pick the best one.
        candidates = tuple(allresults(tree, **kwargs)(expr))
        return min(candidates, key=objective)
    return strategy
| lidavidm/mathics-heroku | venv/lib/python2.7/site-packages/sympy/strategies/tree.py | Python | gpl-3.0 | 3,710 |
#!/usr/bin/env python
"""
main.py -- Udacity conference server-side Python App Engine
HTTP controller handlers for memcache & task queue access
$Id$
created by wesc on 2014 may 24
Modified by Richard Gieg on 12/2/2015 for Udacity Full Stack Project #4
"""
__author__ = '[email protected] (Wesley Chun)'
from time import sleep
import webapp2
from google.appengine.api import app_identity
from google.appengine.api import mail
from conference import ConferenceApi
class SendConfirmationEmailHandler(webapp2.RequestHandler):
    """Task-queue handler that emails the creator of a new Conference."""

    def post(self):
        """Send email confirming Conference creation.

        Expects ``email`` and ``conferenceInfo`` fields in the POST payload,
        enqueued by the conference-creation endpoint.
        """
        mail.send_mail(
            'noreply@%s.appspotmail.com' % (
                app_identity.get_application_id()),     # from
            self.request.get('email'),                  # to
            'You created a new Conference!',            # subj
            'Hi, you have created a following '         # body
            'conference:\r\n\r\n%s' % self.request.get(
                'conferenceInfo')
        )
class SetAnnouncementHandler(webapp2.RequestHandler):
    """Cron handler that refreshes the announcement cache."""

    def get(self):
        """Set Announcement in Memcache."""
        ConferenceApi._cacheAnnouncement()
class UpdateFeaturedSpeakerHandler(webapp2.RequestHandler):
    """Task-queue handler that recomputes the featured speaker for a conference."""

    def post(self):
        """Update the featured speaker.

        Expects ``websafeSpeakerKey`` and ``websafeConferenceKey`` in the
        POST payload.
        """
        # Wait for eventual consistency to catch up, since new session was
        # just added. Since this is running on a task queue thread the wait
        # doesn't affect application responsiveness whatsoever.
        sleep(3)
        # Call the routine that performs the logic for updating the featured
        # speaker.
        ConferenceApi._updateFeaturedSpeaker(
            self.request.get('websafeSpeakerKey'),
            self.request.get('websafeConferenceKey')
        )
# WSGI application: routes the cron and task-queue URLs to their handlers.
app = webapp2.WSGIApplication([
    ('/crons/set_announcement', SetAnnouncementHandler),
    ('/tasks/update_featured_speaker', UpdateFeaturedSpeakerHandler),
    ('/tasks/send_confirmation_email', SendConfirmationEmailHandler),
], debug=True)
| richgieg/ConferenceCentral | main.py | Python | apache-2.0 | 2,056 |
# Plot the gamma distribution
# Based on https://github.com/probml/pmtk3/blob/master/demos/gammaPlotDemo.m

import superimport

import numpy as np
import matplotlib.pyplot as plt
import pyprobml_utils as pml
from scipy.stats import gamma

# Figure 1: overlay Gamma(a, b) pdfs for several shape (a) / rate (b) pairs.
x = np.linspace(0, 7, 100)
aa = [1.0, 1.5, 2.0, 1.0, 1.5, 2.0]  # shape parameters
bb = [1.0, 1.0, 1.0, 2.0, 2.0, 2.0]  # rate parameters
#props = ['b-', 'r:', 'k-.', 'g--', 'c-', 'o-']
props = ['b-', 'r-', 'k-', 'b:', 'r:', 'k:']  # solid lines for b=1, dotted for b=2
for a, b, p in zip(aa, bb, props):
    # scipy parameterizes gamma by shape and *scale* (= 1/rate), hence scale=1/b.
    y = gamma.pdf(x, a, scale=1/b, loc=0)
    plt.plot(x, y, p, lw=3, label='a=%.1f,b=%.1f' % (a, b))
plt.title('Gamma distributions')
plt.legend(fontsize=14)
pml.savefig('gammadist.pdf')
plt.show()

# Figure 2: fix the rate at b=1 and vary only the shape parameter.
x = np.linspace(0, 7, 100)
b = 1
plt.figure()
for a in [1, 1.5, 2]:
    y = gamma.pdf(x, a, scale=1/b, loc=0)
    plt.plot(x, y)
plt.legend(['a=%.1f, b=1' % a for a in [1, 1.5, 2]])
plt.title('Gamma(a,b) distributions')
pml.savefig('gammaDistb1.pdf')
plt.show()

# Figure 3: the Gamma(1,1) special case (an Exponential(1) density),
# with a vertical marker at x = 1.
x = np.linspace(0, 7, 100)
b = 1
a = 1
rv = gamma(a, scale=1/b, loc=0)
y = rv.pdf(x)
plt.plot(x, y)
plt.axvline(1, color='r')
plt.title('Gamma(1,1) distribution')
pml.savefig('gammaDist1.pdf')
plt.show()
| probml/pyprobml | scripts/gamma_dist_plot.py | Python | mit | 1,137 |
VERSION = (0, 0, 2)
__version__ = ".".join(map(str, VERSION)) | myles-archive/django-faq | faq/__init__.py | Python | bsd-3-clause | 61 |
from __future__ import unicode_literals, division, absolute_import
import logging
import sys
import os
from flexget import plugin
from flexget.event import event
log = logging.getLogger('change')
found_deprecated = False
class ChangeWarn(object):
    """
    Gives warning if user has deprecated / changed configuration in the root level.

    Will be replaced by root level validation in the future!

    Contains ugly hacks, better to include all deprecation warnings here during 1.0 BETA phase
    """

    def on_task_start(self, task, config):
        """Abort the task when deprecated root-level plugins are configured."""
        # Module-level flag is shared with the compiled-file check below.
        global found_deprecated
        if 'torrent_size' in task.config:
            log.critical('Plugin torrent_size is deprecated, use content_size instead')
            found_deprecated = True
        if 'nzb_size' in task.config:
            log.critical('Plugin nzb_size is deprecated, use content_size instead')
            found_deprecated = True
        if found_deprecated:
            # Stop scheduling further work and abort this task outright.
            task.manager.scheduler.shutdown(finish_queue=False)
            task.abort('Deprecated config.')
@event('plugin.register')
def register_plugin():
    # Registered as builtin so the check runs for every task without
    # requiring any user configuration.
    plugin.register(ChangeWarn, 'change_warn', builtin=True, api_ver=2)
# check that no old plugins are in pre-compiled form (pyc)
# Best-effort sanity check: any failure (e.g. missing directories in a
# non-source install) is deliberately ignored.
try:
    import os.path
    plugin_dirs = (os.path.normpath(sys.path[0] + '/../flexget/plugins/'),
                   os.path.normpath(sys.path[0] + '/../flexget/plugins/input/'))
    # Filename substrings that identify compiled leftovers of older FlexGet.
    obsolete_markers = ('resolver', 'filter_torrent_size', 'filter_nzb_size',
                        'module_priority', 'ignore_feed', 'module_manual',
                        'output_exec', 'plugin_adv_exec', 'output_transmissionrpc')
    for plugin_dir in plugin_dirs:
        for name in os.listdir(plugin_dir):
            # Collapsed from a long chain of identical if-statements.
            require_clean = (name.startswith('module') or
                             name == 'csv.pyc' or
                             any(marker in name for marker in obsolete_markers))
            if require_clean:
                log.critical('-' * 79)
                log.critical('IMPORTANT: Your installation has some files from older FlexGet!')
                log.critical('')
                log.critical(' Please remove all pre-compiled .pyc and .pyo files from %s' % plugin_dir)
                log.critical(' Offending file: %s' % name)
                log.critical('')
                log.critical(' After getting rid of these FlexGet should run again normally')

                from flexget import __version__ as version
                if version == '{git}':
                    log.critical('')
                    log.critical(' If you are using bootstrapped git checkout you can run:')
                    log.critical(' bin/paver clean_compiled')
                    log.critical('')

                log.critical('-' * 79)
                found_deprecated = True
                # Report only the first offending file per directory.
                break
except Exception:
    # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt still
    # propagate; everything else keeps this check best-effort.
    pass
| voriux/Flexget | flexget/plugins/plugin_change_warn.py | Python | mit | 3,389 |
# -*- coding: utf-8 -*-
# Copyright (C) 2005 Osmo Salomaa
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Formatting text."""
import aeidon
class FormatAgent(aeidon.Delegate):

    """Formatting text."""

    def _get_selection(self):
        """Return (page, selected rows, focused document) for the current view."""
        page = self.get_current_page()
        rows = page.view.get_selected_rows()
        col = page.view.get_focus()[1]
        doc = page.text_column_to_document(col)
        return page, rows, doc

    @aeidon.deco.export
    def _on_toggle_dialogue_dashes_activate(self, *args):
        """Add or remove dialogue dashes on the selected texts."""
        page, rows, doc = self._get_selection()
        page.project.toggle_dialogue_dashes(rows, doc)

    @aeidon.deco.export
    def _on_toggle_italicization_activate(self, *args):
        """Italicize or unitalicize the selected texts."""
        page, rows, doc = self._get_selection()
        page.project.toggle_italicization(rows, doc)

    @aeidon.deco.export
    def _on_use_lower_case_activate(self, *args):
        """Change the selected texts to lower case."""
        page, rows, doc = self._get_selection()
        page.project.change_case(rows, doc, "lower")

    @aeidon.deco.export
    def _on_use_sentence_case_activate(self, *args):
        """Change the selected texts to sentence case."""
        page, rows, doc = self._get_selection()
        page.project.change_case(rows, doc, "capitalize")

    @aeidon.deco.export
    def _on_use_title_case_activate(self, *args):
        """Change the selected texts to title case."""
        page, rows, doc = self._get_selection()
        page.project.change_case(rows, doc, "title")

    @aeidon.deco.export
    def _on_use_upper_case_activate(self, *args):
        """Change the selected texts to upper case."""
        page, rows, doc = self._get_selection()
        page.project.change_case(rows, doc, "upper")
| otsaloma/gaupol | gaupol/agents/format.py | Python | gpl-3.0 | 2,965 |
# Copyright 2010-2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import os
from oslo_config import cfg
# Config option letting deployments behind a proxy override the URL base
# used when building version documents.
versions_opts = [
    cfg.StrOpt('public_endpoint',
               help="Public url to use for versions endpoint. The default "
                    "is None, which will use the request's host_url "
                    "attribute to populate the URL base. If Cinder is "
                    "operating behind a proxy, you will want to change "
                    "this to represent the proxy's URL."),
]

CONF = cfg.CONF
CONF.register_opts(versions_opts)
def get_view_builder(req):
    """Return a ViewBuilder rooted at the configured or request-derived base URL."""
    base_url = CONF.public_endpoint or req.application_url
    return ViewBuilder(base_url)
class ViewBuilder(object):
    """Builds the version documents (choices, lists, single versions) for the API."""

    def __init__(self, base_url):
        """Initialize ViewBuilder.

        :param base_url: url of the root wsgi application
        """
        self.base_url = base_url

    def build_choices(self, VERSIONS, req):
        """Return a 'choices' document covering every known version."""
        choices = [{
            "id": ver['id'],
            "status": ver['status'],
            "links": [{"rel": "self",
                       "href": self.generate_href(ver['id'], req.path)}],
            "media-types": ver['media-types'],
        } for ver in VERSIONS.values()]
        return dict(choices=choices)

    def build_versions(self, versions):
        """Return a 'versions' document, ordered by version key."""
        version_objs = [{
            "id": versions[key]['id'],
            "status": versions[key]['status'],
            "updated": versions[key]['updated'],
            "links": self._build_links(versions[key]),
        } for key in sorted(versions.keys())]
        return dict(versions=version_objs)

    def build_version(self, version):
        """Return a single-version document with a 'self' link prepended."""
        doc = copy.deepcopy(version)
        self_link = {"rel": "self",
                     "href": self.base_url.rstrip('/') + '/'}
        doc['links'].insert(0, self_link)
        return dict(version=doc)

    def _build_links(self, version_data):
        """Generate a container of links that refer to the provided version."""
        return [{'rel': 'self',
                 'href': self.generate_href(version_data['id'])}]

    def generate_href(self, version, path=None):
        """Create an url that refers to a specific version_number."""
        # Anything that is not a v1.x id maps onto the v2 endpoint.
        version_number = 'v1' if version.startswith('v1.') else 'v2'
        if path:
            return os.path.join(self.base_url, version_number, path.strip('/'))
        return os.path.join(self.base_url, version_number) + '/'
| dims/cinder | cinder/api/views/versions.py | Python | apache-2.0 | 3,378 |
# This file is part of Parti.
# Copyright (C) 2008, 2009 Nathaniel Smith <[email protected]>
# Parti is released under the terms of the GNU GPL v2, or, at your option, any
# later version. See the file COPYING for details.
import gtk
import wimpiggy.lowlevel
from wimpiggy.wm import Wm
from wimpiggy.keys import HotkeyManager
from wimpiggy.util import gtk_main_quit_really
from parti.world_organizer import WorldOrganizer
from parti.tray import TraySet
from parti.addons.ipython_embed import spawn_repl_window
from parti.bus import PartiDBusService
class Parti(object):
    """Top-level object wiring together the window manager, trays and D-Bus."""

    def __init__(self, options):
        # Core window manager; ``options.replace`` replaces a running WM.
        self._wm = Wm("Parti", options.replace)
        self._wm.connect("new-window", self._new_window_signaled)
        self._wm.connect("quit", self._wm_quit)

        self._trays = TraySet()
        self._trays.connect("changed", self._desktop_list_changed)

        # Create our display stage
        self._world_organizer = WorldOrganizer(self._trays)
        self._wm.get_property("toplevel").add(self._world_organizer)
        self._world_organizer.show_all()

        ltray = options.tray.lower()
        # __import__ returns topmost module and getattr will not get sub-modules not imported
        # thus (using these two functions) the module path must be specified twice
        dynmodule = getattr(getattr(__import__('parti.trays.' + ltray), 'trays'), ltray)
        dynclass = getattr(dynmodule, options.tray + "Tray")
        self._trays.new(u"default", dynclass)

        # Shift+Alt+R on the root window opens an embedded IPython REPL.
        self._root_hotkeys = HotkeyManager(gtk.gdk.get_default_root_window())
        self._root_hotkeys.add_hotkeys({"<shift><alt>r": "repl"})
        self._root_hotkeys.connect("hotkey::repl",
                                   lambda *args: self.spawn_repl_window())

        # Adopt windows that already existed before we started managing.
        for window in self._wm.get_property("windows"):
            self._add_new_window(window)

        # Start providing D-Bus api
        self._dbus = PartiDBusService(self)

    def main(self):
        """Run the GTK main loop until quit."""
        gtk.main()

    def _wm_quit(self, *args):
        # Force-quit the main loop even if nested loops are running.
        gtk_main_quit_really()

    def _new_window_signaled(self, wm, window):
        self._add_new_window(window)

    def _add_new_window(self, window):
        # FIXME: be less stupid
        self._trays.trays[0].add(window)

    def _desktop_list_changed(self, *args):
        self._wm.emit("desktop-list-changed", self._trays.tags())

    def spawn_repl_window(self):
        """Open a debugging REPL with handy objects pre-bound."""
        spawn_repl_window(self._wm,
                          {"parti": self,
                           "wm": self._wm,
                           "windows": self._wm.get_property("windows"),
                           "trays": self._trays,
                           "lowlevel": wimpiggy.lowlevel})
| njsmith/partiwm | parti/parti_main.py | Python | gpl-2.0 | 2,693 |
# -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
#
# Copyright (C) 2006-2007 Lukáš Lalinský
# Copyright (C) 2013-2015, 2018 Laurent Monin
# Copyright (C) 2014, 2019 Philipp Wolfer
# Copyright (C) 2016-2017 Sambhav Kothari
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from picard import config
from picard.ui.options import (
OptionsPage,
register_options_page,
)
from picard.ui.ui_options_advanced import Ui_AdvancedOptionsPage
class AdvancedOptionsPage(OptionsPage):
    """Options page exposing Picard's advanced file-handling settings."""

    NAME = "advanced"
    TITLE = N_("Advanced")
    PARENT = None
    SORT_ORDER = 90
    ACTIVE = True

    # Persisted settings owned by this page, with their default values.
    options = [
        config.TextOption("setting", "ignore_regex", ""),
        config.BoolOption("setting", "ignore_hidden_files", False),
        config.BoolOption("setting", "recursively_add_files", True),
        config.IntOption("setting", "ignore_track_duration_difference_under", 2),
        config.BoolOption("setting", "completeness_ignore_videos", False),
        config.BoolOption("setting", "completeness_ignore_pregap", False),
        config.BoolOption("setting", "completeness_ignore_data", False),
        config.BoolOption("setting", "completeness_ignore_silence", False),
        config.ListOption("setting", "compare_ignore_tags", []),
    ]

    def __init__(self, parent=None):
        super().__init__(parent)
        self.ui = Ui_AdvancedOptionsPage()
        self.ui.setupUi(self)
        # Live-validate the ignore pattern and surface errors in the UI.
        self.init_regex_checker(self.ui.ignore_regex, self.ui.regex_error)

    def load(self):
        """Populate the widgets from the persisted configuration."""
        self.ui.ignore_regex.setText(config.setting["ignore_regex"])
        self.ui.ignore_hidden_files.setChecked(config.setting["ignore_hidden_files"])
        self.ui.recursively_add_files.setChecked(config.setting["recursively_add_files"])
        self.ui.ignore_track_duration_difference_under.setValue(config.setting["ignore_track_duration_difference_under"])
        self.ui.completeness_ignore_videos.setChecked(config.setting["completeness_ignore_videos"])
        self.ui.completeness_ignore_pregap.setChecked(config.setting["completeness_ignore_pregap"])
        self.ui.completeness_ignore_data.setChecked(config.setting["completeness_ignore_data"])
        self.ui.completeness_ignore_silence.setChecked(config.setting["completeness_ignore_silence"])
        self.ui.compare_ignore_tags.update(config.setting["compare_ignore_tags"])
        self.ui.compare_ignore_tags.set_user_sortable(False)

    def save(self):
        """Write the widget state back into the persisted configuration."""
        config.setting["ignore_regex"] = self.ui.ignore_regex.text()
        config.setting["ignore_hidden_files"] = self.ui.ignore_hidden_files.isChecked()
        config.setting["recursively_add_files"] = self.ui.recursively_add_files.isChecked()
        config.setting["ignore_track_duration_difference_under"] = self.ui.ignore_track_duration_difference_under.value()
        config.setting["completeness_ignore_videos"] = self.ui.completeness_ignore_videos.isChecked()
        config.setting["completeness_ignore_pregap"] = self.ui.completeness_ignore_pregap.isChecked()
        config.setting["completeness_ignore_data"] = self.ui.completeness_ignore_data.isChecked()
        config.setting["completeness_ignore_silence"] = self.ui.completeness_ignore_silence.isChecked()
        # Only touch the stored list if it actually changed, avoiding a
        # needless config write.
        tags = list(self.ui.compare_ignore_tags.tags)
        if tags != config.setting["compare_ignore_tags"]:
            config.setting["compare_ignore_tags"] = tags

    def restore_defaults(self):
        # The tag-list widget must be cleared explicitly before the base
        # class resets the option values.
        self.ui.compare_ignore_tags.clear()
        super().restore_defaults()
# Make the page available in the options dialog.
register_options_page(AdvancedOptionsPage)
| Sophist-UK/Sophist_picard | picard/ui/options/advanced.py | Python | gpl-2.0 | 4,214 |
#!/usr/bin/env python3
import itertools
from collections import defaultdict
import networkx as nx
import numpy as np
from pgmpy.base import UndirectedGraph
from pgmpy.factors import factor_product, Factor
from pgmpy.independencies import Independencies
from pgmpy.extern.six.moves import map, range, zip
class MarkovModel(UndirectedGraph):
"""
Base class for markov model.
A MarkovModel stores nodes and edges with potentials
MarkovModel holds undirected edges.
Parameters
----------
data : input graph
Data to initialize graph. If data=None (default) an empty
graph is created. The data can be an edge list, or any
NetworkX graph object.
Examples
--------
Create an empty Markov Model with no nodes and no edges.
>>> from pgmpy.models import MarkovModel
>>> G = MarkovModel()
G can be grown in several ways.
**Nodes:**
Add one node at a time:
>>> G.add_node('a')
Add the nodes from any container (a list, set or tuple or the nodes
from another graph).
>>> G.add_nodes_from(['a', 'b'])
**Edges:**
G can also be grown by adding edges.
Add one edge,
>>> G.add_edge('a', 'b')
a list of edges,
>>> G.add_edges_from([('a', 'b'), ('b', 'c')])
If some edges connect nodes not yet in the model, the nodes
are added automatically. There are no errors when adding
nodes or edges that already exist.
**Shortcuts:**
Many common graph features allow python syntax for speed reporting.
>>> 'a' in G # check if node in graph
True
>>> len(G) # number of nodes in graph
3
Public Methods
--------------
add_node('node1')
add_nodes_from(['node1', 'node2', ...])
add_edge('node1', 'node2')
add_edges_from([('node1', 'node2'),('node3', 'node4')])
"""
    def __init__(self, ebunch=None):
        # ``ebunch``: optional iterable of edges used to populate the graph.
        super(MarkovModel, self).__init__()
        if ebunch:
            self.add_edges_from(ebunch)
        # Factors associated with the model (see ``add_factors``).
        self.factors = []
def add_edge(self, u, v, **kwargs):
"""
Add an edge between u and v.
The nodes u and v will be automatically added if they are
not already in the graph
Parameters
----------
u,v : nodes
Nodes can be any hashable Python object.
Examples
--------
>>> from pgmpy.models import MarkovModel
>>> G = MarkovModel()
>>> G.add_nodes_from(['Alice', 'Bob', 'Charles'])
>>> G.add_edge('Alice', 'Bob')
"""
# check that there is no self loop.
if u != v:
super(MarkovModel, self).add_edge(u, v, **kwargs)
else:
raise ValueError('Self loops are not allowed')
def add_factors(self, *factors):
"""
Associate a factor to the graph.
See factors class for the order of potential values
Parameters
----------
*factor: pgmpy.factors.factors object
A factor object on any subset of the variables of the model which
is to be associated with the model.
Returns
-------
None
Examples
--------
>>> from pgmpy.models import MarkovModel
>>> from pgmpy.factors import Factor
>>> student = MarkovModel([('Alice', 'Bob'), ('Bob', 'Charles'),
... ('Charles', 'Debbie'), ('Debbie', 'Alice')])
>>> factor = Factor(['Alice', 'Bob'], cardinality=[3, 2],
... value=np.random.rand(6))
>>> student.add_factors(factor)
"""
for factor in factors:
if set(factor.variables) - set(factor.variables).intersection(
set(self.nodes())):
raise ValueError("Factors defined on variable not in the model",
factor)
self.factors.append(factor)
def get_factors(self):
"""
Returns the factors that have been added till now to the graph
Examples
--------
>>> from pgmpy.models import MarkovModel
>>> from pgmpy.factors import Factor
>>> student = MarkovModel([('Alice', 'Bob'), ('Bob', 'Charles')])
>>> factor = Factor(['Alice', 'Bob'], cardinality=[2, 2],
... value=np.random.rand(4))
>>> student.add_factors(factor)
>>> student.get_factors()
"""
return self.factors
def remove_factors(self, *factors):
"""
Removes the given factors from the added factors.
Examples
--------
>>> from pgmpy.models import MarkovModel
>>> from pgmpy.factors import Factor
>>> student = MarkovModel([('Alice', 'Bob'), ('Bob', 'Charles')])
>>> factor = Factor(['Alice', 'Bob'], cardinality=[2, 2],
... values=np.random.rand(4))
>>> student.add_factors(factor)
>>> student.remove_factors(factor)
"""
for factor in factors:
self.factors.remove(factor)
def get_cardinality(self, check_cardinality=False):
"""
Returns a dictionary with the given factors as keys and their respective
cardinality as values.
Parameters
----------
check_cardinality: boolean, optional
If, check_cardinality=True it checks if cardinality information
for all the variables is availble or not. If not it raises an error.
Examples
--------
>>> from pgmpy.models import MarkovModel
>>> from pgmpy.factors import Factor
>>> student = MarkovModel([('Alice', 'Bob'), ('Bob', 'Charles')])
>>> factor = Factor(['Alice', 'Bob'], cardinality=[2, 2],
... values=np.random.rand(4))
>>> student.add_factors(factor)
>>> student.get_cardinality()
defaultdict(<class 'int'>, {'Bob': 2, 'Alice': 2})
"""
cardinalities = defaultdict(int)
for factor in self.factors:
for variable, cardinality in zip(factor.scope(), factor.cardinality):
cardinalities[variable] = cardinality
if check_cardinality and len(self.nodes()) != len(cardinalities):
raise ValueError('Factors for all the variables not defined')
return cardinalities
    def check_model(self):
        """
        Check the model for various errors. This method checks for the following
        errors -

        * Checks if the cardinalities of all the variables are consistent across all the factors.
        * Factors are defined for all the random variables.

        Returns
        -------
        check: boolean
            True if all the checks are passed
        """
        cardinalities = self.get_cardinality()
        for factor in self.factors:
            # Each factor's cardinalities must agree with the values
            # collected across all factors.
            for variable, cardinality in zip(factor.scope(), factor.cardinality):
                if cardinalities[variable] != cardinality:
                    raise ValueError(
                        'Cardinality of variable {var} not matching among factors'.format(var=variable))
            # Every pair of variables in a factor must be neighbors in the
            # graph, i.e. the factor must lie within a clique.
            for var1, var2 in itertools.combinations(factor.variables, 2):
                if var2 not in self.neighbors(var1):
                    raise ValueError("Factor inconsistent with the model.")
        return True
    def to_factor_graph(self):
        """
        Converts the markov model into factor graph.

        A factor graph contains two types of nodes. One type corresponds to
        random variables whereas the second type corresponds to factors over
        these variables. The graph only contains edges between variables and
        factor nodes. Each factor node is associated with one factor whose
        scope is the set of variables that are its neighbors.

        Examples
        --------
        >>> from pgmpy.models import MarkovModel
        >>> from pgmpy.factors import Factor
        >>> student = MarkovModel([('Alice', 'Bob'), ('Bob', 'Charles')])
        >>> factor1 = Factor(['Alice', 'Bob'], [3, 2], np.random.rand(6))
        >>> factor2 = Factor(['Bob', 'Charles'], [2, 2], np.random.rand(4))
        >>> student.add_factors(factor1, factor2)
        >>> factor_graph = student.to_factor_graph()
        """
        # Imported here to avoid a circular import between models modules.
        from pgmpy.models import FactorGraph
        factor_graph = FactorGraph()

        if not self.factors:
            raise ValueError('Factors not associated with the random variables.')

        factor_graph.add_nodes_from(self.nodes())
        for factor in self.factors:
            scope = factor.scope()
            # Factor nodes are named after their scope, e.g. 'phi_Alice_Bob'.
            factor_node = 'phi_' + '_'.join(scope)
            factor_graph.add_edges_from(itertools.product(scope, [factor_node]))
            factor_graph.add_factors(factor)

        return factor_graph
def triangulate(self, heuristic='H6', order=None, inplace=False):
"""
Triangulate the graph.
If order of deletion is given heuristic algorithm will not be used.
Parameters
----------
heuristic: H1 | H2 | H3 | H4 | H5 | H6
The heuristic algorithm to use to decide the deletion order of
the variables to compute the triangulated graph.
Let X be the set of variables and X(i) denotes the i-th variable.
* S(i) - The size of the clique created by deleting the variable.
* E(i) - Cardinality of variable X(i).
* M(i) - Maximum size of cliques given by X(i) and its adjacent nodes.
* C(i) - Sum of size of cliques given by X(i) and its adjacent nodes.
The heuristic algorithm decide the deletion order if this way:
* H1 - Delete the variable with minimal S(i).
* H2 - Delete the variable with minimal S(i)/E(i).
* H3 - Delete the variable with minimal S(i) - M(i).
* H4 - Delete the variable with minimal S(i) - C(i).
* H5 - Delete the variable with minimal S(i)/M(i).
* H6 - Delete the variable with minimal S(i)/C(i).
order: list, tuple (array-like)
The order of deletion of the variables to compute the triagulated
graph. If order is given heuristic algorithm will not be used.
inplace: True | False
if inplace is true then adds the edges to the object from
which it is called else returns a new object.
Reference
---------
http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.56.3607
Examples
--------
>>> from pgmpy.models import MarkovModel
>>> from pgmpy.factors import Factor
>>> G = MarkovModel()
>>> G.add_nodes_from(['x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7'])
>>> G.add_edges_from([('x1', 'x3'), ('x1', 'x4'), ('x2', 'x4'),
... ('x2', 'x5'), ('x3', 'x6'), ('x4', 'x6'),
... ('x4', 'x7'), ('x5', 'x7')])
>>> phi = [Factor(edge, [2, 2], np.random.rand(4)) for edge in G.edges()]
>>> G.add_factors(*phi)
>>> G_chordal = G.triangulate()
"""
self.check_model()
if self.is_triangulated():
if inplace:
return
else:
return self
graph_copy = nx.Graph(self.edges())
edge_set = set()
def _find_common_cliques(cliques_list):
"""
Finds the common cliques among the given set of cliques for
corresponding node.
"""
common = set([tuple(x) for x in cliques_list[0]])
for i in range(1, len(cliques_list)):
common = common & set([tuple(x) for x in cliques_list[i]])
return list(common)
def _find_size_of_clique(clique, cardinalities):
"""
Computes the size of a clique.
Size of a clique is defined as product of cardinalities of all the
nodes present in the clique.
"""
return list(map(lambda x: np.prod([cardinalities[node] for node in x]),
clique))
        def _get_cliques_dict(node):
            """
            Returns a dictionary in the form of {node: cliques_formed} of the
            node along with its neighboring nodes.

            clique_dict_removed would be containing the cliques created
            after deletion of the node
            clique_dict_node would be containing the cliques created before
            deletion of the node
            """
            # Work on a throwaway copy so the outer graph_copy (from the
            # enclosing triangulate()) is not mutated here.
            graph_working_copy = nx.Graph(graph_copy.edges())
            neighbors = graph_working_copy.neighbors(node)
            # Connect all neighbors pairwise: this simulates the fill-in
            # edges that eliminating `node` would introduce.
            graph_working_copy.add_edges_from(itertools.combinations(neighbors, 2))
            clique_dict = nx.cliques_containing_node(graph_working_copy,
                                                     nodes=([node] + neighbors))
            graph_working_copy.remove_node(node)
            clique_dict_removed = nx.cliques_containing_node(graph_working_copy,
                                                             nodes=neighbors)
            return clique_dict, clique_dict_removed
if not order:
order = []
cardinalities = self.get_cardinality()
for index in range(self.number_of_nodes()):
# S represents the size of clique created by deleting the
# node from the graph
S = {}
# M represents the size of maximum size of cliques given by
# the node and its adjacent node
M = {}
# C represents the sum of size of the cliques created by the
# node and its adjacent node
C = {}
for node in set(graph_copy.nodes()) - set(order):
clique_dict, clique_dict_removed = _get_cliques_dict(node)
S[node] = _find_size_of_clique(
_find_common_cliques(list(clique_dict_removed.values())),
cardinalities
)[0]
common_clique_size = _find_size_of_clique(
_find_common_cliques(list(clique_dict.values())),
cardinalities
)
M[node] = np.max(common_clique_size)
C[node] = np.sum(common_clique_size)
if heuristic == 'H1':
node_to_delete = min(S, key=S.get)
elif heuristic == 'H2':
S_by_E = {key: S[key] / cardinalities[key] for key in S}
node_to_delete = min(S_by_E, key=S_by_E.get)
elif heuristic == 'H3':
S_minus_M = {key: S[key] - M[key] for key in S}
node_to_delete = min(S_minus_M, key=S_minus_M.get)
elif heuristic == 'H4':
S_minus_C = {key: S[key] - C[key] for key in S}
node_to_delete = min(S_minus_C, key=S_minus_C.get)
elif heuristic == 'H5':
S_by_M = {key: S[key] / M[key] for key in S}
node_to_delete = min(S_by_M, key=S_by_M.get)
else:
S_by_C = {key: S[key] / C[key] for key in S}
node_to_delete = min(S_by_C, key=S_by_C.get)
order.append(node_to_delete)
graph_copy = nx.Graph(self.edges())
for node in order:
for edge in itertools.combinations(graph_copy.neighbors(node), 2):
graph_copy.add_edge(edge[0], edge[1])
edge_set.add(edge)
graph_copy.remove_node(node)
if inplace:
for edge in edge_set:
self.add_edge(edge[0], edge[1])
return self
else:
graph_copy = MarkovModel(self.edges())
for edge in edge_set:
graph_copy.add_edge(edge[0], edge[1])
return graph_copy
def to_junction_tree(self):
"""
Creates a junction tree (or clique tree) for a given markov model.
For a given markov model (H) a junction tree (G) is a graph
1. where each node in G corresponds to a maximal clique in H
2. each sepset in G separates the variables strictly on one side of the
edge to other.
Examples
--------
>>> from pgmpy.models import MarkovModel
>>> from pgmpy.factors import Factor
>>> mm = MarkovModel()
>>> mm.add_nodes_from(['x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7'])
>>> mm.add_edges_from([('x1', 'x3'), ('x1', 'x4'), ('x2', 'x4'),
... ('x2', 'x5'), ('x3', 'x6'), ('x4', 'x6'),
... ('x4', 'x7'), ('x5', 'x7')])
>>> phi = [Factor(edge, [2, 2], np.random.rand(4)) for edge in mm.edges()]
>>> mm.add_factors(*phi)
>>> junction_tree = mm.to_junction_tree()
"""
from pgmpy.models import JunctionTree
# Check whether the model is valid or not
self.check_model()
# Triangulate the graph to make it chordal
triangulated_graph = self.triangulate()
# Find maximal cliques in the chordal graph
cliques = list(map(tuple, nx.find_cliques(triangulated_graph)))
# If there is only 1 clique, then the junction tree formed is just a
# clique tree with that single clique as the node
if len(cliques) == 1:
clique_trees = JunctionTree()
clique_trees.add_node(cliques[0])
# Else if the number of cliques is more than 1 then create a complete
# graph with all the cliques as nodes and weight of the edges being
# the length of sepset between two cliques
elif len(cliques) >= 2:
complete_graph = UndirectedGraph()
edges = list(itertools.combinations(cliques, 2))
weights = list(map(lambda x: len(set(x[0]).intersection(set(x[1]))),
edges))
for edge, weight in zip(edges, weights):
complete_graph.add_edge(*edge, weight=-weight)
# Create clique trees by minimum (or maximum) spanning tree method
clique_trees = JunctionTree(nx.minimum_spanning_tree(complete_graph).edges())
# Check whether the factors are defined for all the random variables or not
all_vars = itertools.chain(*[factor.scope() for factor in self.factors])
if set(all_vars) != set(self.nodes()):
ValueError('Factor for all the random variables not specified')
# Dictionary stating whether the factor is used to create clique
# potential or not
# If false, then it is not used to create any clique potential
is_used = {factor: False for factor in self.factors}
for node in clique_trees.nodes():
clique_factors = []
for factor in self.factors:
# If the factor is not used in creating any clique potential as
# well as has any variable of the given clique in its scope,
# then use it in creating clique potential
if not is_used[factor] and set(factor.scope()).issubset(node):
clique_factors.append(factor)
is_used[factor] = True
# To compute clique potential, initially set it as unity factor
var_card = [self.get_cardinality()[x] for x in node]
clique_potential = Factor(node, var_card, np.ones(np.product(var_card)))
# multiply it with the factors associated with the variables present
# in the clique (or node)
clique_potential *= factor_product(*clique_factors)
clique_trees.add_factors(clique_potential)
if not all(is_used.values()):
raise ValueError('All the factors were not used to create Junction Tree.'
'Extra factors are defined.')
return clique_trees
def markov_blanket(self, node):
"""
Returns a markov blanket for a random variable.
Markov blanket is the neighboring nodes of the given node.
Examples
--------
>>> from pgmpy.models import MarkovModel
>>> mm = MarkovModel()
>>> mm.add_nodes_from(['x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7'])
>>> mm.add_edges_from([('x1', 'x3'), ('x1', 'x4'), ('x2', 'x4'),
... ('x2', 'x5'), ('x3', 'x6'), ('x4', 'x6'),
... ('x4', 'x7'), ('x5', 'x7')])
>>> mm.markov_blanket('x1')
"""
return self.neighbors(node)
    def get_local_independencies(self, latex=False):
        """
        Returns all the local independencies present in the markov model.

        Local independencies are the independence assertion in the form of
        .. math:: {X \perp W - {X} - MB(X) | MB(X)}
        where MB is the markov blanket of all the random variables in X

        Parameters
        ----------
        latex: boolean
            If latex=True then latex string of the independence assertion
            would be created

        Examples
        --------
        >>> from pgmpy.models import MarkovModel
        >>> mm = MarkovModel()
        >>> mm.add_nodes_from(['x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7'])
        >>> mm.add_edges_from([('x1', 'x3'), ('x1', 'x4'), ('x2', 'x4'),
        ...                    ('x2', 'x5'), ('x3', 'x6'), ('x4', 'x6'),
        ...                    ('x4', 'x7'), ('x5', 'x7')])
        >>> mm.get_local_independencies()
        """
        from pgmpy.exceptions import RequiredError
        local_independencies = Independencies()
        all_vars = set(self.nodes())
        for node in self.nodes():
            markov_blanket = set(self.markov_blanket(node))
            # Everything outside {node} and its blanket is asserted
            # independent of `node` given the blanket.
            rest = all_vars - set([node]) - markov_blanket
            try:
                local_independencies.add_assertions([node, list(rest), list(markov_blanket)])
            except RequiredError:
                # NOTE(review): presumably raised for degenerate assertions
                # (e.g. an empty `rest` set); confirm in pgmpy.exceptions.
                pass
        local_independencies.reduce()
        if latex:
            return local_independencies.latex_string()
        else:
            return local_independencies
    def to_bayesian_model(self):
        """
        Creates a Bayesian Model which is a minimum I-Map for this markov model.

        The ordering of parents may not remain constant. It would depend on the
        ordering of variable in the junction tree (which is not constant) all the
        time.

        Examples
        --------
        >>> from pgmpy.models import MarkovModel
        >>> from pgmpy.factors import Factor
        >>> mm = MarkovModel()
        >>> mm.add_nodes_from(['x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7'])
        >>> mm.add_edges_from([('x1', 'x3'), ('x1', 'x4'), ('x2', 'x4'),
        ...                    ('x2', 'x5'), ('x3', 'x6'), ('x4', 'x6'),
        ...                    ('x4', 'x7'), ('x5', 'x7')])
        >>> phi = [Factor(edge, [2, 2], np.random.rand(4)) for edge in mm.edges()]
        >>> mm.add_factors(*phi)
        >>> bm = mm.to_bayesian_model()
        """
        from pgmpy.models import BayesianModel
        bm = BayesianModel()
        # var_clique_dict maps each variable to the first clique (in BFS
        # order) that contains it; defaults to an empty tuple (falsy).
        var_clique_dict = defaultdict(tuple)
        var_order = []
        # Create a junction tree from the markov model.
        # Creation of clique tree involves triangulation, finding maximal cliques
        # and creating a tree from these cliques
        junction_tree = self.to_junction_tree()
        # create an ordering of the nodes based on the ordering of the clique
        # in which it appeared first
        # NOTE(review): nodes()[0] picks an arbitrary root clique, which is
        # why the resulting parent ordering is not deterministic.
        root_node = junction_tree.nodes()[0]
        bfs_edges = nx.bfs_edges(junction_tree, root_node)
        # Each clique node is a tuple of variables; seed the order with the
        # root clique's variables.
        for node in root_node:
            var_clique_dict[node] = root_node
            var_order.append(node)
        for edge in bfs_edges:
            clique_node = edge[1]
            for node in clique_node:
                # Record a variable only the first time a clique mentions it.
                if not var_clique_dict[node]:
                    var_clique_dict[node] = clique_node
                    var_order.append(node)
        # create a bayesian model by adding edges from parent of node to node as
        # par(x_i) = (var(c_k) - x_i) \cap {x_1, ..., x_{i-1}}
        for node_index in range(len(var_order)):
            node = var_order[node_index]
            node_parents = (set(var_clique_dict[node]) - set([node])).intersection(
                set(var_order[:node_index]))
            bm.add_edges_from([(parent, node) for parent in node_parents])
            # TODO : Convert factor into CPDs
        return bm
def get_partition_function(self):
"""
Returns the partition function for a given undirected graph.
A partition function is defined as
.. math:: \sum_{X}(\prod_{i=1}^{m} \phi_i)
where m is the number of factors present in the graph
and X are all the random variables present.
Examples
--------
>>> from pgmpy.models import MarkovModel
>>> from pgmpy.factors import Factor
>>> G = MarkovModel()
>>> G.add_nodes_from(['x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7'])
>>> G.add_edges_from([('x1', 'x3'), ('x1', 'x4'), ('x2', 'x4'),
... ('x2', 'x5'), ('x3', 'x6'), ('x4', 'x6'),
... ('x4', 'x7'), ('x5', 'x7')])
>>> phi = [Factor(edge, [2, 2], np.random.rand(4)) for edge in G.edges()]
>>> G.add_factors(*phi)
>>> G.get_partition_function()
"""
self.check_model()
factor = self.factors[0]
factor = factor_product(factor, *[self.factors[i] for i in
range(1, len(self.factors))])
if set(factor.scope()) != set(self.nodes()):
raise ValueError('Factor for all the random variables not defined.')
return np.sum(factor.values)
def copy(self):
"""
Returns a copy of this Markov Model.
Returns
-------
MarkovModel: Copy of this Markov model.
Examples
-------
>>> from pgmpy.factors import Factor
>>> from pgmpy.models import MarkovModel
>>> G = MarkovModel()
>>> G.add_nodes_from([('a', 'b'), ('b', 'c')])
>>> G.add_edge(('a', 'b'), ('b', 'c'))
>>> G_copy = G.copy()
>>> G_copy.edges()
[(('a', 'b'), ('b', 'c'))]
>>> G_copy.nodes()
[('a', 'b'), ('b', 'c')]
>>> factor = Factor([('a', 'b')], cardinality=[3],
... values=np.random.rand(3))
>>> G.add_factors(factor)
>>> G.get_factors()
[<Factor representing phi(('a', 'b'):3) at 0x...>]
>>> G_copy.get_factors()
[]
"""
clone_graph = MarkovModel(self.edges())
clone_graph.add_nodes_from(self.nodes())
if self.factors:
factors_copy = [factor.copy() for factor in self.factors]
clone_graph.add_factors(*factors_copy)
return clone_graph
| anaviltripathi/pgmpy | pgmpy/models/MarkovModel.py | Python | mit | 27,052 |
# Copyright 2013, Big Switch Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from horizon.utils import validators
from openstack_dashboard import api
# Shared validator for the source/destination port fields: accepts a single
# port or a colon-separated port range (e.g. "80" or "8000:8080").
port_validator = validators.validate_port_or_colon_separated_port_range
# Module-level logger named after this module.
LOG = logging.getLogger(__name__)
class UpdateRule(forms.SelfHandlingForm):
    """Form for editing an existing FWaaS firewall rule.

    All fields are optional; blank optional values are normalized to None
    before being sent to the API (see handle()).
    """
    name = forms.CharField(max_length=80, label=_("Name"), required=False)
    description = forms.CharField(
        required=False,
        max_length=80, label=_("Description"))
    protocol = forms.ChoiceField(
        label=_("Protocol"), required=False,
        choices=[('TCP', _('TCP')), ('UDP', _('UDP')), ('ICMP', _('ICMP')),
                 ('ANY', _('ANY'))],
        help_text=_('Protocol for the firewall rule'))
    action = forms.ChoiceField(
        label=_("Action"), required=False,
        choices=[('ALLOW', _('ALLOW')), ('DENY', _('DENY'))],
        help_text=_('Action for the firewall rule'))
    source_ip_address = forms.IPField(
        label=_("Source IP Address/Subnet"),
        version=forms.IPv4 | forms.IPv6,
        required=False, mask=True,
        help_text=_('Source IP address or subnet'))
    destination_ip_address = forms.IPField(
        label=_('Destination IP Address/Subnet'),
        version=forms.IPv4 | forms.IPv6,
        required=False, mask=True,
        help_text=_('Destination IP address or subnet'))
    source_port = forms.CharField(
        max_length=80,
        label=_("Source Port/Port Range"),
        required=False,
        validators=[port_validator],
        help_text=_('Source port (integer in [1, 65535] or range in a:b)'))
    destination_port = forms.CharField(
        max_length=80,
        label=_("Destination Port/Port Range"),
        required=False,
        validators=[port_validator],
        help_text=_('Destination port (integer in [1, 65535] or range'
                    ' in a:b)'))
    shared = forms.BooleanField(label=_("Shared"), required=False)
    enabled = forms.BooleanField(label=_("Enabled"), required=False)
    failure_url = 'horizon:project:firewalls:index'
    def handle(self, request, context):
        """Send the edited values to the FWaaS API; returns the updated
        rule on success, or redirects to failure_url on error."""
        # `rule_id` is injected into the form's initial data by the view.
        rule_id = self.initial['rule_id']
        name_or_id = context.get('name') or rule_id
        if context['protocol'] == 'ANY':
            # The API represents "any protocol" as None, not the string 'ANY'.
            context['protocol'] = None
        for f in ['source_ip_address', 'destination_ip_address',
                  'source_port', 'destination_port']:
            if not context[f]:
                # Blank optional fields are sent as None so the API clears them.
                context[f] = None
        try:
            rule = api.fwaas.rule_update(request, rule_id, **context)
            msg = _('Rule %s was successfully updated.') % name_or_id
            LOG.debug(msg)
            messages.success(request, msg)
            return rule
        except Exception as e:
            msg = (_('Failed to update rule %(name)s: %(reason)s') %
                   {'name': name_or_id, 'reason': e})
            LOG.error(msg)
            redirect = reverse(self.failure_url)
            exceptions.handle(request, msg, redirect=redirect)
class UpdatePolicy(forms.SelfHandlingForm):
    """Form for editing a firewall policy's basic attributes."""
    name = forms.CharField(max_length=80, label=_("Name"), required=False)
    description = forms.CharField(required=False,
                                  max_length=80, label=_("Description"))
    shared = forms.BooleanField(label=_("Shared"), required=False)
    audited = forms.BooleanField(label=_("Audited"), required=False)
    failure_url = 'horizon:project:firewalls:index'
    def handle(self, request, context):
        """Update the policy via the FWaaS API and report the outcome."""
        policy_id = self.initial['policy_id']
        display_name = context.get('name') or policy_id
        try:
            updated_policy = api.fwaas.policy_update(request, policy_id,
                                                     **context)
        except Exception as e:
            msg = _('Failed to update policy %(name)s: %(reason)s') % {
                'name': display_name, 'reason': e}
            LOG.error(msg)
            exceptions.handle(request, msg,
                              redirect=reverse(self.failure_url))
        else:
            msg = _('Policy %s was successfully updated.') % display_name
            LOG.debug(msg)
            messages.success(request, msg)
            return updated_policy
class UpdateFirewall(forms.SelfHandlingForm):
    """Form for editing a firewall's name, description, policy and state."""
    name = forms.CharField(max_length=80,
                           label=_("Name"),
                           required=False)
    description = forms.CharField(max_length=80,
                                  label=_("Description"),
                                  required=False)
    firewall_policy_id = forms.ChoiceField(label=_("Policy"))
    admin_state_up = forms.ChoiceField(choices=[(True, _('UP')),
                                                (False, _('DOWN'))],
                                       label=_("Admin State"))
    failure_url = 'horizon:project:firewalls:index'
    def __init__(self, request, *args, **kwargs):
        super(UpdateFirewall, self).__init__(request, *args, **kwargs)
        try:
            tenant_id = self.request.user.tenant_id
            policies = api.fwaas.policy_list_for_tenant(request, tenant_id)
            policies = sorted(policies, key=lambda policy: policy.name)
        except Exception:
            exceptions.handle(request,
                              _('Unable to retrieve policy list.'))
            policies = []
        policy_id = kwargs['initial']['firewall_policy_id']
        # BUG FIX: previously this indexed [0] into a filtered list, which
        # raised an uncaught IndexError whenever the firewall's current
        # policy was not in the retrieved list (e.g. the listing failed
        # above, or the policy was deleted). Fall back to showing the raw
        # policy id instead of crashing the page.
        policy_name = next((p.name for p in policies if p.id == policy_id),
                           policy_id)
        # The current policy is always listed first, pre-selected.
        firewall_policy_id_choices = [(policy_id, policy_name)]
        for p in policies:
            if p.id != policy_id:
                firewall_policy_id_choices.append((p.id, p.name_or_id))
        self.fields['firewall_policy_id'].choices = firewall_policy_id_choices
    def handle(self, request, context):
        """Update the firewall via the FWaaS API; returns the updated
        firewall on success, or redirects to failure_url on error."""
        firewall_id = self.initial['firewall_id']
        name_or_id = context.get('name') or firewall_id
        # The ChoiceField delivers the admin state as the string
        # 'True'/'False'; the API expects a boolean.
        context['admin_state_up'] = (context['admin_state_up'] == 'True')
        try:
            firewall = api.fwaas.firewall_update(request, firewall_id,
                                                 **context)
            msg = _('Firewall %s was successfully updated.') % name_or_id
            LOG.debug(msg)
            messages.success(request, msg)
            return firewall
        except Exception as e:
            msg = _('Failed to update firewall %(name)s: %(reason)s') % {
                'name': name_or_id, 'reason': e}
            LOG.error(msg)
            redirect = reverse(self.failure_url)
            exceptions.handle(request, msg, redirect=redirect)
class InsertRuleToPolicy(forms.SelfHandlingForm):
    """Form that inserts an unassociated rule into a firewall policy,
    optionally positioned before/after one of the policy's current rules.
    """
    firewall_rule_id = forms.ChoiceField(label=_("Insert Rule"))
    insert_before = forms.ChoiceField(label=_("Before"),
                                      required=False)
    insert_after = forms.ChoiceField(label=_("After"),
                                     required=False)
    failure_url = 'horizon:project:firewalls:index'
    def __init__(self, request, *args, **kwargs):
        super(InsertRuleToPolicy, self).__init__(request, *args, **kwargs)
        try:
            tenant_id = self.request.user.tenant_id
            all_rules = api.fwaas.rule_list_for_tenant(request, tenant_id)
            all_rules = sorted(all_rules, key=lambda rule: rule.name_or_id)
            # Only rules not yet bound to any policy may be inserted.
            available_rules = [r for r in all_rules
                               if not r.firewall_policy_id]
            current_rules = []
            for r in kwargs['initial']['firewall_rules']:
                # NOTE(review): [0] raises IndexError when a rule id has no
                # match in all_rules; this is swallowed by the broad except
                # below and reported as a retrieval failure -- confirm
                # this is the intended behavior.
                r_obj = [rule for rule in all_rules if r == rule.id][0]
                current_rules.append(r_obj)
            available_choices = [(r.id, r.name_or_id) for r in available_rules]
            current_choices = [(r.id, r.name_or_id) for r in current_rules]
        except Exception as e:
            msg = _('Failed to retrieve available rules: %s') % e
            LOG.error(msg)
            redirect = reverse(self.failure_url)
            exceptions.handle(request, msg, redirect=redirect)
        self.fields['firewall_rule_id'].choices = available_choices
        # Both position selectors offer an empty "no preference" choice.
        self.fields['insert_before'].choices = [('', '')] + current_choices
        self.fields['insert_after'].choices = [('', '')] + current_choices
    def handle(self, request, context):
        """Insert the selected rule into the policy via the FWaaS API."""
        policy_id = self.initial['policy_id']
        policy_name_or_id = self.initial['name'] or policy_id
        try:
            insert_rule_id = context['firewall_rule_id']
            insert_rule = api.fwaas.rule_get(request, insert_rule_id)
            body = {'firewall_rule_id': insert_rule_id,
                    'insert_before': context['insert_before'],
                    'insert_after': context['insert_after']}
            policy = api.fwaas.policy_insert_rule(request, policy_id, **body)
            msg = _('Rule %(rule)s was successfully inserted to policy '
                    '%(policy)s.') % {
                        'rule': insert_rule.name or insert_rule.id,
                        'policy': policy_name_or_id}
            LOG.debug(msg)
            messages.success(request, msg)
            return policy
        except Exception as e:
            msg = _('Failed to insert rule to policy %(name)s: %(reason)s') % {
                'name': policy_id, 'reason': e}
            LOG.error(msg)
            redirect = reverse(self.failure_url)
            exceptions.handle(request, msg, redirect=redirect)
class RemoveRuleFromPolicy(forms.SelfHandlingForm):
    """Form that removes one of a policy's currently associated rules."""
    firewall_rule_id = forms.ChoiceField(label=_("Remove Rule"))
    failure_url = 'horizon:project:firewalls:index'
    def __init__(self, request, *args, **kwargs):
        super(RemoveRuleFromPolicy, self).__init__(request, *args, **kwargs)
        try:
            tenant_id = request.user.tenant_id
            all_rules = api.fwaas.rule_list_for_tenant(request, tenant_id)
            # Resolve the policy's rule ids to rule objects for display.
            current_rules = []
            for r in kwargs['initial']['firewall_rules']:
                # NOTE(review): [0] raises IndexError when a rule id has no
                # match; swallowed by the broad except below and reported
                # as a retrieval failure -- confirm intended.
                r_obj = [rule for rule in all_rules if r == rule.id][0]
                current_rules.append(r_obj)
            current_choices = [(r.id, r.name_or_id) for r in current_rules]
        except Exception as e:
            msg = _('Failed to retrieve current rules in policy %(name)s: '
                    '%(reason)s') % {'name': self.initial['name'], 'reason': e}
            LOG.error(msg)
            redirect = reverse(self.failure_url)
            exceptions.handle(request, msg, redirect=redirect)
        self.fields['firewall_rule_id'].choices = current_choices
    def handle(self, request, context):
        """Remove the selected rule from the policy via the FWaaS API."""
        policy_id = self.initial['policy_id']
        policy_name_or_id = self.initial['name'] or policy_id
        try:
            remove_rule_id = context['firewall_rule_id']
            remove_rule = api.fwaas.rule_get(request, remove_rule_id)
            body = {'firewall_rule_id': remove_rule_id}
            policy = api.fwaas.policy_remove_rule(request, policy_id, **body)
            msg = _('Rule %(rule)s was successfully removed from policy '
                    '%(policy)s.') % {
                        'rule': remove_rule.name or remove_rule.id,
                        'policy': policy_name_or_id}
            LOG.debug(msg)
            messages.success(request, msg)
            return policy
        except Exception as e:
            msg = _('Failed to remove rule from policy %(name)s: '
                    '%(reason)s') % {'name': self.initial['name'],
                                     'reason': e}
            LOG.error(msg)
            redirect = reverse(self.failure_url)
            exceptions.handle(request, msg, redirect=redirect)
class RouterInsertionFormBase(forms.SelfHandlingForm):
    """Base form for changing the set of routers associated with a firewall.

    Subclasses must provide a ``router_ids`` form field and the
    ``failure_url``, ``success_msg``, ``failure_msg`` and
    ``init_failure_msg`` attributes referenced below, and implement the
    two abstract hooks.
    """
    def __init__(self, request, *args, **kwargs):
        super(RouterInsertionFormBase, self).__init__(request, *args, **kwargs)
        try:
            router_choices = self.get_router_choices(request, kwargs)
            self.fields['router_ids'].choices = router_choices
        except Exception as e:
            msg = self.init_failure_msg % {'name': self.initial['name'],
                                           'reason': e}
            LOG.error(msg)
            redirect = reverse(self.failure_url)
            exceptions.handle(request, msg, redirect=redirect)
    @abc.abstractmethod
    def get_router_choices(self, request, kwargs):
        """Return a list of selectable routers."""
    @abc.abstractmethod
    def get_new_router_ids(self, context):
        """Return a new list of router IDs associated with the firewall."""
    def handle(self, request, context):
        """Update the firewall's router associations via the FWaaS API."""
        firewall_id = self.initial['firewall_id']
        firewall_name_or_id = self.initial['name'] or firewall_id
        try:
            body = {'router_ids': self.get_new_router_ids(context)}
            firewall = api.fwaas.firewall_update(request, firewall_id, **body)
            msg = self.success_msg % {'firewall': firewall_name_or_id}
            LOG.debug(msg)
            messages.success(request, msg)
            return firewall
        except Exception as e:
            msg = self.failure_msg % {'name': firewall_name_or_id, 'reason': e}
            LOG.error(msg)
            redirect = reverse(self.failure_url)
            exceptions.handle(request, msg, redirect=redirect)
class AddRouterToFirewall(RouterInsertionFormBase):
    """Associates additional routers with an existing firewall."""
    router_ids = forms.MultipleChoiceField(
        label=_("Add Routers"),
        required=False,
        widget=forms.CheckboxSelectMultiple(),
        help_text=_("Add selected router(s) to the firewall."))
    failure_url = 'horizon:project:firewalls:index'
    success_msg = _('Router(s) was/were successfully added to firewall '
                    '%(firewall)s.')
    failure_msg = _('Failed to add router(s) to firewall %(name)s: %(reason)s')
    init_failure_msg = _('Failed to retrieve available routers: %(reason)s')
    def get_router_choices(self, request, kwargs):
        """Offer only routers not yet associated with any firewall."""
        tenant_id = self.request.user.tenant_id
        candidates = api.fwaas.firewall_unassociated_routers_list(
            request, tenant_id)
        return [(router.id, router.name_or_id) for router in candidates]
    def get_new_router_ids(self, context):
        """Newly selected routers first, then the existing associations."""
        return context['router_ids'] + self.initial['router_ids']
class RemoveRouterFromFirewall(RouterInsertionFormBase):
    """Disassociates routers from an existing firewall."""
    router_ids = forms.MultipleChoiceField(
        label=_("Remove Routers"),
        required=False,
        widget=forms.CheckboxSelectMultiple(),
        help_text=_("Unselect the router(s) to be removed from firewall."))
    failure_url = 'horizon:project:firewalls:index'
    success_msg = _('Router(s) was successfully removed from firewall '
                    '%(firewall)s.')
    failure_msg = _('Failed to remove router(s) from firewall %(name)s: '
                    '%(reason)s')
    init_failure_msg = _('Failed to retrieve current routers in firewall '
                         '%(name)s: %(reason)s')
    def get_router_choices(self, request, kwargs):
        """Offer only the routers currently attached to this firewall."""
        tenant_id = self.request.user.tenant_id
        all_routers = api.neutron.router_list(request, tenant_id=tenant_id)
        attached_ids = kwargs['initial']['router_ids']
        return [(router.id, router.name_or_id)
                for router in all_routers if router['id'] in attached_ids]
    def get_new_router_ids(self, context):
        # context[router_ids] is router IDs to be kept.
        return context['router_ids']
| wangxiangyu/horizon | openstack_dashboard/dashboards/project/firewalls/forms.py | Python | apache-2.0 | 16,187 |
import time
import logging
from .CogCompilerInterface import CogCompilerInterface
from .functions import str_fname_inode
class TestbenchCompiler(CogCompilerInterface):
    """Dummy compiler that only records a compile timestamp per entity.

    Nothing is really compiled: ``compile_all_files`` stamps each entity
    with the wall-clock time at which it was "compiled". The result can
    be retrieved only from the same instance via ``get_libs_content``.
    """
    def __init__(self):
        # Maps library name -> {entity inode -> {'ctime': timestamp}}.
        # None until compile_all_files() has been run at least once.
        self._virtual_lib = None
    def get_libs_content(self, libs):
        """Return the merged entity dicts of the requested libraries."""
        if not self._virtual_lib:
            raise NotImplementedError
        entities = {}
        for lib in libs:
            entities.update(self._virtual_lib[lib['lib']])
        return entities
    def compile_all_files(self, compile_order_list):
        """Stamp every (library, filename) entry with a compile time."""
        virtual_lib = {}
        # Required to compile list generated by cog.
        for entry in compile_order_list:
            lib, fname = entry[0], entry[1]
            inode = str_fname_inode(fname)
            ctime = time.time()
            logging.debug('Entity: ' + fname + ' in library ' + lib)
            virtual_lib.setdefault(lib, {})[inode] = {'ctime': ctime}
        self._virtual_lib = virtual_lib
    # The following functions are obviously not required, but the
    # simulation and compiler tools usually go hand-in-hand.
    def run_simulation(self, dut_name, sim_options):
        raise NotImplementedError
    def run_simulation_gui(self, dut_name, sim_options):
        raise NotImplementedError
| CogPy/cog | src/TestbenchCompiler.py | Python | lgpl-3.0 | 1,558 |
from . import *
from webstar.core import *
from webstar import core
from webstar.router import Router
class TestRouterBasics(TestCase):
    """Static routes, regex routes with parsers, and URL generation."""
    def setUp(self):
        self.router = Router()
        self.app = TestApp(self.router)
        @self.router.register('/static')
        def static(environ, start):
            self.autostart(environ, start)
            return ['static; path_info=%(PATH_INFO)r, script_name=%(SCRIPT_NAME)r' % environ]
        @self.router.register('/{fruit:apple|banana}')
        def fruit(environ, start):
            self.autostart(environ, start)
            return ['fruit']
        # NOTE(review): '\d' appears in a non-raw string; it works because
        # '\d' has no string-escape meaning, but r'...' would be clearer.
        @self.router.register('/{num:\d+}', _parsers=dict(num=int))
        def numbers(environ, start):
            self.autostart(environ, start)
            # The parser converts the matched digits to int before storage.
            return ['number-%d' % get_route_data(environ)['num']]
    def test_miss(self):
        # An unmatched path must produce a 404.
        res = self.app.get('/notfound', status=404)
        self.assertEqual(res.status, '404 Not Found')
    def test_static(self):
        # The consumed prefix moves from PATH_INFO to SCRIPT_NAME.
        res = self.app.get('/static')
        self.assertEqual(res.body, "static; path_info='/', script_name='/static'")
    def test_static_incomplete(self):
        # Trailing path beyond the route stays in PATH_INFO.
        res = self.app.get('/static/more')
        self.assertEqual(res.body, "static; path_info='/more', script_name='/static'")
    def test_basic_re(self):
        res = self.app.get('/apple')
        self.assertEqual(res.body, 'fruit')
        res = self.app.get('/banana')
        self.assertEqual(res.body, 'fruit')
    def test_number_re(self):
        res = self.app.get('/1234')
        self.assertEqual(res.body, 'number-1234')
    def test_number_gen(self):
        # Reverse routing: data -> URL.
        path = self.router.url_for(num=314)
        self.assertEqual('/314', path)
    def test_gen_mismatch(self):
        path = self.router.url_for(fruit='apple')
        self.assertEqual(path, '/apple')
        # Values not matching the route pattern must fail generation.
        self.assertRaises(GenerationError, self.router.url_for, fruit='carrot')
class TestDummyModules(TestCase):
    """Package registration against a fake module tree built with DummyModule."""
    def setUp(self):
        # Build a fake package: dummy, dummy.a, dummy.b, dummy.b.leaf,
        # each exposing an EchoApp that echoes its own path.
        root = self.root = DummyModule('dummy')
        root.__app__ = EchoApp('/dummy')
        a = root('a')
        a.__app__ = EchoApp('/dummy/A')
        b = root('b')
        b.__app__ = EchoApp('/dummy/B')
        leaf = b('leaf')
        leaf.__app__ = EchoApp('/dummy/B/leaf')
    def tearDown(self):
        # Drop all fake modules so tests stay isolated from each other.
        DummyModule.remove_all()
    def test_basic(self):
        router = Router()
        router.register_package(None, self.root, testing=True, include_self=True)
        self.app = TestApp(router)
        res = self.app.get('/')
        self.assertEqual(res.body, '/dummy')
        res = self.app.get('/a')
        self.assertEqual(res.body, '/dummy/A')
        res = self.app.get('/b')
        self.assertEqual(res.body, '/dummy/B')
        # Non-recursive registration: the leaf is not mounted, so the
        # request is answered by the nearest registered ancestor.
        res = self.app.get('/b/leaf')
        self.assertEqual(res.body, '/dummy/B')
    def test_recursive(self):
        router = Router()
        router.register_package(None, self.root, recursive=True, testing=True, include_self=True)
        router.print_graph()
        self.app = TestApp(router)
        res = self.app.get('/')
        self.assertEqual(res.body, '/dummy')
        res = self.app.get('/a')
        self.assertEqual(res.body, '/dummy/A')
        res = self.app.get('/b')
        self.assertEqual(res.body, '/dummy/B')
        # With recursive=True the leaf module gets its own mount point.
        res = self.app.get('/b/leaf')
        self.assertEqual(res.body, '/dummy/B/leaf')
class TestRealModules(TestCase):
    """Non-recursive registration of a real on-disk example package."""
    def setUp(self):
        self.router = Router()
        self.app = TestApp(self.router)
        # NOTE(review): absolute import here, while TestRealRecursiveModules
        # uses `from . import examplepackage`; confirm both resolve to the
        # same package.
        import examplepackage
        self.router.register_package(None, examplepackage, include_self=True)
    def test_default(self):
        res = self.app.get('/')
        self.assertEqual(res.body, 'package.__init__')
    def test_basic(self):
        res = self.app.get('/static')
        self.assertEqual(res.body, 'package.static')
    def test_leaf(self):
        # Non-recursive: submodule is not mounted, so the package root
        # application answers this request.
        res = self.app.get('/sub/leaf')
        self.assertEqual(res.body, 'package.__init__')
class TestRealRecursiveModules(TestRealModules):
    """Same package as TestRealModules, registered with recursive=True.

    Inherits test_default/test_basic; only the leaf expectation changes.
    """
    def setUp(self):
        self.router = Router()
        self.app = TestApp(self.router)
        from . import examplepackage
        self.router.register_package(None, examplepackage, recursive=True, include_self=True)
    def test_leaf(self):
        # Recursive registration mounts the submodule itself.
        res = self.app.get('/sub/leaf')
        self.assertEqual(res.body, 'I am a leaf')
class TestTraversal(TestCase):
    def test_dont_fail_immediately(self):
        """A miss in one sub-router must fall through to later siblings."""
        main = Router()
        first = main.register(None, Router())
        second = main.register(None, Router())
        first.register('/a', EchoApp('A says hello'))
        second.register('/b', EchoApp('B says hi'))
        second.register(None, EchoApp('catchall'))
        app = TestApp(main)
        self.assertEqual(app.get('/a').body, 'A says hello')
        self.assertEqual(app.get('/b').body, 'B says hi')
        # '/notthere' matches neither '/a' nor '/b'; traversal must keep
        # going until it reaches the catchall route.
        self.assertEqual(app.get('/notthere').body, 'catchall')
| mikeboers/WebStar | test_webstar/test_router.py | Python | bsd-3-clause | 5,107 |
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
from airflow.utils.db import provide_session
class DagTISlotsAvailableDep(BaseTIDep):
    # Human-readable name used in dependency-status reporting.
    NAME = "Task Instance Slots Available"
    # NOTE(review): presumably allows this dep to be skipped via the
    # ignore-deps flags handled by BaseTIDep -- confirm there.
    IGNOREABLE = True
    @provide_session
    def _get_dep_statuses(self, ti, session, dep_context):
        """
        Yield a failing status when the task instance's DAG has already
        reached its concurrency limit; yield nothing when the dependency
        is met.
        """
        if ti.task.dag.concurrency_reached:
            yield self._failing_status(
                reason="The maximum number of running tasks ({0}) for this task's DAG "
                       "'{1}' has been reached.".format(ti.task.dag.concurrency,
                                                        ti.dag_id))
| holygits/incubator-airflow | airflow/ti_deps/deps/dag_ti_slots_available_dep.py | Python | apache-2.0 | 1,176 |
# -*- Mode: Python; test-case-name:flumotion.test.test_config -*-
# vi:si:et:sw=4:sts=4:ts=4
#
# Flumotion - a streaming media server
# Copyright (C) 2004,2005,2006,2007 Fluendo, S.L. (www.fluendo.com).
# All rights reserved.
# This file may be distributed and/or modified under the terms of
# the GNU General Public License version 2 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.GPL" in the source distribution for more information.
# Licensees having purchased or holding a valid Flumotion Advanced
# Streaming Server license may use this file in accordance with the
# Flumotion Advanced Streaming Server Commercial License Agreement.
# See "LICENSE.Flumotion" in the source distribution for more information.
# Headers in this file shall remain intact.
from flumotion.common import errors
from flumotion.common import testsuite
from flumotion.component.base import http
class TestLogFilter(testsuite.TestCase):
    """Tests for the HTTP component's IP-based log filter."""
    def testSimpleFilter(self):
        log_filter = http.LogFilter()
        log_filter.addIPFilter("192.168.1.0/24")
        self.failUnless(log_filter.isInRange("192.168.1.200"))
        self.failIf(log_filter.isInRange("192.168.0.200"))
    def testComplexFilter(self):
        log_filter = http.LogFilter()
        for filterdef in ("192.168.1.0/24", "127.0.0.1"):
            log_filter.addIPFilter(filterdef)
        self.failUnless(log_filter.isInRange("192.168.1.200"))
        self.failUnless(log_filter.isInRange("127.0.0.1"))
        self.failIf(log_filter.isInRange("192.168.0.200"))
        self.failIf(log_filter.isInRange("127.0.0.2"))
    def testParseFailure(self):
        log_filter = http.LogFilter()
        # Malformed address, out-of-range mask, and extra mask component
        # must all raise ConfigError.
        for bad in ("192.12", "192.168.0.0/33", "192.168.0.0/30/1"):
            self.assertRaises(errors.ConfigError,
                              log_filter.addIPFilter, bad)
| ylatuya/Flumotion | flumotion/test/test_logfilter.py | Python | gpl-2.0 | 2,099 |
from __future__ import unicode_literals
from django.contrib import messages
from django.contrib.auth.decorators import permission_required
from django.contrib.auth.mixins import PermissionRequiredMixin
from django.db import transaction
from django.db.models import Count
from django.shortcuts import get_object_or_404, redirect, render
from django.urls import reverse
from django.views.generic import View
from extras.models import Graph, GRAPH_TYPE_PROVIDER
from utilities.forms import ConfirmationForm
from utilities.views import (
BulkDeleteView, BulkEditView, BulkImportView, ObjectDeleteView, ObjectEditView, ObjectListView,
)
from . import filters, forms, tables
from .models import Circuit, CircuitTermination, CircuitType, Provider, TERM_SIDE_A, TERM_SIDE_Z
#
# Providers
#
class ProviderListView(ObjectListView):
    """Table view of all providers, annotated with each provider's circuit count."""
    queryset = Provider.objects.annotate(count_circuits=Count('circuits'))
    filter = filters.ProviderFilter
    filter_form = forms.ProviderFilterForm
    table = tables.ProviderDetailTable
    template_name = 'circuits/provider_list.html'
class ProviderView(View):
    """Detail view for a single provider, listing its circuits."""
    def get(self, request, slug):
        provider = get_object_or_404(Provider, slug=slug)
        circuits = Circuit.objects.filter(provider=provider).select_related(
            'type', 'tenant'
        ).prefetch_related(
            'terminations__site'
        )
        # Only offer graph links when provider-type graphs exist.
        show_graphs = Graph.objects.filter(type=GRAPH_TYPE_PROVIDER).exists()
        return render(request, 'circuits/provider.html', {
            'provider': provider,
            'circuits': circuits,
            'show_graphs': show_graphs,
        })
class ProviderCreateView(PermissionRequiredMixin, ObjectEditView):
    """Create a new provider."""
    permission_required = 'circuits.add_provider'
    model = Provider
    form_class = forms.ProviderForm
    template_name = 'circuits/provider_edit.html'
    default_return_url = 'circuits:provider_list'
class ProviderEditView(ProviderCreateView):
    """Edit an existing provider; reuses the create view with a change permission."""
    permission_required = 'circuits.change_provider'
class ProviderDeleteView(PermissionRequiredMixin, ObjectDeleteView):
    """Delete a single provider."""
    permission_required = 'circuits.delete_provider'
    model = Provider
    default_return_url = 'circuits:provider_list'
class ProviderBulkImportView(PermissionRequiredMixin, BulkImportView):
    """Import providers from CSV."""
    permission_required = 'circuits.add_provider'
    model_form = forms.ProviderCSVForm
    table = tables.ProviderTable
    default_return_url = 'circuits:provider_list'
class ProviderBulkEditView(PermissionRequiredMixin, BulkEditView):
    """Edit multiple providers at once."""
    permission_required = 'circuits.change_provider'
    cls = Provider
    filter = filters.ProviderFilter
    table = tables.ProviderTable
    form = forms.ProviderBulkEditForm
    default_return_url = 'circuits:provider_list'
class ProviderBulkDeleteView(PermissionRequiredMixin, BulkDeleteView):
    """Delete multiple providers at once."""
    permission_required = 'circuits.delete_provider'
    cls = Provider
    filter = filters.ProviderFilter
    table = tables.ProviderTable
    default_return_url = 'circuits:provider_list'
#
# Circuit Types
#
class CircuitTypeListView(ObjectListView):
    """Table view of circuit types, annotated with the number of circuits of each type."""
    queryset = CircuitType.objects.annotate(circuit_count=Count('circuits'))
    table = tables.CircuitTypeTable
    template_name = 'circuits/circuittype_list.html'
class CircuitTypeCreateView(PermissionRequiredMixin, ObjectEditView):
    """Create a new circuit type."""
    permission_required = 'circuits.add_circuittype'
    model = CircuitType
    form_class = forms.CircuitTypeForm
    def get_return_url(self, request, obj):
        # Always return to the type list, regardless of the edited object.
        return reverse('circuits:circuittype_list')
class CircuitTypeEditView(CircuitTypeCreateView):
    """Edit an existing circuit type; reuses the create view with a change permission."""
    permission_required = 'circuits.change_circuittype'
class CircuitTypeBulkDeleteView(PermissionRequiredMixin, BulkDeleteView):
    """Delete multiple circuit types at once."""
    permission_required = 'circuits.delete_circuittype'
    cls = CircuitType
    queryset = CircuitType.objects.annotate(circuit_count=Count('circuits'))
    table = tables.CircuitTypeTable
    default_return_url = 'circuits:circuittype_list'
#
# Circuits
#
class CircuitListView(ObjectListView):
    """Table view of all circuits with related objects prefetched."""
    queryset = Circuit.objects.select_related('provider', 'type', 'tenant').prefetch_related('terminations__site')
    filter = filters.CircuitFilter
    filter_form = forms.CircuitFilterForm
    table = tables.CircuitTable
    template_name = 'circuits/circuit_list.html'
class CircuitView(View):
    """Detail view for a single circuit with its A-side and Z-side terminations."""
    def get(self, request, pk):
        circuit = get_object_or_404(Circuit.objects.select_related('provider', 'type', 'tenant__group'), pk=pk)
        # Each side has at most one termination; .first() yields None if absent.
        termination_a = CircuitTermination.objects.select_related(
            'site__region', 'interface__device'
        ).filter(
            circuit=circuit, term_side=TERM_SIDE_A
        ).first()
        termination_z = CircuitTermination.objects.select_related(
            'site__region', 'interface__device'
        ).filter(
            circuit=circuit, term_side=TERM_SIDE_Z
        ).first()
        return render(request, 'circuits/circuit.html', {
            'circuit': circuit,
            'termination_a': termination_a,
            'termination_z': termination_z,
        })
class CircuitCreateView(PermissionRequiredMixin, ObjectEditView):
    """Create a new circuit."""
    permission_required = 'circuits.add_circuit'
    model = Circuit
    form_class = forms.CircuitForm
    template_name = 'circuits/circuit_edit.html'
    default_return_url = 'circuits:circuit_list'
class CircuitEditView(CircuitCreateView):
    """Edit an existing circuit; reuses the create view with a change permission."""
    permission_required = 'circuits.change_circuit'
class CircuitDeleteView(PermissionRequiredMixin, ObjectDeleteView):
    """Delete a single circuit."""
    permission_required = 'circuits.delete_circuit'
    model = Circuit
    default_return_url = 'circuits:circuit_list'
class CircuitBulkImportView(PermissionRequiredMixin, BulkImportView):
    """Import circuits from CSV."""
    permission_required = 'circuits.add_circuit'
    model_form = forms.CircuitCSVForm
    table = tables.CircuitTable
    default_return_url = 'circuits:circuit_list'
class CircuitBulkEditView(PermissionRequiredMixin, BulkEditView):
    """Edit multiple circuits at once."""
    permission_required = 'circuits.change_circuit'
    cls = Circuit
    queryset = Circuit.objects.select_related('provider', 'type', 'tenant').prefetch_related('terminations__site')
    filter = filters.CircuitFilter
    table = tables.CircuitTable
    form = forms.CircuitBulkEditForm
    default_return_url = 'circuits:circuit_list'
class CircuitBulkDeleteView(PermissionRequiredMixin, BulkDeleteView):
    """Delete multiple circuits at once."""
    permission_required = 'circuits.delete_circuit'
    cls = Circuit
    queryset = Circuit.objects.select_related('provider', 'type', 'tenant').prefetch_related('terminations__site')
    filter = filters.CircuitFilter
    table = tables.CircuitTable
    default_return_url = 'circuits:circuit_list'
@permission_required('circuits.change_circuittermination')
def circuit_terminations_swap(request, pk):
    """Swap the A and Z terminations of a circuit (after user confirmation).

    GET renders a confirmation form; POST performs the swap. If only one
    side exists, that termination is simply moved to the opposite side.
    """
    circuit = get_object_or_404(Circuit, pk=pk)
    termination_a = CircuitTermination.objects.filter(circuit=circuit, term_side=TERM_SIDE_A).first()
    termination_z = CircuitTermination.objects.filter(circuit=circuit, term_side=TERM_SIDE_Z).first()
    if not termination_a and not termination_z:
        # Nothing to swap; bounce back to the circuit detail page.
        messages.error(request, "No terminations have been defined for circuit {}.".format(circuit))
        return redirect('circuits:circuit', pk=circuit.pk)
    if request.method == 'POST':
        form = ConfirmationForm(request.POST)
        if form.is_valid():
            if termination_a and termination_z:
                # Use a placeholder to avoid an IntegrityError on the (circuit, term_side) unique constraint
                # Sequence matters: A -> '_', Z -> 'A', '_' -> 'Z', all in one transaction.
                with transaction.atomic():
                    termination_a.term_side = '_'
                    termination_a.save()
                    termination_z.term_side = 'A'
                    termination_z.save()
                    termination_a.term_side = 'Z'
                    termination_a.save()
            elif termination_a:
                # Only the A side exists; move it to Z.
                termination_a.term_side = 'Z'
                termination_a.save()
            else:
                # Only the Z side exists; move it to A.
                termination_z.term_side = 'A'
                termination_z.save()
            messages.success(request, "Swapped terminations for circuit {}.".format(circuit))
            return redirect('circuits:circuit', pk=circuit.pk)
    else:
        form = ConfirmationForm()
    return render(request, 'circuits/circuit_terminations_swap.html', {
        'circuit': circuit,
        'termination_a': termination_a,
        'termination_z': termination_z,
        'form': form,
        'panel_class': 'default',
        'button_class': 'primary',
        'return_url': circuit.get_absolute_url(),
    })
#
# Circuit terminations
#
class CircuitTerminationCreateView(PermissionRequiredMixin, ObjectEditView):
    """Create a termination for a circuit (circuit taken from the URL when present)."""
    permission_required = 'circuits.add_circuittermination'
    model = CircuitTermination
    form_class = forms.CircuitTerminationForm
    template_name = 'circuits/circuittermination_edit.html'
    def alter_obj(self, obj, request, url_args, url_kwargs):
        # Bind the new termination to the circuit named in the URL, if any.
        if 'circuit' in url_kwargs:
            obj.circuit = get_object_or_404(Circuit, pk=url_kwargs['circuit'])
        return obj
    def get_return_url(self, request, obj):
        # Return to the parent circuit's detail page.
        return obj.circuit.get_absolute_url()
class CircuitTerminationEditView(CircuitTerminationCreateView):
    """Edit an existing circuit termination; reuses the create view with a change permission."""
    permission_required = 'circuits.change_circuittermination'
class CircuitTerminationDeleteView(PermissionRequiredMixin, ObjectDeleteView):
    """Delete a single circuit termination."""
    permission_required = 'circuits.delete_circuittermination'
    model = CircuitTermination
| snazy2000/netbox | netbox/circuits/views.py | Python | apache-2.0 | 9,414 |
'''
Created on Dec 23, 2011
@author: ajju
'''
from common.DataObjects import VideoHostingInfo, VideoInfo, VIDEO_QUAL_SD, \
VIDEO_QUAL_HD_720
from common import HttpUtils
import re
import urllib
def getVideoHostingInfo():
    """Return a VideoHostingInfo describing the Facebook hosting service."""
    hosting = VideoHostingInfo()
    hosting.set_video_hosting_name('Facebook')
    hosting.set_video_hosting_image('http://www.koreaittimes.com/images/imagecache/medium/facebook-video-player-logo.png')
    return hosting
def retrieveVideoInfo(video_id):
    """Fetch title, thumbnail and stream links for a Facebook video.

    Scrapes the flash-player variables out of the public video page. On any
    failure the VideoInfo is marked stopped and the exception is re-raised
    to the caller.
    """
    video_info = VideoInfo()
    video_info.set_video_hosting_info(getVideoHostingInfo())
    video_info.set_video_id(video_id)
    try:
        video_info_link = 'http://www.facebook.com/video/video.php?v=' + str(video_id)
        html = urllib.unquote_plus(HttpUtils.HttpClient().getHtmlContent(url=video_info_link).replace('\u0025', '%'))
        video_title = re.compile('addVariable\("video_title"\, "(.+?)"').findall(html)[0]
        img_link = re.compile('addVariable\("thumb_url"\, "(.+?)"').findall(html)[0]
        high_video_link = re.compile('addVariable\("highqual_src"\, "(.+?)"').findall(html)
        low_video_link = re.compile('addVariable\("lowqual_src"\, "(.+?)"').findall(html)
        video_link = re.compile('addVariable\("video_src"\, "(.+?)"').findall(html)
        if len(high_video_link) > 0:
            video_info.add_video_link(VIDEO_QUAL_HD_720, high_video_link[0])
        if len(low_video_link) > 0:
            video_info.add_video_link(VIDEO_QUAL_SD, low_video_link[0])
        if len(video_link) > 0:
            video_info.add_video_link(VIDEO_QUAL_SD, video_link[0])
        video_info.set_video_stopped(False)
        video_info.set_video_name(video_title)
        video_info.set_video_image(img_link)
    except Exception:
        # BUGFIX: this call previously sat AFTER `raise` and was unreachable.
        # Mark the video as stopped before propagating the error.
        video_info.set_video_stopped(True)
        raise
    return video_info
| dknlght/dkodi | src/script.module.turtle/lib/snapvideo/Facebook.py | Python | gpl-2.0 | 1,880 |
# from the paper `using cython to speedup numerical python programs'
#pythran export wave(float, float, float, float, float, float [] [], float [] [], float [] [])
#runas A=[map(float,range(2000)) for i in xrange(100)] ; B=[map(float,range(2000)) for i in xrange(100)] ; C=[map(float,range(2000)) for i in xrange(100)] ; import numpy ; A = numpy.array(A) ; B = numpy.array(B) ; C = numpy.array(C) ; wave(1,2,.01,.1,.18, A,B,C )
def wave(t, t_stop, dt, dx, dy, u, um, k):
    # Advance the 2-D wave field from time t to t_stop in steps of dt.
    # u is the current field, um the previous time level, k the (spatially
    # varying) coefficient field; returns the field at the final time.
    while t <= t_stop:
        t += dt
        new_u = calculate_u(dt, dx, dy, u, um, k)
        # Rotate time levels: current becomes previous.
        um = u
        u = new_u
    return u
import numpy
def calculate_u(dt, dx, dy, u, um, k):
    # One explicit finite-difference time step (leapfrog in time, centered
    # differences in space) of the variable-coefficient wave equation.
    # Boundary cells are left at zero.
    up = numpy.zeros((u.shape[0], u.shape[1]))
    # NOTE: the bare string below is not dead code — Pythran interprets it
    # as an OpenMP pragma and parallelizes the outer loop.
    "omp parallel for private(i,j)"
    for i in xrange(1, u.shape[0]-1):
        for j in xrange(1, u.shape[1]-1):
            # k is averaged between neighbouring cells before differencing.
            up[i,j] = 2*u[i,j] - um[i,j] + \
                (dt/dx)**2*(
                (0.5*(k[i+1,j] + k[i,j])*(u[i+1,j] - u[i,j]) -
                 0.5*(k[i,j] + k[i-1,j])*(u[i,j] - u[i-1,j]))) + \
                (dt/dy)**2*(
                (0.5*(k[i,j+1] + k[i,j])*(u[i,j+1] - u[i,j]) -
                 0.5*(k[i,j] + k[i,j-1])*(u[i,j] - u[i,j-1])))
    return up
| serge-sans-paille/pythran | docs/papers/sc2013/bench/pythran/wave_.py | Python | bsd-3-clause | 1,258 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 by Christian Tremblay, P.Eng <[email protected]>
# Licensed under LGPLv3, see file LICENSE in this source tree.
#
"""
Read.py - creation of ReadProperty and ReadPropertyMultiple requests
Used while defining an app:
Example::
class BasicScript(WhoisIAm, ReadProperty)
Class::
ReadProperty()
def read()
def readMultiple()
"""
# --- standard Python modules ---
# --- 3rd party modules ---
from bacpypes.debugging import bacpypes_debugging
from bacpypes.pdu import Address
from bacpypes.object import get_object_class, get_datatype
from bacpypes.apdu import (
PropertyReference,
ReadAccessSpecification,
ReadPropertyRequest,
ReadPropertyMultipleRequest,
RejectReason,
AbortReason,
RejectPDU,
AbortPDU,
)
from bacpypes.basetypes import PropertyIdentifier, DateTime
from bacpypes.apdu import (
ReadPropertyMultipleACK,
ReadPropertyACK,
ReadRangeRequest,
ReadRangeACK,
Range,
RangeByPosition,
RangeBySequenceNumber,
RangeByTime,
)
from bacpypes.primitivedata import Tag, ObjectIdentifier, Unsigned, Date, Time
from bacpypes.constructeddata import Array
from bacpypes.iocb import IOCB, TimeoutError
from bacpypes.core import deferred
# --- this application's modules ---
from .IOExceptions import (
ReadPropertyException,
ReadPropertyMultipleException,
ReadRangeException,
NoResponseFromController,
ApplicationNotStarted,
UnrecognizedService,
SegmentationNotSupported,
UnknownPropertyError,
UnknownObjectError,
BufferOverflow,
)
from bacpypes.object import registered_object_types
from ..utils.notes import note_and_log
# ------------------------------------------------------------------------------
@note_and_log
class ReadProperty:
"""
Defines BACnet Read functions: readProperty and readPropertyMultiple.
Data exchange is made via a Queue object
    A timeout of 10 seconds allows detection of invalid device or communication errors.
"""
def read(
self,
args,
arr_index=None,
vendor_id=0,
bacoid=None,
timeout=10,
show_property_name=False,
):
"""
Build a ReadProperty request, wait for the answer and return the value
:param args: String with <addr> <type> <inst> <prop> [ <indx> ]
:returns: data read from device (str representing data like 10 or True)
*Example*::
import BAC0
myIPAddr = '192.168.1.10/24'
bacnet = BAC0.connect(ip = myIPAddr)
bacnet.read('2:5 analogInput 1 presentValue')
Requests the controller at (Network 2, address 5) for the presentValue of
its analog input 1 (AI:1).
"""
if not self._started:
raise ApplicationNotStarted("BACnet stack not running - use startApp()")
args_split = args.split()
self.log_title("Read property", args_split)
vendor_id = vendor_id
bacoid = bacoid
try:
# build ReadProperty request
iocb = IOCB(
self.build_rp_request(
args_split, arr_index=arr_index, vendor_id=vendor_id, bacoid=bacoid
)
)
iocb.set_timeout(timeout)
# pass to the BACnet stack
deferred(self.this_application.request_io, iocb)
self._log.debug("{:<20} {!r}".format("iocb", iocb))
except ReadPropertyException as error:
# construction error
self._log.exception("exception: {!r}".format(error))
iocb.wait() # Wait for BACnet response
if iocb.ioResponse: # successful response
apdu = iocb.ioResponse
if not isinstance(apdu, ReadPropertyACK): # expecting an ACK
self._log.warning("Not an ack, see debug for more infos.")
self._log.debug("Not an ack. | APDU : {}".format(apdu))
return
# find the datatype
datatype = get_datatype(
apdu.objectIdentifier[0], apdu.propertyIdentifier, vendor_id=vendor_id
)
if not datatype:
# raise TypeError("unknown datatype")
value = cast_datatype_from_tag(
apdu.propertyValue,
apdu.objectIdentifier[0],
apdu.propertyIdentifier,
)
else:
# special case for array parts, others are managed by cast_out
if issubclass(datatype, Array) and (
apdu.propertyArrayIndex is not None
):
if apdu.propertyArrayIndex == 0:
value = apdu.propertyValue.cast_out(Unsigned)
else:
value = apdu.propertyValue.cast_out(datatype.subtype)
else:
value = apdu.propertyValue.cast_out(datatype)
self._log.debug("{:<20} {:<20}".format("value", "datatype"))
self._log.debug("{!r:<20} {!r:<20}".format(value, datatype))
if not show_property_name:
return value
try:
int(apdu.propertyIdentifier)
objid = apdu.objectIdentifier
prop_id = "@prop_{}".format(apdu.propertyIdentifier)
value = list(value.items())[0][1]
except ValueError:
prop_id = apdu.propertyIdentifier
return (value, prop_id)
if iocb.ioError: # unsuccessful: error/reject/abort
apdu = iocb.ioError
reason = find_reason(apdu)
if reason == "segmentationNotSupported":
value = self._split_the_read_request(args, arr_index)
return value
else:
if reason == "unknownProperty":
if "description" in args:
self._log.warning(
"The description property is not implemented in the device. Using a default value for internal needs."
)
return "Property Not Implemented"
elif "inactiveText" in args:
self._log.warning(
"The inactiveText property is not implemented in the device. Using a default value of Off for internal needs."
)
return "Off"
elif "activeText" in args:
self._log.warning(
"The activeText property is not implemented in the device. Using a default value of On for internal needs."
)
return "On"
else:
raise UnknownPropertyError("Unknown property {}".format(args))
elif reason == "unknownObject":
self._log.warning("Unknown object {}".format(args))
raise UnknownObjectError("Unknown object {}".format(args))
elif reason == "bufferOverflow":
self._log.warning(
"Buffer capacity exceeded in device {}".format(args)
)
return self._split_the_read_request(args, arr_index)
else:
# Other error... consider NoResponseFromController (65)
# even if the real reason is another one
raise NoResponseFromController(
"APDU Abort Reason : {}".format(reason)
)
    def _split_the_read_request(self, args, arr_index):
        """
        When a device doesn't support segmentation, this function
        will split the request according to the length of the
        predicted result which can be known when reading the array_index
        number 0.

        This can be a very long process as some devices count a large
        number of properties without supporting segmentation
        (FieldServers are a good example)
        """
        # Index 0 of a BACnet array holds the element count; then each
        # element is read individually (indexes start at 1).
        nmbr_obj = self.read(args, arr_index=0)
        return [self.read(args, arr_index=i) for i in range(1, nmbr_obj + 1)]
def readMultiple(
self, args, request_dict=None, vendor_id=0, timeout=10, show_property_name=False
):
"""Build a ReadPropertyMultiple request, wait for the answer and return the values
:param args: String with <addr> ( <type> <inst> ( <prop> [ <indx> ] )... )...
:returns: data read from device (str representing data like 10 or True)
*Example*::
import BAC0
myIPAddr = '192.168.1.10/24'
bacnet = BAC0.connect(ip = myIPAddr)
bacnet.readMultiple('2:5 analogInput 1 presentValue units')
Requests the controller at (Network 2, address 5) for the (presentValue and units) of
its analog input 1 (AI:1).
"""
if not self._started:
raise ApplicationNotStarted("BACnet stack not running - use startApp()")
if request_dict is not None:
request = self.build_rpm_request_from_dict(request_dict, vendor_id)
else:
args = args.split()
request = self.build_rpm_request(args, vendor_id=vendor_id)
self.log_title("Read Multiple", args)
values = []
dict_values = {}
try:
# build an ReadPropertyMultiple request
iocb = IOCB(request)
iocb.set_timeout(timeout)
# pass to the BACnet stack
deferred(self.this_application.request_io, iocb)
self._log.debug("{:<20} {!r}".format("iocb", iocb))
except ReadPropertyMultipleException as error:
# construction error
self._log.exception("exception: {!r}".format(error))
iocb.wait() # Wait for BACnet response
if iocb.ioResponse: # successful response
apdu = iocb.ioResponse
if not isinstance(apdu, ReadPropertyMultipleACK): # expecting an ACK
self._log.debug("{:<20}".format("not an ack"))
self._log.warning(
"Not an Ack. | APDU : {} / {}".format((apdu, type(apdu)))
)
return
# loop through the results
for result in apdu.listOfReadAccessResults:
# here is the object identifier
objectIdentifier = result.objectIdentifier
self.log_subtitle(
"{!r} : {!r}".format(objectIdentifier[0], objectIdentifier[1]),
width=114,
)
self._log.debug(
"{:<20} {:<20} {:<30} {:<20}".format(
"propertyIdentifier", "propertyArrayIndex", "value", "datatype"
)
)
self._log.debug("-" * 114)
dict_values[objectIdentifier] = []
# now come the property values per object
for element in result.listOfResults:
# get the property and array index
propertyIdentifier = element.propertyIdentifier
propertyArrayIndex = element.propertyArrayIndex
readResult = element.readResult
if propertyArrayIndex is not None:
_prop_id = "{}@idx:{}".format(
propertyIdentifier, propertyArrayIndex
)
else:
_prop_id = propertyIdentifier
if readResult.propertyAccessError is not None:
self._log.debug(
"Property Access Error for {}".format(
readResult.propertyAccessError
)
)
values.append(None)
dict_values[objectIdentifier].append((_prop_id, None))
else:
# here is the value
propertyValue = readResult.propertyValue
# find the datatype
datatype = get_datatype(
objectIdentifier[0], propertyIdentifier, vendor_id=vendor_id
)
if not datatype:
value = cast_datatype_from_tag(
propertyValue, objectIdentifier[0], propertyIdentifier
)
else:
# special case for array parts, others are managed by cast_out
if issubclass(datatype, Array) and (
propertyArrayIndex is not None
):
if propertyArrayIndex == 0:
value = propertyValue.cast_out(Unsigned)
else:
value = propertyValue.cast_out(datatype.subtype)
elif propertyValue.is_application_class_null():
value = None
else:
value = propertyValue.cast_out(datatype)
self._log.debug(
"{!r:<20} {!r:<20} {!r:<30} {!r:<20}".format(
propertyIdentifier,
propertyArrayIndex,
value,
datatype,
)
)
if show_property_name:
try:
int(
propertyIdentifier
) # else it will be a name like maxMaster
prop_id = "@prop_{}".format(propertyIdentifier)
_obj, _id = apdu.listOfReadAccessResults[
0
].objectIdentifier
_key = (str(_obj), vendor_id)
if _key in registered_object_types.keys():
_classname = registered_object_types[_key].__name__
for k, v in registered_object_types["BAC0"][
_classname
].items():
if v["obj_id"] == propertyIdentifier:
prop_id = (k, propertyIdentifier)
if isinstance(value, dict):
value = list(value.items())[0][1]
except ValueError:
prop_id = propertyIdentifier
values.append((value, prop_id))
dict_values[objectIdentifier].append(
(_prop_id, (value, prop_id))
)
else:
values.append(value)
dict_values[objectIdentifier].append((_prop_id, value))
if request_dict is not None:
return dict_values
else:
return values
if iocb.ioError: # unsuccessful: error/reject/abort
apdu = iocb.ioError
reason = find_reason(apdu)
self._log.warning("APDU Abort Reject Reason : {}".format(reason))
self._log.debug("The Request was : {}".format(args))
if reason == "unrecognizedService":
raise UnrecognizedService()
elif reason == "segmentationNotSupported":
# value = self._split_the_read_request(args, arr_index)
# return value
self.segmentation_supported = False
raise SegmentationNotSupported()
elif reason == "unknownObject":
self._log.warning("Unknown object {}".format(args))
raise UnknownObjectError("Unknown object {}".format(args))
elif reason == "unknownProperty":
self._log.warning("Unknown property {}".format(args))
values.append("")
return values
else:
self._log.warning("No response from controller {}".format(reason))
values.append("")
return values
def build_rp_request(self, args, arr_index=None, vendor_id=0, bacoid=None):
addr, obj_type, obj_inst, prop_id = args[:4]
vendor_id = vendor_id
bacoid = bacoid
if obj_type.isdigit():
obj_type = int(obj_type)
elif "@obj_" in obj_type:
obj_type = int(obj_type.split("_")[1])
elif not get_object_class(obj_type, vendor_id=vendor_id):
raise ValueError("Unknown object type : {}".format(obj_type))
obj_inst = int(obj_inst)
if prop_id.isdigit():
prop_id = int(prop_id)
elif "@prop_" in prop_id:
prop_id = int(prop_id.split("_")[1])
# datatype = get_datatype(obj_type, prop_id, vendor_id=vendor_id)
# build a request
request = ReadPropertyRequest(
objectIdentifier=(obj_type, obj_inst),
propertyIdentifier=prop_id,
propertyArrayIndex=arr_index,
)
request.pduDestination = Address(addr)
if len(args) == 5:
request.propertyArrayIndex = int(args[4])
self._log.debug("{:<20} {!r}".format("REQUEST", request))
return request
    def build_rpm_request(self, args, vendor_id=0):
        """
        Build request from args

        ``args`` layout: <addr> ( <type> <inst> ( <prop> [ <indx> ] )... )...
        i.e. one address token, then one or more object specs, each followed
        by one or more property names (optionally with an array index).
        "@obj_NN" / "@prop_NN" tokens denote proprietary numeric identifiers.
        """
        self._log.debug(args)
        # i is a cursor into the args token list.
        i = 0
        addr = args[i]
        i += 1
        vendor_id = vendor_id
        read_access_spec_list = []
        while i < len(args):
            obj_type = args[i]
            i += 1
            if obj_type.isdigit():
                obj_type = int(obj_type)
            elif "@obj_" in obj_type:
                obj_type = int(obj_type.split("_")[1])
            elif not get_object_class(obj_type, vendor_id=vendor_id):
                raise ValueError("Unknown object type : {}".format(obj_type))
            obj_inst = int(args[i])
            i += 1
            prop_reference_list = []
            while i < len(args):
                prop_id = args[i]
                # A new "@obj_" token starts the next object spec.
                if "@obj_" in prop_id:
                    break
                if prop_id not in PropertyIdentifier.enumerations:
                    try:
                        if "@prop_" in prop_id:
                            prop_id = int(prop_id.split("_")[1])
                            self._log.debug(
                                "Proprietary property : {} | {} -> Vendor : {}".format(
                                    obj_type, prop_id, vendor_id
                                )
                            )
                        else:
                            # Unknown token: assume the next object spec begins.
                            break
                    except:
                        break
                elif prop_id not in (
                    "all",
                    "required",
                    "optional",
                    "objectName",
                    "objectType",
                    "objectIdentifier",
                    "polarity",
                ):
                    # Validate that the property exists for this object type.
                    datatype = get_datatype(obj_type, prop_id, vendor_id=vendor_id)
                    if not datatype:
                        raise ValueError(
                            "invalid property for object type : {} | {}".format(
                                obj_type, prop_id
                            )
                        )
                i += 1
                # build a property reference
                prop_reference = PropertyReference(propertyIdentifier=prop_id)
                # check for an array index
                if (i < len(args)) and args[i].isdigit():
                    prop_reference.propertyArrayIndex = int(args[i])
                    i += 1
                prop_reference_list.append(prop_reference)
            if not prop_reference_list:
                raise ValueError("provide at least one property")
            # build a read access specification
            read_access_spec = ReadAccessSpecification(
                objectIdentifier=(obj_type, obj_inst),
                listOfPropertyReferences=prop_reference_list,
            )
            read_access_spec_list.append(read_access_spec)
        if not read_access_spec_list:
            raise RuntimeError("at least one read access specification required")
        # build the request
        request = ReadPropertyMultipleRequest(
            listOfReadAccessSpecs=read_access_spec_list
        )
        request.pduDestination = Address(addr)
        return request
def build_rpm_request_from_dict(self, request_dict, vendor_id):
"""
Read property multiple allow to read a lot of properties with only one request
The existing RPM function is made using a string that must be created using bacpypes
console style and is hard to automate.
This new version will be an attempt to improve that::
_rpm = {'address': '11:2',
'objects': {'analogInput:1': ['presentValue', 'description', 'unit', 'objectList@idx:0'],
'analogInput:2': ['presentValue', 'description', 'unit', 'objectList@idx:0'],
},
vendor_id: 842
}
"""
vendor_id = 842
addr = request_dict["address"]
objects = request_dict["objects"]
if "vendor_id" in request_dict.keys():
vendor_id = int(request_dict["vendor_id"])
read_access_spec_list = []
for obj, list_of_properties in objects.items():
obj_type, obj_instance = obj.split(":")
obj_type = validate_object_type(obj_type, vendor_id=vendor_id)
obj_instance = int(obj_instance)
property_reference_list = build_property_reference_list(
obj_type, list_of_properties
)
read_acces_spec = build_read_access_spec(
obj_type, obj_instance, property_reference_list
)
read_access_spec_list.append(read_acces_spec)
if not read_access_spec_list:
raise RuntimeError("at least one read access specification required")
# build the request
request = ReadPropertyMultipleRequest(
listOfReadAccessSpecs=read_access_spec_list
)
request.pduDestination = Address(addr)
return request
    def build_rrange_request(
        self, args, range_params=None, arr_index=None, vendor_id=0, bacoid=None
    ):
        """Create a ReadRangeRequest from a tokenized argument list.

        :param args: [addr, obj_type, obj_inst, prop_id, (optional idx)]
        :param range_params: optional (range_type, first, date, time, count);
            range_type is "p" (by position), "s" (by sequence number),
            "t" (by time) or "x" (deliberately empty Range)
        :param arr_index: unused here; the trailing args token sets the index
        :param vendor_id: (optional) vendor identifier for proprietary objects
        :param bacoid: unused, kept for interface compatibility
        """
        addr, obj_type, obj_inst, prop_id = args[:4]
        vendor_id = vendor_id
        bacoid = bacoid
        if obj_type.isdigit():
            obj_type = int(obj_type)
        elif not get_object_class(obj_type, vendor_id=vendor_id):
            raise ValueError("Unknown object type {}".format(obj_type))
        obj_inst = int(obj_inst)
        if prop_id.isdigit():
            prop_id = int(prop_id)
        datatype = get_datatype(obj_type, prop_id, vendor_id=vendor_id)
        if not datatype:
            raise ValueError("invalid property for object type")
        # build a request
        request = ReadRangeRequest(
            objectIdentifier=(obj_type, obj_inst), propertyIdentifier=prop_id
        )
        request.pduDestination = Address(addr)
        if range_params is not None:
            range_type, first, date, time, count = range_params
            if range_type == "p":
                rbp = RangeByPosition(referenceIndex=int(first), count=int(count))
                request.range = Range(byPosition=rbp)
            elif range_type == "s":
                rbs = RangeBySequenceNumber(
                    referenceSequenceNumber=int(first), count=int(count)
                )
                request.range = Range(bySequenceNumber=rbs)
            elif range_type == "t":
                rbt = RangeByTime(
                    referenceTime=DateTime(
                        date=Date(date).value, time=Time(time).value
                    ),
                    count=int(count),
                )
                request.range = Range(byTime=rbt)
            elif range_type == "x":
                # should be missing required parameter
                request.range = Range()
            else:
                raise ValueError("unknown range type: %r" % (range_type,))
        if len(args) == 5:
            request.propertyArrayIndex = int(args[4])
        self._log.debug("{:<20} {!r}".format("REQUEST", request))
        return request
def readRange(
self,
args,
range_params=None,
arr_index=None,
vendor_id=0,
bacoid=None,
timeout=10,
):
"""
Build a ReadProperty request, wait for the answer and return the value
:param args: String with <addr> <type> <inst> <prop> [ <indx> ]
:returns: data read from device (str representing data like 10 or True)
*Example*::
import BAC0
myIPAddr = '192.168.1.10/24'
bacnet = BAC0.connect(ip = myIPAddr)
bacnet.read('2:5 analogInput 1 presentValue')
Requests the controller at (Network 2, address 5) for the presentValue of
its analog input 1 (AI:1).
"""
if not self._started:
raise ApplicationNotStarted("BACnet stack not running - use startApp()")
args_split = args.split()
self.log_title("Read range ", args_split)
vendor_id = vendor_id
bacoid = bacoid
try:
# build ReadProperty request
request = self.build_rrange_request(
args_split,
range_params=range_params,
arr_index=arr_index,
vendor_id=vendor_id,
bacoid=bacoid,
)
iocb = IOCB(request)
iocb.set_timeout(timeout)
# pass to the BACnet stack
deferred(self.this_application.request_io, iocb)
self._log.debug("{:<20} {!r}".format("iocb", iocb))
except ReadRangeException as error:
# construction error
self._log.exception("exception: {!r}".format(error))
iocb.wait() # Wait for BACnet response
if iocb.ioResponse: # successful response
apdu = iocb.ioResponse
if not isinstance(apdu, ReadRangeACK): # expecting an ACK
self._log.warning("Not an ack, see debug for more infos.")
self._log.debug(
"Not an ack. | APDU : {} / {}".format((apdu, type(apdu)))
)
return
# find the datatype
datatype = get_datatype(
apdu.objectIdentifier[0], apdu.propertyIdentifier, vendor_id=vendor_id
)
if not datatype:
# raise TypeError("unknown datatype")
datatype = cast_datatype_from_tag(
apdu.propertyValue,
apdu.objectIdentifier[0],
apdu.propertyIdentifier,
)
try:
value = apdu.itemData.cast_out(datatype)
except TypeError as error:
self._log.error(
"Problem casting value : {} | Datatype : {} | error : {}".format(
apdu.itemData, datatype, error
)
)
return apdu
self._log.debug("{:<20} {:<20}".format("value", "datatype"))
self._log.debug("{!r:<20} {!r:<20}".format(value, datatype))
return value
if iocb.ioError: # unsuccessful: error/reject/abort
apdu = iocb.ioError
reason = find_reason(apdu)
if reason == "segmentationNotSupported":
self._log.warning(
"Segmentation not supported... will read properties one by one..."
)
self._log.debug("The Request was : {}".format(args_split))
value = self._split_the_read_request(args, arr_index)
return value
else:
if reason == "unknownProperty":
if "priorityArray" in args:
self._log.debug("Unknown property {}".format(args))
else:
self._log.warning("Unknown property {}".format(args))
if "description" in args:
return ""
elif "inactiveText" in args:
return "Off"
elif "activeText" in args:
return "On"
else:
raise UnknownPropertyError("Unknown property {}".format(args))
elif reason == "unknownObject":
self._log.warning("Unknown object {}".format(args))
raise UnknownObjectError("Unknown object {}".format(args))
else:
# Other error... consider NoResponseFromController (65)
# even if the realy reason is another one
raise NoResponseFromController(
"APDU Abort Reason : {}".format(reason)
)
def read_priority_array(self, addr, obj, obj_instance):
pa = self.read("{} {} {} priorityArray".format(addr, obj, obj_instance))
res = [pa]
for each in range(1, 17):
_pa = pa[each]
for k, v in _pa.__dict__.items():
if v != None:
res.append(v)
return res
def find_reason(apdu):
    """Return a human-readable reason string for an error/reject/abort APDU."""
    try:
        # NOTE(review): this compares against the TimeoutError *class* itself,
        # not an instance — presumably callers pass the class on timeout; confirm.
        if apdu == TimeoutError:
            return "Timeout"
        elif apdu.pduType == RejectPDU.pduType:
            reasons = RejectReason.enumerations
        elif apdu.pduType == AbortPDU.pduType:
            reasons = AbortReason.enumerations
        else:
            if apdu.errorCode and apdu.errorClass:
                return "{}".format(apdu.errorCode)
            else:
                raise ValueError("Cannot find reason...")
        code = apdu.apduAbortRejectReason
        try:
            # Map the numeric reason code back to its symbolic name.
            return [k for k, v in reasons.items() if v == code][0]
        except IndexError:
            return code
    except KeyError as err:
        # Bug fix: the original format string mixed automatic ('{}') and
        # explicit ('{0!r}') field numbering, which itself raises ValueError.
        return "KeyError: {} has no key {!r}".format(type(apdu), err.args[0])
def cast_datatype_from_tag(propertyValue, obj_id, prop_id):
    """Best-effort cast of a raw property value based on its BACnet tag.

    :returns: dict keyed '<obj_id>_<prop_id>' mapping to the cast value,
        or to the raw propertyValue when the tag cannot be interpreted.
    """
    try:
        tag_list = propertyValue.tagList.tagList
        if tag_list[0].tagClass == 0:
            # Application-tagged scalar: look the datatype class up directly.
            tag = tag_list[0].tagNumber
            datatype = Tag._app_tag_class[tag]
        else:
            # Context tag: assume an array of the inner application tag.
            from bacpypes.constructeddata import ArrayOf
            subtype_tag = propertyValue.tagList.tagList[0].tagList[0].tagNumber
            datatype = ArrayOf(Tag._app_tag_class[subtype_tag])
        value = {"{}_{}".format(obj_id, prop_id): propertyValue.cast_out(datatype)}
    except Exception:
        # Bug fix: the bare 'except:' also swallowed SystemExit and
        # KeyboardInterrupt; the intended fallback is for cast failures only.
        value = {"{}_{}".format(obj_id, prop_id): propertyValue}
    return value
def validate_object_type(obj_type, vendor_id=842):
    """Normalize an object-type token.

    Numeric strings and '@obj_<n>' tokens are converted to ints; any other
    name must resolve through get_object_class or a ValueError is raised.
    """
    if obj_type.isdigit():
        return int(obj_type)
    if "@obj_" in obj_type:
        return int(obj_type.split("_")[1])
    if not get_object_class(obj_type, vendor_id=vendor_id):
        raise ValueError("Unknown object type : {}".format(obj_type))
    return obj_type
def build_read_access_spec(obj_type, obj_instance, property_reference_list):
    """Wrap one object's property references in a ReadAccessSpecification."""
    object_identifier = (obj_type, obj_instance)
    return ReadAccessSpecification(
        objectIdentifier=object_identifier,
        listOfPropertyReferences=property_reference_list,
    )
def build_property_reference_list(obj_type, list_of_properties):
    """Turn property tokens (optionally '<prop>@idx:<n>') into PropertyReferences."""
    references = []
    for token in list_of_properties:
        array_index = None
        if "@idx:" in token:
            token, array_index = token.split("@idx:")
        reference = PropertyReference(
            propertyIdentifier=validate_property_id(obj_type, token)
        )
        if array_index:
            # An explicit array index was requested for this property.
            reference.propertyArrayIndex = int(array_index)
        references.append(reference)
    return references
def validate_property_id(obj_type, prop_id):
    """Validate a property identifier against the given object type.

    Accepts known enumeration names (checked via validate_datatype) and raw
    '@prop_<n>' numeric tokens; raises ValueError for anything else.
    """
    if prop_id in PropertyIdentifier.enumerations:
        # These meta-properties are accepted regardless of the object type.
        always_valid = (
            "all",
            "required",
            "optional",
            "objectName",
            "objectType",
            "objectIdentifier",
            "polarity",
        )
        if prop_id in always_valid or validate_datatype(obj_type, prop_id) is not None:
            return prop_id
        raise ValueError(
            "invalid property for object type : {} | {}".format(obj_type, prop_id)
        )
    if "@prop_" in prop_id:
        return int(prop_id.split("_")[1])
    raise ValueError("{} is an invalid property for {}".format(prop_id, obj_type))
def validate_datatype(obj_type, prop_id, vendor_id=842):
    """Return the datatype class for (obj_type, prop_id), or None if unknown.

    Bug note: the original body was
    ``return get_datatype(...) if not None else False`` — 'not None' is a
    constant True, so the False branch was dead code and callers only ever
    received get_datatype's result (a datatype class, or None). The dead
    conditional is removed; behavior is unchanged.
    """
    return get_datatype(obj_type, prop_id, vendor_id=vendor_id)
| ChristianTremblay/BAC0 | BAC0/core/io/Read.py | Python | lgpl-3.0 | 33,614 |
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsServer WMS GetLegendGraphic.
From build dir, run: ctest -R PyQgsServerWMSGetLegendGraphic -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Alessandro Pasotti'
__date__ = '25/05/2015'
__copyright__ = 'Copyright 2015, The QGIS Project'
import os
# Needed on Qt 5 so that the serialization of XML is consistent among all executions
os.environ['QT_HASH_SEED'] = '1'
import re
import urllib.request
import urllib.parse
import urllib.error
from qgis.testing import unittest
from qgis.PyQt.QtCore import QSize
import osgeo.gdal # NOQA
from test_qgsserver import QgsServerTestBase
from qgis.core import QgsProject
# Strip path and content length because path may vary
# Raw byte strings: '\d' / '\s' inside non-raw literals are invalid escape
# sequences (DeprecationWarning today, a SyntaxError in future Pythons).
RE_STRIP_UNCHECKABLE = rb'MAP=[^"]+|Content-Length: \d+'
RE_ATTRIBUTES = rb'[^>\s]+=[^>\s]+'
class TestQgsServerWMSGetLegendGraphic(QgsServerTestBase):
"""QGIS Server WMS Tests for GetLegendGraphic request"""
# Set to True to re-generate reference files for this class
#regenerate_reference = True
    def test_getLegendGraphics(self):
        """Test that does not return an exception but an image"""
        parms = {
            'MAP': self.testdata_path + "test_project.qgs",
            'SERVICE': 'WMS',
            'VERSION': '1.3.0',
            'REQUEST': 'GetLegendGraphic',
            'FORMAT': 'image/png',
            # 'WIDTH': '20', # optional
            # 'HEIGHT': '20', # optional
            'LAYER': 'testlayer%20èé',
        }
        qs = '?' + '&'.join(["%s=%s" % (k, v) for k, v in parms.items()])
        h, r = self._execute_request(qs)
        # A service exception comes back as XML; success must be a PNG payload.
        self.assertEqual(-1, h.find(b'Content-Type: text/xml; charset=utf-8'), "Header: %s\nResponse:\n%s" % (h, r))
        self.assertNotEqual(-1, h.find(b'Content-Type: image/png'), "Header: %s\nResponse:\n%s" % (h, r))
def test_wms_GetLegendGraphic_LayerSpace(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"FORMAT": "image/png",
# "HEIGHT": "500",
# "WIDTH": "500",
"LAYERSPACE": "50.0",
"LAYERFONTBOLD": "TRUE",
"LAYERFONTSIZE": "30",
"ITEMFONTBOLD": "TRUE",
"ITEMFONTSIZE": "20",
"LAYERFONTFAMILY": self.fontFamily,
"ITEMFONTFAMILY": self.fontFamily,
"LAYERTITLE": "TRUE",
"CRS": "EPSG:3857"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_LayerSpace", max_size_diff=QSize(1, 1))
def test_wms_getLegendGraphics_invalid_parameters(self):
"""Test that does return an exception"""
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello,db_point",
"LAYERTITLE": "FALSE",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"RULE": "1",
"BBOX": "-151.7,-38.9,51.0,78.0",
"CRS": "EPSG:4326"
}.items())])
r, h = self._result(self._execute_request(qs))
err = b"BBOX parameter cannot be combined with RULE" in r
self.assertTrue(err)
def test_wms_GetLegendGraphic_LayerTitleSpace(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"FORMAT": "image/png",
# "HEIGHT": "500",
# "WIDTH": "500",
"LAYERTITLESPACE": "20.0",
"LAYERFONTBOLD": "TRUE",
"LAYERFONTSIZE": "30",
"ITEMFONTBOLD": "TRUE",
"ITEMFONTSIZE": "20",
"LAYERFONTFAMILY": self.fontFamily,
"ITEMFONTFAMILY": self.fontFamily,
"LAYERTITLE": "TRUE",
"CRS": "EPSG:3857"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_LayerTitleSpace")
def test_wms_GetLegendGraphic_ShowFeatureCount(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"FORMAT": "image/png",
# "HEIGHT": "500",
# "WIDTH": "500",
"LAYERTITLE": "TRUE",
"LAYERFONTBOLD": "TRUE",
"LAYERFONTSIZE": "30",
"LAYERFONTFAMILY": self.fontFamily,
"ITEMFONTFAMILY": self.fontFamily,
"ITEMFONTBOLD": "TRUE",
"ITEMFONTSIZE": "20",
"SHOWFEATURECOUNT": "TRUE",
"CRS": "EPSG:3857"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_ShowFeatureCount", max_size_diff=QSize(1, 1))
def test_wms_getLegendGraphics_layertitle(self):
"""Test that does not return an exception but an image"""
print("TEST FONT FAMILY: ", self.fontFamily)
parms = {
'MAP': self.testdata_path + "test_project.qgs",
'SERVICE': 'WMS',
'VERSION': '1.3.0',
'REQUEST': 'GetLegendGraphic',
'FORMAT': 'image/png',
# 'WIDTH': '20', # optional
# 'HEIGHT': '20', # optional
'LAYER': u'testlayer%20èé',
'LAYERFONTBOLD': 'TRUE',
'LAYERFONTSIZE': '30',
'ITEMFONTBOLD': 'TRUE',
'LAYERFONTFAMILY': self.fontFamily,
'ITEMFONTFAMILY': self.fontFamily,
'ITEMFONTSIZE': '20',
'LAYERTITLE': 'TRUE',
}
qs = '?' + '&'.join([u"%s=%s" % (k, v) for k, v in parms.items()])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_test", 250, QSize(15, 15))
parms = {
'MAP': self.testdata_path + "test_project.qgs",
'SERVICE': 'WMS',
'VERSION': '1.3.0',
'REQUEST': 'GetLegendGraphic',
'FORMAT': 'image/png',
# 'WIDTH': '20', # optional
# 'HEIGHT': '20', # optional
'LAYER': u'testlayer%20èé',
'LAYERTITLE': 'FALSE',
}
qs = '?' + '&'.join([u"%s=%s" % (k, v) for k, v in parms.items()])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_test_layertitle_false", 250, QSize(15, 15))
def test_wms_getLegendGraphics_rulelabel(self):
"""Test that does not return an exception but an image"""
parms = {
'MAP': self.testdata_path + "test_project.qgs",
'SERVICE': 'WMS',
'VERSION': '1.3.0',
'REQUEST': 'GetLegendGraphic',
'FORMAT': 'image/png',
'LAYER': u'testlayer%20èé',
'LAYERFONTBOLD': 'TRUE',
'LAYERFONTSIZE': '30',
'LAYERFONTFAMILY': self.fontFamily,
'ITEMFONTFAMILY': self.fontFamily,
'ITEMFONTBOLD': 'TRUE',
'ITEMFONTSIZE': '20',
'RULELABEL': 'TRUE',
}
qs = '?' + '&'.join([u"%s=%s" % (k, v) for k, v in parms.items()])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_test", 250, QSize(15, 15))
parms = {
'MAP': self.testdata_path + "test_project.qgs",
'SERVICE': 'WMS',
'VERSION': '1.3.0',
'REQUEST': 'GetLegendGraphic',
'FORMAT': 'image/png',
'LAYER': u'testlayer%20èé',
'LAYERFONTBOLD': 'TRUE',
'LAYERFONTSIZE': '30',
'ITEMFONTBOLD': 'TRUE',
'ITEMFONTSIZE': '20',
'LAYERFONTFAMILY': self.fontFamily,
'ITEMFONTFAMILY': self.fontFamily,
'RULELABEL': 'FALSE',
}
qs = '?' + '&'.join([u"%s=%s" % (k, v) for k, v in parms.items()])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_rulelabel_false", 250, QSize(15, 15))
def test_wms_getLegendGraphics_rule(self):
"""Test that does not return an exception but an image"""
parms = {
'MAP': self.testdata_path + "test_project_legend_rule.qgs",
'SERVICE': 'WMS',
'VERSION': '1.3.0',
'REQUEST': 'GetLegendGraphic',
'FORMAT': 'image/png',
'LAYER': u'testlayer%20èé',
'WIDTH': '20',
'HEIGHT': '20',
'RULE': 'rule0',
}
qs = '?' + '&'.join([u"%s=%s" % (k, v) for k, v in parms.items()])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_rule0", 250, QSize(15, 15))
parms = {
'MAP': self.testdata_path + "test_project_legend_rule.qgs",
'SERVICE': 'WMS',
'VERSION': '1.3.0',
'REQUEST': 'GetLegendGraphic',
'FORMAT': 'image/png',
'LAYER': u'testlayer%20èé',
'WIDTH': '20',
'HEIGHT': '20',
'RULE': 'rule1',
}
qs = '?' + '&'.join([u"%s=%s" % (k, v) for k, v in parms.items()])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_rule1", 250, QSize(15, 15))
def test_wms_GetLegendGraphic_Basic(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "FALSE",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_Basic")
def test_wms_GetLegendGraphic_Transparent(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "FALSE",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857",
"TRANSPARENT": "TRUE"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_Transparent")
def test_wms_GetLegendGraphic_Background(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "FALSE",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857",
"BGCOLOR": "green"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_Background")
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "FALSE",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857",
"BGCOLOR": "0x008000"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_Background_Hex")
def test_wms_GetLegendGraphic_BoxSpace(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "FALSE",
"BOXSPACE": "100",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_BoxSpace")
def test_wms_GetLegendGraphic_SymbolSpace(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "FALSE",
"SYMBOLSPACE": "100",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_SymbolSpace")
def test_wms_GetLegendGraphic_IconLabelSpace(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "FALSE",
"ICONLABELSPACE": "100",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_IconLabelSpace")
def test_wms_GetLegendGraphic_SymbolSize(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "FALSE",
"SYMBOLWIDTH": "50",
"SYMBOLHEIGHT": "30",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_SymbolSize")
def test_wms_GetLegendGraphic_LayerFont(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "TRUE",
"LAYERFONTBOLD": "TRUE",
"LAYERFONTITALIC": "TRUE",
"LAYERFONTSIZE": "30",
"ITEMFONTBOLD": "TRUE",
"ITEMFONTSIZE": "20",
"LAYERFONTFAMILY": self.fontFamily,
"ITEMFONTFAMILY": self.fontFamily,
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_LayerFont", max_size_diff=QSize(1, 1))
def test_wms_GetLegendGraphic_ItemFont(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "TRUE",
"LAYERFONTBOLD": "TRUE",
"LAYERFONTSIZE": "30",
"ITEMFONTBOLD": "TRUE",
"ITEMFONTITALIC": "TRUE",
"ITEMFONTSIZE": "20",
"LAYERFONTFAMILY": self.fontFamily,
"ITEMFONTFAMILY": self.fontFamily,
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_ItemFont", max_size_diff=QSize(1, 1))
def test_wms_GetLegendGraphic_BBox(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello,db_point",
"LAYERTITLE": "FALSE",
"FORMAT": "image/png",
"SRCHEIGHT": "500",
"SRCWIDTH": "500",
"BBOX": "-151.7,-38.9,51.0,78.0",
"CRS": "EPSG:4326"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_BBox")
def test_wms_GetLegendGraphic_BBox2(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello,db_point",
"LAYERTITLE": "FALSE",
"FORMAT": "image/png",
"SRCHEIGHT": "500",
"SRCWIDTH": "500",
"BBOX": "-76.08,-6.4,-19.38,38.04",
"SRS": "EPSG:4326"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_BBox2")
def test_wms_GetLegendGraphic_BBox_Fallback(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello,db_point",
"LAYERTITLE": "FALSE",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"BBOX": "-151.7,-38.9,51.0,78.0",
"CRS": "EPSG:4326"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_BBox")
def test_wms_GetLegendGraphic_BBox2_Fallback(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello,db_point",
"LAYERTITLE": "FALSE",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"BBOX": "-76.08,-6.4,-19.38,38.04",
"SRS": "EPSG:4326"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_BBox2")
def test_wms_GetLegendGraphic_EmptyLegend(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": self.testdata_path + 'test_project_contextual_legend.qgs',
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "QGIS%20Server%20Hello%20World",
"FORMAT": "image/png",
"SRCHEIGHT": "840",
"SRCWIDTH": "1226",
"BBOX": "10.38450,-49.6370,73.8183,42.9461",
"SRS": "EPSG:4326",
"SCALE": "15466642"
}.items())])
h, r = self._execute_request(qs)
self.assertEqual(-1, h.find(b'Content-Type: text/xml; charset=utf-8'), "Header: %s\nResponse:\n%s" % (h, r))
self.assertNotEqual(-1, h.find(b'Content-Type: image/png'), "Header: %s\nResponse:\n%s" % (h, r))
def test_wms_GetLegendGraphic_wmsRootName(self):
"""Test an unreported issue when a wmsRootName short name is set in the service capabilities"""
# First test with the project title itself:
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": self.testdata_path + 'test_project_wms_grouped_layers.qgs',
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "QGIS%20Server%20-%20Grouped%20Layer",
"FORMAT": "image/png",
"SRCHEIGHT": "840",
"SRCWIDTH": "1226",
"BBOX": "609152,5808188,625492,5814318",
"SRS": "EPSG:25832",
"SCALE": "38976"
}.items())])
h, r = self._execute_request(qs)
self.assertEqual(-1, h.find(b'Content-Type: text/xml; charset=utf-8'), "Header: %s\nResponse:\n%s" % (h, r))
self.assertNotEqual(-1, h.find(b'Content-Type: image/png'), "Header: %s\nResponse:\n%s" % (h, r))
# Then test with the wmsRootName short name:
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": self.testdata_path + 'test_project_wms_grouped_layers_wmsroot.qgs',
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "All_grouped_layers",
"FORMAT": "image/png",
"SRCHEIGHT": "840",
"SRCWIDTH": "1226",
"BBOX": "609152,5808188,625492,5814318",
"SRS": "EPSG:25832",
"SCALE": "38976"
}.items())])
h, r = self._execute_request(qs)
self.assertEqual(-1, h.find(b'Content-Type: text/xml; charset=utf-8'), "Header: %s\nResponse:\n%s" % (h, r))
self.assertNotEqual(-1, h.find(b'Content-Type: image/png'), "Header: %s\nResponse:\n%s" % (h, r))
def test_wms_GetLegendGraphic_ScaleSymbol_Min(self):
# 1:500000000 min
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": self.testdata_path + 'test_project_scaledsymbols.qgs',
"SERVICE": "WMS",
"REQUEST": "GetLegendGraphic",
"LAYER": "testlayer",
"FORMAT": "image/png",
"SRCHEIGHT": "550",
"SRCWIDTH": "850",
"BBOX": "-608.4,-1002.6,698.2,1019.0",
"CRS": "EPSG:4326"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_ScaleSymbol_Min", max_size_diff=QSize(1, 1))
# 1:1000000000 min
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": self.testdata_path + 'test_project_scaledsymbols.qgs',
"SERVICE": "WMS",
"REQUEST": "GetLegendGraphic",
"LAYER": "testlayer",
"FORMAT": "image/png",
"SRCHEIGHT": "550",
"SRCWIDTH": "850",
"BBOX": "-1261.7,-2013.5,1351.5,2029.9",
"CRS": "EPSG:4326"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_ScaleSymbol_Min", max_size_diff=QSize(15, 15))
def test_wms_GetLegendGraphic_ScaleSymbol_Scaled_01(self):
# 1:10000000 scaled
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": self.testdata_path + 'test_project_scaledsymbols.qgs',
"SERVICE": "WMS",
"REQUEST": "GetLegendGraphic",
"LAYER": "testlayer",
"FORMAT": "image/png",
"SRCHEIGHT": "550",
"SRCWIDTH": "850",
"BBOX": "31.8,-12.0,58.0,28.4",
"CRS": "EPSG:4326"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_ScaleSymbol_Scaled_01", max_size_diff=QSize(15, 15))
def test_wms_GetLegendGraphic_ScaleSymbol_Scaled_02(self):
# 1:15000000 scaled
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": self.testdata_path + 'test_project_scaledsymbols.qgs',
"SERVICE": "WMS",
"REQUEST": "GetLegendGraphic",
"LAYER": "testlayer",
"FORMAT": "image/png",
"SRCHEIGHT": "550",
"SRCWIDTH": "850",
"BBOX": "25.3,-22.1,64.5,38.5",
"CRS": "EPSG:4326"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_ScaleSymbol_Scaled_02", max_size_diff=QSize(15, 15))
def test_wms_GetLegendGraphic_ScaleSymbol_Max(self):
# 1:100000 max
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": self.testdata_path + 'test_project_scaledsymbols.qgs',
"SERVICE": "WMS",
"REQUEST": "GetLegendGraphic",
"LAYER": "testlayer",
"FORMAT": "image/png",
"SRCHEIGHT": "550",
"SRCWIDTH": "850",
"BBOX": "44.8,8.0,45.0,8.4",
"CRS": "EPSG:4326"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_ScaleSymbol_Max", max_size_diff=QSize(15, 15))
# 1:1000000 max
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": self.testdata_path + 'test_project_scaledsymbols.qgs',
"SERVICE": "WMS",
"REQUEST": "GetLegendGraphic",
"LAYER": "testlayer",
"FORMAT": "image/png",
"SRCHEIGHT": "550",
"SRCWIDTH": "850",
"BBOX": "43.6,6.2,46.2,10.2",
"CRS": "EPSG:4326"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_ScaleSymbol_Max", max_size_diff=QSize(15, 15))
def test_wms_GetLegendGraphic_ScaleSymbol_DefaultMapUnitsPerMillimeter(self):
# map units per mm on 1:20000000 with SRCHEIGHT=598&SRCWIDTH=1640&BBOX=16.5,-69.7,73.3,86.1 would be around what is set as default: 0.359 map units per mm
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": self.testdata_path + 'test_project_scaledsymbols.qgs',
"SERVICE": "WMS",
"REQUEST": "GetLegendGraphic",
"LAYER": "testlayer",
"FORMAT": "image/png",
"CRS": "EPSG:4326"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_ScaleSymbol_DefaultMapUnitsPerMillimeter", max_size_diff=QSize(15, 15))
def test_wms_GetLegendGraphic_ScaleSymbol_Scaled_2056(self):
# 1:1000 scale on an EPSG:2056 calculating DPI that is around 96
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": self.testdata_path + 'test_project_scaledsymbols_2056.qgs',
"SERVICE": "WMS",
"REQUEST": "GetLegendGraphic",
"LAYER": "testlayer_2056",
"FORMAT": "image/png",
"SRCHEIGHT": "600",
"SRCWIDTH": "1500",
"BBOX": "2662610.7,1268841.8,2663010.5,1269000.05",
"CRS": "EPSG:2056"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_ScaleSymbol_Scaled_2056", max_size_diff=QSize(15, 15))
def test_wms_GetLegendGraphic_ScaleSymbol_DefaultScale_2056(self):
# 1:1000 as default value - it's not exactly the same result than passing the bbox and size because of exact DPI 96 (default)
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": self.testdata_path + 'test_project_scaledsymbols_2056.qgs',
"SERVICE": "WMS",
"REQUEST": "GetLegendGraphic",
"LAYER": "testlayer_2056",
"FORMAT": "image/png",
"CRS": "EPSG:2056"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_ScaleSymbol_DefaultScale_2056", max_size_diff=QSize(15, 15))
def test_wms_GetLegendGraphic_LAYERFONTCOLOR(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857",
"LAYERFONTCOLOR": "red"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_LAYERFONTCOLOR", max_size_diff=QSize(10, 2))
def test_wms_GetLegendGraphic_ITEMFONTCOLOR(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857",
"ITEMFONTCOLOR": "red",
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_ITEMFONTCOLOR", max_size_diff=QSize(10, 2))
def test_wms_GetLegendGraphic_ITEMFONTCOLOR_and_LAYERFONTCOLOR(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857",
"ITEMFONTCOLOR": "red",
"LAYERFONTCOLOR": "blue"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_ITEMFONTCOLOR_and_LAYERFONTCOLOR", max_size_diff=QSize(10, 2))
def test_wms_GetLegendGraphic_ITEMFONTCOLOR_and_LAYERFONTCOLOR_hex(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857",
"ITEMFONTCOLOR": r"%23FF0000",
"LAYERFONTCOLOR": r"%230000FF"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_ITEMFONTCOLOR_and_LAYERFONTCOLOR", max_size_diff=QSize(10, 2))
def test_BBoxNoWidthNoHeight(self):
"""Test with BBOX and no width/height (like QGIS client does)"""
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": self.testdata_path + 'test_project_wms_grouped_nested_layers.qgs',
"SERVICE": "WMS",
"VERSION": "1.3",
"REQUEST": "GetLegendGraphic",
"LAYER": "areas%20and%20symbols",
"FORMAT": "image/png",
"CRS": "EPSG:4326",
"BBOX": "52.44462990911360123,10.6723591605239374,52.44631832182876963,10.6795952150175264",
"SLD_VERSION": "1.1",
}.items())])
r, h = self._result(self._execute_request(qs))
self.assertFalse(b'Exception' in r)
self._img_diff_error(r, h, "WMS_GetLegendGraphic_NoWidthNoHeight", max_size_diff=QSize(10, 2))
# Allow running this test module directly with the standard unittest runner.
if __name__ == '__main__':
    unittest.main()
| blazek/QGIS | tests/src/python/test_qgsserver_wms_getlegendgraphic.py | Python | gpl-2.0 | 32,351 |
# -*- coding: utf-8 -*-
# author: Pavel Studenik
# email: [email protected]
# created: 24.1.2014
# Author: Martin Korbel
# Email: [email protected]
# Date: 20.07.2014
import logging
import shlex
import sys
import time
import traceback
from cStringIO import StringIO
from datetime import timedelta
from croniter import croniter
from django.conf import settings
from django.core import management
from django.db import models
from django.db.models import Max
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from apps.core.utils.date_helpers import toLocalZone
logger = logging.getLogger(__name__)
class TaskPeriodList:
    """Query helpers over TaskPeriodSchedule rows."""
    @staticmethod
    def last_runs(history=0):
        # Return, per period, the id of its most recent schedule row.
        filters = {}
        if history > 0:
            # NOTE(review): timedelta(history) counts *days* — presumably
            # intended; confirm against callers.
            filters["date_create__lt"] = timezone.now() - timedelta(history)
        data = TaskPeriodSchedule.objects.values("period_id")\
            .filter(**filters)\
            .annotate(max_id=Max("id")).order_by("period")
        return data
class TaskPeriodSchedule(models.Model):
    """One concrete run of a TaskPeriod (an ordered schedule entry)."""
    title = models.CharField(max_length=64)
    period = models.ForeignKey("TaskPeriod", blank=True, null=True)
    date_create = models.DateTimeField(
        _('Date of create'),
        default=timezone.now)
    # Sequential index of this run within its period.
    counter = models.BigIntegerField(default=0)

    class Meta:
        ordering = ["period_id", "counter"]

    def __unicode__(self):
        return "[%d] %s" % (self.counter, self.title)

    def recount(self):
        """Reset counter to the number of schedule rows in this period.

        Bug fix: the original referenced 'TaskPeriodSchedule.object', which
        raised AttributeError — the Django manager is named 'objects'.
        """
        self.counter = TaskPeriodSchedule.objects.filter(
            period=self.period).count()
        self.save()

    def delete(self, *args, **kwargs):
        # After removing this run, renumber the period's remaining runs.
        super(TaskPeriodSchedule, self).delete(*args, **kwargs)
        self.period.recount_all()
class TaskPeriod(models.Model):
    """A recurring task definition driven by a cron expression."""
    title = models.CharField(max_length=64)
    label = models.SlugField(max_length=64, unique=True,
                             help_text="Label must be same for command 'schedule --schedule-label [label]'")
    common = models.CharField(_("Command"), max_length=128,
                              help_text="All allowed <a href=\"https://github.com/SatelliteQE/GreenTea/wiki/Commands\">commands</a>. Example: \'beaker schedule --schedule-label daily\'")
    date_last = models.DateTimeField(
        _('Date of last run'),
        null=True,
        blank=True)
    is_enable = models.BooleanField(default=False)
    cron = models.CharField(max_length=64, default="* * * * *")
    position = models.SmallIntegerField(default=0)

    class Meta:
        ordering = ["position", "title"]

    def get_previous_run(self):
        """Return date_run of the most recent Task for this period, or None.

        Perf fix: the original materialized the whole queryset just to read
        row 0; .first() issues a LIMIT 1 query instead.
        """
        task = Task.objects.filter(period=self).order_by("-date_run").first()
        return task.date_run if task is not None else None

    def __unicode__(self):
        return self.title

    def createTask(self):
        """Create a waiting Task from this period's 'common' command string."""
        # Split "command arg1 arg2 ..." into the command and its parameters.
        res = self.common.split(' ', 1)
        command = res.pop(0)
        params = "" if len(res) == 0 else res.pop()
        task = Task.objects.create(title=self.title,
                                   common=command,
                                   common_params=params,
                                   status=Task.STATUS_ENUM_WAIT,
                                   period=self)
        return task

    def recount_all(self):
        """Renumber all schedule rows of this period in creation order."""
        tps = TaskPeriodSchedule.objects.filter(
            period=self).order_by("date_create")
        for key, it in enumerate(tps):
            it.counter = key
            it.save()

    def clean_empty(self):
        """Delete schedule rows that have no jobs attached."""
        tps = TaskPeriodSchedule.objects.filter(
            period=self).order_by("date_create")
        for it in tps:
            count = it.job_set.all().count()
            if count == 0:
                it.delete()
class Task(models.Model):
    """A single execution of a management command, with captured output."""
    STATUS_ENUM_WAIT = 0
    STATUS_ENUM_INPROGRESS = 1
    STATUS_ENUM_DONE = 2
    STATUS_ENUM_ERROR = 3
    STATUS_ENUM = (
        (STATUS_ENUM_WAIT, "Waiting"),
        (STATUS_ENUM_INPROGRESS, "In progress"),
        (STATUS_ENUM_DONE, "Done"),
        (STATUS_ENUM_ERROR, "Error"),
    )
    # user = models.ForeignKey(User)
    title = models.CharField(max_length=64)
    common = models.CharField(max_length=128)
    common_params = models.TextField(_('Parameters'), blank=True)
    status = models.IntegerField(default=0, choices=STATUS_ENUM)
    exit_result = models.TextField(_('Result log'), blank=True)
    date_create = models.DateTimeField(
        _('Date of create'),
        default=timezone.now)
    date_run = models.DateTimeField(
        _('Date of pick up'),
        blank=True,
        null=True)
    time_long = models.FloatField(default=0.0)  # duration in seconds; better set to NULL
    period = models.ForeignKey(TaskPeriod, blank=True, null=True)

    def __unicode__(self):
        return self.title

    def get_time_long(self):
        """Return the task duration as a human-readable H:MM:SS string."""
        return str(timedelta(seconds=self.time_long))

    def get_params(self):
        """Parse whitespace-separated 'key=value' tokens from common_params."""
        data = {}
        for it in self.common_params.split():
            key, value = it.strip().split("=", 1)
            data[key] = value
        return data

    def run(self, errorHandler=None):
        """Execute the stored management command.

        Captures stdout/stderr and root-logger output into exit_result,
        updates status, and records the run duration in time_long.
        """
        started = timezone.now()
        self.status = self.STATUS_ENUM_INPROGRESS  # set status "in progress"
        self.date_run = started
        self.save()
        # --- RUN --- #
        if errorHandler:
            errorHandler.flush()
        # Redirect stdout/stderr and root-logger output into one buffer.
        out = StringIO()
        formatter = logging.Formatter('[%(levelname)s] %(message)s')
        handler = logging.StreamHandler(stream=out)
        handler.setFormatter(formatter)
        handler.setLevel(logging.DEBUG)
        logging.getLogger('').addHandler(handler)
        old = (sys.stdout, sys.stderr)
        sys.stdout = out
        sys.stderr = out
        try:
            # shlex honors quoting. (The old whitespace pre-split of
            # common_params was dead code — it was always overwritten here.)
            params = shlex.split(self.common_params)
            management.call_command(
                self.common, *params, verbosity=3, stdout=out,
                interactive=False)
            self.exit_result += out.getvalue() + "\n"
            self.status = self.STATUS_ENUM_DONE  # set status "done"
        except Exception as e:
            self.exit_result = traceback.format_exc()
            self.status = self.STATUS_ENUM_ERROR  # set status "error"
            logger.exception(e)
        finally:
            # Always restore the real streams and detach the capture handler.
            sys.stdout, sys.stderr = old
            logging.getLogger('').removeHandler(handler)
            out.close()
        # Get all errors from logger
        if errorHandler:
            for er in errorHandler.flush():
                self.exit_result += er.getMessage() + "\n"
        # --- END RUN --- #
        elapsed = timezone.now() - started
        # Bug fix: 'seconds + microseconds' silently dropped whole days for
        # very long runs; total_seconds() includes the days component.
        self.time_long = elapsed.total_seconds()
        self.save()
class Taskomatic:
    """Driver: creates due tasks, runs waiting ones and prunes old history."""
    # Shared buffering handler capturing log records emitted during task runs.
    logHandler = None
    class ListBufferingHandler(logging.handlers.BufferingHandler):
        # BufferingHandler variant that never auto-flushes; flush() hands the
        # buffered records back to the caller and resets the buffer.
        def shouldFlush(self, record):
            return False
        def flush(self):
            old = self.buffer
            self.buffer = list()
            return old
    def checkTaskPeriods(self):
        # Create a Task for every enabled period whose cron schedule fired
        # between the last recorded check and now.
        tPeriods = TaskPeriod.objects.filter(is_enable=True)
        for period in tPeriods:
            if not period.date_last:
                period.date_last = timezone.now()
                period.save()
            last_check = toLocalZone(period.date_last)
            citer = croniter(period.cron, last_check)
            # get_next() yields a unix timestamp here (compared to
            # time.time() below).
            next_date = citer.get_next()
            if next_date < time.time():
                period.createTask()
                period.date_last = timezone.now()
                period.save()
    def checkTasks(self):
        # Run every waiting task, capturing INFO+ records emitted while the
        # task executes so they end up in the task's result log.
        tasks = Task.objects.filter(status=Task.STATUS_ENUM_WAIT)
        self.logHandler = Taskomatic.ListBufferingHandler(0)
        self.logHandler.setLevel(logging.INFO)
        logger.addHandler(self.logHandler)
        for task in tasks:
            task.run(self.logHandler)
    def cleanOldTasks(self):
        # Keep only the newest MAX_TASKOMATIC_HISTORY tasks; delete the rest.
        [it.delete() for it in Task.objects.all()
            .order_by("-date_run")[settings.MAX_TASKOMATIC_HISTORY:]]
    def run(self):
        # Entry point: one full scheduling pass.
        self.checkTaskPeriods()
        self.checkTasks()
        self.cleanOldTasks()
| SatelliteQE/GreenTea | apps/taskomatic/models.py | Python | gpl-2.0 | 8,244 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
import string
import datetime
import re
_logger = logging.getLogger(__name__)
try:
import vatnumber
except ImportError:
_logger.warning("VAT validation partially unavailable because the `vatnumber` Python library cannot be found. "
"Install it to support more countries, for example with `easy_install vatnumber`.")
vatnumber = None
from openerp.osv import fields, osv
from openerp.tools.misc import ustr
from openerp.tools.translate import _
from openerp.exceptions import UserError
# Example VAT numbers per (lower-case) country code, shown to the user in
# validation error messages as the expected format.
_ref_vat = {
    'at': 'ATU12345675',
    'be': 'BE0477472701',
    'bg': 'BG1234567892',
    'ch': 'CHE-123.456.788 TVA or CH TVA 123456', #Swiss by Yannick Vaucher @ Camptocamp
    'cy': 'CY12345678F',
    'cz': 'CZ12345679',
    'de': 'DE123456788',
    'dk': 'DK12345674',
    'ee': 'EE123456780',
    'el': 'EL12345670',
    'es': 'ESA12345674',
    'fi': 'FI12345671',
    'fr': 'FR32123456789',
    'gb': 'GB123456782',
    'gr': 'GR12345670',
    'hu': 'HU12345676',
    'hr': 'HR01234567896', # Croatia, contributed by Milan Tribuson
    'ie': 'IE1234567FA',
    'it': 'IT12345670017',
    'lt': 'LT123456715',
    'lu': 'LU12345613',
    'lv': 'LV41234567891',
    'mt': 'MT12345634',
    'mx': 'MXABC123456T1B',
    'nl': 'NL123456782B90',
    'no': 'NO123456785',
    'pe': 'PER10254824220 or PED10254824220',
    'pl': 'PL1234567883',
    'pt': 'PT123456789',
    'ro': 'RO1234567897',
    'se': 'SE123456789701',
    'si': 'SI12345679',
    'sk': 'SK0012345675',
    'tr': 'TR1234567890 (VERGINO) veya TR12345678901 (TCKIMLIKNO)' # Levent Karakas @ Eska Yazilim A.S.
}
class res_partner(osv.osv):
    # Extends the core partner model with VAT (tax ID) validation helpers.
    _inherit = 'res.partner'
def _split_vat(self, vat):
vat_country, vat_number = vat[:2].lower(), vat[2:].replace(' ', '')
return vat_country, vat_number
    def simple_vat_check(self, cr, uid, country_code, vat_number, context=None):
        '''
        Check the VAT number depending of the country.
        http://sima-pc.com/nif.php

        Delegates to a country-specific ``check_vat_<cc>`` method defined on
        this model or, failing that, in the ``vatnumber`` library.  With no
        checker available, only the existence of the country code is
        verified ("EU" is always accepted).
        '''
        if not ustr(country_code).encode('utf-8').isalpha():
            return False
        check_func_name = 'check_vat_' + country_code
        # When the vatnumber library is missing (module is None),
        # getattr(None, name, None) safely yields None.
        check_func = getattr(self, check_func_name, None) or \
                        getattr(vatnumber, check_func_name, None)
        if not check_func:
            # No VAT validation available, default to check that the country code exists
            if country_code.upper() == 'EU':
                # Foreign companies that trade with non-enterprises in the EU
                # may have a VATIN starting with "EU" instead of a country code.
                return True
            res_country = self.pool.get('res.country')
            return bool(res_country.search(cr, uid, [('code', '=ilike', country_code)], context=context))
        return check_func(vat_number)
    def vies_vat_check(self, cr, uid, country_code, vat_number, context=None):
        '''Validate online against the EU VIES service, falling back to the
        off-line checksum validation when the service cannot answer.'''
        try:
            # Validate against VAT Information Exchange System (VIES)
            # see also http://ec.europa.eu/taxation_customs/vies/
            return vatnumber.check_vies(country_code.upper()+vat_number)
        except Exception:
            # see http://ec.europa.eu/taxation_customs/vies/checkVatService.wsdl
            # Fault code may contain INVALID_INPUT, SERVICE_UNAVAILABLE, MS_UNAVAILABLE,
            # TIMEOUT or SERVER_BUSY. There is no way we can validate the input
            # with VIES if any of these arise, including the first one (it means invalid
            # country code or empty VAT number), so we fall back to the simple check.
            return self.simple_vat_check(cr, uid, country_code, vat_number, context=context)
def button_check_vat(self, cr, uid, ids, context=None):
if not self.check_vat(cr, uid, ids, context=context):
msg = self._construct_constraint_msg(cr, uid, ids, context=context)
raise UserError(msg)
return True
def check_vat(self, cr, uid, ids, context=None):
user_company = self.pool.get('res.users').browse(cr, uid, uid).company_id
if user_company.vat_check_vies:
# force full VIES online check
check_func = self.vies_vat_check
else:
# quick and partial off-line checksum validation
check_func = self.simple_vat_check
for partner in self.browse(cr, uid, ids, context=context):
if not partner.vat:
continue
vat_country, vat_number = self._split_vat(partner.vat)
if not check_func(cr, uid, vat_country, vat_number, context=context):
_logger.info(_("Importing VAT Number [%s] is not valid !" % vat_number))
return False
return True
    def _construct_constraint_msg(self, cr, uid, ids, context=None):
        # Build the human-readable error shown when VAT validation fails,
        # including a per-country example format when one is known.
        def default_vat_check(cn, vn):
            # A VAT "looks well-formed" when its first two characters are
            # lower-case letters (the country prefix); vn is unused here.
            return cn[0] in string.ascii_lowercase and cn[1] in string.ascii_lowercase
        vat_country, vat_number = self._split_vat(self.browse(cr, uid, ids)[0].vat)
        vat_no = "'CC##' (CC=Country Code, ##=VAT Number)"
        error_partner = self.browse(cr, uid, ids, context=context)
        if default_vat_check(vat_country, vat_number):
            vat_no = _ref_vat[vat_country] if vat_country in _ref_vat else vat_no
        if self.pool['res.users'].browse(cr, uid, uid).company_id.vat_check_vies:
            return '\n' + _('The VAT number [%s] for partner [%s] either failed the VIES VAT validation check or did not respect the expected format %s.') % (error_partner[0].vat, error_partner[0].name, vat_no)
        return '\n' + _('The VAT number [%s] for partner [%s] does not seem to be valid. \nNote: the expected format is %s') % (error_partner[0].vat, error_partner[0].name, vat_no)
    # Model-level constraint: re-validate VAT on every write of the field.
    _constraints = [(check_vat, _construct_constraint_msg, ["vat"])]
    # Old format: country prefix already stripped, e.g. "TVA123456".
    __check_vat_ch_re1 = re.compile(r'(MWST|TVA|IVA)[0-9]{6}$')
    # New format: "E#########TVA" or "E-###.###.### TVA" (prefix "CH" stripped).
    __check_vat_ch_re2 = re.compile(r'E([0-9]{9}|-[0-9]{3}\.[0-9]{3}\.[0-9]{3})(MWST|TVA|IVA)$')
    def check_vat_ch(self, vat):
        '''
        Check Switzerland VAT number.
        '''
        # VAT number in Switzerland will change between 2011 and 2013
        # http://www.estv.admin.ch/mwst/themen/00154/00589/01107/index.html?lang=fr
        # Old format is "TVA 123456" we will admit the user has to enter ch before the number
        # Format will becomes such as "CHE-999.999.99C TVA"
        # Both old and new format will be accepted till end of 2013
        # Accepted format are: (spaces are ignored)
        #  CH TVA ######
        #  CH IVA ######
        #  CH MWST #######
        #
        #  CHE#########MWST
        #  CHE#########TVA
        #  CHE#########IVA
        #  CHE-###.###.### MWST
        #  CHE-###.###.### TVA
        #  CHE-###.###.### IVA
        #
        if self.__check_vat_ch_re1.match(vat):
            return True
        match = self.__check_vat_ch_re2.match(vat)
        if match:
            # For new TVA numbers, do a mod11 check
            # NOTE: indexing the result of filter() only works on Python 2,
            # where filter() returns a list.
            num = filter(lambda s: s.isdigit(), match.group(1)) # get the digits only
            factor = (5,4,3,2,7,6,5,4)
            csum = sum([int(num[i]) * factor[i] for i in range(8)])
            check = (11 - (csum % 11)) % 11
            return check == int(num[8])
        return False
def _ie_check_char(self, vat):
vat = vat.zfill(8)
extra = 0
if vat[7] not in ' W':
if vat[7].isalpha():
extra = 9 * (ord(vat[7]) - 64)
else:
# invalid
return -1
checksum = extra + sum((8-i) * int(x) for i, x in enumerate(vat[:7]))
return 'WABCDEFGHIJKLMNOPQRSTUV'[checksum % 23]
    def check_vat_ie(self, vat):
        """ Temporary Ireland VAT validation to support the new format
        introduced in January 2013 in Ireland, until upstream is fixed.
        TODO: remove when fixed upstream"""
        # Valid numbers are 8 or 9 chars with digits in positions 3-7.
        if len(vat) not in (8, 9) or not vat[2:7].isdigit():
            return False
        if len(vat) == 8:
            # Normalize pre-2013 numbers: final space or 'W' not significant
            vat += ' '
        if vat[:7].isdigit():
            # Post-2013 format: 7 digits + check char + extra char.
            return vat[7] == self._ie_check_char(vat[:7] + vat[8])
        elif vat[1] in (string.ascii_uppercase + '+*'):
            # Deprecated format
            # See http://www.revenue.ie/en/online/third-party-reporting/reporting-payment-details/faqs.html#section3
            return vat[7] == self._ie_check_char(vat[2:7] + vat[0] + vat[8])
        return False
    # Mexican VAT verification, contributed by Vauxoo
    # and Panos Christeas <[email protected]>
    # RFC layout: 3-4 letters, YYMMDD birth/incorporation date, 3-char code.
    __check_vat_mx_re = re.compile(r"(?P<primeras>[A-Za-z\xd1\xf1&]{3,4})" \
                                   r"[ \-_]?" \
                                   r"(?P<ano>[0-9]{2})(?P<mes>[01][0-9])(?P<dia>[0-3][0-9])" \
                                   r"[ \-_]?" \
                                   r"(?P<code>[A-Za-z0-9&\xd1\xf1]{3})$")
    def check_vat_mx(self, vat):
        ''' Mexican VAT verification
        (verify a Mexican RFC)
        '''
        # we convert to 8-bit encoding, to help the regex parse only bytes
        vat = ustr(vat).encode('iso8859-1')
        m = self.__check_vat_mx_re.match(vat)
        if not m:
            #No valid format
            return False
        try:
            ano = int(m.group('ano'))
            # Two-digit year pivot: > 30 means 19xx, otherwise 20xx.
            if ano > 30:
                ano = 1900 + ano
            else:
                ano = 2000 + ano
            # Raises ValueError for impossible dates (e.g. month 13, Feb 30).
            datetime.date(ano, int(m.group('mes')), int(m.group('dia')))
        except ValueError:
            return False
        #Valid format and valid date
        return True
# Norway VAT validation, contributed by Rolv Råen (adEgo) <[email protected]>
def check_vat_no(self, vat):
'''
Check Norway VAT number.See http://www.brreg.no/english/coordination/number.html
'''
if len(vat) != 9:
return False
try:
int(vat)
except ValueError:
return False
sum = (3 * int(vat[0])) + (2 * int(vat[1])) + \
(7 * int(vat[2])) + (6 * int(vat[3])) + \
(5 * int(vat[4])) + (4 * int(vat[5])) + \
(3 * int(vat[6])) + (2 * int(vat[7]))
check = 11 -(sum % 11)
if check == 11:
check = 0
if check == 10:
# 10 is not a valid check digit for an organization number
return False
return check == int(vat[8])
    # Peruvian VAT validation, contributed by Vauxoo
    def check_vat_pe(self, vat):
        # First character selects the document type: 'D' = DNI (accepted
        # as-is), 'R' = RUC (11 digits, weighted mod-11 checksum).
        vat_type,vat = vat and len(vat)>=2 and (vat[0], vat[1:]) or (False, False)
        if vat_type and vat_type.upper() == 'D':
            #DNI
            return True
        elif vat_type and vat_type.upper() == 'R':
            #verify RUC
            factor = '5432765432'
            sum = 0
            dig_check = False
            if len(vat) != 11:
                return False
            try:
                int(vat)
            except ValueError:
                return False
            for f in range(0,10):
                sum += int(factor[f]) * int(vat[f])
            subtraction = 11 - (sum % 11)
            if subtraction == 10:
                dig_check = 0
            elif subtraction == 11:
                dig_check = 1
            else:
                dig_check = subtraction
            return int(vat[10]) == dig_check
        else:
            return False
    # VAT validation in Turkey, contributed by # Levent Karakas @ Eska Yazilim A.S.
    def check_vat_tr(self, vat):
        # Accepts either a 10-digit VAT number (vergi no) or an 11-digit
        # personal id (TC kimlik no), each with its own checksum scheme.
        if not (10 <= len(vat) <= 11):
            return False
        try:
            int(vat)
        except ValueError:
            return False
        # check vat number (vergi no)
        if len(vat) == 10:
            sum = 0
            check = 0
            for f in range(0,9):
                c1 = (int(vat[f]) + (9-f)) % 10
                c2 = ( c1 * (2 ** (9-f)) ) % 9
                if (c1 != 0) and (c2 == 0): c2 = 9
                sum += c2
            if sum % 10 == 0:
                check = 0
            else:
                check = 10 - (sum % 10)
            return int(vat[9]) == check
        # check personal id (tc kimlik no)
        if len(vat) == 11:
            c1a = 0
            c1b = 0
            c2 = 0
            # 10th digit derives from the first 9 (odd/even position sums);
            # 11th digit is the sum of the first 10 modulo 10.
            for f in range(0,9,2):
                c1a += int(vat[f])
            for f in range(1,9,2):
                c1b += int(vat[f])
            c1 = ( (7 * c1a) - c1b) % 10
            for f in range(0,10):
                c2 += int(vat[f])
            c2 = c2 % 10
            return int(vat[9]) == c1 and int(vat[10]) == c2
        return False
| tvtsoft/odoo8 | addons/base_vat/base_vat.py | Python | agpl-3.0 | 13,019 |
class Solution(object):
    def combinationSum3(self, k, n):
        """
        Return all combinations of k distinct digits from 1-9 whose sum is n.

        :type k: int
        :type n: int
        :rtype: List[List[int]]
        """
        res = []
        self.helper(list(range(1, 10)), res, [], k, n)
        return res

    def helper(self, x, res, cur, level, target):
        """Backtracking worker.

        :param x: remaining ascending candidate digits
        :param res: accumulator for complete combinations
        :param cur: digits chosen so far
        :param level: how many digits are still needed
        :param target: remaining sum to reach
        """
        if level == 0:
            if target == 0:
                res.append(cur[:])
            return
        for i, digit in enumerate(x):
            if digit > target:
                # Candidates are ascending and positive, so once a digit
                # exceeds the remaining target no later digit can fit.
                break
            cur.append(digit)
            self.helper(x[i + 1:], res, cur, level - 1, target - digit)
            cur.pop()
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyOpenslidePython(PythonPackage):
    """OpenSlide Python is a Python interface to the OpenSlide library."""
    homepage = "https://github.com/openslide/openslide-python"
    url = "https://github.com/openslide/openslide-python/archive/v1.1.1.tar.gz"
    version('1.1.1', sha256='33c390fe43e3d7d443fafdd66969392d3e9efd2ecd5d4af73c3dbac374485ed5')
    # Modules `spack test` imports to sanity-check the installation.
    import_modules = ['openslide']
    # Native OpenSlide C library this package wraps.
    depends_on('[email protected]:')
    depends_on('[email protected]:2.8,3.3:')
    depends_on('py-setuptools', type='build')
    depends_on('py-pillow-simd+jpeg+jpeg2000+tiff', type=('build', 'run'))
| iulian787/spack | var/spack/repos/builtin/packages/py-openslide-python/package.py | Python | lgpl-2.1 | 812 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
####################################################################################################################################################################################################################################
######################################################################################################## PRE-DEFINED IMPORTS #######################################################################################################
####################################################################################################################################################################################################################################
# Imports that are necessary for the program architecture to work properly
# Do not edit this code
import ast
import sys
import os
####################################################################################################################################################################################################################################
####################################################################################################### PRE-DEFINED CONSTANTS ######################################################################################################
####################################################################################################################################################################################################################################
# Possible characters to send to the maze application
# Any other will be ignored
# Do not edit this code
UP = 'U'
DOWN = 'D'
LEFT = 'L'
RIGHT = 'R'
####################################################################################################################################################################################################################################
# Name of your team
# It will be displayed in the maze
# You have to edit this code
TEAM_NAME = "Improved closest v2"
####################################################################################################################################################################################################################################
########################################################################################################## YOUR VARIABLES ##########################################################################################################
####################################################################################################################################################################################################################################
# Hard-coded route for this specific maze; determineNextMove consumes the
# moves one by one, left to right.
allMoves = [UP, RIGHT, DOWN, RIGHT, RIGHT, RIGHT, UP, RIGHT, RIGHT, UP, UP, UP, UP, UP, UP, RIGHT, RIGHT, UP]
####################################################################################################################################################################################################################################
####################################################################################################### PRE-DEFINED FUNCTIONS ######################################################################################################
####################################################################################################################################################################################################################################
# Writes a message to the shell
# Use for debugging your program
# Channels stdout and stdin are captured to enable communication with the maze
# Do not edit this code
def debug(text):
    """Print a debugging message on stderr.

    stdout/stdin are reserved for talking to the maze application, so all
    diagnostics must go through this channel.
    """
    sys.stderr.write("%s\n" % str(text))
    sys.stderr.flush()
####################################################################################################################################################################################################################################
# Reads one line of information sent by the maze application
# This function is blocking, and will wait for a line to terminate
# The received information is automatically converted to the correct type
# Do not edit this code
def readFromPipe():
    """Read one line from stdin and parse it as a Python literal.

    Blocks until a complete line arrives.  Any parse failure or closed pipe
    is treated as a fatal protocol error and terminates the process.
    """
    try:
        text = sys.stdin.readline()
        return ast.literal_eval(text.strip())
    except Exception:
        # A bare `except:` also swallowed KeyboardInterrupt/SystemExit;
        # catching Exception keeps Ctrl-C usable while still treating any
        # malformed message as fatal.
        os._exit(-1)
####################################################################################################################################################################################################################################
# Sends the text to the maze application
# Do not edit this code
# Sends the text to the maze application
# Do not edit this code
def writeToPipe (text) :
    # Writes to the stdout channel; flush immediately so the maze sees the
    # move without buffering delay.
    sys.stdout.write(text)
    sys.stdout.flush()
####################################################################################################################################################################################################################################
# Reads the initial maze information
# The function processes the text and returns the associated variables
# The dimensions of the maze are positive integers
# Maze map is a dictionary associating to a location its adjacent locations and the associated weights
# The preparation time gives the time during which 'initializationCode' can make computations before the game starts
# The turn time gives the time during which 'determineNextMove' can make computations before returning a decision
# Player locations are tuples (line, column)
# Coins are given as a list of locations where they appear
# A boolean indicates if the game is over
# Do not edit this code
def processInitialInformation():
    """Read and unpack the maze's initial message.

    Returns (mazeWidth, mazeHeight, mazeMap, preparationTime, turnTime,
    playerLocation, opponentLocation, coins, gameIsOver).
    """
    data = readFromPipe()
    keys = ('mazeWidth', 'mazeHeight', 'mazeMap', 'preparationTime',
            'turnTime', 'playerLocation', 'opponentLocation', 'coins',
            'gameIsOver')
    return tuple(data[key] for key in keys)
####################################################################################################################################################################################################################################
# Reads the information after each player moved
# The maze map and allowed times are no longer provided since they do not change
# Do not edit this code
def processNextInformation():
    """Read and unpack a per-turn update from the maze application."""
    data = readFromPipe()
    return tuple(data[key] for key in
                 ('playerLocation', 'opponentLocation', 'coins', 'gameIsOver'))
####################################################################################################################################################################################################################################
########################################################################################################## YOUR FUNCTIONS ##########################################################################################################
####################################################################################################################################################################################################################################
# This is where you should write your code to do things during the initialization delay
# This function should not return anything, but should be used for a short preprocessing
# This function takes as parameters the dimensions and map of the maze, the time it is allowed for computing, the players locations in the maze and the remaining coins locations
# Make sure to have a safety margin for the time to include processing times (communication etc.)
def initializationCode(mazeWidth, mazeHeight, mazeMap, timeAllowed, playerLocation, opponentLocation, coins):
    """No preprocessing is needed for this fully scripted strategy."""
    return None
####################################################################################################################################################################################################################################
# This is where you should write your code to determine the next direction
# This function should return one of the directions defined in the CONSTANTS section
# This function takes as parameters the dimensions and map of the maze, the time it is allowed for computing, the players locations in the maze and the remaining coins locations
# Make sure to have a safety margin for the time to include processing times (communication etc.)
def determineNextMove(mazeWidth, mazeHeight, mazeMap, timeAllowed, playerLocation, opponentLocation, coins):
    """Return the next scripted move, consuming it from the global list."""
    global allMoves
    nextMove, allMoves = allMoves[0], allMoves[1:]
    return nextMove
####################################################################################################################################################################################################################################
############################################################################################################# MAIN LOOP ############################################################################################################
####################################################################################################################################################################################################################################
# This is the entry point when executing this file
# We first send the name of the team to the maze
# The first message we receive from the maze includes its dimensions and map, the times allowed to the various steps, and the players and coins locations
# Then, at every loop iteration, we get the maze status and determine a move
# Do not edit this code
if __name__ == "__main__" :
    # Protocol: send team name, read the initial maze state, then alternate
    # reading per-turn updates and writing one move character.
    writeToPipe(TEAM_NAME + "\n")
    # We process the initial information and have a delay to compute things using it
    (mazeWidth, mazeHeight, mazeMap, preparationTime, turnTime, playerLocation, opponentLocation, coins, gameIsOver) = processInitialInformation()
    initializationCode(mazeWidth, mazeHeight, mazeMap, preparationTime, playerLocation, opponentLocation, coins)
    # We decide how to move and wait for the next step
    while not gameIsOver :
        (playerLocation, opponentLocation, coins, gameIsOver) = processNextInformation()
        if gameIsOver :
            break
        nextMove = determineNextMove(mazeWidth, mazeHeight, mazeMap, turnTime, playerLocation, opponentLocation, coins)
        writeToPipe(nextMove)
####################################################################################################################################################################################################################################
#################################################################################################################################################################################################################################### | dimtion/jml | outputFiles/statistics/archives/ourIA/improved_closest_v2.py/0.7/5/player1.py | Python | mit | 11,263 |
import json
import six
from jwkest import b64d, as_unicode
from jwkest import b64e
from jwkest import BadSyntax
__author__ = 'roland'
def split_token(token):
    """Split a serialized JWT (bytes) on ``.`` into its parts.

    Raises BadSyntax when the token contains no dot at all.
    """
    if token.count(b".") == 0:
        raise BadSyntax(token,
                        "expected token to contain at least one dot")
    return tuple(token.split(b"."))
def b2s_conv(item):
    """Recursively convert bytes to str (utf-8) inside plain containers.

    Strings, ints and bools pass through unchanged; lists and dicts are
    rebuilt with converted members.  Any other type yields None.
    """
    if isinstance(item, bytes):
        return item.decode("utf-8")
    if isinstance(item, (six.string_types, int, bool)):
        return item
    if isinstance(item, list):
        return [b2s_conv(element) for element in item]
    if isinstance(item, dict):
        return {key: b2s_conv(value) for key, value in item.items()}
def b64encode_item(item):
    """urlsafe-base64 encode *item*: bytes/str directly, others as compact JSON.

    NOTE(review): the int branch hands the raw int to b64e -- confirm b64e
    accepts non-bytes input; this looks suspicious.
    """
    if isinstance(item, bytes):
        return b64e(item)
    elif isinstance(item, str):
        return b64e(item.encode("utf-8"))
    elif isinstance(item, int):
        return b64e(item)
    else:
        # Compact separators keep the encoded form canonical (no spaces).
        return b64e(json.dumps(b2s_conv(item),
                               separators=(",", ":")).encode("utf-8"))
class JWT(object):
    """Minimal JSON Web Token container: splits/joins the base64 parts.

    ``b64part`` holds the base64-encoded segments, ``part`` the decoded
    ones; index 0 is always the header.
    """
    def __init__(self, **headers):
        # The JOSE header must always carry an "alg" entry.
        if not headers.get("alg"):
            headers["alg"] = None
        self.headers = headers
        self.b64part = [b64encode_item(headers)]
        self.part = [b64d(self.b64part[0])]
    def unpack(self, token):
        """
        Unpacks a JWT into its parts and base64 decodes the parts
        individually

        :param token: The JWT
        :return: self, with headers/part/b64part populated
        """
        if isinstance(token, six.string_types):
            try:
                token = token.encode("utf-8")
            except UnicodeDecodeError:
                pass
        part = split_token(token)
        self.b64part = part
        self.part = [b64d(p) for p in part]
        self.headers = json.loads(self.part[0].decode())
        return self
    def pack(self, parts, headers=None):
        """
        Packs components into a serialized JWT

        :param parts: The raw payload parts to append after the header
        :param headers: Optional header dict; defaults to the instance
            headers, or ``{'alg': 'none'}`` when neither is set
        :return: The string representation of a JWT
        """
        if not headers:
            if self.headers:
                headers = self.headers
            else:
                headers = {'alg': 'none'}
        self.part = [self.part[0]] + parts
        _all = self.b64part = [self.b64part[0]]
        _all.extend([b64encode_item(p) for p in parts])
        return ".".join([a.decode() for a in _all])
    def payload(self):
        """Return the decoded payload; JSON content is parsed into objects."""
        _msg = as_unicode(self.part[1])
        # If not JSON web token assume JSON
        if "cty" in self.headers and self.headers["cty"].lower() != "jwt":
            pass
        else:
            try:
                _msg = json.loads(_msg)
            except ValueError:
                pass
        return _msg
| catapult-project/catapult-csm | third_party/google-endpoints/jwkest/jwt.py | Python | bsd-3-clause | 2,652 |
import os
from celery import Celery
from django.apps import apps, AppConfig
from django.conf import settings
if not settings.configured:
    # set the default Django settings module for the 'celery' program.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.local')  # pragma: no cover
# Celery application instance; all project tasks register against it.
app = Celery('swagger_ui')
class CeleryConfig(AppConfig):
    # Django app hook that wires Celery up once the app registry is ready.
    name = 'swagger_ui.taskapp'
    verbose_name = 'Celery Config'
    def ready(self):
        # Using a string here means the worker will not have to
        # pickle the object when using Windows.
        app.config_from_object('django.conf:settings')
        installed_apps = [app_config.name for app_config in apps.get_app_configs()]
        # force=True re-runs discovery even if it already happened once.
        app.autodiscover_tasks(lambda: installed_apps, force=True)
@app.task(bind=True)
def debug_task(self):
    # bind=True exposes the task instance as `self`, giving access to
    # self.request (task id, args, delivery info) for debugging.
    print('Request: {0!r}'.format(self.request))  # pragma: no cover
| cuongnb14/swagger-ui | swagger_ui/taskapp/celery.py | Python | mit | 911 |
# -*- coding: utf-8 -*-
# Package version (single-source version string, PEP 396 style).
__version__ = '0.3.5'
| joebowen/movement_validation_cloud | djangodev/lib/python2.7/site-packages/django_boto/__init__.py | Python | mit | 47 |
'''
New Test For mini cluster creation and roll back when creation failed
@author: Glody
'''
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.operations.resource_operations as res_ops
import zstackwoodpecker.operations.account_operations as acc_ops
import zstackwoodpecker.operations.cluster_operations as cluster_ops
import apibinding.api_actions as api_actions
import apibinding.inventory as inventory
import threading
import time
import os
# Shared woodpecker test helpers and per-run resource tracking state.
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
def test():
    """Create a mini cluster from existing hosts, then verify that a failed
    mini cluster creation rolls back the host it had already added."""
    zone_uuid = res_ops.query_resource_fields(res_ops.ZONE)[0].uuid
    cluster_uuid = res_ops.query_resource_fields(res_ops.CLUSTER)[0].uuid
    cond = res_ops.gen_query_conditions('clusterUuid', '=', cluster_uuid)
    hosts = res_ops.query_resource_fields(res_ops.HOST, cond)
    minicluster_name = 'minicluster'
    username = hosts[0].username
    password = 'password'
    ssh_port = hosts[0].sshPort
    hypervisor_type= 'KVM'
    host1_ip = hosts[0].managementIp
    # NOTE(review): host1_uuid/host2_uuid are collected but never used.
    host1_uuid = hosts[0].uuid
    host2_ip = hosts[1].managementIp
    host2_uuid = hosts[1].uuid
    #Delete cluster then add minicluster
    cluster_ops.delete_cluster(cluster_uuid)
    mini_cluster_option = test_util.MiniClusterOption()
    mini_cluster_option.set_name(minicluster_name)
    mini_cluster_option.set_username(username)
    mini_cluster_option.set_password(password)
    mini_cluster_option.set_sshPort(ssh_port)
    mini_cluster_option.set_hypervisor_type(hypervisor_type)
    mini_cluster_option.set_zone_uuid(zone_uuid)
    mini_cluster_option.set_host_management_ips([host1_ip, host2_ip])
    cluster_ops.create_mini_cluster(mini_cluster_option)
    test_util.test_logger("Create Minicluster Passed")
    #Check roll back when create mini cluster failed
    cluster_uuid = res_ops.query_resource_fields(res_ops.CLUSTER)[0].uuid
    cond = res_ops.gen_query_conditions('clusterUuid', '=', cluster_uuid)
    hosts = res_ops.query_resource_fields(res_ops.HOST, cond)
    host_ip = hosts[0].managementIp
    #Delete cluster then add minicluster
    cluster_ops.delete_cluster(cluster_uuid)
    # 127.127.127.127 is unreachable on purpose, so creation must fail.
    mini_cluster_option.set_host_management_ips([host_ip, '127.127.127.127'])
    try:
        cluster_ops.create_mini_cluster(mini_cluster_option)
    except:
        pass
    # After the failed creation the reachable host must be gone as well.
    cond = res_ops.gen_query_conditions('managementIp', '=', host_ip)
    try:
        hosts = res_ops.query_resource_fields(res_ops.HOST, cond)
    except:
        test_util.test_pass("[Host:] %s is removed when create mini cluster failed" %host_ip)
    if hosts != []:
        test_util.test_fail("Fail to roll back when create mini cluster failed")
    test_util.test_pass("Mini cluster test passed")
def error_cleanup():
    """Nothing to clean up for this scenario."""
    return None
| zstackio/zstack-woodpecker | integrationtest/vm/simulator/test_minicluster.py | Python | apache-2.0 | 2,859 |
############################################################################
#
# Copyright (C) 2016 The Qt Company Ltd.
# Contact: https://www.qt.io/licensing/
#
# This file is part of Qt Creator.
#
# Commercial License Usage
# Licensees holding valid commercial Qt licenses may use this file in
# accordance with the commercial license agreement provided with the
# Software or, alternatively, in accordance with the terms contained in
# a written agreement between you and The Qt Company. For licensing terms
# and conditions see https://www.qt.io/terms-conditions. For further
# information use the contact form at https://www.qt.io/contact-us.
#
# GNU General Public License Usage
# Alternatively, this file may be used under the terms of the GNU
# General Public License version 3 as published by the Free Software
# Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
# included in the packaging of this file. Please review the following
# information to ensure the GNU General Public License requirements will
# be met: https://www.gnu.org/licenses/gpl-3.0.html.
#
############################################################################
source("../../shared/qtcreator.py")
def typeToDebuggerConsole(expression):
    """Click the empty editable row of the QML debugger console and submit
    *expression* followed by Return."""
    # The empty-text index is the editable input row at the bottom of the view.
    editableIndex = getQModelIndexStr("text=''",
                                      ":DebugModeWidget_Debugger::Internal::ConsoleView")
    mouseClick(editableIndex)
    type(waitForObject(":Debugger::Internal::ConsoleEdit"), expression)
    type(waitForObject(":Debugger::Internal::ConsoleEdit"), "<Return>")
def useDebuggerConsole(expression, expectedOutput, check=None, checkOutp=None):
    """Evaluate *expression* in the QML debugger console and verify its output.

    If expectedOutput is None the raw last console line is returned instead of
    verified. When *check* is given, it is evaluated afterwards (recursively)
    and compared against *checkOutp* (defaults to expectedOutput) — used to
    confirm that an assignment really took effect.
    """
    typeToDebuggerConsole(expression)
    if expectedOutput == None:
        result = getQmlJSConsoleOutput()[-1]
        clickButton(":*Qt Creator.Clear_QToolButton")
        return result
    expected = getQModelIndexStr("text='%s'" % expectedOutput,
                                 ":DebugModeWidget_Debugger::Internal::ConsoleView")
    try:
        obj = waitForObject(expected, 3000)
        test.compare(obj.text, expectedOutput, "Verifying whether expected output appeared.")
    except:
        test.fail("Expected output (%s) missing - got '%s'."
                  % (expectedOutput, getQmlJSConsoleOutput()[-1]))
    # Clear the console so the next evaluation starts from a known state.
    clickButton(":*Qt Creator.Clear_QToolButton")
    if check:
        if checkOutp == None:
            checkOutp = expectedOutput
        useDebuggerConsole(check, checkOutp)
def debuggerHasStopped():
    """Return True when all debugger engines have quit and the UI is back in
    its idle state (combo reset, debug button enabled, 'Debugging has
    finished' in the Application Output pane)."""
    debuggerPresetCombo = waitForObject("{type='QComboBox' unnamed='1' visible='1' "
                                        "window=':Qt Creator_Core::Internal::MainWindow'}")
    waitFor('dumpItems(debuggerPresetCombo.model()) == ["Debugger Preset"]', 5000)
    if not test.compare(dumpItems(debuggerPresetCombo.model()), ["Debugger Preset"],
                        "Verifying whether all debugger engines have quit."):
        return False
    fancyDebugButton = waitForObject(":*Qt Creator.Start Debugging_Core::Internal::FancyToolButton")
    result = test.verify(fancyDebugButton.enabled,
                         "Verifying whether main debugger button is in correct state.")
    ensureChecked(":Qt Creator_AppOutput_Core::Internal::OutputPaneToggleButton")
    output = waitForObject("{type='Core::OutputWindow' visible='1' "
                           "windowTitle='Application Output Window'}")
    result &= test.verify(waitFor("'Debugging has finished' in str(output.plainText)", 2000),
                          "Verifying whether Application output contains 'Debugging has finished'.")
    return result
def getQmlJSConsoleOutput():
    """Return all console rows except the trailing (empty) input row, or
    [""] if the console view cannot be found / never fills up."""
    try:
        consoleView = waitForObject(":DebugModeWidget_Debugger::Internal::ConsoleView")
        model = consoleView.model()
        # old input, output, new input > 2
        waitFor("model.rowCount() > 2", 2000)
        return dumpItems(model)[:-1]
    except:
        return [""]
def runChecks(elementProps, parent, checks):
    """Select the watch-tree item matching elementProps below parent, then
    feed every (expression, expected, ...) tuple to useDebuggerConsole()."""
    targetIndex = getQModelIndexStr(elementProps, parent)
    mouseClick(targetIndex)
    for currentCheck in checks:
        useDebuggerConsole(*currentCheck)
def testLoggingFeatures():
    """Exercise console.log/info/debug/warn/error and the three severity
    filter buttons of the QML Debugger Console, verifying that filtering
    hides and re-shows the matching messages."""
    expressions = ("console.log('info message'); console.info('info message2'); console.debug()",
                   'console.warn("warning message")',
                   "console.error('error message')")
    expected = (["info message", "info message2", "", "<undefined>"],
                ["warning message", "<undefined>"],
                ["error message", "<undefined>"])
    filterToolTips = ("Show debug, log, and info messages.",
                      "Show warning messages.",
                      "Show error messages.",
                      )
    for expression, expect, tooltip in zip(expressions, expected, filterToolTips):
        typeToDebuggerConsole(expression)
        output = getQmlJSConsoleOutput()[1:]
        test.compare(output, expect, "Verifying expected output.")
        # The filter button is identified by its severity-specific tooltip.
        filterButton = waitForObject("{container=':Qt Creator.DebugModeWidget_QSplitter' "
                                     "toolTip='%s' type='QToolButton' unnamed='1' visible='1'}"
                                     % tooltip)
        ensureChecked(filterButton, False)
        output = getQmlJSConsoleOutput()[1:]
        test.compare(output, ["<undefined>"], "Verifying expected filtered output.")
        ensureChecked(filterButton, True)
        output = getQmlJSConsoleOutput()[1:]
        test.compare(output, expect, "Verifying unfiltered output is displayed again.")
        clickButton(":*Qt Creator.Clear_QToolButton")
def main():
    """Squish entry point: debug a .qmlproject and drive the QML/JS console.

    NOTE: the test is currently disabled via xfail + early return; everything
    below the return is intentionally dead until QTBUG-82150 is addressed.
    """
    test.xfail("Skipping test. This will not work correctly with Qt <= 5.15 (QTBUG-82150).")
    return
    projName = "simpleQuickUI2.qmlproject"
    projFolder = os.path.dirname(findFile("testdata", "simpleQuickUI2/%s" % projName))
    if not neededFilePresent(os.path.join(projFolder, projName)):
        return
    qmlProjDir = prepareTemplate(projFolder)
    if qmlProjDir == None:
        test.fatal("Could not prepare test files - leaving test")
        return
    qmlProjFile = os.path.join(qmlProjDir, projName)
    # start Creator by passing a .qmlproject file
    startQC(['"%s"' % qmlProjFile])
    if not startedWithoutPluginError():
        return
    # if Debug is enabled - 1 valid kit is assigned - real check for this is done in tst_qml_locals
    fancyDebugButton = waitForObject(":*Qt Creator.Start Debugging_Core::Internal::FancyToolButton")
    if test.verify(waitFor('fancyDebugButton.enabled', 5000), "Start Debugging is enabled."):
        # make sure QML Debugging is enabled
        switchViewTo(ViewConstants.PROJECTS)
        switchToBuildOrRunSettingsFor(Targets.getDefaultKit(), ProjectSettings.RUN)
        ensureChecked("{container=':Qt Creator.scrollArea_QScrollArea' text='Enable QML' "
                      "type='QCheckBox' unnamed='1' visible='1'}")
        switchViewTo(ViewConstants.EDIT)
        # start debugging
        clickButton(fancyDebugButton)
        progressBarWait()
        waitForObject(":Locals and Expressions_Debugger::Internal::WatchTreeView")
        rootIndex = getQModelIndexStr("text='QQmlEngine'",
                                      ":Locals and Expressions_Debugger::Internal::WatchTreeView")
        # make sure the items inside the QQmlEngine's root are visible
        mainRect = getQModelIndexStr("text='Rectangle'", rootIndex)
        doubleClick(waitForObject(mainRect))
        if not object.exists(":DebugModeWidget_Debugger::Internal::ConsoleView"):
            invokeMenuItem("View", "Output Panes", "QML Debugger Console")
        # Window might be too small to show Locals, so close what we don't need
        for view in ("Stack", "Breakpoints", "Expressions"):
            invokeMenuItem("View", "Views", view)
        # color and float values have additional ZERO WIDTH SPACE (\u200b), different usage of
        # whitespaces inside expressions is part of the test
        checks = [("color", u"#\u200b008000"), ("width", "50"),
                  ("color ='silver'", "silver", "color", u"#\u200bc0c0c0"),
                  ("width=66", "66", "width"), ("anchors.centerIn", "<unnamed object>"),
                  ("opacity", "1"), ("opacity = .1875", u"0.\u200b1875", "opacity")]
        # check red inner Rectangle
        runChecks("text='Rectangle' occurrence='2'", mainRect, checks)
        checks = [("color", u"#\u200bff0000"), ("width", "100"), ("height", "100"),
                  ("radius = Math.min(width, height) / 2", "50", "radius"),
                  ("parent.objectName= 'mainRect'", "mainRect")]
        # check green inner Rectangle
        runChecks("text='Rectangle'", mainRect, checks)
        checks = [("color", u"#\u200b000000"), ("font.pointSize=14", "14", "font.pointSize"),
                  ("font.bold", "false"), ("font.weight=Font.Bold", "75", "font.bold", "true"),
                  ("rotation", "0"), ("rotation = 180", "180", "rotation")]
        # check Text element
        runChecks("text='Text'", mainRect, checks)
        # extended check must be done separately
        originalVal = useDebuggerConsole("x", None)
        if originalVal:
            # Text element uses anchors.centerIn, so modification of x should not do anything
            useDebuggerConsole("x=0", "0", "x", originalVal)
            useDebuggerConsole("anchors.centerIn", "mainRect")
            # ignore output as it has none
            useDebuggerConsole("anchors.centerIn = null", None)
            useDebuggerConsole("x = 0", "0", "x")
        testLoggingFeatures()
        test.log("Calling Qt.quit() from inside Qml/JS Console - inferior should quit.")
        useDebuggerConsole("Qt.quit()", "<undefined>")
        if not debuggerHasStopped():
            __stopDebugger__()
    invokeMenuItem("File", "Exit")
| qtproject/qt-creator | tests/system/suite_debugger/tst_qml_js_console/test.py | Python | gpl-3.0 | 9,763 |
from .nvd3 import NVD3
from flask import jsonify, request
import numpy as np
class TwoAxisFocus(NVD3):
    """NVD3 focus chart plotting two series against independent y-axes.

    ``x`` is the shared x column; ``y1``/``y2`` are plotted on the left/right
    axes. ``data_source`` is filtered per-request using the parameters
    declared in ``init_params`` and serialized by :meth:`to_json`.
    """

    # Supported auto-scaling strategies for axis bounds.
    _allowed_axes = ["sigma", "minmax"]

    def __init__(self, x, y1, y2, data_source, init_params={},
                 chart_id="new_chart", url="/new_chart/", colors=[], auto_scale="sigma",
                 y1_axis_range=[], y2_axis_range=[], sigma=3,
                 x_label="", y1_label="", y2_label="",
                 margin={"top": 30, "right": 60, "bottom": 50, "left": 70}):
        self.x = x
        self.y1 = y1
        self.y2 = y2
        # Fall back to "sigma" when an unsupported strategy is requested.
        self.auto_scale = auto_scale if auto_scale in self._allowed_axes else "sigma"
        # BUG FIX: the sigma argument was previously ignored (hard-coded 3),
        # so custom sigma values never affected get_bounds().
        self.sigma = sigma
        self.y1_axis_range = y1_axis_range
        self.y2_axis_range = y2_axis_range
        # NOTE: duplicate "type" key removed — the dict previously listed
        # "type": "TwoAxisFocus" twice, which is redundant in a literal.
        self.options = {
            "chartid": chart_id,
            "url": url,
            "colors": colors,
            "init_params": init_params,
            "labels": {
                "xAxis": x_label,
                "yAxis1": y1_label,
                "yAxis2": y2_label
            },
            "margin": margin,
            "type": "TwoAxisFocus"
        }

        def get_data():
            # Flask view closure: merge request args over init_params defaults,
            # filter the data source and serialize to the NVD3 JSON layout.
            args = {}
            for c in init_params:
                if request.args.get(c):
                    args[c] = request.args[c]
                else:
                    args[c] = init_params[c]
            return jsonify(self.to_json(
                self.apply_filters(data_source, args)
            ))
        super(TwoAxisFocus, self).__init__(self.options, get_data)

    def get_bounds(self, y, method="sigma"):
        """Return [lower, upper] bounds for series *y*.

        method="sigma" uses mean +/- self.sigma * std; anything else falls
        back to the raw min/max of the series.
        """
        # BUG FIX: the method parameter was previously ignored in favor of
        # self.auto_scale; internal callers pass method=self.auto_scale, so
        # their behavior is unchanged.
        if method == "sigma":
            m_, s_ = y.mean(), y.std()
            lower = m_ - self.sigma * s_
            upper = m_ + self.sigma * s_
        else:
            lower = y.min()
            upper = y.max()
        return [lower, upper]

    def to_json(self, df):
        """Serialize DataFrame *df* into the two-axis NVD3 payload.

        Returns a dict with "data" (two line series) and axis bound entries;
        explicit y*_axis_range values override the auto-scaled bounds.
        """
        if df.empty:
            return {
                "data": [],
                "yAxis1": {"lower": 0, "upper": 1},
                "yAxis2": {"lower": 0, "upper": 1}
            }

        if not self.y1_axis_range:
            bounds1 = self.get_bounds(df[self.y1], method=self.auto_scale)
        else:
            bounds1 = self.y1_axis_range

        if not self.y2_axis_range:
            bounds2 = self.get_bounds(df[self.y2], method=self.auto_scale)
        else:
            bounds2 = self.y2_axis_range

        records = [
            {"key": self.y1, "values": [], "yAxis": 1, "type": "line"},
            {"key": self.y2, "values": [], "yAxis": 2, "type": "line"}
        ]
        for n, r in df.iterrows():
            records[0]["values"].append({"x": r[self.x], "y": r[self.y1]})
            records[1]["values"].append({"x": r[self.x], "y": r[self.y2]})

        return {
            "data": records,
            "yAxis1": {"bounds": bounds1},
            "yAxis2": {"bounds": bounds2}
        }
| MKridler/pyxley | pyxley/charts/nvd3/two_axis_focus.py | Python | mit | 2,870 |
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 20 15:50:23 2016
@author: jessime
"""
#TODO integrate this with play_levels and remove duplicated Code
import sys
import os
import subprocess as sp
import traceback
import numpy as np
from shutil import copyfile
from importlib import import_module, reload
class Tutorial():
    """Graders for the tutorial exercises.

    Unusual calling convention: process_request is a classmethod, so inside
    it ``self`` is the *class* itself. It fetches the plain function from the
    class dict via vars(self) and passes the class as the first argument —
    which is why the other methods call helpers as ``self.temp_copy(self)``.
    """
    # Path to the user's script under test; set by the caller before grading.
    script = None
    def tutorial_gc(self):
        """Run the user's GC-content script and check ../results/tutorial/gc.txt."""
        error = None
        gc_txt = '../results/tutorial/gc.txt'
        # Remove any stale result so we only see this run's output.
        if os.path.isfile(gc_txt):
            os.remove(gc_txt)
        cmd = 'python {} ATATATATGGGGGC'.format(self.script)
        try:
            sp.run(cmd.split(), stderr=sp.PIPE, check=True)
        except sp.CalledProcessError as e:
            error = e.stderr.decode("utf-8")
        if error is None:
            if not os.path.isfile(gc_txt):
                error = 'Your program did not produce a file in the proper location.'
            else:
                with open(gc_txt) as infile:
                    result = infile.read().strip()
                if not result:
                    error = 'There is nothing in the file you created.'
                elif result != '43%':
                    error = 'Your answer is not correct.'
        success = error is None
        return success, error
    def tutorial_sum(self):
        """Run the user's max-row/column-sum script against a random matrix."""
        error = None
        sum_txt = '../results/tutorial/sum.txt'
        if os.path.isfile(sum_txt):
            os.remove(sum_txt)
        #generate temp data
        rand = np.random.randint(-10, 10, [10, 10])
        # Expected answer: the largest of all row sums and column sums.
        max_val = max(rand.sum(0).max(), rand.sum(1).max())
        rand_file = '../results/sum_rand.txt'
        np.savetxt(rand_file, rand, delimiter=',')
        cmd = 'python {} {}'.format(self.script, rand_file)
        try:
            sp.run(cmd.split(), stderr=sp.PIPE, check=True)
        except sp.CalledProcessError as e:
            error = e.stderr.decode("utf-8")
        if error is None:
            if not os.path.isfile(sum_txt):
                error = 'Your program did not produce a file in the proper location.'
            else:
                with open(sum_txt) as infile:
                    result = infile.read().strip()
                if not result:
                    error = 'There is nothing in the file you created.'
                elif result != str(max_val):
                    error = 'Your answer is not correct.'
        success = error is None
        return success, error
    def tutorial_task1(self):
        """Import the user's module and check its squared_sum() function."""
        error = None
        # Copy the user's file next to us so import_module can find it.
        new = self.temp_copy(self)
        module_name = new.split('.')[0]
        try:
            # reload() if it was imported before, so we pick up new edits.
            if module_name in sys.modules:
                user_import = reload(sys.modules[module_name])
            else:
                user_import = import_module(module_name)
            result1 = user_import.squared_sum([1, 2, 3])
            result2 = user_import.squared_sum([-1, 3])
            if result1 != 14 or result2 != 10:
                error = 'Your answer is not correct.'
        except Exception:
            error = traceback.format_exc()
        self.temp_del(self, new)
        success = error is None
        return success, error
    def tutorial_task2(self):
        """Import the user's module and check its seen() function."""
        error = None
        new = self.temp_copy(self)
        module_name = new.split('.')[0]
        try:
            if module_name in sys.modules:
                user_import = reload(sys.modules[module_name])
            else:
                user_import = import_module(module_name)
            result1 = set(user_import.seen([1, 2, 3], [1,2,3,4,4,5, 'what']))
            result2 = user_import.seen(['s', 9], ['s', 9])
            if result1 != set(['what', 4, 5]) or result2 != []:
                error = 'Your answer is not correct.'
        except Exception:
            error = traceback.format_exc()
        self.temp_del(self, new)
        success = error is None
        return success, error
    def temp_copy(self):
        """Creates a copy of a user file into the src dir to be imported"""
        new = os.path.basename(self.script)
        copyfile(self.script, new)
        return new
    def temp_del(self, temp):
        """Delete file created by temp_copy."""
        if os.path.isfile(temp):
            os.remove(temp)
    @classmethod
    def process_request(self, func_name):
        """Execute the method corresponding to func_name."""
        # vars(self)[func_name] returns the raw function from the class dict;
        # the class object itself is then passed in as 'self'.
        result = vars(self)[func_name](self)
        return result
| Jessime/Excision | src/tutorial.py | Python | mit | 4,392 |
#Author: Maxwell Bertolero, [email protected], [email protected]
import numpy as np
from random import choice
import networkx as nx
def within_community_degree(weighted_partition, nan = 0.0, catch_edgeless_node=True):
    ''' Computes "within-module degree" (z-score) for each node (Guimera 2007, J Stat Mech)

    ------
    Parameters
    ------
    weighted_partition: Louvain Weighted Partition
        louvain = weighted_modularity.LouvainCommunityDetection(graph)
        weighted_partitions = louvain.run()
        weighted_partition = weighted_partition[0], where index is the partition level
    nan : int
        number to replace unexpected values (e.g., -infinity) with
        default = 0.0
        (currently unused; kept for API compatibility)
    catch_edgeless_node: Boolean
        raise ValueError if node degree is zero
        default = True

    ------
    Returns
    ------
    within_community_degree: dict
        Dictionary of the within community degree of each node.
    '''
    wc_dict = {}
    for c, community in enumerate(weighted_partition.communities):
        community_degrees = []
        for node in community: #get average within-community-degree
            node_degree = weighted_partition.node_degree(node)
            if node_degree == 0.0: #catch edgeless nodes
                if catch_edgeless_node:
                    raise ValueError("Node {} is edgeless".format(node))
                wc_dict[node] = 0.0
                continue
            community_degrees.append(weighted_partition.node_degree_by_community(node)[c])
        if not community_degrees:
            # every node in this community was edgeless; keep their 0.0 scores
            continue
        # mean/std are per-community constants — hoisted out of the node loop
        std = np.std(community_degrees)
        mean = np.mean(community_degrees)
        for node in community: #get node's within_community-degree z-score
            if weighted_partition.node_degree(node) == 0.0:
                # BUG FIX: previously edgeless nodes had their 0.0 placeholder
                # overwritten by a z-score computed from a zero degree
                continue
            within_community_degree = weighted_partition.node_degree_by_community(node)[c]
            if std == 0.0: #so we don't divide by 0
                wc_dict[node] = (within_community_degree - mean) #z_score
                continue
            # BUG FIX: was (x - mean / std), i.e. x - (mean/std), due to
            # operator precedence; the z-score is (x - mean) / std
            wc_dict[node] = (within_community_degree - mean) / std #z_score
    return wc_dict
def participation_coefficient(weighted_partition, catch_edgeless_node=True):
    '''
    Computes the participation coefficient for each node (Guimera 2007, J Stat Mech)

    ------
    Parameters
    ------
    weighted_partition: Louvain Weighted Partition
        louvain = weighted_modularity.LouvainCommunityDetection(graph)
        weighted_partitions = louvain.run()
        weighted_partition = weighted_partition[0], where index is the partition level
    catch_edgeless_node: Boolean
        raise ValueError if node degree is zero
        default = True

    ------
    Returns
    ------
    participation_coefficient: dict
        Dictionary of the participation coefficient of each node.
    '''
    pc_dict = {}
    for node in weighted_partition.graph:
        total_degree = weighted_partition.node_degree(node)
        if total_degree == 0.0:
            # Edgeless node: either an error or a defined-zero coefficient.
            if catch_edgeless_node:
                raise ValueError("Node {} is edgeless".format(node))
            pc_dict[node] = 0.0
            continue
        degrees_per_community = weighted_partition.node_degree_by_community(node)
        # Drop the node's own community so only between-community degree remains.
        degrees_per_community.pop(weighted_partition.get_node_community(node))
        between_degree = sum(degrees_per_community)
        if between_degree == 0.0:
            # All edges stay inside the node's own community.
            pc_dict[node] = 0.0
            continue
        pc_dict[node] = 1 - ((float(between_degree) / float(total_degree)) ** 2)
    return pc_dict
"""
Definition of test files and directories which can be used in the tests.
"""
from __future__ import absolute_import
import os
# Root of the test-data tree: the directory containing this __init__.py.
TESTDATA_DIR = os.path.dirname(os.path.realpath(__file__))
# SED-ML test documents and COMBINE archives shipped with the test suite.
SEDML_TEST_DIR = os.path.join(TESTDATA_DIR, 'sedml', 'sed-ml')
OMEX_TEST_DIR = os.path.join(TESTDATA_DIR, 'sedml', 'omex')
# Individual archives/models referenced directly by the tests.
OMEX_SHOWCASE = os.path.join(OMEX_TEST_DIR, "CombineArchiveShowCase.omex")
OMEX_REPRESSILATOR = os.path.join(OMEX_TEST_DIR, "tellurium/repressilator.omex")
FEEDBACK_SBML = os.path.join(TESTDATA_DIR, 'models/feedback.xml')
| kirichoi/tellurium | tellurium/tests/testdata/__init__.py | Python | apache-2.0 | 538 |
"""Distance utils."""
import numpy
from scipy.linalg import eigvalsh
from .base import logm, sqrtm
def distance_kullback(A, B):
    """Kullback-Leibler divergence between two covariance matrices A and B.

    :param A: First covariance matrix
    :param B: Second covariance matrix
    :returns: Kullback leibler divergence between A and B
    """
    dim = A.shape[0]
    logdet = numpy.log(numpy.linalg.det(B) / numpy.linalg.det(A))
    trace_term = numpy.trace(numpy.dot(numpy.linalg.inv(B), A))
    return 0.5 * (trace_term - dim + logdet)


def distance_kullback_right(A, B):
    """Kullback-Leibler divergence with the arguments swapped."""
    return distance_kullback(B, A)


def distance_kullback_sym(A, B):
    """Symmetrized Kullback-Leibler divergence between A and B."""
    forward = distance_kullback(A, B)
    backward = distance_kullback_right(A, B)
    return forward + backward
def distance_euclid(A, B):
    """Euclidean distance between two covariance matrices A and B.

    The Euclidean distance is the Frobenius norm of the difference:

    .. math::
            d = \Vert \mathbf{A} - \mathbf{B} \Vert_F

    :param A: First covariance matrix
    :param B: Second covariance matrix
    :returns: Eclidean distance between A and B
    """
    difference = A - B
    return numpy.linalg.norm(difference, ord='fro')
def distance_logeuclid(A, B):
    """Log-Euclidean distance between two covariance matrices A and B.

    .. math::
            d = \Vert \log(\mathbf{A}) - \log(\mathbf{B}) \Vert_F

    :param A: First covariance matrix
    :param B: Second covariance matrix
    :returns: Log-Eclidean distance between A and B
    """
    log_A = logm(A)
    log_B = logm(B)
    return distance_euclid(log_A, log_B)
def distance_riemann(A, B):
    """Riemannian (affine-invariant) distance between covariance matrices.

    .. math::
            d = {\left( \sum_i \log(\lambda_i)^2 \\right)}^{-1/2}

    where :math:`\lambda_i` are the joint eigenvalues of A and B

    :param A: First covariance matrix
    :param B: Second covariance matrix
    :returns: Riemannian distance between A and B
    """
    joint_eigenvalues = eigvalsh(A, B)
    log_eigs = numpy.log(joint_eigenvalues)
    return numpy.sqrt(numpy.sum(log_eigs ** 2))
def distance_logdet(A, B):
    """Log-det distance between two covariance matrices A and B.

    .. math::
            d = \sqrt{\left(\log(\det(\\frac{\mathbf{A}+\mathbf{B}}{2})) - 0.5 \\times \log(\det(\mathbf{A}) \det(\mathbf{B}))\\right)}

    :param A: First covariance matrix
    :param B: Second covariance matrix
    :returns: Log-Euclid distance between A and B
    """
    mean_matrix = (A + B) / 2.0
    first_term = numpy.log(numpy.linalg.det(mean_matrix))
    second_term = 0.5 * numpy.log(numpy.linalg.det(A) * numpy.linalg.det(B))
    return numpy.sqrt(first_term - second_term)
def distance_wasserstein(A, B):
    """Wasserstein (Bures) distance between two covariances matrices.

    .. math::
            d = \left( {tr(A + B - 2(A^{1/2}BA^{1/2})^{1/2})}\\right )^{1/2}

    :param A: First covariance matrix
    :param B: Second covariance matrix
    :returns: Wasserstein distance between A and B
    """
    root_B = sqrtm(B)
    inner = numpy.dot(numpy.dot(root_B, A), root_B)
    cross_term = sqrtm(inner)
    return numpy.sqrt(numpy.trace(A + B - 2 * cross_term))
def distance(A, B, metric='riemann'):
    """Distance between two covariance matrices A and B according to the metric.

    :param A: First covariance matrix (or a stack of them, shape (n, d, d))
    :param B: Second covariance matrix
    :param metric: the metric (Default value 'riemann'), can be : 'riemann' ,
        'logeuclid' , 'euclid' , 'logdet', 'kullback', 'kullback_right',
        'kullback_sym', 'wasserstein'.
    :returns: the distance between A and B
    """
    implementations = {'riemann': distance_riemann,
                       'logeuclid': distance_logeuclid,
                       'euclid': distance_euclid,
                       'logdet': distance_logdet,
                       'kullback': distance_kullback,
                       'kullback_right': distance_kullback_right,
                       'kullback_sym': distance_kullback_sym,
                       'wasserstein': distance_wasserstein}
    dist_fun = implementations[metric]
    if len(A.shape) == 3:
        # A is a stack of matrices: compute one distance to B per slice.
        out = numpy.empty((len(A), 1))
        for index in range(len(A)):
            out[index] = dist_fun(A[index], B)
        return out
    return dist_fun(A, B)
| alexandrebarachant/decoding-brain-challenge-2016 | models/pyriemann/utils/distance.py | Python | bsd-3-clause | 4,099 |
# Copyright (c) 2014 VMware, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from oslo_log import log as logging
from congress.api import api_utils
from congress.api import webservice
from congress.dse import deepsix
from congress.managers import datasource as datasource_manager
LOG = logging.getLogger(__name__)
def d6service(name, keys, inbox, datapath, args):
    """DSE service factory: build the SchemaModel service instance.

    args is expanded into SchemaModel's keyword arguments (e.g.
    datasource_mgr); required by Congress's d6service plugin convention.
    """
    return SchemaModel(name, keys, inbox=inbox, dataPath=datapath, **args)
class SchemaModel(deepsix.deepSix):
    """Model for handling API requests about Schemas."""
    def __init__(self, name, keys, inbox=None, dataPath=None,
                 policy_engine=None, datasource_mgr=None):
        # policy_engine is accepted for interface parity but unused here.
        super(SchemaModel, self).__init__(name, keys, inbox=inbox,
                                          dataPath=dataPath)
        self.datasource_mgr = datasource_mgr
    def rpc(self, caller, name, *args, **kwargs):
        # Thin indirection: invoke method `name` on `caller` with the
        # given arguments (kept as a seam for tests/overrides).
        f = getattr(caller, name)
        return f(*args, **kwargs)
    def get_item(self, id_, params, context=None):
        """Retrieve item with id id_ from model.

        Args:
            id_: The ID of the item to retrieve
            params: A dict-like object containing parameters
                    from the request query string and body.
            context: Key-values providing frame of reference of request

        Returns:
            The matching item or None if item with id_ does not exist.
        """
        datasource = context.get('ds_id')
        table = context.get('table_id')
        try:
            schema = self.rpc(self.datasource_mgr, 'get_datasource_schema',
                              datasource)
        except (datasource_manager.DatasourceNotFound,
                datasource_manager.DriverNotFound) as e:
            # Map manager errors onto the API error contract.
            raise webservice.DataModelException(e.code, e.message,
                                                http_status_code=e.code)
        # request to see the schema for one table
        if table:
            if table not in schema:
                # NOTE(review): the message interpolates id_ where 'table'
                # may have been intended — confirm against the API routes.
                raise webservice.DataModelException(
                    404, ("Table '{}' for datasource '{}' has no "
                          "schema ".format(id_, datasource)),
                    http_status_code=404)
            return api_utils.create_table_dict(table, schema)
        # No table requested: return the schema of every table.
        tables = [api_utils.create_table_dict(table_, schema)
                  for table_ in schema]
        return {'tables': tables}
| ekcs/congress | congress/api/schema_model.py | Python | apache-2.0 | 2,959 |
#!/usr/bin/env python
#
# Send/receive UDP multicast packets.
# Requires that your OS kernel supports IP multicast.
#
# Usage:
# mcast -s (sender, IPv4)
# mcast -s -6 (sender, IPv6)
# mcast (receivers, IPv4)
# mcast -6 (receivers, IPv6)
MYPORT = 8123
# IPv4 multicast group (administratively scoped 225.0.0.0/8 range).
MYGROUP_4 = '225.0.0.250'
# IPv6 site-local multicast group; the hex groups spell out "python demo mcas".
MYGROUP_6 = 'ff15:7079:7468:6f6e:6465:6d6f:6d63:6173'
MYTTL = 1 # Increase to reach other networks

import time
import struct
import socket
import sys
def main():
    """Entry point: '-s' runs the sender, otherwise the receiver.

    '-6' selects the IPv6 group; the IPv4 group is the default.
    """
    use_ipv6 = "-6" in sys.argv[1:]
    group = MYGROUP_6 if use_ipv6 else MYGROUP_4
    if "-s" in sys.argv[1:]:
        sender(group)
    else:
        receiver(group)
def sender(group):
    """Send the current timestamp to the multicast *group* once per second.

    Works for IPv4 and IPv6: the address family is taken from getaddrinfo.
    (Python 2 code: data + '\\0' concatenates byte strings.)
    """
    addrinfo = socket.getaddrinfo(group, None)[0]

    s = socket.socket(addrinfo[0], socket.SOCK_DGRAM)

    # Set Time-to-live (optional)
    ttl_bin = struct.pack('@i', MYTTL)
    if addrinfo[0] == socket.AF_INET: # IPv4
        s.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl_bin)
    else:
        s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_HOPS, ttl_bin)

    while True:
        data = repr(time.time())
        # NUL terminator is stripped again by receiver().
        s.sendto(data + '\0', (addrinfo[4][0], MYPORT))
        time.sleep(1)
def receiver(group):
    """Join the multicast *group* and print every datagram received.

    Handles both IPv4 (IP_ADD_MEMBERSHIP) and IPv6 (IPV6_JOIN_GROUP);
    loops forever.
    """
    # Look up multicast group address in name server and find out IP version
    addrinfo = socket.getaddrinfo(group, None)[0]

    # Create a socket
    s = socket.socket(addrinfo[0], socket.SOCK_DGRAM)

    # Allow multiple copies of this program on one machine
    # (not strictly needed)
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

    # Bind it to the port
    s.bind(('', MYPORT))

    group_bin = socket.inet_pton(addrinfo[0], addrinfo[4][0])
    # Join group
    if addrinfo[0] == socket.AF_INET: # IPv4
        mreq = group_bin + struct.pack('=I', socket.INADDR_ANY)
        s.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
    else:
        mreq = group_bin + struct.pack('@I', 0)
        s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, mreq)

    # Loop, printing any data we receive
    while True:
        data, sender = s.recvfrom(1500)
        while data[-1:] == '\0': data = data[:-1] # Strip trailing \0's
        print (str(sender) + ' ' + repr(data))
# Script entry point: dispatch to sender or receiver based on argv.
if __name__ == '__main__':
    main()
| google/google-ctf | third_party/edk2/AppPkg/Applications/Python/Python-2.7.2/Demo/sockets/mcast.py | Python | apache-2.0 | 2,294 |
#!/usr/bin/env python2
import subprocess
import atexit
import sys
from math import pi, sin, cos
from chips.api.api import Chip, Stimulus, Response, Wire, Component
try:
import scipy as s
except ImportError:
print "You need scipy to run this script!"
exit(0)
try:
import numpy as n
except ImportError:
print "You need numpy to run this script!"
exit(0)
try:
from matplotlib import pyplot
except ImportError:
print "You need matplotlib to run this script!"
exit(0)
def test():
    """Simulate the Chips FFT component on a 64-point sine burst and plot
    the resulting spectrum against scipy's FFT, saving the comparison PNG."""
    chip = Chip("fft")
    # 1024-sample frame: first 64 samples are one sine cycle, rest is zero.
    x_re = [0.0 for i in range(1024)]
    x_im = [0.0 for i in range(1024)]
    x_re[0:63] = [sin(2.0 * pi * (i/64.0)) for i in range(64)]
    x_re = Stimulus(chip, "x_re", "double", x_re)
    x_im = Stimulus(chip, "x_im", "double", x_im)
    fft_x_re = Response(chip, "fft_x_re", "double")
    fft_x_im = Response(chip, "fft_x_im", "double")

    #create a filter component using the C code
    fft = Component("fft.c")

    #add an instance to the chip
    fft(
        chip,
        inputs = {
            "x_re":x_re,
            "x_im":x_im,
        },
        outputs = {
            "fft_x_re":fft_x_re,
            "fft_x_im":fft_x_im,
        },
    )

    #run the simulation
    chip.simulation_reset()
    while len(fft_x_im) < len(x_im):
        chip.simulation_step()

    x_re = list(x_re)
    x_im = list(x_im)
    fft_x_re = list(fft_x_re)[:len(x_re)]
    fft_x_im = list(fft_x_im)[:len(x_im)]

    # NOTE(review): j*1.0 keeps the value real; j*1.0j (as used for
    # chips_complex below) was probably intended. Harmless here because
    # x_im is all zeros — confirm before reuse.
    time_complex = [i + (j*1.0) for i, j in zip(x_re, x_im)]
    numpy_complex = s.fft(time_complex)
    numpy_magnitude = n.abs(numpy_complex)

    chips_complex = [i + (j*1.0j) for i, j in zip(fft_x_re, fft_x_im)]
    chips_magnitude = n.abs(chips_complex)

    # Three stacked subplots: input signal, scipy spectrum, Chips spectrum.
    f, subplot = pyplot.subplots(3, sharex=True)
    pyplot.subplots_adjust(hspace=1.0)
    subplot[0].plot(x_re, 'g')
    subplot[1].plot(numpy_magnitude, 'r')
    subplot[2].plot(chips_magnitude, 'b')
    pyplot.xlim(0, 1023)
    subplot[0].set_title("Time Domain Signal (64 point sine)")
    subplot[1].set_title("Frequency Spectrum - Numpy")
    subplot[2].set_title("Frequency Spectrum - Chips")
    subplot[0].set_xlabel("Sample")
    subplot[1].set_xlabel("Sample")
    subplot[2].set_xlabel("Sample")
    pyplot.savefig("../docs/source/examples/images/example_5.png")
    pyplot.show()
def indent(lines):
    """Indent every line after the first by four spaces (for embedding the
    C source inside an reStructuredText code block)."""
    split_lines = lines.splitlines()
    separator = "\n" + " " * 4
    return separator.join(split_lines)
def generate_docs():
    """Render the example's reST documentation page, embedding fft.c.

    Reads fft.c from the working directory and writes
    ../docs/source/examples/example_5.rst.
    """
    # BUG FIX: both file handles were previously left to the garbage
    # collector (open(...).read() / open(...).write(...)); use context
    # managers so they are closed deterministically.
    with open("fft.c") as source_file:
        c_source = source_file.read()
    documentation = """

Fast Fourier Transform
======================

This example builds on the Taylor series example. We assume that the sin and
cos routines have been placed into a library of math functions math.h, along
with the definitions of :math:`\\pi`, M_PI.
The `Fast Fourier Transform (FFT) <http://en.wikipedia.org/wiki/Fast_Fourier_transform>`_
is an efficient method of decomposing discretely sampled signals into a frequency spectrum, it
is one of the most important algorithms in Digital Signal Processing (DSP).
`The Scientist and Engineer's Guide to Digital Signal Processing <http://www.dspguide.com/>`_
gives a straight forward introduction, and can be viewed on-line for free.
The example shows a practical method of calculating the FFT using the
`Cooley-Tukey algorithm <http://en.wikipedia.org/wiki/Fast_Fourier_transform#Cooley.E2.80.93Tukey_algorithm>`_.

.. code-block:: c

    %s

The C code includes a simple test routine that calculates the frequency spectrum of a 64 point sine wave.

.. image:: images/example_5.png

"""%indent(c_source)

    with open("../docs/source/examples/example_5.rst", "w") as rst_file:
        rst_file.write(documentation)
generate_docs()
| dawsonjon/Chips-2.0 | examples/example_5.py | Python | mit | 3,603 |
# -*- coding: utf-8 -*-
'''
Verhoeff algorithm
~~~~~~~~~~~~~~~~~~
'''
from ..checksum import (
ChecksumStrategy,
)
class Verhoeff(ChecksumStrategy):
    '''
    Verhoeff check-digit algorithm, based on the dihedral group D5.

    ``d`` is the group multiplication table, ``p`` the position-dependent
    permutation table (period 8), and ``inv`` the group-inverse table used
    to derive the final check digit.
    '''
    name = 'verhoeff'

    d = [
        [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
        [1, 2, 3, 4, 0, 6, 7, 8, 9, 5],
        [2, 3, 4, 0, 1, 7, 8, 9, 5, 6],
        [3, 4, 0, 1, 2, 8, 9, 5, 6, 7],
        [4, 0, 1, 2, 3, 9, 5, 6, 7, 8],
        [5, 9, 8, 7, 6, 0, 4, 3, 2, 1],
        [6, 5, 9, 8, 7, 1, 0, 4, 3, 2],
        [7, 6, 5, 9, 8, 2, 1, 0, 4, 3],
        [8, 7, 6, 5, 9, 3, 2, 1, 0, 4],
        [9, 8, 7, 6, 5, 4, 3, 2, 1, 0],
    ]

    p = [
        [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
        [1, 5, 7, 6, 2, 8, 3, 0, 9, 4],
        [5, 8, 0, 3, 7, 9, 6, 1, 4, 2],
        [8, 9, 1, 6, 0, 4, 3, 5, 2, 7],
        [9, 4, 5, 3, 1, 2, 6, 8, 7, 0],
        [4, 2, 8, 6, 5, 7, 3, 9, 0, 1],
        [2, 7, 9, 3, 8, 0, 6, 4, 1, 5],
        [7, 0, 4, 6, 9, 1, 3, 2, 5, 8],
    ]

    inv = [0, 4, 3, 2, 1, 5, 6, 7, 8, 9]

    def checksum(self, body):
        """Compute the Verhoeff check digit of *body* as a string."""
        interim = 0
        # Positions start at 1 because position 0 is reserved for the
        # check digit itself.
        for position, digit in enumerate(self._prepare(body)):
            permuted = self.p[(position + 1) % 8][int(digit)]
            interim = self.d[interim][permuted]
        return str(self.inv[interim])

    def split(self, value):
        """Split *value* into (payload, check digit)."""
        payload = value[:-1]
        check_digit = value[-1]
        return (payload, check_digit)

    def _prepare(self, body):
        """Return the digits of *body* as ints, least significant first."""
        reversed_body = body[::-1]
        return [int(digit) for digit in str(reversed_body)]
| vaiski/checksum | src/checksum/strategies/verhoeff.py | Python | mit | 1,448 |
from announcements import views
from django.conf.urls import url
# URL namespace used in templates/reverse(): 'announcements:<name>'.
app_name = 'announcements'

# Mix of function-based views (list/copy/publish/comment) and class-based
# views (Create/Delete/Update). Order matters: the catch-all ann_id list
# route comes last.
urlpatterns = [
    url(r'^$', views.list, name='list'),  # function based view
    url(r'^b/$', views.list2, name='list2'),  # function based view
    # url(r'^$', views.List.as_view(), name='list'),  # CBV
    # url(r'^create/$', views.create, name='create'),
    url(r'^create/$', views.Create.as_view(), name='create'),  # CBV
    url(r'^(?P<pk>\d+)/delete/$', views.Delete.as_view(), name='delete'),
    url(r'^(?P<pk>\d+)/edit/$', views.Update.as_view(), name='update'),
    url(r'^(?P<ann_id>\d+)/copy/$', views.copy, name='copy'),
    url(r'^(?P<ann_id>\d+)/publish/$', views.publish, name='publish'),
    url(r'^(?P<ann_id>\d+)/comment/$', views.comment, name='comment'),
    url(r'^(?P<ann_id>\d+)/$', views.list, name='list'),
]
| timberline-secondary/hackerspace | src/announcements/urls.py | Python | gpl-3.0 | 836 |
from SimpleCV import *
import time
import serial
# Video source: Android phone running the ipcam app on the local network.
cam = JpegStreamCamera('http://192.168.1.6:8080/videofeed')
disp=Display()
"""This script was used for the demonstration of doing control with visual feedback
A android mobile phone was used with ipcam application to stream the video
A green fresbee was attached to a line rolled over the axis of the motor which was controlled"""
# Serial link to the motor controller (Arduino at 9600 baud).
ser = serial.Serial('/dev/ttyACM2', 9600)
# Exponential-smoothing weight for the measured vertical position.
alpha = 0.8
time.sleep(1)
# Initial/target vertical position estimate in pixels.
previous_z = 200;
while True:
    img = cam.getImage()
    myLayer = DrawingLayer((img.width,img.height))
    # Segment the green disk: hue distance -> invert -> close -> threshold.
    disk_img = img.hueDistance(color=Color.GREEN).invert().morphClose().morphClose().threshold(200)
    disk = disk_img.findBlobs(minsize=2000)
    if disk:
        disk[0].drawMinRect(layer=myLayer, color=Color.RED)
        disk_img.addDrawingLayer(myLayer)
        position = disk[0].centroid()
        print position
        # Smooth the y coordinate, then send a proportional command
        # (error from the 200 px setpoint, gain 0.03) over serial.
        z = alpha*position[1]+(1-alpha)*previous_z
        ser.write(str((z-200)*0.03))
        previous_z=z
    disk_img.save(disp)
    time.sleep(0.01)
| baptistelabat/robokite | ObjectTracking/test/basicControl1D.py | Python | mit | 1,041 |
# Model-intercomparison projects recognised by the handler scripts.
Project_list = ['CMIP5', 'GeoMIP', 'LUCID', 'TAMIP', 'obs4MIPs', 'sdc5p', 'test']
# Short frequency code -> human-readable output frequency.
Freq = {'3h': '3-hourly', '6h': '6-hourly', 'clm': 'Climatology-Monthly-Mean',
        'day': 'Daily', 'fx': 'Fixed', 'mon': 'Monthly', 'sh': 'Sub-Hourly',
        'yr': 'Yearly'}
# Modeling-realm abbreviation -> full realm name.
Realm = {'Ae': 'Aerosol', 'A': 'Atmosphere', 'L': 'Land', 'LI': 'LandIce',
         'O': 'Ocean', 'OI': 'OceanIce', 'SI': 'SeaIce', 'o': 'ocnBgchem'}
| arulalant/CMIPs-Handler | scripts/mv/projectdatatypes.py | Python | gpl-3.0 | 415 |
# *****************************************************************************
# Copyright (c) 2020, Intel Corporation All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# *****************************************************************************
import pandas as pd
from numba import njit
@njit
def series_mod():
    """Element-wise modulo of two pandas Series, JIT-compiled via numba.

    Demonstrates Intel SDC support for ``Series.mod``; note the first
    divisor is 0, so the result for that element depends on SDC's
    numpy-style mod-by-zero semantics.
    """
    s1 = pd.Series([5, 4, 3, 2, 1])
    s2 = pd.Series([0, 2, 3, 6, 8])
    return s1.mod(s2) # Expect series of 0, 0, 0, 2, 1
print(series_mod())
| IntelLabs/hpat | examples/series/series_mod.py | Python | bsd-2-clause | 1,733 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-20 22:29
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated by Django 1.11.

    Adds latitude/longitude columns to Restaurant (defaulting to New York
    City: 40.7128, -74.006), adds an address to UserProfile, and widens the
    Restaurant address field to a TextField with a placeholder default.
    """
    dependencies = [
        ('swiper', '0009_userprofile'),
    ]
    operations = [
        migrations.AddField(
            model_name='restaurant',
            name='location_lat',
            field=models.DecimalField(decimal_places=6, default=40.7128, max_digits=9),
        ),
        migrations.AddField(
            model_name='restaurant',
            name='location_lon',
            field=models.DecimalField(decimal_places=6, default=-74.006, max_digits=9),
        ),
        migrations.AddField(
            model_name='userprofile',
            name='address',
            field=models.TextField(default='Boston, MA', help_text='123 Sample St, City ST 90210', max_length=125),
        ),
        migrations.AlterField(
            model_name='restaurant',
            name='address',
            field=models.TextField(default='Boston, MA', help_text='123 Sample St, City ST 90210', max_length=125),
        ),
    ]
| GFynbo/GoudaTime | swiper/migrations/0010_auto_20171220_1729.py | Python | mit | 1,131 |
# -*- coding: UTF-8 -*-
#
# Copyright (C) 2007 by ULPM: Alexandre Yukio Harano
# Fábio Cassarotti Parronchi Navarro
# Gabriel Geraldo França Marcondes
# Luiz Carlos Irber Júnior
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import pygame
import os
import simplejson as json
from Gambiarra.objects import *
from Gambiarra.command import Play, Help, Quit
class SimulationView(object):
    """ This widget holds the objects being simulated. """
    # Instance state (assigned in __init__); declared here as defaults.
    running = None
    background = None
    objects = None
    def __init__(self, objects):
        """Split *objects* into movable sprites and static scenery.

        Mobile objects go into a pygame sprite group; immobile ones (plus
        four boundary walls) are kept in a plain list.
        """
        self.running = False
        self.background = pygame.Surface((1200, 770))
        self.background.fill([99, 157, 237])
        self.objects = pygame.sprite.RenderPlain()
        self.static_objs = []
        for obj in objects.values():
            if obj.mobility:
                obj.add(self.objects)
            else:
                self.static_objs.append(obj)
        # Enclose the play area with four walls so nothing escapes.
        self.static_objs.append(LeftWall())
        self.static_objs.append(RightWall())
        self.static_objs.append(UpWall())
        self.static_objs.append(DownWall())
    def draw(self, pos = None):
        """Blit the background and every object; if *pos* is given, only
        that sub-rectangle of the background is redrawn."""
        screen = pygame.display.get_surface()
        if pos:
            screen.blit(self.background, (pos[0], pos[1]), pos)
        else:
            screen.blit(self.background, (0, 0))
        for obj in self.static_objs:
            obj.draw(screen, obj.rect)
        for item in self.objects:
            item.draw(screen, item.rect.topleft)
    def add(self, obj):
        """Register a new object, routed by its mobility flag."""
        if obj.mobility:
            obj.add(self.objects)
        else:
            self.static_objs.append(obj)
class ObjectBar(object):
    """ This widget contains the objects available for the problem. """
    def __init__(self, objects):
        # Green strip along the bottom of the screen holding spare objects.
        self.background = pygame.Surface((1000, 130))
        self.background.fill([0, 255, 0])
        self.objects = pygame.sprite.RenderPlain(objects.values())
    def draw(self, pos = None):
        """Draw the bar (at y=770) and lay its objects out left-to-right."""
        screen = pygame.display.get_surface()
        if pos:
            screen.blit(self.background, (pos[0], 770 + pos[1]), pos)
        else:
            screen.blit(self.background, (0, 770))
        objpos = [15, 785]
        for item in self.objects:
            item.rect.topleft = objpos
            item.draw(screen, item.rect.topleft )
            # Advance the cursor by the sprite width plus a 15px gap.
            objpos[0] += item.image.get_width() + 15
    def update(self):
        # No per-frame state to refresh.
        pass
class CommandBar(object):
    """ This widget contains the commands: play, help, and quit. KISS! =D """
    def __init__(self):
        # Blue strip in the bottom-right corner holding the command buttons.
        self.background = pygame.Surface((200, 130))
        self.width, self.height = self.background.get_size()
        self.background.fill([0, 0, 255])
        self.commands = [ Play(), Help(), Quit() ]
    def draw(self, pos=None):
        """Draw the bar (at 1000, 770) and lay the buttons out in a row."""
        screen = pygame.display.get_surface()
        if pos:
            screen.blit(self.background, (1000 + pos[0], 770 + pos[1]), pos)
        else:
            screen.blit(self.background, (1000, 770))
        objpos = [1015, 810]
        for cmd in self.commands:
            cmd.rect.topleft = objpos
            cmd.draw(screen, cmd.rect.topleft )
            objpos[0] += cmd.image.get_width() + 15
    def update(self):
        # No per-frame state to refresh.
        pass
class Level(object):
    """This widget contains the objects in the scenario and their positions
    on the screen"""
    objects = None
    def __init__(self, obj_in_place, obj_to_add, goals, help_img):
        # obj_in_place: objects already placed; obj_to_add: the inventory;
        # goals: (object, target) pairs; help_img: surface shown by show_help.
        self.simulator = SimulationView(obj_in_place)
        self.objbar = ObjectBar(obj_to_add)
        self.cmdbar = CommandBar()
        self.goals = goals
        self.help_img = help_img
    def goal_reached(self):
        """True when every goal object covers its target's centre."""
        for obj, goal in self.goals:
            if not obj.rect.collidepoint(goal.rect.center):
                return False
        return True
    def draw(self):
        """Draw the three widgets that make up the level screen."""
        self.simulator.draw()
        self.objbar.draw()
        self.cmdbar.draw()
    def show_help(self, screen):
        """Blit the help image centred on screen and block until a click."""
        screen.blit(self.help_img, (600 - self.help_img.get_width()/2,
                450 - self.help_img.get_height()/2) )
        pygame.display.flip()
        while True:
            for event in pygame.event.get():
                if event.type == pygame.MOUSEBUTTONDOWN:
                    return
def init_levels():
    # Thin alias kept for the public API; all the work is in load_levels().
    return load_levels()
def load_levels():
    # Load every "*.level" JSON file from data/levels, in sorted order.
    # NOTE(review): Python 2 only ("except ValueError, error" syntax).
    level_dir = os.path.join('data', 'levels')
    files = os.listdir(level_dir)
    levels = []
    for level_file in sorted(f for f in files if f.split(".")[-1] == "level"):
        raw = open(os.path.join(level_dir, level_file))
        try:
            level = json.load(raw)
        except ValueError, error:
            # Malformed JSON: report and skip this level file.
            print level_file, "-> invalid json file: ", error
            raw.close()
        else:
            lvl = load_level(level, level_dir, level_file)
            if lvl:
                levels.append(lvl)
    return levels
def load_level(level, level_dir, level_name):
    # Build a Level from a parsed level dict; returns None (after printing
    # a diagnostic) on any inconsistency in the level description.
    # NOTE(review): Python 2 only (print statements, "except X, e").
    # Instantiate already-placed objects; "type" must name a class from
    # Gambiarra.objects (resolved via globals() thanks to the star import).
    objs = {}
    for obj in level["placed"]:
        try:
            klass = globals()[obj["type"]]
        except KeyError, error:
            print level_name, "-> Invalid type for object:", error
            return None
        new = klass( ( int(obj["xpos"]), int(obj["ypos"]) ), editable=False)
        objs[obj["name"]] = new
    # Instantiate the inventory objects available to the player.
    toadd = {}
    for obj in level["available"]:
        try:
            klass = globals()[obj["type"]]
        except KeyError, error:
            print level_name, "-> Invalid type for object:", error
            return None
        try:
            toadd[obj["name"]] = klass()
        except KeyError:
            print level_name, "-> Object name not available"
            return None
    # Goals are (projectile, target) name pairs referencing placed objects.
    goals = []
    for goal in level["goals"]:
        try:
            proj = objs[ goal[0] ]
            trg = objs[ goal[1] ]
        except KeyError, error:
            print level_name, "-> Object not available:", error
            return None
        goals.append( (proj, trg) )
    # The help screen image must exist alongside the level file.
    img_file = os.path.join(level_dir, level['help'])
    if os.path.isfile(img_file):
        help_image = pygame.image.load(img_file)
    else:
        print level_name, "-> Invalid help file:", level['help']
        return None
    return Level(objs, toadd, goals, help_image)
| Lasanha/Gambiarra | Gambiarra/levels.py | Python | gpl-2.0 | 6,934 |
#!/usr/bin/env python
#coding=utf8
import sys
import os
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)) + "/..")
import time
from sample_common import MNSSampleCommon
from mns.mns_account import Account
from mns.mns_queue import *
# Read the basic configuration (endpoint and credentials) from sample.cfg.
## WARNING: Please do not hard code your accessId and accesskey in next line.(more information: https://yq.aliyun.com/articles/55947)
accid,acckey,endpoint,token = MNSSampleCommon.LoadConfig()
# Initialise my_account and my_queue from the command-line queue name.
my_account = Account(endpoint, accid, acckey, token)
queue_name = MNSSampleCommon.LoadIndexParam(1)
if not queue_name:
    print("Error: get parameter failed")
    sys.exit(0)
my_queue = my_account.get_queue(queue_name)
# Delete the queue, reporting success or the MNS error.
try:
    my_queue.delete()
    print("Delete Queue Succeed! QueueName:%s\n" % queue_name)
except MNSExceptionBase as e:
    print("Delete Queue Fail! Exception:%s\n" % e)
| stormtrader/gw_trade | dysms_python/mns_python_sdk/sample/deletequeue.py | Python | gpl-3.0 | 924 |
from django.conf.urls import patterns, include, url, static
from django.conf import settings
from django.contrib import admin
from ticketing.views import GetTicket, GetVideos, ViewVideoByTicket, stream_video, stream_mp4
from videostore.views import watermark
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
admin.autodiscover()
# URL routes: watermarked thumbnails, ticket-gated video views/streams,
# the JSON ticket/video APIs, and the admin site as the catch-all root.
urlpatterns = patterns('',
    url(r'^media/wmthumbs/(?P<file>.*)$', watermark, name='make_watermark'),
    url(r'^video/(?P<ticket>[0-9a-f]+)/$', ViewVideoByTicket.as_view(), name='video_by_ticket'),
    url(r'^stream/(?P<ticket>[0-9a-f]+)/$', stream_video, name='video_stream'),
    url(r'^streammp4/(?P<ticket>[0-9a-f]+)/$', stream_mp4, name='video_streammp4'),
    url(r'^get_ticket/(?P<video_id>[0-9]+)/$', GetTicket.as_view(), name='get_ticket'),
    url(r'^get_videos/$', GetVideos.as_view(), name='get_videos'),
    url(r'^', include(admin.site.urls)),
)
# Serve uploaded media through Django (development setups only).
urlpatterns += static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
#urlpatterns += static.static('/static/', document_root=settings.STATIC_ROOT)
| alrusdi/video-store | core/urls.py | Python | mit | 1,075 |
# -*- coding: utf-8 -*-
"""setup.py -- setup file for Matrioshka deployment mother brain.
"""
from setuptools import setup
# Package metadata and dependencies for the matrioshka deployment tool.
setup(
    name = "matrioshka",
    packages = ['matrioshka'],
    install_requires = [
        'Fabric',
        'boto',
        'Paved',
        'path.py',
    ],
    zip_safe = False,
    version = "0.1",
    description = "Mother brain for deploying applications with Fabric. Batteries included.",
    author = "David Eyk",
    author_email = "[email protected]",
    url = "http://github.com/eykd/matrioshka",
    #download_url = "http://github.com/eykd/matrioshka",
    long_description = open('README.md').read(),
)
| eykd/matrioshka | setup.py | Python | bsd-3-clause | 659 |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for consumer_tracking_pipeline_visitor."""
# pytype: skip-file
from __future__ import absolute_import
import logging
import unittest
import apache_beam as beam
from apache_beam import pvalue
from apache_beam.pipeline import Pipeline
from apache_beam.pvalue import AsList
from apache_beam.runners.direct import DirectRunner
from apache_beam.runners.direct.consumer_tracking_pipeline_visitor import ConsumerTrackingPipelineVisitor
from apache_beam.transforms import CoGroupByKey
from apache_beam.transforms import Create
from apache_beam.transforms import DoFn
from apache_beam.transforms import FlatMap
from apache_beam.transforms import Flatten
from apache_beam.transforms import ParDo
# Disable frequent lint warning due to pipe operator for chaining transforms.
# pylint: disable=expression-not-assigned
# pylint: disable=pointless-statement
class ConsumerTrackingPipelineVisitorTest(unittest.TestCase):
  """Checks that ConsumerTrackingPipelineVisitor records root transforms,
  value->consumer mappings, step names and side-input views correctly."""
  def setUp(self):
    # Fresh pipeline and visitor per test; Python 2 lacks assertCountEqual,
    # so alias assertItemsEqual to it there.
    self.pipeline = Pipeline(DirectRunner())
    self.visitor = ConsumerTrackingPipelineVisitor()
    try: # Python 2
      self.assertCountEqual = self.assertItemsEqual
    except AttributeError: # Python 3
      pass
  def test_root_transforms(self):
    """Impulse and Flatten with no inputs are both recorded as roots."""
    root_read = beam.Impulse()
    root_flatten = Flatten(pipeline=self.pipeline)
    pbegin = pvalue.PBegin(self.pipeline)
    pcoll_read = pbegin | 'read' >> root_read
    pcoll_read | FlatMap(lambda x: x)
    [] | 'flatten' >> root_flatten
    self.pipeline.visit(self.visitor)
    root_transforms = [t.transform for t in self.visitor.root_transforms]
    self.assertCountEqual(root_transforms, [root_read, root_flatten])
    pbegin_consumers = [
        c.transform for c in self.visitor.value_to_consumers[pbegin]
    ]
    self.assertCountEqual(pbegin_consumers, [root_read])
    self.assertEqual(len(self.visitor.step_names), 3)
  def test_side_inputs(self):
    """An AsList side input used by two ParDos is recorded once in views."""
    class SplitNumbersFn(DoFn):
      def process(self, element):
        if element < 0:
          yield pvalue.TaggedOutput('tag_negative', element)
        else:
          yield element
    class ProcessNumbersFn(DoFn):
      def process(self, element, negatives):
        yield element
    def _process_numbers(pcoll, negatives):
      first_output = (
          pcoll
          | 'process numbers step 1' >> ParDo(ProcessNumbersFn(), negatives))
      second_output = (
          first_output
          | 'process numbers step 2' >> ParDo(ProcessNumbersFn(), negatives))
      output_pc = ((first_output, second_output)
                   | 'flatten results' >> beam.Flatten())
      return output_pc
    root_read = beam.Impulse()
    result = (
        self.pipeline
        | 'read' >> root_read
        | ParDo(SplitNumbersFn()).with_outputs('tag_negative', main='positive'))
    positive, negative = result
    _process_numbers(positive, AsList(negative))
    self.pipeline.visit(self.visitor)
    root_transforms = [t.transform for t in self.visitor.root_transforms]
    self.assertEqual(root_transforms, [root_read])
    self.assertEqual(len(self.visitor.step_names), 5)
    self.assertEqual(len(self.visitor.views), 1)
    self.assertTrue(isinstance(self.visitor.views[0], pvalue.AsList))
  def test_co_group_by_key(self):
    """CoGroupByKey expands into multiple steps; both Creates are roots."""
    emails = self.pipeline | 'email' >> Create([('joe', '[email protected]')])
    phones = self.pipeline | 'phone' >> Create([('mary', '111-222-3333')])
    {'emails': emails, 'phones': phones} | CoGroupByKey()
    self.pipeline.visit(self.visitor)
    root_transforms = [t.transform for t in self.visitor.root_transforms]
    self.assertEqual(len(root_transforms), 2)
    self.assertGreater(
        len(self.visitor.step_names), 3) # 2 creates + expanded CoGBK
    self.assertEqual(len(self.visitor.views), 0)
  def test_visitor_not_sorted(self):
    """Visiting a graph whose subtransforms are stored out of order must
    yield the same consumer mapping as the original ordering."""
    p = Pipeline()
    # pylint: disable=expression-not-assigned
    from apache_beam.testing.test_stream import TestStream
    p | TestStream().add_elements(['']) | beam.Map(lambda _: _)
    original_graph = p.to_runner_api(return_context=False)
    out_of_order_graph = p.to_runner_api(return_context=False)
    root_id = out_of_order_graph.root_transform_ids[0]
    root = out_of_order_graph.components.transforms[root_id]
    # Swap the first two subtransforms to simulate an unsorted proto.
    tmp = root.subtransforms[0]
    root.subtransforms[0] = root.subtransforms[1]
    root.subtransforms[1] = tmp
    p = beam.Pipeline().from_runner_api(
        out_of_order_graph, runner='BundleBasedDirectRunner', options=None)
    v_out_of_order = ConsumerTrackingPipelineVisitor()
    p.visit(v_out_of_order)
    p = beam.Pipeline().from_runner_api(
        original_graph, runner='BundleBasedDirectRunner', options=None)
    v_original = ConsumerTrackingPipelineVisitor()
    p.visit(v_original)
    # Convert to string to assert they are equal.
    out_of_order_labels = {
        str(k): [str(t) for t in v_out_of_order.value_to_consumers[k]]
        for k in v_out_of_order.value_to_consumers
    }
    original_labels = {
        str(k): [str(t) for t in v_original.value_to_consumers[k]]
        for k in v_original.value_to_consumers
    }
    self.assertDictEqual(out_of_order_labels, original_labels)
# Run the suite directly with verbose logging enabled.
if __name__ == '__main__':
  logging.getLogger().setLevel(logging.DEBUG)
  unittest.main()
| iemejia/incubator-beam | sdks/python/apache_beam/runners/direct/consumer_tracking_pipeline_visitor_test.py | Python | apache-2.0 | 5,990 |
from openerp import models, fields
class testwidget(models.Model):
    """Minimal test model exposing a single char field."""
    _name = 'testwidget'
    name = fields.Char()  # display name of the widget
| nishad-jobsglobal/odoo-marriot | openerp/addons/testwidget/testwidget.py | Python | agpl-3.0 | 129 |
#!/usr/bin/python
import xml.etree.ElementTree as ET
import os
import urllib
import urllib2
# Default the agent home directory when the environment does not set it.
if 'MY_HOME' not in os.environ:
    os.environ['MY_HOME']='/usr/libexec/cernvm-appliance-agent'
def prepare():
    """Build the root <cernvm-api> element carrying the admin credentials."""
    root = ET.Element('cernvm-api', {'version': '1.0'})
    for tag, text in (('username', 'admin'), ('apikey', '42a7gXpcAVWfE')):
        child = ET.Element(tag)
        child.text = text
        root.append(child)
    return root
def test_table(transaction):
addrequest=ET.Element('add')
title=ET.Element('title')
title.text='Hello Family'
cg=ET.Element('command-group')
command=ET.Element('command', {'title':'Hello Family', 'format':'table'})
command.text='echo "Message Name"; echo "Hello Gabriel"; '+\
'echo "Hello Styliani"; echo "Hello Vasiliki"; echo "Hello George"'
addrequest.append(title)
cg.append(command)
addrequest.append(cg)
transaction.append(addrequest)
return transaction
def test_basic(transaction):
addrequest=ET.Element('add')
title=ET.Element('title')
title.text='Hello World'
cg=ET.Element('command-group')
command=ET.Element('command', {'title':'Hello'})
command.text='echo "Hello World"'
addrequest.append(title)
cg.append(command)
addrequest.append(cg)
transaction.append(addrequest)
return transaction
def post(transaction):
    # Serialise the transaction tree and POST it (form-encoded under the
    # "xml" key) to the local CernVM API CGI endpoint, echoing the reply.
    # NOTE(review): Python 2 only (urllib2 module and print statement);
    # HTTPS certificate validation behaviour depends on the Python build.
    xml_string = ET.tostring(transaction, encoding='UTF-8')
    data = urllib.urlencode({'xml': xml_string})
    url='https://127.0.0.1:8003/cgi-bin/api/cernvm.py'
    response = urllib2.urlopen(url, data)
    for line in response.readlines():
        print line
def test_remove_basic(transaction):
removerequest = ET.Element('remove')
reqID = ET.Element('id')
reqID.text = 'Hello World'
removerequest.append(reqID)
transaction.append(removerequest)
return transaction
def test_list(transaction):
el_list = ET.Element('list')
transaction.append(el_list)
return transaction
# Build a transaction and exercise the "list" request; the other request
# builders are kept disabled in the string literal below.
transaction=prepare()
'''
ftr=test_basic(transaction)
ftr=test_remove_basic(transaction)
'''
ftr = test_list(transaction)
post(ftr)
| cernvm/cernvm-appliance-agent | api-tests/xml_poster.py | Python | gpl-3.0 | 2,177 |
import sys
import importlib
import os
import pickle
import numpy as np
import random
class Problem(object):
    """Holds the settings and marker setup of a marker-in-cell model.

    Settings are stored in a per-instance dict and exposed read-only via
    ``problem['key']``.  A model is described by a Python module providing
    ``settings``, ``materials``, ``boundaries`` and ``moving_cells``, plus
    a ``<model>.npy`` image whose pixel values index into ``materials``.
    """

    # Class-level default kept for backward compatibility with code that
    # reads ``Problem.settings``.  Instances receive their own copy in
    # ``__init__`` (see below) so they no longer share mutable state.
    settings = {}

    def __init__(self, default_settings_filename=None):
        """Create a problem, optionally pre-loading settings from a file.

        Fix: the original class stored ``settings`` only as a shared
        class-level dict, so every instance mutated the same dict; each
        instance now starts from its own copy of the class defaults.
        """
        self.settings = dict(type(self).settings)
        # set defaults
        if default_settings_filename:
            self.load_from_file(default_settings_filename, only_settings=True)

    def __getitem__(self, index):
        """Dict-style read access to a single setting value."""
        return self.settings[index]

    def set_settings_from_dictionary(self, settings=None, check_for_None=True):
        """Merge *settings* into this problem's settings.

        ``check_for_None`` is accepted for interface compatibility but is
        unused (and always was).  Passing ``settings=None`` is now a no-op
        instead of raising ``TypeError`` from ``dict.update(None)``.
        """
        if settings is not None:
            self.settings.update(settings)

    def load_from_file(self, filename, only_settings=False):
        """Import *filename* as a module and pull model data from it.

        Returns ``(materials, boundaries, moving_cells)``, or ``None`` when
        ``only_settings`` is true.
        """
        sys.path.append(os.path.dirname(filename))
        modulename = os.path.splitext(os.path.basename(filename))[0]
        try:
            imported = importlib.import_module(modulename)
        finally:
            # Always restore sys.path, even when the model module fails to
            # import (the original leaked the appended entry on error).
            sys.path.pop()
        settings = imported.settings
        self.set_settings_from_dictionary(settings=settings,
                                          check_for_None=False)
        if not only_settings:
            materials = imported.materials
            boundaries = imported.boundaries
            moving_cells = imported.moving_cells
            return materials, boundaries, moving_cells

    def load_model(self, filename):
        """Load a full model: settings, material image and derived arrays.

        Populates self.settings with per-marker material property arrays
        (rho, eta, mu, C, sinphi), zero-initialised stress/strain fields,
        boundary conditions and the moving-point/marker index lists.
        """
        materials, boundaries, moving_cells = self.load_from_file(filename)
        mxx, myy, values, moving_cells_index_list, markers_index_list = \
            self.load_image(filename, moving_cells)

        # Per-material property lookup tables, indexed by material id.
        rho_key = np.asarray([material['rho'] for material in materials])
        eta_key = np.asarray([material['eta'] for material in materials])
        mu_key = np.asarray([material['mu'] for material in materials])
        C_key = np.asarray([material['C'] for material in materials])
        sinphi_key = np.asarray([material['sinphi'] for material in materials])

        # Map each marker's material id to its physical properties.
        m_cat = np.copy(values)
        m_rho = rho_key[values]
        m_eta = eta_key[values]
        m_mu = mu_key[values]
        m_C = C_key[values]
        m_sinphi = sinphi_key[values]

        self.settings["mxx"] = mxx
        self.settings["myy"] = myy
        self.settings["m_cat"] = m_cat
        self.settings["m_rho"] = m_rho
        self.settings["m_eta"] = m_eta
        self.settings["m_mu"] = m_mu
        self.settings["m_C"] = m_C
        self.settings["m_sinphi"] = m_sinphi

        self.settings["top_bound"] = boundaries['top_bound']
        self.settings["bottom_bound"] = boundaries['bottom_bound']
        self.settings["left_bound"] = boundaries['left_bound']
        self.settings["right_bound"] = boundaries['right_bound']

        # Stress, strain and pressure start from zero on every marker.
        self.settings["m_s_xx"] = np.zeros(np.shape(mxx))
        self.settings["m_s_xy"] = np.zeros(np.shape(mxx))
        self.settings["m_e_xx"] = np.zeros(np.shape(mxx))
        self.settings["m_e_xy"] = np.zeros(np.shape(mxx))
        self.settings["m_P"] = np.zeros(np.shape(mxx))

        self.settings['moving_points_index_list'] = moving_cells_index_list
        self.settings['markers_index_list'] = markers_index_list

    def create_grid_of_points(self, mxx, myy, res):
        """Append a rectangular grid of tracer points to mxx/myy.

        *res* is ``(x_res, y_res)``: the number of vertical and horizontal
        grid lines; each line is sampled five times more densely along its
        length.  Returns the indexes of the appended points.
        """
        x_res, y_res = res
        x = np.linspace(0, self['j_res']-2, x_res)
        y = np.linspace(0, self['i_res']-2, y_res)
        # Dense sampling along each line (5x the line count).
        x_ = np.linspace(0, self['j_res']-2, x_res*5)
        y_ = np.linspace(0, self['i_res']-2, y_res*5)
        list_of_indexes = []
        for x1 in x[:-1]:
            for y1 in y_:
                mxx.append(np.asarray([x1]))
                myy.append(np.asarray([y1]))
                list_of_indexes.append(len(mxx)-1)
        for y1 in y[:-1]:
            for x1 in x_:
                mxx.append(np.asarray([x1]))
                myy.append(np.asarray([y1]))
                list_of_indexes.append(len(mxx)-1)
        return list_of_indexes

    def load_image(self, fname, moving_cells):
        """Scatter markers over the grid and sample material ids for them.

        Reads ``<fname minus .py>.npy`` as the material-index image,
        scatters ``pdensity`` marker quadruples per cell, appends the
        explicit moving points and (optionally) a tracer grid, then maps
        every marker to the image pixel underneath it.

        Returns ``(mxx, myy, values, moving_points, markers_index_list)``.
        """
        image = np.load(f'{fname[:-3]}.npy')
        image_i, image_j = image.shape
        j_res = self['j_res']
        i_res = self['i_res']
        marker_density = self['pdensity']

        if self['seed'] is not None:
            print('seed')
            np.random.seed(self['seed'])

        # Scatter markers: for each cell, marker_density sets of four
        # points, one in each quadrant of the cell.
        mxx = []
        myy = []
        for x in range(j_res-1):
            for y in range(i_res-1):
                for _ in range(marker_density):
                    mxx.append(x+np.random.uniform(0, .5, 1))
                    myy.append(y+np.random.uniform(0, .5, 1))
                    mxx.append(x+np.random.uniform(0, .5, 1))
                    myy.append(y+np.random.uniform(.5, 1, 1))
                    mxx.append(x+np.random.uniform(.5, 1, 1))
                    myy.append(y+np.random.uniform(.5, 1, 1))
                    mxx.append(x+np.random.uniform(.5, 1, 1))
                    myy.append(y+np.random.uniform(0, .5, 1))

        # Moving cells: image-space coordinates with prescribed velocities.
        moving_cells_index_list = []
        moving_cells_coordinates_list = [(xy) for xy, VxVy in moving_cells]
        print(moving_cells_coordinates_list)
        moving_x = np.asarray([x for (x, y), VxVy in moving_cells])
        moving_y = np.asarray([y for (x, y), VxVy in moving_cells])
        # Convert image coordinates to grid coordinates.
        moving_j = (moving_x*(j_res-1)/image_j).astype(int)
        moving_i = (moving_y*(i_res-1)/image_i).astype(int)

        moving_points = []
        for ind, (j, i) in enumerate(zip(moving_j, moving_i)):
            _, (VxVy) = moving_cells[ind]
            mxx.append(np.asarray([j]))
            myy.append(np.asarray([i]))
            moving_points.append((len(mxx)-1, VxVy))

        markers_index_list = []
        if self['markers_grid'] != (0, 0):
            markers_index_list = self.create_grid_of_points(
                mxx, myy, self['markers_grid'])

        mxx = np.asarray(mxx)
        myy = np.asarray(myy)

        # TODO: Refactor following block to be inside previous cascade of for loops
        # Sample the material id under every marker.
        mj = (mxx*image_j/(j_res-1)).astype(int)
        mi = (myy*image_i/(i_res-1)).astype(int)

        values = np.zeros(np.shape(mxx))
        for idx in range(len(mxx)):
            j, i = mj[idx], mi[idx]
            values[idx] = image[i, j]
            if (j, i) in moving_cells_coordinates_list:
                idx_ = moving_cells_coordinates_list.index((j, i))
                _, (Vx, Vy) = moving_cells[idx_]
                moving_cells_index_list.append((idx, Vx, Vy))

        if moving_cells_index_list:
            # NOTE(review): resamples 5 (possibly duplicate) entries from the
            # collected moving-cell indexes — presumably intentional; confirm.
            moving_cells_index_list = [random.choice(moving_cells_index_list)
                                       for _ in range(5)]

        values = values.astype(int)
        return mxx, myy, values, moving_points, markers_index_list
| mkondratyev85/pgm | problem.py | Python | mit | 6,816 |
# Blender add-on registration metadata.
bl_info = {
    "name": "Cars Generator",
    "category": "Object",
    "author": "Cottet & Hurteau"
}
import bpy, os, math
import random
import mathutils
from mathutils import Vector
from math import pi
from bpy.props import FloatVectorProperty, FloatProperty
class CarsGenerator(bpy.types.Operator):
    """It generates cars !""" # blender will use this as a tooltip for menu items and buttons.
    bl_idname = "object.move_x" # unique identifier for buttons and menu items to reference.
    bl_label = "CarsGenerator" # display name in the interface.
    bl_options = {'REGISTER', 'UNDO'} # enable undo for the operator.
    # Minimum car size (x, y, z) in metres, exposed in the operator panel.
    longueurMin = FloatVectorProperty(
            name="Taille minimal(m)",
            default=(1.00, 2.00, 1.00),
            subtype='XYZ',
            description="move direction"
        )
    # Maximum car size (x, y, z) in metres.
    longueurMax = FloatVectorProperty(
            name="Taille maximal(m)",
            default=(2.50, 9.00, 3.00),
            subtype='XYZ',
            description="move direction"
        )
    my_bool = bpy.props.BoolProperty(name="Toggle Option")
    def execute(self, context): # execute() is called by blender when running the operator.
        # Delegate to voiture() with the panel's min/max dimensions.
        print('lMin: ',self.longueurMin[0])
        voiture(dimensionMax=(self.longueurMax[0],self.longueurMax[1],self.longueurMax[2]),
                dimensionMin=(self.longueurMin[0],self.longueurMin[1],self.longueurMin[2]))
        return {'FINISHED'} # this lets blender know the operator finished successfully.
def register():
    # Called by Blender when the add-on is enabled.
    bpy.utils.register_class(CarsGenerator)
def unregister():
    # Called by Blender when the add-on is disabled.
    bpy.utils.unregister_class(CarsGenerator)
def min(a,b):
    """Return the smaller of *a* and *b* (ties go to *b*).

    Note: intentionally shadows the builtin ``min`` within this module.
    """
    return a if a < b else b
def max(a,b):
    """Return the larger of *a* and *b* (ties go to *b*).

    Note: intentionally shadows the builtin ``max`` within this module.
    """
    return a if a > b else b
def voiture(
        origin=(0,0,0),
        dimensionMax=(2.5,9,3),
        dimensionMin=(1,2,1)
        ):
    """Generate a random blocky car mesh at *origin*.

    Overall size (x = width, y = length, z = height, metres) is drawn
    uniformly between dimensionMin and dimensionMax; chassis, front, back,
    roof and wheel dimensions are then derived randomly from it.
    """
    def createMeshFromOperator(verts, faces):
        # Create an empty mesh object at origin and fill it with the
        # given vertices/faces via from_pydata.
        bpy.ops.object.add(
            type='MESH',
            enter_editmode=False,
            location=origin)
        ob = bpy.context.object
        #ob.name = name
        #ob.show_name = True
        me = ob.data
        #me.name = name+'Mesh'
        # Create mesh from given verts, faces.
        me.from_pydata(verts, [], faces)
        # Update mesh with new data
        me.update()
        # Set object mode
        bpy.ops.object.mode_set(mode='OBJECT')
        return ob
    posx=origin[0]
    posy=origin[1]
    posz=origin[2]
    xMax=dimensionMax[0]
    yMax=dimensionMax[1]
    zMax=dimensionMax[2]
    xMin=dimensionMin[0]
    yMin=dimensionMin[1]
    zMin=dimensionMin[2]
    # Overall car dimensions, drawn at centimetre resolution.
    x=random.randrange(int(xMin*100), int(xMax*100))/100.00
    y=random.randrange(int(max(yMin, x)*100), int(yMax*100))/100.00
    z=random.randrange(int(zMin*100), int(zMax*100))/100.00
    print('dimensions:',x,' ',y,' ',z)
    # Ground clearance; everything above is shifted up by this amount.
    zChassis=random.randrange(1, int(min(40,20*z)))/100.00
    print(zChassis)
    posz=posz+zChassis
    # Main body (struct), hood (front), trunk (back) and roof dimensions.
    xStruct=x
    yStruct=random.randrange(int(0.25*y*100), int((0.75*(y-zChassis))*100))/100.00
    zStruct=random.randrange(int(0.25*z*100), int(0.75*z*100))/100.00
    xFront=random.randrange(int(50*xStruct), int(x*100))/100.00
    yFront=random.randrange(0, int((y-yStruct)*100))/100.00
    zFront=random.randrange(0, int(zStruct*100))/100.00
    xBack=random.randrange(int(50*xStruct), int(x*100))/100.00
    yBack=y-yStruct-yFront
    zBack=random.randrange(0, int(z*100))/100.00
    xRoof=random.randrange(int((xStruct/2)*100), int(xStruct*100))/100.00
    yRoof=random.randrange(50, int(yStruct*100))/100.00
    zRoof=z-zStruct-zChassis
    # Wheel geometry and axle positions (front axle at 3/4 of the length).
    wheelSize=random.randrange(int(max(30,zChassis*1.25)), 100)/100
    wheelWidth=random.randrange(85, 400, 5)/1000
    posXWheel=(random.randrange(int(((xStruct/2)-wheelWidth/2)*100), int((xStruct/2+wheelWidth/2)*100))/100)
    print('Z')
    print(posz)
    posZWheel=(wheelSize/2)+posz-zChassis
    posYFrontWheel= (0.75*y)+posy-(y/2)
    posYBackWheel= 0.25*y+posy-y/2
    print(wheelWidth)
    def createStruct():
        # Main body: a scaled cube centred between front and back.
        pos=(origin[0],origin[1]+(yBack-yFront)/2,origin[2]+zStruct/2+zChassis)
        bpy.ops.mesh.primitive_cube_add(radius=0.5, location=pos)
        bpy.ops.transform.resize(value=(xStruct,yStruct,zStruct))
        #Right side
        # NOTE(review): the verts/faces below are malformed (pos is a tuple,
        # faces is a flat tuple) but unused — the mesh call is commented out.
        (mx,my,mz) = (xStruct/2, yStruct, 0.965926)
        verts = (
            (pos[0]+xStruct/2, pos,-1),
            )
        faces = (
            (1,2,3)
            )
        #createMeshFromOperator(verts, faces)
    def createWheels():
        # Four cylinders rotated about Y to lie across the car's X axis.
        pos=(origin[0], origin[1]+yStruct/2+yFront/2, origin[2]+zFront/2)
        #FRONT
        #Right
        bpy.ops.mesh.primitive_cylinder_add(radius=wheelSize/2, depth=wheelWidth,location=(posXWheel,posYFrontWheel,posZWheel))
        bpy.ops.transform.rotate(value=pi/2, axis=(0,1,0), constraint_axis=(False, True, False))
        #Left
        bpy.ops.mesh.primitive_cylinder_add(radius=wheelSize/2, depth=wheelWidth,location=(-posXWheel,posYFrontWheel,posZWheel))
        bpy.ops.transform.rotate(value=pi/2, axis=(0,1,0), constraint_axis=(False, True, False))
        #BACK
        #Right
        bpy.ops.mesh.primitive_cylinder_add(radius=wheelSize/2, depth=wheelWidth,location=(posXWheel,posYBackWheel,posZWheel))
        bpy.ops.transform.rotate(value=pi/2, axis=(0,1,0), constraint_axis=(False, True, False))
        #Left
        bpy.ops.mesh.primitive_cylinder_add(radius=wheelSize/2, depth=wheelWidth,location=(-posXWheel,posYBackWheel,posZWheel))
        bpy.ops.transform.rotate(value=pi/2, axis=(0,1,0), constraint_axis=(False, True, False))
        #bpy.ops.transform.resize(value=(xFront,yFront,zFront))
    def createFront():
        # Hood: a hand-built trapezoidal mesh sloping down from the body.
        pos=(posx, origin[1]+yStruct/2+(yBack-yFront)/2, origin[2]+zFront/2+zChassis)
        #bpy.ops.mesh.primitive_cube_add(radius=0.5, location=(pos[0],pos[1]+yFront/2,pos[2]))
        #bpy.ops.transform.resize(value=(xFront,yFront,zFront))
        (mx,my,mz) = (xStruct/2, yStruct, 0.965926)
        verts = (
            #face du bas
            (pos[0]-xStruct/2, pos[1], posz),
            (pos[0]-xFront/2,pos[1],posz),
            (pos[0]-xFront/2,pos[1]+yFront,posz),
            (pos[0]+xStruct/2, pos[1], posz),
            (pos[0]+xFront/2,pos[1],posz),
            (pos[0]+xFront/2,pos[1]+yFront,posz),
            #face haute
            (pos[0]-xStruct/2, pos[1], posz+zStruct),
            (pos[0]+xStruct/2, pos[1], posz+zStruct),
            (pos[0]+xFront/2,pos[1]+yFront,posz+zFront),
            (pos[0]-xFront/2,pos[1]+yFront,posz+zFront),
            )
        faces = (
            (0,1,2),#bas gauche
            (3,4,5),#bas droit
            (6,7,8,9),#haut
            (2,5,8,9),#devant
            (0,6,9,2),#gauche
            (3,7,8,5),
            )
        #verts = ((x,x,-1), (x,-x,-1), (-x,-x,-1), (-x,x,-1), (0,0,1))
        #faces = ((1,0,4), (4,2,1), (4,3,2), (4,0,3), (0,1,2,3))
        createMeshFromOperator(verts, faces)
    def createBack():
        # Trunk: a scaled cube behind the main body.
        posYBack = origin[1]-((yStruct/2)+(yBack/2))+(yBack-yFront)/2
        pos=(origin[0], posYBack, origin[2]+zBack/2+zChassis)
        bpy.ops.mesh.primitive_cube_add(radius=0.5, location=(pos))
        bpy.ops.transform.resize(value=(xBack,yBack,zBack))
    def createRoof():
        # Roof: a scaled cube on top of the body.  The windshield mesh
        # below is defined but its creation call is commented out.
        pos=(posx, posy+(yBack-yFront)/2, posz+zStruct)
        bpy.ops.mesh.primitive_cube_add(radius=0.5, location=(pos[0],pos[1],pos[2]+zRoof/2))
        bpy.ops.transform.resize(value=(xRoof,yRoof,zRoof))
        verts = (
            #face du pare brise
            (pos[0]+xRoof/2, pos[1]+yRoof/2, pos[2]+zRoof),
            (pos[0]-xRoof/2, pos[1]+yRoof/2, pos[2]+zRoof),
            (pos[0]-xStruct/2, pos[1]+yStruct/2,pos[2]),
            (pos[0]+xStruct/2, pos[1]+yStruct/2, pos[2]),
            (pos[0]+xFront/2,pos[1],posz),
            (pos[0]+xFront/2,pos[1]+yFront,posz),
            #face haute
            (pos[0]-xStruct/2, pos[1], posz+zStruct),
            (pos[0]+xStruct/2, pos[1], posz+zStruct),
            (pos[0]+xFront/2,pos[1]+yFront,posz+zFront),
            (pos[0]-xFront/2,pos[1]+yFront,posz+zFront),
            )
        faces = (
            (0,1,2,3),#pare brise
            (3,4,5),#bas droit
            )
        #verts = ((x,x,-1), (x,-x,-1), (-x,-x,-1), (-x,x,-1), (0,0,1))
        #faces = ((1,0,4), (4,2,1), (4,3,2), (4,0,3), (0,1,2,3))
        #createMeshFromOperator(verts, faces)
    def test():
        # Experimental rounded side panels: strips of quads whose Y offset
        # follows an exponential falloff toward the car's edges.
        pos=(posx, posy+(yBack-yFront)/2, posz+zStruct)
        print('pos',yBack-yFront)
        yhaut=pos[1]+yRoof/2
        ybas=pos[1]+yStruct/2
        zhaut=pos[2]+zRoof
        zbas=pos[2]
        yhautroof=random.randrange(0,30)/100
        verts=[]
        faces=[]
        print('xRoof', xRoof)
        precision=10
        decalageHaut=math.exp((xRoof/2)-(int((xRoof/2)*precision)-int((xRoof/2)*precision)))/10
        decalageBas=math.exp((xStruct/2)-(int((xStruct/2)*precision)-int((xStruct/2)*precision)))/10
        for i in range(0, int((xRoof/2)*precision)):
            print('i',i)
            decalageYHaut=decalageHaut-math.exp((xRoof/2)-(int((xRoof/2)*precision)-i/precision*10))/10#math.exp((xRoof/2)*precision-i)/100
            decalageYBas=decalageBas-math.exp((xStruct/2)-(int((xStruct/2)*precision)-i))/10#math.exp((xRoof/2)*precision-i)/100
            verts.append((pos[0]+i/precision,yhaut+decalageYHaut,zhaut))
            verts.append((pos[0]+i/precision,ybas+decalageYBas,zbas))
            verts.append((pos[0]-i/precision,yhaut+decalageYHaut,zhaut))
            verts.append((pos[0]-i/precision,ybas+decalageYBas,zbas))
            lv=len(verts)
            if (i>0):
                faces.append((lv-4,lv-3,lv-7,lv-8))
                faces.append((lv-6,lv-5,lv-1,lv-2))
            verts.append((pos[0]+i/precision,yhaut+decalageYHaut,zhaut))
            verts.append((pos[0]+i/precision,ybas+decalageYBas,zbas))
            verts.append((pos[0]-i/precision,yhaut+decalageYHaut,zhaut))
            verts.append((pos[0]-i/precision,ybas+decalageYBas,zbas))
        for i in range(int((xRoof/2)*precision), int((xStruct/2)*precision)):
            print('ii',i)
            decalageYBas=decalageBas-math.exp((xStruct/2)-(int((xStruct/2)*precision)-i))/10#math.exp((xRoof/2)*precision-i)/100
            verts.append((pos[0]+i/precision,yhaut+decalageYHaut,zhaut))
            verts.append((pos[0]+i/precision,ybas+decalageYBas,zbas))
            verts.append((pos[0]-i/precision,yhaut+decalageYHaut,zhaut))
            verts.append((pos[0]-i/precision,ybas+decalageYBas,zbas))
            lv=len(verts)
            if (i>0):
                faces.append((lv-4,lv-3,lv-7,lv-8))
                faces.append((lv-6,lv-5,lv-1,lv-2))
        createMeshFromOperator(verts, faces)
    # The bare string below and the triple-quoted block are leftover
    # disabled lamp-creation code; kept as-is (they are no-op statements).
    'lumiere'
    '''scene = bpy.context.scene
    lamp_data = bpy.data.lamps.new(name="New Lamp", type='POINT')
    lamp_object = bpy.data.objects.new(name="New Lamp", object_data=lamp_data)
    scene.objects.link(lamp_object)
    lamp_object.location = (x+5.0, y+5.0, z+5.0)
    lamp_object.select = True
    scene.objects.active = lamp_object'''
    # Assemble the car.
    createStruct()
    createFront()
    createBack()
    createRoof()
    test()
    createWheels()
def add_object_button(self, context):
    """Menu draw callback: adds an entry that invokes the add-object operator."""
    layout = self.layout
    layout.operator(OBJECT_OT_add_object.bl_idname,
                    text="Add Object",
                    icon="PLUGIN")
# This allows you to run the script directly from blenders text editor
# to test the addon without having to install it.
if __name__ == "__main__":
register() | MarionLeHerisson/CarsGenerator | CarsGeneratorAddOn.py | Python | gpl-3.0 | 11,781 |
#!/usr/bin/python
#
# Copyright (C) Citrix Systems Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; version 2.1 only.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Talk to the multipathd cli
from __future__ import print_function
import util
import re
import exceptions
import time
class MPathCLIFail(Exception):
    """Raised when the multipathd CLI returns an unexpected response.

    Bug fixed: the original ``__str__`` printed its message and returned
    ``None``; ``str()`` on the exception therefore raised ``TypeError``.
    ``__str__`` must *return* a string.  (``exceptions.Exception`` is the
    same object as the builtin ``Exception`` in Python 2, so the base
    class change is behavior-preserving and also Python 3 compatible.)
    """

    def __str__(self):
        return "MPath CLI failed"
mpathcmd = ["/usr/sbin/multipathd", "-k"]
def mpexec(cmd):
    """Run one command through the multipathd interactive CLI.

    Raises MPathCLIFail unless the daemon's reply matches one of the two
    known "ok" response formats.
    """
    util.SMlog("mpath cmd: %s" % cmd)
    rc, stdout, stderr = util.doexec(mpathcmd, cmd)
    ok_replies = (
        "multipathd> ok\nmultipathd> ",
        "multipathd> " + cmd + "\nok\nmultipathd> ",
    )
    if stdout not in ok_replies:
        raise MPathCLIFail
def add_path(path):
    # Ask multipathd to start managing the given device path.
    mpexec("add path %s" % path)
def remove_path(path):
    # Ask multipathd to stop managing the given device path.
    mpexec("remove path %s" % path)
def remove_map(m):
    # Remove the named multipath map from the daemon.
    mpexec("remove map %s" % m)
def resize_map(m):
    # Ask the daemon to re-read the size of the named map.
    mpexec("resize map %s" % m)
def reconfigure():
    # Force multipathd to re-read its configuration.
    mpexec("reconfigure")
# Matches a path line of "show ... topology" output: an H:C:T:L address
# followed by a lowercase word; group(1) is that word (see list_paths).
regex = re.compile("[0-9]+:[0-9]+:[0-9]+:[0-9]+\s*([a-z]*)")
# Strips the leading "multipathd>" prompt (plus an optional "name:"
# prefix) from the first output line; group(2) is the remaining text.
regex2 = re.compile("multipathd>(\s*[^:]*:)?\s+(.*)")
# Probe for "help" output: a responsive daemon lists "switchgroup".
regex3 = re.compile("switchgroup")
def is_working():
    """Return True if multipathd responds to its interactive CLI.

    Probes the daemon with the "help" command and checks that the output
    lists the "switchgroup" command.
    """
    cmd = "help"
    try:
        (rc, stdout, stderr) = util.doexec(mpathcmd, cmd)
        # A live daemon's "help" output always mentions "switchgroup".
        return regex3.search(stdout) is not None
    except Exception:
        # Was a bare ``except:`` which also swallowed KeyboardInterrupt
        # and SystemExit; any failure to exec means the daemon is down.
        return False
def do_get_topology(cmd):
    """Run a multipathd "show ... topology" command and return its output.

    Returns the output split into lines, with the "multipathd>" prompt
    stripped from the first line and the trailing empty line dropped.
    """
    util.SMlog("mpath cmd: %s" % cmd)
    (rc, stdout, stderr) = util.doexec(mpathcmd, cmd)
    util.SMlog("mpath output: %s" % stdout)
    lines = stdout.split('\n')[:-1]
    if len(lines):
        # The first line still carries the interactive prompt; keep only
        # the command output that follows it.
        m = regex2.search(lines[0])
        lines[0] = str(m.group(2))
    return lines
def get_topology(scsi_id):
    # Topology of a single multipath map, identified by its SCSI id.
    cmd = "show map %s topology" % scsi_id
    return do_get_topology(cmd)
def get_all_topologies():
    # Topology of every map known to the daemon.
    cmd = "show topology"
    return do_get_topology(cmd)
def list_paths(scsi_id):
    """Return the per-path words extracted from the map's topology output."""
    searches = (regex.search(line) for line in get_topology(scsi_id))
    return [found.group(1) for found in searches if found]
def list_maps():
    """Return the names of all multipath maps known to the daemon."""
    cmd = "list maps"
    util.SMlog("mpath cmd: %s" % cmd)
    (rc, stdout, stderr) = util.doexec(mpathcmd, cmd)
    util.SMlog("mpath output: %s" % stdout)
    # Skip the prompt/header lines and the trailing empty line, keeping
    # only the first (name) column of each remaining row.
    return [row.split(' ')[0] for row in stdout.split('\n')[2:-1]]
def ensure_map_gone(scsi_id):
    """Block until the map for scsi_id reports no remaining paths.

    Polls once a second.  NOTE(review): loops forever if the map never
    disappears -- presumably callers rely on an outer timeout; confirm.
    """
    while True:
        paths = list_paths(scsi_id)
        util.SMlog("list_paths succeeded")
        if len(paths) == 0:
            return
        time.sleep(1)
| xapi-project/sm | drivers/mpath_cli.py | Python | lgpl-2.1 | 3,107 |
#!/usr/bin/python
import os
import sys
extra_opts = {'test_suite': 'tests'}
extra_deps = []
extra_test_deps = []
if sys.version_info[:2] == (2, 6):
extra_deps.append('argparse')
extra_deps.append('simplejson')
extra_test_deps.append('unittest2')
extra_opts['test_suite'] = 'unittest2.collector'
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
# Use README.rst as the long description when present; some install
# contexts (e.g. certain sdists) may not ship it, so fail quietly.
try:
    with open('README.rst', 'r') as fd:
        extra_opts['long_description'] = fd.read()
except IOError:
    pass # Install without README.rst
setup(
    name='mongo-orchestration',
    version='0.4.dev0',
    author='MongoDB, Inc.',
    author_email='[email protected]',
    description='Restful service for managing MongoDB servers',
    keywords=['mongo-orchestration', 'mongodb', 'mongo', 'rest', 'testing'],
    license="http://www.apache.org/licenses/LICENSE-2.0.html",
    platforms=['any'],
    url='https://github.com/10gen/mongo-orchestration',
    # Runtime requirements, plus any version-specific backports
    # collected in extra_deps above.
    install_requires=['pymongo>=3.0.2',
                      'bottle>=0.12.7',
                      'CherryPy>=3.5.0'] + extra_deps,
    tests_require=['coverage>=3.5'] + extra_test_deps,
    packages=find_packages(exclude=('tests',)),
    # Ship the bundled JSON topology presets and the test client cert.
    package_data={
        'mongo_orchestration': [
            os.path.join('configurations', config_dir, '*.json')
            for config_dir in ('servers', 'replica_sets', 'sharded_clusters')
        ] + [os.path.join('lib', 'client.pem')]
    },
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: MacOS :: MacOS X",
        "Operating System :: Microsoft :: Windows",
        "Operating System :: POSIX",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: Implementation :: CPython"
    ],
    # Installs the `mongo-orchestration` command-line entry point.
    entry_points={
        'console_scripts': [
            'mongo-orchestration = mongo_orchestration.server:main'
        ]
    },
    **extra_opts
)
| agilemobiledev/mongo-orchestration | setup.py | Python | apache-2.0 | 2,438 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import random
def randomize():
    """Return a pseudo-random integer in the closed range [-250, 250]."""
    low, high = -250, 250
    return random.randint(low, high)
class Pressure(object):
    """Simulated pressure sensor whose readings are random integers."""

    def __init__(self):
        # No state to initialise; readings are generated on demand.
        pass

    def get_value(self):
        """Return one simulated pressure reading."""
        return randomize()
| robinsonsantos/sensorsimulator | utils/pressure.py | Python | gpl-2.0 | 231 |
#
# Convenience module for fetching files from a network source
#
# Copyright 2006-2007 Red Hat, Inc.
# Daniel P. Berrange <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA.
import logging
import os
import stat
import subprocess
import urlgrabber.grabber as grabber
import urllib2
import urlparse
import ftplib
import tempfile
# This is a generic base class for fetching/extracting files from
# a media source, such as CD ISO, NFS server, or HTTP/FTP server
class ImageFetcher:
    """Base class for fetching/extracting files from an install source.

    Subclasses implement hasFile() (and optionally prepareLocation() /
    cleanupLocation()) for a specific media type such as an HTTP/FTP
    server, NFS export, mounted CD ISO, or local directory.
    """
    def __init__(self, location, scratchdir):
        # location: the user-supplied source URI/path.
        # scratchdir: directory where fetched files are saved.
        self.location = location
        self.scratchdir = scratchdir
    def _make_path(self, filename):
        """Join `filename` onto the source root (srcdir if set, else location)."""
        # NOTE(review): subclasses may leave self.srcdir set to None
        # (e.g. LocalImageFetcher(srcdir=None)); hasattr() is then still
        # true and `path` would be None here.  Presumably
        # prepareLocation() always sets a real srcdir first -- confirm.
        if hasattr(self, "srcdir"):
            path = getattr(self, "srcdir")
        else:
            path = self.location
        if filename:
            if not path.endswith("/"):
                path += "/"
            path += filename
        return path
    def saveTemp(self, fileobj, prefix):
        """Copy `fileobj` into a new temp file under scratchdir; return its path."""
        if not os.path.exists(self.scratchdir):
            os.makedirs(self.scratchdir, 0750)
        (fd, fn) = tempfile.mkstemp(prefix="virtinst-" + prefix,
                                    dir=self.scratchdir)
        block_size = 16384
        try:
            # Stream in chunks so large images are never fully in memory.
            while 1:
                buff = fileobj.read(block_size)
                if not buff:
                    break
                os.write(fd, buff)
        finally:
            os.close(fd)
        return fn
    def prepareLocation(self):
        # Hook: subclasses mount/connect here.  Default: nothing to do.
        return True
    def cleanupLocation(self):
        # Hook: subclasses unmount/disconnect here.
        pass
    def acquireFile(self, filename, progresscb):
        """Download `filename` to a temp file and return the temp path.

        Raises ValueError if the file cannot be retrieved.
        """
        # URLGrabber works for all network and local cases
        f = None
        try:
            path = self._make_path(filename)
            base = os.path.basename(filename)
            logging.debug("Fetching URI: %s", path)
            try:
                f = grabber.urlopen(path,
                                    progress_obj=progresscb,
                                    text=_("Retrieving file %s...") % base)
            except Exception, e:
                raise ValueError(_("Couldn't acquire file %s: %s") %
                                 (path, str(e)))
            tmpname = self.saveTemp(f, prefix=base + ".")
            logging.debug("Saved file to " + tmpname)
            return tmpname
        finally:
            if f:
                f.close()
    def hasFile(self, src):
        # Abstract: return True if `src` exists at this location.
        raise NotImplementedError("Must be implemented in subclass")
# Base class for downloading from FTP / HTTP
class URIImageFetcher(ImageFetcher):
    """Common base for network (HTTP/FTP) fetchers."""
    def hasFile(self, filename):
        raise NotImplementedError
    def prepareLocation(self):
        # Validate the URL up front by probing the location root.
        if not self.hasFile(""):
            raise ValueError(_("Opening URL %s failed.") %
                             (self.location))
class HTTPImageFetcher(URIImageFetcher):
def hasFile(self, filename):
try:
path = self._make_path(filename)
request = urllib2.Request(path)
request.get_method = lambda: "HEAD"
urllib2.urlopen(request)
except Exception, e:
logging.debug("HTTP hasFile: didn't find %s: %s", path, str(e))
return False
return True
class FTPImageFetcher(URIImageFetcher):
    """Checks file existence on an FTP server via an anonymous login."""
    def __init__(self, location, scratchdir):
        URIImageFetcher.__init__(self, location, scratchdir)
        # ftplib.FTP connection, opened lazily in prepareLocation().
        self.ftp = None
    def prepareLocation(self):
        """Connect and log in (anonymously) to the FTP server."""
        url = urlparse.urlparse(self._make_path(""))
        self.ftp = ftplib.FTP(url[1])  # url[1] is the host component
        self.ftp.login()
    def hasFile(self, filename):
        """Return True if `filename` exists as a file or a directory."""
        path = self._make_path(filename)
        url = urlparse.urlparse(path)
        try:
            try:
                # If it's a file: SIZE succeeds for regular files.
                self.ftp.size(url[2])
            except ftplib.all_errors:
                # If it's a dir: fall back to changing into it.
                self.ftp.cwd(url[2])
        except ftplib.all_errors, e:
            logging.debug("FTP hasFile: couldn't access %s: %s",
                          path, str(e))
            return False
        return True
class LocalImageFetcher(ImageFetcher):
    """Fetches files from a local directory tree."""
    def __init__(self, location, scratchdir, srcdir=None):
        ImageFetcher.__init__(self, location, scratchdir)
        # When set, _make_path() resolves files relative to srcdir
        # rather than location (see ImageFetcher._make_path).
        self.srcdir = srcdir
    def hasFile(self, filename):
        """Return True if `filename` exists on the local filesystem."""
        src = self._make_path(filename)
        if os.path.exists(src):
            return True
        else:
            logging.debug("local hasFile: Couldn't find %s", src)
            return False
# This is a fetcher capable of extracting files from a NFS server
# or loopback mounted file, or local CDROM device
class MountedImageFetcher(LocalImageFetcher):
    """Mounts an NFS export or a disk/ISO image and fetches from the mount.

    prepareLocation() mounts self.location onto a temp dir under
    scratchdir; cleanupLocation() unmounts it and removes the dir.
    """
    def prepareLocation(self):
        """Mount the location read-only; raises ValueError if mount fails."""
        cmd = None
        self.srcdir = tempfile.mkdtemp(prefix="virtinstmnt.",
                                       dir=self.scratchdir)
        mountcmd = "/bin/mount"
        if os.uname()[0] == "SunOS":
            mountcmd = "/usr/sbin/mount"
        logging.debug("Preparing mount at " + self.srcdir)
        if self.location.startswith("nfs:"):
            # NFS source: strip the "nfs:" prefix and mount directly.
            cmd = [mountcmd, "-o", "ro", self.location[4:], self.srcdir]
        else:
            if stat.S_ISBLK(os.stat(self.location)[stat.ST_MODE]):
                # Block device (e.g. a physical CDROM drive).
                mountopt = "ro"
            else:
                # Regular file (e.g. an ISO image): loopback mount.
                mountopt = "ro,loop"
            if os.uname()[0] == 'SunOS':
                cmd = [mountcmd, "-F", "hsfs", "-o",
                       mountopt, self.location, self.srcdir]
            else:
                cmd = [mountcmd, "-o", mountopt, self.location, self.srcdir]
        ret = subprocess.call(cmd)
        if ret != 0:
            self.cleanupLocation()
            raise ValueError(_("Mounting location '%s' failed") %
                             (self.location))
        return True
    def cleanupLocation(self):
        """Unmount the scratch mountpoint and remove the directory."""
        logging.debug("Cleaning up mount at " + self.srcdir)
        if os.uname()[0] == "SunOS":
            cmd = ["/usr/sbin/umount", self.srcdir]
        else:
            cmd = ["/bin/umount", self.srcdir]
        subprocess.call(cmd)
        try:
            os.rmdir(self.srcdir)
        except:
            # Best effort: leave the directory behind if removal fails.
            pass
class DirectImageFetcher(LocalImageFetcher):
    """Fetches straight from the location path; nothing to mount."""
    def prepareLocation(self):
        # The location is already a browsable directory.
        self.srcdir = self.location
| cardoe/virt-manager | virtinst/ImageFetcher.py | Python | gpl-2.0 | 6,859 |
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2021, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import torch.nn as nn
class SyncBatchNorm:
    """
    This mixin converts the BatchNorm modules to SyncBatchNorm modules when utilizing
    distributed training on GPUs.
    Example config:
        config=dict(
            use_sync_batchnorm=True
        )
    """
    def create_model(self, config, device):
        """Create the model via super(), then convert its BatchNorm layers
        to SyncBatchNorm when `use_sync_batchnorm` (default True) and
        `distributed` are set and the model is on a CUDA device."""
        model = super().create_model(config, device)
        use_sync_batchnorm = config.get("use_sync_batchnorm", True)
        distributed = config.get("distributed", False)
        # Checking the first parameter's device covers the whole model,
        # assuming the model is not split across devices -- TODO confirm.
        if use_sync_batchnorm and distributed and next(model.parameters()).is_cuda:
            # Convert batch norm to sync batch norms
            model = nn.modules.SyncBatchNorm.convert_sync_batchnorm(module=model)
        return model
    @classmethod
    def get_execution_order(cls):
        # Record this mixin's contribution in the experiment's execution
        # order, bracketing whatever the superclasses report.
        eo = super().get_execution_order()
        eo["setup_experiment"].insert(0, "Sync Batchnorm begin")
        eo["setup_experiment"].append("Sync Batchnorm end")
        return eo
| mrcslws/nupic.research | src/nupic/research/frameworks/vernon/mixins/sync_batchnorm.py | Python | agpl-3.0 | 1,943 |
from bettercache.utils import CachingMixin, strip_wsgi
from bettercache.proxy import proxy
import logging
logger = logging.getLogger(__name__)
class BetterView(CachingMixin):
    """Accepts any path and attempts to serve it from the cache. If it cannot
    find the response in the cache, it will use ``bettercache.proxy`` to fulfill
    it, and cache the response.
    """
    def get(self, request):
        """Serve `request` from the cache when possible, else proxy it.

        Expired-but-present entries are still served, with a background
        task queued to refresh them.  The X-Bettercache-Proxy header
        records whether the response came from the proxy ('true') or
        the cache ('false').
        """
        response = None
        # NOTE(review): this bypass check replicates part of the irule
        # (load-balancer rule) -- keep the two in sync.
        if not self.should_bypass_cache(request):
            response, expired = self.get_cache(request)
            # Stale hit: serve the cached copy anyway, but queue a task
            # to re-fetch and re-cache the page in the background.
            if expired:
                logger.info("EXPIRED sending task for %s" % request.build_absolute_uri())
                self.send_task(request, response)
            elif response:
                logger.debug("not sending task for %s" % request.build_absolute_uri())
        else:
            logger.info("MISS for: %s" % request.build_absolute_uri())
        # Cache miss (or bypass): fetch the page from the origin server.
        if response is None:
            logger.debug('PROXY from: %s' % request.build_absolute_uri())
            response = proxy(request)
            response['X-Bettercache-Proxy'] = 'true'
        else:
            response['X-Bettercache-Proxy'] = 'false'
            logger.info('HIT for: %s' % request.build_absolute_uri())
        return response
# TODO: properly implement a class based view
BV = BetterView()  # module-level singleton instance
cache_view = BV.get  # bound method exposed as a function-style view
| ironfroggy/django-better-cache | bettercache/views.py | Python | mit | 1,519 |
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.middleware.csrf import CsrfViewMiddleware
from django.middleware.csrf import _sanitize_token as sanitize_csrf_token
if DJANGO_VERSION < (1, 10):
from django.middleware.csrf import _get_new_csrf_key as generate_csrf_token
else:
from django.middleware.csrf import _get_new_csrf_token as generate_csrf_token
from yepes.conf import settings
class CsrfTokenMiddleware(CsrfViewMiddleware):
    """
    Middleware that ensures that all views have a correct ``csrf_token``
    available to ``RequestContext``, but without the CSRF protection that
    ``CsrfViewMiddleware`` enforces.
    Very useful when you need to render forms targeting a view with CSRF
    protection.
    """
    def process_view(self, request, callback, callback_args, callback_kwargs):
        """Ensure request.META['CSRF_COOKIE'] holds a usable token.

        Unlike the parent class, this never rejects the request -- it
        always falls through to ``self._accept(request)``.
        """
        # A CSRF middleware earlier in the stack already did the work.
        if getattr(request, 'csrf_processing_done', False):
            return None
        try:
            cookie_token = request.COOKIES[settings.CSRF_COOKIE_NAME]
        except KeyError:
            # Generate token and store it in the request, so it's
            # available to the view.
            request.META['CSRF_COOKIE'] = generate_csrf_token()
        else:
            csrf_token = sanitize_csrf_token(cookie_token)
            if csrf_token != cookie_token:
                # Cookie token needed to be replaced; the cookie
                # needs to be reset.
                request.csrf_cookie_needs_reset = True
            # Use same token next time.
            request.META['CSRF_COOKIE'] = csrf_token
        return self._accept(request)
| samuelmaudo/yepes | yepes/middleware/csrf_token.py | Python | bsd-3-clause | 1,653 |
import logging
from urllib import urlencode
from requests.exceptions import ConnectionError, Timeout
from django.conf import settings
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from waffle import switch_is_active
from courses.views import CourseTemplateWithNavView
from learner_analytics_api.v0.clients import LearnerAPIClient
logger = logging.getLogger(__name__)
class LearnersView(CourseTemplateWithNavView):
    """Course roster page: lists learners with engagement analytics."""
    template_name = 'courses/learners.html'
    active_primary_nav_item = 'learners'
    page_title = _('Learners')
    # Analytics page identifier used for client-side tracking.
    page_name = {
        'scope': 'course',
        'lens': 'learners',
        'report': 'roster',
        'depth': ''
    }
    def get_context_data(self, **kwargs):
        """Add Learner-API URLs and prefetched API payloads to the context."""
        context = super(LearnersView, self).get_context_data(**kwargs)
        context['js_data']['course'].update({
            'learner_list_url': reverse('learner_analytics_api:v0:LearnerList'),
            'course_learner_metadata_url': reverse(
                'learner_analytics_api:v0:CourseMetadata',
                args=(self.course_id,)
            ),
            'learner_engagement_timeline_url': reverse(
                'learner_analytics_api:v0:EngagementTimeline',
                # Unfortunately, we need to pass a username to the `reverse`
                # function. This will get dynamically interpolated with the
                # actual users' usernames on the client side.
                kwargs={'username': 'temporary_username'}
            ),
        })
        # Try to prefetch API responses. If anything fails, the front-end will
        # retry the requests and gracefully fail.
        client = LearnerAPIClient()
        for data_name, request_function, error_message in [
            (
                'learner_list_json',
                lambda: client.learners.get(course_id=self.course_id).json(),
                'Failed to reach the Learner List endpoint',
            ),
            (
                'course_learner_metadata_json',
                lambda: client.course_learner_metadata(self.course_id).get().json(),
                'Failed to reach the Course Learner Metadata endpoint',
            )
        ]:
            try:
                context[data_name] = request_function()
            except (Timeout, ConnectionError, ValueError):
                # ValueError may be thrown by the call to .json()
                logger.exception(error_message)
                # On failure the error string itself is stored, so the
                # client can tell a failed prefetch from real data.
                context[data_name] = error_message
            context['js_data']['course'].update({
                data_name: context[data_name]
            })
        # Only show learner download button(s) if switch is enabled
        if switch_is_active('enable_learner_download'):
            list_download_url = reverse('learner_analytics_api:v0:LearnerListCSV')
            # Append the 'fields' parameter if configured
            list_fields = getattr(settings, 'LEARNER_API_LIST_DOWNLOAD_FIELDS', None)
            if list_fields is not None:
                list_download_url = '{}?{}'.format(list_download_url,
                                                   urlencode(dict(fields=list_fields)))
            context['js_data']['course'].update({
                'learner_list_download_url': list_download_url,
            })
        context['page_data'] = self.get_page_data(context)
        return context
| Stanford-Online/edx-analytics-dashboard | analytics_dashboard/courses/views/learners.py | Python | agpl-3.0 | 3,430 |
from pipeline.conf import settings
from pipeline.compressors import SubProcessCompressor
class UglifyJSCompressor(SubProcessCompressor):
    """Compresses JavaScript by piping it through the uglifyjs binary."""

    def compress_js(self, js):
        """Return `js` compressed by uglifyjs (with -nc and configured args)."""
        parts = [settings.PIPELINE_UGLIFYJS_BINARY, '-nc',
                 settings.PIPELINE_UGLIFYJS_ARGUMENTS]
        command = ' '.join(parts)
        if self.verbose:
            command += ' --verbose'
        return self.execute_command(command, js)
| GbalsaC/bitnamiP | venv/src/django-pipeline/pipeline/compressors/uglifyjs.py | Python | agpl-3.0 | 386 |
"""=============================
Subclassing ndarray in python
=============================
Introduction
------------
Subclassing ndarray is relatively simple, but it has some complications
compared to other Python objects. On this page we explain the machinery
that allows you to subclass ndarray, and the implications for
implementing a subclass.
ndarrays and object creation
============================
Subclassing ndarray is complicated by the fact that new instances of
ndarray classes can come about in three different ways. These are:
#. Explicit constructor call - as in ``MySubClass(params)``. This is
the usual route to Python instance creation.
#. View casting - casting an existing ndarray as a given subclass
#. New from template - creating a new instance from a template
instance. Examples include returning slices from a subclassed array,
creating return types from ufuncs, and copying arrays. See
:ref:`new-from-template` for more details
The last two are characteristics of ndarrays - in order to support
things like array slicing. The complications of subclassing ndarray are
due to the mechanisms numpy has to support these latter two routes of
instance creation.
.. _view-casting:
View casting
------------
*View casting* is the standard ndarray mechanism by which you take an
ndarray of any subclass, and return a view of the array as another
(specified) subclass:
>>> import numpy as np
>>> # create a completely useless ndarray subclass
>>> class C(np.ndarray): pass
>>> # create a standard ndarray
>>> arr = np.zeros((3,))
>>> # take a view of it, as our useless subclass
>>> c_arr = arr.view(C)
>>> type(c_arr)
<class 'C'>
.. _new-from-template:
Creating new from template
--------------------------
New instances of an ndarray subclass can also come about by a very
similar mechanism to :ref:`view-casting`, when numpy finds it needs to
create a new instance from a template instance. The most obvious place
this has to happen is when you are taking slices of subclassed arrays.
For example:
>>> v = c_arr[1:]
>>> type(v) # the view is of type 'C'
<class 'C'>
>>> v is c_arr # but it's a new instance
False
The slice is a *view* onto the original ``c_arr`` data. So, when we
take a view from the ndarray, we return a new ndarray, of the same
class, that points to the data in the original.
There are other points in the use of ndarrays where we need such views,
such as copying arrays (``c_arr.copy()``), creating ufunc output arrays
(see also :ref:`array-wrap`), and reducing methods (like
``c_arr.mean()``.
Relationship of view casting and new-from-template
--------------------------------------------------
These paths both use the same machinery. We make the distinction here,
because they result in different input to your methods. Specifically,
:ref:`view-casting` means you have created a new instance of your array
type from any potential subclass of ndarray. :ref:`new-from-template`
means you have created a new instance of your class from a pre-existing
instance, allowing you - for example - to copy across attributes that
are particular to your subclass.
Implications for subclassing
----------------------------
If we subclass ndarray, we need to deal not only with explicit
construction of our array type, but also :ref:`view-casting` or
:ref:`new-from-template`. NumPy has the machinery to do this, and this
machinery that makes subclassing slightly non-standard.
There are two aspects to the machinery that ndarray uses to support
views and new-from-template in subclasses.
The first is the use of the ``ndarray.__new__`` method for the main work
of object initialization, rather then the more usual ``__init__``
method. The second is the use of the ``__array_finalize__`` method to
allow subclasses to clean up after the creation of views and new
instances from templates.
A brief Python primer on ``__new__`` and ``__init__``
=====================================================
``__new__`` is a standard Python method, and, if present, is called
before ``__init__`` when we create a class instance. See the `python
__new__ documentation
<https://docs.python.org/reference/datamodel.html#object.__new__>`_ for more detail.
For example, consider the following Python code:
.. testcode::
class C(object):
def __new__(cls, *args):
print('Cls in __new__:', cls)
print('Args in __new__:', args)
# The `object` type __new__ method takes a single argument.
return object.__new__(cls)
def __init__(self, *args):
print('type(self) in __init__:', type(self))
print('Args in __init__:', args)
meaning that we get:
>>> c = C('hello')
Cls in __new__: <class 'C'>
Args in __new__: ('hello',)
type(self) in __init__: <class 'C'>
Args in __init__: ('hello',)
When we call ``C('hello')``, the ``__new__`` method gets its own class
as first argument, and the passed argument, which is the string
``'hello'``. After python calls ``__new__``, it usually (see below)
calls our ``__init__`` method, with the output of ``__new__`` as the
first argument (now a class instance), and the passed arguments
following.
As you can see, the object can be initialized in the ``__new__``
method or the ``__init__`` method, or both, and in fact ndarray does
not have an ``__init__`` method, because all the initialization is
done in the ``__new__`` method.
Why use ``__new__`` rather than just the usual ``__init__``? Because
in some cases, as for ndarray, we want to be able to return an object
of some other class. Consider the following:
.. testcode::
class D(C):
def __new__(cls, *args):
print('D cls is:', cls)
print('D args in __new__:', args)
return C.__new__(C, *args)
def __init__(self, *args):
# we never get here
print('In D __init__')
meaning that:
>>> obj = D('hello')
D cls is: <class 'D'>
D args in __new__: ('hello',)
Cls in __new__: <class 'C'>
Args in __new__: ('hello',)
>>> type(obj)
<class 'C'>
The definition of ``C`` is the same as before, but for ``D``, the
``__new__`` method returns an instance of class ``C`` rather than
``D``. Note that the ``__init__`` method of ``D`` does not get
called. In general, when the ``__new__`` method returns an object of
class other than the class in which it is defined, the ``__init__``
method of that class is not called.
This is how subclasses of the ndarray class are able to return views
that preserve the class type. When taking a view, the standard
ndarray machinery creates the new ndarray object with something
like::
obj = ndarray.__new__(subtype, shape, ...
where ``subtype`` is the subclass. Thus the returned view is of the
same class as the subclass, rather than being of class ``ndarray``.
That solves the problem of returning views of the same type, but now
we have a new problem. The machinery of ndarray can set the class
this way, in its standard methods for taking views, but the ndarray
``__new__`` method knows nothing of what we have done in our own
``__new__`` method in order to set attributes, and so on. (Aside -
why not call ``obj = subtype.__new__(...`` then? Because we may not
have a ``__new__`` method with the same call signature).
The role of ``__array_finalize__``
==================================
``__array_finalize__`` is the mechanism that numpy provides to allow
subclasses to handle the various ways that new instances get created.
Remember that subclass instances can come about in these three ways:
#. explicit constructor call (``obj = MySubClass(params)``). This will
call the usual sequence of ``MySubClass.__new__`` then (if it exists)
``MySubClass.__init__``.
#. :ref:`view-casting`
#. :ref:`new-from-template`
Our ``MySubClass.__new__`` method only gets called in the case of the
explicit constructor call, so we can't rely on ``MySubClass.__new__`` or
``MySubClass.__init__`` to deal with the view casting and
new-from-template. It turns out that ``MySubClass.__array_finalize__``
*does* get called for all three methods of object creation, so this is
where our object creation housekeeping usually goes.
* For the explicit constructor call, our subclass will need to create a
new ndarray instance of its own class. In practice this means that
we, the authors of the code, will need to make a call to
``ndarray.__new__(MySubClass,...)``, a class-hierarchy prepared call to
``super(MySubClass, cls).__new__(cls, ...)``, or do view casting of an
existing array (see below)
* For view casting and new-from-template, the equivalent of
``ndarray.__new__(MySubClass,...`` is called, at the C level.
The arguments that ``__array_finalize__`` receives differ for the three
methods of instance creation above.
The following code allows us to look at the call sequences and arguments:
.. testcode::
import numpy as np
class C(np.ndarray):
def __new__(cls, *args, **kwargs):
print('In __new__ with class %s' % cls)
return super(C, cls).__new__(cls, *args, **kwargs)
def __init__(self, *args, **kwargs):
# in practice you probably will not need or want an __init__
# method for your subclass
print('In __init__ with class %s' % self.__class__)
def __array_finalize__(self, obj):
print('In array_finalize:')
print(' self type is %s' % type(self))
print(' obj type is %s' % type(obj))
Now:
>>> # Explicit constructor
>>> c = C((10,))
In __new__ with class <class 'C'>
In array_finalize:
self type is <class 'C'>
obj type is <type 'NoneType'>
In __init__ with class <class 'C'>
>>> # View casting
>>> a = np.arange(10)
>>> cast_a = a.view(C)
In array_finalize:
self type is <class 'C'>
obj type is <type 'numpy.ndarray'>
>>> # Slicing (example of new-from-template)
>>> cv = c[:1]
In array_finalize:
self type is <class 'C'>
obj type is <class 'C'>
The signature of ``__array_finalize__`` is::
def __array_finalize__(self, obj):
One sees that the ``super`` call, which goes to
``ndarray.__new__``, passes ``__array_finalize__`` the new object, of our
own class (``self``) as well as the object from which the view has been
taken (``obj``). As you can see from the output above, the ``self`` is
always a newly created instance of our subclass, and the type of ``obj``
differs for the three instance creation methods:
* When called from the explicit constructor, ``obj`` is ``None``
* When called from view casting, ``obj`` can be an instance of any
subclass of ndarray, including our own.
* When called in new-from-template, ``obj`` is another instance of our
own subclass, that we might use to update the new ``self`` instance.
Because ``__array_finalize__`` is the only method that always sees new
instances being created, it is the sensible place to fill in instance
defaults for new object attributes, among other tasks.
This may be clearer with an example.
Simple example - adding an extra attribute to ndarray
-----------------------------------------------------
.. testcode::
import numpy as np
class InfoArray(np.ndarray):
def __new__(subtype, shape, dtype=float, buffer=None, offset=0,
strides=None, order=None, info=None):
# Create the ndarray instance of our type, given the usual
# ndarray input arguments. This will call the standard
# ndarray constructor, but return an object of our type.
# It also triggers a call to InfoArray.__array_finalize__
obj = super(InfoArray, subtype).__new__(subtype, shape, dtype,
buffer, offset, strides,
order)
# set the new 'info' attribute to the value passed
obj.info = info
# Finally, we must return the newly created object:
return obj
def __array_finalize__(self, obj):
# ``self`` is a new object resulting from
# ndarray.__new__(InfoArray, ...), therefore it only has
# attributes that the ndarray.__new__ constructor gave it -
# i.e. those of a standard ndarray.
#
# We could have got to the ndarray.__new__ call in 3 ways:
# From an explicit constructor - e.g. InfoArray():
# obj is None
# (we're in the middle of the InfoArray.__new__
# constructor, and self.info will be set when we return to
# InfoArray.__new__)
if obj is None: return
# From view casting - e.g arr.view(InfoArray):
# obj is arr
# (type(obj) can be InfoArray)
# From new-from-template - e.g infoarr[:3]
# type(obj) is InfoArray
#
# Note that it is here, rather than in the __new__ method,
# that we set the default value for 'info', because this
# method sees all creation of default objects - with the
# InfoArray.__new__ constructor, but also with
# arr.view(InfoArray).
self.info = getattr(obj, 'info', None)
# We do not need to return anything
Using the object looks like this:
>>> obj = InfoArray(shape=(3,)) # explicit constructor
>>> type(obj)
<class 'InfoArray'>
>>> obj.info is None
True
>>> obj = InfoArray(shape=(3,), info='information')
>>> obj.info
'information'
>>> v = obj[1:] # new-from-template - here - slicing
>>> type(v)
<class 'InfoArray'>
>>> v.info
'information'
>>> arr = np.arange(10)
>>> cast_arr = arr.view(InfoArray) # view casting
>>> type(cast_arr)
<class 'InfoArray'>
>>> cast_arr.info is None
True
This class isn't very useful, because it has the same constructor as the
bare ndarray object, including passing in buffers and shapes and so on.
We would probably prefer the constructor to be able to take an already
formed ndarray from the usual numpy calls to ``np.array`` and return an
object.
Slightly more realistic example - attribute added to existing array
-------------------------------------------------------------------
Here is a class that takes a standard ndarray that already exists, casts
as our type, and adds an extra attribute.
.. testcode::
import numpy as np
class RealisticInfoArray(np.ndarray):
def __new__(cls, input_array, info=None):
# Input array is an already formed ndarray instance
# We first cast to be our class type
obj = np.asarray(input_array).view(cls)
# add the new attribute to the created instance
obj.info = info
# Finally, we must return the newly created object:
return obj
def __array_finalize__(self, obj):
# see InfoArray.__array_finalize__ for comments
if obj is None: return
self.info = getattr(obj, 'info', None)
So:
>>> arr = np.arange(5)
>>> obj = RealisticInfoArray(arr, info='information')
>>> type(obj)
<class 'RealisticInfoArray'>
>>> obj.info
'information'
>>> v = obj[1:]
>>> type(v)
<class 'RealisticInfoArray'>
>>> v.info
'information'
.. _array-ufunc:
``__array_ufunc__`` for ufuncs
------------------------------
.. versionadded:: 1.13
A subclass can override what happens when executing numpy ufuncs on it by
overriding the default ``ndarray.__array_ufunc__`` method. This method is
executed *instead* of the ufunc and should return either the result of the
operation, or :obj:`NotImplemented` if the operation requested is not
implemented.
The signature of ``__array_ufunc__`` is::
def __array_ufunc__(ufunc, method, *inputs, **kwargs):
- *ufunc* is the ufunc object that was called.
- *method* is a string indicating how the Ufunc was called, either
``"__call__"`` to indicate it was called directly, or one of its
:ref:`methods<ufuncs.methods>`: ``"reduce"``, ``"accumulate"``,
``"reduceat"``, ``"outer"``, or ``"at"``.
- *inputs* is a tuple of the input arguments to the ``ufunc``
- *kwargs* contains any optional or keyword arguments passed to the
function. This includes any ``out`` arguments, which are always
contained in a tuple.
A typical implementation would convert any inputs or outputs that are
instances of one's own class, pass everything on to a superclass using
``super()``, and finally return the results after possible
back-conversion. An example, taken from the test case
``test_ufunc_override_with_super`` in ``core/tests/test_umath.py``, is the
following.
.. testcode::
    import numpy as np
class A(np.ndarray):
def __array_ufunc__(self, ufunc, method, *inputs, **kwargs):
args = []
in_no = []
for i, input_ in enumerate(inputs):
if isinstance(input_, A):
in_no.append(i)
args.append(input_.view(np.ndarray))
else:
args.append(input_)
outputs = kwargs.pop('out', None)
out_no = []
if outputs:
out_args = []
for j, output in enumerate(outputs):
if isinstance(output, A):
out_no.append(j)
out_args.append(output.view(np.ndarray))
else:
out_args.append(output)
kwargs['out'] = tuple(out_args)
else:
outputs = (None,) * ufunc.nout
info = {}
if in_no:
info['inputs'] = in_no
if out_no:
info['outputs'] = out_no
results = super(A, self).__array_ufunc__(ufunc, method,
*args, **kwargs)
if results is NotImplemented:
return NotImplemented
if method == 'at':
if isinstance(inputs[0], A):
inputs[0].info = info
return
if ufunc.nout == 1:
results = (results,)
results = tuple((np.asarray(result).view(A)
if output is None else output)
for result, output in zip(results, outputs))
if results and isinstance(results[0], A):
results[0].info = info
return results[0] if len(results) == 1 else results
So, this class does not actually do anything interesting: it just
converts any instances of its own to regular ndarray (otherwise, we'd
get infinite recursion!), and adds an ``info`` dictionary that tells
which inputs and outputs it converted. Hence, e.g.,
>>> a = np.arange(5.).view(A)
>>> b = np.sin(a)
>>> b.info
{'inputs': [0]}
>>> b = np.sin(np.arange(5.), out=(a,))
>>> b.info
{'outputs': [0]}
>>> a = np.arange(5.).view(A)
>>> b = np.ones(1).view(A)
>>> c = a + b
>>> c.info
{'inputs': [0, 1]}
>>> a += b
>>> a.info
{'inputs': [0, 1], 'outputs': [0]}
Note that another approach would be to use ``getattr(ufunc,
method)(*inputs, **kwargs)`` instead of the ``super`` call. For this example,
the result would be identical, but there is a difference if another operand
also defines ``__array_ufunc__``. E.g., let's assume that we evaluate
``np.add(a, b)``, where ``b`` is an instance of another class ``B`` that has
an override. If you use ``super`` as in the example,
``ndarray.__array_ufunc__`` will notice that ``b`` has an override, which
means it cannot evaluate the result itself. Thus, it will return
`NotImplemented` and so will our class ``A``. Then, control will be passed
over to ``b``, which either knows how to deal with us and produces a result,
or does not and returns `NotImplemented`, raising a ``TypeError``.
If instead, we replace our ``super`` call with ``getattr(ufunc, method)``, we
effectively do ``np.add(a.view(np.ndarray), b)``. Again, ``B.__array_ufunc__``
will be called, but now it sees an ``ndarray`` as the other argument. Likely,
it will know how to handle this, and return a new instance of the ``B`` class
to us. Our example class is not set up to handle this, but it might well be
the best approach if, e.g., one were to re-implement ``MaskedArray`` using
``__array_ufunc__``.
As a final note: if the ``super`` route is suited to a given class, an
advantage of using it is that it helps in constructing class hierarchies.
E.g., suppose that our other class ``B`` also used the ``super`` in its
``__array_ufunc__`` implementation, and we created a class ``C`` that depended
on both, i.e., ``class C(A, B)`` (with, for simplicity, not another
``__array_ufunc__`` override). Then any ufunc on an instance of ``C`` would
pass on to ``A.__array_ufunc__``, the ``super`` call in ``A`` would go to
``B.__array_ufunc__``, and the ``super`` call in ``B`` would go to
``ndarray.__array_ufunc__``, thus allowing ``A`` and ``B`` to collaborate.
.. _array-wrap:
``__array_wrap__`` for ufuncs and other functions
-------------------------------------------------
Prior to numpy 1.13, the behaviour of ufuncs could only be tuned using
``__array_wrap__`` and ``__array_prepare__``. These two allowed one to
change the output type of a ufunc, but, in contrast to
``__array_ufunc__``, did not allow one to make any changes to the inputs.
It is hoped to eventually deprecate these, but ``__array_wrap__`` is also
used by other numpy functions and methods, such as ``squeeze``, so at the
present time is still needed for full functionality.
Conceptually, ``__array_wrap__`` "wraps up the action" in the sense of
allowing a subclass to set the type of the return value and update
attributes and metadata. Let's show how this works with an example. First
we return to the simpler example subclass, but with a different name and
some print statements:
.. testcode::
import numpy as np
class MySubClass(np.ndarray):
def __new__(cls, input_array, info=None):
obj = np.asarray(input_array).view(cls)
obj.info = info
return obj
def __array_finalize__(self, obj):
print('In __array_finalize__:')
print(' self is %s' % repr(self))
print(' obj is %s' % repr(obj))
if obj is None: return
self.info = getattr(obj, 'info', None)
def __array_wrap__(self, out_arr, context=None):
print('In __array_wrap__:')
print(' self is %s' % repr(self))
print(' arr is %s' % repr(out_arr))
# then just call the parent
            return super(MySubClass, self).__array_wrap__(out_arr, context)
We run a ufunc on an instance of our new array:
>>> obj = MySubClass(np.arange(5), info='spam')
In __array_finalize__:
self is MySubClass([0, 1, 2, 3, 4])
obj is array([0, 1, 2, 3, 4])
>>> arr2 = np.arange(5)+1
>>> ret = np.add(arr2, obj)
In __array_wrap__:
self is MySubClass([0, 1, 2, 3, 4])
arr is array([1, 3, 5, 7, 9])
In __array_finalize__:
self is MySubClass([1, 3, 5, 7, 9])
obj is MySubClass([0, 1, 2, 3, 4])
>>> ret
MySubClass([1, 3, 5, 7, 9])
>>> ret.info
'spam'
Note that the ufunc (``np.add``) has called the ``__array_wrap__`` method
with arguments ``self`` as ``obj``, and ``out_arr`` as the (ndarray) result
of the addition. In turn, the default ``__array_wrap__``
(``ndarray.__array_wrap__``) has cast the result to class ``MySubClass``,
and called ``__array_finalize__`` - hence the copying of the ``info``
attribute. This has all happened at the C level.
But, we could do anything we wanted:
.. testcode::
class SillySubClass(np.ndarray):
def __array_wrap__(self, arr, context=None):
return 'I lost your data'
>>> arr1 = np.arange(5)
>>> obj = arr1.view(SillySubClass)
>>> arr2 = np.arange(5)
>>> ret = np.multiply(obj, arr2)
>>> ret
'I lost your data'
So, by defining a specific ``__array_wrap__`` method for our subclass,
we can tweak the output from ufuncs. The ``__array_wrap__`` method
requires ``self``, then an argument - which is the result of the ufunc -
and an optional parameter *context*. This parameter is returned by
ufuncs as a 3-element tuple: (name of the ufunc, arguments of the ufunc,
domain of the ufunc), but is not set by other numpy functions. Though,
as seen above, it is possible to do otherwise, ``__array_wrap__`` should
return an instance of its containing class. See the masked array
subclass for an implementation.
In addition to ``__array_wrap__``, which is called on the way out of the
ufunc, there is also an ``__array_prepare__`` method which is called on
the way into the ufunc, after the output arrays are created but before any
computation has been performed. The default implementation does nothing
but pass through the array. ``__array_prepare__`` should not attempt to
access the array data or resize the array, it is intended for setting the
output array type, updating attributes and metadata, and performing any
checks based on the input that may be desired before computation begins.
Like ``__array_wrap__``, ``__array_prepare__`` must return an ndarray or
subclass thereof or raise an error.
Extra gotchas - custom ``__del__`` methods and ndarray.base
-----------------------------------------------------------
One of the problems that ndarray solves is keeping track of memory
ownership of ndarrays and their views. Consider the case where we have
created an ndarray, ``arr`` and have taken a slice with ``v = arr[1:]``.
The two objects are looking at the same memory. NumPy keeps track of
where the data came from for a particular array or view, with the
``base`` attribute:
>>> # A normal ndarray, that owns its own data
>>> arr = np.zeros((4,))
>>> # In this case, base is None
>>> arr.base is None
True
>>> # We take a view
>>> v1 = arr[1:]
>>> # base now points to the array that it derived from
>>> v1.base is arr
True
>>> # Take a view of a view
>>> v2 = v1[1:]
>>> # base points to the view it derived from
>>> v2.base is v1
True
In general, if the array owns its own memory, as for ``arr`` in this
case, then ``arr.base`` will be None - there are some exceptions to this
- see the numpy book for more details.
The ``base`` attribute is useful in being able to tell whether we have
a view or the original array. This in turn can be useful if we need
to know whether or not to do some specific cleanup when the subclassed
array is deleted. For example, we may only want to do the cleanup if
the original array is deleted, but not the views. For an example of
how this can work, have a look at the ``memmap`` class in
``numpy.core``.
Subclassing and Downstream Compatibility
----------------------------------------
When sub-classing ``ndarray`` or creating duck-types that mimic the ``ndarray``
interface, it is your responsibility to decide how aligned your APIs will be
with those of numpy. For convenience, many numpy functions that have a corresponding
``ndarray`` method (e.g., ``sum``, ``mean``, ``take``, ``reshape``) work by checking
if the first argument to a function has a method of the same name. If it exists, the
method is called instead of coercing the arguments to a numpy array.
For example, if you want your sub-class or duck-type to be compatible with
numpy's ``sum`` function, the method signature for this object's ``sum`` method
should be the following:
.. testcode::
def sum(self, axis=None, dtype=None, out=None, keepdims=False):
...
This is the exact same method signature for ``np.sum``, so now if a user calls
``np.sum`` on this object, numpy will call the object's own ``sum`` method and
pass in these arguments enumerated above in the signature, and no errors will
be raised because the signatures are completely compatible with each other.
If, however, you decide to deviate from this signature and do something like this:
.. testcode::
def sum(self, axis=None, dtype=None):
...
This object is no longer compatible with ``np.sum`` because if you call ``np.sum``,
it will pass in unexpected arguments ``out`` and ``keepdims``, causing a TypeError
to be raised.
If you wish to maintain compatibility with numpy and its subsequent versions (which
might add new keyword arguments) but do not want to surface all of numpy's arguments,
your function's signature should accept ``**kwargs``. For example:
.. testcode::
def sum(self, axis=None, dtype=None, **unused_kwargs):
...
This object is now compatible with ``np.sum`` again because any extraneous arguments
(i.e. keywords that are not ``axis`` or ``dtype``) will be hidden away in the
``**unused_kwargs`` parameter.
"""
from __future__ import division, absolute_import, print_function
| MSeifert04/numpy | numpy/doc/subclassing.py | Python | bsd-3-clause | 28,624 |
# -*- coding: utf-8 -*-
"""
Django Extensions additional model fields
"""
import re
import six
import string
import warnings
try:
import uuid
HAS_UUID = True
except ImportError:
HAS_UUID = False
try:
import shortuuid
HAS_SHORT_UUID = True
except ImportError:
HAS_SHORT_UUID = False
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.models import DateTimeField, CharField, SlugField
from django.template.defaultfilters import slugify
from django.utils.crypto import get_random_string
from django.utils.encoding import force_text
# Upper bound on candidate values tried when searching for a unique field value;
# overridable via the EXTENSIONS_MAX_UNIQUE_QUERY_ATTEMPTS Django setting.
MAX_UNIQUE_QUERY_ATTEMPTS = getattr(settings, 'EXTENSIONS_MAX_UNIQUE_QUERY_ATTEMPTS', 100)
class UniqueFieldMixin(object):
    """Mixin for fields that generate values unique across the model's table."""
    def check_is_bool(self, attrname):
        # Validate that a constructor flag is a real bool, not just truthy/falsy.
        if not isinstance(getattr(self, attrname), bool):
            raise ValueError("'{}' argument must be True or False".format(attrname))
    @staticmethod
    def _get_fields(model_cls):
        # Pairs of (field, owning model) for concrete fields; the owning model
        # is None when the field is declared on ``model_cls`` itself rather
        # than inherited from a parent model.
        return [
            (f, f.model if f.model != model_cls else None) for f in model_cls._meta.get_fields()
            if not f.is_relation or f.one_to_one or (f.many_to_one and f.related_model)
        ]
    def get_queryset(self, model_cls, slug_field):
        # Query the parent model's table when ``slug_field`` is inherited, so
        # uniqueness is checked where the column actually lives.
        for field, model in self._get_fields(model_cls):
            if model and field == slug_field:
                return model._default_manager.all()
        return model_cls._default_manager.all()
    def find_unique(self, model_instance, field, iterator, *args):
        """Consume ``iterator`` until a value not already present in the table is found.

        The winning value is assigned to ``model_instance``'s attribute and
        returned. ``iterator`` is expected to raise (e.g. RuntimeError) when
        it runs out of candidates.
        """
        # exclude the current model instance from the queryset used in finding
        # next valid hash
        queryset = self.get_queryset(model_instance.__class__, field)
        if model_instance.pk:
            queryset = queryset.exclude(pk=model_instance.pk)
        # form a kwarg dict used to implement any unique_together constraints
        kwargs = {}
        for params in model_instance._meta.unique_together:
            if self.attname in params:
                for param in params:
                    kwargs[param] = getattr(model_instance, param, None)
        new = six.next(iterator)
        kwargs[self.attname] = new
        # Falsy candidates (e.g. empty slug) are rejected as well as duplicates.
        while not new or queryset.filter(**kwargs):
            new = six.next(iterator)
            kwargs[self.attname] = new
        setattr(model_instance, self.attname, new)
        return new
class AutoSlugField(UniqueFieldMixin, SlugField):
    """ AutoSlugField

    By default, sets editable=False, blank=True.

    Required arguments:

    populate_from
        Specifies which field or list of fields the slug is populated from.

    Optional arguments:

    separator
        Defines the used separator (default: '-')

    overwrite
        If set to True, overwrites the slug on every save (default: False)

    Inspired by SmileyChris' Unique Slugify snippet:
    http://www.djangosnippets.org/snippets/690/
    """
    def __init__(self, *args, **kwargs):
        kwargs.setdefault('blank', True)
        kwargs.setdefault('editable', False)
        populate_from = kwargs.pop('populate_from', None)
        if populate_from is None:
            raise ValueError("missing 'populate_from' argument")
        else:
            self._populate_from = populate_from
        # A custom slugify callable may be supplied; defaults to Django's slugify.
        self.slugify_function = kwargs.pop('slugify_function', slugify)
        self.separator = kwargs.pop('separator', six.u('-'))
        self.overwrite = kwargs.pop('overwrite', False)
        self.check_is_bool('overwrite')
        self.allow_duplicates = kwargs.pop('allow_duplicates', False)
        self.check_is_bool('allow_duplicates')
        self.max_unique_query_attempts = kwargs.pop('max_unique_query_attempts', MAX_UNIQUE_QUERY_ATTEMPTS)
        super(AutoSlugField, self).__init__(*args, **kwargs)
    def _slug_strip(self, value):
        """
        Cleans up a slug by removing slug separator characters that occur at
        the beginning or end of a slug.

        If an alternate separator is used, it will also replace any instances
        of the default '-' separator with the new separator.
        """
        re_sep = '(?:-|%s)' % re.escape(self.separator)
        value = re.sub('%s+' % re_sep, self.separator, value)
        return re.sub(r'^%s+|%s+$' % (re_sep, re_sep), '', value)
    def slugify_func(self, content):
        # Apply the configured slugify function; empty/None input yields ''.
        if content:
            return self.slugify_function(content)
        return ''
    def slug_generator(self, original_slug, start):
        """Yield ``original_slug`` first, then numbered variants ('slug-2', 'slug-3', ...).

        Numbered variants are truncated so that slug + suffix still fits in
        the field's max_length. Raises RuntimeError once
        ``max_unique_query_attempts`` candidates have been produced.
        """
        yield original_slug
        for i in range(start, self.max_unique_query_attempts):
            slug = original_slug
            end = '%s%s' % (self.separator, i)
            end_len = len(end)
            if self.slug_len and len(slug) + end_len > self.slug_len:
                slug = slug[:self.slug_len - end_len]
                slug = self._slug_strip(slug)
            slug = '%s%s' % (slug, end)
            yield slug
        raise RuntimeError('max slug attempts for %s exceeded (%s)' % (original_slug, self.max_unique_query_attempts))
    def create_slug(self, model_instance, add):
        # get fields to populate from and slug field to set
        # NOTE: normalizes a single field name to a tuple; this is cached on
        # the field instance itself (only happens once per field).
        if not isinstance(self._populate_from, (list, tuple)):
            self._populate_from = (self._populate_from, )
        slug_field = model_instance._meta.get_field(self.attname)
        if add or self.overwrite:
            # slugify the original field content and set next step to 2
            slug_for_field = lambda field: self.slugify_func(getattr(model_instance, field))
            slug = self.separator.join(map(slug_for_field, self._populate_from))
            start = 2
        else:
            # get slug from the current model instance
            slug = getattr(model_instance, self.attname)
            # model_instance is being modified, and overwrite is False,
            # so instead of doing anything, just return the current slug
            return slug
        # strip slug depending on max_length attribute of the slug field
        # and clean-up
        self.slug_len = slug_field.max_length
        if self.slug_len:
            slug = slug[:self.slug_len]
        slug = self._slug_strip(slug)
        original_slug = slug
        if self.allow_duplicates:
            # Duplicates allowed: skip the uniqueness search entirely.
            setattr(model_instance, self.attname, slug)
            return slug
        return super(AutoSlugField, self).find_unique(
            model_instance, slug_field, self.slug_generator(original_slug, start))
    def pre_save(self, model_instance, add):
        # Compute (and store on the instance) the slug just before saving.
        value = force_text(self.create_slug(model_instance, add))
        return value
    def get_internal_type(self):
        return "SlugField"
    def deconstruct(self):
        # Only serialize non-default options so migrations stay minimal.
        name, path, args, kwargs = super(AutoSlugField, self).deconstruct()
        kwargs['populate_from'] = self._populate_from
        if not self.separator == six.u('-'):
            kwargs['separator'] = self.separator
        if self.overwrite is not False:
            kwargs['overwrite'] = True
        if self.allow_duplicates is not False:
            kwargs['allow_duplicates'] = True
        return name, path, args, kwargs
class RandomCharField(UniqueFieldMixin, CharField):
    """ RandomCharField

    By default, sets editable=False, blank=True, unique=False.

    Required arguments:

    length
        Specifies the length of the field

    Optional arguments:

    unique
        If set to True, duplicate entries are not allowed (default: False)

    lowercase
        If set to True, lowercase the alpha characters (default: False)

    uppercase
        If set to True, uppercase the alpha characters (default: False)

    include_alpha
        If set to True, include alpha characters (default: True)

    include_digits
        If set to True, include digit characters (default: True)

    include_punctuation
        If set to True, include punctuation characters (default: False)
    """
    def __init__(self, *args, **kwargs):
        kwargs.setdefault('blank', True)
        kwargs.setdefault('editable', False)
        self.length = kwargs.pop('length', None)
        if self.length is None:
            raise ValueError("missing 'length' argument")
        kwargs['max_length'] = self.length
        self.lowercase = kwargs.pop('lowercase', False)
        self.check_is_bool('lowercase')
        self.uppercase = kwargs.pop('uppercase', False)
        self.check_is_bool('uppercase')
        if self.uppercase and self.lowercase:
            raise ValueError("the 'lowercase' and 'uppercase' arguments are mutually exclusive")
        self.include_digits = kwargs.pop('include_digits', True)
        self.check_is_bool('include_digits')
        self.include_alpha = kwargs.pop('include_alpha', True)
        self.check_is_bool('include_alpha')
        self.include_punctuation = kwargs.pop('include_punctuation', False)
        self.check_is_bool('include_punctuation')
        self.max_unique_query_attempts = kwargs.pop('max_unique_query_attempts', MAX_UNIQUE_QUERY_ATTEMPTS)
        # Set unique=False unless it's been set manually.
        if 'unique' not in kwargs:
            kwargs['unique'] = False
        super(RandomCharField, self).__init__(*args, **kwargs)
    def random_char_generator(self, chars):
        """Yield up to ``max_unique_query_attempts`` random strings drawn from ``chars``."""
        for _ in range(self.max_unique_query_attempts):
            # get_random_string already returns a str of the requested length;
            # no join needed.
            yield get_random_string(self.length, chars)
        raise RuntimeError('max random character attempts exceeded (%s)' % self.max_unique_query_attempts)
    def pre_save(self, model_instance, add):
        # Keep an existing value on updates; only generate on first save
        # (or when the stored value is empty).
        if not add and getattr(model_instance, self.attname) != '':
            return getattr(model_instance, self.attname)
        # Build the candidate alphabet from the configured options.
        population = ''
        if self.include_alpha:
            if self.lowercase:
                population += string.ascii_lowercase
            elif self.uppercase:
                population += string.ascii_uppercase
            else:
                population += string.ascii_letters
        if self.include_digits:
            population += string.digits
        if self.include_punctuation:
            population += string.punctuation
        random_chars = self.random_char_generator(population)
        if not self.unique:
            # No uniqueness requirement: the first candidate is good enough.
            new = six.next(random_chars)
            setattr(model_instance, self.attname, new)
            return new
        return super(RandomCharField, self).find_unique(
            model_instance,
            model_instance._meta.get_field(self.attname),
            random_chars,
        )
    def get_internal_type(self):
        # BUGFIX: this was previously named ``internal_type`` and was therefore
        # never called by Django (which looks up ``get_internal_type``).
        # Renamed for consistency with the other fields in this module;
        # behavior is unchanged because CharField's default already
        # returned "CharField".
        return "CharField"
    def deconstruct(self):
        # Only serialize non-default options so migrations stay minimal.
        name, path, args, kwargs = super(RandomCharField, self).deconstruct()
        kwargs['length'] = self.length
        del kwargs['max_length']
        if self.lowercase is True:
            kwargs['lowercase'] = self.lowercase
        if self.uppercase is True:
            kwargs['uppercase'] = self.uppercase
        if self.include_alpha is False:
            kwargs['include_alpha'] = self.include_alpha
        if self.include_digits is False:
            kwargs['include_digits'] = self.include_digits
        if self.include_punctuation is True:
            kwargs['include_punctuation'] = self.include_punctuation
        if self.unique is True:
            kwargs['unique'] = self.unique
        return name, path, args, kwargs
class CreationDateTimeField(DateTimeField):
    """DateTimeField that records when the row was created.

    By default, sets editable=False, blank=True, auto_now_add=True.
    """
    def __init__(self, *args, **kwargs):
        # Apply the creation-timestamp defaults without clobbering explicit options.
        for option, default in (('editable', False), ('blank', True), ('auto_now_add', True)):
            kwargs.setdefault(option, default)
        DateTimeField.__init__(self, *args, **kwargs)
    def get_internal_type(self):
        return "DateTimeField"
    def deconstruct(self):
        name, path, args, kwargs = super(CreationDateTimeField, self).deconstruct()
        # Record only deviations from this field's own defaults, so migrations
        # stay minimal.
        overrides = {}
        if self.editable is not False:
            overrides['editable'] = True
        if self.blank is not True:
            overrides['blank'] = False
        if self.auto_now_add is not False:
            overrides['auto_now_add'] = True
        kwargs.update(overrides)
        return name, path, args, kwargs
class ModificationDateTimeField(CreationDateTimeField):
    """DateTimeField that records the last save time.

    By default, sets editable=False, blank=True, auto_now=True.

    Sets value to now every time the object is saved, unless the instance
    carries ``update_modified = False``.
    """
    def __init__(self, *args, **kwargs):
        kwargs.setdefault('auto_now', True)
        # Deliberately skip CreationDateTimeField.__init__ so auto_now_add is
        # not forced on; go straight to DateTimeField.
        DateTimeField.__init__(self, *args, **kwargs)
    def get_internal_type(self):
        return "DateTimeField"
    def deconstruct(self):
        name, path, args, kwargs = super(ModificationDateTimeField, self).deconstruct()
        # Serialize auto_now only when it differs from DateTimeField's default.
        if self.auto_now is not False:
            kwargs['auto_now'] = True
        return name, path, args, kwargs
    def pre_save(self, model_instance, add):
        # An instance can opt out of touching the timestamp for this save by
        # setting ``update_modified = False``.
        should_update = getattr(model_instance, 'update_modified', True)
        if should_update:
            return super(ModificationDateTimeField, self).pre_save(model_instance, add)
        return model_instance.modified
class UUIDVersionError(Exception):
    """Raised when a UUID field is configured with an unsupported or invalid UUID version."""
    pass
class UUIDField(CharField):
    """ UUIDField

    By default uses UUID version 4 (randomly generated UUID).

    The field supports all uuid versions which are natively supported by the
    uuid python module, except version 2.
    For more information see: http://docs.python.org/lib/module-uuid.html
    """
    # 36 = canonical hyphenated text form of a UUID.
    DEFAULT_MAX_LENGTH = 36
    def __init__(self, verbose_name=None, name=None, auto=True, version=4, node=None, clock_seq=None, namespace=None, uuid_name=None, *args, **kwargs):
        warnings.warn("Django 1.8 features a native UUIDField, this UUIDField will be removed after Django 1.7 becomes unsupported.", DeprecationWarning)
        if not HAS_UUID:
            raise ImproperlyConfigured("'uuid' module is required for UUIDField. (Do you have Python 2.5 or higher installed ?)")
        kwargs.setdefault('max_length', self.DEFAULT_MAX_LENGTH)
        if auto:
            # Auto-generated values are never empty and must not be user-edited.
            self.empty_strings_allowed = False
            kwargs['blank'] = True
            kwargs.setdefault('editable', False)
        self.auto = auto
        self.version = version
        # Per-version generation arguments: node/clock_seq for v1,
        # namespace/uuid_name for v3 and v5.
        self.node = node
        self.clock_seq = clock_seq
        self.namespace = namespace
        self.uuid_name = uuid_name or name
        super(UUIDField, self).__init__(verbose_name=verbose_name, *args, **kwargs)
    def create_uuid(self):
        """Return a fresh uuid.UUID for the configured version.

        Raises UUIDVersionError for version 2 (unsupported) and unknown versions.
        """
        if not self.version or self.version == 4:
            return uuid.uuid4()
        elif self.version == 1:
            return uuid.uuid1(self.node, self.clock_seq)
        elif self.version == 2:
            raise UUIDVersionError("UUID version 2 is not supported.")
        elif self.version == 3:
            return uuid.uuid3(self.namespace, self.uuid_name)
        elif self.version == 5:
            return uuid.uuid5(self.namespace, self.uuid_name)
        else:
            raise UUIDVersionError("UUID version %s is not valid." % self.version)
    def pre_save(self, model_instance, add):
        value = super(UUIDField, self).pre_save(model_instance, add)
        if self.auto and add and value is None:
            # First save of an auto field with no value: generate one.
            value = force_text(self.create_uuid())
            setattr(model_instance, self.attname, value)
            return value
        else:
            # Backfill on later saves too when the value is missing/empty.
            if self.auto and not value:
                value = force_text(self.create_uuid())
                setattr(model_instance, self.attname, value)
        return value
    def formfield(self, **kwargs):
        # Auto-generated fields are hidden from model forms entirely.
        if self.auto:
            return None
        return super(UUIDField, self).formfield(**kwargs)
    def deconstruct(self):
        # Only serialize non-default options so migrations stay minimal.
        name, path, args, kwargs = super(UUIDField, self).deconstruct()
        if kwargs.get('max_length', None) == self.DEFAULT_MAX_LENGTH:
            del kwargs['max_length']
        if self.auto is not True:
            kwargs['auto'] = self.auto
        if self.version != 4:
            kwargs['version'] = self.version
        if self.node is not None:
            kwargs['node'] = self.node
        if self.clock_seq is not None:
            kwargs['clock_seq'] = self.clock_seq
        if self.namespace is not None:
            kwargs['namespace'] = self.namespace
        if self.uuid_name is not None:
            # BUGFIX: previously serialized ``self.name`` here, which silently
            # dropped a custom ``uuid_name`` during migration deconstruction.
            kwargs['uuid_name'] = self.uuid_name
        return name, path, args, kwargs
class PostgreSQLUUIDField(UUIDField):
    """UUIDField stored in PostgreSQL's native ``UUID`` column type."""
    def __init__(self, *args, **kwargs):
        warnings.warn("Django 1.8 features a native UUIDField, this UUIDField will be removed after Django 1.7 becomes unsupported.", DeprecationWarning)
        super(PostgreSQLUUIDField, self).__init__(*args, **kwargs)
    def db_type(self, connection=None):
        # Native PostgreSQL column type instead of varchar.
        return "UUID"
    def get_db_prep_value(self, value, connection, prepared=False):
        # Coerce ints, raw 16-byte values and hex strings to uuid.UUID before
        # delegating to the parent's preparation.
        if isinstance(value, six.integer_types):
            value = uuid.UUID(int=value)
        elif isinstance(value, (six.string_types, six.binary_type)):
            if len(value) == 16:
                # Length exactly 16: treat as the raw 128-bit bytes form.
                value = uuid.UUID(bytes=value)
            else:
                value = uuid.UUID(value)
        # NOTE(review): the incoming ``prepared`` argument is ignored and False
        # is always forwarded to the parent -- confirm this is intentional.
        return super(PostgreSQLUUIDField, self).get_db_prep_value(
            value, connection, prepared=False)
class ShortUUIDField(UUIDField):
    """ ShortUUIDField

    Generates concise (22 characters instead of 36), unambiguous, URL-safe UUIDs.

    Based on `shortuuid`: https://github.com/stochastic-technologies/shortuuid
    """
    DEFAULT_MAX_LENGTH = 22
    def __init__(self, *args, **kwargs):
        # BUGFIX: the dependency check used to run *after* super().__init__,
        # so a half-initialized field could exist before the error; check first.
        if not HAS_SHORT_UUID:
            raise ImproperlyConfigured("'shortuuid' module is required for ShortUUIDField. (Do you have Python 2.5 or higher installed ?)")
        # UUIDField.__init__ applies max_length via self.DEFAULT_MAX_LENGTH,
        # which resolves to 22 for this subclass; the old post-super
        # ``kwargs.setdefault('max_length', ...)`` was dead code and is removed.
        super(ShortUUIDField, self).__init__(*args, **kwargs)
    def create_uuid(self):
        """Return a new short UUID string for the configured version.

        Raises UUIDVersionError for versions 2 and 3 (unsupported) and for
        unknown versions.
        """
        if not self.version or self.version == 4:
            return shortuuid.uuid()
        elif self.version == 1:
            # shortuuid has no v1 generator; falls back to its default (v4-based).
            return shortuuid.uuid()
        elif self.version == 2:
            raise UUIDVersionError("UUID version 2 is not supported.")
        elif self.version == 3:
            raise UUIDVersionError("UUID version 3 is not supported.")
        elif self.version == 5:
            return shortuuid.uuid(name=self.namespace)
        else:
            raise UUIDVersionError("UUID version %s is not valid." % self.version)
| neilpelow/wmap-django | venv/lib/python3.5/site-packages/django_extensions/db/fields/__init__.py | Python | gpl-3.0 | 18,263 |
#!/usr/bin/env python3
# Safe Eyes is a utility to remind you to take break frequently
# to protect your eyes from eye strain.
# Copyright (C) 2016 Gobinath
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Safe Eyes is a utility to remind you to take break frequently to protect your eyes from eye strain.
"""
import argparse
import gettext
import locale
import logging
import signal
import sys
from threading import Timer
import gi
import psutil
from safeeyes import utility
from safeeyes.model import Config
from safeeyes.safeeyes import SafeEyes
from safeeyes.safeeyes import SAFE_EYES_VERSION
from safeeyes.rpc import RPCClient
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
gettext.install('safeeyes', utility.LOCALE_PATH)
def __running():
    """
    Check if SafeEyes is already running.

    Returns True when more than one safeeyes process (including this one)
    is found in the process table.
    """
    process_count = 0
    for proc in psutil.process_iter():
        # NOTE(review): in current psutil ``proc.cmdline`` is a bound method,
        # so this check is always truthy and never skips; it only matters for
        # very old psutil where ``cmdline`` was a plain attribute -- confirm.
        if not proc.cmdline:
            continue
        try:
            # Check if safeeyes is in process arguments
            if callable(proc.cmdline):
                # Latest psutil has cmdline function
                cmd_line = proc.cmdline()
            else:
                # In older versions cmdline was a list object
                cmd_line = proc.cmdline
            # Match "python[3] ... safeeyes" style invocations.
            if ('python3' in cmd_line[0] or 'python' in cmd_line[0]) and ('safeeyes' in cmd_line[1] or 'safeeyes' in cmd_line):
                process_count += 1
                if process_count > 1:
                    return True
        # Ignore if process does not exist or does not have command line args
        except (IndexError, psutil.NoSuchProcess):
            pass
    return False
def __evaluate_arguments(args, safe_eyes):
    """
    Evaluate the parsed command-line arguments and trigger the matching
    SafeEyes action on the GTK main thread.
    """
    # The argparse group is mutually exclusive, so at most one flag is set;
    # dispatch via a table instead of an if/elif chain.
    dispatch = (
        (args.about, safe_eyes.show_about),
        (args.disable, safe_eyes.disable_safeeyes),
        (args.enable, safe_eyes.enable_safeeyes),
        (args.settings, safe_eyes.show_settings),
        (args.take_break, safe_eyes.take_break),
    )
    for flag, action in dispatch:
        if flag:
            utility.execute_main_thread(action)
            break
def main():
    """
    Entry point: parse arguments and either start Safe Eyes or forward the
    requested action to an already-running instance over its RPC server.
    """
    # Install gettext's _() into builtins for the argparse help strings below.
    system_locale = gettext.translation('safeeyes', localedir=utility.LOCALE_PATH, languages=[utility.system_locale(), 'en_US'], fallback=True)
    system_locale.install()
    # locale.bindtextdomain is required for Glade files
    # gettext.bindtextdomain(gettext.textdomain(), Utility.LOCALE_PATH)
    locale.bindtextdomain('safeeyes', utility.LOCALE_PATH)
    parser = argparse.ArgumentParser(prog='safeeyes', description=_('description'))
    # These actions are mutually exclusive: at most one may be given.
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-a', '--about', help=_('show the about dialog'), action='store_true')
    group.add_argument('-d', '--disable', help=_('disable the currently running safeeyes instance'), action='store_true')
    group.add_argument('-e', '--enable', help=_('enable the currently running safeeyes instance'), action='store_true')
    group.add_argument('-q', '--quit', help=_('quit the running safeeyes instance and exit'), action='store_true')
    group.add_argument('-s', '--settings', help=_('show the settings dialog'), action='store_true')
    group.add_argument('-t', '--take-break', help=_('Take a break now').lower(), action='store_true')
    parser.add_argument('--debug', help=_('start safeeyes in debug mode'), action='store_true')
    parser.add_argument('--status', help=_('print the status of running safeeyes instance and exit'), action='store_true')
    parser.add_argument('--version', action='version', version='%(prog)s ' + SAFE_EYES_VERSION)
    args = parser.parse_args()
    # Initialize logging and per-platform setup.  (Note: "intialize" typo is
    # the actual API name in the utility module.)
    utility.intialize_logging(args.debug)
    utility.initialize_platform()
    config = Config()
    if __running():
        # Another instance exists: forward the requested action over RPC.
        logging.info("Safe Eyes is already running")
        if not config.get("use_rpc_server", True):
            # RPC sever is disabled
            print(_('Safe Eyes is running without an RPC server. Turn it on to use command-line arguments.'))
            sys.exit(0)
            # NOTE(review): this `return` is unreachable after sys.exit(0).
            return
        rpc_client = RPCClient(config.get('rpc_port'))
        if args.about:
            rpc_client.show_about()
        elif args.disable:
            rpc_client.disable_safeeyes()
        elif args.enable:
            rpc_client.enable_safeeyes()
        elif args.settings:
            rpc_client.show_settings()
        elif args.take_break:
            rpc_client.take_break()
        elif args.status:
            print(rpc_client.status())
        elif args.quit:
            rpc_client.quit()
        else:
            # Default behavior is opening settings
            rpc_client.show_settings()
        sys.exit(0)
    else:
        if args.status:
            print(_('Safe Eyes is not running'))
            sys.exit(0)
        elif not args.quit:
            # Fresh start: create the app, then evaluate the action flags
            # shortly after the GTK main loop is up.
            logging.info("Starting Safe Eyes")
            safe_eyes = SafeEyes(system_locale, config)
            safe_eyes.start()
            Timer(1.0, lambda: __evaluate_arguments(args, safe_eyes)).start()
            Gtk.main()


if __name__ == '__main__':
    signal.signal(signal.SIGINT, signal.SIG_DFL)  # Handle Ctrl + C
    main()
| slgobinath/SafeEyes | safeeyes/__main__.py | Python | gpl-3.0 | 5,874 |
from __future__ import division, print_function, absolute_import

from numpy.testing import dec, assert_, assert_array_equal

try:
    import matplotlib
    # Force a non-interactive backend so these smoke tests run headless.
    matplotlib.rcParams['backend'] = 'Agg'
    import matplotlib.pyplot as plt
    from matplotlib.collections import LineCollection
    has_matplotlib = True
except ImportError:
    # Matplotlib is optional; the tests below are skipped without it.
    # (Was a bare `except:`, which would also swallow KeyboardInterrupt
    # and SystemExit.)
    has_matplotlib = False

from scipy.spatial import \
    delaunay_plot_2d, voronoi_plot_2d, convex_hull_plot_2d, \
    Delaunay, Voronoi, ConvexHull
class TestPlotting:
    """Smoke tests for the scipy.spatial 2-D plotting helpers."""

    points = [(0,0), (0,1), (1,0), (1,1)]

    @dec.skipif(not has_matplotlib, "Matplotlib not available")
    def test_delaunay(self):
        # Plotting must return the figure, not crash, and not mutate the
        # triangulation it is given.
        figure = plt.figure()
        triangulation = Delaunay(self.points)
        simplices_before = triangulation.simplices.copy()
        returned = delaunay_plot_2d(triangulation, ax=figure.gca())
        assert_array_equal(triangulation.simplices, simplices_before)
        assert_(returned is figure)
        delaunay_plot_2d(triangulation, ax=figure.gca())

    @dec.skipif(not has_matplotlib, "Matplotlib not available")
    def test_voronoi(self):
        # Exercise plotting with an explicit axis, with a fresh figure,
        # and with vertex markers suppressed.
        figure = plt.figure()
        diagram = Voronoi(self.points)
        returned = voronoi_plot_2d(diagram, ax=figure.gca())
        assert_(returned is figure)
        voronoi_plot_2d(diagram)
        voronoi_plot_2d(diagram, show_vertices=False)

    @dec.skipif(not has_matplotlib, "Matplotlib not available")
    def test_convex_hull(self):
        # Exercise plotting with an explicit axis and with a fresh figure.
        figure = plt.figure()
        hull = ConvexHull(self.points)
        returned = convex_hull_plot_2d(hull, ax=figure.gca())
        assert_(returned is figure)
        convex_hull_plot_2d(hull)
| jlcarmic/producthunt_simulator | venv/lib/python2.7/site-packages/scipy/spatial/tests/test__plotutils.py | Python | mit | 1,567 |
# Copyright (c) 2008 David Aguilar
# Copyright (c) 2015 Daniel Harding
"""Provides an filesystem monitoring for Linux (via inotify) and for Windows
(via pywin32 and the ReadDirectoryChanges function)"""
from __future__ import division, absolute_import, unicode_literals
import errno
import os
import os.path
import select
from threading import Lock
from . import utils
from . import version
from .decorators import memoize
# Select the filesystem-monitoring backend for this platform:
# 'pywin32' on Windows (ReadDirectoryChangesW), 'inotify' on Linux,
# or None when the required bindings cannot be imported.
AVAILABLE = None
if utils.is_win32():
    try:
        import pywintypes
        import win32con
        import win32event
        import win32file
    except ImportError:
        pass
    else:
        AVAILABLE = 'pywin32'
elif utils.is_linux():
    try:
        from . import inotify
    except ImportError:
        pass
    else:
        AVAILABLE = 'inotify'
from qtpy import QtCore
from qtpy.QtCore import Signal
from . import core
from . import gitcfg
from . import gitcmds
from .compat import bchr
from .git import git
from .i18n import N_
from .interaction import Interaction
class _Monitor(QtCore.QObject):
    """Qt-facing facade that owns the platform-specific watcher thread."""

    files_changed = Signal()

    def __init__(self, thread_class):
        QtCore.QObject.__init__(self)
        # thread_class is None when no monitoring backend is available.
        self._thread_class = thread_class
        self._thread = None

    def start(self):
        """Spawn the watcher thread; no-op when monitoring is unavailable."""
        if self._thread_class is None:
            return
        assert self._thread is None
        self._thread = self._thread_class(self)
        self._thread.start()

    def stop(self):
        """Stop and join the watcher thread; no-op when unavailable."""
        if self._thread_class is None:
            return
        assert self._thread is not None
        self._thread.stop()
        self._thread.wait()
        self._thread = None

    def refresh(self):
        """Forward repository-changed housekeeping to the running thread."""
        if self._thread is not None:
            self._thread.refresh()
class _BaseThread(QtCore.QThread):
    """Common machinery for the platform watcher threads: collects modified
    paths, filters them through `git check-ignore`, and emits the monitor's
    files_changed signal."""

    #: The delay, in milliseconds, between detecting file system modification
    #: and triggering the 'files_changed' signal, to coalesce multiple
    #: modifications into a single signal.
    _NOTIFICATION_DELAY = 888

    def __init__(self, monitor):
        QtCore.QThread.__init__(self)
        self._monitor = monitor
        self._running = True
        # Older gits lack `git check-ignore`; without it we notify
        # unconditionally instead of filtering ignored paths.
        self._use_check_ignore = version.check('check-ignore',
                                               version.git_version())
        self._force_notify = False
        self._file_paths = set()

    @property
    def _pending(self):
        # Truthy when there is anything to report on the next timeout.
        return self._force_notify or self._file_paths

    def refresh(self):
        """Do any housekeeping necessary in response to repository changes."""
        pass

    def notify(self):
        """Notifies all observers"""
        do_notify = False
        if self._force_notify:
            do_notify = True
        elif self._file_paths:
            # Ask git which of the accumulated paths are ignored; only
            # notify if at least one path is NOT ignored.
            proc = core.start_command(['git', 'check-ignore', '--verbose',
                                       '--non-matching', '-z', '--stdin'])
            path_list = bchr(0).join(core.encode(path)
                                     for path in self._file_paths)
            out, err = proc.communicate(path_list)
            if proc.returncode:
                do_notify = True
            else:
                # Each output record is four fields separated by NULL
                # characters (records are also separated by NULL characters):
                # <source> <NULL> <linenum> <NULL> <pattern> <NULL> <pathname>
                # For paths which are not ignored, all fields will be empty
                # except for <pathname>. So to see if we have any non-ignored
                # files, we simply check every fourth field to see if any of
                # them are empty.
                source_fields = out.split(bchr(0))[0:-1:4]
                do_notify = not all(source_fields)
        # Reset accumulated state regardless of whether we notify.
        self._force_notify = False
        self._file_paths = set()
        if do_notify:
            self._monitor.files_changed.emit()

    @staticmethod
    def _log_enabled_message():
        msg = N_('File system change monitoring: enabled.\n')
        Interaction.safe_log(msg)
if AVAILABLE == 'inotify':
class _InotifyThread(_BaseThread):
_TRIGGER_MASK = (
inotify.IN_ATTRIB |
inotify.IN_CLOSE_WRITE |
inotify.IN_CREATE |
inotify.IN_DELETE |
inotify.IN_MODIFY |
inotify.IN_MOVED_FROM |
inotify.IN_MOVED_TO
)
_ADD_MASK = (
_TRIGGER_MASK |
inotify.IN_EXCL_UNLINK |
inotify.IN_ONLYDIR
)
def __init__(self, monitor):
_BaseThread.__init__(self, monitor)
worktree = git.worktree()
if worktree is not None:
worktree = core.abspath(worktree)
self._worktree = worktree
self._git_dir = git.git_path()
self._lock = Lock()
self._inotify_fd = None
self._pipe_r = None
self._pipe_w = None
self._worktree_wd_to_path_map = {}
self._worktree_path_to_wd_map = {}
self._git_dir_wd_to_path_map = {}
self._git_dir_path_to_wd_map = {}
self._git_dir_wd = None
@staticmethod
def _log_out_of_wds_message():
msg = N_('File system change monitoring: disabled because the'
' limit on the total number of inotify watches was'
' reached. You may be able to increase the limit on'
' the number of watches by running:\n'
'\n'
' echo fs.inotify.max_user_watches=100000 |'
' sudo tee -a /etc/sysctl.conf &&'
' sudo sysctl -p\n')
Interaction.safe_log(msg)
def run(self):
try:
with self._lock:
self._inotify_fd = inotify.init()
self._pipe_r, self._pipe_w = os.pipe()
poll_obj = select.poll()
poll_obj.register(self._inotify_fd, select.POLLIN)
poll_obj.register(self._pipe_r, select.POLLIN)
self.refresh()
self._log_enabled_message()
while self._running:
if self._pending:
timeout = self._NOTIFICATION_DELAY
else:
timeout = None
try:
events = poll_obj.poll(timeout)
except OSError as e:
if e.errno == errno.EINTR:
continue
else:
raise
except select.error:
continue
else:
if not self._running:
break
elif not events:
self.notify()
else:
for fd, event in events:
if fd == self._inotify_fd:
self._handle_events()
finally:
with self._lock:
if self._inotify_fd is not None:
os.close(self._inotify_fd)
self._inotify_fd = None
if self._pipe_r is not None:
os.close(self._pipe_r)
self._pipe_r = None
os.close(self._pipe_w)
self._pipe_w = None
def refresh(self):
with self._lock:
if self._inotify_fd is None:
return
try:
if self._worktree is not None:
tracked_dirs = set(
os.path.dirname(os.path.join(self._worktree,
path))
for path in gitcmds.tracked_files())
self._refresh_watches(tracked_dirs,
self._worktree_wd_to_path_map,
self._worktree_path_to_wd_map)
git_dirs = set()
git_dirs.add(self._git_dir)
for dirpath, dirnames, filenames in core.walk(
os.path.join(self._git_dir, 'refs')):
git_dirs.add(dirpath)
self._refresh_watches(git_dirs,
self._git_dir_wd_to_path_map,
self._git_dir_path_to_wd_map)
self._git_dir_wd = \
self._git_dir_path_to_wd_map[self._git_dir]
except OSError as e:
if e.errno == errno.ENOSPC:
self._log_out_of_wds_message()
self._running = False
else:
raise
def _refresh_watches(self, paths_to_watch, wd_to_path_map,
path_to_wd_map):
watched_paths = set(path_to_wd_map)
for path in watched_paths - paths_to_watch:
wd = path_to_wd_map.pop(path)
wd_to_path_set.pop(wd)
try:
inotify.rm_watch(self._inotify_fd, wd)
except OSError as e:
if e.errno == errno.EINVAL:
# This error can occur if the target of the wd was
# removed on the filesystem before we call
# inotify.rm_watch() so ignore it.
pass
else:
raise
for path in paths_to_watch - watched_paths:
try:
wd = inotify.add_watch(self._inotify_fd, core.encode(path),
self._ADD_MASK)
except OSError as e:
if e.errno in (errno.ENOENT, errno.ENOTDIR):
# These two errors should only occur as a result of
# race conditions: the first if the directory
# referenced by path was removed or renamed before the
# call to inotify.add_watch(); the second if the
# directory referenced by path was replaced with a file
# before the call to inotify.add_watch(). Therefore we
# simply ignore them.
pass
else:
raise
else:
wd_to_path_map[wd] = path
path_to_wd_map[path] = wd
def _check_event(self, wd, mask, name):
if mask & inotify.IN_Q_OVERFLOW:
self._force_notify = True
elif not mask & self._TRIGGER_MASK:
pass
elif mask & inotify.IN_ISDIR:
pass
elif wd in self._worktree_wd_to_path_map:
if self._use_check_ignore:
self._file_paths.add(
os.path.join(self._worktree_wd_to_path_map[wd],
core.decode(name)))
else:
self._force_notify = True
elif wd == self._git_dir_wd:
name = core.decode(name)
if name == 'HEAD' or name == 'index':
self._force_notify = True
elif (wd in self._git_dir_wd_to_path_map
and not core.decode(name).endswith('.lock')):
self._force_notify = True
def _handle_events(self):
for wd, mask, cookie, name in \
inotify.read_events(self._inotify_fd):
if not self._force_notify:
self._check_event(wd, mask, name)
def stop(self):
self._running = False
with self._lock:
if self._pipe_w is not None:
os.write(self._pipe_w, bchr(0))
self.wait()
if AVAILABLE == 'pywin32':
    class _Win32Watch(object):
        """Wraps one overlapped ReadDirectoryChangesW watch on a directory:
        owns the directory handle, the read buffer, and the completion
        event."""

        def __init__(self, path, flags):
            self.flags = flags

            self.handle = None
            self.event = None

            try:
                self.handle = win32file.CreateFileW(
                        path,
                        0x0001, # FILE_LIST_DIRECTORY
                        win32con.FILE_SHARE_READ | win32con.FILE_SHARE_WRITE,
                        None,
                        win32con.OPEN_EXISTING,
                        win32con.FILE_FLAG_BACKUP_SEMANTICS |
                        win32con.FILE_FLAG_OVERLAPPED,
                        None)

                self.buffer = win32file.AllocateReadBuffer(8192)
                self.event = win32event.CreateEvent(None, True, False, None)
                self.overlapped = pywintypes.OVERLAPPED()
                self.overlapped.hEvent = self.event
                self._start()
            # Bare except is deliberate here: release any handles acquired
            # so far, then re-raise whatever went wrong.
            except:
                self.close()
                raise

        def _start(self):
            # Queue the next asynchronous directory-change read.
            win32file.ReadDirectoryChangesW(self.handle, self.buffer, True,
                                            self.flags, self.overlapped)

        def read(self):
            """Return the (possibly empty) list of (action, path) changes
            collected so far, then re-arm the watch."""
            if win32event.WaitForSingleObject(self.event, 0) \
                    == win32event.WAIT_TIMEOUT:
                result = []
            else:
                nbytes = win32file.GetOverlappedResult(self.handle,
                                                       self.overlapped, False)
                result = win32file.FILE_NOTIFY_INFORMATION(self.buffer, nbytes)
                self._start()
            return result

        def close(self):
            # Cancel any outstanding overlapped I/O before closing handles.
            if self.handle is not None:
                win32file.CancelIo(self.handle)
                win32file.CloseHandle(self.handle)
            if self.event is not None:
                win32file.CloseHandle(self.event)
    class _Win32Thread(_BaseThread):
        """Windows backend: watches the worktree and the git dir with
        ReadDirectoryChangesW and coalesces results into files_changed
        signals."""

        _FLAGS = (win32con.FILE_NOTIFY_CHANGE_FILE_NAME |
                  win32con.FILE_NOTIFY_CHANGE_DIR_NAME |
                  win32con.FILE_NOTIFY_CHANGE_ATTRIBUTES |
                  win32con.FILE_NOTIFY_CHANGE_SIZE |
                  win32con.FILE_NOTIFY_CHANGE_LAST_WRITE |
                  win32con.FILE_NOTIFY_CHANGE_SECURITY)

        def __init__(self, monitor):
            _BaseThread.__init__(self, monitor)
            worktree = git.worktree()
            if worktree is not None:
                worktree = self._transform_path(core.abspath(worktree))
            self._worktree = worktree
            self._worktree_watch = None
            self._git_dir = self._transform_path(core.abspath(git.git_path()))
            self._git_dir_watch = None
            self._stop_event_lock = Lock()
            self._stop_event = None

        @staticmethod
        def _transform_path(path):
            # Normalize to forward slashes and lower case so that string
            # prefix comparisons against _git_dir work reliably.
            return path.replace('\\', '/').lower()

        def _read_watch(self, watch):
            # NOTE(review): appears unused -- run() goes through
            # _handle_results(), which calls watch.read() directly.
            # Candidate for removal; verify no external callers first.
            if win32event.WaitForSingleObject(watch.event, 0) \
                    == win32event.WAIT_TIMEOUT:
                nbytes = 0
            else:
                nbytes = win32file.GetOverlappedResult(watch.handle,
                                                       watch.overlapped, False)
            return win32file.FILE_NOTIFY_INFORMATION(watch.buffer, nbytes)

        def run(self):
            """Wait on the stop event and the two directory watches,
            notifying after the coalescing delay."""
            try:
                with self._stop_event_lock:
                    self._stop_event = win32event.CreateEvent(None, True,
                                                              False, None)
                events = [self._stop_event]
                if self._worktree is not None:
                    self._worktree_watch = _Win32Watch(self._worktree,
                                                       self._FLAGS)
                    events.append(self._worktree_watch.event)
                self._git_dir_watch = _Win32Watch(self._git_dir, self._FLAGS)
                events.append(self._git_dir_watch.event)

                self._log_enabled_message()

                while self._running:
                    if self._pending:
                        # Changes queued: wait only the coalescing delay.
                        timeout = self._NOTIFICATION_DELAY
                    else:
                        timeout = win32event.INFINITE
                    rc = win32event.WaitForMultipleObjects(events, False,
                                                           timeout)
                    if not self._running:
                        break
                    elif rc == win32event.WAIT_TIMEOUT:
                        self.notify()
                    else:
                        self._handle_results()
            finally:
                with self._stop_event_lock:
                    if self._stop_event is not None:
                        win32file.CloseHandle(self._stop_event)
                        self._stop_event = None
                if self._worktree_watch is not None:
                    self._worktree_watch.close()
                if self._git_dir_watch is not None:
                    self._git_dir_watch.close()

        def _handle_results(self):
            """Drain both watches, translating raw change records into
            either forced notifications or candidate file paths."""
            if self._worktree_watch is not None:
                for action, path in self._worktree_watch.read():
                    if not self._running:
                        break
                    if self._force_notify:
                        continue
                    path = self._worktree + '/' + self._transform_path(path)
                    # Ignore changes inside the git dir and to directories.
                    if (path != self._git_dir
                            and not path.startswith(self._git_dir + '/')
                            and not os.path.isdir(path)
                    ):
                        if self._use_check_ignore:
                            self._file_paths.add(path)
                        else:
                            self._force_notify = True
            for action, path in self._git_dir_watch.read():
                if not self._running:
                    break
                if self._force_notify:
                    continue
                path = self._transform_path(path)
                if path.endswith('.lock'):
                    continue
                # Only HEAD, the index, and refs changes are interesting.
                if (path == 'head'
                        or path == 'index'
                        or path.startswith('refs/')
                ):
                    self._force_notify = True

        def stop(self):
            """Request shutdown, signal the wait loop, and join."""
            self._running = False
            with self._stop_event_lock:
                if self._stop_event is not None:
                    win32event.SetEvent(self._stop_event)
            self.wait()
@memoize
def current():
    """Return the process-wide filesystem monitor (created on first call)."""
    return _create_instance()
def _create_instance():
    """Choose the monitoring backend for this platform/configuration and
    wrap it in a _Monitor (with no thread class when unavailable)."""
    thread_class = None
    cfg = gitcfg.current()
    if not cfg.get('cola.inotify', True):
        # Monitoring explicitly turned off in the git configuration.
        Interaction.log(N_('File system change monitoring: disabled because'
                           ' "cola.inotify" is false.\n'))
    elif AVAILABLE == 'inotify':
        thread_class = _InotifyThread
    elif AVAILABLE == 'pywin32':
        thread_class = _Win32Thread
    elif utils.is_win32():
        # On Windows without the pywin32 bindings installed.
        Interaction.log(N_('File system change monitoring: disabled because pywin32'
                           ' is not installed.\n'))
    elif utils.is_linux():
        # On Linux without working inotify support.
        Interaction.log(N_('File system change monitoring: disabled because libc'
                           ' does not support the inotify system calls.\n'))
    return _Monitor(thread_class)
| dirtycold/git-cola | cola/fsmonitor.py | Python | gpl-2.0 | 19,637 |
#!/usr/bin/python
import matplotlib.pyplot as plt, numpy as np, subprocess as sp, argparse
# Command line: --option_tag selects the Results<tag> directory,
# --path is the working-directory prefix.
parser=argparse.ArgumentParser()
parser.add_argument('--option_tag',nargs='*')
parser.add_argument('--path',nargs='*')
args=parser.parse_args()
option_tag=''.join(args.option_tag)
path_current=''.join(args.path)

folder = path_current+"Results%s/sensitivity_stuff/"%option_tag
# List the per-individual sensitivity files via `ls`.
p1 = sp.Popen(['ls','%s'%folder],stdout=sp.PIPE)
input_files = p1.communicate()[0].strip().split("\n")
#print input_files
max_UR = 10
# Summary table: first UR/TPM combination whose sensitivity drops below 90%.
out = open(folder.split("sensitivity_stuff")[0]+"TPM_stats_90_maxUR_%i.txt"%(max_UR),"w")
out.write("individual_ID\tUR\tTPM\t%_sensitivity\n")
TPM_UR = {}
# Tracks the global minimum sensitivity value, used as the colorbar floor.
small = 100.0
for x in range(0,len(input_files),1):
    target_UR = 0
    flag_UR = False
    flag_TPM = False
#    print input_files[x]
    # Individual ID is the filename prefix before the first underscore.
    ind = input_files[x].split("_")[0]
    fh = open(folder+input_files[x],"r")
    data = fh.readlines()
    fh.close()
    # First row is the UR header; first column of each row is a label.
    UR_list = data[0].strip().split("\t")
    del UR_list[0]
    del data[0]
    # Matrix indexed [TPM row, UR column]; np.random.rand is only used to
    # allocate the array -- every cell is overwritten below.
    TPM_UR[ind] = np.random.rand(len(data),len(UR_list))
    for row in range(0,len(data),1):
        data[row]=data[row].strip().split("\t")
        del data[row][0]
        for col in range(0,len(data[row]),1):
            TPM_UR[ind][row,col]=data[row][col]
            if float(data[row][col]) < small:
                small = float(data[row][col])
            # "overall"/"average" rows are aggregates, not individuals.
            if ind != "overall" and ind != "average":
                # First column within the UR limit that falls below 90%
                # fixes the target UR (the previous column).
                if not flag_UR and col < max_UR+1 and float(data[row][col]) < 90:
                    flag_UR = True
                    if col != max_UR and row > 0:
                        target_UR = max_UR-1
                    else:
                        target_UR = col-1
        # NOTE(review): writes the value from the PREVIOUS row
        # (data[row-1][target_UR]) while reporting this row's index --
        # verify this off-by-one is intentional.
        if ind != "overall" and ind != "average" and not flag_TPM and flag_UR and float(data[row][target_UR]) < 90:
            flag_TPM = True
            out.write(ind+"\t%i\t%i\t%s\n"%(target_UR+1,row,data[row-1][target_UR]))
out.close()
#print small
# Split the individuals from the trailing "average"/"overall" entries
# (sorted() puts those two last alphabetically for these IDs).
sorted_ID = sorted(TPM_UR.keys())
avg_ov = sorted_ID[-2:]
del sorted_ID[-2:]
#print avg_ov
#print sorted_ID
# Two figures: a 7x3 grid of individuals, then a 4x2 grid for the
# average/overall aggregates.
two_heatmap = [sorted_ID,avg_ov]
for loop in range(0,len(two_heatmap),1):
    count = 0
    fig = plt.figure()
    for each in two_heatmap[loop]:
        count += 1
        if loop == 0:
            ax1 = fig.add_subplot(7,3,count)
            size = ['%.2f',1.00]
        else:
            ax1 = fig.add_subplot(4,2,count)
            size = ['%.0f',1.02]
        heatmap = plt.pcolor(TPM_UR[each],vmin=int(small),vmax=100)
        # Annotate each cell with its sensitivity value.
        for y in range(TPM_UR[each].shape[0]):
            for x in range(TPM_UR[each].shape[1]):
                plt.text(x + 0.5, y + 0.5, size[0] % TPM_UR[each][y,x],
                         horizontalalignment='center',
                         verticalalignment='center',
                         )
        plt.colorbar(heatmap)
        plt.xlim(0,TPM_UR[each].shape[1])
        plt.ylim(0,TPM_UR[each].shape[0])
        ax1.set_title('%s'%each,fontsize=45,y=size[1])
        ax1.set_ylabel('TPM',fontsize = 30)
        ax1.set_xlabel('UR',fontsize = 30)
    if loop == 0:
        name = "individual"
        fig.set_size_inches(75, 75)
    else:
        name = "average_overall"
        fig.set_size_inches(25, 30)
    fig.savefig(folder.split("sensitivity_stuff")[0]+'heatmap_sensitivity_%s_analysis.pdf'%(name))
    plt.close(fig)
| JXing-Lab/ME-SCAN-SVA | modules/JW_heatmap.py | Python | gpl-3.0 | 2,930 |
# -*- coding: utf-8 -*-
#################################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012 Julius Network Solutions SARL <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#################################################################################
{
"name" : "Stock tracking add packs",
"version" : "1.0",
"author" : "Julius Network Solutions,Odoo Community Association (OCA)",
"description" : """Presentation:
This module add a wizard to fill in packaging.
This wizard is used to add or remove an object from a package.
Adding to the historical movements and parent objects
""",
"website" : "http://www.julius.fr",
"depends" : [
"stock_tracking_extended",
"stock_tracking_child",
"stock_tracking_add_remove",
],
"category" : "Warehouse Management",
"images" : [],
"demo" : [],
"data" : [
"wizard/add_pack_view.xml",
"wizard/remove_pack_view.xml",
"data/type.xml",
'stock_view.xml',
],
'test': [],
'installable': False,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| xpansa/stock-logistics-tracking | stock_tracking_add_remove_pack/__openerp__.py | Python | agpl-3.0 | 1,858 |
from aospy import Run
def _sm2_run(name, description, subdir):
    """Build one of the SM2.1 runs; they share the archive root, 5-year
    chunking, and the 0001-0080 simulation / 0021-0080 analysis windows."""
    return Run(
        name=name,
        description=description,
        data_in_direc='/archive/Yi.Ming/sm2.1_fixed/' + subdir + '/pp',
        data_in_dur=5,
        data_in_start_date='0001-01-01',
        data_in_end_date='0080-12-31',
        default_date_range=('0021-01-01', '0080-12-31'),
        idealized=False,
    )


def _am2_clim_run(name, description, data_in_direc, data_in_dur=17,
                  data_in_start_date='1982-01-01', **kwargs):
    """Build one of the AM2 fixed-SST climatology runs; they all end in
    1998 and are analyzed over 1983-1998 (the first year, where present,
    is discarded as spin-up)."""
    return Run(
        name=name,
        description=description,
        data_in_direc=data_in_direc,
        data_in_dur=data_in_dur,
        data_in_start_date=data_in_start_date,
        data_in_end_date='1998-12-31',
        default_date_range=('1983-01-01', '1998-12-31'),
        idealized=False,
        **kwargs
    )


# SM2.1 slab-ocean simulations with hemisphere-limited anthropogenic
# sulfate aerosol forcing.
am2_control = _sm2_run(
    'am2_control',
    'Preindustrial control simulation.',
    'SM2.1U_Control-1860_lm2_aie_rerun6.YIM',
)
am2_tropics = _sm2_run(
    'am2_tropics',
    'Anthropogenic sulfate aerosol forcing only in the'
    ' Northern Hemisphere tropics (EQ to 30N)',
    'SM2.1U_Control-1860_lm2_aie2_tropical_rerun6.YIM',
)
am2_extratropics = _sm2_run(
    'am2_extratropics',
    'Anthropogenic sulfate aerosol forcing only in the'
    ' Northern Hemisphere extratropics (30N to Pole)',
    'SM2.1U_Control-1860_lm2_aie2_extropical_rerun6.YIM',
)
am2_tropics_and_extratropics = _sm2_run(
    'am2_tropics+extratropics',
    'Anthropogenic sulfate aerosol forcing everywhere',
    'SM2.1U_Control-1860_lm2_aie2_rerun6.YIM',
)

# REYOI/HadISST Runs - First year is 1982 (where simulated); we throw
# that out as spinup and start analysis in 1983.
am2_HadISST_control = _am2_clim_run(
    'am2_HadISST_control',
    '1981-2000 HadISST climatological annual cycle of SSTs and sea '
    'ice repeated annually, with PD atmospheric composition.',
    '/archive/yim/siena_201203/m45_am2p14_1990/gfdl.ncrc2-intel-prod/pp',
    data_in_dur=16,
    data_in_start_date='1983-01-01',
)
am2_reyoi_control = _am2_clim_run(
    'am2_reyoi_control',
    'PI atmos and Reynolds OI climatological SSTs',
    '/archive/Spencer.Hill/am2/am2clim_reyoi/gfdl.ncrc2-default-prod/pp',
    data_in_dur=1,
    tags=['reyoi', 'cont'],
)
am2_reyoi_extratropics_full = _am2_clim_run(
    'am2_reyoi_extratropics_full',
    'Full SST anomaly pattern applied to REYOI fixed SST climatology.',
    '/archive/Spencer.Clark/am2/am2clim_reyoi_extratropics_full/'
    'gfdl.ncrc2-default-prod/pp',
)
am2_reyoi_extratropics_sp = _am2_clim_run(
    'am2_reyoi_extratropics_sp',
    'Spatial Pattern SST anomaly pattern applied to'
    ' REYOI fixed SST climatology.',
    '/archive/Spencer.Clark/am2/'
    'am2clim_reyoi_extratropics_sp/gfdl.ncrc2-default-prod/pp',
)
am2_reyoi_tropics_sp_SI = _am2_clim_run(
    'am2_reyoi_tropics_sp_SI',
    'Spatial Pattern SST anomaly pattern applied to REYOI fixed SST'
    ' climatology.',
    '/archive/Spencer.Clark/am2/'
    'am2clim_reyoi_tropics_sp_SI/gfdl.ncrc2-default-prod/pp',
)
am2_reyoi_tropics_full = _am2_clim_run(
    'am2_reyoi_tropics_full',
    'Full SST anomaly pattern applied to REYOI fixed SST climatology.',
    '/archive/Spencer.Clark/am2/'
    'am2clim_reyoi_tropics_full/gfdl.ncrc2-default-prod/pp',
)
am2_reyoi_extratropics_sp_SI = _am2_clim_run(
    'am2_reyoi_extratropics_sp_SI',
    'Spatial Pattern SST anomaly pattern applied to REYOI fixed'
    ' SST climatology. Fixed sea-ice.',
    '/archive/Spencer.Clark/am2/'
    'am2clim_reyoi_extratropics_sp_SI/'
    'gfdl.ncrc2-default-prod/pp',
)
am2_reyoi_extratropics_u = _am2_clim_run(
    'am2_reyoi_extratropics_u',
    'Uniform SST anomaly pattern applied to REYOI fixed SST climatology.',
    '/archive/Spencer.Clark/am2/'
    'am2clim_reyoi_extratropics_u/gfdl.ncrc2-default-prod/pp',
)
am2_reyoi_tropics_u = _am2_clim_run(
    'am2_reyoi_tropics_u',
    'Uniform SST anomaly pattern applied to REYOI fixed SST climatology.',
    '/archive/Spencer.Clark/am2/'
    'am2clim_reyoi_tropics_u/gfdl.ncrc2-default-prod/pp',
)
| spencerkclark/aospy-obj-lib | aospy_user/runs/cases.py | Python | gpl-3.0 | 6,191 |
# -*- coding: utf-8 -*-
###############################################################################
#
# RunInstances
# Launches the specified number of instances of an AMI for which you have permissions.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class RunInstances(Choreography):

    """
    Temboo Choreography wrapper for the Amazon EC2 RunInstances call,
    which launches the specified number of instances of an AMI.
    """

    def __init__(self, temboo_session):
        """
        Create a new instance of the RunInstances Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(RunInstances, self).__init__(temboo_session, '/Library/Amazon/EC2/RunInstances')

    def new_input_set(self):
        # Factory for the input-parameter container used by this Choreo.
        return RunInstancesInputSet()

    def _make_result_set(self, result, path):
        # Wrap the raw execution result in a Choreo-specific result set.
        return RunInstancesResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        # Wrap an in-flight execution handle for this Choreo.
        return RunInstancesChoreographyExecution(session, exec_id, path)
class RunInstancesInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the RunInstances
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AWSAccessKeyId(self, value):
"""
Set the value of the AWSAccessKeyId input for this Choreo. ((required, string) The Access Key ID provided by Amazon Web Services.)
"""
super(RunInstancesInputSet, self)._set_input('AWSAccessKeyId', value)
def set_AWSSecretKeyId(self, value):
"""
Set the value of the AWSSecretKeyId input for this Choreo. ((required, string) The Secret Key ID provided by Amazon Web Services.)
"""
super(RunInstancesInputSet, self)._set_input('AWSSecretKeyId', value)
def set_DeleteOnTermination(self, value):
"""
Set the value of the DeleteOnTermination input for this Choreo. ((optional, boolean) Sets whether the volume is deleted on instance termination. Defaults to "true". This is a Block Device Mapping parameter.)
"""
super(RunInstancesInputSet, self)._set_input('DeleteOnTermination', value)
def set_DeviceName(self, value):
"""
Set the value of the DeviceName input for this Choreo. ((optional, string) The device name exposed to the instance (i.e. /dev/sdh or xvdh). This is a Block Device Mapping parameter.)
"""
super(RunInstancesInputSet, self)._set_input('DeviceName', value)
def set_ImageId(self, value):
"""
Set the value of the ImageId input for this Choreo. ((required, string) The ID of the AMI.)
"""
super(RunInstancesInputSet, self)._set_input('ImageId', value)
def set_InstanceType(self, value):
"""
Set the value of the InstanceType input for this Choreo. ((optional, string) The instance type (i.e. t1.micro, m1.small, m1.medium, m1.large, m1.xlarge). Default is m1.small.)
"""
super(RunInstancesInputSet, self)._set_input('InstanceType', value)
def set_Iops(self, value):
"""
Set the value of the Iops input for this Choreo. ((optional, integer) The number of I/O operations per second (IOPS) that the volume supports. Valid range is 100 to 2000. This is a Block Device Mapping parameter.)
"""
super(RunInstancesInputSet, self)._set_input('Iops', value)
def set_KernelId(self, value):
"""
Set the value of the KernelId input for this Choreo. ((optional, string) The ID of the kernel with which to launch the instance.)
"""
super(RunInstancesInputSet, self)._set_input('KernelId', value)
def set_KeyName(self, value):
"""
Set the value of the KeyName input for this Choreo. ((optional, string) The name of the key pair to use.)
"""
super(RunInstancesInputSet, self)._set_input('KeyName', value)
def set_MaxCount(self, value):
"""
Set the value of the MaxCount input for this Choreo. ((required, integer) The maximum number of instances to launch. If the value is more than Amazon EC2 can launch, the largest possible number above MinCount will be launched instead.)
"""
super(RunInstancesInputSet, self)._set_input('MaxCount', value)
def set_MinCount(self, value):
"""
Set the value of the MinCount input for this Choreo. ((required, integer) The minimum number of instances to launch. If the value is more than Amazon EC2 can launch, no instances are launched at all.)
"""
super(RunInstancesInputSet, self)._set_input('MinCount', value)
def set_MonitoringEnabled(self, value):
"""
Set the value of the MonitoringEnabled input for this Choreo. ((optional, boolean) Enables monitoring for the instance. Defaults to false.)
"""
super(RunInstancesInputSet, self)._set_input('MonitoringEnabled', value)
def set_NoDevice(self, value):
"""
Set the value of the NoDevice input for this Choreo. ((optional, boolean) Suppresses a device mapping. This is a Block Device Mapping parameter.)
"""
super(RunInstancesInputSet, self)._set_input('NoDevice', value)
def set_PlacementAvailabilityZone(self, value):
    """Pass the PlacementAvailabilityZone Choreo input ((optional, string) the Availability Zone to launch the instance into)."""
    super(RunInstancesInputSet, self)._set_input('PlacementAvailabilityZone', value)
def set_PlacementGroupName(self, value):
    """Pass the PlacementGroupName Choreo input ((optional, string) name of an existing placement group to launch into, for cluster instances)."""
    super(RunInstancesInputSet, self)._set_input('PlacementGroupName', value)
def set_PlacementTenancy(self, value):
    """Pass the PlacementTenancy Choreo input ((optional, string) instance tenancy; "dedicated" runs on single-tenant hardware and requires a VPC)."""
    super(RunInstancesInputSet, self)._set_input('PlacementTenancy', value)
def set_RamdiskId(self, value):
    """Pass the RamdiskId Choreo input ((optional, string) ID of the RAM disk)."""
    super(RunInstancesInputSet, self)._set_input('RamdiskId', value)
def set_ResponseFormat(self, value):
    """Pass the ResponseFormat Choreo input ((optional, string) response format; "xml" (default) or "json")."""
    super(RunInstancesInputSet, self)._set_input('ResponseFormat', value)
def set_SecurityGroupId(self, value):
    """Pass the SecurityGroupId Choreo input ((optional, string) comma-separated list of up to 10 security group IDs)."""
    super(RunInstancesInputSet, self)._set_input('SecurityGroupId', value)
def set_SecurityGroup(self, value):
    """Pass the SecurityGroup Choreo input ((optional, string) comma-separated list of up to 10 security group names)."""
    super(RunInstancesInputSet, self)._set_input('SecurityGroup', value)
def set_ShutdownBehavior(self, value):
    """Pass the ShutdownBehavior Choreo input ((optional, string) whether the instance stops or terminates on instance-initiated shutdown; valid values: stop, terminate)."""
    super(RunInstancesInputSet, self)._set_input('ShutdownBehavior', value)
def set_SnapshotId(self, value):
    """Pass the SnapshotId Choreo input ((optional, string) ID of the snapshot; a Block Device Mapping parameter)."""
    super(RunInstancesInputSet, self)._set_input('SnapshotId', value)
def set_SubnetId(self, value):
    """Pass the SubnetId Choreo input ((optional, string) ID of the subnet to launch the instance into, e.g. subnet-dea63cb7)."""
    super(RunInstancesInputSet, self)._set_input('SubnetId', value)
def set_UserData(self, value):
    """Pass the UserData Choreo input ((optional, string) Base64-encoded MIME user data made available to the instance(s))."""
    super(RunInstancesInputSet, self)._set_input('UserData', value)
def set_UserRegion(self, value):
    """Pass the UserRegion Choreo input ((optional, string) AWS region of the EC2 endpoint to access; defaults to "us-east-1")."""
    super(RunInstancesInputSet, self)._set_input('UserRegion', value)
def set_VirtualName(self, value):
    """Pass the VirtualName Choreo input ((optional, string) name of the virtual device; a Block Device Mapping parameter)."""
    super(RunInstancesInputSet, self)._set_input('VirtualName', value)
def set_VolumeSize(self, value):
    """Pass the VolumeSize Choreo input ((optional, string) volume size in GiBs; required unless created from a snapshot, which fixes the size; a Block Device Mapping parameter)."""
    super(RunInstancesInputSet, self)._set_input('VolumeSize', value)
def set_VolumeType(self, value):
    """Pass the VolumeType Choreo input ((optional, string) volume type; valid values: standard (default), io1; a Block Device Mapping parameter)."""
    super(RunInstancesInputSet, self)._set_input('VolumeType', value)
class RunInstancesResultSet(ResultSet):
    """
    ResultSet tailored to the values returned by the RunInstances Choreo;
    used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, str):
        """Deserialize the given JSON string into Python objects."""
        return json.loads(str)

    def get_Response(self):
        """Return the "Response" output (the raw reply from Amazon), or None if absent."""
        return self._output.get('Response')
class RunInstancesChoreographyExecution(ChoreographyExecution):
    # Execution handle for the RunInstances Choreo.
    def _make_result_set(self, response, path):
        # Wrap the raw Choreo response in the RunInstances-specific result set.
        return RunInstancesResultSet(response, path)
| jordanemedlock/psychtruths | temboo/core/Library/Amazon/EC2/RunInstances.py | Python | apache-2.0 | 11,517 |
from __future__ import absolute_import
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
from .parcel_analysis import ParcelAnalysis, parcel_analysis
from nipy.testing import Tester
# Expose numpy-style entry points so callers can run this subpackage's
# test suite (e.g. nipy.algorithms.group.test()) or its benchmarks.
test = Tester().test
bench = Tester().bench
| alexis-roche/nipy | nipy/algorithms/group/__init__.py | Python | bsd-3-clause | 291 |
import os
import numpy as np
import MMTK
class Grid:
"""
Class to read and write alchemical grids.
Data is a dictionary with
spacing - the grid spacing, in Angstroms.
counts - the number of points in each dimension.
vals - the values.
All are numpy arrays.
"""
def __init__(self):
pass
def read(self, FN, multiplier=None):
"""
Reads a grid in dx or netcdf format
The multiplier affects the origin and spacing.
"""
if FN is None:
raise Exception('File is not defined')
elif FN.endswith('.dx') or FN.endswith('.dx.gz'):
data = self._read_dx(FN)
elif FN.endswith('.nc'):
data = self._read_nc(FN)
else:
raise Exception('File type not supported')
if multiplier is not None:
data['origin'] = multiplier*data['origin']
data['spacing'] = multiplier*data['spacing']
return data
def _read_dx(self, FN):
"""
Reads a grid in dx format
"""
if FN.endswith('.dx'):
F = open(FN,'r')
else:
import gzip
F = gzip.open(FN,'r')
# Read the header
line = F.readline()
while line.find('object')==-1:
line = F.readline()
header = {}
header['counts'] = [int(x) for x in line.split(' ')[-3:]]
for name in ['origin','d0','d1','d2']:
header[name] = [float(x) for x in F.readline().split(' ')[-3:]]
F.readline()
header['npts'] = int(F.readline().split(' ')[-3])
# Test to make sure the grid type is okay.
# These conditions are not absolultely essential,
# but they reduce the number of subtraction operations.
if not (header['d0'][1]==0 and header['d0'][2]==0 and
header['d1'][0]==0 and header['d1'][2]==0 and
header['d2'][0]==0 and header['d2'][1]==0):
raise Exception('Trilinear grid must be in original basis')
if not (header['d0'][0]>0 and header['d1'][1]>0 and header['d2'][2]>0):
raise Exception('Trilinear grid must have positive coordinates')
# Read the data
vals = np.ndarray(shape=header['npts'], dtype=float)
index = 0
while index<header['npts']:
line = F.readline()[:-1]
items = [float(item) for item in line.split()]
vals[index:index+len(items)] = items
index = index + len(items)
F.close()
data = {
'origin':np.array(header['origin']), \
'spacing':np.array([header['d0'][0],header['d1'][1],header['d2'][2]]), \
'counts':np.array(header['counts']), \
'vals':vals}
return data
def _read_nc(self, FN):
"""
Reads a grid in netcdf format
"""
from netCDF4 import Dataset
grid_nc = Dataset(FN,'r')
data = {}
for key in list(grid_nc.variables):
data[key] = np.array(grid_nc.variables[key][:][0][:])
grid_nc.close()
return data
def write(self, FN, data, multiplier=None):
"""
Writes a grid in dx or netcdf format.
The multiplier affects the origin and spacing.
"""
if multiplier is not None:
data_n = {'origin':multiplier*data['origin'],
'counts':data['counts'],
'spacing':multiplier*data['spacing'],
'vals':data['vals']}
else:
data_n = data
if FN.endswith('.nc'):
self._write_nc(FN, data_n)
elif FN.endswith('.dx') or FN.endswith('.dx.gz'):
self._write_dx(FN, data_n)
else:
raise Exception('File type not supported')
def _write_dx(self, FN, data):
"""
Writes a grid in dx format
"""
n_points = data['counts'][0]*data['counts'][1]*data['counts'][2]
if FN.endswith('.dx'):
F = open(FN,'w')
else:
import gzip
F = gzip.open(FN,'w')
F.write("""object 1 class gridpositions counts {0[0]} {0[1]} {0[2]}
origin {1[0]} {1[1]} {1[2]}
delta {2[0]} 0.0 0.0
delta 0.0 {2[1]} 0.0
delta 0.0 0.0 {2[2]}
object 2 class gridconnections counts {0[0]} {0[1]} {0[2]}
object 3 class array type double rank 0 items {3} data follows
""".format(data['counts'],data['origin'],data['spacing'],n_points))
for start_n in range(0,len(data['vals']),3):
F.write(' '.join(['%6e'%c for c in data['vals'][start_n:start_n+3]]) + '\n')
F.write('object 4 class field\n')
F.write('component "positions" value 1\n')
F.write('component "connections" value 2\n')
F.write('component "data" value 3\n')
F.close()
def _write_nc(self, FN, data):
"""
Writes a grid in netcdf format
"""
n_points = data['counts'][0]*data['counts'][1]*data['counts'][2]
from netCDF4 import Dataset
grid_nc = Dataset(FN,'w',format='NETCDF4')
grid_nc.createDimension('one', 1)
grid_nc.createDimension('n_cartesian', 3)
grid_nc.createDimension('n_points', n_points)
grid_nc.createVariable('origin','f8',('one','n_cartesian'))
grid_nc.createVariable('counts','i8',('one','n_cartesian'))
grid_nc.createVariable('spacing','f8',('one','n_cartesian'))
grid_nc.createVariable('vals','f8',('one','n_points'), zlib=True)
for key in data.keys():
grid_nc.variables[key][:] = data[key]
grid_nc.close()
def truncate(self, in_FN, out_FN, counts, multiplier=None):
"""
Truncates the grid at the origin and
with a limited number of counts per dimension
multiplier is for the values, not the grid scaling
"""
data_o = self.read(in_FN)
nyz_o = data_o['counts'][1]*data_o['counts'][2]
nz_o = data_o['counts'][2]
min_i = int(-data_o['origin'][0]/data_o['spacing'][0])
min_j = int(-data_o['origin'][1]/data_o['spacing'][1])
min_k = int(-data_o['origin'][2]/data_o['spacing'][2])
# vals = np.ndarray(shape=tuple(counts), dtype=float)
# for i in range(counts[0]):
# for j in range(counts[1]):
# for k in range(counts[2]):
# vals[i,j,k] = data_o['vals'][(i+min_i)*nyz_o + (j+min_j)*nz_o + (k+min_k)]
vals = np.array(
[[[data_o['vals'][(i+min_i)*nyz_o + (j+min_j)*nz_o + (k+min_k)]
for k in range(counts[2])]
for j in range(counts[1])]
for i in range(counts[0])])
if multiplier is not None:
vals = vals*multiplier
data_n = {'origin':np.array([0., 0., 0.]), \
'counts':counts, 'spacing':data_o['spacing'], 'vals':vals.flatten()}
self.write(out_FN,data_n)
class crd:
"""
Class to read and write AMBER coordinate/restart and trajectory files.
"""
def __init__(self):
pass
def read(self, FN, natoms=None, return_title=False, \
multiplier=None, trajectory=False):
"""
Reads an AMBER coordinate/restart or trajectory file.
If natoms is not none, then the coordinates will be split
into a list of natoms X 3 arrays.
The coordinates will be multiplied by multiplier.
The default of 0.1 converts Angstroms into nanometers.
"""
if not os.path.isfile(FN):
raise Exception('Coordinate file %s does not exist!'%FN)
if FN.endswith('.gz'):
import gzip
F = gzip.open(FN, 'r')
else:
F = open(FN,'r')
dat = F.read().strip().split('\n')
F.close()
title = dat.pop(0) # Title
if len(dat[0].split())>1:
# VMD format (does not specify number of atoms)
crd = []
for line in dat:
crd = crd + [float(x) for x in line.split()]
crd = np.resize(crd,(len(crd)/3,3))
else:
# AMBER format
file_natoms = int(dat.pop(0)) # Number of atoms
if (natoms is not None) and (file_natoms!=natoms):
print "Incorrect number of atoms in crd file"
return np.array([])
if trajectory:
w = 8 # For mdcrd
else:
w = 12 # For inpcrd
crd = []
for line in dat:
crd = crd + [float(line[x:x+w]) for x in range(0,len(line),w)]
crd = np.resize(crd,(len(crd)/3,3))
if multiplier is not None:
crd = multiplier*crd
if (natoms is not None):
crd = np.vsplit(crd,crd.shape[0]/natoms)
print " read %d configurations from %s"%(len(crd), FN)
if return_title:
return (crd, title)
else:
return crd
def write(self, FN, crd, title='', append=False, \
multiplier=None, trajectory=False):
"""
Writes an AMBER coordinate/restart or trajectory file
"""
if (append and os.path.isfile(FN)):
if FN.endswith('.gz'):
import gzip
F = gzip.open(FN,'a')
else:
F = open(FN,'a')
else:
if os.path.isfile(FN):
os.rename(FN,FN+'.BAK')
if FN.endswith('.gz'):
import gzip
F = gzip.open(FN,'w')
else:
F = open(FN,'w')
# Write the header
F.write(title+'\n') # Title
if not trajectory:
F.write('%d\n'%crd.shape[0])
if not trajectory:
flattened = np.vstack(crd).flatten()
if multiplier is not None:
flattened = multiplier*flattened
for n in range(0,len(flattened),6):
F.write(''.join(['%12.7f'%val for val in flattened[n:n+6]]) + '\n')
else:
for c in crd:
flattened = c.flatten()
if multiplier is not None:
flattened = multiplier*flattened
for n in range(0,len(flattened),10):
F.write(''.join(['%8.3f'%val for val in flattened[n:n+10]]) + '\n')
F.close()
class dock6_mol2:
    """
    Class to read output from UCSF DOCK 6
    """
    def __init__(self):
        pass
    def read(self, FN, reorder=None):
        # Returns (crds, E): a list of coordinate arrays and a dict mapping
        # energy-term labels to per-model value lists. Coordinates are divided
        # by 10 below (presumably Angstrom -> nm -- confirm against callers).
        crds = []
        E = {}
        # Missing or undefined input is tolerated: caller gets empty results.
        if (FN is None) or (not os.path.isfile(FN)):
            return (crds,E)
        # Specifically to read output from UCSF dock6
        if FN.endswith('.mol2'):
            mol2F = open(FN,'r')
        elif FN.endswith('.mol2.gz'):
            import gzip
            mol2F = gzip.open(FN,'r')
        else:
            raise Exception('Unknown file type')
        # dock6 separates poses with '########## Name:' banner lines; the text
        # before the first banner is discarded by the pop(0) below.
        models = mol2F.read().strip().split('########## Name:')
        mol2F.close()
        models.pop(0)
        if len(models)>0:
            # Seed E with one empty list per '##########'-prefixed label found
            # in the first model's header (chars 11..':' hold the label name).
            for line in models[0].split('\n'):
                if line.startswith('##########'):
                    label = line[11:line.find(':')].strip()
                    E[label] = []
            for model in models:
                fields = model.split('<TRIPOS>')
                # fields[2] is the ATOM section; columns 2:5 are x, y, z.
                crd = np.array([l.split()[2:5] for l in fields[2].split('\n')[1:-1]],
                    dtype=float)/10.
                # Optionally permute atoms into the caller's preferred order.
                if reorder is not None:
                    crd = crd[reorder,:]
                # Harvest the energy terms from the model header (fields[0]);
                # the last whitespace-separated token on the line is the value.
                for line in fields[0].split('\n'):
                    if line.startswith('##########'):
                        label = line[11:line.find(':')].strip()
                        E[label].append(float(line.split()[-1]))
                crds.append(crd)
        return (crds,E)
class dcd:
"""
Class to write DCD files
"""
def __init__(self, molecule, ligand_atom_order=None, \
receptorConf=None, ligand_first_atom=0):
self.molecule = molecule
self.receptorConf = receptorConf
self.ligand_first_atom = ligand_first_atom
if ligand_atom_order is None:
self.ligand_atom_order = range(len(self.molecule.atoms))
else:
self.ligand_atom_order = ligand_atom_order
pass
def write(self, FN, confs,
includeLigand=True, includeReceptor=False,
factor=1.0/MMTK.Units.Ang,
delta_t=0.1):
"""
Writes a DCD file for a trajectory.
If includeReceptor==True, the receptor coordinates are included.
"""
import MMTK_DCD # @UnresolvedImport
from Scientific import N
if not isinstance(confs,list):
confs = [confs]
if includeReceptor and (self.receptorConf is None):
raise Exception("Missing receptor configuration")
n_atoms = 0
if includeReceptor:
receptor_x0 = factor*self.receptorConf[:self.ligand_first_atom,0]
receptor_y0 = factor*self.receptorConf[:self.ligand_first_atom,1]
receptor_z0 = factor*self.receptorConf[:self.ligand_first_atom,2]
receptor_x1 = factor*self.receptorConf[self.ligand_first_atom:,0]
receptor_y1 = factor*self.receptorConf[self.ligand_first_atom:,1]
receptor_z1 = factor*self.receptorConf[self.ligand_first_atom:,2]
n_atoms += self.receptorConf.shape[0]
if includeLigand:
n_atoms += len(self.molecule.atoms)
n_snaps = len(confs)
fd = MMTK_DCD.writeOpenDCD(FN, n_atoms, n_snaps, 1, 1, delta_t)
if includeReceptor and includeLigand:
for array in confs:
array = factor*array
x = N.concatenate((receptor_x0,N.take(array[:,0],self.ligand_atom_order),receptor_x1)).astype(N.Float16)
y = N.concatenate((receptor_y0,N.take(array[:,1],self.ligand_atom_order),receptor_y1)).astype(N.Float16)
z = N.concatenate((receptor_z0,N.take(array[:,2],self.ligand_atom_order),receptor_z1)).astype(N.Float16)
MMTK_DCD.writeDCDStep(fd, x, y, z)
MMTK_DCD.writeCloseDCD(fd)
elif includeLigand:
for array in confs:
array = factor*array
x = N.take(array[:,0], self.ligand_atom_order).astype(N.Float16)
y = N.take(array[:,1], self.ligand_atom_order).astype(N.Float16)
z = N.take(array[:,2], self.ligand_atom_order).astype(N.Float16)
MMTK_DCD.writeDCDStep(fd, x, y, z)
MMTK_DCD.writeCloseDCD(fd)
else:
x = N.concatenate((receptor_x0,receptor_x1)).astype(N.Float16)
y = N.concatenate((receptor_y0,receptor_y1)).astype(N.Float16)
z = N.concatenate((receptor_z0,receptor_z1)).astype(N.Float16)
MMTK_DCD.writeDCDStep(fd, x, y, z)
MMTK_DCD.writeCloseDCD(fd)
class prmtop:
    """
    Class to read AMBER prmtop files
    """
    def __init__(self):
        pass
    def read(self, FN, varnames=['RESIDUE_LABEL','RESIDUE_POINTER']):
        """
        Reads an AMBER prmtop file, returning a dictionary
        """
        # NOTE(review): the mutable default for `varnames` is shared across
        # calls; it is only read here, so this is safe but fragile.
        if not os.path.isfile(FN):
            raise Exception('prmtop file %s does not exist!'%FN)
        if FN.endswith('.gz'):
            import gzip
            F = gzip.open(FN, 'r')
        else:
            F = open(FN,'r')
        # prmtop sections are delimited by '%FLAG <NAME>' records.
        data = F.read().split('%FLAG ')
        F.close()
        prmtop = {}
        for record in data:
            # The section name is the remainder of the %FLAG line.
            name = record[:record.find('\n')].strip()
            # Only parse the sections the caller asked for.
            if name in varnames:
                prmtop[name] = self._load_record(record)
        return prmtop
    def _load_record(self, record):
        # Parse one %FLAG record according to its Fortran-style %FORMAT line,
        # e.g. %FORMAT(20a4) (text), %FORMAT(10I8) (int), %FORMAT(5E16.8) (float).
        items = []
        lines = record.split('\n')
        lines.pop(0) # Name
        # Strip the leading '%FORMAT(' and trailing ')' to get e.g. '20a4'.
        FORMAT = lines.pop(0).strip()[8:-1] # Format
        if FORMAT.find('a')>-1: # Text
            # Fixed-width text fields; w is the field width after 'a'.
            w = int(FORMAT[FORMAT.find('a')+1:])
            for line in lines:
                items = items + [line[x:x+w] for x in range(0,len(line),w)]
            return np.array(items)
        elif FORMAT.find('I')>-1: # Integer
            w = int(FORMAT[FORMAT.find('I')+1:])
            for line in lines:
                items = items + [int(line[x:x+w]) for x in range(0,len(line),w)]
            return np.array(items, dtype=int)
        elif FORMAT.find('E')>-1: # Scientific
            # Field width runs from 'E' up to the '.' (e.g. 16 in E16.8).
            w = int(FORMAT[FORMAT.find('E')+1:FORMAT.find('.')])
            for line in lines:
                items = items + [float(line[x:x+w]) for x in range(0,len(line),w)]
            return np.array(items, dtype=float)
| luizcieslak/AlGDock | AlGDock/IO.py | Python | mit | 14,740 |
import logging
from collections import defaultdict
from itertools import count
from claripy.utils.orderedset import OrderedSet
from ...sim_variable import SimStackVariable, SimMemoryVariable, SimRegisterVariable, SimMemoryVariablePhi, \
SimStackVariablePhi, SimRegisterVariablePhi
from ...keyed_region import KeyedRegion
from .variable_access import VariableAccess
from ..plugin import KnowledgeBasePlugin
l = logging.getLogger("angr.knowledge.variable_manager")
class VariableType(object):
    # Integer tags distinguishing where a variable lives.
    REGISTER = 0
    MEMORY = 1
class LiveVariables(object):
    """
    A collection of live variables at a program point.
    """
    def __init__(self, register_region, stack_region):
        # Variables currently live in registers (presumably a KeyedRegion, as
        # built by VariableManagerInternal -- confirm at call sites).
        self.register_region = register_region
        # Variables currently live on the stack.
        self.stack_region = stack_region
class VariableManagerInternal(object):
"""
Manage variables for a function. It is meant to be used internally by VariableManager.
"""
def __init__(self, manager, func_addr=None):
self.manager = manager
self.func_addr = func_addr
self._variables = OrderedSet() # all variables that are added to any region
self._stack_region = KeyedRegion()
self._register_region = KeyedRegion()
self._live_variables = { } # a mapping between addresses of program points and live variable collections
self._variable_accesses = defaultdict(set)
self._insn_to_variable = defaultdict(set)
self._block_to_variable = defaultdict(set)
self._stmt_to_variable = defaultdict(set)
self._variable_counters = {
'register': count(),
'stack': count(),
'argument': count(),
'phi': count(),
}
#
# Public methods
#
def next_variable_ident(self, sort):
if sort not in self._variable_counters:
raise ValueError('Unsupported variable sort %s' % sort)
if sort == 'register':
prefix = "r"
elif sort == 'stack':
prefix = "s"
elif sort == 'argument':
prefix = 'arg'
else:
prefix = "m"
return "i%s_%d" % (prefix, self._variable_counters[sort].next())
def add_variable(self, sort, start, variable):
if sort == 'stack':
self._stack_region.add_variable(start, variable)
elif sort == 'register':
self._register_region.add_variable(start, variable)
else:
raise ValueError('Unsupported sort %s in add_variable().' % sort)
def set_variable(self, sort, start, variable):
if sort == 'stack':
self._stack_region.set_variable(start, variable)
elif sort == 'register':
self._register_region.set_variable(start, variable)
else:
raise ValueError('Unsupported sort %s in add_variable().' % sort)
def write_to(self, variable, offset, location, overwrite=False):
self._record_variable_access('write', variable, offset, location, overwrite=overwrite)
def read_from(self, variable, offset, location, overwrite=False):
self._record_variable_access('read', variable, offset, location, overwrite=overwrite)
def reference_at(self, variable, offset, location, overwrite=False):
self._record_variable_access('reference', variable, offset, location, overwrite=overwrite)
def _record_variable_access(self, sort, variable, offset, location, overwrite=False):
self._variables.add(variable)
if overwrite:
self._variable_accesses[variable] = {VariableAccess(variable, sort, location)}
self._insn_to_variable[location.ins_addr] = {(variable, offset)}
self._block_to_variable[location.block_addr] = {(variable, offset)}
self._stmt_to_variable[(location.block_addr, location.stmt_idx)] = {(variable, offset)}
else:
self._variable_accesses[variable].add(VariableAccess(variable, sort, location))
self._insn_to_variable[location.ins_addr].add((variable, offset))
self._block_to_variable[location.block_addr].add((variable, offset))
self._stmt_to_variable[(location.block_addr, location.stmt_idx)].add((variable, offset))
def make_phi_node(self, *variables):
# unpack phi nodes
existing_phi = [ ]
unpacked = set()
for var in variables:
if isinstance(var, (SimRegisterVariablePhi, SimStackVariablePhi, SimMemoryVariablePhi)):
unpacked |= var.variables
existing_phi.append(var)
else:
unpacked.add(var)
# optimization: if a phi node already contains all of the unpacked variables, just return that phi node
for phi_node in existing_phi:
if phi_node.variables.issuperset(unpacked):
return phi_node
variables = unpacked
repre = next(iter(variables))
repre_type = type(repre)
if repre_type is SimRegisterVariable:
cls = SimRegisterVariablePhi
ident_sort = 'register'
elif repre_type is SimMemoryVariable:
cls = SimMemoryVariablePhi
ident_sort = 'memory'
elif repre_type is SimStackVariable:
cls = SimStackVariablePhi
ident_sort = 'stack'
else:
raise TypeError('make_phi_node(): Unsupported variable type "%s".' % type(repre))
a = cls(ident=self.next_variable_ident(ident_sort),
region=self.func_addr,
variables=variables,
)
return a
def set_live_variables(self, addr, register_region, stack_region):
lv = LiveVariables(register_region, stack_region)
self._live_variables[addr] = lv
def find_variables_by_insn(self, ins_addr, sort):
if ins_addr not in self._insn_to_variable:
return None
if sort == VariableType.MEMORY or sort == 'memory':
vars_and_offset = [(var, offset) for var, offset in self._insn_to_variable[ins_addr]
if isinstance(var, (SimStackVariable, SimMemoryVariable))]
elif sort == VariableType.REGISTER or sort == 'register':
vars_and_offset = [(var, offset) for var, offset in self._insn_to_variable[ins_addr]
if isinstance(var, SimRegisterVariable)]
else:
l.error('find_variable_by_insn(): Unsupported variable sort "%s".', sort)
return [ ]
return vars_and_offset
def find_variable_by_stmt(self, block_addr, stmt_idx, sort):
return next(iter(self.find_variables_by_stmt(block_addr, stmt_idx, sort)), None)
def find_variables_by_stmt(self, block_addr, stmt_idx, sort):
key = block_addr, stmt_idx
if key not in self._stmt_to_variable:
return [ ]
variables = self._stmt_to_variable[key]
if not variables:
return [ ]
if sort == 'memory':
var_and_offsets = list((var, offset) for var, offset in self._stmt_to_variable[key]
if isinstance(var, (SimStackVariable, SimMemoryVariable)))
elif sort == 'register':
var_and_offsets = list((var, offset) for var, offset in self._stmt_to_variable[key]
if isinstance(var, SimRegisterVariable))
else:
l.error('find_variables_by_stmt(): Unsupported variable sort "%s".', sort)
return [ ]
return var_and_offsets
def get_variable_accesses(self, variable, same_name=False):
if not same_name:
if variable in self._variable_accesses:
return self._variable_accesses[variable]
return [ ]
# find all variables with the same variable name
vars_list = [ ]
for var in self._variable_accesses.keys():
if variable.name == var.name:
vars_list.append(var)
accesses = [ ]
for var in vars_list:
accesses.extend(self.get_variable_accesses(var))
return accesses
def get_variables(self, sort=None, collapse_same_ident=False):
"""
Get a list of variables.
:param str or None sort: Sort of the variable to get.
:param collapse_same_ident: Whether variables of the same identifier should be collapsed or not.
:return: A list of variables.
:rtype: list
"""
variables = [ ]
if collapse_same_ident:
raise NotImplementedError()
for var in self._variables:
if sort == 'stack' and not isinstance(var, SimStackVariable):
continue
if sort == 'reg' and not isinstance(var, SimRegisterVariable):
continue
variables.append(var)
return variables
def input_variables(self):
"""
Get all variables that have never been written to.
:return: A list of variables that are never written to.
"""
def has_write_access(accesses):
return any(acc for acc in accesses if acc.access_type == 'write')
def has_read_access(accesses):
return any(acc for acc in accesses if acc.access_type == 'read')
input_variables = [ ]
for variable, accesses in self._variable_accesses.iteritems():
if not has_write_access(accesses) and has_read_access(accesses):
input_variables.append(variable)
return input_variables
def assign_variable_names(self):
"""
Assign default names to all variables.
:return: None
"""
for var in self._variables:
if isinstance(var, SimStackVariable):
if var.name is not None:
continue
if var.ident.startswith('iarg'):
var.name = 'arg_%x' % var.offset
else:
var.name = 's_%x' % (-var.offset)
# var.name = var.ident
elif isinstance(var, SimRegisterVariable):
if var.name is not None:
continue
var.name = var.ident
class VariableManager(KnowledgeBasePlugin):
    """
    Manage variables.
    """
    def __init__(self, kb):
        super(VariableManager, self).__init__()
        self._kb = kb
        # The 'global' region has its own manager; per-function managers are
        # created lazily by get_function_manager().
        self.global_manager = VariableManagerInternal(self)
        self.function_managers = { }
    def __getitem__(self, key):
        """
        Get the VariableManagerInternal object for a function or a region.
        :param str or int key: Key of the region. "global" for the global region, or a function address for the
                               function.
        :return:               The VariableManagerInternal object.
        :rtype:                VariableManagerInternal
        """
        if key == 'global': # pylint:disable=no-else-return
            return self.global_manager
        else:
            # key refers to a function address
            return self.get_function_manager(key)
    def get_function_manager(self, func_addr):
        # Accepts int or long (this module targets Python 2).
        if not isinstance(func_addr, (int, long)):
            raise TypeError('Argument "func_addr" must be an int.')
        # Lazily create one internal manager per function address.
        if func_addr not in self.function_managers:
            self.function_managers[func_addr] = VariableManagerInternal(self, func_addr=func_addr)
        return self.function_managers[func_addr]
    def initialize_variable_names(self):
        # Assign default names in every region: global plus each function.
        self.global_manager.assign_variable_names()
        for manager in self.function_managers.itervalues():
            manager.assign_variable_names()
    def get_variable_accesses(self, variable, same_name=False):
        """
        Get a list of all references to the given variable.
        :param SimVariable variable: The variable.
        :param bool same_name:       Whether to include all variables with the same variable name, or just
                                     based on the variable identifier.
        :return:                     All references to the variable.
        :rtype:                      list
        """
        # Dispatch on the variable's region; unknown regions yield no accesses
        # (logged as a warning rather than raised).
        if variable.region == 'global':
            return self.global_manager.get_variable_accesses(variable, same_name=same_name)
        elif variable.region in self.function_managers:
            return self.function_managers[variable.region].get_variable_accesses(variable, same_name=same_name)
        l.warning('get_variable_accesses(): Region %s is not found.', variable.region)
        return [ ]
    def copy(self):
        raise NotImplementedError
KnowledgeBasePlugin.register_default('variables', VariableManager)
| chubbymaggie/angr | angr/knowledge_plugins/variables/variable_manager.py | Python | bsd-2-clause | 12,737 |
#!/usr/bin/env python
# Tests check_format.py. This must be run in a context where the clang
# version and settings are compatible with the one in the Envoy
# docker. Normally this is run via check_format_test.sh, which
# executes it in under docker.
from __future__ import print_function
import argparse
import logging
import os
import shutil
import subprocess
import sys
os.putenv("BUILDIFIER_BIN", "/usr/local/bin/buildifier")
# This script sits next to check_format.py and its testdata/ directory.
tools = os.path.dirname(os.path.realpath(__file__))
# Scratch directory for fixable copies (Bazel supplies TEST_TMPDIR).
tmp = os.path.join(os.getenv('TEST_TMPDIR', "/tmp"), "check_format_test")
src = os.path.join(tools, 'testdata', 'check_format')
# Invoke check_format.py with the same interpreter that runs this test.
check_format = sys.executable + " " + os.path.join(tools, 'check_format.py')
errors = 0
# Echoes and runs an OS command, returning exit status and the captured
# stdout+stderr as a string array.
def runCommand(command):
    """Run `command` through the shell.

    Returns (status, stdout): the exit status (0 on success) and the combined
    stdout+stderr split into a list of lines (empty list when there is no
    output).
    """
    status = 0
    try:
        out = subprocess.check_output(command, shell=True, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        status = e.returncode
        out = e.output
    # check_output returns bytes on Python 3; normalize to text so the
    # line handling below works on both Python 2 and Python 3.
    if not isinstance(out, str):
        out = out.decode('utf-8', 'replace')
    # splitlines() of the stripped text yields [] for empty output and no
    # trailing empty entry; the original inconsistently used split("\n") on
    # success and splitlines() on failure.
    stdout = out.strip().splitlines()
    logging.info("%s", command)
    return status, stdout
# Runs the 'check_format' operation on the specified file and returns the
# command that was run, its status code, and its captured stdout.
def runCheckFormat(operation, filename):
    command = " ".join([check_format, operation, filename])
    status, stdout = runCommand(command)
    return (command, status, stdout)
def getInputFile(filename):
    """Copy `filename` from the testdata dir into the cwd (creating any
    intermediate directories) and return its relative name."""
    infile = os.path.join(src, filename)
    directory = os.path.dirname(filename)
    if directory and not os.path.isdir(directory):
        os.makedirs(directory)
    shutil.copyfile(infile, filename)
    return filename
# Attempts to fix a file, returning the command, input file name, output
# filename, the error status code, and captured stdout as an array of lines.
def fixFileHelper(filename):
    """Run check_format 'fix' on a fresh copy of `filename`.

    Returns (command, infile, outfile, status, stdout). The copy/mkdir work is
    delegated to getInputFile(); the previous version duplicated that logic
    here, needlessly copying the file twice per call.
    """
    infile = os.path.join(src, filename)
    command, status, stdout = runCheckFormat("fix", getInputFile(filename))
    return (command, infile, filename, status, stdout)
# Attempts to fix a file, returning the status code and the generated output.
# If the fix was successful, the diff is returned as a string-array. If the file
# was not fixable, the error-messages are returned as a string-array.
def fixFileExpectingSuccess(file):
    def fail(lines):
        print("FAILED:")
        emitStdoutAsError(lines)
        return 1

    command, infile, outfile, status, stdout = fixFileHelper(file)
    if status != 0:
        return fail(stdout)
    status, stdout = runCommand('diff ' + outfile + ' ' + infile + '.gold')
    if status != 0:
        return fail(stdout)
    return 0
def fixFileExpectingNoChange(file):
    """Fix `file` and verify the fixer left it byte-identical; 0 on success."""
    _, infile, outfile, status, _ = fixFileHelper(file)
    if status != 0:
        return 1
    diff_status, _ = runCommand('diff ' + outfile + ' ' + infile)
    if diff_status == 0:
        return 0
    logging.error(file + ': expected file to remain unchanged')
    return 1
def emitStdoutAsError(stdout):
    # Re-emit captured subprocess output at ERROR level so failures show context.
    logging.error("\n".join(stdout))
def expectError(status, stdout, expected_substring):
    """Return 0 iff the command failed AND `expected_substring` appears in its output."""
    if status == 0:
        logging.error("Expected failure `%s`, but succeeded" % expected_substring)
        return 1
    if any(expected_substring in line for line in stdout):
        return 0
    logging.error("Could not find '%s' in:\n" % expected_substring)
    emitStdoutAsError(stdout)
    return 1
def fixFileExpectingFailure(filename, expected_substring):
    """Fix `filename`, expecting the fixer itself to report `expected_substring`."""
    _, _, _, status, stdout = fixFileHelper(filename)
    return expectError(status, stdout, expected_substring)
def checkFileExpectingError(filename, expected_substring):
    """Run 'check' mode on `filename`, expecting it to flag `expected_substring`."""
    _, status, stdout = runCheckFormat("check", getInputFile(filename))
    return expectError(status, stdout, expected_substring)
def checkAndFixError(filename, expected_substring):
    """Check that `filename` reports `expected_substring`, then verify it can be auto-fixed.

    Returns the total number of failures from both phases.
    """
    return (checkFileExpectingError(filename, expected_substring)
            + fixFileExpectingSuccess(filename))
def checkToolNotFoundError():
    """Verify the error emitted when the external formatting tools are missing.

    Temporarily strips the developer's tool directories from PATH. The restore
    is now done in a ``finally`` block so PATH is put back even if the check
    raises, instead of leaking a broken environment into later checks.
    """
    oldPath = os.environ["PATH"]
    os.environ["PATH"] = "/sbin:/usr/sbin"
    clang_format = os.getenv("CLANG_FORMAT", "clang-format-8")
    try:
        errors = checkFileExpectingError("no_namespace_envoy.cc",
                                         "Command %s not found." % clang_format)
    finally:
        # Always restore PATH so the remaining checks see the original environment.
        os.environ["PATH"] = oldPath
    return errors
def checkUnfixableError(filename, expected_substring):
    """Verify `filename` is flagged with `expected_substring` and cannot be auto-fixed.

    Returns the total number of failures from both phases.
    """
    total = checkFileExpectingError(filename, expected_substring)
    total += fixFileExpectingFailure(filename, expected_substring)
    return total
def checkFileExpectingOK(filename):
    """Verify `filename` passes the check and is left unchanged by the fixer.

    Returns 0 on success; any non-zero value counts the failures.
    """
    _, status, output = runCheckFormat("check", getInputFile(filename))
    if status != 0:
        logging.error("Expected %s to have no errors; status=%d, output:\n" % (filename, status))
        emitStdoutAsError(output)
    return status + fixFileExpectingNoChange(filename)
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='tester for check_format.py.')
    parser.add_argument('--log', choices=['INFO', 'WARN', 'ERROR'], default='INFO')
    args = parser.parse_args()
    logging.basicConfig(format='%(message)s', level=args.log)
    errors = 0
    # Now create a temp directory to copy the input files, so we can fix them
    # without actually fixing our testdata. This requires chdiring to the temp
    # directory, so it's annoying to comingle check-tests and fix-tests.
    shutil.rmtree(tmp, True)
    os.makedirs(tmp)
    os.chdir(tmp)
    # The following error is the error about unavailability of external tools.
    errors += checkToolNotFoundError()
    # The following errors can be detected but not fixed automatically.
    # NOTE(review): the no_namespace_envoy.cc and shared_mutex.cc checks below
    # each appear twice; the duplicates look unintentional but are kept to
    # preserve the existing behavior and error counts.
    errors += checkUnfixableError("no_namespace_envoy.cc",
                                  "Unable to find Envoy namespace or NOLINT(namespace-envoy)")
    errors += checkUnfixableError("mutex.cc", "Don't use <mutex> or <condition_variable*>")
    errors += checkUnfixableError("condition_variable.cc",
                                  "Don't use <mutex> or <condition_variable*>")
    errors += checkUnfixableError("condition_variable_any.cc",
                                  "Don't use <mutex> or <condition_variable*>")
    errors += checkUnfixableError("shared_mutex.cc", "shared_mutex")
    errors += checkUnfixableError("shared_mutex.cc", "shared_mutex")
    real_time_inject_error = (
        "Don't reference real-world time sources from production code; use injection")
    errors += checkUnfixableError("real_time_source.cc", real_time_inject_error)
    errors += checkUnfixableError("real_time_system.cc", real_time_inject_error)
    errors += checkUnfixableError("system_clock.cc", real_time_inject_error)
    errors += checkUnfixableError("steady_clock.cc", real_time_inject_error)
    errors += checkUnfixableError("condvar_wait_for.cc", real_time_inject_error)
    errors += checkUnfixableError("sleep.cc", real_time_inject_error)
    errors += checkUnfixableError("std_atomic_free_functions.cc", "std::atomic_*")
    errors += checkUnfixableError("std_get_time.cc", "std::get_time")
    errors += checkUnfixableError("no_namespace_envoy.cc",
                                  "Unable to find Envoy namespace or NOLINT(namespace-envoy)")
    errors += checkUnfixableError("bazel_tools.BUILD", "unexpected @bazel_tools reference")
    errors += checkUnfixableError("proto.BUILD", "unexpected direct external dependency on protobuf")
    errors += checkUnfixableError("proto_deps.cc", "unexpected direct dependency on google.protobuf")
    errors += checkUnfixableError("attribute_packed.cc", "Don't use __attribute__((packed))")
    errors += checkUnfixableError("designated_initializers.cc", "Don't use designated initializers")
    errors += checkUnfixableError("elvis_operator.cc", "Don't use the '?:' operator")
    errors += checkUnfixableError("testing_test.cc",
                                  "Don't use 'using testing::Test;, elaborate the type instead")
    errors += checkUnfixableError(
        "serialize_as_string.cc",
        "Don't use MessageLite::SerializeAsString for generating deterministic serialization")
    errors += checkUnfixableError(
        "version_history.rst",
        "Version history line malformed. Does not match VERSION_HISTORY_NEW_LINE_REGEX in "
        "check_format.py")
    errors += fixFileExpectingFailure(
        "api/missing_package.proto",
        "Unable to find package name for proto file: ./api/missing_package.proto")
    # The following files have errors that can be automatically fixed.
    errors += checkAndFixError("over_enthusiastic_spaces.cc",
                               "./over_enthusiastic_spaces.cc:3: over-enthusiastic spaces")
    errors += checkAndFixError("extra_enthusiastic_spaces.cc",
                               "./extra_enthusiastic_spaces.cc:3: over-enthusiastic spaces")
    errors += checkAndFixError("angle_bracket_include.cc",
                               "envoy includes should not have angle brackets")
    errors += checkAndFixError("proto_style.cc", "incorrect protobuf type reference")
    errors += checkAndFixError("long_line.cc", "clang-format check failed")
    errors += checkAndFixError("header_order.cc", "header_order.py check failed")
    errors += checkAndFixError("license.BUILD", "envoy_build_fixer check failed")
    errors += checkAndFixError("bad_envoy_build_sys_ref.BUILD", "Superfluous '@envoy//' prefix")
    errors += checkAndFixError("proto_format.proto", "clang-format check failed")
    errors += checkAndFixError("api/java_options.proto", "Java proto option")
    errors += checkFileExpectingOK("real_time_source_override.cc")
    errors += checkFileExpectingOK("time_system_wait_for.cc")
    if errors != 0:
        logging.error("%d FAILURES" % errors)
        exit(1)
    # logging.warn() is a deprecated alias; logging.warning() is the supported name.
    logging.warning("PASS")
| dnoe/envoy | tools/check_format_test_helper.py | Python | apache-2.0 | 9,849 |
# -*- coding: utf-8 -*-
#
#
# This file is execfile()d with the current directory set to its containing
# dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'IRMA'
copyright = '2013-2016, Quarkslab'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.5'
# The full version, including alpha/beta/rc tags.
release = '1.5.3'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
html_theme_path = ["_themes", ]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'IRMAdoc'
# -- Options for LaTeX output -------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index',
'IRMA.tex',
'IRMA Documentation',
'Quarkslab',
'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output -------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'IRMA', 'IRMA Documentation',
['Quarkslab'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index',
'IRMA',
'IRMA Documentation',
'Quarkslab',
'IRMA',
'Incident Response & Malware Analysis Platform.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# -- Options for Epub output --------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = 'IRMA'
epub_author = 'Quarkslab'
epub_publisher = 'Quarkslab'
epub_copyright = '2013-2016, Quarkslab'
# The language of the text. It defaults to the language option
# or en if the language is not set.
# epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
# epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
# epub_identifier = ''
# A unique identification for the text.
# epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
# epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_post_files = []
# A list of files that should not be packed into the epub file.
# epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
# epub_tocdepth = 3
# Allow duplicate toc entries.
# epub_tocdup = True
| hirokihamasaki/irma | docs/conf.py | Python | apache-2.0 | 9,028 |
#! /usr/bin/python
# filename: conserved_positions.py
import os
import time
import glob
import argparse
from Bio import SeqIO
import multiprocessing
from position import position
# Command-line interface.
# Fixes: the long description used to be passed positionally, which argparse
# interprets as `prog` (the program name shown in usage output); it now goes
# to `description`. Also, `-neg` used to default to True (not a file path),
# which crashed parse_negatives() when the flag was omitted; it now defaults
# to the empty string, meaning "no negative positions requested".
parser = argparse.ArgumentParser(
    description="Identifies Env isolates (from the LANL database) that have glycans at a user-defined position.")
parser.add_argument('-in', dest='in_file', default='', help="The input FASTA file of Env amino acid sequences. Only required if there is no pre-existing alignment (using the '-alignment' flag.")
parser.add_argument('-dir', dest='align_dir', default='', help="Folder for saving the alignment files, which will be named by clade. Only required if not supplying a pre-existing alignment.")
parser.add_argument('-alignments', dest='alignments', default='', help="To save processing time, a pre-existing directory of alignments can be used instead of building one for every run. Required only if '-in' and '-dir' aren't supplied.")
parser.add_argument('-ref', dest='reference', default='', help="The reference sequence, to be used to define the numbering scheme.")
parser.add_argument('-pos', dest='pos_glycans', required=True, help="The position(s) (using reference sequence numbering) to query for potential glycosylation. Positions may be separated by any sort of whitespace (space, tab, etc). If looking for only a single glycan, use the position as the option. If using multiple glycans, input a file with the positions.")
parser.add_argument('-neg', dest='neg_glycans', default='', help="If looking for a pair of glycans in combination, use this flag for a file containing the second glycan(s). The script will search for all of the glycans in this file only among the group of sequences that have all of the glycans in the first (-pos1) file.")
parser.add_argument('-print_ids', dest='print_ids', default=False, action='store_true', help="Prints two groups of Env IDs: passed and failed.")
parser.add_argument('-align_type', dest='align_type', default='pairwise', choices=['pairwise', 'msa'], help="Determine the alignment type. Options are 'pairwise' and 'msa'. Defaul is 'pairwise'.")
args = parser.parse_args()
def list_files(d):
    """Return every non-hidden entry directly under directory `d`.

    The directory path is tilde-expanded before globbing; glob's '*' pattern
    skips dotfiles, so hidden entries are excluded automatically.
    """
    return glob.glob(os.path.join(os.path.expanduser(d), '*'))
def parse_positives(pos):
    """Read the positive-glycan position file `pos` and return its lines.

    Fixes over the original: the file handle is closed via a context manager,
    and ALL empty lines are dropped (list.remove('') only stripped one, so a
    file with several blank lines leaked empty entries into the result).
    """
    with open(pos) as handle:
        return [line for line in handle.read().split('\n') if line]
def parse_negatives(neg):
    """Read the negative-glycan position file `neg` and return its lines.

    Guards against the case where no `-neg` file was supplied (the argparse
    default is not a file path): any non-string or empty value means "no
    negative positions requested". Also closes the file handle and drops ALL
    blank lines (the original removed at most one empty string).
    """
    if not isinstance(neg, str) or not neg:
        return []
    with open(neg) as handle:
        return [line for line in handle.read().split('\n') if line]
def check_for_alignment():
    """Return True when a pre-built alignment directory was supplied.

    Raises when neither a pre-existing alignment nor the reference + query
    inputs are available.
    """
    if args.alignments != '':
        return True
    if args.reference == '' or args.in_file == '':
        raise Exception('You need to provide either an existing alignment or reference and query sequences.')
    return False
def parse_alignments():
    """Load every pre-built alignment file into a dict keyed by clade name.

    Each value is a list of whitespace-split rows from the alignment file.
    Fixes over the original: each file is read once (it used to be opened and
    read twice) and the handle is closed via a context manager.
    """
    alignments = {}
    for path in list_files(args.alignments):
        with open(path, 'r') as handle:
            content = handle.read()
        # skip empty alignment files
        if content == '':
            continue
        # the clade name is the basename prefix, e.g. "B.alignment" -> "B"
        clade = os.path.basename(path).split('.')[0]
        alignments[clade] = [line.split() for line in content.split('\n')]
    return alignments
def print_positions(pos_list, neg_list):
# inform the user
print ''
print ''
print 'Looking for glycans at the following positions:'
print '\n'.join(pos_list)
print ''
print ''
print 'Looking for the absence of glycans at the following positions:'
print '\n'.join(neg_list)
print ''
def print_clade(c):
if c.upper() == 'OTHER':
print "\nProcessing sequences from all other clades..."
else:
print "\nProcessing clade %s sequences..." % c.upper()
def process_without_alignment():
    """Score sequences using pre-built alignments instead of aligning from scratch.

    Returns a (passed, failed) tuple of sequence counts across all clades.
    """
    alignments = parse_alignments()
    positives = parse_positives(args.pos_glycans)
    negatives = parse_negatives(args.neg_glycans)
    print_positions(positives, negatives)
    passed = 0
    failed = 0
    # process the clades in alphabetical order
    for clade in sorted(alignments):
        print_clade(clade)
        pos = position(alignment=alignments[clade], pre_aligned=True)
        clade_passed, clade_failed, _, _ = find_positions(pos, positives, negatives)
        passed += clade_passed
        failed += clade_failed
    return passed, failed
def process():
    """Align query sequences against the reference and score glycan positions.

    Reads the reference and query FASTA files named on the command line,
    buckets the query sequences by clade, aligns each bucket, then counts how
    many isolates satisfy the positive/negative position requirements.
    Returns a (passed, failed) tuple of sequence counts.
    """
    # parse the reference sequence and ID
    ref = SeqIO.read(open(args.reference, 'r'), 'fasta')
    ref_id = ref.id
    ref_seq = str(ref.seq)
    # set up an envs dict to hold env sequences segregated by clade
    envs = {'A': [],
            'B': [],
            'C': [],
            'D': [],
            'E': [],
            'G': [],
            'AE': [],
            'AG': [],
            'other': [] }
    # parse the FASTA input file and build a dict of sequences, segregated by clade
    for env in SeqIO.parse(open(args.in_file, 'r'), 'fasta'):
        # grab the clade, ID, and sequence
        env_id = env.id
        clade = env_id.split('.')[0].upper()
        env_seq = str(env.seq)
        # refine the clade name for clade A and the CRFs
        if clade in ('A1', 'A2'):
            clade = 'A'
        elif clade in ['CRF01_AE', '01_AE']:
            clade = 'AE'
        elif clade in ['CRF02_AG', '02_AG']:
            clade = 'AG'
        # only look at sequences that are long enough to be relevant
        if len(env_seq) >= 250:
            # for the major clades, just append the sequence to the appropriate clade list
            if clade in envs.keys():
                envs[clade].append([env_id, env_seq])
            # for the minor clades (not present in the env dict), append the env sequence to the 'other' category
            else:
                envs['other'].append([env_id, env_seq])
    # set up an alphabetical list of clades, so that we can process them in order.
    sorted_clades = sorted(envs.keys())
    # set up a list of passed and failed counts and ids
    passed = 0
    failed = 0
    passed_ids = []
    failed_ids = []
    # get positions lists
    pos_list = parse_positives(args.pos_glycans)
    neg_list = parse_negatives(args.neg_glycans)
    print_positions(pos_list, neg_list)
    # set up a temporary file path (only used for MSA alignments)
    temp_alignment_file = os.path.join(os.path.dirname(args.in_file), 'temp_alignment.fasta')
    # iterate through the clades and get scores
    for c in sorted_clades:
        # let the user know what's up
        print_clade(c)
        # only process if there are sequences in the clade group
        if len(envs[c]) < 1:
            print "No sequences in this clade."
            continue
        # make the glycan object (aligns the clade's sequences against the reference)
        pos = position(ref=ref_seq, input_list=envs[c], align_type=args.align_type, align_file=temp_alignment_file)
        # determine which sequences satisfy the position requirements
        y, n, y_ids, n_ids = find_positions(pos, pos_list, neg_list)
        # add the passed and failed to the appropriate var
        passed += y
        failed += n
        passed_ids.extend(y_ids)
        failed_ids.extend(n_ids)
        # if the alignment directory isn't defined, don't write the alignments to file
        if args.align_dir == '':
            print ''
            print ''
            print 'ALIGNMENTS ARE NOT BEING WRITTEN TO FILE.'
            print ''
            print ''
        # if there is an alignment directory
        else:
            # define the full path of the alignment file
            alignment_file = os.path.join(args.align_dir, c.upper() + '.alignment')
            # write the alignment
            pos.write_alignment(alignment_file)
    # if the 'print_ids' flag is on, print all of the ids that either passed or failed
    if args.print_ids:
        all_ids = passed_ids + failed_ids
        all_ids = sorted(all_ids)
        for i in all_ids:
            if i in passed_ids: val = 'Yes'
            else: val = 'No'
            print i + '\t' + val
    return passed, failed
def find_positions(g, pos_list, neg_list):
    """Classify every sequence in `g` against the position requirements.

    A sequence passes when every entry in `pos_list` is present and no entry
    in `neg_list` is present. An entry containing several whitespace-separated
    positions is treated as a logical OR (any one of them counts as present).
    Each position is a residue letter followed by a number, e.g. "N160".

    Returns (passed_count, failed_count, passed_ids, failed_ids).
    """
    yes = 0
    no = 0
    passed_list = []
    failed_list = []
    for env in g.get_ids():
        # 1 == requirement satisfied (positives) / glycan present (negatives)
        positives = []
        negatives = []
        # positive positions: residues that must be present
        if len(pos_list) > 0 and pos_list[0] != '':
            for pos in pos_list:
                # single position on the line
                if len(pos.split()) == 1:
                    split_pos = pos.split()[0]
                    res = split_pos[0]
                    num = split_pos[1:]
                    positives.append(g.residue_at(res, num, env))
                # multiple positions on one line: any one of them suffices (OR)
                if len(pos.split()) > 1:
                    glycan_sum = 0
                    for p in pos.split():
                        res = p[0]
                        num = p[1:]
                        glycan_sum += g.residue_at(res, num, env)
                    positives.append(1 if glycan_sum > 0 else 0)
        else:
            # no positive requirements: everything satisfies this test
            positives.append(1)
        # negative positions: residues that must be absent
        if len(neg_list) > 0 and neg_list[0] != '':
            for neg in neg_list:
                # single position on the line
                if len(neg.split()) == 1:
                    # BUG FIX: this used to read `pos.split()[0]` -- a stale
                    # variable left over from the positives loop above -- so
                    # single negative positions were checked against the wrong
                    # residue (or raised NameError when pos_list was empty).
                    split_neg = neg.split()[0]
                    res = split_neg[0]
                    num = split_neg[1:]
                    negatives.append(g.residue_at(res, num, env))
                # multiple positions: the sequence fails only if every one of
                # the sites is glycosylated
                if len(neg.split()) > 1:
                    glycan_sum = 0
                    neg_count = len(neg.split())
                    for n in neg.split():
                        res = n[0]
                        num = n[1:]
                        glycan_sum += g.residue_at(res, num, env)
                    negatives.append(0 if glycan_sum < neg_count else 1)
        else:
            # no negative requirements: nothing can disqualify the sequence
            negatives.append(0)
        # pass only if every positive test succeeded and no negative test hit
        if min(positives) == 1 and max(negatives) == 0:
            yes += 1
            passed_list.append(env)
        else:
            no += 1
            failed_list.append(env)
    return yes, no, passed_list, failed_list
def main():
# set the start time
start_time = time.time()
# check to see if there's an existing alignment
existing_alignment = check_for_alignment()
# decide which type of run to do
if existing_alignment:
passed, failed = process_without_alignment()
else:
passed, failed = process()
# write the output
print "\nDone!\n{0} sequences were processed.".format(passed + failed)
print '{0} isolates passed.'.format(passed)
print '{0} isolates failed.'.format(failed)
end_time = time.time()
run_time = (end_time - start_time)
print "Run was completed in %s seconds.\n" % run_time
if __name__ == '__main__':
main()
| bryanbriney/env-analysis | glycans/conserved_positions.py | Python | mit | 10,785 |
#!/usr/bin/env python3
"""Convert a hex string (argv[1]) into raw bytes written to the file argv[2]."""
import sys
def hex_to_bytes(hex_str):
    """Decode a string of hex digit pairs into bytes.

    Outer whitespace is stripped; a trailing unpaired digit is silently
    ignored, matching the original script's behaviour. Non-hex characters
    raise ValueError (the original's discarded .encode("ascii") call did no
    real validation -- int(..., 16) is what validates).
    """
    data = hex_str.strip()
    return bytes(int(data[i:i + 2], 16) for i in range(0, len(data) - 1, 2))
def main():
    # the output file is now closed deterministically via a context manager
    with open(sys.argv[2], 'wb') as out:
        out.write(hex_to_bytes(sys.argv[1]))
if __name__ == '__main__':
    main()
| stenbock/rdisasm | script/hexbin.py | Python | gpl-2.0 | 269 |
"""Kuler Sky lights integration."""
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from .const import DATA_ADDRESSES, DATA_DISCOVERY_SUBSCRIPTION, DOMAIN
PLATFORMS = [Platform.LIGHT]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up Kuler Sky from a config entry."""
    # setdefault mirrors the original create-if-missing checks for the
    # integration's shared storage and the discovered-address set.
    domain_data = hass.data.setdefault(DOMAIN, {})
    domain_data.setdefault(DATA_ADDRESSES, set())
    hass.config_entries.async_setup_platforms(entry, PLATFORMS)
    return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    # Stop device discovery first so nothing new appears mid-unload.
    stop_discovery = hass.data[DOMAIN].pop(DATA_DISCOVERY_SUBSCRIPTION, None)
    if stop_discovery:
        stop_discovery()
    hass.data.pop(DOMAIN, None)
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
| home-assistant/home-assistant | homeassistant/components/kulersky/__init__.py | Python | apache-2.0 | 1,040 |
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package des différents contextes de création de joueur.""" | vlegoff/tsunami | src/primaires/joueur/contextes/creation/__init__.py | Python | bsd-3-clause | 1,633 |
# coding=utf-8
# author = 'Gavin'
# project is py_learning
# the file name is print
# date = 2018/7/26
print("Hello python world!") | gavinshaw/py_learning | python_learing/print.py | Python | mit | 131 |
#pylint: disable=C0111
#pylint: disable=W0621
from lettuce import world, step
from common import *
from nose.tools import assert_equal
############### ACTIONS ####################
@step('I have opened a new course section in Studio$')
def i_have_opened_a_new_course_section(step):
    """Create a fresh course and add a section to it."""
    open_new_course()
    add_section()
@step('I have added a new subsection$')
def i_have_added_a_new_subsection(step):
    """Add a subsection to the current section."""
    add_subsection()
@step('I have opened a new subsection in Studio$')
def i_have_opened_a_new_subsection(step):
    """Create a course, section and subsection, then open the subsection."""
    step.given('I have opened a new course section in Studio')
    step.given('I have added a new subsection')
    world.css_click('span.subsection-name-value')
@step('I click the New Subsection link')
def i_click_the_new_subsection_link(step):
    """Click the link that creates a new subsection."""
    world.css_click('a.new-subsection-item')
@step('I enter the subsection name and click save$')
def i_save_subsection_name(step):
    """Name the new subsection 'Subsection One' and save it."""
    save_subsection_name('Subsection One')
@step('I enter a subsection name with a quote and click save$')
def i_save_subsection_name_with_quote(step):
    """Name the new subsection with an embedded double quote and save it."""
    save_subsection_name('Subsection With "Quote"')
@step('I click on the subsection$')
def click_on_subsection(step):
    """Open the subsection editor by clicking its name."""
    world.css_click('span.subsection-name-value')
@step('I see the complete subsection name with a quote in the editor$')
def i_see_complete_subsection_name_with_quote_in_editor(step):
    """Verify the editor shows the full name, including the quote."""
    css = '.subsection-display-name-input'
    assert world.is_css_present(css)
    assert_equal(world.css_value(css), 'Subsection With "Quote"')
def _normalized_timestring(timestring):
    """Strip the optional regex capture; default to midnight when absent.

    The second capture group may be None (no time supplied) or carry the
    leading space that the step regex matches.
    """
    if hasattr(timestring, "strip"):
        timestring = timestring.strip()
    return timestring or "00:00"
@step('I set the subsection release date to ([0-9/-]+)( [0-9:]+)?')
def set_subsection_release_date(_step, datestring, timestring):
    """Fill in the subsection release date (and optional time)."""
    set_date_and_time(
        'input#start_date', datestring,
        'input#start_time', _normalized_timestring(timestring))
@step('I set the subsection due date to ([0-9/-]+)( [0-9:]+)?')
def set_subsection_due_date(_step, datestring, timestring):
    """Fill in the subsection due date (and optional time)."""
    # The due-date inputs are hidden until explicitly enabled.
    if not world.css_visible('input#due_date'):
        world.css_click('.due-date-input .set-date')
    set_date_and_time(
        'input#due_date', datestring,
        'input#due_time', _normalized_timestring(timestring))
@step('I mark it as Homework$')
def i_mark_it_as_homework(step):
    """Set the subsection's grading format to Homework via the menu."""
    world.css_click('a.menu-toggle')
    world.browser.click_link_by_text('Homework')
@step('I see it marked as Homework$')
def i_see_it_marked__as_homework(step):
    """Verify the grading-format label reads Homework."""
    assert_equal(world.css_value(".status-label"), 'Homework')
@step('I click the link to sync release date to section')
def click_sync_release_date(step):
    """Sync the subsection release date to its parent section."""
    world.css_click('.sync-date')
############ ASSERTIONS ###################
@step('I see my subsection on the Courseware page$')
def i_see_my_subsection_on_the_courseware_page(step):
    """Verify 'Subsection One' is listed on the Courseware page."""
    see_subsection_name('Subsection One')
@step('I see my subsection name with a quote on the Courseware page$')
def i_see_my_subsection_name_with_quote_on_the_courseware_page(step):
    """Verify the quoted subsection name is listed on the Courseware page."""
    see_subsection_name('Subsection With "Quote"')
@step('the subsection does not exist$')
def the_subsection_does_not_exist(step):
    """Verify no subsection is present on the page."""
    css = 'span.subsection-name'
    assert world.browser.is_element_not_present_by_css(css)
def _assert_date_and_time(datestring, timestring, date_css, time_css):
    """Assert the date field matches; check the time only when one was captured.

    Shared by the release-date and due-date assertion steps, which previously
    duplicated this logic.
    """
    if hasattr(timestring, "strip"):
        timestring = timestring.strip()
    assert_equal(datestring, get_date(date_css))
    if timestring:
        assert_equal(timestring, get_date(time_css))
@step('I see the subsection release date is ([0-9/-]+)( [0-9:]+)?')
def i_see_subsection_release(_step, datestring, timestring):
    """Verify the displayed release date (and optional time)."""
    _assert_date_and_time(datestring, timestring, 'input#start_date', 'input#start_time')
@step('I see the subsection due date is ([0-9/-]+)( [0-9:]+)?')
def i_see_subsection_due(_step, datestring, timestring):
    """Verify the displayed due date (and optional time)."""
    _assert_date_and_time(datestring, timestring, 'input#due_date', 'input#due_time')
############ HELPER METHODS ###################
def get_date(css):
    """Return the trimmed value of the first element matching `css`."""
    return world.css_find(css).first.value.strip()
def save_subsection_name(name):
    """Type `name` into the new-subsection input and click save."""
    name_css = 'input.new-subsection-name-input'
    save_css = 'input.new-subsection-name-save'
    world.css_fill(name_css, name)
    world.css_click(save_css)
def see_subsection_name(name):
    """Assert a subsection with display name `name` is present on the page."""
    css = 'span.subsection-name'
    assert world.is_css_present(css)
    css = 'span.subsection-name-value'
    assert world.css_has_text(css, name)
| wwj718/edx-video | cms/djangoapps/contentstore/features/subsection.py | Python | agpl-3.0 | 4,511 |
import time
import datetime
from django import forms
from django.forms.util import ErrorDict
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from models import Comment
from django.utils.encoding import force_unicode
from django.utils.hashcompat import sha_constructor
from django.utils.text import get_text_list
from django.utils.translation import ungettext, ugettext_lazy as _
COMMENT_MAX_LENGTH = getattr(settings,'COMMENT_MAX_LENGTH', 3000)
class CommentForm(forms.Form):
    """Comment form with built-in spam/tamper protections.

    Besides the user-visible fields (name/email/url/comment), the form
    carries hidden security fields: a honeypot (must stay empty), the
    target object's content type and primary key, a timestamp, and a
    SHA1 hash tying those values together with ``settings.SECRET_KEY``
    so the hidden fields cannot be forged client-side.
    """
    # User-visible fields.
    name = forms.CharField(label=_("Name"), max_length=50)
    email = forms.EmailField(label=_("Email address"))
    url = forms.URLField(label=_("URL"), required=False)
    comment = forms.CharField(label=_('Comment'), widget=forms.Textarea,
                              max_length=COMMENT_MAX_LENGTH)
    # Anti-spam honeypot: hidden from humans by the template; bots that
    # fill every field will trip clean_honeypot().
    honeypot = forms.CharField(required=False,
                               label=_('If you enter anything in this field '\
                                       'your comment will be treated as spam'))
    # Hidden security fields validated against the server-generated hash.
    content_type = forms.CharField(widget=forms.HiddenInput)
    object_pk = forms.CharField(widget=forms.HiddenInput)
    timestamp = forms.IntegerField(widget=forms.HiddenInput)
    security_hash = forms.CharField(min_length=40, max_length=40, widget=forms.HiddenInput)
    def __init__(self, target_object, data=None, initial=None):
        """Bind the form to *target_object*, the model instance being
        commented on, and seed ``initial`` with the security fields.

        NOTE: a caller-supplied ``initial`` dict is mutated in place.
        """
        self.target_object = target_object
        if initial is None:
            initial = {}
        initial.update(self.generate_security_data())
        super(CommentForm, self).__init__(data=data, initial=initial)
    def get_comment_object(self):
        """
        Return a new (unsaved) comment object based on the information in this
        form. Assumes that the form is already validated and will throw a
        ValueError if not.
        Does not set any of the fields that would come from a Request object
        (i.e. ``user`` or ``ip_address``).
        """
        if not self.is_valid():
            raise ValueError("get_comment_object may only be called on valid forms")
        new = Comment(
            content_type = ContentType.objects.get_for_model(self.target_object),
            object_pk    = force_unicode(self.target_object._get_pk_val()),
            user_name    = self.cleaned_data["name"],
            user_email   = self.cleaned_data["email"],
            user_url     = self.cleaned_data["url"],
            comment      = self.cleaned_data["comment"],
            submit_date  = datetime.datetime.now(),
            site_id      = settings.SITE_ID,
            is_public    = True,
            is_removed   = False,
        )
        # Check that this comment isn't duplicate. (Sometimes people post comments
        # twice by mistake.) If it is, fail silently by returning the old comment.
        possible_duplicates = Comment.objects.filter(
            content_type = new.content_type,
            object_pk = new.object_pk,
            user_name = new.user_name,
            user_email = new.user_email,
            user_url = new.user_url,
        )
        for old in possible_duplicates:
            # Duplicate == same author fields, same comment text, same day.
            if old.submit_date.date() == new.submit_date.date() and old.comment == new.comment:
                return old
        return new
    def security_errors(self):
        """Return just those errors associated with security"""
        errors = ErrorDict()
        for f in ["honeypot", "timestamp", "security_hash"]:
            if f in self.errors:
                errors[f] = self.errors[f]
        return errors
    def clean_honeypot(self):
        """Check that nothing's been entered into the honeypot."""
        value = self.cleaned_data["honeypot"]
        if value:
            raise forms.ValidationError(self.fields["honeypot"].label)
        return value
    def clean_security_hash(self):
        """Check the security hash."""
        # Recompute the hash from the *submitted* hidden fields and compare
        # with the submitted hash; a mismatch means the fields were tampered
        # with (or the form was generated with a different SECRET_KEY).
        # NOTE(review): ``!=`` is not a constant-time comparison — a
        # hardened version would use a constant-time compare; confirm
        # whether that matters for this deployment.
        security_hash_dict = {
            'content_type' : self.data.get("content_type", ""),
            'object_pk' : self.data.get("object_pk", ""),
            'timestamp' : self.data.get("timestamp", ""),
        }
        expected_hash = self.generate_security_hash(**security_hash_dict)
        actual_hash = self.cleaned_data["security_hash"]
        if expected_hash != actual_hash:
            raise forms.ValidationError("Security hash check failed.")
        return actual_hash
    def clean_timestamp(self):
        """Make sure the timestamp isn't too far (> 2 hours) in the past."""
        ts = self.cleaned_data["timestamp"]
        if time.time() - ts > (2 * 60 * 60):
            raise forms.ValidationError("Timestamp check failed")
        return ts
    def clean_comment(self):
        """
        If COMMENTS_ALLOW_PROFANITIES is False, check that the comment doesn't
        contain anything in PROFANITIES_LIST.
        """
        comment = self.cleaned_data["comment"]
        if settings.COMMENTS_ALLOW_PROFANITIES == False:
            bad_words = [w for w in settings.PROFANITIES_LIST if w in comment.lower()]
            if bad_words:
                plural = len(bad_words) > 1
                # Report each offending word censored as "f--k"-style output.
                raise forms.ValidationError(ungettext(
                    "Watch your mouth! The word %s is not allowed here.",
                    "Watch your mouth! The words %s are not allowed here.", plural) % \
                    get_text_list(['"%s%s%s"' % (i[0], '-'*(len(i)-2), i[-1]) for i in bad_words], 'and'))
        return comment
    def generate_security_data(self):
        """Generate a dict of security data for "initial" data."""
        timestamp = int(time.time())
        security_dict = {
            'content_type' : str(self.target_object._meta),
            'object_pk' : str(self.target_object._get_pk_val()),
            'timestamp' : str(timestamp),
            'security_hash' : self.initial_security_hash(timestamp),
        }
        return security_dict
    def initial_security_hash(self, timestamp):
        """
        Generate the initial security hash from self.content_object
        and a (unix) timestamp.
        """
        initial_security_dict = {
            'content_type' : str(self.target_object._meta),
            'object_pk' : str(self.target_object._get_pk_val()),
            'timestamp' : str(timestamp),
        }
        return self.generate_security_hash(**initial_security_dict)
    def generate_security_hash(self, content_type, object_pk, timestamp):
        """Generate a (SHA1) security hash from the provided info."""
        # NOTE(review): SHA1 keyed by simple concatenation with SECRET_KEY;
        # a modern equivalent would use HMAC — kept as-is for compatibility.
        info = (content_type, object_pk, timestamp, settings.SECRET_KEY)
        return sha_constructor("".join(info)).hexdigest()
| chewable/django | django/contrib/comments/forms.py | Python | bsd-3-clause | 6,680 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
import frappe

# Fixture records for the "Employment Type" doctype, loaded from the
# doctype's test_records.json by the frappe test framework.
test_records = frappe.get_test_records('Employment Type')
| mhbu50/erpnext | erpnext/hr/doctype/employment_type/test_employment_type.py | Python | gpl-3.0 | 201 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# @author: Ryota MIBU
import mox
from quantum import context
from quantum.openstack.common import uuidutils
from quantum.plugins.nec.common import ofc_client
from quantum.plugins.nec.db import api as ndb
from quantum.plugins.nec.db import models as nmodels
from quantum.plugins.nec import drivers
from quantum.tests import base
class TestConfig(object):
    """Configuration for this test"""
    # Endpoint handed to the driver under test (the OFC client target).
    host = '127.0.0.1'
    port = 8888
class TremaDriverTestBase(base.BaseTestCase):
    """Common setup for the trema OFC driver tests.

    Stubs out ``OFCClient.do_request`` with mox so each test can record the
    exact REST call it expects, then replay and verify.
    """
    driver_name = "trema"
    def setUp(self):
        super(TremaDriverTestBase, self).setUp()
        self.mox = mox.Mox()
        self.driver = drivers.get_driver(self.driver_name)(TestConfig)
        # All REST traffic is intercepted; tests record expectations on it.
        self.mox.StubOutWithMock(ofc_client.OFCClient, 'do_request')
        self.addCleanup(self.mox.UnsetStubs)
    def get_ofc_item_random_params(self):
        """create random parameters for ofc_item test"""
        tenant_id = uuidutils.generate_uuid()
        network_id = uuidutils.generate_uuid()
        port_id = uuidutils.generate_uuid()
        portinfo = nmodels.PortInfo(id=port_id, datapath_id="0x123456789",
                                    port_no=1234, vlan_id=321,
                                    mac="11:22:33:44:55:66")
        return tenant_id, network_id, portinfo
class TremaDriverNetworkTestBase(TremaDriverTestBase):
    """Network create/delete tests shared by the trema driver variants."""
    def testa_create_network(self):
        t, n, p = self.get_ofc_item_random_params()
        description = "desc of %s" % n
        # Record the expected REST call, then replay and drive the code.
        body = {'id': n, 'description': description}
        ofc_client.OFCClient.do_request("POST", "/networks", body=body)
        self.mox.ReplayAll()
        ret = self.driver.create_network(t, description, n)
        self.mox.VerifyAll()
        self.assertEqual(ret, '/networks/%s' % n)
    def testc_delete_network(self):
        t, n, p = self.get_ofc_item_random_params()
        net_path = "/networks/%s" % n
        ofc_client.OFCClient.do_request("DELETE", net_path)
        self.mox.ReplayAll()
        self.driver.delete_network(net_path)
        self.mox.VerifyAll()
class TremaPortBaseDriverTest(TremaDriverNetworkTestBase):
    """Port create/delete for the port-based trema driver.

    Ports are identified by (datapath_id, port_no, vlan_id).
    """
    driver_name = "trema_port"
    def testd_create_port(self):
        _t, n, p = self.get_ofc_item_random_params()
        net_path = "/networks/%s" % n
        body = {'id': p.id,
                'datapath_id': p.datapath_id,
                'port': str(p.port_no),
                'vid': str(p.vlan_id)}
        ofc_client.OFCClient.do_request("POST",
                                        "/networks/%s/ports" % n, body=body)
        self.mox.ReplayAll()
        ret = self.driver.create_port(net_path, p, p.id)
        self.mox.VerifyAll()
        self.assertEqual(ret, '/networks/%s/ports/%s' % (n, p.id))
    def testd_delete_port(self):
        t, n, p = self.get_ofc_item_random_params()
        p_path = "/networks/%s/ports/%s" % (n, p.id)
        ofc_client.OFCClient.do_request("DELETE", p_path)
        self.mox.ReplayAll()
        self.driver.delete_port(p_path)
        self.mox.VerifyAll()
class TremaPortMACBaseDriverTest(TremaDriverNetworkTestBase):
    """Port create/delete for the port+MAC trema driver.

    Creating a port is a three-step REST sequence: create a temporary
    "dummy" port, attach the MAC to it, then delete the dummy port.
    """
    driver_name = "trema_portmac"
    def testd_create_port(self):
        t, n, p = self.get_ofc_item_random_params()
        dummy_port = "dummy-%s" % p.id
        net_path = "/networks/%s" % n
        # Step 1: create the transient dummy port.
        path_1 = "/networks/%s/ports" % n
        body_1 = {'id': dummy_port,
                  'datapath_id': p.datapath_id,
                  'port': str(p.port_no),
                  'vid': str(p.vlan_id)}
        ofc_client.OFCClient.do_request("POST", path_1, body=body_1)
        # Step 2: attach the MAC address to the dummy port.
        path_2 = "/networks/%s/ports/%s/attachments" % (n, dummy_port)
        body_2 = {'id': p.id, 'mac': p.mac}
        ofc_client.OFCClient.do_request("POST", path_2, body=body_2)
        # Step 3: remove the dummy port, leaving only the attachment.
        path_3 = "/networks/%s/ports/%s" % (n, dummy_port)
        ofc_client.OFCClient.do_request("DELETE", path_3)
        self.mox.ReplayAll()
        ret = self.driver.create_port(net_path, p, p.id)
        self.mox.VerifyAll()
        port_path = "/networks/%s/ports/%s/attachments/%s" % (n, dummy_port,
                                                              p.id)
        self.assertEqual(ret, port_path)
    def testd_delete_port(self):
        t, n, p = self.get_ofc_item_random_params()
        dummy_port = "dummy-%s" % p.id
        path = "/networks/%s/ports/%s/attachments/%s" % (n, dummy_port, p.id)
        ofc_client.OFCClient.do_request("DELETE", path)
        self.mox.ReplayAll()
        self.driver.delete_port(path)
        self.mox.VerifyAll()
class TremaMACBaseDriverTest(TremaDriverNetworkTestBase):
    """Port create/delete for the MAC-based trema driver.

    Ports are plain MAC attachments on the network, with no OFC port object.
    """
    driver_name = "trema_mac"
    def testd_create_port(self):
        t, n, p = self.get_ofc_item_random_params()
        net_path = "/networks/%s" % n
        path = "/networks/%s/attachments" % n
        body = {'id': p.id, 'mac': p.mac}
        ofc_client.OFCClient.do_request("POST", path, body=body)
        self.mox.ReplayAll()
        ret = self.driver.create_port(net_path, p, p.id)
        self.mox.VerifyAll()
        self.assertEqual(ret, '/networks/%s/attachments/%s' % (n, p.id))
    def testd_delete_port(self):
        t, n, p = self.get_ofc_item_random_params()
        path = "/networks/%s/attachments/%s" % (n, p.id)
        ofc_client.OFCClient.do_request("DELETE", path)
        self.mox.ReplayAll()
        self.driver.delete_port(path)
        self.mox.VerifyAll()
class TremaFilterDriverTest(TremaDriverTestBase):
    """Packet-filter create/delete tests for the trema driver."""
    def get_ofc_item_random_params(self):
        """create random parameters for ofc_item test"""
        t, n, p = (super(TremaFilterDriverTest, self).
                   get_ofc_item_random_params())
        filter_id = uuidutils.generate_uuid()
        # A representative ACCEPT rule matching on in_port/src_mac/TCP.
        filter_dict = {'tenant_id': t,
                       'id': filter_id,
                       'network_id': n,
                       'priority': 123,
                       'action': "ACCEPT",
                       'in_port': p.id,
                       'src_mac': p.mac,
                       'dst_mac': "",
                       'eth_type': 0,
                       'src_cidr': "",
                       'dst_cidr': "",
                       'src_port': 0,
                       'dst_port': 0,
                       'protocol': "TCP",
                       'admin_state_up': True,
                       'status': "ACTIVE"}
        filter_item = nmodels.PacketFilter(**filter_dict)
        return t, n, p, filter_item
    def testa_create_filter(self):
        t, n, p, f = self.get_ofc_item_random_params()
        net_path = "/networks/%s" % n
        # Fields not specified by the filter are wildcarded in the match.
        ofp_wildcards = 'dl_vlan,dl_vlan_pcp,nw_tos,dl_dst,' + \
                        'nw_src:32,nw_dst:32,tp_src,tp_dst'
        body = {'id': f.id,
                'action': 'ALLOW',
                'priority': 123,
                'slice': n,
                'in_datapath_id': '0x123456789',
                'in_port': 1234,
                'nw_proto': '0x6',
                'dl_type': '0x800',
                'dl_src': p.mac,
                'ofp_wildcards': ofp_wildcards}
        ofc_client.OFCClient.do_request("POST", "/filters", body=body)
        self.mox.ReplayAll()
        ret = self.driver.create_filter(net_path, f, p, f.id)
        self.mox.VerifyAll()
        self.assertEqual(ret, '/filters/%s' % f.id)
    def testb_delete_filter(self):
        t, n, p, f = self.get_ofc_item_random_params()
        f_path = "/filters/%s" % f.id
        ofc_client.OFCClient.do_request("DELETE", f_path)
        self.mox.ReplayAll()
        self.driver.delete_filter(f_path)
        self.mox.VerifyAll()
def generate_random_ids(count=1):
    """Return one random uuid when count == 1, otherwise a list of *count* uuids."""
    ids = [uuidutils.generate_uuid() for _ in xrange(count)]
    if count == 1:
        return ids[0]
    return ids
class TremaIdConvertTest(base.BaseTestCase):
    """Tests conversion between OFC ids and trema REST paths.

    The ``*_noconv`` variants feed an id that is already a path and expect
    it to be returned unchanged.
    """
    driver_name = 'trema'
    def setUp(self):
        super(TremaIdConvertTest, self).setUp()
        self.driver = drivers.get_driver(self.driver_name)(TestConfig)
        self.mox = mox.Mox()
        self.ctx = self.mox.CreateMock(context.Context)
        self.addCleanup(self.mox.UnsetStubs)
    def test_convert_tenant_id(self):
        ofc_t_id = generate_random_ids(1)
        ret = self.driver.convert_ofc_tenant_id(self.ctx, ofc_t_id)
        self.assertEqual(ret, '/tenants/%s' % ofc_t_id)
    def test_convert_tenant_id_noconv(self):
        ofc_t_id = '/tenants/%s' % generate_random_ids(1)
        ret = self.driver.convert_ofc_tenant_id(self.ctx, ofc_t_id)
        self.assertEqual(ret, ofc_t_id)
    def test_convert_network_id(self):
        t_id, ofc_t_id, ofc_n_id = generate_random_ids(3)
        ret = self.driver.convert_ofc_network_id(self.ctx, ofc_n_id, t_id)
        self.assertEqual(ret, ('/networks/%s' % ofc_n_id))
    def test_convert_network_id_noconv(self):
        t_id = 'dummy'
        ofc_t_id, ofc_n_id = generate_random_ids(2)
        ofc_n_id = '/networks/%s' % ofc_n_id
        ret = self.driver.convert_ofc_network_id(self.ctx, ofc_n_id, t_id)
        # Bug fix: the original computed ``ret`` but never asserted on it,
        # so this test could not fail. An already-converted path must be
        # returned unchanged, mirroring the other *_noconv tests.
        self.assertEqual(ret, ofc_n_id)
    def test_convert_filter_id(self):
        ofc_f_id = generate_random_ids(1)
        ret = self.driver.convert_ofc_filter_id(self.ctx, ofc_f_id)
        self.assertEqual(ret, '/filters/%s' % ofc_f_id)
    def test_convert_filter_id_noconv(self):
        ofc_f_id = '/filters/%s' % generate_random_ids(1)
        ret = self.driver.convert_ofc_filter_id(self.ctx, ofc_f_id)
        self.assertEqual(ret, ofc_f_id)
class TremaIdConvertTestBase(base.BaseTestCase):
    """Shared port-id conversion checks, parameterized by a path template.

    Subclasses set ``driver_name`` and call the ``_test_*`` helpers with the
    port path template their driver produces.
    """
    def setUp(self):
        super(TremaIdConvertTestBase, self).setUp()
        self.mox = mox.Mox()
        self.driver = drivers.get_driver(self.driver_name)(TestConfig)
        self.ctx = self.mox.CreateMock(context.Context)
        self.ctx.session = "session"
        # The driver looks up the OFC network id; stub that lookup out.
        self.mox.StubOutWithMock(ndb, 'get_ofc_id_lookup_both')
        self.addCleanup(self.mox.UnsetStubs)
    def _test_convert_port_id(self, port_path_template):
        t_id, n_id = generate_random_ids(2)
        ofc_n_id, ofc_p_id = generate_random_ids(2)
        ndb.get_ofc_id_lookup_both(
            self.ctx.session, 'ofc_network', n_id).AndReturn(ofc_n_id)
        self.mox.ReplayAll()
        ret = self.driver.convert_ofc_port_id(self.ctx, ofc_p_id, t_id, n_id)
        exp = port_path_template % {'network': ofc_n_id, 'port': ofc_p_id}
        self.assertEqual(ret, exp)
        self.mox.VerifyAll()
    def _test_convert_port_id_with_new_network_id(self, port_path_template):
        t_id, n_id = generate_random_ids(2)
        ofc_n_id, ofc_p_id = generate_random_ids(2)
        # The lookup returns a path-style id; only the trailing uuid should
        # be used when building the port path.
        ofc_n_path = '/networks/%s' % ofc_n_id
        ndb.get_ofc_id_lookup_both(
            self.ctx.session, 'ofc_network', n_id).AndReturn(ofc_n_path)
        self.mox.ReplayAll()
        ret = self.driver.convert_ofc_port_id(self.ctx, ofc_p_id, t_id, n_id)
        exp = port_path_template % {'network': ofc_n_id, 'port': ofc_p_id}
        # Removed leftover debug ``print`` statements; assertEqual already
        # reports both values on failure.
        self.assertEqual(ret, exp)
        self.mox.VerifyAll()
    def _test_convert_port_id_noconv(self, port_path_template):
        t_id = n_id = 'dummy'
        ofc_n_id, ofc_p_id = generate_random_ids(2)
        ofc_p_id = port_path_template % {'network': ofc_n_id, 'port': ofc_p_id}
        ret = self.driver.convert_ofc_port_id(self.ctx, ofc_p_id, t_id, n_id)
        self.assertEqual(ret, ofc_p_id)
class TremaIdConvertPortBaseTest(TremaIdConvertTestBase):
    """Port-id conversion for the port-based driver path layout."""
    driver_name = "trema_port"
    def test_convert_port_id(self):
        self._test_convert_port_id('/networks/%(network)s/ports/%(port)s')
    def test_convert_port_id_with_new_network_id(self):
        self._test_convert_port_id_with_new_network_id(
            '/networks/%(network)s/ports/%(port)s')
    def test_convert_port_id_noconv(self):
        self._test_convert_port_id_noconv(
            '/networs/%(network)s/ports/%(port)s')
class TremaIdConvertPortMACBaseTest(TremaIdConvertTestBase):
    """Port-id conversion for the port+MAC driver path layout."""
    driver_name = "trema_portmac"
    def test_convert_port_id(self):
        self._test_convert_port_id(
            '/networks/%(network)s/ports/dummy-%(port)s/attachments/%(port)s')
    def test_convert_port_id_with_new_network_id(self):
        self._test_convert_port_id_with_new_network_id(
            '/networks/%(network)s/ports/dummy-%(port)s/attachments/%(port)s')
    def test_convert_port_id_noconv(self):
        self._test_convert_port_id_noconv(
            '/networs/%(network)s/ports/dummy-%(port)s/attachments/%(port)s')
class TremaIdConvertMACBaseTest(TremaIdConvertTestBase):
    """Port-id conversion for the MAC-based driver path layout."""
    driver_name = "trema_mac"
    def test_convert_port_id(self):
        self._test_convert_port_id(
            '/networks/%(network)s/attachments/%(port)s')
    def test_convert_port_id_with_new_network_id(self):
        self._test_convert_port_id_with_new_network_id(
            '/networks/%(network)s/attachments/%(port)s')
    def test_convert_port_id_noconv(self):
        self._test_convert_port_id_noconv(
            '/networs/%(network)s/attachments/%(port)s')
| wallnerryan/quantum_migrate | quantum/tests/unit/nec/test_trema_driver.py | Python | apache-2.0 | 13,627 |
#!/usr/bin/env python
import sys
from struct import pack
from VariableBiteCode import vb_encode
if len(sys.argv) < 2:
print "usage: %s in.txt > out.txt" % sys.argv[0]
sys.exit(1)
for line in open(sys.argv[1], 'r'):
(tag, id_list) = line.rstrip().split('\t')
bytes = []
pre = 0
for id in id_list.split(','):
id = int(id)
bytes.append(vb_encode(id - pre))
pre = id
data = ''.join(bytes)
sys.stdout.write('%s%s%s' % (
pack('=2i', len(tag), len(data)), tag, data))
| enfk/hugedatabook-vbcode-python | ch6_encode.py | Python | mit | 528 |
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
from nova.tests.functional.api_sample_tests import api_sample_base
from nova.tests.unit.image import fake as fake_image
class AggregatesSampleJsonTest(api_sample_base.ApiSampleTestBaseV21):
    """API sample tests for the os-aggregates endpoints."""
    ADMIN_API = True
    sample_dir = "os-aggregates"
    # extra_subs is a noop in the base v2.1 test class; it's used to sub in
    # additional details for response verification of actions performed on an
    # existing aggregate.
    extra_subs = {}
    def _test_aggregate_create(self):
        """Create an aggregate and return the id parsed from the response."""
        subs = {
            "aggregate_id": r'(?P<id>\d+)'
        }
        response = self._do_post('os-aggregates', 'aggregate-post-req', subs)
        return self._verify_response('aggregate-post-resp',
                                     subs, response, 200)
    def test_aggregate_create(self):
        self._test_aggregate_create()
    def _test_add_host(self, aggregate_id, host):
        """Add *host* to the given aggregate and verify the API response."""
        subs = {
            "host_name": host
        }
        response = self._do_post('os-aggregates/%s/action' % aggregate_id,
                                 'aggregate-add-host-post-req', subs)
        subs.update(self.extra_subs)
        self._verify_response('aggregates-add-host-post-resp', subs,
                              response, 200)
    def test_list_aggregates(self):
        aggregate_id = self._test_aggregate_create()
        self._test_add_host(aggregate_id, self.compute.host)
        response = self._do_get('os-aggregates')
        self._verify_response('aggregates-list-get-resp', {}, response, 200)
    def test_aggregate_get(self):
        agg_id = self._test_aggregate_create()
        response = self._do_get('os-aggregates/%s' % agg_id)
        self._verify_response('aggregates-get-resp', self.extra_subs,
                              response, 200)
    def test_add_metadata(self):
        agg_id = self._test_aggregate_create()
        response = self._do_post('os-aggregates/%s/action' % agg_id,
                                 'aggregate-metadata-post-req',
                                 {'action': 'set_metadata'})
        self._verify_response('aggregates-metadata-post-resp', self.extra_subs,
                              response, 200)
    def test_add_host(self):
        aggregate_id = self._test_aggregate_create()
        self._test_add_host(aggregate_id, self.compute.host)
    def test_remove_host(self):
        # Consistency fix: use the id of the aggregate actually created
        # instead of the hard-coded 'os-aggregates/1/action' the original
        # relied on (which only worked while the fixture id happened to be 1).
        aggregate_id = self._test_aggregate_create()
        self._test_add_host(aggregate_id, self.compute.host)
        subs = {
            "host_name": self.compute.host,
        }
        response = self._do_post('os-aggregates/%s/action' % aggregate_id,
                                 'aggregate-remove-host-post-req', subs)
        subs.update(self.extra_subs)
        self._verify_response('aggregates-remove-host-post-resp',
                              subs, response, 200)
    def test_update_aggregate(self):
        aggregate_id = self._test_aggregate_create()
        response = self._do_put('os-aggregates/%s' % aggregate_id,
                                'aggregate-update-post-req', {})
        self._verify_response('aggregate-update-post-resp',
                              self.extra_subs, response, 200)
class AggregatesV2_41_SampleJsonTest(AggregatesSampleJsonTest):
    """Microversion 2.41 variant: responses include the aggregate uuid."""
    microversion = '2.41'
    scenarios = [
        (
            "v2_41", {
                'api_major_version': 'v2.1',
            },
        )
    ]
    def _test_aggregate_create(self):
        subs = {
            "aggregate_id": r'(?P<id>\d+)',
        }
        response = self._do_post('os-aggregates', 'aggregate-post-req', subs)
        # This feels like cheating since we're getting the uuid from the
        # response before we even validate that it exists in the response based
        # on the sample, but we'll fail with a KeyError if it doesn't which is
        # maybe good enough. Alternatively we have to mock out the DB API
        # to return a fake aggregate with a hard-coded uuid that matches the
        # API sample which isn't fun either.
        subs['uuid'] = jsonutils.loads(response.content)['aggregate']['uuid']
        # save off the uuid for subs validation on other actions performed
        # on this aggregate
        # NOTE(review): ``extra_subs`` is a class-level dict, so this mutates
        # shared class state; presumably safe because each test creates its
        # own aggregate and overwrites the key — confirm.
        self.extra_subs['uuid'] = subs['uuid']
        return self._verify_response('aggregate-post-resp',
                                     subs, response, 200)
class AggregatesV2_81_SampleJsonTest(AggregatesV2_41_SampleJsonTest):
    """Microversion 2.81 variant: adds the aggregate image-caching API."""
    microversion = '2.81'
    scenarios = [
        (
            "v2_81", {
                'api_major_version': 'v2.1',
            },
        )
    ]
    def test_images(self):
        agg_id = self._test_aggregate_create()
        image = fake_image.get_valid_image_id()
        response = self._do_post('os-aggregates/%s/images' % agg_id,
                                 'aggregate-images-post-req',
                                 {'image_id': image})
        # No response body, so just check the status
        self.assertEqual(202, response.status_code)
| rahulunair/nova | nova/tests/functional/api_sample_tests/test_aggregates.py | Python | apache-2.0 | 5,569 |
#!/usr/bin/env python
""" hg-to-git.py - A Mercurial to GIT converter
Copyright (C)2007 Stelian Pop <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
"""
import os, os.path, sys
import tempfile, pickle, getopt
import re
# This script targets Python 2 (print statements, pickle-based state).
if sys.hexversion < 0x02030000:
    # The behavior of the pickle module changed significantly in 2.3
    sys.stderr.write("hg-to-git.py: requires Python 2.3 or later.\n")
    sys.exit(1)

# Maps hg version -> git version (hg revision number as str -> git SHA1)
hgvers = {}
# List of children for each hg revision
hgchildren = {}
# List of parents for each hg revision ((parent, merge_parent) tuples)
hgparents = {}
# Current branch for each hg revision
hgbranch = {}
# Number of new changesets converted from hg
hgnewcsets = 0
#------------------------------------------------------------------------------
def usage():
    # Print command-line help to stdout (Python 2 print statement).
    print """\
%s: [OPTIONS] <hgprj>
options:
    -s, --gitstate=FILE: name of the state to be saved/read
                         for incrementals
    -n, --nrepack=INT: number of changesets that will trigger
                       a repack (default=0, -1 to deactivate)
    -v, --verbose: be verbose
required:
    hgprj: name of the HG project to import (directory)
""" % sys.argv[0]
#------------------------------------------------------------------------------
def getgitenv(user, date):
    """Build a shell fragment exporting git author/committer env vars.

    *user* is an hg author string; "Name <email>" is split into name and
    email, anything else is used verbatim as the name with empty emails.
    """
    match = re.compile(r'(.*?)\s+<(.*)>').match(user)
    if match:
        name, email = match.group(1), match.group(2)
    else:
        name, email = user, None
    parts = [
        'export GIT_AUTHOR_NAME="%s" ;' % name,
        'export GIT_COMMITTER_NAME="%s" ;' % name,
    ]
    if email is not None:
        parts.append('export GIT_AUTHOR_EMAIL="%s" ;' % email)
        parts.append('export GIT_COMMITTER_EMAIL="%s" ;' % email)
    else:
        parts.append('export GIT_AUTHOR_EMAIL= ;')
        parts.append('export GIT_COMMITTER_EMAIL= ;')
    parts.append('export GIT_AUTHOR_DATE="%s" ;' % date)
    parts.append('export GIT_COMMITTER_DATE="%s" ;' % date)
    return ''.join(parts)
#------------------------------------------------------------------------------
state = ''
opt_nrepack = 0
verbose = False

# Parse command-line options; any parse problem falls through to usage().
try:
    opts, args = getopt.getopt(sys.argv[1:], 's:t:n:v', ['gitstate=', 'tempdir=', 'nrepack=', 'verbose'])
    for o, a in opts:
        if o in ('-s', '--gitstate'):
            state = a
            state = os.path.abspath(state)
        if o in ('-n', '--nrepack'):
            opt_nrepack = int(a)
        if o in ('-v', '--verbose'):
            verbose = True
    if len(args) != 1:
        raise Exception('params')
except:
    usage()
    sys.exit(1)

hgprj = args[0]
os.chdir(hgprj)

# Load the hg-rev -> git-sha map from a previous run, if any, so the
# conversion can resume incrementally.
if state:
    if os.path.exists(state):
        if verbose:
            print 'State does exist, reading'
        f = open(state, 'r')
        hgvers = pickle.load(f)
    else:
        print 'State does not exist, first run'

# Ask hg for the newest revision number; a failed exit status aborts.
sock = os.popen('hg tip --template "{rev}"')
tip = sock.read()
if sock.close():
    sys.exit(1)
if verbose:
    print 'tip is', tip
# Calculate the branches: build child/parent maps for every hg revision
# and assign each revision a git branch name.
if verbose:
    print 'analysing the branches...'
hgchildren["0"] = ()
hgparents["0"] = (None, None)
hgbranch["0"] = "master"
for cset in range(1, int(tip) + 1):
    hgchildren[str(cset)] = ()
    # "{parents}" is empty when the parent is simply the previous rev.
    prnts = os.popen('hg log -r %d --template "{parents}"' % cset).read().strip().split(' ')
    prnts = map(lambda x: x[:x.find(':')], prnts)
    if prnts[0] != '':
        parent = prnts[0].strip()
    else:
        parent = str(cset - 1)
    hgchildren[parent] += ( str(cset), )
    if len(prnts) > 1:
        mparent = prnts[1].strip()
        hgchildren[mparent] += ( str(cset), )
    else:
        mparent = None
    hgparents[str(cset)] = (parent, mparent)
    if mparent:
        # For merge changesets, take either one, preferably the 'master' branch
        if hgbranch[mparent] == 'master':
            hgbranch[str(cset)] = 'master'
        else:
            hgbranch[str(cset)] = hgbranch[parent]
    else:
        # Normal changesets
        # For first children, take the parent branch, for the others create a new branch
        if hgchildren[parent][0] == str(cset):
            hgbranch[str(cset)] = hgbranch[parent]
        else:
            hgbranch[str(cset)] = "branch-" + str(cset)
# First run: create an empty git repository alongside the hg one.
if not hgvers.has_key("0"):
    print 'creating repository'
    os.system('git init')

# loop through every hg changeset
for cset in range(int(tip) + 1):

    # incremental, already seen
    if hgvers.has_key(str(cset)):
        continue
    hgnewcsets += 1

    # get info
    log_data = os.popen('hg log -r %d --template "{tags}\n{date|date}\n{author}\n"' % cset).readlines()
    tag = log_data[0].strip()
    date = log_data[1].strip()
    user = log_data[2].strip()
    parent = hgparents[str(cset)][0]
    mparent = hgparents[str(cset)][1]

    # get comment (written to a temp file so git commit -F can read it)
    (fdcomment, filecomment) = tempfile.mkstemp()
    csetcomment = os.popen('hg log -r %d --template "{desc}"' % cset).read().strip()
    os.write(fdcomment, csetcomment)
    os.close(fdcomment)

    print '-----------------------------------------'
    print 'cset:', cset
    print 'branch:', hgbranch[str(cset)]
    print 'user:', user
    print 'date:', date
    print 'comment:', csetcomment
    if parent:
        print 'parent:', parent
    if mparent:
        print 'mparent:', mparent
    if tag:
        print 'tag:', tag
    print '-----------------------------------------'

    # checkout the parent if necessary
    if cset != 0:
        if hgbranch[str(cset)] == "branch-" + str(cset):
            print 'creating new branch', hgbranch[str(cset)]
            os.system('git checkout -b %s %s' % (hgbranch[str(cset)], hgvers[parent]))
        else:
            print 'checking out branch', hgbranch[str(cset)]
            os.system('git checkout %s' % hgbranch[str(cset)])

    # merge: record the second parent with the "ours" strategy, the real
    # tree content is re-created from the hg checkout below.
    if mparent:
        if hgbranch[parent] == hgbranch[str(cset)]:
            otherbranch = hgbranch[mparent]
        else:
            otherbranch = hgbranch[parent]
        print 'merging', otherbranch, 'into', hgbranch[str(cset)]
        os.system(getgitenv(user, date) + 'git merge --no-commit -s ours "" %s %s' % (hgbranch[str(cset)], otherbranch))

    # remove everything except .git and .hg directories
    os.system('find . \( -path "./.hg" -o -path "./.git" \) -prune -o ! -name "." -print | xargs rm -rf')

    # repopulate with checkouted files
    os.system('hg update -C %d' % cset)

    # add new files
    os.system('git ls-files -x .hg --others | git update-index --add --stdin')
    # delete removed files
    os.system('git ls-files -x .hg --deleted | git update-index --remove --stdin')

    # commit
    os.system(getgitenv(user, date) + 'git commit --allow-empty -a -F %s' % filecomment)
    os.unlink(filecomment)

    # tag
    if tag and tag != 'tip':
        os.system(getgitenv(user, date) + 'git tag %s' % tag)

    # delete branch if not used anymore...
    if mparent and len(hgchildren[str(cset)]):
        print "Deleting unused branch:", otherbranch
        os.system('git branch -d %s' % otherbranch)

    # retrieve and record the version
    vvv = os.popen('git show --quiet --pretty=format:%H').read()
    print 'record', cset, '->', vvv
    hgvers[str(cset)] = vvv

    # periodically repack so a long import doesn't bloat the object store
    if hgnewcsets >= opt_nrepack and opt_nrepack != -1:
        os.system('git repack -a -d')
| pniebla/test-repo-console | svn/git-1.8.3.3.tar/git-1.8.3.3/git-1.8.3.3/contrib/hg-to-git/hg-to-git.py | Python | mit | 8,052 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        # Apply: make both resize-offer FKs nullable and switch their
        # deletion behavior to SET_NULL so deleting a CloudStackPack no
        # longer cascades into DatabaseResize rows.
        # Changing field 'DatabaseResize.target_offer'
        db.alter_column(u'maintenance_databaseresize', 'target_offer_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, on_delete=models.SET_NULL, to=orm['dbaas_cloudstack.CloudStackPack']))

        # Changing field 'DatabaseResize.source_offer'
        db.alter_column(u'maintenance_databaseresize', 'source_offer_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, on_delete=models.SET_NULL, to=orm['dbaas_cloudstack.CloudStackPack']))
def backwards(self, orm):
    """Revert the offer FKs on DatabaseResize to nullable without SET_NULL.

    Restores ``maintenance_databaseresize.target_offer_id`` and
    ``.source_offer_id`` to plain nullable ForeignKeys (default delete
    behavior), undoing :meth:`forwards`.
    """
    # Same field definition for both columns; construct the shared
    # pieces once and apply per column.
    fk_class = self.gf('django.db.models.fields.related.ForeignKey')
    pack_model = orm['dbaas_cloudstack.CloudStackPack']
    for column in ('target_offer_id', 'source_offer_id'):
        db.alter_column(
            u'maintenance_databaseresize',
            column,
            fk_class(null=True, to=pack_model),
        )
models = {
u'account.team': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Team'},
'contacts': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_alocation_limit': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '2'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'backup.backupgroup': {
'Meta': {'object_name': 'BackupGroup'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'dbaas_cloudstack.cloudstackoffering': {
'Meta': {'object_name': 'CloudStackOffering'},
'cpus': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'equivalent_offering': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['dbaas_cloudstack.CloudStackOffering']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'memory_size_mb': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_offering_region'", 'null': 'True', 'to': u"orm['dbaas_cloudstack.CloudStackRegion']"}),
'serviceofferingid': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'weaker': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'dbaas_cloudstack.cloudstackpack': {
'Meta': {'object_name': 'CloudStackPack'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_packs'", 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_offering_packs'", 'to': u"orm['dbaas_cloudstack.CloudStackOffering']"}),
'script_file': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'dbaas_cloudstack.cloudstackregion': {
'Meta': {'object_name': 'CloudStackRegion'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_environment_region'", 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'logical.database': {
'Meta': {'ordering': "(u'name',)", 'unique_together': "((u'name', u'environment'),)", 'object_name': 'Database'},
'backup_path': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DatabaseInfra']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_auto_resize': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_quarantine': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['logical.Project']"}),
'quarantine_dt': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'quarantine_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_quarantine'", 'null': 'True', 'to': u"orm['auth.User']"}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'default': '0.0'})
},
u'logical.project': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Project'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasechangeparameter': {
'Meta': {'object_name': 'DatabaseChangeParameter'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'change_parameters'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_change_parameters'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasecreate': {
'Meta': {'object_name': 'DatabaseCreate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'to': u"orm['logical.Database']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'infra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.DatabaseInfra']"}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'to': u"orm['logical.Project']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'create_database'", 'to': u"orm['notification.TaskHistory']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'maintenance.databasereinstallvm': {
'Meta': {'object_name': 'DatabaseReinstallVM'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'reinstall_vm'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinstall_vm'", 'to': u"orm['physical.Instance']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinsgtall_vm'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseresize': {
'Meta': {'object_name': 'DatabaseResize'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'resizes'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_offer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_resizes_source'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['dbaas_cloudstack.CloudStackPack']"}),
'source_offer_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_offer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_resizes_target'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['dbaas_cloudstack.CloudStackPack']"}),
'target_offer_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_resizes'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestore': {
'Meta': {'object_name': 'DatabaseRestore'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['backup.BackupGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_restore_new'", 'null': 'True', 'to': u"orm['backup.BackupGroup']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestoreinstancepair': {
'Meta': {'unique_together': "((u'master', u'slave', u'restore'),)", 'object_name': 'DatabaseRestoreInstancePair'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_master'", 'to': u"orm['physical.Instance']"}),
'restore': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_instances'", 'to': u"orm['maintenance.DatabaseRestore']"}),
'slave': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_slave'", 'to': u"orm['physical.Instance']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseupgrade': {
'Meta': {'object_name': 'DatabaseUpgrade'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'upgrades'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_source'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'source_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_target'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'target_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_upgrades'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.hostmaintenance': {
'Meta': {'unique_together': "((u'host', u'maintenance'),)", 'object_name': 'HostMaintenance', 'index_together': "[[u'host', u'maintenance']]"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'host_maintenance'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Host']"}),
'hostname': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance'", 'to': u"orm['maintenance.Maintenance']"}),
'rollback_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.maintenance': {
'Meta': {'object_name': 'Maintenance'},
'affected_hosts': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'celery_task_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'hostsid': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'max_length': '10000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_script': ('django.db.models.fields.TextField', [], {}),
'maximum_workers': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'revoked_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'rollback_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'scheduled_for': ('django.db.models.fields.DateTimeField', [], {'unique': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.maintenanceparameters': {
'Meta': {'object_name': 'MaintenanceParameters'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'function_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance_params'", 'to': u"orm['maintenance.Maintenance']"}),
'parameter_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'notification.taskhistory': {
'Meta': {'object_name': 'TaskHistory'},
'arguments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'db_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'ended_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_class': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'task_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_status': ('django.db.models.fields.CharField', [], {'default': "u'WAITING'", 'max_length': '100', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.databaseinfra': {
'Meta': {'object_name': 'DatabaseInfra'},
'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_vm_created': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'name_stamp': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'physical.diskoffering': {
'Meta': {'object_name': 'DiskOffering'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.engine': {
'Meta': {'ordering': "(u'engine_type__name', u'version')", 'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
'has_users': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'read_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'write_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'physical.enginetype': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'EngineType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_memory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environment': {
'Meta': {'object_name': 'Environment'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'migrate_environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Environment']"}),
'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.host': {
'Meta': {'object_name': 'Host'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'future_host': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'monitor_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'os_description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.instance': {
'Meta': {'unique_together': "((u'address', u'port'),)", 'object_name': 'Instance'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.DatabaseInfra']"}),
'dns': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'future_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Instance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {}),
'read_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'shard': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.parameter': {
'Meta': {'ordering': "(u'engine_type__name', u'name')", 'unique_together': "((u'name', u'engine_type'),)", 'object_name': 'Parameter'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom_method': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'dynamic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'enginetype'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.plan': {
'Meta': {'object_name': 'Plan'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'plans'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'plans'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'migrate_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'replication_topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'replication_topology'", 'null': 'True', 'to': u"orm['physical.ReplicationTopology']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.replicationtopology': {
'Meta': {'object_name': 'ReplicationTopology'},
'can_change_parameters': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_clone_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_reinstall_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_resize_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_switch_master': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_upgrade_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'replication_topologies'", 'symmetrical': 'False', 'to': u"orm['physical.Engine']"}),
'has_horizontal_scalability': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'replication_topologies'", 'blank': 'True', 'to': u"orm['physical.Parameter']"}),
'script': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'replication_topologies'", 'null': 'True', 'to': u"orm['physical.Script']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.script': {
'Meta': {'object_name': 'Script'},
'configuration': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initialization': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'start_database': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'start_replication': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['maintenance'] | globocom/database-as-a-service | dbaas/maintenance/migrations/0028_auto__chg_field_databaseresize_target_offer__chg_field_databaseresize_.py | Python | bsd-3-clause | 44,593 |
# The MIT License
#
# Copyright (c) 2008 James Piechota
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
import os.path
# Node definition
class WReader:
    '''Parser for Massive .w (skin weight) files.

    After read():
        deformers -- list of deformer (joint) names, indexed by deformer id.
        weights   -- one list per vertex, a weight per deformer
                     (0 where the deformer has no influence on that vertex).
    '''

    def __init__(self):
        self._fullName = ""
        self._path = ""
        # Highest number of non-zero influences seen on any single vertex;
        # used later to optimize the skin deformers.
        self._maxInfluences = 0
        self.deformers = []
        self.weights = []

    def name(self):
        '''Return the path of the last file read ("" if none).'''
        return self._fullName

    def read(self, fullName):
        '''Load skin weights from a Massive .w (weights) file.

        Silently returns if fullName does not name an existing file.
        Re-raises any parse/IO error after reporting the file name.
        '''
        if not os.path.isfile(fullName):
            return
        self._fullName = fullName
        self._path = os.path.dirname(fullName)
        try:
            fileHandle = open(self._fullName, "r")
            try:
                for line in fileHandle:
                    self._parseLine(line)
            finally:
                # Close even when a parse error escapes (the previous version
                # leaked the handle on the error path).
                fileHandle.close()
        except:
            # Deliberately broad: name the offending file, then re-raise.
            sys.stderr.write("Error reading Weights file: %s\n" % self._fullName)
            raise

    def _parseLine(self, line):
        '''Parse a single .w line and update deformers/weights in place.'''
        tokens = line.strip().split()
        if not tokens:
            return
        if tokens[0][0] == "#":
            # Comment line.
            return
        if tokens[0] == "deformer":
            # "deformer <id> <name>": grow the list so ids map to indices.
            id = int(tokens[1])
            numDeformers = len(self.deformers)
            if id >= numDeformers:
                self.deformers.extend([""] * (id - numDeformers + 1))
            self.deformers[id] = tokens[2]
        else:
            # "<vtx>: <deformerId> <weight> ...": store weights densely.
            # TODO: see if storing 0s for joints that have no influence is a
            # problem. Storing the influences sparsely may make applying the
            # weights later more complex.
            #
            # The vertex index is parsed only for format validation; weights
            # are appended in file order.
            vtx = int(tokens[0][:-1])
            numTokens = len(tokens)
            influences = [0] * len(self.deformers)
            count = 0
            for i in range(1, numTokens, 2):
                influences[int(tokens[i])] = float(tokens[i + 1])
                count += 1
            # Keep track of the maximum number of influences on a given
            # vertex so we can use it to optimize the skin deformers later.
            if count > self._maxInfluences:
                self._maxInfluences = count
            self.weights.append(influences)
| redpawfx/massiveImporter | python/ns/bridge/io/WReader.py | Python | mit | 2,920 |
#
# This file is part of the PyMeasure package.
#
# Copyright (c) 2013-2017 PyMeasure Developers
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
from .anritsuMG3692C import AnritsuMG3692C
from .anritsuMS9710C import AnritsuMS9710C
| TvBMcMaster/pymeasure | pymeasure/instruments/anritsu/__init__.py | Python | mit | 1,243 |
from django import template
register = template.Library()
@register.filter
def drawLight(value):
    """Map a status value to the matching indicator-light image file name."""
    # 1 is the "up" state; every other value renders the red light.
    return "green.png" if value == 1 else "red.png"
| UNINETT/nav | python/nav/django/templatetags/portadmin.py | Python | gpl-2.0 | 180 |
"""
These settings are used by the ``manage.py`` command.
With normal tests we want to use the fastest possible way which is an
in-memory sqlite database but if you want to create South migrations you
need a persistant database.
Unfortunately there seems to be an issue with either South or syncdb so that
defining two routers ("default" and "south") does not work.
"""
from enquiry.tests.test_settings import *  # NOQA

# Override the base test settings' database with a persistent on-disk SQLite
# file: South needs a real database it can inspect when generating migrations
# (the normal test settings use in-memory sqlite, per the module docstring).
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'db.sqlite',
    }
}

# Make the South migration tool available to ``manage.py`` commands.
INSTALLED_APPS.append('south', )
| bitmazk/django-enquiry | enquiry/tests/south_settings.py | Python | mit | 575 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2009-2010, Nicolas Clairon
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the University of California, Berkeley nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class SchemaOperator(object):
    """Base class for composable schema validation operators.

    Concrete subclasses set ``repr`` to their textual name and implement
    ``validate(value)``.
    """
    repr = None

    def __init__(self, *args):
        # ``repr`` doubles as a marker that a concrete subclass is in use.
        assert self.repr is not None
        self._operands = list(args)

    def __repr__(self):
        return str(self)

    def __iter__(self):
        return iter(self._operands)

    def __eq__(self, other):
        return type(self) == type(other) and self._operands == other._operands

    def validate(self, value):
        raise NotImplementedError


class OR(SchemaOperator):
    """Accepts values that are an instance of any operand type."""
    repr = 'or'

    def __str__(self):
        separator = ' %s ' % self.repr
        return '<%s>' % separator.join(op.__name__ for op in self._operands)

    def validate(self, value):
        return isinstance(value, tuple(self._operands))


class NOT(SchemaOperator):
    """Accepts values that are an instance of none of the operand types."""
    repr = 'not'

    def __str__(self):
        separator = ', %s ' % self.repr
        return '<not %s>' % separator.join(op.__name__ for op in self._operands)

    def validate(self, value):
        return not isinstance(value, tuple(self._operands))


class IS(SchemaOperator):
    """Accepts values equal to (and of the same type as) an operand value."""
    repr = 'is'

    def __str__(self):
        separator = ' or %s ' % self.repr
        return '<is %s>' % separator.join(repr(op) for op in self._operands)

    def validate(self, value):
        # Equality alone is not enough (e.g. 1 == True), so the value must
        # also be an instance of the matching operand's exact type.
        for op in self._operands:
            if value == op and isinstance(value, type(op)):
                return True
        return False
| goanpeca/mongokit | mongokit/operators.py | Python | bsd-3-clause | 3,264 |
#!/usr/bin/env python
###############################################################################
# vim: tabstop=4:shiftwidth=4:expandtab:
# Copyright (c) 2017-2018 SIOS Technology Corp. All rights reserved.
##############################################################################
"""
This script will spray events for various environments and VMs.
"""
import logging
import sys
from datetime import datetime, timedelta
from pytz import timezone
from os.path import dirname, realpath
curr_path = dirname(realpath(__file__))
sys.path.insert(0, '{}/../../'.format(curr_path))
from SignaliQ.client import Client
from SignaliQ.model.CloudProviderEvent import CloudProviderEvent
from SignaliQ.model.CloudVM import CloudVM
from SignaliQ.model.NetworkInterface import NetworkInterface
from SignaliQ.model.ProviderEventsUpdateMessage import ProviderEventsUpdateMessage
__log__ = logging.getLogger(__name__)
def main(args):
    """Spray a batch of synthetic provider events across test environments.

    Sends five rounds of one ProviderEventsUpdateMessage per environment,
    with event timestamps advancing 5 minutes per message.
    """
    # Setup the client and send the data!
    client = Client()
    client.connect()

    # Environment id -> hardware (MAC) addresses of the VM network
    # interfaces the events should be attached to.
    id_interf = {
        500: [
            "00:50:56:9b:3a:9b",
            "00:50:56:9b:51:f2",
            "00:50:56:9b:6f:09",
        ],
        505: [
            "00:50:56:93:7a:b9",
        ],
    }

    # pytz zones must be attached with localize(); replace(tzinfo=...) picks
    # the zone's LMT entry and yields a wrong UTC offset.
    event_time = timezone('US/Eastern').localize(datetime.now())

    # Five rounds per environment. (The previous version built a throwaway
    # list of five references to the same dict just to drive this loop.)
    for _ in range(5):
        for env_id, hws in id_interf.items():
            event_time += timedelta(minutes=5)
            __log__.info(
                "Creating event with time {} and env id of {}".format(
                    event_time.strftime('%Y-%m-%dT%H:%M:%S%z'), env_id,
                )
            )
            events = [
                CloudProviderEvent(
                    description = "Caused by a bad mood",
                    environment_id = env_id,
                    layer = "Storage",
                    severity = "Critical",
                    time = event_time.strftime('%Y-%m-%dT%H:%M:%S%z'),
                    event_type = "SDK Event",
                    vms = [
                        CloudVM(network_interfaces = [NetworkInterface(hw_address = hw)]) for hw in hws
                    ],
                )
            ]
            event_message = ProviderEventsUpdateMessage(
                environment_id = env_id,
                events = events,
            )
            client.send(event_message)

    # DONE
    client.disconnect()


if __name__ == "__main__":
    main(sys.argv)
| siostechcorp/Signal_iQ | python/test/scripts/hw_minigun.py | Python | mit | 2,521 |
import random, time, sys
# key catching code
import sys,tty,termios
class _Getch:
def __call__(self):
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
# end key catching code
maze = '''#####################################
# # # # # # #
# # ##### # ### ##### ### ### ### # #
# # # # # # # # # #
##### # ##### ##### ### # # # ##### #
# # # # # # # # # # #
# # ####### # # ##### ### # ##### # #
# # # # # # # # # #
# ####### ### ### # ### ##### # ### #
# # # # # # # # # #
# ### ### # ### # ##### # # # #######
# # # # # # # # # # # #
####### # # # ##### # ### # ### ### #
# # # # # # # # # #
# ### # ##### ### # ### ### ####### #
# # # # # # # # # #
# # ##### # ### ##### # # ####### # #
# # # # # # # # # # #
# ##### # # # ### ##### ##### # #####
# # # # # # # # # #
# # ### ### ### ##### ### # ##### # #
# # # # # # #
#X###################################'''
maze_old = '''#########################################################################
# # # # # # #
# # ######### # ##### ######### ##### ##### ##### # #
# # # # # # # # # #
######### # ######### ######### ##### # # # ######### #
# # # # # # # # # # #
# # ############# # # ######### ##### # ######### # #
# # # # # # # # # #
# ############# ##### ##### # ##### ######### # ##### #
# # # # # # # # # #
# ##### ##### # ##### # ######### # # # #############
# # # # # # # # # # # #
############# # # # ######### # ##### # ##### ##### #
# # # # # # # # # #
# ##### # ######### ##### # ##### ##### ############# #
# # # # # # # # # #
# # ######### # ##### ######### # # ############# # #
# # # # # # # # # # #
# ######### # # # ##### ######### ######### # #########
# # # # # # # # # #
# # ##### ##### ##### ######### ##### # ######### # #
# # # # # # #
# X #####################################################################'''
# Flipped to True by check_for_obstacle() when the player reaches the exit 'X'.
game_won = False
def get_maze_width():
    '''Width of the maze in characters (length of its first row).'''
    first_row = maze.splitlines()[0]
    return len(first_row)


def get_maze_height():
    '''Height of the maze in rows.'''
    return len(maze.splitlines())
def replace_character(x, y, character):
    '''
    Overwrite the tile at map coordinate (x, y) in the global maze string.
    '''
    global maze
    cells = list(maze)
    col = 0
    row = 0
    for index, cell in enumerate(cells):
        if cell == '\n':
            # Newlines separate rows: advance the row, reset the column.
            row += 1
            col = 0
            continue
        if col == x and row == y:
            cells[index] = character
            maze = ''.join(cells)
            break
        col += 1
def check_for_obstacle(x, y):
    '''
    Return True if (x, y) is outside the maze or blocked by a wall.

    Side effect: stepping onto the exit tile 'X' sets the global game_won
    flag (and reports the tile as walkable).
    '''
    global game_won
    if not (0 <= x < get_maze_width() and 0 <= y < get_maze_height()):
        return True
    tile = maze.splitlines()[y][x]
    if tile == 'X':
        game_won = True
        return False
    return tile != ' '
def obstacle_found(x, y, direction):
    '''
    Return whether one step in *direction* ('u'/'d'/'l'/'r') from (x, y)
    would hit an obstacle. Any other direction tests the current square.
    '''
    offsets = {'u': (0, -1), 'd': (0, 1), 'l': (-1, 0), 'r': (1, 0)}
    dx, dy = offsets.get(direction, (0, 0))
    return check_for_obstacle(x + dx, y + dy)
class Player:
    '''The player avatar: tracks its map position and draws itself.'''

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def _step(self, direction, dx, dy, glyph):
        '''Try to move by (dx, dy); always redraw facing *glyph*.'''
        # Only vacate the old square when the move is actually legal.
        if not obstacle_found(self.x, self.y, direction):
            replace_character(self.x, self.y, ' ')
            replace_character(self.x + dx, self.y + dy, glyph)
            self.x += dx
            self.y += dy
        else:
            replace_character(self.x, self.y, glyph)

    def move_up(self):
        self._step('u', 0, -1, '^')

    def move_down(self):
        self._step('d', 0, 1, 'v')

    def move_left(self):
        self._step('l', -1, 0, '<')

    def move_right(self):
        self._step('r', 1, 0, '>')
def spawn_player():
    '''
    Place the player on a random walkable square and return the Player.
    '''
    width = get_maze_width()
    height = get_maze_height()
    while True:
        col = random.randrange(width)
        row = random.randrange(height)
        if check_for_obstacle(col, row):
            continue
        # NOTE(review): check_for_obstacle() reports the exit 'X' as walkable
        # and flips game_won, so spawning on 'X' wins instantly -- confirm
        # this is intended.
        player = Player(col, row)
        replace_character(player.x, player.y, '^')
        return player
def show_map():
    '''
    Print the maze preceded by a blank separator line.
    '''
    print('\n' + maze)
def play(player):
    '''
    Draw the maze, block for one keypress, and apply it to *player*.
    '''
    show_map()
    read_key = _Getch()
    # Block until a non-empty keypress arrives.
    while True:
        pressed = read_key()
        if pressed != '':
            break
    code = ord(pressed)
    if code == 32:  # space bar quits the game
        sys.exit()
    elif code == 65:  # 'A': final byte of the up-arrow escape sequence
        player.move_up()
    elif code == 66:  # 'B': down arrow
        player.move_down()
    elif code == 68:  # 'D': left arrow
        player.move_left()
    elif code == 67:  # 'C': right arrow
        player.move_right()
    # Any other byte (including the ESC/'[' prefix bytes) falls through.
def init():
    '''
    Spawn the player object and run the game loop until the exit is reached.
    '''
    player = spawn_player()
    while not game_won:
        play(player)
    print("Congrats! You won!")


init()
| kas/r-DailyProgrammer | weekly/25 Escape the trolls/app.py | Python | mit | 7,148 |
from status.util import SafeHandler
from datetime import datetime
# Per-run-mode threshold values keyed by sequencer / flowcell mode; passed to
# the flowcell template (see FlowcellHandler.get) for judging run performance.
# NOTE(review): units are not stated in this module -- values look like
# expected yield per mode; confirm against the template that consumes them.
thresholds = {
    'HiSeq X': 320,
    'RapidHighOutput': 188,
    'HighOutput': 143,
    'RapidRun': 114,
    'MiSeq Version3': 18,
    'MiSeq Version2': 10,
    'MiSeq Version2Nano': 0.75,
    'NovaSeq SP': 325,
    'NovaSeq S1': 650,
    'NovaSeq S2': 1650,
    'NovaSeq S4': 2000,
    'NextSeq Mid' : 25,
    'NextSeq High' : 75,
    'NextSeq 2000 P2' : 400,
    'NextSeq 2000 P3' : 1100
}
class FlowcellHandler(SafeHandler):
    """ Serves a page which shows information for a given flowcell.
    """
    def __init__(self, application, request, **kwargs):
        # to cache a list of project_names ->
        # then we don't query statusdb each time when accessing the flowcell page
        self._project_names = {}
        # NOTE(review): super(SafeHandler, ...) starts the MRO search *after*
        # SafeHandler, i.e. SafeHandler.__init__ itself is skipped. Harmless
        # if SafeHandler defines no __init__, but confirm it is intentional
        # (the usual form would be super(FlowcellHandler, self)).
        super(SafeHandler, self).__init__(application, request, **kwargs)

    def _get_project_id_by_name(self, project_name):
        """Return the statusdb project_id for *project_name*, caching lookups."""
        if project_name not in self._project_names:
            view = self.application.projects_db.view('project/project_name')[project_name]
            # should be only one row, if not - will overwrite
            for row in view.rows:
                doc_id = row.value
                project_doc = self.application.projects_db.get(doc_id)
                project_id = project_doc.get('project_id')
                self._project_names[project_name] = project_id
        # Falls back to '' when the name was never resolved (no view rows).
        return self._project_names.get(project_name, '')

    def _get_project_list(self, flowcell):
        """Return the flowcell's project list with '__'/'_' de-mangled to '.'."""
        # replace '__' in project name
        replaced_plist = []
        if 'plist' in flowcell:
            for project in flowcell['plist']:
                if '__' in project:
                    project = project.replace('__', '.')
                else: # replace only the first one
                    project = project.replace('_', '.', 1)
                # 'default' is a placeholder entry, not a real project.
                if project != 'default':
                    replaced_plist.append(project)
        return replaced_plist

    def find_DB_entry(self, flowcell_id):
        """Look up the flowcell in x_flowcells_db.

        Tries the full run id first, then resolves a short name to the full
        id. Returns the matching view row, or False when nothing is found.
        """
        #Returns Runid (key), contents (complex)
        view = self.application.x_flowcells_db.view('info/summary2_full_id', key=flowcell_id)
        if view.rows:
            return view.rows[0]
        # No hit for a full name, check if the short name is found:
        complete_flowcell_rows = self.application.x_flowcells_db.view(
            'info/short_name_to_full_name',
            key=flowcell_id
        ).rows
        if complete_flowcell_rows:
            complete_flowcell_id = complete_flowcell_rows[0].value
            view = self.application.x_flowcells_db.view(
                'info/summary2_full_id',
                key=complete_flowcell_id,
            )
            if view.rows:
                return view.rows[0]
        return False

    def get(self, flowcell_id):
        """Render the flowcell page, or an error page when the id is unknown."""
        entry = self.find_DB_entry(flowcell_id)
        if not entry:
            extra_message=""
            # The first six characters of the id encode the run date (YYMMDD).
            flowcell_date = datetime.strptime(flowcell_id[0:6], "%y%m%d")
            # Runs predating the first x_flowcells record live in an older
            # database this page cannot reach; tell the user where to go.
            first_xflowcell_record = datetime(2015,3,13)
            if first_xflowcell_record>flowcell_date:
                extra_message = "Your flowcell is in an older database. It can still be accessed, contact your administrator."
            self.set_status(200)
            t = self.application.loader.load("flowcell_error.html")
            self.write(t.generate(gs_globals=self.application.gs_globals,
                                  flowcell_id=flowcell_id,
                                  user=self.get_current_user(),
                                  extra_message=extra_message
                                  ))
            return
        else:
            # replace '__' in project name
            entry.value['plist'] = self._get_project_list(entry.value)
            # list of project_names -> to create links to project page and bioinfo tab
            project_names = {project_name: self._get_project_id_by_name(project_name) for project_name in entry.value['plist']}
            t = self.application.loader.load("flowcell.html")
            self.write(t.generate(gs_globals=self.application.gs_globals,
                                  flowcell=entry.value,
                                  flowcell_id=flowcell_id,
                                  thresholds=thresholds,
                                  project_names=project_names,
                                  user=self.get_current_user()))
| remiolsen/status | status/flowcell.py | Python | mit | 4,508 |
import re

# Input corpus: a word list (.txt) and tab-separated vectors (.tab) where
# each row is a 2000-dim concatenation (first 1000 dims + 1000 dependency
# dims, per the '1000concat2000' file name).
datapath = "./data/"
outputpath = "./output/"

# Dimensions kept per word in the output (the dependency half of each row).
vecNum = 1000

# First pass: count the vocabulary so the word2vec-style "<count> <dim>"
# header can be written before streaming the vectors.
with open(datapath + 'doc2Dep20MWU57k_1000concat2000.txt', 'r') as wordfile:
    num_lines = sum(1 for _ in wordfile)

# Second pass: emit "<word> <v1> ... <v1000>" per line. All handles are
# context-managed (the previous version leaked wordfile and vectorfile).
with open(datapath + 'doc2Dep20MWU57k_1000concat2000.tab', 'r') as vectorfile, \
        open(datapath + 'doc2Dep20MWU57k_1000concat2000.txt', 'r') as wordfile, \
        open(outputpath + 'dependencyVec.txt', 'w') as outputfile:
    outputfile.write('%d %d\n' % (num_lines, vecNum))
    for _ in range(num_lines):
        outputfile.write(wordfile.readline().strip())
        # Keep only the dependency half ([1000:]) of each concatenated row.
        dependencyVec = re.split(r'\t+', vectorfile.readline().rstrip('\t\n'))[1000:]
        for value in dependencyVec:
            outputfile.write(' %s' % (value))
        outputfile.write('\n')
| sidenver/ConstituentRetrofit | parseInput.py | Python | apache-2.0 | 665 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.