code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1
value | license stringclasses 15
values | size int64 3 1.05M |
|---|---|---|---|---|---|
import sys
class RedminePage:
    """In-memory model of a Redmine issue page: a question plus its comments.

    Comments ("commonts" in this codebase's spelling) are stored oldest-first
    as [context, time] pairs; ``_floor`` counts how many have been stored.
    """
    def __init__(self, id):
        self._id = id                # redmine issue ID
        self._question = ""          # question text
        self._question_time = ""     # time the question was asked
        self._floor = 0              # number of comments stored so far
        self._commont = list()       # list of [context, time] pairs, oldest first
    def get_question(self):
        """Return the question text (empty string until set)."""
        return self._question
    def get_question_time(self):
        """Return the time the question was asked (empty string until set)."""
        return self._question_time
    def get_max_floor(self):
        """Return the index of the newest comment (-1 when there are none)."""
        return self._floor - 1
    def add_commont(self, context, time):
        """Append a comment with its timestamp and advance the floor counter."""
        self._commont.append([context, time])
        self._floor += 1
    def get_last_commont(self):
        """Return a copy of [context, time] for the newest comment.

        Raises IndexError when no comment has been added yet.
        """
        return list(self._commont[self.get_max_floor()])
    def set_json(self, s):
        """Load comments from a dict keyed content0/time0, content1/time1, ...

        Stops at the first missing contentN key, so the keys must be
        consecutively numbered from 0.
        """
        i = 0
        while "content%d" % i in s:
            self.add_commont(s["content%d" % i], s["time%d" % i])
            i += 1
    def print_one_commont(self, f):
        """Print the comment at floor index f (context line, then time line)."""
        # Parenthesized single-argument print behaves identically on
        # Python 2 and 3 (py2 treats the parens as a plain expression).
        print(self._commont[f][0])
        print(self._commont[f][1])
    def print_all_commont(self):
        """Print every stored comment, oldest first."""
        for i in range(0, self._floor):
            self.print_one_commont(i)
    def print_last_commont(self):
        """Print the newest comment."""
        self.print_one_commont(self.get_max_floor())
| labdong801/easyredmine | src/eobj.py | Python | apache-2.0 | 1,572 |
from twilio.rest.resources.util import normalize_dates
from twilio.rest.resources import InstanceResource, ListResource
class Transcription(InstanceResource):
    """A single transcription resource; all behavior comes from InstanceResource."""
    pass
class Transcriptions(ListResource):
    # REST API resource name and the class used to wrap each returned item.
    name = "Transcriptions"
    instance = Transcription
    def list(self, **kwargs):
        """
        Return a list of :class:`Transcription` resources.

        Any keyword arguments are forwarded as API filter parameters.
        """
        return self.get_instances(kwargs)
class Recording(InstanceResource):
    """A call recording; the audio is downloadable in several formats."""

    subresources = [Transcriptions]

    def __init__(self, *args, **kwargs):
        super(Recording, self).__init__(*args, **kwargs)
        # Map every supported audio format to its direct download URI.
        self.formats = {ext: self.uri + "." + ext for ext in ("mp3", "wav")}

    def delete(self):
        """
        Delete this recording
        """
        return self.delete_instance()
class Recordings(ListResource):
    """List resource for call recordings."""

    name = "Recordings"
    instance = Recording

    @normalize_dates
    def list(self, before=None, after=None, **kwargs):
        """
        Return one page of :class:`Recording` resources as a list.
        For paging information see :class:`ListResource`.

        :param date after: Only list recordings logged after this datetime
        :param date before: Only list recordings logger before this datetime
        :param call_sid: Only list recordings from this :class:`Call`
        """
        # The API expresses date filtering via comparison-suffixed keys.
        kwargs.update({"DateCreated<": before, "DateCreated>": after})
        return self.get_instances(kwargs)

    def delete(self, sid):
        """
        Delete the recording identified by *sid*.
        """
        return self.delete_instance(sid)
| balanced/status.balancedpayments.com | venv/lib/python2.7/site-packages/twilio/rest/resources/recordings.py | Python | mit | 1,602 |
# encoding: utf-8
"""
The data type and use of it for declaratively creating test courses.
"""
# used to create course subtrees in ModuleStoreTestCase.create_test_course
# adds to self properties w/ the given block_id which hold the UsageKey for easy retrieval.
# fields is a dictionary of keys and values. sub_tree is a collection of BlockInfo
from collections import namedtuple
import datetime
# One node of a declarative course tree: its usage block_id, xblock category,
# a dict of field values, and the list of child BlockInfo nodes.
BlockInfo = namedtuple('BlockInfo', 'block_id, category, fields, sub_tree')  # pylint: disable=invalid-name
# Minimal two-chapter course: each chapter holds one sequential containing one
# vertical of leaf blocks (problems / html).
default_block_info_tree = [  # pylint: disable=invalid-name
    BlockInfo(
        'chapter_x', 'chapter', {}, [
            BlockInfo(
                'sequential_x1', 'sequential', {}, [
                    BlockInfo(
                        'vertical_x1a', 'vertical', {}, [
                            BlockInfo('problem_x1a_1', 'problem', {}, []),
                            BlockInfo('problem_x1a_2', 'problem', {}, []),
                            BlockInfo('problem_x1a_3', 'problem', {}, []),
                            BlockInfo('html_x1a_1', 'html', {}, []),
                        ]
                    )
                ]
            )
        ]
    ),
    BlockInfo(
        'chapter_y', 'chapter', {}, [
            BlockInfo(
                'sequential_y1', 'sequential', {}, [
                    BlockInfo(
                        'vertical_y1a', 'vertical', {}, [
                            BlockInfo('problem_y1a_1', 'problem', {}, []),
                            BlockInfo('problem_y1a_2', 'problem', {}, []),
                            BlockInfo('problem_y1a_3', 'problem', {}, []),
                        ]
                    )
                ]
            )
        ]
    )
]
# equivalent to toy course in xml
# Declarative replica of the XML "toy" course used across modulestore tests.
# Deliberately exercises edge cases: non-ascii text, /static and /jump_to_id
# links, 'secret:'-prefixed ids, polls, and videos with explicit youtube ids.
TOY_BLOCK_INFO_TREE = [
    BlockInfo(
        'Overview', "chapter", {"display_name": "Overview"}, [
            BlockInfo(
                "Toy_Videos", "videosequence", {
                    "xml_attributes": {"filename": ["", None]}, "display_name": "Toy Videos", "format": "Lecture Sequence"
                }, [
                    BlockInfo(
                        "secret:toylab", "html", {
                            "data": "<b>Lab 2A: Superposition Experiment</b>\n\n\n<p>Isn't the toy course great?</p>\n\n<p>Let's add some markup that uses non-ascii characters.\n'For example, we should be able to write words like encyclopædia, or foreign words like français.\nLooking beyond latin-1, we should handle math symbols: πr² ≤ ∞.\nAnd it shouldn't matter if we use entities or numeric codes — Ω ≠ π ≡ Ω ≠ π.\n</p>\n\n",  # pylint: disable=line-too-long
                            "xml_attributes": {"filename": ["html/secret/toylab.xml", "html/secret/toylab.xml"]},
                            "display_name": "Toy lab"
                        }, []
                    ),
                    BlockInfo(
                        "toyjumpto", "html", {
                            "data": "<a href=\"/jump_to_id/vertical_test\">This is a link to another page and some Chinese 四節比分和七年前</a> <p>Some more Chinese 四節比分和七年前</p>\n",
                            "xml_attributes": {"filename": ["html/toyjumpto.xml", "html/toyjumpto.xml"]}
                        }, []),
                    BlockInfo(
                        "toyhtml", "html", {
                            "data": "<a href='/static/handouts/sample_handout.txt'>Sample</a>",
                            "xml_attributes": {"filename": ["html/toyhtml.xml", "html/toyhtml.xml"]}
                        }, []),
                    BlockInfo(
                        "nonportable", "html", {
                            "data": "<a href=\"/static/foo.jpg\">link</a>\n",
                            "xml_attributes": {"filename": ["html/nonportable.xml", "html/nonportable.xml"]}
                        }, []),
                    BlockInfo(
                        "nonportable_link", "html", {
                            "data": "<a href=\"/jump_to_id/nonportable_link\">link</a>\n\n",
                            "xml_attributes": {"filename": ["html/nonportable_link.xml", "html/nonportable_link.xml"]}
                        }, []),
                    BlockInfo(
                        "badlink", "html", {
                            "data": "<img src=\"/static//file.jpg\" />\n",
                            "xml_attributes": {"filename": ["html/badlink.xml", "html/badlink.xml"]}
                        }, []),
                    BlockInfo(
                        "with_styling", "html", {
                            "data": "<p style=\"font:italic bold 72px/30px Georgia, serif; color: red; \">Red text here</p>",
                            "xml_attributes": {"filename": ["html/with_styling.xml", "html/with_styling.xml"]}
                        }, []),
                    BlockInfo(
                        "just_img", "html", {
                            "data": "<img src=\"/static/foo_bar.jpg\" />",
                            "xml_attributes": {"filename": ["html/just_img.xml", "html/just_img.xml"]}
                        }, []),
                    BlockInfo(
                        "Video_Resources", "video", {
                            "youtube_id_1_0": "1bK-WdDi6Qw", "display_name": "Video Resources"
                        }, []),
                ]),
            BlockInfo(
                "Welcome", "video", {"data": "", "youtube_id_1_0": "p2Q6BrNhdh8", "display_name": "Welcome"}, []
            ),
            BlockInfo(
                "video_123456789012", "video", {"data": "", "youtube_id_1_0": "p2Q6BrNhdh8", "display_name": "Test Video"}, []
            ),
            BlockInfo(
                "video_4f66f493ac8f", "video", {"youtube_id_1_0": "p2Q6BrNhdh8"}, []
            )
        ]
    ),
    BlockInfo(
        "secret:magic", "chapter", {
            "xml_attributes": {"filename": ["chapter/secret/magic.xml", "chapter/secret/magic.xml"]}
        }, [
            BlockInfo(
                "toyvideo", "video", {"youtube_id_1_0": "OEoXaMPEzfMA", "display_name": "toyvideo"}, []
            )
        ]
    ),
    BlockInfo(
        "poll_test", "chapter", {}, [
            BlockInfo(
                "T1_changemind_poll_foo", "poll_question", {
                    "question": "<p>Have you changed your mind? ’</p>",
                    "answers": [{"text": "Yes", "id": "yes"}, {"text": "No", "id": "no"}],
                    "xml_attributes": {"reset": "false", "filename": ["", None]},
                    "display_name": "Change your answer"
                }, [])]
    ),
    BlockInfo(
        "vertical_container", "chapter", {
            "xml_attributes": {"filename": ["chapter/vertical_container.xml", "chapter/vertical_container.xml"]}
        }, [
            BlockInfo("vertical_sequential", "sequential", {}, [
                BlockInfo("vertical_test", "vertical", {
                    "xml_attributes": {"filename": ["vertical/vertical_test.xml", "vertical_test"]}
                }, [
                    BlockInfo(
                        "sample_video", "video", {
                            "youtube_id_1_25": "AKqURZnYqpk",
                            "youtube_id_0_75": "JMD_ifUUfsU",
                            "youtube_id_1_0": "OEoXaMPEzfM",
                            "display_name": "default",
                            "youtube_id_1_5": "DYpADpL7jAY"
                        }, []),
                    BlockInfo(
                        "separate_file_video", "video", {
                            "youtube_id_1_25": "AKqURZnYqpk",
                            "youtube_id_0_75": "JMD_ifUUfsU",
                            "youtube_id_1_0": "OEoXaMPEzfM",
                            "display_name": "default",
                            "youtube_id_1_5": "DYpADpL7jAY"
                        }, []),
                    BlockInfo(
                        "video_with_end_time", "video", {
                            "youtube_id_1_25": "AKqURZnYqpk",
                            "display_name": "default",
                            "youtube_id_1_0": "OEoXaMPEzfM",
                            "end_time": datetime.timedelta(seconds=10),
                            "youtube_id_1_5": "DYpADpL7jAY",
                            "youtube_id_0_75": "JMD_ifUUfsU"
                        }, []),
                    BlockInfo(
                        "T1_changemind_poll_foo_2", "poll_question", {
                            "question": "<p>Have you changed your mind?</p>",
                            "answers": [{"text": "Yes", "id": "yes"}, {"text": "No", "id": "no"}],
                            "xml_attributes": {"reset": "false", "filename": ["", None]},
                            "display_name": "Change your answer"
                        }, []),
                ]),
                BlockInfo("unicode", "html", {
                    "data": "…", "xml_attributes": {"filename": ["", None]}
                }, [])
            ]),
        ]
    ),
    BlockInfo(
        "handout_container", "chapter", {
            "xml_attributes": {"filename": ["chapter/handout_container.xml", "chapter/handout_container.xml"]}
        }, [
            BlockInfo(
                "html_7e5578f25f79", "html", {
                    "data": "<a href=\"/static/handouts/sample_handout.txt\"> handouts</a>",
                    "xml_attributes": {"filename": ["", None]}
                }, []
            ),
        ]
    )
]
| ahmadiga/min_edx | common/lib/xmodule/xmodule/modulestore/tests/sample_courses.py | Python | agpl-3.0 | 9,544 |
# -*- coding: utf8 -*-
import random
import re
from helga.plugins import match
def imgur(image):
    """Build the direct i.imgur.com GIF URL for the given image hash."""
    return 'http://i.imgur.com/%s.gif' % image
# Regex pattern -> response mapping consumed by find_response().  A value may
# be a single response, or a tuple/list of candidates (one is chosen at
# random); a candidate that is itself a tuple is sent as multiple lines.
RESPONSES = {
    # Direct text responses
    r'(gross|disgusting|eww)': (imgur('XEEI0Rn'),),  # Dumb and Dumber Gag
    r'(\sGFY\s|GTFO|Fuck (You|Off))': (imgur('VPqgYjF'),  # Ryan Stiles pulling middle finger from pocket
                                       imgur('rWhZY3k'),),  # half baked
    r'womp womp': ("http://www.sadtrombone.com/?play=true",
                   "http://www.youtube.com/watch?v=_-GaXa8tSBE"),
    r'^:w?q$': ("this ain't your vi",),
    r'^(pwd$|(sudo|ls|cd|rm)(\s\w+|$))': "this ain't your shell",
    r'php': ("php is just terrible",
             "php's motto: MERGE ALL THE PULL REQUESTS"),
    r'^select( .* )from(.*)': "'; DROP TABLES;",
    r'mongo(db)?\s': 'http://youtu.be/b2F-DItXtZs',  # MongoDB is webscale
    r'gem install': "ruby. not even once.",
    r'\\m/': 'rock on',
    r'((beetle|betel)(geuse|juice)\s?){3}': "i'm the ghost with the most",
    # lol, gifs
    r'(bravo|well done)': (imgur('wSvsV'),  # Citizen Kane slow clap
                           imgur('HUKCsCv'),  # Colbert & Stewart bravo
                           imgur('FwqHZ6Z')),  # Gamer conceding defeat
    r'is \w+ down\?': imgur('yX5o8rZ'),  # THE F5 HAMMER
    r"(i don't care|do i look like i care|zero fucks)": (
        imgur('oKydfNm'),  # Bird bouncing on hawk's head
        imgur('KowlC'),  # Gangam style 'do i look like i care'
        imgur('xYOqXJv'),  # Dog hitting cat with tail
        imgur('1b2YNU3'),  # But wait! bubble
    ),
    r'^nope$': (imgur('iSm1aZu'),  # Arrested development NOPE
                imgur('2xwe756'),  # Lonley island like a boss NOPE
                imgur('zCtbl'),  # Tracy Morgan NOPE
                imgur('foEHo'),  # Spongebob buried in sand
                imgur('xKYs9'),  # Puppy does not like lime
                imgur('ST9lw3U'),  # Seinfeld I'm Out
                imgur('c4gTe5p'),  # Cat thriller walk I'm out
                'http://i.minus.com/iUgVCKwjISSke.gif',  # The Nope Bader
                ),
    r'tl;?dr': (imgur('dnMjc'),  # Lightsaber did not read
                imgur('V2H9y')),  # Craig Robinson did not read
    r'panic': (imgur('tpGQV'),  # Aladding start panicking
               imgur('WS4S2'),  # Colbert screaming in terror
               imgur('rhNOy3I'),  # Panic cat bug eyes
               imgur('SNvM6CZ'),  # Girl leans on escalator handrail
               imgur('H7PXV'),  # Ain't nobody got time for that
               imgur('fH9e2')),  # Out of control truck on collision course
    r'shock(ed|ing)?': (imgur('zVyOBlR'),  # Cartoon is shocked
                        imgur('Q4bI5'),  # Shocked cat is shocked
                        imgur('wdA2Z'),  # Monsters Inc watching Boo in compactor
                        imgur('nj3yp'),  # Spock is shocked
                        imgur('AGnOQ'),  # PeeWee is shocked
                        imgur('wkY1FUI'),  # Shocked looks around
                        imgur('AXuUYIj')),  # Simpsons jaw drop
    r'(bloody mary|vodka)': imgur('W9SS4iJ'),  # Archer: Bloody Mary, blessed are you among cocktails
    r'popcorn': (imgur('00IJgSZ'),  # Thriller popcorn
                 imgur('5px9l')),  # Colbert popcorn
    r'deal with it': (imgur('12WoH'),  # Slip n slide DWI
                      imgur('6E6n1'),  # WTF Oprah
                      imgur('hYdy4'),  # Baseball catch deal with it
                      imgur('pqmfX'),  # WTF pouring water from nose
                      imgur('9WbAL'),  # A three toed sloth in a chair
                      imgur('KdldmZk'),  # Polar bear jumping out of water
                      imgur('49UtI5N'),  # The Fresh Prince of DEAL WITH IT
                      imgur('1pkNeOy'),  # Skyler
                      imgur('KzEXQDq'),  # Tom & Jerry
                      imgur('1kxk9z6'),  # deal with it dance
                      u'(⌐■_■)',
                      # Multiline
                      (u'( •_•)',
                       u'( •_•)>⌐■-■',
                       u'(⌐■_■)',
                       'deal with it'),
                      (u'. B :-|',
                       u'. B :-|',
                       u'. B :-|',
                       u'. B-| deal with it')),
    r'(mind blown|blew my mind)': (imgur('U6kCXUp'),  # Head asploding
                                   imgur('1HMveGj')),  # Tim and Eric mind blown
    r'(sweet jesus|mother of god)': (imgur('5vXdAOV'),  # Captain Kirk
                                     imgur('g155Wra'),  # Star Trek freaking out
                                     imgur('dyeHb'),  # BJ Novak looks confused
                                     imgur('VkHiG6D'),  # Face twitching
                                     imgur('aiH4Mts'),  # Christopher Lloyd realizes something
                                     imgur('nOJme'),  # Cookie monster sweet jesus
                                     imgur('KtdHWhs'),  # Fight club realization
                                     imgur('z5hhSsU'),  # Cat with toy: OMG it was you!
                                     imgur('zuc9tAm')),  # Dinosaurs show - drops beer
    r'nailed it': (imgur('KsQzQTF'),  # Cat not trying to catch rat
                   imgur('5nrEk'),  # Olympic diving fail
                   imgur('n9zw0'),  # squirrel spinning on bird feeder
                   imgur('puZy04m'),  # Kid jumping into pool fail
                   imgur('MBdxv'),  # Girl trying to jump bike ramp fail
                   imgur('6XRqt'),  # FIXME
                   imgur('dFuBE'),  # Cat jumps into a box
                   imgur('vUACp'),  # Backflip off bleachers
                   imgur('59h9A8e')),  # Backflip off tree
    r'unacceptable': imgur('BwdP2xl'),  # 3D rendering goes wrong
    r'^(iknorite|right)\?$': imgur('RvquHs0'),  # Breaking Bad: You're god damn right
    r'fuck yea': (imgur('GZ5CD5r'),  # Data shooting dice
                  imgur('nEmrMkq')),  # Top Gun ... DANGER ZONE
    r'\w+ broke prod': (imgur('SuCGnum'),  # Anchorman: You ate the whole wheel of cheese?
                        imgur('sbQUDbF'),),  # fail boat
    r'^indeed$': (imgur('bQcbpki'),  # Leonardo DiCaprio in Django Unchained
                  imgur('CRIcP'),),  # Teal'c from Stargate SG-1
    r'f(f{6}|7)u(u{11}|12)': 'http://i.minus.com/ibnfJRQi1h4z30.gif',  # Workaholics: FUUUUUUUUUUUUUU
    r'wtf': imgur('bpW6Xkd'),  # WTF supercut
    # Various modern unicode emoticons
    r'(why|y) (u|you) no': u'ლ(ಠ益ಠლ)',
    r'i (don\'?t know|dunno),? lol': u'¯\(°_o)/¯',
    r'look.?of.?disapproval(\.jpg|\.gif)?': u'ಠ_ಠ',
    r'i (am disappoint|disapprove)': u'ಠ_ಠ',
    r'^not sure if \w+': u'≖_≖',
    r'(tableflip|flip (a|the|some) tables?)': (u'(╯°□°)╯︵ ┻━┻',
                                               u'(ノಠ益ಠ)ノ彡┻━┻'),
    r'(gonna|going to) (make \w+ )?cry': u'(ಥ﹏ಥ)',
    r'(bro ?fist|fist ?bump)': u'( _)=mm=(^_^ )',
    r'hi(gh)?[ -]?five': ('\o',
                          u'( ‘-’)人(゚_゚ )'),
    r'(^|[^\\])o/$': '\o',
    r'^\\o$': 'o/'
}
def find_response(message):
    """Return a response for the first pattern in RESPONSES matching *message*.

    Matching is case-insensitive.  When the matched value is a tuple/list of
    candidate responses, one is chosen at random.  Returns None when nothing
    matches.
    """
    # .items() (not the py2-only .iteritems()) keeps this working on both
    # Python 2 and Python 3.
    for pattern, response in RESPONSES.items():
        if re.findall(pattern, message, re.I):
            if isinstance(response, (tuple, list)):
                return random.choice(response)
            return response
    return None
@match(find_response, priority=0)
def oneliner(client, channel, nick, message, match):
    """
    Canned one-line responses to chat messages.

    Maybe some of these will become their own thing, but for
    now, they live here.

    DEAL WITH IT
    """
    # find_response (the matcher) already selected the response text;
    # the plugin just relays it back to the channel.
    return match  # pragma: no cover
| shaunduncan/helga-oneliner | helga_oneliner.py | Python | mit | 8,137 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Renames the GogglesJob model (introduced in warehouse 0002) to ImportJob.

    dependencies = [
        ('warehouse', '0002_gogglesjob'),
    ]

    operations = [
        migrations.RenameModel(
            old_name='GogglesJob',
            new_name='ImportJob',
        ),
    ]
| smn/goggles | goggles/warehouse/migrations/0003_auto_20141216_2052.py | Python | bsd-2-clause | 355 |
# projectParams.py
# ----------------
# Licensing Information: You are free to use or extend these projects for
# educational purposes provided that (1) you do not distribute or publish
# solutions, (2) you retain this notice, and (3) you provide clear
# attribution to UC Berkeley, including a link to
# http://inst.eecs.berkeley.edu/~cs188/pacman/pacman.html
#
# Attribution Information: The Pacman AI projects were developed at UC Berkeley.
# The core projects and autograders were primarily created by John DeNero
# ([email protected]) and Dan Klein ([email protected]).
# Student side autograding was added by Brad Miller, Nick Hay, and
# Pieter Abbeel ([email protected]).
# Autograder configuration for the Pacman "Search" project.
STUDENT_CODE_DEFAULT = 'searchAgents.py,search.py'  # files containing student solutions
PROJECT_TEST_CLASSES = 'searchTestClasses.py'  # module with the project's test classes
PROJECT_NAME = 'Project 1: Search'
BONUS_PIC = False  # presumably toggles a bonus image on a perfect score -- TODO confirm
| anthonybrice/COMP469 | w2/search/projectParams.py | Python | gpl-3.0 | 854 |
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
import os
import re
from contextlib import contextmanager
from uuid import uuid4
from flask import Blueprint, Flask, current_app, g, request
from flask.blueprints import BlueprintSetupState
from flask.helpers import locked_cached_property
from flask.testing import FlaskClient
from flask.wrappers import Request
from flask_pluginengine import PluginFlaskMixin
from flask_webpackext import current_webpack
from jinja2 import FileSystemLoader, TemplateNotFound
from jinja2.runtime import StrictUndefined
from ua_parser import user_agent_parser
from werkzeug.datastructures import ImmutableOrderedMultiDict
from werkzeug.user_agent import UserAgent
from werkzeug.utils import cached_property
from indico.core.config import config
from indico.util.json import IndicoJSONEncoder
from indico.web.flask.session import IndicoSessionInterface
from indico.web.flask.templating import CustomizationLoader, IndicoEnvironment
from indico.web.flask.util import make_view_func
AUTH_BEARER_RE = re.compile(r'^Bearer (.+)$')
class ParsedUserAgent(UserAgent):
    """User agent that lazily parses its string with ua-parser."""

    @cached_property
    def _details(self):
        # Parse once and memoize; every property below reads from this dict.
        return user_agent_parser.Parse(self.string)

    @property
    def platform(self):
        return self._details['os']['family']

    @property
    def browser(self):
        return self._details['user_agent']['family']

    @property
    def version(self):
        # Dotted version from whichever of major/minor/patch are present.
        ua = self._details['user_agent']
        parts = (ua[key] for key in ('major', 'minor', 'patch'))
        return '.'.join(part for part in parts if part is not None)
class IndicoRequest(Request):
    """Request class with ordered parameters, a parsed UA and extra helpers."""

    parameter_storage_class = ImmutableOrderedMultiDict
    user_agent_class = ParsedUserAgent

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        addr = self.remote_addr
        if addr is not None and addr.startswith('::ffff:'):
            # convert ipv6-style ipv4 to the regular ipv4 notation
            self.remote_addr = addr[7:]

    @cached_property
    def id(self):
        # Short random identifier for correlating log entries of one request.
        return uuid4().hex[:16]

    @cached_property
    def relative_url(self):
        """The request's path including its query string if applicable."""
        return self.script_root + self.full_path.rstrip('?')

    @cached_property
    def bearer_token(self):
        """Bearer token included in the request, if any."""
        auth_header = request.headers.get('Authorization')
        if not auth_header:
            return None
        match_obj = AUTH_BEARER_RE.match(auth_header)
        if match_obj is None:
            return None
        return match_obj.group(1)

    @property
    def is_xhr(self):
        # XXX: avoid using this in new code; this header is non-standard and only set
        # by default in jquery, but not by anything else. check if the request accepts
        # json as an alternative.
        requested_with = self.headers.get('X-Requested-With', '')
        return requested_with.lower() == 'xmlhttprequest'
class IndicoFlaskClient(FlaskClient):
    """Test client that isolates each request in its own app context."""
    def open(self, *args, **kwargs):
        # our tests always push an app context, but we do not want to leak `g` between
        # test client calls, so we always use a throwaway app context for the requests
        with current_app.app_context():
            return super().open(*args, **kwargs)
class IndicoFlask(PluginFlaskMixin, Flask):
    """Flask application wired with Indico's JSON encoder, request/session
    classes and template customization support."""
    json_encoder = IndicoJSONEncoder
    request_class = IndicoRequest
    session_interface = IndicoSessionInterface()
    test_client_class = IndicoFlaskClient
    jinja_environment = IndicoEnvironment
    # StrictUndefined makes templates fail loudly on undefined variables.
    jinja_options = dict(Flask.jinja_options, undefined=StrictUndefined)

    @property
    def session_cookie_name(self):
        # Use a distinct cookie name on plain-http requests so the https
        # session cookie is never reused over an insecure connection.
        name = super().session_cookie_name
        if not request.is_secure:
            name += '_http'
        return name

    def create_global_jinja_loader(self):
        # Wrap the default loader so templates can be overridden from the
        # configured customization directory.
        default_loader = super().create_global_jinja_loader()
        # use an empty list if there's no global customization dir so we can
        # add directories of plugins later once they are available
        customization_dir = os.path.join(config.CUSTOMIZATION_DIR, 'templates') if config.CUSTOMIZATION_DIR else []
        return CustomizationLoader(default_loader, customization_dir, config.CUSTOMIZATION_DEBUG)

    def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
        from indico.web.rh import RHSimple
        # Endpoints from Flask-Multipass need to be wrapped in the RH
        # logic to get the autocommit logic and error handling for code
        # running inside the identity handler.
        if endpoint is not None and endpoint.startswith('_flaskmultipass'):
            view_func = RHSimple.wrap_function(view_func)
        return super().add_url_rule(rule, endpoint=endpoint, view_func=view_func, **options)

    @property
    def has_static_folder(self):
        # No Flask static folder; assets are served through webpack manifests.
        return False

    @property
    def manifest(self):
        # `g` may carry overriding manifests (set elsewhere); otherwise fall
        # back to the active webpack manifest.
        if 'custom_manifests' in g:
            return g.custom_manifests[None]
        return current_webpack.manifest
class IndicoBlueprintSetupState(BlueprintSetupState):
    """Setup state that understands the ``!/`` escape in URL rules, which
    registers the rule without the blueprint's url_prefix."""

    @contextmanager
    def _unprefixed(self):
        # Temporarily clear the blueprint's url_prefix.
        saved_prefix = self.url_prefix
        self.url_prefix = None
        yield
        self.url_prefix = saved_prefix

    def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
        if not rule.startswith('!/'):
            super().add_url_rule(rule, endpoint, view_func, **options)
            return
        with self._unprefixed():
            super().add_url_rule(rule[1:], endpoint, view_func, **options)
class IndicoBlueprint(Blueprint):
    """A Blueprint implementation that allows prefixing URLs with `!` to
    ignore the url_prefix of the blueprint.

    It also supports automatically creating rules in two versions - with and
    without a prefix.

    :param event_feature: If set, this blueprint will raise `NotFound`
                          for all its endpoints unless the event referenced
                          by the `event_id` URL argument has the specified
                          feature.
    """

    def __init__(self, *args, **kwargs):
        # __prefix/__default_prefix are only set inside add_prefixed_rules().
        self.__prefix = None
        self.__default_prefix = ''
        self.__virtual_template_folder = kwargs.pop('virtual_template_folder', None)
        event_feature = kwargs.pop('event_feature', None)
        super().__init__(*args, **kwargs)

        if event_feature:
            # Reject requests for events lacking the required feature.
            @self.before_request
            def _check_event_feature():
                from indico.modules.events.features.util import require_feature
                event_id = request.view_args.get('event_id')
                if event_id is not None:
                    require_feature(event_id, event_feature)

    @locked_cached_property
    def jinja_loader(self):
        # Returns None implicitly when the blueprint has no template folder.
        if self.template_folder is not None:
            return IndicoFileSystemLoader(os.path.join(self.root_path, self.template_folder),
                                          virtual_path=self.__virtual_template_folder)

    def make_setup_state(self, app, options, first_registration=False):
        return IndicoBlueprintSetupState(self, app, options, first_registration)

    def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
        if view_func is not None:
            # We might have a RH class here - convert it to a callable suitable as a view func.
            view_func = make_view_func(view_func)
        super().add_url_rule(self.__default_prefix + rule, endpoint, view_func, **options)
        if self.__prefix:
            super().add_url_rule(self.__prefix + rule, endpoint, view_func, **options)

    @contextmanager
    def add_prefixed_rules(self, prefix, default_prefix=''):
        """Create prefixed rules in addition to the normal ones.
        When specifying a default_prefix, too, the normally "unprefixed" rules
        are prefixed with it.
        """
        assert self.__prefix is None and not self.__default_prefix
        self.__prefix = prefix
        self.__default_prefix = default_prefix
        yield
        self.__prefix = None
        self.__default_prefix = ''
class IndicoFileSystemLoader(FileSystemLoader):
    """FileSystemLoader that makes namespacing easier.

    The `virtual_path` kwarg lets you specify a path segment that's
    handled as if all templates inside the loader's `searchpath` were
    actually inside ``searchpath/virtual_path``. That way you don't
    have to create subdirectories in your template folder.
    """

    def __init__(self, searchpath, encoding='utf-8', virtual_path=None):
        super().__init__(searchpath, encoding)
        self.virtual_path = virtual_path

    def list_templates(self):
        names = super().list_templates()
        if not self.virtual_path:
            return names
        # Present every template as if it lived under the virtual path.
        return [os.path.join(self.virtual_path, name) for name in names]

    def get_source(self, environment, template):
        if self.virtual_path:
            # Only templates addressed via the virtual path are resolvable;
            # strip the prefix before delegating to the real loader.
            if not template.startswith(self.virtual_path):
                raise TemplateNotFound(template)
            template = template[len(self.virtual_path):]
        return super().get_source(environment, template)
| pferreir/indico | indico/web/flask/wrappers.py | Python | mit | 9,244 |
from django.contrib import admin
from django.forms import ModelForm, CharField
from open_municipio.events.models import *
from tinymce.widgets import TinyMCE
from django.forms.models import inlineformset_factory, BaseInlineFormSet
from django.forms import Textarea, ModelForm, TextInput
# TODO place these widget and field in a more reusable location
from open_municipio.widgets import SplitTimeWidget, SortWidget
from open_municipio.fields import SplitTimeField
class EventForm(ModelForm):
    # Rich-text editor for the event description; the TinyMCE config limits
    # the toolbar to basic formatting plus lists/links/source view.
    description = CharField(widget=TinyMCE(
        attrs={'cols': 80, 'rows': 25},
        mce_attrs={
            'theme': "advanced",
            'theme_advanced_buttons1': "formatselect,bold,italic,underline|,bullist,numlist,|,undo,redo,|,link,unlink,|,code,help",
            'theme_advanced_buttons2': "",
            'theme_advanced_buttons3': "",
            'theme_advanced_blockformats': "p,blockquote",
            'theme_advanced_resizing': True,
            'theme_advanced_statusbar_location': "bottom",
            'theme_advanced_toolbar_location': "top",
            'theme_advanced_path': False
        },
        ),
        required=False)
    # Separate hour/minute inputs for the event time.
    event_time = SplitTimeField(widget=SplitTimeWidget)
    class Meta:
        # NOTE(review): no `fields`/`exclude` given -- relies on old Django
        # behavior that includes all model fields.
        model = Event
class EventActInlineForm(ModelForm):
    class Meta:
        # Render the 'order' field with the drag-sort widget.
        widgets = {
            'order' : SortWidget()
        }
class EventActInline(admin.TabularInline):
    # Raw ID input for the act FK avoids rendering a huge select box.
    raw_id_fields = ('act', )
    form = EventActInlineForm
    model = EventAct
    extra = 0  # no blank extra inline rows by default
class EventAdmin(admin.ModelAdmin):
    # Admin for events: custom form (TinyMCE description, split time field)
    # plus inline editing of the related acts.
    filter_horizontal = ('acts',)
    form = EventForm
    inlines = [ EventActInline, ]
    list_display = [ "title", "date", "institution", ]
    search_fields = [ "title", "institution__name", ]
    list_filter = ("institution", )
admin.site.register(Event, EventAdmin)
| openpolis/open_municipio | open_municipio/events/admin.py | Python | agpl-3.0 | 1,812 |
"""experimenting with versioning modes
Revision ID: 4f22490b9071
Revises: 4371f183fc40
Create Date: 2019-08-24 23:56:44.249781
"""
# revision identifiers, used by Alembic.
revision = '4f22490b9071'
down_revision = '4371f183fc40'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy_utils.types import TSVectorType
from sqlalchemy_searchable import make_searchable
import sqlalchemy_utils
# Patch in knowledge of the citext type, so it reflects properly.
from sqlalchemy.dialects.postgresql.base import ischema_names
import citext
import queue
import datetime
from sqlalchemy.dialects.postgresql import ENUM
from sqlalchemy.dialects.postgresql import JSON
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.dialects.postgresql import TSVECTOR
ischema_names['citext'] = citext.CIText
from sqlalchemy.dialects import postgresql
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Drops the end_transaction_id bookkeeping column (and its index where one
    # exists) from the *_version tables -- presumably part of switching the
    # versioning strategy (see revision message); confirm before reuse.
    # Long timeout (4h, in ms) since ALTERing these tables can be slow.
    op.execute("SET statement_timeout TO 14400000;")
    op.drop_column('raw_web_pages_version', 'end_transaction_id')
    op.drop_index('ix_rss_parser_feed_name_lut_version_end_transaction_id', table_name='rss_parser_feed_name_lut_version')
    op.drop_column('rss_parser_feed_name_lut_version', 'end_transaction_id')
    op.drop_index('ix_rss_parser_funcs_version_end_transaction_id', table_name='rss_parser_funcs_version')
    op.drop_column('rss_parser_funcs_version', 'end_transaction_id')
    op.drop_column('web_pages_version', 'end_transaction_id')
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Re-adds the end_transaction_id columns/indexes removed by upgrade().
    # Note: the dropped column data is not recoverable -- columns come back NULL.
    op.add_column('web_pages_version', sa.Column('end_transaction_id', sa.BIGINT(), autoincrement=False, nullable=True))
    op.add_column('rss_parser_funcs_version', sa.Column('end_transaction_id', sa.BIGINT(), autoincrement=False, nullable=True))
    op.create_index('ix_rss_parser_funcs_version_end_transaction_id', 'rss_parser_funcs_version', ['end_transaction_id'], unique=False)
    op.add_column('rss_parser_feed_name_lut_version', sa.Column('end_transaction_id', sa.BIGINT(), autoincrement=False, nullable=True))
    op.create_index('ix_rss_parser_feed_name_lut_version_end_transaction_id', 'rss_parser_feed_name_lut_version', ['end_transaction_id'], unique=False)
    op.add_column('raw_web_pages_version', sa.Column('end_transaction_id', sa.BIGINT(), autoincrement=False, nullable=True))
    # ### end Alembic commands ###
| fake-name/ReadableWebProxy | alembic/versions/00046_4f22490b9071_experimenting_with_versioning_modes.py | Python | bsd-3-clause | 2,493 |
# Copyright (c) 2014, 2016, Oracle and/or its affiliates. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
ZFS Storage Appliance Proxy
"""
import json
from oslo_log import log
from oslo_service import loopingcall
from cinder import exception
from cinder.i18n import _
from cinder.volume.drivers.zfssa import restclient
from cinder.volume.drivers.zfssa import webdavclient
LOG = log.getLogger(__name__)
def factory_restclient(url, **kwargs):
    """Create a REST client for *url*; indirection point for tests to mock."""
    return restclient.RestClientURL(url, **kwargs)
class ZFSSAApi(object):
"""ZFSSA API proxy class"""
    def __init__(self):
        # Connection state; populated by set_host().
        self.host = None
        self.url = None
        self.rclient = None
    def __del__(self):
        # Best-effort logout when the proxy is garbage collected.
        if self.rclient and self.rclient.islogin():
            self.rclient.logout()
    def _is_pool_owned(self, pdata):
        """Check pool ownership.

        Returns True if the pool's owner is the same as the host, or
        the peer, if (and only if) it's stripped from the cluster

        :param pdata: pool details dict as returned by the appliance
                      (must contain pool.asn and pool.owner)
        :raises VolumeBackendAPIException: if either REST call fails
        """
        # First check: does this appliance itself own the pool?
        svc = '/api/system/v1/version'
        ret = self.rclient.get(svc)
        if ret.status != restclient.Status.OK:
            exception_msg = (_('Error getting version: '
                               'svc: %(svc)s.'
                               'Return code: %(ret.status)d '
                               'Message: %(ret.data)s.')
                             % {'svc': svc,
                                'ret.status': ret.status,
                                'ret.data': ret.data})
            LOG.error(exception_msg)
            raise exception.VolumeBackendAPIException(data=exception_msg)
        vdata = json.loads(ret.data)['version']
        if vdata['asn'] == pdata['pool']['asn'] and \
           vdata['nodename'] == pdata['pool']['owner']:
            return True
        # Otherwise accept the cluster peer as owner, but only when the peer
        # has been stripped from the cluster.
        svc = '/api/hardware/v1/cluster'
        ret = self.rclient.get(svc)
        if ret.status != restclient.Status.OK:
            exception_msg = (_('Error getting cluster: '
                               'svc: %(svc)s.'
                               'Return code: %(ret.status)d '
                               'Message: %(ret.data)s.')
                             % {'svc': svc,
                                'ret.status': ret.status,
                                'ret.data': ret.data})
            LOG.error(exception_msg)
            raise exception.VolumeBackendAPIException(data=exception_msg)
        cdata = json.loads(ret.data)['cluster']
        if cdata['peer_asn'] == pdata['pool']['asn'] and \
           cdata['peer_hostname'] == pdata['pool']['owner'] and \
           cdata['peer_state'] == 'AKCS_STRIPPED':
            LOG.warning('Cluster node %(nodename)s is stripped',
                        {'nodename': pdata['pool']['owner']})
            return True
        return False
    def get_pool_details(self, pool):
        """Get properties of a pool.

        :param pool: name of the storage pool on the appliance
        :returns: the pool's properties dict
        :raises VolumeBackendAPIException: if the REST call fails
        :raises InvalidInput: if the pool is not owned by this host/peer
        """
        svc = '/api/storage/v1/pools/%s' % pool
        ret = self.rclient.get(svc)
        if ret.status != restclient.Status.OK:
            exception_msg = (_('Error Getting Pool Stats: '
                               'Pool: %(pool)s '
                               'Return code: %(status)d '
                               'Message: %(data)s.')
                             % {'pool': pool,
                                'status': ret.status,
                                'data': ret.data})
            LOG.error(exception_msg)
            raise exception.VolumeBackendAPIException(data=exception_msg)
        val = json.loads(ret.data)
        # Refuse to operate on a pool owned by a different cluster node.
        if not self._is_pool_owned(val):
            exception_msg = (_('Error Pool ownership: '
                               'Pool %(pool)s is not owned '
                               'by %(host)s.')
                             % {'pool': pool,
                                'host': self.host})
            LOG.error(exception_msg)
            raise exception.InvalidInput(reason=exception_msg)
        return val['pool']
def set_host(self, host, timeout=None):
self.host = host
self.url = "https://" + self.host + ":215"
self.rclient = factory_restclient(self.url, timeout=timeout)
    def login(self, auth_str):
        """Login to the appliance (no-op when already logged in)."""
        if self.rclient and not self.rclient.islogin():
            self.rclient.login(auth_str)
    def logout(self):
        """Terminate the REST session on the appliance."""
        self.rclient.logout()
def verify_service(self, service, status='online'):
"""Checks whether a service is online or not"""
svc = '/api/service/v1/services/' + service
ret = self.rclient.get(svc)
if ret.status != restclient.Status.OK:
exception_msg = (_('Error Verifying '
'Service: %(service)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s.')
% {'service': service,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
data = json.loads(ret.data)['service']
if data['<status>'] != status:
exception_msg = (_('%(service)s Service is not %(status)s '
'on storage appliance: %(host)s')
% {'service': service,
'status': status,
'host': self.host})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def get_asn(self):
"""Returns appliance asn."""
svc = '/api/system/v1/version'
ret = self.rclient.get(svc)
if ret.status != restclient.Status.OK:
exception_msg = (_('Error getting appliance version details. '
'Return code: %(ret.status)d '
'Message: %(ret.data)s .')
% {'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
val = json.loads(ret.data)
return val['version']['asn']
def get_replication_targets(self):
"""Returns all replication targets configured on the appliance."""
svc = '/api/storage/v1/replication/targets'
ret = self.rclient.get(svc)
if ret.status != restclient.Status.OK:
exception_msg = (_('Error getting replication target details. '
'Return code: %(ret.status)d '
'Message: %(ret.data)s .')
% {'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
val = json.loads(ret.data)
return val
def edit_inherit_replication_flag(self, pool, project, volume, set=True):
"""Edit the inherit replication flag for volume."""
svc = ('/api/storage/v1/pools/%(pool)s/projects/%(project)s'
'/filesystems/%(volume)s/replication'
% {'pool': pool,
'project': project,
'volume': volume})
arg = {'inherited': set}
ret = self.rclient.put(svc, arg)
if ret.status != restclient.Status.ACCEPTED:
exception_msg = (_('Error setting replication inheritance '
'to %(set)s '
'for volume: %(vol)s '
'project %(project)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s .')
% {'set': set,
'project': project,
'vol': volume,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def create_replication_action(self, host_pool, host_project, tgt_name,
tgt_pool, volume):
"""Create a replication action."""
arg = {'pool': host_pool,
'project': host_project,
'target_pool': tgt_pool,
'target': tgt_name}
if volume is not None:
arg.update({'share': volume})
svc = '/api/storage/v1/replication/actions'
ret = self.rclient.post(svc, arg)
if ret.status != restclient.Status.CREATED:
exception_msg = (_('Error Creating replication action on: '
'pool: %(pool)s '
'Project: %(proj)s '
'volume: %(vol)s '
'for target: %(tgt)s and pool: %(tgt_pool)s'
'Return code: %(ret.status)d '
'Message: %(ret.data)s .')
% {'pool': host_pool,
'proj': host_project,
'vol': volume,
'tgt': tgt_name,
'tgt_pool': tgt_pool,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
val = json.loads(ret.data)
return val['action']['id']
def delete_replication_action(self, action_id):
"""Delete a replication action."""
svc = '/api/storage/v1/replication/actions/%s' % action_id
ret = self.rclient.delete(svc)
if ret.status != restclient.Status.NO_CONTENT:
exception_msg = (_('Error Deleting '
'replication action: %(id)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s.')
% {'id': action_id,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
    def send_repl_update(self, action_id):
        """Send a replication update for *action_id* and wait for completion.

        Issues PUT .../sendupdate, then polls the action every 5 seconds
        until last_result reports 'success' (raising on any other final
        result).
        """
        svc = '/api/storage/v1/replication/actions/%s/sendupdate' % action_id
        ret = self.rclient.put(svc)
        if ret.status != restclient.Status.ACCEPTED:
            exception_msg = (_('Error sending replication update '
                               'for action id: %(id)s . '
                               'Return code: %(ret.status)d '
                               'Message: %(ret.data)s .')
                             % {'id': action_id,
                                'ret.status': ret.status,
                                'ret.data': ret.data})
            LOG.error(exception_msg)
            raise exception.VolumeBackendAPIException(data=exception_msg)

        def _loop_func():
            # Poll the action; raising LoopingCallDone stops the timer.
            svc = '/api/storage/v1/replication/actions/%s' % action_id
            ret = self.rclient.get(svc)
            if ret.status != restclient.Status.OK:
                exception_msg = (_('Error getting replication action: %(id)s. '
                                   'Return code: %(ret.status)d '
                                   'Message: %(ret.data)s .')
                                 % {'id': action_id,
                                    'ret.status': ret.status,
                                    'ret.data': ret.data})
                LOG.error(exception_msg)
                raise exception.VolumeBackendAPIException(data=exception_msg)

            val = json.loads(ret.data)
            if val['action']['last_result'] == 'success':
                raise loopingcall.LoopingCallDone()
            elif (val['action']['last_result'] == '<unknown>' and
                  val['action']['state'] == 'sending'):
                # Update still in flight; keep polling.
                pass
            else:
                exception_msg = (_('Error sending replication update. '
                                   'Returned error: %(err)s. '
                                   'Action: %(id)s.')
                                 % {'err': val['action']['last_result'],
                                    'id': action_id})
                LOG.error(exception_msg)
                raise exception.VolumeBackendAPIException(data=exception_msg)

        timer = loopingcall.FixedIntervalLoopingCall(_loop_func)
        timer.start(interval=5).wait()
def get_replication_source(self, asn):
"""Return the replication source json which has a matching asn."""
svc = "/api/storage/v1/replication/sources"
ret = self.rclient.get(svc)
if ret.status != restclient.Status.OK:
exception_msg = (_('Error getting replication source details. '
'Return code: %(ret.status)d '
'Message: %(ret.data)s .')
% {'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
val = json.loads(ret.data)
for source in val['sources']:
if source['asn'] == asn:
return source
return None
def sever_replication(self, package, src_name, project=None):
"""Sever Replication at the destination.
This method will sever the package and move the volume to a project,
if project name is not passed in then the package name is selected
as the project name
"""
svc = ('/api/storage/v1/replication/sources/%(src)s/packages/%(pkg)s'
'/sever' % {'src': src_name, 'pkg': package})
if not project:
project = package
arg = {'projname': project}
ret = self.rclient.put(svc, arg)
if ret.status != restclient.Status.ACCEPTED:
exception_msg = (_('Error severing the package: %(package)s '
'from source: %(src)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s .')
% {'package': package,
'src': src_name,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def move_volume(self, pool, project, volume, tgt_project):
"""Move a LUN from one project to another within the same pool."""
svc = ('/api/storage/v1/pools/%(pool)s/projects/%(project)s'
'/filesystems/%(volume)s' % {'pool': pool,
'project': project,
'volume': volume})
arg = {'project': tgt_project}
ret = self.rclient.put(svc, arg)
if ret.status != restclient.Status.ACCEPTED:
exception_msg = (_('Error moving volume: %(vol)s '
'from source project: %(src)s '
'to target project: %(tgt)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s .')
% {'vol': volume,
'src': project,
'tgt': tgt_project,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def delete_project(self, pool, project):
"""Delete a project."""
svc = ('/api/storage/v1/pools/%(pool)s/projects/%(project)s' %
{'pool': pool,
'project': project})
ret = self.rclient.delete(svc)
if ret.status != restclient.Status.NO_CONTENT:
exception_msg = (_('Error Deleting '
'project: %(project)s '
'on pool: %(pool)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s.')
% {'project': project,
'pool': pool,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def get_project_stats(self, pool, project):
"""Get project stats.
Get available space and total space of a project
returns (avail, total).
"""
svc = '/api/storage/v1/pools/%s/projects/%s' % (pool, project)
ret = self.rclient.get(svc)
if ret.status != restclient.Status.OK:
exception_msg = (_('Error Getting Project Stats: '
'Pool: %(pool)s '
'Project: %(project)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s.')
% {'pool': pool,
'project': project,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
val = json.loads(ret.data)
avail = val['project']['space_available']
total = avail + val['project']['space_total']
return avail, total
def create_project(self, pool, project, compression=None, logbias=None):
"""Create a project on a pool.
Check first whether the pool exists.
"""
self.verify_pool(pool)
svc = '/api/storage/v1/pools/' + pool + '/projects/' + project
ret = self.rclient.get(svc)
if ret.status != restclient.Status.OK:
svc = '/api/storage/v1/pools/' + pool + '/projects'
arg = {
'name': project
}
if compression and compression != '':
arg.update({'compression': compression})
if logbias and logbias != '':
arg.update({'logbias': logbias})
ret = self.rclient.post(svc, arg)
if ret.status != restclient.Status.CREATED:
exception_msg = (_('Error Creating Project: '
'%(project)s on '
'Pool: %(pool)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s .')
% {'project': project,
'pool': pool,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def create_initiator(self, initiator, alias, chapuser=None,
chapsecret=None):
"""Create an iSCSI initiator."""
svc = '/api/san/v1/iscsi/initiators/alias=' + alias
ret = self.rclient.get(svc)
if ret.status != restclient.Status.OK:
svc = '/api/san/v1/iscsi/initiators'
arg = {
'initiator': initiator,
'alias': alias
}
if chapuser and chapuser != '' and chapsecret and chapsecret != '':
arg.update({'chapuser': chapuser,
'chapsecret': chapsecret})
ret = self.rclient.post(svc, arg)
if ret.status != restclient.Status.CREATED:
exception_msg = (_('Error Creating Initiator: '
'%(initiator)s on '
'Alias: %(alias)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s .')
% {'initiator': initiator,
'alias': alias,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def add_to_initiatorgroup(self, initiator, initiatorgroup):
"""Add an iSCSI initiator to initiatorgroup"""
svc = '/api/san/v1/iscsi/initiator-groups/' + initiatorgroup
ret = self.rclient.get(svc)
if ret.status != restclient.Status.OK:
svc = '/api/san/v1/iscsi/initiator-groups'
arg = {
'name': initiatorgroup,
'initiators': [initiator]
}
ret = self.rclient.post(svc, arg)
if ret.status != restclient.Status.CREATED:
exception_msg = (_('Error Adding Initiator: '
'%(initiator)s on group'
'InitiatorGroup: %(initiatorgroup)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s .')
% {'initiator': initiator,
'initiatorgroup': initiatorgroup,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
else:
val = json.loads(ret.data)
inits = val['group']['initiators']
if inits is None:
exception_msg = (_('Error Getting Initiators: '
'InitiatorGroup: %(initiatorgroup)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s .')
% {'initiatorgroup': initiatorgroup,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
if initiator in inits:
return
inits.append(initiator)
svc = '/api/san/v1/iscsi/initiator-groups/' + initiatorgroup
arg = {
'initiators': inits
}
ret = self.rclient.put(svc, arg)
if ret.status != restclient.Status.ACCEPTED:
exception_msg = (_('Error Adding Initiator: '
'%(initiator)s on group'
'InitiatorGroup: %(initiatorgroup)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s .')
% {'initiator': initiator,
'initiatorgroup': initiatorgroup,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def create_target(self, alias, interfaces=None, tchapuser=None,
tchapsecret=None):
"""Create an iSCSI target.
:param interfaces: an array with network interfaces
:param tchapuser, tchapsecret: target's chapuser and chapsecret
:returns: target iqn
"""
svc = '/api/san/v1/iscsi/targets/alias=' + alias
ret = self.rclient.get(svc)
if ret.status != restclient.Status.OK:
svc = '/api/san/v1/iscsi/targets'
arg = {
'alias': alias
}
if tchapuser and tchapuser != '' and tchapsecret and \
tchapsecret != '':
arg.update({'targetchapuser': tchapuser,
'targetchapsecret': tchapsecret,
'auth': 'chap'})
if interfaces is not None and len(interfaces) > 0:
arg.update({'interfaces': interfaces})
ret = self.rclient.post(svc, arg)
if ret.status != restclient.Status.CREATED:
exception_msg = (_('Error Creating Target: '
'%(alias)s'
'Return code: %(ret.status)d '
'Message: %(ret.data)s .')
% {'alias': alias,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
val = json.loads(ret.data)
return val['target']['iqn']
def get_target(self, alias):
"""Get an iSCSI target iqn."""
svc = '/api/san/v1/iscsi/targets/alias=' + alias
ret = self.rclient.get(svc)
if ret.status != restclient.Status.OK:
exception_msg = (_('Error Getting Target: '
'%(alias)s'
'Return code: %(ret.status)d '
'Message: %(ret.data)s .')
% {'alias': alias,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
val = json.loads(ret.data)
return val['target']['iqn']
def add_to_targetgroup(self, iqn, targetgroup):
"""Add an iSCSI target to targetgroup."""
svc = '/api/san/v1/iscsi/target-groups/' + targetgroup
ret = self.rclient.get(svc)
if ret.status != restclient.Status.OK:
svccrt = '/api/san/v1/iscsi/target-groups'
arg = {
'name': targetgroup,
'targets': [iqn]
}
ret = self.rclient.post(svccrt, arg)
if ret.status != restclient.Status.CREATED:
exception_msg = (_('Error Creating TargetGroup: '
'%(targetgroup)s with'
'IQN: %(iqn)s'
'Return code: %(ret.status)d '
'Message: %(ret.data)s .')
% {'targetgroup': targetgroup,
'iqn': iqn,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
return
arg = {
'targets': [iqn]
}
ret = self.rclient.put(svc, arg)
if ret.status != restclient.Status.ACCEPTED:
exception_msg = (_('Error Adding to TargetGroup: '
'%(targetgroup)s with'
'IQN: %(iqn)s'
'Return code: %(ret.status)d '
'Message: %(ret.data)s.')
% {'targetgroup': targetgroup,
'iqn': iqn,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def verify_pool(self, pool):
"""Checks whether pool exists."""
svc = '/api/storage/v1/pools/' + pool
ret = self.rclient.get(svc)
if ret.status != restclient.Status.OK:
exception_msg = (_('Error Verifying Pool: '
'%(pool)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s.')
% {'pool': pool,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def verify_project(self, pool, project):
"""Checks whether project exists."""
svc = '/api/storage/v1/pools/' + pool + '/projects/' + project
ret = self.rclient.get(svc)
if ret.status != restclient.Status.OK:
exception_msg = (_('Error Verifying '
'Project: %(project)s on '
'Pool: %(pool)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s.')
% {'project': project,
'pool': pool,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def verify_initiator(self, iqn):
"""Check whether initiator iqn exists."""
svc = '/api/san/v1/iscsi/initiators/' + iqn
ret = self.rclient.get(svc)
if ret.status != restclient.Status.OK:
exception_msg = (_('Error Verifying '
'Initiator: %(iqn)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s.')
% {'initiator': iqn,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def verify_target(self, alias):
"""Check whether target alias exists."""
svc = '/api/san/v1/iscsi/targets/alias=' + alias
ret = self.rclient.get(svc)
if ret.status != restclient.Status.OK:
exception_msg = (_('Error Verifying '
'Target: %(alias)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s.')
% {'alias': alias,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def create_lun(self, pool, project, lun, volsize, targetgroup, specs):
"""Create a LUN.
specs - contains volume properties (e.g blocksize, compression).
"""
svc = '/api/storage/v1/pools/' + pool + '/projects/' + \
project + '/luns'
arg = {
'name': lun,
'volsize': volsize,
'targetgroup': targetgroup,
'initiatorgroup': 'com.sun.ms.vss.hg.maskAll'
}
if specs:
arg.update(specs)
ret = self.rclient.post(svc, arg)
if ret.status != restclient.Status.CREATED:
exception_msg = (_('Error Creating '
'Volume: %(lun)s '
'Size: %(size)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s.')
% {'lun': lun,
'size': volsize,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
val = json.loads(ret.data)
return val
    def get_lun(self, pool, project, lun):
        """Return iscsi lun properties as a dict.

        Keys: name, guid, number, initiatorgroup, size, nodestroy,
        targetgroup; plus origin / image_id / updated_at /
        cinder_managed when the corresponding properties exist on the
        LUN.

        :raises VolumeNotFound: if the LUN does not exist on the ZFSSA.
        :raises VolumeBackendAPIException: for any other REST failure.
        """
        svc = '/api/storage/v1/pools/' + pool + '/projects/' + \
            project + "/luns/" + lun
        ret = self.rclient.get(svc)
        if ret.status == restclient.Status.NOT_FOUND:
            # Sometimes a volume exists in cinder for which there is no
            # corresponding LUN (e.g. LUN create failed). In this case,
            # allow deletion to complete (without doing anything on the
            # ZFSSA). Any other exception should be passed up.
            LOG.warning('LUN with name %(lun)s not found in project '
                        '%(project)s, pool %(pool)s.',
                        {'lun': lun,
                         'project': project,
                         'pool': pool})
            raise exception.VolumeNotFound(volume_id=lun)
        elif ret.status != restclient.Status.OK:
            exception_msg = (_('Error Getting '
                               'Volume: %(lun)s on '
                               'Pool: %(pool)s '
                               'Project: %(project)s '
                               'Return code: %(ret.status)d '
                               'Message: %(ret.data)s.')
                             % {'lun': lun,
                                'pool': pool,
                                'project': project,
                                'ret.status': ret.status,
                                'ret.data': ret.data})
            LOG.error(exception_msg)
            raise exception.VolumeBackendAPIException(data=exception_msg)
        val = json.loads(ret.data)

        # For backward-compatibility with 2013.1.2.x, convert initiatorgroup
        # and number to lists if they're not already
        def _listify(item):
            return item if isinstance(item, list) else [item]
        initiatorgroup = _listify(val['lun']['initiatorgroup'])
        number = _listify(val['lun']['assignednumber'])
        # Hide special maskAll value when LUN is not currently presented to
        # any initiatorgroups:
        if 'com.sun.ms.vss.hg.maskAll' in initiatorgroup:
            initiatorgroup = []
            number = []
        ret = {
            'name': val['lun']['name'],
            'guid': val['lun']['lunguid'],
            'number': number,
            'initiatorgroup': initiatorgroup,
            'size': val['lun']['volsize'],
            'nodestroy': val['lun']['nodestroy'],
            'targetgroup': val['lun']['targetgroup']
        }
        # Optional custom properties are only present when set on the LUN.
        if 'origin' in val['lun']:
            ret.update({'origin': val['lun']['origin']})
        if 'custom:image_id' in val['lun']:
            ret.update({'image_id': val['lun']['custom:image_id']})
            ret.update({'updated_at': val['lun']['custom:updated_at']})
        if 'custom:cinder_managed' in val['lun']:
            ret.update({'cinder_managed': val['lun']['custom:cinder_managed']})
        return ret
def get_lun_snapshot(self, pool, project, lun, snapshot):
"""Return iscsi lun snapshot properties."""
svc = ('/api/storage/v1/pools/' + pool + '/projects/' +
project + '/luns/' + lun + '/snapshots/' + snapshot)
ret = self.rclient.get(svc)
if ret.status != restclient.Status.OK:
exception_msg = ('Error Getting '
'Snapshot: %(snapshot)s of '
'Volume: %(lun)s in '
'Pool: %(pool)s, '
'Project: %(project)s '
'Return code: %(ret.status)d, '
'Message: %(ret.data)s.',
{'snapshot': snapshot,
'lun': lun,
'pool': pool,
'project': project,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.SnapshotNotFound(snapshot_id=snapshot)
val = json.loads(ret.data)['snapshot']
ret = {
'name': val['name'],
'numclones': val['numclones'],
}
return ret
def set_lun_initiatorgroup(self, pool, project, lun, initiatorgroup):
"""Set the initiatorgroup property of a LUN."""
# For backward-compatibility with 2013.1.2.x, set initiatorgroup
# to a single string if there's only one item in the list.
# Live-migration won't work, but existing functionality should still
# work. If the list is empty, substitute the special "maskAll" value.
if len(initiatorgroup) == 0:
initiatorgroup = 'com.sun.ms.vss.hg.maskAll'
elif len(initiatorgroup) == 1:
initiatorgroup = initiatorgroup[0]
svc = '/api/storage/v1/pools/' + pool + '/projects/' + \
project + '/luns/' + lun
arg = {
'initiatorgroup': initiatorgroup
}
LOG.debug('Setting LUN initiatorgroup. pool=%(pool)s, '
'project=%(project)s, lun=%(lun)s, '
'initiatorgroup=%(initiatorgroup)s',
{'project': project,
'pool': pool,
'lun': lun,
'initiatorgroup': initiatorgroup})
ret = self.rclient.put(svc, arg)
if ret.status != restclient.Status.ACCEPTED:
LOG.error('Error Setting Volume: %(lun)s to InitiatorGroup: '
'%(initiatorgroup)s Pool: %(pool)s Project: '
'%(project)s Return code: %(ret.status)d Message: '
'%(ret.data)s.',
{'lun': lun,
'initiatorgroup': initiatorgroup,
'pool': pool,
'project': project,
'ret.status': ret.status,
'ret.data': ret.data})
def delete_lun(self, pool, project, lun):
"""delete iscsi lun."""
svc = '/api/storage/v1/pools/' + pool + '/projects/' + \
project + '/luns/' + lun
ret = self.rclient.delete(svc)
if ret.status != restclient.Status.NO_CONTENT:
exception_msg = (_('Error Deleting Volume: %(lun)s from '
'Pool: %(pool)s, Project: %(project)s. '
'Return code: %(ret.status)d, '
'Message: %(ret.data)s.'),
{'lun': lun,
'pool': pool,
'project': project,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
if ret.status == restclient.Status.FORBIDDEN:
# This means that the lun exists but it can't be deleted:
raise exception.VolumeBackendAPIException(data=exception_msg)
def create_snapshot(self, pool, project, lun, snapshot):
"""create snapshot."""
svc = '/api/storage/v1/pools/' + pool + '/projects/' + \
project + '/luns/' + lun + '/snapshots'
arg = {
'name': snapshot
}
ret = self.rclient.post(svc, arg)
if ret.status != restclient.Status.CREATED:
exception_msg = (_('Error Creating '
'Snapshot: %(snapshot)s on'
'Volume: %(lun)s to '
'Pool: %(pool)s '
'Project: %(project)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s.'),
{'snapshot': snapshot,
'lun': lun,
'pool': pool,
'project': project,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def delete_snapshot(self, pool, project, lun, snapshot):
"""delete snapshot."""
svc = '/api/storage/v1/pools/' + pool + '/projects/' + \
project + '/luns/' + lun + '/snapshots/' + snapshot
ret = self.rclient.delete(svc)
if ret.status != restclient.Status.NO_CONTENT:
exception_msg = (_('Error Deleting '
'Snapshot: %(snapshot)s on '
'Volume: %(lun)s to '
'Pool: %(pool)s '
'Project: %(project)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s.')
% {'snapshot': snapshot,
'lun': lun,
'pool': pool,
'project': project,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def clone_snapshot(self, pool, project, lun, snapshot, clone_proj, clone,
specs):
"""clone 'snapshot' to a lun named 'clone' in project 'clone_proj'."""
svc = '/api/storage/v1/pools/' + pool + '/projects/' + \
project + '/luns/' + lun + '/snapshots/' + snapshot + '/clone'
arg = {
'project': clone_proj,
'share': clone,
'nodestroy': True
}
if specs:
arg.update(specs)
# API fails if volblocksize is specified when cloning
arg.pop('volblocksize', '')
ret = self.rclient.put(svc, arg)
if ret.status != restclient.Status.CREATED:
exception_msg = (_('Error Cloning '
'Snapshot: %(snapshot)s on '
'Volume: %(lun)s of '
'Pool: %(pool)s '
'Project: %(project)s '
'Clone project: %(clone_proj)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s.')
% {'snapshot': snapshot,
'lun': lun,
'pool': pool,
'project': project,
'clone_proj': clone_proj,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def set_lun_props(self, pool, project, lun, **kargs):
"""set lun properties."""
svc = '/api/storage/v1/pools/' + pool + '/projects/' + \
project + '/luns/' + lun
if kargs is None:
return
if 'schema' in kargs:
kargs.update(kargs.pop('schema'))
ret = self.rclient.put(svc, kargs)
if ret.status != restclient.Status.ACCEPTED:
exception_msg = (_('Error Setting props '
'Props: %(props)s on '
'Volume: %(lun)s of '
'Pool: %(pool)s '
'Project: %(project)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s.')
% {'props': kargs,
'lun': lun,
'pool': pool,
'project': project,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def num_clones(self, pool, project, lun, snapshot):
"""Checks whether snapshot has clones or not."""
svc = '/api/storage/v1/pools/' + pool + '/projects/' + \
project + '/luns/' + lun + '/snapshots/' + snapshot
ret = self.rclient.get(svc)
if ret.status != restclient.Status.OK:
exception_msg = (_('Error Getting '
'Snapshot: %(snapshot)s on '
'Volume: %(lun)s to '
'Pool: %(pool)s '
'Project: %(project)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s.')
% {'snapshot': snapshot,
'lun': lun,
'pool': pool,
'project': project,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
val = json.loads(ret.data)
return val['snapshot']['numclones']
def get_initiator_initiatorgroup(self, initiator):
"""Returns the initiator group of the initiator."""
groups = []
svc = "/api/san/v1/iscsi/initiator-groups"
ret = self.rclient.get(svc)
if ret.status != restclient.Status.OK:
msg = _('Error getting initiator groups.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
val = json.loads(ret.data)
for initiator_group in val['groups']:
if initiator in initiator_group['initiators']:
groups.append(initiator_group["name"])
return groups
def create_schema(self, schema):
"""Create a custom ZFSSA schema."""
base = '/api/storage/v1/schema'
svc = "%(base)s/%(prop)s" % {'base': base, 'prop': schema['property']}
ret = self.rclient.get(svc)
if ret.status == restclient.Status.OK:
LOG.warning('Property %s already exists.', schema['property'])
return
ret = self.rclient.post(base, schema)
if ret.status != restclient.Status.CREATED:
exception_msg = (_('Error Creating '
'Property: %(property)s '
'Type: %(type)s '
'Description: %(description)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s.')
% {'property': schema['property'],
'type': schema['type'],
'description': schema['description'],
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def create_schemas(self, schemas):
"""Create multiple custom ZFSSA schemas."""
ret = []
for schema in schemas:
res = self.create_schema(schema)
ret.append(res)
return ret
class ZFSSANfsApi(ZFSSAApi):
    """ZFSSA API proxy class for NFS driver"""
    # REST URI templates; each builds on the previous by appending one
    # more %s-substituted path component (pool, project, share, snapshot).
    projects_path = '/api/storage/v1/pools/%s/projects'
    project_path = projects_path + '/%s'
    shares_path = project_path + '/filesystems'
    share_path = shares_path + '/%s'
    share_snapshots_path = share_path + '/snapshots'
    share_snapshot_path = share_snapshots_path + '/%s'
    services_path = '/api/service/v1/services/'
    def __init__(self, *args, **kwargs):
        """Initialize the NFS API proxy."""
        super(ZFSSANfsApi, self).__init__(*args, **kwargs)
        # Created lazily by set_webdav(); used for file-level operations.
        self.webdavclient = None
    def set_webdav(self, https_path, auth_str):
        """Create the WebDAV client used for volume-file operations."""
        self.webdavclient = webdavclient.ZFSSAWebDAVClient(https_path,
                                                           auth_str)
def verify_share(self, pool, project, share):
"""Checks whether the share exists"""
svc = self.share_path % (pool, project, share)
ret = self.rclient.get(svc)
if ret.status != restclient.Status.OK:
exception_msg = (_('Error Verifying '
'share: %(share)s on '
'Project: %(project)s and '
'Pool: %(pool)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s.')
% {'share': share,
'project': project,
'pool': pool,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def create_snapshot(self, pool, project, share, snapshot):
"""create snapshot of a share"""
svc = self.share_snapshots_path % (pool, project, share)
arg = {
'name': snapshot
}
ret = self.rclient.post(svc, arg)
if ret.status != restclient.Status.CREATED:
exception_msg = (_('Error Creating '
'Snapshot: %(snapshot)s on'
'share: %(share)s to '
'Pool: %(pool)s '
'Project: %(project)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s.')
% {'snapshot': snapshot,
'share': share,
'pool': pool,
'project': project,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
def delete_snapshot(self, pool, project, share, snapshot):
"""delete snapshot of a share"""
svc = self.share_snapshot_path % (pool, project, share, snapshot)
ret = self.rclient.delete(svc)
if ret.status != restclient.Status.NO_CONTENT:
exception_msg = (_('Error Deleting '
'Snapshot: %(snapshot)s on '
'Share: %(share)s to '
'Pool: %(pool)s '
'Project: %(project)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s.')
% {'snapshot': snapshot,
'share': share,
'pool': pool,
'project': project,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
    def create_snapshot_of_volume_file(self, src_file="", dst_file=""):
        """Copy a snapshot's volume file to *dst_file* over WebDAV."""
        # Snapshot contents are exposed under the hidden .zfs/snapshot
        # directory of the share.
        src_file = '.zfs/snapshot/' + src_file
        return self.webdavclient.request(src_file=src_file, dst_file=dst_file,
                                         method='COPY')
    def delete_snapshot_of_volume_file(self, src_file=""):
        """Delete a volume file over WebDAV."""
        return self.webdavclient.request(src_file=src_file, method='DELETE')
    def create_volume_from_snapshot_file(self, src_file="", dst_file="",
                                         method='COPY'):
        """Copy (or move, per *method*) a snapshot file to a volume file."""
        return self.webdavclient.request(src_file=src_file, dst_file=dst_file,
                                         method=method)
def _change_service_state(self, service, state=''):
svc = self.services_path + service + '/' + state
ret = self.rclient.put(svc)
if ret.status != restclient.Status.ACCEPTED:
exception_msg = (_('Error Verifying '
'Service: %(service)s '
'Return code: %(ret.status)d '
'Message: %(ret.data)s.')
% {'service': service,
'ret.status': ret.status,
'ret.data': ret.data})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
data = json.loads(ret.data)['service']
LOG.debug('%(service)s service state: %(data)s',
{'service': service, 'data': data})
status = 'online' if state == 'enable' else 'disabled'
if data['<status>'] != status:
exception_msg = (_('%(service)s Service is not %(status)s '
'on storage appliance: %(host)s')
% {'service': service,
'status': status,
'host': self.host})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
    def enable_service(self, service):
        """Enable *service* and verify it is reported online."""
        self._change_service_state(service, state='enable')
        self.verify_service(service)
    def disable_service(self, service):
        """Disable *service* and verify it is reported offline."""
        self._change_service_state(service, state='disable')
        self.verify_service(service, status='offline')
    def modify_service(self, service, edit_args=None):
        """Edit service properties.

        PUTs *edit_args* (a dict of property name -> value) to the service
        URI and raises VolumeBackendAPIException if the appliance does not
        accept the change.
        """
        if edit_args is None:
            edit_args = {}
        svc = self.services_path + service
        ret = self.rclient.put(svc, edit_args)
        if ret.status != restclient.Status.ACCEPTED:
            exception_msg = (_('Error modifying '
                               'Service: %(service)s '
                               'Return code: %(ret.status)d '
                               'Message: %(ret.data)s.')
                             % {'service': service,
                                'ret.status': ret.status,
                                'ret.data': ret.data})
            LOG.error(exception_msg)
            raise exception.VolumeBackendAPIException(data=exception_msg)
        data = json.loads(ret.data)['service']
        LOG.debug('Modify %(service)s service '
                  'return data: %(data)s',
                  {'service': service,
                   'data': data})
    def create_share(self, pool, project, share, args):
        """Create a share in the specified pool and project.

        If the share does not exist it is created with properties *args*;
        if it already exists, its properties are updated in place with a
        PUT.  Raises VolumeBackendAPIException on any REST failure.
        """
        svc = self.share_path % (pool, project, share)
        ret = self.rclient.get(svc)
        if ret.status != restclient.Status.OK:
            # Share absent: create it under the project's shares path.
            svc = self.shares_path % (pool, project)
            args.update({'name': share})
            ret = self.rclient.post(svc, args)
            if ret.status != restclient.Status.CREATED:
                exception_msg = (_('Error Creating '
                                   'Share: %(name)s '
                                   'Return code: %(ret.status)d '
                                   'Message: %(ret.data)s.')
                                 % {'name': share,
                                    'ret.status': ret.status,
                                    'ret.data': ret.data})
                LOG.error(exception_msg)
                raise exception.VolumeBackendAPIException(data=exception_msg)
        else:
            LOG.debug('Editing properties of a pre-existing share')
            ret = self.rclient.put(svc, args)
            if ret.status != restclient.Status.ACCEPTED:
                exception_msg = (_('Error editing share: '
                                   '%(share)s on '
                                   'Pool: %(pool)s '
                                   'Return code: %(ret.status)d '
                                   'Message: %(ret.data)s .')
                                 % {'share': share,
                                    'pool': pool,
                                    'ret.status': ret.status,
                                    'ret.data': ret.data})
                LOG.error(exception_msg)
                raise exception.VolumeBackendAPIException(data=exception_msg)
    def get_share(self, pool, project, share):
        """Return the share's properties (the 'filesystem' dict from the
        appliance's JSON response); raise VolumeBackendAPIException when
        the share cannot be fetched."""
        svc = self.share_path % (pool, project, share)
        ret = self.rclient.get(svc)
        if ret.status != restclient.Status.OK:
            exception_msg = (_('Error Getting '
                               'Share: %(share)s on '
                               'Pool: %(pool)s '
                               'Project: %(project)s '
                               'Return code: %(ret.status)d '
                               'Message: %(ret.data)s.')
                             % {'share': share,
                                'pool': pool,
                                'project': project,
                                'ret.status': ret.status,
                                'ret.data': ret.data})
            LOG.error(exception_msg)
            raise exception.VolumeBackendAPIException(data=exception_msg)
        val = json.loads(ret.data)
        return val['filesystem']
    def get_volume(self, volume):
        """Fetch a volume file's custom properties via WebDAV PROPFIND.

        Returns a dict with keys 'numclones' (int), 'updated_at',
        'image_id', 'origin' and 'cinder_managed' (strings, '' when the
        property is absent).  Raises VolumeNotFound when the PROPFIND
        request fails.
        """
        LOG.debug('Getting volume %s.', volume)
        try:
            resp = self.webdavclient.request(src_file=volume,
                                             method='PROPFIND')
        except Exception:
            raise exception.VolumeNotFound(volume_id=volume)
        resp = resp.read()
        numclones = self._parse_prop(resp, 'numclones')
        result = {
            'numclones': int(numclones) if numclones != '' else 0,
            'updated_at': self._parse_prop(resp, 'updated_at'),
            'image_id': self._parse_prop(resp, 'image_id'),
            'origin': self._parse_prop(resp, 'origin'),
            'cinder_managed': self._parse_prop(resp, 'cinder_managed'),
        }
        return result
def delete_file(self, filename):
try:
self.webdavclient.request(src_file=filename, method='DELETE')
except Exception:
exception_msg = ('Cannot delete file %s.', filename)
LOG.error(exception_msg)
def set_file_props(self, file, specs):
"""Set custom properties to a file."""
for key in specs:
self.webdavclient.set_file_prop(file, key, specs[key])
def _parse_prop(self, response, prop):
"""Parse a property value from the WebDAV response."""
propval = ""
for line in response.split("\n"):
if prop in line:
try:
propval = line[(line.index('>') + 1):line.index('</')]
except Exception:
pass
return propval
def create_directory(self, dirname):
try:
self.webdavclient.request(src_file=dirname, method='GET')
LOG.debug('Directory %s already exists.', dirname)
except Exception:
# The directory does not exist yet
try:
self.webdavclient.request(src_file=dirname, method='MKCOL')
except Exception:
exception_msg = (_('Cannot create directory %s.'), dirname)
raise exception.VolumeBackendAPIException(data=exception_msg)
    def rename_volume(self, src, dst):
        """Rename a volume file on the share via a WebDAV MOVE."""
        return self.webdavclient.request(src_file=src, dst_file=dst,
                                         method='MOVE')
| phenoxim/cinder | cinder/volume/drivers/zfssa/zfssarest.py | Python | apache-2.0 | 60,863 |
from pyelt.datalayers.database import Column, Columns
from pyelt.datalayers.dv import Sat, DvEntity, Link, Hub, HybridSat, LinkReference
class Role:
    """Marker mixin: tags a Data Vault entity as a role."""
    pass
class Act:
    """Marker mixin: tags a Data Vault entity as an act."""
    pass
class Participation:
    """Marker mixin: tags a link as a participation."""
    pass
class Zorgverlener(DvEntity, Role):
    """Care provider ('zorgverlener') Data Vault entity with its
    satellite definitions."""
    class Default(Sat):
        # Core registration data of the care provider.
        zorgverlenernummer = Columns.TextColumn()
        aanvangsdatum = Columns.DateColumn()
        einddatum = Columns.DateColumn()
    class Personalia(Sat):
        # Personal name fields.
        achternaam = Columns.TextColumn()
        tussenvoegsels = Columns.TextColumn()
        voorletters = Columns.TextColumn()
        voornaam = Columns.TextColumn()
        bijnaam = Columns.TextColumn()
    # not used in dwh2.0; used here for the tests run in test03r_domain.py
    class ContactGegevens(HybridSat):
        class Types(HybridSat.Types):
            telefoon = 'telefoon'
            mobiel = 'mobiel'
            mobiel2 = 'mobiel2'
        telnummer = Columns.TextColumn()
        datum = Columns.DateColumn()
        landcode = Columns.TextColumn()
    default = Default()
    personalia = Personalia()
    contactgegevens = ContactGegevens()
class Adres(DvEntity, Role):
    """Address ('adres') Data Vault entity."""
    class Default(Sat):
        postcode = Columns.TextColumn()
        huisnummer = Columns.IntColumn()
        huisnummer_toevoeging = Columns.TextColumn()
        straat = Columns.TextColumn()
        plaats = Columns.TextColumn()
        land = Columns.TextColumn()
    default = Default()
class Zorginstelling(DvEntity, Role):
    """Care institution ('zorginstelling') Data Vault entity."""
    class Default(Sat):
        zorginstellings_naam = Columns.TextColumn()
        zorginstellings_nummer = Columns.TextColumn()
    default = Default()
# This is a link (connects the care provider and care institution hubs):
class Zorgverlener_Zorginstelling_Link(Link, Participation):
    zorgverlener = LinkReference(Zorgverlener)
    zorginstelling = LinkReference(Zorginstelling)
# This is a hybrid link (a link with a type discriminator):
class Zorgverlener_Adres_Link(Link):
    class Types:
        # Address kinds: postal, visiting, residential.
        post = 'post'
        bezoek = 'bezoek'
        woon = 'woon'
    zorgverlener = LinkReference(Zorgverlener)
    adres = LinkReference(Adres)
class Zorginstelling_Adres_Link(Link):
    """Link between a care institution and an address."""
    zorginstelling = LinkReference(Zorginstelling)
    adres = LinkReference(Adres)
| NLHEALTHCARE/PYELT | tests/old/unit_tests_rob/_domain_rob_unittest.py | Python | gpl-3.0 | 2,194 |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2014 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""
Stacked Bar chart
"""
from __future__ import division
from pygal.graph.bar import Bar
from pygal.util import compute_scale, swap, ident
from pygal.adapters import none_to_zero
class StackedBar(Bar):
    """Stacked Bar graph"""
    # Treat None values as zero so stacking sums stay defined.
    _adapters = [none_to_zero]
    def _get_separated_values(self, secondary=False):
        # Per x-position sums of the positive and of the negative values
        # across all series (stack heights above and below self.zero).
        series = self.secondary_series if secondary else self.series
        transposed = list(zip(*[serie.values for serie in series]))
        positive_vals = [sum([
            val for val in vals
            if val is not None and val >= self.zero])
            for vals in transposed]
        negative_vals = [sum([
            val
            for val in vals
            if val is not None and val < self.zero])
            for vals in transposed]
        return positive_vals, negative_vals
    def _compute_box(self, positive_vals, negative_vals):
        # Vertical range covers the deepest negative stack and the tallest
        # positive stack (falls back to self.zero on empty lists).
        self._box.ymin = negative_vals and min(min(negative_vals), self.zero)
        self._box.ymax = positive_vals and max(max(positive_vals), self.zero)
    def _compute(self):
        positive_vals, negative_vals = self._get_separated_values()
        self._compute_box(positive_vals, negative_vals)
        if self.logarithmic:
            # Log scale can only plot strictly positive totals.
            positive_vals = list(filter(lambda x: x > 0, positive_vals))
            negative_vals = list(filter(lambda x: x > 0, negative_vals))
        positive_vals = positive_vals or [self.zero]
        negative_vals = negative_vals or [self.zero]
        x_pos = [
            x / self._len for x in range(self._len + 1)
        ] if self._len > 1 else [0, 1] # Center if only one value
        self._points(x_pos)
        y_pos = compute_scale(
            self._box.ymin, self._box.ymax, self.logarithmic, self.order_min
        ) if not self.y_labels else list(map(float, self.y_labels))
        # NOTE(review): under Python 3 this zip is an iterator that the
        # x-label computation below consumes — confirm intended.
        self._x_ranges = zip(x_pos, x_pos[1:])
        self._x_labels = self.x_labels and list(zip(self.x_labels, [
            sum(x_range) / 2 for x_range in self._x_ranges]))
        self._y_labels = list(zip(map(self._format, y_pos), y_pos))
        # Running stack heights per x index, filled in by _bar().
        self.negative_cumulation = [0] * self._len
        self.positive_cumulation = [0] * self._len
        if self.secondary_series:
            positive_vals, negative_vals = self._get_separated_values(True)
            self.secondary_negative_cumulation = [0] * self._len
            self.secondary_positive_cumulation = [0] * self._len
            # In case of pyramids
            sum_ = lambda x: sum(x) if isinstance(x, tuple) else x
            self._secondary_min = (negative_vals and min(
                sum_(min(negative_vals)), self.zero)) or self.zero
            self._secondary_max = (positive_vals and max(
                sum_(max(positive_vals)), self.zero)) or self.zero
    def _bar(self, parent, x, y, index, i, zero, shift=False, secondary=False):
        # Pick the running cumulation for this bar's sign and axis, stack
        # the bar on top of it, then draw the rect in view coordinates.
        if secondary:
            cumulation = (self.secondary_negative_cumulation
                          if y < self.zero else
                          self.secondary_positive_cumulation)
        else:
            cumulation = (self.negative_cumulation
                          if y < self.zero else
                          self.positive_cumulation)
        zero = cumulation[i]
        cumulation[i] = zero + y
        if zero == 0:
            zero = self.zero
            y -= self.zero
        y += zero
        width = (self.view.x(1) - self.view.x(0)) / self._len
        x, y = self.view((x, y))
        series_margin = width * self._series_margin
        x += series_margin
        width -= 2 * series_margin
        if self.secondary_series:
            # Primary and secondary axes share the slot: half width each.
            width /= 2
            x += int(secondary) * width
        serie_margin = width * self._serie_margin
        x += serie_margin
        width -= 2 * serie_margin
        height = self.view.y(zero) - y
        r = self.rounded_bars * 1 if self.rounded_bars else 0
        self.svg.transposable_node(
            parent, 'rect',
            x=x, y=y, rx=r, ry=r, width=width, height=height,
            class_='rect reactive tooltip-trigger')
        transpose = swap if self.horizontal else ident
        # Return the bar's center for tooltip/value placement.
        return transpose((x + width / 2, y + height / 2))
| funkring/fdoo | pygal/graph/stackedbar.py | Python | agpl-3.0 | 4,948 |
#!/usr/bin/env python
# Script by Jason Kwong
# Extracts porB sequence from Neisseria meningitidis
# Use modern print function from python 3.x
from __future__ import print_function
# Modules
import argparse
from argparse import RawTextHelpFormatter
import sys
import os
from io import StringIO
from Bio import SeqIO
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from Bio.Blast.Applications import NcbiblastnCommandline
from Bio.Blast import NCBIXML
from pkg_resources import resource_string, resource_filename
# Import local modules
from . import nmen
# Standard functions
# Log a message to stderr
def msg(*args, **kwargs):
    """Print a message to stderr (same interface as print())."""
    print(*args, file=sys.stderr, **kwargs)
# Log an error to stderr and quit with non-zero error code
def err(*args, **kwargs):
    """Print an error message to stderr and exit with status 1."""
    msg(*args, **kwargs)
    sys.exit(1)
# BLAST
def porBBLAST(f, blastdb):
    """BLAST a FASTA file against the porB allele database.

    Returns [file, contig id, porB call, %identity, coverage, SeqRecord],
    where the call is the exact allele at 100%/100%, '<allele>-like' at
    >99% coverage, and 'new' otherwise; the SeqRecord (or None) holds the
    extracted porB sequence.
    """
    porB = 'new'
    blast_qseqid = '-'
    blast_pident = '-'
    blast_cov = '<99'
    porBRECR = None
    fBLAST = NcbiblastnCommandline(query=f, db=blastdb, outfmt="'6 qseqid sseqid pident length sstrand qstart qend sstart send slen'", dust='no', culling_limit=1)
    stdout, stderr = fBLAST()
    blastOUT = stdout.split('\t')
    # A complete tabular hit has exactly the 10 requested fields.
    if len(blastOUT) == 10:
        blast_qseqid = blastOUT[0]
        blast_sseqid = blastOUT[1]
        blast_pident = float(blastOUT[2])
        blast_length = int(blastOUT[3])
        blast_sstrand = blastOUT[4]
        blast_qstart = int(blastOUT[5])
        blast_qend = int(blastOUT[6])
        blast_sstart = int(blastOUT[7])
        blast_send = int(blastOUT[8])
        blast_slen = int(blastOUT[9])
        # Coverage of the subject allele by the alignment, in percent.
        blast_cov = float(blast_length)/float(blast_slen)*100
        if blast_cov > 99:
            # Locate the matching contig in the query FASTA.
            for s in SeqIO.parse(f, 'fasta'):
                if s.id == blast_qseqid:
                    blastCONTIG = s.seq
            if blast_sstrand == 'plus':
                # Project the full-length allele onto the contig.
                start = blast_qstart - blast_sstart
                end = start + blast_slen
                porBSEQ = blastCONTIG[start:end]
            else:
                # Minus strand: take the reverse complement of the span.
                end = blast_qend + blast_send - 1
                start = end - blast_slen
                porBSEQ = blastCONTIG[start:end].reverse_complement()
            porBRECR = SeqRecord(porBSEQ, id=f, description='PorB')
            if blast_cov == 100 and blast_pident == 100:
                porB = blast_sseqid
            elif blast_cov > 99:
                porB = ''.join([blast_sseqid, '-like'])
    result = [f, blast_qseqid, porB, str(blast_pident), str(blast_cov), porBRECR]
    return result
def main():
    """CLI entry point: parse arguments, porB-type each FASTA file and
    print a tab-separated report (optionally saving allele sequences)."""
    # Usage
    parser = argparse.ArgumentParser(
        formatter_class=RawTextHelpFormatter,
        description='PorB typing of Neisseria meningitidis',
        usage='\n %(prog)s FASTA-1 FASTA-2 ... FASTA-N')
    parser.add_argument('fasta', metavar='FASTA', nargs='+', help='FASTA file to search (required)')
    parser.add_argument('--db', metavar='DB', help='specify custom directory containing allele databases')
    parser.add_argument('--printseq', action='store_true', help='save porB allele sequences to file (default=off)')
    parser.add_argument('--version', action='version', version=
        '=====================================\n'
        '%(prog)s v0.1\n'
        'Updated 22-Feb-2017 by Jason Kwong\n'
        'Dependencies: Python 2.x, BioPython, BLAST\n'
        '=====================================')
    args = parser.parse_args()
    if args.db:
        DBpath = str(args.db).rstrip('/')
    else:
        # Fall back to the allele databases bundled with the package.
        DBpath = resource_filename(__name__, 'db')
    porBDB = os.path.join( DBpath, 'blast', 'porB' )
    # Main
    porBSEQS = []
    print('\t'.join(['SAMPLE_ID', 'CONTIG', 'PorB', '%ID', 'COV']))
    for f in args.fasta:
        result = porBBLAST(f, porBDB)
        print('\t'.join(result[:-1]))
        porBSEQS.append(result[5])
    # Print allele sequences to file
    # NOTE(review): assumes a 'printseq/' directory already exists in the
    # working directory — confirm callers create it.
    if args.printseq:
        if porBSEQS:
            with open('printseq/porB_seqs.fasta', 'w') as output:
                SeqIO.write(porBSEQS, output, 'fasta')
if __name__ == "__main__":
main() | MDU-PHL/meningotype | meningotype/porB.py | Python | gpl-3.0 | 3,596 |
"""test hybrid construction"""
from __future__ import print_function
import logging
logging.basicConfig(level=logging.INFO)
import time
import numpy as np
import matplotlib as mpl
mpl.use('Agg')
from tulip import abstract, hybrid
from polytope import box2poly
input_bound = 0.4
uncertainty = 0.05
cont_state_space = box2poly([[0., 3.], [0., 2.]])
cont_props = {}
cont_props['home'] = box2poly([[0., 1.], [0., 1.]])
cont_props['lot'] = box2poly([[2., 3.], [1., 2.]])
sys_dyn = dict()
allh = [0.5, 1.1, 1.5]
modes = []
modes.append(('normal', 'fly'))
modes.append(('refuel', 'fly'))
modes.append(('emergency', 'fly'))
"""First PWA mode"""
def subsys0(h):
    """LTI dynamics on the upper sub-domain (y between h and 2), with
    input set scaled by input_bound and disturbance by uncertainty."""
    dyn_matrix = np.array([[1.105_2, 0.], [ 0., 1.105_2]])
    input_matrix = np.array([[1.105_2, 0.], [ 0., 1.105_2]])
    dist_matrix = np.array([[1,0], [0,1]])
    input_set = box2poly([[-1., 1.], [-1., 1.]])
    input_set.scale(input_bound)
    dist_set = box2poly([[-1., 1.], [-1., 1.]])
    dist_set.scale(uncertainty)
    domain = box2poly([[0., 3.], [h, 2.]])
    return hybrid.LtiSysDyn(dyn_matrix, input_matrix, dist_matrix, None,
                            input_set, dist_set, domain)
def subsys1(h):
    """LTI dynamics on the lower sub-domain (y between 0 and h), with
    input set scaled by input_bound and disturbance by uncertainty."""
    dyn_matrix = np.array([[0.994_8, 0.], [0., 1.105_2]])
    input_matrix = np.array([[-1.105_2, 0.], [0., 1.105_2]])
    dist_matrix = np.array([[1, 0], [0, 1]])
    input_set = box2poly([[-1., 1.], [-1., 1.]])
    input_set.scale(input_bound)
    dist_set = box2poly([[-1., 1.], [-1., 1.]])
    dist_set.scale(uncertainty)
    domain = box2poly([[0., 3.], [0., h]])
    return hybrid.LtiSysDyn(dyn_matrix, input_matrix, dist_matrix, None,
                            input_set, dist_set, domain)
# Build a piecewise-affine system per mode from the two sub-domain
# dynamics split at that mode's height h.
for mode, h in zip(modes, allh):
    subsystems = [subsys0(h), subsys1(h)]
    sys_dyn[mode] = hybrid.PwaSysDyn(subsystems, cont_state_space)
"""Switched Dynamics"""
# collect env, sys_modes
env_modes, sys_modes = zip(*modes)
# NOTE(review): msg is assembled but never printed/logged — confirm.
msg = 'Found:\n'
msg += '\t Environment modes: ' + str(env_modes)
msg += '\t System modes: ' + str(sys_modes)
switched_dynamics = hybrid.SwitchedSysDyn(
    disc_domain_size=(len(env_modes), len(sys_modes)),
    dynamics=sys_dyn,
    env_labels=env_modes,
    disc_sys_labels=sys_modes,
    cts_ss=cont_state_space
)
print(switched_dynamics)
# Partition the state space by the labeled propositions and convexify.
ppp = abstract.prop2part(cont_state_space, cont_props)
ppp, new2old = abstract.part2convex(ppp)
"""Discretize to establish transitions"""
start = time.time()
N = 8
trans_len=1
# Same discretization parameters for every mode.
disc_params = {}
for mode in modes:
    disc_params[mode] = {'N':N, 'trans_length':trans_len}
swab = abstract.multiproc_discretize_switched(
    ppp, switched_dynamics, disc_params,
    plot=True, show_ts=True
)
print(swab)
# Save one figure per returned axis.
axs = swab.plot(show_ts=True)
for i, ax in enumerate(axs):
    ax.figure.savefig('swab_' + str(i) + '.pdf')
#ax = sys_ts.ts.plot()
elapsed = (time.time() - start)
print('Discretization lasted: ' + str(elapsed))
| tulip-control/tulip-control | examples/developer/fuel_tank/continuous_switched_test.py | Python | bsd-3-clause | 2,645 |
"""
Newton search algorithm.
A powerful method for unconstrained minimization problem.
"""
import sys
import numpy as np
from numpy.linalg import LinAlgError
from models.quadratic import FuncQuadratic
from models.base import FuncModel
class NewtonMethod(object):
    # Python 2 code (sys.maxint, print statements).
    # The wrapped objective model; set in __init__.
    _model = None
    def __init__(self, model, dim, A, b, c):
        # model must implement the FuncModel interface (gradient, hessian,
        # func_value); A, b, c are kept as quadratic-form coefficients.
        if not isinstance(model, FuncModel):
            raise TypeError("Type Error. %s is not a instance of class FuncModel." % model)
        else:
            self._model = model
        self.dim_ = dim
        self.A_ = np.mat(A, dtype=float)
        self.b_ = np.mat(b, dtype=float)
        self.c_ = c
        self.max_iter_ = sys.maxint
        self.tol_ = 0
        self.method_ = None
    def _newton_step(self, variable):
        """
        A descent direction called Newton step defined as delta(x_nt) = -invHessian(x)*Gradient(x)
        """
        x = np.mat(variable)
        gradient = self._model.gradient(x)
        hessian = self._model.hessian(x)
        try:
            inv_hessian = np.linalg.inv(hessian)
        except LinAlgError:
            # noinspection PyTypeChecker
            # Singular Hessian: regularize by adding 1e-6 on the diagonal.
            hessian = hessian + np.dot(np.eye(self.dim_), 1e-6)
            inv_hessian = np.linalg.inv(hessian)
        # Newton step
        step = -np.dot(inv_hessian, gradient.T).T
        return step
    def _stop_error(self, variable, newton_step):
        """
        Use Newton decrement as stopping as stopping error.
        lambda^2 = gradient(x) * hessian(x) * gradient(x)
        """
        g = self._model.gradient(variable)
        newton_decrement = np.dot(g, newton_step.T)
        return newton_decrement
    def set_max_iter(self, max_iter):
        # Returns True when the value is accepted (>= 1), False otherwise.
        if max_iter >= 1:
            self.max_iter_ = max_iter
            return True
        else:
            return False
    def set_tol(self, tol):
        # Returns True when the tolerance is accepted (>= 0).
        if tol >= 0:
            self.tol_ = tol
            return True
        else:
            return False
    def set_search_method(self, method='linear_search'):
        self.method_ = method
    def base_newton_search(self):
        """
        Base Newton Method which use Newton step: - inv_Hessian(x) * Gradient(x).T as the search step.
        """
        count = 0
        error = sys.float_info.max
        x = np.random.random(self.dim_)
        while error > self.tol_ and count < self.max_iter_:
            newton_step = self._newton_step(x)
            x = x + newton_step
            error = self._stop_error(x, newton_step)
            count += 1
        return x
    def search(self):
        """
        Find the minimal solution of the given Quadratic function.
        """
        count = 0
        error = sys.float_info.max
        x = np.random.random(self.dim_)
        # Newton search
        while error > self.tol_ and count < self.max_iter_:
            newton_step = self._newton_step(x)
            # Golden-section line search for the step length over [-1, 1]
            # (0.382/0.618 are the golden-ratio split points).
            alpha = -1
            beta = 1
            lambda_ = alpha + 0.383 * (beta - alpha)
            mu = alpha + 0.618 * (beta - alpha)
            # Linear search
            while np.math.fabs(beta - alpha) > 1e-10:
                # print 'search_lambda', alpha_k
                f_lambda = self._model.func_value(x + lambda_ * newton_step)
                f_mu = self._model.func_value(x + mu * newton_step)
                if f_lambda > f_mu:
                    alpha = lambda_
                    beta = beta
                    lambda_ = mu
                    mu = alpha + 0.618 * (beta - alpha)
                else:
                    alpha = alpha
                    beta = mu
                    mu = lambda_
                    lambda_ = alpha + 0.382 * (beta - alpha)
            # Final search direction
            x += alpha * newton_step
            count += 1
            error = self._stop_error(x, newton_step)
            print 'error: ', error
        # Optimal solution x
        return x
def main():
    # Demo: minimize a quadratic model with the Newton search.
    A = np.mat([[1, 0], [0, 1]])
    b = np.mat([1, 1])
    c = 1.0
    # x = np.array([1, 1])
    test_func = FuncQuadratic(name="quadratic", A=A, b=b, c=c)
    a = NewtonMethod(dim=2, model=test_func, A=[[8.0, 0.0], [0.0, 8.0]], b=[0, 2], c=1)
    a.set_max_iter(500)
    a.set_tol(1e-4)
    x = a.base_newton_search()
    print x
    # NOTE(review): this gradient check uses [[8, 0], [1, 8]], which does
    # not match the A=[[8, 0], [0, 8]] passed above — confirm intended.
    print "gradient:\n", np.dot(np.mat([[8, 0], [1, 8]]), x.T) + np.mat([0, 2]).T
if __name__ == '__main__':
main() | HaiQW/Optimal | search/newton.py | Python | apache-2.0 | 4,376 |
# Copyright (C) 2016 Semenovsky, Oleg <[email protected]>
# Author: Semenovsky, Oleg <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import shlex
import glob
class MandatoryOptionMissing(Exception):
    """Raised when a directive marked mandatory is absent from the config."""
    pass
class UnknownDirective(Exception):
    """Raised for a directive that is not declared in the rule tree."""
    pass
class SyntaxError(Exception):
    """Config syntax error.  NOTE: shadows the builtin SyntaxError."""
    pass
class ConfigError(Exception):
    """Raised for semantic errors while parsing a configuration."""
    pass
def stripmany(s, chars, w=False):
    """Strip each string in *chars*, in order, from both ends of *s*.

    When *w* is true, surrounding whitespace is stripped afterwards as
    well.  Bug fix: the result of the final strip() was previously
    discarded (strings are immutable), making w=True a no-op.
    """
    for c in chars:
        s = s.strip(c)
    if w:
        s = s.strip()
    return s
class Rule():
    """A parsing rule describing how one configuration directive is
    handled.

    Attributes:
        name: directive name this rule matches ("root" for the top level).
        handler: callable applied to the directive's argument tokens.
        unique: whether the directive may appear at most once.
        mandatory: whether the directive must appear.
        subrules: mapping of child directive name -> Rule.
    """
    def __init__(self, name="root", handler=None, unique=False, mandatory=False):
        self.name = name
        self.handler = handler
        self.unique = unique
        self.mandatory = mandatory
        self.subrules = {}
    def __repr__(self):
        text = 'oshlex rule {}, unique: {}, mandatory: {}'.format(
            self.name, self.unique, self.mandatory)
        if not self.subrules:
            return text
        return text + '\n' + repr(self.subrules)
    def __getitem__(self, key):
        # Attribute access by subscription, e.g. rule["unique"].
        return self.__dict__[key]
    def get(self, name):
        """Return the child rule registered under *name*."""
        return self.subrules[name]
    def add(self, *rules):
        """Register child rules; returns self so calls can be chained."""
        self.subrules.update((child.name, child) for child in rules)
        return self
    def in_context(self, name):
        """True when *name* is a directive known in this rule's context."""
        return name in self.subrules
    def check(self, config):
        """Raise MandatoryOptionMissing when a mandatory child directive
        is absent from the parsed *config* mapping."""
        for option, rule in self.subrules.items():
            if rule.mandatory and option not in config:
                raise MandatoryOptionMissing("Missing mandatory option {}".format(option))
class Tokenizer(shlex.shlex):
    """shlex-based tokenizer with config-friendly word characters and an
    'include' source directive supporting globs."""
    def __init__(self, instream=None, infile=None, posix=True):
        super(Tokenizer, self).__init__(instream=instream, infile=infile, posix=posix)
        # Allow punctuation common in values (IPs, paths, emails, globs).
        self.wordchars += ',-./*@:'
        # 'include <file>' pulls another file into the token stream.
        self.source = 'include'
    def sourcehook(self, newfile):
        "Hook called on a filename to be sourced."
        newfile = stripmany(newfile, ["\"", "\'"], w=True)
        # This implements cpp-like semantics for relative-path inclusion.
        if isinstance(self.infile, str) and not os.path.isabs(newfile):
            newfile = os.path.join(os.path.dirname(self.infile), newfile)
        # Push every glob match ourselves and return None so shlex does
        # not try to push a (filename, stream) pair itself.
        filelist = glob.glob(newfile)
        for f in filelist:
            self.push_source(open(f, 'r'), f)
class Configuration():
    """Parses a nginx-style configuration (';'-terminated directives,
    '{...}' nested blocks) against a Rule tree."""
    def __init__(self, root, tokenizer=None):
        # root: top-level Rule describing the allowed directives.
        self._root = root
        if not tokenizer:
            self._tokenizer = Tokenizer
        else:
            self._tokenizer = tokenizer
        self.tokenizer = None
        self._config = {}
    def read(self, path):
        """Parse the file at *path* and store the resulting config dict."""
        self.tokenizer = self._tokenizer(open(path, 'r'), infile=path)
        self._config = self._parse()
    def validate(self, path):
        """Parse the file at *path* for errors, discarding the result."""
        self.tokenizer = self._tokenizer(open(path, 'r'), infile=path)
        self._parse()
    def __str__(self):
        return str(self._config)
    def __repr__(self):
        return repr(self._config)
    def __getitem__(self, key):
        return self._config[key]
    def _parse(self, state=None):
        """Recursively parse one block scope under Rule *state*.

        stack accumulates the tokens of the current directive until a
        terminator (';' or '{') is seen.  Unique directives store a single
        value; non-unique ones collect a list.
        """
        stack = []
        config = {}
        directive = None
        value = None
        if not state:
            state = self._root
        while True:
            token = self.tokenizer.get_token()
            if token == ";":
                # End of a simple directive: name + at least one argument.
                if len(stack) >= 2:
                    rule = state.get(stack[0])
                    if rule.unique:
                        if stack[0] in config:
                            raise ConfigError("Unique directive already set: {}".format(stack[0]))
                        else:
                            try:
                                config[stack[0]] = rule.handler(stack[1:])
                            except Exception as e:
                                raise ConfigError("{}:{} Handler couldn't process tokens, got an exception {}".format(
                                    self.tokenizer.infile,
                                    self.tokenizer.lineno,
                                    e,
                                ))
                    else:
                        if not stack[0] in config:
                            config[stack[0]] = []
                        try:
                            config[stack[0]].append(rule.handler(stack[1:]))
                        except Exception as e:
                            raise ConfigError("{}:{} Handler couldn't process tokens, got an exception {} ".format(
                                self.tokenizer.infile,
                                self.tokenizer.lineno,
                                e,
                            ))
                    stack = []
                else:
                    raise ConfigError("Syntax error")
            elif token == "{":
                # Start of a nested block: recurse with the child rule.
                if len(stack) == 1:
                    rule = state.get(stack[0])
                    value = self._parse(rule)
                    if rule.unique:
                        if stack[0] in config:
                            raise ConfigError("Unique value already set")
                        else:
                            config[stack[0]] = value
                    else:
                        if not stack[0] in config:
                            config[stack[0]] = []
                        config[stack[0]].append(value)
                    stack = []
                else:
                    raise ConfigError("Syntax error")
            elif token == "}" or not token:
                # End of this scope (or EOF): verify mandatory options.
                if not stack:
                    state.check(config)
                    return config
                else:
                    raise ConfigError("Syntax error")
            else:
                # Plain token: first one must be a known directive name.
                if not stack:
                    if state.in_context(token):
                        stack.append(token)
                    else:
                        raise ConfigError("Unknown directive: {}".format(token))
                else:
                    stack.append(token)
| osemenovsky/oshlex | oshlex/config.py | Python | gpl-2.0 | 6,614 |
#!/usr/bin/env python
#
# Computation of the rate-distortion function for source coding with side
# information at the decoder using the Blahut-Arimoto algorithm.
#
# Formulation similar to R.E. Blahut "Computation of Channel Capacity and
# Rate-Distortion Functions," IEEE Transactions on Information Theory, 18,
# no. 4, 1972.
#
# Author: Christophe Ramananjaona
# (c) 2005, Department of Electrical and Computer Engineering, Duke University.
# (c) 2017, Isloux, for the Python version.
from numpy import shape,sum,zeros,ones,arange,log,exp,array,longdouble,finfo
from sys import float_info
from os.path import isfile
from sys import argv
#from code_generator0 import code_generator
from code_generator import code_generator
def distortion_measure(n):
    """Return the n-by-n Hamming distortion matrix as long doubles:
    d(i, j) = 0 when i == j and 1 otherwise."""
    D = ones((n, n), dtype='longdouble')
    for idx in range(n):
        D[idx, idx] = 0.0
    return D
def blahut_arimoto(q):
    # Blahut-Arimoto iteration for the rate-distortion function with
    # decoder side information.  q is the joint distribution over (X, Y);
    # for each slope parameter s it prints "distortion rate iterations".
    # Python 2 code (print statement at the end).
    nx,ny=shape(q)
    # Marginals qx, qy of the joint distribution.
    qx=[]
    for i in range(nx):
        qx.append(longdouble(sum(q[i,:])))
    qy=[]
    for j in range(ny):
        qy.append(longdouble(sum(q[:,j])))
    nz=nx
    #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
    # The array t contains all the possible codes that map Y into Z
    nt=nx+1
    t,nt=code_generator(nz,ny)
    # If nx!=nz there is a problem
    D=distortion_measure(max(nx,ny))
    # Sweep of slope parameters s (Lagrange multipliers on distortion).
    npoints=100
    ds=arange(-10.0,0.0,0.1)
    c=zeros((nx,nt),dtype='longdouble')
    vtx=zeros((nt,nx),dtype='longdouble')
    sexp=zeros(nt,dtype='longdouble')
    #epsilon=finfo(longdouble(1.0)).eps
    epsilon=1.0e-7
    for s in range(npoints):
        # Uniform initial conditional distribution q(t|y).
        qty=ones((nt,ny),dtype='longdouble')
        qty=qty/nt/ny
        # Initialise stop test
        stop=longdouble(1.0e5)
        n=0
        while stop>epsilon:
            n=n+1
            # Update c[i][k] = exp(sum_j q(j|i) log q(t=k|y=j)).
            for i in range(nx):
                if qx[i]!=0.0:
                    qxid=longdouble(1.0)/qx[i]
                for k in range(nt):
                    ssum=longdouble(0.0)
                    for j in range(ny):
                        if qx[i]!=0.0:
                            ssum+=q[i][j]*qxid*log(qty[k][j])
                        else:
                            ssum+=qy[j]*log(qty[k][j])
                    c[i][k]=exp(ssum)
            # Update vtx[k][i]: normalized tilted distribution over codes.
            for i in range(nx):
                sexp=zeros(nt,dtype='longdouble')
                sd=longdouble(0.0)
                if qx[i]!=0.0:
                    qxid=longdouble(1.0)/qx[i]
                for k in range(nt):
                    for j in range(ny):
                        if qx[i]!=0.0:
                            sexp[k]+=q[i][j]*qxid*D[i,t[2][k,j]]
                        else:
                            sexp[k]+=qy[j]*D[i,t[2][k,j]]
                    sd+=c[i][k]*exp(ds[s]*sexp[k])
                sd=longdouble(1.0)/sd
                for k in range(nt):
                    vtx[k][i]=c[i][k]*exp(ds[s]*sexp[k])*sd
            # Re-estimate q(t|y) and compute the KL-based stop criterion.
            qtym=qty
            qty=zeros((nt,ny),dtype='longdouble')
            stop=longdouble(0.0)
            for j in range(ny):
                qyjd=longdouble(1.0)/qy[j]
                for k in range(nt):
                    for i in range(nx):
                        qty[k][j]+=q[i][j]*qyjd*vtx[k][i]
                    stop+=qy[j]*qty[k][j]*log(qty[k][j]/qtym[k][j])
        # Converged for this slope: compute distortion dv and rate R.
        ssum=longdouble(0.0)
        dv=longdouble(0.0)
        for i in range(nx):
            ssum2=longdouble(0.0)
            if qx[i]!=0.0:
                qxid=longdouble(1.0)/qx[i]
            for k in range(nt):
                ssexp=longdouble(0.0)
                for j in range(ny):
                    if qx[i]!=0.0:
                        ssexp+=q[i][j]*qxid*D[i,t[2][k,j]]
                    else:
                        ssexp+=qy[j]*D[i,t[2][k,j]]
                    dv+=q[i][j]*vtx[k][i]*D[i,t[2][k,j]]
                ssum2+=c[i][k]*exp(ds[s]*ssexp)
            ssum+=qx[i]*log(ssum2)
        R=ds[s]*dv-ssum
        print dv,R,n
def readinputfile(inputfile):
    """Read a whitespace-separated numeric matrix from *inputfile*.

    Returns an (nx, ny) numpy array of long doubles, where nx is the
    number of lines and ny the number of columns of the first line.

    Fix: use open() in a context manager instead of the legacy file()
    builtin, which also closed the previously leaked file handle.
    """
    with open(inputfile) as fh:
        a = [line.split() for line in fh]
    nx = len(a)  # Number of lines
    ny = len(a[0])  # Number of columns
    q = zeros((nx, ny), dtype='longdouble')
    for i in range(nx):
        for j in range(ny):
            q[i][j] = a[i][j]
    return(q)
def main(inputfile="q.txt"):
    """Load the joint distribution from *inputfile* when it exists,
    otherwise fall back to a built-in 2x2 example, then run the
    Blahut-Arimoto iteration on it."""
    if isfile(inputfile):
        joint = readinputfile(inputfile)
    else:
        # Default 2x2 joint probability matrix.
        joint = array([[0.3,0.2],[0.24,0.26]],dtype='longdouble')
    blahut_arimoto(joint)
# Optional CLI argument: path to the joint-distribution file.
if __name__=="__main__":
    if len(argv)>1:
        main(argv[1])
    else:
        main()
| isloux/Shannon | python/rbawz2.py | Python | bsd-3-clause | 3,690 |
"""
Запуск через командную строку
$ python "10_exceptions in requests.py" https://github.com
"""
import requests
import sys
# The URL to fetch is taken from the first command-line argument.
url = sys.argv[1]
try:
    response = requests.get(url, timeout=30)
    # Raises requests.HTTPError for 4xx/5xx status codes.
    response.raise_for_status()
except requests.Timeout:
    print("ошибка timeout, url:", url)
except requests.HTTPError as err:
    code = err.response.status_code
    # Bug fix: the placeholders were written as (0), (1), so .format()
    # never substituted the url and status code into the message.
    print("ошибка url: {0}, {1}".format(url, code))
except requests.RequestException:
    print("ошибка скачивания url:", url)
else:
    print(response)
# url = "https://github.com"
# response = requests.get(url)
# print(response)
| Foxfanmedium/python_training | OnlineCoursera/mail_ru/Python_1/Week_3/10_exceptions in requests.py | Python | apache-2.0 | 663 |
"""SCons.Tool.mslink
Tool-specific initialization for the Microsoft linker.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/mslink.py 2009/09/04 16:33:07 david"
import os.path
import SCons.Action
import SCons.Defaults
import SCons.Errors
import SCons.Platform.win32
import SCons.Tool
import SCons.Tool.msvc
import SCons.Tool.msvs
import SCons.Util
from MSCommon import merge_default_version, msvs_exists
def pdbGenerator(env, target, source, for_signature):
    """Return the /PDB and /DEBUG link flags when the first target node
    carries a program-database attribute; otherwise return None."""
    try:
        pdb_node = target[0].attributes.pdb
    except (AttributeError, IndexError):
        return None
    return ['/PDB:%s' % pdb_node, '/DEBUG']
def _dllTargets(target, source, env, for_signature, paramtp):
listCmd = []
dll = env.FindIxes(target, '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp)
if dll: listCmd.append("/out:%s"%dll.get_string(for_signature))
implib = env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX')
if implib: listCmd.append("/implib:%s"%implib.get_string(for_signature))
return listCmd
def _dllSources(target, source, env, for_signature, paramtp):
    """Build the linker source arguments, turning a .def file among the
    sources into a /def: flag and passing everything else through."""
    listCmd = []
    deffile = env.FindIxes(source, "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX")
    for src in source:
        # Check explicitly for a non-None deffile so that the __cmp__
        # method of the base SCons.Util.Proxy class used for some Node
        # proxies doesn't try to use a non-existent __dict__ attribute.
        if deffile and src == deffile:
            # Treat this source as a .def file.
            listCmd.append("/def:%s" % src.get_string(for_signature))
        else:
            # Just treat it as a generic source file.
            listCmd.append(src)
    return listCmd
def windowsShlinkTargets(target, source, env, for_signature):
    """$_SHLINK_TARGETS generator: shared-library target flags."""
    return _dllTargets(target, source, env, for_signature, 'SHLIB')
def windowsShlinkSources(target, source, env, for_signature):
    """$_SHLINK_SOURCES generator: shared-library source arguments."""
    return _dllSources(target, source, env, for_signature, 'SHLIB')
def _windowsLdmodTargets(target, source, env, for_signature):
    """Get targets for loadable modules."""
    return _dllTargets(target, source, env, for_signature, 'LDMODULE')
def _windowsLdmodSources(target, source, env, for_signature):
    """Get sources for loadable modules."""
    return _dllSources(target, source, env, for_signature, 'LDMODULE')
def _dllEmitter(target, source, env, paramtp):
    """Common implementation of dll emitter.

    Adds the implicit extra targets/sources of a DLL link: an optional
    .def source, the MSVC 8+ .manifest, the PDB, the import library and
    the .exp export file. *paramtp* is 'SHLIB' or 'LDMODULE' and selects
    which prefix/suffix variables are consulted.
    """
    SCons.Tool.msvc.validate_vars(env)
    extratargets = []
    extrasources = []
    dll = env.FindIxes(target, '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp)
    no_import_lib = env.get('no_import_lib', 0)
    if not dll:
        raise SCons.Errors.UserError, 'A shared library should have exactly one target with the suffix: %s' % env.subst('$%sSUFFIX' % paramtp)
    insert_def = env.subst("$WINDOWS_INSERT_DEF")
    if not insert_def in ['', '0', 0] and \
       not env.FindIxes(source, "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX"):
        # append a def file to the list of sources
        extrasources.append(
            env.ReplaceIxes(dll,
                            '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
                            "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX"))
    version_num, suite = SCons.Tool.msvs.msvs_parse_version(env.get('MSVS_VERSION', '6.0'))
    if version_num >= 8.0 and env.get('WINDOWS_INSERT_MANIFEST', 0):
        # MSVC 8 automatically generates .manifest files that must be installed
        extratargets.append(
            env.ReplaceIxes(dll,
                            '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
                            "WINDOWSSHLIBMANIFESTPREFIX", "WINDOWSSHLIBMANIFESTSUFFIX"))
    if env.has_key('PDB') and env['PDB']:
        pdb = env.arg2nodes('$PDB', target=target, source=source)[0]
        extratargets.append(pdb)
        # Remember the PDB node on the primary target so pdbGenerator
        # can later emit the matching /PDB: flag.
        target[0].attributes.pdb = pdb
    if not no_import_lib and \
       not env.FindIxes(target, "LIBPREFIX", "LIBSUFFIX"):
        # Append an import library to the list of targets.
        extratargets.append(
            env.ReplaceIxes(dll,
                            '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
                            "LIBPREFIX", "LIBSUFFIX"))
        # and .exp file is created if there are exports from a DLL
        extratargets.append(
            env.ReplaceIxes(dll,
                            '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
                            "WINDOWSEXPPREFIX", "WINDOWSEXPSUFFIX"))
    return (target+extratargets, source+extrasources)
def windowsLibEmitter(target, source, env):
    """Emitter for shared libraries (delegates to _dllEmitter with SHLIB)."""
    return _dllEmitter(target, source, env, 'SHLIB')
def ldmodEmitter(target, source, env):
    """Emitter for loadable modules.
    Loadable modules are identical to shared libraries on Windows, but building
    them is subject to different parameters (LDMODULE*).
    """
    return _dllEmitter(target, source, env, 'LDMODULE')
def prog_emitter(target, source, env):
    """Emitter for programs: adds the MSVC 8+ .manifest and the PDB as
    extra targets alongside the executable."""
    SCons.Tool.msvc.validate_vars(env)
    extratargets = []
    exe = env.FindIxes(target, "PROGPREFIX", "PROGSUFFIX")
    if not exe:
        raise SCons.Errors.UserError, "An executable should have exactly one target with the suffix: %s" % env.subst("$PROGSUFFIX")
    version_num, suite = SCons.Tool.msvs.msvs_parse_version(env.get('MSVS_VERSION', '6.0'))
    if version_num >= 8.0 and env.get('WINDOWS_INSERT_MANIFEST', 0):
        # MSVC 8 automatically generates .manifest files that have to be installed
        extratargets.append(
            env.ReplaceIxes(exe,
                            "PROGPREFIX", "PROGSUFFIX",
                            "WINDOWSPROGMANIFESTPREFIX", "WINDOWSPROGMANIFESTSUFFIX"))
    if env.has_key('PDB') and env['PDB']:
        pdb = env.arg2nodes('$PDB', target=target, source=source)[0]
        extratargets.append(pdb)
        # Expose the PDB node for pdbGenerator (/PDB: flag).
        target[0].attributes.pdb = pdb
    return (target+extratargets,source)
def RegServerFunc(target, source, env):
if env.has_key('register') and env['register']:
ret = regServerAction([target[0]], [source[0]], env)
if ret:
raise SCons.Errors.UserError, "Unable to register %s" % target[0]
else:
print "Registered %s sucessfully" % target[0]
return ret
return 0
# Action that invokes regsvr32 on the freshly linked DLL ($REGSVRCOM).
regServerAction = SCons.Action.Action("$REGSVRCOM", "$REGSVRCOMSTR")
# Wrapper that only registers when env['register'] is set (no command echo).
regServerCheck = SCons.Action.Action(RegServerFunc, None)
# Link a shared library, then (optionally) register it as a COM server.
shlibLinkAction = SCons.Action.Action('${TEMPFILE("$SHLINK $SHLINKFLAGS $_SHLINK_TARGETS $_LIBDIRFLAGS $_LIBFLAGS $_PDB $_SHLINK_SOURCES")}')
compositeShLinkAction = shlibLinkAction + regServerCheck
# Same pair for loadable modules, driven by the LDMODULE* variables.
ldmodLinkAction = SCons.Action.Action('${TEMPFILE("$LDMODULE $LDMODULEFLAGS $_LDMODULE_TARGETS $_LIBDIRFLAGS $_LIBFLAGS $_PDB $_LDMODULE_SOURCES")}')
compositeLdmodAction = ldmodLinkAction + regServerCheck
def generate(env):
    """Add Builders and construction variables for the Microsoft linker
    (link.exe) to an Environment."""
    SCons.Tool.createSharedLibBuilder(env)
    SCons.Tool.createProgBuilder(env)
    # Shared libraries: reuse $LINK with /dll plus DLL-specific
    # target/source expansions and optional COM-server registration.
    env['SHLINK'] = '$LINK'
    env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS /dll')
    env['_SHLINK_TARGETS'] = windowsShlinkTargets
    env['_SHLINK_SOURCES'] = windowsShlinkSources
    env['SHLINKCOM'] = compositeShLinkAction
    env.Append(SHLIBEMITTER = [windowsLibEmitter])
    env['LINK'] = 'link'
    env['LINKFLAGS'] = SCons.Util.CLVar('/nologo')
    env['_PDB'] = pdbGenerator
    env['LINKCOM'] = '${TEMPFILE("$LINK $LINKFLAGS /OUT:$TARGET.windows $_LIBDIRFLAGS $_LIBFLAGS $_PDB $SOURCES.windows")}'
    env.Append(PROGEMITTER = [prog_emitter])
    env['LIBDIRPREFIX']='/LIBPATH:'
    env['LIBDIRSUFFIX']=''
    env['LIBLINKPREFIX']=''
    env['LIBLINKSUFFIX']='$LIBSUFFIX'
    # Legacy WIN32* names are kept; the WINDOWS* variables alias them.
    env['WIN32DEFPREFIX'] = ''
    env['WIN32DEFSUFFIX'] = '.def'
    env['WIN32_INSERT_DEF'] = 0
    env['WINDOWSDEFPREFIX'] = '${WIN32DEFPREFIX}'
    env['WINDOWSDEFSUFFIX'] = '${WIN32DEFSUFFIX}'
    env['WINDOWS_INSERT_DEF'] = '${WIN32_INSERT_DEF}'
    env['WIN32EXPPREFIX'] = ''
    env['WIN32EXPSUFFIX'] = '.exp'
    env['WINDOWSEXPPREFIX'] = '${WIN32EXPPREFIX}'
    env['WINDOWSEXPSUFFIX'] = '${WIN32EXPSUFFIX}'
    env['WINDOWSSHLIBMANIFESTPREFIX'] = ''
    env['WINDOWSSHLIBMANIFESTSUFFIX'] = '${SHLIBSUFFIX}.manifest'
    env['WINDOWSPROGMANIFESTPREFIX'] = ''
    env['WINDOWSPROGMANIFESTSUFFIX'] = '${PROGSUFFIX}.manifest'
    # COM-server registration via regsvr32 (opt-in through env['register']).
    env['REGSVRACTION'] = regServerCheck
    env['REGSVR'] = os.path.join(SCons.Platform.win32.get_system_root(),'System32','regsvr32')
    env['REGSVRFLAGS'] = '/s '
    env['REGSVRCOM'] = '$REGSVR $REGSVRFLAGS ${TARGET.windows}'
    # Set-up ms tools paths for default version
    merge_default_version(env)
    # Loadable modules are on Windows the same as shared libraries, but they
    # are subject to different build parameters (LDMODULE* variables).
    # Therefore LDMODULE* variables correspond as much as possible to
    # SHLINK*/SHLIB* ones.
    SCons.Tool.createLoadableModuleBuilder(env)
    env['LDMODULE'] = '$SHLINK'
    env['LDMODULEPREFIX'] = '$SHLIBPREFIX'
    env['LDMODULESUFFIX'] = '$SHLIBSUFFIX'
    env['LDMODULEFLAGS'] = '$SHLINKFLAGS'
    env['_LDMODULE_TARGETS'] = _windowsLdmodTargets
    env['_LDMODULE_SOURCES'] = _windowsLdmodSources
    env['LDMODULEEMITTER'] = [ldmodEmitter]
    env['LDMODULECOM'] = compositeLdmodAction
def exists(env):
    """Tool availability check: True when an MSVS installation is found."""
    return msvs_exists()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| cournape/numscons | numscons/scons-local/scons-local-1.2.0/SCons/Tool/mslink.py | Python | bsd-3-clause | 10,711 |
"""empty message
Revision ID: f3c0682a08dd
Revises: 83c5499c55ed
Create Date: 2016-05-24 07:57:32.946000
"""
# revision identifiers, used by Alembic.
revision = 'f3c0682a08dd'
down_revision = '83c5499c55ed'
from alembic import op
import sqlalchemy as sa
import geoalchemy2
from sqlalchemy_utils import URLType
from sqlalchemy.dialects import postgresql
def upgrade():
    """Apply this revision: relax Activity/Athlete id columns and index the
    activity start point for spatial queries."""
    ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('Activity', 'id',
               existing_type=sa.INTEGER(),
               nullable=True,
               existing_server_default=sa.text(u'nextval(\'"Activity_id_seq"\'::regclass)'))
    op.create_index(op.f('ix_Activity_act_start_point'), 'Activity', ['act_start_point'], unique=False)
    op.alter_column('Athlete', 'id',
               existing_type=sa.INTEGER(),
               nullable=True,
               existing_server_default=sa.text(u'nextval(\'"Athlete_id_seq"\'::regclass)'))
    # NOTE(review): the triple-quoted block below is a no-op string literal,
    # apparently used to disable these operations without deleting them.
    '''
    op.drop_index('idx_Segment_end_point', table_name='Segment')
    op.drop_index('idx_Segment_start_point', table_name='Segment')
    op.drop_index('ix_Segment_act_type', table_name='Segment')
    op.drop_index('ix_Segment_cat', table_name='Segment')
    op.drop_index('ix_Segment_date_created', table_name='Segment')
    op.drop_index('ix_Segment_elev_gain', table_name='Segment')
    op.alter_column('Stream', 'act_id',
               existing_type=sa.INTEGER(),
               nullable=True)
    op.create_index(op.f('ix_Stream_act_id'), 'Stream', ['act_id'], unique=False)
    op.create_index(op.f('ix_Stream_point'), 'Stream', ['point'], unique=False)
    op.drop_index('idx_Stream_point', table_name='Stream')'''
    ### end Alembic commands ###
def downgrade():
    """Revert this revision (only the Segment index re-creation is active;
    the rest is disabled via no-op string literals)."""
    ### commands auto generated by Alembic - please adjust! ###
    '''
    op.create_index('idx_Stream_point', 'Stream', ['point'], unique=False)
    op.drop_index(op.f('ix_Stream_point'), table_name='Stream')
    op.drop_index(op.f('ix_Stream_act_id'), table_name='Stream')
    op.alter_column('Stream', 'act_id',
               existing_type=sa.INTEGER(),
               nullable=False)
    '''
    # NOTE(review): both ix_ and idx_ variants of the Segment start/end point
    # indexes are recreated here while the matching drops in upgrade() are
    # commented out -- verify the up/down symmetry is intended.
    op.create_index('ix_Segment_start_point', 'Segment', ['start_point'], unique=False)
    op.create_index('ix_Segment_end_point', 'Segment', ['end_point'], unique=False)
    op.create_index('ix_Segment_elev_gain', 'Segment', ['elev_gain'], unique=False)
    op.create_index('ix_Segment_date_created', 'Segment', ['date_created'], unique=False)
    op.create_index('ix_Segment_cat', 'Segment', ['cat'], unique=False)
    op.create_index('ix_Segment_act_type', 'Segment', ['act_type'], unique=False)
    op.create_index('idx_Segment_start_point', 'Segment', ['start_point'], unique=False)
    op.create_index('idx_Segment_end_point', 'Segment', ['end_point'], unique=False)
    '''
    op.alter_column('Athlete', 'id',
               existing_type=sa.INTEGER(),
               nullable=False,
               existing_server_default=sa.text(u'nextval(\'"Athlete_id_seq"\'::regclass)'))
    op.drop_index(op.f('ix_Activity_act_start_point'), table_name='Activity')
    op.alter_column('Activity', 'id',
               existing_type=sa.INTEGER(),
               nullable=False,
               existing_server_default=sa.text(u'nextval(\'"Activity_id_seq"\'::regclass)'))
    op.create_table('spatial_ref_sys',
    sa.Column('srid', sa.INTEGER(), autoincrement=False, nullable=False),
    sa.Column('auth_name', sa.VARCHAR(length=256), autoincrement=False, nullable=True),
    sa.Column('auth_srid', sa.INTEGER(), autoincrement=False, nullable=True),
    sa.Column('srtext', sa.VARCHAR(length=2048), autoincrement=False, nullable=True),
    sa.Column('proj4text', sa.VARCHAR(length=2048), autoincrement=False, nullable=True),
    sa.PrimaryKeyConstraint('srid', name=u'spatial_ref_sys_pkey')
    )
    op.create_table('Stream_LineString',
    sa.Column('ath_id', sa.INTEGER(), autoincrement=False, nullable=True),
    sa.Column('act_id', sa.INTEGER(), autoincrement=False, nullable=False),
    sa.Column('act_name', sa.VARCHAR(length=200), autoincrement=False, nullable=True),
    sa.Column('act_type', sa.VARCHAR(length=20), autoincrement=False, nullable=True),
    sa.Column('linestring', geoalchemy2.types.Geometry(), autoincrement=False, nullable=True),
    sa.ForeignKeyConstraint(['act_id'], [u'Activity.act_id'], name=u'Act id fk'),
    sa.PrimaryKeyConstraint('act_id', name=u'act_id_pk')
    )
    op.create_table('Stream_HeatPoint',
    sa.Column('ath_id', sa.INTEGER(), autoincrement=False, nullable=True),
    sa.Column('point', geoalchemy2.types.Geometry(), autoincrement=False, nullable=True),
    sa.Column('density', postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
    sa.Column('speed', postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
    sa.Column('grade', postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
    sa.Column('power', postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
    sa.Column('hr', postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
    sa.Column('cadence', postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
    sa.ForeignKeyConstraint(['ath_id'], [u'Athlete.ath_id'], name=u'ath_id FK')
    )
    '''
### end Alembic commands ### | ryanbaumann/athletedataviz | migrations/versions/f3c0682a08dd_.py | Python | mit | 5,384 |
import CatalogGardenItem
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import TTLocalizer
from otp.otpbase import OTPLocalizer
from direct.interval.IntervalGlobal import *
from toontown.estate import GardenGlobals
class CatalogToonStatueItem(CatalogGardenItem.CatalogGardenItem):
    """Catalog entry for a garden toon statue.

    A statue is customizable over a range of poses: ``gardenIndex`` selects
    the current pose, and [startPoseIndex, endPoseIndex] is the inclusive
    range offered during customization.
    """
    # Statue preview object kept alive while a catalog picture is displayed.
    pictureToonStatue = None

    def makeNewItem(self, itemIndex = 105, count = 1, tagCode = 1, endPoseIndex = 108):
        self.startPoseIndex = itemIndex
        self.endPoseIndex = endPoseIndex
        CatalogGardenItem.CatalogGardenItem.makeNewItem(self, itemIndex, count, tagCode)

    def needsCustomize(self):
        # Customization applies only when more than one pose is available.
        return self.endPoseIndex - self.startPoseIndex > 0

    def getPicture(self, avatar):
        # Build a stone copy of the local toon posed per this item's index.
        from toontown.estate import DistributedToonStatuary
        toonStatuary = DistributedToonStatuary.DistributedToonStatuary(None)
        toonStatuary.setupStoneToon(base.localAvatar.style)
        toonStatuary.poseToonFromSpecialsIndex(self.gardenIndex)
        toonStatuary.toon.setZ(0)
        model, ival = self.makeFrameModel(toonStatuary.toon, 1)
        self.pictureToonStatue = toonStatuary
        self.hasPicture = True
        return (model, ival)

    def cleanupPicture(self):
        self.pictureToonStatue.deleteToon()
        self.pictureToonStatue = None
        CatalogGardenItem.CatalogGardenItem.cleanupPicture(self)
        return

    def decodeDatagram(self, di, versionNumber, store):
        CatalogGardenItem.CatalogGardenItem.decodeDatagram(self, di, versionNumber, store)
        # The pose range travels as two extra uint8s after the base item data.
        self.startPoseIndex = di.getUint8()
        self.endPoseIndex = di.getUint8()

    def encodeDatagram(self, dg, store):
        CatalogGardenItem.CatalogGardenItem.encodeDatagram(self, dg, store)
        dg.addUint8(self.startPoseIndex)
        dg.addUint8(self.endPoseIndex)

    def compareTo(self, other):
        # 0 (match) when our pose falls inside the customization range.
        # NOTE(review): 'other' is never consulted -- confirm intentional.
        if self.gardenIndex >= self.startPoseIndex and self.gardenIndex <= self.endPoseIndex:
            return 0
        return 1

    def getAllToonStatues(self):
        # One single-pose item per pose in the inclusive range.
        self.statueList = []
        for index in range(self.startPoseIndex, self.endPoseIndex + 1):
            self.statueList.append(CatalogToonStatueItem(index, 1, endPoseIndex=index))
        return self.statueList

    def deleteAllToonStatues(self):
        while len(self.statueList):
            item = self.statueList[0]
            if item.pictureToonStatue:
                item.pictureToonStatue.deleteToon()
            self.statueList.remove(item)
| ksmit799/Toontown-Source | toontown/catalog/CatalogToonStatueItem.py | Python | mit | 2,471 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import sys
# Simulate rebalancing 4096 vBuckets as nodes are added one at a time
# (up to 16 nodes), printing the bucket ownership map after each addition.
nodes, vBucketsNR = 1, 4096
vBuckets = [nodes] * vBucketsNR
while nodes <= 16:
    print min(vBucketsNR * 2, 256) * "-"
    nodes += 1
    # Buckets the new node should own after rebalancing.
    newAdd = [nodes] * (vBucketsNR / nodes)
    canBeAssgined = []
    for i in range(1, nodes):
        # How many buckets each existing node must give up
        # (its old fair share minus its new fair share).
        canBeAssgined.append(vBucketsNR / (nodes - 1) - (vBucketsNR / nodes))
    s = len(vBuckets)
    # for balance the bucket distribution we iterate all buckets
    # randomly. actually start from head or tail
    seq = range(0, s) if nodes % 2 == 0 else range(s - 1, 0, -1)
    for index in seq:
        if len(newAdd) == 0:
            break
        if canBeAssgined[vBuckets[index] - 1] != 0:
            canBeAssgined[vBuckets[index] - 1] = canBeAssgined[vBuckets[index] - 1] - 1
            vBuckets[index] = newAdd.pop()
    for i in range(0, len(vBuckets)):
        sys.stdout.write("%d " % vBuckets[i])
    print ""
    # Per-node bucket counts after this round (shadows builtin 'sum').
    sum = []
    for i in range(1, nodes + 1):
        sum.append(vBuckets.count(i))
    print "After add indexed node %d => %s" % (len(sum), sum)
| gugemichael/kvdb | tools/vbuckets.py | Python | apache-2.0 | 1,123 |
from .. import parser, LegacyItemAccess, CommandParser
from insights.specs import Specs
@parser(Specs.sestatus)
class SEStatus(LegacyItemAccess, CommandParser):
    """Parse the output of the ``sestatus -b`` command.

    Header lines (``Key: value``) are flattened into snake_case keys; every
    line under ``Policy booleans:`` becomes an entry in the
    ``policy_booleans`` dict mapping the boolean's name to True/False.

    Attributes:
        data (dict): parsed result, e.g.::

            {
                "loaded_policy_name": "targeted",
                "current_mode": "enforcing",
                "mode_from_config_file": "enforcing",
                "policy_mls_status": "enabled",
                "policy_deny_unknown_status": "allowed",
                "max_kernel_policy_version": "30",
                "selinux_status": "enforcing",
                "policy_booleans": {"abrt_anon_write": False, ...}
            }
    """

    def parse_content(self, content):
        # Pre-populate with 'disabled' defaults so a host with SELinux off
        # still yields a complete, predictable structure.
        result = {
            'loaded_policy_name': None,
            'current_mode': 'disabled',
            'mode_from_config_file': 'disabled',
            'policy_mls_status': 'disabled',
            'policy_deny_unknown_status': 'disabled',
            'max_kernel_policy_version': None,
            'policy_booleans': {},
        }
        for line in content:
            if ':' in line:
                # "Key: value" header line; the bare "Policy booleans:"
                # section marker itself is skipped.
                if 'Policy booleans' not in line:
                    key, value = line.split(':', 1)
                    result[key.strip().lower().replace(' ', '_')] = value.strip()
            elif line.strip():
                # Boolean rows look like "<name>   on|off".
                name, state = line.split()
                result['policy_booleans'][name] = state == 'on'
        # 'SELinux status' only appears in sestatus output when SELinux is
        # disabled; add it for enabled hosts as a convenience.
        if result['current_mode'] != 'disabled' and 'selinux_status' not in result:
            result['selinux_status'] = result['current_mode']
        self.data = result
| RedHatInsights/insights-core | insights/parsers/sestatus.py | Python | apache-2.0 | 2,984 |
#!/usr/bin/env python
# coding: utf-8
"""
Created On Jul 2, 2014
@author: jwang
"""
import logging
# from pymongo import DESCENDING
from weibonews.utils.decorators import perf_logging
from weibonews.db.utils import cursor_to_array
_LOGGER = logging.getLogger('weibonews.wechat')
# Database handle and id-generator; presumably injected by the application
# at startup (they stay None until then) -- TODO confirm against callers.
DB = None
IDS = None
# Collections and the fields that should carry indexes.
INDEXES = {
    'vote_records': ['vote_id', 'group_id', 'sender'],
}
PAGE_SIZE = 100
# Mongo projection that strips the raw ObjectId from query results.
NO_RAW_ID = {
    '_id': 0
}
@perf_logging
def next_id(field):
    """
    Get the next incrementing id for *field* from the IDS generator.
    """
    return IDS.next_id(field)
@perf_logging
def update_vote_task(cond, update, upsert=False):
    '''Update (optionally upsert) vote tasks matching ``cond``.'''
    DB.vote_tasks.update(cond, update, upsert=upsert)
@perf_logging
def get_vote_task(cond):
    '''Return one vote task matching ``cond``, or None.'''
    return DB.vote_tasks.find_one(cond)
@perf_logging
def get_vote_tasks(cond):
    '''Return all vote tasks matching ``cond`` as a list.'''
    cursor = DB.vote_tasks.find(cond)
    return cursor_to_array(cursor)
@perf_logging
def update_vote_record(cond, update, upsert=False):
    '''Update (optionally upsert) vote records matching ``cond``.'''
    DB.vote_records.update(cond, update, upsert=upsert)
@perf_logging
def get_vote_records(cond):
    '''Return all vote records matching ``cond`` as a list.'''
    cursor = DB.vote_records.find(cond)
    return cursor_to_array(cursor)
@perf_logging
def update_vote_group(cond, update, upsert=False):
    '''Update (optionally upsert) vote groups matching ``cond``.'''
    DB.vote_groups.update(cond, update, upsert=upsert)
@perf_logging
def get_vote_by_group(group_id):
    '''Return the vote_id bound to ``group_id``, or None if unbound.'''
    vote_group = DB.vote_groups.find_one({'group_id': group_id})
    if vote_group and 'vote_id' in vote_group:
        return vote_group['vote_id']
    else:
        return None
@perf_logging
def remove_vote_group(group_id):
    '''Delete the vote-group binding for ``group_id``.'''
    DB.vote_groups.remove({'group_id': group_id})
@perf_logging
def update_group_config(cond, update, upsert=False):
    '''Update (optionally upsert) group configs matching ``cond``.'''
    DB.group_configs.update(cond, update, upsert=upsert)
@perf_logging
def get_group_config(cond):
    '''Return one group config matching ``cond``, or None.'''
    return DB.group_configs.find_one(cond)
| vispeal/VoteHelper | wechat/wechat/db/wechatdb.py | Python | gpl-2.0 | 2,009 |
from django.template import Library, Node, Variable
from converter.api import get_dimensions
from documents.conf.settings import PRINT_SIZE
register = Library()
class GetImageSizeNode(Node):
    """Template node that injects a document's print dimensions into the
    rendering context (document_width/height plus width:height aspect)."""

    def __init__(self, document):
        # Unresolved template-variable expression naming the document.
        self.document = document

    def render(self, context):
        document = Variable(self.document).resolve(context)
        width, height = get_dimensions(document)
        context[u'document_width'], context['document_height'] = width, height
        context[u'document_aspect'] = float(width) / float(height)
        # Context-mutating tags render as the empty string.
        return u''
@register.tag
def get_document_size(parser, token):
    """Usage: ``{% get_document_size document %}`` -- stores the document's
    width, height and aspect ratio in the template context."""
    tag_name, arg = token.contents.split(None, 1)
    return GetImageSizeNode(document=arg)
| appsembler/mayan_appsembler | apps/documents/templatetags/printing_tags.py | Python | gpl-3.0 | 716 |
"""
Geocoding API Google
import geomapgoogle
geomapgoogle.geocode('San Francisco')
geomapgoogle.regeocode(latlng='40.714224,-73.961452')
"""
import urllib, json
GEOCODE_BASE_URL = 'http://maps.google.com/maps/api/geocode/json'
def geocode(address, sensor='false', **geo_args):
    """
    Forward-geocode *address* via the Google Geocoding API.

    :param address: free-form address to look up.
    :param sensor: 'true'/'false' flag required by the API.
    :param geo_args: extra query parameters (e.g. region, language).
    :return: JSON string listing the formatted addresses of all matches.
    """
    # Merge into the caller-supplied kwargs instead of rebinding geo_args:
    # the old rebinding silently discarded any extra API parameters.
    geo_args.update({
        'address': address,
        'sensor': sensor
    })
    url = GEOCODE_BASE_URL + '?' + urllib.urlencode(geo_args)
    result = json.load(urllib.urlopen(url))
    return json.dumps([s['formatted_address']
                       for s in result['results']])
def regeocode(latlng, sensor='false', **geo_args):
    """
    Reverse-geocode a "lat,lng" string via the Google Geocoding API.

    :param latlng: coordinates, e.g. '40.714224,-73.961452'.
    :param sensor: 'true'/'false' flag required by the API.
    :param geo_args: extra query parameters (e.g. language, result_type).
    :return: JSON string listing the formatted addresses of all matches.
    """
    # Merge into the caller-supplied kwargs instead of rebinding geo_args:
    # the old rebinding silently discarded any extra API parameters.
    geo_args.update({
        'latlng' : latlng,
        'sensor' : sensor
    })
    url = GEOCODE_BASE_URL + '?' + urllib.urlencode(geo_args)
    result = json.load(urllib.urlopen(url))
    return json.dumps([s['formatted_address']
                       for s in result['results']])
| jolth/gpservi-1.0.1 | Location/geomapgoogle.py | Python | bsd-3-clause | 902 |
"""Provide the _dbm module as a dbm submodule."""
from _dbm import *
| Microvellum/Fluid-Designer | win64-vc/2.78/python/lib/dbm/ndbm.py | Python | gpl-3.0 | 70 |
from math import sqrt
class Vector:
    """An immutable 2D vector supporting equality, addition, scalar
    multiplication, abs (Euclidean norm), truthiness and dot product.

    Attribute assignment is blocked after construction, so instances are
    safe to hash and share.
    """

    def __init__(self, x, y):
        # Bypass our own __setattr__ (which raises) via object.__setattr__.
        super().__setattr__('x', x)
        super().__setattr__('y', y)

    def __eq__(self, other):
        return (self.x, self.y) == (other.x, other.y)

    def __hash__(self):
        # Defined alongside __eq__ so equal vectors hash alike; the class
        # is immutable, so it is safe to use as a dict key / set member.
        return hash((self.x, self.y))

    def __repr__(self):
        return 'Vector({}, {})'.format(self.x, self.y)

    def __str__(self):
        return '({}, {})'.format(self.x, self.y)

    def __add__(self, other):
        return Vector(self.x + other.x, self.y + other.y)

    def __mul__(self, other):
        return Vector(self.x * other, self.y * other)

    # Make scalar * vector work the same as vector * scalar.
    __rmul__ = __mul__

    def __abs__(self):
        return sqrt(pow(self.x, 2) + pow(self.y, 2))

    def __bool__(self):
        return not self.x == self.y == 0

    def dot(self, other):
        """Return the dot product with *other*."""
        return self.x * other.x + self.y * other.y

    def __setattr__(self, key, value):
        msg = "'%s' does not support attribute assignment" % (self.__class__)
        raise AttributeError(msg)
def main():
    """Smoke-test Vector: construction, repr/str, equality, arithmetic,
    abs, truthiness and dot product."""
    assert Vector(2, 4)
    assert (Vector(2, 4).x, Vector(2, 4).y) == (2, 4)
    assert repr(Vector(2, 4)) == 'Vector(2, 4)'
    assert str(Vector(2, 4)) == '(2, 4)'
    assert Vector(2, 4) == Vector(2, 4)
    assert Vector(2, 4) + Vector(1, 2) == Vector(3, 6)
    assert Vector(2, 4) * 2 == Vector(4, 8)
    assert abs(Vector(3, 4)) == 5.0
    assert bool(Vector(0, 0)) == False
    assert bool(Vector(0, 1)) == bool(Vector(1, 0)) == bool(Vector(1, 1))
    assert Vector(2, 2).dot(Vector(3, 4)) == 14
if __name__ == '__main__':
main() | g-sobral/google-python-exercises | basic/vector.py | Python | apache-2.0 | 1,601 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
import sys
import logging
def print_error(msg):
    """
    Write *msg* to the standard error stream, followed by a newline.

    :param msg: The message to print.
    """
    sys.stderr.write('{}\n'.format(msg))
def decode_cli_arg(arg):
    """
    Turn a bytestring provided by `argparse` into unicode.

    :param arg: The bytestring to decode.
    :return: The argument as a unicode object.
    :raises ValueError: If arg is None.
    """
    if arg is None:
        raise ValueError('Argument cannot be None')
    # Python 3's argparse already yields decoded str objects; only the
    # Python 2 path needs an explicit filesystem-encoding decode.
    return (arg if sys.version_info.major == 3
            else arg.decode(sys.getfilesystemencoding()))
def log_level_from_vebosity(verbosity):
    """
    Get the `logging` module log level from a verbosity.

    Note: the public name keeps its historical spelling for compatibility.

    :param verbosity: The number of times the `-v` option was specified.
    :return: The corresponding log level.
    """
    # 0 -> WARNING, 1 -> INFO, anything else (2+) -> DEBUG.
    return {0: logging.WARNING, 1: logging.INFO}.get(verbosity, logging.DEBUG)
| gebn/Stitcher | stitcher/util.py | Python | mit | 1,046 |
# Copyright 2014-2016 Ivan Kravets <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from copy import deepcopy
from os import environ, getenv
from os.path import getmtime, isfile, join
from time import time
from lockfile import LockFile
from platformio import __version__, util
from platformio.exception import InvalidSettingName, InvalidSettingValue
# Built-in settings: each entry carries a human-readable description (shown
# by `platformio settings`) and a default value whose Python type drives
# coercion in sanitize_setting().
DEFAULT_SETTINGS = {
    "check_platformio_interval": {
        "description": "Check for the new PlatformIO interval (days)",
        "value": 3
    },
    "check_platforms_interval": {
        "description": "Check for the platform updates interval (days)",
        "value": 7
    },
    "check_libraries_interval": {
        "description": "Check for the library updates interval (days)",
        "value": 7
    },
    "auto_update_platforms": {
        "description": "Automatically update platforms (Yes/No)",
        "value": False
    },
    "auto_update_libraries": {
        "description": "Automatically update libraries (Yes/No)",
        "value": False
    },
    "enable_telemetry": {
        "description": (
            "Telemetry service <http://docs.platformio.org/en/latest/"
            "userguide/cmd_settings.html?#enable-telemetry> (Yes/No)"),
        "value": True
    },
    "enable_prompts": {
        "description": (
            "Can PlatformIO communicate with you via prompts: "
            "propose to install platforms which aren't installed yet, "
            "paginate over library search results and etc.)? ATTENTION!!! "
            "If you call PlatformIO like subprocess, "
            "please disable prompts to avoid blocking (Yes/No)"),
        "value": True
    }
}
# Process-lifetime variables (never persisted to the state file); the keys
# declared here are the only names set_session_var() accepts.
SESSION_VARS = {
    "command_ctx": None,
    "force_option": False,
    "caller_id": None
}
class State(object):
    """Context manager around the persistent JSON application state file.

    Entering yields the state dict; on exit the file is rewritten only if
    the dict actually changed. With ``lock=True`` a lockfile serializes
    concurrent writers.
    """

    def __init__(self, path=None, lock=False):
        self.path = path
        self.lock = lock
        if not self.path:
            self.path = join(util.get_home_dir(), "appstate.json")
        self._state = {}
        self._prev_state = {}
        self._lockfile = None

    def __enter__(self):
        try:
            self._lock_state_file()
            if isfile(self.path):
                self._state = util.load_json(self.path)
        except ValueError:
            # Corrupt or unparsable state file: start from an empty state.
            self._state = {}
        self._prev_state = deepcopy(self._state)
        return self._state

    def __exit__(self, type_, value, traceback):
        if self._prev_state != self._state:
            with open(self.path, "w") as fp:
                if "dev" in __version__:
                    # Pretty-print in development builds for easier debugging.
                    json.dump(self._state, fp, indent=4)
                else:
                    json.dump(self._state, fp)
        self._unlock_state_file()

    def _lock_state_file(self):
        if not self.lock:
            return
        self._lockfile = LockFile(self.path)
        # Assume a lock older than 10 seconds is stale and break it.
        if (self._lockfile.is_locked() and
                (time() - getmtime(self._lockfile.lock_file)) > 10):
            self._lockfile.break_lock()
        self._lockfile.acquire()

    def _unlock_state_file(self):
        if self._lockfile:
            self._lockfile.release()
def sanitize_setting(name, value):
    """Coerce *value* to the declared type of setting *name*.

    Booleans accept truthy strings ("true"/"yes"/"y"/"1"); integers are
    cast with int(); settings that declare a validator use its result.

    :raises InvalidSettingName: unknown setting name
    :raises InvalidSettingValue: value cannot be coerced or validated
    """
    try:
        spec = DEFAULT_SETTINGS[name]
    except KeyError:
        raise InvalidSettingName(name)
    try:
        if "validator" in spec:
            return spec['validator']()
        default = spec['value']
        if isinstance(default, bool):
            if isinstance(value, bool):
                return value
            return str(value).lower() in ("true", "yes", "y", "1")
        if isinstance(default, int):
            return int(value)
        return value
    except Exception:
        raise InvalidSettingValue(value, name)
def get_state_item(name, default=None):
    """Read a single top-level key from the persistent state."""
    with State() as data:
        return data.get(name, default)
def set_state_item(name, value):
    """Write a single top-level key to the persistent state."""
    with State(lock=True) as data:
        data[name] = value
def get_setting(name):
    """Resolve a setting: CI/--force overrides, then the environment
    variable, then the state file, then the built-in default."""
    if name == "enable_prompts":
        # disable prompts for Continuous Integration systems
        # and when global "--force" option is set
        if any([util.is_ci(), get_session_var("force_option")]):
            return False
    _env_name = "PLATFORMIO_SETTING_%s" % name.upper()
    if _env_name in environ:
        return sanitize_setting(name, getenv(_env_name))
    with State() as data:
        if "settings" in data and name in data['settings']:
            return data['settings'][name]
    return DEFAULT_SETTINGS[name]['value']
def set_setting(name, value):
    """Validate and persist a setting value."""
    with State(lock=True) as data:
        if "settings" not in data:
            data['settings'] = {}
        data['settings'][name] = sanitize_setting(name, value)
def reset_settings():
    """Drop all persisted settings, reverting everything to defaults."""
    with State(lock=True) as data:
        if "settings" in data:
            del data['settings']
def get_session_var(name, default=None):
    """Read a process-lifetime (non-persisted) variable."""
    return SESSION_VARS.get(name, default)
def set_session_var(name, value):
    """Set a process-lifetime variable; the name must be pre-declared."""
    assert name in SESSION_VARS
    SESSION_VARS[name] = value
def is_disabled_progressbar():
    """True when progress bars should be suppressed (prompts disabled or
    PLATFORMIO_DISABLE_PROGRESSBAR explicitly set to "true")."""
    return (not get_setting("enable_prompts") or
            getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true")
| valeros/platformio | platformio/app.py | Python | apache-2.0 | 5,626 |
"""This module contains code from
Think Python by Allen B. Downey
http://thinkpython.com
Copyright 2012 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
from lumpy_demo import *
from swampy.Lumpy import Lumpy
def countdown(n):
    """Count down from n; at blastoff, draw the object diagram and save it."""
    if n <= 0:
        print 'Blastoff!'
        # Capture the state at the deepest recursion point, with all the
        # countdown frames still on the stack.
        lumpy.object_diagram()
        print_diagram(lumpy, 'lumpydemo2.eps')
    else:
        print n
        countdown(n-1)
lumpy = Lumpy()
# make_reference presumably records the baseline state so the diagram only
# shows objects created afterwards -- see the Swampy/Lumpy docs.
lumpy.make_reference()
countdown(2)
| simontakite/sysadmin | pythonscripts/thinkpython/lumpy_demo2.py | Python | gpl-2.0 | 482 |
"""call with DBFILE THRESH1 THRES2.
DBFILE - pickle file of list of passages, whose terminals are pos-tagged
THRESH1 - int, how many appearances a noun needs in order to be count as
a valid target (less than this threshold is ignored).
THRESH2 - float, what is the ratio between scene-evoking instances to not
which will label this target as scene evoker
Output: both prints the targets with num_instances and ratio to stdout,
and saves the targets and labels in a pickle file.
"""
import pickle
import sys
import numpy as np
from scenes import scenes
# CLI arguments: pickle of pos-tagged passages, minimum appearance count,
# minimum scene-evoking ratio (see module docstring).
dbfile = sys.argv[1]
appear_thresh = int(sys.argv[2])
ratio_thresh = float(sys.argv[3])
with open(dbfile, 'rb') as f:
    passages = pickle.load(f)
nouns = scenes.extract_all_nouns(passages)
targets = []
labels = []
for noun, terminals in nouns.items():
    # Number of this noun's occurrences judged scene-evoking.
    evokers = [scenes.is_scene_evoking(x) for x in terminals].count(True)
    print("{}\t{}\t{}".format(noun, evokers / len(terminals),
                              len(terminals)))
    targets.append(noun)
    # Label 1 (scene evoker) only when the noun appears often enough AND
    # its evoking ratio clears the threshold.
    if (len(terminals) >= appear_thresh and
            evokers / len(terminals) >= ratio_thresh):
        labels.append(1)
    else:
        labels.append(0)
# NOTE(review): np_labels is computed but never used or saved -- the pickle
# below stores the plain Python list. Looks like dead code; confirm intent.
np_labels = np.array(labels, dtype=np.int32)
with open(dbfile + '.tl.pickle', 'wb') as f:
    pickle.dump((targets, labels), f)
| borgr/ucca | scenes/get_noun_targets.py | Python | gpl-3.0 | 1,307 |
#! /usr/bin/python
import sys
import string
fout = file('user-likesmovie', 'w')
def writetxn(fout, txn):
    """Write one transaction: each item id followed by a space, then a
    terminating newline (note the trailing space before the newline)."""
    fout.write(''.join('%s ' % item for item in txn))
    fout.write('\n')
def read_mov(f):
    """Parse one Netflix mv_*.txt file.

    The first line is '<movieid>:'; the remaining lines are
    'userid,rating,date'. Returns (movieid, ratings) where each rating is
    a (int uid, int rating, raw date string) tuple.
    """
    line = f.readline()
    movieid = int(line.strip(':\n'))
    # Progress marker on stdout.
    print '<%s>' % movieid
    lines = f.readlines()
    def parse_rating(line):
        # 'userid,rating,date' -> (int uid, int rating, date text)
        x = line.split(',')
        return (int(x[0]), int(x[1]), x[2])
    ratings = map(parse_rating, lines)
    return (movieid, ratings)
# For each movie file, collect the users who rated it above 3 ("likes")
# and emit them as one sorted transaction line.
# NOTE(review): range(1, 17701) covers movies 1..17700, but the Netflix
# dataset has 17770 movie files -- confirm the bound is intentional.
for no in range(1, 17701):
    fmov = open('mv_%07d.txt' % no)
    mvid, ratings = read_mov(fmov)
    uidlist = map(lambda x: x[0], filter(lambda x: x[1]>3, ratings))
    uidlist.sort()
    writetxn(fout, uidlist)
fout.close()
| examachine/bitdrill | scripts/netflix2user-likesmovie.py | Python | agpl-3.0 | 699 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import sys
if (sys.version_info > (3,)):
from urllib.parse import quote
else:
from urllib import quote
from io import StringIO
import os
import json
import jsonpickle
from cairis.test.CairisDaemonTestCase import CairisDaemonTestCase
from cairis.mio.ModelImport import importModelFile
from cairis.tools.JsonConverter import json_deserialize
import os
__author__ = 'Shamal Faily'
class RiskLevelAPITests(CairisDaemonTestCase):
  """Exercises the /api/risk_level endpoints for assets and asset/threat
  pairs, both with and without an explicit environment."""

  @classmethod
  def setUpClass(cls):
    # Load the ACME Water exemplar model once for every test in this class.
    importModelFile(os.environ['CAIRIS_SRC'] + '/../examples/exemplars/ACME_Water/ACME_Water.xml',1,'test')

  def setUp(self):
    self.logger = logging.getLogger(__name__)
    self.existing_asset_name = 'ICT PC'
    self.existing_threat_name = 'Password enumeration'

  def _fetch_level(self, method, url):
    """GET *url* against the test app, decode the response (handling the
    Python 2/3 bytes-vs-text difference), sanity-check the deserialized
    payload, and return the integer risk level.

    Extracted because every test method previously repeated this exact
    fetch/decode/assert sequence verbatim.
    """
    self.logger.info('[%s] URL: %s', method, url)
    rv = self.app.get(url)
    if (sys.version_info > (3,)):
      responseData = rv.data.decode('utf-8')
    else:
      responseData = rv.data
    level = jsonpickle.decode(responseData)
    self.assertIsNotNone(level, 'No results after deserialization')
    self.assertIsInstance(level, int, 'The result is not an integer as expected')
    return level

  def test_get_risk_level(self):
    method = 'test_get_risk_level'
    url = '/api/risk_level/asset/%s?session_id=test' % quote(self.existing_asset_name)
    self.assertEqual(self._fetch_level(method, url), 9)

  def test_get_risk_level_by_environment(self):
    method = 'test_get_risk_level_by_environment'
    # The same asset should report the same level in both environments.
    url = '/api/risk_level/asset/ICT%20Application/environment/Day?session_id=test'
    self.assertEqual(self._fetch_level(method, url), 9)
    url = '/api/risk_level/asset/ICT%20Application/environment/Night?session_id=test'
    self.assertEqual(self._fetch_level(method, url), 9)

  def test_get_risk_threat_level(self):
    method = 'test_get_risk_level'
    url = '/api/risk_level/asset/threat_type/' + quote(self.existing_asset_name) + '/' + quote(self.existing_threat_name) + '?session_id=test'
    self.assertEqual(self._fetch_level(method, url), 9)

  def test_get_risk_threat_level_by_environment(self):
    method = 'test_get_risk_level_by_environment'
    url = '/api/risk_level/asset/threat_type/ICT%20Application/Enumeration/environment/Day?session_id=test'
    self.assertEqual(self._fetch_level(method, url), 9)
    # At night this threat type poses no risk to the asset.
    url = '/api/risk_level/asset/threat_type/ICT%20Application/Enumeration/environment/Night?session_id=test'
    self.assertEqual(self._fetch_level(method, url), 0)
| nathanbjenx/cairis | cairis/test/test_RiskLevelAPI.py | Python | apache-2.0 | 5,044 |
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref
### endblock ClassImports
### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header
### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXGER(gxapi_cy.WrapGER):
    """
    GXGER class.

    Allows access to a Geosoft format error message file. This class
    does not in itself produce an error message, but retrieves a
    selected message from the file, and allows the
    setting of replacement parameters within the message. It
    is up to the user to display or use the message.

    **Note:**

    `GXGER <geosoft.gxapi.GXGER>` message files contain numbered messages that can be used within GXs.
    Following is an example from the file GEOSOFT.`GXGER <geosoft.gxapi.GXGER>`:

    #20008
    ! Invalid password. The product installation has failed.
    #20009
    ! Unable to find INI file: %1
    ! See the documentation for details

    A '#' character in column 1 indicates a message number. The message
    follows on lines that begin with a '!' character. Strings in the message
    may be replaced at run time with values using the `set_string <geosoft.gxapi.GXGER.set_string>`,
    `set_int <geosoft.gxapi.GXGER.set_int>` and `set_double <geosoft.gxapi.GXGER.set_double>` methods. The iGet_GER will return the message
    with strings replaced by their settings. By convention, we recommend
    that you use "%1", "%2", etc. as replacement strings.
    """

    def __init__(self, handle=0):
        # All real work is delegated to the compiled WrapGER class, bound to
        # the per-thread geosoft context.
        super(GXGER, self).__init__(GXContext._get_tls_geo(), handle)

    @classmethod
    def null(cls):
        """
        A null (undefined) instance of `GXGER <geosoft.gxapi.GXGER>`

        :returns: A null `GXGER <geosoft.gxapi.GXGER>`
        :rtype:   GXGER
        """
        return GXGER()

    def is_null(self):
        """
        Check if this is a null (undefined) instance

        :returns: True if this is a null (undefined) instance, False otherwise.
        :rtype:   bool
        """
        # A zero handle marks the null instance created by null().
        return self._internal_handle() == 0

    # Miscellaneous

    @classmethod
    def create(cls, file):
        """
        Opens an ASCII error file to read from.

        :param file:  `GXGER <geosoft.gxapi.GXGER>` file name.
        :type  file:  str

        :returns:     `GXGER <geosoft.gxapi.GXGER>` Object
        :rtype:       GXGER

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The `GXGER <geosoft.gxapi.GXGER>` file may be in the local directory or the GEOSOFT
        directory.
        """
        # Strings crossing into the C API must be encoded to bytes.
        ret_val = gxapi_cy.WrapGER._create(GXContext._get_tls_geo(), file.encode())
        return GXGER(ret_val)

    def get(self, num, message):
        """
        Get a message string.

        :param num:      Message number
        :param message:  Message string returned, replacements filtered
        :type  num:      int
        :type  message:  str_ref

        :returns:        0 if message found
                         1 if no message, passed message remains unchanged
        :rtype:          int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        # message is an out-parameter: the wrapper returns the new value,
        # which is written back into the str_ref holder.
        ret_val, message.value = self._get(num, message.value.encode())
        return ret_val

    def set_int(self, parm, set):
        """
        Set a replacement string value to an int.

        :param parm:  Replacement string (ie. "%1")
        :param set:   Setting
        :type  parm:  str
        :type  set:   int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_int(parm.encode(), set)

    def set_double(self, parm, set):
        """
        Set a replacement string value to a real.

        :param parm:  Replacement string (ie. "%1")
        :param set:   Setting
        :type  parm:  str
        :type  set:   float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_double(parm.encode(), set)

    def set_string(self, parm, set):
        """
        Set a replacement string value.

        :param parm:  Replacement string (ie. "%1")
        :param set:   Setting
        :type  parm:  str
        :type  set:   str

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_string(parm.encode(), set.encode())
### endblock ClassImplementation
### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend
### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer | GeosoftInc/gxpy | geosoft/gxapi/GXGER.py | Python | bsd-2-clause | 5,416 |
# -*- coding: utf-8 -*-
#------------------------------------------------------------------------------
# file: $Id$
# auth: metagriffin <[email protected]>
# date: 2012/05/13
# copy: (C) Copyright 2012-EOT metagriffin -- see LICENSE.txt
#------------------------------------------------------------------------------
# This software is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This software is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#------------------------------------------------------------------------------
'''
The ``pysyncml.items.note`` module defines the abstract interface to a
Note object via the :class:`pysyncml.NoteItem
<pysyncml.items.note.NoteItem>` class.
.. warning::
Be aware that this is NOT an object type defined by the SyncML
specification, but rather is a *de-facto* standard object type.
'''
import os, re
import xml.etree.ElementTree as ET
from .base import Item, Ext
from .. import constants, common
#------------------------------------------------------------------------------
class NoteItem(Item, Ext):
  '''
  The NoteItem is an abstract sub-class of a :class:`pysyncml.Item
  <pysyncml.items.base.Item>` which primarily defines a "Note" as
  having a `name` and a `body`. It also provides implementations of
  the :meth:`dump` and :meth:`load` methods, which support the following
  content-types:

  * text/plain
  * text/x-s4j-sifn, version 1.1

  .. warning::

    The ``text/plain`` content-type does NOT support the `name` attribute,
    and therefore does not get synchronized when using that content-type.
  '''

  #----------------------------------------------------------------------------
  def __init__(self, name=None, body=None, *args, **kw):
    '''
    NoteItem constructor which takes attributes `name` and `body`.
    '''
    super(NoteItem, self).__init__(*args, **kw)
    self.name = name
    self.body = body

  #----------------------------------------------------------------------------
  def dump(self, stream, contentType=None, version=None):
    '''
    Serializes this NoteItem to a byte-stream and writes it to the
    file-like object `stream`. `contentType` and `version` must be one
    of the supported content-types, and if not specified, will default
    to ``text/plain``.
    '''
    if contentType is None or contentType == constants.TYPE_TEXT_PLAIN:
      stream.write(self.body)
      return
    if contentType == constants.TYPE_SIF_NOTE:
      root = ET.Element('note')
      # TODO: check `version`...
      ET.SubElement(root, 'SIFVersion').text = '1.1'
      if self.name is not None:
        ET.SubElement(root, 'Subject').text = self.name
      if self.body is not None:
        ET.SubElement(root, 'Body').text = self.body
      # serialize any SIF extension fields registered on this item
      for name, values in self.extensions.items():
        for value in values:
          ET.SubElement(root, name).text = value
      ET.ElementTree(root).write(stream)
      return
    raise common.InvalidContentType('cannot serialize NoteItem to "%s"' % (contentType,))

  #----------------------------------------------------------------------------
  @classmethod
  def load(cls, stream, contentType=None, version=None):
    '''
    Reverses the effects of the :meth:`dump` method, creating a NoteItem
    from the specified file-like `stream` object.
    '''
    if contentType is None or contentType == constants.TYPE_TEXT_PLAIN:
      data = stream.read()
      name = data.split('\n')[0]
      # todo: localize?!...
      # a leading "title:" or "name:" label (case-insensitive) is stripped
      # from the first line to produce the note name
      name = re.compile(r'^(title|name):\s*', re.IGNORECASE).sub('', name).strip()
      return NoteItem(name=name, body=data)
    if contentType == constants.TYPE_SIF_NOTE:
      data = ET.parse(stream).getroot()
      ret = NoteItem(name=data.findtext('Subject'), body=data.findtext('Body'))
      # every unrecognized element becomes an extension field, preserving
      # round-trip fidelity with dump()
      for child in data:
        if child.tag in ('SIFVersion', 'Subject', 'Body'):
          continue
        ret.addExtension(child.tag, child.text)
      return ret
    raise common.InvalidContentType('cannot de-serialize NoteItem from "%s"' % (contentType,))

  #----------------------------------------------------------------------------
  def __eq__(self, other):
    # fix: previously raised AttributeError when compared against objects
    # (including None) that lack `name`/`body`; defer to the other operand
    # instead, per the comparison protocol.
    if not isinstance(other, NoteItem):
      return NotImplemented
    return self.name == other.name and self.body == other.body

  #----------------------------------------------------------------------------
  def __ne__(self, other):
    # Python 2 does not derive `!=` from __eq__, so provide it explicitly.
    result = self.__eq__(other)
    if result is NotImplemented:
      return result
    return not result
#------------------------------------------------------------------------------
# end of $Id$
#------------------------------------------------------------------------------
| metagriffin/pysyncml | pysyncml/items/note.py | Python | gpl-3.0 | 4,879 |
"""Initial migration
Revision ID: ff7e7ae6d711
Revises:
Create Date: 2017-04-23 19:06:15.552092
"""
# revision identifiers, used by Alembic.
revision = 'ff7e7ae6d711'
down_revision = None      # first migration in this branch: nothing to downgrade to
branch_labels = ('uber',)
depends_on = None
from alembic import op
import sqlalchemy as sa
import sideboard.lib.sa
# Detect at load time whether this migration runs against SQLite, which
# needs per-connection foreign-key enforcement and has no timezone().
try:
    is_sqlite = op.get_context().dialect.name == 'sqlite'
except Exception:
    # fix: was a bare `except:` which also swallowed SystemExit and
    # KeyboardInterrupt; no migration context exists when this module is
    # imported outside an alembic run, so default to non-sqlite.
    is_sqlite = False

if is_sqlite:
    op.get_context().connection.execute('PRAGMA foreign_keys=ON;')
    utcnow_server_default = "(datetime('now', 'utc'))"
else:
    utcnow_server_default = "timezone('utc', current_timestamp)"
def upgrade():
    """Create the complete initial schema for the uber plugin.

    Tables are created parents-first so that foreign keys always reference
    an existing table; the one cycle (group.leader_id -> attendee and
    attendee.group_id -> group) is broken with ``use_alter`` on the group
    side.
    """
    op.create_table('approved_email',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('ident', sa.Unicode(), server_default='', nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_approved_email'))
    )
    op.create_table('arbitrary_charge',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('amount', sa.Integer(), nullable=False),
    sa.Column('what', sa.Unicode(), server_default='', nullable=False),
    sa.Column('when', sideboard.lib.sa.UTCDateTime(), nullable=False),
    sa.Column('reg_station', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_arbitrary_charge'))
    )
    op.create_table('email',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('fk_id', sideboard.lib.sa.UUID(), nullable=True),
    sa.Column('ident', sa.Unicode(), server_default='', nullable=False),
    sa.Column('model', sa.Unicode(), server_default='', nullable=False),
    sa.Column('when', sideboard.lib.sa.UTCDateTime(), nullable=False),
    sa.Column('subject', sa.Unicode(), server_default='', nullable=False),
    sa.Column('dest', sa.Unicode(), server_default='', nullable=False),
    sa.Column('body', sa.Unicode(), server_default='', nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_email'))
    )
    # group.leader_id points at attendee, which does not exist yet; use_alter
    # defers creation of that FK until after both tables exist.
    op.create_table('group',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('public_id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('name', sa.Unicode(), server_default='', nullable=False),
    sa.Column('tables', sa.Numeric(), server_default='0', nullable=False),
    sa.Column('address', sa.Unicode(), server_default='', nullable=False),
    sa.Column('website', sa.Unicode(), server_default='', nullable=False),
    sa.Column('wares', sa.Unicode(), server_default='', nullable=False),
    sa.Column('description', sa.Unicode(), server_default='', nullable=False),
    sa.Column('special_needs', sa.Unicode(), server_default='', nullable=False),
    sa.Column('amount_paid', sa.Integer(), server_default='0', nullable=False),
    sa.Column('amount_refunded', sa.Integer(), server_default='0', nullable=False),
    sa.Column('cost', sa.Integer(), server_default='0', nullable=False),
    sa.Column('auto_recalc', sa.Boolean(), server_default='True', nullable=False),
    sa.Column('can_add', sa.Boolean(), server_default='False', nullable=False),
    sa.Column('admin_notes', sa.Unicode(), server_default='', nullable=False),
    sa.Column('status', sa.Integer(), server_default='172070601', nullable=False),
    sa.Column('registered', sideboard.lib.sa.UTCDateTime(), server_default=sa.text(utcnow_server_default), nullable=False),
    sa.Column('approved', sideboard.lib.sa.UTCDateTime(), nullable=True),
    sa.Column('leader_id', sideboard.lib.sa.UUID(), nullable=True),
    sa.ForeignKeyConstraint(['leader_id'], ['attendee.id'], name='fk_leader', use_alter=True),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_group'))
    )
    op.create_table('job',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('type', sa.Integer(), server_default='252034462', nullable=False),
    sa.Column('name', sa.Unicode(), server_default='', nullable=False),
    sa.Column('description', sa.Unicode(), server_default='', nullable=False),
    sa.Column('location', sa.Integer(), nullable=False),
    sa.Column('start_time', sideboard.lib.sa.UTCDateTime(), nullable=False),
    sa.Column('duration', sa.Integer(), nullable=False),
    sa.Column('weight', sa.Float(), server_default='1', nullable=False),
    sa.Column('slots', sa.Integer(), nullable=False),
    sa.Column('restricted', sa.Boolean(), server_default='False', nullable=False),
    sa.Column('extra15', sa.Boolean(), server_default='False', nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_job'))
    )
    op.create_table('page_view_tracking',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('when', sideboard.lib.sa.UTCDateTime(), nullable=False),
    sa.Column('who', sa.Unicode(), server_default='', nullable=False),
    sa.Column('page', sa.Unicode(), server_default='', nullable=False),
    sa.Column('what', sa.Unicode(), server_default='', nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_page_view_tracking'))
    )
    op.create_table('tracking',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('fk_id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('model', sa.Unicode(), server_default='', nullable=False),
    sa.Column('when', sideboard.lib.sa.UTCDateTime(), nullable=False),
    sa.Column('who', sa.Unicode(), server_default='', nullable=False),
    sa.Column('page', sa.Unicode(), server_default='', nullable=False),
    sa.Column('which', sa.Unicode(), server_default='', nullable=False),
    sa.Column('links', sa.Unicode(), server_default='', nullable=False),
    sa.Column('action', sa.Integer(), nullable=False),
    sa.Column('data', sa.Unicode(), server_default='', nullable=False),
    sa.Column('snapshot', sa.Unicode(), server_default='', nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_tracking'))
    )
    # fk_id is a polymorphic reference (no FK constraint), so it needs an
    # explicit index for lookups.
    op.create_index(op.f('ix_tracking_fk_id'), 'tracking', ['fk_id'], unique=False)
    op.create_table('watch_list',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('first_names', sa.Unicode(), server_default='', nullable=False),
    sa.Column('last_name', sa.Unicode(), server_default='', nullable=False),
    sa.Column('email', sa.Unicode(), server_default='', nullable=False),
    sa.Column('birthdate', sa.Date(), nullable=True),
    sa.Column('reason', sa.Unicode(), server_default='', nullable=False),
    sa.Column('action', sa.Unicode(), server_default='', nullable=False),
    sa.Column('active', sa.Boolean(), server_default='True', nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_watch_list'))
    )
    # The deferred unique constraint on badge_num is skipped on SQLite,
    # which does not support deferrable constraints.
    op.create_table('attendee',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('watchlist_id', sideboard.lib.sa.UUID(), nullable=True),
    sa.Column('group_id', sideboard.lib.sa.UUID(), nullable=True),
    sa.Column('placeholder', sa.Boolean(), server_default='False', nullable=False),
    sa.Column('first_name', sa.Unicode(), server_default='', nullable=False),
    sa.Column('last_name', sa.Unicode(), server_default='', nullable=False),
    sa.Column('legal_name', sa.Unicode(), server_default='', nullable=False),
    sa.Column('email', sa.Unicode(), server_default='', nullable=False),
    sa.Column('birthdate', sa.Date(), nullable=True),
    sa.Column('age_group', sa.Integer(), server_default='178244408', nullable=True),
    sa.Column('international', sa.Boolean(), server_default='False', nullable=False),
    sa.Column('zip_code', sa.Unicode(), server_default='', nullable=False),
    sa.Column('address1', sa.Unicode(), server_default='', nullable=False),
    sa.Column('address2', sa.Unicode(), server_default='', nullable=False),
    sa.Column('city', sa.Unicode(), server_default='', nullable=False),
    sa.Column('region', sa.Unicode(), server_default='', nullable=False),
    sa.Column('country', sa.Unicode(), server_default='', nullable=False),
    sa.Column('no_cellphone', sa.Boolean(), server_default='False', nullable=False),
    sa.Column('ec_name', sa.Unicode(), server_default='', nullable=False),
    sa.Column('ec_phone', sa.Unicode(), server_default='', nullable=False),
    sa.Column('cellphone', sa.Unicode(), server_default='', nullable=False),
    sa.Column('interests', sa.Unicode(), server_default='', nullable=False),
    sa.Column('found_how', sa.Unicode(), server_default='', nullable=False),
    sa.Column('comments', sa.Unicode(), server_default='', nullable=False),
    sa.Column('for_review', sa.Unicode(), server_default='', nullable=False),
    sa.Column('admin_notes', sa.Unicode(), server_default='', nullable=False),
    sa.Column('public_id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('badge_num', sa.Integer(), nullable=True),
    sa.Column('badge_type', sa.Integer(), server_default='51352218', nullable=False),
    sa.Column('badge_status', sa.Integer(), server_default='163076611', nullable=False),
    sa.Column('ribbon', sa.Integer(), server_default='154973361', nullable=False),
    sa.Column('affiliate', sa.Unicode(), server_default='', nullable=False),
    sa.Column('shirt', sa.Integer(), server_default='0', nullable=False),
    sa.Column('can_spam', sa.Boolean(), server_default='False', nullable=False),
    sa.Column('regdesk_info', sa.Unicode(), server_default='', nullable=False),
    sa.Column('extra_merch', sa.Unicode(), server_default='', nullable=False),
    sa.Column('got_merch', sa.Boolean(), server_default='False', nullable=False),
    sa.Column('reg_station', sa.Integer(), nullable=True),
    sa.Column('registered', sideboard.lib.sa.UTCDateTime(), server_default=sa.text(utcnow_server_default), nullable=False),
    sa.Column('checked_in', sideboard.lib.sa.UTCDateTime(), nullable=True),
    sa.Column('paid', sa.Integer(), server_default='121378471', nullable=False),
    sa.Column('overridden_price', sa.Integer(), nullable=True),
    sa.Column('amount_paid', sa.Integer(), server_default='0', nullable=False),
    sa.Column('amount_extra', sa.Integer(), server_default='0', nullable=False),
    sa.Column('payment_method', sa.Integer(), nullable=True),
    sa.Column('amount_refunded', sa.Integer(), server_default='0', nullable=False),
    sa.Column('badge_printed_name', sa.Unicode(), server_default='', nullable=False),
    sa.Column('staffing', sa.Boolean(), server_default='False', nullable=False),
    sa.Column('requested_depts', sa.Unicode(), server_default='', nullable=False),
    sa.Column('assigned_depts', sa.Unicode(), server_default='', nullable=False),
    sa.Column('trusted_depts', sa.Unicode(), server_default='', nullable=False),
    sa.Column('nonshift_hours', sa.Integer(), server_default='0', nullable=False),
    sa.Column('past_years', sa.Unicode(), server_default='', nullable=False),
    sa.Column('can_work_setup', sa.Boolean(), server_default='False', nullable=False),
    sa.Column('can_work_teardown', sa.Boolean(), server_default='False', nullable=False),
    sa.ForeignKeyConstraint(['group_id'], ['group.id'], name=op.f('fk_attendee_group_id_group'), ondelete='SET NULL'),
    sa.ForeignKeyConstraint(['watchlist_id'], ['watch_list.id'], name=op.f('fk_attendee_watchlist_id_watch_list'), ondelete='set null'),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_attendee')),
    *[c for c in [sa.UniqueConstraint('badge_num', deferrable='True', initially='DEFERRED', name=op.f('uq_attendee_badge_num'))] if not is_sqlite]
    )
    op.create_table('admin_account',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('attendee_id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('hashed', sa.Unicode(), server_default='', nullable=False),
    sa.Column('access', sa.Unicode(), server_default='', nullable=False),
    sa.ForeignKeyConstraint(['attendee_id'], ['attendee.id'], name=op.f('fk_admin_account_attendee_id_attendee')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_admin_account')),
    sa.UniqueConstraint('attendee_id', name=op.f('uq_admin_account_attendee_id'))
    )
    op.create_table('dept_checklist_item',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('attendee_id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('slug', sa.Unicode(), server_default='', nullable=False),
    sa.Column('comments', sa.Unicode(), server_default='', nullable=False),
    sa.ForeignKeyConstraint(['attendee_id'], ['attendee.id'], name=op.f('fk_dept_checklist_item_attendee_id_attendee')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_dept_checklist_item')),
    sa.UniqueConstraint('attendee_id', 'slug', name='_dept_checklist_item_uniq')
    )
    op.create_table('food_restrictions',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('attendee_id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('standard', sa.Unicode(), server_default='', nullable=False),
    sa.Column('sandwich_pref', sa.Unicode(), server_default='', nullable=False),
    sa.Column('freeform', sa.Unicode(), server_default='', nullable=False),
    sa.ForeignKeyConstraint(['attendee_id'], ['attendee.id'], name=op.f('fk_food_restrictions_attendee_id_attendee')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_food_restrictions')),
    sa.UniqueConstraint('attendee_id', name=op.f('uq_food_restrictions_attendee_id'))
    )
    op.create_table('m_points_for_cash',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('attendee_id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('amount', sa.Integer(), nullable=False),
    sa.Column('when', sideboard.lib.sa.UTCDateTime(), nullable=False),
    sa.ForeignKeyConstraint(['attendee_id'], ['attendee.id'], name=op.f('fk_m_points_for_cash_attendee_id_attendee')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_m_points_for_cash'))
    )
    op.create_table('merch_discount',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('attendee_id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('uses', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['attendee_id'], ['attendee.id'], name=op.f('fk_merch_discount_attendee_id_attendee')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_merch_discount')),
    sa.UniqueConstraint('attendee_id', name=op.f('uq_merch_discount_attendee_id'))
    )
    op.create_table('merch_pickup',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('picked_up_by_id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('picked_up_for_id', sideboard.lib.sa.UUID(), nullable=False),
    sa.ForeignKeyConstraint(['picked_up_by_id'], ['attendee.id'], name=op.f('fk_merch_pickup_picked_up_by_id_attendee')),
    sa.ForeignKeyConstraint(['picked_up_for_id'], ['attendee.id'], name=op.f('fk_merch_pickup_picked_up_for_id_attendee')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_merch_pickup')),
    sa.UniqueConstraint('picked_up_for_id', name=op.f('uq_merch_pickup_picked_up_for_id'))
    )
    op.create_table('no_shirt',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('attendee_id', sideboard.lib.sa.UUID(), nullable=False),
    sa.ForeignKeyConstraint(['attendee_id'], ['attendee.id'], name=op.f('fk_no_shirt_attendee_id_attendee')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_no_shirt')),
    sa.UniqueConstraint('attendee_id', name=op.f('uq_no_shirt_attendee_id'))
    )
    op.create_table('old_m_point_exchange',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('attendee_id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('amount', sa.Integer(), nullable=False),
    sa.Column('when', sideboard.lib.sa.UTCDateTime(), nullable=False),
    sa.ForeignKeyConstraint(['attendee_id'], ['attendee.id'], name=op.f('fk_old_m_point_exchange_attendee_id_attendee')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_old_m_point_exchange'))
    )
    op.create_table('sale',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('attendee_id', sideboard.lib.sa.UUID(), nullable=True),
    sa.Column('what', sa.Unicode(), server_default='', nullable=False),
    sa.Column('cash', sa.Integer(), server_default='0', nullable=False),
    sa.Column('mpoints', sa.Integer(), server_default='0', nullable=False),
    sa.Column('when', sideboard.lib.sa.UTCDateTime(), nullable=False),
    sa.Column('reg_station', sa.Integer(), nullable=True),
    sa.Column('payment_method', sa.Integer(), server_default='251700478', nullable=False),
    sa.ForeignKeyConstraint(['attendee_id'], ['attendee.id'], name=op.f('fk_sale_attendee_id_attendee'), ondelete='set null'),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_sale'))
    )
    op.create_table('shift',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('job_id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('attendee_id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('worked', sa.Integer(), server_default='176686787', nullable=False),
    sa.Column('rating', sa.Integer(), server_default='54944008', nullable=False),
    sa.Column('comment', sa.Unicode(), server_default='', nullable=False),
    sa.ForeignKeyConstraint(['attendee_id'], ['attendee.id'], name=op.f('fk_shift_attendee_id_attendee'), ondelete='cascade'),
    sa.ForeignKeyConstraint(['job_id'], ['job.id'], name=op.f('fk_shift_job_id_job'), ondelete='cascade'),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_shift'))
    )
    op.create_table('password_reset',
    sa.Column('id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('account_id', sideboard.lib.sa.UUID(), nullable=False),
    sa.Column('generated', sideboard.lib.sa.UTCDateTime(), server_default=sa.text(utcnow_server_default), nullable=False),
    sa.Column('hashed', sa.Unicode(), server_default='', nullable=False),
    sa.ForeignKeyConstraint(['account_id'], ['admin_account.id'], name=op.f('fk_password_reset_account_id_admin_account')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_password_reset')),
    sa.UniqueConstraint('account_id', name=op.f('uq_password_reset_account_id'))
    )
def downgrade():
    """Drop every table created by upgrade(), children before parents so
    that foreign key constraints are never violated mid-drop."""
    op.drop_table('password_reset')
    op.drop_table('shift')
    op.drop_table('sale')
    op.drop_table('old_m_point_exchange')
    op.drop_table('no_shirt')
    op.drop_table('merch_pickup')
    op.drop_table('merch_discount')
    op.drop_table('m_points_for_cash')
    op.drop_table('food_restrictions')
    op.drop_table('dept_checklist_item')
    op.drop_table('admin_account')
    op.drop_table('attendee')
    op.drop_table('watch_list')
    # the index must be removed before its table
    op.drop_index(op.f('ix_tracking_fk_id'), table_name='tracking')
    op.drop_table('tracking')
    op.drop_table('page_view_tracking')
    op.drop_table('job')
    op.drop_table('group')
    op.drop_table('email')
    op.drop_table('arbitrary_charge')
    op.drop_table('approved_email')
| RAMSProject/rams | alembic/versions/ff7e7ae6d711_initial_migration.py | Python | agpl-3.0 | 18,774 |
'''
Created on May 25, 2014
@author: doerig
'''
import logging
import logging.config
from testrest.handler.JsonHandler import JsonHandler
class LogHandler(object):
    '''
    Builds a ``logging.Logger`` from a JSON configuration dictionary
    (configured once, then cached) and hands out per-class
    :class:`LoggerWrapper` instances around it.
    '''
    _dict = None

    def __init__(self, dict):
        '''
        Constructor.

        :param dict: configuration mapping providing the keys ``name``,
            ``file``, ``fileloglevel`` and ``consoleloglevel``; level values
            must name attributes of the ``logging`` module (e.g. ``"DEBUG"``).
        '''
        # NOTE: the parameter name shadows the builtin `dict`; kept unchanged
        # for backward compatibility with existing callers.
        self._dict = JsonHandler()
        self._dict.set(dict)
        self._logger = None

    def getLogger(self, name):
        '''
        Return a :class:`LoggerWrapper` labelled *name*.  The underlying
        logger and its handlers are configured on the first call only and
        reused afterwards (avoids attaching duplicate handlers).
        '''
        # fix: was `self._logger == None`; identity comparison is the
        # correct (and PEP 8 mandated) way to test for None.
        if self._logger is None:
            logger = logging.getLogger(self._dict.get('name'))
            # Not checking for any error - if the user configured a
            # inexistent level option the program must die
            logger.setLevel(getattr(logging, self._dict.get('fileloglevel')))
            # create file handler which logs even debug messages
            fh = logging.FileHandler(self._dict.get('file'))
            fh.setLevel(getattr(logging, self._dict.get('fileloglevel')))
            # create console handler with a higher log level
            ch = logging.StreamHandler()
            ch.setLevel(getattr(logging, self._dict.get('consoleloglevel')))
            # create formatter and add it to the handlers
            formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
            ch.setFormatter(formatter)
            fh.setFormatter(formatter)
            # add the handlers to logger
            logger.addHandler(ch)
            logger.addHandler(fh)
            self._logger = logger
        return LoggerWrapper(name, self._logger)
class LoggerWrapper(object):
    """
    Thin convenience wrapper around a ``logging`` logger whose sole job is
    to prefix every emitted message with the name of the class that owns
    the wrapper, so log lines are easy to attribute.
    """
    _class = None

    def __init__(self, name, logger):
        self._class = name
        self._logger = logger

    def _prefixed(self, msg):
        # Render the message as "<ClassName>: <message>"; str() tolerates
        # non-string payloads exactly as the individual methods used to.
        return self._class + ": " + str(msg)

    def critical(self, msg):
        self._logger.critical(self._prefixed(msg))

    def info(self, msg):
        self._logger.info(self._prefixed(msg))

    def debug(self, msg):
        self._logger.debug(self._prefixed(msg))

    def warning(self, msg):
        self._logger.warning(self._prefixed(msg))

    def error(self, msg):
        self._logger.error(self._prefixed(msg))
| sdoerig/TestRest | testrest/testrest/logger/LogHandler.py | Python | gpl-3.0 | 2,281 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#======================================================================
#
# compinit.py - python shell tab completion
#
# Created by skywind on 2018/01/27
# Last change: 2018/01/27 21:48:26
#
#======================================================================
def __completion_init():
    """Enable readline tab completion and a persistent, per-major-version
    interpreter history stored under ~/.local/var.

    Returns 0 on success, or a negative code indicating which step failed:
    -1 required modules unavailable, -2 readline binding or ~/.local
    creation failed, -3 ~/.local/var creation failed.
    """
    try:
        import readline
        import rlcompleter
        import atexit
        import os
        import sys
    except ImportError:
        return -1
    try:
        readline.parse_and_bind('tab: complete')
    except Exception:
        # fix: was a bare `except:` (also caught KeyboardInterrupt/SystemExit)
        return -2
    local = os.path.expanduser('~/.local')
    if not os.path.exists(local):
        try:
            os.mkdir(local)
        except OSError:
            return -2
    if not os.path.exists(local + '/var'):
        try:
            os.mkdir(local + '/var')
        except OSError:
            return -3
    # separate history per major version: py2 and py3 histories differ
    history = local + '/var/python%d_hist' % sys.version_info[0]
    try:
        readline.read_history_file(history)
    except Exception:
        # first run: no history file yet, nothing to load
        pass
    atexit.register(readline.write_history_file, history)
    return 0


# run once at import time, then remove the helper from the namespace
__completion_init()
del __completion_init
| skywind3000/collection | script/compinit.py | Python | mit | 1,023 |
#!/usr/bin/env python
"""Read the NeXTSTEP plist named on the command line and write it back
to stdout."""
import logging
import sys
import os

from . import helpers

helpers.setup_path()

# must only be imported after helpers.setup_path() has adjusted sys.path
from plist.nextstep import NSPlistReader, NSPlistWriter

fname = sys.argv[1]
# Fix: the input file was opened but never closed; a context manager
# guarantees it is closed even if reading/parsing raises.
with open(fname) as f:
    r = NSPlistReader(f, name=fname)
    w = NSPlistWriter(sys.stdout)
    w.write_plist(r.read())
| alessandrostone/mergepbx | tools/nsplist.py | Python | gpl-3.0 | 285 |
def fast_distinct(model_cls, column):
    """
    Use a loose indexscan http://wiki.postgresql.org/wiki/Loose_indexscan
    to get all distinct values for a given column

    Functionally equivalent to
    model_cls.distinct(column).values_list(column, flat=True)
    """
    from django.db import connection

    table = model_cls._meta.db_table
    # Guard against interpolating an arbitrary identifier into the SQL.
    assert column in [field.name for field in model_cls._meta.fields]
    command = """
    WITH RECURSIVE t AS (
        SELECT min({column}) AS col FROM {table}
        UNION ALL
        SELECT (SELECT min({column}) FROM {table} WHERE {column} > t.col)
        FROM t WHERE t.col IS NOT NULL
        )
    SELECT col FROM t WHERE col IS NOT NULL
    UNION ALL
    SELECT NULL WHERE EXISTS(SELECT * FROM {table} WHERE {column} IS NULL);
    """.format(column=column, table=table)
    cursor = connection.cursor()
    cursor.execute(command)
    # Every row is a one-element tuple; unpack while collecting.
    return [value for (value,) in cursor.fetchall()]
| puttarajubr/commcare-hq | corehq/util/queries.py | Python | bsd-3-clause | 984 |
#!/usr/bin/env @python@
# ROOT command line tools: rootmkdir
# Author: Julien Ripoche
# Mail: [email protected]
# Date: 20/08/15
"""Command line to add directories in ROOT files"""
import cmdLineUtils
import sys
# Help strings
COMMAND_HELP = "Add directories in ROOT files"
PARENT_HELP = "make parent directories as needed, no error if existing."
EPILOG="""Examples:
- rootmkdir example.root:dir
Add the directory 'dir' to the ROOT file 'example.root'
- rootmkdir example.root:dir1/dir2
Add the directory 'dir2' in 'dir1' which is into the ROOT file 'example.root'
- rootmkdir -p example.root:dir1/dir2/dir3
Make parent directories of 'dir3' as needed, no error if existing
- rootmkdir example.root
Create an empty ROOT file named 'example.root'
"""
def execute():
    """Build the argument parser, read the command line and run rootMkdir.

    Returns the exit status produced by cmdLineUtils.rootMkdir.
    """
    # argparse-based parser shared by the root* command line tools
    parser = cmdLineUtils.getParserFile(COMMAND_HELP, EPILOG)
    parser.add_argument(
        "-p", "--parents", action="store_true", help=PARENT_HELP)
    # Normalize the command line into a source list plus an option dict
    sources, options = cmdLineUtils.getSourceListOptDict(parser, wildcards=False)
    # Delegate the actual directory creation
    return cmdLineUtils.rootMkdir(sources, parents=options["parents"])

sys.exit(execute())
| BerserkerTroll/root | main/python/rootmkdir.py | Python | lgpl-2.1 | 1,220 |
# -*- coding: utf-8 -*-
"""
Public Python API to create CMS contents.
WARNING: None of the functions defined in this module checks for permissions.
You must implement the necessary permission checks in your own code before
calling these methods!
"""
import datetime
from cms.utils.conf import get_cms_setting
from django.core.exceptions import PermissionDenied
from cms.utils.i18n import get_language_list
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.db.models import Max
from django.template.defaultfilters import slugify
from menus.menu_pool import menu_pool
from cms.admin.forms import save_permissions
from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from cms.models.pagemodel import Page
from cms.models.permissionmodels import (PageUser, PagePermission,
GlobalPagePermission, ACCESS_PAGE_AND_DESCENDANTS)
from cms.models.placeholdermodel import Placeholder
from cms.models.pluginmodel import CMSPlugin
from cms.models.titlemodels import Title
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cms.utils import moderator
from cms.utils.permissions import _thread_locals
#===============================================================================
# Constants
#===============================================================================
VISIBILITY_ALL = None
VISIBILITY_USERS = 1
VISIBILITY_STAFF = 2
#===============================================================================
# Helpers/Internals
#===============================================================================
def _generate_valid_slug(source, parent, language):
    """
    Generate a valid slug for a page from source for the given language.
    Parent is passed so we can make sure the slug is unique for this level in
    the page tree.
    """
    # Slugs only need to be unique among siblings, so restrict the queryset
    # to titles that share the same parent (or to root-level titles).
    if parent:
        qs = Title.objects.filter(language=language, page__parent=parent)
    else:
        qs = Title.objects.filter(language=language, page__parent__isnull=True)
    used = qs.values_list('slug', flat=True)
    base = slugify(source)
    candidate = base
    suffix = 1
    # Append "-1", "-2", ... until the slug is free at this tree level.
    while candidate in used:
        candidate = '%s-%s' % (base, suffix)
        suffix += 1
    return candidate
def _verify_apphook(apphook):
    """
    Verifies the apphook given is valid and returns the normalized form (name)
    """
    if hasattr(apphook, '__module__') and issubclass(apphook, CMSApp):
        # A CMSApp subclass: it must be registered; normalize to its name.
        apphook_pool.discover_apps()
        assert apphook in apphook_pool.apps.values()
        return apphook.__name__
    if isinstance(apphook, basestring):
        # Already a name: just confirm it is a registered one.
        apphook_pool.discover_apps()
        assert apphook in apphook_pool.apps
        return apphook
    raise TypeError("apphook must be string or CMSApp instance")
def _verify_plugin_type(plugin_type):
    """
    Verifies the given plugin_type is valid and returns a tuple of
    (plugin_model, plugin_type)
    """
    if (hasattr(plugin_type, '__module__') and
            issubclass(plugin_type, CMSPluginBase)):
        # A CMSPluginBase subclass: it must be registered in the pool;
        # normalize to (model, class name).
        assert plugin_type in plugin_pool.plugins.values()
        return plugin_type.model, plugin_type.__name__
    if isinstance(plugin_type, basestring):
        # A name: resolve it through the plugin pool.
        try:
            model = plugin_pool.get_plugin(plugin_type).model
        except KeyError:
            raise TypeError(
                'plugin_type must be CMSPluginBase subclass or string'
            )
        return model, plugin_type
    raise TypeError('plugin_type must be CMSPluginBase subclass or string')
#===============================================================================
# Public API
#===============================================================================
def create_page(title, template, language, menu_title=None, slug=None,
                apphook=None, redirect=None, meta_description=None,
                created_by='python-api', parent=None,
                publication_date=None, publication_end_date=None,
                in_navigation=False, soft_root=False, reverse_id=None,
                navigation_extenders=None, published=False, site=None,
                login_required=False, limit_visibility_in_menu=VISIBILITY_ALL,
                position="last-child", overwrite_url=None):
    """
    Create a CMS Page and it's title for the given language

    See docs/extending_cms/api_reference.rst for more info

    No permission checks are performed (see module docstring).  Invalid
    arguments raise AssertionError.  Returns the freshly reloaded Page.
    """
    # ugly permissions hack: downstream permission code reads the current
    # user from thread-local storage, so stash it there for the duration
    if created_by and isinstance(created_by, User):
        _thread_locals.user = created_by
        created_by = created_by.username
    else:
        _thread_locals.user = None
    # validate template
    assert template in [tpl[0] for tpl in get_cms_setting('TEMPLATES')]
    # validate site
    if not site:
        site = Site.objects.get_current()
    else:
        assert isinstance(site, Site)
    # validate language:
    assert language in get_language_list(site), get_cms_setting('LANGUAGES').get(site.pk)
    # set default slug:
    if not slug:
        slug = _generate_valid_slug(title, parent, language)
    # validate and normalize apphook
    if apphook:
        application_urls = _verify_apphook(apphook)
    else:
        application_urls = None
    # validate parent; re-fetch it so the tree fields are current before
    # insert_at() below
    if parent:
        assert isinstance(parent, Page)
        parent = Page.objects.get(pk=parent.pk)
    # validate publication date
    if publication_date:
        assert isinstance(publication_date, datetime.date)
    # validate publication end date
    if publication_end_date:
        assert isinstance(publication_end_date, datetime.date)
    # validate softroot (only allowed when the CMS_SOFTROOT setting is on)
    assert get_cms_setting('SOFTROOT') or not soft_root
    # validate the navigation extender against the registered cms menus
    if navigation_extenders:
        raw_menus = menu_pool.get_menus_by_attribute("cms_enabled", True)
        menus = [menu[0] for menu in raw_menus]
        assert navigation_extenders in menus
    # validate menu visibility
    accepted_limitations = (VISIBILITY_ALL, VISIBILITY_USERS, VISIBILITY_STAFF)
    assert limit_visibility_in_menu in accepted_limitations
    # validate position
    assert position in ('last-child', 'first-child', 'left', 'right')
    page = Page(
        created_by=created_by,
        changed_by=created_by,
        parent=parent,
        publication_date=publication_date,
        publication_end_date=publication_end_date,
        in_navigation=in_navigation,
        soft_root=soft_root,
        reverse_id=reverse_id,
        navigation_extenders=navigation_extenders,
        published=False, # will be published later
        template=template,
        site=site,
        login_required=login_required,
        limit_visibility_in_menu=limit_visibility_in_menu,
    )
    # place the new node into the page tree, then persist it
    page.insert_at(parent, position)
    page.save()
    create_title(
        language=language,
        title=title,
        menu_title=menu_title,
        slug=slug,
        apphook=application_urls,
        redirect=redirect,
        meta_description=meta_description,
        page=page,
        overwrite_url=overwrite_url
    )
    if published:
        page.publish()
    # undo the thread-local permissions hack from above
    del _thread_locals.user
    return page.reload()
def create_title(language, title, page, menu_title=None, slug=None,
                 apphook=None, redirect=None, meta_description=None,
                 parent=None, overwrite_url=None):
    """
    Create a title.

    Parent is only used if slug=None.

    See docs/extending_cms/api_reference.rst for more info

    Returns the created Title instance; invalid arguments raise
    AssertionError.
    """
    # validate page
    assert isinstance(page, Page)
    # validate language:
    assert language in get_language_list(page.site_id)
    # set default slug:
    if not slug:
        slug = _generate_valid_slug(title, parent, language)
    # validate and normalize apphook
    if apphook:
        application_urls = _verify_apphook(apphook)
    else:
        application_urls = None
    title = Title.objects.create(
        language=language,
        title=title,
        menu_title=menu_title,
        slug=slug,
        application_urls=application_urls,
        redirect=redirect,
        meta_description=meta_description,
        page=page
    )
    # an explicit URL replaces the slug-derived path for this title
    if overwrite_url:
        title.has_url_overwrite = True
        title.path = overwrite_url
        title.save()
    return title
def add_plugin(placeholder, plugin_type, language, position='last-child',
               target=None, **data):
    """
    Add a plugin to a placeholder

    See docs/extending_cms/api_reference.rst for more info

    ``data`` is passed straight to the plugin model's constructor.
    Returns the saved plugin instance.
    """
    # validate placeholder
    assert isinstance(placeholder, Placeholder)
    # validate and normalize plugin type
    plugin_model, plugin_type = _verify_plugin_type(plugin_type)
    # append after the current highest position within this
    # placeholder/language (0 when the placeholder is empty)
    max_pos = CMSPlugin.objects.filter(language=language,
        placeholder=placeholder).aggregate(Max('position'))['position__max'] or 0
    plugin_base = CMSPlugin(
        plugin_type=plugin_type,
        placeholder=placeholder,
        position=max_pos + 1,
        language=language
    )
    # position in the plugin tree relative to target (save deferred)
    plugin_base.insert_at(target, position=position, save=False)
    plugin = plugin_model(**data)
    # copy the CMSPlugin base fields onto the concrete plugin instance
    plugin_base.set_base_attr(plugin)
    plugin.save()
    return plugin
def create_page_user(created_by, user,
                     can_add_page=True, can_view_page=True,
                     can_change_page=True, can_delete_page=True,
                     can_recover_page=True, can_add_pageuser=True,
                     can_change_pageuser=True, can_delete_pageuser=True,
                     can_add_pagepermission=True,
                     can_change_pagepermission=True,
                     can_delete_pagepermission=True, grant_all=False):
    """
    Creates a page user.

    See docs/extending_cms/api_reference.rst for more info

    The given ``user`` is promoted to an active staff member and mirrored
    into a PageUser record owned by ``created_by``.
    """
    if grant_all:
        # just be lazy: re-enter with every permission flag set to True
        return create_page_user(created_by, user, True, True, True, True,
                                True, True, True, True, True, True, True)
    # validate created_by
    assert isinstance(created_by, User)
    data = {
        'can_add_page': can_add_page,
        'can_view_page': can_view_page,
        'can_change_page': can_change_page,
        'can_delete_page': can_delete_page,
        'can_recover_page': can_recover_page,
        'can_add_pageuser': can_add_pageuser,
        'can_change_pageuser': can_change_pageuser,
        'can_delete_pageuser': can_delete_pageuser,
        'can_add_pagepermission': can_add_pagepermission,
        'can_change_pagepermission': can_change_pagepermission,
        'can_delete_pagepermission': can_delete_pagepermission,
    }
    user.is_staff = True
    user.is_active = True
    page_user = PageUser(created_by=created_by)
    # copy every User field onto the PageUser shadow record
    for field in [f.name for f in User._meta.local_fields]:
        setattr(page_user, field, getattr(user, field))
    user.save()
    page_user.save()
    save_permissions(data, page_user)
    # NOTE(review): returns the original (now staff/active) user rather than
    # the PageUser created above - confirm callers expect this.
    return user
def assign_user_to_page(page, user, grant_on=ACCESS_PAGE_AND_DESCENDANTS,
                        can_add=False, can_change=False, can_delete=False,
                        can_change_advanced_settings=False, can_publish=False,
                        can_change_permissions=False, can_move_page=False,
                        can_recover_page=True, can_view=False,
                        grant_all=False, global_permission=False):
    """
    Assigns given user to page, and gives him requested permissions.

    See docs/extending_cms/api_reference.rst for more info

    Returns the (last created) permission object: the GlobalPagePermission
    when ``global_permission`` is set, the PagePermission otherwise.
    """
    # grant_all is ignored when a global permission is requested
    grant_all = grant_all and not global_permission
    data = {
        'can_add': can_add or grant_all,
        'can_change': can_change or grant_all,
        'can_delete': can_delete or grant_all,
        'can_change_advanced_settings': can_change_advanced_settings or grant_all,
        'can_publish': can_publish or grant_all,
        'can_change_permissions': can_change_permissions or grant_all,
        'can_move_page': can_move_page or grant_all,
        'can_view': can_view or grant_all,
    }
    page_permission = PagePermission(page=page, user=user,
                                     grant_on=grant_on, **data)
    page_permission.save()
    if global_permission:
        # additionally create a site-wide permission on the current site
        page_permission = GlobalPagePermission(
            user=user, can_recover_page=can_recover_page, **data)
        page_permission.save()
        page_permission.sites.add(Site.objects.get_current())
    return page_permission
def publish_page(page, user):
    """
    Publish a page. This sets `page.published` to `True` and calls publish()
    which does the actual publishing.

    See docs/extending_cms/api_reference.rst for more info

    Raises PermissionDenied when ``user`` lacks publish permission on the
    page.  Returns the reloaded, published page.
    """
    page = page.reload()

    # minimal request stand-in: has_publish_permission only needs .user
    class FakeRequest(object):
        def __init__(self, user):
            self.user = user
    request = FakeRequest(user)
    if not page.has_publish_permission(request):
        raise PermissionDenied()
    page.published = True
    page.save()
    page.publish()
    return page.reload()
| foobacca/django-cms | cms/api.py | Python | bsd-3-clause | 12,953 |
# -*- coding: utf-8 -*-
import os
from sys import platform
from random import randint

## List of friends taking part in the draw
amigos = []

## Ask for each participant's name and register it with a 1-based id
qtd_amigos = int(input("Quantos amigos seu grupo possui? "))
for i in range(qtd_amigos):
    amigo = input("Qual o nome do {0}º amigo? ".format(i+1))
    amigos.append({'id': i+1, 'nome': amigo})

## List that will hold the draw result (sorteante -> sorteado pairs)
sorteio = []
def sortear(sorteando, amigos, sorteados, sorteio, contador):
    """Draw a secret friend for *sorteando* and record the pairing in *sorteio*.

    Retries recursively until an acceptable friend is found.  Returns False
    when the recursion budget is exhausted (so the caller restarts the whole
    draw), True on success.
    """
    ## Stay safely below Python's recursion limit: give up and let the
    ## caller restart the draw from scratch.
    contador += 1
    if contador > 900:
        return False
    ## Pick a random friend
    sorteado = amigos[randint(0, qtd_amigos - 1)]
    ## Requirement 1: the drawn friend was not already drawn by someone else
    requisito_1 = (sorteado['id'] in sorteados)
    ## Requirement 2: the drawn friend did not already draw the current
    ## person (avoids the boring A<->B swap; optional rule).
    ## BUGFIX: the original compared both fields against sorteando['id'],
    ## which could never match; check the drawn friend's earlier draw instead.
    requisito_2 = ([x for x in sorteio if x['sorteante'] == sorteado['id'] and
                    x['sorteado'] == sorteando['id']])
    ## Requirement 3: nobody may draw himself
    requisito_3 = (sorteado['id'] == sorteando['id'])
    if (requisito_1 or requisito_2 or requisito_3):
        ## Some requirement failed: draw again.
        ## BUGFIX: propagate the recursive result so the recursion-budget
        ## False is not silently dropped.
        return sortear(sorteando, amigos, sorteados, sorteio, contador)
    ## All requirements met: record the pairing
    sorteio.append({'sorteante': sorteando['id'], 'sorteado': sorteado['id']})
    return True
## While the draw is not complete (sortear gave up or a constraint failed)
## redo the whole draw from scratch
while len(sorteio) != qtd_amigos:
    sorteio = []
    for rodada in range(qtd_amigos):
        ## Friends are drawn one by one, always re-checking who was taken
        sorteados = [x['sorteado'] for x in sorteio]
        ## Recursive-call counter for sortear's retry budget
        contador = 0
        sortear(amigos[rodada], amigos, sorteados, sorteio, contador)

## Open the result txt file and write its header line
file = open("resultado.txt", "w")
file.write("Resultado do sorteio: \n")

## Reveal the draw result, one participant at a time
for rodada in sorteio:
    for amigo in amigos:
        if rodada['sorteante'] == amigo['id']:
            sorteante = amigo['nome']
        elif rodada['sorteado'] == amigo['id']:
            sorteado = amigo['nome']
    ## Clear the terminal before each reveal so the next friend cannot
    ## see who the previous one drew - no spoiling the surprise ;)
    if platform == 'linux2' or platform == 'darwin' or platform == 'linux':
        os.system("clear")
    elif platform == 'win32' or platform == 'cygwin':
        os.system("cls")
    input("Por favor, chame o amigo {0} e pressione ENTER para ver quem ele sorteou.".format(sorteante))
    input("Você sorteou o amigo: {0}\n\nPressione ENTER para continuar.".format(sorteado))
    ## Append "X sorteou Y" to resultado.txt
    file.write("{0} sorteou {1}\n" .format(sorteante, sorteado))

## Close resultado.txt
file.close()
print("Sorteio encerrado. Divirta-se!")
| juliarizza/script-amigo-secreto | amigo_secreto.py | Python | mit | 3,539 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-26 16:06
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration: alters Education.level to a CharField
    # (max_length=80) restricted to the Undergraduate/Graduate/PHD choices.

    dependencies = [
        ('students', '0011_auto_20170422_0211'),
    ]

    operations = [
        migrations.AlterField(
            model_name='education',
            name='level',
            field=models.CharField(choices=[('Undergraduate', 'Undergraduate'), ('Graduate', 'Graduate'), ('PHD', 'PHD')], max_length=80),
        ),
    ]
| jamesaud/se1-group4 | jmatcher/students/migrations/0012_auto_20170426_1606.py | Python | mit | 542 |
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import unittest
from concurrent import futures
from mock import mock
from mock import patch
from oslo_serialization import jsonutils
from oslo_utils import timeutils
import pika
import oslo_messaging
from oslo_messaging._drivers.pika_driver import pika_commons as pika_drv_cmns
from oslo_messaging._drivers.pika_driver import pika_message as pika_drv_msg
class PikaIncomingMessageTestCase(unittest.TestCase):
    """Tests for PikaIncomingMessage: JSON body parsing plus the
    acknowledge/requeue behaviour with and without a channel (no-ack mode).
    """

    def setUp(self):
        self._pika_engine = mock.Mock()
        self._channel = mock.Mock()
        # the delivery tag identifies the message on the channel for ack/nack
        self._delivery_tag = 12345
        self._method = pika.spec.Basic.Deliver(delivery_tag=self._delivery_tag)
        self._properties = pika.BasicProperties(
            content_type="application/json",
            headers={"version": "1.0"},
        )
        # keys prefixed with "_$_" belong to the request context
        self._body = (
            b'{"_$_key_context":"context_value",'
            b'"payload_key": "payload_value"}'
        )

    def test_message_body_parsing(self):
        message = pika_drv_msg.PikaIncomingMessage(
            self._pika_engine, self._channel, self._method, self._properties,
            self._body
        )
        self.assertEqual("context_value",
                         message.ctxt.get("key_context", None))
        self.assertEqual("payload_value",
                         message.message.get("payload_key", None))

    def test_message_acknowledge(self):
        message = pika_drv_msg.PikaIncomingMessage(
            self._pika_engine, self._channel, self._method, self._properties,
            self._body
        )
        message.acknowledge()
        self.assertEqual(1, self._channel.basic_ack.call_count)
        self.assertEqual({"delivery_tag": self._delivery_tag},
                         self._channel.basic_ack.call_args[1])

    def test_message_acknowledge_no_ack(self):
        # without a channel the message is in no-ack mode: nothing is sent
        message = pika_drv_msg.PikaIncomingMessage(
            self._pika_engine, None, self._method, self._properties,
            self._body
        )
        message.acknowledge()
        self.assertEqual(0, self._channel.basic_ack.call_count)

    def test_message_requeue(self):
        message = pika_drv_msg.PikaIncomingMessage(
            self._pika_engine, self._channel, self._method, self._properties,
            self._body
        )
        message.requeue()
        self.assertEqual(1, self._channel.basic_nack.call_count)
        self.assertEqual({"delivery_tag": self._delivery_tag, 'requeue': True},
                         self._channel.basic_nack.call_args[1])

    def test_message_requeue_no_ack(self):
        # no channel -> requeue is a no-op as well
        message = pika_drv_msg.PikaIncomingMessage(
            self._pika_engine, None, self._method, self._properties,
            self._body
        )
        message.requeue()
        self.assertEqual(0, self._channel.basic_nack.call_count)
class RpcPikaIncomingMessageTestCase(unittest.TestCase):
    """Tests for RpcPikaIncomingMessage: call vs. cast parsing (correlation
    id / reply queue) and the reply() path for both outcomes.
    """

    def setUp(self):
        self._pika_engine = mock.Mock()
        self._pika_engine.rpc_reply_retry_attempts = 3
        self._pika_engine.rpc_reply_retry_delay = 0.25
        self._channel = mock.Mock()
        self._delivery_tag = 12345
        self._method = pika.spec.Basic.Deliver(delivery_tag=self._delivery_tag)
        self._body = (
            b'{"_$_key_context":"context_value",'
            b'"payload_key":"payload_value"}'
        )
        self._properties = pika.BasicProperties(
            content_type="application/json",
            headers={"version": "1.0"},
        )

    def test_call_message_body_parsing(self):
        # a call message carries a correlation id and a reply queue
        self._properties.correlation_id = 123456789
        self._properties.reply_to = "reply_queue"
        message = pika_drv_msg.RpcPikaIncomingMessage(
            self._pika_engine, self._channel, self._method, self._properties,
            self._body
        )
        self.assertEqual("context_value",
                         message.ctxt.get("key_context", None))
        self.assertEqual(123456789, message.msg_id)
        self.assertEqual("reply_queue", message.reply_q)
        self.assertEqual("payload_value",
                         message.message.get("payload_key", None))

    def test_cast_message_body_parsing(self):
        # a cast has neither correlation id nor reply queue
        message = pika_drv_msg.RpcPikaIncomingMessage(
            self._pika_engine, self._channel, self._method, self._properties,
            self._body
        )
        self.assertEqual("context_value",
                         message.ctxt.get("key_context", None))
        self.assertIsNone(message.msg_id)
        self.assertIsNone(message.reply_q)
        self.assertEqual("payload_value",
                         message.message.get("payload_key", None))

    @patch(("oslo_messaging._drivers.pika_driver.pika_message."
            "PikaOutgoingMessage.send"))
    def test_reply_for_cast_message(self, send_reply_mock):
        # replying to a cast must be a no-op (there is nowhere to reply to)
        message = pika_drv_msg.RpcPikaIncomingMessage(
            self._pika_engine, self._channel, self._method, self._properties,
            self._body
        )
        self.assertEqual("context_value",
                         message.ctxt.get("key_context", None))
        self.assertIsNone(message.msg_id)
        self.assertIsNone(message.reply_q)
        self.assertEqual("payload_value",
                         message.message.get("payload_key", None))
        message.reply(reply=object())
        self.assertEqual(0, send_reply_mock.call_count)

    @patch("oslo_messaging._drivers.pika_driver.pika_message."
           "RpcReplyPikaOutgoingMessage")
    @patch("tenacity.retry")
    def test_positive_reply_for_call_message(self,
                                             retry_mock,
                                             outgoing_message_mock):
        self._properties.correlation_id = 123456789
        self._properties.reply_to = "reply_queue"
        message = pika_drv_msg.RpcPikaIncomingMessage(
            self._pika_engine, self._channel, self._method, self._properties,
            self._body
        )
        self.assertEqual("context_value",
                         message.ctxt.get("key_context", None))
        self.assertEqual(123456789, message.msg_id)
        self.assertEqual("reply_queue", message.reply_q)
        self.assertEqual("payload_value",
                         message.message.get("payload_key", None))
        reply = "all_fine"
        message.reply(reply=reply)
        # the reply is sent back to the caller's reply queue with retries
        outgoing_message_mock.assert_called_once_with(
            self._pika_engine, 123456789, failure_info=None, reply='all_fine',
            content_type='application/json'
        )
        outgoing_message_mock().send.assert_called_once_with(
            reply_q='reply_queue', stopwatch=mock.ANY, retrier=mock.ANY
        )
        retry_mock.assert_called_once_with(
            stop=mock.ANY, retry=mock.ANY, wait=mock.ANY
        )

    @patch("oslo_messaging._drivers.pika_driver.pika_message."
           "RpcReplyPikaOutgoingMessage")
    @patch("tenacity.retry")
    def test_negative_reply_for_call_message(self,
                                             retry_mock,
                                             outgoing_message_mock):
        self._properties.correlation_id = 123456789
        self._properties.reply_to = "reply_queue"
        message = pika_drv_msg.RpcPikaIncomingMessage(
            self._pika_engine, self._channel, self._method, self._properties,
            self._body
        )
        self.assertEqual("context_value",
                         message.ctxt.get("key_context", None))
        self.assertEqual(123456789, message.msg_id)
        self.assertEqual("reply_queue", message.reply_q)
        self.assertEqual("payload_value",
                         message.message.get("payload_key", None))
        failure_info = object()
        message.reply(failure=failure_info)
        # failures travel in failure_info with an empty reply payload
        outgoing_message_mock.assert_called_once_with(
            self._pika_engine, 123456789,
            failure_info=failure_info,
            reply=None,
            content_type='application/json'
        )
        outgoing_message_mock().send.assert_called_once_with(
            reply_q='reply_queue', stopwatch=mock.ANY, retrier=mock.ANY
        )
        retry_mock.assert_called_once_with(
            stop=mock.ANY, retry=mock.ANY, wait=mock.ANY
        )
class RpcReplyPikaIncomingMessageTestCase(unittest.TestCase):
    """Tests for RpcReplyPikaIncomingMessage: deserializing positive ("s")
    and negative ("e") reply bodies, including remote exception rebuilding.
    """

    def setUp(self):
        self._pika_engine = mock.Mock()
        # modules from which remote exceptions may be deserialized
        self._pika_engine.allowed_remote_exmods = [
            pika_drv_cmns.EXCEPTIONS_MODULE, "oslo_messaging.exceptions"
        ]
        self._channel = mock.Mock()
        self._delivery_tag = 12345
        self._method = pika.spec.Basic.Deliver(delivery_tag=self._delivery_tag)
        self._properties = pika.BasicProperties(
            content_type="application/json",
            headers={"version": "1.0"},
            correlation_id=123456789
        )

    def test_positive_reply_message_body_parsing(self):
        # "s" carries the successful call result
        body = b'{"s": "all fine"}'
        message = pika_drv_msg.RpcReplyPikaIncomingMessage(
            self._pika_engine, self._channel, self._method, self._properties,
            body
        )
        self.assertEqual(123456789, message.msg_id)
        self.assertIsNone(message.failure)
        self.assertEqual("all fine", message.result)

    def test_negative_reply_message_body_parsing(self):
        # "e" carries message text, trace, exception class and module
        body = (b'{'
                b'    "e": {'
                b'        "s": "Error message",'
                b'        "t": ["TRACE HERE"],'
                b'        "c": "MessagingException",'
                b'        "m": "oslo_messaging.exceptions"'
                b'    }'
                b'}')
        message = pika_drv_msg.RpcReplyPikaIncomingMessage(
            self._pika_engine, self._channel, self._method, self._properties,
            body
        )
        self.assertEqual(123456789, message.msg_id)
        self.assertIsNone(message.result)
        self.assertEqual(
            'Error message\n'
            'TRACE HERE',
            str(message.failure)
        )
        # the remote exception is rebuilt as a real MessagingException
        self.assertIsInstance(message.failure,
                              oslo_messaging.MessagingException)
class PikaOutgoingMessageTestCase(unittest.TestCase):
    """Tests for PikaOutgoingMessage.send: publishing through the
    with/without-confirmation connection pools, checking body serialization
    and AMQP properties (delivery mode, expiration, headers).
    """

    def setUp(self):
        self._pika_engine = mock.MagicMock()
        self._pika_engine.default_content_type = "application/json"
        self._exchange = "it is exchange"
        self._routing_key = "it is routing key"
        self._expiration = 1
        self._stopwatch = (
            timeutils.StopWatch(duration=self._expiration).start()
        )
        self._mandatory = object()
        self._message = {"msg_type": 1, "msg_str": "hello"}
        self._context = {"request_id": 555, "token": "it is a token"}

    # sort_keys makes the serialized body deterministic for the assertions
    @patch("oslo_serialization.jsonutils.dump_as_bytes",
           new=functools.partial(jsonutils.dump_as_bytes, sort_keys=True))
    def test_send_with_confirmation(self):
        message = pika_drv_msg.PikaOutgoingMessage(
            self._pika_engine, self._message, self._context
        )
        message.send(
            exchange=self._exchange,
            routing_key=self._routing_key,
            confirm=True,
            mandatory=self._mandatory,
            persistent=True,
            stopwatch=self._stopwatch,
            retrier=None
        )
        self._pika_engine.connection_with_confirmation_pool.acquire(
        ).__enter__().channel.publish.assert_called_once_with(
            body=mock.ANY,
            exchange=self._exchange, mandatory=self._mandatory,
            properties=mock.ANY,
            routing_key=self._routing_key
        )
        # context keys are serialized with the "_$_" prefix
        body = self._pika_engine.connection_with_confirmation_pool.acquire(
        ).__enter__().channel.publish.call_args[1]["body"]
        self.assertEqual(
            b'{"_$_request_id": 555, "_$_token": "it is a token", '
            b'"msg_str": "hello", "msg_type": 1}',
            body
        )
        props = self._pika_engine.connection_with_confirmation_pool.acquire(
        ).__enter__().channel.publish.call_args[1]["properties"]
        self.assertEqual('application/json', props.content_type)
        # persistent=True -> delivery_mode 2
        self.assertEqual(2, props.delivery_mode)
        # expiration is derived from the stopwatch (ms, small slack allowed)
        self.assertTrue(self._expiration * 1000 - float(props.expiration) <
                        100)
        self.assertEqual({'version': '1.0'}, props.headers)
        self.assertTrue(props.message_id)

    @patch("oslo_serialization.jsonutils.dump_as_bytes",
           new=functools.partial(jsonutils.dump_as_bytes, sort_keys=True))
    def test_send_without_confirmation(self):
        message = pika_drv_msg.PikaOutgoingMessage(
            self._pika_engine, self._message, self._context
        )
        message.send(
            exchange=self._exchange,
            routing_key=self._routing_key,
            confirm=False,
            mandatory=self._mandatory,
            persistent=False,
            stopwatch=self._stopwatch,
            retrier=None
        )
        # confirm=False -> the non-confirming connection pool is used
        self._pika_engine.connection_without_confirmation_pool.acquire(
        ).__enter__().channel.publish.assert_called_once_with(
            body=mock.ANY,
            exchange=self._exchange, mandatory=self._mandatory,
            properties=mock.ANY,
            routing_key=self._routing_key
        )
        body = self._pika_engine.connection_without_confirmation_pool.acquire(
        ).__enter__().channel.publish.call_args[1]["body"]
        self.assertEqual(
            b'{"_$_request_id": 555, "_$_token": "it is a token", '
            b'"msg_str": "hello", "msg_type": 1}',
            body
        )
        props = self._pika_engine.connection_without_confirmation_pool.acquire(
        ).__enter__().channel.publish.call_args[1]["properties"]
        self.assertEqual('application/json', props.content_type)
        # persistent=False -> delivery_mode 1
        self.assertEqual(1, props.delivery_mode)
        self.assertTrue(self._expiration * 1000 - float(props.expiration)
                        < 100)
        self.assertEqual({'version': '1.0'}, props.headers)
        self.assertTrue(props.message_id)
class RpcPikaOutgoingMessageTestCase(unittest.TestCase):
    """Tests for RpcPikaOutgoingMessage.send: cast (fire-and-forget, no
    correlation id) and call (correlation id + reply queue + future result).
    """

    def setUp(self):
        self._exchange = "it is exchange"
        self._routing_key = "it is routing key"
        self._pika_engine = mock.MagicMock()
        self._pika_engine.get_rpc_exchange_name.return_value = self._exchange
        self._pika_engine.get_rpc_queue_name.return_value = self._routing_key
        self._pika_engine.default_content_type = "application/json"
        self._message = {"msg_type": 1, "msg_str": "hello"}
        self._context = {"request_id": 555, "token": "it is a token"}

    # sort_keys makes the serialized body deterministic for the assertions
    @patch("oslo_serialization.jsonutils.dump_as_bytes",
           new=functools.partial(jsonutils.dump_as_bytes, sort_keys=True))
    def test_send_cast_message(self):
        message = pika_drv_msg.RpcPikaOutgoingMessage(
            self._pika_engine, self._message, self._context
        )
        expiration = 1
        stopwatch = timeutils.StopWatch(duration=expiration).start()
        # reply_listener=None -> cast semantics
        message.send(
            exchange=self._exchange,
            routing_key=self._routing_key,
            reply_listener=None,
            stopwatch=stopwatch,
            retrier=None
        )
        self._pika_engine.connection_with_confirmation_pool.acquire(
        ).__enter__().channel.publish.assert_called_once_with(
            body=mock.ANY,
            exchange=self._exchange, mandatory=True,
            properties=mock.ANY,
            routing_key=self._routing_key
        )
        body = self._pika_engine.connection_with_confirmation_pool.acquire(
        ).__enter__().channel.publish.call_args[1]["body"]
        self.assertEqual(
            b'{"_$_request_id": 555, "_$_token": "it is a token", '
            b'"msg_str": "hello", "msg_type": 1}',
            body
        )
        props = self._pika_engine.connection_with_confirmation_pool.acquire(
        ).__enter__().channel.publish.call_args[1]["properties"]
        self.assertEqual('application/json', props.content_type)
        self.assertEqual(1, props.delivery_mode)
        self.assertTrue(expiration * 1000 - float(props.expiration) < 100)
        self.assertEqual({'version': '1.0'}, props.headers)
        # cast: no correlation id, no reply queue
        self.assertIsNone(props.correlation_id)
        self.assertIsNone(props.reply_to)
        self.assertTrue(props.message_id)

    @patch("oslo_serialization.jsonutils.dump_as_bytes",
           new=functools.partial(jsonutils.dump_as_bytes, sort_keys=True))
    def test_send_call_message(self):
        message = pika_drv_msg.RpcPikaOutgoingMessage(
            self._pika_engine, self._message, self._context
        )
        expiration = 1
        stopwatch = timeutils.StopWatch(duration=expiration).start()
        result = "it is a result"
        reply_queue_name = "reply_queue_name"
        # a pre-resolved future stands in for the awaited reply
        future = futures.Future()
        future.set_result(result)
        reply_listener = mock.Mock()
        reply_listener.register_reply_waiter.return_value = future
        reply_listener.get_reply_qname.return_value = reply_queue_name
        res = message.send(
            exchange=self._exchange,
            routing_key=self._routing_key,
            reply_listener=reply_listener,
            stopwatch=stopwatch,
            retrier=None
        )
        # send() returns the future's result for a call
        self.assertEqual(result, res)
        self._pika_engine.connection_with_confirmation_pool.acquire(
        ).__enter__().channel.publish.assert_called_once_with(
            body=mock.ANY,
            exchange=self._exchange, mandatory=True,
            properties=mock.ANY,
            routing_key=self._routing_key
        )
        body = self._pika_engine.connection_with_confirmation_pool.acquire(
        ).__enter__().channel.publish.call_args[1]["body"]
        self.assertEqual(
            b'{"_$_request_id": 555, "_$_token": "it is a token", '
            b'"msg_str": "hello", "msg_type": 1}',
            body
        )
        props = self._pika_engine.connection_with_confirmation_pool.acquire(
        ).__enter__().channel.publish.call_args[1]["properties"]
        self.assertEqual('application/json', props.content_type)
        self.assertEqual(1, props.delivery_mode)
        self.assertTrue(expiration * 1000 - float(props.expiration) < 100)
        self.assertEqual({'version': '1.0'}, props.headers)
        # call: correlation id and reply queue must be set
        self.assertEqual(message.msg_id, props.correlation_id)
        self.assertEqual(reply_queue_name, props.reply_to)
        self.assertTrue(props.message_id)
class RpcReplyPikaOutgoingMessageTestCase(unittest.TestCase):
    """Unit tests for RpcReplyPikaOutgoingMessage publishing.

    Covers both reply flavours: a successful result and a serialized
    failure (exception class, module, message, and trace).
    """

    def setUp(self):
        self._reply_q = "reply_queue_name"

        # Replies expire after one second; the stopwatch is already running.
        self._expiration = 1
        self._stopwatch = (
            timeutils.StopWatch(duration=self._expiration).start()
        )

        self._pika_engine = mock.MagicMock()

        self._rpc_reply_exchange = "rpc_reply_exchange"
        self._pika_engine.rpc_reply_exchange = self._rpc_reply_exchange
        self._pika_engine.default_content_type = "application/json"

        self._msg_id = 12345567

    @patch("oslo_serialization.jsonutils.dump_as_bytes",
           new=functools.partial(jsonutils.dump_as_bytes, sort_keys=True))
    def test_success_message_send(self):
        """A successful reply serializes the result under the 's' key."""
        message = pika_drv_msg.RpcReplyPikaOutgoingMessage(
            self._pika_engine, self._msg_id, reply="all_fine"
        )
        message.send(self._reply_q, stopwatch=self._stopwatch, retrier=None)
        self._pika_engine.connection_with_confirmation_pool.acquire(
        ).__enter__().channel.publish.assert_called_once_with(
            body=b'{"s": "all_fine"}',
            exchange=self._rpc_reply_exchange, mandatory=True,
            properties=mock.ANY,
            routing_key=self._reply_q
        )
        props = self._pika_engine.connection_with_confirmation_pool.acquire(
        ).__enter__().channel.publish.call_args[1]["properties"]
        self.assertEqual('application/json', props.content_type)
        self.assertEqual(1, props.delivery_mode)
        # AMQP expiration is in milliseconds; allow some scheduling slack.
        self.assertTrue(self._expiration * 1000 - float(props.expiration) <
                        100)
        self.assertEqual({'version': '1.0'}, props.headers)
        self.assertEqual(message.msg_id, props.correlation_id)
        # A reply never asks for a further reply.
        self.assertIsNone(props.reply_to)
        self.assertTrue(props.message_id)

    @patch("traceback.format_exception", new=lambda x, y, z: z)
    @patch("oslo_serialization.jsonutils.dump_as_bytes",
           new=functools.partial(jsonutils.dump_as_bytes, sort_keys=True))
    def test_failure_message_send(self):
        """A failure reply serializes the exception under the 'e' key.

        ``traceback.format_exception`` is patched to pass the canned trace
        through unchanged, keeping the expected body deterministic.
        """
        failure_info = (oslo_messaging.MessagingException,
                        oslo_messaging.MessagingException("Error message"),
                        ['It is a trace'])

        message = pika_drv_msg.RpcReplyPikaOutgoingMessage(
            self._pika_engine, self._msg_id, failure_info=failure_info
        )
        message.send(self._reply_q, stopwatch=self._stopwatch, retrier=None)
        self._pika_engine.connection_with_confirmation_pool.acquire(
        ).__enter__().channel.publish.assert_called_once_with(
            body=mock.ANY,
            exchange=self._rpc_reply_exchange,
            mandatory=True,
            properties=mock.ANY,
            routing_key=self._reply_q
        )
        body = self._pika_engine.connection_with_confirmation_pool.acquire(
        ).__enter__().channel.publish.call_args[1]["body"]
        self.assertEqual(
            b'{"e": {"c": "MessagingException", '
            b'"m": "oslo_messaging.exceptions", "s": "Error message", '
            b'"t": ["It is a trace"]}}',
            body
        )
        props = self._pika_engine.connection_with_confirmation_pool.acquire(
        ).__enter__().channel.publish.call_args[1]["properties"]
        self.assertEqual('application/json', props.content_type)
        self.assertEqual(1, props.delivery_mode)
        self.assertTrue(self._expiration * 1000 - float(props.expiration) <
                        100)
        self.assertEqual({'version': '1.0'}, props.headers)
        self.assertEqual(message.msg_id, props.correlation_id)
        self.assertIsNone(props.reply_to)
        self.assertTrue(props.message_id)
| ozamiatin/oslo.messaging | oslo_messaging/tests/drivers/pika/test_message.py | Python | apache-2.0 | 22,701 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-03-27 10:04
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the boolean ``archive_type`` flag to ``TagVersionType``."""

    dependencies = [('tags', '0020_auto_20190326_1547')]

    operations = [
        migrations.AddField(
            model_name='tagversiontype',
            name='archive_type',
            field=models.BooleanField(default=False,
                                      verbose_name='archive type'),
        ),
    ]
| ESSolutions/ESSArch_Core | ESSArch_Core/tags/migrations/0021_tagversiontype_archive_type.py | Python | gpl-3.0 | 452 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-31 17:41
from __future__ import unicode_literals
from django.db import migrations
from ..models import User
import json
import os
def fill_from_mock(apps, schema_editor):
    """Seed ``User`` rows from the JSON mock fixture.

    Reads ``mock/account/users.json`` (a missing/unreadable file is treated
    as an empty list) and creates or updates one user per record.  A
    record's ``email`` is mandatory; ``password`` and ``username`` fall
    back to the email when absent or blank, and ``roles`` drives the flags:
    'user' -> staff, 'admin' -> superuser; a user with neither role is
    left inactive.
    """
    try:
        # The context manager closes the file; the old explicit f.close()
        # inside the ``with`` block was redundant.
        with open(os.path.join('mock', 'account', 'users.json')) as f:
            content = f.read()
    except IOError:
        # No fixture shipped -- behave as if it contained an empty list.
        content = '[]'

    for record in json.loads(content):
        email = record['email']  # required; a missing email is a hard error

        # Optional fields: ``dict.get`` replaces the old blanket ``except:``
        # blocks, which silently swallowed far more than missing keys.
        password = record.get('password', '')
        username = record.get('username', '')
        first_name = record.get('firstname')
        last_name = record.get('lastname')
        roles = record.get('roles', [])

        if password == '':
            password = email
        if username == '':
            username = email

        is_staff = 'user' in roles
        is_admin = 'admin' in roles

        try:
            user = User.objects.get(email=email)
        except User.DoesNotExist:
            user = User.objects.create_user(email=email, password=password,
                                            username=username)
        user.backend = 'django.contrib.auth.backends.ModelBackend'
        user.first_name = first_name
        user.last_name = last_name
        user.is_staff = is_staff
        user.is_superuser = is_admin
        # Only users holding at least one recognized role are activated.
        user.is_active = is_staff or is_admin
        user.save()
class Migration(migrations.Migration):
    # Runs right after the initial account schema so the User table exists.
    dependencies = [
        ('account', '0001_initial'),
    ]

    operations = [
        # Data-only migration: seed users from the JSON mock fixture.
        migrations.RunPython(fill_from_mock),
    ]
| EndyKaufman/django-postgres-angularjs-blog | app/account/migrations/0002_fill_from_mock.py | Python | mit | 1,995 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###########################################################################
## Python code generated with wxFormBuilder (version Jun 17 2015)
## http://www.wxformbuilder.org/
##
## PLEASE DO "NOT" EDIT THIS FILE!
###########################################################################
import wx
import wx.xrc
from wx.lib.newevent import NewCommandEvent
# Custom wx events: one to request a new timer interval, one to trigger a
# display refresh from the worker side.
TimerChangeEvent, EVT_TIMER_CHANGE = NewCommandEvent()
DisplayUpdate, EVT_DISP_UPDATE = wx.lib.newevent.NewEvent()

import numpy as np
import matplotlib
from matplotlib import dates
from datetime import datetime,timedelta
import time

# Use WXAgg as matplotlib's backend so the figure can be embedded in
# wxPython.  Must run before the backend modules below are imported.
matplotlib.use("WXAgg")
from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigureCanvas
from matplotlib.backends.backend_wxagg import NavigationToolbar2WxAgg as NavigationToolbar
from matplotlib.ticker import MultipleLocator, FuncFormatter
import pylab
from matplotlib import pyplot

import dataCollect

from Queue import Queue, Empty
from threading import Thread

# Event-type tag carried by queue events originating from the wx timer.
EVENT_TIMER = 'eTimer'
###########################################################################
## Event type
###########################################################################
class Event(object):
    """Lightweight message passed through the worker queue.

    Bundles a callable to invoke (``handle``) with an optional
    event-type tag (``type_``).
    """

    def __init__(self, handle, type_=None):
        self.handle = handle
        self.type_ = type_
class MainFrame ( wx.Frame ):
    """Top-level MyAQI window.

    Left column: start/stop/quit buttons plus live PM2.5/PM10 readouts.
    Right side: an embedded matplotlib canvas with twin y-axes (PM2.5 on
    axes25, PM10 on axes10).  A wx timer posts work onto an internal
    queue, a background thread drains it, and display refreshes are
    marshalled back onto the GUI thread via custom wx events.
    """

    def __init__( self, parent ):
        """Build the widget tree, wire events, and draw the initial plot."""
        wx.Frame.__init__ ( self, parent, id = wx.ID_ANY, title = u"MyAQI", \
            pos = wx.DefaultPosition, size = wx.Size( 800,400 ), style = wx.DEFAULT_FRAME_STYLE|wx.TAB_TRAVERSAL )

        #####################################################
        # Manual Add Code
        self.dpi = 100
        # self.Figure = matplotlib.figure.Figure(figsize=(10,3), dpi=self.dpi)
        self.Figure = matplotlib.figure.Figure(figsize=(50,30))
        self.Figure.set_facecolor('white')
        # self.axes = self.Figure.add_axes([0.1,0.1,0.8,0.8])
        # Twin axes share x: PM2.5 on the left scale, PM10 on the right.
        self.axes25 = self.Figure.add_subplot(111)
        self.axes10 = self.axes25.twinx()
        self.FigureCanvas = FigureCanvas(self,-1,self.Figure)
        #####################################################

        self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )
        self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_WINDOW ) )
        # self.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_WINDOW ) )

        # One row, three columns: button column | separator | plot canvas.
        MainSizer = wx.FlexGridSizer( 1, 3, 0, 0 )
        MainSizer.SetFlexibleDirection( wx.BOTH )
        MainSizer.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_ALL )

        leftSizer = wx.FlexGridSizer( 11, 1, 0, 0 )
        leftSizer.SetFlexibleDirection( wx.BOTH )
        leftSizer.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_ALL )

        # Empty static texts act as vertical spacers between the buttons.
        self.m_staticText1 = wx.StaticText( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_staticText1.Wrap( -1 )
        leftSizer.Add( self.m_staticText1, 0, wx.ALL | wx.EXPAND, 5 )

        self.m_btn_start = wx.Button( self, wx.ID_ANY, u"Start", wx.DefaultPosition, wx.DefaultSize, 0 )
        leftSizer.Add( self.m_btn_start, 0, wx.ALL | wx.EXPAND, 5 )

        self.m_staticText2 = wx.StaticText( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_staticText2.Wrap( -1 )
        leftSizer.Add( self.m_staticText2, 0, wx.ALL | wx.EXPAND, 5 )

        self.m_btn_stop = wx.Button( self, wx.ID_ANY, u"Stop", wx.DefaultPosition, wx.DefaultSize, 0 )
        leftSizer.Add( self.m_btn_stop, 0, wx.ALL | wx.EXPAND, 5 )

        self.m_staticText3 = wx.StaticText( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_staticText3.Wrap( -1 )
        leftSizer.Add( self.m_staticText3, 0, wx.ALL | wx.EXPAND, 5 )

        self.m_btn_quit = wx.Button( self, wx.ID_ANY, u"Quit", wx.DefaultPosition, wx.DefaultSize, 0 )
        leftSizer.Add( self.m_btn_quit, 0, wx.ALL | wx.EXPAND, 5 )

        self.m_staticText4 = wx.StaticText( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_staticText4.Wrap( -1 )
        leftSizer.Add( self.m_staticText4, 0, wx.ALL | wx.EXPAND, 5 )

        # Numeric readouts for the most recent PM2.5 / PM10 samples.
        self.m__staticPM25label = wx.StaticText( self, wx.ID_ANY, u"PM2.5", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m__staticPM25label.Wrap( -1 )
        leftSizer.Add( self.m__staticPM25label, 0, wx.ALL, 5 )

        self.m_textPM25 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 40,-1 ), style =wx.TE_RIGHT )
        leftSizer.Add( self.m_textPM25, 0, wx.ALL, 5 )

        self.m_staticPM10label = wx.StaticText( self, wx.ID_ANY, u"PM10", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_staticPM10label.Wrap( -1 )
        leftSizer.Add( self.m_staticPM10label, 0, wx.ALL, 5 )

        self.m_textPM10 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 40,-1 ), style =wx.TE_RIGHT )
        leftSizer.Add( self.m_textPM10, 0, wx.ALL, 5 )

        MainSizer.Add( leftSizer, 1, wx.ALL | wx.EXPAND, 5 )

        self.m_staticline1 = wx.StaticLine( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LI_VERTICAL )
        MainSizer.Add( self.m_staticline1, 0, wx.EXPAND | wx.ALL, 5 )

        MainSizer.Add(self.FigureCanvas,proportion =-10, border = 2,flag = wx.ALL | wx.GROW)

        self.SetSizer( MainSizer )
        self.Layout()
        self.timer = wx.Timer()
        self.timer.SetOwner( self, wx.ID_ANY )

        self.Centre( wx.BOTH )

        # Connect Events
        self.m_btn_start.Bind( wx.EVT_BUTTON, self.onStart )
        self.m_btn_stop.Bind( wx.EVT_BUTTON, self.onStop )
        self.m_btn_quit.Bind( wx.EVT_BUTTON, self.onQuit )
        self.Bind( wx.EVT_TIMER, self.onTimer, id=wx.ID_ANY )
        # Timer event
        self.Bind(EVT_TIMER_CHANGE, self.onChangeTimer)
        #customer event
        self.Bind(EVT_DISP_UPDATE, self.onDisplayUpdate)

        # Create object for AQI data
        self.tickerData = dataCollect.AQIdata2()
        # initial plot the graphy here, only need to update data later
        # self.plot(self.tickerData.xTicker,self.tickerData.y25Ticker, '--+r', self.tickerData.xTicker,self.tickerData.y10Ticker,'--*g')
        self.plot_data25 = self.axes25.plot(self.tickerData.xTicker,self.tickerData.y25Ticker,'-sr')[0]
        self.plot_data10 = self.axes10.plot(self.tickerData.xTicker,self.tickerData.y10Ticker,'-dg')[0]
        self.axes25.set_axis_bgcolor('gray')
        self.axes25.set_ybound(lower=0, upper=500)
        self.axes10.set_ybound(lower=0, upper=500)
        # hfmt = dates.DateFormatter('%m/%d %H:%M')
        hfmt = dates.DateFormatter('%H:%M')
        # self.axes25.xaxis.set_major_locator(dates.MinuteLocator())
        self.axes25.xaxis.set_major_locator(dates.HourLocator())
        self.axes25.xaxis.set_major_formatter(hfmt)
        # self.axes10.xaxis.set_major_locator(dates.MinuteLocator())
        # NOTE(review): axes25's major locator is set a second time here;
        # this line probably intended axes10 -- confirm before changing.
        self.axes25.xaxis.set_major_locator(dates.HourLocator())
        self.axes10.xaxis.set_major_formatter(hfmt)
        # self.axes25.get_xticklabels(), fontsize=8)
        # self.axes25.get_yticklabels(), fontsize=8)
        # self.axes10.get_xticklabels(), fontsize=8)
        # self.axes10.get_yticklabels(), fontsize=8)

        self.sleepTime = 10 # 10 second delay
        self.maxDatalen = 100000 #max 10E5 point
        self.__queue = Queue()
        self.__active = False

    def __del__( self ):
        # Best-effort shutdown: stop the timer and join the worker thread.
        self.timer.Stop()
        if self.__active == True:
            self.__active = False
            self.__thread.join()

    # Virtual event handlers, overide them in your derived class
    def onStart( self, event ):
        """Start button: begin sampling."""
        self.__Start()

    def __Start(self):
        # Start the wx timer and (once) the queue-draining worker thread.
        self.timer.Start(self.sleepTime)
        if self.__active == False:
            self.__thread = Thread(target = self.__run)
            self.__active = True
            self.__thread.start()

    def onStop( self, event ):
        """Stop button: halt sampling."""
        self.__Stop()

    def __Stop(self):
        # Stop the timer and shut the worker thread down cleanly.
        self.timer.Stop()
        if self.__active == True:
            self.__active = False
            self.__thread.join()

    def post_timer_change_event(self, value):
        '''
        create a change timer event
        '''
        evt = TimerChangeEvent(self.Id, value=value)
        wx.PostEvent(self, evt)

    def onChangeTimer(self, event):
        """Restart the wx timer with the interval carried by the event."""
        value = event.value
        self.timer.Start(value)

    def onQuit( self, event ):
        """Quit button: stop everything and close the frame."""
        self.timer.Stop()
        if self.__active == True:
            self.__active = False
            self.__thread.join()
        self.Close()

    def onTimer( self, event ):
        # Runs on the GUI thread: enqueue the update for the worker thread.
        event_ = Event(self.updateGraphy, type_=EVENT_TIMER)
        self.__queue.put(event_)

    def updateGraphy(self):
        # Runs on the worker thread: bounce back to the GUI thread via a
        # custom event, since wx widgets must only be touched there.
        evt = DisplayUpdate()
        wx.PostEvent(self, evt)

    def onDisplayUpdate(self, event):
        """Append the newest sample and refresh the readouts and plot.

        When the history exceeds ``maxDatalen`` it is thinned (roughly
        every other point dropped) and the sample interval is doubled.
        """
        nplen = len(self.tickerData.xTicker)
        if nplen>self.maxDatalen:
            for i in range((nplen/2)):
                self.tickerData.xTicker = np.delete(self.tickerData.xTicker, i+1, 0)
                self.tickerData.y25Ticker = np.delete(self.tickerData.y25Ticker, i+1, 0)
                self.tickerData.y10Ticker = np.delete(self.tickerData.y10Ticker, i+1, 0)
            self.sleepTime = self.sleepTime *2
            self.post_timer_change_event(self.sleepTime)
        self.tickerData.updateElement(self.sleepTime)
        self.m_textPM10.SetValue(str(int(self.tickerData.y10Ticker[-1])))
        self.m_textPM25.SetValue(str(int(self.tickerData.y25Ticker[-1])))
        self.__plot()

    def __run(self):
        # Worker-thread loop: drain the queue until deactivated.  The
        # one-second timeout lets the loop notice a shutdown request.
        while self.__active == True:
            try:
                event_ = self.__queue.get(block = True, timeout = 1)
                self.__process(event_)
            except Empty:
                pass

    def __process(self, event_):
        # Dispatch a queued event by invoking its handler.
        event_.handle()

    def __plot(self,*args,**kwargs):
        '''Redraw both curves, rescaling axes and picking x-tick density
        from the covered time span.

        NOTE(review): ``dts`` is indexed below, so this relies on Python 2
        ``map`` returning a list -- confirm before porting to Python 3.
        '''
        # how to change the x axis to time format
        dts = map(datetime.fromtimestamp, self.tickerData.xTicker)
        fds = dates.date2num(dts) # converted
        xmin = fds[0]
        xmax = fds[-1]+0.001
        diff = dts[-1]-dts[0]
        ymin = 0
        ymax = max(max(self.tickerData.y25Ticker), max(self.tickerData.y10Ticker))*1.5
        self.axes25.set_xbound(lower=xmin, upper=xmax)
        self.axes25.set_ybound(lower=ymin, upper=ymax)
        self.axes10.set_xbound(lower=xmin, upper=xmax)
        self.axes10.set_ybound(lower=ymin, upper=ymax)
        # X axis format setting: coarser tick spacing for longer spans.
        if diff < timedelta(minutes=20):
            hfmt = dates.DateFormatter('%H:%M')
            self.axes25.xaxis.set_major_formatter(hfmt)
            self.axes25.xaxis.set_major_locator(dates.MinuteLocator(byminute=range(60), interval=2))
            self.axes25.xaxis.set_minor_locator(dates.MinuteLocator(interval=1))
        elif diff < timedelta(hours=1):
            hfmt = dates.DateFormatter('%H:%M')
            self.axes25.xaxis.set_major_formatter(hfmt)
            self.axes25.xaxis.set_major_locator(dates.MinuteLocator(byminute=range(60), interval=5))
            self.axes25.xaxis.set_minor_locator(dates.MinuteLocator(interval=2))
        elif diff < timedelta(hours=6):
            hfmt = dates.DateFormatter('%H:%M')
            self.axes25.xaxis.set_major_formatter(hfmt)
            self.axes25.xaxis.set_major_locator(dates.MinuteLocator(interval=30))
            self.axes25.xaxis.set_minor_locator(dates.MinuteLocator(interval=10))
        elif diff < timedelta(days=2):
            hfmt = dates.DateFormatter('%H:%M')
            self.axes25.xaxis.set_major_formatter(hfmt)
            self.axes25.xaxis.set_major_locator(dates.HourLocator(interval=4))
            self.axes25.xaxis.set_minor_locator(dates.HourLocator(interval=1))
        elif diff < timedelta(days=10):
            hfmt = dates.DateFormatter('%m/%d')
            self.axes25.xaxis.set_major_formatter(hfmt)
            self.axes25.xaxis.set_major_locator(dates.DayLocator(interval=1))
            self.axes25.xaxis.set_minor_locator(dates.HourLocator(interval=6))
        elif diff < timedelta(days=40):
            hfmt = dates.DateFormatter('%m/%d')
            self.axes25.xaxis.set_major_formatter(hfmt)
            self.axes25.xaxis.set_major_locator(dates.DayLocator(interval=2))
        self.plot_data25.set_xdata(fds)
        self.plot_data25.set_ydata(self.tickerData.y25Ticker)
        self.plot_data10.set_xdata(fds)
        self.plot_data10.set_ydata(self.tickerData.y10Ticker)
        # Slant the timestamps so neighbouring labels do not overlap.
        xlabels = self.axes25.get_xticklabels()
        for xl in xlabels:
            xl.set_rotation(45)
        self.__updatePlot()

    def __updatePlot(self):
        '''Flush pending artist changes to the canvas; call after any
        data update so the change becomes visible.'''
        self.FigureCanvas.draw()
if __name__ == '__main__':
    # Standard wx bootstrap: create the app, show the frame, run the loop.
    app = wx.App()
    # wx.InitAllImageHandlers()
    frame = MainFrame(None)
    app.SetTopWindow(frame)
    frame.Show()
    app.MainLoop()
| cgqyh/MyAQI | src/myAQIGUI.py | Python | mit | 13,446 |
# Copyright 2015-present The Scikit Flow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from sklearn import datasets, metrics, cross_validation
import tensorflow as tf
from tensorflow.contrib import skflow
iris = datasets.load_iris()
# Hold out 20% of the 150 iris samples for evaluation; fixed seed keeps the
# split reproducible.
X_train, X_test, y_train, y_test = cross_validation.train_test_split(iris.data, iris.target,
    test_size=0.2, random_state=42)


def my_model(X, y):
    """
    This is DNN with 10, 20, 10 hidden layers, and dropout of 0.5 probability.

    Note: If you want to run this example with multiple GPUs, Cuda Toolkit 7.0 and
    CUDNN 6.5 V2 from NVIDIA need to be installed beforehand.
    """
    # Pin the hidden stack and the output layer to two different GPUs.
    with tf.device('/gpu:1'):
        layers = skflow.ops.dnn(X, [10, 20, 10], dropout=0.5)
    with tf.device('/gpu:2'):
        return skflow.models.logistic_regression(layers, y)


classifier = skflow.TensorFlowEstimator(model_fn=my_model, n_classes=3)
classifier.fit(X_train, y_train)
score = metrics.accuracy_score(y_test, classifier.predict(X_test))
print('Accuracy: {0:f}'.format(score))
| shishaochen/TensorFlow-0.8-Win | tensorflow/examples/skflow/multiple_gpu.py | Python | apache-2.0 | 1,662 |
#!/usr/bin/env python
import os
from setuptools import setup
def recursive_files(*roots):
for r in roots:
for root, directories, files in os.walk(r):
for i in files:
yield os.path.join(root, i)
# Package metadata for the npactflask web frontend.  ``package_data``
# bundles every file under static/ and templates/, collected at setup time
# by recursive_files().
setup(name='npactflask',
      version='0.6.1',
      description='Website for PYNPACT, the Python N-Profile Analysis Computation Tool',
      author='Nathan Bird',
      author_email='[email protected]',
      url='http://genome.ufl.edu/npact/',
      packages=['npactflask'],
      package_data={'npactflask': list(recursive_files('static', 'templates'))},
      requires=["biopython(>=1.57)",
                "flask(==0.10)",
                "gunicorn",
                "pytz"],
      scripts=['bin/cleanup.py', 'bin/devserver']
      )
| victor-lin/npact | npactflask/setup.py | Python | bsd-3-clause | 777 |
# -*- coding: utf-8 -*-
import pytest
from .fixtures import parametrize
from korona.lib.utils import validate_tag
@parametrize('tag,error,error_msg', [
    # Unknown tag name -> ValueError; None tag -> AttributeError.
    ('htmle', ValueError, 'tag is not supported'),
    (None, AttributeError, 'Tag cannot be empty')
])
def test_validate_invalid_tags(tag, error, error_msg):
    """Test for validating the error for given invalid tags."""
    with pytest.raises(error) as exc:
        validate_tag(tag)
    # The raised exception must carry the expected message fragment.
    assert error_msg in str(exc)
| bharadwajyarlagadda/korona | tests/test_tags.py | Python | mit | 478 |
from __future__ import absolute_import
from django import template
from mailviews.helpers import should_use_staticfiles
from mailviews.previews import URL_NAMESPACE
register = template.Library()
def mailviews_static(path):
    """Resolve *path* to a static-asset URL for mailviews templates.

    Delegates to Django's staticfiles machinery when it is in use;
    otherwise falls back to the preview app's own ``static`` view,
    reversed inside the mailviews URL namespace.
    """
    if not should_use_staticfiles():
        # Lazy import, exactly like the original branch, so the URL
        # resolver is only touched when actually needed.
        from django.urls import reverse
        return reverse('%s:static' % URL_NAMESPACE, kwargs={'path': path})

    from django.templatetags.static import static
    return static(path)


register.simple_tag(mailviews_static)
| danxshap/django-mailviews | mailviews/templatetags/mailviews.py | Python | apache-2.0 | 532 |
from typing import List
import unittest
import heapq
class Worker:
    """A hiring candidate: wage demand, quality score, and their ratio."""

    def __init__(self, wage: float, quality: float):
        self.wage = wage
        self.quality = quality
        # Pay per unit of quality; the priciest member of a hired group
        # sets this rate for everyone.
        self.ratio = wage / quality

    def __str__(self) -> str:
        fields = ("R", self.ratio, "W", self.wage, "Q", self.quality)
        return " ".join(str(item) for item in fields)
class Solution:
    """LeetCode 857: Minimum Cost to Hire K Workers."""

    def mincostToHireWorkers(
        self, quality: List[int], wage: List[int], K: int
    ) -> float:
        """Return the minimum total pay needed to hire exactly ``K`` workers.

        Each hired worker is paid proportionally to quality and at least
        their minimum wage, so a group's cost is ``max(ratio) * sum(quality)``
        with ``ratio = wage / quality``.  Sweep workers by increasing ratio
        (the current worker fixes the group's pay rate) while a max-heap
        retains the K smallest qualities seen so far.
        O(n log n) time, O(K) extra space.

        Note: the original version reused the ``quality`` parameter as its
        heap container, shadowing the input mid-function; distinct names
        are used here.
        """
        # (ratio, quality) pairs, cheapest pay rate first.
        workers = sorted((w / q, q) for q, w in zip(quality, wage))

        best = float("inf")
        pool = []          # max-heap (negated values) of pooled qualities
        pool_quality = 0   # running sum of qualities currently pooled

        for ratio, q in workers:
            heapq.heappush(pool, -q)
            pool_quality += q
            if len(pool) > K:
                # Evict the highest-quality (most expensive) worker.
                pool_quality += heapq.heappop(pool)
            if len(pool) == K:
                best = min(best, ratio * pool_quality)
        return best
class UnitTest(unittest.TestCase):
    """Regression tests covering the two LeetCode sample cases."""

    def test1(self):
        # Example 1: hire workers 0 and 2 at ratio 7 -> 7 * (10 + 5) = 105.
        self.assertEqual(
            105.0, Solution().mincostToHireWorkers([10, 20, 5], [70, 50, 30], 2)
        )

    def test2(self):
        # Example 2: three workers paid at the highest ratio in the group.
        self.assertEqual(
            30.666666666666664,
            Solution().mincostToHireWorkers([3, 1, 10, 10, 1], [4, 8, 2, 2, 7], 3),
        )
| rralcala/random-scripts | py/lc/8/857.py | Python | gpl-2.0 | 1,315 |
"""
WSGI config for rockmylight project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "rockmylight.settings")
application = get_wsgi_application()
| RockMyLight/django-rml | rockmylight/rockmylight/wsgi.py | Python | mit | 399 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Wei Gao <[email protected]>
# Copyright: (c) 2018, Ansible Project
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: vmware_host_facts
short_description: Gathers facts about remote ESXi hostsystem
description:
- This module can be used to gathers facts like CPU, memory, datastore, network and system etc. about ESXi host system.
- Please specify hostname or IP address of ESXi host system as C(hostname).
- If hostname or IP address of vCenter is provided as C(hostname) and C(esxi_hostname) is not specified, then the
module will throw an error.
- VSAN facts added in 2.7 version.
version_added: 2.5
author:
- Wei Gao (@woshihaoren)
requirements:
- python >= 2.6
- PyVmomi
options:
esxi_hostname:
description:
- ESXi hostname.
- Host facts about the specified ESXi server will be returned.
- By specifying this option, you can select which ESXi hostsystem is returned if connecting to a vCenter.
version_added: 2.8
type: str
show_tag:
description:
- Tags related to Host are shown if set to C(True).
default: False
type: bool
required: False
version_added: 2.9
schema:
description:
- Specify the output schema desired.
- The 'summary' output schema is the legacy output from the module
- The 'vsphere' output schema is the vSphere API class definition
which requires pyvmomi>6.7.1
choices: ['summary', 'vsphere']
default: 'summary'
type: str
version_added: '2.10'
properties:
description:
- Specify the properties to retrieve.
- If not specified, all properties are retrieved (deeply).
- Results are returned in a structure identical to the vsphere API.
- 'Example:'
- ' properties: ['
- ' "hardware.memorySize",'
- ' "hardware.cpuInfo.numCpuCores",'
- ' "config.product.apiVersion",'
- ' "overallStatus"'
- ' ]'
- Only valid when C(schema) is C(vsphere).
type: list
required: False
version_added: '2.10'
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- name: Gather vmware host facts
vmware_host_facts:
hostname: "{{ esxi_server }}"
username: "{{ esxi_username }}"
password: "{{ esxi_password }}"
register: host_facts
delegate_to: localhost
- name: Gather vmware host facts from vCenter
vmware_host_facts:
hostname: "{{ vcenter_server }}"
username: "{{ vcenter_user }}"
password: "{{ vcenter_pass }}"
esxi_hostname: "{{ esxi_hostname }}"
register: host_facts
delegate_to: localhost
- name: Gather vmware host facts from vCenter with tag information
vmware_host_facts:
hostname: "{{ vcenter_server }}"
username: "{{ vcenter_user }}"
password: "{{ vcenter_pass }}"
esxi_hostname: "{{ esxi_hostname }}"
show_tag: True
register: host_facts_tag
delegate_to: localhost
- name: Get VSAN Cluster UUID from host facts
vmware_host_facts:
hostname: "{{ esxi_server }}"
username: "{{ esxi_username }}"
password: "{{ esxi_password }}"
register: host_facts
- set_fact:
cluster_uuid: "{{ host_facts['ansible_facts']['vsan_cluster_uuid'] }}"
- name: Gather some info from a host using the vSphere API output schema
vmware_host_facts:
hostname: "{{ vcenter_server }}"
username: "{{ esxi_username }}"
password: "{{ esxi_password }}"
esxi_hostname: "{{ esxi_hostname }}"
schema: vsphere
properties:
- hardware.memorySize
- hardware.cpuInfo.numCpuCores
- config.product.apiVersion
- overallStatus
register: host_facts
'''
RETURN = r'''
ansible_facts:
description: system info about the host machine
returned: always
type: dict
sample:
{
"ansible_all_ipv4_addresses": [
"10.76.33.200"
],
"ansible_bios_date": "2011-01-01T00:00:00+00:00",
"ansible_bios_version": "0.5.1",
"ansible_datastore": [
{
"free": "11.63 GB",
"name": "datastore1",
"total": "12.50 GB"
}
],
"ansible_distribution": "VMware ESXi",
"ansible_distribution_build": "4887370",
"ansible_distribution_version": "6.5.0",
"ansible_hostname": "10.76.33.100",
"ansible_in_maintenance_mode": true,
"ansible_interfaces": [
"vmk0"
],
"ansible_memfree_mb": 2702,
"ansible_memtotal_mb": 4095,
"ansible_os_type": "vmnix-x86",
"ansible_processor": "Intel Xeon E312xx (Sandy Bridge)",
"ansible_processor_cores": 2,
"ansible_processor_count": 2,
"ansible_processor_vcpus": 2,
"ansible_product_name": "KVM",
"ansible_product_serial": "NA",
"ansible_system_vendor": "Red Hat",
"ansible_uptime": 1791680,
"ansible_vmk0": {
"device": "vmk0",
"ipv4": {
"address": "10.76.33.100",
"netmask": "255.255.255.0"
},
"macaddress": "52:54:00:56:7d:59",
"mtu": 1500
},
"vsan_cluster_uuid": null,
"vsan_node_uuid": null,
"vsan_health": "unknown",
"tags": [
{
"category_id": "urn:vmomi:InventoryServiceCategory:8eb81431-b20d-49f5-af7b-126853aa1189:GLOBAL",
"category_name": "host_category_0001",
"description": "",
"id": "urn:vmomi:InventoryServiceTag:e9398232-46fd-461a-bf84-06128e182a4a:GLOBAL",
"name": "host_tag_0001"
}
],
}
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.text.formatters import bytes_to_human
from ansible.module_utils.vmware import PyVmomi, vmware_argument_spec, find_obj
try:
from pyVmomi import vim
except ImportError:
pass
from ansible.module_utils.vmware_rest_client import VmwareRestClient
class VMwareHostFactManager(PyVmomi):
    """Collects facts for a single ESXi host, reached directly or via vCenter."""

    def __init__(self, module):
        super(VMwareHostFactManager, self).__init__(module)
        esxi_host_name = self.params.get('esxi_hostname', None)
        if self.is_vcenter():
            # vCenter connection: an explicit ESXi host must be named and
            # must resolve to exactly one HostSystem.
            if esxi_host_name is None:
                self.module.fail_json(msg="Connected to a vCenter system without specifying esxi_hostname")
            self.host = self.get_all_host_objs(esxi_hostname=esxi_host_name)
            if len(self.host) > 1:
                self.module.fail_json(msg="esxi_hostname matched multiple hosts")
            self.host = self.host[0]
        else:
            # Direct ESXi connection: take the (single) HostSystem object.
            self.host = find_obj(self.content, [vim.HostSystem], None)

        if self.host is None:
            self.module.fail_json(msg="Failed to find host system.")

    def all_facts(self):
        """Gather the legacy 'summary' fact set and exit the module."""
        ansible_facts = {}
        ansible_facts.update(self.get_cpu_facts())
        ansible_facts.update(self.get_memory_facts())
        ansible_facts.update(self.get_datastore_facts())
        ansible_facts.update(self.get_network_facts())
        ansible_facts.update(self.get_system_facts())
        ansible_facts.update(self.get_vsan_facts())
        ansible_facts.update(self.get_cluster_facts())
        if self.params.get('show_tag'):
            # Tag data comes from the vSphere REST API, not pyVmomi.
            vmware_client = VmwareRestClient(self.module)
            tag_info = {
                'tags': vmware_client.get_tags_for_hostsystem(hostsystem_mid=self.host._moId)
            }
            ansible_facts.update(tag_info)

        self.module.exit_json(changed=False, ansible_facts=ansible_facts)

    def get_cluster_facts(self):
        """Return the enclosing cluster name, or None for standalone hosts."""
        cluster_facts = {'cluster': None}
        if self.host.parent and isinstance(self.host.parent, vim.ClusterComputeResource):
            cluster_facts.update(cluster=self.host.parent.name)
        return cluster_facts

    def get_vsan_facts(self):
        """Return VSAN cluster/node UUIDs and health (all-unknown if no VSAN)."""
        config_mgr = self.host.configManager.vsanSystem
        if config_mgr is None:
            return {
                'vsan_cluster_uuid': None,
                'vsan_node_uuid': None,
                'vsan_health': "unknown",
            }

        status = config_mgr.QueryHostStatus()
        return {
            'vsan_cluster_uuid': status.uuid,
            'vsan_node_uuid': status.nodeUuid,
            'vsan_health': status.health,
        }

    def get_cpu_facts(self):
        """Return CPU model and package/core/thread counts."""
        return {
            'ansible_processor': self.host.summary.hardware.cpuModel,
            'ansible_processor_cores': self.host.summary.hardware.numCpuCores,
            'ansible_processor_count': self.host.summary.hardware.numCpuPkgs,
            'ansible_processor_vcpus': self.host.summary.hardware.numCpuThreads,
        }

    def get_memory_facts(self):
        """Return total and free memory in MB (free = total - overall usage)."""
        return {
            'ansible_memfree_mb': self.host.hardware.memorySize // 1024 // 1024 - self.host.summary.quickStats.overallMemoryUsage,
            'ansible_memtotal_mb': self.host.hardware.memorySize // 1024 // 1024,
        }

    def get_datastore_facts(self):
        """Return one name/total/free entry per datastore mounted on the host."""
        facts = dict()
        facts['ansible_datastore'] = []
        for store in self.host.datastore:
            _tmp = {
                'name': store.summary.name,
                'total': bytes_to_human(store.summary.capacity),
                'free': bytes_to_human(store.summary.freeSpace),
            }
            facts['ansible_datastore'].append(_tmp)
        return facts

    def get_network_facts(self):
        """Return per-vmkernel-NIC facts plus interface/address summaries."""
        facts = dict()
        facts['ansible_interfaces'] = []
        facts['ansible_all_ipv4_addresses'] = []
        for nic in self.host.config.network.vnic:
            device = nic.device
            facts['ansible_interfaces'].append(device)
            facts['ansible_all_ipv4_addresses'].append(nic.spec.ip.ipAddress)
            _tmp = {
                'device': device,
                'ipv4': {
                    'address': nic.spec.ip.ipAddress,
                    'netmask': nic.spec.ip.subnetMask,
                },
                'macaddress': nic.spec.mac,
                'mtu': nic.spec.mtu,
            }
            facts['ansible_' + device] = _tmp
        return facts

    def get_system_facts(self):
        """Return OS/product, vendor, BIOS, uptime and maintenance-mode facts."""
        # The serial number (if any) hides in the identifying-info list.
        sn = 'NA'
        for info in self.host.hardware.systemInfo.otherIdentifyingInfo:
            if info.identifierType.key == 'ServiceTag':
                sn = info.identifierValue
        facts = {
            'ansible_distribution': self.host.config.product.name,
            'ansible_distribution_version': self.host.config.product.version,
            'ansible_distribution_build': self.host.config.product.build,
            'ansible_os_type': self.host.config.product.osType,
            'ansible_system_vendor': self.host.hardware.systemInfo.vendor,
            'ansible_hostname': self.host.summary.config.name,
            'ansible_product_name': self.host.hardware.systemInfo.model,
            'ansible_product_serial': sn,
            'ansible_bios_date': self.host.hardware.biosInfo.releaseDate,
            'ansible_bios_version': self.host.hardware.biosInfo.biosVersion,
            'ansible_uptime': self.host.summary.quickStats.uptime,
            'ansible_in_maintenance_mode': self.host.runtime.inMaintenanceMode,
        }
        return facts

    def properties_facts(self):
        """Gather the 'vsphere' schema (raw API properties) and exit."""
        ansible_facts = self.to_json(self.host, self.params.get('properties'))
        if self.params.get('show_tag'):
            vmware_client = VmwareRestClient(self.module)
            tag_info = {
                'tags': vmware_client.get_tags_for_hostsystem(hostsystem_mid=self.host._moId)
            }
            ansible_facts.update(tag_info)

        self.module.exit_json(changed=False, ansible_facts=ansible_facts)
def main():
    """Module entry point: build the argument spec, connect, emit facts."""
    argument_spec = vmware_argument_spec()
    argument_spec.update(
        esxi_hostname=dict(type='str', required=False),
        show_tag=dict(type='bool', default=False),
        schema=dict(type='str', choices=['summary', 'vsphere'], default='summary'),
        properties=dict(type='list')
    )

    # Read-only module, so check mode is trivially supported.
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)

    vm_host_manager = VMwareHostFactManager(module)

    # 'summary' keeps the legacy fact layout; 'vsphere' mirrors the API.
    if module.params['schema'] == 'summary':
        vm_host_manager.all_facts()
    else:
        vm_host_manager.properties_facts()


if __name__ == '__main__':
    main()
| pdellaert/ansible | lib/ansible/modules/cloud/vmware/vmware_host_facts.py | Python | gpl-3.0 | 12,620 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration adding the boolean ``started`` column to ``Executor``."""

    def forwards(self, orm):
        """Apply: create ``fastapp_executor.started`` (BooleanField, default False)."""
        # Adding field 'Executor.started'
        db.add_column(u'fastapp_executor', 'started',
                      self.gf('django.db.models.fields.BooleanField')(default=False),
                      keep_default=False)

    def backwards(self, orm):
        """Revert: drop the ``fastapp_executor.started`` column."""
        # Deleting field 'Executor.started'
        db.delete_column(u'fastapp_executor', 'started')

    # Frozen snapshot of the app's models as seen by South when this
    # migration was generated (auto-generated; not meant to be hand-edited).
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'fastapp.apy': {
            'Meta': {'object_name': 'Apy'},
            'base': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'apys'", 'null': 'True', 'to': u"orm['fastapp.Base']"}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'module': ('django.db.models.fields.CharField', [], {'default': "'def func(self):\\n pass'", 'max_length': '16384'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '64'})
        },
        u'fastapp.authprofile': {
            'Meta': {'object_name': 'AuthProfile'},
            'access_token': ('django.db.models.fields.CharField', [], {'max_length': '72'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'authprofile'", 'unique': 'True', 'to': u"orm['auth.User']"})
        },
        u'fastapp.base': {
            'Meta': {'object_name': 'Base'},
            'content': ('django.db.models.fields.CharField', [], {'default': '\'{% extends "fastapp/index.html" %}\\n{% block content %}\\n{% endblock %}\\n\'', 'max_length': '16384', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'default': '0', 'related_name': "'+'", 'blank': 'True', 'to': u"orm['auth.User']"}),
            'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
        },
        u'fastapp.counter': {
            'Meta': {'object_name': 'Counter'},
            'apy': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'counter'", 'unique': 'True', 'to': u"orm['fastapp.Apy']"}),
            'executed': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'failed': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        u'fastapp.executor': {
            'Meta': {'object_name': 'Executor'},
            'base': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'executor'", 'unique': 'True', 'to': u"orm['fastapp.Base']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'num_instances': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'password': ('django.db.models.fields.CharField', [], {'default': "u'hCgE8pQDAJ'", 'max_length': '20'}),
            'pid': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True'}),
            'started': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
        },
        u'fastapp.host': {
            'Meta': {'object_name': 'Host'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'fastapp.instance': {
            'Meta': {'object_name': 'Instance'},
            'executor': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'instances'", 'to': u"orm['fastapp.Executor']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_alive': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_beat': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
        },
        u'fastapp.process': {
            'Meta': {'object_name': 'Process'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
            'running': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'fastapp.setting': {
            'Meta': {'object_name': 'Setting'},
            'base': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'setting'", 'to': u"orm['fastapp.Base']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'key': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'value': ('django.db.models.fields.CharField', [], {'max_length': '8192'})
        },
        u'fastapp.thread': {
            'Meta': {'object_name': 'Thread'},
            'health': ('django.db.models.fields.CharField', [], {'default': "'SO'", 'max_length': '2'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'threads'", 'null': 'True', 'to': u"orm['fastapp.Process']"})
        }
    }
complete_apps = ['fastapp'] | sahlinet/fastapp | fastapp/south_migrations/0020_auto__add_field_executor_started.py | Python | mit | 9,248 |
# Add taintlib to PATH so it can be imported during runtime without any hassle
import sys; import os; sys.path.append(os.path.dirname(os.path.dirname((__file__))))
from taintlib import *
# This has no runtime impact, but allows autocomplete to work
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from ..taintlib import *
# Actual tests
class Context:
    """Context manager whose __enter__ yields a plain (untainted) string."""
    def __enter__(self):
        return ""
    def __exit__(self, exc_type, exc, tb):
        pass
def test_with():
    """Taints the context-manager object itself, then checks the value bound
    by `with ... as` is seen as tainted (the `# $` comment is the expected
    result annotation consumed by the surrounding test harness)."""
    ctx = Context()
    taint(ctx)
    with ctx as tainted:
        ensure_tainted(tainted) # $ tainted
class Context_taint:
    """Context manager whose __enter__ returns the tainted source string."""
    def __enter__(self):
        return TAINTED_STRING
    def __exit__(self, exc_type, exc, tb):
        pass
def test_with_taint():
    """Flow from a tainted __enter__ return value into the `with ... as`
    target; the annotation marks this flow as currently not detected."""
    ctx = Context_taint()
    with ctx as tainted:
        ensure_tainted(tainted) # $ MISSING: tainted
class Context_arg:
    """Context manager that stores its constructor argument and returns it
    from __enter__."""
    def __init__(self, arg):
        self.arg = arg
    def __enter__(self):
        return self.arg
    def __exit__(self, exc_type, exc, tb):
        pass
def test_with_arg():
    """Flow from a tainted constructor argument through __enter__ into the
    `with ... as` target; annotated as currently not detected."""
    ctx = Context_arg(TAINTED_STRING)
    with ctx as tainted:
        ensure_tainted(tainted) # $ MISSING: tainted
# Make tests runnable (executed at import time so the file can be run
# directly as a script).
test_with()
test_with_taint()
test_with_arg()
| github/codeql | python/ql/test/experimental/dataflow/tainttracking/defaultAdditionalTaintStep/test_with.py | Python | mit | 1,236 |
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Generates a test suite from NIST PKITS test descriptions.
The output is a set of Type Parameterized Tests which are included by
pkits_unittest.h. See pkits_unittest.h for information on using the tests.
GoogleTest has a limit of 50 tests per type parameterized testcase, so the tests
are split up by section number (this also makes it possible to easily skip
sections that pertain to non-implemented features).
Usage:
generate_tests.py <PKITS.pdf> <output.h>
'''
import os
import re
import subprocess
import sys
import tempfile
def sanitize_name(s):
  """Returns |s| with all spaces and hyphens removed.

  The original used the Python-2-only form str.translate(None, ' -');
  chained str.replace() is behaviorally identical and works on both
  Python 2 and Python 3.
  """
  return s.replace(' ', '').replace('-', '')
def finalize_test_case(test_case_name, sanitized_test_names, output):
  """Emits the WRAPPED_REGISTER_TYPED_TEST_SUITE_P(...) registration that
  lists every test name collected for |test_case_name|."""
  pieces = ['\nWRAPPED_REGISTER_TYPED_TEST_SUITE_P(%s' % test_case_name]
  pieces.extend(',\n    %s' % test_name for test_name in sanitized_test_names)
  pieces.append(');\n')
  output.write(''.join(pieces))
def bool_to_str(b):
  """Maps a Python truth value to the C++ boolean literal spelling."""
  if b:
    return "true"
  return "false"
def make_policies_string(policies):
  """Formats a list of policy names as one double-quoted, comma-joined
  C++ string literal."""
  return '"%s"' % ','.join(policies)
def output_test(test_case_name, test_number, raw_test_name, subpart_number,
                info, certs, crls, sanitized_test_names, output):
  '''Writes a test case to |output|, and appends the test name to
  |sanitized_test_names|.

  |info| is a TestInfo holding the test's inputs and expected outputs;
  |certs| and |crls| are lists of sanitized file basenames. The generated
  C++ only sets TestInfo properties that differ from a default-constructed
  TestInfo, keeping the output readable.'''
  # E.g. test number "4.1.1" with name "Valid Signatures Test1" becomes
  # "Section1ValidSignaturesTest1".
  sanitized_test_name = 'Section%s%s' % (test_number.split('.')[1],
                                         sanitize_name(raw_test_name))

  # Some PKITS tests have multiple subparts; give each its own test name.
  subpart_comment = ''
  if subpart_number is not None:
    sanitized_test_name += "Subpart%d" % (subpart_number)
    subpart_comment = ' (Subpart %d)' % (subpart_number)

  sanitized_test_names.append(sanitized_test_name)

  certs_formatted = ', '.join('"%s"' % n for n in certs)
  crls_formatted = ', '.join('"%s"' % n for n in crls)

  # The %(name)s placeholders below are filled from this function's local
  # variables via vars().
  output.write('''
// %(test_number)s %(raw_test_name)s%(subpart_comment)s
WRAPPED_TYPED_TEST_P(%(test_case_name)s, %(sanitized_test_name)s) {
  const char* const certs[] = {
    %(certs_formatted)s
  };
  const char* const crls[] = {
    %(crls_formatted)s
  };
''' % vars())

  # Baseline against which |info| is diffed: only non-default properties
  # are emitted as setter calls below.
  default_info = TestInfo(None)

  output.write('''PkitsTestInfo info;
  info.test_number = "%s";
  info.should_validate = %s;
''' % (test_number, bool_to_str(info.should_validate)))

  # Output any non-default inputs/outputs. Only properties that differ from
  # the defaults are written, so as to keep the generated file more readable.
  if info.initial_policy_set != default_info.initial_policy_set:
    output.write('''  info.SetInitialPolicySet(%s);
''' % make_policies_string(info.initial_policy_set))

  if info.initial_explicit_policy != default_info.initial_explicit_policy:
    output.write('''  info.SetInitialExplicitPolicy(%s);
''' % bool_to_str(info.initial_explicit_policy))

  if (info.initial_policy_mapping_inhibit !=
      default_info.initial_policy_mapping_inhibit):
    output.write('''  info.SetInitialPolicyMappingInhibit(%s);
''' % bool_to_str(info.initial_policy_mapping_inhibit))

  if (info.initial_inhibit_any_policy !=
      default_info.initial_inhibit_any_policy):
    output.write('''  info.SetInitialInhibitAnyPolicy(%s);
''' % bool_to_str(info.initial_inhibit_any_policy))

  if (info.user_constrained_policy_set !=
      default_info.user_constrained_policy_set):
    output.write('''  info.SetUserConstrainedPolicySet(%s);
''' % make_policies_string(info.user_constrained_policy_set))

  # NOTE(review): this last literal contains no %-placeholders, so the
  # trailing '% vars()' is a no-op kept for symmetry with the write above.
  output.write('''
  this->RunTest(certs, crls, info);
}
''' % vars())
# Regexes used to pick apart the text dump of PKITS.pdf.
# NOTE(review): the patterns are deliberately not raw strings; this script
# targets Python 2, where escapes such as \s and \d pass through string
# literals unchanged — confirm before porting to Python 3, which warns on
# invalid escape sequences.

# Matches a section header, ex: "4.1 Signature Verification"
SECTION_MATCHER = re.compile('^\s*(\d+\.\d+)\s+(.+)\s*$')
# Matches a test header, ex: "4.1.1 Valid Signatures Test1"
TEST_MATCHER = re.compile('^\s*(\d+\.\d+.\d+)\s+(.+)\s*$')

# Matches the various headers in a test specification.
EXPECTED_HEADER_MATCHER = re.compile('^\s*Expected Result:')
PROCEDURE_HEADER_MATCHER = re.compile('^\s*Procedure:')
PATH_HEADER_MATCHER = re.compile('^\s*Certification Path:')

# Matches the Procedure text if using default settings.
USING_DEFAULT_SETTINGS_MATCHER = re.compile(
    '^.*using the \s*default settings.*')

# Matches the description text if using custom settings.
CUSTOM_SETTINGS_MATCHER = re.compile(
    '.*this\s+test\s+be\s+validated\s+using\s+the\s+following\s+inputs:.*')

# Match an expected test result. Note that some results in the PDF have a typo
# "path not should validate" instead of "path should not validate".
TEST_RESULT_MATCHER = re.compile(
    '^.*path (should validate|should not validate|not should validate)')

# Matches a line in the certification path, ex:
#    "\u2022 Good CA Cert, Good CA CRL"
# (the pattern below is the UTF-8 byte sequence for the bullet character).
PATH_MATCHER = re.compile('^\s*\xe2\x80\xa2\s*(.+)\s*$')

# Matches a page number. These may appear in the middle of multi-line fields and
# thus need to be ignored.
PAGE_NUMBER_MATCHER = re.compile('^\s*\d+\s*$')
# Matches if an entry in a certification path refers to a CRL, ex:
# "onlySomeReasons CA2 CRL1".
CRL_MATCHER = re.compile('^.*CRL\d*$')
class TestSections(object):
  """Holds the raw text lines of each section of one PKITS test entry."""

  def __init__(self):
    # Each attribute starts as its own empty list (no sharing between
    # instances or attributes).
    for section_attr in ('description_lines', 'procedure_lines',
                         'expected_result_lines', 'cert_path_lines'):
      setattr(self, section_attr, [])
def _collect_until(lines, i, stop_matchers):
  """Collects lines[i:] into a list until a line matches any regex in
  |stop_matchers| (or input is exhausted). Returns (next_index, lines)."""
  collected = []
  while i < len(lines):
    if any(m.match(lines[i]) for m in stop_matchers):
      break
    collected.append(lines[i])
    i += 1
  return i, collected


def parse_main_test_sections(lines, i):
  """Parses one test entry starting at lines[i] (just after its title line)
  into a TestSections, consuming lines up to the next test or section
  header. Returns (next_index, TestSections).

  The four while-loops of the original were identical except for their stop
  condition, so they are factored into _collect_until().
  """
  result = TestSections()

  # Description: text after the test name, up until "Procedure:".
  i, result.description_lines = _collect_until(
      lines, i, [PROCEDURE_HEADER_MATCHER])

  # Procedure: from "Procedure:" up until "Expected Result:".
  i, result.procedure_lines = _collect_until(
      lines, i, [EXPECTED_HEADER_MATCHER])

  # Expected result: from "Expected Result:" up until "Certification Path:".
  i, result.expected_result_lines = _collect_until(
      lines, i, [PATH_HEADER_MATCHER])

  # Certification path: from "Certification Path:" up until the next test
  # title or section header.
  i, result.cert_path_lines = _collect_until(
      lines, i, [TEST_MATCHER, SECTION_MATCHER])

  return i, result
def parse_cert_path_lines(lines):
  """Parses the "Certification Path" section lines into sanitized object
  names, returning the pair (certs, crls). The first line (the section
  header itself) is skipped; wrapped lines are re-joined to the preceding
  bullet entry."""
  boilerplate = ("is composed of the following objects:",
                 "See the introduction to Section 4.4 for more information.")

  entries = []
  for raw_line in lines[1:]:
    stripped = raw_line.strip()

    # Skip boilerplate sentences, blank lines, and stray page numbers.
    if any(text in stripped for text in boilerplate):
      continue
    if not stripped or PAGE_NUMBER_MATCHER.match(stripped):
      continue

    bullet = PATH_MATCHER.match(stripped)
    if bullet:
      entries.append(bullet.group(1))
    else:
      # Continuation of the previous (wrapped) bullet entry.
      entries[-1] += ' ' + stripped

  certs = []
  crls = []
  for entry in entries:
    for piece in entry.split(','):
      name = sanitize_name(piece.strip())
      (crls if CRL_MATCHER.match(name) else certs).append(name)
  return certs, crls
# Short names for the certificate policies referenced throughout the PKITS
# test descriptions.
ANY_POLICY = 'anyPolicy'
TEST_POLICY_1 = 'NIST-test-policy-1'
TEST_POLICY_2 = 'NIST-test-policy-2'
TEST_POLICY_3 = 'NIST-test-policy-3'
TEST_POLICY_6 = 'NIST-test-policy-6'
# Note: This omits some outputs from PKITS:
#
#  * authorities-constrained-policy-set
#  * explicit-policy-indicator
class TestInfo(object):
  """This structure describes a test's inputs and expected outputs.

  |should_validate| is whether path validation is expected to succeed;
  the remaining attributes are the RFC 5280 path-validation inputs and
  the expected user-constrained-policy-set output.
  """

  def __init__(self, should_validate,
               # These defaults come from section 3 of PKITS.pdf.
               # None is used as a sentinel for the list-valued defaults so
               # that instances never share a mutable default-argument list.
               initial_policy_set=None,
               initial_explicit_policy=False,
               initial_policy_mapping_inhibit=False,
               initial_inhibit_any_policy=False,
               # In all of the tests that are not related to policy processing,
               # each certificate in the path asserts the certificate policy
               # 2.16.840.1.101.3.2.1.48.1
               user_constrained_policy_set=None):
    self.should_validate = should_validate
    self.initial_policy_set = (
        [ANY_POLICY] if initial_policy_set is None else initial_policy_set)
    self.initial_explicit_policy = initial_explicit_policy
    self.initial_policy_mapping_inhibit = initial_policy_mapping_inhibit
    self.initial_inhibit_any_policy = initial_inhibit_any_policy
    self.user_constrained_policy_set = (
        [TEST_POLICY_1] if user_constrained_policy_set is None
        else user_constrained_policy_set)
TEST_OVERRIDES = {
'4.8.1': [ # All Certificates Same Policy Test1
# 1. default settings, but with initial-explicit-policy set. The path
# should validate successfully
TestInfo(True, initial_explicit_policy=True,
user_constrained_policy_set=[TEST_POLICY_1]),
# 2. default settings, but with initial-explicit-policy set and
# initial-policy-set = {NIST-test-policy-1}. The path should validate
# successfully.
TestInfo(True, initial_explicit_policy=True,
initial_policy_set=[TEST_POLICY_1],
user_constrained_policy_set=[TEST_POLICY_1]),
# 3. default settings, but with initial-explicit-policy set and
# initial-policy-set = {NIST-test-policy-2}. The path should not validate
# successfully.
TestInfo(False, initial_explicit_policy=True,
initial_policy_set=[TEST_POLICY_2],
user_constrained_policy_set=[]),
# 4. default settings, but with initial-explicit-policy set and
# initial-policy-set = {NIST-test-policy-1, NIST-test-policy-2}. The path
# should validate successfully.
TestInfo(True, initial_explicit_policy=True,
initial_policy_set=[TEST_POLICY_1, TEST_POLICY_2],
user_constrained_policy_set=[TEST_POLICY_1]),
],
'4.8.2': [ # All Certificates No Policies Test2
# 1. default settings. The path should validate successfully.
TestInfo(True, user_constrained_policy_set=[]),
# 2. default settings, but with initial-explicit-policy set. The path
# should not validate successfully
TestInfo(False, initial_explicit_policy=True,
user_constrained_policy_set=[]),
],
'4.8.3': [ # Different Policies Test3
# 1. default settings. The path should validate successfully.
TestInfo(True, user_constrained_policy_set=[]),
# 2. default settings, but with initial-explicit-policy set. The path
# should not validate successfully.
TestInfo(False, initial_explicit_policy=True, user_constrained_policy_set=[]),
# 3. default settings, but with initial-explicit-policy set and
# initial-policy-set = {NIST-test-policy-1, NIST-test-policy-2}. The path
# should not validate successfully.
TestInfo(False, initial_explicit_policy=True,
initial_policy_set=[TEST_POLICY_1, TEST_POLICY_2],
user_constrained_policy_set=[]),
],
'4.8.4': [ # Different Policies Test4
# Procedure: Validate Different Policies Test4 EE using the default
# settings or open and verify Signed Test Message 6.2.2.69 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set and the
# user-constrained-policy-set will be empty. The explicit-policy-indicator
# will be set if the application can process the policyConstraints
# extension. If the application can process the policyConstraints extension
# then the path should not validate successfully. If the application can
# not process the policyConstraints extension, then the path should
# validate successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.8.5': [ # 4.8.5 Different Policies Test5
# Procedure: Validate Different Policies Test5 EE using the default
# settings or open and verify Signed Test Message 6.2.2.70 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set and the
# user-constrained-policy-set will be empty. The explicit-policy-indicator
# will be set if the application can process the policyConstraints
# extension. If the application can process the policyConstraints extension
# then the path should not validate successfully. If the application can
# not process the policyConstraints extension, then the path should
# validate successfully
TestInfo(False, user_constrained_policy_set=[]),
],
'4.8.6': [ # Overlapping Policies Test6
# 1. default settings. The path should validate successfully.
TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]),
# 2. default settings, but with initial-policy-set = {NIST-test-policy-1}.
# The path should validate successfully.
TestInfo(True, initial_policy_set=[TEST_POLICY_1],
user_constrained_policy_set=[TEST_POLICY_1]),
# 3. default settings, but with initial-policy-set = {NIST-test-policy-2}.
# The path should not validate successfully.
TestInfo(False, initial_policy_set=[TEST_POLICY_2],
user_constrained_policy_set=[]),
],
'4.8.7': [ # Different Policies Test7
# Procedure: Validate Different Policies Test7 EE using the default
# settings or open and verify Signed Test Message 6.2.2.72 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set and the
# user-constrained-policy-set will be empty. If the
# explicit-policy-indicator will be set if the application can process the
# policyConstraints extension. If the application can process the
# policyConstraints extension, then the path should not validate
# successfully. If the application can not process the policyConstraints
# extension, then the path should validate successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.8.8': [ # Different Policies Test8
# Procedure: Validate Different Policies Test8 EE using the default
# settings or open and verify Signed Test Message 6.2.2.73 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set and the
# user-constrained-policy-set will be empty. The explicit-policy-indicator
# will be set if the application can process the policyConstraints
# extension. If the application can process the policyConstraints extension
# then the path should not validate successfully. If the application can
# not process the policyConstraints extension, then the path should
# validate successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.8.9': [ # Different Policies Test9
# Procedure: Validate Different Policies Test9 EE using the default
# settings or open and verify Signed Test Message 6.2.2.74 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set and the
# user-constrained-policy-set will be empty. The explicit-policy-indicator
# will be set if the application can process the policyConstraints
# extension. If the application can process the policyConstraints
# extension, then the path should not validate successfully. If the
# application can not process the policyConstraints extension, then the
# path should validate successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.8.10': [ # All Certificates Same Policies Test10
# 1. default settings. The path should validate successfully.
TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1, TEST_POLICY_2]),
# 2. default settings, but with initial-policy-set = {NIST-test-policy-1}.
# The path should validate successfully.
TestInfo(True, initial_policy_set=[TEST_POLICY_1],
user_constrained_policy_set=[TEST_POLICY_1]),
# 3. default settings, but with initial-policy-set = {NIST-test-policy-2}.
# The path should validate successfully.
TestInfo(True, initial_policy_set=[TEST_POLICY_2],
user_constrained_policy_set=[TEST_POLICY_2]),
],
'4.8.11': [ # All Certificates AnyPolicy Test11
# 1. default settings. The path should validate successfully.
TestInfo(True, user_constrained_policy_set=[ANY_POLICY]),
# 2. default settings, but with initial-policy-set = {NIST-test-policy-1}.
# The path should validate successfully.
TestInfo(True, initial_policy_set=[TEST_POLICY_1],
user_constrained_policy_set=[TEST_POLICY_1]),
],
'4.8.12': [ # Different Policies Test12
# Procedure: Validate Different Policies Test12 EE using the default
# settings or open and verify Signed Test Message 6.2.2.77 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set and the
# user-constrained-policy-set will be empty. The explicit-policy-indicator
# will be set if the application can process the policyConstraints
# extension. If the application can process the policyConstraints
# extension, then the path should not validate successfully. If the
# application can not process the policyConstraints extension, then the
# path should validate successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.8.13': [ # All Certificates Same Policies Test13
# 1. default settings, but with initial-policy-set = {NIST-test-policy-1}.
# The path should validate successfully.
TestInfo(True, initial_policy_set=[TEST_POLICY_1],
user_constrained_policy_set=[TEST_POLICY_1]),
# 2. default settings, but with initial-policy-set = {NIST-test-policy-2}.
# The path should validate successfully.
TestInfo(True, initial_policy_set=[TEST_POLICY_2],
user_constrained_policy_set=[TEST_POLICY_2]),
# 3. default settings, but with initial-policy-set = {NIST-test-policy-3}.
# The path should validate successfully.
TestInfo(True, initial_policy_set=[TEST_POLICY_3],
user_constrained_policy_set=[TEST_POLICY_3]),
],
'4.8.14': [ # AnyPolicy Test14
# 1. default settings, but with initial-policy-set = {NIST-test-policy-1}.
# The path should validate successfully.
TestInfo(True, initial_policy_set=[TEST_POLICY_1],
user_constrained_policy_set=[TEST_POLICY_1]),
# 2. default settings, but with initial-policy-set = {NIST-test-policy-2}.
# The path should not validate successfully.
TestInfo(False, initial_policy_set=[TEST_POLICY_2],
user_constrained_policy_set=[]),
],
'4.8.15': [ # User Notice Qualifier Test15
# Procedure: Validate User Notice Qualifier Test15 EE using the default
# settings or open and verify Signed Test Message 6.2.2.80 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set will be
# {NIST-test-policy-1} and the explicit-policy-indicator will be the same
# as the initial-explicit-policy indicator. If the initial-policy-set is
# any-policy or otherwise includes NIST-test-policy-1, then the
# user-constrained-policy-set will be {NIST-test-policy-1}. If not, the
# user-constrained-policy-set will be empty. If the initial-explicit-policy
# indicator is set and the initial-policy-set does not include
# NIST-test-policy-1, then the path should be rejected, otherwise it should
# validate successfully. If the path validates successfully, then the
# application should display the user notice.
TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]),
],
'4.8.16': [ # User Notice Qualifier Test16
# Procedure: Validate User Notice Qualifier Test16 EE using the default
# settings or open and verify Signed Test Message 6.2.2.81 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set will be
# {NIST-test-policy-1} and the explicit-policy-indicator will be the same
# as the initial-explicit-policy indicator. If the initial-policy-set is
# any-policy or otherwise includes NIST-test-policy-1, then the
# user-constrained-policy-set will be {NIST-test-policy-1}. If not, the
# user-constrained-policy-set will be empty. If the initial-explicit-policy
# indicator is set and the initial-policy-set does not include
# NIST-test-policy-1, then the path should be rejected, otherwise it should
# validate successfully. If the path validates successfully, then the
# application should display the user notice associated with
# NIST-test-policy-1. The user notice associated with NIST-test-policy-2
# should not be displayed.
TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]),
],
'4.8.17': [ # User Notice Qualifier Test17
# Procedure: Validate User Notice Qualifier Test17 EE using the default
# settings or open and verify Signed Test Message 6.2.2.82 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set will be
# {NIST-test-policy-1} and the explicit-policy-indicator will be the same
# as the initial-explicit-policy indicator. If the initial-policy-set is
# any-policy or otherwise includes NIST-test-policy-1, then the
# user-constrained-policy-set will be {NIST-test-policy-1}. If not, the
# user-constrained-policy-set will be empty. If the initial-explicit-policy
# indicator is set and the initial-policy-set does not include
# NIST-test-policy-1, then the path should be rejected, otherwise it should
# validate successfully. If the path validates successfully, then the
# application should display the user notice associated with anyPolicy.
TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]),
],
'4.8.18': [ # User Notice Qualifier Test18
# 1. default settings, but with initial-policy-set = {NIST-test-policy-1}.
# The path should validate successfully and the qualifier associated with
# NIST-test-policy-1 in the end entity certificate should be displayed.
TestInfo(True, initial_policy_set=[TEST_POLICY_1],
user_constrained_policy_set=[TEST_POLICY_1]),
# 2. default settings, but with initial-policy-set = {NIST-test-policy-2}.
# The path should validate successfully and the qualifier associated with
# anyPolicy in the end entity certificate should be displayed.
TestInfo(True, initial_policy_set=[TEST_POLICY_2],
user_constrained_policy_set=[TEST_POLICY_2]),
],
'4.8.19': [ # User Notice Qualifier Test19
# Procedure: Validate User Notice Qualifier Test19 EE using the default
# settings or open and verify Signed Test Message 6.2.2.84 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set will be
# {NIST-test-policy-1} and the explicit-policy-indicator will be the same
# as the initial-explicit-policy indicator. If the initial-policy-set is
# any-policy or otherwise includes NIST-test-policy-1, then the
# user-constrained-policy-set will be {NIST-test-policy-1}. If not, the
# user-constrained-policy-set will be empty. If the initial-explicit-policy
# indicator is set and the initial-policy-set does not include
# NIST-test-policy-1, then the path should be rejected, otherwise it should
# validate successfully. Since the explicitText exceeds the maximum size
# of 200 characters, the application may choose to reject the certificate.
# If the application accepts the certificate, display of the user notice is
# optional.
TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]),
],
'4.8.20': [ # CPS Pointer Qualifier Test20
# Procedure: Validate CPS Pointer Qualifier Test20 EE using the default
# settings or open and verify Signed Test Message 6.2.2.85 using the
# default settings. (If possible, it is recommended that this test be run
# with the initial-explicit-policy indicator set. If this can not be done,
# manually check that the authorities-constrained-policy-set and
# user-constrained-policy-set are correct.)
#
# Expected Result: The authorities-constrained-policy-set will be
# {NIST-test-policy-1} and the explicit-policy-indicator will be the same
# as the initial-explicit-policy indicator. If the initial-policy-set is
# any-policy or otherwise includes NIST-test-policy-1, then the
# user-constrained-policy-set will be {NIST-test-policy-1}. If not, the
# user-constrained-policy-set will be empty. If the initial-explicit-policy
# indicator is set and the initial-policy-set does not include
# NIST-test-policy-1, then the path should be rejected, otherwise it should
# validate successfully. The CPS pointer in the qualifier should be
# associated with NIST-testpolicy-1 in the
# authorities-constrained-policy-set (and in the user-constrained-policy-set
# if NIST-test-policy-1 is in that set). There are no processing
# requirements associated with the CPS pointer qualifier.
TestInfo(True, initial_explicit_policy=True,
initial_policy_set=[TEST_POLICY_1],
user_constrained_policy_set=[TEST_POLICY_1]),
],
'4.9.1': [ # Valid RequireExplicitPolicy Test1
# Procedure: Validate Valid requireExplicitPolicy Test1 EE using the
# default settings or open and verify Signed Test Message 6.2.2.86 using
# the default settings.
#
# Expected Result: The path should validate successfully since the
# explicit-policy-indicator is not set.
TestInfo(True, user_constrained_policy_set=[]),
],
'4.9.2': [ # Valid RequireExplicitPolicy Test2
# Procedure: Validate Valid requireExplicitPolicy Test2 EE using the
# default settings or open and verify Signed Test Message 6.2.2.87 using
# the default settings.
#
# Expected Result: The path should validate successfully since the
# explicit-policy-indicator is not set
TestInfo(True, user_constrained_policy_set=[]),
],
'4.9.6': [ # Valid Self-Issued requireExplicitPolicy Test6
# Procedure: Validate Valid Self-Issued requireExplicitPolicy Test6 EE using
# the default settings or open and verify Signed Test Message 6.2.2.91 using
# the default settings.
#
# Expected Result: The path should validate successfully since the
# explicit-policy-indicator is not set.
TestInfo(True, user_constrained_policy_set=[]),
],
'4.10.1': [ # Valid Policy Mapping Test1
# 1. default settings, but with initial-policy-set = {NIST-test-policy-1}.
# The path should validate successfully.
TestInfo(True, initial_policy_set=[TEST_POLICY_1],
user_constrained_policy_set=[TEST_POLICY_1]),
# 2. default settings, but with initial-policy-set = {NIST-test-policy-2}.
# The path should not validate successfully.
TestInfo(False, initial_policy_set=[TEST_POLICY_2],
user_constrained_policy_set=[]),
# 3. default settings, but with initial-policy-mapping-inhibit set. The
# path should not validate successfully.
TestInfo(False, initial_policy_mapping_inhibit=True,
user_constrained_policy_set=[]),
],
'4.10.2': [ # Invalid Policy Mapping Test2
# 1. default settings. The path should not validate successfully.
TestInfo(False, user_constrained_policy_set=[]),
# 2. default settings, but with initial-policy-mapping-inhibit set. The
# path should not validate successfully.
TestInfo(False, initial_policy_mapping_inhibit=True,
user_constrained_policy_set=[]),
],
'4.10.3': [ # Valid Policy Mapping Test3
# 1. default settings, but with initial-policy-set = {NIST-test-policy-1}.
# The path should not validate successfully.
TestInfo(False, initial_policy_set=[TEST_POLICY_1],
user_constrained_policy_set=[]),
# 2. default settings, but with initial-policy-set = {NIST-test-policy-2}.
# The path should validate successfully.
TestInfo(True, initial_policy_set=[TEST_POLICY_2],
user_constrained_policy_set=[TEST_POLICY_2]),
],
'4.10.4': [ # Invalid Policy Mapping Test4
# Procedure: Validate Invalid Policy Mapping Test4 EE using the default
# settings or open and verify Signed Test Message 6.2.2.97 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set and the
# user-constrained-policy-set will be empty and the
# explicit-policy-indicator will be set (if the application can process the
# policyConstraints extension). If the application can process the
# policyConstraints extension, then the path should be rejected, otherwise
# it should validate successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.10.5': [ # Valid Policy Mapping Test5
# 1. default settings, but with initial-policy-set = {NIST-test-policy-1}.
# The path should validate successfully.
TestInfo(True, initial_policy_set=[TEST_POLICY_1],
user_constrained_policy_set=[TEST_POLICY_1]),
# 2. default settings, but with initial-policy-set = {NIST-test-policy-6}.
# The path should not validate successfully.
TestInfo(False, initial_policy_set=[TEST_POLICY_6],
user_constrained_policy_set=[]),
],
'4.10.6': [ # Valid Policy Mapping Test6
# 1. default settings, but with initial-policy-set = {NIST-test-policy-1}.
# The path should validate successfully.
TestInfo(True, initial_policy_set=[TEST_POLICY_1],
user_constrained_policy_set=[TEST_POLICY_1]),
# 2. default settings, but with initial-policy-set = {NIST-test-policy-6}.
# The path should not validate successfully.
TestInfo(False, initial_policy_set=[TEST_POLICY_6],
user_constrained_policy_set=[]),
],
'4.10.7': [ # Invalid Mapping From anyPolicy Test7
# Procedure: Validate Invalid Mapping From anyPolicy Test7 EE using the
# default settings or open and verify Signed Test Message 6.2.2.100 using
# the default settings.
#
# Expected Result: The path should not validate successfully since the
# intermediate certificate includes a policy mapping extension in which
# anyPolicy appears as an issuerDomainPolicy.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.10.8': [ # Invalid Mapping To anyPolicy Test8
# Procedure: Validate Invalid Mapping To anyPolicy Test8 EE using the
# default settings or open and verify Signed Test Message 6.2.2.101 using
# the default settings.
#
# Expected Result: The path should not validate successfully since the
# intermediate certificate includes a policy mapping extension in which
# anyPolicy appears as an subjectDomainPolicy.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.10.9': [ # Valid Policy Mapping Test9
# Procedure: Validate Valid Policy Mapping Test9 EE using the default
# settings or open and verify Signed Test Message 6.2.2.102 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set will be
# {NIST-test-policy-1} and the explicit-policy-indicator will be set (if
# the application can process the policyConstraints extension). If the
# initial-policy-set is any-policy or otherwise includes
# NIST-test-policy-1, then the user-constrained-policy-set will be
# {NIST-test-policy-1}. If not, the user-constrained-policy-set will be
# empty. If the initial-policy-set does not include NIST-test-policy-1 (and
# the application can process the policyConstraints extension), then the
# path should be rejected, otherwise it should validate successfully.
TestInfo(True),
],
'4.10.10': [ # Invalid Policy Mapping Test10
# Procedure: Validate Invalid Policy Mapping Test10 EE using the default
# settings or open and verify Signed Test Message 6.2.2.103 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set and the
# user-constrained-policy-set will be empty and the
# explicit-policy-indicator will be set (if the application can process the
# policyConstraints extension). If the application can process the
# policyConstraints extension, then the path should be rejected, otherwise
# it should validate successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.10.11': [ # Valid Policy Mapping Test11
# Procedure: Validate Valid Policy Mapping Test11 EE using the default
# settings or open and verify Signed Test Message 6.2.2.104 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set will be
# {NIST-test-policy-1} and the explicit-policy-indicator will be set (if
# the application can process the policyConstraints extension). If the
# initial-policy-set is any-policy or otherwise includes
# NIST-test-policy-1, then the user-constrained-policy-set will be
# {NIST-test-policy-1}. If not, the user-constrained-policy-set will be
# empty. If the initial-policy-set does not include NIST-test-policy-1 (and
# the application can process the policyConstraints extension), then the
# path should be rejected, otherwise it should validate successfully.
TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]),
],
'4.10.12': [ # Valid Policy Mapping Test12
# 1. default settings, but with initial-policy-set = {NIST-test-policy-1}.
# The path should validate successfully and the application should display
# the user notice associated with NIST-test-policy-3 in the end entity
# certificate.
TestInfo(True, initial_policy_set=[TEST_POLICY_1],
user_constrained_policy_set=[TEST_POLICY_1]),
# 2. default settings, but with initial-policy-set = {NIST-test-policy-2}.
# The path should validate successfully and the application should display
# the user notice associated with anyPolicy in the end entity certificate.
TestInfo(True, initial_policy_set=[TEST_POLICY_2],
user_constrained_policy_set=[TEST_POLICY_2]),
],
'4.10.13': [ # Valid Policy Mapping Test13
# Procedure: Validate Valid Policy Mapping Test13 EE using the default
# settings or open and verify Signed Test Message 6.2.2.106 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set will be
# {NIST-test-policy-1} and the explicit-policy-indicator will be set (if
# the application can process the policyConstraints extension). If the
# initial-policy-set is any-policy or otherwise includes
# NIST-test-policy-1, then the user-constrained-policy-set will be
# {NIST-test-policy-1}. If not, the user-constrained-policy-set will be
# empty. If the initial-policy-set does not include NIST-test-policy-1 (and
# the application can process the policyConstraints extension), then the
# path should be rejected, otherwise it should validate successfully. If
# the path is accepted, the application should display the user notice
# associated with NIST-testpolicy-1 in the intermediate certificate.
TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]),
],
'4.10.14': [ # Valid Policy Mapping Test14
# Procedure: Validate Valid Policy Mapping Test14 EE using the default
# settings or open and verify Signed Test Message 6.2.2.107 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set will be
# {NIST-test-policy-1} and the explicit-policy-indicator will be set (if
# the application can process the policyConstraints extension). If the
# initial-policy-set is any-policy or otherwise includes
# NIST-test-policy-1, then the user-constrained-policy-set will be
# {NIST-test-policy-1}. If not, the user-constrained-policy-set will be
# empty. If the initial-policy-set does not include NIST-test-policy-1 (and
# the application can process the policyConstraints extension), then the
# path should be rejected, otherwise it should validate successfully. If
# the path is accepted, the application should display the user notice
# associated with anyPolicy in the intermediate certificate
TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]),
],
'4.11.1': [ # Invalid inhibitPolicyMapping Test1
# Procedure: Validate Invalid inhibitPolicyMapping Test1 EE using the
# default settings or open and verify Signed Test Message 6.2.2.108 using
# the default settings.
#
# Expected Result: The authorities-constrained-policy-set and the
# user-constrained-policy-set will be empty. The explicit-policy-indicator
# will be set. The path should not validate successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.11.2': [ # Valid inhibitPolicyMapping Test2
# Procedure: Validate Valid inhibitPolicyMapping Test2 EE using the default
# settings or open and verify Signed Test Message 6.2.2.109 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set will be
# {NIST-test-policy-1} and the explicit-policy-indicator will be set. If
# the initial-policy-set is any-policy or otherwise includes
# NIST-test-policy-1, then the path should validate successfully.
TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]),
],
'4.11.3': [ # Invalid inhibitPolicyMapping Test3
# Procedure: Validate Invalid inhibitPolicyMapping Test3 EE using the
# default settings or open and verify Signed Test Message 6.2.2.110 using
# the default settings.
#
# Expected Result: The authorities-constrained-policy-set and the
# user-constrained-policy-set will be empty and the
# explicit-policy-indicator will be set. The path should not validate
# successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.11.4': [ # Valid inhibitPolicyMapping Test4
# Procedure: Validate Valid inhibitPolicyMapping Test4 EE using the default
# settings or open and verify Signed Test Message 6.2.2.111 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set will be
# {NIST-test-policy-2} and the explicit-policy-indicator will be set. If
# the initial-policy-set is any-policy or otherwise includes
# NIST-test-policy-2, then the path should validate successfully.
TestInfo(True, user_constrained_policy_set=[TEST_POLICY_2]),
],
'4.11.5': [ # Invalid inhibitPolicyMapping Test5
# Procedure: Validate Invalid inhibitPolicyMapping Test5 EE using the
# default settings or open and verify Signed Test Message 6.2.2.112 using
# the default settings.
#
# Expected Result: The authorities-constrained-policy-set and the
# user-constrained-policy-set will be empty and the
# explicit-policy-indicator will be set. The path should not validate
# successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.11.6': [ # Invalid inhibitPolicyMapping Test6
# Procedure: Validate Invalid inhibitPolicyMapping Test6 EE using the
# default settings or open and verify Signed Test Message 6.2.2.113 using
# the default settings.
#
# Expected Result: The authorities-constrained-policy-set and the
# user-constrained-policy-set will be empty and the
# explicit-policy-indicator will be set. The path should not validate
# successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.11.7': [ # Valid Self-Issued inhibitPolicyMapping Test7
# Procedure: Validate Valid Self-Issued inhibitPolicyMapping Test7 EE using
# the default settings or open and verify Signed Test Message 6.2.2.114
# using the default settings.
#
# Expected Result: The authorities-constrained-policy-set will be
# {NIST-test-policy-1} and the explicit-policy-indicator will be set. If
# the initial-policy-set is any-policy or otherwise includes
# NIST-test-policy-1, then the path should validate successfully.
TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]),
],
'4.11.8': [ # Invalid Self-Issued inhibitPolicyMapping Test8
# Procedure: Validate Invalid Self-Issued inhibitPolicyMapping Test8 EE
# using the default settings or open and verify Signed Test Message
# 6.2.2.115 using the default settings.
#
# Expected Result: The authorities-constrained-policy-set and
# user-constrained-policy-set will be empty and the
# explicit-policy-indicator will be set. The path should not validate
# successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.11.9': [ # Invalid Self-Issued inhibitPolicyMapping Test9
# Procedure: Validate Invalid Self-Issued inhibitPolicyMapping Test9 EE
# using the default settings or open and verify Signed Test Message
# 6.2.2.116 using the default settings.
#
# Expected Result: The authorities-constrained-policy-set and
# user-constrained-policy-set will be empty and the
# explicit-policy-indicator will be set. The path should not validate
# successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.11.10': [ # Invalid Self-Issued inhibitPolicyMapping Test10
# Procedure: Validate Invalid Self-Issued inhibitPolicyMapping Test10 EE
# using the default settings or open and verify Signed Test Message
# 6.2.2.117 using the default settings.
#
# Expected Result: The authorities-constrained-policy-set and
# user-constrained-policy-set will be empty and the
# explicit-policy-indicator will be set. The path should not validate
# successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.11.11': [ # Invalid Self-Issued inhibitPolicyMapping Test11
# Procedure: Validate Invalid Self-Issued inhibitPolicyMapping Test11 EE
# using the default settings or open and verify Signed Test Message
# 6.2.2.118 using the default settings.
#
# Expected Result: The authorities-constrained-policy-set and
# user-constrained-policy-set will be empty and the
# explicit-policy-indicator will be set. The path should not validate
# successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.12.1': [ # Invalid inhibitAnyPolicy Test1
# Procedure: Validate Invalid inhibitAnyPolicy Test1 EE using the default
# settings or open and verify Signed Test Message 6.2.2.119 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set and
# user-constrained-policy-set will be empty and the
# explicit-policy-indicator will be set (if the application can process the
# policyConstraints extension). If the application can process the
# policyConstraints extension, then the path should not validate
# successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.12.2': [ # Valid inhibitAnyPolicy Test2
# Procedure: Validate Valid inhibitAnyPolicy Test2 EE using the default
# settings or open and verify Signed Test Message 6.2.2.120 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set will be
# {NIST-test-policy-1} and the explicit-policy-indicator will be set (if
# the application can process the policyConstraints extension). If the
# initial-policy-set is any-policy or otherwise includes
# NIST-test-policy-1, then the user-constrained-policy-set will be
# {NIST-test-policy-1} and the path should validate successfully. If not,
# then the user-constrained-policy-set will be empty. If the
# user-constrained-policy-set is empty and the application can process the
# policyConstraints extension, then the path should not validate
# successfully.
TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]),
],
'4.12.3': [ # inhibitAnyPolicy Test3
# 1. default settings. The path should validate successfully.
TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]),
# 2. default settings, but with initial-inhibit-any-policy set. The path
# should not validate successfully.
TestInfo(False, initial_inhibit_any_policy=True,
user_constrained_policy_set=[]),
],
'4.12.4': [ # Invalid inhibitAnyPolicy Test4
# Procedure: Validate Invalid inhibitAnyPolicy Test4 EE using the default
# settings or open and verify Signed Test Message 6.2.2.122 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set and
# user-constrained-policy-set will be empty and the
# explicit-policy-indicator will be set (if the application can process the
# policyConstraints extension). If the application can process the
# policyConstraints extension, then the path should not validate
# successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.12.5': [ # Invalid inhibitAnyPolicy Test5
# Procedure: Validate Invalid inhibitAnyPolicy Test5 EE using the default
# settings or open and verify Signed Test Message 6.2.2.123 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set and
# user-constrained-policy-set will be empty and the
# explicit-policy-indicator will be set (if the application can process the
# policyConstraints extension). If the application can process the
# policyConstraints extension, then the path should not validate
# successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.12.6': [ # Invalid inhibitAnyPolicy Test6
# Procedure: Validate Invalid inhibitAnyPolicy Test6 EE using the default
# settings or open and verify Signed Test Message 6.2.2.124 using the
# default settings.
#
# Expected Result: The authorities-constrained-policy-set and
# user-constrained-policy-set will be empty and the
# explicit-policy-indicator will be set (if the application can process the
# policyConstraints extension). If the application can process the
# policyConstraints extension, then the path should not validate
# successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.12.7': [ # Valid Self-Issued inhibitAnyPolicy Test7
# Procedure: Validate Valid Self-Issued inhibitAnyPolicy Test7 EE using the
# default settings or open and verify Signed Test Message 6.2.2.125 using
# the default settings.
#
# Expected Result: The authorities-constrained-policy-set will be
# {NIST-test-policy-1} and the explicit-policy-indicator will be set (if
# the application can process the policyConstraints extension). If the
# initial-policy-set is any-policy or otherwise includes
# NIST-test-policy-1, then the user-constrained-policy-set will be
# {NIST-test-policy-1} and the path should validate successfully. If not,
# then the user-constrained-policy-set will be empty. If the
# user-constrained-policy-set is empty and the application can process the
# policyConstraints extension, then the path should not validate
# successfully.
TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]),
],
'4.12.8': [ # Invalid Self-Issued inhibitAnyPolicy Test8
# Procedure: Validate Invalid Self-Issued inhibitAnyPolicy Test8 EE using
# the default settings or open and verify Signed Test Message 6.2.2.126
# using the default settings.
#
# Expected Result: The authorities-constrained-policy-set and
# user-constrained-policy-set will be empty and the
# explicit-policy-indicator will be set (if the application can process the
# policyConstraints extension). If the application can process the
# policyConstraints extension, then the path should not validate
# successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
'4.12.9': [ # Valid Self-Issued inhibitAnyPolicy Test9
# Procedure: Validate Valid Self-Issued inhibitAnyPolicy Test9 EE using the
# default settings or open and verify Signed Test Message 6.2.2.127 using
# the default settings.
#
# Expected Result: The authorities-constrained-policy-set will be
# {NIST-test-policy-1} and the explicit-policy-indicator will be set (if
# the application can process the policyConstraints extension). If the
# initial-policy-set is any-policy or otherwise includes
# NIST-test-policy-1, then the user-constrained-policy-set will be
# {NIST-test-policy-1} and the path should validate successfully. If not,
# then the user-constrained-policy-set will be empty. If the
# user-constrained-policy-set is empty and the application can process the
# policyConstraints extension, then the path should not validate
# successfully.
TestInfo(True, user_constrained_policy_set=[TEST_POLICY_1]),
],
'4.12.10': [ # Invalid Self-Issued inhibitAnyPolicy Test10
# Procedure: Validate Invalid Self-Issued inhibitAnyPolicy Test10 EE using
# the default settings or open and verify Signed Test Message 6.2.2.128
# using the default settings.
#
# Expected Result: The authorities-constrained-policy-set and
# user-constrained-policy-set will be empty and the
# explicit-policy-indicator will be set (if the application can process the
# policyConstraints extension). If the application can process the
# policyConstraints extension, then the path should not validate
# successfully.
TestInfo(False, user_constrained_policy_set=[]),
],
}
def parse_test(lines, i, test_case_name, test_number, test_name,
               sanitized_test_names, output):
  """Parses one PKITS test description and emits its test case(s) to |output|.

  Args:
    lines: List of text lines extracted from the PKITS PDF.
    i: Index of the first line of this test's body.
    test_case_name: Name of the enclosing C++ test case (section).
    test_number: Dotted PKITS test number, e.g. '4.10.1'.
    test_name: Human-readable PKITS test name.
    sanitized_test_names: Accumulator list of emitted test names (mutated).
    output: Open file object the generated C++ is written to.

  Returns:
    The index of the first line following this test's description.
  """
  # Start by doing a coarse level of parsing that separates out the lines for
  # the main sections.
  i, test_sections = parse_main_test_sections(lines, i)

  certs, crls = parse_cert_path_lines(test_sections.cert_path_lines)

  # Most tests have a formulaic specification: they use the default
  # settings, and have one expectation. These are easily parsed and are handled
  # programmatically. In contrast, many of the policies tests have a more
  # complicated specification which involves multiple subtests having various
  # settings, as well as expectations described in terms of supported
  # extensions. Rather than try to handle all the nuanced language, these are
  # handled manually via "overrides".
  overrides = TEST_OVERRIDES.get(test_number, None)

  if overrides is None:
    # Verify that the test description doesn't include numbered subparts (those
    # are not handled here).
    if CUSTOM_SETTINGS_MATCHER.match(" ".join(test_sections.description_lines)):
      sys.stderr.write('Unexpected custom settings for %s\n' % test_number)
      sys.exit(1)

    # Verify that the test is using only default settings.
    if not USING_DEFAULT_SETTINGS_MATCHER.match(
        " ".join(test_sections.procedure_lines)):
      # Fix: this previously read |test_section.procedure_lines| (a typo for
      # |test_sections|), which raised NameError whenever this error path ran.
      sys.stderr.write('Unexpected procedure for %s: %s\n' %
                       (test_number, " ".join(test_sections.procedure_lines)))
      sys.exit(1)

    # Check whether expected result is validation success or failure.
    result_match = TEST_RESULT_MATCHER.match(
        test_sections.expected_result_lines[0])
    if not result_match:
      sys.stderr.write('Unknown expectation for %s:\n%s\n' % (
          test_number, " ".join(test_sections.expected_result_lines)))
      sys.exit(1)
    # Initializes with default settings.
    info = TestInfo(result_match.group(1) == 'should validate')

    # Special case the 4.9 test failures (require explicit policy) to set
    # user_constrained_policy_set to empty. This is only done for the 4.9
    # tests, because the other policy tests are special cased as overrides and
    # hence set this manually on a per-test basis.
    #
    # user_constrained_policy_set enumerates the subset of the initial policy
    # set (anyPolicy in the default case) that were valid for the path. For
    # non-policy tests the expectation for user_constrained_policy_set is
    # [TEST_POLICY_1] since each policy asserts that. However for these tests,
    # the expectation is an empty user_constrained_policy_set since there was
    # no valid policy for the path (in fact, that is why the path validation is
    # expected to fail).
    if test_number.startswith('4.9.') and not info.should_validate:
      info.user_constrained_policy_set = []

    output_test(test_case_name, test_number, test_name, None, info, certs,
                crls, sanitized_test_names, output)
  else:
    # The overrides may have a series of inputs (settings) and outputs
    # (success/failure) for this test. Output each as a separate test case.
    for subpart_i in range(len(overrides)):
      info = overrides[subpart_i]
      # If the test has only 1 subpart, don't number it.
      subpart_number = subpart_i + 1 if len(overrides) > 1 else None
      output_test(test_case_name, test_number, test_name, subpart_number, info,
                  certs, crls, sanitized_test_names, output)

  return i
def main():
  """Converts the PKITS test-description PDF into generated C++ test cases.

  Usage: generate_tests.py <pkits.pdf> <output.h>

  Requires the |pdftotext| tool to be available on PATH.
  """
  pkits_pdf_path, output_path = sys.argv[1:]

  pkits_txt_file = tempfile.NamedTemporaryFile()

  subprocess.check_call(['pdftotext', '-layout', '-nopgbrk', '-eol', 'unix',
                         pkits_pdf_path, pkits_txt_file.name])

  test_descriptions = pkits_txt_file.read()

  # Extract section 4 of the text, which is the part that contains the tests.
  test_descriptions = test_descriptions.split(
      '4 Certification Path Validation Tests')[-1]
  test_descriptions = test_descriptions.split(
      '5 Relationship to Previous Test Suite', 1)[0]

  output = open(output_path, 'w')
  output.write('// Autogenerated by %s, do not edit\n\n' % sys.argv[0])
  output.write("""
// This file intentionally does not have header guards, it's intended to
// be inlined in another header file. The following line silences a
// presubmit warning that would otherwise be triggered by this:
// no-include-guard-because-multiply-included
// NOLINT(build/header_guard)\n\n""")
  output.write('// Hack to allow disabling type parameterized test cases.\n'
               '// See https://github.com/google/googletest/issues/389\n')
  output.write('#define WRAPPED_TYPED_TEST_P(CaseName, TestName) '
               'TYPED_TEST_P(CaseName, TestName)\n')
  output.write('#define WRAPPED_REGISTER_TYPED_TEST_SUITE_P(CaseName, ...) '
               'REGISTER_TYPED_TEST_SUITE_P(CaseName, __VA_ARGS__)\n\n')

  test_case_name = None
  sanitized_test_names = []

  lines = test_descriptions.splitlines()
  i = 0
  while i < len(lines):
    section_match = SECTION_MATCHER.match(lines[i])
    match = TEST_MATCHER.match(lines[i])
    i += 1
    if section_match:
      # Starting a new section: finish the previous test case (if any) and
      # emit the fixture boilerplate for the new one.
      if test_case_name:
        finalize_test_case(test_case_name, sanitized_test_names, output)
        sanitized_test_names = []
      test_case_name = 'PkitsTest%02d%s' % (
          int(section_match.group(1).split('.')[-1]),
          sanitize_name(section_match.group(2)))
      output.write('\ntemplate <typename PkitsTestDelegate>\n')
      output.write('class %s : public PkitsTest<PkitsTestDelegate> {};\n' %
                   test_case_name)
      output.write('TYPED_TEST_SUITE_P(%s);\n' % test_case_name)
    if match:
      test_number = match.group(1)
      test_name = match.group(2)
      if not test_case_name:
        output.write('// Skipped %s %s\n' % (test_number, test_name))
        continue
      # Fix: this previously read |i, parse_test(...)|, a tuple expression
      # that discarded the returned index, so |i| never advanced past the
      # parsed test and its lines were re-scanned.
      i = parse_test(lines, i, test_case_name, test_number,
                     test_name, sanitized_test_names, output)

  if test_case_name:
    finalize_test_case(test_case_name, sanitized_test_names, output)

  # Fix: close the output file so all generated text is flushed to disk.
  output.close()


if __name__ == '__main__':
  main()
| endlessm/chromium-browser | net/third_party/nist-pkits/generate_tests.py | Python | bsd-3-clause | 55,596 |
import OOMP

# Register catalog item 9233: a 0402 resistor ("RESE"), value code O123,
# index 67, in the shared OOMP parts list.
_part = OOMP.oompItem(9233)
for _tag, _value in (
    ("oompType", "RESE"),
    ("oompSize", "0402"),
    ("oompColor", "X"),
    ("oompDesc", "O123"),
    ("oompIndex", "67"),
):
    _part.addTag(_tag, _value)
OOMP.parts.append(_part)
| oomlout/oomlout-OOMP | old/OOMPpart_RESE_0402_X_O123_67.py | Python | cc0-1.0 | 243 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import bawebauth.apps.bawebauth.fields
class Migration(migrations.Migration):
    """Initial schema migration for the bawebauth app.

    Creates two tables:

    * ``Device`` -- a named client device belonging to a user, identified
      by a 40-character ``ident`` string, with ``active``/``enabled`` flags
      and created/edited timestamps.
    * ``Usage`` -- a per-device traffic sample recording bytes sent and
      received (stored in a custom ``PositiveBigIntegerField``) with a
      creation timestamp.
    """

    # The user model is swappable, so depend on whatever AUTH_USER_MODEL
    # resolves to rather than hard-coding auth.User.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Device: one row per registered client device, owned by a user.
        migrations.CreateModel(
            name='Device',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100, verbose_name='name')),
                # 40 chars -- sized for a hex digest (e.g. SHA-1) identifier.
                ('ident', models.CharField(max_length=40, verbose_name='ident')),
                # crdate is set once on insert; tstamp updates on every save.
                ('crdate', models.DateTimeField(auto_now_add=True, verbose_name='date created')),
                ('tstamp', models.DateTimeField(auto_now=True, verbose_name='date edited')),
                ('active', models.BooleanField(default=False, verbose_name='active')),
                ('enabled', models.BooleanField(default=False, verbose_name='enabled')),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # Usage: a byte-count sample tied to a Device.
        migrations.CreateModel(
            name='Usage',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('send', bawebauth.apps.bawebauth.fields.PositiveBigIntegerField(verbose_name='bytes send')),
                ('received', bawebauth.apps.bawebauth.fields.PositiveBigIntegerField(verbose_name='bytes received')),
                ('crdate', models.DateTimeField(auto_now_add=True, verbose_name='date created')),
                ('device', models.ForeignKey(to='bawebauth.Device')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
| mback2k/django-bawebauth | bawebauth/apps/bawebauth/migrations/0001_initial.py | Python | mit | 1,925 |
import string
import datetime
import time
from functions import *
from cloudant.client import Cloudant
from cloudant.error import CloudantException
from cloudant.result import Result, ResultByKey
def getDataFor5min(load):
    """Return the summed 'Power' of the 59 newest documents in "load<load>".

    Each stored document appears to hold a 3-second power average (per the
    original comments), so 59 documents cover just under one minute of data.
    NOTE(review): despite the function name, this sums ~1 minute of samples,
    not 5 minutes -- confirm the intended window.

    SECURITY: service credentials are hard-coded below; they should be moved
    to configuration or environment variables instead of living in source.

    Args:
        load: string suffix identifying the load database (e.g. "2").

    Returns:
        Sum of the 'Power' fields of the 59 most recent rows.

    Raises:
        IndexError: if the database holds fewer than 59 documents.
    """
    # Hard-coded service credentials (see SECURITY note above).
    account = "39a4348e-3ce1-40cd-b016-1f85569d409e-bluemix"
    api_key = "48e26645f504209f85b4c44d74a4cb14bc0d059a22b361534b78f406a513f8ff"
    base_url = ("https://39a4348e-3ce1-40cd-b016-1f85569d409e-bluemix:"
                "48e26645f504209f85b4c44d74a4cb14bc0d059a22b361534b78f406a513f8ff"
                "@39a4348e-3ce1-40cd-b016-1f85569d409e-bluemix.cloudant.com")

    client = Cloudant(account, api_key, url=base_url)
    client.connect()
    try:
        # Fetch all documents with their full bodies.
        end_point = '{0}/{1}'.format(base_url, "load" + load + "/_all_docs?")
        response = client.r_session.get(end_point,
                                        params={'include_docs': 'true'})
        # Parse the JSON once (the original re-parsed it on every loop
        # iteration) and sum rows[-1] .. rows[-59], i.e. the newest 59 rows.
        # The original wrapped this in dead single-iteration while-loops.
        rows = response.json()['rows']
        total = sum(rows[-x]['doc']['Power'] for x in range(1, 60))
    finally:
        # Always release the connection, even if the request/parse fails.
        client.disconnect()
    return total
# Main loop: each pass aggregates the most recent minute of power data for
# loads 2-4 and stores one summary document per load, keyed by timestamp.
# NOTE(review): credentials are hard-coded here and in getDataFor5min;
# presumably they should move to configuration -- confirm before deploying.
while(True):
    client = Cloudant("39a4348e-3ce1-40cd-b016-1f85569d409e-bluemix",
                      "48e26645f504209f85b4c44d74a4cb14bc0d059a22b361534b78f406a513f8ff",
                      url="https://39a4348e-3ce1-40cd-b016-1f85569d409e-bluemix:48e26645f504209f85b4c44d74a4cb14bc0d059a22b361534b78f406a513f8ff@39a4348e-3ce1-40cd-b016-1f85569d409e-bluemix.cloudant.com")
    client.connect()
    # One-minute power totals for each monitored load.
    l2 =getDataFor5min("2")
    l3 = getDataFor5min("3")
    l4 = getDataFor5min("4")
    # Timestamp string shared as the document _id for all three records.
    cur_time = datetime.datetime.now()
    awesome_cur_time = str(cur_time)
    print("working")
    # create_db comes from the functions module (star import above).
    myDatabase_load_2 = create_db("load2_min")
    myDatabase_load_3 = create_db("load3_min")
    myDatabase_load_4 = create_db("load4_min")
    sampleData_load_2 = [
        [l2, awesome_cur_time]
    ]
    sampleData_load_3 = [
        [l3, awesome_cur_time]
    ]
    sampleData_load_4 = [
        [l4, awesome_cur_time]
    ]
    # Create docummments using the sample data.
    # Go through each row in the array
    for document in sampleData_load_2:
        # Retrieve the fields in each row.
        number_load_2 = document[0]
        time_load_2 = document[1]
    for document in sampleData_load_3:
        # Retrieve the fields in each row.
        number_load_3 = document[0]
        time_load_3 = document[1]
    for document in sampleData_load_4:
        # Retrieve the fields in each row.
        number_load_4 = document[0]
        time_load_4 = document[1]
    # Create a JSON document that represents
    # all the data in the row.
    jsonDocument_load_2 = {
        "data": number_load_2,
        "_id" : time_load_2
    }
    jsonDocument_load_3 = {
        "data": number_load_3,
        "_id" : time_load_3
    }
    jsonDocument_load_4 = {
        "data": number_load_4,
        "_id" : time_load_4
    }
    # Create a document using the Database API.
    newDocument_load_2 = myDatabase_load_2.create_document(jsonDocument_load_2)
    newDocument_load_3 = myDatabase_load_3.create_document(jsonDocument_load_3)
    newDocument_load_4 = myDatabase_load_4.create_document(jsonDocument_load_4)
    #increasing include_docs
    # dat_of_stat = MofoData()
    # status_circuit = dat_of_stat.getStatusCircuit()
    # print(status_circuit)
    # if status_circuit == '1':
    #     turnOn()
    # else:
    #     turnOff()
    client.disconnect()
| maurofm1992/smartpanel | conversions.py | Python | apache-2.0 | 4,085 |
import os
import django
from django.core.checks import run_checks
from django.core.exceptions import AppRegistryNotReady
from celery import Celery
from manage import init_hq_python_path, run_patches
# Path setup and monkey-patches must run before Django settings are loaded.
init_hq_python_path()
run_patches()

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')

# Celery app configured from Django settings (CELERY_* namespace), with
# per-installed-app task autodiscovery.
app = Celery()
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()

django.setup()

try:
    # Fail fast on misconfiguration once the app registry is available.
    run_checks()
except AppRegistryNotReady:
    # Apps are not loaded yet (e.g. during some management commands);
    # Django will run the checks itself later.
    pass
| dimagi/commcare-hq | corehq/celery.py | Python | bsd-3-clause | 483 |
import pandas as pd
import numpy as np
import json
def get_data(filename):
    """Load dates and dataset arrays from a JSON file.

    The file must contain a ``"dates"`` list of ``%Y-%m-%d`` strings and a
    ``"dataset"`` list of numeric rows.

    :param filename: path to the JSON file
    :return: tuple of (pandas DatetimeIndex, numpy float16 ndarray)
    """
    with open(filename) as handle:
        payload = json.load(handle)
    parsed_dates = pd.to_datetime(payload['dates'], format="%Y-%m-%d")
    values = np.array(payload['dataset'], dtype=np.float16)
    return parsed_dates, values
| ansteh/quandl-url-service | labs/resources.py | Python | mit | 424 |
# -*- coding: utf-8 -*-
"""Test suite for the TG app's models"""
from nose.tools import eq_
from devcon import model
from devcon.tests.models import ModelTest
class TestGroup(ModelTest):
    """Unit test case for the ``Group`` model."""

    klass = model.Group
    attrs = {
        'group_name': u"test_group",
        'display_name': u"Test Group",
    }
class TestUser(ModelTest):
    """Unit test case for the ``User`` model."""

    klass = model.User
    attrs = {
        'user_name': u"ignucius",
        'email_address': u"[email protected]",
    }

    def test_obj_creation_username(self):
        """The obj constructor must set the user name right"""
        user = self.obj
        eq_(user.user_name, u"ignucius")

    def test_obj_creation_email(self):
        """The obj constructor must set the email right"""
        user = self.obj
        eq_(user.email_address, u"[email protected]")

    def test_no_permissions_by_default(self):
        """User objects should have no permission by default."""
        eq_(len(self.obj.permissions), 0)

    def test_getting_by_email(self):
        """Users should be fetcheable by their email addresses"""
        found = model.User.by_email_address(u"[email protected]")
        eq_(found, self.obj)
class TestPermission(ModelTest):
    """Unit test case for the ``Permission`` model."""

    klass = model.Permission
    attrs = {
        'permission_name': u"test_permission",
        'description': u"This is a test Description",
    }
| eriknyk/devcon | devcon/tests/models/test_auth.py | Python | lgpl-2.1 | 1,492 |
# -*- encoding: utf-8 -*-
from __future__ import print_function, unicode_literals, division, absolute_import
from enocean.communicators import Communicator
from enocean.protocol.packet import Packet
from enocean.protocol.constants import RORG, DB6
from enocean.decorators import timing
@timing(rounds=100, limit=750)
def test_ute_in():
    # Parse a raw UTE teach-in frame and verify the decoded query fields,
    # then verify the teach-in response packet generated from it.
    communicator = Communicator()
    communicator.base_id = [0xDE, 0xAD, 0xBE, 0xEF]

    # Raw frame bytes for a UTE (0xD4) teach-in query.
    status, buf, packet = Packet.parse_msg(
        bytearray([
            0x55,
            0x00, 0x0D, 0x07, 0x01,
            0xFD,
            0xD4, 0xA0, 0xFF, 0x3E, 0x00, 0x01, 0x01, 0xD2, 0x01, 0x94, 0xE3, 0xB9, 0x00,
            0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0x40, 0x00,
            0xAB
        ]),
        communicator=communicator
    )
    # Fields decoded from the teach-in query.
    assert packet.sender_hex == '01:94:E3:B9'
    assert packet.unidirectional is False
    assert packet.bidirectional is True
    assert packet.response_expected is True
    assert packet.number_of_channels == 0xFF
    assert packet.rorg_manufacturer == 0x3E
    assert packet.rorg_of_eep == RORG.VLD
    assert packet.rorg_func == 0x01
    assert packet.rorg_type == 0x01
    assert packet.teach_in is True
    assert packet.delete is False
    assert packet.learn is True
    assert packet.contains_eep is True

    # The response must originate from our base id, target the sender, and
    # echo back the query's EEP bytes (data[2:7]).
    response_packet = packet._create_response_packet(communicator.base_id)
    assert response_packet.sender_hex == 'DE:AD:BE:EF'
    assert response_packet.destination_hex == '01:94:E3:B9'
    assert response_packet._bit_data[DB6.BIT_5:DB6.BIT_3] == [False, True]
    assert response_packet.data[2:7] == packet.data[2:7]
| Ethal/enocean | enocean/protocol/tests/test_teachin.py | Python | mit | 1,614 |
"""This module does the argument and config parsing, and contains the main
function (that is called when calling pep8radius from shell)."""
from __future__ import print_function
import os
import sys
try:
from configparser import ConfigParser as SafeConfigParser, NoSectionError
except ImportError: # py2, pragma: no cover
from ConfigParser import SafeConfigParser, NoSectionError
from pep8radius.radius import Radius, RadiusFromDiff
from pep8radius.shell import CalledProcessError # with 2.6 compat
__version__ = version = '0.9.2'
DEFAULT_IGNORE = 'E24'
DEFAULT_INDENT_SIZE = 4
if sys.platform == 'win32': # pragma: no cover
DEFAULT_CONFIG = os.path.expanduser(r'~\.pep8')
else:
DEFAULT_CONFIG = os.path.join(os.getenv('XDG_CONFIG_HOME') or
os.path.expanduser('~/.config'), 'pep8')
PROJECT_CONFIG = ('setup.cfg', 'tox.ini', '.pep8')
def main(args=None, vc=None, cwd=None, apply_config=False):
    """PEP8 clean only the parts of the files touched since the last commit, a
    previous commit or branch.

    Returns a shell exit code: 0 on success, 1 on error (or when
    --error-status is set and fixes were made), or the subprocess return
    code when the version-control command fails.
    """
    import signal
    try:  # pragma: no cover
        # Exit on broken pipe.
        signal.signal(signal.SIGPIPE, signal.SIG_DFL)
    except AttributeError:  # pragma: no cover
        # SIGPIPE is not available on Windows.
        pass

    try:
        if args is None:
            args = []

        # args may be a list of CLI strings or an already-parsed Namespace.
        try:
            # Note: argparse on py 2.6 you can't pass a set
            # TODO neater solution for this!
            args_set = set(args)
        except TypeError:
            args_set = args  # args is a Namespace
        if '--version' in args_set or getattr(args_set, 'version', 0):
            print(version)
            return 0
        if '--list-fixes' in args_set or getattr(args_set, 'list_fixes', 0):
            from autopep8 import supported_fixes
            for code, description in sorted(supported_fixes()):
                print('{code} - {description}'.format(
                    code=code, description=description))
            return 0

        try:
            try:
                args = parse_args(args, apply_config=apply_config)
            except TypeError:
                pass  # args is already a Namespace (testing)
            if args.from_diff:  # pragma: no cover
                r = Radius.from_diff(args.from_diff.read(),
                                     options=args, cwd=cwd)
            else:
                r = Radius(rev=args.rev, options=args, vc=vc, cwd=cwd)
        except NotImplementedError as e:  # pragma: no cover
            print(e)
            return 1
        except CalledProcessError as c:  # pragma: no cover
            # cut off usage and exit
            output = c.output.splitlines()[0]
            print(output)
            return c.returncode

        any_changes = r.fix()
        if any_changes and args.error_status:
            return 1
        return 0

    except KeyboardInterrupt:  # pragma: no cover
        return 1
def create_parser():
    """Create the parser for the pep8radius CLI.

    Argument groups: core options, autopep8 pass-through, docformatter,
    config-file handling, and the yapf alternative backend.
    """
    from argparse import ArgumentParser, FileType
    description = ("PEP8 clean only the parts of the files which you have "
                   "touched since the last commit, a previous commit or "
                   "(the merge-base of) a branch.")
    epilog = ("Run before you commit, against a previous commit or "
              "branch before merging.")
    parser = ArgumentParser(description=description,
                            epilog=epilog,
                            prog='pep8radius')

    # Core options.
    parser.add_argument('rev',
                        help='commit or name of branch to compare against',
                        nargs='?')
    parser.add_argument('--version',
                        help='print version number and exit',
                        action='store_true')
    parser.add_argument('-d', '--diff', action='store_true', dest='diff',
                        help='print the diff of fixed source vs original')
    parser.add_argument('--error-status', action='store_true',
                        dest='error_status',
                        help="return a shell status code of 1 if there are"
                             " any fixes")
    parser.add_argument('-i', '--in-place', action='store_true',
                        help="make the fixes in place; modify the files")
    parser.add_argument('--no-color', action='store_true',
                        help='do not print diffs in color '
                             '(default is to use color)')
    parser.add_argument('-v', '--verbose', action='count', dest='verbose',
                        default=0,
                        help='print verbose messages; '
                             'multiple -v result in more verbose messages '
                             '(one less -v is passed to autopep8)')
    parser.add_argument('--from-diff', type=FileType('r'), metavar='DIFF',
                        help="Experimental: rather than calling out to version"
                             " control, just pass in a diff; "
                             "the modified lines will be fixed")

    # Options forwarded to autopep8.
    ap = parser.add_argument_group('pep8', 'Pep8 options to pass to autopep8.')
    ap.add_argument('-p', '--pep8-passes', metavar='n',
                    default=-1, type=int,
                    help='maximum number of additional pep8 passes '
                         '(default: infinite)')
    ap.add_argument('-a', '--aggressive', action='count', default=0,
                    help='enable non-whitespace changes; '
                         'multiple -a result in more aggressive changes')
    ap.add_argument('--experimental', action='store_true',
                    help='enable experimental fixes')
    ap.add_argument('--exclude', metavar='globs',
                    help='exclude file/directory names that match these '
                         'comma-separated globs')
    ap.add_argument('--list-fixes', action='store_true',
                    help='list codes for fixes and exit; '
                         'used by --ignore and --select')
    ap.add_argument('--ignore', metavar='errors', default='',
                    help='do not fix these errors/warnings '
                         '(default: {0})'.format(DEFAULT_IGNORE))
    ap.add_argument('--select', metavar='errors', default='',
                    help='fix only these errors/warnings (e.g. E4,W)')
    ap.add_argument('--max-line-length', metavar='n', default=79, type=int,
                    help='set maximum allowed line length '
                         '(default: %(default)s)')
    ap.add_argument('--indent-size', default=DEFAULT_INDENT_SIZE,
                    type=int, metavar='n',
                    help='number of spaces per indent level '
                         '(default %(default)s)')

    # Docstring fixes via docformatter.
    df = parser.add_argument_group('docformatter',
                                   'Fix docstrings for PEP257.')
    df.add_argument('-f', '--docformatter', action='store_true',
                    help='Use docformatter')
    df.add_argument('--no-blank', dest='post_description_blank',
                    action='store_false',
                    help='Do not add blank line after description')
    df.add_argument('--pre-summary-newline',
                    action='store_true',
                    help='add a newline before the summary of a '
                         'multi-line docstring')
    df.add_argument('--force-wrap', action='store_true',
                    help='force descriptions to be wrapped even if it may '
                         'result in a mess')

    # Config-file handling.
    cg = parser.add_argument_group('config',
                                   'Change default options based on global '
                                   'or local (project) config files.')
    cg.add_argument('--global-config',
                    default=DEFAULT_CONFIG,
                    metavar='filename',
                    help='path to global pep8 config file; ' +
                         " if this file does not exist then this is ignored" +
                         '(default: %s)' % DEFAULT_CONFIG)
    cg.add_argument('--ignore-local-config', action='store_true',
                    help="don't look for and apply local config files; "
                         'if not passed, defaults are updated with any '
                         "config files in the project's root directory")

    # Alternative formatter backend.
    yp = parser.add_argument_group('yapf',
                                   'Options for yapf, alternative to autopep8. '
                                   'Currently any other options are ignored.')
    yp.add_argument('-y', '--yapf', action='store_true',
                    help='Use yapf rather than autopep8. '
                         'This ignores other arguments outside of this group.')
    yp.add_argument('--style', metavar='', default='pep8',
                    help='style either pep8, google, name of file with style'
                         'settings, or a dict')

    return parser
def parse_args(arguments=None, root=None, apply_config=False):
    """Parse the arguments from the CLI.

    If apply_config then we first look up and apply configs using
    apply_config_defaults.

    Normalizes select/ignore/exclude from comma-separated strings into
    sets (or a list for exclude) before returning the Namespace.
    """
    if arguments is None:
        arguments = []
    parser = create_parser()
    args = parser.parse_args(arguments)
    if apply_config:
        # Re-parse so explicit CLI values still override config defaults.
        parser = apply_config_defaults(parser, args, root=root)
        args = parser.parse_args(arguments)

    # sanity check args (from autopep8)
    if args.max_line_length <= 0:  # pragma: no cover
        parser.error('--max-line-length must be greater than 0')

    if args.select:
        args.select = _split_comma_separated(args.select)

    if args.ignore:
        args.ignore = _split_comma_separated(args.ignore)
    elif not args.select and args.aggressive:
        # Enable everything by default if aggressive.
        args.select = ['E', 'W']
    else:
        args.ignore = _split_comma_separated(DEFAULT_IGNORE)

    if args.exclude:
        args.exclude = _split_comma_separated(args.exclude)
    else:
        args.exclude = []

    return args
def apply_config_defaults(parser, args, root):
    """Update the parser's defaults from either the arguments' config_arg or
    the config files given in config_files(root)."""
    if root is None:
        try:
            # Infer the project root from the detected version control.
            from pep8radius.vcs import VersionControl
            root = VersionControl.which().root_dir()
        except NotImplementedError:
            pass  # don't update local, could be using as module

    config = SafeConfigParser()
    config.read(args.global_config)
    if root and not args.ignore_local_config:
        # Project-level config files override the global one.
        config.read(local_config_files(root))

    try:
        # Convert option names like "max-line-length" to "max_line_length".
        defaults = dict((k.lstrip('-').replace('-', '_'), v)
                        for k, v in config.items("pep8"))
        parser.set_defaults(**defaults)
    except NoSectionError:
        pass  # just do nothing, potentially this could raise ?
    return parser
def local_config_files(root):
    """Return the candidate project-level config file paths under *root*."""
    return [os.path.join(root, name) for name in PROJECT_CONFIG]
def _split_comma_separated(string):
"""Return a set of strings."""
return set(filter(None, string.split(',')))
def _main(args=None, vc=None, cwd=None):  # pragma: no cover
    """Console entry point: run main() with config files applied and exit
    the process with its return code."""
    if args is None:
        args = sys.argv[1:]
    return sys.exit(main(args=args, vc=vc, cwd=cwd, apply_config=True))


if __name__ == "__main__":  # pragma: no cover
    _main()
| hayd/pep8radius | pep8radius/main.py | Python | mit | 11,438 |
# Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# This file is part of kiwi.
#
# kiwi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# kiwi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with kiwi. If not, see <http://www.gnu.org/licenses/>
#
# project
import importlib
from typing import Dict
from abc import (
ABCMeta, abstractmethod
)
from kiwi.defaults import Defaults
from kiwi.xml_state import XMLState
from kiwi.exceptions import KiwiRequestedTypeError
class ImageBuilder(metaclass=ABCMeta):
    """
    Image builder factory

    Maps the build type configured in the XML description onto the
    matching concrete builder implementation and instantiates it.
    """
    @abstractmethod
    def __init__(self) -> None:
        return None  # pragma: no cover

    @staticmethod
    def new(
        xml_state: XMLState, target_dir: str,
        root_dir: str, custom_args: Dict = None
    ):
        """
        Create the builder matching the configured build type.

        :param XMLState xml_state: image description state
        :param str target_dir: target directory for the image results
        :param str root_dir: root directory of the prepared image tree
        :param dict custom_args: optional builder specific arguments

        :raises KiwiRequestedTypeError: if the image type is unknown or
            the matching builder could not be loaded/instantiated

        :return: instance of the matching builder class
        """
        image_type = xml_state.get_build_type_name()
        if image_type in Defaults.get_filesystem_image_types():
            name_token = ('filesystem', 'FileSystemBuilder')
        elif image_type in Defaults.get_disk_image_types():
            name_token = ('disk', 'DiskBuilder')
        elif image_type in Defaults.get_live_image_types():
            name_token = ('live', 'LiveImageBuilder')
        elif image_type in Defaults.get_kis_image_types():
            name_token = ('kis', 'KisBuilder')
        elif image_type in Defaults.get_archive_image_types():
            name_token = ('archive', 'ArchiveBuilder')
        elif image_type in Defaults.get_container_image_types():
            name_token = ('container', 'ContainerBuilder')
        else:
            # Fail early instead of attempting to import a builder module
            # named 'None' and masking the lookup failure
            raise KiwiRequestedTypeError(
                f'Requested image type {image_type} not supported'
            )
        (builder_namespace, builder_name) = name_token
        try:
            builder = importlib.import_module(
                'kiwi.builder.{0}'.format(builder_namespace)
            )
            return builder.__dict__[builder_name](
                xml_state, target_dir, root_dir, custom_args
            )
        except Exception as issue:
            # Chain the real import/instantiation failure so it is not
            # silently swallowed; exception type stays the same for callers
            raise KiwiRequestedTypeError(
                f'Requested image type {image_type} not supported'
            ) from issue
| dirkmueller/kiwi | kiwi/builder/__init__.py | Python | gpl-3.0 | 2,508 |
"""
For more details, see the class documentation.
"""
class WikiInfoDTO:
    """Data transfer object describing a Wikipedia article and its location."""

    def __init__(self):
        """Initialize every field to an empty default."""
        self.location = None  # expected to expose a json_dict() method
        self.title = ""
        self.link = ""
        self.info = ""
        self.pageid = ""

    def json_dict(self):
        """Return a plain dict representation for JSON serialization.

        ``location`` must be set to an object providing ``json_dict()``
        before this method is called.
        """
        return {
            'title': self.title,
            'link': self.link,
            'info': self.info,
            'pageid': self.pageid,
            'location': self.location.json_dict(),
        }
| agarwalt/WikiNearby | map_annotate_app/dto/WikiInfoDTO.py | Python | mit | 796 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2004-2011
# Pexego Sistemas Informáticos. (http://pexego.es) All Rights Reserved
#
# Migración OpenERP 7.0. Top Consultant Software Creations S.L. (http://www.topconsultant.es/) 2013
# Ignacio Martínez y Miguel López.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import threading
import time
from openerp import netsvc
import re
from openerp.osv import orm
# Matches VAT numbers that already carry a two-letter country prefix.
vat_regex = re.compile(u"[a-zA-Z]{2}.*", re.UNICODE | re.X)


class l10n_es_aeat_mod349_calculate_records(orm.TransientModel):
    # Transient wizard that (re)computes the partner and refund records of
    # an AEAT model 349 report from the posted invoices.
    _name = "l10n.es.aeat.mod349.calculate_records"
    _description = u"AEAT Model 349 Wizard - Calculate Records"

    def _formatPartnerVAT(self, cr, uid, partner_vat=None, country_id=None,
                          context=None):
        """
        Formats VAT to match XXVATNUMBER (where XX is country code)
        """
        if partner_vat and \
                not vat_regex.match(partner_vat) and country_id:
            partner_vat = self.pool.get('res.country').\
                browse(cr, uid, country_id, context=context).code + partner_vat
        return partner_vat

    def _create_partner_records_for_report(self, cr, uid, ids, report_id,
                                           partner_obj, operation_key,
                                           context=None):
        """creates partner records in 349

        ids are the invoice ids to aggregate; the total operation amount is
        the untaxed sum of regular invoices minus the untaxed sum of refunds.
        """
        invoices_ids = self.pool.get('account.invoice').browse(cr, uid, ids,
                                                               context=context)
        obj = self.pool.get('l10n.es.aeat.mod349.partner_record')
        partner_country = partner_obj.country_id
        invoice_created = obj.create(cr, uid, {
            'report_id': report_id,
            'partner_id': partner_obj.id,
            'partner_vat':
                self._formatPartnerVAT(cr,
                                       uid,
                                       partner_vat=partner_obj.vat,
                                       country_id=partner_country.id),
            'operation_key': operation_key,
            'country_id': partner_country.id or False,
            'total_operation_amount': sum([invoice.cc_amount_untaxed for
                                           invoice in invoices_ids if
                                           invoice.type not in
                                           ('in_refund', 'out_refund')]) -
                                      sum([invoice.cc_amount_untaxed
                                           for invoice in invoices_ids
                                           if invoice.type in
                                           ('in_refund',
                                            'out_refund')])
        })
        ### Creation of partner detail lines
        for invoice in invoices_ids:
            self.pool.get('l10n.es.aeat.mod349.partner_record_detail').\
                create(cr, uid, {
                    'partner_record_id': invoice_created,
                    'invoice_id': invoice.id,
                    'amount_untaxed': invoice.cc_amount_untaxed
                })
        return invoice_created

    def _create_refund_records_for_report(self, cr, uid, ids, report_id,
                                          partner_obj, operation_key,
                                          context=None):
        """creates restitution records in 349

        ids are refund invoice ids; each is matched back to the partner
        record of the 349 where the refunded invoice was declared.
        """
        refunds = self.pool.get('account.invoice').browse(cr, uid, ids)
        refundpol = self.pool.get('l10n.es.aeat.mod349.partner_record_detail')
        refund_pool = self.pool.get('l10n.es.aeat.mod349.partner_record')
        obj = self.pool.get('l10n.es.aeat.mod349.partner_refund')
        obj_detail = self.pool.get('l10n.es.aeat.mod349.partner_refund_detail')
        # NOTE(review): partner_obj.address is the pre-7.0 address model;
        # confirm this attribute still exists in this codebase.
        partner_country = [address.country_id.id for address in
                           partner_obj.address if address.type ==
                           'invoice' and address.country_id]
        if not len(partner_country):
            partner_country = [address.country_id.id for
                               address in partner_obj.address
                               if address.type == 'default' and
                               address.country_id]
        record = {}
        for invoice in refunds:
            #goes around all refunded invoices
            for origin_inv in invoice.origin_invoices_ids:
                if origin_inv.state in ['open', 'paid']:
                    #searches for details of another 349s to restor
                    refund_detail = refundpol.search(cr, uid,
                                                     [('invoice_id', '=',
                                                       origin_inv.id)])
                    valid_refund_details = refund_detail
                    # Keep only details whose partner record still belongs
                    # to a report.
                    for detail in refundpol.browse(cr, uid, refund_detail):
                        if not detail.partner_record_id.report_id:
                            valid_refund_details.remove(detail.id)
                    if valid_refund_details:
                        rd = refundpol.browse(cr, uid, valid_refund_details[0])
                        #creates a dictionary key with partner_record id to
                        #after recover it
                        key = str(rd.partner_record_id.id)
                        #separates restitutive invoices and nomal, refund
                        #invoices of correct period
                        if record.get(key):
                            record[key].append(invoice)
                            #NOTE: Two or more refunded invoices declared in
                            #different 349s isn't implemented
                            break
                        else:
                            record[key] = [invoice]
                            #NOTE: Two or more refunded invoices declared in
                            #different 349s isn't implemented
                            break
        # Walk the collected mapping and create one refund record (plus its
        # detail lines) per original partner record.
        for line in record:
            partner_rec = refund_pool.browse(cr, uid, int(line))
            record_created = obj.create(cr, uid, {
                'report_id': report_id,
                'partner_id': partner_obj.id,
                'partner_vat':
                    self._formatPartnerVAT(cr,
                                           uid,
                                           partner_vat=partner_obj.vat,
                                           country_id=partner_country,
                                           context=context),
                'operation_key': operation_key,
                'country_id': partner_country and partner_country[0] or False,
                'total_operation_amount': partner_rec.total_operation_amount
                    - sum([x.cc_amount_untaxed for x in record[line]]),
                'total_origin_amount': partner_rec.total_operation_amount,
                'period_selection': partner_rec.report_id.period_selection,
                'month_selection': partner_rec.report_id.month_selection,
                'fiscalyear_id': partner_rec.report_id.fiscalyear_id.id
            }, context=context)
            ### Creation of partner detail lines
            for invoice in record[line]:
                obj_detail.create(cr, uid, {
                    'refund_id': record_created,
                    'invoice_id': invoice.id,
                    'amount_untaxed': invoice.cc_amount_untaxed
                }, context=context)
        return True

    def _wkf_calculate_records(self, cr, uid, ids, context=None):
        """moves forward workflow"""
        if context is None:
            context = {}
        # Compute synchronously, then fire the 'calculate' transition so the
        # report's workflow state advances.
        self._calculate_records(cr, uid, ids, context, recalculate=False)
        ##
        ## Advance current report status in workflow
        wf_service = netsvc.LocalService("workflow")
        wf_service.trg_validate(uid, 'l10n.es.aeat.mod349.report',
                                ids and ids[0], 'calculate', cr)

    def _calculate_records(self, cr, uid, ids, context=None, recalculate=True):
        """computes the records in report

        Clears any previously generated partner/refund records, then walks
        every partner and operation key, creating partner records for the
        regular invoices and refund records for the refunds of the period.
        """
        if context is None:
            context = {}
        try:
            partner_obj = self.pool.get('res.partner')
            invoice_obj = self.pool.get('account.invoice')
            report_obj = self.pool.get('l10n.es.aeat.mod349.report')
            partner_record_obj = \
                self.pool.get('l10n.es.aeat.mod349.partner_record')
            partner_refund_obj = \
                self.pool.get('l10n.es.aeat.mod349.partner_refund')
            ##
            ## Remove previous partner records and parter refunds in 349 report
            reports = report_obj.browse(cr, uid, ids and
                                        ids[0], context=context)
            report_obj.write(cr, uid, ids, {
                'state': 'calculating',
                'calculation_date': time.strftime('%Y-%m-%d %H:%M:%S')
            }, context=context)
            ##
            ## Remove previous partner records and partner refunds in report
            ##
            partner_record_obj.unlink(cr, uid, [record.id for record in
                                                reports.partner_record_ids],
                                      context=context)
            partner_refund_obj.unlink(cr, uid, [refund.id for refund in
                                                reports.partner_refund_ids],
                                      context=context)
            # Returns all partners
            partner_ids = partner_obj.search(cr, uid, [], context=context)
            for partner in partner_obj.browse(cr, uid, partner_ids,
                                              context=context):
                for operation_key in ['E', 'A', 'T', 'S', 'I', 'M', 'H']:
                    ##
                    ## Invoices
                    invoice_ids = invoice_obj._get_invoices_by_type(cr, uid,
                        partner.id,
                        operation_key=operation_key,
                        period_selection=reports.period_selection,
                        fiscalyear_id=reports.fiscalyear_id.id,
                        period_id=[x.id for x in reports.period_ids],
                        month=reports.month_selection, context=context)
                    # Separates normal invoices of restitutions
                    invoice_ids, refunds_ids = invoice_obj.\
                        clean_refund_invoices(cr, uid, invoice_ids, partner.id,
                            fiscalyear_id=reports.fiscalyear_id.id,
                            period_id=[x.id for x in reports.period_ids],
                            month=reports.month_selection,
                            period_selection=reports.period_selection, context=context)
                    ##
                    ## Partner records and partner records detail lines
                    ##
                    if invoice_ids:
                        self._create_partner_records_for_report(cr, uid,
                                                                invoice_ids,
                                                                reports.id,
                                                                partner,
                                                                operation_key,
                                                                context=context)
                    ##
                    ## Refunds records and refunds detail lines
                    ##
                    if refunds_ids:
                        self._create_refund_records_for_report(cr, uid,
                                                               refunds_ids,
                                                               reports.id,
                                                               partner,
                                                               operation_key,
                                                               context=context)
            if recalculate:
                report_obj.write(cr, uid, ids, {
                    'state': 'calculated',
                    'calculation_date': time.strftime('%Y-%m-%d %H:%M:%S')
                }, context=context)
        except Exception, ex:
            # Re-raise unchanged; the caller (workflow/thread) handles it.
            raise
        return {}

    def calculation_threading(self, cr, uid, ids, context=None):
        """manages threading

        Runs the (potentially long) computation in a background thread so
        the client call returns immediately.
        """
        if context is None:
            context = {}
        threaded_calculation = \
            threading.Thread(target=self._calculate_records,
                             args=(cr, uid, ids, context))
        threaded_calculation.start()
        return {}
| jmesteve/saas3 | openerp/addons_extra/l10n_es_aeat_mod349/wizard/calculate_mod349_records.py | Python | agpl-3.0 | 13,600 |
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
from selenium.common.exceptions import (
WebDriverException,
NoSuchElementException)
# --- Locating child elements with XPath (relative, "./" and absolute) ---

def test_should_find_element_by_xpath(driver, pages):
    # A relative xpath is evaluated against the element, not the document.
    pages.load("nestedElements.html")
    element = driver.find_element_by_name("form2")
    child = element.find_element_by_xpath("select")
    assert child.get_attribute("id") == "2"


def test_should_not_find_element_by_xpath(driver, pages):
    pages.load("nestedElements.html")
    element = driver.find_element_by_name("form2")
    with pytest.raises(NoSuchElementException):
        element.find_element_by_xpath("select/x")


def test_finding_dot_slash_elements_on_element_by_xpath_should_find_not_top_level_elements(driver, pages):
    # "./p" anchors the search at the context element.
    pages.load("simpleTest.html")
    parent = driver.find_element_by_id("multiline")
    children = parent.find_elements_by_xpath("./p")
    assert 1 == len(children)
    assert "A div containing" == children[0].text


def test_should_find_elements_by_xpath(driver, pages):
    pages.load("nestedElements.html")
    element = driver.find_element_by_name("form2")
    children = element.find_elements_by_xpath("select/option")
    assert len(children) == 8
    assert children[0].text == "One"
    assert children[1].text == "Two"


def test_should_not_find_elements_by_xpath(driver, pages):
    # find_elements returns an empty list instead of raising.
    pages.load("nestedElements.html")
    element = driver.find_element_by_name("form2")
    children = element.find_elements_by_xpath("select/x")
    assert len(children) == 0


def test_finding_elements_on_element_by_xpath_should_find_top_level_elements(driver, pages):
    # An absolute "//p" xpath escapes the element context and searches the
    # whole document.
    pages.load("simpleTest.html")
    parent = driver.find_element_by_id("multiline")
    all_para_elements = driver.find_elements_by_xpath("//p")
    children = parent.find_elements_by_xpath("//p")
    assert len(all_para_elements) == len(children)
# --- Locating child elements by name and by id ---

def test_should_find_element_by_name(driver, pages):
    pages.load("nestedElements.html")
    element = driver.find_element_by_name("form2")
    child = element.find_element_by_name("selectomatic")
    assert child.get_attribute("id") == "2"


def test_should_find_elements_by_name(driver, pages):
    pages.load("nestedElements.html")
    element = driver.find_element_by_name("form2")
    children = element.find_elements_by_name("selectomatic")
    assert len(children) == 2


def test_should_find_element_by_id(driver, pages):
    pages.load("nestedElements.html")
    element = driver.find_element_by_name("form2")
    child = element.find_element_by_id("2")
    assert child.get_attribute("name") == "selectomatic"


def test_should_find_elements_by_id(driver, pages):
    pages.load("nestedElements.html")
    element = driver.find_element_by_name("form2")
    child = element.find_elements_by_id("2")
    assert len(child) == 2


def test_should_find_element_by_id_when_multiple_matches_exist(driver, pages):
    # The search is scoped to the context element, so only the nested
    # duplicate id should be returned.
    pages.load("nestedElements.html")
    element = driver.find_element_by_id("test_id_div")
    child = element.find_element_by_id("test_id")
    assert child.text == "inside"


def test_should_find_element_by_id_when_no_match_in_context(driver, pages):
    pages.load("nestedElements.html")
    element = driver.find_element_by_id("test_id_div")
    with pytest.raises(NoSuchElementException):
        element.find_element_by_id("test_id_out")
# --- Locating child elements by link text, class name and tag name ---

def test_should_find_element_by_link_text(driver, pages):
    pages.load("nestedElements.html")
    element = driver.find_element_by_name("div1")
    child = element.find_element_by_link_text("hello world")
    assert child.get_attribute("name") == "link1"


def test_should_find_elements_by_link_text(driver, pages):
    pages.load("nestedElements.html")
    element = driver.find_element_by_name("div1")
    children = element.find_elements_by_link_text("hello world")
    assert len(children) == 2
    assert "link1" == children[0].get_attribute("name")
    assert "link2" == children[1].get_attribute("name")


def test_should_find_element_by_class_name(driver, pages):
    pages.load("nestedElements.html")
    parent = driver.find_element_by_name("classes")
    element = parent.find_element_by_class_name("one")
    assert "Find me" == element.text


def test_should_find_elements_by_class_name(driver, pages):
    pages.load("nestedElements.html")
    parent = driver.find_element_by_name("classes")
    elements = parent.find_elements_by_class_name("one")
    assert 2 == len(elements)


def test_should_find_element_by_tag_name(driver, pages):
    pages.load("nestedElements.html")
    parent = driver.find_element_by_name("div1")
    element = parent.find_element_by_tag_name("a")
    assert "link1" == element.get_attribute("name")


def test_should_find_elements_by_tag_name(driver, pages):
    pages.load("nestedElements.html")
    parent = driver.find_element_by_name("div1")
    elements = parent.find_elements_by_tag_name("a")
    assert 2 == len(elements)
def test_should_be_able_to_find_an_element_by_css_selector(driver, pages):
    """A CSS attribute selector works when scoped to a parent element."""
    pages.load("nestedElements.html")
    form = driver.find_element_by_name("form2")
    field = form.find_element_by_css_selector('*[name="selectomatic"]')
    assert field.get_attribute("id") == "2"
def test_should_be_able_to_find_multiple_elements_by_css_selector(driver, pages):
    """All descendants matching the CSS attribute selector are returned."""
    pages.load("nestedElements.html")
    form = driver.find_element_by_name("form2")
    fields = form.find_elements_by_css_selector('*[name="selectomatic"]')
    assert len(fields) == 2
def test_should_throw_an_error_if_user_passes_in_invalid_by(driver, pages):
    """An unknown locator strategy is rejected for single-element lookup."""
    pages.load("nestedElements.html")
    form = driver.find_element_by_name("form2")
    with pytest.raises(WebDriverException):
        form.find_element("foo", "bar")
def test_should_throw_an_error_if_user_passes_in_invalid_by_when_find_elements(driver, pages):
    """An unknown locator strategy is rejected for multi-element lookup too."""
    pages.load("nestedElements.html")
    form = driver.find_element_by_name("form2")
    with pytest.raises(WebDriverException):
        form.find_elements("foo", "bar")
| davehunt/selenium | py/test/selenium/webdriver/common/children_finding_tests.py | Python | apache-2.0 | 6,753 |
# encoding:utf-8
import datetime
from django.core.urlresolvers import reverse
from django.db import models
from mptt.models import MPTTModel, TreeForeignKey
from tagging.fields import TagField
from apps.models import CommentEntity
class Category(MPTTModel):
    """
    Hierarchical category tree, backed by django-mptt.
    """
    name = models.CharField(u'名称', max_length=200, blank=True)  # display name
    slug = models.CharField(u'url标志', max_length=200, blank=True)  # URL fragment
    status = models.BooleanField(u"启用", default=True)  # enabled/visible flag
    parent = TreeForeignKey('self', null=True, blank=True, related_name='children', db_index=True)  # tree parent; None for roots
    created = models.DateTimeField(u"创建时间", auto_now_add=True, editable=False)  # set once on insert
    updated = models.DateTimeField(u"更新时间", auto_now=True, editable=False)  # refreshed on every save
    class Meta:
        ordering = ('name',)
        verbose_name_plural = verbose_name = u"种类"
    def __unicode__(self):
        # Python-2 display string used by the Django admin and shell.
        return u'%s' % self.name
    # def get_absolute_url(self):
    #     return reverse('country_index', kwargs={'country_chart': self.chart})
class Industry(models.Model):
    """Industry-news article imported from an external source."""
    name = models.CharField(u"名称", max_length=200)  # article title
    category = models.ForeignKey(Category, verbose_name=u"分类")  # owning category
    slug = models.SlugField(u"别名", max_length=100, db_index=True, unique=True)  # unique URL slug
    enabled = models.BooleanField(u"是否可用", default=True)  # visibility flag
    content = models.TextField(u"介绍")  # article body
    cover = models.CharField(u"封面", max_length=200)  # cover image path/URL (stored as plain text)
    tags = TagField()  # django-tagging free-form tags
    url = models.URLField(u"来源地址")  # original source URL
    publisher = models.CharField(u"发布人", max_length=200)  # publisher/author name
    created = models.DateTimeField(u"创建时间", auto_now_add=True, editable=False)  # set once on insert
    updated = models.DateTimeField(u"更新时间", auto_now=True, editable=False)  # refreshed on every save
    def __unicode__(self):
        # Python-2 display string used by the Django admin and shell.
        return self.name
    class Meta:
        verbose_name_plural = verbose_name = u"业界"
        get_latest_by = "created"
    # @models.permalink
    # def get_absolute_url(self):
    #     return 'item_detail', None, {'object_id': self.id}
class Comment(CommentEntity):
    """
    Review/comment attached to an Industry article.
    """
    # NOTE(review): blank=True without null=True on a ForeignKey lets forms omit
    # the value while the DB column stays NOT NULL — confirm this is intended.
    industry = models.ForeignKey(Industry, verbose_name=u"业界", blank=True)
| openslack/openslack-web | openslack/apps/industry/models.py | Python | apache-2.0 | 2,174 |
from __future__ import unicode_literals
from decimal import Decimal
import gittip
from aspen.utils import utcnow
from gittip.testing import Harness
from gittip.testing.client import TestClient
class TestRecordAnExchange(Harness):
    """Tests for the admin-only /%username/history/record-an-exchange endpoint."""

    # fixture
    # =======

    def setUp(self):
        # Bug fix: the original called super(Harness, self).setUp(), which
        # starts the MRO lookup *after* Harness and therefore skips
        # Harness.setUp entirely. Start the lookup at this class instead.
        super(TestRecordAnExchange, self).setUp()
        self.client = TestClient()

    def get_csrf_token(self):
        """Return the CSRF token taken from a GET of the homepage."""
        response = self.client.get('/')
        return response.request.context['csrf_token']

    def record_an_exchange(self, amount, fee, note, make_participants=True):
        """POST an exchange for bob, authenticated as admin alice.

        :param amount: exchange amount, as a string
        :param fee: fee, as a string
        :param note: free-text note (the endpoint requires a non-blank one)
        :param make_participants: create alice (admin) and bob first when True
        :returns: the raw test-client response
        """
        if make_participants:
            now = utcnow()
            self.make_participant('alice', claimed_time=now, is_admin=True)
            self.make_participant('bob', claimed_time=now)
        data = { 'amount': amount, 'fee': fee, 'note': note
               , 'csrf_token': self.get_csrf_token()
                }
        return self.client.post('/bob/history/record-an-exchange', data, 'alice')

    # tests
    # =====

    def test_success_is_302(self):
        actual = self.record_an_exchange('10', '0', 'foo').code
        assert actual == 302, actual

    def test_non_admin_is_404(self):
        self.make_participant('alice', claimed_time=utcnow())
        self.make_participant('bob', claimed_time=utcnow())
        actual = self.record_an_exchange('10', '0', 'foo', False).code
        assert actual == 404, actual

    def test_non_post_is_405(self):
        self.make_participant('alice', claimed_time=utcnow(), is_admin=True)
        self.make_participant('bob', claimed_time=utcnow())
        actual = \
            self.client.get('/bob/history/record-an-exchange', 'alice').code
        assert actual == 405, actual

    def test_bad_amount_is_400(self):
        actual = self.record_an_exchange('cheese', '0', 'foo').code
        assert actual == 400, actual

    def test_bad_fee_is_400(self):
        actual = self.record_an_exchange('10', 'cheese', 'foo').code
        assert actual == 400, actual

    def test_no_note_is_400(self):
        actual = self.record_an_exchange('10', '0', '').code
        assert actual == 400, actual

    def test_whitespace_note_is_400(self):
        actual = self.record_an_exchange('10', '0', '    ').code
        assert actual == 400, actual

    def test_dropping_balance_below_zero_is_500(self):
        actual = self.record_an_exchange('-10', '0', 'noted').code
        assert actual == 500, actual

    def test_success_records_exchange(self):
        self.record_an_exchange('10', '0.50', 'noted')
        expected = [{ "amount": Decimal('10.00')
                    , "fee": Decimal('0.50')
                    , "participant": "bob"
                    , "recorder": "alice"
                    , "note": "noted"
                     }]
        SQL = "SELECT amount, fee, participant, recorder, note " \
              "FROM exchanges"
        actual = list(gittip.db.fetchall(SQL))
        assert actual == expected, actual

    def test_success_updates_balance(self):
        self.record_an_exchange('10', '0', 'noted')
        expected = [{"balance": Decimal('10.00')}]
        SQL = "SELECT balance FROM participants WHERE username='bob'"
        actual = list(gittip.db.fetchall(SQL))
        assert actual == expected, actual

    def test_withdrawals_work(self):
        self.make_participant('alice', claimed_time=utcnow(), is_admin=True)
        self.make_participant('bob', claimed_time=utcnow(), balance=20)
        self.record_an_exchange('-7', '0', 'noted', False)
        expected = [{"balance": Decimal('13.00')}]
        SQL = "SELECT balance FROM participants WHERE username='bob'"
        actual = list(gittip.db.fetchall(SQL))
        assert actual == expected, actual
| bountysource/www.gittip.com | tests/test_record_an_exchange.py | Python | cc0-1.0 | 3,836 |
#!/usr/bin/env python
"""
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from lib.core.datatype import AttribDict
# Default values for options the user did not set explicitly; wrapped in an
# AttribDict so callers can use attribute access (e.g. defaults.timeSec).
_defaults = {
    "csvDel": ',',  # field delimiter used in CSV dump output
    "timeSec": 5,  # seconds used by time-based techniques (presumably) -- TODO confirm
    "googlePage": 1,  # Google dork result page to start from
    "verbose": 1,  # verbosity level
    "delay": 0,  # delay between HTTP requests, in seconds
    "timeout": 30,  # connection timeout, in seconds
    "retries": 3,  # number of connection retries
    "saFreq": 0,
    "threads": 1,  # number of concurrent threads
    "level": 1,  # tests-to-perform level
    "risk": 1,  # tests-to-perform risk
    "dumpFormat": "CSV",
    "tech": "BEUSTQ",  # injection techniques to use (one letter per technique)
    "torType": "SOCKS5",  # Tor proxy type
}

defaults = AttribDict(_defaults)
| hackersql/sq1map | lib/core/defaults.py | Python | gpl-3.0 | 566 |
from PyQt4.QtCore import QString
# Resolve a UTF-8 decoding helper for Qt strings.  When QString.fromUtf8 is
# unavailable (presumably under the v2 string API, where plain str is already
# unicode -- TODO confirm), fall back to the identity function.
try:
    _from_Utf8 = QString.fromUtf8
except AttributeError:
    # PEP 8 (E731): use a def instead of assigning a lambda to a name.
    def _from_Utf8(s):
        return s
| dpr10/whooshle-insight | common/utilities.py | Python | gpl-2.0 | 125 |
import socket

# Destination and payload for the one-shot control datagram.
UDP_IP = "127.0.0.1"
UDP_PORT = 6677
MESSAGE = "setsocket:Amp:1"

print( "UDP target IP:", UDP_IP )
print( "UDP target port:", UDP_PORT )
print( "message:", MESSAGE )

# Use the socket as a context manager so the file descriptor is always
# released (the original script leaked it).
with socket.socket(socket.AF_INET,     # Internet
                   socket.SOCK_DGRAM   # UDP
                   ) as sock:
    sock.sendto(bytes(MESSAGE, "utf8"), (UDP_IP, UDP_PORT))
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class JournalVoucherDetail(Document):
    # Child-table row of a Journal Voucher; all behavior is inherited from
    # frappe's Document base class.
    pass
import random
def gen_rand():
    """Return a pseudo-random integer in the half-open range [0, 25)."""
    value = random.randrange(0, 25)
    return value
def gen_rand_arr():
    """Build five cumulative pseudo-random readings, returned as strings.

    The first three readings grow by a small random step; the last two grow
    by a random step on top of triple the previous value.
    """
    values = [gen_rand()]
    values.append(gen_rand() + values[-1])
    values.append(gen_rand() + values[-1])
    values.append(gen_rand() + values[-1] * 3)
    values.append(gen_rand() + values[-1] * 3)
    return [str(v) for v in values]
# Build a 30x30 grid of fake sensor entries; each cell carries two random
# time-series dicts ('time' and 'pollen_time') rendered as JS-style literals.
# NOTE: this file is Python 2 (print statement below).
k = []
for r in range(30):
    for c in range(30):
        f = gen_rand_arr()
        s = gen_rand_arr()
        k.append("{'x': " + str(r) + ",'y': " + str(c) + ",'time': {'one': "+f[0]+",'two': "+f[1]+",'three': "+f[2]+",'week': "+f[3]+",'month': "+f[4]+"},'pollen_time': {'one': "+s[0]+",'two': "+s[1]+",'three': "+s[2]+",'week': "+s[3]+",'month': "+s[4]+"}}")
# Strip double quotes so the output reads as bare object literals.
print str(k).replace('\"', '')
| teja635/vandy_hacks | random_data_generator.py | Python | mit | 719 |
# coding: utf-8
from .platform import TestPlatform, make_test_fsm
| BernardFW/bernard | src/bernard/platforms/test/__init__.py | Python | agpl-3.0 | 66 |
#!/usr/bin/env python
class Solution:
    def solveNQueens(self, n: int):
        """Return every distinct n-queens placement as a list of board rows.

        Each solution is a list of n strings; row i contains a single 'Q' in
        the chosen column and '.' elsewhere.  Solutions are produced in the
        order found by a depth-first search that tries columns in ascending
        order per row (same ordering as the original implementation).

        The original one-line version round-tripped column indices through
        single digit characters, which breaks for n >= 10; columns are kept
        as integers here, so any n works.
        """
        solutions = []

        def place(row, cols, diag1, diag2, queens):
            # cols / diag1 / diag2 hold occupied columns and diagonals
            # (row+col and row-col are constant along the two diagonals).
            if row == n:
                solutions.append(list(queens))
                return
            for col in range(n):
                if col in cols or row + col in diag1 or row - col in diag2:
                    continue
                queens.append(col)
                place(row + 1, cols | {col},
                      diag1 | {row + col}, diag2 | {row - col}, queens)
                queens.pop()

        place(0, set(), set(), set(), [])
        return [["." * c + "Q" + "." * (n - c - 1) for c in sol]
                for sol in solutions]
# Demo: print all 10 solutions of the 5-queens puzzle.
sol = Solution()
print(sol.solveNQueens(5))
# print('k = %d, seq = %s, col = %s, dia1 = %s, dia2 = %s' % (k, seq, col, dia1, dia2))
| eroicaleo/LearningPython | interview/leet/51_N-Queens_v2.py | Python | mit | 616 |
"""
cycapture provides python (cython) bindings for the libpcap and libtins libraries.
"""
__author__ = 'stef'
| stephane-martin/cycapture | cycapture/__init__.py | Python | lgpl-3.0 | 113 |
from __future__ import unicode_literals
from dvc.output.hdfs import OutputHDFS
from dvc.dependency.base import DependencyBase
class DependencyHDFS(DependencyBase, OutputHDFS):
    # HDFS dependency: generic dependency behavior from DependencyBase
    # combined with the HDFS path/URL handling of OutputHDFS.
    pass
| dataversioncontrol/dvc | dvc/dependency/hdfs.py | Python | apache-2.0 | 188 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
import uuid
from oslo_config import cfg
from six.moves import range
from keystone.common import controller
from keystone import exception
from keystone.tests import unit as tests
from keystone.tests.unit import test_v3
from keystone.tests.unit import utils
CONF = cfg.CONF
class AssignmentTestCase(test_v3.RestfulTestCase,
test_v3.AssignmentTestMixin):
"""Test domains, projects, roles and role assignments."""
    def setUp(self):
        """Create a group and a credential in the default test domain/project."""
        super(AssignmentTestCase, self).setUp()
        self.group = self.new_group_ref(
            domain_id=self.domain_id)
        self.group = self.identity_api.create_group(self.group)
        self.group_id = self.group['id']
        # Credential tied to the default user and project, used by the
        # domain-deletion tests below to verify cascade behavior.
        self.credential_id = uuid.uuid4().hex
        self.credential = self.new_credential_ref(
            user_id=self.user['id'],
            project_id=self.project_id)
        self.credential['id'] = self.credential_id
        self.credential_api.create_credential(
            self.credential_id,
            self.credential)
# Domain CRUD tests
def test_create_domain(self):
"""Call ``POST /domains``."""
ref = self.new_domain_ref()
r = self.post(
'/domains',
body={'domain': ref})
return self.assertValidDomainResponse(r, ref)
def test_create_domain_case_sensitivity(self):
"""Call `POST /domains`` twice with upper() and lower() cased name."""
ref = self.new_domain_ref()
# ensure the name is lowercase
ref['name'] = ref['name'].lower()
r = self.post(
'/domains',
body={'domain': ref})
self.assertValidDomainResponse(r, ref)
# ensure the name is uppercase
ref['name'] = ref['name'].upper()
r = self.post(
'/domains',
body={'domain': ref})
self.assertValidDomainResponse(r, ref)
def test_create_domain_400(self):
"""Call ``POST /domains``."""
self.post('/domains', body={'domain': {}}, expected_status=400)
def test_list_domains(self):
"""Call ``GET /domains``."""
resource_url = '/domains'
r = self.get(resource_url)
self.assertValidDomainListResponse(r, ref=self.domain,
resource_url=resource_url)
def test_get_domain(self):
"""Call ``GET /domains/{domain_id}``."""
r = self.get('/domains/%(domain_id)s' % {
'domain_id': self.domain_id})
self.assertValidDomainResponse(r, self.domain)
def test_update_domain(self):
"""Call ``PATCH /domains/{domain_id}``."""
ref = self.new_domain_ref()
del ref['id']
r = self.patch('/domains/%(domain_id)s' % {
'domain_id': self.domain_id},
body={'domain': ref})
self.assertValidDomainResponse(r, ref)
def test_disable_domain(self):
"""Call ``PATCH /domains/{domain_id}`` (set enabled=False)."""
# Create a 2nd set of entities in a 2nd domain
self.domain2 = self.new_domain_ref()
self.resource_api.create_domain(self.domain2['id'], self.domain2)
self.project2 = self.new_project_ref(
domain_id=self.domain2['id'])
self.resource_api.create_project(self.project2['id'], self.project2)
self.user2 = self.new_user_ref(
domain_id=self.domain2['id'],
project_id=self.project2['id'])
password = self.user2['password']
self.user2 = self.identity_api.create_user(self.user2)
self.user2['password'] = password
self.assignment_api.add_user_to_project(self.project2['id'],
self.user2['id'])
# First check a user in that domain can authenticate. The v2 user
# cannot authenticate because they exist outside the default domain.
body = {
'auth': {
'passwordCredentials': {
'userId': self.user2['id'],
'password': self.user2['password']
},
'tenantId': self.project2['id']
}
}
self.admin_request(
path='/v2.0/tokens', method='POST', body=body, expected_status=401)
auth_data = self.build_authentication_request(
user_id=self.user2['id'],
password=self.user2['password'],
project_id=self.project2['id'])
self.v3_authenticate_token(auth_data)
# Now disable the domain
self.domain2['enabled'] = False
r = self.patch('/domains/%(domain_id)s' % {
'domain_id': self.domain2['id']},
body={'domain': {'enabled': False}})
self.assertValidDomainResponse(r, self.domain2)
# Make sure the user can no longer authenticate, via
# either API
body = {
'auth': {
'passwordCredentials': {
'userId': self.user2['id'],
'password': self.user2['password']
},
'tenantId': self.project2['id']
}
}
self.admin_request(
path='/v2.0/tokens', method='POST', body=body, expected_status=401)
# Try looking up in v3 by name and id
auth_data = self.build_authentication_request(
user_id=self.user2['id'],
password=self.user2['password'],
project_id=self.project2['id'])
self.v3_authenticate_token(auth_data, expected_status=401)
auth_data = self.build_authentication_request(
username=self.user2['name'],
user_domain_id=self.domain2['id'],
password=self.user2['password'],
project_id=self.project2['id'])
self.v3_authenticate_token(auth_data, expected_status=401)
def test_delete_enabled_domain_fails(self):
"""Call ``DELETE /domains/{domain_id}`` (when domain enabled)."""
# Try deleting an enabled domain, which should fail
self.delete('/domains/%(domain_id)s' % {
'domain_id': self.domain['id']},
expected_status=exception.ForbiddenAction.code)
def test_delete_domain(self):
"""Call ``DELETE /domains/{domain_id}``.
The sample data set up already has a user, group, project
and credential that is part of self.domain. Since the user
we will authenticate with is in this domain, we create a
another set of entities in a second domain. Deleting this
second domain should delete all these new entities. In addition,
all the entities in the regular self.domain should be unaffected
by the delete.
Test Plan:
- Create domain2 and a 2nd set of entities
- Disable domain2
- Delete domain2
- Check entities in domain2 have been deleted
- Check entities in self.domain are unaffected
"""
# Create a 2nd set of entities in a 2nd domain
self.domain2 = self.new_domain_ref()
self.resource_api.create_domain(self.domain2['id'], self.domain2)
self.project2 = self.new_project_ref(
domain_id=self.domain2['id'])
self.resource_api.create_project(self.project2['id'], self.project2)
self.user2 = self.new_user_ref(
domain_id=self.domain2['id'],
project_id=self.project2['id'])
self.user2 = self.identity_api.create_user(self.user2)
self.group2 = self.new_group_ref(
domain_id=self.domain2['id'])
self.group2 = self.identity_api.create_group(self.group2)
self.credential2 = self.new_credential_ref(
user_id=self.user2['id'],
project_id=self.project2['id'])
self.credential_api.create_credential(
self.credential2['id'],
self.credential2)
# Now disable the new domain and delete it
self.domain2['enabled'] = False
r = self.patch('/domains/%(domain_id)s' % {
'domain_id': self.domain2['id']},
body={'domain': {'enabled': False}})
self.assertValidDomainResponse(r, self.domain2)
self.delete('/domains/%(domain_id)s' % {
'domain_id': self.domain2['id']})
# Check all the domain2 relevant entities are gone
self.assertRaises(exception.DomainNotFound,
self.resource_api.get_domain,
self.domain2['id'])
self.assertRaises(exception.ProjectNotFound,
self.resource_api.get_project,
self.project2['id'])
self.assertRaises(exception.GroupNotFound,
self.identity_api.get_group,
self.group2['id'])
self.assertRaises(exception.UserNotFound,
self.identity_api.get_user,
self.user2['id'])
self.assertRaises(exception.CredentialNotFound,
self.credential_api.get_credential,
self.credential2['id'])
# ...and that all self.domain entities are still here
r = self.resource_api.get_domain(self.domain['id'])
self.assertDictEqual(r, self.domain)
r = self.resource_api.get_project(self.project['id'])
self.assertDictEqual(r, self.project)
r = self.identity_api.get_group(self.group['id'])
self.assertDictEqual(r, self.group)
r = self.identity_api.get_user(self.user['id'])
self.user.pop('password')
self.assertDictEqual(r, self.user)
r = self.credential_api.get_credential(self.credential['id'])
self.assertDictEqual(r, self.credential)
def test_delete_default_domain_fails(self):
# Attempting to delete the default domain results in 403 Forbidden.
# Need to disable it first.
self.patch('/domains/%(domain_id)s' % {
'domain_id': CONF.identity.default_domain_id},
body={'domain': {'enabled': False}})
self.delete('/domains/%(domain_id)s' % {
'domain_id': CONF.identity.default_domain_id},
expected_status=exception.ForbiddenAction.code)
def test_delete_new_default_domain_fails(self):
# If change the default domain ID, deleting the new default domain
# results in a 403 Forbidden.
# Create a new domain that's not the default
new_domain = self.new_domain_ref()
new_domain_id = new_domain['id']
self.resource_api.create_domain(new_domain_id, new_domain)
# Disable the new domain so can delete it later.
self.patch('/domains/%(domain_id)s' % {
'domain_id': new_domain_id},
body={'domain': {'enabled': False}})
# Change the default domain
self.config_fixture.config(group='identity',
default_domain_id=new_domain_id)
# Attempt to delete the new domain
self.delete('/domains/%(domain_id)s' % {'domain_id': new_domain_id},
expected_status=exception.ForbiddenAction.code)
def test_delete_old_default_domain(self):
# If change the default domain ID, deleting the old default domain
# works.
# Create a new domain that's not the default
new_domain = self.new_domain_ref()
new_domain_id = new_domain['id']
self.resource_api.create_domain(new_domain_id, new_domain)
old_default_domain_id = CONF.identity.default_domain_id
# Disable the default domain so we can delete it later.
self.patch('/domains/%(domain_id)s' % {
'domain_id': old_default_domain_id},
body={'domain': {'enabled': False}})
# Change the default domain
self.config_fixture.config(group='identity',
default_domain_id=new_domain_id)
# Delete the old default domain
self.delete(
'/domains/%(domain_id)s' % {'domain_id': old_default_domain_id})
def test_token_revoked_once_domain_disabled(self):
"""Test token from a disabled domain has been invalidated.
Test that a token that was valid for an enabled domain
becomes invalid once that domain is disabled.
"""
self.domain = self.new_domain_ref()
self.resource_api.create_domain(self.domain['id'], self.domain)
self.user2 = self.new_user_ref(domain_id=self.domain['id'])
password = self.user2['password']
self.user2 = self.identity_api.create_user(self.user2)
self.user2['password'] = password
# build a request body
auth_body = self.build_authentication_request(
user_id=self.user2['id'],
password=self.user2['password'])
# sends a request for the user's token
token_resp = self.post('/auth/tokens', body=auth_body)
subject_token = token_resp.headers.get('x-subject-token')
# validates the returned token and it should be valid.
self.head('/auth/tokens',
headers={'x-subject-token': subject_token},
expected_status=200)
# now disable the domain
self.domain['enabled'] = False
url = "/domains/%(domain_id)s" % {'domain_id': self.domain['id']}
self.patch(url,
body={'domain': {'enabled': False}},
expected_status=200)
# validates the same token again and it should be 'not found'
# as the domain has already been disabled.
self.head('/auth/tokens',
headers={'x-subject-token': subject_token},
expected_status=404)
    def test_delete_domain_hierarchy(self):
        """Call ``DELETE /domains/{domain_id}``."""
        # Domain containing a two-level project hierarchy (root -> leaf).
        domain = self.new_domain_ref()
        self.resource_api.create_domain(domain['id'], domain)
        root_project = self.new_project_ref(
            domain_id=domain['id'])
        self.resource_api.create_project(root_project['id'], root_project)
        leaf_project = self.new_project_ref(
            domain_id=domain['id'],
            parent_id=root_project['id'])
        self.resource_api.create_project(leaf_project['id'], leaf_project)
        # Need to disable it first.
        self.patch('/domains/%(domain_id)s' % {
            'domain_id': domain['id']},
            body={'domain': {'enabled': False}})
        self.delete(
            '/domains/%(domain_id)s' % {
                'domain_id': domain['id']})
        # Deleting the domain must cascade to both projects in the tree.
        self.assertRaises(exception.DomainNotFound,
                          self.resource_api.get_domain,
                          domain['id'])
        self.assertRaises(exception.ProjectNotFound,
                          self.resource_api.get_project,
                          root_project['id'])
        self.assertRaises(exception.ProjectNotFound,
                          self.resource_api.get_project,
                          leaf_project['id'])
def test_forbid_operations_on_federated_domain(self):
"""Make sure one cannot operate on federated domain.
This includes operations like create, update, delete
on domain identified by id and name where difference variations of
id 'Federated' are used.
"""
def create_domains():
for variation in ('Federated', 'FEDERATED',
'federated', 'fEderated'):
domain = self.new_domain_ref()
domain['id'] = variation
yield domain
for domain in create_domains():
self.assertRaises(
AssertionError, self.resource_api.create_domain,
domain['id'], domain)
self.assertRaises(
AssertionError, self.resource_api.update_domain,
domain['id'], domain)
self.assertRaises(
exception.DomainNotFound, self.resource_api.delete_domain,
domain['id'])
# swap 'name' with 'id' and try again, expecting the request to
# gracefully fail
domain['id'], domain['name'] = domain['name'], domain['id']
self.assertRaises(
AssertionError, self.resource_api.create_domain,
domain['id'], domain)
self.assertRaises(
AssertionError, self.resource_api.update_domain,
domain['id'], domain)
self.assertRaises(
exception.DomainNotFound, self.resource_api.delete_domain,
domain['id'])
def test_forbid_operations_on_defined_federated_domain(self):
"""Make sure one cannot operate on a user-defined federated domain.
This includes operations like create, update, delete.
"""
non_default_name = 'beta_federated_domain'
self.config_fixture.config(group='federation',
federated_domain_name=non_default_name)
domain = self.new_domain_ref()
domain['name'] = non_default_name
self.assertRaises(AssertionError,
self.resource_api.create_domain,
domain['id'], domain)
self.assertRaises(exception.DomainNotFound,
self.resource_api.delete_domain,
domain['id'])
self.assertRaises(AssertionError,
self.resource_api.update_domain,
domain['id'], domain)
# Project CRUD tests
def test_list_projects(self):
"""Call ``GET /projects``."""
resource_url = '/projects'
r = self.get(resource_url)
self.assertValidProjectListResponse(r, ref=self.project,
resource_url=resource_url)
def test_create_project(self):
"""Call ``POST /projects``."""
ref = self.new_project_ref(domain_id=self.domain_id)
r = self.post(
'/projects',
body={'project': ref})
self.assertValidProjectResponse(r, ref)
def test_create_project_400(self):
"""Call ``POST /projects``."""
self.post('/projects', body={'project': {}}, expected_status=400)
def test_create_project_invalid_domain_id(self):
"""Call ``POST /projects``."""
ref = self.new_project_ref(domain_id=uuid.uuid4().hex)
self.post('/projects', body={'project': ref}, expected_status=400)
def test_create_project_is_domain_not_allowed(self):
"""Call ``POST /projects``.
Setting is_domain=True is not supported yet and should raise
NotImplemented.
"""
ref = self.new_project_ref(domain_id=self.domain_id, is_domain=True)
self.post('/projects',
body={'project': ref},
expected_status=501)
@utils.wip('waiting for projects acting as domains implementation')
def test_create_project_without_parent_id_and_without_domain_id(self):
"""Call ``POST /projects``."""
# Grant a domain role for the user
collection_url = (
'/domains/%(domain_id)s/users/%(user_id)s/roles' % {
'domain_id': self.domain_id,
'user_id': self.user['id']})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id}
self.put(member_url)
# Create an authentication request for a domain scoped token
auth = self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'],
domain_id=self.domain_id)
# Without domain_id and parent_id, the domain_id should be
# normalized to the domain on the token, when using a domain
# scoped token.
ref = self.new_project_ref()
r = self.post(
'/projects',
auth=auth,
body={'project': ref})
ref['domain_id'] = self.domain['id']
self.assertValidProjectResponse(r, ref)
@utils.wip('waiting for projects acting as domains implementation')
def test_create_project_with_parent_id_and_no_domain_id(self):
"""Call ``POST /projects``."""
# With only the parent_id, the domain_id should be
# normalized to the parent's domain_id
ref_child = self.new_project_ref(parent_id=self.project['id'])
r = self.post(
'/projects',
body={'project': ref_child})
self.assertEqual(r.result['project']['domain_id'],
self.project['domain_id'])
ref_child['domain_id'] = self.domain['id']
self.assertValidProjectResponse(r, ref_child)
    def _create_projects_hierarchy(self, hierarchy_size=1):
        """Creates a single-branched project hierarchy with the specified size.

        :param hierarchy_size: the desired hierarchy size, default is 1 -
                               a project with one child.
        :returns projects: a list of the projects in the created hierarchy.
        """
        # Root project of the branch.
        new_ref = self.new_project_ref(domain_id=self.domain_id)
        resp = self.post('/projects', body={'project': new_ref})
        projects = [resp.result]
        # Each further project is created as a child of the previous one,
        # yielding hierarchy_size + 1 projects in total.
        for i in range(hierarchy_size):
            new_ref = self.new_project_ref(
                domain_id=self.domain_id,
                parent_id=projects[i]['project']['id'])
            resp = self.post('/projects',
                             body={'project': new_ref})
            self.assertValidProjectResponse(resp, new_ref)
            projects.append(resp.result)
        return projects
def test_list_projects_filtering_by_parent_id(self):
"""Call ``GET /projects?parent_id={project_id}``."""
projects = self._create_projects_hierarchy(hierarchy_size=2)
# Add another child to projects[1] - it will be projects[3]
new_ref = self.new_project_ref(
domain_id=self.domain_id,
parent_id=projects[1]['project']['id'])
resp = self.post('/projects',
body={'project': new_ref})
self.assertValidProjectResponse(resp, new_ref)
projects.append(resp.result)
# Query for projects[0] immediate children - it will
# be only projects[1]
r = self.get(
'/projects?parent_id=%(project_id)s' % {
'project_id': projects[0]['project']['id']})
self.assertValidProjectListResponse(r)
projects_result = r.result['projects']
expected_list = [projects[1]['project']]
# projects[0] has projects[1] as child
self.assertEqual(expected_list, projects_result)
# Query for projects[1] immediate children - it will
# be projects[2] and projects[3]
r = self.get(
'/projects?parent_id=%(project_id)s' % {
'project_id': projects[1]['project']['id']})
self.assertValidProjectListResponse(r)
projects_result = r.result['projects']
expected_list = [projects[2]['project'], projects[3]['project']]
# projects[1] has projects[2] and projects[3] as children
self.assertEqual(expected_list, projects_result)
# Query for projects[2] immediate children - it will be an empty list
r = self.get(
'/projects?parent_id=%(project_id)s' % {
'project_id': projects[2]['project']['id']})
self.assertValidProjectListResponse(r)
projects_result = r.result['projects']
expected_list = []
# projects[2] has no child, projects_result must be an empty list
self.assertEqual(expected_list, projects_result)
def test_create_hierarchical_project(self):
"""Call ``POST /projects``."""
self._create_projects_hierarchy()
def test_get_project(self):
"""Call ``GET /projects/{project_id}``."""
r = self.get(
'/projects/%(project_id)s' % {
'project_id': self.project_id})
self.assertValidProjectResponse(r, self.project)
def test_get_project_with_parents_as_list_with_invalid_id(self):
"""Call ``GET /projects/{project_id}?parents_as_list``."""
self.get('/projects/%(project_id)s?parents_as_list' % {
'project_id': None}, expected_status=404)
self.get('/projects/%(project_id)s?parents_as_list' % {
'project_id': uuid.uuid4().hex}, expected_status=404)
def test_get_project_with_subtree_as_list_with_invalid_id(self):
"""Call ``GET /projects/{project_id}?subtree_as_list``."""
self.get('/projects/%(project_id)s?subtree_as_list' % {
'project_id': None}, expected_status=404)
self.get('/projects/%(project_id)s?subtree_as_list' % {
'project_id': uuid.uuid4().hex}, expected_status=404)
def test_get_project_with_parents_as_ids(self):
"""Call ``GET /projects/{project_id}?parents_as_ids``."""
projects = self._create_projects_hierarchy(hierarchy_size=2)
# Query for projects[2] parents_as_ids
r = self.get(
'/projects/%(project_id)s?parents_as_ids' % {
'project_id': projects[2]['project']['id']})
self.assertValidProjectResponse(r, projects[2]['project'])
parents_as_ids = r.result['project']['parents']
# Assert parents_as_ids is a structured dictionary correctly
# representing the hierarchy. The request was made using projects[2]
# id, hence its parents should be projects[1] and projects[0]. It
# should have the following structure:
# {
# projects[1]: {
# projects[0]: None
# }
# }
expected_dict = {
projects[1]['project']['id']: {
projects[0]['project']['id']: None
}
}
self.assertDictEqual(expected_dict, parents_as_ids)
# Query for projects[0] parents_as_ids
r = self.get(
'/projects/%(project_id)s?parents_as_ids' % {
'project_id': projects[0]['project']['id']})
self.assertValidProjectResponse(r, projects[0]['project'])
parents_as_ids = r.result['project']['parents']
# projects[0] has no parents, parents_as_ids must be None
self.assertIsNone(parents_as_ids)
def test_get_project_with_parents_as_list_with_full_access(self):
"""``GET /projects/{project_id}?parents_as_list`` with full access.
Test plan:
- Create 'parent', 'project' and 'subproject' projects;
- Assign a user a role on each one of those projects;
- Check that calling parents_as_list on 'subproject' returns both
'project' and 'parent'.
"""
# Create the project hierarchy
parent, project, subproject = self._create_projects_hierarchy(2)
# Assign a role for the user on all the created projects
for proj in (parent, project, subproject):
self.put(self.build_role_assignment_link(
role_id=self.role_id, user_id=self.user_id,
project_id=proj['project']['id']))
# Make the API call
r = self.get('/projects/%(project_id)s?parents_as_list' %
{'project_id': subproject['project']['id']})
self.assertValidProjectResponse(r, subproject['project'])
# Assert only 'project' and 'parent' are in the parents list
self.assertIn(project, r.result['project']['parents'])
self.assertIn(parent, r.result['project']['parents'])
self.assertEqual(2, len(r.result['project']['parents']))
def test_get_project_with_parents_as_list_with_partial_access(self):
"""``GET /projects/{project_id}?parents_as_list`` with partial access.
Test plan:
- Create 'parent', 'project' and 'subproject' projects;
- Assign a user a role on 'parent' and 'subproject';
- Check that calling parents_as_list on 'subproject' only returns
'parent'.
"""
# Create the project hierarchy
parent, project, subproject = self._create_projects_hierarchy(2)
# Assign a role for the user on parent and subproject
for proj in (parent, subproject):
self.put(self.build_role_assignment_link(
role_id=self.role_id, user_id=self.user_id,
project_id=proj['project']['id']))
# Make the API call
r = self.get('/projects/%(project_id)s?parents_as_list' %
{'project_id': subproject['project']['id']})
self.assertValidProjectResponse(r, subproject['project'])
# Assert only 'parent' is in the parents list
self.assertIn(parent, r.result['project']['parents'])
self.assertEqual(1, len(r.result['project']['parents']))
def test_get_project_with_parents_as_list_and_parents_as_ids(self):
"""Call ``GET /projects/{project_id}?parents_as_list&parents_as_ids``.
"""
projects = self._create_projects_hierarchy(hierarchy_size=2)
self.get(
'/projects/%(project_id)s?parents_as_list&parents_as_ids' % {
'project_id': projects[1]['project']['id']},
expected_status=400)
    def test_get_project_with_subtree_as_ids(self):
        """Call ``GET /projects/{project_id}?subtree_as_ids``.

        This test creates a more complex hierarchy to test if the structured
        dictionary returned by using the ``subtree_as_ids`` query param
        correctly represents the hierarchy.

        The hierarchy contains 5 projects with the following structure::

            +--A--+
            |     |
            +--B--+  C
            |     |
            D     E

        """
        # Start with a linear chain: projects[0] -> projects[1] -> projects[2]
        projects = self._create_projects_hierarchy(hierarchy_size=2)

        # Add another child to projects[0] - it will be projects[3]
        new_ref = self.new_project_ref(
            domain_id=self.domain_id,
            parent_id=projects[0]['project']['id'])
        resp = self.post('/projects',
                         body={'project': new_ref})
        self.assertValidProjectResponse(resp, new_ref)
        projects.append(resp.result)

        # Add another child to projects[1] - it will be projects[4]
        new_ref = self.new_project_ref(
            domain_id=self.domain_id,
            parent_id=projects[1]['project']['id'])
        resp = self.post('/projects',
                         body={'project': new_ref})
        self.assertValidProjectResponse(resp, new_ref)
        projects.append(resp.result)

        # Query for projects[0] subtree_as_ids
        r = self.get(
            '/projects/%(project_id)s?subtree_as_ids' % {
                'project_id': projects[0]['project']['id']})
        self.assertValidProjectResponse(r, projects[0]['project'])
        subtree_as_ids = r.result['project']['subtree']

        # The subtree hierarchy from projects[0] should have the following
        # structure:
        # {
        #   projects[1]: {
        #       projects[2]: None,
        #       projects[4]: None
        #   },
        #   projects[3]: None
        # }
        expected_dict = {
            projects[1]['project']['id']: {
                projects[2]['project']['id']: None,
                projects[4]['project']['id']: None
            },
            projects[3]['project']['id']: None
        }
        self.assertDictEqual(expected_dict, subtree_as_ids)

        # Now query for projects[1] subtree_as_ids
        r = self.get(
            '/projects/%(project_id)s?subtree_as_ids' % {
                'project_id': projects[1]['project']['id']})
        self.assertValidProjectResponse(r, projects[1]['project'])
        subtree_as_ids = r.result['project']['subtree']

        # The subtree hierarchy from projects[1] should have the following
        # structure:
        # {
        #   projects[2]: None,
        #   projects[4]: None
        # }
        expected_dict = {
            projects[2]['project']['id']: None,
            projects[4]['project']['id']: None
        }
        self.assertDictEqual(expected_dict, subtree_as_ids)

        # Now query for projects[3] subtree_as_ids
        r = self.get(
            '/projects/%(project_id)s?subtree_as_ids' % {
                'project_id': projects[3]['project']['id']})
        self.assertValidProjectResponse(r, projects[3]['project'])
        subtree_as_ids = r.result['project']['subtree']

        # projects[3] has no subtree, subtree_as_ids must be None
        self.assertIsNone(subtree_as_ids)
def test_get_project_with_subtree_as_list_with_full_access(self):
"""``GET /projects/{project_id}?subtree_as_list`` with full access.
Test plan:
- Create 'parent', 'project' and 'subproject' projects;
- Assign a user a role on each one of those projects;
- Check that calling subtree_as_list on 'parent' returns both 'parent'
and 'subproject'.
"""
# Create the project hierarchy
parent, project, subproject = self._create_projects_hierarchy(2)
# Assign a role for the user on all the created projects
for proj in (parent, project, subproject):
self.put(self.build_role_assignment_link(
role_id=self.role_id, user_id=self.user_id,
project_id=proj['project']['id']))
# Make the API call
r = self.get('/projects/%(project_id)s?subtree_as_list' %
{'project_id': parent['project']['id']})
self.assertValidProjectResponse(r, parent['project'])
# Assert only 'project' and 'subproject' are in the subtree
self.assertIn(project, r.result['project']['subtree'])
self.assertIn(subproject, r.result['project']['subtree'])
self.assertEqual(2, len(r.result['project']['subtree']))
def test_get_project_with_subtree_as_list_with_partial_access(self):
"""``GET /projects/{project_id}?subtree_as_list`` with partial access.
Test plan:
- Create 'parent', 'project' and 'subproject' projects;
- Assign a user a role on 'parent' and 'subproject';
- Check that calling subtree_as_list on 'parent' returns 'subproject'.
"""
# Create the project hierarchy
parent, project, subproject = self._create_projects_hierarchy(2)
# Assign a role for the user on parent and subproject
for proj in (parent, subproject):
self.put(self.build_role_assignment_link(
role_id=self.role_id, user_id=self.user_id,
project_id=proj['project']['id']))
# Make the API call
r = self.get('/projects/%(project_id)s?subtree_as_list' %
{'project_id': parent['project']['id']})
self.assertValidProjectResponse(r, parent['project'])
# Assert only 'subproject' is in the subtree
self.assertIn(subproject, r.result['project']['subtree'])
self.assertEqual(1, len(r.result['project']['subtree']))
def test_get_project_with_subtree_as_list_and_subtree_as_ids(self):
"""Call ``GET /projects/{project_id}?subtree_as_list&subtree_as_ids``.
"""
projects = self._create_projects_hierarchy(hierarchy_size=2)
self.get(
'/projects/%(project_id)s?subtree_as_list&subtree_as_ids' % {
'project_id': projects[1]['project']['id']},
expected_status=400)
def test_update_project(self):
"""Call ``PATCH /projects/{project_id}``."""
ref = self.new_project_ref(domain_id=self.domain_id)
del ref['id']
r = self.patch(
'/projects/%(project_id)s' % {
'project_id': self.project_id},
body={'project': ref})
self.assertValidProjectResponse(r, ref)
def test_update_project_domain_id(self):
"""Call ``PATCH /projects/{project_id}`` with domain_id."""
project = self.new_project_ref(domain_id=self.domain['id'])
self.resource_api.create_project(project['id'], project)
project['domain_id'] = CONF.identity.default_domain_id
r = self.patch('/projects/%(project_id)s' % {
'project_id': project['id']},
body={'project': project},
expected_status=exception.ValidationError.code)
self.config_fixture.config(domain_id_immutable=False)
project['domain_id'] = self.domain['id']
r = self.patch('/projects/%(project_id)s' % {
'project_id': project['id']},
body={'project': project})
self.assertValidProjectResponse(r, project)
def test_update_project_parent_id(self):
"""Call ``PATCH /projects/{project_id}``."""
projects = self._create_projects_hierarchy()
leaf_project = projects[1]['project']
leaf_project['parent_id'] = None
self.patch(
'/projects/%(project_id)s' % {
'project_id': leaf_project['id']},
body={'project': leaf_project},
expected_status=403)
def test_update_project_is_domain_not_allowed(self):
"""Call ``PATCH /projects/{project_id}`` with is_domain.
The is_domain flag is immutable.
"""
project = self.new_project_ref(domain_id=self.domain['id'])
resp = self.post('/projects',
body={'project': project})
self.assertFalse(resp.result['project']['is_domain'])
project['is_domain'] = True
self.patch('/projects/%(project_id)s' % {
'project_id': resp.result['project']['id']},
body={'project': project},
expected_status=400)
def test_disable_leaf_project(self):
"""Call ``PATCH /projects/{project_id}``."""
projects = self._create_projects_hierarchy()
leaf_project = projects[1]['project']
leaf_project['enabled'] = False
r = self.patch(
'/projects/%(project_id)s' % {
'project_id': leaf_project['id']},
body={'project': leaf_project})
self.assertEqual(
leaf_project['enabled'], r.result['project']['enabled'])
def test_disable_not_leaf_project(self):
"""Call ``PATCH /projects/{project_id}``."""
projects = self._create_projects_hierarchy()
root_project = projects[0]['project']
root_project['enabled'] = False
self.patch(
'/projects/%(project_id)s' % {
'project_id': root_project['id']},
body={'project': root_project},
expected_status=403)
    def test_delete_project(self):
        """Call ``DELETE /projects/{project_id}``.

        As well as making sure the delete succeeds, we ensure
        that any credentials that reference this projects are
        also deleted, while other credentials are unaffected.
        """
        # First check the credential for this project is present
        r = self.credential_api.get_credential(self.credential['id'])
        self.assertDictEqual(r, self.credential)

        # Create a second credential with a different project so we can
        # verify it survives the delete below.
        self.project2 = self.new_project_ref(
            domain_id=self.domain['id'])
        self.resource_api.create_project(self.project2['id'], self.project2)
        self.credential2 = self.new_credential_ref(
            user_id=self.user['id'],
            project_id=self.project2['id'])
        self.credential_api.create_credential(
            self.credential2['id'],
            self.credential2)

        # Now delete the project
        self.delete(
            '/projects/%(project_id)s' % {
                'project_id': self.project_id})

        # Deleting the project should have deleted any credentials
        # that reference this project
        self.assertRaises(exception.CredentialNotFound,
                          self.credential_api.get_credential,
                          credential_id=self.credential['id'])
        # But the credential for project2 is unaffected
        r = self.credential_api.get_credential(self.credential2['id'])
        self.assertDictEqual(r, self.credential2)
def test_delete_not_leaf_project(self):
"""Call ``DELETE /projects/{project_id}``."""
projects = self._create_projects_hierarchy()
self.delete(
'/projects/%(project_id)s' % {
'project_id': projects[0]['project']['id']},
expected_status=403)
# Role CRUD tests
def test_create_role(self):
"""Call ``POST /roles``."""
ref = self.new_role_ref()
r = self.post(
'/roles',
body={'role': ref})
return self.assertValidRoleResponse(r, ref)
def test_create_role_400(self):
"""Call ``POST /roles``."""
self.post('/roles', body={'role': {}}, expected_status=400)
def test_list_roles(self):
"""Call ``GET /roles``."""
resource_url = '/roles'
r = self.get(resource_url)
self.assertValidRoleListResponse(r, ref=self.role,
resource_url=resource_url)
def test_get_role(self):
"""Call ``GET /roles/{role_id}``."""
r = self.get('/roles/%(role_id)s' % {
'role_id': self.role_id})
self.assertValidRoleResponse(r, self.role)
def test_update_role(self):
"""Call ``PATCH /roles/{role_id}``."""
ref = self.new_role_ref()
del ref['id']
r = self.patch('/roles/%(role_id)s' % {
'role_id': self.role_id},
body={'role': ref})
self.assertValidRoleResponse(r, ref)
def test_delete_role(self):
"""Call ``DELETE /roles/{role_id}``."""
self.delete('/roles/%(role_id)s' % {
'role_id': self.role_id})
def test_create_member_role(self):
"""Call ``POST /roles``."""
# specify only the name on creation
ref = self.new_role_ref()
ref['name'] = CONF.member_role_name
r = self.post(
'/roles',
body={'role': ref})
self.assertValidRoleResponse(r, ref)
# but the ID should be set as defined in CONF
self.assertEqual(CONF.member_role_id, r.json['role']['id'])
# Role Grants tests
    def test_crud_user_project_role_grants(self):
        """Grant a user a role on a project and verify HEAD/GET see it."""
        collection_url = (
            '/projects/%(project_id)s/users/%(user_id)s/roles' % {
                'project_id': self.project['id'],
                'user_id': self.user['id']})
        member_url = '%(collection_url)s/%(role_id)s' % {
            'collection_url': collection_url,
            'role_id': self.role_id}

        # Create the grant, then check both the membership URL and the
        # role listing reflect it.
        self.put(member_url)
        self.head(member_url)
        r = self.get(collection_url)
        self.assertValidRoleListResponse(r, ref=self.role,
                                         resource_url=collection_url)

        # FIXME(gyee): this test is no longer valid as user
        # have no role in the project. Can't get a scoped token
        # self.delete(member_url)
        # r = self.get(collection_url)
        # self.assertValidRoleListResponse(r, expected_length=0)
        # self.assertIn(collection_url, r.result['links']['self'])
def test_crud_user_project_role_grants_no_user(self):
"""Grant role on a project to a user that doesn't exist, 404 result.
When grant a role on a project to a user that doesn't exist, the server
returns 404 Not Found for the user.
"""
user_id = uuid.uuid4().hex
collection_url = (
'/projects/%(project_id)s/users/%(user_id)s/roles' % {
'project_id': self.project['id'], 'user_id': user_id})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id}
self.put(member_url, expected_status=404)
def test_crud_user_domain_role_grants(self):
collection_url = (
'/domains/%(domain_id)s/users/%(user_id)s/roles' % {
'domain_id': self.domain_id,
'user_id': self.user['id']})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id}
self.put(member_url)
self.head(member_url)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=self.role,
resource_url=collection_url)
self.delete(member_url)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, expected_length=0,
resource_url=collection_url)
def test_crud_user_domain_role_grants_no_user(self):
"""Grant role on a domain to a user that doesn't exist, 404 result.
When grant a role on a domain to a user that doesn't exist, the server
returns 404 Not Found for the user.
"""
user_id = uuid.uuid4().hex
collection_url = (
'/domains/%(domain_id)s/users/%(user_id)s/roles' % {
'domain_id': self.domain_id, 'user_id': user_id})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id}
self.put(member_url, expected_status=404)
def test_crud_group_project_role_grants(self):
collection_url = (
'/projects/%(project_id)s/groups/%(group_id)s/roles' % {
'project_id': self.project_id,
'group_id': self.group_id})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id}
self.put(member_url)
self.head(member_url)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=self.role,
resource_url=collection_url)
self.delete(member_url)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, expected_length=0,
resource_url=collection_url)
def test_crud_group_project_role_grants_no_group(self):
"""Grant role on a project to a group that doesn't exist, 404 result.
When grant a role on a project to a group that doesn't exist, the
server returns 404 Not Found for the group.
"""
group_id = uuid.uuid4().hex
collection_url = (
'/projects/%(project_id)s/groups/%(group_id)s/roles' % {
'project_id': self.project_id,
'group_id': group_id})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id}
self.put(member_url, expected_status=404)
def test_crud_group_domain_role_grants(self):
collection_url = (
'/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
'domain_id': self.domain_id,
'group_id': self.group_id})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id}
self.put(member_url)
self.head(member_url)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=self.role,
resource_url=collection_url)
self.delete(member_url)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, expected_length=0,
resource_url=collection_url)
def test_crud_group_domain_role_grants_no_group(self):
"""Grant role on a domain to a group that doesn't exist, 404 result.
When grant a role on a domain to a group that doesn't exist, the server
returns 404 Not Found for the group.
"""
group_id = uuid.uuid4().hex
collection_url = (
'/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
'domain_id': self.domain_id,
'group_id': group_id})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id}
self.put(member_url, expected_status=404)
def _create_new_user_and_assign_role_on_project(self):
"""Create a new user and assign user a role on a project."""
# Create a new user
new_user = self.new_user_ref(domain_id=self.domain_id)
user_ref = self.identity_api.create_user(new_user)
# Assign the user a role on the project
collection_url = (
'/projects/%(project_id)s/users/%(user_id)s/roles' % {
'project_id': self.project_id,
'user_id': user_ref['id']})
member_url = ('%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id})
self.put(member_url, expected_status=204)
# Check the user has the role assigned
self.head(member_url, expected_status=204)
return member_url, user_ref
def test_delete_user_before_removing_role_assignment_succeeds(self):
"""Call ``DELETE`` on the user before the role assignment."""
member_url, user = self._create_new_user_and_assign_role_on_project()
# Delete the user from identity backend
self.identity_api.driver.delete_user(user['id'])
# Clean up the role assignment
self.delete(member_url, expected_status=204)
# Make sure the role is gone
self.head(member_url, expected_status=404)
def test_delete_user_and_check_role_assignment_fails(self):
"""Call ``DELETE`` on the user and check the role assignment."""
member_url, user = self._create_new_user_and_assign_role_on_project()
# Delete the user from identity backend
self.identity_api.delete_user(user['id'])
# We should get a 404 when looking for the user in the identity
# backend because we're not performing a delete operation on the role.
self.head(member_url, expected_status=404)
    def test_token_revoked_once_group_role_grant_revoked(self):
        """Test token is revoked when group role grant is revoked.

        When a role granted to a group is revoked for a given scope,
        all tokens related to this scope and belonging to one of the members
        of this group should be revoked.

        The revocation should be independently to the presence
        of the revoke API.
        """
        # creates grant from group on project.
        self.assignment_api.create_grant(role_id=self.role['id'],
                                         project_id=self.project['id'],
                                         group_id=self.group['id'])

        # adds user to the group.
        self.identity_api.add_user_to_group(user_id=self.user['id'],
                                            group_id=self.group['id'])

        # creates a token for the user, scoped to the project (so the
        # token's validity depends on the group-derived role).
        auth_body = self.build_authentication_request(
            user_id=self.user['id'],
            password=self.user['password'],
            project_id=self.project['id'])
        token_resp = self.post('/auth/tokens', body=auth_body)
        token = token_resp.headers.get('x-subject-token')

        # validates the returned token; it should be valid.
        self.head('/auth/tokens',
                  headers={'x-subject-token': token},
                  expected_status=200)

        # revokes the grant from group on project.
        self.assignment_api.delete_grant(role_id=self.role['id'],
                                         project_id=self.project['id'],
                                         group_id=self.group['id'])

        # validates the same token again; it should not longer be valid.
        self.head('/auth/tokens',
                  headers={'x-subject-token': token},
                  expected_status=404)
# Role Assignments tests
    def test_get_role_assignments(self):
        """Call ``GET /role_assignments``.

        The sample data set up already has a user, group and project
        that is part of self.domain. We use these plus a new user
        we create as our data set, making sure we ignore any
        role assignments that are already in existence.

        Since we don't yet support a first class entity for role
        assignments, we are only testing the LIST API. To create
        and delete the role assignments we use the old grant APIs.

        Test Plan:

        - Create extra user for tests
        - Get a list of all existing role assignments
        - Add a new assignment for each of the four combinations, i.e.
          group+domain, user+domain, group+project, user+project, using
          the same role each time
        - Get a new list of all role assignments, checking these four new
          ones have been added
        - Then delete the four we added
        - Get a new list of all role assignments, checking the four have
          been removed

        """
        # Since the default fixtures already assign some roles to the
        # user it creates, we also need a new user that will not have any
        # existing assignments
        self.user1 = self.new_user_ref(
            domain_id=self.domain['id'])
        self.user1 = self.identity_api.create_user(self.user1)

        collection_url = '/role_assignments'
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   resource_url=collection_url)
        # Baseline count; all later length checks are relative to this.
        existing_assignments = len(r.result.get('role_assignments'))

        # Now add one of each of the four types of assignment, making sure
        # that we get them all back.
        gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
                                                      group_id=self.group_id,
                                                      role_id=self.role_id)
        self.put(gd_entity['links']['assignment'])
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(
            r,
            expected_length=existing_assignments + 1,
            resource_url=collection_url)
        self.assertRoleAssignmentInListResponse(r, gd_entity)

        ud_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
                                                      user_id=self.user1['id'],
                                                      role_id=self.role_id)
        self.put(ud_entity['links']['assignment'])
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(
            r,
            expected_length=existing_assignments + 2,
            resource_url=collection_url)
        self.assertRoleAssignmentInListResponse(r, ud_entity)

        gp_entity = self.build_role_assignment_entity(
            project_id=self.project_id, group_id=self.group_id,
            role_id=self.role_id)
        self.put(gp_entity['links']['assignment'])
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(
            r,
            expected_length=existing_assignments + 3,
            resource_url=collection_url)
        self.assertRoleAssignmentInListResponse(r, gp_entity)

        up_entity = self.build_role_assignment_entity(
            project_id=self.project_id, user_id=self.user1['id'],
            role_id=self.role_id)
        self.put(up_entity['links']['assignment'])
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(
            r,
            expected_length=existing_assignments + 4,
            resource_url=collection_url)
        self.assertRoleAssignmentInListResponse(r, up_entity)

        # Now delete the four we added and make sure they are removed
        # from the collection.
        self.delete(gd_entity['links']['assignment'])
        self.delete(ud_entity['links']['assignment'])
        self.delete(gp_entity['links']['assignment'])
        self.delete(up_entity['links']['assignment'])
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(
            r,
            expected_length=existing_assignments,
            resource_url=collection_url)
        self.assertRoleAssignmentNotInListResponse(r, gd_entity)
        self.assertRoleAssignmentNotInListResponse(r, ud_entity)
        self.assertRoleAssignmentNotInListResponse(r, gp_entity)
        self.assertRoleAssignmentNotInListResponse(r, up_entity)
    def test_get_effective_role_assignments(self):
        """Call ``GET /role_assignments?effective``.

        Test Plan:

        - Create two extra user for tests
        - Add these users to a group
        - Add a role assignment for the group on a domain
        - Get a list of all role assignments, checking one has been added
        - Then get a list of all effective role assignments - the group
          assignment should have turned into assignments on the domain
          for each of the group members.

        """
        # create_user hashes the password in its return value, so keep the
        # clear-text copy around on the stored ref.
        self.user1 = self.new_user_ref(
            domain_id=self.domain['id'])
        password = self.user1['password']
        self.user1 = self.identity_api.create_user(self.user1)
        self.user1['password'] = password
        self.user2 = self.new_user_ref(
            domain_id=self.domain['id'])
        password = self.user2['password']
        self.user2 = self.identity_api.create_user(self.user2)
        self.user2['password'] = password

        # Both users become members of the sample group.
        self.identity_api.add_user_to_group(self.user1['id'], self.group['id'])
        self.identity_api.add_user_to_group(self.user2['id'], self.group['id'])

        collection_url = '/role_assignments'
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   resource_url=collection_url)
        # Baseline count; later length checks are relative to this.
        existing_assignments = len(r.result.get('role_assignments'))

        gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
                                                      group_id=self.group_id,
                                                      role_id=self.role_id)
        self.put(gd_entity['links']['assignment'])
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(
            r,
            expected_length=existing_assignments + 1,
            resource_url=collection_url)
        self.assertRoleAssignmentInListResponse(r, gd_entity)

        # Now re-read the collection asking for effective roles - this
        # should mean the group assignment is translated into the two
        # member user assignments
        collection_url = '/role_assignments?effective'
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(
            r,
            expected_length=existing_assignments + 2,
            resource_url=collection_url)
        # Each derived user assignment keeps a link back to the group
        # assignment it came from.
        ud_entity = self.build_role_assignment_entity(
            link=gd_entity['links']['assignment'], domain_id=self.domain_id,
            user_id=self.user1['id'], role_id=self.role_id)
        self.assertRoleAssignmentInListResponse(r, ud_entity)
        ud_entity = self.build_role_assignment_entity(
            link=gd_entity['links']['assignment'], domain_id=self.domain_id,
            user_id=self.user2['id'], role_id=self.role_id)
        self.assertRoleAssignmentInListResponse(r, ud_entity)
    def test_check_effective_values_for_role_assignments(self):
        """Call ``GET /role_assignments?effective=value``.

        Check the various ways of specifying the 'effective'
        query parameter. If the 'effective' query parameter
        is included then this should always be treated as meaning 'True'
        unless it is specified as:

        {url}?effective=0

        This is by design to match the agreed way of handling
        policy checking on query/filter parameters.

        Test Plan:

        - Create two extra user for tests
        - Add these users to a group
        - Add a role assignment for the group on a domain
        - Get a list of all role assignments, checking one has been added
        - Then issue various request with different ways of defining
          the 'effective' query parameter. As we have tested the
          correctness of the data coming back when we get effective roles
          in other tests, here we just use the count of entities to
          know if we are getting effective roles or not

        """
        # create_user hashes the password in its return value, so keep the
        # clear-text copy around on the stored ref.
        self.user1 = self.new_user_ref(
            domain_id=self.domain['id'])
        password = self.user1['password']
        self.user1 = self.identity_api.create_user(self.user1)
        self.user1['password'] = password
        self.user2 = self.new_user_ref(
            domain_id=self.domain['id'])
        password = self.user2['password']
        self.user2 = self.identity_api.create_user(self.user2)
        self.user2['password'] = password

        # Both users become members of the sample group.
        self.identity_api.add_user_to_group(self.user1['id'], self.group['id'])
        self.identity_api.add_user_to_group(self.user2['id'], self.group['id'])

        collection_url = '/role_assignments'
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   resource_url=collection_url)
        # Baseline count; later length checks are relative to this.
        existing_assignments = len(r.result.get('role_assignments'))

        gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
                                                      group_id=self.group_id,
                                                      role_id=self.role_id)
        self.put(gd_entity['links']['assignment'])
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(
            r,
            expected_length=existing_assignments + 1,
            resource_url=collection_url)
        self.assertRoleAssignmentInListResponse(r, gd_entity)

        # Now re-read the collection asking for effective roles,
        # using the most common way of defining "effective'. This
        # should mean the group assignment is translated into the two
        # member user assignments
        collection_url = '/role_assignments?effective'
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(
            r,
            expected_length=existing_assignments + 2,
            resource_url=collection_url)

        # Now set 'effective' to false explicitly - should get
        # back the regular roles
        collection_url = '/role_assignments?effective=0'
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(
            r,
            expected_length=existing_assignments + 1,
            resource_url=collection_url)

        # Now try setting  'effective' to 'False' explicitly- this is
        # NOT supported as a way of setting a query or filter
        # parameter to false by design. Hence we should get back
        # effective roles.
        collection_url = '/role_assignments?effective=False'
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(
            r,
            expected_length=existing_assignments + 2,
            resource_url=collection_url)

        # Now set 'effective' to True explicitly
        collection_url = '/role_assignments?effective=True'
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(
            r,
            expected_length=existing_assignments + 2,
            resource_url=collection_url)
    def test_filtered_role_assignments(self):
        """Call ``GET /role_assignments?filters``.

        Test Plan:

        - Create extra users, group, role and project for tests
        - Make the following assignments:
          Give group1, role1 on project1 and domain
          Give user1, role2 on project1 and domain
          Make User1 a member of Group1
        - Test a series of single filter list calls, checking that
          the correct results are obtained
        - Test a multi-filtered list call
        - Test listing all effective roles for a given user
        - Test the equivalent of the list of roles in a project scoped
          token (all effective roles for a user on a project)

        """
        # Since the default fixtures already assign some roles to the
        # user it creates, we also need a new user that will not have any
        # existing assignments
        # (create_user hashes the password in its return value, so the
        # clear-text copy is restored onto the stored ref.)
        self.user1 = self.new_user_ref(
            domain_id=self.domain['id'])
        password = self.user1['password']
        self.user1 = self.identity_api.create_user(self.user1)
        self.user1['password'] = password
        self.user2 = self.new_user_ref(
            domain_id=self.domain['id'])
        password = self.user2['password']
        self.user2 = self.identity_api.create_user(self.user2)
        self.user2['password'] = password
        self.group1 = self.new_group_ref(
            domain_id=self.domain['id'])
        self.group1 = self.identity_api.create_group(self.group1)
        self.identity_api.add_user_to_group(self.user1['id'],
                                            self.group1['id'])
        self.identity_api.add_user_to_group(self.user2['id'],
                                            self.group1['id'])
        self.project1 = self.new_project_ref(
            domain_id=self.domain['id'])
        self.resource_api.create_project(self.project1['id'], self.project1)
        self.role1 = self.new_role_ref()
        self.role_api.create_role(self.role1['id'], self.role1)
        self.role2 = self.new_role_ref()
        self.role_api.create_role(self.role2['id'], self.role2)

        # Now add one of each of the four types of assignment
        gd_entity = self.build_role_assignment_entity(
            domain_id=self.domain_id, group_id=self.group1['id'],
            role_id=self.role1['id'])
        self.put(gd_entity['links']['assignment'])

        ud_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
                                                      user_id=self.user1['id'],
                                                      role_id=self.role2['id'])
        self.put(ud_entity['links']['assignment'])

        gp_entity = self.build_role_assignment_entity(
            project_id=self.project1['id'], group_id=self.group1['id'],
            role_id=self.role1['id'])
        self.put(gp_entity['links']['assignment'])

        up_entity = self.build_role_assignment_entity(
            project_id=self.project1['id'], user_id=self.user1['id'],
            role_id=self.role2['id'])
        self.put(up_entity['links']['assignment'])

        # Now list by various filters to make sure we get back the right ones

        # Filter by project: only the two project1 assignments.
        collection_url = ('/role_assignments?scope.project.id=%s' %
                          self.project1['id'])
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   expected_length=2,
                                                   resource_url=collection_url)
        self.assertRoleAssignmentInListResponse(r, up_entity)
        self.assertRoleAssignmentInListResponse(r, gp_entity)

        # Filter by domain: only the two domain assignments.
        collection_url = ('/role_assignments?scope.domain.id=%s' %
                          self.domain['id'])
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   expected_length=2,
                                                   resource_url=collection_url)
        self.assertRoleAssignmentInListResponse(r, ud_entity)
        self.assertRoleAssignmentInListResponse(r, gd_entity)

        # Filter by user: only user1's direct assignments.
        collection_url = '/role_assignments?user.id=%s' % self.user1['id']
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   expected_length=2,
                                                   resource_url=collection_url)
        self.assertRoleAssignmentInListResponse(r, up_entity)
        self.assertRoleAssignmentInListResponse(r, ud_entity)

        # Filter by group: only group1's assignments.
        collection_url = '/role_assignments?group.id=%s' % self.group1['id']
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   expected_length=2,
                                                   resource_url=collection_url)
        self.assertRoleAssignmentInListResponse(r, gd_entity)
        self.assertRoleAssignmentInListResponse(r, gp_entity)

        # Filter by role: only the role1 assignments.
        collection_url = '/role_assignments?role.id=%s' % self.role1['id']
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   expected_length=2,
                                                   resource_url=collection_url)
        self.assertRoleAssignmentInListResponse(r, gd_entity)
        self.assertRoleAssignmentInListResponse(r, gp_entity)

        # Let's try combining two filers together....
        collection_url = (
            '/role_assignments?user.id=%(user_id)s'
            '&scope.project.id=%(project_id)s' % {
                'user_id': self.user1['id'],
                'project_id': self.project1['id']})
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   expected_length=1,
                                                   resource_url=collection_url)
        self.assertRoleAssignmentInListResponse(r, up_entity)

        # Now for a harder one - filter for user with effective
        # roles - this should return role assignment that were directly
        # assigned as well as by virtue of group membership
        collection_url = ('/role_assignments?effective&user.id=%s' %
                          self.user1['id'])
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   expected_length=4,
                                                   resource_url=collection_url)
        # Should have the two direct roles...
        self.assertRoleAssignmentInListResponse(r, up_entity)
        self.assertRoleAssignmentInListResponse(r, ud_entity)
        # ...and the two via group membership...
        gp1_link = self.build_role_assignment_link(
            project_id=self.project1['id'], group_id=self.group1['id'],
            role_id=self.role1['id'])
        gd1_link = self.build_role_assignment_link(domain_id=self.domain_id,
                                                   group_id=self.group1['id'],
                                                   role_id=self.role1['id'])

        up1_entity = self.build_role_assignment_entity(
            link=gp1_link, project_id=self.project1['id'],
            user_id=self.user1['id'], role_id=self.role1['id'])
        ud1_entity = self.build_role_assignment_entity(
            link=gd1_link, domain_id=self.domain_id, user_id=self.user1['id'],
            role_id=self.role1['id'])
        self.assertRoleAssignmentInListResponse(r, up1_entity)
        self.assertRoleAssignmentInListResponse(r, ud1_entity)

        # ...and for the grand-daddy of them all, simulate the request
        # that would generate the list of effective roles in a project
        # scoped token.
        collection_url = (
            '/role_assignments?effective&user.id=%(user_id)s'
            '&scope.project.id=%(project_id)s' % {
                'user_id': self.user1['id'],
                'project_id': self.project1['id']})
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   expected_length=2,
                                                   resource_url=collection_url)
        # Should have one direct role and one from group membership...
        self.assertRoleAssignmentInListResponse(r, up_entity)
        self.assertRoleAssignmentInListResponse(r, up1_entity)
class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase,
                                 test_v3.AssignmentTestMixin):
    """Base class for testing /v3/role_assignments API behavior."""
    # Upper bounds for the randomly generated project hierarchy built in
    # load_sample_data().
    MAX_HIERARCHY_BREADTH = 3
    # Subtract one to leave room for the root project within the configured
    # maximum project tree depth.
    MAX_HIERARCHY_DEPTH = CONF.max_project_tree_depth - 1
    def load_sample_data(self):
        """Creates sample data to be used on tests.
        Created data are i) a role and ii) a domain containing: a project
        hierarchy and 3 users within 3 groups.
        """
        def create_project_hierarchy(parent_id, depth):
            "Creates a random project hierarchy."
            if depth == 0:
                return
            breadth = random.randint(1, self.MAX_HIERARCHY_BREADTH)
            subprojects = []
            for i in range(breadth):
                subprojects.append(self.new_project_ref(
                    domain_id=self.domain_id, parent_id=parent_id))
                self.resource_api.create_project(subprojects[-1]['id'],
                                                 subprojects[-1])
            # Recurse into one randomly chosen child only, so exactly one
            # branch of the tree reaches the requested depth.
            new_parent = subprojects[random.randint(0, breadth - 1)]
            create_project_hierarchy(new_parent['id'], depth - 1)
        super(RoleAssignmentBaseTestCase, self).load_sample_data()
        # Create a domain
        self.domain = self.new_domain_ref()
        self.domain_id = self.domain['id']
        self.resource_api.create_domain(self.domain_id, self.domain)
        # Create a project hierarchy
        self.project = self.new_project_ref(domain_id=self.domain_id)
        self.project_id = self.project['id']
        self.resource_api.create_project(self.project_id, self.project)
        # Create a random project hierarchy
        create_project_hierarchy(self.project_id,
                                 random.randint(1, self.MAX_HIERARCHY_DEPTH))
        # Create 3 users
        self.user_ids = []
        for i in range(3):
            user = self.new_user_ref(domain_id=self.domain_id)
            user = self.identity_api.create_user(user)
            self.user_ids.append(user['id'])
        # Create 3 groups
        self.group_ids = []
        for i in range(3):
            group = self.new_group_ref(domain_id=self.domain_id)
            group = self.identity_api.create_group(group)
            self.group_ids.append(group['id'])
            # Put 2 members on each group
            self.identity_api.add_user_to_group(user_id=self.user_ids[i],
                                                group_id=group['id'])
            self.identity_api.add_user_to_group(user_id=self.user_ids[i % 2],
                                                group_id=group['id'])
        # Authorize the sample-data user on the test project.
        # NOTE(review): self.user_id and self.role_id at this point come from
        # the parent class's load_sample_data(); self.role_id is re-bound to
        # a fresh role just below — confirm this ordering is intentional.
        self.assignment_api.create_grant(user_id=self.user_id,
                                         project_id=self.project_id,
                                         role_id=self.role_id)
        # Create a role
        self.role = self.new_role_ref()
        self.role_id = self.role['id']
        self.role_api.create_role(self.role_id, self.role)
        # Set default user and group to be used on tests
        self.default_user_id = self.user_ids[0]
        self.default_group_id = self.group_ids[0]
    def get_role_assignments(self, expected_status=200, **filters):
        """Returns the result from querying role assignment API + queried URL.
        Calls GET /v3/role_assignments?<params> and returns its result, where
        <params> is the HTTP query parameters form of effective option plus
        filters, if provided. Queried URL is returned as well.
        :param expected_status: HTTP status the GET request must return.
        :param filters: query filters on role assignment attributes.
        :returns: a tuple containing the list role assignments API response and
                  queried URL.
        """
        query_url = self._get_role_assignments_query_url(**filters)
        response = self.get(query_url, expected_status=expected_status)
        return (response, query_url)
    def _get_role_assignments_query_url(self, **filters):
        """Returns non-effective role assignments query URL from given filters.
        :param filters: query parameters are created with the provided filters
                        on role assignments attributes. Valid filters are:
                        role_id, domain_id, project_id, group_id, user_id and
                        inherited_to_projects.
        :returns: role assignments query URL.
        """
        return self.build_role_assignment_query_url(**filters)
class RoleAssignmentFailureTestCase(RoleAssignmentBaseTestCase):
    """Tests invalid query-parameter combinations on /v3/role_assignments.

    A role assignment has exactly one actor (user or group) and one target
    (domain or project), so filtering by both members of either pair is
    rejected with HTTP 400. Since filtering on role assignments applies only
    to the final result, effective mode cannot be combined with i) a group
    filter or ii) domain and inherited filters, because it would always
    result in an empty list.
    """
    def test_get_role_assignments_by_domain_and_project(self):
        # Domain and project are mutually exclusive targets.
        bad_filters = {'domain_id': self.domain_id,
                       'project_id': self.project_id}
        self.get_role_assignments(expected_status=400, **bad_filters)
    def test_get_role_assignments_by_user_and_group(self):
        # User and group are mutually exclusive actors.
        bad_filters = {'user_id': self.default_user_id,
                       'group_id': self.default_group_id}
        self.get_role_assignments(expected_status=400, **bad_filters)
    def test_get_role_assignments_by_effective_and_inherited(self):
        # Effective mode already expands inherited domain grants, so
        # combining it with domain + inherited filters is rejected.
        self.config_fixture.config(group='os_inherit', enabled=True)
        bad_filters = {'domain_id': self.domain_id,
                       'inherited_to_projects': True}
        self.get_role_assignments(effective=True, expected_status=400,
                                  **bad_filters)
    def test_get_role_assignments_by_effective_and_group(self):
        # Effective mode already expands groups into their users, so a
        # group filter is rejected.
        self.get_role_assignments(effective=True, expected_status=400,
                                  group_id=self.default_group_id)
class RoleAssignmentDirectTestCase(RoleAssignmentBaseTestCase):
    """Class for testing direct assignments on /v3/role_assignments API.

    Direct assignments on a domain or project have effect on them directly,
    instead of on their project hierarchy, i.e they are non-inherited. In
    addition, group direct assignments are not expanded to group's users.
    Tests on this class make assertions on the representation and API
    filtering of direct assignments.
    """
    def _test_get_role_assignments(self, **filters):
        """Generic filtering test method.

        According to the provided filters, this method:
        - creates a new role assignment;
        - asserts that list role assignments API responds correctly;
        - deletes the created role assignment.

        :param filters: filters to be considered when listing role
                        assignments. Valid filters are: role_id, domain_id,
                        project_id, group_id, user_id and
                        inherited_to_projects.
        """
        # Fills default assignment with provided filters
        test_assignment = self._set_default_assignment_attributes(**filters)
        # Create new role assignment for this test
        self.assignment_api.create_grant(**test_assignment)
        # Get expected role assignments
        expected_assignments = self._list_expected_role_assignments(
            **test_assignment)
        # Get role assignments from API
        response, query_url = self.get_role_assignments(**test_assignment)
        self.assertValidRoleAssignmentListResponse(response,
                                                   resource_url=query_url)
        self.assertEqual(len(expected_assignments),
                         len(response.result.get('role_assignments')))
        # Assert that expected role assignments were returned by the API call
        for assignment in expected_assignments:
            self.assertRoleAssignmentInListResponse(response, assignment)
        # Delete created role assignment
        self.assignment_api.delete_grant(**test_assignment)
    def _set_default_assignment_attributes(self, **attribs):
        """Inserts default values for missing attributes of role assignment.

        If no actor, target or role are provided, they will default to values
        from sample data.

        :param attribs: info from a role assignment entity. Valid attributes
                        are: role_id, domain_id, project_id, group_id,
                        user_id and inherited_to_projects.
        :returns: the completed attribute dict (same object, mutated).
        """
        # Default the target if neither a domain nor a project was provided.
        # NOTE: fixed typo — this previously checked for 'projects_id',
        # which made the condition miss a caller-supplied 'project_id' and
        # silently overwrite it with the sample project.
        if not any(target in attribs
                   for target in ('domain_id', 'project_id')):
            attribs['project_id'] = self.project_id
        # Default the actor if neither a user nor a group was provided.
        if not any(actor in attribs for actor in ('user_id', 'group_id')):
            attribs['user_id'] = self.default_user_id
        # Default the role.
        if 'role_id' not in attribs:
            attribs['role_id'] = self.role_id
        return attribs
    def _list_expected_role_assignments(self, **filters):
        """Given the filters, it returns expected direct role assignments.

        :param filters: filters that will be considered when listing role
                        assignments. Valid filters are: role_id, domain_id,
                        project_id, group_id, user_id and
                        inherited_to_projects.
        :returns: the list of the expected role assignments.
        """
        return [self.build_role_assignment_entity(**filters)]
    # Test cases below call the generic test method, providing different filter
    # combinations. Filters are provided as specified in the method name, after
    # 'by'. For example, test_get_role_assignments_by_project_user_and_role
    # calls the generic test method with project_id, user_id and role_id.
    def test_get_role_assignments_by_domain(self, **filters):
        self._test_get_role_assignments(domain_id=self.domain_id, **filters)
    def test_get_role_assignments_by_project(self, **filters):
        self._test_get_role_assignments(project_id=self.project_id, **filters)
    def test_get_role_assignments_by_user(self, **filters):
        self._test_get_role_assignments(user_id=self.default_user_id,
                                        **filters)
    def test_get_role_assignments_by_group(self, **filters):
        self._test_get_role_assignments(group_id=self.default_group_id,
                                        **filters)
    def test_get_role_assignments_by_role(self, **filters):
        self._test_get_role_assignments(role_id=self.role_id, **filters)
    def test_get_role_assignments_by_domain_and_user(self, **filters):
        self.test_get_role_assignments_by_domain(user_id=self.default_user_id,
                                                 **filters)
    def test_get_role_assignments_by_domain_and_group(self, **filters):
        self.test_get_role_assignments_by_domain(
            group_id=self.default_group_id, **filters)
    def test_get_role_assignments_by_project_and_user(self, **filters):
        self.test_get_role_assignments_by_project(user_id=self.default_user_id,
                                                  **filters)
    def test_get_role_assignments_by_project_and_group(self, **filters):
        self.test_get_role_assignments_by_project(
            group_id=self.default_group_id, **filters)
    def test_get_role_assignments_by_domain_user_and_role(self, **filters):
        self.test_get_role_assignments_by_domain_and_user(role_id=self.role_id,
                                                          **filters)
    def test_get_role_assignments_by_domain_group_and_role(self, **filters):
        self.test_get_role_assignments_by_domain_and_group(
            role_id=self.role_id, **filters)
    def test_get_role_assignments_by_project_user_and_role(self, **filters):
        self.test_get_role_assignments_by_project_and_user(
            role_id=self.role_id, **filters)
    def test_get_role_assignments_by_project_group_and_role(self, **filters):
        self.test_get_role_assignments_by_project_and_group(
            role_id=self.role_id, **filters)
class RoleAssignmentInheritedTestCase(RoleAssignmentDirectTestCase):
    """Class for testing inherited assignments on /v3/role_assignments API.

    Inherited assignments on a domain or project have no effect on them
    directly, but on the projects under them instead.

    Tests on this class do not make assertions on the effect of inherited
    assignments, but in their representation and API filtering.
    """
    def config_overrides(self):
        # NOTE: fixed the super() call — it previously named
        # RoleAssignmentBaseTestCase, which skipped any config_overrides
        # defined by the intermediate classes in the MRO.
        super(RoleAssignmentInheritedTestCase, self).config_overrides()
        # Enable OS-INHERIT so inherited assignments can be created.
        self.config_fixture.config(group='os_inherit', enabled=True)
    def _test_get_role_assignments(self, **filters):
        """Adds inherited_to_project filter to expected entity in tests."""
        super(RoleAssignmentInheritedTestCase,
              self)._test_get_role_assignments(inherited_to_projects=True,
                                               **filters)
class RoleAssignmentEffectiveTestCase(RoleAssignmentInheritedTestCase):
    """Class for testing inheritance effects on /v3/role_assignments API.

    Inherited assignments on a domain or project have no effect on them
    directly, but on the projects under them instead.

    Tests on this class make assertions on the effect of inherited
    assignments and API filtering.
    """
    def _get_role_assignments_query_url(self, **filters):
        """Returns effective role assignments query URL from given filters.

        For test methods in this class, effective will always be true. As in
        effective mode, inherited_to_projects, group_id, domain_id and
        project_id will always be dropped from the provided filters, since
        they are not valid query filters in that mode.

        :param filters: query parameters are created with the provided
                        filters. Valid filters are: role_id, domain_id,
                        project_id, group_id, user_id and
                        inherited_to_projects.
        :returns: role assignments query URL.
        """
        query_filters = filters.copy()
        # These attributes are expanded away in effective mode, so they must
        # not appear in the query string. inherited_to_projects is always
        # present here (set by the inherited _test_get_role_assignments).
        query_filters.pop('inherited_to_projects')
        query_filters.pop('group_id', None)
        query_filters.pop('domain_id', None)
        query_filters.pop('project_id', None)
        return self.build_role_assignment_query_url(effective=True,
                                                    **query_filters)
    def _list_expected_role_assignments(self, **filters):
        """Given the filters, it returns expected effective role assignments.

        Group assignments are expanded into the group's users and inherited
        assignments are expanded onto the target's projects.

        :param filters: filters that will be considered when listing role
                        assignments. Valid filters are: role_id, domain_id,
                        project_id, group_id, user_id and
                        inherited_to_projects.
        :returns: the list of the expected role assignments.
        """
        # Get assignment link, to be put on 'links': {'assignment': link}
        assignment_link = self.build_role_assignment_link(**filters)
        # Expand group membership into individual users.
        # NOTE: the previous dead initializations to [None] were removed;
        # every branch below assigns the lists.
        if filters.get('group_id'):
            user_ids = [user['id'] for user in
                        self.identity_api.list_users_in_group(
                            filters['group_id'])]
        else:
            user_ids = [self.default_user_id]
        # Expand role inheritance onto the target's projects
        if filters.get('domain_id'):
            project_ids = [project['id'] for project in
                           self.resource_api.list_projects_in_domain(
                               filters.pop('domain_id'))]
        else:
            project_ids = [project['id'] for project in
                           self.resource_api.list_projects_in_subtree(
                               self.project_id)]
        # Compute expected role assignments
        assignments = []
        for project_id in project_ids:
            filters['project_id'] = project_id
            for user_id in user_ids:
                filters['user_id'] = user_id
                assignments.append(self.build_role_assignment_entity(
                    link=assignment_link, **filters))
        return assignments
class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
test_v3.AssignmentTestMixin):
"""Test inheritance crud and its effects."""
    def config_overrides(self):
        # Enable the OS-INHERIT extension so inherited role assignments
        # take effect for the tests in this class.
        super(AssignmentInheritanceTestCase, self).config_overrides()
        self.config_fixture.config(group='os_inherit', enabled=True)
    def test_get_token_from_inherited_user_domain_role_grants(self):
        """Tests inherited user-domain grants' effect on token issuing.

        A non-inherited user-domain grant authorizes only domain-scoped
        tokens, while an inherited one authorizes project-scoped tokens on
        the domain's projects as well.
        """
        # Create a new user to ensure that no grant is loaded from sample data
        user = self.new_user_ref(domain_id=self.domain_id)
        # Keep the clear-text password; create_user() does not return it and
        # it is needed for the authentication requests below.
        password = user['password']
        user = self.identity_api.create_user(user)
        user['password'] = password
        # Define domain and project authentication data
        domain_auth_data = self.build_authentication_request(
            user_id=user['id'],
            password=user['password'],
            domain_id=self.domain_id)
        project_auth_data = self.build_authentication_request(
            user_id=user['id'],
            password=user['password'],
            project_id=self.project_id)
        # Check the user cannot get a domain nor a project token
        self.v3_authenticate_token(domain_auth_data, expected_status=401)
        self.v3_authenticate_token(project_auth_data, expected_status=401)
        # Grant non-inherited role for user on domain
        non_inher_ud_link = self.build_role_assignment_link(
            domain_id=self.domain_id, user_id=user['id'], role_id=self.role_id)
        self.put(non_inher_ud_link)
        # Check the user can get only a domain token
        self.v3_authenticate_token(domain_auth_data)
        self.v3_authenticate_token(project_auth_data, expected_status=401)
        # Create inherited role
        inherited_role = {'id': uuid.uuid4().hex, 'name': 'inherited'}
        self.role_api.create_role(inherited_role['id'], inherited_role)
        # Grant inherited role for user on domain
        inher_ud_link = self.build_role_assignment_link(
            domain_id=self.domain_id, user_id=user['id'],
            role_id=inherited_role['id'], inherited_to_projects=True)
        self.put(inher_ud_link)
        # Check the user can get both a domain and a project token
        self.v3_authenticate_token(domain_auth_data)
        self.v3_authenticate_token(project_auth_data)
        # Delete inherited grant
        self.delete(inher_ud_link)
        # Check the user can only get a domain token
        self.v3_authenticate_token(domain_auth_data)
        self.v3_authenticate_token(project_auth_data, expected_status=401)
        # Delete non-inherited grant
        self.delete(non_inher_ud_link)
        # Check the user cannot get a domain token anymore
        self.v3_authenticate_token(domain_auth_data, expected_status=401)
def test_get_token_from_inherited_group_domain_role_grants(self):
# Create a new group and put a new user in it to
# ensure that no grant is loaded from sample data
user = self.new_user_ref(domain_id=self.domain_id)
password = user['password']
user = self.identity_api.create_user(user)
user['password'] = password
group = self.new_group_ref(domain_id=self.domain['id'])
group = self.identity_api.create_group(group)
self.identity_api.add_user_to_group(user['id'], group['id'])
# Define domain and project authentication data
domain_auth_data = self.build_authentication_request(
user_id=user['id'],
password=user['password'],
domain_id=self.domain_id)
project_auth_data = self.build_authentication_request(
user_id=user['id'],
password=user['password'],
project_id=self.project_id)
# Check the user cannot get a domain nor a project token
self.v3_authenticate_token(domain_auth_data, expected_status=401)
self.v3_authenticate_token(project_auth_data, expected_status=401)
# Grant non-inherited role for user on domain
non_inher_gd_link = self.build_role_assignment_link(
domain_id=self.domain_id, user_id=user['id'], role_id=self.role_id)
self.put(non_inher_gd_link)
# Check the user can get only a domain token
self.v3_authenticate_token(domain_auth_data)
self.v3_authenticate_token(project_auth_data, expected_status=401)
# Create inherited role
inherited_role = {'id': uuid.uuid4().hex, 'name': 'inherited'}
self.role_api.create_role(inherited_role['id'], inherited_role)
# Grant inherited role for user on domain
inher_gd_link = self.build_role_assignment_link(
domain_id=self.domain_id, user_id=user['id'],
role_id=inherited_role['id'], inherited_to_projects=True)
self.put(inher_gd_link)
# Check the user can get both a domain and a project token
self.v3_authenticate_token(domain_auth_data)
self.v3_authenticate_token(project_auth_data)
# Delete inherited grant
self.delete(inher_gd_link)
# Check the user can only get a domain token
self.v3_authenticate_token(domain_auth_data)
self.v3_authenticate_token(project_auth_data, expected_status=401)
# Delete non-inherited grant
self.delete(non_inher_gd_link)
# Check the user cannot get a domain token anymore
self.v3_authenticate_token(domain_auth_data, expected_status=401)
    def _test_crud_inherited_and_direct_assignment_on_target(self, target_url):
        """Tests that direct and inherited assignments on the same target are
        independent resources: each can be created, checked and deleted
        without affecting the other.

        :param target_url: a '/domains/{id}' or '/projects/{id}' URL prefix.
        """
        # Create a new role to avoid assignments loaded from sample data
        role = self.new_role_ref()
        self.role_api.create_role(role['id'], role)
        # Define URLs
        direct_url = '%s/users/%s/roles/%s' % (
            target_url, self.user_id, role['id'])
        inherited_url = '/OS-INHERIT/%s/inherited_to_projects' % direct_url
        # Create the direct assignment
        self.put(direct_url)
        # Check the direct assignment exists, but the inherited one does not
        self.head(direct_url)
        self.head(inherited_url, expected_status=404)
        # Now add the inherited assignment
        self.put(inherited_url)
        # Check both the direct and inherited assignment exist
        self.head(direct_url)
        self.head(inherited_url)
        # Delete the inherited assignment
        self.delete(inherited_url)
        # Check the direct assignment still exists, but the inherited one is
        # gone
        self.head(direct_url)
        self.head(inherited_url, expected_status=404)
        # Now delete the direct assignment
        self.delete(direct_url)
        # Check that none of them exist
        self.head(direct_url, expected_status=404)
        self.head(inherited_url, expected_status=404)
    def test_crud_inherited_and_direct_assignment_on_domains(self):
        # Exercise the generic CRUD scenario with a domain target.
        self._test_crud_inherited_and_direct_assignment_on_target(
            '/domains/%s' % self.domain_id)
    def test_crud_inherited_and_direct_assignment_on_projects(self):
        # Exercise the generic CRUD scenario with a project target.
        self._test_crud_inherited_and_direct_assignment_on_target(
            '/projects/%s' % self.project_id)
    def test_crud_user_inherited_domain_role_grants(self):
        """Tests CRUD on the OS-INHERIT user-domain grant resource, with a
        non-inherited grant present as a spoiler that must not show up in
        the inherited collection.
        """
        role_list = []
        for _ in range(2):
            role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
            self.role_api.create_role(role['id'], role)
            role_list.append(role)
        # Create a non-inherited role as a spoiler
        self.assignment_api.create_grant(
            role_list[1]['id'], user_id=self.user['id'],
            domain_id=self.domain_id)
        base_collection_url = (
            '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
                'domain_id': self.domain_id,
                'user_id': self.user['id']})
        member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
            'collection_url': base_collection_url,
            'role_id': role_list[0]['id']}
        collection_url = base_collection_url + '/inherited_to_projects'
        self.put(member_url)
        # Check we can read it back
        self.head(member_url)
        r = self.get(collection_url)
        self.assertValidRoleListResponse(r, ref=role_list[0],
                                         resource_url=collection_url)
        # Now delete and check it's gone
        self.delete(member_url)
        r = self.get(collection_url)
        self.assertValidRoleListResponse(r, expected_length=0,
                                         resource_url=collection_url)
    def test_list_role_assignments_for_inherited_domain_grants(self):
        """Call ``GET /role_assignments with inherited domain grants``.

        Test Plan:

        - Create 4 roles
        - Create a domain with a user and two projects
        - Assign two direct roles to project1
        - Assign a spoiler role to project2
        - Issue the URL to add inherited role to the domain
        - Issue the URL to check it is indeed on the domain
        - Issue the URL to check effective roles on project1 - this
          should return 3 roles.
        """
        role_list = []
        for _ in range(4):
            role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
            self.role_api.create_role(role['id'], role)
            role_list.append(role)
        domain = self.new_domain_ref()
        self.resource_api.create_domain(domain['id'], domain)
        user1 = self.new_user_ref(
            domain_id=domain['id'])
        # NOTE(review): the clear-text password is saved and restored onto
        # the ref for consistency with other tests; it is not used again in
        # this test.
        password = user1['password']
        user1 = self.identity_api.create_user(user1)
        user1['password'] = password
        project1 = self.new_project_ref(
            domain_id=domain['id'])
        self.resource_api.create_project(project1['id'], project1)
        project2 = self.new_project_ref(
            domain_id=domain['id'])
        self.resource_api.create_project(project2['id'], project2)
        # Add some roles to the project
        self.assignment_api.add_role_to_user_and_project(
            user1['id'], project1['id'], role_list[0]['id'])
        self.assignment_api.add_role_to_user_and_project(
            user1['id'], project1['id'], role_list[1]['id'])
        # ..and one on a different project as a spoiler
        self.assignment_api.add_role_to_user_and_project(
            user1['id'], project2['id'], role_list[2]['id'])
        # Now create our inherited role on the domain
        base_collection_url = (
            '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
                'domain_id': domain['id'],
                'user_id': user1['id']})
        member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
            'collection_url': base_collection_url,
            'role_id': role_list[3]['id']}
        collection_url = base_collection_url + '/inherited_to_projects'
        self.put(member_url)
        self.head(member_url)
        r = self.get(collection_url)
        self.assertValidRoleListResponse(r, ref=role_list[3],
                                         resource_url=collection_url)
        # Now use the list domain role assignments api to check if this
        # is included
        collection_url = (
            '/role_assignments?user.id=%(user_id)s'
            '&scope.domain.id=%(domain_id)s' % {
                'user_id': user1['id'],
                'domain_id': domain['id']})
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   expected_length=1,
                                                   resource_url=collection_url)
        ud_entity = self.build_role_assignment_entity(
            domain_id=domain['id'], user_id=user1['id'],
            role_id=role_list[3]['id'], inherited_to_projects=True)
        self.assertRoleAssignmentInListResponse(r, ud_entity)
        # Now ask for effective list role assignments - the role should
        # turn into a project role, along with the two direct roles that are
        # on the project
        collection_url = (
            '/role_assignments?effective&user.id=%(user_id)s'
            '&scope.project.id=%(project_id)s' % {
                'user_id': user1['id'],
                'project_id': project1['id']})
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   expected_length=3,
                                                   resource_url=collection_url)
        # An effective role for an inherited role will be a project
        # entity, with a domain link to the inherited assignment
        ud_url = self.build_role_assignment_link(
            domain_id=domain['id'], user_id=user1['id'],
            role_id=role_list[3]['id'], inherited_to_projects=True)
        up_entity = self.build_role_assignment_entity(
            link=ud_url, project_id=project1['id'],
            user_id=user1['id'], role_id=role_list[3]['id'],
            inherited_to_projects=True)
        self.assertRoleAssignmentInListResponse(r, up_entity)
    def test_list_role_assignments_for_disabled_inheritance_extension(self):
        """Call ``GET /role_assignments with inherited domain grants``.

        Test Plan:

        - Issue the URL to add inherited role to the domain
        - Issue the URL to check effective roles on project include the
          inherited role
        - Disable the extension
        - Re-check the effective roles, proving the inherited role no longer
          shows up.
        """
        role_list = []
        for _ in range(4):
            role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
            self.role_api.create_role(role['id'], role)
            role_list.append(role)
        domain = self.new_domain_ref()
        self.resource_api.create_domain(domain['id'], domain)
        user1 = self.new_user_ref(
            domain_id=domain['id'])
        # Keep the clear-text password (create_user() does not return it).
        password = user1['password']
        user1 = self.identity_api.create_user(user1)
        user1['password'] = password
        project1 = self.new_project_ref(
            domain_id=domain['id'])
        self.resource_api.create_project(project1['id'], project1)
        project2 = self.new_project_ref(
            domain_id=domain['id'])
        self.resource_api.create_project(project2['id'], project2)
        # Add some roles to the project
        self.assignment_api.add_role_to_user_and_project(
            user1['id'], project1['id'], role_list[0]['id'])
        self.assignment_api.add_role_to_user_and_project(
            user1['id'], project1['id'], role_list[1]['id'])
        # ..and one on a different project as a spoiler
        self.assignment_api.add_role_to_user_and_project(
            user1['id'], project2['id'], role_list[2]['id'])
        # Now create our inherited role on the domain
        base_collection_url = (
            '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
                'domain_id': domain['id'],
                'user_id': user1['id']})
        member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
            'collection_url': base_collection_url,
            'role_id': role_list[3]['id']}
        collection_url = base_collection_url + '/inherited_to_projects'
        self.put(member_url)
        self.head(member_url)
        r = self.get(collection_url)
        self.assertValidRoleListResponse(r, ref=role_list[3],
                                         resource_url=collection_url)
        # Get effective list role assignments - the role should
        # turn into a project role, along with the two direct roles that are
        # on the project
        collection_url = (
            '/role_assignments?effective&user.id=%(user_id)s'
            '&scope.project.id=%(project_id)s' % {
                'user_id': user1['id'],
                'project_id': project1['id']})
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   expected_length=3,
                                                   resource_url=collection_url)
        ud_url = self.build_role_assignment_link(
            domain_id=domain['id'], user_id=user1['id'],
            role_id=role_list[3]['id'], inherited_to_projects=True)
        up_entity = self.build_role_assignment_entity(
            link=ud_url, project_id=project1['id'],
            user_id=user1['id'], role_id=role_list[3]['id'],
            inherited_to_projects=True)
        self.assertRoleAssignmentInListResponse(r, up_entity)
        # Disable the extension and re-check the list, the role inherited
        # from the project should no longer show up
        self.config_fixture.config(group='os_inherit', enabled=False)
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   expected_length=2,
                                                   resource_url=collection_url)
        self.assertRoleAssignmentNotInListResponse(r, up_entity)
    def test_list_role_assignments_for_inherited_group_domain_grants(self):
        """Call ``GET /role_assignments with inherited group domain grants``.

        Test Plan:

        - Create 4 roles
        - Create a domain with a user and two projects
        - Assign two direct roles to project1
        - Assign a spoiler role to project2
        - Issue the URL to add inherited role to the domain
        - Issue the URL to check it is indeed on the domain
        - Issue the URL to check effective roles on project1 - this
          should return 3 roles.
        """
        role_list = []
        for _ in range(4):
            role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
            self.role_api.create_role(role['id'], role)
            role_list.append(role)
        domain = self.new_domain_ref()
        self.resource_api.create_domain(domain['id'], domain)
        user1 = self.new_user_ref(
            domain_id=domain['id'])
        # Keep the clear-text passwords (create_user() does not return them).
        password = user1['password']
        user1 = self.identity_api.create_user(user1)
        user1['password'] = password
        user2 = self.new_user_ref(
            domain_id=domain['id'])
        password = user2['password']
        user2 = self.identity_api.create_user(user2)
        user2['password'] = password
        # Put both users in group1, so the group grant below applies to them.
        group1 = self.new_group_ref(
            domain_id=domain['id'])
        group1 = self.identity_api.create_group(group1)
        self.identity_api.add_user_to_group(user1['id'],
                                            group1['id'])
        self.identity_api.add_user_to_group(user2['id'],
                                            group1['id'])
        project1 = self.new_project_ref(
            domain_id=domain['id'])
        self.resource_api.create_project(project1['id'], project1)
        project2 = self.new_project_ref(
            domain_id=domain['id'])
        self.resource_api.create_project(project2['id'], project2)
        # Add some roles to the project
        self.assignment_api.add_role_to_user_and_project(
            user1['id'], project1['id'], role_list[0]['id'])
        self.assignment_api.add_role_to_user_and_project(
            user1['id'], project1['id'], role_list[1]['id'])
        # ..and one on a different project as a spoiler
        self.assignment_api.add_role_to_user_and_project(
            user1['id'], project2['id'], role_list[2]['id'])
        # Now create our inherited role on the domain
        base_collection_url = (
            '/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
                'domain_id': domain['id'],
                'group_id': group1['id']})
        member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
            'collection_url': base_collection_url,
            'role_id': role_list[3]['id']}
        collection_url = base_collection_url + '/inherited_to_projects'
        self.put(member_url)
        self.head(member_url)
        r = self.get(collection_url)
        self.assertValidRoleListResponse(r, ref=role_list[3],
                                         resource_url=collection_url)
        # Now use the list domain role assignments api to check if this
        # is included
        collection_url = (
            '/role_assignments?group.id=%(group_id)s'
            '&scope.domain.id=%(domain_id)s' % {
                'group_id': group1['id'],
                'domain_id': domain['id']})
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   expected_length=1,
                                                   resource_url=collection_url)
        gd_entity = self.build_role_assignment_entity(
            domain_id=domain['id'], group_id=group1['id'],
            role_id=role_list[3]['id'], inherited_to_projects=True)
        self.assertRoleAssignmentInListResponse(r, gd_entity)
        # Now ask for effective list role assignments - the role should
        # turn into a user project role, along with the two direct roles
        # that are on the project
        collection_url = (
            '/role_assignments?effective&user.id=%(user_id)s'
            '&scope.project.id=%(project_id)s' % {
                'user_id': user1['id'],
                'project_id': project1['id']})
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   expected_length=3,
                                                   resource_url=collection_url)
        # An effective role for an inherited role will be a project
        # entity, with a domain link to the inherited assignment
        up_entity = self.build_role_assignment_entity(
            link=gd_entity['links']['assignment'], project_id=project1['id'],
            user_id=user1['id'], role_id=role_list[3]['id'],
            inherited_to_projects=True)
        self.assertRoleAssignmentInListResponse(r, up_entity)
    def test_filtered_role_assignments_for_inherited_grants(self):
        """Call ``GET /role_assignments?scope.OS-INHERIT:inherited_to``.

        Test Plan:

        - Create 5 roles
        - Create a domain with a user, group and two projects
        - Assign three direct spoiler roles to projects
        - Issue the URL to add an inherited user role to the domain
        - Issue the URL to add an inherited group role to the domain
        - Issue the URL to filter by inherited roles - this should
          return just the 2 inherited roles.
        """
        role_list = []
        for _ in range(5):
            role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
            self.role_api.create_role(role['id'], role)
            role_list.append(role)
        domain = self.new_domain_ref()
        self.resource_api.create_domain(domain['id'], domain)
        user1 = self.new_user_ref(
            domain_id=domain['id'])
        # Keep the clear-text password (create_user() does not return it).
        password = user1['password']
        user1 = self.identity_api.create_user(user1)
        user1['password'] = password
        group1 = self.new_group_ref(
            domain_id=domain['id'])
        group1 = self.identity_api.create_group(group1)
        project1 = self.new_project_ref(
            domain_id=domain['id'])
        self.resource_api.create_project(project1['id'], project1)
        project2 = self.new_project_ref(
            domain_id=domain['id'])
        self.resource_api.create_project(project2['id'], project2)
        # Add some spoiler roles to the projects
        self.assignment_api.add_role_to_user_and_project(
            user1['id'], project1['id'], role_list[0]['id'])
        self.assignment_api.add_role_to_user_and_project(
            user1['id'], project2['id'], role_list[1]['id'])
        # Create a non-inherited role as a spoiler
        self.assignment_api.create_grant(
            role_list[2]['id'], user_id=user1['id'], domain_id=domain['id'])
        # Now create two inherited roles on the domain, one for a user
        # and one for a group
        base_collection_url = (
            '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
                'domain_id': domain['id'],
                'user_id': user1['id']})
        member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
            'collection_url': base_collection_url,
            'role_id': role_list[3]['id']}
        collection_url = base_collection_url + '/inherited_to_projects'
        self.put(member_url)
        self.head(member_url)
        r = self.get(collection_url)
        self.assertValidRoleListResponse(r, ref=role_list[3],
                                         resource_url=collection_url)
        base_collection_url = (
            '/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
                'domain_id': domain['id'],
                'group_id': group1['id']})
        member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
            'collection_url': base_collection_url,
            'role_id': role_list[4]['id']}
        collection_url = base_collection_url + '/inherited_to_projects'
        self.put(member_url)
        self.head(member_url)
        r = self.get(collection_url)
        self.assertValidRoleListResponse(r, ref=role_list[4],
                                         resource_url=collection_url)
        # Now use the list role assignments api to get a list of inherited
        # roles on the domain - should get back the two roles
        collection_url = (
            '/role_assignments?scope.OS-INHERIT:inherited_to=projects')
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   expected_length=2,
                                                   resource_url=collection_url)
        ud_entity = self.build_role_assignment_entity(
            domain_id=domain['id'], user_id=user1['id'],
            role_id=role_list[3]['id'], inherited_to_projects=True)
        gd_entity = self.build_role_assignment_entity(
            domain_id=domain['id'], group_id=group1['id'],
            role_id=role_list[4]['id'], inherited_to_projects=True)
        self.assertRoleAssignmentInListResponse(r, ud_entity)
        self.assertRoleAssignmentInListResponse(r, gd_entity)
def _setup_hierarchical_projects_scenario(self):
"""Creates basic hierarchical projects scenario.
This basic scenario contains a root with one leaf project and
two roles with the following names: non-inherited and inherited.
"""
# Create project hierarchy
root = self.new_project_ref(domain_id=self.domain['id'])
leaf = self.new_project_ref(domain_id=self.domain['id'],
parent_id=root['id'])
self.resource_api.create_project(root['id'], root)
self.resource_api.create_project(leaf['id'], leaf)
# Create 'non-inherited' and 'inherited' roles
non_inherited_role = {'id': uuid.uuid4().hex, 'name': 'non-inherited'}
self.role_api.create_role(non_inherited_role['id'], non_inherited_role)
inherited_role = {'id': uuid.uuid4().hex, 'name': 'inherited'}
self.role_api.create_role(inherited_role['id'], inherited_role)
return (root['id'], leaf['id'],
non_inherited_role['id'], inherited_role['id'])
    def test_get_token_from_inherited_user_project_role_grants(self):
        """Check token scoping as inherited user grants come and go.

        An inherited role granted on the root project must authorize the
        user on the leaf project only, never on the root project itself.
        """
        # Create default scenario
        root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
            self._setup_hierarchical_projects_scenario())
        # Define root and leaf projects authentication data
        root_project_auth_data = self.build_authentication_request(
            user_id=self.user['id'],
            password=self.user['password'],
            project_id=root_id)
        leaf_project_auth_data = self.build_authentication_request(
            user_id=self.user['id'],
            password=self.user['password'],
            project_id=leaf_id)
        # Check the user cannot get a token on root nor leaf project
        self.v3_authenticate_token(root_project_auth_data, expected_status=401)
        self.v3_authenticate_token(leaf_project_auth_data, expected_status=401)
        # Grant non-inherited role for user on leaf project
        non_inher_up_link = self.build_role_assignment_link(
            project_id=leaf_id, user_id=self.user['id'],
            role_id=non_inherited_role_id)
        self.put(non_inher_up_link)
        # Check the user can only get a token on leaf project
        self.v3_authenticate_token(root_project_auth_data, expected_status=401)
        self.v3_authenticate_token(leaf_project_auth_data)
        # Grant inherited role for user on root project
        inher_up_link = self.build_role_assignment_link(
            project_id=root_id, user_id=self.user['id'],
            role_id=inherited_role_id, inherited_to_projects=True)
        self.put(inher_up_link)
        # Check the user still can get a token only on leaf project
        # (inherited grants on root apply to sub-projects, not to root itself)
        self.v3_authenticate_token(root_project_auth_data, expected_status=401)
        self.v3_authenticate_token(leaf_project_auth_data)
        # Delete non-inherited grant
        self.delete(non_inher_up_link)
        # Check the inherited role still applies for leaf project
        self.v3_authenticate_token(root_project_auth_data, expected_status=401)
        self.v3_authenticate_token(leaf_project_auth_data)
        # Delete inherited grant
        self.delete(inher_up_link)
        # Check the user cannot get a token on leaf project anymore
        self.v3_authenticate_token(leaf_project_auth_data, expected_status=401)
def test_get_token_from_inherited_group_project_role_grants(self):
# Create default scenario
root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
self._setup_hierarchical_projects_scenario())
# Create group and add user to it
group = self.new_group_ref(domain_id=self.domain['id'])
group = self.identity_api.create_group(group)
self.identity_api.add_user_to_group(self.user['id'], group['id'])
# Define root and leaf projects authentication data
root_project_auth_data = self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'],
project_id=root_id)
leaf_project_auth_data = self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'],
project_id=leaf_id)
# Check the user cannot get a token on root nor leaf project
self.v3_authenticate_token(root_project_auth_data, expected_status=401)
self.v3_authenticate_token(leaf_project_auth_data, expected_status=401)
# Grant non-inherited role for group on leaf project
non_inher_gp_link = self.build_role_assignment_link(
project_id=leaf_id, group_id=group['id'],
role_id=non_inherited_role_id)
self.put(non_inher_gp_link)
# Check the user can only get a token on leaf project
self.v3_authenticate_token(root_project_auth_data, expected_status=401)
self.v3_authenticate_token(leaf_project_auth_data)
# Grant inherited role for group on root project
inher_gp_link = self.build_role_assignment_link(
project_id=root_id, group_id=group['id'],
role_id=inherited_role_id, inherited_to_projects=True)
self.put(inher_gp_link)
# Check the user still can get a token only on leaf project
self.v3_authenticate_token(root_project_auth_data, expected_status=401)
self.v3_authenticate_token(leaf_project_auth_data)
# Delete no-inherited grant
self.delete(non_inher_gp_link)
# Check the inherited role still applies for leaf project
self.v3_authenticate_token(leaf_project_auth_data)
# Delete inherited grant
self.delete(inher_gp_link)
# Check the user cannot get a token on leaf project anymore
self.v3_authenticate_token(leaf_project_auth_data, expected_status=401)
    def test_get_role_assignments_for_project_hierarchy(self):
        """Call ``GET /role_assignments``.

        Test Plan:

        - Create 2 roles
        - Create a hierarchy of projects with one root and one leaf project
        - Issue the URL to add a non-inherited user role to the root project
        - Issue the URL to add an inherited user role to the root project
        - Issue the URL to get all role assignments - this should return just
          2 roles (non-inherited and inherited) in the root project.

        """
        # Create default scenario
        root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
            self._setup_hierarchical_projects_scenario())
        # Grant non-inherited role
        non_inher_up_entity = self.build_role_assignment_entity(
            project_id=root_id, user_id=self.user['id'],
            role_id=non_inherited_role_id)
        self.put(non_inher_up_entity['links']['assignment'])
        # Grant inherited role
        inher_up_entity = self.build_role_assignment_entity(
            project_id=root_id, user_id=self.user['id'],
            role_id=inherited_role_id, inherited_to_projects=True)
        self.put(inher_up_entity['links']['assignment'])
        # Get role assignments
        collection_url = '/role_assignments'
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   resource_url=collection_url)
        # Assert that the user has non-inherited role on root project
        self.assertRoleAssignmentInListResponse(r, non_inher_up_entity)
        # Assert that the user has inherited role on root project
        self.assertRoleAssignmentInListResponse(r, inher_up_entity)
        # Assert that the user does not have non-inherited role on leaf project
        non_inher_up_entity = self.build_role_assignment_entity(
            project_id=leaf_id, user_id=self.user['id'],
            role_id=non_inherited_role_id)
        self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
        # Assert that the user does not have inherited role on leaf project
        # (the entity built above is retargeted in place at the leaf project)
        inher_up_entity['scope']['project']['id'] = leaf_id
        self.assertRoleAssignmentNotInListResponse(r, inher_up_entity)
    def test_get_effective_role_assignments_for_project_hierarchy(self):
        """Call ``GET /role_assignments?effective``.

        Test Plan:

        - Create 2 roles
        - Create a hierarchy of projects with one root and one leaf project
        - Issue the URL to add a non-inherited user role to the root project
        - Issue the URL to add an inherited user role to the root project
        - Issue the URL to get effective role assignments - this should return
          1 role (non-inherited) on the root project and 1 role (inherited) on
          the leaf project.

        """
        # Create default scenario
        root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
            self._setup_hierarchical_projects_scenario())
        # Grant non-inherited role
        non_inher_up_entity = self.build_role_assignment_entity(
            project_id=root_id, user_id=self.user['id'],
            role_id=non_inherited_role_id)
        self.put(non_inher_up_entity['links']['assignment'])
        # Grant inherited role
        inher_up_entity = self.build_role_assignment_entity(
            project_id=root_id, user_id=self.user['id'],
            role_id=inherited_role_id, inherited_to_projects=True)
        self.put(inher_up_entity['links']['assignment'])
        # Get effective role assignments
        collection_url = '/role_assignments?effective'
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   resource_url=collection_url)
        # Assert that the user has non-inherited role on root project
        self.assertRoleAssignmentInListResponse(r, non_inher_up_entity)
        # Assert that the user does not have inherited role on root project
        # ('effective' resolves inheritance, so the grant shows up on the
        # leaf instead of the root)
        self.assertRoleAssignmentNotInListResponse(r, inher_up_entity)
        # Assert that the user does not have non-inherited role on leaf project
        non_inher_up_entity = self.build_role_assignment_entity(
            project_id=leaf_id, user_id=self.user['id'],
            role_id=non_inherited_role_id)
        self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
        # Assert that the user has inherited role on leaf project
        # (the entity built above is retargeted in place at the leaf project)
        inher_up_entity['scope']['project']['id'] = leaf_id
        self.assertRoleAssignmentInListResponse(r, inher_up_entity)
    def test_get_inherited_role_assignments_for_project_hierarchy(self):
        """Call ``GET /role_assignments?scope.OS-INHERIT:inherited_to``.

        Test Plan:

        - Create 2 roles
        - Create a hierarchy of projects with one root and one leaf project
        - Issue the URL to add a non-inherited user role to the root project
        - Issue the URL to add an inherited user role to the root project
        - Issue the URL to filter inherited to projects role assignments - this
          should return 1 role (inherited) on the root project.

        """
        # Create default scenario
        root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
            self._setup_hierarchical_projects_scenario())
        # Grant non-inherited role
        non_inher_up_entity = self.build_role_assignment_entity(
            project_id=root_id, user_id=self.user['id'],
            role_id=non_inherited_role_id)
        self.put(non_inher_up_entity['links']['assignment'])
        # Grant inherited role
        inher_up_entity = self.build_role_assignment_entity(
            project_id=root_id, user_id=self.user['id'],
            role_id=inherited_role_id, inherited_to_projects=True)
        self.put(inher_up_entity['links']['assignment'])
        # Get inherited role assignments
        # (filter only; inheritance is NOT resolved, so the inherited grant
        # is reported on the root project where it was made)
        collection_url = ('/role_assignments'
                          '?scope.OS-INHERIT:inherited_to=projects')
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   resource_url=collection_url)
        # Assert that the user does not have non-inherited role on root project
        self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
        # Assert that the user has inherited role on root project
        self.assertRoleAssignmentInListResponse(r, inher_up_entity)
        # Assert that the user does not have non-inherited role on leaf project
        non_inher_up_entity = self.build_role_assignment_entity(
            project_id=leaf_id, user_id=self.user['id'],
            role_id=non_inherited_role_id)
        self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
        # Assert that the user does not have inherited role on leaf project
        # (the entity built above is retargeted in place at the leaf project)
        inher_up_entity['scope']['project']['id'] = leaf_id
        self.assertRoleAssignmentNotInListResponse(r, inher_up_entity)
class AssignmentInheritanceDisabledTestCase(test_v3.RestfulTestCase):
    """Test that OS-INHERIT endpoints 404 when the extension is disabled."""

    def config_overrides(self):
        # Run the whole test case with the os_inherit extension turned off.
        super(AssignmentInheritanceDisabledTestCase, self).config_overrides()
        self.config_fixture.config(group='os_inherit', enabled=False)

    def test_crud_inherited_role_grants_failed_if_disabled(self):
        """Every OS-INHERIT grant operation must return 404 when disabled."""
        role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
        self.role_api.create_role(role['id'], role)
        base_collection_url = (
            '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
                'domain_id': self.domain_id,
                'user_id': self.user['id']})
        member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
            'collection_url': base_collection_url,
            'role_id': role['id']}
        collection_url = base_collection_url + '/inherited_to_projects'
        # All four verbs on the inherited-grant resources must 404.
        self.put(member_url, expected_status=404)
        self.head(member_url, expected_status=404)
        self.get(collection_url, expected_status=404)
        self.delete(member_url, expected_status=404)
class AssignmentV3toV2MethodsTestCase(tests.TestCase):
    """Test domain V3 to V2 conversion methods."""

    def _setup_initial_projects(self):
        """Create three project refs plus the expected V2 conversion result."""
        self.project_id = uuid.uuid4().hex
        self.domain_id = CONF.identity.default_domain_id
        self.parent_id = uuid.uuid4().hex
        # Project with only domain_id in ref
        self.project1 = {'id': self.project_id,
                         'name': self.project_id,
                         'domain_id': self.domain_id}
        # Project with both domain_id and parent_id in ref
        self.project2 = {'id': self.project_id,
                         'name': self.project_id,
                         'domain_id': self.domain_id,
                         'parent_id': self.parent_id}
        # NOTE(review): the original comment claimed "no domain_id and
        # parent_id", but this ref includes both (identical to project2) —
        # confirm whether the keys were meant to be omitted here.
        self.project3 = {'id': self.project_id,
                         'name': self.project_id,
                         'domain_id': self.domain_id,
                         'parent_id': self.parent_id}
        # Expected result with no domain_id and parent_id
        self.expected_project = {'id': self.project_id,
                                 'name': self.project_id}

    def test_v2controller_filter_domain_id(self):
        """filter_domain_id must strip domain_id in place and return the ref."""
        # V2.0 is not domain aware, ensure domain_id is popped off the ref.
        other_data = uuid.uuid4().hex
        domain_id = CONF.identity.default_domain_id
        ref = {'domain_id': domain_id,
               'other_data': other_data}
        ref_no_domain = {'other_data': other_data}
        expected_ref = ref_no_domain.copy()
        updated_ref = controller.V2Controller.filter_domain_id(ref)
        # Same object back (in-place mutation), with domain_id removed.
        self.assertIs(ref, updated_ref)
        self.assertDictEqual(ref, expected_ref)
        # Make sure we don't error/muck up data if domain_id isn't present
        updated_ref = controller.V2Controller.filter_domain_id(ref_no_domain)
        self.assertIs(ref_no_domain, updated_ref)
        self.assertDictEqual(ref_no_domain, expected_ref)

    def test_v3controller_filter_domain_id(self):
        """The V3 variant must leave domain_id untouched."""
        # No data should be filtered out in this case.
        other_data = uuid.uuid4().hex
        domain_id = uuid.uuid4().hex
        ref = {'domain_id': domain_id,
               'other_data': other_data}
        expected_ref = ref.copy()
        updated_ref = controller.V3Controller.filter_domain_id(ref)
        self.assertIs(ref, updated_ref)
        self.assertDictEqual(ref, expected_ref)

    def test_v2controller_filter_domain(self):
        """filter_domain drops the default domain but rejects any other."""
        other_data = uuid.uuid4().hex
        domain_id = uuid.uuid4().hex
        non_default_domain_ref = {'domain': {'id': domain_id},
                                  'other_data': other_data}
        default_domain_ref = {'domain': {'id': 'default'},
                              'other_data': other_data}
        updated_ref = controller.V2Controller.filter_domain(default_domain_ref)
        self.assertNotIn('domain', updated_ref)
        # Non-default domains cannot be represented in V2 and must raise.
        self.assertRaises(exception.Unauthorized,
                          controller.V2Controller.filter_domain,
                          non_default_domain_ref)

    def test_v2controller_filter_project_parent_id(self):
        """filter_project_parent_id must strip parent_id in place."""
        # V2.0 is not project hierarchy aware, ensure parent_id is popped off.
        other_data = uuid.uuid4().hex
        parent_id = uuid.uuid4().hex
        ref = {'parent_id': parent_id,
               'other_data': other_data}
        ref_no_parent = {'other_data': other_data}
        expected_ref = ref_no_parent.copy()
        updated_ref = controller.V2Controller.filter_project_parent_id(ref)
        self.assertIs(ref, updated_ref)
        self.assertDictEqual(ref, expected_ref)
        # Make sure we don't error/muck up data if parent_id isn't present
        updated_ref = controller.V2Controller.filter_project_parent_id(
            ref_no_parent)
        self.assertIs(ref_no_parent, updated_ref)
        self.assertDictEqual(ref_no_parent, expected_ref)

    def test_v3_to_v2_project_method(self):
        """v3_to_v2_project on a single ref strips domain_id and parent_id."""
        self._setup_initial_projects()
        updated_project1 = controller.V2Controller.v3_to_v2_project(
            self.project1)
        self.assertIs(self.project1, updated_project1)
        self.assertDictEqual(self.project1, self.expected_project)
        updated_project2 = controller.V2Controller.v3_to_v2_project(
            self.project2)
        self.assertIs(self.project2, updated_project2)
        self.assertDictEqual(self.project2, self.expected_project)
        updated_project3 = controller.V2Controller.v3_to_v2_project(
            self.project3)
        self.assertIs(self.project3, updated_project3)
        self.assertDictEqual(self.project3, self.expected_project)

    def test_v3_to_v2_project_method_list(self):
        """v3_to_v2_project on a list converts every element, keeping order."""
        self._setup_initial_projects()
        project_list = [self.project1, self.project2, self.project3]
        updated_list = controller.V2Controller.v3_to_v2_project(project_list)
        self.assertEqual(len(updated_list), len(project_list))
        for i, ref in enumerate(updated_list):
            # Order should not change.
            self.assertIs(ref, project_list[i])
        self.assertDictEqual(self.project1, self.expected_project)
        self.assertDictEqual(self.project2, self.expected_project)
        self.assertDictEqual(self.project3, self.expected_project)
| jonnary/keystone | keystone/tests/unit/test_v3_assignment.py | Python | apache-2.0 | 135,102 |
'''
Created on Nov 22, 2012
@author: Buddhika De Seram
'''
class MQTT:
    """Shared configuration constants for this project's MQTT clients.

    All attributes are class-level constants; the ``None`` members are
    presumably assigned at runtime by the consuming scripts — TODO confirm
    against callers.
    """
    # Runtime handles, populated elsewhere (TODO confirm against callers).
    database5 = None
    threed_pub = None
    # Client identifiers used for the individual MQTT connections.
    client_cosm = "LIB_cosm"
    client_ts = "LIB_ts"
    client_3d = "LIB_3d"
    client_sense = "LIB_sense"
    client_3bpub = "3dPub"
    # Broker hostname.
    server = "winter.ceit.uq.edu.au"
    client = "LIB_temp"
    client_dblib = "lib_database"
    # Topic strings used when publishing/subscribing.
    topic_db = "/LIB/config/level"
    topic_temp = "/LIB/level4/climate_raw"
    topic_3d = "/LIB/3d/data"
    # Default message payload template.
    packet = {"id":"11.11.11", "value":0, "timestamp":0}
    # Last return code / Raspberry Pi id, set at runtime (TODO confirm).
    rc = None
    pi_id = None
| craigknott/CEIT_Sensors_Server | MQTT.py | Python | mit | 542 |
# -*- encoding: utf-8 -*-
#
# Copyright © 2012 New Dream Network, LLC (DreamHost)
#
# Author: Doug Hellmann <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from pecan import rest
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from ironic.api.controllers import base
from ironic.api.controllers import link
from ironic.api.controllers import v1
class Version(base.APIBase):
    """An API version representation."""

    id = wtypes.text
    """The ID of the version, also acts as the release number"""

    links = [link.Link]
    """Links that point to a specific version of the API"""

    @staticmethod
    def convert(id):
        """Build a Version document for *id* with a self link.

        The link host is taken from the current pecan request, so this must
        be called inside a request context.
        """
        version = Version()
        version.id = id
        version.links = [link.Link.make_link('self', pecan.request.host_url,
                                             id, '', bookmark=True)]
        return version
class Root(base.APIBase):
    """Root document describing the API and its available versions."""

    name = wtypes.text
    """The name of the API"""

    description = wtypes.text
    """Some information about this API"""

    versions = [Version]
    """Links to all the versions available in this API"""

    default_version = Version
    """A link to the default version of the API"""

    @staticmethod
    def convert():
        """Build the root document (currently only v1 is advertised)."""
        root = Root()
        root.name = "OpenStack Ironic API"
        root.description = ("Ironic is an OpenStack project which aims to "
                            "provision baremetal machines.")
        root.versions = [Version.convert('v1')]
        root.default_version = Version.convert('v1')
        return root
class RootController(rest.RestController):
    """Top-level controller: serves the version document and routes to v1."""

    _versions = ['v1']
    """All supported API versions"""

    _default_version = 'v1'
    """The default API version"""

    v1 = v1.Controller()

    @wsme_pecan.wsexpose(Root)
    def get(self):
        # NOTE: convert() is evaluated on every request because the links it
        # builds depend on the current request's host url.
        return Root.convert()

    @pecan.expose()
    def _route(self, args):
        """Overrides the default routing behavior.

        Requests whose first path segment does not name a known API version
        are redirected to the default version of the ironic API.
        """
        head = args[0]
        if head and head not in self._versions:
            args = [self._default_version] + args
        return super(RootController, self)._route(args)
| ramineni/myironic | ironic/api/controllers/root.py | Python | apache-2.0 | 2,972 |
# coding=utf-8
# Copyright 2022 The ML Fairness Gym Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python2, python3
"""Utilities for running and measuring gym simulations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
import gin
import tqdm
FLAGS = flags.FLAGS
flags.DEFINE_boolean("use_tqdm", True,
"Use tqdm to visually represent progress in simulations.")
@gin.configurable
def run_simulation(env, agent, metrics, num_steps, seed=100, agent_seed=50):
  """Run a simple agent/environment loop and measure the given metrics.

  Args:
    env: A `core.FairnessEnv`.
    agent: A `core.Agent`.
    metrics: A single `core.Metric`, a list of them, or a
      {name: `core.Metric`} dict.
    num_steps: An integer number of environment steps to simulate.
    seed: An integer random seed for the environment.
    agent_seed: An integer random seed for the agent.

  Returns:
    The measurement(s), with the same structure as `metrics` (single value,
    list, or dict).
  """
  agent.seed(agent_seed)
  env.seed(seed)
  observation = env.reset()
  done = False
  print("Starting simulation")
  step_range = tqdm.trange if FLAGS.use_tqdm else range
  for _ in step_range(num_steps):
    # Keep the agent's view of the spaces in sync with the environment,
    # which may change them between steps.
    agent.action_space = env.action_space
    agent.observation_space = env.observation_space
    action = agent.act(observation, done)
    # TODO(): Remove reward from this loop.
    observation, _, done, _ = env.step(action)
    if done:
      break
  print("Measuring metrics")
  if isinstance(metrics, dict):
    return {name: metric.measure(env) for name, metric in metrics.items()}
  if isinstance(metrics, list):
    return [metric.measure(env) for metric in metrics]
  return metrics.measure(env)
@gin.configurable
def run_stackelberg_simulation(env,
                               agent,
                               metrics,
                               num_steps,
                               seed=100,
                               agent_seed=100):
  """Performs a Stackelberg simulation.

  A Stackelberg Simulation involves a two player game between a Jury (Agent)
  and Contestants (Environment's population). In this setup the game proceeds
  as follows:
    1. Agent Publishes a classifier
    2. Contestants manipulate features to game the classifier
    3. Agent receives manipulated features and makes decision
    4. Environment receives agent's decision and calculates penalties/reward.
  In this case, we have folded steps 2, 3, 4 into the environment, where once
  the agent publishes its classifier, the feature manipulation, classification
  and reward calculation is done in one step in the environment.

  Args:
    env: A `core.FairnessEnv`.
    agent: A `core.Agent`.
    metrics: A list of `core.Metric` instances, a dict of {name: `core.Metric`}
      or a single `core.Metric` instance.
    num_steps: An integer indicating the number of steps to simulate.
    seed: An integer indicating a random seed.
    agent_seed: An integer indicating a random seed for the agent.

  Returns:
    A list of measurements if multiple metrics else a single measurement.
  """
  env.seed(seed)
  agent.seed(agent_seed)
  _ = env.reset()
  agent.action_space = env.action_space
  # The agent (Jury) publishes its initial classifier before observing
  # anything — this is what makes the loop Stackelberg: step first, act after.
  action = agent.initial_action()
  done = False
  print("Starting simulation")
  simulation_iterator = tqdm.trange if FLAGS.use_tqdm else range
  for _ in simulation_iterator(num_steps):
    # TODO(): Remove reward from this loop.
    observation, _, done, _ = env.step(action)
    # Update the agent with any changes to the observation or action space.
    agent.action_space, agent.observation_space = (env.action_space,
                                                   env.observation_space)
    action = agent.act(observation, done)
    if done:
      break
  print("Measuring metrics")
  if isinstance(metrics, list):
    return [metric.measure(env) for metric in metrics]
  elif isinstance(metrics, dict):
    return {name: metric.measure(env) for name, metric in metrics.items()}
  else:
    return metrics.measure(env)
| google/ml-fairness-gym | run_util.py | Python | apache-2.0 | 4,856 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import urlparse
from odoo import http
from odoo.http import request
from odoo.addons.website_mail.controllers.main import WebsiteMail
class WebsiteMailController(WebsiteMail):
    """Extends the website chatter endpoints so a posted message can carry a
    rating (product templates only)."""

    @http.route(['/website_mail/post/json'], type='json', auth='public', website=True)
    def chatter_json(self, res_model='', res_id=None, message='', **kw):
        """JSON chatter post; attaches a rating.rating to the new message.

        The 'rating' key is stripped from the kwargs forwarded upstream and,
        when present, stored as a rating linked to the created message.
        """
        params = kw.copy()
        params.pop('rating', False)
        message_data = super(WebsiteMailController, self).chatter_json(res_model=res_model, res_id=res_id, message=message, **params)
        if message_data and kw.get('rating') and res_model == 'product.template':  # restrict rating only for product template
            rating = request.env['rating.rating'].create({
                'rating': float(kw.get('rating')),
                'res_model': res_model,
                'res_id': res_id,
                'message_id': message_data['id'],
            })
            message_data.update({
                'rating_default_value': rating.rating,
                'rating_disabled': True,
            })
        return message_data

    @http.route(['/website_mail/post/post'], type='http', methods=['POST'], auth='public', website=True)
    def chatter_post(self, res_model='', res_id=None, message='', redirect=None, **kw):
        """HTTP form chatter post; parses the '#message-<id>' fragment of the
        redirect location to link a rating.rating to the created message."""
        params = kw.copy()
        # Use a default so requests without a 'rating' field don't raise
        # KeyError (consistent with chatter_json above).
        params.pop('rating', False)
        response = super(WebsiteMailController, self).chatter_post(res_model=res_model, res_id=res_id, message=message, redirect=redirect, **params)
        if kw.get('rating') and res_model == 'product.template':  # restrict rating only for product template
            try:
                fragment = urlparse.urlparse(response.location).fragment
                message_id = int(fragment.replace('message-', ''))
                request.env['rating.rating'].create({
                    'rating': float(kw.get('rating')),
                    'res_model': res_model,
                    'res_id': res_id,
                    'message_id': message_id,
                })
            except Exception:
                # Best effort: a missing/malformed fragment must not break
                # the post itself.
                pass
        return response
| ayepezv/GAD_ERP | addons/website_sale/controllers/website_mail.py | Python | gpl-3.0 | 2,183 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class PoolUpgradeOSParameter(Model):
    """Options for upgrading the operating system of compute nodes in a pool.

    :param target_os_version: The Azure Guest OS version to be installed on
     the virtual machines in the pool.
    :type target_os_version: str
    """

    # Presumably consumed by the msrest serializer to enforce required
    # fields — generated code, do not edit by hand.
    _validation = {
        'target_os_version': {'required': True},
    }

    # Maps the Python attribute name to its wire (JSON) key and type.
    _attribute_map = {
        'target_os_version': {'key': 'targetOSVersion', 'type': 'str'},
    }

    def __init__(self, target_os_version):
        self.target_os_version = target_os_version
| v-iam/azure-sdk-for-python | azure-batch/azure/batch/models/pool_upgrade_os_parameter.py | Python | mit | 1,060 |
#!/usr/bin/python3
# Lists the registered users via the admin REST API.
# Author: Everton de Vargas Agilar
# Date: 20/10/2014

# python
import http.client

conn = http.client.HTTPConnection('localhost', 8000)
try:
    conn.request("GET", '/acesso/api1/adm/Usuario/')
    response = conn.getresponse().read().decode("utf-8")
finally:
    # Always release the connection, even if the request fails.
    conn.close()
print(response)

# curl equivalent:
# curl -X GET http://localhost:8000/acesso/api1/adm/Usuario/
| eliot-framework/eliot | test/lista_users.py | Python | gpl-2.0 | 391 |
###############################################################################
# lazyflow: data flow based lazy parallel computation framework
#
# Copyright (C) 2011-2014, the ilastik developers
# <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the Lesser GNU General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# See the files LICENSE.lgpl2 and LICENSE.lgpl3 for full text of the
# GNU Lesser General Public License version 2.1 and 3 respectively.
# This information is also available on the ilastik web site at:
# http://ilastik.org/license/
###############################################################################
import numpy
import vigra
from lazyflow.graph import Graph
from lazyflow.operators.opColorizeLabels import OpColorizeLabels
class TestOpColorizeLabels(object):
    """Exercise OpColorizeLabels: deterministic label colors, channel
    slicing, and per-label color overrides."""

    def setUp(self):
        # Create a label array that's symmetric about the x-y axes
        # (label value at (x, y) equals x + y).
        data = numpy.indices((10,10), dtype=int).sum(0)
        assert (data == data.transpose()).all()
        data = data.view(vigra.VigraArray)
        data.axistags = vigra.defaultAxistags('xy')
        data = data.withAxes(*'txyzc')
        assert data.shape == (1,10,10,1,1)

        graph = Graph()
        op = OpColorizeLabels(graph=graph)
        op.Input.setValue(data)
        self.op = op

    def testBasic(self):
        op = self.op
        # Test requesting specific channels: Don't get RGBA, just get GBA.
        colorizedData = op.Output[:,5:,5:,:,1:4].wait()

        # Output is colorized (3 channels)
        assert colorizedData.shape == (1,5,5,1,3)

        # If we transpose x-y, then the data should still be the same,
        # which implies that identical labels got identical colors
        # (i.e. we chose deterministic colors)
        assert (colorizedData == colorizedData.transpose(0,2,1,3,4)).all()

        # Different labels should get different colors
        assert ( colorizedData[0,1,1,0,0] != colorizedData[0,2,2,0,0]
            or colorizedData[0,1,1,0,1] != colorizedData[0,2,2,0,1]
            or colorizedData[0,1,1,0,2] != colorizedData[0,2,2,0,2] )

        assert (colorizedData[0,1:,1:,0,2] == 255).all(), "Alpha should be 255 for all labels except label 0"

    def testOverrideColors(self):
        op = self.op
        overrides = {}
        overrides[1] = (1,2,3,4)
        overrides[2] = (5,6,7,8)

        # Label 0 override is black and transparent by default
        colorizedData = op.Output[...].wait()
        assert (colorizedData[0,0,0,0,:] == 0).all()

        # Apply custom overrides
        op.OverrideColors.setValue( overrides )
        colorizedData = op.Output[...].wait()

        # Check for non-random colors on the labels we want to override
        # (label 1 lives at (1,0)/(0,1), label 2 at (1,1)/(2,0)/(0,2),
        # since the label value is x + y).
        assert (colorizedData[0,1,0,0,:] == overrides[1]).all()
        assert (colorizedData[0,0,1,0,:] == overrides[1]).all()
        assert (colorizedData[0,1,1,0,:] == overrides[2]).all()
        assert (colorizedData[0,2,0,0,:] == overrides[2]).all()
        assert (colorizedData[0,0,2,0,:] == overrides[2]).all()

        # Other labels should be random
        assert not (colorizedData[0,0,3,0,:] == overrides[2]).all()
if __name__ == "__main__":
    # Run this module's tests under nose, keeping console output and
    # logging untouched so failures are visible interactively.
    import sys
    import nose
    sys.argv.append("--nocapture")    # Don't steal stdout.  Show it on the console as usual.
    sys.argv.append("--nologcapture") # Don't set the logging level to DEBUG.  Leave it alone.
    ret = nose.run(defaultTest=__file__)
    if not ret: sys.exit(1)
| stuarteberg/lazyflow | tests/testOpColorizeLabels.py | Python | lgpl-3.0 | 3,980 |
# --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
import os
from os.path import join as pjoin
import numpy as np
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
def find_in_path(name, path):
    """Find a file named *name* in an ``os.pathsep``-separated search *path*.

    Returns the absolute path of the first match, or None if not found.
    """
    # adapted fom http://code.activestate.com/recipes/52224-find-a-file-given-a-search-path/
    # 'directory' instead of 'dir', which shadows the builtin.
    for directory in path.split(os.pathsep):
        candidate = pjoin(directory, name)
        if os.path.exists(candidate):
            return os.path.abspath(candidate)
    return None
def locate_cuda():
    """Locate the CUDA environment on the system.

    Returns a dict with keys 'home', 'nvcc', 'include', and 'lib64'
    and values giving the absolute path to each directory.

    Starts by looking for the CUDAHOME env variable. If not found, everything
    is based on finding 'nvcc' in the PATH.

    Raises:
        EnvironmentError: if nvcc cannot be located, or an expected
            toolkit directory is missing.
    """
    # first check if the CUDAHOME env variable is in use
    if 'CUDAHOME' in os.environ:
        home = os.environ['CUDAHOME']
        nvcc = pjoin(home, 'bin', 'nvcc')
    else:
        # otherwise, search the PATH for NVCC
        default_path = pjoin(os.sep, 'usr', 'local', 'cuda', 'bin')
        nvcc = find_in_path('nvcc', os.environ['PATH'] + os.pathsep + default_path)
        if nvcc is None:
            raise EnvironmentError('The nvcc binary could not be '
                'located in your $PATH. Either add it to your path, or set $CUDAHOME')
        home = os.path.dirname(os.path.dirname(nvcc))

    cudaconfig = {'home': home, 'nvcc': nvcc,
                  'include': pjoin(home, 'include'),
                  'lib64': pjoin(home, 'lib64')}
    # .items() instead of the Python-2-only .iteritems() so this setup script
    # also runs under Python 3 (the rest of the file already uses print()).
    for k, v in cudaconfig.items():
        if not os.path.exists(v):
            raise EnvironmentError('The CUDA %s path could not be located in %s' % (k, v))

    return cudaconfig
# Resolve the CUDA toolchain once at import time; raises EnvironmentError
# when no CUDA installation can be located.
CUDA = locate_cuda()

# Obtain the numpy include directory. This logic works across numpy versions.
try:
    numpy_include = np.get_include()
except AttributeError:
    # Very old numpy releases only provided get_numpy_include().
    numpy_include = np.get_numpy_include()
def customize_compiler_for_nvcc(self):
    """inject deep into distutils to customize how the dispatch
    to gcc/nvcc works.
    If you subclass UnixCCompiler, it's not trivial to get your subclass
    injected in, and still have the right customizations (i.e.
    distutils.sysconfig.customize_compiler) run on it. So instead of going
    the OO route, I have this. Note, it's kindof like a wierd functional
    subclassing going on."""
    # tell the compiler it can processes .cu
    self.src_extensions.append('.cu')
    # save references to the default compiler_so and _compile methods
    # (note: the local name 'super' shadows the builtin here; it holds the
    # original bound _compile method, not the builtin super()).
    default_compiler_so = self.compiler_so
    super = self._compile
    # now redefine the _compile method. This gets executed for each
    # object but distutils doesn't have the ability to change compilers
    # based on source extension: we add it.
    def _compile(obj, src, ext, cc_args, extra_postargs, pp_opts):
        # NOTE(review): this print looks like leftover debugging output --
        # confirm it can be removed.
        print(extra_postargs)
        if os.path.splitext(src)[1] == '.cu':
            # use the cuda for .cu files
            self.set_executable('compiler_so', CUDA['nvcc'])
            # use only a subset of the extra_postargs, which are 1-1 translated
            # from the extra_compile_args in the Extension class
            postargs = extra_postargs['nvcc']
        else:
            postargs = extra_postargs['gcc']
        super(obj, src, ext, cc_args, postargs, pp_opts)
        # reset the default compiler_so, which we might have changed for cuda
        self.compiler_so = default_compiler_so
    # inject our redefined _compile method into the class
    self._compile = _compile
# build_ext subclass that patches the compiler so that .cu sources are
# handed to nvcc while everything else still goes through gcc.
class custom_build_ext(build_ext):
    def build_extensions(self):
        # Patch the compiler instance, then run the normal build.
        customize_compiler_for_nvcc(self.compiler)
        build_ext.build_extensions(self)
# Extension modules built by this setup script: Cython helpers (bbox/nms),
# the CUDA-accelerated GPU NMS kernel, and the COCO mask API.
ext_modules = [
    Extension(
        "utils.cython_bbox",
        ["utils/bbox.pyx"],
        extra_compile_args={'gcc': ["-Wno-cpp", "-Wno-unused-function"]},
        include_dirs=[numpy_include]
    ),
    Extension(
        "utils.cython_nms",
        ["utils/nms.pyx"],
        extra_compile_args={'gcc': ["-Wno-cpp", "-Wno-unused-function"]},
        include_dirs=[numpy_include]
    ),
    Extension(
        "nms.cpu_nms",
        ["nms/cpu_nms.pyx"],
        extra_compile_args={'gcc': ["-Wno-cpp", "-Wno-unused-function"]},
        include_dirs=[numpy_include]
    ),
    Extension('nms.gpu_nms',
        ['nms/nms_kernel.cu', 'nms/gpu_nms.pyx'],
        library_dirs=[CUDA['lib64']],
        libraries=['cudart'],
        language='c++',
        runtime_library_dirs=[CUDA['lib64']],
        # this syntax is specific to this build system
        # we're only going to use certain compiler args with nvcc and not with gcc
        # the implementation of this trick is in customize_compiler() below
        extra_compile_args={'gcc': ["-Wno-unused-function"],
                            'nvcc': ['-arch=sm_35',
                                     '--ptxas-options=-v',
                                     '-c',
                                     '--compiler-options',
                                     "'-fPIC'"]},
        include_dirs=[numpy_include, CUDA['include']]
    ),
    Extension(
        'pycocotools._mask',
        sources=['pycocotools/maskApi.c', 'pycocotools/_mask.pyx'],
        include_dirs=[numpy_include, 'pycocotools'],
        extra_compile_args={
            'gcc': ['-Wno-cpp', '-Wno-unused-function', '-std=c99']},
    ),
]
setup(
    name='fast_rcnn',
    ext_modules=ext_modules,
    # inject our custom trigger so build_ext uses the nvcc-aware compiler
    cmdclass={'build_ext': custom_build_ext},
)
| zhongyx12/Faster-RCNN-Refinement | faster_rcnn/setup.py | Python | mit | 5,979 |
# Generated by Django 1.9.13 on 2018-05-18 10:17
import django.core.validators
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add election-result fields to popolo.Membership.

    Auto-generated by Django makemigrations; avoid hand-editing the
    operations below.
    """
    dependencies = [
        ("candidates", "0044_remove_membership_fk_to_election"),
        ("popolo", "0002_update_models_from_upstream"),
    ]
    operations = [
        migrations.AddField(
            model_name="membership",
            name="elected",
            # NullBooleanField: True/False/unknown (NULL).
            field=models.NullBooleanField(),
        ),
        migrations.AddField(
            model_name="membership",
            name="party_list_position",
            field=models.IntegerField(null=True),
        ),
        migrations.AddField(
            model_name="membership",
            name="post_election",
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                to="candidates.PostExtraElection",
            ),
        ),
    ]
| DemocracyClub/yournextrepresentative | ynr/apps/popolo/migrations/0003_move-extra-fields-to-base.py | Python | agpl-3.0 | 990 |
#!/usr/local/lib/mailinabox/env/bin/python
# This script performs a backup of all user data:
# 1) System services are stopped.
# 2) STORAGE_ROOT/backup/before-backup is executed if it exists.
# 3) An incremental encrypted backup is made using duplicity.
# 4) The stopped services are restarted.
# 5) STORAGE_ROOT/backup/after-backup is executed if it exists.
import os, os.path, shutil, glob, re, datetime, sys
import dateutil.parser, dateutil.relativedelta, dateutil.tz
import rtyaml
from exclusiveprocess import Lock
from utils import load_environment, shell, wait_for_service, fix_boto
# Extra duplicity flags appended to every invocation so that rsync://
# targets authenticate with the dedicated Mail-in-a-Box SSH identity.
rsync_ssh_options = [
    "--ssh-options= -i /root/.ssh/id_rsa_miab",
    "--rsync-options= -e \"/usr/bin/ssh -oStrictHostKeyChecking=no -oBatchMode=yes -p 22 -i /root/.ssh/id_rsa_miab\"",
]
def backup_status(env):
    """Return the list of backups on the target plus size/retention info.

    Returns {} when backups are disabled.  Otherwise a dict with:
      "backups": newest-first list of per-backup dicts (date, size,
                 volumes, full/incremental flag, estimated deletion time).
      "unmatched_file_size": bytes on the target not tied to any known backup.
    Raises Exception if duplicity cannot read the backup collection.
    """
    # If backups are disabled, return no status.
    config = get_backup_config(env)
    if config["target"] == "off":
        return { }
    # Query duplicity to get a list of all full and incremental
    # backups available.
    backups = { }
    now = datetime.datetime.now(dateutil.tz.tzlocal())
    backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')
    backup_cache_dir = os.path.join(backup_root, 'cache')
    def reldate(date, ref, clip):
        # Friendly English rendering of the delta from 'date' to 'ref';
        # 'clip' is returned when 'date' is later than 'ref'.
        if ref < date: return clip
        rd = dateutil.relativedelta.relativedelta(ref, date)
        if rd.years > 1: return "%d years, %d months" % (rd.years, rd.months)
        if rd.years == 1: return "%d year, %d months" % (rd.years, rd.months)
        if rd.months > 1: return "%d months, %d days" % (rd.months, rd.days)
        if rd.months == 1: return "%d month, %d days" % (rd.months, rd.days)
        if rd.days >= 7: return "%d days" % rd.days
        if rd.days > 1: return "%d days, %d hours" % (rd.days, rd.hours)
        if rd.days == 1: return "%d day, %d hours" % (rd.days, rd.hours)
        return "%d hours, %d minutes" % (rd.hours, rd.minutes)
    # Get duplicity collection status and parse for a list of backups.
    def parse_line(line):
        # Each matching line is "<full|inc> <date> <volume count>".
        keys = line.strip().split()
        date = dateutil.parser.parse(keys[1]).astimezone(dateutil.tz.tzlocal())
        return {
            "date": keys[1],
            "date_str": date.strftime("%Y-%m-%d %X") + " " + now.tzname(),
            "date_delta": reldate(date, now, "the future?"),
            "full": keys[0] == "full",
            "size": 0, # collection-status doesn't give us the size
            "volumes": int(keys[2]), # number of archive volumes for this backup (not really helpful)
        }
    code, collection_status = shell('check_output', [
        "/usr/bin/duplicity",
        "collection-status",
        "--archive-dir", backup_cache_dir,
        "--gpg-options", "--cipher-algo=AES256",
        "--log-fd", "1",
        config["target"],
        ] + rsync_ssh_options,
        get_env(env),
        trap=True)
    if code != 0:
        # Command failed. This is likely due to an improperly configured remote
        # destination for the backups or the last backup job terminated unexpectedly.
        raise Exception("Something is wrong with the backup: " + collection_status)
    for line in collection_status.split('\n'):
        if line.startswith(" full") or line.startswith(" inc"):
            backup = parse_line(line)
            backups[backup["date"]] = backup
    # Look at the target directly to get the sizes of each of the backups. There is more than one file per backup.
    # Starting with duplicity in Ubuntu 18.04, "signatures" files have dates in their
    # filenames that are a few seconds off the backup date and so don't line up
    # with the list of backups we have. Track unmatched files so we know how much other
    # space is used for those.
    unmatched_file_size = 0
    for fn, size in list_target_files(config):
        m = re.match(r"duplicity-(full|full-signatures|(inc|new-signatures)\.(?P<incbase>\d+T\d+Z)\.to)\.(?P<date>\d+T\d+Z)\.", fn)
        if not m: continue # not a part of a current backup chain
        key = m.group("date")
        if key in backups:
            backups[key]["size"] += size
        else:
            unmatched_file_size += size
    # Ensure the rows are sorted reverse chronologically.
    # This is relied on by should_force_full() and the next step.
    backups = sorted(backups.values(), key = lambda b : b["date"], reverse=True)
    # Get the average size of incremental backups, the size of the
    # most recent full backup, and the date of the most recent
    # backup and the most recent full backup.
    incremental_count = 0
    incremental_size = 0
    first_date = None
    first_full_size = None
    first_full_date = None
    for bak in backups:
        if first_date is None:
            first_date = dateutil.parser.parse(bak["date"])
        if bak["full"]:
            first_full_size = bak["size"]
            first_full_date = dateutil.parser.parse(bak["date"])
            break
        incremental_count += 1
        incremental_size += bak["size"]
    # When will the most recent backup be deleted? It won't be deleted if the next
    # backup is incremental, because the increments rely on all past increments.
    # So first guess how many more incremental backups will occur until the next
    # full backup. That full backup frees up this one to be deleted. But, the backup
    # must also be at least min_age_in_days old too.
    deleted_in = None
    if incremental_count > 0 and incremental_size > 0 and first_full_size is not None:
        # How many days until the next incremental backup? First, the part of
        # the algorithm based on increment sizes:
        est_days_to_next_full = (.5 * first_full_size - incremental_size) / (incremental_size/incremental_count)
        est_time_of_next_full = first_date + datetime.timedelta(days=est_days_to_next_full)
        # ...And then the part of the algorithm based on full backup age:
        est_time_of_next_full = min(est_time_of_next_full, first_full_date + datetime.timedelta(days=config["min_age_in_days"]*10+1))
        # It still can't be deleted until it's old enough.
        est_deleted_on = max(est_time_of_next_full, first_date + datetime.timedelta(days=config["min_age_in_days"]))
        deleted_in = "approx. %d days" % round((est_deleted_on-now).total_seconds()/60/60/24 + .5)
    # When will a backup be deleted? Set the deleted_in field of each backup.
    saw_full = False
    for bak in backups:
        if deleted_in:
            # The most recent increment in a chain and all of the previous backups
            # it relies on are deleted at the same time.
            bak["deleted_in"] = deleted_in
        if bak["full"]:
            # Reset when we get to a full backup. A new chain start *next*.
            saw_full = True
            deleted_in = None
        elif saw_full and not deleted_in:
            # We're now on backups prior to the most recent full backup. These are
            # free to be deleted as soon as they are min_age_in_days old.
            deleted_in = reldate(now, dateutil.parser.parse(bak["date"]) + datetime.timedelta(days=config["min_age_in_days"]), "on next daily backup")
            bak["deleted_in"] = deleted_in
    return {
        "backups": backups,
        "unmatched_file_size": unmatched_file_size,
    }
def should_force_full(config, env):
    """Decide whether the next backup must be a full backup.

    A full backup is forced when the increments since the last full
    backup have grown to more than half the size of that full backup,
    when the last full backup has become too old, or when no full
    backup exists at all.
    """
    increments_total = 0
    for bak in backup_status(env)["backups"]:
        if bak["full"]:
            # Reached the most recent full backup (list is newest-first).
            if increments_total > .5 * bak["size"]:
                return True
            # Also force a full backup once the last one is too old.
            expiry = dateutil.parser.parse(bak["date"]) + datetime.timedelta(days=config["min_age_in_days"]*10+1)
            return expiry < datetime.datetime.now(dateutil.tz.tzlocal())
        # Still walking the incremental chain: accumulate its size.
        increments_total += bak["size"]
    # No full backup exists yet (or there are no backups at all), so make one.
    return True
def get_passphrase(env):
    """Return the backup encryption passphrase.

    secret_key.txt holds 2048 random bits, base64-encoded with line
    breaks every 65 characters.  gpg only uses the first line of the
    file, so sanity-check that this line alone is long enough to be a
    reasonable passphrase: 43 base64 characters already match AES256's
    32-byte key length.
    """
    secret_key_path = os.path.join(env["STORAGE_ROOT"], 'backup', 'secret_key.txt')
    with open(secret_key_path) as keyfile:
        first_line = keyfile.readline().strip()
    if len(first_line) < 43:
        raise Exception("secret_key.txt's first line is too short!")
    return first_line
def get_env(env):
    """Build the environment-variable dict passed to duplicity subprocesses."""
    config = get_backup_config(env)
    duplicity_env = {"PASSPHRASE": get_passphrase(env)}
    # S3 targets additionally need the AWS credentials from the config.
    if get_target_type(config) == 's3':
        duplicity_env.update({
            "AWS_ACCESS_KEY_ID": config["target_user"],
            "AWS_SECRET_ACCESS_KEY": config["target_pass"],
        })
    return duplicity_env
def get_target_type(config):
    """Return the URL scheme of the backup target, e.g. 'file', 's3', 'off'."""
    scheme, _, _ = config["target"].partition(":")
    return scheme
def perform_backup(full_backup):
    """Run the backup: stop mail services, run duplicity, restart services.

    full_backup=True forces a full (rather than incremental) backup; a
    full backup may also be forced automatically by should_force_full().
    """
    env = load_environment()
    # Create a global exclusive lock so that the backup script
    # cannot be run more than once concurrently.
    Lock(die=True).forever()
    config = get_backup_config(env)
    backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')
    backup_cache_dir = os.path.join(backup_root, 'cache')
    backup_dir = os.path.join(backup_root, 'encrypted')
    # Are backups disabled?
    if config["target"] == "off":
        return
    # On the first run, always do a full backup. Incremental
    # will fail. Otherwise do a full backup when the size of
    # the increments since the most recent full backup are
    # large.
    try:
        full_backup = full_backup or should_force_full(config, env)
    except Exception as e:
        # This was the first call to duplicity, and there might
        # be an error already.
        print(e)
        sys.exit(1)
    # Stop services.
    def service_command(service, command, quit=None):
        # Execute silently, but if there is an error then display the output & exit.
        code, ret = shell('check_output', ["/usr/sbin/service", service, command], capture_stderr=True, trap=True)
        if code != 0:
            print(ret)
            if quit:
                sys.exit(code)
    # Stop the mail stack so the data on disk stays consistent while copied.
    service_command("php7.2-fpm", "stop", quit=True)
    service_command("postfix", "stop", quit=True)
    service_command("dovecot", "stop", quit=True)
    # Execute a pre-backup script that copies files outside the homedir.
    # Run as the STORAGE_USER user, not as root. Pass our settings in
    # environment variables so the script has access to STORAGE_ROOT.
    pre_script = os.path.join(backup_root, 'before-backup')
    if os.path.exists(pre_script):
        shell('check_call',
            ['su', env['STORAGE_USER'], '-c', pre_script, config["target"]],
            env=env)
    # Run a backup of STORAGE_ROOT (but excluding the backups themselves!).
    # --allow-source-mismatch is needed in case the box's hostname is changed
    # after the first backup. See #396.
    try:
        shell('check_call', [
            "/usr/bin/duplicity",
            "full" if full_backup else "incr",
            "--verbosity", "warning", "--no-print-statistics",
            "--archive-dir", backup_cache_dir,
            "--exclude", backup_root,
            "--volsize", "250",
            "--gpg-options", "--cipher-algo=AES256",
            env["STORAGE_ROOT"],
            config["target"],
            "--allow-source-mismatch"
            ] + rsync_ssh_options,
            get_env(env))
    finally:
        # Start services again (even if the backup failed).
        service_command("dovecot", "start", quit=False)
        service_command("postfix", "start", quit=False)
        service_command("php7.2-fpm", "start", quit=False)
    # Remove old backups. This deletes all backup data no longer needed
    # from more than 3 days ago.
    shell('check_call', [
        "/usr/bin/duplicity",
        "remove-older-than",
        "%dD" % config["min_age_in_days"],
        "--verbosity", "error",
        "--archive-dir", backup_cache_dir,
        "--force",
        config["target"]
        ] + rsync_ssh_options,
        get_env(env))
    # From duplicity's manual:
    # "This should only be necessary after a duplicity session fails or is
    # aborted prematurely."
    # That may be unlikely here but we may as well ensure we tidy up if
    # that does happen - it might just have been a poorly timed reboot.
    shell('check_call', [
        "/usr/bin/duplicity",
        "cleanup",
        "--verbosity", "error",
        "--archive-dir", backup_cache_dir,
        "--force",
        config["target"]
        ] + rsync_ssh_options,
        get_env(env))
    # Change ownership of backups to the user-data user, so that the after-backup
    # script can access them.
    if get_target_type(config) == 'file':
        shell('check_call', ["/bin/chown", "-R", env["STORAGE_USER"], backup_dir])
    # Execute a post-backup script that does the copying to a remote server.
    # Run as the STORAGE_USER user, not as root. Pass our settings in
    # environment variables so the script has access to STORAGE_ROOT.
    post_script = os.path.join(backup_root, 'after-backup')
    if os.path.exists(post_script):
        shell('check_call',
            ['su', env['STORAGE_USER'], '-c', post_script, config["target"]],
            env=env)
    # Our nightly cron job executes system status checks immediately after this
    # backup. Since it checks that dovecot and postfix are running, block for a
    # bit (maximum of 10 seconds each) to give each a chance to finish restarting
    # before the status checks might catch them down. See #381.
    wait_for_service(25, True, env, 10)
    wait_for_service(993, True, env, 10)
def run_duplicity_verification():
    """Verify backup integrity against the live data.

    Uses duplicity's 'verify --compare-data', which both checks that the
    backup files are readable and reports whether they are up to date.
    """
    env = load_environment()
    backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')
    config = get_backup_config(env)
    backup_cache_dir = os.path.join(backup_root, 'cache')
    shell('check_call', [
        "/usr/bin/duplicity",
        "--verbosity", "info",
        "verify",
        "--compare-data",
        "--archive-dir", backup_cache_dir,
        "--exclude", backup_root,
        config["target"],
        env["STORAGE_ROOT"],
    ] + rsync_ssh_options, get_env(env))
def run_duplicity_restore(args):
    """Run 'duplicity restore'; *args* are passed straight through to
    duplicity (typically the restore destination path and selection
    options)."""
    env = load_environment()
    config = get_backup_config(env)
    backup_cache_dir = os.path.join(env["STORAGE_ROOT"], 'backup', 'cache')
    shell('check_call', [
        "/usr/bin/duplicity",
        "restore",
        "--archive-dir", backup_cache_dir,
        config["target"],
        ] + rsync_ssh_options + args,
        get_env(env))
def list_target_files(config):
    """Return [(filename, size_in_bytes), ...] for the configured target.

    Supports file://, rsync://, s3:// and b2:// targets.  Raises
    ValueError with a human-readable message when the target is invalid
    or cannot be reached.
    """
    import urllib.parse
    try:
        target = urllib.parse.urlparse(config["target"])
    except ValueError:
        # Fix: previously this *returned* the string "invalid target";
        # callers iterate over (filename, size) tuples and backup_set_custom
        # catches ValueError, so raising is the consistent failure mode.
        raise ValueError("invalid target")
    if target.scheme == "file":
        return [(fn, os.path.getsize(os.path.join(target.path, fn))) for fn in os.listdir(target.path)]
    elif target.scheme == "rsync":
        # Parse "<perms> <size> <date> <time> <name>" lines from --list-only.
        rsync_fn_size_re = re.compile(r'.* ([^ ]*) [^ ]* [^ ]* (.*)')
        rsync_target = '{host}:{path}'
        # rsync wants the remote path relative and with a trailing slash.
        target_path = target.path
        if not target_path.endswith('/'):
            target_path = target_path + '/'
        if target_path.startswith('/'):
            target_path = target_path[1:]
        rsync_command = [ 'rsync',
                    '-e',
                    '/usr/bin/ssh -i /root/.ssh/id_rsa_miab -oStrictHostKeyChecking=no -oBatchMode=yes',
                    '--list-only',
                    '-r',
                    rsync_target.format(
                        host=target.netloc,
                        path=target_path)
                ]
        code, listing = shell('check_output', rsync_command, trap=True, capture_stderr=True)
        if code == 0:
            ret = []
            for l in listing.split('\n'):
                match = rsync_fn_size_re.match(l)
                if match:
                    ret.append( (match.groups()[1], int(match.groups()[0].replace(',',''))) )
            return ret
        else:
            # Translate the most common rsync failures into friendly messages.
            if 'Permission denied (publickey).' in listing:
                reason = "Invalid user or check you correctly copied the SSH key."
            elif 'No such file or directory' in listing:
                reason = "Provided path {} is invalid.".format(target_path)
            elif 'Network is unreachable' in listing:
                reason = "The IP address {} is unreachable.".format(target.hostname)
            elif 'Could not resolve hostname' in listing:
                reason = "The hostname {} cannot be resolved.".format(target.hostname)
            else:
                reason = "Unknown error." \
                    "Please check running 'management/backup.py --verify'" \
                    "from mailinabox sources to debug the issue."
            raise ValueError("Connection to rsync host failed: {}".format(reason))
    elif target.scheme == "s3":
        # match to a Region
        fix_boto() # must call prior to importing boto
        import boto.s3
        from boto.exception import BotoServerError
        custom_region = False
        for region in boto.s3.regions():
            if region.endpoint == target.hostname:
                break
        else:
            # If region is not found this is a custom region
            custom_region = True
        bucket = target.path[1:].split('/')[0]
        path = '/'.join(target.path[1:].split('/')[1:]) + '/'
        # Create a custom region with custom endpoint
        if custom_region:
            from boto.s3.connection import S3Connection
            region = boto.s3.S3RegionInfo(name=bucket, endpoint=target.hostname, connection_cls=S3Connection)
        # If no prefix is specified, set the path to '', otherwise boto won't list the files
        if path == '/':
            path = ''
        if bucket == "":
            raise ValueError("Enter an S3 bucket name.")
        # connect to the region & bucket
        try:
            conn = region.connect(aws_access_key_id=config["target_user"], aws_secret_access_key=config["target_pass"])
            bucket = conn.get_bucket(bucket)
        except BotoServerError as e:
            if e.status == 403:
                raise ValueError("Invalid S3 access key or secret access key.")
            elif e.status == 404:
                raise ValueError("Invalid S3 bucket name.")
            elif e.status == 301:
                raise ValueError("Incorrect region for this bucket.")
            raise ValueError(e.reason)
        return [(key.name[len(path):], key.size) for key in bucket.list(prefix=path)]
    elif target.scheme == 'b2':
        from b2sdk.v1 import InMemoryAccountInfo, B2Api
        from b2sdk.v1.exception import NonExistentBucket
        info = InMemoryAccountInfo()
        b2_api = B2Api(info)
        # The netloc encodes "applicationKeyId:applicationKey@bucket".
        b2_application_keyid = target.netloc[:target.netloc.index(':')]
        b2_application_key = target.netloc[target.netloc.index(':')+1:target.netloc.index('@')]
        b2_bucket = target.netloc[target.netloc.index('@')+1:]
        try:
            b2_api.authorize_account("production", b2_application_keyid, b2_application_key)
            bucket = b2_api.get_bucket_by_name(b2_bucket)
        except NonExistentBucket:
            raise ValueError("B2 Bucket does not exist. Please double check your information!")
        return [(key.file_name, key.size) for key, _ in bucket.ls()]
    else:
        raise ValueError(config["target"])
def backup_set_custom(env, target, target_user, target_pass, min_age):
    """Update the backup settings and persist them.

    Validates network targets by listing them first.  Returns "OK" on
    success or an error message string on failure.
    """
    config = get_backup_config(env, for_save=True)
    # The control panel may hand us min_age as a string; coerce to int.
    if isinstance(min_age, str):
        min_age = int(min_age)
    config.update({
        "target": target,
        "target_user": target_user,
        "target_pass": target_pass,
        "min_age_in_days": min_age,
    })
    # Validate. "off"/"local" aren't full URLs and are skipped because
    # list_target_files expects a complete URL in the target key.
    if target not in ("off", "local"):
        try:
            list_target_files(config)
        except ValueError as e:
            return str(e)
    write_backup_config(env, config)
    return "OK"
def get_backup_config(env, for_save=False, for_ui=False):
    """Load the backup configuration, merging custom.yaml over defaults.

    for_save: return the raw merged config suitable for writing back.
    for_ui:   strip credentials the admin panel must not see.
    Otherwise the 'local' target is expanded to its full file:// URL and
    the rsync public key is attached when available.
    """
    backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')
    # Defaults.
    config = {
        "min_age_in_days": 3,
        "target": "local",
    }
    # Merge in anything written to custom.yaml. A missing or malformed file
    # simply means the defaults are used. (Fix: the previous bare 'except:'
    # also swallowed KeyboardInterrupt/SystemExit, and the file handle was
    # never closed.)
    try:
        with open(os.path.join(backup_root, 'custom.yaml')) as f:
            custom_config = rtyaml.load(f)
        if not isinstance(custom_config, dict): raise ValueError() # caught below
        config.update(custom_config)
    except Exception:
        pass
    # When updating config.yaml, don't do any further processing on what we find.
    if for_save:
        return config
    # When passing this back to the admin to show the current settings, do not include
    # authentication details. The user will have to re-enter them.
    if for_ui:
        for field in ("target_user", "target_pass"):
            if field in config:
                del config[field]
    # Helper fields for the admin, also used to expand the 'local' target
    # below. (Fix: these were set only inside the for_ui branch, so a plain
    # get_backup_config(env) call with the default 'local' target raised
    # KeyError on config["file_target_directory"].)
    config["file_target_directory"] = os.path.join(backup_root, 'encrypted')
    config["enc_pw_file"] = os.path.join(backup_root, 'secret_key.txt')
    if config["target"] == "local":
        # Expand to the full URL.
        config["target"] = "file://" + config["file_target_directory"]
    ssh_pub_key = os.path.join('/root', '.ssh', 'id_rsa_miab.pub')
    if os.path.exists(ssh_pub_key):
        with open(ssh_pub_key, 'r') as f:
            config["ssh_pub_key"] = f.read()
    return config
def write_backup_config(env, newconfig):
    """Persist *newconfig* as YAML to STORAGE_ROOT/backup/custom.yaml."""
    backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')
    with open(os.path.join(backup_root, 'custom.yaml'), "w") as f:
        f.write(rtyaml.dump(newconfig))
if __name__ == "__main__":
    # Simple command-line dispatch based on the arguments.
    import sys
    if sys.argv[-1] == "--verify":
        # Run duplicity's verification command to check a) the backup files
        # are readable, and b) report if they are up to date.
        run_duplicity_verification()
    elif sys.argv[-1] == "--list":
        # List the saved backup files.
        for fn, size in list_target_files(get_backup_config(load_environment())):
            print("{}\t{}".format(fn, size))
    elif sys.argv[-1] == "--status":
        # Show backup status.
        ret = backup_status(load_environment())
        print(rtyaml.dump(ret["backups"]))
        print("Storage for unmatched files:", ret["unmatched_file_size"])
    elif len(sys.argv) >= 2 and sys.argv[1] == "--restore":
        # Run duplicity restore. Rest of command line passed as arguments
        # to duplicity. The restore path should be specified.
        run_duplicity_restore(sys.argv[2:])
    else:
        # Perform a backup. Add --full to force a full backup rather than
        # possibly performing an incremental backup.
        full_backup = "--full" in sys.argv
        perform_backup(full_backup)
| mail-in-a-box/mailinabox | management/backup.py | Python | cc0-1.0 | 20,895 |
"""
WSGI config for admin_readonly_model project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at this project's settings module before the app is built.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "admin_readonly_model.settings")
# The WSGI callable used by application servers (gunicorn, uWSGI, mod_wsgi).
application = get_wsgi_application()
| mozillazg/django-simple-projects | projects/admin_readonly_model/admin_readonly_model/wsgi.py | Python | mit | 417 |
#! /usr/bin/env python
# kicad2protel: kicad gerber output normalizer
# Copyright (C) 2015 Jeff Ciesielski <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
import sys
import shutil
import copy
import argparse
import logging
import operator
import zipfile
# NOTE(review): this empty debug call emits nothing and looks like leftover
# scaffolding -- confirm it can be removed.
logging.debug("")
# Module-level logger used for progress/info messages throughout.
_log = logging.getLogger('kicad2protel')
_log.setLevel(logging.INFO)
# NOTE: All excellon info gleaned from :
# http://web.archive.org/web/20071030075236/http://www.excellon.com/manuals/program.htm
# Map from KiCad plot-output file suffixes to Protel-style extensions.
# Covers both the newer "-Layer.gbr" naming and the older per-layer
# lowercase extensions.
gerber_extension_map = {
    '-F_SilkS.gbr': '.GTO',
    '-F_Mask.gbr': '.GTS',
    '-F_Cu.gbr': '.GTL',
    '-B_Cu.gbr': '.GBL',
    '-B_Mask.gbr': '.GBS',
    '-B_SilkS.gbr': '.GBO',
    '-Edge_Cuts.gbr': '.GML',
    '-In1_Cu.gbr': '.G1',
    '-In2_Cu.gbr': '.G2',
    '-F_SilkS.gto': '.GTO',
    '-F_Mask.gts': '.GTS',
    '-F_Cu.gtl': '.GTL',
    '-B_Cu.gbl': '.GBL',
    '-B_Mask.gbs': '.GBS',
    '-B_SilkS.gbo': '.GBO',
    '-Edge_Cuts.gml': '.GML',
    # NOTE(review): In1 is paired with the ".g2" extension and In2 with
    # ".g3" -- this looks off by one relative to the ".gbr" entries above;
    # confirm against the KiCad versions this should support.
    '-In1_Cu.g2': '.G1',
    '-In2_Cu.g3': '.G2',
}
class IncompatibleInstructionException(Exception):
    """Raised when two Excellon files cannot be merged, e.g. because their
    headers use differing measurement modes."""
class ExcellonHeader(object):
    """Parsed M48 header of an Excellon drill file: units + tool table."""
    def __init__(self, header_lines):
        # self.tools maps tool id (e.g. "T1") -> ExcellonTool.
        self.tools = {}
        self._process(header_lines)
    def _process(self, lines):
        # Dispatch each header line on its leading keyword by growing a
        # prefix until it matches a handler key.
        _handler_map = {
            'T': self._handle_tool,
            'INCH': self._handle_measurement,
            'METRIC': self._handle_measurement
        }
        for line in lines:
            for pos in range(len(line)):
                if line[:pos] in _handler_map:
                    _handler_map[line[:pos]](line)
                    break
    def _handle_tool(self, line):
        # Tool definition line, e.g. "T1C0.0394".
        t = ExcellonTool(line)
        self.tools[t.tool_id] = t
    def _handle_measurement(self, line):
        # e.g. "INCH,LZ" -> measurement mode and zero-suppression style.
        self.meas_mode, self.zero_style = line.strip().split(',')
    def dumps(self):
        # Serialize back to an Excellon header block (M48 ... %).
        return '\n'.join([
            'M48',
            '{},{}'.format(self.meas_mode, self.zero_style),
            '\n'.join([x.dumps() for x in self.tool_list]),
            '%'
        ])
    def __str__(self):
        # Human-readable summary for debugging.
        return '\n'.join([
            20*'-',
            'Excellon Header',
            'Measurement Mode:{}'.format(self.meas_mode),
            'Zero Style: {}'.format(self.zero_style),
            'Tools:',
            ' -{}'.format('\n -'.join(['{}:{}'.format(x.tool_id, x.diameter)
                                       for x in self.tool_list])),
            ''
        ])
    @property
    def tool_list(self):
        # Tools ordered by id string (note: lexicographic, so 'T10' < 'T2').
        return [self.tools[x] for x in sorted(self.tools)]
    def optimize(self):
        # Merge tools with identical diameters and renumber from T1.
        # Returns a dict mapping old tool ids to the new ids.
        drill_map = {}
        for t in self.tool_list:
            if not t.diameter in drill_map:
                drill_map[t.diameter] = []
            drill_map[t.diameter].append(t)
        tool_remap = {}
        new_tools = {}
        for idx, d in enumerate(sorted(drill_map)):
            tool_id = 'T{}'.format(idx + 1)
            new_tools[tool_id] = copy.deepcopy(drill_map[d][0])
            new_tools[tool_id].tool_id = tool_id
            for tool in drill_map[d]:
                tool_remap[tool.tool_id] = tool_id
        self.tools = new_tools
        return tool_remap
    def __add__(self, other):
        # Merge: other's tools are copied in and renumbered after ours.
        if not isinstance(other, ExcellonHeader):
            raise ValueError('Cannot add ExcellonHeader and {}'.format(type(other)))
        # TODO: Not sure how to handle this for now, bail out
        if not self.meas_mode == other.meas_mode:
            raise IncompatibleInstructionException(
                '\n'.join([
                    'Cannot merge due to differing measurement modes:',
                    ' F1: {}'.format(self.meas_mode),
                    ' F2: {}'.format(other.meas_mode)
                ])
            )
        # Create a working copy of self
        wh = copy.deepcopy(self)
        # Move over and rename all tools from the 'other' instance
        for t in sorted(other.tools):
            t_copy = copy.deepcopy(other.tools[t])
            t_idx = 'T{}'.format(len(wh.tools) + 1)
            t_copy.tool_id = t_idx
            wh.tools[t_idx] = t_copy
        return wh
class ExcellonTool(object):
    """A single Excellon tool definition, e.g. "T1C0.0394".

    Split into a tool id ("T1") and a diameter string ("0.0394").
    Tools hash and compare equal on diameter, and order by numeric
    diameter.
    """
    def __init__(self, tooldefstr):
        # .index() raises ValueError for malformed definitions lacking 'C'.
        split_at = tooldefstr.index('C')
        self.tool_id = tooldefstr[:split_at]
        self.diameter = tooldefstr[split_at + 1:]
    def __eq__(self, other):
        return self.diameter == other.diameter
    def __hash__(self):
        return hash(self.diameter)
    def __lt__(self, other):
        return float(self.diameter) < float(other.diameter)
    def dumps(self):
        """Serialize back to the Excellon tool-definition form."""
        return self.tool_id + 'C' + self.diameter
class InvalidToolException(Exception):
    """Raised when a drill command selects a tool absent from the header."""
class InvalidToolpathException(Exception):
    """Raised when a coordinate appears before any tool was selected."""
class ExcellonDrillInstr(object):
    """An Excellon drill file: parsed header plus per-tool coordinates."""
    def __init__(self, filepath):
        # Keep only non-blank, stripped lines.
        # NOTE(review): the file handle from open() is never explicitly
        # closed -- consider a 'with' block.
        self._lines = [x.strip() for x in open(filepath).readlines() if len(x.strip())]
        self._toolpaths = {}
        self._current_tool = None
        sidx, eidx = self._get_header_bounds()
        self.header = ExcellonHeader(self._lines[sidx:eidx])
        # One (initially empty) coordinate list per tool in the header.
        for tool in self.header.tools:
            self._toolpaths[tool] = []
        sidx, eidx = self._get_body_bounds()
        self._process(self._lines[sidx:eidx])
    def _get_header_bounds(self):
        # Header runs from just after "M48" up to the "%" separator.
        return self._lines.index('M48') + 1, self._lines.index('%')
    def _get_body_bounds(self):
        # Body runs from just after "%" up to the "M30" end-of-program code.
        return self._lines.index('%') + 1, self._lines.index('M30')
    def _handle_tool(self, line):
        # Tool-select command; "T0" (deselect) is always accepted.
        tool = line.strip()
        if tool not in self.header.tools and not tool == 'T0':
            raise InvalidToolException('Unknown tool: {}'.format(tool))
        self._current_tool = tool
    def _handle_coord(self, line):
        # X/Y drill coordinate; only valid after a tool has been selected.
        if not self._current_tool:
            raise InvalidToolpathException('No Tool selected')
        self._toolpaths[self._current_tool].append(line.strip())
    def _process(self, lines):
        # Same prefix-dispatch technique as ExcellonHeader._process.
        _handler_map = {
            'T': self._handle_tool,
            'X': self._handle_coord,
        }
        self._current_tool = None
        for line in lines:
            for pos in range(len(line)):
                if line[:pos] in _handler_map:
                    _handler_map[line[:pos]](line)
                    break
    def _dumps_toolpaths(self):
        # "T<n>" line followed by that tool's coordinates, per tool.
        rs = ''
        for t in sorted(self._toolpaths):
            rs += '\n{}\n'.format(t)
            rs += '\n'.join(self._toolpaths[t])
        # Return the slice to strip off the leading newline
        return rs[1:]
    def dumps(self):
        """Serialize the whole drill program back to Excellon format."""
        _meas_mode_map = {
            'INCH':'M72',
            'METRIC':'M71',
        }
        return '\n'.join([
            self.header.dumps(),
            'G90', #absolute mode
            'G05', #drill mode
            _meas_mode_map[self.header.meas_mode], # Metric or Inch mode
            self._dumps_toolpaths(),
            'T0',
            'M30',
        ])
    def __add__(self, other):
        """Merge two drill files into one with a deduplicated tool table."""
        if not isinstance(other, ExcellonDrillInstr):
            raise ValueError('Cannot add ExcellonDrillInstr and {}'.format(type(other)))
        # Create a working instruction set
        wi = copy.deepcopy(self)
        # First, Add the toolpaths together, renumbering starting from
        # the end of our tool numbering
        tp_len = len(self._toolpaths)
        for idx, tp_id in enumerate(sorted(other._toolpaths)):
            tp_idx = tp_len + idx + 1
            new_tp_id = 'T{}'.format(tp_idx)
            wi._toolpaths[new_tp_id] = other._toolpaths[tp_id][:]
        # Now, combine the headers
        wi.header += other.header
        # Optimize the header and get the new mapping
        tool_remap = wi.header.optimize()
        # Now, remap our toolpath
        new_toolpaths = {}
        for tp in sorted(wi._toolpaths):
            if not tool_remap[tp] in new_toolpaths:
                new_toolpaths[tool_remap[tp]] = wi._toolpaths[tp][:]
            else:
                new_toolpaths[tool_remap[tp]].extend(wi._toolpaths[tp][:])
        wi._toolpaths = new_toolpaths
        return wi
# http://stackoverflow.com/questions/1855095/how-to-create-a-zip-archive-of-a-directory
def zipdir(path, ziph):
    """Recursively add every file under *path* to the open ZipFile *ziph*."""
    for dirpath, _dirnames, filenames in os.walk(path):
        for fname in filenames:
            ziph.write(os.path.join(dirpath, fname))
if __name__ == '__main__':
main()
def main():
    """Convert a directory of KiCad plot output to Protel-style names.

    Gerber files are renamed per the module-level gerber_extension_map;
    Excellon .drl files (plated and -NPTH non-plated) sharing a base name
    are merged into one optimized <base>.txt; optionally the output
    directory is zipped and removed.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--in_dir', '-i',
                            help='Directory containing KiCad plot output',
                            required=True)
    arg_parser.add_argument('--out_dir', '-o',
                            help='Directory to store newly created files. '
                            '(Will be created automatically if nonexistant)',
                            required=True)
    arg_parser.add_argument('--zip', '-z', help="Create zip archive of new files", default=False, action='store_true')
    args = arg_parser.parse_args()
    if not os.path.isdir(args.out_dir):
        os.makedirs(args.out_dir)
    # base drill-file name -> list of ExcellonDrillInstr to merge
    drill_files = {}
    for filename in os.listdir(args.in_dir):
        print "Testing:", filename
        # Copy gerbers across under their mapped (Protel) extension.
        for ext in gerber_extension_map:
            if filename[-len(ext):] == ext:
                new_name = filename[:-len(ext)] + gerber_extension_map[ext]
                shutil.copy(os.path.join(args.in_dir, filename), os.path.join(args.out_dir, new_name))
                _log.info('{} converted to: {}'.format(filename, new_name))
        if filename[-4:] == '.drl':
            # Strip the longer -NPTH suffix first so plated and non-plated
            # drills for the same board share one base_name bucket.
            if '-NPTH.drl' in filename:
                ext = '-NPTH.drl'
            else:
                ext = '.drl'
            _log.info('Processing Excellon File: {}'.format(filename))
            base_name = filename[:-len(ext)]
            di = ExcellonDrillInstr(os.path.join(args.in_dir, filename))
            if not base_name in drill_files:
                drill_files[base_name] = []
            drill_files[base_name].append(di)
    # For each drill file in the project, create a combined optimized
    # version and dump it in the new directory
    for dfile, excellon_objs in drill_files.items():
        output_file = os.path.join(args.out_dir, '{}.txt'.format(dfile))
        # ExcellonDrillInstr.__add__ merges and renumbers toolpaths.
        cmb_exc = reduce(operator.add, excellon_objs)
        with open(output_file, 'w') as fhandle:
            fhandle.write(cmb_exc.dumps())
    if args.zip:
        with zipfile.ZipFile('{}.zip'.format(args.out_dir), 'w') as zf:
            zipdir(args.out_dir, zf)
        # Once zipped, the loose output directory is discarded.
        shutil.rmtree(args.out_dir)
| Jeff-Ciesielski/kicad2protel | kicad2protel.py | Python | gpl-2.0 | 11,099 |
#!/bin/env python
import os
from setuptools import setup
def find_xdg_data_files(syspath, relativepath, pkgname, data_files=None):
    """Collect installable data files found under *relativepath*.

    Walks *relativepath* and, for every directory that contains files,
    appends an ``(install_dir, [source_files])`` tuple to *data_files*,
    where *install_dir* is *syspath* with ``{pkgname}`` substituted plus
    the directory's path relative to *relativepath*.

    Returns the (possibly freshly created) *data_files* list.
    """
    # Fresh list per call: the old mutable default argument ([]) was shared
    # across calls and accumulated entries from previous invocations.
    if data_files is None:
        data_files = []
    # Loop-invariant: substitute the package name once, up front.
    syspath = syspath.format(pkgname=pkgname)
    for dirname, _, filenames in os.walk(relativepath):
        if not filenames:
            continue
        subpath = dirname.split(relativepath)[1]
        if subpath.startswith("/"):
            subpath = subpath[1:]
        files = [os.path.join(dirname, f) for f in filenames]
        data_files.append((os.path.join(syspath, subpath), files))
    return data_files
def find_data_files(data_map, pkgname):
    """Expand every (install-path-template, source-dir) pair in *data_map*
    into the flat data_files list expected by setuptools."""
    collected = []
    for syspath, relativepath in data_map:
        find_xdg_data_files(syspath, relativepath, pkgname, collected)
    return collected
# (install-path template, source directory) pairs consumed by
# find_data_files(); "{pkgname}" is substituted with the package name.
DATA_FILES = [
    ("share/{pkgname}/plugins", "data/plugins"),
]
# Read the long description up front so the file handle is closed
# deterministically instead of being leaked by an inline open().read().
with open("README.md") as readme_file:
    long_description = readme_file.read()

setup(
    author="Elio Esteves Duarte",
    author_email="[email protected]",
    description="Tomate plugin that shows screen notifications.",
    include_package_data=True,
    keywords="pomodoro,timer",
    license="GPL-3",
    long_description=long_description,
    name="tomate-notify-plugin",
    data_files=find_data_files(DATA_FILES, "tomate"),
    url="https://github.com/eliostvs/tomate-notify-plugin",
    version="0.14.0",
    zip_safe=False,
)
| eliostvs/tomate-notify-plugin | setup.py | Python | gpl-3.0 | 1,311 |
# 1-example.py -- Examples which rely on accessories
# (and therefore also on xutils)
#
# Copyright 2008 Alberto Milone <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
from accessories import *
import os
def main():
    '''
    Replace the first line of this example with a source and a destination file
    '''
    destination = os.path.join(os.path.expanduser('~'), 'xorgnew.txt')
    # NOTE(review): source is None here; presumably Accessories(None) falls
    # back to a default xorg.conf -- confirm in the accessories module.
    source = None
    a = Accessories(source)
    '''
    Remove the Defaultdepth from all the Screen sections
    '''
    a.removeOption('Screen', 'DefaultDepth')
    '''
    Remove the Defaultdepth from the 1st Screen section
    '''
    a.removeOption('Screen', 'DefaultDepth', position=0)
    '''
    Set the Defaultdepth to 24 bit in all sections
    '''
    a.addOption('Screen', 'Defaultdepth', '24', prefix='')
    '''
    Set the Defaultdepth to 24 bit in the 1st Screen section
    '''
    a.addOption('Screen', 'Defaultdepth', '24', position=0, prefix='')
    '''
    Get the value assigned to the AddARGBGLXVisuals option in the 1st Screen section
    '''
    # Python 2 "except Class, name" syntax below; prints the error instead
    # of aborting when the option is absent.
    try:
        print 'AddARGBGLXVisuals', a.getValue('Screen', 'AddARGBGLXVisuals', position=1, identifier='Display')
    except OptionNotAvailableException, e:
        print 'Error:', e
    '''
    Add an Option (with the Option prefix) to the 1st Screen section
    '''
    a.addOption('Screen', 'AddARGBGLXVisuals', 'True', optiontype='Option', position=0)
    '''
    Print the contents of all the Device sections
    '''
    a.printSection('Device')
    '''
    Print the global dict i.e. the dict which contains all sections
    '''
    print '\nGlobal dict is the dict which contains all sections\n', a.globaldict
    #print '\nGlobal iters is\n', a.globaliters
    '''
    Make a new Device and a Screen section with an identifier
    '''
    a.makeSection('Device', identifier='Name of this new Device Section')
    a.makeSection('Screen', identifier='Default Screen')
    '''
    Add a reference to the Screen section identified as "New Default Screen" to
    the 1st ServerLayout section
    '''
    a.addReference('ServerLayout', 'Screen', 'New Default Screen')
    '''
    Add a reference to the Screen section identified as "New Default Screen" to
    all the ServerLayout sections
    '''
    a.addReference('ServerLayout', 'Screen', 'New Default Screen', position=0)
    '''
    Remove a reference to the Screen section identified as "New Default Screen"
    from all the ServerLayout sections
    '''
    a.removeReference('ServerLayout', 'Screen', 'New Default Screen')#, position=0)
    '''
    Create a new "Display" SubSection inside all the Screen sections
    '''
    a.makeSubSection('Screen', 'Display')#, position=0)
    '''
    Remove a "Display" SubSection inside all the Screen sections
    '''
    #a.removeSubSection('Screen', 'Display')#, position=0)
    '''
    Add an option to the Display subsection of the 1st Screen section
    '''
    a.addSubOption('Screen', 'Display', 'Depth', value='24', position=0, prefix='')
    a.addSubOption('Screen', 'Display', 'Virtual', value='1600 1200', position=0)
    a.addSubOption('Screen', 'Display', 'Name', value='Whatever', optiontype='Option', position=None)
    '''
    Remove options from the Display subsection of all or of the 1st Screen section
    '''
    a.removeSubOption('Screen', 'Display', 'Depth')
    # NOTE(review): 'Martin' looks like a typo for 'Display' (every other
    # SubOption call here uses 'Display') -- confirm before relying on it.
    a.removeSubOption('Screen', 'Martin', 'Virtual', position=0)
    '''
    Get the identifier of the 1st Device section
    '''
    print a.getValue('Device', 'Identifier', 0)
    #print a.getValue('SubSection', 'Name', position=0, identifier='Display', sect='Screen')
    '''
    Set the driver of the 1st Device section
    '''
    a.setDriver('Device', 'fbdev', 0)
    '''
    Get the driver of the 1st Device section
    '''
    print a.getDriver('Device', 0)
    a.makeSection('Screen', 'New Screen')
    a.makeSection('Screen', 'New Screen')#this new section won't be created
    a.setDefaultDepth(24, 0)
    print a.getDefaultDepth(0)
    '''
    Create a new device section
    add a new option to it
    and make a reference to it in the Screen section
    '''
    dev = a.makeSection('Device', 'My Device')
    a.addOption('Device', 'BusID', 'PCI:1:0:0', position=dev)
    a.addReference('Screen', 'Device', 'My Device', position=0)
    a.addReference('Device', 'Screen', 4, position=0)
    print a.getReferences('Screen', 0, reflist=['Device'])
    a.enableComposite()
    a.addArgbGlxVisuals(0)
    print 'Virtual', a.getValue('SubSection', 'Virtual', position=0, identifier='Display', sect='Screen')
    print 'Modes', a.getValue('SubSection', 'Modes', position=0, identifier='Display', sect='Screen')
    '''
    Get the identifier of the first Device section
    '''
    print 'ID of the 1st Device Section =', a.getIdentifier('Device', 0)
    '''
    Get the position of the Device section identified as 'Configured Video Device'
    '''
    try:
        print 'Position of "Configured Video Device" =', a.getPosition('Device', 'Configured Video Device')
    except IdentifierException, e:
        print e
    '''
    See if a section exists
    '''
    print 'Section Device "Configured Video Device" exists =', a.isSection('Device', 'Configured Video Device')
    print 'Section Device "Whatever" exists =', a.isSection('Device', 'Whatever')
    '''
    Create a new Device section and print the list of identifiers so as to see
    that the new identifier and position are included in identifiers
    '''
    a.makeSection('Device', identifier='New Graphics Card')
    a.makeSection('Screen', identifier='New Screeeeeeeeeen')
    print '\nIdentifiers after creating a new device section', a.identifiers
    print '\nCreate Broken Screen section'
    pos = a.makeSection('Screen', identifier='Broken Screen Section')
    print '\nAdding References'
    a.addReference('Screen', 'Monitor', 'Broken Monitor Section', position=pos)
    a.addReference('Screen', 'Device', 'Broken Device Section', position=pos)
    '''
    Try to fix section with broken references
    '''
    a.checkNFixSection('Screen', identifier='Broken Screen Section')
    '''
    Write the changes to the destination file
    '''
    # Nothing touches the filesystem until this point.
    a.writeFile(destination)
if __name__ == '__main__': main()
| tseliot/XKit | examples/1-example.py | Python | gpl-2.0 | 7,180 |
# -*- coding: utf-8 -*-
import os
import re
import urllib
import requests
from bs4 import BeautifulSoup
import simplejson as json
class zamunda():
    """Client/scraper for the zamunda.net torrent tracker.

    Logs in with the given credentials (or reuses a cookie session cached
    in a 'sesion' file under *path*), lists categories, parses browse and
    search result pages, and resolves magnet links. Python 2 code
    (print statements, basestring).
    """
    def __init__(
            self,
            xxx,
            base_url,
            usr,
            passwd,
            path,
            baud = 0,
            bsub = 0,
            dbg = False,
        ):
        # xxx: include adult categories when True; bsub/baud: Bulgarian
        # subtitle/audio filter flags sent with searches; dbg: verbose
        # logging plus raw page dumps.
        self.__usr = usr
        self.__pass = passwd
        self.__s = requests.Session()
        self.__base_url = base_url
        self.__bsub = bsub
        self.__baud = baud
        self.__dbg = dbg
        # Extra trackers appended to every resolved magnet link.
        self.__CUSTOM_TRACKERS = (
            ('tr', 'http://tracker.zamunda.net/announce.php?passkey=95b29926c5b7adab4a133cfc490ed0aa'),
            ('tr', 'http://tracker.zamunda.net/announce.php?passkey=92149dad63bdd68fedffcd44d27209dc'),
            ('tr', 'http://flashtorrents.org/announce.php'),
            ('tr', 'http://94.228.192.98/announce'),
            ('tr', 'udp://9.rarbg.com:2710/announce')
        )
        # Site category ids paired with display names.
        self.__categories = [
            {'cat_ids':'5','cat_name':u'HD Movies'},
            {'cat_ids':'31','cat_name':u'Science Movies'},
            {'cat_ids':'28','cat_name':u'Russian Movies'},
            {'cat_ids':'24','cat_name':u'Bg Movies'},
            {'cat_ids':'33','cat_name':u'HD Series'},
            {'cat_ids':'7','cat_name':u'Series'},
            {'cat_ids':'43','cat_name':u'HD Sport'},
            {'cat_ids':'41','cat_name':u'Sport'},
            {'cat_ids':'35','cat_name':u'Movies x264'},
            {'cat_ids':'19','cat_name':u'Movies XviD'},
            # {'cat_ids':'20','cat_name':u'Movies DVD-R'},
            # {'cat_ids':'23','cat_name':u'Clips Concerts'},
            # {'cat_ids':'29','cat_name':u'Music DVD-R'},
        ]
        self.__HEADERS = {
            'Host' : self.__base_url.split('//')[1],
            'User-Agent' : 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:32.0) Gecko/20100101 Firefox/32.0'
        }
        if xxx == True:
            self.__categories += [
                {'cat_ids':'9','cat_name':u'XXX'},
                {'cat_ids':'49','cat_name':u'HD XXX'}
            ]
        self.__ids = [d['cat_ids'] for d in self.__categories]
        self.__s_path = os.path.join(path, '', 'sesion')
        if not os.path.exists(os.path.dirname(self.__s_path)):
            os.makedirs(os.path.dirname(self.__s_path))
        if not os.path.exists(self.__s_path):
            # No cached cookies yet: perform a fresh login and persist the
            # cookie jar as JSON for later runs.
            self.__log('Use Login')
            self.__do_login()
            with open(self.__s_path, 'wb') as f:
                json.dump(
                    requests.utils.dict_from_cookiejar(self.__s.cookies),
                    f,
                    sort_keys=True,
                    indent=True,
                    encoding='utf-8',
                )
        else:
            # Reuse the previously saved cookie session.
            self.__log('Use Sessinon')
            with open(self.__s_path, 'rb') as f:
                self.__s.cookies = requests.utils.cookiejar_from_dict(json.load(f))
    def __del__(self):
        # Best-effort logout on destruction; only done when _use_log was set
        # by a fresh login in this process (cached sessions stay logged in).
        if getattr(self, '_use_log', None):
            self.__do_logout()
        self.__log('Exit')
        self.__s.close()
    def __log(self, msg):
        # Debug-only console logging; unicode is encoded for the terminal.
        if self.__dbg:
            if isinstance(msg, basestring):
                print msg.encode('utf-8')
            else:
                print str(msg)
    def __do_logout(self):
        self.__log('Logout')
        self.__s.get('%s/logout.php' % self.__base_url, headers = self.__HEADERS)
    def __do_login(self):
        # A successful login page echoes the username back; use that as the
        # success check rather than the HTTP status alone.
        r = self.__s.post('%s/takelogin.php' % self.__base_url, data={'username' : self.__usr, 'password' : self.__pass}, headers = self.__HEADERS)
        if r.status_code == requests.codes.ok and re.search(self.__usr, r.text, re.IGNORECASE):
            self.__log('Login OK')
            self._use_log = True
            return True
        else:
            self.__log('Login Error')
            raise Exception("LoginFail")
    def __info_get(self, txt):
        # Parse the Tip('...') tooltip markup into a thumbnail URL and a
        # whitespace-normalized plot/info string.
        if txt:
            txt = re.sub(r"Tip\('|\\'|'\)", '', txt)
            ff = BeautifulSoup(txt, 'html5lib')
            return {'img': ff.find('img').get('src'),'info': re.sub( r'(?:\s+)', ' ', ff.get_text(' '))}
        else:
            return {'img': 'DefaultVideo.png', 'info': ''}
    def __claean_name(self, n):
        # Strip trailing "[...]" tags and everything after " / " from a
        # torrent title.
        n_sub = [
            r'\s\[.*?\]',
            r'\s\/\s.*',
        ]
        self.__log(n)
        for _sub in n_sub:
            n = re.sub(_sub, '', n)
        self.__log(n)
        return n
    def index(self):
        # Yield the static menu entries followed by one entry per category.
        yield {'label': u'Search', 'cat': '0', 'page': 0, 'search': '!search!', 'is_playable': 'False'}
        yield {'label': u'Browse latest', 'cat': '0', 'page': 0, 'search': '!none!', 'is_playable': 'False'}
        for cat in self.__categories:
            yield {'label': cat['cat_name'],'cat': cat['cat_ids'], 'page': 0, 'search': '!none!', 'is_playable': 'False'}
    def page(self, page, cat, search=None):
        """Yield one item dict per torrent on the given listing page, plus a
        trailing 'next page' item when the site reports more results."""
        fnd = {
            'search': '',
            'incldead': '0',
            'in': 'name',
        }
        fnd['page'] = page
        fnd['cat'] = cat
        fnd['bgsubs'] = self.__bsub
        fnd['bgaudio'] = self.__baud
        if search != '!none!' and search != '!search!':
            # The site expects the query in Windows-1251 encoding.
            fnd['search'] = search.encode('cp1251')
        self.__log('Search string: %s' % (search,))
        self.__log('Payload: %s' % (str(fnd),))
        r = self.__s.get('%s/bananas' % self.__base_url, params=fnd, headers = self.__HEADERS)
        if self.__dbg:
            with open(os.path.join(os.path.dirname(self.__s_path), '', 'dump.html'), 'wb') as f:
                f.write(r.text.encode('utf-8'))
        if r.status_code != requests.codes.ok:
            self.__log('Page Error')
            raise Exception("PageError")
        if not re.search(self.__usr, r.text, re.IGNORECASE):
            # Username missing from the page means the cached session died:
            # drop the session file so the next run logs in again.
            if os.path.exists(self.__s_path):
                os.remove(self.__s_path)
            self.__log('Sesion Error')
            raise Exception("SesionError")
        sp = BeautifulSoup(r.text, 'html5lib')
        #for link in sp.findAll('a', href=re.compile(r'\/download_go\.php\?id=\d+&m=x')):
        for link in sp.findAll('a', href=re.compile(r'\/magnetlink\/*')):
            pr = link.find_parent('td')
            # For the combined listing (cat '0') keep only rows whose
            # category link belongs to one of our configured categories.
            if cat != '0' or pr.find_previous_sibling('td').find('a', href=re.compile(r'list\?cat=\d+'))['href'].split('=')[1] in self.__ids:
                ss = pr.find_next_siblings('td')
                dat = pr.find('a', href=re.compile(r'banan\?id=\d+'))
                # NOTE(review): 'r' is rebound here from the HTTP response to
                # the info dict; harmless but easy to misread.
                r = self.__info_get(dat.get('onmouseover'))
                yield {
                    'label': self.__claean_name(dat.string),
                    'path': link['href'],
                    'is_playable': 'True',
                    'info': {'plot': '[COLOR CC00FF00]%s - DLs:%s S:%s[/COLOR][CR]%s' % (ss[3].get_text(' '), ss[4].get_text(' '), ss[5].string, r['info'],)},
                    'thumbnail': r['img'],
                    'cat': cat,
                    'page': page,
                    'search': search,
                    'properties': {'fanart_image': r['img']}
                }
        # The pagination link is labeled with the Bulgarian word for "Next".
        nn = sp.find('b', text=re.compile(u'(?:Следваща.)'))
        if nn and nn.find_parent('a'):
            yield {
                'label': '>> Next page',
                'path': 'next_page',
                'cat': cat,
                'page': page+1,
                'is_playable': 'False',
                'search': search,
            }
    def get_magnet(self, p):
        # Fetch the torrent detail page at relative path *p* and return its
        # first magnet link with the custom trackers appended, or None when
        # no magnet link is found.
        r = self.__s.get('%s%s' % (self.__base_url, p), headers = self.__HEADERS)
        s = re.search(r"^.*?href='(magnet.*)'", r.text, re.M|re.I)
        if s:
            return'%s&%s' % (s.group(1), urllib.urlencode(self.__CUSTOM_TRACKERS),)
| kodi1/plugin.video.zamunda | resources/lib/zamunda.py | Python | gpl-3.0 | 7,252 |
from lib.layer_utils import *
from lib.grad_check import *
from lib.optim import *
import numpy as np
import copy
class CIFAR10_DataLoader(object):
    """
    Minibatch provider for CIFAR-10 data.

    Arguments:
    - data: Array of input data, of shape (batch_size, d_1, ..., d_k)
    - labels: Array of labels, of shape (batch_size,)
    - batch_size: The size of each returned minibatch
    """
    def __init__(self, data, labels, batch_size):
        self.data = data
        self.labels = labels
        self.batch_size = batch_size
        self.indices = np.arange(data.shape[0])

    # Restore the index pool so it covers the full dataset again.
    def _reset(self):
        self.indices = np.arange(self.data.shape[0])

    # Shuffle the remaining index pool in place. Call after exhausting an
    # epoch's worth of batches.
    def _shuffle(self):
        np.random.shuffle(self.indices)

    # Pop the next batch_size indices off the pool and return the matching
    # (data, labels) slice; the pool is reset+shuffled when it runs low.
    def get_batch(self):
        if len(self.indices) < self.batch_size:
            self._reset()
            self._shuffle()
        picked = self.indices[:self.batch_size]
        batch = (self.data[picked], self.labels[picked])
        self.indices = self.indices[self.batch_size:]
        return batch
def compute_acc(model, data, labels, num_samples=None, batch_size=100):
    """
    Compute the classification accuracy of *model* on *data* / *labels*.

    Arguments:
    - data: Array of input data, of shape (batch_size, d_1, ..., d_k)
    - labels: Array of labels, of shape (batch_size,)
    - num_samples: If not None, evaluate on a random subsample of this size
      instead of the full dataset.
    - batch_size: Number of datapoints pushed through the model at once, to
      bound memory use.

    Returns:
    - accuracy: Scalar fraction of inputs classified correctly.
    """
    total = data.shape[0]
    if num_samples is not None and total > num_samples:
        picked = np.random.choice(total, num_samples)
        data, labels = data[picked], labels[picked]
        total = num_samples
    preds = []
    # Walk the data in batch_size chunks; numpy slicing clips the final
    # (possibly short) chunk automatically.
    for start in range(0, total, batch_size):
        output = model.forward(data[start:start + batch_size], False)
        preds.append(np.argmax(softmax(output), axis=1))
    return np.mean(np.hstack(preds) == labels)
""" Some comments """
def train_net(data, model, loss_func, optimizer, batch_size, max_epochs,
              lr_decay=1.0, lr_decay_every=1000, show_every=10, verbose=False):
    """
    Train a network with this function; parameters of the network are updated
    using the stochastic gradient descent methods defined in optim.py.
    The parameters which achieve the best validation performance during
    training are returned as a param dict, together with the training loss
    history and the train/validation accuracy histories for post analysis.

    Arguments:
    - data: dict instance that should look like the following:
        - data_dict = {
            "data_train": (# Training data, # Training GT Labels),
            "data_val": (# Validation data, # Validation GT Labels),
            "data_test": (# Testing data, # Testing GT Labels),
            }
    - model: An instance defined in fully_conn.py, with a sequential object
      (model.net) as attribute
    - loss_func: An instance defined in layer_utils.py (cross-entropy
      classification loss for this part of the assignment)
    - batch_size: Batch size of the input data
    - max_epochs: The total number of epochs to train the model
    - lr_decay: The multiplicative factor applied to the learning rate
    - lr_decay_every: Decay the learning rate every given number of epochs
    - show_every: Show the training information every given iterations
    - verbose: To show the information or not

    Returns:
    - opt_params: Parameters achieving the best validation accuracy
      (deep-copied), or None if validation accuracy never exceeded 0
    - loss_hist: Loss recorded during training (one entry per iteration)
    - train_acc_hist: Training accuracy recorded per epoch
    - val_acc_hist: Validation accuracy recorded per epoch
    """
    # Initialize the variables
    data_train, labels_train = data["data_train"]
    data_val, labels_val = data["data_val"]
    dataloader = CIFAR10_DataLoader(data_train, labels_train, batch_size)
    opt_val_acc = 0.0
    opt_params = None
    loss_hist = []
    train_acc_hist = []
    val_acc_hist = []
    # Compute the maximum iterations and iterations per epoch
    # (Python 2 integer division: a trailing partial batch is dropped.)
    iters_per_epoch = max(data_train.shape[0] / batch_size, 1)
    max_iters = iters_per_epoch * max_epochs
    # Start the training
    for epoch in xrange(max_epochs):
        # Compute the starting iteration and ending iteration for current epoch
        iter_start = epoch * iters_per_epoch
        iter_end = (epoch + 1) * iters_per_epoch
        # Decay the learning rate every specified epochs (skipping epoch 0)
        if epoch % lr_decay_every == 0 and epoch > 0:
            optimizer.lr = optimizer.lr * lr_decay
            print "Decaying learning rate of the optimizer to {}".format(optimizer.lr)
        # Main training loop
        # NOTE(review): the loop variable 'iter' shadows the builtin iter().
        for iter in xrange(iter_start, iter_end):
            data_batch, labels_batch = dataloader.get_batch()
            #############################################################################
            # Update the parameters: forward pass, loss, backward pass through the loss #
            # and the network, then one optimizer step                                  #
            #############################################################################
            loss = None
            scores = model.forward(data_batch, is_Training=True)
            loss = loss_func.forward(scores, labels_batch)
            dLoss = loss_func.backward()
            dp = model.backward(dLoss)
            optimizer.step()
            #############################################################################
            #                             END OF YOUR CODE                              #
            #############################################################################
            loss_hist.append(loss)
            # Show the training loss
            if verbose and iter % show_every == 0:
                print "(Iteration {} / {}) loss: {}".format(iter+1, max_iters, loss_hist[-1])
        # End of epoch, compute the accuracies
        train_acc = 0
        val_acc = 0
        #############################################################################
        # Compute the training accuracy and validation accuracy for this epoch      #
        #############################################################################
        train_acc = compute_acc(model, data_train, labels_train, batch_size = batch_size)
        val_acc = compute_acc(model, data_val, labels_val, batch_size = batch_size)
        #############################################################################
        #                             END OF YOUR CODE                              #
        #############################################################################
        train_acc_hist.append(train_acc)
        val_acc_hist.append(val_acc)
        # Save the best params for the model
        if val_acc > opt_val_acc:
            #############################################################################
            # Snapshot the parameters achieving the best validation accuracy so far;    #
            # deepcopy so later training steps do not mutate the snapshot               #
            #############################################################################
            opt_val_acc = val_acc
            model.net.gather_params()
            opt_params = copy.deepcopy(model.net.params)
            #############################################################################
            #                             END OF YOUR CODE                              #
            #############################################################################
        # Show the training accuracies
        if verbose:
            print "(Epoch {} / {}) Training Accuracy: {}, Validation Accuracy: {}".format(
                epoch+1, max_epochs, train_acc, val_acc)
    return opt_params, loss_hist, train_acc_hist, val_acc_hist
return opt_params, loss_hist, train_acc_hist, val_acc_hist | vatsalgit/Deep-Learning- | assignment1/lib/train.py | Python | gpl-3.0 | 7,568 |
# -*- encoding: utf-8 -*-
#
# Module Writen to OpenERP, Open Source Management Solution
#
# Copyright (c) 2013 Vauxoo - http://www.vauxoo.com/
# All Rights Reserved.
# info Vauxoo ([email protected])
#
# Coded by: Fernando Rangel ([email protected])
#
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import math
import re
from openerp.osv import fields, osv
class res_partner(osv.osv):
    """Extend res.partner so partners can be searched and displayed by VAT.

    name_search additionally matches the partner's VAT number (optionally
    given as a "[VAT] Name" prefix), and name_get prepends "[VAT] " to the
    display name when the partner has a VAT set.
    """
    _inherit = 'res.partner'
    def name_search(self, cr, user, name, args=None, operator='ilike',
                    context=None, limit=100):
        """Search partners by name first, then fall back to VAT matching.

        A leading "[...]" chunk in *name* is treated as a VAT prefix and
        stripped before the regular name search runs.
        """
        if not args:
            args = []
        if context is None:
            context = {}
        ids = []
        if name:
            # Strip an optional "[VAT] " prefix from the search term.
            ptrn_name = re.compile('(\[(.*?)\])')
            res_name = ptrn_name.search(name)
            if res_name:
                name = name.replace('['+res_name.group(2)+'] ', '')
            partner_search = super(res_partner, self).name_search(cr, user,
                name, args, operator, context, limit)
            ids = [partner[0] for partner in partner_search]
            # Fallback 1: match the whole term against the vat field.
            if not ids:
                ids = self.search(cr, user, [('vat', operator, name)]+ args,
                    limit=limit, context=context)
            # Fallback 2: match only the bracketed portion against vat.
            if not ids:
                ptrn = re.compile('(\[(.*?)\])')
                res = ptrn.search(name)
                if res:
                    ids = self.search(cr, user,
                        [('vat', operator, res.group(2))]+ args, limit=limit,
                        context=context)
        else:
            return super(res_partner, self).name_search(cr, user,
                name, args, operator, context, limit)
        return self.name_get(cr, user, ids, context=context)
    def name_get(self, cr, uid, ids, context=None):
        """Return (id, display_name) pairs, prefixing "[VAT] " when set."""
        if isinstance(ids, (list, tuple)) and not len(ids):
            return []
        # Accept a bare id as well as a list of ids (Python 2 long/int).
        if isinstance(ids, (long, int)):
            ids = [ids]
        res_name = super(res_partner, self).name_get(cr, uid, ids, context)
        res = []
        for record in res_name:
            partner = self.browse(cr, uid, record[0], context=context)
            name = record[1]
            if partner.vat:
                name = '['+partner.vat+'] '+name
            res.append((record[0], name))
        return res
| 3dfxsoftware/cbss-addons | partner_search_by_vat/model/res_partner.py | Python | gpl-2.0 | 3,218 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.