code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1
value | license stringclasses 15
values | size int64 3 1.05M |
|---|---|---|---|---|---|
#!/usr/bin/env python
# Copyright (c) 2010 SubDownloader Developers - See COPYING - GPLv3
'''
FileManagement package
'''
| matachi/subdownloader | languages/__init__.py | Python | gpl-3.0 | 122 |
# -*- coding: utf-8 -*-
"""
jinja2
~~~~~~
Jinja2 is a template engine written in pure Python. It provides a
Django inspired non-XML syntax but supports inline expressions and
an optional sandboxed environment.
Nutshell
--------
Here a small example of a Jinja2 template::
{% extends 'base.html' %}
{% block title %}Memberlist{% endblock %}
{% block content %}
<ul>
{% for user in users %}
<li><a href="{{ user.url }}">{{ user.username }}</a></li>
{% endfor %}
</ul>
{% endblock %}
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
__docformat__ = 'restructuredtext en'
__version__ = '2.7-dev'
# high level interface
from jinja2.environment import Environment, Template
# loaders
from jinja2.loaders import BaseLoader, FileSystemLoader, PackageLoader, \
DictLoader, FunctionLoader, PrefixLoader, ChoiceLoader, \
ModuleLoader
# bytecode caches
from jinja2.bccache import BytecodeCache, FileSystemBytecodeCache, \
MemcachedBytecodeCache
# undefined types
from jinja2.runtime import Undefined, DebugUndefined, StrictUndefined
# exceptions
from jinja2.exceptions import TemplateError, UndefinedError, \
TemplateNotFound, TemplatesNotFound, TemplateSyntaxError, \
TemplateAssertionError
# decorators and public utilities
from jinja2.filters import environmentfilter, contextfilter, \
evalcontextfilter
from jinja2.utils import Markup, escape, clear_caches, \
environmentfunction, evalcontextfunction, contextfunction, \
is_undefined
__all__ = [
'Environment', 'Template', 'BaseLoader', 'FileSystemLoader',
'PackageLoader', 'DictLoader', 'FunctionLoader', 'PrefixLoader',
'ChoiceLoader', 'BytecodeCache', 'FileSystemBytecodeCache',
'MemcachedBytecodeCache', 'Undefined', 'DebugUndefined',
'StrictUndefined', 'TemplateError', 'UndefinedError', 'TemplateNotFound',
'TemplatesNotFound', 'TemplateSyntaxError', 'TemplateAssertionError',
'ModuleLoader', 'environmentfilter', 'contextfilter', 'Markup', 'escape',
'environmentfunction', 'contextfunction', 'clear_caches', 'is_undefined',
'evalcontextfilter', 'evalcontextfunction'
]
| rogers0/namebench | third_party/jinja2/__init__.py | Python | apache-2.0 | 2,272 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Python utilities required by Keras."""
import tensorflow.compat.v2 as tf
import binascii
import codecs
import importlib
import marshal
import os
import re
import sys
import threading
import time
import types as python_types
import warnings
import weakref
import numpy as np
from keras.utils import io_utils
from keras.utils import tf_contextlib
from keras.utils import tf_inspect
from tensorflow.python.util.tf_export import keras_export
_GLOBAL_CUSTOM_OBJECTS = {}
_GLOBAL_CUSTOM_NAMES = {}
# Flag that determines whether to skip the NotImplementedError when calling
# get_config in custom models and layers. This is only enabled when saving to
# SavedModel, when the config isn't required.
_SKIP_FAILED_SERIALIZATION = False
# If a layer does not have a defined config, then the returned config will be a
# dictionary with the below key.
_LAYER_UNDEFINED_CONFIG_KEY = 'layer was saved without config'
@keras_export('keras.utils.custom_object_scope', # pylint: disable=g-classes-have-attributes
'keras.utils.CustomObjectScope')
class CustomObjectScope:
  """Exposes custom classes/functions to Keras deserialization internals.

  Under a scope `with custom_object_scope(objects_dict)`, Keras methods such
  as `tf.keras.models.load_model` or `tf.keras.models.model_from_config`
  will be able to deserialize any custom object referenced by a
  saved config (e.g. a custom layer or metric).

  Example:

  Consider a custom regularizer `my_regularizer`:

  ```python
  layer = Dense(3, kernel_regularizer=my_regularizer)
  config = layer.get_config()  # Config contains a reference to `my_regularizer`
  ...
  # Later:
  with custom_object_scope({'my_regularizer': my_regularizer}):
    layer = Dense.from_config(config)
  ```

  Args:
    *args: Dictionary or dictionaries of `{name: object}` pairs.
  """

  def __init__(self, *args):
    self.custom_objects = args
    self.backup = None

  def __enter__(self):
    # Snapshot the global registry so __exit__ can restore it exactly.
    self.backup = _GLOBAL_CUSTOM_OBJECTS.copy()
    for mapping in self.custom_objects:
      _GLOBAL_CUSTOM_OBJECTS.update(mapping)
    return self

  def __exit__(self, *args, **kwargs):
    # Drop everything added inside the scope and restore the snapshot.
    _GLOBAL_CUSTOM_OBJECTS.clear()
    _GLOBAL_CUSTOM_OBJECTS.update(self.backup)
@keras_export('keras.utils.get_custom_objects')
def get_custom_objects():
  """Retrieves a live reference to the global dictionary of custom objects.

  Updating and clearing custom objects using `custom_object_scope`
  is preferred, but `get_custom_objects` can
  be used to directly access the current collection of custom objects.

  Example:

  ```python
  get_custom_objects().clear()
  get_custom_objects()['MyObject'] = MyObject
  ```

  Returns:
    Global dictionary of names to classes (`_GLOBAL_CUSTOM_OBJECTS`).
  """
  # Intentionally returns the mutable registry itself (not a copy) so that
  # callers can add or remove entries directly.
  return _GLOBAL_CUSTOM_OBJECTS
# Store a unique, per-object ID for shared objects.
#
# We store a unique ID for each object so that we may, at loading time,
# re-create the network properly. Without this ID, we would have no way of
# determining whether a config is a description of a new object that
# should be created or is merely a reference to an already-created object.
SHARED_OBJECT_KEY = 'shared_object_id'
SHARED_OBJECT_DISABLED = threading.local()
SHARED_OBJECT_LOADING = threading.local()
SHARED_OBJECT_SAVING = threading.local()
# Attributes on the threadlocal variable must be set per-thread, thus we
# cannot initialize these globally. Instead, we have accessor functions with
# default values.
def _shared_object_disabled():
  """Returns whether shared-object handling is disabled on this thread."""
  # Threads that never entered DisableSharedObjectScope have no attribute
  # set on the threadlocal, which means handling is enabled.
  if hasattr(SHARED_OBJECT_DISABLED, 'disabled'):
    return SHARED_OBJECT_DISABLED.disabled
  return False
def _shared_object_loading_scope():
  """Gets the current shared object loading scope in a threadsafe manner."""
  # Fall back to a no-op scope for threads that never opened a real one.
  if hasattr(SHARED_OBJECT_LOADING, 'scope'):
    return SHARED_OBJECT_LOADING.scope
  return NoopLoadingScope()
def _shared_object_saving_scope():
  """Gets the current shared object saving scope in a threadsafe manner."""
  # Unlike loading, there is no no-op saving scope: absence means `None`.
  if hasattr(SHARED_OBJECT_SAVING, 'scope'):
    return SHARED_OBJECT_SAVING.scope
  return None
class DisableSharedObjectScope:
  """A context manager for disabling handling of shared objects.

  Disables shared object handling for both saving and loading.

  Created primarily for use with `clone_model`, which does extra surgery that
  is incompatible with shared objects.
  """

  def __enter__(self):
    # Remember whatever scopes were active so __exit__ can reinstate them.
    self._orig_loading_scope = _shared_object_loading_scope()
    self._orig_saving_scope = _shared_object_saving_scope()
    SHARED_OBJECT_DISABLED.disabled = True

  def __exit__(self, *args, **kwargs):
    SHARED_OBJECT_DISABLED.disabled = False
    # Restore the pre-scope loading/saving scopes.
    SHARED_OBJECT_LOADING.scope = self._orig_loading_scope
    SHARED_OBJECT_SAVING.scope = self._orig_saving_scope
class NoopLoadingScope:
  """The default shared object loading scope. It does nothing.

  Created to simplify serialization code that doesn't care about shared
  objects (e.g. when serializing a single object).
  """

  def get(self, unused_object_id):
    # Nothing is ever tracked, so every lookup misses.
    return None

  def set(self, object_id, obj):
    # Intentionally a no-op: nothing is recorded.
    pass
class SharedObjectLoadingScope:
  """A context manager for keeping track of loaded objects.

  During the deserialization process, we may come across objects that are
  shared across multiple layers. In order to accurately restore the network
  structure to its original state, `SharedObjectLoadingScope` allows us to
  re-use shared objects rather than cloning them.
  """

  def __enter__(self):
    # When shared-object handling is disabled, hand back a scope that
    # tracks nothing instead of installing ourselves.
    if _shared_object_disabled():
      return NoopLoadingScope()

    global SHARED_OBJECT_LOADING
    SHARED_OBJECT_LOADING.scope = self
    self._obj_ids_to_obj = {}
    return self

  def get(self, object_id):
    """Given a shared object ID, returns a previously instantiated object.

    Args:
      object_id: shared object ID to use when attempting to find
        already-loaded object.

    Returns:
      The object, if we've seen this ID before. Else, `None`.
    """
    # Explicitly check for `None` internally to make external calling code a
    # bit cleaner.
    if object_id is None:
      return None
    return self._obj_ids_to_obj.get(object_id)

  def set(self, object_id, obj):
    """Stores an instantiated object for future lookup and sharing."""
    if object_id is not None:
      self._obj_ids_to_obj[object_id] = obj

  def __exit__(self, *args, **kwargs):
    # Reinstall the no-op scope as the thread's active loading scope.
    global SHARED_OBJECT_LOADING
    SHARED_OBJECT_LOADING.scope = NoopLoadingScope()
class SharedObjectConfig(dict):
  """A configuration container that keeps track of references.

  `SharedObjectConfig` will automatically attach a shared object ID to any
  configs which are referenced more than once, allowing for proper shared
  object reconstruction at load time.

  In most cases, it would be more proper to subclass something like
  `collections.UserDict` or `collections.Mapping` rather than `dict` directly.
  Unfortunately, python's json encoder does not support `Mapping`s. This is
  important functionality to retain, since we are dealing with serialization.

  We should be safe to subclass `dict` here, since we aren't actually
  overriding any core methods, only augmenting with a new one for reference
  counting.
  """

  def __init__(self, base_config, object_id, **kwargs):
    self.ref_count = 1
    self.object_id = object_id
    super().__init__(base_config, **kwargs)

  def increment_ref_count(self):
    # As soon as we've seen the object more than once, we want to attach the
    # shared object ID. This allows us to only attach the shared object ID
    # when it's strictly necessary, making backwards compatibility breakage
    # less likely.
    if self.ref_count == 1:
      self[SHARED_OBJECT_KEY] = self.object_id
    self.ref_count += 1
class SharedObjectSavingScope:
  """Keeps track of shared object configs when serializing."""

  def __enter__(self):
    if _shared_object_disabled():
      return None

    global SHARED_OBJECT_SAVING

    # Serialization can happen at a number of layers for a number of reasons.
    # We may end up with a case where we're opening a saving scope within
    # another saving scope. In that case, we'd like to use the outermost scope
    # available and ignore inner scopes, since there is not (yet) a reasonable
    # use case for having these nested and distinct.
    outer_scope = _shared_object_saving_scope()
    if outer_scope is not None:
      self._passthrough = True
      return outer_scope
    self._passthrough = False

    SHARED_OBJECT_SAVING.scope = self
    # Weak keys let tracked objects be garbage-collected while the scope is
    # still open.
    self._shared_objects_config = weakref.WeakKeyDictionary()
    self._next_id = 0
    return self

  def get_config(self, obj):
    """Gets a `SharedObjectConfig` if one has already been seen for `obj`.

    Args:
      obj: The object for which to retrieve the `SharedObjectConfig`.

    Returns:
      The SharedObjectConfig for a given object, if already seen. Else,
      `None`.
    """
    try:
      existing_config = self._shared_objects_config[obj]
    except (TypeError, KeyError):
      # If the object is unhashable (e.g. a subclass of `AbstractBaseClass`
      # that has not overridden `__hash__`), a `TypeError` will be thrown.
      # We'll just continue on without shared object support.
      return None
    existing_config.increment_ref_count()
    return existing_config

  def create_config(self, base_config, obj):
    """Create a new SharedObjectConfig for a given object."""
    new_config = SharedObjectConfig(base_config, self._next_id)
    self._next_id += 1
    try:
      self._shared_objects_config[obj] = new_config
    except TypeError:
      # If the object is unhashable (e.g. a subclass of `AbstractBaseClass`
      # that has not overridden `__hash__`), a `TypeError` will be thrown.
      # We'll just continue on without shared object support.
      pass
    return new_config

  def __exit__(self, *args, **kwargs):
    # Only the outermost (non-passthrough) scope tears down the threadlocal.
    if not getattr(self, '_passthrough', False):
      global SHARED_OBJECT_SAVING
      SHARED_OBJECT_SAVING.scope = None
def serialize_keras_class_and_config(
    cls_name, cls_config, obj=None, shared_object_id=None):
  """Returns the serialization of the class with the given config."""
  base_config = {'class_name': cls_name, 'config': cls_config}

  # We call `serialize_keras_class_and_config` for some branches of the load
  # path. In that case, we may already have a shared object ID we'd like to
  # retain.
  if shared_object_id is not None:
    base_config[SHARED_OBJECT_KEY] = shared_object_id

  # If we have an active `SharedObjectSavingScope`, check whether we've
  # already serialized this config. If so, just use that config. This will
  # store an extra ID field in the config, allowing us to re-create the shared
  # object relationship at load time.
  saving_scope = _shared_object_saving_scope()
  if saving_scope is not None and obj is not None:
    previously_seen = saving_scope.get_config(obj)
    if previously_seen is not None:
      return previously_seen
    return saving_scope.create_config(base_config, obj)

  return base_config
@keras_export('keras.utils.register_keras_serializable')
def register_keras_serializable(package='Custom', name=None):
  """Registers an object with the Keras serialization framework.

  This decorator injects the decorated class or function into the Keras custom
  object dictionary, so that it can be serialized and deserialized without
  needing an entry in the user-provided custom object dict. It also injects a
  function that Keras will call to get the object's serializable string key.

  Note that to be serialized and deserialized, classes must implement the
  `get_config()` method. Functions do not have this requirement.

  The object will be registered under the key 'package>name' where `name`,
  defaults to the object name if not passed.

  Args:
    package: The package that this class belongs to.
    name: The name to serialize this class under in this package. If None, the
      class' name will be used.

  Returns:
    A decorator that registers the decorated class with the passed names.
  """

  def decorator(arg):
    """Registers a class with the Keras serialization framework."""
    class_name = arg.__name__ if name is None else name
    registered_name = package + '>' + class_name

    # Classes must be able to round-trip through their config.
    if tf_inspect.isclass(arg) and not hasattr(arg, 'get_config'):
      raise ValueError(
          'Cannot register a class that does not have a get_config() method.')

    # Refuse double-registration in either direction (name or object).
    if registered_name in _GLOBAL_CUSTOM_OBJECTS:
      raise ValueError(
          f'{registered_name} has already been registered to '
          f'{_GLOBAL_CUSTOM_OBJECTS[registered_name]}')
    if arg in _GLOBAL_CUSTOM_NAMES:
      raise ValueError(
          f'{arg} has already been registered to {_GLOBAL_CUSTOM_NAMES[arg]}')

    _GLOBAL_CUSTOM_OBJECTS[registered_name] = arg
    _GLOBAL_CUSTOM_NAMES[arg] = registered_name
    return arg

  return decorator
@keras_export('keras.utils.get_registered_name')
def get_registered_name(obj):
  """Returns the name registered to an object within the Keras framework.

  This function is part of the Keras serialization and deserialization
  framework. It maps objects to the string names associated with those objects
  for serialization/deserialization.

  Args:
    obj: The object to look up.

  Returns:
    The name associated with the object, or the default Python name if the
    object is not registered.
  """
  try:
    return _GLOBAL_CUSTOM_NAMES[obj]
  except KeyError:
    # Not registered: fall back to the object's own Python name.
    return obj.__name__
@tf_contextlib.contextmanager
def skip_failed_serialization():
  """Context manager under which `get_config` failures are tolerated.

  While active, `serialize_keras_object` replaces a config whose
  `get_config` raises `NotImplementedError` with a placeholder config
  instead of propagating the error (see `_SKIP_FAILED_SERIALIZATION` and
  `_LAYER_UNDEFINED_CONFIG_KEY`).
  """
  global _SKIP_FAILED_SERIALIZATION
  prev = _SKIP_FAILED_SERIALIZATION
  try:
    _SKIP_FAILED_SERIALIZATION = True
    yield
  finally:
    # Always restore the previous value, even if the body raised.
    _SKIP_FAILED_SERIALIZATION = prev
@keras_export('keras.utils.get_registered_object')
def get_registered_object(name, custom_objects=None, module_objects=None):
  """Returns the class associated with `name` if it is registered with Keras.

  This function is part of the Keras serialization and deserialization
  framework. It maps strings to the objects associated with them for
  serialization/deserialization.

  Example:

  ```
  def from_config(cls, config, custom_objects=None):
    if 'my_custom_object_name' in config:
      config['hidden_cls'] = tf.keras.utils.get_registered_object(
          config['my_custom_object_name'], custom_objects=custom_objects)
  ```

  Args:
    name: The name to look up.
    custom_objects: A dictionary of custom objects to look the name up in.
      Generally, custom_objects is provided by the user.
    module_objects: A dictionary of custom objects to look the name up in.
      Generally, module_objects is provided by midlevel library implementers.

  Returns:
    An instantiable class associated with 'name', or None if no such class
    exists.
  """
  # Lookup precedence: global registry, then user-supplied custom objects,
  # then library-supplied module objects.
  if name in _GLOBAL_CUSTOM_OBJECTS:
    return _GLOBAL_CUSTOM_OBJECTS[name]
  if custom_objects and name in custom_objects:
    return custom_objects[name]
  if module_objects and name in module_objects:
    return module_objects[name]
  return None
# pylint: disable=g-bad-exception-name
class CustomMaskWarning(Warning):
  """Warning emitted for mask-supporting layers lacking a `get_config`."""
# pylint: enable=g-bad-exception-name
@keras_export('keras.utils.serialize_keras_object')
def serialize_keras_object(instance):
  """Serialize a Keras object into a JSON-compatible representation.

  Calls to `serialize_keras_object` while underneath the
  `SharedObjectSavingScope` context manager will cause any objects re-used
  across multiple layers to be saved with a special shared object ID. This
  allows the network to be re-created properly during deserialization.

  Args:
    instance: The object to serialize.

  Returns:
    A dict-like, JSON-compatible representation of the object's config.

  Raises:
    ValueError: If `instance` implements neither `get_config()` nor
      `__name__`, or if `get_config` raises `NotImplementedError` outside a
      `skip_failed_serialization` scope.
  """
  _, instance = tf.__internal__.decorator.unwrap(instance)
  if instance is None:
    return None

  # pylint: disable=protected-access
  #
  # For v1 layers, checking supports_masking is not enough. We have to also
  # check whether compute_mask has been overridden.
  supports_masking = (getattr(instance, 'supports_masking', False)
                      or (hasattr(instance, 'compute_mask')
                          and not is_default(instance.compute_mask)))
  if supports_masking and is_default(instance.get_config):
    warnings.warn(
        'Custom mask layers require a config and must override '
        'get_config. When loading, the custom mask layer must be '
        'passed to the custom_objects argument.',
        category=CustomMaskWarning,
        stacklevel=2)
  # pylint: enable=protected-access

  if hasattr(instance, 'get_config'):
    name = get_registered_name(instance.__class__)
    try:
      config = instance.get_config()
    except NotImplementedError:
      if _SKIP_FAILED_SERIALIZATION:
        # Emit a placeholder config rather than failing; only enabled
        # inside `skip_failed_serialization` (SavedModel export).
        return serialize_keras_class_and_config(
            name, {_LAYER_UNDEFINED_CONFIG_KEY: True})
      # Bare `raise` preserves the original traceback.
      raise
    serialization_config = {}
    for key, item in config.items():
      if isinstance(item, str):
        serialization_config[key] = item
        continue

      # Any object of a different type needs to be converted to string or dict
      # for serialization (e.g. custom functions, custom classes)
      try:
        serialized_item = serialize_keras_object(item)
        if isinstance(serialized_item, dict) and not isinstance(item, dict):
          # Mark configs produced implicitly (as a side effect of serializing
          # the parent) so the load path knows to deserialize them in place.
          serialized_item['__passive_serialization__'] = True
        serialization_config[key] = serialized_item
      except ValueError:
        # Item is not serializable; store it as-is and hope the consumer
        # can handle it (matches historical behavior).
        serialization_config[key] = item

    # `name` was computed above from the instance's class.
    return serialize_keras_class_and_config(
        name, serialization_config, instance)
  if hasattr(instance, '__name__'):
    return get_registered_name(instance)
  # Fixed: the original message contained a literal tab (`\t`) where an
  # escaped apostrophe was intended.
  raise ValueError(f'Cannot serialize {instance} since it doesn\'t implement '
                   '`get_config()`, and also doesn\'t have `__name__`')
def get_custom_objects_by_name(item, custom_objects=None):
  """Returns the item if it is in either local or global custom objects."""
  # The global registry takes precedence over the locally supplied mapping.
  if item in _GLOBAL_CUSTOM_OBJECTS:
    return _GLOBAL_CUSTOM_OBJECTS[item]
  if custom_objects and item in custom_objects:
    return custom_objects[item]
  return None
def class_and_config_for_serialized_keras_object(
    config,
    module_objects=None,
    custom_objects=None,
    printable_module_name='object'):
  """Returns the class name and config for a serialized keras object.

  Args:
    config: A dict of the form `{'class_name': ..., 'config': ...}`.
    module_objects: A dictionary of built-in objects to look the class name
      up in. Generally provided by midlevel library implementers.
    custom_objects: A dictionary of custom objects to look the class name up
      in. Generally provided by the end user.
    printable_module_name: Human-readable name for the object type; used only
      in error messages.

  Returns:
    A `(cls, cls_config)` tuple, where `cls_config` has had any nested
    passively-serialized items and registered custom function names
    deserialized in place.

  Raises:
    ValueError: If `config` is not a dict with `class_name` and `config`
      keys, or if no class can be found for `class_name`.
  """
  if (not isinstance(config, dict)
      or 'class_name' not in config
      or 'config' not in config):
    raise ValueError(
        f'Improper config format for {config}. '
        'Expecting python dict contains `class_name` and `config` as keys')
  class_name = config['class_name']
  cls = get_registered_object(class_name, custom_objects, module_objects)
  if cls is None:
    raise ValueError(
        f'Unknown {printable_module_name}: {class_name}. Please ensure this '
        'object is passed to the `custom_objects` argument. See '
        'https://www.tensorflow.org/guide/keras/save_and_serialize'
        '#registering_the_custom_object for details.')
  cls_config = config['config']
  # Check if `cls_config` is a list. If it is a list, return the class and the
  # associated class configs for recursively deserialization. This case will
  # happen on the old version of sequential model (e.g. `keras_version` ==
  # "2.0.6"), which is serialized in a different structure, for example
  # "{'class_name': 'Sequential',
  #   'config': [{'class_name': 'Embedding', 'config': ...}, {}, ...]}".
  if isinstance(cls_config, list):
    return (cls, cls_config)
  deserialized_objects = {}
  for key, item in cls_config.items():
    if key == 'name':
      # Assume that the value of 'name' is a string that should not be
      # deserialized as a function. This avoids the corner case where
      # cls_config['name'] has an identical name to a custom function and
      # gets converted into that function.
      deserialized_objects[key] = item
    elif isinstance(item, dict) and '__passive_serialization__' in item:
      # Nested config that was serialized as a side effect of serializing its
      # parent (see `serialize_keras_object`); deserialize it in place.
      deserialized_objects[key] = deserialize_keras_object(
          item,
          module_objects=module_objects,
          custom_objects=custom_objects,
          printable_module_name='config_item')
    # TODO(momernick): Should this also have 'module_objects'?
    elif (isinstance(item, str) and
          tf_inspect.isfunction(get_registered_object(item, custom_objects))):
      # Handle custom functions here. When saving functions, we only save the
      # function's name as a string. If we find a matching string in the
      # custom objects during deserialization, we convert the string back to
      # the original function.
      # Note that a potential issue is that a string field could have a naming
      # conflict with a custom function name, but this should be a rare case.
      # This issue does not occur if a string field has a naming conflict with
      # a custom object, since the config of an object will always be a dict.
      deserialized_objects[key] = get_registered_object(item, custom_objects)
  # Write the deserialized values back into the (mutable) config dict.
  for key, item in deserialized_objects.items():
    cls_config[key] = deserialized_objects[key]
  return (cls, cls_config)
@keras_export('keras.utils.deserialize_keras_object')
def deserialize_keras_object(identifier,
                             module_objects=None,
                             custom_objects=None,
                             printable_module_name='object'):
  """Turns the serialized form of a Keras object back into an actual object.

  This function is for mid-level library implementers rather than end users.

  Importantly, this utility requires you to provide the dict of
  `module_objects` to use for looking up the object config; this is not
  populated by default. If you need a deserialization utility that has
  preexisting knowledge of built-in Keras objects, use e.g.
  `keras.layers.deserialize(config)`, `keras.metrics.deserialize(config)`,
  etc.

  Calling `deserialize_keras_object` while underneath the
  `SharedObjectLoadingScope` context manager will cause any already-seen
  shared objects to be returned as-is rather than creating a new object.

  Args:
    identifier: the serialized form of the object.
    module_objects: A dictionary of built-in objects to look the name up in.
      Generally, `module_objects` is provided by midlevel library
      implementers.
    custom_objects: A dictionary of custom objects to look the name up in.
      Generally, `custom_objects` is provided by the end user.
    printable_module_name: A human-readable string representing the type of
      the object. Printed in case of exception.

  Returns:
    The deserialized object.

  Example:

  A mid-level library implementer might want to implement a utility for
  retrieving an object from its config, as such:

  ```python
  def deserialize(config, custom_objects=None):
    return deserialize_keras_object(
        config,
        module_objects=globals(),
        custom_objects=custom_objects,
        printable_module_name='MyObjectType',
    )
  ```

  This is how e.g. `keras.layers.deserialize()` is implemented.
  """
  if identifier is None:
    return None

  if isinstance(identifier, dict):
    # In this case we are dealing with a Keras config dictionary.
    config = identifier
    (cls, cls_config) = class_and_config_for_serialized_keras_object(
        config, module_objects, custom_objects, printable_module_name)

    # If this object has already been loaded (i.e. it's shared between
    # multiple objects), return the already-loaded object.
    shared_object_id = config.get(SHARED_OBJECT_KEY)
    shared_object = _shared_object_loading_scope().get(shared_object_id)  # pylint: disable=assignment-from-none
    if shared_object is not None:
      return shared_object

    if hasattr(cls, 'from_config'):
      arg_spec = tf_inspect.getfullargspec(cls.from_config)
      custom_objects = custom_objects or {}

      if 'custom_objects' in arg_spec.args:
        # Merge the global registry into the user-supplied custom objects;
        # user entries win on key collisions (they come last).
        deserialized_obj = cls.from_config(
            cls_config,
            custom_objects=dict(
                list(_GLOBAL_CUSTOM_OBJECTS.items()) +
                list(custom_objects.items())))
      else:
        with CustomObjectScope(custom_objects):
          deserialized_obj = cls.from_config(cls_config)
    else:
      # Then `cls` may be a function returning a class.
      # in this case by convention `config` holds
      # the kwargs of the function.
      custom_objects = custom_objects or {}
      with CustomObjectScope(custom_objects):
        deserialized_obj = cls(**cls_config)

    # Add object to shared objects, in case we find it referenced again.
    _shared_object_loading_scope().set(shared_object_id, deserialized_obj)
    return deserialized_obj

  elif isinstance(identifier, str):
    object_name = identifier
    if custom_objects and object_name in custom_objects:
      obj = custom_objects.get(object_name)
    elif object_name in _GLOBAL_CUSTOM_OBJECTS:
      obj = _GLOBAL_CUSTOM_OBJECTS[object_name]
    else:
      obj = module_objects.get(object_name)
      if obj is None:
        raise ValueError(
            f'Unknown {printable_module_name}: {object_name}. Please ensure '
            'this object is passed to the `custom_objects` argument. See '
            'https://www.tensorflow.org/guide/keras/save_and_serialize'
            '#registering_the_custom_object for details.')

    # Classes passed by name are instantiated with no args, functions are
    # returned as-is.
    if tf_inspect.isclass(obj):
      return obj()
    return obj

  elif tf_inspect.isfunction(identifier):
    # If a function has already been deserialized, return as is.
    return identifier
  else:
    raise ValueError(
        f'Could not interpret serialized {printable_module_name}: {identifier}')
def func_dump(func):
  """Serializes a user defined function.

  Args:
    func: the function to serialize.

  Returns:
    A tuple `(code, defaults, closure)` where `code` is the base64-encoded
    marshalled bytecode, `defaults` is the function's default-argument tuple
    (or `None`), and `closure` is a tuple of captured cell contents (or
    `None`).
  """
  raw_code = marshal.dumps(func.__code__)
  if os.name == 'nt':
    # On Windows, replace backslashes in the marshalled bytecode before
    # encoding (mirrors the original per-platform handling).
    raw_code = raw_code.replace(b'\\', b'/')
  code = codecs.encode(raw_code, 'base64').decode('ascii')
  defaults = func.__defaults__
  closure = (tuple(cell.cell_contents for cell in func.__closure__)
             if func.__closure__ else None)
  return code, defaults, closure
def func_load(code, defaults=None, closure=None, globs=None):
  """Deserializes a user defined function.

  Args:
    code: bytecode of the function (base64 string, or a `(code, defaults,
      closure)` tuple/list as produced by `func_dump`).
    defaults: defaults of the function.
    closure: closure of the function.
    globs: dictionary of global objects.

  Returns:
    A function object.
  """
  if isinstance(code, (tuple, list)):
    # Unpack a previous `func_dump` result.
    code, defaults, closure = code
    if isinstance(defaults, list):
      defaults = tuple(defaults)

  def ensure_value_to_cell(value):
    """Ensures that a value is converted to a python cell object.

    Args:
      value: Any value that needs to be casted to the cell type

    Returns:
      A value wrapped as a cell object (see function "func_load")
    """

    def dummy_fn():
      value  # just access it so it gets captured in .__closure__  # pylint: disable=pointless-statement

    cell_value = dummy_fn.__closure__[0]
    if isinstance(value, type(cell_value)):
      return value
    return cell_value

  if closure is not None:
    closure = tuple(ensure_value_to_cell(item) for item in closure)

  try:
    raw_code = codecs.decode(code.encode('ascii'), 'base64')
  except (UnicodeEncodeError, binascii.Error):
    # Fall back for legacy payloads that are not valid ascii/base64.
    raw_code = code.encode('raw_unicode_escape')

  code = marshal.loads(raw_code)
  if globs is None:
    globs = globals()
  return python_types.FunctionType(
      code, globs, name=code.co_name, argdefs=defaults, closure=closure)
def has_arg(fn, name, accept_all=False):
  """Checks if a callable accepts a given keyword argument.

  Args:
    fn: Callable to inspect.
    name: Check if `fn` can be called with `name` as a keyword argument.
    accept_all: What to return if there is no parameter called `name` but the
      function accepts a `**kwargs` argument.

  Returns:
    bool, whether `fn` accepts a `name` keyword argument.
  """
  spec = tf_inspect.getfullargspec(fn)
  # A `**kwargs` parameter accepts any keyword when `accept_all` is set.
  if accept_all and spec.varkw is not None:
    return True
  return name in spec.args or name in spec.kwonlyargs
@keras_export('keras.utils.Progbar')
class Progbar:
"""Displays a progress bar.
Args:
target: Total number of steps expected, None if unknown.
width: Progress bar width on screen.
verbose: Verbosity mode, 0 (silent), 1 (verbose), 2 (semi-verbose)
stateful_metrics: Iterable of string names of metrics that should *not* be
averaged over time. Metrics in this list will be displayed as-is. All
others will be averaged by the progbar before display.
interval: Minimum visual progress update interval (in seconds).
unit_name: Display name for step counts (usually "step" or "sample").
"""
  def __init__(self,
               target,
               width=30,
               verbose=1,
               interval=0.05,
               stateful_metrics=None,
               unit_name='step'):
    self.target = target
    self.width = width
    self.verbose = verbose
    self.interval = interval
    self.unit_name = unit_name
    # Metrics in this set are displayed as-is; all other metrics are
    # averaged over the steps seen so far before being displayed.
    if stateful_metrics:
      self.stateful_metrics = set(stateful_metrics)
    else:
      self.stateful_metrics = set()

    # Whether the output stream supports rewriting the current line in place
    # (via '\b'/'\r') — true for interactive terminals and for known
    # notebook/IDE environments; otherwise each update appends a new line.
    self._dynamic_display = ((hasattr(sys.stdout, 'isatty') and
                              sys.stdout.isatty()) or
                             'ipykernel' in sys.modules or
                             'posix' in sys.modules or
                             'PYCHARM_HOSTED' in os.environ)
    self._total_width = 0  # Width of the last rendered line.
    self._seen_so_far = 0  # Step index at the previous update.
    # We use a dict + list to avoid garbage collection
    # issues found in OrderedDict
    self._values = {}
    self._values_order = []
    self._start = time.time()
    self._last_update = 0  # Timestamp of the last rendered update.

    self._time_at_epoch_start = self._start
    self._time_at_epoch_end = None
    self._time_after_first_step = None
def update(self, current, values=None, finalize=None):
"""Updates the progress bar.
Args:
current: Index of current step.
values: List of tuples: `(name, value_for_last_step)`. If `name` is in
`stateful_metrics`, `value_for_last_step` will be displayed as-is.
Else, an average of the metric over time will be displayed.
finalize: Whether this is the last update for the progress bar. If
`None`, defaults to `current >= self.target`.
"""
if finalize is None:
if self.target is None:
finalize = False
else:
finalize = current >= self.target
values = values or []
for k, v in values:
if k not in self._values_order:
self._values_order.append(k)
if k not in self.stateful_metrics:
# In the case that progress bar doesn't have a target value in the first
# epoch, both on_batch_end and on_epoch_end will be called, which will
# cause 'current' and 'self._seen_so_far' to have the same value. Force
# the minimal value to 1 here, otherwise stateful_metric will be 0s.
value_base = max(current - self._seen_so_far, 1)
if k not in self._values:
self._values[k] = [v * value_base, value_base]
else:
self._values[k][0] += v * value_base
self._values[k][1] += value_base
else:
# Stateful metrics output a numeric value. This representation
# means "take an average from a single value" but keeps the
# numeric formatting.
self._values[k] = [v, 1]
self._seen_so_far = current
message = ''
now = time.time()
info = ' - %.0fs' % (now - self._start)
if current == self.target:
self._time_at_epoch_end = now
if self.verbose == 1:
if now - self._last_update < self.interval and not finalize:
return
prev_total_width = self._total_width
if self._dynamic_display:
message += '\b' * prev_total_width
message += '\r'
else:
message += '\n'
if self.target is not None:
numdigits = int(np.log10(self.target)) + 1
bar = ('%' + str(numdigits) + 'd/%d [') % (current, self.target)
prog = float(current) / self.target
prog_width = int(self.width * prog)
if prog_width > 0:
bar += ('=' * (prog_width - 1))
if current < self.target:
bar += '>'
else:
bar += '='
bar += ('.' * (self.width - prog_width))
bar += ']'
else:
bar = '%7d/Unknown' % current
self._total_width = len(bar)
message += bar
time_per_unit = self._estimate_step_duration(current, now)
if self.target is None or finalize:
info += self._format_time(time_per_unit, self.unit_name)
else:
eta = time_per_unit * (self.target - current)
if eta > 3600:
eta_format = '%d:%02d:%02d' % (eta // 3600,
(eta % 3600) // 60, eta % 60)
elif eta > 60:
eta_format = '%d:%02d' % (eta // 60, eta % 60)
else:
eta_format = '%ds' % eta
info = ' - ETA: %s' % eta_format
for k in self._values_order:
info += ' - %s:' % k
if isinstance(self._values[k], list):
avg = np.mean(self._values[k][0] / max(1, self._values[k][1]))
if abs(avg) > 1e-3:
info += ' %.4f' % avg
else:
info += ' %.4e' % avg
else:
info += ' %s' % self._values[k]
self._total_width += len(info)
if prev_total_width > self._total_width:
info += (' ' * (prev_total_width - self._total_width))
if finalize:
info += '\n'
message += info
io_utils.print_msg(message, line_break=False)
message = ''
elif self.verbose == 2:
if finalize:
numdigits = int(np.log10(self.target)) + 1
count = ('%' + str(numdigits) + 'd/%d') % (current, self.target)
info = count + info
for k in self._values_order:
info += ' - %s:' % k
avg = np.mean(self._values[k][0] / max(1, self._values[k][1]))
if avg > 1e-3:
info += ' %.4f' % avg
else:
info += ' %.4e' % avg
if self._time_at_epoch_end:
time_per_epoch = self._time_at_epoch_end - self._time_at_epoch_start
avg_time_per_step = time_per_epoch / self.target
self._time_at_epoch_start = now
self._time_at_epoch_end = None
info += ' -' + self._format_time(time_per_epoch, 'epoch')
info += ' -' + self._format_time(avg_time_per_step, self.unit_name)
info += '\n'
message += info
io_utils.print_msg(message, line_break=False)
message = ''
self._last_update = now
def add(self, n, values=None):
self.update(self._seen_so_far + n, values)
def _format_time(self, time_per_unit, unit_name):
"""format a given duration to display to the user.
Given the duration, this function formats it in either milliseconds
or seconds and displays the unit (i.e. ms/step or s/epoch)
Args:
time_per_unit: the duration to display
unit_name: the name of the unit to display
Returns:
a string with the correctly formatted duration and units
"""
formatted = ''
if time_per_unit >= 1 or time_per_unit == 0:
formatted += ' %.0fs/%s' % (time_per_unit, unit_name)
elif time_per_unit >= 1e-3:
formatted += ' %.0fms/%s' % (time_per_unit * 1e3, unit_name)
else:
formatted += ' %.0fus/%s' % (time_per_unit * 1e6, unit_name)
return formatted
def _estimate_step_duration(self, current, now):
"""Estimate the duration of a single step.
Given the step number `current` and the corresponding time `now`
this function returns an estimate for how long a single step
takes. If this is called before one step has been completed
(i.e. `current == 0`) then zero is given as an estimate. The duration
estimate ignores the duration of the (assumed to be non-representative)
first step for estimates when more steps are available (i.e. `current>1`).
Args:
current: Index of current step.
now: The current time.
Returns: Estimate of the duration of a single step.
"""
if current:
# there are a few special scenarios here:
# 1) somebody is calling the progress bar without ever supplying step 1
# 2) somebody is calling the progress bar and supplies step one multiple
# times, e.g. as part of a finalizing call
# in these cases, we just fall back to the simple calculation
if self._time_after_first_step is not None and current > 1:
time_per_unit = (now - self._time_after_first_step) / (current - 1)
else:
time_per_unit = (now - self._start) / current
if current == 1:
self._time_after_first_step = now
return time_per_unit
else:
return 0
def _update_stateful_metrics(self, stateful_metrics):
self.stateful_metrics = self.stateful_metrics.union(stateful_metrics)
def make_batches(size, batch_size):
  """Returns a list of batch indices (tuples of indices).
  Args:
    size: Integer, total size of the data to slice into batches.
    batch_size: Integer, batch size.
  Returns:
    A list of `(begin, end)` tuples; the last batch may be shorter.
  """
  num_batches = int(np.ceil(size / float(batch_size)))
  bounds = []
  for idx in range(num_batches):
    begin = idx * batch_size
    bounds.append((begin, min(size, begin + batch_size)))
  return bounds
def slice_arrays(arrays, start=None, stop=None):
  """Slice an array or list of arrays.

  This takes an array-like, or a list of
  array-likes, and outputs:
      - arrays[start:stop] if `arrays` is an array-like
      - [x[start:stop] for x in arrays] if `arrays` is a list
  Can also work on list/array of indices: `slice_arrays(x, indices)`
  Args:
    arrays: Single array or list of arrays.
    start: can be an integer index (start index) or a list/array of indices
    stop: integer (stop index); should be None if `start` was a list.
  Returns:
    A slice of the array(s).
  Raises:
    ValueError: If the value of start is a list and stop is not None.
  """
  if arrays is None:
    return [None]
  if isinstance(start, list) and stop is not None:
    raise ValueError('The stop argument has to be None if the value of start '
                     f'is a list. Received start={start}, stop={stop}')
  if isinstance(arrays, list):
    if hasattr(start, '__len__'):
      # hdf5 datasets only support list objects as indices
      if hasattr(start, 'shape'):
        start = start.tolist()
      return [None if item is None else item[start] for item in arrays]
    sliced = []
    for item in arrays:
      # Non-indexable entries (and Nones) are passed through as None.
      if item is None or not hasattr(item, '__getitem__'):
        sliced.append(None)
      else:
        sliced.append(item[start:stop])
    return sliced
  if hasattr(start, '__len__'):
    if hasattr(start, 'shape'):
      start = start.tolist()
    return arrays[start]
  if hasattr(start, '__getitem__'):
    return arrays[start:stop]
  return [None]
def to_list(x):
  """Normalizes a list/tensor into a list.

  If a tensor is passed, we return
  a list of size 1 containing the tensor.
  Args:
    x: target object to be normalized.
  Returns:
    A list.
  """
  return x if isinstance(x, list) else [x]
def to_snake_case(name):
  """Converts a CamelCase `name` to snake_case.

  Args:
    name: String, usually a class name.
  Returns:
    The snake_case form; if the result would start with `_` (a private
    class name), it is prefixed with `'private'` so it is safe to use as a
    scope name.
  """
  intermediate = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
  insecure = re.sub('([a-z])([A-Z])', r'\1_\2', intermediate).lower()
  # If the class is private the name starts with "_" which is not secure
  # for creating scopes. We prefix the name with "private" in this case.
  # `startswith` (rather than `insecure[0]`) also handles the empty string,
  # which previously raised IndexError.
  if not insecure.startswith('_'):
    return insecure
  return 'private' + insecure
def is_all_none(structure):
  """Returns True iff every flattened element of `structure` is None."""
  # Identity comparisons produce plain bools, so `all` is safe here even when
  # the flattened elements are Tensors (no Tensor truthiness is ever taken).
  return all(element is None for element in tf.nest.flatten(structure))
def check_for_unexpected_keys(name, input_dict, expected_values):
  """Raises ValueError if `input_dict` has keys outside `expected_values`."""
  leftover = set(input_dict.keys()).difference(expected_values)
  if not leftover:
    return
  raise ValueError(
      f'Unknown entries in {name} dictionary: {list(leftover)}. Only expected '
      f'following keys: {expected_values}')
def validate_kwargs(kwargs,
                    allowed_kwargs,
                    error_message='Keyword argument not understood:'):
  """Checks that all keyword arguments are in the set of allowed keys."""
  unknown = [key for key in kwargs if key not in allowed_kwargs]
  if unknown:
    # Report the first offending key, matching iteration order of `kwargs`.
    raise TypeError(error_message, unknown[0])
def validate_config(config):
  """Determines whether `config` appears to be a valid layer config."""
  if not isinstance(config, dict):
    return False
  return _LAYER_UNDEFINED_CONFIG_KEY not in config
def default(method):
  """Tags `method` so subclass overrides can be detected via `is_default`."""
  setattr(method, '_is_default', True)  # pylint: disable=protected-access
  return method
def is_default(method):
  """Returns True when `method` carries the `default` decorator's marker."""
  marker = getattr(method, '_is_default', False)
  return marker
def populate_dict_with_module_objects(target_dict, modules, obj_filter):
  """Copies into `target_dict` every attribute of `modules` that passes
  `obj_filter`, keyed by attribute name (later modules win on collisions)."""
  for module in modules:
    members = ((attr, getattr(module, attr)) for attr in dir(module))
    for attr, value in members:
      if obj_filter(value):
        target_dict[attr] = value
class LazyLoader(python_types.ModuleType):
  """Defers importing a module until one of its attributes is first accessed,
  mainly to avoid pulling in large dependencies at import time."""
  def __init__(self, local_name, parent_module_globals, name):
    super(LazyLoader, self).__init__(name)
    self._local_name = local_name
    self._parent_module_globals = parent_module_globals
  def _load(self):
    """Imports the real module, patches the parent namespace, returns it."""
    module = importlib.import_module(self.__name__)
    self._parent_module_globals[self._local_name] = module
    # Copy the real module's dict onto this proxy so that future lookups hit
    # instance attributes directly (__getattr__ only fires on misses).
    self.__dict__.update(module.__dict__)
    return module
  def __getattr__(self, item):
    return getattr(self._load(), item)
# Aliases
# Backwards-compatible snake_case alias for `CustomObjectScope`.
custom_object_scope = CustomObjectScope # pylint: disable=invalid-name
| keras-team/keras | keras/utils/generic_utils.py | Python | apache-2.0 | 43,269 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Adds an optional `extra_css_classes` CharField to the `button` model."""
    dependencies = [
        ('widgetbox', '0010_html'),
    ]
    operations = [
        migrations.AddField(
            model_name='button',
            name='extra_css_classes',
            # blank=True makes the field optional in forms/admin.
            field=models.CharField(max_length=200, blank=True),
            preserve_default=True,
        ),
    ]
| logithr/djangocms-widgetbox | widgetbox/migrations/0011_auto_20150731_0958.py | Python | mit | 447 |
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 28 16:52:18 2016 by emin
"""
import os
import sys
import theano
import theano.tensor as T
import numpy as np
from lasagne.layers import InputLayer, ReshapeLayer, DenseLayer, ConcatLayer, SliceLayer
from generators import ModularCueCombinationTaskFFWD
import lasagne.layers
import lasagne.nonlinearities
import lasagne.updates
import lasagne.objectives
import lasagne.init
import scipy.io as sio
# Run from the script's own directory so relative resource paths resolve.
os.chdir(os.path.dirname(sys.argv[0]))
# Re-open stdout unbuffered (Python 2 style: buffering=0) so progress prints
# appear immediately under batch schedulers.
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
# The PBS array job index doubles as the RNG seed, making each array task a
# distinct but reproducible run.
job_idx = int(os.getenv('PBS_ARRAYID'))
np.random.seed(job_idx)
def model(input_var, batch_size=1, n_in=100, n_out=1, n_hid=200):
    """Builds the two-stage feed-forward Lasagne network.

    The first `2*(n_in/3)` input units feed a hidden layer whose output is
    concatenated with the remaining input units before a second hidden layer.
    NOTE(review): this is Python-2 code -- `n_in/3` relies on integer
    division, so `n_in` is presumably expected to be divisible by 3; confirm.

    Returns:
        (l_out, l_in_hid, l_in_hid_1): output layer and both hidden layers.
    """
    # Input Layer
    l_in = InputLayer((batch_size, n_in), input_var=input_var)
    # Hidden layer
    l_in_hid_1 = DenseLayer( SliceLayer(l_in,indices=slice(None,2*(n_in/3)), axis=1), n_hid, nonlinearity=lasagne.nonlinearities.rectify)
    l_in_hid = DenseLayer( ConcatLayer( (l_in_hid_1, SliceLayer(l_in,indices=slice(2*(n_in/3),None), axis=1) ), axis=1 ), n_hid, nonlinearity=lasagne.nonlinearities.rectify)
    # Output Layer
    l_shp = ReshapeLayer(l_in_hid, (-1, n_hid))
    l_dense = DenseLayer(l_shp, num_units=n_out, nonlinearity=lasagne.nonlinearities.linear)
    # To reshape back to our original shape, we can use the symbolic shape variables we retrieved above.
    l_out = ReshapeLayer(l_dense, (batch_size, n_out))
    return l_out, l_in_hid, l_in_hid_1
if __name__ == '__main__':
    # Define the input and expected output variable
    input_var, target_var = T.fmatrices('input', 'target')
    # The generator to sample examples from: train on two gain levels,
    # evaluate generalization on all gain levels.
    tr_cond = 'two_gains'
    test_cond = 'all_gains'
    generator = ModularCueCombinationTaskFFWD(max_iter=50001, batch_size=100, n_in=50, n_out=1, sigma_sq=100.0, tr_cond=tr_cond)
    test_generator = ModularCueCombinationTaskFFWD(max_iter=2501, batch_size=100, n_in=50, n_out=1, sigma_sq=100.0, tr_cond=test_cond)
    # The model (input width is 3x the generator's n_in: two cue populations
    # plus the extra slice concatenated inside `model`).
    l_out, l_rec, l_fix = model(input_var, batch_size=generator.batch_size, n_in=3*generator.n_in, n_out=generator.n_out, n_hid=200)
    # The generated output variable and the loss function
    # all_layers = lasagne.layers.get_all_layers(l_out)
    # l2_penalty = lasagne.regularization.regularize_layer_params(all_layers, lasagne.regularization.l2) * 1e-6
    pred_var = lasagne.layers.get_output(l_out)
    loss = T.mean(T.abs_(pred_var-target_var)) #+ l2_penalty
    # Create the update expressions. NOTE(review): params[2:] skips the first
    # two parameters (presumably the first hidden layer's W and b), freezing
    # the weights loaded from the .mat checkpoint below -- confirm ordering.
    params = lasagne.layers.get_all_params(l_out, trainable=True)
    trainable_params = params[2:]
    updates = lasagne.updates.adam(loss, trainable_params, learning_rate=0.0003)
    # Compile the function for a training step, as well as the prediction function and a utility function to get the inner details of the RNN
    train_fn = theano.function([input_var, target_var], loss, updates=updates, allow_input_downcast=True)
    pred_fn = theano.function([input_var], pred_var, allow_input_downcast=True)
    rec_layer_fn = theano.function([input_var], lasagne.layers.get_output(l_rec, get_details=True), allow_input_downcast=True)
    # Set the untrained (frozen) first-layer params from a previous run's
    # checkpoint file.
    lasagne.layers.set_all_param_values(l_fix, [sio.loadmat('cc_abserr_everything_allgains_run3.mat')['W_hid'].astype('float32'),
                                                sio.loadmat('cc_abserr_everything_allgains_run3.mat')['b_hid'].flatten().astype('float32')])
    # TRAINING: report fractional RMSE vs. the optimal estimator every 500
    # batches, then reset the accumulators.
    s_vec, opt_s_vec, ex_pred_vec, frac_rmse_vec = [], [], [], []
    for i, (example_input, example_output, g1, g2, s, opt_s) in generator:
        score = train_fn(example_input, example_output)
        example_prediction = pred_fn(example_input)
        s_vec.append(s)
        opt_s_vec.append(opt_s)
        ex_pred_vec.append(example_prediction.squeeze())
        if i % 500 == 0:
            rmse_opt = np.sqrt(np.nanmean((np.asarray(s_vec) - np.asarray(opt_s_vec))**2))
            rmse_net = np.sqrt(np.nanmean((np.asarray(s_vec) - np.asarray(ex_pred_vec))**2))
            frac_rmse = (rmse_net - rmse_opt) / rmse_opt
            frac_rmse_vec.append(frac_rmse)
            print 'Batch #%d; Frac. RMSE: %.6f; Opt. RMSE: %.6f; Net. RMSE: %.6f' % (i, frac_rmse, rmse_opt, rmse_net)
            s_vec = []
            opt_s_vec = []
            ex_pred_vec = []
    # TESTING: no weight updates, just predictions on the held-out condition.
    s_vec, opt_s_vec, ex_pred_vec = [], [], []
    for i, (example_input, example_output, g1, g2, s, opt_s) in test_generator:
        example_prediction = pred_fn(example_input)
        s_vec.append(s)
        opt_s_vec.append(opt_s)
        ex_pred_vec.append(example_prediction.squeeze())
    rmse_opt = np.sqrt(np.nanmean((np.asarray(s_vec) - np.asarray(opt_s_vec))**2))
    rmse_net = np.sqrt(np.nanmean((np.asarray(s_vec) - np.asarray(ex_pred_vec))**2))
    frac_rmse_test = (rmse_net - rmse_opt) / rmse_opt
    print 'Test data; Frac. RMSE: %.6f; Opt. RMSE: %.6f; Net. RMSE: %.6f' % (frac_rmse_test, rmse_opt, rmse_net)
    # SAVE TRAINED MODEL (training curve + test predictions) to a .mat file
    # keyed by the PBS array index.
    sio.savemat('cccc_abserr_testfrmse_twogains_run%i.mat'%job_idx, {'frac_rmse_vec':np.asarray(frac_rmse_vec), 'frac_rmse_test':frac_rmse_test, 'opt_vec':np.asarray(opt_s_vec), 'net_vec':np.asarray(ex_pred_vec) } )
| eminorhan/inevitable-probability | alt_objectives/abserr_cc_cc_expt.py | Python | gpl-3.0 | 5,456 |
from PyQt4 import QtGui
from Action import BaseAction
from Action import Motion
from Action import Stiffness
from EmpathyMotionList import EmpathyMotionList
import random
class EmpathyRandomButton(QtGui.QPushButton):
    """Push button that maps empathy 'levels' to pools of robot actions and
    motions, and picks one of each at random when queried."""
    # Indices into the per-level [actions, motions] pair stored in _list.
    INDEX_ACTIONS = 0
    INDEX_MOTION = 1
    def __init__(self, label):
        super(EmpathyRandomButton, self).__init__(label)
        # The list list of action-motion.
        # action-motion contains list of actions and corresponding motion IDs
        self._list = dict()
        self._maxLevel = 0
        random.seed()
    #END __init__()
    def add(self, jlv, actions = None, motion = None):
        """Registers an action list and/or a named motion under level `jlv`.

        Motion names are resolved via EmpathyMotionList as "<jlv>_<motion>";
        unknown names are silently dropped.
        """
        if not jlv in self._list:
            self._list[jlv] = [[], []]
            self._maxLevel = max(self._maxLevel, jlv)
        #END if
        if actions is not None:
            # Allow a single action to be passed without wrapping it in a list.
            if isinstance(actions, BaseAction):
                actions = [actions]
            #END if
            self._list[jlv][EmpathyRandomButton.INDEX_ACTIONS].append(actions)
        #END if
        if motion is not None:
            motion = EmpathyMotionList.getByName(str(jlv) + "_" + motion)
            if motion is not None:
                self._list[jlv][EmpathyRandomButton.INDEX_MOTION].append(motion)
            #END if
        #END if
    #END add()
    def getRobotActions(self, jlv):
        """Returns a random motion plus a random action list for level `jlv`.

        Each search walks down from `jlv` toward level 0 and stops at the
        first level that has any entries, so lower levels act as fallbacks.
        """
        motions = []
        level = jlv
        while len(motions) <= 0 <= level:
            if level in self._list:
                if len(self._list[level][EmpathyRandomButton.INDEX_MOTION]) > 0:
                    val = random.randint(0, len(self._list[level][EmpathyRandomButton.INDEX_MOTION]) - 1)
                    # Enable stiffness before playing a motion; non-blocking so
                    # actions below can run concurrently.
                    motions.append(Stiffness(1.0))
                    motions.append(Motion(motion = self._list[level][EmpathyRandomButton.INDEX_MOTION][val], blocking = False))
                #END if
            #END if
            level -= 1
        #END while
        actions = []
        level = jlv
        while len(actions) <= 0 <= level:
            if level in self._list:
                if len(self._list[level][EmpathyRandomButton.INDEX_ACTIONS]) > 0:
                    val = random.randint(0, len(self._list[level][EmpathyRandomButton.INDEX_ACTIONS]) - 1)
                    actions = actions + self._list[level][EmpathyRandomButton.INDEX_ACTIONS][val]
                #END if
            #END if
            level -= 1
        #END while
        return motions + actions
    #END getRobotActions()
    def maxLevel(self):
        """Highest level ever registered via add()."""
        return self._maxLevel
    #END maxLevel()
#END EmpathyRandomButton.py
# this file exists for backward compatibility
__all__ = ['DAL', 'Field', 'DRIVERS']
from dal import DAL, Field, Table, Query, Set, Expression, Row, Rows, DRIVERS, BaseAdapter, SQLField, SQLTable, SQLXorable, SQLQuery, SQLSet, SQLRows, SQLStorage, SQLDB, GQLDB, SQLALL, SQLCustomType
| pouyana/teireader | webui/gluon/sql.py | Python | mit | 285 |
from merchant_server import app
import paypal_client
import unittest
import json
from mock import patch, Mock
class TestMerchantServer(unittest.TestCase):
    """Flask test-client tests for the /client_responses endpoint."""
    def setUp(self):
        """Before each test, set up a test client"""
        app.config['TESTING'] = True
        self.app = app.test_client()
        # A canned PayPal payment response as the mobile SDK would post it.
        self.response_dict = dict(
            create_time='2014-02-12T22:29:49Z',
            id='PAY-564191241M8701234KL57LXI',
            intent='sale',
            state='approved'
        )
        self.client_json = json.dumps(dict(
            response_type='payment',
            response=self.response_dict
        ))
    def test_empty_request(self):
        """Check that request without body raises 400"""
        rv = self.app.post('/client_responses')
        self.assertEqual(rv.status_code, 400)
        self.assertIn('Invalid mobile client response', rv.data)
    def test_invalid_response_type(self):
        """Check invalid response type is handled properly"""
        json_data = json.dumps(dict(response_type='test', response='test'))
        rv = self.app.post('/client_responses', data=json_data, content_type='application/json')
        self.assertEqual(rv.status_code, 400)
        self.assertIn('Invalid response type', rv.data)
    @patch('merchant_server.verify_payment')
    def test_verify_payment(self, mock):
        """verify correct response on successful paypal payment verification"""
        # verify_payment returns (success, error_message).
        mock.return_value = True, None
        rv = self.app.post('/client_responses', data=self.client_json, content_type='application/json')
        self.assertEqual(rv.status_code, 200)
        self.assertIn('verified', rv.data)
    @patch('merchant_server.verify_payment')
    def test_verify_payment_twice_fails(self, mock):
        """Trying to verify an already verified payment is a bad request"""
        mock.return_value = True, None
        rv = self.app.post('/client_responses', data=self.client_json, content_type='application/json')
        self.assertEqual(rv.status_code, 200)
        self.assertIn('verified', rv.data)
        # Second attempt: the server should report the duplicate with a 404.
        mock.return_value = False, 'Payment already been verified.'
        rv = self.app.post('/client_responses', data=self.client_json, content_type='application/json')
        self.assertEqual(rv.status_code, 404)
        self.assertIn('Payment already been verified', rv.data)
    @patch('merchant_server.add_consent')
    def test_send_future_payment_consent(self, mock):
        """Test consent is received properly on merchant_server"""
        mock.return_value = None
        response_dict = dict(
            code='EBYhRW3ncivudQn8UopLp4A28xIlqPDpAoqd7bi'
        )
        client_dict = dict(
            environment='live',
            paypal_sdk_version='2.0.1',
            platform='iOS',
            product_name='PayPal iOS SDK'
        )
        json_data= json.dumps(dict(
            response_type='authorization_code',
            response=response_dict,
            customer_id='[email protected]',
            client=client_dict
        ))
        rv = self.app.post('/client_responses', data=json_data, content_type='application/json')
        self.assertEqual(rv.status_code, 200)
        self.assertIn('Received consent', rv.data)
class TestPaypalClient(unittest.TestCase):
    """Unit tests for the paypal_client helper module."""
    def setUp(self):
        # Minimal PayPal transaction payload reused by the charge tests.
        self.transaction = {
            "amount": {
                "total": "1.00",
                "currency": "USD"
            },
            "description": "This is the payment transaction description."
        }
    def test_get_stored_refresh_token(self):
        """Test that the correct refresh token is getting fetched for the customer"""
        paypal_client.save_refresh_token('[email protected]', 'ref_token_sample')
        refresh_token = paypal_client.get_stored_refresh_token('[email protected]')
        self.assertEqual(refresh_token, 'ref_token_sample')
    def test_remove_consent(self):
        """Test removing consent deletes stored refresh token"""
        paypal_client.save_refresh_token('[email protected]', 'ref_token_sample')
        refresh_token = paypal_client.get_stored_refresh_token('[email protected]')
        self.assertEqual(refresh_token, 'ref_token_sample')
        paypal_client.remove_consent('[email protected]')
        refresh_token = paypal_client.get_stored_refresh_token('[email protected]')
        self.assertEqual(refresh_token, None)
    def test_charge_wallet_missing_consent(self):
        """Charging a new customer without consent will not work"""
        return_status, message = paypal_client.charge_wallet(self.transaction, '[email protected]', None, 'sale')
        self.assertEqual(return_status, False)
        self.assertIn("Customer has not granted consent", message)
    @patch('paypal_client.paypalrestsdk.Payment.create')
    @patch('paypal_client.get_stored_refresh_token')
    def test_charge_wallet_failure(self, mock_token, mock_create):
        """Test charge wallet fails with correct message.

        @patch decorators are applied bottom-up, so the decorator closest to
        the function supplies the first mock argument. The parameter names
        were previously swapped (`mock_create` received the refresh-token
        mock and vice versa), making the test body misleading.
        """
        mock_token.return_value = 'refresh_token'  # consent exists
        mock_create.return_value = False           # Payment.create fails
        return_status, message = paypal_client.charge_wallet(self.transaction, '[email protected]', 'correlation_id', 'sale')
        self.assertEqual(return_status, False)
        self.assertIn("Error while creating payment", message)
    @patch('paypal_client.paypalrestsdk.Payment.create')
    def test_charge_wallet_success(self, mock):
        """A stored refresh token plus a successful create yields success."""
        mock.return_value = True
        paypal_client.save_refresh_token('[email protected]', 'ref_token_sample')
        return_status, message = paypal_client.charge_wallet(self.transaction, '[email protected]', 'correlation_id', 'sale')
        self.assertEqual(return_status, True)
        self.assertIn("Charged customer [email protected] " + self.transaction["amount"]["total"], message)
# -*- coding: utf-8 -*-
import click
import os
import pandas as pd
from shutil import rmtree
from . tree_scrape import author_minded
from . query_pmc import pmc_data
# Name of the folder created inside the output dir for scraped data.
OUT_SUBFOLDER = 'contrib_data'
# NOTE(review): AUTHOR_DATA is not referenced anywhere in this chunk --
# presumably the filename for author output elsewhere; confirm before removing.
AUTHOR_DATA = 'author_data.json'
def verify_local_repo_location(repo):
    """Raise IOError when `repo` is not an existing directory."""
    if os.path.isdir(repo):
        return
    raise IOError('could not locate repository {}'.format(repo))
def build_out_path(repo_name, parent_path=None):
    """Return `<parent_path>/<repo_name>/<OUT_SUBFOLDER>`.

    `parent_path` defaults to the current working directory.
    """
    base = os.path.abspath(os.curdir) if parent_path is None else parent_path
    return os.path.join(base, repo_name, OUT_SUBFOLDER)
def make_output_folder(path_, overwrite):
    """Create `path_`, wiping an existing folder only when `overwrite` is true.

    Previously the `overwrite` argument was accepted but ignored and an
    existing folder was always clobbered; now an existing folder is left
    untouched when `overwrite` is falsy.

    Args:
        path_: directory to create.
        overwrite: whether an existing directory may be removed and recreated.
    """
    if not os.path.exists(path_):
        os.mkdir(path_)
    elif overwrite:
        rmtree(path_)
        os.mkdir(path_)
@click.command()
@click.option('--repo', prompt='git repository location', help='path to folder containing .git repository or url')
@click.option('--out_dir', default=None,
              help='parent dir for output data, default same as .git folder scraped')
@click.option('--clobber_output', default=True,
              help='should we overwrite existing data?, default True')
@click.option('--verbose/--no-verbose', default=False)
def main(repo, out_dir, clobber_output, verbose):
    """Scrape contributor history from a git repo and citation counts from
    PMC, writing both as JSON into an output folder.

    NOTE(review): the PMC query term 'SPSS' below is hard-coded rather than
    derived from the repo -- presumably a placeholder; confirm.
    """
    import logging
    from gitpandas import Repository
    if verbose:
        # 10 == logging.DEBUG
        logging.getLogger().setLevel(10)
    # An ssh-style URL (git@...) is cloned first; local paths are used as-is.
    if repo.find("git@") == 0:
        logging.info("Cloning repo %s" % repo)
        repository = Repository(working_dir=repo)
        repo = repository.git_dir
        logging.info("Repo located at %s" % repo)
    if out_dir is None:
        out_dir = os.path.join(os.getcwd(), OUT_SUBFOLDER)
    verify_local_repo_location(repo)
    # NOTE(review): repo_name is computed but never used below -- confirm.
    repo_name = os.path.basename(repo)
    make_output_folder(out_dir, overwrite=clobber_output)
    contributor_data = author_minded(repo)
    citation_data = pmc_data('SPSS')
    logging.info("output path: %s" % os.path.join(out_dir,'contributor_data.json'))
    contributor_data.to_json(os.path.join(out_dir,'contributor_data.json'), date_format='iso')
    citation_data['citations'].to_json(os.path.join(out_dir,'citation_data.json'))
if __name__ == '__main__':
    main()
| lbillingham/commit_opener | commit_opener/commit_opener.py | Python | gpl-3.0 | 2,151 |
# This file is part of HamsiManager.
#
# Copyright (c) 2010 - 2015 Murat Demir <[email protected]>
#
# Hamsi Manager is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Hamsi Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HamsiManager; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from Core.MyObjects import *
from Core import Universals as uni
import FileUtils as fu
from datetime import datetime
import time
# Dialog result codes returned by the ask*/show* helpers in this module.
Ok, Cancel, Yes, No, Continue = 1, 2, 3, 4, 5
# Timestamp of the last progress refresh, in ~60ms ticks derived from the
# current microsecond; used by showState to throttle UI updates.
# NOTE(review): under Python 3 this `/` yields a float (Python 2: int) --
# confirm which interpreter this module targets.
lastInfoTime = (datetime.now().microsecond / 60000)
def show(_title="Hamsi Manager", _detail="", _btnString=translate("Dialogs", "OK")):
    """Displays an information dialog and returns True.

    When `_detail` is empty, `_title` is used as the detail text and the
    title falls back to "Hamsi Manager". The KDE (PyKDE4) variant ignores
    `_btnString`; the plain Qt variant passes it through.
    """
    MApplication.processEvents()
    if _detail == "":
        _detail = _title
        _title = "Hamsi Manager"
    body = str("<b>" + str(_title) + " : </b><br>" + str(_detail))
    caption = str(str(_title) + "!..")
    if len(uni.MySettings) > 0 and isActivePyKDE4:
        MMessageBox.information(getActiveWindow(), body, caption)
    else:
        MMessageBox.information(getActiveWindow(), caption, body, _btnString)
    return True
def showError(_title="Hamsi Manager", _detail="", _btnString=translate("Dialogs", "OK")):
    """Displays an error dialog and returns True.

    Mirrors `show` but uses the error/critical dialog flavors: KDE's
    MMessageBox.error when PyKDE4 is active, otherwise Qt's critical box
    (which honors `_btnString`).
    """
    MApplication.processEvents()
    if _detail == "":
        _detail = _title
        _title = "Hamsi Manager"
    body = str("<b>" + str(_title) + " : </b><br>" + str(_detail))
    caption = str(str(_title) + "!..")
    if len(uni.MySettings) > 0 and isActivePyKDE4:
        MMessageBox.error(getActiveWindow(), body, caption)
    else:
        MMessageBox.critical(getActiveWindow(), caption, body, _btnString)
    return True
def ask(_title="Hamsi Manager", _detail="", _isShowCancel=False, _showAgainKeyName=""):
    """Shows a Yes/No (optionally Cancel) question dialog.

    Returns one of the module-level codes Yes / No / Cancel. When PyKDE4 is
    active, `_showAgainKeyName` (if non-empty) enables KDE's
    "don't ask again" persistence for that key.
    """
    MApplication.processEvents()
    if _detail == "":
        _detail = _title
        _title = "Hamsi Manager"
    if len(uni.MySettings) > 0 and isActivePyKDE4:
        # KDE path: the KMessageBox wrappers already return Yes/No/Cancel
        # codes compatible with this module's constants.
        if _isShowCancel:
            if _showAgainKeyName != "":
                return MMessageBox.messageBox(getActiveWindow(),
                                              MMessageBox.QuestionYesNoCancel,
                                              str("<b>" + str(_title) + " : </b><br>" + str(_detail)),
                                              str(_title),
                                              MStandardGuiItem.yes(), MStandardGuiItem.no(), MStandardGuiItem.cancel(),
                                              str(_showAgainKeyName),
                                              MMessageBox.AllowLink)
            else:
                return MMessageBox.questionYesNoCancel(getActiveWindow(),
                                                       str("<b>" + str(_title) + " : </b><br>" + str(_detail)),
                                                       str(_title),
                                                       MStandardGuiItem.yes(), MStandardGuiItem.no(),
                                                       MStandardGuiItem.cancel(), "",
                                                       MMessageBox.AllowLink)
        else:
            if _showAgainKeyName != "":
                return MMessageBox.messageBox(getActiveWindow(),
                                              MMessageBox.QuestionYesNo,
                                              str("<b>" + str(_title) + " : </b><br>" + str(_detail)),
                                              str(_title),
                                              MStandardGuiItem.yes(), MStandardGuiItem.no(), MStandardGuiItem.cancel(),
                                              str(_showAgainKeyName),
                                              MMessageBox.AllowLink)
            else:
                return MMessageBox.questionYesNo(getActiveWindow(),
                                                 str("<b>" + str(_title) + " : </b><br>" + str(_detail)),
                                                 str(_title),
                                                 MStandardGuiItem.yes(), MStandardGuiItem.no(), "",
                                                 MMessageBox.AllowLink)
    else:
        # Plain Qt path: exec_() returns Qt's StandardButton values, mapped
        # to this module's codes below.
        if _isShowCancel:
            try: mboxDialog = MMessageBox(getActiveWindow())
            except: mboxDialog = MMessageBox(None)
            mboxDialog.setWindowTitle(str(_title))
            mboxDialog.setText(str("<b>" + str(_title) + " : </b><br>" + str(_detail)))
            mboxDialog.setStandardButtons(MMessageBox.Yes | MMessageBox.No | MMessageBox.Cancel)
            pressedButtonNo = mboxDialog.exec_()
        else:
            try: mboxDialog = MMessageBox(getActiveWindow())
            except: mboxDialog = MMessageBox(None)
            mboxDialog.setWindowTitle(str(_title))
            mboxDialog.setText(str("<b>" + str(_title) + " : </b><br>" + str(_detail)))
            mboxDialog.setStandardButtons(MMessageBox.Yes | MMessageBox.No)
            pressedButtonNo = mboxDialog.exec_()
        # Qt StandardButton values: 16384 == QMessageBox.Yes,
        # 65536 == QMessageBox.No, 4194304 == QMessageBox.Cancel.
        if pressedButtonNo == 16384: return Yes
        elif pressedButtonNo == 65536: return No
        elif pressedButtonNo == 4194304: return Cancel
        else: return Cancel
def askSpecial(_title="Hamsi Manager", _detail="", _btnString=translate("Dialogs", "Yes"),
               _btnString1=translate("Dialogs", "No"), _btnString2=translate("Dialogs", "Cancel"), _btnString3=None):
    """Shows a dialog with up to four custom-labeled buttons.

    Returns the label string of the button that was clicked. Passing None
    for `_btnString2`/`_btnString3` omits that button. If the dialog is
    dismissed without a recognized click, the last available label
    (preferring _btnString3 over _btnString2 over _btnString1 over
    _btnString) is returned.
    """
    MApplication.processEvents()
    # Under PyKDE4 the KDE message box lacks fully custom buttons, so fall
    # back to the raw Qt QMessageBox in that configuration.
    MyMessageBox = MMessageBox
    if len(uni.MySettings) > 0 and isActivePyKDE4:
        MyMessageBox = QMessageBox
    try: mboxDialog = MyMessageBox(getActiveWindow())
    except: mboxDialog = MyMessageBox(None)
    mboxDialog.setWindowTitle(str(_title))
    mboxDialog.setText(str("<b>" + str(_title) + " : </b><br>" + str(_detail)))
    btn = btn1 = btn2 = btn3 = None
    btn = mboxDialog.addButton(_btnString, MyMessageBox.ActionRole)
    if _btnString2 is not None:
        btn2 = mboxDialog.addButton(_btnString2, MyMessageBox.ActionRole)
    btn1 = mboxDialog.addButton(_btnString1, MyMessageBox.ActionRole)
    if _btnString3 is not None:
        btn3 = mboxDialog.addButton(_btnString3, MyMessageBox.ActionRole)
    else:
        btn3 = None
    mboxDialog.exec_()
    if mboxDialog.clickedButton() == btn:
        return _btnString
    elif mboxDialog.clickedButton() == btn1:
        return _btnString1
    elif mboxDialog.clickedButton() == btn2:
        return _btnString2
    elif mboxDialog.clickedButton() == btn3:
        return _btnString3
    else:
        # Fallback when no click was registered: prefer the "last" button.
        if btn3 is not None:
            return _btnString3
        elif btn2 is not None:
            return _btnString2
        elif btn1 is not None:
            return _btnString1
        elif btn is not None:
            return _btnString
def showState(_title, _value=0, _maxValue=100, _isShowCancel=False, _connectToCancel=None, _isCheckLastShowTime=True):
    """Show or update the progress UI for a long-running action.

    Delegates to the main window's status bar when possible; otherwise a
    dock widget (minimum-window mode) or a modal dialog holding a progress
    bar is created on demand and torn down when _value reaches _maxValue.
    """
    global lastInfoTime
    # Throttle intermediate redraws: skip updates that land in the same
    # ~60 ms slot as the previous one (the final update is never skipped).
    if _isCheckLastShowTime and _value != _maxValue:
        if lastInfoTime == (datetime.now().microsecond / 60000):
            return None
        lastInfoTime = (datetime.now().microsecond / 60000)
    if uni.isCanBeShowOnMainWindow:
        return getMainWindow().StatusBar.showState(_title, _value, _maxValue, _isShowCancel, _connectToCancel)
    if getMainWindow().StateDialog is None:
        # First call for this action: build the progress container.
        getMainWindow().StateDialogStateBar = MProgressBar()
        HBoxs = []
        if uni.getBoolValue("isMinimumWindowMode") and uni.isCanBeShowOnMainWindow:
            # Minimum-window mode: show the bar in a top dock widget and
            # lock the main form while work is in progress.
            if getMainWindow().isLockedMainForm is False:
                getMainWindow().lockForm()
            getMainWindow().StateDialog = MDockWidget(translate("Dialogs", "Progress Bar"))
            getMainWindow().StateDialog.setObjectName("Progress Bar")
            pnlState2 = MWidget(getMainWindow().StateDialog)
            getMainWindow().StateDialogTitle = MLabel()
            HBoxs.append(MHBoxLayout(pnlState2))
            HBoxs[0].addWidget(getMainWindow().StateDialogTitle)
            HBoxs[0].addWidget(getMainWindow().StateDialogStateBar)
            getMainWindow().StateDialog.setWidget(pnlState2)
            getMainWindow().StateDialog.setAllowedAreas(Mt.AllDockWidgetAreas)
            getMainWindow().StateDialog.setFeatures(MDockWidget.AllDockWidgetFeatures)
            getMainWindow().addDockWidget(Mt.TopDockWidgetArea, getMainWindow().StateDialog)
            getMainWindow().StateDialog.setMaximumHeight(60)
        else:
            # Normal mode: modal dialog with the progress bar.
            getMainWindow().StateDialog = MDialog(getMainWindow())
            if len(uni.MySettings) > 0 and isActivePyKDE4:
                getMainWindow().StateDialog.setButtons(MDialog.NoDefault)
            getMainWindow().StateDialog.setModal(True)
            getMainWindow().StateDialog.setMinimumWidth(500)
            pnlMain = MWidget(getMainWindow().StateDialog)
            HBoxs.append(MHBoxLayout(pnlMain))
            HBoxs[0].addWidget(getMainWindow().StateDialogStateBar)
            if len(uni.MySettings) > 0 and isActivePyKDE4:
                getMainWindow().StateDialog.setMainWidget(pnlMain)
            else:
                getMainWindow().StateDialog.setLayout(HBoxs[0])
            getMainWindow().StateDialog.show()
        if _isShowCancel:
            pbtnCancel = MPushButton(translate("Dialogs", "Cancel"), getMainWindow().StateDialog)
            # Default cancel action stops the running worker thread.
            if _connectToCancel is None:
                MObject.connect(pbtnCancel, SIGNAL("clicked()"), uni.cancelThreadAction)
            else:
                MObject.connect(pbtnCancel, SIGNAL("clicked()"), _connectToCancel)
            HBoxs[0].addWidget(pbtnCancel)
    getMainWindow().StateDialogStateBar.setRange(0, _maxValue)
    getMainWindow().StateDialogStateBar.setValue(_value)
    if uni.getBoolValue("isMinimumWindowMode") and uni.isCanBeShowOnMainWindow:
        getMainWindow().StateDialog.setVisible(True)
        getMainWindow().StateDialogTitle.setText(_title + " ( " + str(_value) + " / " + str(_maxValue) + " )")
    else:
        getMainWindow().StateDialog.open()
        getMainWindow().StateDialog.setModal(True)
        getMainWindow().StateDialog.setWindowTitle(_title + " ( " + str(_value) + " / " + str(_maxValue) + " )")
    if _value == _maxValue:
        # Work finished: tear the progress UI down again.
        if uni.getBoolValue("isMinimumWindowMode") and uni.isCanBeShowOnMainWindow:
            if getMainWindow().isLockedMainForm:
                getMainWindow().unlockForm()
            getMainWindow().StateDialog.setVisible(False)
            getMainWindow().removeDockWidget(getMainWindow().StateDialog)
        else:
            getMainWindow().StateDialog.setModal(False)
            getMainWindow().StateDialog.close()
        getMainWindow().StateDialog = None
    MApplication.processEvents()
def sleep(_title, _value=0, _isShowCancel=False):
    """Wait for _value seconds, refreshing the progress display four
    times per second (optionally with a cancel button)."""
    totalSteps = _value * 4
    currentStep = 0
    # "<=" so the bar is shown at least once and ends completely full.
    while currentStep <= totalSteps:
        showState(_title, currentStep, totalSteps, _isShowCancel)
        currentStep += 1
        time.sleep(0.25)
def toast(_title="Hamsi Manager", _detail="", _timeout=2):
    """Show a fading toast notification, or defer it until the main
    window is able to display it."""
    if uni.isCanBeShowOnMainWindow:
        from Core import MyThread
        tw = MyToaster(_title, _detail, _timeout)
        # Worker thread sleeps for _timeout seconds, then closes the toast.
        myProcs = MyThread.MyThread(time.sleep, tw.close, args=[_timeout])
        myProcs.start()
    else:
        # Main window not available yet: queue the call for later replay
        # via uni.runAfter.
        command = {"action": toast, "args": [_title, _detail, _timeout], "kwargs": {}}
        uni.runAfter.append(command)
def getItem(_title="Hamsi Cover", _detail="", _itemList=[""], _currentItem=0):
    """Show a single-choice input dialog and return the selection as a
    string, or None when the user cancels."""
    # When only a title is supplied, treat it as the prompt text.
    if _detail == "":
        _detail, _title = _title, "Hamsi Cover"
    caption = str(str(_title) + "!..")
    message = str(str(_detail))
    entries = [str(str(item)) for item in _itemList]
    if len(uni.MySettings) > 0 and isActivePyKDE4:
        chosen, accepted = MInputDialog.getItem(caption, message,
                                                entries, _currentItem, False)
    else:
        chosen, accepted = MInputDialog.getItem(getActiveWindow(), caption, message,
                                                entries, _currentItem, False)
    if accepted is False:
        return None
    return str(chosen)
def getText(_title="Hamsi Cover", _detail="", _default=""):
    """Show a single-line text input dialog and return the entered text
    as a string, or None when the user cancels."""
    # When only a title is supplied, treat it as the prompt text.
    if _detail == "":
        _detail, _title = _title, "Hamsi Cover"
    caption = str(str(_title) + "!..")
    message = str(str(_detail))
    if len(uni.MySettings) > 0 and isActivePyKDE4:
        entered, accepted = MInputDialog.getText(caption, message, str(_default))
    else:
        entered, accepted = MInputDialog.getText(getActiveWindow(), caption, message,
                                                 MLineEdit.Normal, str(_default))
    if accepted is False:
        return None
    return str(entered)
def getSaveFileName(_caption, _directory, _filter=None, _isUseLastPathKeyType=1, _lastPathKey=None):
    """Show a "save file" dialog and return the chosen path or None.

    When _filter is None it is derived from the extension of _directory
    (if that points to an existing file); otherwise "*.*" is used.  The
    last-used directory is remembered per path key via the uni module.
    """
    if _filter is None:
        if fu.isFile(_directory):
            fileExt = fu.getFileExtension(_directory)
            if fileExt != "":
                _filter = "*.%s (*.%s)" % (fileExt, fileExt)
            else:
                _filter = "*.* (*.*)"
        else:
            _filter = "*.* (*.*)"
    pathKey = uni.getLastPathKey(_caption, _directory, _filter, _isUseLastPathKeyType, _lastPathKey)
    if pathKey is not None: _directory = uni.getLastPathByEvent(pathKey, _directory)
    filePath = QFileDialog.getSaveFileName(getActiveWindow(), str(_caption),
                                           str(_directory), str(_filter))
    if filePath == "":
        return None
    # Remember the picked location for the next dialog with this key.
    if pathKey is not None: uni.setLastPathByEvent(pathKey, str(filePath))
    return str(filePath)
def getOpenFileName(_caption, _directory, _filter, _isUseLastPathKeyType=1, _lastPathKey=None):
    """Show an "open file" dialog and return the chosen path or None.
    The last-used directory is remembered per path key via uni."""
    pathKey = uni.getLastPathKey(_caption, _directory, _filter, _isUseLastPathKeyType, _lastPathKey)
    if pathKey is not None: _directory = uni.getLastPathByEvent(pathKey, _directory)
    filePath = QFileDialog.getOpenFileName(getActiveWindow(), str(_caption),
                                           str(_directory), str(_filter))
    if filePath == "":
        return None
    if pathKey is not None: uni.setLastPathByEvent(pathKey, str(filePath))
    return str(filePath)
def getOpenFileNames(_caption, _directory, _filter, _isUseLastPathKeyType=1, _lastPathKey=None):
    """Show a multi-select "open files" dialog and return the chosen
    paths as a list, or None when the user cancels."""
    pathKey = uni.getLastPathKey(_caption, _directory, _filter, _isUseLastPathKeyType, _lastPathKey)
    if pathKey is not None: _directory = uni.getLastPathByEvent(pathKey, _directory)
    filePaths = QFileDialog.getOpenFileNames(getActiveWindow(), str(_caption),
                                             str(_directory), str(_filter))
    if not filePaths:
        return None
    # Remember the last of the selected files for the next dialog.
    if pathKey is not None: uni.setLastPathByEvent(pathKey, str(filePaths[-1]))
    return list(filePaths)
def getExistingDirectory(_caption, _directory, _isUseLastPathKeyType=1, _lastPathKey=None):
    """Show a directory chooser and return the selected path or None.
    The last-used directory is remembered per path key via uni."""
    pathKey = uni.getLastPathKey(_caption, _directory, "", _isUseLastPathKeyType, _lastPathKey)
    if pathKey is not None: _directory = uni.getLastPathByEvent(pathKey, _directory)
    filePath = QFileDialog.getExistingDirectory(getActiveWindow(), str(_caption),
                                                str(_directory))
    if filePath == "":
        return None
    if pathKey is not None: uni.setLastPathByEvent(pathKey, str(filePath))
    return str(filePath)
class MyStateObject(MObject):
    """QObject that forwards a custom "setState" signal to showState().

    NOTE(review): presumably used so worker threads can update the
    progress UI through Qt's signal mechanism -- confirm with callers.
    """
    def __init__(self, _title="", _isShowCancel=False, _connectToCancel=None, _isCheckLastShowTime=True):
        MObject.__init__(self, getMainWindow())
        self.title = _title
        self.isShowCancel = _isShowCancel
        self.connectToCancel = _connectToCancel
        self.isCheckLastShowTime = _isCheckLastShowTime
        # Emitting SIGNAL("setState") on this object invokes setState().
        self.connect(self, SIGNAL("setState"), self.setState)
    def setState(self, _value=0, _maxValue=100):
        showState(self.title, _value, _maxValue, self.isShowCancel, self.connectToCancel, self.isCheckLastShowTime)
class MyToaster(MDialog):
    """Frameless toast dialog centred over the main window whose content
    fades from opaque to transparent over *_timeout* seconds."""
    def __init__(self, _title, _detail, _timeout=3):
        MDialog.__init__(self, getMainWindow())
        if isActivePyKDE4:
            self.setButtons(MDialog.NoDefault)
        self.setWindowFlags(Mt.FramelessWindowHint)
        pnlMain = MWidget(self)
        self.vblMain = MVBoxLayout(pnlMain)
        self.lblTitle = MLabel()
        self.lblTitle.setText(_title)
        self.lblTitle.setAlignment(Mt.AlignHCenter | Mt.AlignVCenter)
        self.lblTitle.setWordWrap(True)
        self.lblDetails = MLabel()
        self.lblDetails.setText(_detail)
        self.lblDetails.setAlignment(Mt.AlignHCenter | Mt.AlignVCenter)
        self.lblDetails.setWordWrap(True)
        fontTitle = MFont()
        fontTitle.setPointSize(18)
        self.lblTitle.setFont(fontTitle)
        fontDetails = MFont()
        fontDetails.setPointSize(16)
        self.lblDetails.setFont(fontDetails)
        # Stretches above and below keep the two labels vertically centred.
        self.vblMain.addStretch(5)
        self.vblMain.addWidget(self.lblTitle)
        self.vblMain.addWidget(self.lblDetails)
        self.vblMain.addStretch(5)
        self.setMaximumSize(600, 300)
        self.setMinimumSize(600, 300)
        # Centre the fixed 600x300 toast over the main window.
        rect = MRect()
        rect.setX((getMainWindow().width() / 2) - (self.width() / 2))
        rect.setY((getMainWindow().height() / 2) - (self.height() / 2))
        rect.setWidth(self.geometry().width())
        rect.setHeight(self.geometry().height())
        self.setGeometry(rect)
        if isActivePyKDE4:
            self.setMainWidget(pnlMain)
        else:
            self.setLayout(self.vblMain)
        self.show()
        # Fade-out animation; the animation object deletes itself when done.
        opacityEffect = MGraphicsOpacityEffect(self)
        opacityEffect.setOpacity(1)
        pnlMain.setGraphicsEffect(opacityEffect)
        self.anim = MPropertyAnimation(opacityEffect, "opacity")
        self.anim.setDuration(_timeout*1000)
        self.anim.setStartValue(1.0)
        self.anim.setEndValue(0.0)
        self.anim.setEasingCurve(MEasingCurve.OutQuad)
        self.anim.start(MAbstractAnimation.DeleteWhenStopped)
    def close(self, _data=None):
        # NOTE(review): overrides QDialog.close() with a different
        # signature; _data exists so it can be used as a thread callback.
        self.hide()
        self.done(0)
| supermurat/hamsi-manager | Core/Dialogs.py | Python | gpl-3.0 | 18,494 |
# ***************************************************************************
# * Copyright (c) 2017 Bernd Hahnebach <[email protected]> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
__title__ = "FreeCAD FEM solver object CalculiX"
__author__ = "Bernd Hahnebach"
__url__ = "https://www.freecadweb.org"
## @package SolverCalculix
# \ingroup FEM
import glob
import os
import FreeCAD
from . import tasks
from .. import run
from .. import solverbase
from femtools import femutils
if FreeCAD.GuiUp:
import FemGui
ANALYSIS_TYPES = ["static", "frequency", "thermomech", "check", "buckling"]
def create(doc, name="SolverCalculiX"):
    """Create a CalculiX solver object in *doc* and return it."""
    solver_object = femutils.createObject(doc, name, Proxy, ViewProxy)
    return solver_object
class Proxy(solverbase.Proxy):
    """The Fem::FemSolver's Proxy python type, adds solver specific properties
    """
    Type = "Fem::SolverCalculix"
    def __init__(self, obj):
        super(Proxy, self).__init__(obj)
        obj.Proxy = self
        # Defaults for all solver properties come from the Ccx prefs group.
        ccx_prefs = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Fem/Ccx")
        add_attributes(obj, ccx_prefs)
    def onDocumentRestored(self, obj):
        ccx_prefs = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Fem/Ccx")
        # since it is needed for the ccxtools solver too
        # the method is implemented outside of the class
        # thus we need to pass the prefs
        on_restore_of_document(obj, ccx_prefs)
    def createMachine(self, obj, directory, testmode=False):
        # Wire up the solver pipeline: check -> prepare -> solve -> results.
        return run.Machine(
            solver=obj, directory=directory,
            check=tasks.Check(),
            prepare=tasks.Prepare(),
            solve=tasks.Solve(),
            results=tasks.Results(),
            testmode=testmode)
    def editSupported(self):
        # The generated input deck can be opened for manual editing.
        return True
    def edit(self, directory):
        pattern = os.path.join(directory, "*.inp")
        FreeCAD.Console.PrintMessage("{}\n".format(pattern))
        # NOTE(review): raises IndexError when no .inp file exists yet.
        f = glob.glob(pattern)[0]
        FemGui.open(f)
    def execute(self, obj):
        # Nothing to recompute on the solver object itself.
        return
class ViewProxy(solverbase.ViewProxy):
    """View provider proxy; the solverbase implementation is sufficient."""
    pass
# ************************************************************************************************
# helper
# these methods are outside of the class to be able
# to use them from framework solver and ccxtools solver
def on_restore_of_document(obj, ccx_prefs):
    """Refresh *obj* after a document load (shared with the ccxtools solver).

    Re-applies the current ANALYSIS_TYPES enum (files saved by older
    FreeCAD versions may carry a shorter list) and adds any properties
    introduced after the file was written.
    """
    # ANALYSIS_TYPES
    # They have been extended. If file was saved with a old FC version
    # not all enum types are available, because they are saved in the FC file
    # thus refresh the list of known ANALYSIS_TYPES
    # print("onRestoredFromSuperClass")
    # print(obj.AnalysisType)
    # print(obj.getEnumerationsOfProperty("AnalysisType"))
    temp_analysis_type = obj.AnalysisType
    # self.add_properties(obj)
    obj.AnalysisType = ANALYSIS_TYPES
    if temp_analysis_type in ANALYSIS_TYPES:
        obj.AnalysisType = temp_analysis_type
    else:
        # Saved type no longer exists: fall back to the preference default.
        FreeCAD.Console.PrintWarning(
            "Analysis type {} not found. Standard is used.\n"
            .format(temp_analysis_type)
        )
        analysis_type = ccx_prefs.GetInt("AnalysisType", 0)
        obj.AnalysisType = ANALYSIS_TYPES[analysis_type]
    # add missing properties
    # for example BucklingFactors will be added
    # for all files created before buckle analysis was introduced
    add_attributes(obj, ccx_prefs)
def add_attributes(obj, ccx_prefs):
    """Add any missing CalculiX solver properties to *obj*.

    Every property is guarded by hasattr() so the function is safe to run
    both on newly created solver objects and on objects restored from
    documents that predate some of the properties.  Defaults come from
    the Ccx preference group *ccx_prefs*.
    """
    if not hasattr(obj, "AnalysisType"):
        obj.addProperty(
            "App::PropertyEnumeration",
            "AnalysisType",
            "Fem",
            "Type of the analysis"
        )
        obj.AnalysisType = ANALYSIS_TYPES
        analysis_type = ccx_prefs.GetInt("AnalysisType", 0)
        obj.AnalysisType = ANALYSIS_TYPES[analysis_type]
    if not hasattr(obj, "GeometricalNonlinearity"):
        choices_geom_nonlinear = ["linear", "nonlinear"]
        obj.addProperty(
            "App::PropertyEnumeration",
            "GeometricalNonlinearity",
            "Fem",
            "Set geometrical nonlinearity"
        )
        obj.GeometricalNonlinearity = choices_geom_nonlinear
        nonlinear_geom = ccx_prefs.GetBool("NonlinearGeometry", False)
        if nonlinear_geom is True:
            obj.GeometricalNonlinearity = choices_geom_nonlinear[1] # nonlinear
        else:
            obj.GeometricalNonlinearity = choices_geom_nonlinear[0] # linear
    if not hasattr(obj, "MaterialNonlinearity"):
        choices_material_nonlinear = ["linear", "nonlinear"]
        obj.addProperty(
            "App::PropertyEnumeration",
            "MaterialNonlinearity",
            "Fem",
            "Set material nonlinearity (needs geometrical nonlinearity)"
        )
        obj.MaterialNonlinearity = choices_material_nonlinear
        obj.MaterialNonlinearity = choices_material_nonlinear[0]
    if not hasattr(obj, "EigenmodesCount"):
        obj.addProperty(
            "App::PropertyIntegerConstraint",
            "EigenmodesCount",
            "Fem",
            "Number of modes for frequency calculations"
        )
        noem = ccx_prefs.GetInt("EigenmodesCount", 10)
        # Constraint tuple: (value, minimum, maximum, step).
        obj.EigenmodesCount = (noem, 1, 100, 1)
    if not hasattr(obj, "EigenmodeLowLimit"):
        obj.addProperty(
            "App::PropertyFloatConstraint",
            "EigenmodeLowLimit",
            "Fem",
            "Low frequency limit for eigenmode calculations"
        )
        ell = ccx_prefs.GetFloat("EigenmodeLowLimit", 0.0)
        obj.EigenmodeLowLimit = (ell, 0.0, 1000000.0, 10000.0)
    if not hasattr(obj, "EigenmodeHighLimit"):
        obj.addProperty(
            "App::PropertyFloatConstraint",
            "EigenmodeHighLimit",
            "Fem",
            "High frequency limit for eigenmode calculations"
        )
        ehl = ccx_prefs.GetFloat("EigenmodeHighLimit", 1000000.0)
        obj.EigenmodeHighLimit = (ehl, 0.0, 1000000.0, 10000.0)
    if not hasattr(obj, "IterationsThermoMechMaximum"):
        help_string_IterationsThermoMechMaximum = (
            "Maximum Number of thermo mechanical iterations "
            "in each time step before stopping jobs"
        )
        obj.addProperty(
            "App::PropertyIntegerConstraint",
            "IterationsThermoMechMaximum",
            "Fem",
            help_string_IterationsThermoMechMaximum
        )
        niter = ccx_prefs.GetInt("AnalysisMaxIterations", 200)
        obj.IterationsThermoMechMaximum = niter
    if not hasattr(obj, "BucklingFactors"):
        obj.addProperty(
            "App::PropertyIntegerConstraint",
            "BucklingFactors",
            "Fem",
            "Calculates the lowest buckling modes to the corresponding buckling factors"
        )
        bckl = ccx_prefs.GetInt("BucklingFactors", 1)
        obj.BucklingFactors = bckl
    if not hasattr(obj, "TimeInitialStep"):
        obj.addProperty(
            "App::PropertyFloatConstraint",
            "TimeInitialStep",
            "Fem",
            "Initial time steps"
        )
        ini = ccx_prefs.GetFloat("AnalysisTimeInitialStep", 1.0)
        obj.TimeInitialStep = ini
    if not hasattr(obj, "TimeEnd"):
        obj.addProperty(
            "App::PropertyFloatConstraint",
            "TimeEnd",
            "Fem",
            "End time analysis"
        )
        eni = ccx_prefs.GetFloat("AnalysisTime", 1.0)
        obj.TimeEnd = eni
    if not hasattr(obj, "ThermoMechSteadyState"):
        obj.addProperty(
            "App::PropertyBool",
            "ThermoMechSteadyState",
            "Fem",
            "Choose between steady state thermo mech or transient thermo mech analysis"
        )
        sted = ccx_prefs.GetBool("StaticAnalysis", True)
        obj.ThermoMechSteadyState = sted
    if not hasattr(obj, "IterationsControlParameterTimeUse"):
        obj.addProperty(
            "App::PropertyBool",
            "IterationsControlParameterTimeUse",
            "Fem",
            "Use the user defined time incrementation control parameter"
        )
        # NOTE(review): GetInt with a bool default assigned to a
        # PropertyBool -- this looks like it should be GetBool; confirm
        # against the preference's declared type before changing.
        use_non_ccx_iterations_param = ccx_prefs.GetInt("UseNonCcxIterationParam", False)
        obj.IterationsControlParameterTimeUse = use_non_ccx_iterations_param
    if not hasattr(obj, "SplitInputWriter"):
        obj.addProperty(
            "App::PropertyBool",
            "SplitInputWriter",
            "Fem",
            "Split writing of ccx input file"
        )
        split = ccx_prefs.GetBool("SplitInputWriter", False)
        obj.SplitInputWriter = split
    if not hasattr(obj, "IterationsControlParameterIter"):
        # Defaults deviating from ccx are noted inline below.
        control_parameter_iterations = (
            "{I_0},{I_R},{I_P},{I_C},{I_L},{I_G},{I_S},{I_A},{I_J},{I_T}".format(
                I_0=4,
                I_R=8,
                I_P=9,
                I_C=200,  # ccx default = 16
                I_L=10,
                I_G=400,  # ccx default = 4
                I_S="",
                I_A=200,  # ccx default = 5
                I_J="",
                I_T="",
            )
        )
        obj.addProperty(
            "App::PropertyString",
            "IterationsControlParameterIter",
            "Fem",
            "User defined time incrementation iterations control parameter"
        )
        obj.IterationsControlParameterIter = control_parameter_iterations
    if not hasattr(obj, "IterationsControlParameterCutb"):
        control_parameter_cutback = (
            "{D_f},{D_C},{D_B},{D_A},{D_S},{D_H},{D_D},{W_G}".format(
                D_f=0.25,
                D_C=0.5,
                D_B=0.75,
                D_A=0.85,
                D_S="",
                D_H="",
                D_D=1.5,
                W_G="",
            )
        )
        obj.addProperty(
            "App::PropertyString",
            "IterationsControlParameterCutb",
            "Fem",
            "User defined time incrementation cutbacks control parameter"
        )
        obj.IterationsControlParameterCutb = control_parameter_cutback
    if not hasattr(obj, "IterationsUserDefinedIncrementations"):
        stringIterationsUserDefinedIncrementations = (
            "Set to True to switch off the ccx automatic incrementation completely "
            "(ccx parameter DIRECT). Use with care. Analysis may not converge!"
        )
        obj.addProperty(
            "App::PropertyBool",
            "IterationsUserDefinedIncrementations",
            "Fem",
            stringIterationsUserDefinedIncrementations
        )
        obj.IterationsUserDefinedIncrementations = False
    if not hasattr(obj, "IterationsUserDefinedTimeStepLength"):
        help_string_IterationsUserDefinedTimeStepLength = (
            "Set to True to use the user defined time steps. "
            "The time steps are set with TimeInitialStep and TimeEnd"
        )
        obj.addProperty(
            "App::PropertyBool",
            "IterationsUserDefinedTimeStepLength",
            "Fem",
            help_string_IterationsUserDefinedTimeStepLength
        )
        obj.IterationsUserDefinedTimeStepLength = False
    if not hasattr(obj, "MatrixSolverType"):
        known_ccx_solver_types = [
            "default",
            "spooles",
            "iterativescaling",
            "iterativecholesky"
        ]
        obj.addProperty(
            "App::PropertyEnumeration",
            "MatrixSolverType",
            "Fem",
            "Type of solver to use"
        )
        obj.MatrixSolverType = known_ccx_solver_types
        solver_type = ccx_prefs.GetInt("Solver", 0)
        obj.MatrixSolverType = known_ccx_solver_types[solver_type]
    if not hasattr(obj, "BeamShellResultOutput3D"):
        obj.addProperty(
            "App::PropertyBool",
            "BeamShellResultOutput3D",
            "Fem",
            "Output 3D results for 1D and 2D analysis "
        )
        dimout = ccx_prefs.GetBool("BeamShellOutput", False)
        obj.BeamShellResultOutput3D = dimout
"""
Should there be some equation object for Calculix too?
Necessarily yes! The properties GeometricalNonlinearity,
MaterialNonlinearity, ThermoMechSteadyState might be moved
to the appropriate equation.
Furthermore the material Category should not be used in writer.
    See common material object for more information. The equation
    should be used instead to get this information needed in writer.
"""
| sanguinariojoe/FreeCAD | src/Mod/Fem/femsolver/calculix/solver.py | Python | lgpl-2.1 | 13,791 |
"""
test_backend.py
Test case for keyring basic function
created by Kang Zhang 2009-07-14
"""
import random
import unittest
import string
import os
import sys
import commands
import keyring.backend
from keyring.backend import PasswordSetError
ALPHABET = string.ascii_letters + string.digits
DIFFICULT_CHARS = string.whitespace + string.punctuation

def random_string(k, source=ALPHABET):
    """Generate a random string of length *k* drawn from *source*."""
    # ''.join over a generator avoids the quadratic cost of repeated
    # string concatenation in a loop.
    return ''.join(random.choice(source) for _ in range(k))
def backup(file):
    """Backup the file as file.bak
    """
    # Brace expansion ("mv f{,.bak}") is a bash extension and silently
    # fails under plain /bin/sh, which commands.getoutput uses; spell
    # out both paths so the move works on any POSIX shell.
    commands.getoutput( "mv %s %s.bak" % (file, file) )
def restore(file):
    """Restore the file from file.bak
    """
    # Avoid bash-only brace expansion ("mv f{.bak,}"); see backup().
    commands.getoutput( "mv %s.bak %s" % (file, file) )
class BackendBasicTestCase(unittest.TestCase):
    """Base tests for a keyring backend's basic functions:
    password_set and password_get.
    """
    # Not collected directly; concrete subclasses flip this to True.
    __test__ = False
    def init_keyring(self):
        # Overridden by subclasses to return the backend under test.
        return None
    def setUp(self):
        self.keyring = self.init_keyring()
    def check_set_get(self, service, username, password):
        keyring = self.keyring
        if self.supported() == -1: # skip the unsupported keyring
            return
        # a password that was never stored reads back as None
        self.assertEqual(keyring.get_password(service, username), None)
        # common usage
        keyring.set_password(service, username, password)
        self.assertEqual(keyring.get_password(service, username), password)
        # for the empty password
        keyring.set_password(service, username, "")
        self.assertEqual(keyring.get_password(service, username), "")
    def test_password_set_get(self):
        password = random_string(20)
        username = random_string(20)
        service = random_string(20)
        self.check_set_get(service, username, password)
    def test_difficult_chars(self):
        # Exercise whitespace and punctuation in every field.
        password = random_string(20, DIFFICULT_CHARS)
        username = random_string(20, DIFFICULT_CHARS)
        service = random_string(20, DIFFICULT_CHARS)
        self.check_set_get(service, username, password)
    def supported(self):
        """Return the correct value for supported.
        """
        return -1
    def test_supported(self):
        """Test the keyring's supported value.
        """
        self.assertEqual(self.keyring.supported(), self.supported())
class OSXKeychainTestCase(BackendBasicTestCase):
    """Run the basic backend tests against the OS X Keychain."""
    __test__ = True
    def init_keyring(self):
        print >> sys.stderr, "Testing OSXKeychain, following password prompts are for this keyring"
        return keyring.backend.OSXKeychain()
    def supported(self):
        # Only available on Mac OS X.
        if sys.platform in ('mac','darwin'):
            return 1
        return -1
class GnomeKeyringTestCase(BackendBasicTestCase):
    """Run the basic backend tests against GNOME Keyring."""
    __test__ = True
    def init_keyring(self):
        print >> sys.stderr, "Testing GnomeKeyring, following password prompts are for this keyring"
        return keyring.backend.GnomeKeyring()
    def supported(self):
        # Ask the backend itself; availability depends on the session.
        return self.keyring.supported()
class KDEKWalletTestCase(BackendBasicTestCase):
    """Run the basic backend tests against KDE KWallet."""
    __test__ = True
    def init_keyring(self):
        print >> sys.stderr, "Testing KDEKWallet, following password prompts are for this keyring"
        return keyring.backend.KDEKWallet()
    def supported(self):
        # Ask the backend itself; availability depends on the session.
        return self.keyring.supported()
class UnOpenableKWallet(object):
    """Stand-in for the KWallet module whose wallet can never be opened.

    Used to exercise the KDE wallet fall-back path: every call reports
    that no wallet is available.
    """
    Synchronous = None

    def openWallet(self, *_ignored):
        # Mimic a user denying access: no wallet object is returned.
        return None

    def NetworkWallet(self):
        # No network wallet exists either.
        return None
class FauxQtGui(object):
    """Minimal stand-in for the QtGui module used when testing open_kwallet."""

    class QApplication(object):
        """No-op replacement for QtGui.QApplication."""

        def __init__(self, *unused_args):
            # Accept and ignore any constructor arguments.
            pass

        def exit(self):
            # Nothing to tear down in the fake application.
            pass
class KDEWalletCanceledTestCase(unittest.TestCase):
    """Ensure open_kwallet degrades gracefully when the user cancels."""
    def test_user_canceled(self):
        # If the user cancels either the "enter your password to unlock the
        # keyring" dialog or clicks "deny" on the "can this application access
        # the wallet" dialog then openWallet() will return None. The
        # open_wallet() function should handle that eventuality by returning
        # None to signify that the KWallet backend is not available.
        self.assertEqual(
            keyring.backend.open_kwallet(UnOpenableKWallet(), FauxQtGui()),
            None)
class FileKeyringTestCase(BackendBasicTestCase):
    """Shared fixture for file-based keyrings: the real keyring file in
    the user's home directory is backed up around each test."""
    __test__ = False
    def setUp(self):
        """Backup the file before the test
        """
        self.keyring = self.init_keyring()
        self.file_path = os.path.join(os.path.expanduser("~"),
                                      self.keyring.filename())
        backup(self.file_path)
    def tearDown(self):
        """Restore the keyring file.
        """
        restore(self.file_path)
    def test_encrypt_decrypt(self):
        if self.supported() == -1: # skip the unsupported platform
            return
        # Round-trip an arbitrary password through the backend's cipher.
        password = random_string(20)
        encyrpted = self.keyring.encrypt(password)
        self.assertEqual(password, self.keyring.decrypt(encyrpted))
class UncryptedFileKeyringTestCase(FileKeyringTestCase):
    """Run the file-keyring tests against the plain-text file backend."""
    __test__ = True
    def init_keyring(self):
        print >> sys.stderr, "Testing UnecryptedFile, following password prompts are for this keyring"
        return keyring.backend.UncryptedFileKeyring()
    def supported(self):
        # Always usable, but ranked lowest (stores passwords unencrypted).
        return 0
class CryptedFileKeyringTestCase(FileKeyringTestCase):
    """Run the file-keyring tests against the encrypted file backend."""
    __test__ = True
    def init_keyring(self):
        print >> sys.stderr, "Testing CryptedFile, following password prompts are for this keyring"
        return keyring.backend.CryptedFileKeyring()
    def supported(self):
        # Requires PyCrypto for AES support.
        try:
            from Crypto.Cipher import AES
            return 0
        except ImportError:
            pass
        return -1
class Win32CryptoKeyringTestCase(FileKeyringTestCase):
    """Run the file-keyring tests against the Win32 crypto backend."""
    __test__ = True
    def init_keyring(self):
        print >> sys.stderr, "Testing Win32, following password prompts are for this keyring"
        return keyring.backend.Win32CryptoKeyring()
    def supported(self):
        try:
            import win32_crypto
            # NOTE(review): getwindowsversion()[-2] appears to be the
            # platform field (2 == VER_PLATFORM_WIN32_NT) -- confirm.
            if sys.platform in ['win32'] and sys.getwindowsversion()[-2] == 2:
                return 1
        except ImportError:
            pass
        return -1
def test_suite():
    """Collect all backend test cases into a single suite."""
    case_classes = [
        OSXKeychainTestCase,
        GnomeKeyringTestCase,
        KDEKWalletTestCase,
        UncryptedFileKeyringTestCase,
        CryptedFileKeyringTestCase,
        Win32CryptoKeyringTestCase,
    ]
    suite = unittest.TestSuite()
    for case_class in case_classes:
        suite.addTest(unittest.makeSuite(case_class))
    return suite
if __name__ == '__main__':
    unittest.main(defaultTest="test_suite")
| wadobo/GECO | src/gecoc/gtk-geco/keyring/tests/test_backend.py | Python | gpl-3.0 | 6,879 |
import os,sys
sys.path.append("../lib")
from livdatcsvlib import *
import json,requests,urllib
from geopy import geocoders
def geocode_field_from_CSV(csv,fieldname):
    """Geocode the distinct values of *fieldname* across the rows of *csv*.

    Returns a new CSVFile with columns SEARCHKEY/LOCATION/LAT/LONG, one
    row per distinct value; failed lookups leave the result columns empty.
    """
    g = geocoders.GoogleV3()
    #print g.api_key
    # NOTE(review): leftover debug output below -- candidate for removal.
    print "Hello"
    finalcsv=CSVFile()
    finalcsv.colnames=["SEARCHKEY","LOCATION","LAT","LONG"]
    searchkeylist=[]
    for line in csv.matrix:
        searchkeylist.append(line[fieldname])
    # Deduplicate so every distinct value is geocoded exactly once.
    searchkeys=set(searchkeylist)
    for searchkey in searchkeys:
        dictionary={}
        dictionary["SEARCHKEY"]=searchkey
        try:
            place, (lat, lng) = g.geocode(searchkey)
            print "Ran a geocode"
            dictionary["LOCATION"]=place
            dictionary["LAT"]=lat
            dictionary["LONG"]=lng
        except:
            # NOTE(review): bare except also hides real failures
            # (network errors, quota) -- treated uniformly as "no match".
            print "Bad Geocode"
            dictionary["LOCATION"]=""
            dictionary["LAT"]=""
            dictionary["LONG"]=""
        finalcsv.matrix.append(dictionary)
    return finalcsv
def add_loc_lat_long(csv,fieldname):
    """Return a CSVFile extending *csv* with LOCATION/LAT/LONG columns.

    The values of *fieldname* are geocoded (one lookup per distinct
    value) and joined back onto every row.  Rows whose value produced no
    geocode entry are passed through without the three extra keys, as
    before.
    """
    locfile=geocode_field_from_CSV(csv,fieldname)
    finalcsv=CSVFile()
    finalcsv.colnames=csv.colnames
    finalcsv.colnames=finalcsv.colnames+["LOCATION","LAT","LONG"]
    # Index the geocode results once so each row resolves in O(1) instead
    # of rescanning the whole result list per row.  Later entries with
    # the same SEARCHKEY overwrite earlier ones, matching the original
    # linear-scan "last match wins" behaviour.
    location_by_key={}
    for loc in locfile.matrix:
        location_by_key[loc["SEARCHKEY"]]=loc
    for row in csv.matrix:
        dictionary=row
        loc=location_by_key.get(dictionary[fieldname])
        if loc is not None:
            dictionary["LOCATION"]=loc["LOCATION"]
            dictionary["LAT"]=loc["LAT"]
            dictionary["LONG"]=loc["LONG"]
        finalcsv.matrix.append(dictionary)
    return finalcsv
| mojolab/LivingData | lib/livdatmaplib.py | Python | apache-2.0 | 1,399 |
# Copyright 2015-2017 Cisco Systems, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ipaddr
class IPConversion(object):
    '''
    Helpers for converting IP addresses between textual and integer
    form (IPv4 and IPv6, via the third-party ``ipaddr`` library).
    '''
    def __init__(self):
        pass
    def is_ip(self, *args):
        '''
        Validate args[0] as an IP address.

        Returns (address, version) on success and False otherwise.
        NOTE(review): success and failure return different types, so
        callers must compare with ``is False`` rather than truthiness.
        '''
        try:
            ip_addr = ipaddr.IPAddress(args[0])
            return ip_addr, ip_addr.version
        except ValueError:
            return False
        except StandardError:
            # StandardError exists only in Python 2; this module targets py2.
            return False
    def str_to_int(self, *args):
        '''
        Convert an IP address string (args[0]) to its integer value.
        Raises ValueError for any invalid input.
        '''
        try:
            ip_addr = ipaddr.IPAddress(args[0])
            return int(ip_addr)
        except ValueError:
            raise ValueError('Invalid ip string')
        except Exception:
            raise ValueError('Invalid ip string')
    def int_to_str(self, *args):
        '''
        Convert an integer IP value (args[0]) to its string form.
        Raises ValueError for any invalid input.
        '''
        try:
            ip_addr = ipaddr.IPAddress(args[0])
            return str(ip_addr)
        except ValueError:
            raise ValueError('Invalid ip int')
        except Exception:
            raise ValueError('Invalid ip int')
if __name__ == '__main__':
    # Manual smoke test: print the dotted form of a sample integer address.
    IF = IPConversion()
    print IF.int_to_str(3684565272)
| dashng/netseen | netseen/common/ip_conversion.py | Python | apache-2.0 | 1,787 |
# -*- coding: utf-8 -*-
from django import forms
from django.contrib.auth.forms import AuthenticationForm
class NuevaSolicitudForm(forms.Form):
    """Form for requesting a new academic document (solicitud)."""
    # Document types offered for the mandatory first selection.
    choices = (('1','Constancia de Estudios'),
               ('2','Constancia de Estudios con Periodo Vacacional'),
               ('3','Boleta Global'),
               ('4','Boleta 2 Firmas'),
               ('5','Boleta 3 Firmas')) # (value, option)
    # Same list plus a ('6', 'Null') sentinel meaning "no selection",
    # used as the default for the optional second and third choices.
    choices2 = (('1','Constancia de Estudios'),
                ('2','Constancia de Estudios con Periodo Vacacional'),
                ('3','Boleta Global'),
                ('4','Boleta 2 Firmas'),
                ('5','Boleta 3 Firmas'),
                ('6','Null'))
    nombre = forms.CharField(widget=forms.TextInput(attrs={'placeholder': 'Nombre'}))
    apellidoP = forms.CharField(label = 'Apellido Paterno', widget=forms.TextInput(attrs={'placeholder': 'Apellido Paterno'}))
    apellidoM = forms.CharField(label = 'Apellido Materno', widget=forms.TextInput(attrs={'placeholder': 'Apellido Materno'}))
    boleta = forms.IntegerField(widget=forms.TextInput(attrs={'placeholder': 'Boleta'}))
    tipo = forms.ChoiceField(choices=choices,widget=forms.Select(attrs={'style': 'display:inline;',
                                                                        'id':'tipo1'}))
    tipo2 = forms.ChoiceField(label = '',choices=choices2,widget=forms.Select(attrs={'id':'tipo2'}),initial='6')
    tipo3 = forms.ChoiceField(label = '',choices=choices2,widget=forms.Select(attrs={'id':'tipo3'}),initial='6')
| CallmeTorre/Idalia | ESCOM/Area/forms.py | Python | apache-2.0 | 1,509 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-10-16 16:08
from __future__ import unicode_literals
import ckeditor.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by "manage.py makemigrations" (Django 1.11.5);
    # creates the OkthessMeetup table.  Applied migrations should not
    # be edited by hand.
    dependencies = [
        ('main', '0043_about'),
    ]
    operations = [
        migrations.CreateModel(
            name='OkthessMeetup',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateField()),
                ('time', models.TimeField()),
                ('title', models.TextField(blank=True, null=True)),
                ('agenda', ckeditor.fields.RichTextField(blank=True, null=True)),
            ],
        ),
    ]
| OKThess/website | main/migrations/0044_okthessmeetup.py | Python | mit | 775 |
'''Models for Pammy'''
from django.db import models
from django.dispatch import receiver
from closuretree.models import ClosureModel
from .fields import IPNetworkField
from .utils import subnet_complement
class Allocation(ClosureModel):
    """An allocated IP network kept in a closure-table tree, where each
    allocation's parent is its closest enclosing supernet."""
    name = models.CharField(max_length=500)
    network = IPNetworkField(unique=True)
    parent = models.ForeignKey('self', blank=True, null=True, related_name='subnets', on_delete=models.DO_NOTHING)
    def save(self, *args, **kwargs):
        # Recompute the parent: the longest-prefix enclosing supernet
        # wins; with no enclosing network this becomes a root allocation.
        try:
            parents = Allocation.objects.filter(network__is_supernet_of=self.network)
            if self.pk:
                parents = parents.exclude(pk=self.pk)
            self.parent = parents.order_by('-network')[0]
        except IndexError:
            self.parent = None
        super(Allocation, self).save(*args, **kwargs)
    @property
    def full(self):
        """True when no uncovered address space remains under this network."""
        # NOTE(review): .next() is Python-2 only; py3 needs next(...).
        try:
            self.complement().next()
            return False
        except StopIteration:
            return True
    def fully_divided(self):
        # True when both immediate half-subnets exist as allocations.
        networks = list(self.network.subnet(self.network.prefixlen + 1))
        return len(networks) == Allocation.objects.filter(network__in=networks).count()
    def complement(self):
        """Iterate the parts of this network not covered by child subnets."""
        return subnet_complement(self.network, [x.network for x in self.subnets.all()])
    def divide(self, prefixlen=None):
        """Partition this network at *prefixlen* (default: one bit deeper).

        Returns a list of (slot_network, allocations_inside, covering_allocation)
        tuples; slot_network is None when an existing child allocation
        with a shorter prefix already covers the slot.
        """
        if prefixlen is None:
            prefixlen = self.network.prefixlen + 1
        subnets = list(self.subnets.order_by('network'))
        networks = []
        for subnet in self.network.subnet(prefixlen):
            contains = [x for x in subnets if x.network in subnet]
            try:
                contained = self.subnets.filter(network__is_supernet_of=subnet).exclude(network=subnet).order_by('-network')[0]
                networks.append((None, [], contained))
            except IndexError:
                contained = None
                networks.append((subnet, contains, contained))
        return networks
    def __str__(self):
        return str(self.network)
    class Meta:
        ordering = ('network', )
    class ClosureMeta:
        parent_attr = 'parent'
@receiver(models.signals.pre_delete, sender=Allocation)
def shuffle_parents_on_delete(sender, **kwargs):
    """Hand the doomed allocation's children over to its own parent."""
    doomed = kwargs['instance']
    if doomed.subnets.update(parent=doomed.parent):
        Allocation.rebuildtable()
@receiver(models.signals.post_save, sender=Allocation)
def shuffle_parents_on_save(sender, **kwargs):
    """Adopt sibling allocations that the saved network now encloses."""
    saved = kwargs['instance']
    adoptees = Allocation.objects.filter(
        network__is_subnet_of=saved.network,
        parent=saved.parent,
    ).exclude(pk=saved.pk)
    if adoptees.update(parent=saved):
        Allocation.rebuildtable()
| ocadotechnology/pammy | pammy/models.py | Python | gpl-3.0 | 2,736 |
# This file is part of beets.
# Copyright 2015, Pedro Silva.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""List duplicate tracks or albums.
"""
from __future__ import (division, absolute_import, print_function,
unicode_literals)
import shlex
from beets.plugins import BeetsPlugin
from beets.ui import decargs, print_, vararg_callback, Subcommand, UserError
from beets.util import command_output, displayable_path, subprocess
from beets.library import Item, Album
PLUGIN = 'duplicates'
class DuplicatesPlugin(BeetsPlugin):
    """List duplicate tracks or albums
    """
    def __init__(self):
        """Declare config defaults and build the `duplicates` subcommand."""
        super(DuplicatesPlugin, self).__init__()
        self.config.add({
            'album': False,
            'checksum': '',
            'copy': '',
            'count': False,
            'delete': False,
            'format': '',
            'full': False,
            'keys': [],
            'merge': False,
            'move': '',
            'path': False,
            'tiebreak': {},
            'strict': False,
            'tag': '',
        })
        self._command = Subcommand('duplicates',
                                   help=__doc__,
                                   aliases=['dup'])
        self._command.parser.add_option('-c', '--count', dest='count',
                                        action='store_true',
                                        help='show duplicate counts')
        self._command.parser.add_option('-C', '--checksum', dest='checksum',
                                        action='store', metavar='PROG',
                                        help='report duplicates based on'
                                        ' arbitrary command')
        self._command.parser.add_option('-d', '--delete', dest='delete',
                                        action='store_true',
                                        help='delete items from library and '
                                        'disk')
        self._command.parser.add_option('-F', '--full', dest='full',
                                        action='store_true',
                                        help='show all versions of duplicate'
                                        ' tracks or albums')
        self._command.parser.add_option('-s', '--strict', dest='strict',
                                        action='store_true',
                                        help='report duplicates only if all'
                                        ' attributes are set')
        self._command.parser.add_option('-k', '--keys', dest='keys',
                                        action='callback', metavar='KEY1 KEY2',
                                        callback=vararg_callback,
                                        help='report duplicates based on keys')
        self._command.parser.add_option('-M', '--merge', dest='merge',
                                        action='store_true',
                                        help='merge duplicate items')
        self._command.parser.add_option('-m', '--move', dest='move',
                                        action='store', metavar='DEST',
                                        help='move items to dest')
        self._command.parser.add_option('-o', '--copy', dest='copy',
                                        action='store', metavar='DEST',
                                        help='copy items to dest')
        self._command.parser.add_option('-t', '--tag', dest='tag',
                                        action='store',
                                        help='tag matched items with \'k=v\''
                                        ' attribute')
        self._command.parser.add_all_common_options()

    def commands(self):
        """Return the `duplicates` subcommand, with its handler bound."""
        def _dup(lib, opts, args):
            self.config.set_args(opts)
            album = self.config['album'].get(bool)
            checksum = self.config['checksum'].get(str)
            copy = self.config['copy'].get(str)
            count = self.config['count'].get(bool)
            delete = self.config['delete'].get(bool)
            fmt = self.config['format'].get(str)
            full = self.config['full'].get(bool)
            keys = self.config['keys'].get(list)
            merge = self.config['merge'].get(bool)
            move = self.config['move'].get(str)
            path = self.config['path'].get(bool)
            tiebreak = self.config['tiebreak'].get(dict)
            strict = self.config['strict'].get(bool)
            tag = self.config['tag'].get(str)

            if album:
                if not keys:
                    keys = ['mb_albumid']
                items = lib.albums(decargs(args))
            else:
                if not keys:
                    keys = ['mb_trackid', 'mb_albumid']
                items = lib.items(decargs(args))

            if path:
                fmt = '$path'

            # Default format string for count mode.
            if count and not fmt:
                if album:
                    fmt = '$albumartist - $album'
                else:
                    fmt = '$albumartist - $album - $title'
                fmt += ': {0}'

            if checksum:
                # Compute (and cache) the checksum for every item; the
                # checksum attribute name becomes the duplicate key.
                for i in items:
                    k, _ = self._checksum(i, checksum)
                    keys = [k]

            for obj_id, obj_count, objs in self._duplicates(items,
                                                            keys=keys,
                                                            full=full,
                                                            strict=strict,
                                                            tiebreak=tiebreak,
                                                            merge=merge):
                if obj_id:  # Skip empty IDs.
                    for o in objs:
                        self._process_item(o,
                                           copy=copy,
                                           move=move,
                                           delete=delete,
                                           tag=tag,
                                           fmt=fmt.format(obj_count))

        self._command.func = _dup
        return [self._command]

    def _process_item(self, item, copy=False, move=False, delete=False,
                      tag=False, fmt=''):
        """Process Item `item`: print it, then apply the requested
        copy/move/delete/tag actions.
        """
        print_(format(item, fmt))
        if copy:
            item.move(basedir=copy, copy=True)
            item.store()
        if move:
            item.move(basedir=move, copy=False)
            item.store()
        if delete:
            item.remove(delete=True)
        if tag:
            try:
                k, v = tag.split('=')
            except ValueError:
                # BUG FIX: this was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit.  Only the tuple unpack above
                # can fail here, and it raises ValueError.
                raise UserError('%s: can\'t parse k=v tag: %s' % (PLUGIN, tag))
            setattr(item, k, v)
            item.store()

    def _checksum(self, item, prog):
        """Run external `prog` on file path associated with `item`, cache
        output as flexattr on a key that is the name of the program, and
        return the key, checksum tuple.
        """
        args = [p.format(file=item.path) for p in shlex.split(prog)]
        key = args[0]
        checksum = getattr(item, key, False)
        if not checksum:
            self._log.debug(u'key {0} on item {1} not cached:'
                            'computing checksum',
                            key, displayable_path(item.path))
            try:
                checksum = command_output(args)
                setattr(item, key, checksum)
                item.store()
                self._log.debug(u'computed checksum for {0} using {1}',
                                item.title, key)
            except subprocess.CalledProcessError as e:
                self._log.debug(u'failed to checksum {0}: {1}',
                                displayable_path(item.path), e)
        else:
            self._log.debug(u'key {0} on item {1} cached:'
                            'not computing checksum',
                            key, displayable_path(item.path))
        return key, checksum

    def _group_by(self, objs, keys, strict):
        """Return a dictionary with keys arbitrary concatenations of attributes and
        values lists of objects (Albums or Items) with those keys.

        If strict, all attributes must be defined for a duplicate match.
        """
        import collections
        counts = collections.defaultdict(list)
        for obj in objs:
            values = [getattr(obj, k, None) for k in keys]
            values = [v for v in values if v not in (None, '')]
            if strict and len(values) < len(keys):
                self._log.debug(u'some keys {0} on item {1} are null or empty:'
                                ' skipping',
                                keys, displayable_path(obj.path))
            elif (not strict and not len(values)):
                self._log.debug(u'all keys {0} on item {1} are null or empty:'
                                ' skipping',
                                keys, displayable_path(obj.path))
            else:
                key = tuple(values)
                counts[key].append(obj)
        return counts

    def _order(self, objs, tiebreak=None):
        """Return objs sorted by descending order of fields in tiebreak dict.

        Default ordering is based on attribute completeness.
        """
        if tiebreak:
            kind = 'items' if all(isinstance(o, Item)
                                  for o in objs) else 'albums'
            key = lambda x: tuple(getattr(x, k) for k in tiebreak[kind])
        else:
            kind = Item if all(isinstance(o, Item) for o in objs) else Album
            if kind is Item:
                # Count the non-empty fields of each item.
                fields = [f for sublist in kind.get_fields() for f in sublist]
                key = lambda x: len([(a, getattr(x, a, None)) for a in fields
                                     if getattr(x, a, None) not in (None, '')])
            else:
                key = lambda x: len(x.items())
        return sorted(objs, key=key, reverse=True)

    def _merge_items(self, objs):
        """Merge Item objs by copying missing fields from items in the tail to
        the head item.

        Return same number of items, with the head item modified.
        """
        fields = [f for sublist in Item.get_fields() for f in sublist]
        for f in fields:
            for o in objs[1:]:
                if getattr(objs[0], f, None) in (None, ''):
                    value = getattr(o, f, None)
                    if value:
                        self._log.debug(u'key {0} on item {1} is null '
                                        'or empty: setting from item {2}',
                                        f, displayable_path(objs[0].path),
                                        displayable_path(o.path))
                        setattr(objs[0], f, value)
                        objs[0].store()
                        break
        return objs

    def _merge_albums(self, objs):
        """Merge Album objs by copying missing items from albums in the tail
        to the head album.

        Return same number of albums, with the head album modified."""
        ids = [i.mb_trackid for i in objs[0].items()]
        for o in objs[1:]:
            for i in o.items():
                if i.mb_trackid not in ids:
                    missing = Item.from_path(i.path)
                    missing.album_id = objs[0].id
                    missing.add(i._db)
                    self._log.debug(u'item {0} missing from album {1}:'
                                    ' merging from {2} into {3}',
                                    missing,
                                    objs[0],
                                    displayable_path(o.path),
                                    displayable_path(missing.destination()))
                    missing.move(copy=True)
        return objs

    def _merge(self, objs):
        """Merge duplicate items. See ``_merge_items`` and ``_merge_albums``
        for the relevant strategies.
        """
        kind = Item if all(isinstance(o, Item) for o in objs) else Album
        if kind is Item:
            objs = self._merge_items(objs)
        else:
            objs = self._merge_albums(objs)
        return objs

    def _duplicates(self, objs, keys, full, strict, tiebreak, merge):
        """Generate triples of keys, duplicate counts, and constituent objects.
        """
        offset = 0 if full else 1
        # NOTE: iteritems() keeps this module Python 2, matching the
        # __future__ imports at the top of the file.
        for k, objs in self._group_by(objs, keys, strict).iteritems():
            if len(objs) > 1:
                objs = self._order(objs, tiebreak)
                if merge:
                    objs = self._merge(objs)
                yield (k, len(objs) - offset, objs[offset:])
| ruippeixotog/beets | beetsplug/duplicates.py | Python | mit | 13,370 |
# -*- coding: utf-8 -*-
"""
Created on Mon Dec 01 13:21:57 2014
@author: Acer
"""
class Raintanksetup:
    """Build per-raintank attribute dicts and the matching XML node snippets.

    BUG FIX / cleanup: the original built attribute accesses and string
    literals through Python 2 `exec` statements, which was both fragile and
    Python 2 only.  The names being generated ("Raintank_<i>", self.lineN)
    were fully static, so everything is now expressed directly; output
    strings are byte-identical to what the exec'd code produced.
    """

    def __init__(self, numberofRaintanks, starting_value_i, **Raintankattributes):
        self.numberofRaintanks = numberofRaintanks
        self.Raintankattributelist = []
        self.Raintanknodelist = []
        # One independent copy of the attribute dict per tank, keyed
        # "Raintank_<i>" for i in [starting_value_i, starting_value_i + n).
        for i in range(starting_value_i, starting_value_i + numberofRaintanks):
            self.Raintankattributelist.append(
                {"Raintank_%d" % i: dict.copy(Raintankattributes)})
        print(str(numberofRaintanks) + ' Raintanks have been created!')

    def Setandwrite_attributes(self, numberofRaintanks, starting_value_i, attributevector):
        """Store each tank's storage volume and append one XML <node> per tank.

        attributevector[idx][0] holds the storage volume for the idx-th tank.
        """
        for i in range(starting_value_i, starting_value_i + numberofRaintanks):
            idx = i - starting_value_i
            self.Raintankattributelist[idx]["Raintank_%d" % i]["Storage_Volume"] = attributevector[idx][0]
        for i in range(starting_value_i, starting_value_i + numberofRaintanks):
            idx = i - starting_value_i
            volume = self.Raintankattributelist[idx]["Raintank_%d" % i]["Storage_Volume"]
            # Trailing spaces after the newlines are kept for byte-identical
            # output with the original exec-built strings.
            node = ('\t\t\t<node id="Raintank_%d" class="Raintank"> \n' % i
                    + '\t\t\t\t<parameter name="Storage_Volume_[m^3]" type="double" value="%s"/> \n ' % str(volume)
                    + '\t\t\t</node> \n ')
            self.Raintanknodelist.append(node)
| iut-ibk/DynaMind-ToolBox | DynaMind-Performance-Assessment/3rdparty/CD3Waterbalance/Modelcreator/Raintanksetup.py | Python | gpl-2.0 | 1,578 |
# Copyright (C) 2010 Google Inc. All rights reserved.
# Copyright (C) 2013 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest2 as unittest
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.common.checkout.baselineoptimizer import BaselineOptimizer
from webkitpy.common.net.buildbot.buildbot_mock import MockBuilder
from webkitpy.common.system.executive_mock import MockExecutive2
from webkitpy.thirdparty.mock import Mock
from webkitpy.tool.commands.rebaseline import *
from webkitpy.tool.mocktool import MockTool, MockOptions
class _BaseTestCase(unittest.TestCase):
    """Shared fixture for the rebaseline command tests."""
    MOCK_WEB_RESULT = 'MOCK Web result, convert 404 to None=True'
    WEB_PREFIX = 'http://example.com/f/builders/Apple Lion Release WK1 (Tests)/results/layout-test-results'
    command_constructor = None

    def setUp(self):
        self.tool = MockTool()
        # command_constructor is supplied by each subclass; pylint warns it
        # might be None, but that is intentional; pylint: disable=E1102
        self.command = self.command_constructor()
        self.command.bind_to_tool(self.tool)
        self.lion_port = self.tool.port_factory.get_from_builder_name("Apple Lion Release WK1 (Tests)")
        self.lion_expectations_path = self.lion_port.path_to_test_expectations_file()

    # FIXME: we should override builders._exact_matches here to point to a set
    # of test ports and restore the value in tearDown(), and that way the
    # individual tests wouldn't have to worry about it.
    def _expand(self, path):
        """Make *path* absolute relative to the lion port's layout-tests dir."""
        fs = self.tool.filesystem
        return path if fs.isabs(path) else fs.join(self.lion_port.layout_tests_dir(), path)

    def _read(self, path):
        return self.tool.filesystem.read_text_file(self._expand(path))

    def _write(self, path, contents):
        self.tool.filesystem.write_text_file(self._expand(path), contents)

    def _zero_out_test_expectations(self):
        """Blank every port's expectations files, then forget those writes."""
        factory = self.tool.port_factory
        for port_name in factory.all_port_names():
            for expectations_file in factory.get(port_name).expectations_files():
                self._write(expectations_file, '')
        self.tool.filesystem.written_files = {}
class TestRebaselineTest(_BaseTestCase):
    """Tests for the rebaseline-test-internal command (RebaselineTest)."""
    command_constructor = RebaselineTest  # AKA webkit-patch rebaseline-test-internal
    def setUp(self):
        super(TestRebaselineTest, self).setUp()
        self.options = MockOptions(builder="Apple Lion Release WK1 (Tests)", test="userscripts/another-test.html", suffixes="txt",
                                   move_overwritten_baselines_to=None, results_directory=None)
    def test_baseline_directory(self):
        # Each builder name must map to its platform-specific baseline dir.
        command = self.command
        self.assertMultiLineEqual(command._baseline_directory("Apple Win XP Debug (Tests)"), "/mock-checkout/LayoutTests/platform/win-xp")
        self.assertMultiLineEqual(command._baseline_directory("Apple Win 7 Release (Tests)"), "/mock-checkout/LayoutTests/platform/win")
        self.assertMultiLineEqual(command._baseline_directory("Apple Lion Release WK1 (Tests)"), "/mock-checkout/LayoutTests/platform/mac-lion")
        self.assertMultiLineEqual(command._baseline_directory("Apple Lion Release WK2 (Tests)"), "/mock-checkout/LayoutTests/platform/mac-wk2")
        self.assertMultiLineEqual(command._baseline_directory("Apple MountainLion Release WK1 (Tests)"), "/mock-checkout/LayoutTests/platform/mac-mountainlion")
        self.assertMultiLineEqual(command._baseline_directory("Apple MountainLion Release WK2 (Tests)"), "/mock-checkout/LayoutTests/platform/mac-wk2")
        self.assertMultiLineEqual(command._baseline_directory("Apple Mavericks Release WK1 (Tests)"), "/mock-checkout/LayoutTests/platform/mac")
        self.assertMultiLineEqual(command._baseline_directory("Apple Mavericks Release WK2 (Tests)"), "/mock-checkout/LayoutTests/platform/mac-wk2")
        self.assertMultiLineEqual(command._baseline_directory("GTK Linux 64-bit Debug"), "/mock-checkout/LayoutTests/platform/gtk-wk1")
        self.assertMultiLineEqual(command._baseline_directory("GTK Linux 64-bit Release WK2 (Tests)"), "/mock-checkout/LayoutTests/platform/gtk-wk2")
        self.assertMultiLineEqual(command._baseline_directory("EFL Linux 64-bit Release WK2"), "/mock-checkout/LayoutTests/platform/efl-wk2")
    def test_rebaseline_updates_expectations_file_noop(self):
        # Expectations that don't mention the rebaselined test must survive untouched.
        self._zero_out_test_expectations()
        self._write(self.lion_expectations_path, """Bug(B) [ Mac Linux XP Debug ] fast/dom/Window/window-postmessage-clone-really-deep-array.html [ Pass ]
Bug(A) [ Debug ] : fast/css/large-list-of-rules-crash.html [ Failure ]
""")
        self._write("fast/dom/Window/window-postmessage-clone-really-deep-array.html", "Dummy test contents")
        self._write("fast/css/large-list-of-rules-crash.html", "Dummy test contents")
        self._write("userscripts/another-test.html", "Dummy test contents")
        self.options.suffixes = "png,wav,txt"
        self.command._rebaseline_test_and_update_expectations(self.options)
        self.assertItemsEqual(self.tool.web.urls_fetched,
                              [self.WEB_PREFIX + '/userscripts/another-test-actual.png',
                               self.WEB_PREFIX + '/userscripts/another-test-actual.wav',
                               self.WEB_PREFIX + '/userscripts/another-test-actual.txt'])
        new_expectations = self._read(self.lion_expectations_path)
        self.assertMultiLineEqual(new_expectations, """Bug(B) [ Mac Linux XP Debug ] fast/dom/Window/window-postmessage-clone-really-deep-array.html [ Pass ]
Bug(A) [ Debug ] : fast/css/large-list-of-rules-crash.html [ Failure ]
""")
    def test_rebaseline_updates_expectations_file(self):
        self._write(self.lion_expectations_path, "Bug(x) [ Mac ] userscripts/another-test.html [ ImageOnlyFailure ]\nbug(z) [ Linux ] userscripts/another-test.html [ ImageOnlyFailure ]\n")
        self._write("userscripts/another-test.html", "Dummy test contents")
        self.options.suffixes = 'png,wav,txt'
        self.command._rebaseline_test_and_update_expectations(self.options)
        self.assertItemsEqual(self.tool.web.urls_fetched,
                              [self.WEB_PREFIX + '/userscripts/another-test-actual.png',
                               self.WEB_PREFIX + '/userscripts/another-test-actual.wav',
                               self.WEB_PREFIX + '/userscripts/another-test-actual.txt'])
        new_expectations = self._read(self.lion_expectations_path)
        self.assertMultiLineEqual(new_expectations, "Bug(x) [ Mac ] userscripts/another-test.html [ ImageOnlyFailure ]\nbug(z) [ Linux ] userscripts/another-test.html [ ImageOnlyFailure ]\n")
    def test_rebaseline_does_not_include_overrides(self):
        self._write(self.lion_expectations_path, "Bug(x) [ Mac ] userscripts/another-test.html [ ImageOnlyFailure ]\nBug(z) [ Linux ] userscripts/another-test.html [ ImageOnlyFailure ]\n")
        self._write("userscripts/another-test.html", "Dummy test contents")
        self.options.suffixes = 'png,wav,txt'
        self.command._rebaseline_test_and_update_expectations(self.options)
        self.assertItemsEqual(self.tool.web.urls_fetched,
                              [self.WEB_PREFIX + '/userscripts/another-test-actual.png',
                               self.WEB_PREFIX + '/userscripts/another-test-actual.wav',
                               self.WEB_PREFIX + '/userscripts/another-test-actual.txt'])
        new_expectations = self._read(self.lion_expectations_path)
        self.assertMultiLineEqual(new_expectations, "Bug(x) [ Mac ] userscripts/another-test.html [ ImageOnlyFailure ]\nBug(z) [ Linux ] userscripts/another-test.html [ ImageOnlyFailure ]\n")
    def test_rebaseline_test(self):
        self.command._rebaseline_test("Apple Lion Release WK1 (Tests)", "userscripts/another-test.html", None, "txt", self.WEB_PREFIX)
        self.assertItemsEqual(self.tool.web.urls_fetched, [self.WEB_PREFIX + '/userscripts/another-test-actual.txt'])
    def test_rebaseline_test_with_results_directory(self):
        # A results directory switches fetching from HTTP to file:// URLs.
        self._write(self.lion_expectations_path, "Bug(x) [ Mac ] userscripts/another-test.html [ ImageOnlyFailure ]\nbug(z) [ Linux ] userscripts/another-test.html [ ImageOnlyFailure ]\n")
        self.options.results_directory = '/tmp'
        self.command._rebaseline_test_and_update_expectations(self.options)
        self.assertItemsEqual(self.tool.web.urls_fetched, ['file:///tmp/userscripts/another-test-actual.txt'])
    def test_rebaseline_test_and_print_scm_changes(self):
        self.command._print_scm_changes = True
        self.command._scm_changes = {'add': [], 'delete': []}
        self.tool._scm.exists = lambda x: False
        self.command._rebaseline_test("Apple Lion Release WK1 (Tests)", "userscripts/another-test.html", None, "txt", None)
        self.assertDictEqual(self.command._scm_changes, {'add': ['/mock-checkout/LayoutTests/platform/mac-lion/userscripts/another-test-expected.txt'], 'delete': []})
    def test_rebaseline_and_copy_test(self):
        self._write("userscripts/another-test-expected.txt", "generic result")
        self.command._rebaseline_test("Apple Lion Release WK1 (Tests)", "userscripts/another-test.html", ["mac-lion-wk2"], "txt", None)
        self.assertMultiLineEqual(self._read('platform/mac-lion/userscripts/another-test-expected.txt'), self.MOCK_WEB_RESULT)
        self.assertMultiLineEqual(self._read('platform/mac-wk2/userscripts/another-test-expected.txt'), 'generic result')
    def test_rebaseline_and_copy_test_no_existing_result(self):
        self.command._rebaseline_test("Apple Lion Release WK1 (Tests)", "userscripts/another-test.html", ["mac-lion-wk2"], "txt", None)
        self.assertMultiLineEqual(self._read('platform/mac-lion/userscripts/another-test-expected.txt'), self.MOCK_WEB_RESULT)
        self.assertFalse(self.tool.filesystem.exists(self._expand('platform/mac-lion-wk2/userscripts/another-test-expected.txt')))
    def test_rebaseline_and_copy_test_with_lion_result(self):
        self._write("platform/mac-lion/userscripts/another-test-expected.txt", "original lion result")
        self.command._rebaseline_test("Apple Lion Release WK1 (Tests)", "userscripts/another-test.html", ["mac-lion-wk2"], "txt", self.WEB_PREFIX)
        self.assertItemsEqual(self.tool.web.urls_fetched, [self.WEB_PREFIX + '/userscripts/another-test-actual.txt'])
        self.assertMultiLineEqual(self._read("platform/mac-wk2/userscripts/another-test-expected.txt"), "original lion result")
        self.assertMultiLineEqual(self._read("platform/mac-lion/userscripts/another-test-expected.txt"), self.MOCK_WEB_RESULT)
    def test_rebaseline_and_copy_no_overwrite_test(self):
        self._write("platform/mac-lion/userscripts/another-test-expected.txt", "original lion result")
        self._write("platform/mac-lion-wk2/userscripts/another-test-expected.txt", "original lion wk2 result")
        self.command._rebaseline_test("Apple Lion Release WK1 (Tests)", "userscripts/another-test.html", ["mac-lion-wk2"], "txt", None)
        self.assertMultiLineEqual(self._read("platform/mac-lion-wk2/userscripts/another-test-expected.txt"), "original lion wk2 result")
        self.assertMultiLineEqual(self._read("platform/mac-lion/userscripts/another-test-expected.txt"), self.MOCK_WEB_RESULT)
    def test_rebaseline_test_internal_with_move_overwritten_baselines_to(self):
        self.tool.executive = MockExecutive2()
        # FIXME: it's confusing that this is the test- port, and not the regular lion port. Really all of the tests should be using the test ports.
        port = self.tool.port_factory.get('test-mac-snowleopard')
        self._write(port._filesystem.join(port.layout_tests_dir(), 'platform/test-mac-snowleopard/failures/expected/image-expected.txt'), 'original snowleopard result')
        old_exact_matches = builders._exact_matches
        oc = OutputCapture()
        try:
            builders._exact_matches = {
                "MOCK Leopard": {"port_name": "test-mac-leopard", "specifiers": set(["mock-specifier"])},
                "MOCK SnowLeopard": {"port_name": "test-mac-snowleopard", "specifiers": set(["mock-specifier"])},
            }
            options = MockOptions(optimize=True, builder="MOCK SnowLeopard", suffixes="txt",
                                  move_overwritten_baselines_to=["test-mac-leopard"], verbose=True, test="failures/expected/image.html",
                                  results_directory=None)
            oc.capture_output()
            self.command.execute(options, [], self.tool)
        finally:
            out, _, _ = oc.restore_output()
            builders._exact_matches = old_exact_matches
        self.assertMultiLineEqual(self._read(self.tool.filesystem.join(port.layout_tests_dir(), 'platform/test-mac-leopard/failures/expected/image-expected.txt')), 'original snowleopard result')
        self.assertMultiLineEqual(out, '{"add": []}\n')
class TestRebaselineJson(_BaseTestCase):
    """Tests for RebaselineJson, driven through a mocked builder table."""
    command_constructor = RebaselineJson
    def setUp(self):
        super(TestRebaselineJson, self).setUp()
        self.tool.executive = MockExecutive2()
        # Swap in a deterministic builder table; restored in tearDown().
        self.old_exact_matches = builders._exact_matches
        builders._exact_matches = {
            "MOCK builder": {"port_name": "test-mac-snowleopard", "specifiers": set(["mock-specifier"]),
                             "move_overwritten_baselines_to": ["test-mac-leopard"]},
            "MOCK builder (Debug)": {"port_name": "test-mac-snowleopard", "specifiers": set(["mock-specifier", "debug"])},
        }
    def tearDown(self):
        builders._exact_matches = self.old_exact_matches
        super(TestRebaselineJson, self).tearDown()
    def test_rebaseline_all(self):
        options = MockOptions(optimize=True, verbose=True, move_overwritten_baselines=False, results_directory=None)
        self.command._rebaseline(options, {"user-scripts/another-test.html": {"MOCK builder": ["txt", "png"]}})
        # Note that we have one run_in_parallel() call followed by a run_command()
        self.assertEqual(self.tool.executive.calls,
                         [[['echo', 'rebaseline-test-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder', '--test', 'user-scripts/another-test.html', '--verbose']],
                          ['echo', '--verbose', 'optimize-baselines', '--suffixes', 'txt,png', 'user-scripts/another-test.html']])
    def test_rebaseline_debug(self):
        options = MockOptions(optimize=True, verbose=True, move_overwritten_baselines=False, results_directory=None)
        self.command._rebaseline(options, {"user-scripts/another-test.html": {"MOCK builder (Debug)": ["txt", "png"]}})
        # Note that we have one run_in_parallel() call followed by a run_command()
        self.assertEqual(self.tool.executive.calls,
                         [[['echo', 'rebaseline-test-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder (Debug)', '--test', 'user-scripts/another-test.html', '--verbose']],
                          ['echo', '--verbose', 'optimize-baselines', '--suffixes', 'txt,png', 'user-scripts/another-test.html']])
    def test_move_overwritten(self):
        options = MockOptions(optimize=True, verbose=True, move_overwritten_baselines=True, results_directory=None)
        self.command._rebaseline(options, {"user-scripts/another-test.html": {"MOCK builder": ["txt", "png"]}})
        # Note that we have one run_in_parallel() call followed by a run_command()
        self.assertEqual(self.tool.executive.calls,
                         [[['echo', 'rebaseline-test-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder', '--test', 'user-scripts/another-test.html', '--move-overwritten-baselines-to', 'test-mac-leopard', '--verbose']],
                          ['echo', '--verbose', 'optimize-baselines', '--suffixes', 'txt,png', 'user-scripts/another-test.html']])
    def test_no_optimize(self):
        options = MockOptions(optimize=False, verbose=True, move_overwritten_baselines=False, results_directory=None)
        self.command._rebaseline(options, {"user-scripts/another-test.html": {"MOCK builder (Debug)": ["txt", "png"]}})
        # Note that we have only one run_in_parallel() call
        self.assertEqual(self.tool.executive.calls,
                         [[['echo', 'rebaseline-test-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder (Debug)', '--test', 'user-scripts/another-test.html', '--verbose']]])
    def test_results_directory(self):
        options = MockOptions(optimize=False, verbose=True, move_overwritten_baselines=False, results_directory='/tmp')
        self.command._rebaseline(options, {"user-scripts/another-test.html": {"MOCK builder": ["txt", "png"]}})
        # Note that we have only one run_in_parallel() call
        self.assertEqual(self.tool.executive.calls,
                         [[['echo', 'rebaseline-test-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder', '--test', 'user-scripts/another-test.html', '--results-directory', '/tmp', '--verbose']]])
class TestRebaseline(_BaseTestCase):
    # This command shares most of its logic with RebaselineJson, so these tests just test what is different.
    command_constructor = Rebaseline  # AKA webkit-patch rebaseline
    def test_tests_to_update(self):
        build = Mock()
        OutputCapture().assert_outputs(self, self.command._tests_to_update, [build])
    def test_rebaseline(self):
        # Stub out builder discovery and failing-test discovery so only the
        # rebaseline dispatch itself is exercised.
        self.command._builders_to_pull_from = lambda: [MockBuilder('MOCK builder')]
        self.command._tests_to_update = lambda builder: ['mock/path/to/test.html']
        self._zero_out_test_expectations()
        old_exact_matches = builders._exact_matches
        oc = OutputCapture()
        try:
            builders._exact_matches = {
                "MOCK builder": {"port_name": "test-mac-leopard", "specifiers": set(["mock-specifier"])},
            }
            oc.capture_output()
            self.command.execute(MockOptions(optimize=False, builders=None, suffixes="txt,png", verbose=True, move_overwritten_baselines=False), [], self.tool)
        finally:
            oc.restore_output()
            builders._exact_matches = old_exact_matches
        # Ignore unrelated tool invocations made by port probing.
        calls = filter(lambda x: x != ['qmake', '-v'] and x[0] != 'perl', self.tool.executive.calls)
        self.assertEqual(calls,
                         [[['echo', 'rebaseline-test-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder', '--test', 'mock/path/to/test.html', '--verbose']]])
class TestRebaselineExpectations(_BaseTestCase):
    """Tests for the rebaseline-expectations command."""
    command_constructor = RebaselineExpectations
    def setUp(self):
        super(TestRebaselineExpectations, self).setUp()
        self.options = MockOptions(optimize=False, builders=None, suffixes=['txt'], verbose=False, platform=None,
                                   move_overwritten_baselines=False, results_directory=None)
    def test_rebaseline_expectations(self):
        self._zero_out_test_expectations()
        self.tool.executive = MockExecutive2()
        self.command._tests_to_rebaseline = lambda port: {'userscripts/another-test.html': set(['txt']), 'userscripts/images.svg': set(['png'])}
        self.command.execute(self.options, [], self.tool)
        # FIXME: change this to use the test- ports.
        calls = filter(lambda x: x != ['qmake', '-v'], self.tool.executive.calls)
        self.assertEqual(len(calls), 1)
        self.assertEqual(len(calls[0]), 24)
    def test_rebaseline_expectations_noop(self):
        # With nothing marked Rebaseline, the command must not touch any file.
        self._zero_out_test_expectations()
        oc = OutputCapture()
        try:
            oc.capture_output()
            self.command.execute(self.options, [], self.tool)
        finally:
            _, _, logs = oc.restore_output()
        self.assertEqual(self.tool.filesystem.written_files, {})
        self.assertEqual(logs, 'Did not find any tests marked Rebaseline.\n')
    def disabled_test_overrides_are_included_correctly(self):
        # This tests that the any tests marked as REBASELINE in the overrides are found, but
        # that the overrides do not get written into the main file.
        self._zero_out_test_expectations()
        self._write(self.lion_expectations_path, '')
        self.lion_port.expectations_dict = lambda: {
            self.lion_expectations_path: '',
            'overrides': ('Bug(x) userscripts/another-test.html [ Failure Rebaseline ]\n'
                          'Bug(y) userscripts/test.html [ Crash ]\n')}
        self._write('/userscripts/another-test.html', '')
        self.assertDictEqual(self.command._tests_to_rebaseline(self.lion_port), {'userscripts/another-test.html': set(['png', 'txt', 'wav'])})
        self.assertEqual(self._read(self.lion_expectations_path), '')
class _FakeOptimizer(BaselineOptimizer):
    """Optimizer stub: canned results for .txt baselines, nothing otherwise."""
    _TXT_RESULTS = {
        'LayoutTests/passes/text.html': '123456',
        'LayoutTests/platform/test-mac-leopard/passes/text.html': 'abcdef',
    }

    def read_results_by_directory(self, baseline_name):
        if not baseline_name.endswith('txt'):
            return {}
        return dict(self._TXT_RESULTS)
class TestAnalyzeBaselines(_BaseTestCase):
command_constructor = AnalyzeBaselines
def setUp(self):
super(TestAnalyzeBaselines, self).setUp()
self.port = self.tool.port_factory.get('test')
self.tool.port_factory.get = (lambda port_name=None, options=None: self.port)
self.lines = []
self.command._optimizer_class = _FakeOptimizer
self.command._write = (lambda msg: self.lines.append(msg)) # pylint bug warning about unnecessary lambda? pylint: disable=W0108
def test_default(self):
self.command.execute(MockOptions(suffixes='txt', missing=False, platform=None), ['passes/text.html'], self.tool)
self.assertEqual(self.lines,
['passes/text-expected.txt:',
' (generic): 123456',
' test-mac-leopard: abcdef'])
def test_missing_baselines(self):
self.command.execute(MockOptions(suffixes='png,txt', missing=True, platform=None), ['passes/text.html'], self.tool)
self.assertEqual(self.lines,
['passes/text-expected.png: (no baselines found)',
'passes/text-expected.txt:',
' (generic): 123456',
' test-mac-leopard: abcdef'])
| sloanyang/aquantic | Tools/Scripts/webkitpy/tool/commands/rebaseline_unittest.py | Python | gpl-2.0 | 23,429 |
__all__ = ["echo", "surround", "reverse"] | XDrake99/IDTP | protocol/__init__.py | Python | mit | 41 |
import sublime
from . import SblmCmmnFnctns
class Spinner:
SYMBOLS_ROW = u'←↑→↓'
SYMBOLS_BOX = u'⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏'
def __init__(self, symbols, view, startStr, endStr):
self.symbols = symbols
self.length = len(symbols)
self.position = 0
self.stopFlag = False
self.view = view
self.startStr = startStr
self.endStr = endStr
def __next__(self):
self.position = self.position + 1
return self.startStr + self.symbols[self.position % self.length] + self.endStr
def start(self):
if not self.stopFlag:
self.view.set_status(SblmCmmnFnctns.SUBLIME_STATUS_SPINNER, self.__next__())
sublime.set_timeout(lambda: self.start(), 300)
def stop(self):
self.view.erase_status(SblmCmmnFnctns.SUBLIME_STATUS_SPINNER)
self.stopFlag = True
| rusiv/BSScript | bsscript/bsscriptSblm/Spinner.py | Python | mit | 784 |
"""Plotting module that can plot 2D and 3D functions
"""
try:
try:
from ctypes import *
except:
raise ImportError("ctypes is required for plotting.\n"
"'easy_install ctypes' or visit "
"http://sourceforge.net/projects/ctypes/")
def Plot(*args, **kwargs):
"""
Plot Examples
=============
See examples/plotting.py for many more examples.
>>> from sympy import symbols, Plot
>>> from sympy.abc import x, y, z
>>> Plot(x*y**3-y*x**3)
>>> p = Plot()
>>> p[1] = x*y
>>> p[1].color = z, (0.4,0.4,0.9), (0.9,0.4,0.4)
>>> p = Plot()
>>> p[1] = x**2+y**2
>>> p[2] = -x**2-y**2
Variable Intervals
==================
The basic format is [var, min, max, steps], but the
syntax is flexible and arguments left out are taken
from the defaults for the current coordinate mode:
>>> Plot(x**2) # implies [x,-5,5,100]
>>> Plot(x**2, [], []) # [x,-1,1,40], [y,-1,1,40]
>>> Plot(x**2-y**2, [100], [100]) # [x,-1,1,100], [y,-1,1,100]
>>> Plot(x**2, [x,-13,13,100])
>>> Plot(x**2, [-13,13]) # [x,-13,13,100]
>>> Plot(x**2, [x,-13,13]) # [x,-13,13,100]
>>> Plot(1*x, [], [x], mode='cylindrical')
... # [unbound_theta,0,2*Pi,40], [x,-1,1,20]
Coordinate Modes
================
Plot supports several curvilinear coordinate modes, and
they independent for each plotted function. You can specify
a coordinate mode explicitly with the 'mode' named argument,
but it can be automatically determined for Cartesian or
parametric plots, and therefore must only be specified for
polar, cylindrical, and spherical modes.
Specifically, Plot(function arguments) and Plot[n] =
(function arguments) will interpret your arguments as a
Cartesian plot if you provide one function and a parametric
plot if you provide two or three functions. Similarly, the
arguments will be interpreted as a curve is one variable is
used, and a surface if two are used.
Supported mode names by number of variables:
1: parametric, cartesian, polar
2: parametric, cartesian, cylindrical = polar, spherical
>>> Plot(1, mode='spherical')
Calculator-like Interface
=========================
>>> p = Plot(visible=False)
>>> f = x**2
>>> p[1] = f
>>> p[2] = f.diff(x)
>>> p[3] = f.diff(x).diff(x)
>>> p
[1]: x**2, 'mode=cartesian'
[2]: 2*x, 'mode=cartesian'
[3]: 2, 'mode=cartesian'
>>> p.show()
>>> p.clear()
>>> p
<blank plot>
>>> p[1] = x**2+y**2
>>> p[1].style = 'solid'
>>> p[2] = -x**2-y**2
>>> p[2].style = 'wireframe'
>>> p[1].color = z, (0.4,0.4,0.9), (0.9,0.4,0.4)
>>> p[1].style = 'both'
>>> p[2].style = 'both'
>>> p.close()
Plot Window Keyboard Controls
=============================
Screen Rotation:
X,Y axis Arrow Keys, A,S,D,W, Numpad 4,6,8,2
Z axis Q,E, Numpad 7,9
Model Rotation:
Z axis Z,C, Numpad 1,3
Zoom: R,F, PgUp,PgDn, Numpad +,-
Reset Camera: X, Numpad 5
Camera Presets:
XY F1
XZ F2
YZ F3
Perspective F4
Sensitivity Modifier: SHIFT
Axes Toggle:
Visible F5
Colors F6
Close Window: ESCAPE
=============================
"""
import plot
return plot.Plot(*args, **kwargs)
except Exception, e:
def Plot(*args, **kwargs):
raise e
from textplot import textplot
| ichuang/sympy | sympy/plotting/__init__.py | Python | bsd-3-clause | 3,982 |
"""Support for Canary sensors."""
from homeassistant.const import TEMP_CELSIUS
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.icon import icon_for_battery_level
from . import DATA_CANARY
SENSOR_VALUE_PRECISION = 2
ATTR_AIR_QUALITY = "air_quality"
# Sensor types are defined like so:
# sensor type name, unit_of_measurement, icon
SENSOR_TYPES = [
["temperature", TEMP_CELSIUS, "mdi:thermometer", ["Canary"]],
["humidity", "%", "mdi:water-percent", ["Canary"]],
["air_quality", None, "mdi:weather-windy", ["Canary"]],
["wifi", "dBm", "mdi:wifi", ["Canary Flex"]],
["battery", "%", "mdi:battery-50", ["Canary Flex"]],
]
STATE_AIR_QUALITY_NORMAL = "normal"
STATE_AIR_QUALITY_ABNORMAL = "abnormal"
STATE_AIR_QUALITY_VERY_ABNORMAL = "very_abnormal"
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Canary sensors."""
data = hass.data[DATA_CANARY]
devices = []
for location in data.locations:
for device in location.devices:
if device.is_online:
device_type = device.device_type
for sensor_type in SENSOR_TYPES:
if device_type.get("name") in sensor_type[3]:
devices.append(CanarySensor(data, sensor_type,
location, device))
add_entities(devices, True)
class CanarySensor(Entity):
"""Representation of a Canary sensor."""
def __init__(self, data, sensor_type, location, device):
"""Initialize the sensor."""
self._data = data
self._sensor_type = sensor_type
self._device_id = device.device_id
self._sensor_value = None
sensor_type_name = sensor_type[0].replace("_", " ").title()
self._name = '{} {} {}'.format(location.name,
device.name,
sensor_type_name)
@property
def name(self):
"""Return the name of the Canary sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._sensor_value
@property
def unique_id(self):
"""Return the unique ID of this sensor."""
return "{}_{}".format(self._device_id, self._sensor_type[0])
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._sensor_type[1]
@property
def icon(self):
"""Icon for the sensor."""
if self.state is not None and self._sensor_type[0] == "battery":
return icon_for_battery_level(battery_level=self.state)
return self._sensor_type[2]
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self._sensor_type[0] == "air_quality" \
and self._sensor_value is not None:
air_quality = None
if self._sensor_value <= .4:
air_quality = STATE_AIR_QUALITY_VERY_ABNORMAL
elif self._sensor_value <= .59:
air_quality = STATE_AIR_QUALITY_ABNORMAL
elif self._sensor_value <= 1.0:
air_quality = STATE_AIR_QUALITY_NORMAL
return {
ATTR_AIR_QUALITY: air_quality
}
return None
def update(self):
"""Get the latest state of the sensor."""
self._data.update()
from canary.api import SensorType
canary_sensor_type = None
if self._sensor_type[0] == "air_quality":
canary_sensor_type = SensorType.AIR_QUALITY
elif self._sensor_type[0] == "temperature":
canary_sensor_type = SensorType.TEMPERATURE
elif self._sensor_type[0] == "humidity":
canary_sensor_type = SensorType.HUMIDITY
elif self._sensor_type[0] == "wifi":
canary_sensor_type = SensorType.WIFI
elif self._sensor_type[0] == "battery":
canary_sensor_type = SensorType.BATTERY
value = self._data.get_reading(self._device_id, canary_sensor_type)
if value is not None:
self._sensor_value = round(float(value), SENSOR_VALUE_PRECISION)
| MartinHjelmare/home-assistant | homeassistant/components/canary/sensor.py | Python | apache-2.0 | 4,232 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from vendor.paypal.standard.ipn.models import PayPalIPN
class PayPalIPNAdmin(admin.ModelAdmin):
date_hierarchy = 'payment_date'
fieldsets = (
(None, {
"fields": [
"flag", "txn_id", "txn_type", "payment_status", "payment_date",
"transaction_entity", "reason_code", "pending_reason",
"mc_gross", "mc_fee", "auth_status", "auth_amount", "auth_exp",
"auth_id"
]
}),
("Address", {
"description": "The address of the Buyer.",
'classes': ('collapse',),
"fields": [
"address_city", "address_country", "address_country_code",
"address_name", "address_state", "address_status",
"address_street", "address_zip"
]
}),
("Buyer", {
"description": "The information about the Buyer.",
'classes': ('collapse',),
"fields": [
"first_name", "last_name", "payer_business_name", "payer_email",
"payer_id", "payer_status", "contact_phone", "residence_country"
]
}),
("Seller", {
"description": "The information about the Seller.",
'classes': ('collapse',),
"fields": [
"business", "item_name", "item_number", "quantity",
"receiver_email", "receiver_id", "custom", "invoice", "memo"
]
}),
("Recurring", {
"description": "Information about recurring Payments.",
"classes": ("collapse",),
"fields": [
"profile_status", "initial_payment_amount", "amount_per_cycle",
"outstanding_balance", "period_type", "product_name",
"product_type", "recurring_payment_id", "receipt_id",
"next_payment_date"
]
}),
("Admin", {
"description": "Additional Info.",
"classes": ('collapse',),
"fields": [
"test_ipn", "ipaddress", "query", "response", "flag_code",
"flag_info"
]
}),
)
list_display = [
"__unicode__", "flag", "flag_info", "invoice", "custom",
"payment_status", "created_at"
]
search_fields = ["txn_id", "recurring_payment_id"]
admin.site.register(PayPalIPN, PayPalIPNAdmin) | huihoo/reader | vendor/paypal/standard/ipn/admin.py | Python | mit | 2,505 |
# Copyright 2009 Noam Yorav-Raphael
#
# This file is part of DreamPie.
#
# DreamPie is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DreamPie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with DreamPie. If not, see <http://www.gnu.org/licenses/>.
# This file is a script (not a module) run by the DreamPie GUI.
# It expects one argument: the port to connect to.
# It creates a package called dreampielib from subp-py2.zip or subp-py3.zip
# (which are expected to be in the directory of __file__),
# and runs dreampielib.subprocess.main(port).
import sys
from os.path import abspath, join, dirname
def main():
port = int(sys.argv[1])
py_ver = sys.version_info[0]
lib_name = abspath(join(dirname(__file__), 'subp-py%d' % py_ver))
sys.path.insert(0, lib_name)
from dreampielib.subprocess import main as subprocess_main
del sys.path[0]
if sys.version_info[:2] == (3, 0):
sys.stderr.write("Warning: DreamPie doesn't support Python 3.0. \n"
"Please upgrade to Python 3.1.\n")
subprocess_main(port)
if __name__ == '__main__':
main()
| noamraph/dreampie | dreampielib/data/subp_main.py | Python | gpl-3.0 | 1,566 |
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.5.0)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x02\xd0\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x00\xe2\x00\x00\x00\xe2\
\x01\xdd\xa1\x3d\x43\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x02\x4d\x49\x44\
\x41\x54\x58\x85\xed\x95\x4d\x48\x94\x51\x14\x86\x9f\xf3\xdd\x3b\
\xdf\x37\x4e\x82\xe1\x0f\x82\x38\x21\xb9\x08\x22\xb1\x6c\x13\x14\
\xe5\x22\x5a\x44\x90\xae\x6c\x23\xb5\x33\x30\xcb\x08\x22\x42\x62\
\xc2\x20\x5a\x09\x15\xf4\x03\x45\x11\x42\x49\x20\x04\x11\xd5\xa2\
\xc0\xcc\x22\xc1\x31\x14\x0c\x5c\x19\x36\x89\x90\x20\x11\xcd\xcf\
\x37\xa7\xcd\xcc\xa0\xd2\x6e\x66\x9c\xcd\x5c\x38\x9b\xf7\x1c\x78\
\x9f\x7b\xef\xb9\xe7\x8a\xaa\x52\xca\xe5\x94\xd4\xbd\x0c\x50\x06\
\x28\x03\x00\x02\x0c\x03\xfb\x01\x05\xce\xa9\xea\x0b\x11\xe9\x03\
\x2e\x14\xc1\x2f\x0e\x9c\x50\xd5\xa9\xac\x60\x81\x3b\xc0\x58\x06\
\x60\x3c\xa3\xbf\x04\x92\x45\x00\x88\x00\x87\x80\x1c\x00\xaa\xba\
\x69\x01\xcc\x01\xfd\x6b\xb5\x92\xf7\xc0\x5a\x3a\x03\xf4\xba\xc1\
\xe0\x08\x10\x2e\xd0\x8e\x3d\xc7\xda\x88\xe3\x7a\x8b\x8e\xf5\x62\
\x88\xa4\xc4\x31\xab\x8e\xf5\x62\x8e\xeb\x45\x81\x76\xc9\x7e\x46\
\x22\x72\xb1\x62\x4b\xe5\xe0\xae\xbd\xfb\x9c\xe9\x4f\x63\xdf\x13\
\x89\xf8\x0e\x55\xcd\xab\x0f\x44\x64\xb7\x63\x03\x93\x2d\xdd\x03\
\x06\x91\x75\xb9\x95\xf9\x69\x62\x93\xaf\x67\x6d\xa6\xb0\xde\x58\
\x1b\xe9\xbd\x72\xc3\x3d\xdc\xd1\x45\x67\x5b\x53\x23\x89\xf8\x19\
\x60\x28\x1f\x00\xe0\x57\xda\x4f\x49\xa8\x2e\x4c\x7d\xeb\xc1\x75\
\x89\xa5\xe8\xfb\x54\x3a\x99\xfa\xe1\x00\x18\x63\xae\x37\x36\x35\
\x9b\xce\x93\x3d\x54\x55\xd7\xd0\x73\xf9\x5a\xc0\xda\xc0\xa0\x88\
\xd4\xe6\xe3\xae\xaa\x0b\x8e\x38\x0f\xbe\x3e\x8e\x24\xd4\xf7\x73\
\xfa\xf2\xcc\x38\x3f\xa7\xde\x19\x3f\xf9\xf7\x12\x40\x9b\x88\xa4\
\x6f\x3e\x7f\xab\x13\xcb\xaa\x13\xcb\xaa\x1f\x62\x49\x6d\x6c\x6a\
\x8e\x1b\x63\xee\x17\xa0\x0f\x6a\xc5\xd8\xdf\x2d\xdd\x03\xda\x31\
\x3c\xaf\xc7\x9f\x7c\xd3\xca\x86\xed\x71\xc7\x98\x87\xaa\x0a\x5e\
\x30\xf4\xe6\xc0\x91\x63\xc9\xac\x79\x36\x86\x9e\xbd\x52\x11\xf1\
\x81\x6d\x05\x80\x38\x6f\x83\xa1\xc4\xd1\x7b\x93\xda\x7a\xea\xaa\
\x8a\x31\x7f\x80\x7a\x55\xc5\xda\x80\x6d\xa8\xaa\xa9\xb3\xa3\x8f\
\xee\x6e\x3c\x3e\x8c\x0d\x68\x2a\x99\xa8\x06\x16\xf2\xb9\x0a\xe0\
\x76\xda\x4f\xf5\x7d\xb9\x75\x36\xbc\x32\x1f\x4d\xab\xef\x47\x54\
\x75\x29\x9b\x6c\xf7\x2a\x42\x51\xd7\x0b\xc6\x8c\xb1\xab\x22\x92\
\x72\xbd\x60\xcc\xf5\x82\x8b\xd6\xda\x08\xe0\x15\xe8\x49\x86\x9d\
\x80\x3b\x02\xf4\x02\x26\xa7\x6f\x28\xea\x07\xe6\x36\x73\x3a\x96\
\x7c\x12\xda\xff\x68\x5b\x45\xe4\x74\x11\xbc\x66\x55\x75\x4c\x44\
\x02\x40\x37\xe0\x02\x33\xb9\x49\x08\x20\x22\x7b\x80\xa7\x80\x57\
\x04\x80\xcf\xaa\xda\x25\x22\x3b\x81\xd1\x0c\xc0\xc7\x75\x00\xa5\
\x58\x25\xef\x81\x32\x40\x19\xa0\x0c\xf0\x0f\x32\x55\x59\x98\x3d\
\xa9\x0f\x07\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x03\x8c\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x00\xdd\x00\x00\x00\xdd\
\x01\x70\x53\xa2\x07\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x03\x09\x49\x44\
\x41\x54\x58\x85\xbd\x97\x3d\x68\x53\x51\x14\xc7\x7f\x27\x5a\xa3\
\x92\x48\x8b\x44\x9a\xa4\x4a\xd4\xfa\xb1\x68\xac\x14\x2a\x54\xb4\
\xd0\x41\x14\x15\x9d\x44\x71\x72\x12\xc9\x22\x2e\x56\x10\x82\x20\
\xae\x4e\x0e\x82\x0e\x0a\xce\x51\xeb\xe0\x20\xb6\xd2\xd6\x52\xc5\
\x2f\x04\x69\xea\x90\xfa\xd1\x0a\x8d\xd6\xa6\xd6\x9a\x6a\x7a\x1c\
\xde\x2d\x3e\x93\xd7\xf6\xa5\x69\x72\xe0\x2d\xf7\xdc\xff\xf9\xff\
\xee\xe5\x7e\x3d\x51\x55\xdc\x84\x88\x74\x00\x49\xe0\x82\xaa\xa6\
\x5d\x89\x5c\x84\xa7\x88\xbe\x91\x95\xe1\xfd\x4d\x40\xbf\x88\x9c\
\x11\x91\x25\x8b\x01\x20\x45\xcc\x40\x2a\x72\xe2\x47\x56\x73\x93\
\x93\x5f\x1e\x1d\x58\x99\x4d\x3f\x9b\x00\x62\xaa\xda\x5d\x0a\x40\
\x31\x33\xc0\xf7\x40\x6a\x68\x7a\x95\xbf\x2a\x74\xa0\x6f\x43\x70\
\x5f\x67\xc6\xb3\xac\xa6\x5d\x44\x6e\x89\x48\x6d\x45\x00\xa6\xbd\
\xde\x50\x66\x4d\x7a\xeb\xd8\x9a\x91\x6e\x6f\x70\xf7\xf6\x75\xc7\
\xd2\xb9\x9a\xe8\xa5\x08\xc8\x3b\x11\x39\x5b\x36\x00\x11\x09\x02\
\xbe\x19\x4d\xce\x3b\xb5\x67\x34\x3c\x9c\xcb\x56\x4f\xbc\xab\x8e\
\x5e\x6c\xf2\x6f\x3a\x95\x04\x2e\x8a\x48\x8d\xe9\xbf\xc5\x68\x4a\
\x07\x10\x91\x93\xc0\x2b\xc0\x9f\x97\x5a\xfd\xd3\xf7\x2d\xf2\xe9\
\x71\x73\x72\x7c\xe0\xc6\x1f\xa0\x41\x55\x47\x4d\xae\x19\x78\x63\
\xb4\x0b\x03\x10\x91\x80\x88\x24\x80\x38\x70\x08\x18\xb6\xe7\xa7\
\x47\x07\x7b\x33\x57\xa3\xbe\xdf\x1f\x7b\x1e\x00\x7b\x55\x75\x70\
\x26\xa7\xaa\x37\x81\xa3\x40\x5c\x44\x12\x22\x12\x28\x0a\x40\x44\
\x96\x03\xed\xc0\x38\xb0\x43\x55\xfb\x6c\xe9\x5f\xd9\xde\xeb\x4f\
\xc6\xaf\x35\x6f\xd4\xc9\xb1\xe3\xaa\x7a\x5e\x55\xff\xe4\xd7\x50\
\xd5\x2e\x20\x0a\x8c\x02\xed\xa6\x66\x61\xa8\x6a\xc1\x07\xdc\x01\
\x12\x98\x6d\x6a\xda\x52\xfe\xd3\x9d\x3d\x4b\xc3\x0d\xfd\x40\x07\
\x10\x72\xd2\x3a\xd4\xf2\x00\x77\x81\x3b\x8e\x79\x07\x41\x1b\xf0\
\x16\xf0\xe7\xb5\xa7\x10\xcf\x14\x70\x09\x58\xe2\xc6\xdc\xa6\xf5\
\x61\xad\xa3\xb6\x39\x01\x80\x00\xf0\x1d\xa8\x77\x28\x72\x19\x68\
\x2d\xc6\x38\x4f\xbf\x1e\x18\x03\x02\x73\x01\xc4\x81\xdb\x0b\x35\
\x71\x01\x71\x1b\x88\x3b\x02\x00\x2b\x80\x11\x60\x67\x19\x01\x1a\
\x8c\xc7\x8a\x99\x36\xfb\x2e\x38\x08\x0c\xa8\xea\x0b\xc7\xd5\xba\
\x08\xa1\xaa\x2f\x81\x01\xe3\x05\xfc\xbf\x0d\xb7\x02\x7d\xf9\xa2\
\x32\xc4\x0b\xe3\x55\x00\xb0\x19\x8b\xae\xdc\x91\x34\x5e\x05\x00\
\x9b\x80\xf7\x15\x00\xe8\x37\x5e\x05\x00\x7e\xac\x93\xaf\xdc\x31\
\x8e\xed\x5e\xb1\x03\x0c\x03\x75\x15\x00\xa8\xc3\x76\xaf\xe4\x03\
\xac\xad\x00\xc0\xda\xd9\x00\x9e\x02\xad\x15\x00\x68\x35\x5e\x56\
\xd8\x0e\x89\x30\x30\x49\xde\x1d\xb0\xc8\x07\x91\xdf\x78\x84\x0b\
\x0e\x22\x55\xfd\x0c\x3c\x07\x62\x65\x1c\x7d\x0c\x78\x6e\xbc\xfe\
\x9f\x01\x43\xd8\x02\x64\x81\x6d\x65\x18\xfd\x36\x53\xbb\x65\xd6\
\xcb\xc8\x74\xbc\x8f\x75\x5a\x55\x2d\xa2\x79\x95\xa9\x79\xbf\x20\
\xe7\xd0\xb9\x16\xf8\x44\xde\xad\x55\x22\x40\xdc\xd4\xac\x9d\x17\
\xc0\x08\x1a\x81\x0c\x70\x0e\xf0\x94\x60\xec\x31\x35\x32\x40\xa3\
\x63\x9f\x39\xc4\xbb\xb0\xf6\x6b\x17\x0e\x0f\x14\x17\xe6\xf5\x46\
\x3b\x0c\xec\x9a\xb5\xdf\x3c\x45\xc2\x58\xef\xbf\x09\xb3\x82\xe7\
\x9d\x0d\x33\xea\x98\xd1\x74\x60\xdb\x72\x4e\x9f\xab\x7f\x43\x11\
\x39\x0c\x5c\x01\x56\x03\xf7\x80\x87\xc0\x07\xfe\x9d\x68\x41\x60\
\x1d\xb0\x0f\x38\x0c\x7c\xc5\x7a\xff\xdd\x9b\xb7\xb6\x1b\x00\x1b\
\x48\x14\x38\x02\xec\xc5\x9a\x9d\x90\x49\x0d\x01\x9f\x81\x4e\x20\
\xa1\xaa\xaf\xdd\xd6\xfc\x0b\x02\x4a\x54\xeb\x8e\x31\x94\xa7\x00\
\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x04\x55\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x03\x00\x00\x00\x44\xa4\x8a\xc6\
\x00\x00\x00\x03\x73\x42\x49\x54\x08\x08\x08\xdb\xe1\x4f\xe0\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x00\xdd\x00\x00\x00\xdd\x01\
\x70\x53\xa2\x07\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\
\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\x70\
\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x01\xef\x50\x4c\x54\
\x45\xff\xff\xff\x00\xff\xff\x00\xff\xff\x40\xbf\xbf\x33\x99\xcc\
\x2b\xaa\xaa\x55\xd5\xd5\x40\x9f\xbf\x4d\xb3\xcc\x37\xa4\xb6\x49\
\x92\xa4\x49\xc8\xc8\x30\xaf\xbf\x50\xbf\xcf\x39\xaa\xb8\x47\xc6\
\xd5\x40\xbf\xbf\x4e\xc8\xd3\x3b\xa7\xba\x42\xc5\xce\x3a\xa8\xbd\
\x37\xac\xc1\x38\xa8\xc1\x46\xc3\xcd\x42\xc2\xd0\x38\xa7\xbe\x36\
\xaa\xc0\x37\xaa\xbf\x36\xaa\xbe\x36\xa8\xbf\x45\xc6\xd0\x38\x8b\
\xaf\x38\xa8\xbf\x37\xaa\xc0\x38\xaa\xbf\x39\xaa\xbf\x47\xc6\xcf\
\x38\xaa\xbe\x37\xa9\xbf\x47\xc7\xcf\x37\xa9\xc0\x35\x85\xab\x37\
\xa9\xbe\x46\xc8\xd1\x38\xa9\xc0\x37\xa9\xbf\x47\xc7\xd0\x3e\xa5\
\xbc\x47\xc7\xcf\x37\xa9\xbf\x36\xa9\xbf\x37\xa9\xc0\x49\xc9\xd1\
\x48\xca\xd1\x37\xa9\xbf\x37\xaa\xc0\x37\xa9\xbf\x36\x81\xa9\x36\
\xa9\xbf\x48\xca\xd3\x37\xa9\xbf\x37\xa9\xc0\x35\x9a\xb5\x4a\xcb\
\xd3\x4a\xcd\xd3\x37\xaa\xc0\x37\xa9\xc0\x4b\xcf\xd4\x4b\xd0\xd5\
\x37\xa9\xbf\x37\xa9\xbf\x37\xa9\xc0\x4c\xd1\xd6\x4d\xd2\xd6\x32\
\x74\xa3\x37\xa9\xbf\x4d\xd3\xd7\x4d\xd4\xd7\x37\xa9\xbf\x4d\xd3\
\xd6\x4d\xd4\xd7\x37\xa9\xbf\x4e\xd5\xd8\x37\xa9\xbf\x30\x70\xa1\
\x37\xa9\xc0\x4f\xd6\xd8\x37\xa9\xbf\x4f\xd7\xd8\x2f\x6c\x9f\x50\
\xd9\xda\x37\xa9\xbf\x50\xdb\xdb\x2c\x44\x7f\x2c\x46\x80\x2c\x47\
\x81\x2d\x4b\x83\x2d\x4f\x86\x2d\x69\xa0\x2d\x6a\xa0\x2d\x6a\xa1\
\x2d\x6b\xa2\x2d\x6c\xa3\x2e\x53\x89\x2e\x59\x8c\x2e\x59\x8d\x2e\
\x69\x9f\x2e\x69\xa0\x2e\x6a\x9f\x2f\x5a\x8e\x2f\x5b\x8e\x2f\x5e\
\x90\x2f\x5f\x90\x2f\x61\x91\x2f\x64\x93\x2f\x6d\xa0\x30\x66\x95\
\x30\x67\x95\x30\x6a\x97\x30\x6d\x99\x30\x6e\xa1\x31\x6f\x9a\x31\
\x70\x9b\x31\x71\xa2\x31\x74\x9d\x31\x76\x9f\x32\x77\x9f\x32\x7e\
\xa4\x32\x7f\xa4\x33\x77\xa4\x33\x86\xa9\x34\x8d\xad\x35\x94\xb2\
\x36\x9e\xb8\x36\xa2\xbb\x36\xa3\xbb\x37\xa5\xbc\x37\xa5\xbd\x37\
\xa6\xbd\x37\xa7\xbd\x37\xa7\xbe\x37\xa8\xbe\x37\xa9\xbf\x38\xa2\
\xbc\x39\x88\xad\x39\xaa\xc0\x39\xac\xc1\x3d\x94\xb3\x3e\x96\xb5\
\x41\xa1\xba\x44\xac\xc1\x46\xb4\xc5\x47\xb9\xc8\x48\xb9\xc8\x49\
\xbe\xcb\x4b\xc2\xcc\x4b\xc3\xcd\x4b\xc7\xcf\x4d\xcc\xd3\x50\xd4\
\xd7\x50\xd6\xd9\x51\xdc\xdb\x52\xdc\xdb\x52\xdc\xdc\x52\xdd\xdc\
\x9e\x4a\x19\x15\x00\x00\x00\x5d\x74\x52\x4e\x53\x00\x01\x02\x04\
\x05\x06\x06\x08\x0a\x0e\x0e\x0e\x10\x10\x12\x12\x14\x17\x1a\x1f\
\x23\x25\x29\x33\x36\x37\x39\x3c\x4b\x4c\x51\x56\x5b\x5d\x60\x6c\
\x70\x72\x74\x76\x79\x86\x86\x86\x89\x8b\x8d\x90\x96\x98\x9b\x9d\
\xa1\xa7\xab\xad\xaf\xb2\xb2\xb8\xba\xc1\xc2\xc5\xcc\xd1\xd5\xdd\
\xe3\xe4\xe7\xe9\xea\xef\xf0\xf0\xf0\xf2\xf3\xf4\xf4\xf6\xf6\xf7\
\xf9\xf9\xf9\xfa\xfa\xfd\xfd\xfe\xfe\xbe\xae\x42\x56\x00\x00\x01\
\x6f\x49\x44\x41\x54\x38\x4f\xcd\xd0\xe5\x57\xc2\x50\x1c\xc6\xf1\
\xd9\xdd\x1d\x18\xd8\xdd\xdd\xd8\x81\xdd\x78\x0d\x84\x19\xd8\xdd\
\x8a\x81\xfd\x53\xb1\xbb\xee\x1f\xea\x82\xc1\x86\xe2\x4b\x8f\xcf\
\xab\xed\x7e\xbe\xe7\xec\x9e\x11\xc4\x3f\x9f\x99\xa8\x30\xe1\x17\
\xb6\x14\x4b\x36\xb7\x4a\x4c\xb2\x5d\x64\xf9\xde\xd4\x06\xa4\x98\
\x60\xf7\xf8\x5a\xd8\xee\x5f\x03\x08\xfd\x91\xbd\xd3\xda\x40\x33\
\x8f\x56\x00\xc0\xf3\x07\x0e\xc8\x95\x01\xec\x0c\xa0\x45\xca\x2b\
\xbe\xa9\x79\x9c\x54\x0b\x70\xb8\x8c\xd0\x02\xe5\x90\x65\xc4\x0e\
\xe9\xed\x18\x6b\x61\x77\x08\xa1\xb9\x53\x3a\x88\x10\xb0\x47\x5e\
\x37\xa6\x76\xb1\xde\x8b\xd0\xcc\x09\xed\xe0\xc7\x63\xdf\xe2\x1e\
\x9a\xf1\xdd\x38\x42\x68\xfa\x98\xf1\x26\x4b\x3d\x07\x95\xbe\x33\
\xfc\x79\xae\xec\x43\x68\xf2\x88\x71\x28\xe0\x38\xbc\x0a\xb3\x7b\
\x9a\x25\xc9\x3e\x34\x71\xc0\x3a\xc4\x32\x6a\x91\xd8\xa8\x63\x7c\
\xa9\x20\xa9\x60\x78\x5f\xe7\x10\x48\xb1\x4b\x46\x27\xc7\x2f\xab\
\x24\xbd\x11\x0d\xe7\x32\x67\x2a\x90\x72\x8c\xaf\xe5\x8c\x8f\x9e\
\x71\x0e\x12\xfa\x03\xc9\x75\xec\xed\x5e\xd5\x24\xeb\xcf\x5a\x7d\
\x90\xca\xde\xd0\x35\xaa\x48\xfa\x71\xa3\x62\x5d\xf5\x88\x0d\x81\
\xd8\xf0\x13\xdc\xc6\x58\x97\x3f\x60\x5e\xe0\x65\x08\x88\x1c\xc6\
\x07\xef\x31\x2f\xa8\x24\xf8\xeb\xa2\x5c\x79\x8b\xf9\x41\xb6\x20\
\x88\xa1\x82\x25\x8c\x9b\x1b\x0c\x41\xa4\x20\x20\x5a\x49\xf2\x0a\
\x57\xbb\x11\x3e\x61\xf9\x1d\x6c\xe0\x2f\x0c\x82\x95\x8a\xb7\x2a\
\x27\xe6\xd1\x31\x24\xb3\x1e\xa0\xc5\x4a\x18\x10\x35\xea\x32\x5b\
\xfd\x8b\x8d\x28\x35\xda\xc8\x09\xfb\x24\x6b\xe3\xa3\xbf\xd9\x17\
\x0b\x98\xc8\xda\x8e\x31\xc4\x13\x00\x00\x00\x00\x49\x45\x4e\x44\
\xae\x42\x60\x82\
\x00\x00\x02\xf5\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x03\x00\x00\x00\x44\xa4\x8a\xc6\
\x00\x00\x00\x03\x73\x42\x49\x54\x08\x08\x08\xdb\xe1\x4f\xe0\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x00\xe1\x00\x00\x00\xe1\x01\
\x70\x18\x1c\x2e\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\
\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\x70\
\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x01\x2f\x50\x4c\x54\
\x45\xff\xff\xff\x00\xff\xff\xff\xff\xff\xbf\xbf\xff\xcc\xcc\xff\
\x49\xb6\xff\xbf\xdf\xff\x55\xaa\xea\xbf\xdf\xef\xb7\xd7\xef\x57\
\xae\xec\x55\xaa\xed\xb6\xdb\xf3\x55\xaa\xef\xb9\xd7\xf0\xb6\xdb\
\xf1\xb7\xdb\xf2\x54\xab\xef\x56\xad\xef\xb8\xd8\xf2\xb7\xda\xf3\
\x71\xb7\xee\xb7\xd9\xf3\x55\xad\xee\x55\xad\xef\xb7\xda\xf2\x55\
\xac\xee\xb7\xd9\xf3\x54\xad\xed\x55\xac\xee\xb9\xda\xf3\xb7\xd9\
\xf1\x54\xac\xed\x56\xad\xee\x55\xab\xef\x55\xac\xee\x55\xad\xee\
\x6c\xb7\xee\x55\xac\xee\xb8\xd9\xf2\x55\xac\xee\xb9\xd9\xf2\xb8\
\xd9\xf2\xb8\xd9\xf2\x55\xac\xee\xb8\xd9\xf2\x55\xac\xee\xb9\xd9\
\xf2\x55\xac\xee\x55\xac\xee\x56\xac\xee\x59\xae\xee\x5c\xb0\xef\
\x5f\xb1\xef\x60\xb1\xef\x60\xb2\xef\x62\xb2\xef\x64\xb3\xef\x68\
\xb5\xf0\x6e\xb8\xf1\x6f\xb9\xf1\x70\xb9\xf1\x74\xbb\xf1\x7a\xbd\
\xef\x7c\xbf\xf2\x84\xc3\xf3\x86\xc2\xf0\x87\xc4\xf3\x8d\xc7\xf4\
\x90\xc9\xf4\x98\xcd\xf5\x9a\xce\xf5\x9c\xcf\xf5\x9f\xce\xf1\xa2\
\xcf\xf1\xa3\xd2\xf6\xa6\xd3\xf6\xa6\xd4\xf6\xac\xd7\xf7\xad\xd4\
\xf2\xb1\xd9\xf7\xb7\xd9\xf2\xb8\xd9\xf2\xbf\xe0\xf9\xc7\xe4\xf9\
\xc8\xe4\xf9\xce\xe7\xfa\xd2\xe9\xfb\xda\xed\xfb\xdf\xef\xfc\xe0\
\xf0\xfc\xf0\xf8\xfe\xf3\xf9\xfe\xf5\xfa\xfe\xf6\xfa\xfe\xf9\xfc\
\xfe\xfa\xfd\xff\xfc\xfd\xff\xfd\xfe\xff\xfe\xfe\xff\xff\xff\xff\
\x49\x83\xe7\x78\x00\x00\x00\x31\x74\x52\x4e\x53\x00\x01\x01\x04\
\x05\x07\x08\x0c\x10\x20\x29\x2a\x2a\x30\x33\x38\x39\x40\x41\x4f\
\x52\x58\x58\x5a\x60\x60\x69\x6a\x73\x7b\x7b\x80\xa0\xa1\xba\xbf\
\xc0\xd0\xd4\xd6\xd8\xe4\xe5\xe9\xea\xea\xf4\xf6\xf8\xcb\xdd\xc3\
\x9b\x00\x00\x00\xfb\x49\x44\x41\x54\x38\xcb\x63\x60\xa0\x1c\xf0\
\x48\x22\x01\x11\x36\x0c\x79\xb9\x20\x14\xa0\xcd\x81\xae\x40\x17\
\x55\x41\x90\x34\xba\x02\x34\xf9\x20\x79\xf2\x15\xf8\x3b\xd9\x83\
\x80\xaa\x02\x32\x90\x62\x87\x2b\x08\xb4\x30\xc4\x06\x14\xe1\x0a\
\xbc\xb0\xca\x1b\x1a\xc0\x15\x78\x62\x57\x60\x48\xa6\x02\x63\x47\
\x33\x43\x13\xa8\x02\x75\x54\x05\xae\xde\x46\x86\x86\x56\x31\x29\
\xf1\x11\x2e\x50\x05\xc2\x5a\x28\x0a\xdc\x53\xc2\x9c\x4d\xfd\x52\
\x52\x52\x62\x6d\xa0\x0a\x18\x98\xf9\xe4\xf4\x83\xc4\x38\x45\x21\
\x7c\x87\xe8\x94\x94\xa4\x38\xa0\x82\x70\x98\x1b\x40\x40\x40\x93\
\x9f\x41\x10\xc2\xb7\x4b\x4e\x81\x80\x00\x64\x05\x0c\x2c\x4c\x30\
\x05\x86\x6e\x89\x60\xf9\x04\x5b\x14\x05\x40\x00\x53\x60\x1b\x09\
\x56\xe0\x63\x88\x43\x81\xb9\x6b\x68\x42\x4a\x4a\xbc\x87\x21\x2e\
\x05\x96\xae\xc1\x51\x21\xbe\xd6\x86\x38\x15\x60\x86\x24\xf5\x14\
\x70\x63\x97\xd7\x41\x24\x2d\x25\xac\x0a\x64\x11\x0a\x18\x85\x24\
\x24\x24\x34\x80\x62\x4a\x32\x12\x70\xc0\x8d\x9e\x42\x55\x0c\xf5\
\xc4\xf1\x66\x33\x65\x35\x2e\xfc\xf9\x90\x97\x95\x81\xea\x00\x00\
\xb5\x6f\x8e\x6a\xb3\x3b\x5b\xe3\x00\x00\x00\x00\x49\x45\x4e\x44\
\xae\x42\x60\x82\
"
qt_resource_name = b"\
\x00\x0d\
\x06\x92\xe1\x27\
\x00\x72\
\x00\x65\x00\x74\x00\x77\x00\x65\x00\x65\x00\x74\x00\x2d\x00\x32\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0c\
\x0b\x91\x06\x07\
\x00\x75\
\x00\x70\x00\x64\x00\x61\x00\x74\x00\x65\x00\x2d\x00\x31\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0a\
\x07\x0f\x6f\xc7\
\x00\x73\
\x00\x65\x00\x6e\x00\x64\x00\x2d\x00\x31\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0b\
\x07\xf2\xe0\xc7\
\x00\x74\
\x00\x77\x00\x65\x00\x65\x00\x74\x00\x2d\x00\x32\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x04\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\x3e\x00\x00\x00\x00\x00\x01\x00\x00\x06\x64\
\x00\x00\x00\x58\x00\x00\x00\x00\x00\x01\x00\x00\x0a\xbd\
\x00\x00\x00\x20\x00\x00\x00\x00\x00\x01\x00\x00\x02\xd4\
"
def qInitResources():
QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| mokachokokarbon/Tsuki | src/resources/resources.py | Python | mit | 15,812 |
# -*- coding: utf-8 -*-
__author__ = "Sergey Karakovskiy, sergey at idsia fullstop ch"
__date__ = "$May 1, 2009 2:46:34 AM$"
from marioagent import MarioAgent
class ForwardAgent(MarioAgent):
""" In fact the Python twin of the
corresponding Java ForwardAgent.
"""
action = None
actionStr = None
KEY_JUMP = 3
KEY_SPEED = 4
levelScene = None
mayMarioJump = None
isMarioOnGround = None
marioFloats = None
enemiesFloats = None
isEpisodeOver = False
marioState = None
trueJumpCounter = 0;
trueSpeedCounter = 0;
"""default values for observation details"""
receptiveFieldWidth = 19
receptiveFieldHeight = 19
marioEgoRow = 9
marioEgoCol = 9
agentName = "AmiCo Python Forward Agent"
def reset(self):
self.isEpisodeOver = False
self.trueJumpCounter = 0;
self.trueSpeedCounter = 0;
def __init__(self):
"""Constructor"""
self.trueJumpCounter = 0
self.trueSpeedCounter = 0
self.action = [0, 0, 0, 0, 0, 0]
self.action[1] = 1
self.actionStr = ""
self.agentName = "Python Forward Agent"
def getReceptiveFieldCellValue(self, x, y):
if (x < 0 or x >= self.marioEgoRow or y < 0 or y >= self.marioEgoCol):
return 0
return self.levelScene[x][y]
def setObservationDetails(self, rfWidth, rfHeight, egoRow, egoCol):
self.receptiveFieldWidth = rfWidth
self.receptiveFieldHeight = rfHeight
self.marioEgoRow = egoRow;
self.marioEgoCol = egoCol;
def _dangerOfGap(self):
fromX = self.marioEgoRow
fromY = self.marioEgoCol
self.marioEgoRow = fromX + 1
self.marioEgoCol = fromY + 1
if (fromX > 3):
fromX -= 2;
for x in range(fromX, self.receptiveFieldWidth):
f = True
for y in range(fromY, self.receptiveFieldHeight):
if (self.getReceptiveFieldCellValue(y, x) != 0):
f = False
if (f or self.getReceptiveFieldCellValue(self.marioEgoRow + 1, self.marioEgoCol) == 0 or \
(self.marioState > 0 and \
(self.getReceptiveFieldCellValue(self.marioEgoRow + 1, self.marioEgoCol - 1) != 0 or \
self.getReceptiveFieldCellValue(self.marioEgoRow + 1, self.marioEgoCol) != 0))):
return True
return False
def giveIntermediateReward(self, reward):
pass
def _a2(self):
""" Interesting, sometimes very useful behaviour which might prevent falling down into a gap!
Just substitue getAction by this method and see how it behaves.
"""
# if (self.mayMarioJump):
# print "m: %d, %s, %s, 12: %d, 13: %d, j: %d" \
# % (self.getReceptiveFieldCellValue(self.marioEgoRow + 2, self.marioEgoCol + 2), self.mayMarioJump, self.isMarioOnGround, \
# self.getReceptiveFieldCellValue(self.marioEgoRow + 2, self.marioEgoCol + 3), self.getReceptiveFieldCellValue(self.marioEgoRow + 2, self.marioEgoCol + 3), self.trueJumpCounter)
# else:
# if self.levelScene == None:
# print "Bad news....."
# print "m: %d, 12: %d, 13: %d, j: %d" \
# % (self.getReceptiveFieldCellValue(self.marioEgoRow + 2, self.marioEgoCol + 2), \
# self.getReceptiveFieldCellValue(self.marioEgoRow + 2, self.marioEgoCol + 3), self.getReceptiveFieldCellValue(self.marioEgoRow + 2, self.marioEgoCol + 3), self.trueJumpCounter)
a = [0, 0, 0, 0, 0, 0]
a[1] = 1
danger = self._dangerOfGap()
if (self.getReceptiveFieldCellValue(self.marioEgoRow + 2, self.marioEgoCol + 3) != 0 or \
self.getReceptiveFieldCellValue(self.marioEgoRow + 2, self.marioEgoCol + 4) != 0 or danger):
if (self.mayMarioJump or \
(not self.isMarioOnGround and a[self.KEY_JUMP] == 1)):
a[self.KEY_JUMP] = 1
self.trueJumpCounter += 1
else:
a[self.KEY_JUMP] = 0;
self.trueJumpCounter = 0
if (self.trueJumpCounter > 16):
self.trueJumpCounter = 0
self.action[self.KEY_JUMP] = 0;
a[self.KEY_SPEED] = danger
actionStr = ""
for i in range(6):
if a[i] == 1:
actionStr += '1'
elif a[i] == 0:
actionStr += '0'
else:
print "something very dangerous happen...."
actionStr += "\r\n"
#print "action: " , actionStr
return actionStr
    def getAction(self):
        """Analyse the current observation and return the action tuple.

        Returns a 6-tuple of 0/1 flags (indexed by self.KEY_* constants).
        Strategy: if an obstacle sits in the two cells ahead of Mario, or a
        gap is detected, hold jump (while allowed) and speed up over gaps.
        """
        #print "M: mayJump: %s, onGround: %s, level[11,12]: %d, level[11,13]: %d, jc: %d" % (self.mayMarioJump, self.isMarioOnGround, self.levelScene[11,12], self.levelScene[11,13], self.trueJumpCounter)
        if (self.isEpisodeOver):
            # Episode finished: press every key (protocol's terminal action).
            return (1, 1, 1, 1, 1, 1)
        #print "LevelScene: \n"
        #print self.levelScene
        danger = self._dangerOfGap()
        # Obstacle one or two cells ahead at Mario's row, or a gap ahead?
        if (self.getReceptiveFieldCellValue(self.marioEgoRow, self.marioEgoCol + 2) != 0 or \
            self.getReceptiveFieldCellValue(self.marioEgoRow, self.marioEgoCol + 1) != 0 or danger):
            # Start a jump, or keep holding jump while still airborne.
            if (self.mayMarioJump or \
                (not self.isMarioOnGround and self.action[self.KEY_JUMP] == 1)):
                self.action[self.KEY_JUMP] = 1
                self.trueJumpCounter += 1
            else:
                self.action[self.KEY_JUMP] = 0;
                self.trueJumpCounter = 0
        # Cap the jump-hold duration at 16 ticks to avoid holding forever.
        if (self.trueJumpCounter > 16):
            self.trueJumpCounter = 0
            self.action[self.KEY_JUMP] = 0;
        # Run/speed key doubles as "sprint over the gap" flag.
        self.action[self.KEY_SPEED] = danger
        return tuple(self.action)
    def getName(self):
        # Return this agent's name (set elsewhere on the instance).
        return self.agentName
    def integrateObservation(self, squashedObservation, squashedEnemies, marioPos, enemiesPos, marioState):
        """This method stores the observation inside the agent.

        The squashed (flattened, row-major) level and enemy grids are
        re-assembled into receptiveFieldHeight x receptiveFieldWidth
        row lists before being stored on the instance.
        """
        #print "Py: got observation::: squashedObservation: \n", squashedObservation
        #print "Py: got observation::: squashedEnemies: \n", squashedEnemies
        #print "Py: got observation::: marioPos: \n", marioPos
        #print "Py: got observation::: enemiesPos: \n", enemiesPos
        #print "Py: got observation::: marioState: \n", marioState
        row = self.receptiveFieldHeight
        col = self.receptiveFieldWidth
        levelScene=[]
        enemiesObservation=[]
        # Un-flatten: each slice of `col` consecutive values is one grid row.
        for i in range(row):
            levelScene.append(squashedObservation[i*col:i*col+col])
        for i in range(row):
            enemiesObservation.append(squashedEnemies[i*col:i*col+col])
        self.marioFloats = marioPos
        self.enemiesFloats = enemiesPos
        # marioState layout (from the indices used here): [1]=state,
        # [2]=on-ground flag, [3]=may-jump flag.
        self.mayMarioJump = marioState[3]
        self.isMarioOnGround = marioState[2]
        self.marioState = marioState[1]
        self.levelScene = levelScene
        # NOTE(review): enemiesObservation is built but never stored on self
        # -- looks like an oversight; confirm whether it should be kept.
        #self.printLevelScene()
    def printLevelScene(self):
        """Debug helper: print the receptive-field grid (Python 2 print)."""
        ret = ""
        # NOTE(review): x iterates over the field WIDTH but is passed as the
        # first argument of getReceptiveFieldCellValue, and y (HEIGHT) as the
        # second -- possible row/column transposition; confirm against the
        # accessor's signature.
        for x in range(self.receptiveFieldWidth):
            tmpData = ""
            for y in range(self.receptiveFieldHeight):
                tmpData += self.mapElToStr(self.getReceptiveFieldCellValue(x, y));
            ret += "\n%s" % tmpData;
        print ret
def mapElToStr(self, el):
"""maps element of levelScene to str representation"""
s = "";
if (el == 0):
s = "##"
s += "#MM#" if (el == 95) else str(el)
while (len(s) < 4):
s += "#";
return s + " "
    def printObs(self):
        """for debug: dump the raw stored observation (Python 2 print)."""
        print repr(self.observation)
| hunse/mario-ai | src/amico/python/agents/forwardagent.py | Python | bsd-3-clause | 7,913 |
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <[email protected]>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division
import logging
from openfisca_core import formulas, reforms
from openfisca_france.model.prelevements_obligatoires.impot_revenu import ir
log = logging.getLogger(__name__)
class iaidrdi(formulas.SimpleFormulaColumn):
    # Reform override of the reference iaidrdi formula: the tax-reduction
    # term is intentionally dropped (the commented lines below show what
    # the reference computation subtracts).
    reference = ir.iaidrdi
    label = u"iaidrdi"

    def function(self, simulation, period):
        '''
        Tax after imputation of tax reductions.

        Under this reform no reductions are applied, so the net tax
        (ip_net) is returned unchanged.
        '''
        period = period.start.offset('first-of', 'month').period('year')
        ip_net = simulation.calculate('ip_net', period)
        # reductions = simulation.calculate('reductions', period)
        # return period, ip_net - reductions
        return period, ip_net
def build_reform(tax_benefit_system):
    """Build the 'no tax reductions' reform on top of *tax_benefit_system*.

    Only the iaidrdi formula is replaced; the legislation parameters are
    left untouched (hence the commented-out legislation plumbing).
    """
    # reference_legislation_json = tax_benefit_system.legislation_json
    # reform_legislation_json = copy.deepcopy(reference_legislation_json)
    # reform_legislation_json['children'].update(reform_legislation_subtree)
    Reform = reforms.make_reform(
        # legislation_json = reform_legislation_json,
        name = u'Pas de réduction d impôt ',
        new_formulas = [iaidrdi],
        reference = tax_benefit_system,
        )
    return Reform()
| MalkIPP/ipp_work | ipp_work/reforms/ir_reduc.py | Python | agpl-3.0 | 2,110 |
#!/usr/bin/python
# -*- coding:utf-8 -*-
from jpype import *
import logging
import time
import settings_MQ as settings
logger = logging.getLogger("MQPullConsumer")
__all__ = ["MQPushConsumer"]
DefaultMQPushConsumer= JPackage('com.alibaba.rocketmq.client.consumer').DefaultMQPushConsumer
MQClientException = JPackage('com.alibaba.rocketmq.client.exception').MQClientException
#MessageExt = JPackage('com.alibaba.rocketmq.common.message').MessageExt
#ConsumeConsurrentlyContext = JPackage('com.alibaba.rocketmq.client.consumer.listener').ConsumeConsurrentlyContext
#ConsumeConsurrentlyStatus = JPackage('com.alibaba.rocketmq.client.consumer.listener').ConsumeConsurrentlyStatus
#MessageListenerConcurrently = JPackage('com.alibaba.rocketmq.client.consumer.listener').MessageListenerConcurrently
#from MQMessage import ConsumeConcurrentlyStatus, ConsumeOrderlyStatus
class MQPushConsumer(object):
    '''Thin jpype wrapper around the Java push consumer:
    public class DefaultMQPushConsumer extends ClientConfig implements MQPushConsumer {
    '''
    def __init__(self, groupName, namesrvAddr):
        """
        :param groupName: RocketMQ consumer group name
        :param namesrvAddr: name-server address the consumer connects to
        :return:
        """
        self.consumer = None  # actual Java object is created in init()
        self.groupName = groupName
        self.namesrvAddr = namesrvAddr
        self.instanceName = str(int(time.time()*1000))  # millisecond timestamp used as the instance name
    def init(self):
        """Batch-configure the basic settings (grouped here to keep the number
        of wrapped API methods small; individual settings can be moved out of
        init() later if needed)."""
        logger.info('Initializing consumer ' + self.instanceName + ' ...')
        self.consumer = DefaultMQPushConsumer(JString(self.groupName))  # create the Java consumer instance
        self.consumer.setNamesrvAddr(JString(self.namesrvAddr))
        self.consumer.setInstanceName(JString(self.instanceName))
    def start(self):
        """Start the underlying Java consumer.
        # JAVA prototype
        # public void start() throws MQClientException {
        """
        logger.info('Starting consumer ' + self.instanceName + ' ...')
        self.consumer.start()
    def shutdown(self):
        """Shut the consumer down and release its resources.
        # JAVA prototype
        # public void shutdown() {
        """
        logger.info('Shutting down consumer ' + self.instanceName + ' ...')
        self.consumer.shutdown()
    def setMessageModel(self, messageModel):
        """Set clustering vs. broadcasting consumption model.
        # JAVA prototype
        # public void setMessageModel(MessageModel messageModel)
        """
        logger.info('Setting message model of instance ' + self.instanceName + ' to ' + str(messageModel))
        #self.consumer.setMessageModel(JObject(messageModel, "com.alibaba.rocketmq.common.protocol.heartbeat.MessageModel"))
        self.consumer.setMessageModel(messageModel)
    def subscribe(self, topic, subExpression):
        # Subscribe to *topic*, filtered by a tag expression such as "TagA || TagB".
        # JAVA prototype
        # public void subscribe(String topic, String subExpression) throws MQClientException {
        # public void subscribe(String topic, String fullClassName, String filterClassSource) throws MQClientException {
        self.consumer.subscribe(JString(topic), JString(subExpression))
    def unsubscribe(self, topic):
        # Remove an existing subscription.
        # JAVA prototype
        # public void unsubscribe(String topic) {
        self.consumer.unsubscribe(JString(topic))
    def setConsumeFromWhere(self, fromwhere):
        # Choose where consumption starts (first offset / last offset / timestamp).
        # JAVA prototype
        # public void setConsumeFromWhere(ConsumeFromWhere consumeFromWhere) {
        #self.consumer.setConsumeFromWhere(JObject(fromwhere, "com.alibaba.rocketmq.common.consumer.ConsumeFromWhere"))
        self.consumer.setConsumeFromWhere(fromwhere)
    def registerMessageListener(self, listener):
        # Register a (jpype-proxied) concurrent or orderly message listener.
        # JAVA prototype
        # public void registerMessageListener(MessageListenerConcurrently messageListener) {
        # public void registerMessageListener(MessageListenerOrderly messageListener) {
        self.consumer.registerMessageListener(listener)
# 下面内容移除到MQMesasgeListener.py中
#
# class MessageListenerConcurrently:
# '''接口类MessageListenerConcurrently的实现
# public interface MessageListenerConcurrently extends MessageListener {
# '''
# def consumeMessage(self, msgs, context):
# '''
# # JAVA prototype
# # ConsumeConcurrentlyStatus consumeMessage(final List<MessageExt> msgs, final ConsumeConcurrentlyContext context);
# '''
# logger.debug("Into consumerMessage of MessageListenerConcurrently")
# #msg = msgs.get(JInt(0))
# for msg in msgs:
# topic = msg.getTopic()
# tags = msg.getTags()
# body = str(msg.getBody()).decode(settings.MsgBodyEncoding)
#
# logger.debug(msg.toString())
# # In Python 2.x, bytes is just an alias for str. 所以bytes解码时要注意了, msg.body.decode会出错(bytes没有decode方法)!
# #logger.debug("Message body: " + str(msg.getBody()))
# #logger.debug("Message body: " + str(msg.getBody()).decode(settings.MsgBodyEncoding))
# logger.debug("Message body: " + body)
#
# if topic == "RMQTopicTest":
# # 执行TopicTest的消费逻辑
# if tags == "TagA":
# # 执行TagA的消费
# logger.debug("Got message with topic " + topic + " and tags " + tags)
# elif tags == "TagB":
# # 执行TagB的消费
# logger.debug("Got message with topic " + topic + " and tags " + tags)
# elif tags == "TagC":
# # 执行TagC的消费
# logger.debug("Got message with topic " + topic + " and tags " + tags)
# else:
# # 错误的Tag
# logger.error("Got message with topic " + topic + " and UNKNOWN tags " + tags)
# elif topic == "TopicTest1":
# # 执行TopicTest1的消费逻辑
# logger.debug("Got message with topic " + topic + " and tags " + tags)
# else:
# logger.debug("Got message with UNKNOWN topic " + topic )
#
# return ConsumeConcurrentlyStatus['CONSUME_SUCCESS']
#
# #实现
# msgListenerConcurrently = MessageListenerConcurrently()
# #JProxy("MessageListenerConcurrently", inst = msgListenerConcurrently)
# msgListenerConcurrentlyProxy = JProxy("com.alibaba.rocketmq.client.consumer.listener.MessageListenerConcurrently", inst = msgListenerConcurrently)
#
# class MessageListenerOrderly:
# '''接口类MessageListenerOrderly的实现
# public interface MessageListenerOrderly extends MessageListener {
# '''
# def __init__(self):
# #JAVA原子类
# self.consumeTimes = java.util.concurrent.atomic.AtomicLong(0)
#
# def consumeMessage(self, msgs, context):
# # JAVA prototype
# # ConsumeOrderlyStatus consumeMessage(final List<MessageExt> msgs, final ConsumeOrderlyContext context);
# context.setAutoCommit(False)
# logger.debug(java.lang.Thread.currentThread().getName() + " Receive New Messages: " + msgs.toString())
# #TODO: msgs.toString()可能需要改成for msg in msgs: msg.toString()
#
# self.consumeTimes.incrementAndGet()
# consumeTimes = self.consumeTimes.get()
# # print consumeTimes
# # print type(consumeTimes)
#
# if (consumeTimes % 2) == 0:
# logger.debug("consumeTimes % 2 = 0, return SUCCESS")
# return ConsumeOrderlyStatus['SUCCESS']
# elif (consumeTimes % 3) == 0:
# logger.debug("consumeTimes % 3 = 0, return ROLLBACK")
# return ConsumeOrderlyStatus['ROLLBACK']
# elif (consumeTimes % 4) == 0:
# logger.debug("consumeTimes % 4 = 0, return COMMIT")
# return ConsumeOrderlyStatus['COMMIT']
# elif (consumeTimes % 5) == 0:
# logger.debug("consumeTimes % 5 = 0, return SUSPEND_CURRENT_QUEUE_A_MOMENT")
# context.setSuspendCurrentQueueTimeMillis(3000)
# return ConsumeOrderlyStatus['SUSPEND_CURRENT_QUEUE_A_MOMENT']
# else:
# logger.debug("consumeTimes is not times of 2, 3, 4, 5, return SUCCESS")
# return ConsumeOrderlyStatus['SUCCESS']
#
# #实现
# msgListenerOrderly = MessageListenerOrderly()
# #JProxy("MessageListenerOrderly", inst = msgListenerOrderly)
# msgListenerOrderlyProxy = JProxy("com.alibaba.rocketmq.client.consumer.listener.MessageListenerOrderly", inst = msgListenerOrderly)
#
| GangLuICT/RMQ-Client4Python | MQPushConsumer.py | Python | mit | 8,416 |
import re
import os
from setuptools import setup, Extension
from codecs import open
from os import path
# Parse the package version out of tsfm/_version.py without importing the
# package (importing could fail before build dependencies are installed).
# Fix: use a context manager so the file handle is closed deterministically
# instead of leaking until garbage collection.
with open("tsfm/_version.py", "r") as _version_fh:
    version_file = _version_fh.read()
version_match = re.match(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file)
if (version_match):
    version = version_match.group(1)
else:
    raise RuntimeError("Unable to find version string in _version.py")

here = path.abspath(path.dirname(__file__))

# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
    long_description = f.read()

#bpexact_ext = Extension('bplogofun.exact', ['src/exact.c'])

setup(name = "tsfm",
      setup_requires=['cython','pytest-runner'],
      tests_require=['pytest'],
      install_requires=['cython', 'scipy', 'pandas', 'patsy', 'mpmath','statsmodels', 'numpy'],
      python_requires='~=3.5',
      packages = ["tsfm"],
      package_data={'tsfm': ['eps/Template.eps', 'eps/inverse_template.eps']},
      entry_points = {
          "console_scripts": ['tsfm = tsfm.tsfm:main']},
      version = version,
      author="Travis J. Lawrence and David H. Ardell",
      author_email="[email protected]",
      description = "tRNA structure function mapper",
      long_description=long_description,
      license='LGPLv3',
      url = "https://github.com/tlawrence3/tsfm",
      classifiers=['Development Status :: 4 - Beta',
                   'Environment :: Console',
                   'Intended Audience :: Science/Research',
                   'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',
                   'Natural Language :: English',
                   'Operating System :: MacOS :: MacOS X',
                   'Operating System :: POSIX :: Linux',
                   'Programming Language :: Python :: 3.4',
                   'Programming Language :: Python :: 3.5',
                   'Programming Language :: Python :: 3.6',
                   'Programming Language :: Python :: Implementation :: CPython',
                   'Topic :: Scientific/Engineering :: Bio-Informatics'],
      ext_modules=[Extension('tsfm.exact',['src/exact.c'])],)
| tlawrence3/tsfm | setup.py | Python | lgpl-3.0 | 2,152 |
# -*-coding:Utf-8 -*
# Copyright (c) 2014 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import unittest
from test.primaires.connex.static.commande import TestCommande
from test.primaires.joueur.static.joueur import ManipulationJoueur
from test.primaires.scripting.static.scripting import ManipulationScripting
class TestIndice(TestCommande, ManipulationJoueur, ManipulationScripting,
        unittest.TestCase):

    """Unit tests for the scripting function 'indice'."""

    def test_indice(self):
        """Tests the indice function on a list (1-based indexing)."""
        joueur = self.creer_joueur("simple", "Kredh")
        # Attach a scripted 'dit' trigger to the player's room; the script
        # looks up 1-based positions in the list and echoes them back.
        with self.scripter(joueur.salle, "dit") as test:
            test.ajouter_instructions("""
                animaux = liste("chien", "canard", "oie", "cheval", "pommier")
                nombre1 = indice("cheval", animaux)
                nombre2 = indice("pommier", animaux)
                dire personnage "${nombre1} ${nombre2}"
            """)
            msg = self.entrer_commande(joueur, "dire hey oh")
            # "cheval" is 4th, "pommier" is 5th -> indices are 1-based.
            self.assertEqual(msg, "Vous dites : hey oh\n4 5")

        self.supprimer_joueur(joueur)
| stormi/tsunami | src/test/primaires/scripting/fonctions/test_indice.py | Python | bsd-3-clause | 2,605 |
# pylint: disable=invalid-name
"""TVM operator for softmax and log_softmax compute."""
from __future__ import absolute_import
import tvm
@tvm.tag_scope(tag='softmax_output')
def softmax(x, axis=-1):
    """Perform softmax activation on the data.

    Parameters
    ----------
    x : tvm.Tensor
        can be any dimension

    axis : int
        channel axis (negative values count from the end)

    Returns
    -------
    output : tvm.Tensor
        output shape is the same as input

    Raises
    ------
    ValueError
        if ``axis`` is out of range for the rank of ``x``.
    """
    shape = x.shape
    if axis < 0:
        axis = len(shape) + axis
    # Bug fix: the original built ValueError(...) but never raised it, so an
    # out-of-range axis silently fell through.  Also reject axes that are
    # still negative after normalisation.
    if axis >= len(shape) or axis < 0:
        raise ValueError("axis parameter should be less than input dim")

    k1 = tvm.reduce_axis((0, shape[axis]), name='k')
    k2 = tvm.reduce_axis((0, shape[axis]), name='k')

    def insert_reduce_index(indices, reduce_index):
        # Re-insert the reduced axis into an index tuple at position `axis`.
        return indices[:axis] + (reduce_index,) + indices[axis:]

    def _compute_max(*indices):
        # Per-slice maximum (for numerical stability of exp).
        eval_range = insert_reduce_index(indices, k1)
        return tvm.max(x[eval_range], axis=k1)

    def _compute_expsum(max_elem, *indices):
        # Sum of exp(x - max) along the softmax axis.
        eval_range = insert_reduce_index(indices, k2)
        return tvm.sum(tvm.exp(x[eval_range] - max_elem[indices]), axis=k2)

    def _normalize(max_elem, expsum, *indices):
        # exp(x - max) / expsum, indexing the reduced tensors without `axis`.
        non_reduce_indices = tuple([var for (i, var) in enumerate(indices) if i != axis])
        return tvm.exp(x[indices] - max_elem[non_reduce_indices]) / expsum[non_reduce_indices]

    reduced_shape = tuple([dim for (i, dim) in enumerate(shape) if i != axis])
    max_elem = tvm.compute(reduced_shape, _compute_max)
    expsum = tvm.compute(reduced_shape, lambda *indices: _compute_expsum(max_elem, *indices))
    return tvm.compute(shape, lambda *indices: _normalize(max_elem, expsum, *indices))
@tvm.tag_scope(tag='log_softmax_output')
def log_softmax(x):
    """Perform log softmax activation on 2-D data.

    Parameters
    ----------
    x : tvm.Tensor
        2-D input data

    Returns
    -------
    output : tvm.Tensor
        2-D output with same shape
    """
    assert len(x.shape) == 2, "only support 2-dim log softmax"
    rows, cols = x.shape

    # Row-wise maximum, subtracted before exponentiating for stability.
    max_axis = tvm.reduce_axis((0, cols), name='k')
    max_elem = tvm.compute(
        (rows, ), lambda i: tvm.max(x[i, max_axis], axis=max_axis))

    # Row-wise sum of exp(x - max).
    sum_axis = tvm.reduce_axis((0, cols), name='k')
    expsum = tvm.compute(
        (rows, ), lambda i: tvm.sum(tvm.exp(x[i, sum_axis] - max_elem[i]),
                                    axis=sum_axis))

    # log softmax = x - max - log(sum exp(x - max)).
    return tvm.compute(
        x.shape, lambda i, j: x[i, j] - max_elem[i] - tvm.log(expsum[i]))
| phisiart/tvm | topi/python/topi/nn/softmax.py | Python | apache-2.0 | 2,430 |
# Save parameters every a few SGD iterations as fail-safe
SAVE_PARAMS_EVERY = 1000
import glob
import random
import numpy as np
import os.path as op
import pickle as pickle
def load_saved_params():
    """A helper function that loads previously saved parameters and resets
    the iteration start.

    Returns a tuple ``(start_iter, params, state)``: the highest checkpoint
    iteration found (0 if none), the pickled parameters, and the pickled RNG
    state (``None``/``None`` when no checkpoint exists).
    """
    start = 0
    for fname in glob.glob("saved_params_*.npy"):
        # File names look like saved_params_<iter>.npy; the third "_"-part
        # of the stem is the iteration number.  (Renamed from `iter`, which
        # shadowed the builtin.)
        iteration = int(op.splitext(op.basename(fname))[0].split("_")[2])
        if (iteration > start):
            start = iteration

    if start > 0:
        with open("saved_params_%d.npy" % start, "rb") as f:
            params = pickle.load(f)
            state = pickle.load(f)
        return start, params, state
    else:
        return start, None, None
def save_params(iter, params):
    """Pickle *params* plus the current RNG state for checkpoint *iter*."""
    fname = "saved_params_%d.npy" % iter
    with open(fname, "wb") as out:
        pickle.dump(params, out)
        pickle.dump(random.getstate(), out)
def sgd(f, x0, step, iterations, postprocessing = None, useSaved = False, PRINT_EVERY=10):
    """ Stochastic Gradient Descent.

    Inputs:
    - f: the function to optimize; takes a single argument and yields two
      outputs, a cost and the gradient with respect to the argument
    - x0: the initial point to start SGD from
    - step: the step size for SGD
    - iterations: total iterations to run SGD for
    - postprocessing: postprocessing function for the parameters if
      necessary (e.g. normalizing word vectors to unit length)
    - PRINT_EVERY: print progress every this many iterations (None disables)

    Output:
    - x: the parameter value after SGD finishes
    """
    # Anneal learning rate every several iterations
    ANNEAL_EVERY = 20000

    if useSaved:
        start_iter, oldx, state = load_saved_params()
        if start_iter > 0:
            x0 = oldx
            # Replay the annealing that would have happened up to the checkpoint.
            step *= 0.5 ** (start_iter / ANNEAL_EVERY)
        if state:
            random.setstate(state)
    else:
        start_iter = 0

    x = x0
    if not postprocessing:
        postprocessing = lambda y: y

    expcost = None
    # Renamed loop variable from `iter` (shadowed the builtin).
    for iteration in range(start_iter + 1, iterations + 1):
        cost, grad = f(x)
        x -= step * grad
        x = postprocessing(x)

        if PRINT_EVERY is not None and iteration % PRINT_EVERY == 0:
            # Exponentially-smoothed cost for less noisy progress output.
            # (`is None` rather than truthiness: a cost of exactly 0.0 must
            # not reset the running average.)
            if expcost is None:
                expcost = cost
            else:
                expcost = .95 * expcost + .05 * cost
            print("iter %d: %f" % (iteration, expcost))

        # Check `useSaved` first so the checkpoint machinery (and the
        # module-level constant) is only touched when saving is enabled.
        if useSaved and iteration % SAVE_PARAMS_EVERY == 0:
            save_params(iteration, x)

        if iteration % ANNEAL_EVERY == 0:
            step *= 0.5

    return x
def sanity_check():
    """Run SGD on a simple quadratic from several starts; all must reach 0."""
    quad = lambda x: (np.sum(x ** 2), x * 2)

    print("Running sanity checks...")
    for idx, start in enumerate((0.5, 0.0, -1.5), start=1):
        result = sgd(quad, start, 0.01, 1000, PRINT_EVERY=None)
        print("test %d result:" % idx, result)
        assert abs(result) <= 1e-6
    print("")
def your_sanity_checks():
    """
    Use this space to add any additional sanity checks by running:
        python q3_sgd.py
    This function will not be called by the autograder, nor will
    your additional tests be graded.
    """
    print("Running your sanity checks...")
    ### YOUR CODE HERE
    ### END YOUR CODE
if __name__ == "__main__":
    sanity_check()
    your_sanity_checks()
import numpy
#
# creates twoslits.pol
#
corners = numpy.array([-1.0, -1.0, 1, 1]) * 1e-4  # x_leftbottom,y_leftbottom,x_rightup,y_roghtup
t = numpy.array([0, 11.3e-4]) / 2                 # translation vector (i.e., horiz. and V preiods)
n = numpy.array([0, 1])                           # number of translation (H,V)
file_name = 'twoslits.pol'

# One rectangle per (i, j) translation, minus the central (0, 0) copy.
n_polygons = (2 * n[0] + 1) * (2 * n[1] + 1) - 1
with open(file_name, 'w') as out:
    out.write("%d\n" % n_polygons)
    for i in range(-n[0], n[0] + 1):
        for j in range(-n[1], n[1] + 1):
            if i == 0 and j == 0:
                continue  # skip the untranslated rectangle
            dx = i * t[0]
            dy = j * t[1]
            # Four corners, counter-clockwise from bottom-left.
            out.write("4\n")
            out.write("%f %f\n" % (corners[0] + dx, corners[1] + dy))
            out.write("%f %f\n" % (corners[0] + dx, corners[3] + dy))
            out.write("%f %f\n" % (corners[2] + dx, corners[3] + dy))
            out.write("%f %f\n" % (corners[2] + dx, corners[1] + dy))

print('File written to disk: ', file_name)
| srio/ShadowOui-Tutorial | SCRIPTS/script26_twoslits.py | Python | mit | 844 |
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from robotide.lib.robot.errors import DataError
from robotide.lib.robot.utils import (get_error_details, is_string,
split_args_from_name_or_path, type_name, Importer)
from .visitor import SuiteVisitor
class ModelModifier(SuiteVisitor):
    """Applies user-given pre-run model modifiers to an executable suite.

    Modifiers may be given either as visitor instances or as importable
    names/paths (with possible arguments); string entries are imported
    lazily at construction time.
    """

    def __init__(self, visitors, empty_suite_ok, logger):
        self._log_error = logger.error
        self._empty_suite_ok = empty_suite_ok
        self._visitors = list(self._yield_visitors(visitors))

    def visit_suite(self, suite):
        # Run each modifier over the whole suite; a failing modifier is
        # logged and skipped so the remaining modifiers still execute.
        for visitor in self._visitors:
            try:
                suite.visit(visitor)
            # Intentionally broad: user code may raise anything, and one
            # broken modifier must not abort the run.
            except:
                message, details = get_error_details()
                self._log_error("Executing model modifier '%s' failed: %s\n%s"
                                % (type_name(visitor), message, details))
        # A suite stripped of all tests is an error unless explicitly allowed.
        if not (suite.test_count or self._empty_suite_ok):
            raise DataError("Suite '%s' contains no tests after model "
                            "modifiers." % suite.name)

    def _yield_visitors(self, visitors):
        # Resolve string entries ("Name:arg1:arg2" or a path) via Importer;
        # anything else is assumed to already be a visitor instance.
        importer = Importer('model modifier')
        for visitor in visitors:
            try:
                if not is_string(visitor):
                    yield visitor
                else:
                    name, args = split_args_from_name_or_path(visitor)
                    yield importer.import_class_or_module(name, args)
            except DataError as err:
                # NOTE: `unicode` is Python 2 only -- this module targets Py2.
                self._log_error(unicode(err))
| fingeronthebutton/RIDE | src/robotide/lib/robot/model/modifier.py | Python | apache-2.0 | 2,081 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: only changes the admin display option
    # verbose_name_plural of the `ipn` model to 'IpnS'.  No schema change.

    dependencies = [
        ('adopteitor_core', '0010_ipn'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='ipn',
            options={'verbose_name_plural': 'IpnS'},
        ),
    ]
| smarbos/adopteitor-server | adopteitor_core/migrations/0011_auto_20170221_2157.py | Python | mit | 368 |
# Test Tree:
# 7 <- root
# / 7 \
# 4 7 9
# / 4 \ \
# 2 5 10
import ternary_tree as t_tree
def _insert_nodes(tree):
    """Populate *tree* with the test-tree values (see diagram at file top)."""
    # Insertion order matters: it determines the tree shape drawn above.
    for value in (7, 9, 4, 7, 4, 2, 10, 5, 7):
        tree.insert(value)
def test_insert():
    """Builds the test tree and verifies every node sits where the diagram says."""
    tree = t_tree.TernaryTree()
    _insert_nodes(tree)

    root = tree.root
    left, middle, right = root.left, root.middle, root.right

    checks = [
        (root.data, 7),
        (left.data, 4),
        (right.data, 9),
        (middle.data, 7),
        (left.left.data, 2),
        (left.right.data, 5),
        (left.middle.data, 4),
        (right.right.data, 10),
        (middle.middle.data, 7),
    ]
    for actual, expected in checks:
        assert actual == expected
def test_search():
    """Search returns (node, parent); node is a Node for hits, None for misses."""
    tree = t_tree.TernaryTree()
    _insert_nodes(tree)

    for value in (10, 9, 4):
        found = tree.search(value)[0]
        assert found.data == value
        assert type(found) == t_tree.Node

    # A value never inserted is not found.
    assert tree.search(1)[0] is None
def test_delete_leaf():
    """Deleting a childless node (5) removes it cleanly."""
    tree = t_tree.TernaryTree()
    _insert_nodes(tree)

    before = tree.search(5)[0]
    assert before is not None
    tree.delete(5)
    assert tree.search(5)[0] is None
def test_delete_with_one_child():
    """Deleting a node with a single child (9, whose only child is 10)."""
    tree = t_tree.TernaryTree()
    _insert_nodes(tree)

    before = tree.search(9)[0]
    assert before is not None
    tree.delete(9)
    assert tree.search(9)[0] is None
def test_delete_with_multiple_children():
    """Deleting a node with several children, including its duplicate chain."""
    tree = t_tree.TernaryTree()
    _insert_nodes(tree)

    # 4 occurs twice (middle/duplicate child), so the first delete still
    # leaves a 4 reachable in the tree.
    assert tree.search(4)[0] is not None
    tree.delete(4)
    assert tree.search(4)[0] is not None

    # The second delete removes the remaining 4.
    tree.delete(4)
    assert tree.search(4)[0] is None
| vishnun/python-programs | test_ternary_tree.py | Python | mit | 2,164 |
#!/usr/bin/env python
import os
import django
# Base paths
DJANGO_ROOT = os.path.dirname(os.path.realpath(django.__file__))
SITE_ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))

# Debugging -- development defaults; both must be False in production.
DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', '[email protected]'),
)

MANAGERS = ADMINS

# NOTE(review): database credentials are committed in plain text here --
# they should be moved to local_settings.py / environment variables.
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'NAME': 'hacktyler_crime',
        'USER': 'hacktyler_crime',
        'PASSWORD': 'qw8ndyHprt',
    }
}

# Localization
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
USE_I18N = True
USE_L10N = True

# Media
STATIC_ROOT = os.path.join(SITE_ROOT, 'media')
STATIC_URL = '/site_media/'
ADMIN_MEDIA_PREFIX = '/site_media/admin/'

STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    'compressor.finders.CompressorFinder',
)

# Uploads
# NOTE(review): /tmp is volatile -- uploaded files will not survive reboots.
MEDIA_ROOT = '/tmp/sirens'

# Make this unique, and don't share it with anybody.
# NOTE(review): secret key is committed to VCS; rotate and move it out.
SECRET_KEY = '+ei7-2)76sh$$dy^5h4zmkglw#ey1d3f0cj^$r+3zo!wq9j+_*'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    'django.template.loaders.eggs.Loader',
)

TEMPLATE_CONTEXT_PROCESSORS = (
    'django.core.context_processors.media',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
)

ROOT_URLCONF = 'config.urls'

TEMPLATE_DIRS = (
    os.path.join(SITE_ROOT, 'templates')
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.humanize',
    'django.contrib.staticfiles',
    'django.contrib.gis',
    'compressor',
    'activecalls',
    'sirens'
)

# Email
# run "python -m smtpd -n -c DebuggingServer localhost:1025" to see outgoing
# messages dumped to the terminal
EMAIL_HOST = 'localhost'
EMAIL_PORT = 1025
DEFAULT_FROM_EMAIL = '[email protected]'

# Django-compressor
COMPRESS_ENABLED = False

# Caching -- DummyCache stores nothing; fine for development only.
CACHE_MIDDLEWARE_KEY_PREFIX='hacktyler_crime'
CACHE_MIDDLEWARE_SECONDS=90 * 60 # 90 minutes
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
    }
}
# Logging (dictConfig schema).
# Bug fix: two loggers below used the misspelled key 'propogate', which
# dictConfig silently ignores -- those loggers therefore still propagated
# records up to the root logger.  Corrected to 'propagate'.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': True,
    'formatters': {
        'standard': {
            'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
        },
    },
    'handlers': {
        'console': {
            'level':'DEBUG',
            'class':'logging.StreamHandler',
            'formatter': 'standard'
        },
        'default': {
            'level':'INFO',
            'class':'logging.handlers.RotatingFileHandler',
            'filename': '/var/log/sites/hacktyler_crime/hacktyler_crime.log',
            'maxBytes': 1024*1024*5, # 5 MB
            'backupCount': 5,
            'formatter':'standard',
        },
        'request_handler': {
            'level':'INFO',
            'class':'logging.handlers.RotatingFileHandler',
            'filename': '/var/log/sites/hacktyler_crime/requests.log',
            'maxBytes': 1024*1024*5, # 5 MB
            'backupCount': 5,
            'formatter':'standard',
        },
        'backend_handler': {
            # NOTE(review): django.utils.log.NullHandler was removed in newer
            # Django; logging.NullHandler is the stdlib equivalent -- confirm
            # against the Django version pinned for this project.
            'level':'DEBUG',
            'class':'django.utils.log.NullHandler',
        },
    },
    'loggers': {
        '': {
            'handlers': ['default', 'console'],
            'level': 'DEBUG',
            'propagate': True
        },
        'django.request': {
            'handlers': ['request_handler', 'console'],
            'level': 'DEBUG',
            'propagate': False
        },
        'django.db': {
            'handlers': ['backend_handler'],
            'level': 'DEBUG',
            'propagate': False
        },
        'requests.packages.urllib3.connectionpool': {
            'handlers': ['console'],
            'level': 'ERROR',
            'propagate': False
        },
        'geopy': {
            'handlers': ['console'],
            'level': 'INFO',
            'propagate': False
        }
    }
}
# Pusher (realtime push service) credentials.
# NOTE(review): app id and key are committed; the secret correctly lives
# only in local_settings.py -- consider moving the others there too.
PUSHER_APP_ID = '11732'
PUSHER_KEY = 'd20fddb74c58823cd05d'
PUSHER_SECRET = None # must be in local_settings.py
PUSHER_CHANNEL = 'active-calls-test'

# Mapquest
MAPQUEST_API_KEY = None # must be in local_settings.py

# App
DEFAULT_HOURS_DISPLAYED = 4

# Allow for local (per-user) override -- local_settings.py is optional.
try:
    from local_settings import *
except ImportError:
    pass
| hacktyler/hacktyler_crime | config/settings.py | Python | mit | 4,568 |
from zzeppelin.util import _subgen
class SoundGenerator( _subgen.SubGenerator ):
    """Sound-resource generator; all behaviour is inherited from SubGenerator."""
    pass
# -------------------------------
from zzeppelin.util._subgen import Generate
| ZoyaEngine/zoyazeppelin | src/zzeppelin/util/sound.py | Python | gpl-3.0 | 173 |
# -*- coding: utf-8 -*-
"""@package UF
Declaração das classes para UF dos estados.
Este modulo contem declação da classe de modelo
para as UF de os estados.
"""
class Uf:
    """Model for a Brazilian state's federative unit (UF)."""

    def __init__(self):
        # State abbreviation (UF code); starts empty.
        self.tufuf = ''
        # State name/denomination; starts empty.
        self.tufdenominacao = ''
import datetime
import json
import mock
import pyquery
from nose.tools import eq_, ok_
from django.core.urlresolvers import reverse
from django.utils.timezone import utc
from waffle import Switch
from crashstats.crashstats.tests.test_views import BaseTestViews, Response
from crashstats.supersearch.views import get_report_list_parameters
def _mocked_field(name, query_type, form_field_type,
                  form_field_choices=None, permissions_needed=(),
                  is_returned=True):
    """Build one supersearch field description with the shared defaults.

    Every field in the mocked /supersearch/fields payload shares the same
    namespace and boolean flags; only the handful of arguments here vary.
    """
    return {
        'name': name,
        'query_type': query_type,
        'namespace': 'processed_crash',
        'form_field_type': form_field_type,
        'form_field_choices': form_field_choices,
        'permissions_needed': list(permissions_needed),
        'default_value': None,
        'is_exposed': True,
        'is_returned': is_returned,
        'is_mandatory': False,
    }


# Canned response used by the mocked 'supersearch/fields' endpoint.
SUPERSEARCH_FIELDS_MOCKED_RESULTS = {
    'signature': _mocked_field('signature', 'str', 'StringField'),
    'product': _mocked_field('product', 'enum', 'MultipleValueField'),
    'version': _mocked_field('version', 'enum', 'MultipleValueField'),
    'platform': _mocked_field('platform', 'enum', 'MultipleValueField'),
    # 'dump' is the only field that is not returned in results.
    'dump': _mocked_field('dump', 'str', 'MultipleValueField',
                          is_returned=False),
    'release_channel': _mocked_field('release_channel', 'enum',
                                     'MultipleValueField'),
    'date': _mocked_field('date', 'datetime', 'DateTimeField'),
    'address': _mocked_field('address', 'str', 'StringField'),
    'build_id': _mocked_field('build_id', 'int', 'IntegerField'),
    'reason': _mocked_field('reason', 'str', 'StringField'),
    'java_stack_trace': _mocked_field('java_stack_trace', 'enum',
                                      'MultipleValueField'),
    # PII fields require the view_pii permission.
    'email': _mocked_field('email', 'str', 'StringField',
                           permissions_needed=['crashstats.view_pii']),
    'url': _mocked_field('url', 'str', 'StringField',
                         permissions_needed=['crashstats.view_pii']),
    'exploitability': _mocked_field(
        'exploitability', 'str', 'MultipleValueField',
        form_field_choices=[
            'high', 'normal', 'low', 'none', 'unknown', 'error'
        ],
        permissions_needed=['crashstats.view_exploitability'],
    ),
}
class TestViews(BaseTestViews):
@staticmethod
def setUpClass():
TestViews.switch = Switch.objects.create(
name='supersearch-all',
active=True,
)
TestViews.custom_switch = Switch.objects.create(
name='supersearch-custom-query',
active=True,
)
@staticmethod
def tearDownClass():
try:
TestViews.switch.delete()
TestViews.custom_switch.delete()
except AssertionError:
# test_search_waffle_switch removes those switches before, causing
# this error
pass
def test_search_waffle_switch(self):
# Delete the custom-query switch but keep the generic one around.
TestViews.custom_switch.delete()
url = reverse('supersearch.search_custom')
response = self.client.get(url)
eq_(response.status_code, 404)
url = reverse('supersearch.search_query')
response = self.client.get(url)
eq_(response.status_code, 404)
# delete the switch to verify it's not accessible
TestViews.switch.delete()
url = reverse('supersearch.search')
response = self.client.get(url)
eq_(response.status_code, 404)
url = reverse('supersearch.search_results')
response = self.client.get(url)
eq_(response.status_code, 404)
url = reverse('supersearch.search_fields')
response = self.client.get(url)
eq_(response.status_code, 404)
url = reverse('supersearch.search_custom')
response = self.client.get(url)
eq_(response.status_code, 404)
url = reverse('supersearch.search_query')
response = self.client.get(url)
eq_(response.status_code, 404)
    @mock.patch('requests.get')
    def test_search(self, rget):
        """The supersearch landing page renders for a logged-in user."""
        def mocked_get(url, params, **options):
            assert 'supersearch' in url
            if 'supersearch/fields' in url:
                return Response(SUPERSEARCH_FIELDS_MOCKED_RESULTS)
        rget.side_effect = mocked_get

        self._login()
        url = reverse('supersearch.search')
        response = self.client.get(url)
        eq_(response.status_code, 200)
        ok_('Run a search to get some results' in response.content)
    @mock.patch('requests.get')
    def test_search_fields(self, rget):
        """The fields endpoint returns valid JSON mentioning each product."""
        def mocked_get(url, params, **options):
            assert 'supersearch' in url
            if 'supersearch/fields' in url:
                return Response(SUPERSEARCH_FIELDS_MOCKED_RESULTS)
        rget.side_effect = mocked_get

        self._login()
        url = reverse('supersearch.search_fields')
        response = self.client.get(url)
        eq_(response.status_code, 200)
        ok_(json.loads(response.content))  # Verify it's valid JSON
        # Product names come from the BaseTestViews fixtures.
        ok_('WaterWolf' in response.content)
        ok_('SeaMonkey' in response.content)
        ok_('NightTrain' in response.content)
    @mock.patch('requests.post')
    @mock.patch('requests.get')
    def test_search_results(self, rget, rpost):
        """Results page: rows, facets, bug links, and column selection.

        ``rget`` mocks requests.get (supersearch service) and ``rpost``
        mocks requests.post (bugs service) — the innermost @mock.patch
        supplies the first mock argument.
        """
        def mocked_post(url, **options):
            # Bugs associated with a signature, shown in the signature facet.
            assert 'bugs' in url, url
            return Response({
                "hits": [
                    {
                        "id": "123456",
                        "signature": "nsASDOMWindowEnumerator::GetNext()"
                    }
                ],
                "total": 1
            })

        def mocked_get(url, params, **options):
            assert 'supersearch' in url
            if 'supersearch/fields' in url:
                return Response(SUPERSEARCH_FIELDS_MOCKED_RESULTS)
            # Branch on the incoming query so each scenario below gets a
            # distinct canned payload.
            if 'product' in params and 'WaterWolf' in params['product']:
                return Response({
                    "hits": [
                        {
                            "signature": "nsASDOMWindowEnumerator::GetNext()",
                            "date": "2017-01-31T23:12:57",
                            "uuid": "aaaaaaaaaaaaa1",
                            "product": "WaterWolf",
                            "version": "1.0",
                            "platform": "Linux",
                            "build_id": 888981
                        },
                        {
                            "signature": "mySignatureIsCool",
                            "date": "2017-01-31T23:12:57",
                            "uuid": "aaaaaaaaaaaaa2",
                            "product": "WaterWolf",
                            "version": "1.0",
                            "platform": "Linux",
                            "build_id": 888981
                        },
                        {
                            "signature": "mineIsCoolerThanYours",
                            "date": "2017-01-31T23:12:57",
                            "uuid": "aaaaaaaaaaaaa3",
                            "product": "WaterWolf",
                            "version": "1.0",
                            "platform": "Linux",
                            "build_id": None
                        },
                        {
                            "signature": "EMPTY",
                            "date": "2017-01-31T23:12:57",
                            "uuid": "aaaaaaaaaaaaa4",
                            "product": "WaterWolf",
                            "version": "1.0",
                            "platform": "Linux",
                            "build_id": None
                        }
                    ],
                    "facets": {
                        "signature": [
                            {
                                "term": "nsASDOMWindowEnumerator::GetNext()",
                                "count": 1
                            },
                            {
                                "term": "mySignatureIsCool",
                                "count": 1
                            },
                            {
                                "term": "mineIsCoolerThanYours",
                                "count": 1
                            },
                            {
                                "term": "EMPTY",
                                "count": 1
                            }
                        ]
                    },
                    "total": 4
                })
            elif 'product' in params and 'SeaMonkey' in params['product']:
                # Used by the custom-facet scenario (_facets=build_id).
                return Response({
                    "hits": [
                        {
                            "signature": "nsASDOMWindowEnumerator::GetNext()",
                            "date": "2017-01-31T23:12:57",
                            "uuid": "aaaaaaaaaaaaa",
                            "product": "WaterWolf",
                            "version": "1.0",
                            "platform": "Linux",
                            "build_id": 888981
                        },
                        {
                            "signature": "mySignatureIsCool",
                            "date": "2017-01-31T23:12:57",
                            "uuid": "aaaaaaaaaaaaa",
                            "product": "WaterWolf",
                            "version": "1.0",
                            "platform": "Linux",
                            "build_id": 888981
                        }
                    ],
                    "facets": {
                        "build_id": [
                            {
                                "term": "888981",
                                "count": 2
                            }
                        ]
                    },
                    "total": 2
                })
            elif (
                'signature' in params and
                '~nsASDOMWindowEnumerator' in params['signature']
            ):
                # Used by the signature-operator scenario.
                return Response({
                    "hits": [
                        {
                            "signature": "nsASDOMWindowEnumerator::GetNext()",
                            "date": "2017-01-31T23:12:57",
                            "uuid": "aaaaaaaaaaaaa",
                            "product": "WaterWolf",
                            "version": "1.0",
                            "platform": "Linux",
                            "build_id": 12345678
                        }
                    ],
                    "facets": {
                        "signature": [
                            {
                                "term": "nsASDOMWindowEnumerator::GetNext()",
                                "count": 1
                            }
                        ]
                    },
                    "total": 1
                })
            else:
                return Response({"hits": [], "facets": [], "total": 0})

        rpost.side_effect = mocked_post
        rget.side_effect = mocked_get

        url = reverse('supersearch.search_results')
        response = self.client.get(
            url,
            {'product': 'WaterWolf'}
        )
        eq_(response.status_code, 200)
        # Test results are existing
        ok_('table id="reports-list"' in response.content)
        ok_('nsASDOMWindowEnumerator::GetNext()' in response.content)
        ok_('mySignatureIsCool' in response.content)
        ok_('mineIsCoolerThanYours' in response.content)
        ok_('EMPTY' in response.content)
        ok_('aaaaaaaaaaaaa1' in response.content)
        ok_('888981' in response.content)
        ok_('Linux' in response.content)
        ok_('2017-01-31 23:12:57' in response.content)
        # Test facets are existing
        ok_('table id="facets-list"' in response.content)
        # Test bugs are existing
        ok_('<th scope="col">Bugs</th>' in response.content)
        ok_('123456' in response.content)
        # Test links on terms are existing
        ok_('product=%3DWaterWolf' in response.content)

        # Test with empty results
        response = self.client.get(url, {
            'product': 'NightTrain',
            'date': '2012-01-01'
        })
        eq_(response.status_code, 200)
        ok_('table id="reports-list"' not in response.content)
        ok_('No results were found' in response.content)

        # Test with a signature param
        response = self.client.get(
            url,
            {'signature': '~nsASDOMWindowEnumerator'}
        )
        eq_(response.status_code, 200)
        ok_('table id="reports-list"' in response.content)
        ok_('nsASDOMWindowEnumerator::GetNext()' in response.content)
        ok_('123456' in response.content)

        # Test with a different facet
        response = self.client.get(
            url,
            {'_facets': 'build_id', 'product': 'SeaMonkey'}
        )
        eq_(response.status_code, 200)
        ok_('table id="reports-list"' in response.content)
        ok_('table id="facets-list"' in response.content)
        ok_('888981' in response.content)
        # Bugs should not be there, they appear only in the signature facet
        ok_('<th>Bugs</th>' not in response.content)
        ok_('123456' not in response.content)

        # Test with a different columns list
        response = self.client.get(
            url,
            {'_columns': ['build_id', 'platform'], 'product': 'WaterWolf'}
        )
        eq_(response.status_code, 200)
        ok_('table id="reports-list"' in response.content)
        ok_('table id="facets-list"' in response.content)
        # The build and platform appear
        ok_('888981' in response.content)
        ok_('Linux' in response.content)
        # The crash id is always shown
        ok_('aaaaaaaaaaaaa1' in response.content)
        # The version and date do not appear
        ok_('1.0' not in response.content)
        ok_('2017' not in response.content)
    @mock.patch('requests.post')
    @mock.patch('requests.get')
    def test_search_results_admin_mode(self, rget, rpost):
        """Test that an admin can see more fields, and that a non-admin cannot.
        """
        def mocked_post(**options):
            assert 'bugs' in options['url'], options['url']
            return Response({"hits": [], "total": 0})

        def mocked_get(url, params, **options):
            assert 'supersearch' in url
            if 'supersearch/fields' in url:
                return Response(SUPERSEARCH_FIELDS_MOCKED_RESULTS)
            # The 'url' facet is only requested when the user has the
            # view_pii permission; mirror that in the canned payload.
            if '_facets' in params and 'url' in params['_facets']:
                facets = {
                    "platform": [
                        {
                            "term": "Linux",
                            "count": 3
                        }
                    ],
                    "url": [
                        {
                            "term": "http://example.org",
                            "count": 3
                        }
                    ]
                }
            else:
                facets = {
                    "platform": [
                        {
                            "term": "Linux",
                            "count": 3
                        }
                    ]
                }
            return Response({
                "hits": [
                    {
                        "signature": "nsASDOMWindowEnumerator::GetNext()",
                        "date": "2017-01-31T23:12:57",
                        "uuid": "aaaaaaaaaaaaa1",
                        "product": "WaterWolf",
                        "version": "1.0",
                        "platform": "Linux",
                        "build_id": 888981,
                        "email": "[email protected]",
                        "url": "http://example.org",
                        "exploitability": "high"
                    },
                    {
                        "signature": "mySignatureIsCool",
                        "date": "2017-01-31T23:12:57",
                        "uuid": "aaaaaaaaaaaaa2",
                        "product": "WaterWolf",
                        "version": "1.0",
                        "platform": "Linux",
                        "build_id": 888981,
                        "email": "[email protected]",
                        "url": "http://example.org",
                        "exploitability": "low"
                    },
                    {
                        "signature": "mineIsCoolerThanYours",
                        "date": "2017-01-31T23:12:57",
                        "uuid": "aaaaaaaaaaaaa3",
                        "product": "WaterWolf",
                        "version": "1.0",
                        "platform": "Linux",
                        "build_id": None,
                        "email": "[email protected]",
                        "url": "http://example.org",
                        "exploitability": "error"
                    }
                ],
                "facets": facets,
                "total": 3
            })

        rpost.side_effect = mocked_post
        rget.side_effect = mocked_get

        url = reverse('supersearch.search_results')

        # Logged in user, can see the email field
        user = self._login()
        group = self._create_group_with_permission('view_pii')
        user.groups.add(group)

        response = self.client.get(
            url,
            {
                '_columns': ['version', 'email', 'url', 'exploitability'],
                '_facets': ['url', 'platform']
            }
        )
        eq_(response.status_code, 200)
        ok_('Email' in response.content)
        ok_('[email protected]' in response.content)
        ok_('Url facet' in response.content)
        ok_('http://example.org' in response.content)
        ok_('Version' in response.content)
        ok_('1.0' in response.content)

        # Without the correct permission the user cannot see exploitability.
        ok_('Exploitability' not in response.content)

        exp_group = self._create_group_with_permission('view_exploitability')
        user.groups.add(exp_group)

        response = self.client.get(
            url,
            {
                '_columns': ['version', 'email', 'url', 'exploitability'],
                '_facets': ['url', 'platform']
            }
        )
        eq_(response.status_code, 200)
        ok_('Email' in response.content)
        ok_('Exploitability' in response.content)
        ok_('high' in response.content)

        # Logged out user, cannot see the email field
        self._logout()
        response = self.client.get(
            url,
            {
                '_columns': ['version', 'email', 'url'],
                '_facets': ['url', 'platform']
            }
        )
        eq_(response.status_code, 200)
        ok_('Email' not in response.content)
        ok_('[email protected]' not in response.content)
        ok_('Url facet' not in response.content)
        ok_('http://example.org' not in response.content)
        ok_('Version' in response.content)
        ok_('1.0' in response.content)
    @mock.patch('requests.post')
    @mock.patch('requests.get')
    def test_search_results_parameters(self, rget, rpost):
        """Every query parameter (with operators) reaches the middleware."""
        def mocked_post(**options):
            assert 'bugs' in options['url'], options['url']
            return Response({
                "hits": [],
                "total": 0
            })

        def mocked_get(url, params, **options):
            assert 'supersearch' in url
            if 'supersearch/fields' in url:
                return Response(SUPERSEARCH_FIELDS_MOCKED_RESULTS)
            # Verify that all expected parameters are in the URL.
            ok_('product' in params)
            ok_('WaterWolf' in params['product'])
            ok_('NightTrain' in params['product'])
            ok_('address' in params)
            ok_('0x0' in params['address'])
            ok_('0xa' in params['address'])
            ok_('reason' in params)
            ok_('^hello' in params['reason'])
            ok_('$thanks' in params['reason'])
            ok_('java_stack_trace' in params)
            ok_('Exception' in params['java_stack_trace'])
            return Response({
                "hits": [],
                "facets": "",
                "total": 0
            })

        rpost.side_effect = mocked_post
        rget.side_effect = mocked_get
        url = reverse('supersearch.search_results')
        response = self.client.get(
            url, {
                'product': ['WaterWolf', 'NightTrain'],
                'address': ['0x0', '0xa'],
                'reason': ['^hello', '$thanks'],
                'java_stack_trace': 'Exception',
            }
        )
        eq_(response.status_code, 200)
    @mock.patch('requests.post')
    @mock.patch('requests.get')
    def test_search_results_pagination(self, rget, rpost):
        """Test that the pagination of results works as expected.
        """
        def mocked_post(**options):
            assert 'bugs' in options['url'], options['url']
            # NOTE(review): this returns a JSON *string*, unlike the dict
            # payloads used by the other mocked_post helpers in this class.
            return Response("""
                {"hits": [], "total": 0}
            """)

        def mocked_get(url, params, **options):
            assert 'supersearch' in url
            if 'supersearch/fields' in url:
                return Response(SUPERSEARCH_FIELDS_MOCKED_RESULTS)

            # Make sure a negative page does not lead to negative offset value.
            # But instead it is considered as the page 1 and thus is not added.
            ok_('_results_offset' not in params)

            # 140 hits forces more than one page of results.
            hits = []
            for i in range(140):
                hits.append({
                    "signature": "nsASDOMWindowEnumerator::GetNext()",
                    "date": "2017-01-31T23:12:57",
                    "uuid": i,
                    "product": "WaterWolf",
                    "version": "1.0",
                    "platform": "Linux",
                    "build_id": 888981
                })
            return Response({
                "hits": hits,
                "facets": "",
                "total": len(hits)
            })

        rpost.side_effect = mocked_post
        rget.side_effect = mocked_get

        url = reverse('supersearch.search_results')
        response = self.client.get(
            url,
            {
                '_columns': ['version'],
                '_facets': ['platform']
            }
        )
        eq_(response.status_code, 200)
        ok_('140' in response.content)

        # Check that the pagination URL contains all three expected parameters.
        doc = pyquery.PyQuery(response.content)
        next_page_url = str(doc('.pagination a').eq(0))
        ok_('_facets=platform' in next_page_url)
        ok_('_columns=version' in next_page_url)
        ok_('page=2' in next_page_url)

        # Test that a negative page value does not break it.
        response = self.client.get(url, {'page': '-1'})
        eq_(response.status_code, 200)
def test_get_report_list_parameters(self):
source = {
'date': ['<2013-01-01T10:00:00+00:00']
}
res = get_report_list_parameters(source)
eq_(res['date'], datetime.datetime(2013, 1, 1, 10).replace(tzinfo=utc))
ok_('range_value' not in res)
ok_('range_unit' not in res)
source = {
'date': ['>=2013-01-01T10:00:00+00:00']
}
res = get_report_list_parameters(source)
eq_(res['date'].date(), datetime.datetime.utcnow().date())
ok_('range_value' in res)
eq_(res['range_unit'], 'hours')
source = {
'date': [
'>2013-01-01T10:00:00+00:00',
'<2013-02-01T10:00:00+00:00'
],
'product': ['WaterWolf'],
'version': ['3.0b1', '4.0a', '5.1'],
'release_channel': 'aurora',
'build_id': ['12345', '~67890'],
}
res = get_report_list_parameters(source)
eq_(res['date'].date(), datetime.date(2013, 2, 1))
ok_('range_value' in res)
ok_(res['range_unit'], 'hours')
eq_(res['release_channels'], 'aurora')
ok_('release_channel' not in res)
eq_(res['product'], ['WaterWolf'])
eq_(
res['version'],
['WaterWolf:3.0b1', 'WaterWolf:4.0a', 'WaterWolf:5.1']
)
eq_(res['build_id'], ['12345'])
    def create_custom_query_perm(self):
        """Log a user in and grant them the run_custom_queries permission."""
        user = self._login()
        group = self._create_group_with_permission('run_custom_queries')
        user.groups.add(group)
    @mock.patch('requests.get')
    def test_search_custom_permission(self, rget):
        """The custom-query page requires the run_custom_queries permission."""
        def mocked_get(url, params, **options):
            assert 'supersearch' in url
            if 'supersearch/fields' in url:
                return Response(SUPERSEARCH_FIELDS_MOCKED_RESULTS)
            return Response()
        rget.side_effect = mocked_get

        url = reverse('supersearch.search_custom')
        # Without the permission the user is redirected away.
        response = self.client.get(url)
        eq_(response.status_code, 302)

        self.create_custom_query_perm()

        response = self.client.get(url)
        eq_(response.status_code, 200)
        ok_('Run a search to get some results' in response.content)
    @mock.patch('requests.get')
    def test_search_custom(self, rget):
        """The custom-query page renders for a permitted user with no query."""
        def mocked_get(url, params, **options):
            assert 'supersearch' in url
            if 'supersearch/fields' in url:
                return Response(SUPERSEARCH_FIELDS_MOCKED_RESULTS)
            return Response()
        rget.side_effect = mocked_get

        self.create_custom_query_perm()

        url = reverse('supersearch.search_custom')
        response = self.client.get(url)
        eq_(response.status_code, 200)
        ok_('Run a search to get some results' in response.content)
    @mock.patch('requests.get')
    def test_search_custom_parameters(self, rget):
        """Search parameters become a returned ES query and index list."""
        self.create_custom_query_perm()

        def mocked_get(url, params, **options):
            if '/supersearch/fields' in url:
                return Response(SUPERSEARCH_FIELDS_MOCKED_RESULTS)
            if '/supersearch' in url:
                # The view must ask the middleware for the query back
                # (_return_query) instead of running it.
                ok_('_return_query' in params)
                ok_('signature' in params)
                eq_(params['signature'], ['nsA'])
                return Response({
                    "query": {"query": None},
                    "indices": ["socorro200000", "socorro200001"]
                })
            raise NotImplementedError(url)
        rget.side_effect = mocked_get

        url = reverse('supersearch.search_custom')
        response = self.client.get(url, {'signature': 'nsA'})
        eq_(response.status_code, 200)
        ok_('Run a search to get some results' in response.content)
        # The returned query and indices are rendered into the page.
        ok_('{"query": null}' in response.content)
        ok_('socorro200000' in response.content)
        ok_('socorro200001' in response.content)
@mock.patch('requests.get')
@mock.patch('requests.post')
def test_search_query(self, rget, rpost):
self.create_custom_query_perm()
def mocked_get(url, params, **options):
if 'supersearch/fields' in url:
return Response(SUPERSEARCH_FIELDS_MOCKED_RESULTS)
return Response('{"hits": []}')
def mocked_post(url, data, **options):
ok_('/query' in url)
ok_('query' in data)
ok_('indices' in data)
return Response({"hits": []})
rget.side_effect = mocked_get
rpost.side_effect = mocked_post
url = reverse('supersearch.search_query')
response = self.client.post(url, {'query': '{"query": {}}'})
eq_(response.status_code, 200)
content = json.loads(response.content)
ok_('hits' in content)
eq_(content['hits'], [])
# Test a failure.
response = self.client.post(url)
eq_(response.status_code, 400)
ok_('query' in response.content)
| bsmedberg/socorro | webapp-django/crashstats/supersearch/tests/test_views.py | Python | mpl-2.0 | 31,386 |
from morsel.panda import *
from morsel.geometries.box import Box as Base
from morsel.nodes.ode.solid import Solid
#-------------------------------------------------------------------------------
class Box(Solid, Base):
    """Box solid: pairs the box geometry with an ODE box collision geom."""
    def __init__(self, **kargs):
        super(Box, self).__init__(**kargs)

    #-----------------------------------------------------------------------------

    def fit(self, node):
        # Let the geometry base class size itself to the node first, then
        # create the ODE geom in the node's collision space with that size.
        Base.fit(self, node)
        self.geometry = panda.OdeBoxGeom(node.world.space, *self.globalSize)

    #-----------------------------------------------------------------------------

    def onScale(self, scaling):
        # Keep the ODE geom's extents in sync whenever the solid is rescaled.
        if self.geometry:
            self.geometry.setLengths(*self.globalSize)
| kralf/morsel | python/lib/morsel/nodes/ode/solids/box.py | Python | gpl-2.0 | 688 |
from __future__ import absolute_import, print_function
from flask import Flask
def create_app():
    """App factory taking no arguments; the app is named after the factory."""
    return Flask('create_app')
def create_app2(foo, bar):
    """App factory with two arguments, folded into the app name."""
    return Flask("_".join(['create_app2', foo, bar]))
def create_app3(foo, bar, script_info):
    """App factory that also accepts a third ``script_info`` argument.

    ``script_info`` is deliberately unused here — presumably present so
    the Flask CLI can pass its ScriptInfo object; only the first two
    arguments contribute to the app name.
    """
    return Flask("_".join(['create_app3', foo, bar]))
| tristanfisher/flask | tests/test_apps/cliapp/factory.py | Python | bsd-3-clause | 310 |
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Driver base-classes:
(Beginning of) the contract that compute drivers must follow, and shared
types that support that contract
"""
import sys
from oslo.config import cfg
from nova.openstack.common.gettextutils import _
from nova.openstack.common import importutils
from nova.openstack.common import log as logging
from nova import utils
from nova.virt import event as virtevent
# Configuration options controlling which virt driver is loaded and a few
# driver-independent behaviors (image preallocation, VIF plugging).
driver_opts = [
    cfg.StrOpt('compute_driver',
               help='Driver to use for controlling virtualization. Options '
                    'include: libvirt.LibvirtDriver, xenapi.XenAPIDriver, '
                    'fake.FakeDriver, baremetal.BareMetalDriver, '
                    'vmwareapi.VMwareESXDriver, vmwareapi.VMwareVCDriver'),
    cfg.StrOpt('default_ephemeral_format',
               help='The default format an ephemeral_volume will be '
                    'formatted with on creation.'),
    cfg.StrOpt('preallocate_images',
               default='none',
               help='VM image preallocation mode: '
                    '"none" => no storage provisioning is done up front, '
                    '"space" => storage is fully allocated at instance start'),
    cfg.BoolOpt('use_cow_images',
                default=True,
                help='Whether to use cow images'),
    cfg.BoolOpt('vif_plugging_is_fatal',
                default=True,
                help="Fail instance boot if vif plugging fails"),
    cfg.IntOpt('vif_plugging_timeout',
               default=300,
               help='Number of seconds to wait for neutron vif plugging '
                    'events to arrive before continuing or failing (see '
                    'vif_plugging_is_fatal). If this is set to zero and '
                    'vif_plugging_is_fatal is False, events should not '
                    'be expected to arrive at all.'),
]

CONF = cfg.CONF
CONF.register_opts(driver_opts)

# Module-level logger, per OpenStack convention.
LOG = logging.getLogger(__name__)
def driver_dict_from_config(named_driver_config, *args, **kwargs):
    """Instantiate drivers described as "type=import.path.Class" strings.

    :param named_driver_config: iterable of "name=import.path.Class" specs
    :param args: positional arguments passed to every driver constructor
    :param kwargs: keyword arguments passed to every driver constructor
    :returns: dict mapping each name to an instantiated driver object
    """
    registry = {}
    for spec in named_driver_config:
        kind, _sep, class_path = spec.partition('=')
        driver_cls = importutils.import_class(class_path)
        registry[kind] = driver_cls(*args, **kwargs)
    return registry
def block_device_info_get_root(block_device_info):
    """Return the root device name from *block_device_info*, or None.

    A None/empty *block_device_info* is treated as an empty mapping.
    """
    return (block_device_info or {}).get('root_device_name')
def block_device_info_get_swap(block_device_info):
    """Return the swap mapping, or an empty placeholder when absent.

    The placeholder ({'device_name': None, 'swap_size': 0}) is also used
    when the stored swap entry is falsy (e.g. None).
    """
    empty_swap = {'device_name': None, 'swap_size': 0}
    return (block_device_info or {}).get('swap') or empty_swap
def swap_is_usable(swap):
    """Return True when *swap* names a device and has a positive size.

    Fix: the previous chained ``and`` expression leaked its last falsy
    operand (None, '', 0, ...) instead of returning a boolean; as a
    predicate this now always returns a bool.
    """
    return bool(swap and swap['device_name'] and swap['swap_size'] > 0)
def block_device_info_get_ephemerals(block_device_info):
    """Return the list of ephemeral disk mappings (possibly empty)."""
    return (block_device_info or {}).get('ephemerals') or []
def block_device_info_get_mapping(block_device_info):
    """Return the block device mapping list (possibly empty)."""
    return (block_device_info or {}).get('block_device_mapping') or []
class ComputeDriver(object):
"""Base class for compute drivers.
The interface to this class talks in terms of 'instances' (Amazon EC2 and
internal Nova terminology), by which we mean 'running virtual machine'
(XenAPI terminology) or domain (Xen or libvirt terminology).
An instance has an ID, which is the identifier chosen by Nova to represent
the instance further up the stack. This is unfortunately also called a
'name' elsewhere. As far as this layer is concerned, 'instance ID' and
'instance name' are synonyms.
Note that the instance ID or name is not human-readable or
customer-controlled -- it's an internal ID chosen by Nova. At the
nova.virt layer, instances do not have human-readable names at all -- such
things are only known higher up the stack.
Most virtualization platforms will also have their own identity schemes,
to uniquely identify a VM or domain. These IDs must stay internal to the
platform-specific layer, and never escape the connection interface. The
platform-specific layer is responsible for keeping track of which instance
ID maps to which platform-specific ID, and vice versa.
Some methods here take an instance of nova.compute.service.Instance. This
is the data structure used by nova.compute to store details regarding an
instance, and pass them into this layer. This layer is responsible for
translating that generic data structure into terms that are specific to the
virtualization platform.
"""
capabilities = {
"has_imagecache": False,
"supports_recreate": False,
}
    def __init__(self, virtapi):
        # Narrow interface the driver uses to call back into Nova.
        self.virtapi = virtapi
        # Lifecycle-event callback; None until one is registered.
        self._compute_event_callback = None
    def init_host(self, host):
        """Initialize anything that is necessary for the driver to function,
        including catching up with currently running VM's on the given host.

        :param host: name of this compute host (presumably CONF.host —
                     TODO confirm at the call site)
        """
        # TODO(Vek): Need to pass context in for access to auth_token
        raise NotImplementedError()
    def get_info(self, instance):
        """Get the current status of an instance, by name (not ID!)

        :param instance: nova.objects.instance.Instance object

        Returns a dict containing:

        :state:           the running state, one of the power_state codes
        :max_mem:         (int) the maximum memory in KBytes allowed
        :mem:             (int) the memory in KBytes used by the domain
        :num_cpu:         (int) the number of virtual CPUs for the domain
        :cpu_time:        (int) the CPU time used in nanoseconds
        """
        # TODO(Vek): Need to pass context in for access to auth_token
        # Abstract: concrete drivers must override.
        raise NotImplementedError()
    def get_num_instances(self):
        """Return the total number of virtual machines.

        Return the number of virtual machines that the hypervisor knows
        about.

        .. note::

            This implementation works for all drivers, but it is
            not particularly efficient. Maintainers of the virt drivers are
            encouraged to override this method with something more
            efficient.
        """
        # Default: count the full instance name list from the subclass.
        return len(self.list_instances())
    def instance_exists(self, instance_id):
        """Checks existence of an instance on the host.

        :param instance_id: The ID / name of the instance to lookup

        Returns True if an instance with the supplied ID exists on
        the host, False otherwise.

        .. note::

            This implementation works for all drivers, but it is
            not particularly efficient. Maintainers of the virt drivers are
            encouraged to override this method with something more
            efficient.
        """
        # Default: linear membership test against the subclass's name list.
        return instance_id in self.list_instances()
    def estimate_instance_overhead(self, instance_info):
        """Estimate the virtualization overhead required to build an instance
        of the given flavor.

        Defaults to zero, drivers should override if per-instance overhead
        calculations are desired.

        :param instance_info: Instance/flavor to calculate overhead for.
        :returns: Dict of estimated overhead values.
        """
        # Conservative default: no extra memory overhead.
        return {'memory_mb': 0}
    def list_instances(self):
        """Return the names of all the instances known to the virtualization
        layer, as a list.
        """
        # TODO(Vek): Need to pass context in for access to auth_token
        # Abstract: concrete drivers must override.
        raise NotImplementedError()
    def list_instance_uuids(self):
        """Return the UUIDS of all the instances known to the virtualization
        layer, as a list.
        """
        # Abstract: concrete drivers must override.
        raise NotImplementedError()
    def rebuild(self, context, instance, image_meta, injected_files,
                admin_password, bdms, detach_block_devices,
                attach_block_devices, network_info=None,
                recreate=False, block_device_info=None,
                preserve_ephemeral=False):
        """Destroy and re-make this instance.

        A 'rebuild' effectively purges all existing data from the system and
        remakes the VM with given 'metadata' and 'personalities'.

        This base class method shuts down the VM, detaches all block devices,
        then spins up the new VM afterwards. It may be overridden by
        hypervisors that need to - e.g. for optimisations, or when the 'VM'
        is actually proxied and needs to be held across the shutdown + spin
        up steps.

        :param context: security context
        :param instance: nova.objects.instance.Instance
                         This function should use the data there to guide
                         the creation of the new instance.
        :param image_meta: image object returned by nova.image.glance that
                           defines the image from which to boot this instance
        :param injected_files: User files to inject into instance.
        :param admin_password: Administrator password to set in instance.
        :param bdms: block-device-mappings to use for rebuild
        :param detach_block_devices: function to detach block devices. See
            nova.compute.manager.ComputeManager:_rebuild_default_impl for
            usage.
        :param attach_block_devices: function to attach block devices. See
            nova.compute.manager.ComputeManager:_rebuild_default_impl for
            usage.
        :param network_info:
           :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
        :param recreate: True if the instance is being recreated on a new
            hypervisor - all the cleanup of old state is skipped.
        :param block_device_info: Information about block devices to be
                                  attached to the instance.
        :param preserve_ephemeral: True if the default ephemeral storage
                                   partition must be preserved on rebuild
        """
        # Abstract here despite the docstring's description of a default:
        # the default flow lives in ComputeManager._rebuild_default_impl.
        raise NotImplementedError()
    def spawn(self, context, instance, image_meta, injected_files,
              admin_password, network_info=None, block_device_info=None):
        """Create a new instance/VM/domain on the virtualization platform.

        Once this successfully completes, the instance should be
        running (power_state.RUNNING).

        If this fails, any partial instance should be completely
        cleaned up, and the virtualization platform should be in the state
        that it was before this call began.

        :param context: security context
        :param instance: nova.objects.instance.Instance
                         This function should use the data there to guide
                         the creation of the new instance.
        :param image_meta: image object returned by nova.image.glance that
                           defines the image from which to boot this instance
        :param injected_files: User files to inject into instance.
        :param admin_password: Administrator password to set in instance.
        :param network_info:
           :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
        :param block_device_info: Information about block devices to be
                                  attached to the instance.
        """
        # Abstract: concrete drivers must override.
        raise NotImplementedError()
def destroy(self, context, instance, network_info, block_device_info=None,
            destroy_disks=True):
    """Destroy the specified instance from the Hypervisor.

    If the instance is not found (for example if networking failed), this
    function should still succeed.  It's probably a good idea to log a
    warning in that case.

    :param context: security context
    :param instance: Instance object as returned by DB layer.
    :param network_info:
       :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
    :param block_device_info: Information about block devices that should
                              be detached from the instance.
    :param destroy_disks: Indicates if disks should be destroyed
    """
    raise NotImplementedError()
def cleanup(self, context, instance, network_info, block_device_info=None,
            destroy_disks=True):
    """Cleanup the instance resources.

    Instance should have been destroyed from the Hypervisor before calling
    this method.

    :param context: security context
    :param instance: Instance object as returned by DB layer.
    :param network_info:
       :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
    :param block_device_info: Information about block devices that should
                              be detached from the instance.
    :param destroy_disks: Indicates if disks should be destroyed
    """
    raise NotImplementedError()
def reboot(self, context, instance, network_info, reboot_type,
           block_device_info=None, bad_volumes_callback=None):
    """Reboot the specified instance.

    After this is called successfully, the instance's state
    goes back to power_state.RUNNING. The virtualization
    platform should ensure that the reboot action has completed
    successfully even in cases in which the underlying domain/vm
    is paused or halted/stopped.

    :param context: security context
    :param instance: nova.objects.instance.Instance
    :param network_info:
       :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
    :param reboot_type: Either a HARD or SOFT reboot
    :param block_device_info: Info pertaining to attached volumes
    :param bad_volumes_callback: Function to handle any bad volumes
        encountered
    """
    raise NotImplementedError()
def get_console_pool_info(self, console_type):
    """Return connection information for the console pool.

    :param console_type: type of console to look up pool info for
    """
    # TODO(Vek): Need to pass context in for access to auth_token
    raise NotImplementedError()
def get_console_output(self, context, instance):
    """Get console output for an instance.

    :param context: security context
    :param instance: nova.objects.instance.Instance
    """
    raise NotImplementedError()
def get_vnc_console(self, context, instance):
    """Get connection info for a vnc console.

    :param context: security context
    :param instance: nova.objects.instance.Instance
    """
    raise NotImplementedError()
def get_spice_console(self, context, instance):
    """Get connection info for a spice console.

    :param context: security context
    :param instance: nova.objects.instance.Instance
    """
    raise NotImplementedError()
def get_rdp_console(self, context, instance):
    """Get connection info for a rdp console.

    :param context: security context
    :param instance: nova.objects.instance.Instance
    """
    raise NotImplementedError()
def get_diagnostics(self, instance):
    """Return data about VM diagnostics.

    :param instance: nova.objects.instance.Instance
    """
    # TODO(Vek): Need to pass context in for access to auth_token
    raise NotImplementedError()
def get_all_bw_counters(self, instances):
    """Return bandwidth usage counters for each interface on each
    running VM.

    :param instances: nova.objects.instance.InstanceList
    """
    raise NotImplementedError()
def get_all_volume_usage(self, context, compute_host_bdms):
    """Return usage info for volumes attached to vms on
    a given host.

    :param context: security context
    :param compute_host_bdms: block device mappings for the host's VMs
    """
    raise NotImplementedError()
def get_host_ip_addr(self):
    """Retrieves the IP address of the dom0."""
    # TODO(Vek): Need to pass context in for access to auth_token
    raise NotImplementedError()
def attach_volume(self, context, connection_info, instance, mountpoint,
                  disk_bus=None, device_type=None, encryption=None):
    """Attach the disk to the instance at mountpoint using info."""
    raise NotImplementedError()
def detach_volume(self, connection_info, instance, mountpoint,
                  encryption=None):
    """Detach the disk attached to the instance."""
    raise NotImplementedError()
def swap_volume(self, old_connection_info, new_connection_info,
                instance, mountpoint):
    """Replace the disk attached to the instance.

    :param instance: nova.objects.instance.Instance
    """
    raise NotImplementedError()
def attach_interface(self, instance, image_meta, vif):
    """Attach an interface to the instance.

    :param instance: nova.objects.instance.Instance
    """
    raise NotImplementedError()
def detach_interface(self, instance, vif):
    """Detach an interface from the instance.

    :param instance: nova.objects.instance.Instance
    """
    raise NotImplementedError()
def migrate_disk_and_power_off(self, context, instance, dest,
                               flavor, network_info,
                               block_device_info=None):
    """Transfers the disk of a running instance in multiple phases, turning
    off the instance before the end.

    :param instance: nova.objects.instance.Instance
    """
    raise NotImplementedError()
def snapshot(self, context, instance, image_id, update_task_state):
    """Snapshots the specified instance.

    :param context: security context
    :param instance: nova.objects.instance.Instance
    :param image_id: Reference to a pre-created image that will
                     hold the snapshot.
    """
    raise NotImplementedError()
def finish_migration(self, context, migration, instance, disk_info,
                     network_info, image_meta, resize_instance,
                     block_device_info=None, power_on=True):
    """Completes a resize.

    :param context: the context for the migration/resize
    :param migration: the migrate/resize information
    :param instance: nova.objects.instance.Instance being migrated/resized
    :param disk_info: the newly transferred disk information
    :param network_info:
       :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
    :param image_meta: image object returned by nova.image.glance that
                       defines the image from which this instance
                       was created
    :param resize_instance: True if the instance is being resized,
                            False otherwise
    :param block_device_info: instance volume block device info
    :param power_on: True if the instance should be powered on, False
                     otherwise
    """
    raise NotImplementedError()
def confirm_migration(self, migration, instance, network_info):
    """Confirms a resize, destroying the source VM.

    :param instance: nova.objects.instance.Instance
    """
    # TODO(Vek): Need to pass context in for access to auth_token
    raise NotImplementedError()
def finish_revert_migration(self, context, instance, network_info,
                            block_device_info=None, power_on=True):
    """Finish reverting a resize.

    :param context: the context for the finish_revert_migration
    :param instance: nova.objects.instance.Instance being migrated/resized
    :param network_info:
       :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
    :param block_device_info: instance volume block device info
    :param power_on: True if the instance should be powered on, False
                     otherwise
    """
    raise NotImplementedError()
def pause(self, instance):
    """Pause the specified instance.

    :param instance: nova.objects.instance.Instance
    """
    # TODO(Vek): Need to pass context in for access to auth_token
    raise NotImplementedError()
def unpause(self, instance):
    """Unpause paused VM instance.

    :param instance: nova.objects.instance.Instance
    """
    # TODO(Vek): Need to pass context in for access to auth_token
    raise NotImplementedError()
def suspend(self, instance):
    """Suspend the specified instance.

    :param instance: nova.objects.instance.Instance
    """
    # TODO(Vek): Need to pass context in for access to auth_token
    raise NotImplementedError()
def resume(self, context, instance, network_info, block_device_info=None):
    """Resume the specified instance.

    :param context: the context for the resume
    :param instance: nova.objects.instance.Instance being resumed
    :param network_info:
       :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
    :param block_device_info: instance volume block device info
    """
    raise NotImplementedError()
def resume_state_on_host_boot(self, context, instance, network_info,
                              block_device_info=None):
    """Resume guest state when a host is booted.

    :param instance: nova.objects.instance.Instance
    """
    raise NotImplementedError()
def rescue(self, context, instance, network_info, image_meta,
           rescue_password):
    """Rescue the specified instance.

    :param instance: nova.objects.instance.Instance
    """
    raise NotImplementedError()
def set_bootable(self, instance, is_bootable):
    """Set the ability to power on/off an instance.

    :param instance: nova.objects.instance.Instance
    """
    raise NotImplementedError()
def unrescue(self, instance, network_info):
    """Unrescue the specified instance.

    :param instance: nova.objects.instance.Instance
    """
    # TODO(Vek): Need to pass context in for access to auth_token
    raise NotImplementedError()
def power_off(self, instance):
    """Power off the specified instance.

    :param instance: nova.objects.instance.Instance
    """
    raise NotImplementedError()
def power_on(self, context, instance, network_info,
             block_device_info=None):
    """Power on the specified instance.

    :param instance: nova.objects.instance.Instance
    """
    raise NotImplementedError()
def soft_delete(self, instance):
    """Soft delete the specified instance.

    :param instance: nova.objects.instance.Instance
    """
    raise NotImplementedError()
def restore(self, instance):
    """Restore the specified instance.

    :param instance: nova.objects.instance.Instance
    """
    raise NotImplementedError()
def get_available_resource(self, nodename):
    """Retrieve resource information.

    This method is called when nova-compute launches, and
    as part of a periodic task that records the results in the DB.

    :param nodename:
        node which the caller want to get resources from
        a driver that manages only one node can safely ignore this
    :returns: Dictionary describing resources
    """
    raise NotImplementedError()
def pre_live_migration(self, ctxt, instance, block_device_info,
                       network_info, disk_info, migrate_data=None):
    """Prepare an instance for live migration.

    :param ctxt: security context
    :param instance: nova.objects.instance.Instance object
    :param block_device_info: instance block device information
    :param network_info: instance network information
    :param disk_info: instance disk information
    :param migrate_data: implementation specific data dict.
    """
    raise NotImplementedError()
def live_migration(self, ctxt, instance_ref, dest,
                   post_method, recover_method, block_migration=False,
                   migrate_data=None):
    """Live migration of an instance to another host.

    :param ctxt: security context
    :param instance_ref:
        nova.db.sqlalchemy.models.Instance object
        instance object that is migrated.
    :param dest: destination host
    :param post_method:
        post operation method.
        expected nova.compute.manager.post_live_migration.
    :param recover_method:
        recovery method when any exception occurs.
        expected nova.compute.manager.recover_live_migration.
    :param block_migration: if true, migrate VM disk.
    :param migrate_data: implementation specific params.
    """
    raise NotImplementedError()
def rollback_live_migration_at_destination(self, ctxt, instance_ref,
                                           network_info,
                                           block_device_info):
    """Clean up destination node after a failed live migration.

    :param ctxt: security context
    :param instance_ref: instance object that was being migrated
    :param network_info: instance network information
    :param block_device_info: instance block device information
    """
    raise NotImplementedError()
def post_live_migration(self, ctxt, instance_ref, block_device_info,
                        migrate_data=None):
    """Post operation of live migration at source host.

    :param ctxt: security context
    :param instance_ref: instance object that was migrated
    :param block_device_info: instance block device information
    :param migrate_data: if not None, it is a dict of implementation
                         specific data
    """
    # Intentionally a no-op by default: drivers with no source-side
    # cleanup after live migration need not override this.
    pass
def post_live_migration_at_destination(self, ctxt, instance_ref,
                                       network_info,
                                       block_migration=False,
                                       block_device_info=None):
    """Post operation of live migration at destination host.

    :param ctxt: security context
    :param instance_ref: instance object that is migrated
    :param network_info: instance network information
    :param block_migration: if true, post operation of block_migration.
    :param block_device_info: instance block device information
    """
    raise NotImplementedError()
def check_instance_shared_storage_local(self, ctxt, instance):
    """Check if instance files located on shared storage.

    This runs check on the destination host, and then calls
    back to the source host to check the results.

    :param ctxt: security context
    :param instance: nova.db.sqlalchemy.models.Instance
    """
    raise NotImplementedError()
def check_instance_shared_storage_remote(self, ctxt, data):
    """Check if instance files located on shared storage.

    :param ctxt: security context
    :param data: result of check_instance_shared_storage_local
    """
    raise NotImplementedError()
def check_instance_shared_storage_cleanup(self, ctxt, data):
    """Do cleanup on host after check_instance_shared_storage calls.

    :param ctxt: security context
    :param data: result of check_instance_shared_storage_local
    """
    # Intentionally a no-op by default: drivers that need no cleanup
    # after the shared-storage check simply inherit this.
    pass
def check_can_live_migrate_destination(self, ctxt, instance_ref,
                                       src_compute_info, dst_compute_info,
                                       block_migration=False,
                                       disk_over_commit=False):
    """Check if it is possible to execute live migration.

    This runs checks on the destination host, and then calls
    back to the source host to check the results.

    :param ctxt: security context
    :param instance_ref: nova.db.sqlalchemy.models.Instance
    :param src_compute_info: Info about the sending machine
    :param dst_compute_info: Info about the receiving machine
    :param block_migration: if true, prepare for block migration
    :param disk_over_commit: if true, allow disk over commit
    :returns: a dict containing migration info (hypervisor-dependent)
    """
    raise NotImplementedError()
def check_can_live_migrate_destination_cleanup(self, ctxt,
                                               dest_check_data):
    """Do required cleanup on dest host after check_can_live_migrate calls.

    :param ctxt: security context
    :param dest_check_data: result of check_can_live_migrate_destination
    """
    raise NotImplementedError()
def check_can_live_migrate_source(self, ctxt, instance_ref,
                                  dest_check_data):
    """Check if it is possible to execute live migration.

    This checks if the live migration can succeed, based on the
    results from check_can_live_migrate_destination.

    :param ctxt: security context
    :param instance_ref: nova.db.sqlalchemy.models.Instance
    :param dest_check_data: result of check_can_live_migrate_destination
    :returns: a dict containing migration info (hypervisor-dependent)
    """
    raise NotImplementedError()
def refresh_security_group_rules(self, security_group_id):
    """This method is called after a change to security groups.

    All security groups and their associated rules live in the datastore,
    and calling this method should apply the updated rules to instances
    running the specified security group.

    An error should be raised if the operation cannot complete.
    """
    # TODO(Vek): Need to pass context in for access to auth_token
    raise NotImplementedError()
def refresh_security_group_members(self, security_group_id):
    """This method is called when a security group is added to an instance.

    This message is sent to the virtualization drivers on hosts that are
    running an instance that belongs to a security group that has a rule
    that references the security group identified by `security_group_id`.
    It is the responsibility of this method to make sure any rules
    that authorize traffic flow with members of the security group are
    updated and any new members can communicate, and any removed members
    cannot.

    Scenario:

    * we are running on host 'H0' and we have an instance 'i-0'.
    * instance 'i-0' is a member of security group 'speaks-b'
    * group 'speaks-b' has an ingress rule that authorizes group 'b'
    * another host 'H1' runs an instance 'i-1'
    * instance 'i-1' is a member of security group 'b'

    When 'i-1' launches or terminates we will receive the message
    to update members of group 'b', at which time we will make
    any changes needed to the rules for instance 'i-0' to allow
    or deny traffic coming from 'i-1', depending on if it is being
    added or removed from the group.

    In this scenario, 'i-1' could just as easily have been running on our
    host 'H0' and this method would still have been called.  The point was
    that this method isn't called on the host where instances of that
    group are running (as is the case with
    :py:meth:`refresh_security_group_rules`) but is called where references
    are made to authorizing those instances.

    An error should be raised if the operation cannot complete.
    """
    # TODO(Vek): Need to pass context in for access to auth_token
    raise NotImplementedError()
def refresh_provider_fw_rules(self):
    """This triggers a firewall update based on database changes.

    When this is called, rules have either been added or removed from the
    datastore.  You can retrieve rules with
    :py:meth:`nova.db.provider_fw_rule_get_all`.

    Provider rules take precedence over security group rules.  If an IP
    would be allowed by a security group ingress rule, but blocked by
    a provider rule, then packets from the IP are dropped.  This includes
    intra-project traffic in the case of the allow_project_net_traffic
    flag for the libvirt-derived classes.
    """
    # TODO(Vek): Need to pass context in for access to auth_token
    raise NotImplementedError()
def refresh_instance_security_rules(self, instance):
    """Refresh security group rules.

    Gets called when an instance gets added to or removed from
    the security group the instance is a member of or if the
    group gains or loses a rule.
    """
    raise NotImplementedError()
def reset_network(self, instance):
    """Reset networking for specified instance."""
    # TODO(Vek): Need to pass context in for access to auth_token
    # Intentionally a no-op by default.
    pass
def ensure_filtering_rules_for_instance(self, instance, network_info):
    """Setting up filtering rules and waiting for its completion.

    To migrate an instance, filtering rules to hypervisors
    and firewalls are inevitable on destination host.
    ( Waiting only for filtering rules to hypervisor,
    since filtering rules to firewall rules can be set faster).

    Concretely, the below method must be called.
    - setup_basic_filtering (for nova-basic, etc.)
    - prepare_instance_filter(for nova-instance-instance-xxx, etc.)

    to_xml may have to be called since it defines PROJNET, PROJMASK.
    but libvirt migrates those value through migrateToURI(),
    so , no need to be called.

    Don't use thread for this method since migration should
    not be started when setting-up filtering rules operations
    are not completed.

    :param instance: nova.objects.instance.Instance object
    """
    # TODO(Vek): Need to pass context in for access to auth_token
    raise NotImplementedError()
def filter_defer_apply_on(self):
    """Defer application of IPTables rules."""
    # Intentionally a no-op by default.
    pass
def filter_defer_apply_off(self):
    """Turn off deferral of IPTables rules and apply the rules now."""
    # Intentionally a no-op by default.
    pass
def unfilter_instance(self, instance, network_info):
    """Stop filtering instance."""
    # TODO(Vek): Need to pass context in for access to auth_token
    raise NotImplementedError()
def set_admin_password(self, context, instance, new_pass=None):
    """Set the root password on the specified instance.

    :param context: security context
    :param instance: nova.objects.instance.Instance
    :param new_pass: the new password
    """
    raise NotImplementedError()
def inject_file(self, instance, b64_path, b64_contents):
    """Writes a file on the specified instance.

    The first parameter is an instance of nova.compute.service.Instance,
    and so the instance is being specified as instance.name. The second
    parameter is the base64-encoded path to which the file is to be
    written on the instance; the third is the contents of the file, also
    base64-encoded.

    NOTE(russellb) This method is deprecated and will be removed once it
    can be removed from nova.compute.manager.
    """
    # TODO(Vek): Need to pass context in for access to auth_token
    raise NotImplementedError()
def change_instance_metadata(self, context, instance, diff):
    """Applies a diff to the instance metadata.

    This is an optional driver method which is used to publish
    changes to the instance's metadata to the hypervisor.  If the
    hypervisor has no means of publishing the instance metadata to
    the instance, then this method should not be implemented.

    :param context: security context
    :param instance: nova.objects.instance.Instance
    """
    # Intentionally a no-op by default (optional driver method).
    pass
def inject_network_info(self, instance, nw_info):
    """Inject network info for specified instance."""
    # TODO(Vek): Need to pass context in for access to auth_token
    # Intentionally a no-op by default.
    pass
def poll_rebooting_instances(self, timeout, instances):
    """Poll for rebooting instances.

    :param timeout: the currently configured timeout for considering
                    rebooting instances to be stuck
    :param instances: instances that have been in rebooting state
                      longer than the configured timeout
    """
    # TODO(Vek): Need to pass context in for access to auth_token
    raise NotImplementedError()
def host_power_action(self, host, action):
    """Reboots, shuts down or powers up the host.

    :param host: target host
    :param action: the power action to perform
    """
    raise NotImplementedError()
def host_maintenance_mode(self, host, mode):
    """Start/Stop host maintenance window. On start, it triggers
    guest VMs evacuation.
    """
    raise NotImplementedError()
def set_host_enabled(self, host, enabled):
    """Sets the specified host's ability to accept new instances."""
    # TODO(Vek): Need to pass context in for access to auth_token
    raise NotImplementedError()
def get_host_uptime(self, host):
    """Returns the result of calling "uptime" on the target host."""
    # TODO(Vek): Need to pass context in for access to auth_token
    raise NotImplementedError()
def plug_vifs(self, instance, network_info):
    """Plug VIFs into networks.

    :param instance: nova.objects.instance.Instance
    """
    # TODO(Vek): Need to pass context in for access to auth_token
    raise NotImplementedError()
def unplug_vifs(self, instance, network_info):
    """Unplug VIFs from networks.

    :param instance: nova.objects.instance.Instance
    """
    raise NotImplementedError()
def get_host_stats(self, refresh=False):
    """Return currently known host stats.

    If the hypervisor supports pci passthrough, the returned
    dictionary includes a key-value pair for it.
    The key of pci passthrough device is "pci_passthrough_devices"
    and the value is a json string for the list of assignable
    pci devices. Each device is a dictionary, with mandatory
    keys of 'address', 'vendor_id', 'product_id', 'dev_type',
    'dev_id', 'label' and other optional device specific information.

    Refer to the objects/pci_device.py for more idea of these keys.
    """
    raise NotImplementedError()
def get_host_cpu_stats(self):
    """Get the currently known host CPU stats.

    :returns: a dict containing the CPU stat info, eg:

        | {'kernel': kern,
        |  'idle': idle,
        |  'user': user,
        |  'iowait': wait,
        |   'frequency': freq},

        where kern and user indicate the cumulative CPU time
        (nanoseconds) spent by kernel and user processes
        respectively, idle indicates the cumulative idle CPU time
        (nanoseconds), wait indicates the cumulative I/O wait CPU
        time (nanoseconds), since the host is booting up; freq
        indicates the current CPU frequency (MHz). All values are
        long integers.
    """
    raise NotImplementedError()
def block_stats(self, instance_name, disk_id):
    """Return performance counters associated with the given disk_id on the
    given instance_name.  These are returned as [rd_req, rd_bytes, wr_req,
    wr_bytes, errs], where rd indicates read, wr indicates write, req is
    the total number of I/O requests made, bytes is the total number of
    bytes transferred, and errs is the number of requests held up due to a
    full pipeline.

    All counters are long integers.

    This method is optional.  On some platforms (e.g. XenAPI) performance
    statistics can be retrieved directly in aggregate form, without Nova
    having to do the aggregation.  On those platforms, this method is
    unused.

    Note that this function takes an instance ID.
    """
    raise NotImplementedError()
def interface_stats(self, instance_name, iface_id):
    """Return performance counters associated with the given iface_id
    on the given instance_id.  These are returned as [rx_bytes, rx_packets,
    rx_errs, rx_drop, tx_bytes, tx_packets, tx_errs, tx_drop], where rx
    indicates receive, tx indicates transmit, bytes and packets indicate
    the total number of bytes or packets transferred, and errs and dropped
    is the total number of packets failed / dropped.

    All counters are long integers.

    This method is optional.  On some platforms (e.g. XenAPI) performance
    statistics can be retrieved directly in aggregate form, without Nova
    having to do the aggregation.  On those platforms, this method is
    unused.

    Note that this function takes an instance ID.
    """
    raise NotImplementedError()
def macs_for_instance(self, instance):
    """What MAC addresses must this instance have?

    Some hypervisors (such as bare metal) cannot do freeform virtualisation
    of MAC addresses. This method allows drivers to return a set of MAC
    addresses that the instance is to have. allocate_for_instance will take
    this into consideration when provisioning networking for the instance.

    Mapping of MAC addresses to actual networks (or permitting them to be
    freeform) is up to the network implementation layer. For instance,
    with openflow switches, fixed MAC addresses can still be virtualised
    onto any L2 domain, with arbitrary VLANs etc, but regular switches
    require pre-configured MAC->network mappings that will match the
    actual configuration.

    Most hypervisors can use the default implementation which returns None.
    Hypervisors with MAC limits should return a set of MAC addresses, which
    will be supplied to the allocate_for_instance call by the compute
    manager, and it is up to that call to ensure that all assigned network
    details are compatible with the set of MAC addresses.

    This is called during spawn_instance by the compute manager.

    :return: None, or a set of MAC ids (e.g. set(['12:34:56:78:90:ab'])).
        None means 'no constraints', a set means 'these and only these
        MAC addresses'.
    """
    # Default: no MAC constraints.
    return None
def dhcp_options_for_instance(self, instance):
    """Get DHCP options for this instance.

    Some hypervisors (such as bare metal) require that instances boot from
    the network, and manage their own TFTP service. This requires passing
    the appropriate options out to the DHCP service. Most hypervisors can
    use the default implementation which returns None.

    This is called during spawn_instance by the compute manager.

    Note that the format of the return value is specific to Quantum
    client API.

    :return: None, or a set of DHCP options, eg:

        |    [{'opt_name': 'bootfile-name',
        |      'opt_value': '/tftpboot/path/to/config'},
        |     {'opt_name': 'server-ip-address',
        |      'opt_value': '1.2.3.4'},
        |     {'opt_name': 'tftp-server',
        |      'opt_value': '1.2.3.4'}
        |    ]
    """
    # Default: no extra DHCP options (implicitly returns None).
    pass
def manage_image_cache(self, context, all_instances):
    """Manage the driver's local image cache.

    Some drivers chose to cache images for instances on disk. This method
    is an opportunity to do management of that cache which isn't directly
    related to other calls into the driver. The prime example is to clean
    the cache and remove images which are no longer of interest.

    :param context: security context
    :param all_instances: nova.objects.instance.InstanceList
    """
    # Intentionally a no-op by default (optional driver method).
    pass
def add_to_aggregate(self, context, aggregate, host, **kwargs):
    """Add a compute host to an aggregate."""
    # NOTE(jogo) Currently only used for XenAPI-Pool
    raise NotImplementedError()
def remove_from_aggregate(self, context, aggregate, host, **kwargs):
    """Remove a compute host from an aggregate."""
    raise NotImplementedError()
def undo_aggregate_operation(self, context, op, aggregate,
                             host, set_error=True):
    """Undo for Resource Pools."""
    raise NotImplementedError()
def get_volume_connector(self, instance):
    """Get connector information for the instance for attaching to volumes.

    Connector information is a dictionary representing the ip of the
    machine that will be making the connection, the name of the iscsi
    initiator and the hostname of the machine as follows::

        {
            'ip': ip,
            'initiator': initiator,
            'host': hostname
        }
    """
    raise NotImplementedError()
def get_available_nodes(self, refresh=False):
    """Returns nodenames of all nodes managed by the compute service.

    This method is for multi compute-nodes support. If a driver supports
    multi compute-nodes, this method returns a list of nodenames managed
    by the service. Otherwise, this method should return
    [hypervisor_hostname].

    :param refresh: passed through to get_host_stats
    :returns: list of hypervisor hostnames
    """
    host_stats = self.get_host_stats(refresh=refresh)
    # A driver may report stats as a single dict (one node) or as a
    # list of dicts (multiple nodes); normalize to a list first.
    if isinstance(host_stats, list):
        stats_list = host_stats
    else:
        stats_list = [host_stats]
    nodenames = []
    for entry in stats_list:
        nodenames.append(entry['hypervisor_hostname'])
    return nodenames
def node_is_available(self, nodename):
    """Return whether this compute service manages a particular node."""
    known_nodes = self.get_available_nodes()
    if nodename in known_nodes:
        return True
    # The node may have just appeared; refresh the view and re-check.
    return nodename in self.get_available_nodes(refresh=True)
def get_per_instance_usage(self):
    """Get information about instance resource usage.

    :returns: dict of nova uuid => dict of usage info
    """
    # Default: no per-instance usage information available.
    return {}
def instance_on_disk(self, instance):
    """Checks access of instance files on the host.

    :param instance: nova.objects.instance.Instance to lookup

    Returns True if files of an instance with the supplied ID accessible on
    the host, False otherwise.

    .. note::
        Used in rebuild for HA implementation and required for validation
        of access to instance shared disk files
    """
    # Default: conservatively assume the files are not accessible.
    return False
def register_event_listener(self, callback):
    """Register a callback to receive events.

    Register a callback to receive asynchronous event
    notifications from hypervisors. The callback will
    be invoked with a single parameter, which will be
    an instance of the nova.virt.event.Event class.

    :param callback: callable invoked by emit_event; replaces any
                     previously registered listener
    """
    self._compute_event_callback = callback
def emit_event(self, event):
    """Dispatches an event to the compute manager.

    Invokes the event callback registered by the
    compute manager to dispatch the event. This
    must only be invoked from a green thread.

    :param event: the nova.virt.event.Event instance to dispatch
    :raises ValueError: if event is not a nova.virt.event.Event
    """
    if not self._compute_event_callback:
        # No listener registered yet; drop the event rather than fail.
        LOG.debug(_("Discarding event %s"), event)
        return
    if not isinstance(event, virtevent.Event):
        raise ValueError(
            _("Event must be an instance of nova.virt.event.Event"))
    try:
        # Pass the event as a lazy %-style argument so the message is
        # only formatted when the debug level is actually enabled.
        LOG.debug(_("Emitting event %s"), event)
        self._compute_event_callback(event)
    except Exception as ex:
        # Never let a listener error propagate back to the caller.
        LOG.error(_("Exception dispatching event %(event)s: %(ex)s"),
                  {'event': event, 'ex': ex})
def delete_instance_files(self, instance):
    """Delete any lingering instance files for an instance.

    :param instance: nova.objects.instance.Instance
    :returns: True if the instance was deleted from disk, False otherwise.
    """
    # Default: report success; drivers with on-disk files override this.
    return True
@property
def need_legacy_block_device_info(self):
    """Tell the caller if the driver requires legacy block device info.

    Tell the caller whether we expect the legacy format of block
    device info to be passed in to methods that expect it.
    """
    # Default: drivers expect the legacy block device info format.
    return True
def volume_snapshot_create(self, context, instance, volume_id,
                           create_info):
    """Snapshots volumes attached to a specified instance.

    :param context: request context
    :param instance: nova.objects.instance.Instance that has the volume
           attached
    :param volume_id: Volume to be snapshotted
    :param create_info: The data needed for nova to be able to attach
           to the volume. This is the same data format returned by
           Cinder's initialize_connection() API call. In the case of
           doing a snapshot, it is the image file Cinder expects to be
           used as the active disk after the snapshot operation has
           completed. There may be other data included as well that is
           needed for creating the snapshot.
    """
    raise NotImplementedError()
def volume_snapshot_delete(self, context, instance, volume_id,
                           snapshot_id, delete_info):
    """Deletes a snapshot of a volume attached to a specified instance.

    :param context: request context
    :param instance: nova.objects.instance.Instance that has the volume
           attached
    :param volume_id: Attached volume associated with the snapshot
    :param snapshot_id: The snapshot to delete.
    :param delete_info: Volume backend technology specific data needed to
           be able to complete the snapshot. For example, in the case of
           qcow2 backed snapshots, this would include the file being
           merged, and the file being merged into (if appropriate).
    """
    raise NotImplementedError()
def default_root_device_name(self, instance, image_meta, root_bdm):
    """Provide a default root device name for the driver."""
    raise NotImplementedError()
def default_device_names_for_instance(self, instance, root_device_name,
                                      *block_device_lists):
    """Default the missing device names in the block device mapping."""
    raise NotImplementedError()
def load_compute_driver(virtapi, compute_driver=None):
    """Load a compute driver module.

    Load the compute driver module specified by the compute_driver
    configuration option or, if supplied, the driver name supplied as an
    argument.

    Compute drivers constructors take a VirtAPI object as their first object
    and this must be supplied.

    :param virtapi: a VirtAPI instance
    :param compute_driver: a compute driver name to override the config opt
    :returns: a ComputeDriver instance
    """
    # Explicit argument wins; otherwise fall back to the config option.
    driver_name = compute_driver or CONF.compute_driver

    if not driver_name:
        LOG.error(_("Compute driver option required, but not specified"))
        sys.exit(1)

    LOG.info(_("Loading compute driver '%s'") % driver_name)
    try:
        # Import relative to the 'nova.virt' namespace and instantiate
        # with the mandatory VirtAPI argument.
        driver = importutils.import_object_ns('nova.virt',
                                              driver_name,
                                              virtapi)
        return utils.check_isinstance(driver, ComputeDriver)
    except ImportError:
        LOG.exception(_("Unable to load the virtualization driver"))
        sys.exit(1)
def compute_driver_matches(match):
    """Return a truthy value when the configured driver name ends with *match*.

    Note: preserves the original short-circuit semantics — when no driver
    is configured the falsy config value itself is returned, not False.
    """
    configured = CONF.compute_driver
    return configured and configured.endswith(match)
| ycl2045/nova-master | nova/virt/driver.py | Python | apache-2.0 | 52,734 |
#!/usr/bin/env kross
# -*- coding: utf-8 -*-
import os, datetime, sys, traceback, pickle
import Kross, Plan
T = Kross.module("kdetranslation")
class BusyinfoImporter:
    """Kross script object: shows a file dialog and imports resource busy
    information (*.rbi, a sequence of pickled records) into the current
    Plan project.

    NOTE(review): this is Python 2 code (print statements below) driven
    by the Kross scripting host; it cannot run standalone.
    """
    def __init__(self, scriptaction):
        # The Kross script action that launched us; used for paths only.
        self.scriptaction = scriptaction
        self.currentpath = self.scriptaction.currentPath()
        self.proj = Plan.project()
        self.forms = Kross.module("forms")
        self.dialog = self.forms.createDialog(T.i18n("Busy Information Import"))
        self.dialog.setButtons("Ok|Cancel")
        self.dialog.setFaceType("List") #Auto Plain List Tree Tabbed
        openpage = self.dialog.addPage(T.i18n("Open"), T.i18n("Import Busy Info File"),"document-open")
        self.openwidget = self.forms.createFileWidget(openpage, "kfiledialog:///kplatobusyinfoimportopen")
        self.openwidget.setMode("Opening")
        self.openwidget.setFilter("*.rbi|%(1)s\n*|%(2)s" % { '1' : T.i18n("Resource Busy Information"), '2' : T.i18n("All Files") } )
        if self.dialog.exec_loop():
            try:
                # Wrap the whole import in a single undoable command.
                Plan.beginCommand( T.i18nc("(qtundo_format)", "Import resource busy information") )
                self.doImport( self.proj )
                Plan.endCommand()
            except:
                # Roll the command back and surface the traceback to the user.
                Plan.revertCommand()
                self.forms.showMessageBox("Error", T.i18n("Error"), "%s" % "".join( traceback.format_exception(sys.exc_info()[0],sys.exc_info()[1],sys.exc_info()[2]) ))
    def doImport( self, project ):
        """Read the selected .rbi file and load every appointment record."""
        filename = self.openwidget.selectedFile()
        if not os.path.isfile(filename):
            self.forms.showMessageBox("Sorry", T.i18n("Error"), T.i18n("No file selected") )
            return
        file = open(filename,'r')
        try:
            # load project id and -name (first pickled record)
            data = pickle.load( file )
            #print data
            pid = data[0]
            if project.id() == pid:
                self.forms.showMessageBox("Error", T.i18n("Error"), T.i18n("Cannot load data from project with the same identity") )
                raise Exception
            pname = data[1].decode( "UTF-8" )
            # clear existing, so we don't get double up
            project.clearExternalAppointments( pid )
            # load the intervals; pickle.load raising EOFError at end of
            # file is what terminates this loop (caught below).
            while True:
                data = pickle.load( file )
                self.loadAppointment( project, pid, pname, data )
        except:
            file.close()
    def loadAppointment( self, project, pid, pname, data ):
        """Attach one external appointment record to the matching resource.

        data layout (from the pickle): [resource_id, resource_name,
        interval fields...] — presumably data[2:5] is (start, end, load);
        confirm against the exporter.
        """
        r = project.findResource( data[0] )
        if r is None:
            print "Resource is not used in this project: %s, %s" % ( data[0], data[1] )
            return
        if project.data( r, 'ResourceName' ) != data[1]:
            #TODO Warning ?
            print "Resources has same id but different names %s - %s" % ( project.data( r, 'ResourceName' ), data[1] )
        r.addExternalAppointment( pid, pname, data[2:5] )
# Kross executes this script with the current script action bound to
# ``self`` at module scope; instantiating the importer opens the dialog
# and runs the import immediately.
BusyinfoImporter( self )
| yxl/emscripten-calligra-mobile | plan/plugins/scripting/scripts/busyinfoimport.py | Python | gpl-2.0 | 2,939 |
"""
This module contains some assorted functions used in tests
"""
from __future__ import absolute_import
import os
from importlib import import_module
from twisted.trial.unittest import SkipTest
from scrapy.exceptions import NotConfigured
from scrapy.utils.boto import is_botocore
def assert_aws_environ():
    """Assert the current environment is suitable for running AWS tests.

    Raises SkipTest with the reason if it's not.
    """
    skip_if_no_boto()
    if os.environ.get('AWS_ACCESS_KEY_ID') is None:
        raise SkipTest("AWS keys not found")
def assert_gcs_environ():
    """Skip the calling test when no GCS project is configured."""
    if os.environ.get('GCS_PROJECT_ID') is None:
        raise SkipTest("GCS_PROJECT_ID not found")
def skip_if_no_boto():
    """Skip the calling test when the boto/botocore detection fails.

    Delegates to scrapy.utils.boto.is_botocore() and translates its
    NotConfigured error (presumably raised when botocore is missing —
    confirm in scrapy.utils.boto) into a twisted.trial SkipTest.
    """
    try:
        is_botocore()
    except NotConfigured as e:
        raise SkipTest(e)
def get_s3_content_and_delete(bucket, path, with_key=False):
    """ Get content from s3 key, and delete key afterwards.

    :param bucket: S3 bucket name
    :param path: key within the bucket
    :param with_key: when True, return (content, key) instead of content;
        note the "key" object is botocore's get_object dict in one branch
        and a boto Key in the other.
    """
    if is_botocore():
        import botocore.session
        session = botocore.session.get_session()
        client = session.create_client('s3')
        # Read the body BEFORE deleting the object.
        key = client.get_object(Bucket=bucket, Key=path)
        content = key['Body'].read()
        client.delete_object(Bucket=bucket, Key=path)
    else:
        import boto
        # assuming boto=2.2.2
        # validate=False skips a HEAD request on the bucket.
        bucket = boto.connect_s3().get_bucket(bucket, validate=False)
        key = bucket.get_key(path)
        content = key.get_contents_as_string()
        bucket.delete_key(path)
    return (content, key) if with_key else content
def get_gcs_content_and_delete(bucket, path):
    """Download a blob from Google Cloud Storage, delete it, and return
    (content, blob).  Uses the GCS_PROJECT_ID environment variable for
    the client project.
    """
    from google.cloud import storage
    client = storage.Client(project=os.environ.get('GCS_PROJECT_ID'))
    bucket = client.get_bucket(bucket)
    blob = bucket.get_blob(path)
    content = blob.download_as_string()
    bucket.delete_blob(path)
    return content, blob
def get_crawler(spidercls=None, settings_dict=None):
    """Return an unconfigured Crawler object. If settings_dict is given, it
    will be used to populate the crawler settings with a project level
    priority.
    """
    # Imported lazily so merely importing this module stays cheap.
    from scrapy.crawler import CrawlerRunner
    from scrapy.spiders import Spider

    if spidercls is None:
        spidercls = Spider
    return CrawlerRunner(settings_dict).create_crawler(spidercls)
def get_pythonpath():
    """Return a PYTHONPATH suitable to use in processes so that they find this
    installation of Scrapy"""
    # Directory that CONTAINS the scrapy package, plus any existing path.
    scrapy_dir = os.path.dirname(import_module('scrapy').__path__[0])
    existing = os.environ.get('PYTHONPATH', '')
    return scrapy_dir + os.pathsep + existing
def get_testenv():
    """Return a OS environment dict suitable to fork processes that need to import
    this installation of Scrapy, instead of a system installed one.
    """
    env = dict(os.environ)
    env['PYTHONPATH'] = get_pythonpath()
    return env
def assert_samelines(testcase, text1, text2, msg=None):
    """Asserts text1 and text2 have the same lines, ignoring differences in
    line endings between platforms
    """
    lines1 = text1.splitlines()
    lines2 = text2.splitlines()
    testcase.assertEqual(lines1, lines2, msg)
| umrashrf/scrapy | scrapy/utils/test.py | Python | bsd-3-clause | 3,020 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2014:
# Gabes Jean, [email protected]
# Gerhard Lausser, [email protected]
# Gregory Starck, [email protected]
# Hartmut Goebel, [email protected]
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
import time
from action import Action
from shinken.property import IntegerProp, StringProp, FloatProp, BoolProp
from shinken.autoslots import AutoSlots
""" TODO: Add some comment about this class for the doc"""
class EventHandler(Action):
    """Event handler action.

    Wraps a command the scheduler wants executed in reaction to a state
    change (or to take a snapshot when ``is_snapshot`` is True).  The
    object is shipped to a worker (module_type 'fork' by default),
    executed there, and the results are copied back with
    :meth:`get_return_from`.
    """

    # AutoSlots create the __slots__ with properties and
    # running_properties names
    __metaclass__ = AutoSlots

    my_type = 'eventhandler'

    properties = {
        'is_a': StringProp(default='eventhandler'),
        'type': StringProp(default=''),
        '_in_timeout': StringProp(default=False),
        'status': StringProp(default=''),
        'exit_status': StringProp(default=3),
        'output': StringProp(default=''),
        'long_output': StringProp(default=''),
        't_to_go': StringProp(default=0),
        'execution_time': FloatProp(default=0),
        'u_time': FloatProp(default=0.0),
        's_time': FloatProp(default=0.0),
        'env': StringProp(default={}),
        'perf_data': StringProp(default=''),
        'sched_id': IntegerProp(default=0),
        'timeout': IntegerProp(default=10),
        # NOTE: 'check_time' appeared twice in the original literal (a
        # StringProp and this IntegerProp); only the last duplicate key
        # survives dict construction, so the IntegerProp entry is kept.
        'check_time': IntegerProp(default=0),
        'command': StringProp(default=''),
        'module_type': StringProp(default='fork'),
        'worker': StringProp(default='none'),
        'reactionner_tag': StringProp(default='None'),
        'is_snapshot': BoolProp(default=False),
    }

    # id = 0 #Is common to Actions

    def __init__(self, command, id=None, ref=None, timeout=10, env=None, \
                 module_type='fork', reactionner_tag='None', is_snapshot=False):
        """Create a scheduled event handler for *command*.

        :param command: full command line to run
        :param id: preset action id; None (the default) allocates a new
            one from Action.id — a non-None id is used by copy calls only
        :param ref: object this handler belongs to
        :param timeout: seconds before the worker gives up on the command
        :param env: unused, kept for API compatibility.  The previous
            default was a shared mutable dict (``env={}``), a classic
            Python pitfall; None is used instead.
        :param module_type: worker module that will execute the command
        :param reactionner_tag: tag used to route to a tagged reactionner
        :param is_snapshot: True when this run takes a snapshot
        """
        self.is_a = 'eventhandler'
        self.type = ''
        self.status = 'scheduled'
        if id is None:  # id != None is for copy call only
            self.id = Action.id
            Action.id += 1
        self.ref = ref
        self._in_timeout = False
        self.timeout = timeout
        self.exit_status = 3
        self.command = command
        self.output = ''
        self.long_output = ''
        self.t_to_go = time.time()
        self.check_time = 0
        self.execution_time = 0
        self.u_time = 0
        self.s_time = 0
        self.perf_data = ''
        # NOTE: the ``env`` argument is intentionally not stored; the
        # original code always reset this attribute to a fresh dict too.
        self.env = {}
        self.module_type = module_type
        self.worker = 'none'
        self.reactionner_tag = reactionner_tag
        self.is_snapshot = is_snapshot

    # return a copy of the check but just what is important for execution
    # So we remove the ref and all
    def copy_shell(self):
        """Return a lightweight copy carrying only declared properties."""
        # We create a dummy check with nothing in it, just defaults values
        return self.copy_shell__(EventHandler('', id=self.id, is_snapshot=self.is_snapshot))

    def get_return_from(self, e):
        """Copy execution results back from a returned action *e*."""
        self.exit_status = e.exit_status
        self.output = e.output
        self.long_output = getattr(e, 'long_output', '')
        self.check_time = e.check_time
        self.execution_time = getattr(e, 'execution_time', 0.0)
        self.perf_data = getattr(e, 'perf_data', '')

    def get_outputs(self, out, max_plugins_output_length):
        """Store raw output; no truncation is applied here."""
        self.output = out

    def is_launchable(self, t):
        """Return True if the handler may be launched at timestamp *t*."""
        return t >= self.t_to_go

    def __str__(self):
        # Kept verbatim ("Check ...") for log-format compatibility, even
        # though this object is an event handler rather than a check.
        return "Check %d status:%s command:%s" % (self.id, self.status, self.command)

    def get_id(self):
        """Return the unique action id."""
        return self.id

    # Call by pickle to dataify the comment
    # because we DO NOT WANT REF in this pickleisation!
    def __getstate__(self):
        """Serialize only 'id' plus declared properties (never ``ref``)."""
        cls = self.__class__
        # id is not in *_properties
        res = {'id': self.id}
        for prop in cls.properties:
            if hasattr(self, prop):
                res[prop] = getattr(self, prop)
        return res

    # Inverted function of getstate
    def __setstate__(self, state):
        """Restore from a pickled state, defaulting fields added later."""
        cls = self.__class__
        self.id = state['id']
        for prop in cls.properties:
            if prop in state:
                setattr(self, prop, state[prop])
        # Older payloads may predate these attributes:
        if not hasattr(self, 'worker'):
            self.worker = 'none'
        if not getattr(self, 'module_type', None):
            self.module_type = 'fork'
        # s_time and u_time are added between 1.2 and 1.4
        if not hasattr(self, 'u_time'):
            self.u_time = 0
            self.s_time = 0
| kaji-project/shinken | shinken/eventhandler.py | Python | agpl-3.0 | 5,296 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
LocalExecutor
.. seealso::
For more information on how the LocalExecutor works, take a look at the guide:
:ref:`executor:LocalExecutor`
"""
import logging
import os
import subprocess
from abc import abstractmethod
from multiprocessing import Manager, Process
from multiprocessing.managers import SyncManager
from queue import Empty, Queue # pylint: disable=unused-import # noqa: F401
from typing import Any, List, Optional, Tuple, Union # pylint: disable=unused-import # noqa: F401
from setproctitle import setproctitle # pylint: disable=no-name-in-module
from airflow import settings
from airflow.exceptions import AirflowException
from airflow.executors.base_executor import NOT_STARTED_MESSAGE, PARALLELISM, BaseExecutor, CommandType
from airflow.models.taskinstance import ( # pylint: disable=unused-import # noqa: F401
TaskInstanceKey,
TaskInstanceStateType,
)
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.state import State
# This is a work to be executed by a worker.
# It can Key and Command - but it can also be None, None which is actually a
# "Poison Pill" - worker seeing Poison Pill should take the pill and ... die instantly.
ExecutorWorkType = Tuple[Optional[TaskInstanceKey], Optional[CommandType]]
class LocalWorkerBase(Process, LoggingMixin):
    """
    LocalWorkerBase implementation to run airflow commands. Executes the given
    command and puts the result into a result queue when done, terminating execution.

    :param result_queue: the queue to store result state
    """

    def __init__(self, result_queue: 'Queue[TaskInstanceStateType]'):
        super().__init__(target=self.do_work)
        # Daemonize so a stray worker cannot keep the parent process alive.
        self.daemon: bool = True
        self.result_queue: 'Queue[TaskInstanceStateType]' = result_queue

    def run(self):
        # We know we've just started a new process, so lets disconnect from the metadata db now
        settings.engine.pool.dispose()
        settings.engine.dispose()
        return super().run()

    def execute_work(self, key: TaskInstanceKey, command: CommandType) -> None:
        """
        Executes command received and stores result state in queue.

        :param key: the key to identify the task instance
        :param command: the command to execute
        """
        if key is None:
            return
        self.log.info("%s running %s", self.__class__.__name__, command)
        if settings.EXECUTE_TASKS_NEW_PYTHON_INTERPRETER:
            state = self._execute_work_in_subprocess(command)
        else:
            state = self._execute_work_in_fork(command)
        self.result_queue.put((key, state))

    def _execute_work_in_subprocess(self, command: CommandType) -> str:
        # Run the command in a fresh interpreter; non-zero exit -> FAILED.
        try:
            subprocess.check_call(command, close_fds=True)
            return State.SUCCESS
        except subprocess.CalledProcessError as e:
            self.log.error("Failed to execute task %s.", str(e))
            return State.FAILED

    def _execute_work_in_fork(self, command: CommandType) -> str:
        pid = os.fork()
        if pid:
            # In parent, wait for the child
            pid, ret = os.waitpid(pid, 0)
            return State.SUCCESS if ret == 0 else State.FAILED

        # Child process from here on: run the airflow CLI in-process and
        # leave via os._exit() so the parent's stack/atexit handlers are
        # never unwound in the child.
        from airflow.sentry import Sentry

        ret = 1
        try:
            import signal
            from airflow.cli.cli_parser import get_parser

            # Reset handlers the parent may have installed.
            signal.signal(signal.SIGINT, signal.SIG_DFL)
            signal.signal(signal.SIGTERM, signal.SIG_DFL)
            signal.signal(signal.SIGUSR2, signal.SIG_DFL)

            parser = get_parser()
            # [1:] - remove "airflow" from the start of the command
            args = parser.parse_args(command[1:])
            args.shut_down_logging = False

            setproctitle(f"airflow task supervisor: {command}")

            args.func(args)
            ret = 0
            return State.SUCCESS
        except Exception as e:  # pylint: disable=broad-except
            self.log.error("Failed to execute task %s.", str(e))
        finally:
            Sentry.flush()
            logging.shutdown()
            os._exit(ret)  # pylint: disable=protected-access
        raise RuntimeError('unreachable -- keep mypy happy')

    @abstractmethod
    def do_work(self):
        """Called in the subprocess and should then execute tasks"""
        raise NotImplementedError()
class LocalWorker(LocalWorkerBase):
    """
    Local worker that runs exactly one task and then terminates.

    :param result_queue: queue where results of the tasks are put.
    :param key: key identifying task instance
    :param command: Command to execute
    """

    def __init__(
        self, result_queue: 'Queue[TaskInstanceStateType]', key: TaskInstanceKey, command: CommandType
    ):
        super().__init__(result_queue)
        # The single (key, command) pair this worker exists to execute.
        self.command: CommandType = command
        self.key: TaskInstanceKey = key

    def do_work(self) -> None:
        # Run the one assigned task; execute_work reports the state back
        # through the result queue.
        self.execute_work(key=self.key, command=self.command)
class QueuedLocalWorker(LocalWorkerBase):
    """
    LocalWorker implementation that is waiting for tasks from a queue and will
    continue executing commands as they become available in the queue.
    It will terminate execution once the poison token is found.

    :param task_queue: queue from which worker reads tasks
    :param result_queue: queue where worker puts results after finishing tasks
    """

    def __init__(self, task_queue: 'Queue[ExecutorWorkType]', result_queue: 'Queue[TaskInstanceStateType]'):
        super().__init__(result_queue=result_queue)
        self.task_queue = task_queue

    def do_work(self) -> None:
        # Consume (key, command) pairs until the (None, None) poison pill
        # arrives or the feeding end of the queue is closed.
        while True:
            try:
                key, command = self.task_queue.get()
            except EOFError:
                self.log.info(
                    "Failed to read tasks from the task queue because the other "
                    "end has closed the connection. Terminating worker %s.",
                    self.name,
                )
                break
            try:
                if key is None or command is None:
                    # Received poison pill, no more tasks to run
                    break
                self.execute_work(key=key, command=command)
            finally:
                # task_done() must run even for the poison pill so that a
                # join() on the task queue can complete.
                self.task_queue.task_done()
class LocalExecutor(BaseExecutor):
    """
    LocalExecutor executes tasks locally in parallel.
    It uses the multiprocessing Python library and queues to parallelize the execution
    of tasks.

    :param parallelism: how many parallel processes are run in the executor
    """

    def __init__(self, parallelism: int = PARALLELISM):
        super().__init__(parallelism=parallelism)
        self.manager: Optional[SyncManager] = None
        self.result_queue: Optional['Queue[TaskInstanceStateType]'] = None
        self.workers: List[QueuedLocalWorker] = []
        self.workers_used: int = 0
        self.workers_active: int = 0
        # Strategy object chosen in start(): unlimited when parallelism == 0,
        # otherwise a fixed pool of queued workers.
        self.impl: Optional[
            Union['LocalExecutor.UnlimitedParallelism', 'LocalExecutor.LimitedParallelism']
        ] = None

    class UnlimitedParallelism:
        """
        Implements LocalExecutor with unlimited parallelism, starting one process
        per each command to execute.

        :param executor: the executor instance to implement.
        """

        def __init__(self, executor: 'LocalExecutor'):
            self.executor: 'LocalExecutor' = executor

        def start(self) -> None:
            """Starts the executor."""
            self.executor.workers_used = 0
            self.executor.workers_active = 0

        # pylint: disable=unused-argument # pragma: no cover
        def execute_async(
            self,
            key: TaskInstanceKey,
            command: CommandType,
            queue: Optional[str] = None,
            executor_config: Optional[Any] = None,
        ) -> None:
            """
            Executes task asynchronously.

            :param key: the key to identify the task instance
            :param command: the command to execute
            :param queue: Name of the queue
            :param executor_config: configuration for the executor
            """
            if not self.executor.result_queue:
                raise AirflowException(NOT_STARTED_MESSAGE)
            # One dedicated process per task; it exits when the task ends.
            local_worker = LocalWorker(self.executor.result_queue, key=key, command=command)
            self.executor.workers_used += 1
            self.executor.workers_active += 1
            local_worker.start()

        # pylint: enable=unused-argument # pragma: no cover
        def sync(self) -> None:
            """Sync will get called periodically by the heartbeat method."""
            if not self.executor.result_queue:
                raise AirflowException("Executor should be started first")
            while not self.executor.result_queue.empty():
                results = self.executor.result_queue.get()
                self.executor.change_state(*results)
                # Each drained result corresponds to one finished worker.
                self.executor.workers_active -= 1

        def end(self) -> None:
            """
            This method is called when the caller is done submitting job and
            wants to wait synchronously for the job submitted previously to be
            all done.
            """
            while self.executor.workers_active > 0:
                self.executor.sync()

    class LimitedParallelism:
        """
        Implements LocalExecutor with limited parallelism using a task queue to
        coordinate work distribution.

        :param executor: the executor instance to implement.
        """

        def __init__(self, executor: 'LocalExecutor'):
            self.executor: 'LocalExecutor' = executor
            self.queue: Optional['Queue[ExecutorWorkType]'] = None

        def start(self) -> None:
            """Starts limited parallelism implementation."""
            if not self.executor.manager:
                raise AirflowException(NOT_STARTED_MESSAGE)
            self.queue = self.executor.manager.Queue()
            if not self.executor.result_queue:
                raise AirflowException(NOT_STARTED_MESSAGE)
            # Fixed pool: 'parallelism' long-lived workers sharing one queue.
            self.executor.workers = [
                QueuedLocalWorker(self.queue, self.executor.result_queue)
                for _ in range(self.executor.parallelism)
            ]
            self.executor.workers_used = len(self.executor.workers)
            for worker in self.executor.workers:
                worker.start()

        def execute_async(
            self,
            key: TaskInstanceKey,
            command: CommandType,
            queue: Optional[str] = None,  # pylint: disable=unused-argument
            executor_config: Optional[Any] = None,  # pylint: disable=unused-argument
        ) -> None:
            """
            Executes task asynchronously.

            :param key: the key to identify the task instance
            :param command: the command to execute
            :param queue: name of the queue
            :param executor_config: configuration for the executor
            """
            if not self.queue:
                raise AirflowException(NOT_STARTED_MESSAGE)
            self.queue.put((key, command))

        def sync(self):
            """Sync will get called periodically by the heartbeat method."""
            while True:
                try:
                    results = self.executor.result_queue.get_nowait()
                    try:
                        self.executor.change_state(*results)
                    finally:
                        self.executor.result_queue.task_done()
                except Empty:
                    break

        def end(self):
            """Ends the executor. Sends the poison pill to all workers."""
            for _ in self.executor.workers:
                self.queue.put((None, None))

            # Wait for commands to finish
            self.queue.join()
            self.executor.sync()

    def start(self) -> None:
        """Starts the executor"""
        # Manager-backed queue so results can cross process boundaries.
        self.manager = Manager()
        self.result_queue = self.manager.Queue()
        self.workers = []
        self.workers_used = 0
        self.workers_active = 0
        # parallelism == 0 means "no limit": one process per task.
        self.impl = (
            LocalExecutor.UnlimitedParallelism(self)
            if self.parallelism == 0
            else LocalExecutor.LimitedParallelism(self)
        )
        self.impl.start()

    def execute_async(
        self,
        key: TaskInstanceKey,
        command: CommandType,
        queue: Optional[str] = None,
        executor_config: Optional[Any] = None,
    ) -> None:
        """Execute asynchronously."""
        if not self.impl:
            raise AirflowException(NOT_STARTED_MESSAGE)

        self.validate_command(command)

        self.impl.execute_async(key=key, command=command, queue=queue, executor_config=executor_config)

    def sync(self) -> None:
        """Sync will get called periodically by the heartbeat method."""
        if not self.impl:
            raise AirflowException(NOT_STARTED_MESSAGE)
        self.impl.sync()

    def end(self) -> None:
        """
        Ends the executor.

        :return:
        """
        if not self.impl:
            raise AirflowException(NOT_STARTED_MESSAGE)
        if not self.manager:
            raise AirflowException(NOT_STARTED_MESSAGE)
        self.impl.end()
        self.manager.shutdown()

    def terminate(self):
        """Terminate the executor is not doing anything."""
| DinoCow/airflow | airflow/executors/local_executor.py | Python | apache-2.0 | 14,191 |
#!/usr/bin/python
# -*- encoding: utf-8 -*-
#
# author: javi santana
from fabric.api import *
from fabric.contrib.files import upload_template
from fabric.contrib.files import exists
import os.path
import tempfile
import fab_python
def svn_checkout(to_dir):
    """Check out the configured SVN repository into *to_dir* on the remote host.

    Reads repo, repo_user and repo_password from the Fabric ``env``.

    NOTE(review): the credentials are interpolated into the command line,
    so they are visible in shell history / process listings on the remote
    host; consider cached auth instead.
    """
    cmd = "svn co --non-interactive --no-auth-cache --trust-server-cert --username %(repo_user)s --password %(repo_password)s %(repo)s" % env
    cmd = cmd + " " + to_dir
    run(cmd)
def install(conf_folder):
    """Installation hook — currently a no-op placeholder."""
    pass
def copy_conf_files(conf_folder, project_dir, is_mobile=False):
    """Upload the WSGI entry point and settings files to the server.

    local_settings.py is rendered as a template against the Fabric env;
    the mobile settings file is only uploaded when *is_mobile* is True.
    """
    put('%s/django/app.wsgi' % conf_folder, project_dir)
    upload_template('%s/django/local_settings.py' % conf_folder, project_dir + "/app", context=env)
    if is_mobile:
        put('%s/django/mobile_local_settings.py' % conf_folder, project_dir + "/app")
def create_virtualenv(conf_folder, project_dir):
    """Upload the appropriate requirements.txt and build the 'env' virtualenv.

    Prefers a project-specific requirements file under *conf_folder*,
    falling back to the one in env.source_folder.
    """
    req_file = '%s/django/requirements.txt' % conf_folder
    # if project has own req file upload
    if(os.path.exists(req_file)):
        put(req_file, project_dir)
    else:
        put('%s/requirements.txt' % env.source_folder, project_dir)
    fab_python.create_virtualenv(project_dir + "/requirements.txt", "env", project_dir)
def prepare_env(conf_folder, project_dir):
    """Create the remote project layout: virtualenv, app/ code (from SVN
    when env.from_repo is set, otherwise a tarball of env.source_folder)
    and a logs/ directory.

    NOTE(review): the tarball-upload branch is duplicated in deploy().
    """
    create_virtualenv(conf_folder, project_dir)
    with cd(project_dir):
        if env.from_repo:
            svn_checkout("app")
        else:
            tmpfile = tempfile.mktemp()
            local("cd %s && tar -czf %s.tar.gz ." % (env.source_folder, tmpfile))
            put('%s.tar.gz' % tmpfile , '/tmp/%s.tar.gz' % env.app_name)
            run("mkdir -p app")
            run('tar -xzf /tmp/%s.tar.gz -C app' % env.app_name)
        run("mkdir -p logs")
def syncdb():
    """Run Django syncdb on the remote host, taking a DB dump first."""
    # make a dump to avoid problems
    run("mysqldump -u%(database_admin)s -p%(database_admin_pass)s %(database_name)s > dump_%(database_name)s.sql" % env)
    run('source env/bin/activate && python app/manage.py syncdb --noinput')
    #run('source env/bin/activate && python app/manage.py migrate')
def update_index():
    """Rebuild the search index (manage.py update_index) remotely."""
    run('source env/bin/activate && python app/manage.py update_index')
def load_fixture(fixture_file):
    """Load a single fixture file via manage.py loaddata remotely."""
    run("source env/bin/activate && python app/manage.py loaddata %s" % fixture_file)
def deploy():
    """Refresh the remote app/ directory: 'svn up' when deploying from the
    repository, otherwise tar up env.source_folder locally and unpack it
    over app/ on the server.

    NOTE(review): the tarball branch duplicates prepare_env(); unlike
    prepare_env this runs without cd(project_dir) — presumably callers
    are already in the deploy folder.
    """
    if env.from_repo:
        cmd = "svn up --username %(repo_user)s --password %(repo_password)s app" % env
        run(cmd)
    else:
        tmpfile = tempfile.mktemp()
        local("cd %s && tar -czf %s.tar.gz ." % (env.source_folder, tmpfile))
        put('%s.tar.gz' % tmpfile , '/tmp/%s.tar.gz' % env.app_name)
        run("mkdir -p app")
        run('tar -xzf /tmp/%s.tar.gz -C app' % env.app_name)
def create_admin():
    """Create the Django superuser 'admin' ([email protected]) remotely.

    NOTE(review): env.admin_password ends up on the remote command line.
    """
    cmd = "from django.contrib.auth.models import User; User.objects.create_superuser('admin', '[email protected]', '%s')" % env.admin_password
    run('source env/bin/activate && echo "' + cmd + '" | python app/manage.py shell')
def run_django_cmd(cmd):
    """Run an arbitrary manage.py command inside the remote virtualenv.

    Fix: the previous body ignored *cmd* and always ran
    ``syncdb --noinput`` (copy/paste from syncdb()); it now executes the
    requested command.
    """
    run('source env/bin/activate && python app/manage.py %s' % cmd)
def compile_locales():
    """Compile gettext message catalogs (django-admin compilemessages)
    inside the remote deploy folder's app/ directory.
    """
    with cd(env.deploy_folder):
        run('source env/bin/activate && cd app/ && django-admin.py compilemessages')
def start():
    """Start hook — currently a no-op."""
    pass
def stop():
    """Stop hook — currently a no-op."""
    pass
def restart():
    """Reload the application by touching the WSGI file.

    NOTE(review): runs in the remote session's current directory;
    presumably callers cd to the deploy folder first — confirm.
    """
    run("touch app.wsgi")
def clean_pyc():
    """Delete stale *.pyc files, via find (as root) and manage.py clean_pyc."""
    sudo('find . -name "*.pyc" -delete')
    run('source env/bin/activate && python app/manage.py clean_pyc')
def restart_app(app_name):
    """Restart a supervisord-managed program by name."""
    sudo('supervisorctl restart %s' % app_name)
def load_data(data):
    """Load application fixtures.

    *data* maps an app name (or a falsy key for project-level fixtures)
    to a list of fixture names.  App-level fixtures are loaded from
    app/<app>/fixtures/<name>.json; the falsy key delegates to
    load_fixture().
    """
    for app_label, fixtures in data.items():
        for fixture in fixtures:
            if not app_label:
                load_fixture(fixture)
            else:
                run("source env/bin/activate && python app/manage.py loaddata app/%s/fixtures/%s.json" % (app_label, fixture))
| frhumanes/consulting | web/deploy/wtdeploy/wtdeploy/modules/fab_django.py | Python | apache-2.0 | 3,798 |
# -*- coding: utf-8 -*-
import logging, logging.handlers
from django.conf import settings
def get_logger(name, level=logging.INFO, format='[%(asctime)s] %(message)s', handler=None, filename=None):
    """Return the named logger configured with *level* and one handler.

    When no *handler* is supplied a FileHandler is created, writing to
    *filename* or to <settings.HOME_DIR>/logs/<name>.log by default.
    The handler's formatter is always replaced with *format*.
    """
    configured = logging.getLogger(name)
    configured.setLevel(level)
    if handler is None:
        path = filename if filename else '%s/logs/%s.log' % (settings.HOME_DIR, name)
        handler = logging.FileHandler(path)
    handler.setFormatter(logging.Formatter(format))
    configured.addHandler(handler)
    return configured
# Module-level default logger, configured only when Django settings
# provide LOG_FILENAME.  ``logger`` must be initialised before it is
# tested: the original ``not logger`` check raised NameError the first
# time the feature was actually enabled.
logger = None
if hasattr(settings, 'LOG_FILENAME') and not logger:
    handler = logging.handlers.TimedRotatingFileHandler(settings.LOG_FILENAME, when = 'midnight')
    logger = get_logger('default', handler=handler)
| leliel12/handy | handy/logger.py | Python | bsd-3-clause | 733 |
import time
import sys,os
lib_path = os.path.abspath('./modules')
sys.path.append(lib_path)
from py532lib.i2c import *
from py532lib.frame import *
from py532lib.constants import *
class NFCmonitor :
    """Polls a PN532 NFC reader over I2C and fires callbacks when a tag
    is presented or removed.  Instantiating it configures the reader
    (hardware side effect).
    """
    # Event identifiers accepted by add_event_detect().
    NEWTAG = 1
    REMOVETAG = 2
    def __init__(self) :
        self.cardIn = False      # True while a tag is on the reader
        self.UUID = []           # UID of the currently-present tag
        self.stopped = False     # set by stop() to leave the start() loop
        self.cbcardin = None     # callback(uid) on tag arrival
        self.cbcardout = None    # callback(uid) on tag removal
        #Initialise NFC_reader
        self.pn532 = Pn532_i2c()
        self.pn532.SAMconfigure()
    def add_event_detect(self,fn,callback) :
        """Register *callback* for NEWTAG or REMOVETAG events."""
        print ('new callback added')
        if fn == self.NEWTAG :
            self.cbcardin = callback
        elif fn == self.REMOVETAG :
            self.cbcardout = callback
    def _trust_uid(self,uid) :
        # Debounce: accept *uid* only if three consecutive reads agree.
        return uid == self.pn532.get_uid() and uid == self.pn532.get_uid() and uid == self.pn532.get_uid()
    def stop(self) :
        """Request the polling loop in start() to terminate."""
        self.stopped = True
    def start(self) :
        """Blocking poll loop: detects tag arrival/removal and invokes
        the registered callbacks.  Runs until stop() is called.
        """
        print ("NFC Monitor started")
        while not self.stopped :
            uid = self.pn532.get_uid()
            if uid == self.UUID :
                # Unchanged state; back off briefly.
                time.sleep(0.2)
            elif uid and self._trust_uid(uid) :
                print ("New Card Detected",uid)
                self.UUID = uid
                if not self.cardIn :
                    self.cardIn = True
                    if self.cbcardin : self.cbcardin(self.UUID)
            elif not uid and self.cardIn and self._trust_uid(uid):
                print ("Card Removed")
                uuid = self.UUID
                self.UUID = None
                self.cardIn = False
                if self.cbcardout : self.cbcardout(uuid)
# Module-level singleton: importing this module initialises the PN532
# reader over I2C immediately (hardware side effect).
NFC = NFCmonitor()
| belese/luciphone | Luciphone/modules/py532lib/NFC.py | Python | gpl-2.0 | 1,850 |
"""
BrowserID support
"""
from social.backends.base import BaseAuth
from social.exceptions import AuthFailed, AuthMissingParameter
class PersonaAuth(BaseAuth):
    """BrowserID (Mozilla Persona) authentication backend."""
    name = 'persona'

    def get_user_id(self, details, response):
        """Use BrowserID email as ID"""
        return details['email']

    def get_user_details(self, response):
        """Return user details; BrowserID only provides the email.

        Example verifier response:
        {'status': 'okay',
         'audience': 'localhost:8000',
         'expires': 1328983575529,
         'email': '[email protected]',
         'issuer': 'browserid.org'}
        """
        email = response['email']
        return {'username': email.split('@', 1)[0],
                'email': email,
                'fullname': '',
                'first_name': '',
                'last_name': ''}

    def extra_data(self, user, uid, response, details):
        """Return users extra data"""
        return {'audience': response['audience'],
                'issuer': response['issuer']}

    def auth_complete(self, *args, **kwargs):
        """Completes login process, must return user instance"""
        # PEP 8 idiom fix: was ``not 'assertion' in self.data``.
        if 'assertion' not in self.data:
            raise AuthMissingParameter(self, 'assertion')
        response = self.get_json('https://browserid.org/verify', data={
            'assertion': self.data['assertion'],
            'audience': self.strategy.request_host()
        }, method='POST')
        if response.get('status') == 'failure':
            raise AuthFailed(self)
        kwargs.update({'response': response, 'backend': self})
        return self.strategy.authenticate(*args, **kwargs)
| nvbn/python-social-auth | social/backends/persona.py | Python | bsd-3-clause | 1,664 |
"""
Module with functionality for splitting and shuffling datasets.
"""
import numpy as np
from sklearn.utils import murmurhash3_32
from spotlight.interactions import Interactions
def _index_or_none(array, shuffle_index):
if array is None:
return None
else:
return array[shuffle_index]
def shuffle_interactions(interactions,
                         random_state=None):
    """
    Shuffle interactions.

    Parameters
    ----------

    interactions: :class:`spotlight.interactions.Interactions`
        The interactions to shuffle.
    random_state: np.random.RandomState, optional
        The random state used for the shuffle.

    Returns
    -------

    interactions: :class:`spotlight.interactions.Interactions`
        The shuffled interactions.
    """
    # Same RNG calls as before (arange + in-place shuffle) so identical
    # seeds produce identical permutations.
    rng = np.random.RandomState() if random_state is None else random_state
    order = np.arange(len(interactions.user_ids))
    rng.shuffle(order)

    return Interactions(interactions.user_ids[order],
                        interactions.item_ids[order],
                        ratings=_index_or_none(interactions.ratings, order),
                        timestamps=_index_or_none(interactions.timestamps, order),
                        weights=_index_or_none(interactions.weights, order),
                        num_users=interactions.num_users,
                        num_items=interactions.num_items)
def random_train_test_split(interactions,
                            test_percentage=0.2,
                            random_state=None):
    """
    Randomly split interactions between training and testing.

    Parameters
    ----------

    interactions: :class:`spotlight.interactions.Interactions`
        The interactions to shuffle.
    test_percentage: float, optional
        The fraction of interactions to place in the test set.
    random_state: np.random.RandomState, optional
        The random state used for the shuffle.

    Returns
    -------

    (train, test): (:class:`spotlight.interactions.Interactions`,
                    :class:`spotlight.interactions.Interactions`)
         A tuple of (train data, test data)
    """
    shuffled = shuffle_interactions(interactions,
                                    random_state=random_state)
    cutoff = int((1.0 - test_percentage) * len(shuffled))

    def _take(idx):
        # Build a new Interactions object restricted to *idx*.
        return Interactions(shuffled.user_ids[idx],
                            shuffled.item_ids[idx],
                            ratings=_index_or_none(shuffled.ratings, idx),
                            timestamps=_index_or_none(shuffled.timestamps, idx),
                            weights=_index_or_none(shuffled.weights, idx),
                            num_users=shuffled.num_users,
                            num_items=shuffled.num_items)

    return _take(slice(None, cutoff)), _take(slice(cutoff, None))
def user_based_train_test_split(interactions,
                                test_percentage=0.2,
                                random_state=None):
    """
    Split interactions between a train and a test set based on
    user ids, so that a given user's entire interaction history
    is either in the train, or the test set.

    Parameters
    ----------

    interactions: :class:`spotlight.interactions.Interactions`
        The interactions to shuffle.
    test_percentage: float, optional
        The fraction of users to place in the test set.
    random_state: np.random.RandomState, optional
        The random state used for the shuffle.

    Returns
    -------

    (train, test): (:class:`spotlight.interactions.Interactions`,
                    :class:`spotlight.interactions.Interactions`)
         A tuple of (train data, test data)
    """

    if random_state is None:
        random_state = np.random.RandomState()

    def _subset(mask):
        # Restrict every per-interaction array to the boolean mask, keeping
        # the optional arrays (ratings/timestamps/weights) as None when absent.
        return Interactions(interactions.user_ids[mask],
                            interactions.item_ids[mask],
                            ratings=_index_or_none(interactions.ratings, mask),
                            timestamps=_index_or_none(interactions.timestamps,
                                                      mask),
                            weights=_index_or_none(interactions.weights, mask),
                            num_users=interactions.num_users,
                            num_items=interactions.num_items)

    # Hash every user id with a seed drawn from the supplied RNG; a user
    # lands in the test set when its bucket (0..99) falls below the cutoff,
    # which keeps each user's whole history on one side of the split.
    minint = np.iinfo(np.uint32).min
    maxint = np.iinfo(np.uint32).max
    seed = random_state.randint(minint, maxint, dtype=np.int64)

    buckets = murmurhash3_32(interactions.user_ids,
                             seed=seed,
                             positive=True) % 100
    in_test = (buckets / 100.0) < test_percentage
    in_train = np.logical_not(in_test)

    return _subset(in_train), _subset(in_test)
| maciejkula/spotlight | spotlight/cross_validation.py | Python | mit | 6,519 |
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
# Example script: renders HTML pages for a hand-picked list of schema.org
# terms using Jinja2 templates (see templateRender below).

import sys
# Require Python 3.6+ (f-strings and ordered dicts are assumed downstream).
if not (sys.version_info.major == 3 and sys.version_info.minor > 5):
    print("Python version %s.%s not supported version 3.6 or above required - exiting" % (sys.version_info.major,sys.version_info.minor))
    sys.exit(1)

# To be executed in the SchemaTerms/example-code/{example} directory
import os
for path in [os.getcwd(),"..","../..","../../.."]: #Adds in current, example-code, and SchemaTerms directory into path
    sys.path.insert( 1, path ) #Pickup libs from local directories

import rdflib
from sdotermsource import *
from sdoterm import *
from localmarkdown import Markdown
import jinja2

# Configure wiki-link rendering for markdown-formatted term comments.
Markdown.setWikilinkCssClass("localLink")
Markdown.setWikilinkPrePath("/")

# Pick the triples dump matching the vocabulary scheme (http vs https).
if VOCABURI.startswith("https://"):
    triplesfile = "../data/schemaorg-all-https.nt"
else:
    triplesfile = "../data/schemaorg-all-http.nt"

# Load the full vocabulary graph and hand it to the term source.
termgraph = rdflib.Graph()
termgraph.parse(triplesfile, format="nt")
print ("loaded %s triples" % len(termgraph))
SdoTermSource.setSourceGraph(termgraph)
print ("Types Count: %s" % len(SdoTermSource.getAllTypes(expanded=False)))
print ("Properties Count: %s" % len(SdoTermSource.getAllProperties(expanded=False)))

###################################################
#JINJA INITIALISATION
###################################################

#Setup Jinja2 environment - template(s) location etc.
#TEMPLATESFOLDER = "SchemaTerms/templates"
TEMPLATESDIR = "templates"
jenv = jinja2.Environment(loader=jinja2.FileSystemLoader(TEMPLATESDIR),
        extensions=['jinja2.ext.autoescape'], autoescape=True, cache_size=0)
### Template rendering for term definitions
#   term: SDO Term definition either simple (strings only)
#         or expanded (nested definitions for related terms)
def templateRender(term):
    """Render the HTML page for a term, selecting the template by term type."""
    # Basic variables configuring UI
    context = {
        'sitename': "SchemaTerms",
        'menu_sel': "Schemas",
        'home_page': "False",
        'href_prefix': "",
        'term': term
    }

    # Template lookup tables, one per flavour of term definition.
    expanded_templates = {
        SdoTerm.TYPE: "expanded/TypePageEx.tpl",
        SdoTerm.PROPERTY: "expanded/PropertyPageEx.tpl",
        SdoTerm.ENUMERATION: "expanded/EnumerationPageEx.tpl",
        SdoTerm.ENUMERATIONVALUE: "expanded/EnumerationValuePageEx.tpl",
        SdoTerm.DATATYPE: "expanded/DataTypePageEx.tpl",
    }
    simple_templates = {
        SdoTerm.TYPE: "simple/TypePage.tpl",
        SdoTerm.PROPERTY: "simple/PropertyPage.tpl",
        SdoTerm.ENUMERATION: "simple/EnumerationPage.tpl",
        SdoTerm.ENUMERATIONVALUE: "simple/EnumerationValuePage.tpl",
        SdoTerm.DATATYPE: "simple/DataTypePage.tpl",
    }

    lookup = expanded_templates if term.expanded else simple_templates
    page = lookup.get(term.termType)
    if not page:
        print("Invalid term type: %s" % term.termType)
        return

    return jenv.get_template(page).render(context)
###################################################
#JINJA INITIALISATION - End
###################################################

#terms = SdoTermSource.getAllTerms()
terms = ["DataType","about","Action","CreativeWork","MonetaryAmount","PronounceableText","Thing","Text","LinkRole","EBook","BookFormatType"]
print("Processing %s terms" % len(terms))

import time,datetime
start = datetime.datetime.now()
lastCount = 0
for t in terms:
    tic = datetime.datetime.now() #diagnostics
    # Fetch the fully expanded term definition and render it to HTML.
    term = SdoTermSource.getTerm(t,expanded=True)
    pageout = templateRender(term)

    # Context manager guarantees the output file is closed even if the
    # write fails part-way (the original leaked the handle on error).
    filename = "html/" + term.id + ".html"
    with open(filename, "w") as f:
        f.write(pageout)

    #diagnostics ##########################################
    termsofar = len(SdoTermSource.termCache()) #diagnostics
    termscreated = termsofar - lastCount #diagnostics
    lastCount = termsofar #diagnostics
    print("Term: %s (%d) - %s" % (t, termscreated, str(datetime.datetime.now()-tic))) #diagnostics
    # Note: (%d) = number of individual newly created (not cached) term definitions to
    # build this expanded definition. ie. All Properties associated with a Type, etc.

print()
print ("All terms took %s seconds" % str(datetime.datetime.now()-start)) #diagnostics
| schemaorg/schemaorg | software/SchemaTerms/example-code/jinjaTemplating/simpleJinjaHtml.py | Python | apache-2.0 | 4,454 |
def parse_csv(text):
    '''
    Parses CSV with a header. Returns a list of dicts, one per line of data.

    Blank lines (including a trailing newline) are ignored, and an empty
    input yields an empty list.  Note: values are split on bare commas;
    quoted fields containing commas are not supported.
    '''
    # splitlines() handles '\n' and '\r\n' and, unlike split('\n'), does not
    # produce a bogus empty final line when the text ends with a newline.
    lines = [line for line in text.splitlines() if line]
    if not lines:
        # No header at all: no data.
        return []

    # The column names are in the header; each remaining line becomes one
    # row, pairing names with values position by position.
    names = lines[0].split(',')
    return [dict(zip(names, line.split(','))) for line in lines[1:]]
| alexhsamuel/codex | solutions/csv.py | Python | mit | 750 |
#!/usr/bin/env python
import sys
from galaxy.util.checkers import is_gzip
def main():
    """
    The format of the file is JSON::

        { "sections" : [
                { "start" : "x", "end" : "y", "sequences" : "z" },
                ...
        ]}

    This works only for UNCOMPRESSED fastq files. The Python GzipFile does not provide seekable
    offsets via tell(), so clients just have to split the slow way
    """
    # argv[1]: input FASTQ path; argv[2]: output JSON table-of-contents path.
    input_fname = sys.argv[1]
    if is_gzip(input_fname):
        # Gzip streams have no meaningful byte offsets to seek to (see docstring).
        print 'Conversion is only possible for uncompressed files'
        sys.exit(1)

    out_file = open(sys.argv[2], 'w')
    current_line = 0
    # Each chunk covers one million sequences; FASTQ records are 4 lines each.
    sequences = 1000000
    lines_per_chunk = 4 * sequences
    chunk_begin = 0

    in_file = open(input_fname)

    out_file.write('{"sections" : [')

    for line in in_file:
        current_line += 1
        if 0 == current_line % lines_per_chunk:
            # NOTE(review): tell() while iterating a file with "for line in f"
            # reflects the position of the interpreter's read-ahead buffer, not
            # the end of the line just yielded, so these offsets may overshoot
            # record boundaries — verify against a readline() loop if exact
            # chunk edges matter.
            chunk_end = in_file.tell()
            out_file.write('{"start":"%s","end":"%s","sequences":"%s"},' % (chunk_begin, chunk_end, sequences))
            chunk_begin = chunk_end

    # Final (possibly partial) chunk: no trailing comma, and the sequence
    # count is whatever remains after the last full chunk.
    chunk_end = in_file.tell()
    out_file.write('{"start":"%s","end":"%s","sequences":"%s"}' % (chunk_begin, chunk_end, (current_line % lines_per_chunk) / 4))
    out_file.write(']}\n')


if __name__ == "__main__":
    main()
| icaoberg/cellorganizer-galaxy-tools | datatypes/converters/fastq_to_fqtoc.py | Python | gpl-3.0 | 1,305 |
def maximum_value(maximum_weight, items):
    """Return the best total value achievable within maximum_weight.

    Classic 0/1 knapsack: each item (a dict with "weight" and "value") may
    be taken at most once.  Uses a one-dimensional DP table over capacities.
    """
    # best[c] holds the best value achievable with capacity c using the
    # items considered so far.
    best = [0] * (maximum_weight + 1)

    for item in items:
        weight, value = item["weight"], item["value"]
        # Walk capacities downwards (stopping above capacity 0) so each item
        # is counted at most once per capacity.
        for capacity in range(maximum_weight, max(weight, 1) - 1, -1):
            candidate = best[capacity - weight] + value
            if candidate > best[capacity]:
                best[capacity] = candidate

    return best[maximum_weight]
| behrtam/xpython | exercises/knapsack/example.py | Python | mit | 653 |
import os
from setuptools import setup, find_packages
def read(fname):
    """Return the contents of *fname*, resolved relative to this file's directory."""
    # The context manager closes the handle even if reading fails; the
    # original left the file object open.
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()
def get_install_requires():
    """Return the dependency specifiers listed in requirements.txt, one per line.

    Blank and whitespace-only lines are skipped.
    """
    # Strip BEFORE filtering: the original filtered on the raw line, so a
    # whitespace-only line produced an empty "" requirement entry, which
    # setuptools rejects.
    return [line.strip() for line in read("requirements.txt").split("\n") if line.strip()]
# Package metadata (name/version) comes from setup.cfg; only the package
# discovery and runtime requirements are declared here.
setup(
    packages=find_packages(),
    install_requires=get_install_requires()
)
| janesolomon/twitter_search | setup.py | Python | mit | 342 |
# Importing required libraries, setting up logging, and loading questions
import logging
import random # noqa: F401
import pandas as pd
from IPython.display import display
from pandas.io.formats.style import Styler
from pybatfish.client.session import Session # noqa: F401
# noinspection PyUnresolvedReferences
from pybatfish.datamodel import Edge, Interface # noqa: F401
from pybatfish.datamodel.answer import TableAnswer
from pybatfish.datamodel.flow import HeaderConstraints, PathConstraints # noqa: F401
from pybatfish.datamodel.route import BgpRoute # noqa: F401
from pybatfish.util import get_html
# Configure all pybatfish loggers to use WARN level
logging.getLogger("pybatfish").setLevel(logging.WARN)

# Render full cell contents and every column (no truncation) in notebooks.
pd.set_option("display.max_colwidth", None)
pd.set_option("display.max_columns", None)
# Prevent rendering text between '$' as MathJax expressions
pd.set_option("display.html.use_mathjax", False)

# UUID for CSS styles used by pandas styler.
# Keeps our notebook HTML deterministic when displaying dataframes
_STYLE_UUID = "pybfstyle"
class MyStyler(Styler):
    """A custom styler for displaying DataFrames in HTML"""

    def __repr__(self):
        # Fall back to the underlying DataFrame's repr so plain-text contexts
        # show the data rather than the Styler object.
        return repr(self.data)
def show(df):
    """
    Displays a dataframe as HTML table.

    Replaces newlines and double-spaces in the input with HTML markup, and
    left-aligns the text.
    """
    # Unwrap a TableAnswer into its underlying DataFrame first.
    if isinstance(df, TableAnswer):
        df = df.frame()

    # Workaround for Pandas bug in Python 2.7: empty (or non-DataFrame)
    # inputs are displayed as-is, without styling.
    renderable = isinstance(df, pd.DataFrame) and df.size != 0
    if not renderable:
        display(df)
        return

    styler = MyStyler(df)
    styler = styler.set_uuid(_STYLE_UUID)
    styler = styler.format(get_html)
    styler = styler.set_properties(**{"text-align": "left",
                                      "vertical-align": "top"})
    display(styler)
| batfish/pybatfish | jupyter_notebooks/startup.py | Python | apache-2.0 | 1,759 |
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import argparse
import fnmatch
import os
import re
import shlex
import shutil
import sys
from collections import defaultdict
from glob import glob
from subprocess import run
from tempfile import NamedTemporaryFile, TemporaryDirectory
from typing import Dict, List, Optional, Tuple
from tabulate import tabulate
from docs.exts.docs_build import dev_index_generator, lint_checks # pylint: disable=no-name-in-module
from docs.exts.docs_build.errors import ( # pylint: disable=no-name-in-module
DocBuildError,
display_errors_summary,
parse_sphinx_warnings,
)
from docs.exts.docs_build.spelling_checks import ( # pylint: disable=no-name-in-module
SpellingError,
display_spelling_error_summary,
parse_spelling_warnings,
)
from docs.exts.provider_yaml_utils import load_package_data # pylint: disable=no-name-in-module
# This script must be run directly, never imported.
if __name__ != "__main__":
    raise Exception(
        "This file is intended to be executed as an executable program. You cannot use it as a module."
        "To run this script, run the ./build_docs.py command"
    )

# Key directories of the source tree, resolved relative to this file.
ROOT_PROJECT_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
ROOT_PACKAGE_DIR = os.path.join(ROOT_PROJECT_DIR, "airflow")
DOCS_DIR = os.path.join(ROOT_PROJECT_DIR, "docs")

# Metadata for every provider package, loaded from the provider.yaml files.
ALL_PROVIDER_YAMLS = load_package_data()

CHANNEL_INVITATION = """\
If you need help, write to #documentation channel on Airflow's Slack.
Channel link: https://apache-airflow.slack.com/archives/CJ1LVREHX
Invitation link: https://s.apache.org/airflow-slack\
"""
class AirflowDocsBuilder:
    """Documentation builder for Airflow.

    Wraps sphinx-build invocations (HTML build and spell checking) for one
    documentation package and knows where that package's sources, doctree
    cache, and build output live.
    """

    def __init__(self, package_name: str):
        # Name of the docs package, e.g. "apache-airflow" or
        # "apache-airflow-providers-google".
        self.package_name = package_name

    @property
    def _doctree_dir(self) -> str:
        # Cache directory for the Sphinx environment and doctree files.
        return f"{DOCS_DIR}/_doctrees/docs/{self.package_name}"

    @property
    def _out_dir(self) -> str:
        # Directory the built HTML is written to.
        if self.package_name == 'apache-airflow-providers':
            # Disable versioning. This documentation does not apply to any issued product and we can update
            # it as needed, i.e. with each new package of providers.
            return f"{DOCS_DIR}/_build/docs/{self.package_name}"
        else:
            return f"{DOCS_DIR}/_build/docs/{self.package_name}/latest"

    @property
    def _src_dir(self) -> str:
        # Directory containing the package's documentation sources.
        # TODO(mik-laj):
        # After migrating the content from the core to providers, we should move all documentation from .
        # to /airflow/ to keep the directory structure more maintainable.
        if self.package_name == 'apache-airflow':
            return DOCS_DIR
        elif self.package_name.startswith('apache-airflow-providers-') or (
            self.package_name == 'apache-airflow-providers'
        ):
            return f"{DOCS_DIR}/{self.package_name}"
        else:
            raise Exception(F"Unsupported package: {self.package_name}")

    def clean_files(self) -> None:
        """Cleanup all artifacts generated by previous builds."""
        api_dir = os.path.join(self._src_dir, "_api")

        # Wipe and recreate both the generated-API dir and the output dir.
        shutil.rmtree(api_dir, ignore_errors=True)
        shutil.rmtree(self._out_dir, ignore_errors=True)
        os.makedirs(api_dir, exist_ok=True)
        os.makedirs(self._out_dir, exist_ok=True)

        print(f"Recreated content of the {shlex.quote(self._out_dir)} and {shlex.quote(api_dir)} folders")

    def check_spelling(self) -> List[SpellingError]:
        """Checks spelling.

        Runs the Sphinx "spelling" builder into a temporary directory and
        collects every reported problem.

        :return: list of spelling errors found (empty when the check passes)
        """
        spelling_errors = []
        with TemporaryDirectory() as tmp_dir:
            build_cmd = [
                "sphinx-build",
                "-W",  # turn warnings into errors
                "-T",  # show full traceback on exception
                "-b",  # builder to use
                "spelling",
                "-c",
                DOCS_DIR,
                "-d",  # path for the cached environment and doctree files
                self._doctree_dir,
                self._src_dir,  # path to documentation source files
                tmp_dir,
            ]
            print("Executing cmd: ", " ".join([shlex.quote(c) for c in build_cmd]))

            env = os.environ.copy()
            env['AIRFLOW_PACKAGE_NAME'] = self.package_name

            completed_proc = run(  # pylint: disable=subprocess-run-check
                build_cmd, cwd=self._src_dir, env=env
            )
            if completed_proc.returncode != 0:
                # Report a non-zero exit as a synthetic error so the failure
                # is never silent, even without parseable warnings.
                spelling_errors.append(
                    SpellingError(
                        file_path=None,
                        line_no=None,
                        spelling=None,
                        suggestion=None,
                        context_line=None,
                        message=(
                            f"Sphinx spellcheck returned non-zero exit status: {completed_proc.returncode}."
                        ),
                    )
                )
            warning_text = ""
            # The spelling builder writes one *.spelling file per source file.
            for filepath in glob(f"{tmp_dir}/**/*.spelling", recursive=True):
                with open(filepath) as speeling_file:
                    warning_text += speeling_file.read()

            spelling_errors.extend(parse_spelling_warnings(warning_text, self._src_dir))
        return spelling_errors

    def build_sphinx_docs(self) -> List[DocBuildError]:
        """Build Sphinx documentation.

        :return: list of build errors (empty when the build succeeds)
        """
        build_errors = []
        with NamedTemporaryFile() as tmp_file:
            build_cmd = [
                "sphinx-build",
                "-T",  # show full traceback on exception
                "--color",  # do emit colored output
                "-b",  # builder to use
                "html",
                "-d",  # path for the cached environment and doctree files
                self._doctree_dir,
                "-c",
                DOCS_DIR,
                "-w",  # write warnings (and errors) to given file
                tmp_file.name,
                self._src_dir,  # path to documentation source files
                self._out_dir,  # path to output directory
            ]
            print("Executing cmd: ", " ".join([shlex.quote(c) for c in build_cmd]))

            env = os.environ.copy()
            env['AIRFLOW_PACKAGE_NAME'] = self.package_name

            completed_proc = run(  # pylint: disable=subprocess-run-check
                build_cmd, cwd=self._src_dir, env=env
            )
            if completed_proc.returncode != 0:
                build_errors.append(
                    DocBuildError(
                        file_path=None,
                        line_no=None,
                        message=f"Sphinx returned non-zero exit status: {completed_proc.returncode}.",
                    )
                )
            tmp_file.seek(0)
            warning_text = tmp_file.read().decode()
            # Remove 7-bit C1 ANSI escape sequences
            warning_text = re.sub(r"\x1B[@-_][0-?]*[ -/]*[@-~]", "", warning_text)
            build_errors.extend(parse_sphinx_warnings(warning_text, self._src_dir))
        return build_errors
def get_available_packages():
    """Get list of all available packages to build."""
    # Core docs packages first, then one entry per provider package.
    packages = ["apache-airflow", "apache-airflow-providers"]
    packages.extend(provider['package-name'] for provider in ALL_PROVIDER_YAMLS)
    return packages
def _get_parser():
    """Build the command-line parser for the documentation builder."""
    # Show the supported packages in the epilog as a bulleted list.
    package_list = "\n".join(" * " + name for name in get_available_packages())
    parser = argparse.ArgumentParser(
        description='Builds documentation and runs spell checking',
        epilog=f"List of supported documentation packages:\n{package_list}",
        formatter_class=argparse.RawTextHelpFormatter,
    )
    parser.add_argument(
        '--disable-checks', dest='disable_checks', action='store_true', help='Disables extra checks'
    )
    parser.add_argument(
        "--package-filter",
        action="append",
        help=(
            "Filter specifying for which packages the documentation is to be built. Wildcard are supported."
        ),
    )
    parser.add_argument('--docs-only', dest='docs_only', action='store_true', help='Only build documentation')
    parser.add_argument(
        '--spellcheck-only', dest='spellcheck_only', action='store_true', help='Only perform spellchecking'
    )
    return parser
def build_docs_for_packages(
    current_packages: List[str], docs_only: bool, spellcheck_only: bool
) -> Tuple[Dict[str, List[DocBuildError]], Dict[str, List[SpellingError]]]:
    """Builds documentation for the given packages.

    Returns a pair of dicts keyed by package name: (build errors,
    spelling errors).  Packages with no errors do not appear as keys.
    """
    all_build_errors: Dict[str, List[DocBuildError]] = defaultdict(list)
    all_spelling_errors: Dict[str, List[SpellingError]] = defaultdict(list)
    for package_name in current_packages:
        print("#" * 20, package_name, "#" * 20)
        builder = AirflowDocsBuilder(package_name=package_name)
        # Always start from a clean slate to avoid stale build artifacts.
        builder.clean_files()
        if not docs_only:
            spelling_errors = builder.check_spelling()
            if spelling_errors:
                all_spelling_errors[package_name].extend(spelling_errors)

        if not spellcheck_only:
            docs_errors = builder.build_sphinx_docs()
            if docs_errors:
                all_build_errors[package_name].extend(docs_errors)

    return all_build_errors, all_spelling_errors
def display_packages_summary(
    build_errors: Dict[str, List[DocBuildError]], spelling_errors: Dict[str, List[SpellingError]]
):
    """Displays a summary that contains information on the number of errors in each packages"""
    # Union of every package that has at least one error of either kind;
    # the None key (general errors) sorts first via the '' fallback.
    package_names = sorted({*build_errors.keys(), *spelling_errors.keys()},
                           key=lambda name: name or '')
    rows = []
    for name in package_names:
        rows.append(
            {
                "Package name": name,
                "Count of doc build errors": len(build_errors.get(name, [])),
                "Count of spelling errors": len(spelling_errors.get(name, [])),
            }
        )
    print("#" * 20, "Packages errors summary", "#" * 20)
    print(tabulate(tabular_data=rows, headers="keys"))
    print("#" * 50)
def print_build_errors_and_exit(
    message: str,
    build_errors: Dict[str, List[DocBuildError]],
    spelling_errors: Dict[str, List[SpellingError]],
) -> None:
    """Prints build errors and exits with status 1 when any are present.

    Returns normally (exit code 0) when both error dicts are empty.
    """
    if build_errors or spelling_errors:
        if build_errors:
            display_errors_summary(build_errors)
            print()
        if spelling_errors:
            display_spelling_error_summary(spelling_errors)
            print()
        print(message)
        display_packages_summary(build_errors, spelling_errors)
        print()
        # Point contributors at the support channel before failing the build.
        print(CHANNEL_INVITATION)
        sys.exit(1)
def main():
    """Main code"""
    args = _get_parser().parse_args()
    available_packages = get_available_packages()
    print("Available packages: ", available_packages)

    docs_only = args.docs_only
    spellcheck_only = args.spellcheck_only
    disable_checks = args.disable_checks
    package_filters = args.package_filter

    print("Current package filters: ", package_filters)
    # Keep only the packages matching at least one (glob-style) filter;
    # with no filters, build everything.
    current_packages = (
        [p for p in available_packages if any(fnmatch.fnmatch(p, f) for f in package_filters)]
        if package_filters
        else available_packages
    )

    print(f"Documentation will be built for {len(current_packages)} package(s): {current_packages}")
    # Keyed by package name; the None key holds cross-package "general" errors.
    all_build_errors: Dict[Optional[str], List[DocBuildError]] = {}
    all_spelling_errors: Dict[Optional[str], List[SpellingError]] = {}
    package_build_errors, package_spelling_errors = build_docs_for_packages(
        current_packages=current_packages,
        docs_only=docs_only,
        spellcheck_only=spellcheck_only,
    )
    if package_build_errors:
        all_build_errors.update(package_build_errors)
    if package_spelling_errors:
        all_spelling_errors.update(package_spelling_errors)

    # Packages whose build failed only because an intersphinx inventory could
    # not be reached are retried once: such failures are usually transient
    # network errors.
    to_retry_packages = [
        package_name
        for package_name, errors in package_build_errors.items()
        if any(
            'failed to reach any of the inventories with the following issues' in e.message for e in errors
        )
    ]
    if to_retry_packages:
        # Drop the first attempt's errors for retried packages so a successful
        # retry leaves them error-free.
        for package_name in to_retry_packages:
            if package_name in all_build_errors:
                del all_build_errors[package_name]
            if package_name in all_spelling_errors:
                del all_spelling_errors[package_name]

        package_build_errors, package_spelling_errors = build_docs_for_packages(
            current_packages=to_retry_packages,
            docs_only=docs_only,
            spellcheck_only=spellcheck_only,
        )
        if package_build_errors:
            all_build_errors.update(package_build_errors)
        if package_spelling_errors:
            all_spelling_errors.update(package_spelling_errors)

    if not disable_checks:
        # Cross-package lint checks, attributed to no particular package.
        general_errors = []
        general_errors.extend(lint_checks.check_guide_links_in_operator_descriptions())
        general_errors.extend(lint_checks.check_enforce_code_block())
        general_errors.extend(lint_checks.check_exampleinclude_for_example_dags())
        if general_errors:
            all_build_errors[None] = general_errors

    dev_index_generator.generate_index(f"{DOCS_DIR}/_build/index.html")

    # Exits with status 1 when any errors were collected.
    print_build_errors_and_exit(
        "The documentation has errors.",
        all_build_errors,
        all_spelling_errors,
    )


main()
| mrkm4ntr/incubator-airflow | docs/build_docs.py | Python | apache-2.0 | 14,120 |
from setuptools import setup
# Minimal sample project: a single top-level module, no packages.
setup(
    name="singlemodule",
    version="0.0.1",
    description="A sample Python project with a single module",
    py_modules=["singlemodule"],
)
| sbidoul/pip | tests/data/src/singlemodule/setup.py | Python | mit | 182 |
from O365.attachment import Attachment
from O365.contact import Contact
from O365.group import Group
import logging
import json
import requests
log = logging.getLogger(__name__)
class Message(object):
    '''
    Management of the process of sending, receiving, reading, and editing emails.

    Note: the get and set methods are technically superfluous. You can get more thorough
    control over a message you are trying to craft through editing message.json directly,
    but these methods provide an easy way if you don't need all the power and would like
    the ease.

    Methods:
        constructor -- creates a new message class, using json for existing, nothing for new.
        fetchAttachments -- kicks off the process that downloads attachments.
        sendMessage -- take local variables and form them to send the message.
        markAsRead -- marks the analogous message in the cloud as read.
        getSender -- gets a dictionary with the sender's information.
        getSenderEmail -- gets the email address of the sender.
        getSenderName -- gets the name of the sender, if possible.
        getSubject -- gets the email's subject line.
        getBody -- gets contents of the body of the email.
        addRecipient -- adds a person to the recipient list.
        setRecipients -- sets the list of recipients.
        setSubject -- sets the subject line.
        setBody -- sets the body.

    Variables:
        att_url -- url for requesting attachments. takes message GUID
        send_url -- url for sending an email
        update_url -- url for updating an email already existing in the cloud.
    '''
    att_url = 'https://outlook.office365.com/api/v1.0/me/messages/{0}/attachments'
    send_url = 'https://outlook.office365.com/api/v1.0/me/sendmail'
    draft_url = 'https://outlook.office365.com/api/v1.0/me/folders/{folder_id}/messages'
    update_url = 'https://outlook.office365.com/api/v1.0/me/messages/{0}'

    def __init__(self, json=None, auth=None, verify=True):
        '''
        Makes a new message wrapper for sending and receiving messages.

        Keyword Arguments:
            json (default = None) -- Takes json if you have a pre-existing message to create from.
                this is mostly used inside the library for when new messages are downloaded.
            auth (default = None) -- Takes an (email,password) tuple that will be used for
                authentication with office365.
            verify (default = True) -- whether to verify SSL certificates on HTTP requests.
        '''
        if json:
            self.json = json
            self.hasAttachments = json['HasAttachments']
        else:
            # Skeleton for a brand new outgoing message.
            self.json = {'Message': {'Body': {}},
                         'ToRecipients': [], 'CcRecipients': [], 'BccRecipients': []}
            self.hasAttachments = False

        self.auth = auth
        self.attachments = []
        # NOTE: attribute name (including its typo) kept for backward compatibility.
        self.reciever = None
        self.verify = verify

    def fetchAttachments(self):
        '''kicks off the process that downloads attachments locally.'''
        if not self.hasAttachments:
            log.debug('message has no attachments, skipping out early.')
            return False

        response = requests.get(self.att_url.format(
            self.json['Id']), auth=self.auth, verify=self.verify)
        log.info('response from O365 for retriving message attachments: %s', str(response))
        json = response.json()

        for att in json['value']:
            try:
                self.attachments.append(Attachment(att))
                log.debug('successfully downloaded attachment for: %s.', self.auth[0])
            except Exception as e:
                # Include the reason in the log instead of discarding it.
                log.info('failed to download attachment for: %s (%s)', self.auth[0], str(e))

        return len(self.attachments)

    def sendMessage(self):
        '''takes local variables and forms them into a message to be sent.'''
        headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}

        try:
            data = {'Message': {'Body': {}}}
            data['Message']['Subject'] = self.json['Subject']
            data['Message']['Body']['Content'] = self.json['Body']['Content']
            data['Message']['Body']['ContentType'] = self.json['Body']['ContentType']
            data['Message']['ToRecipients'] = self.json['ToRecipients']
            data['Message']['CcRecipients'] = self.json['CcRecipients']
            data['Message']['BccRecipients'] = self.json['BccRecipients']
            data['Message']['Attachments'] = [att.json for att in self.attachments]
            data = json.dumps(data)
        except Exception as e:
            log.error(
                'Error while trying to compile the json string to send: {0}'.format(str(e)))
            return False

        response = requests.post(
            self.send_url, data, headers=headers, auth=self.auth, verify=self.verify)
        log.debug('response from server for sending message:' + str(response))
        log.debug("respnse body: {}".format(response.text))
        # Office365 acknowledges an accepted send with 202.
        if response.status_code != 202:
            return False

        return True

    def markAsRead(self):
        '''marks analogous message as read in the cloud.'''
        read = '{"IsRead":true}'
        headers = {'Content-type': 'application/json', 'Accept': 'application/json'}
        try:
            # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
            # are no longer swallowed.
            requests.patch(self.update_url.format(
                self.json['Id']), read, headers=headers, auth=self.auth, verify=self.verify)
        except Exception as e:
            log.debug('failed to mark message as read: %s', str(e))
            return False
        return True

    def getSender(self):
        '''get all available information for the sender of the email.'''
        return self.json['Sender']

    def getSenderEmail(self):
        '''get the email address of the sender.'''
        return self.json['Sender']['EmailAddress']['Address']

    def getSenderName(self):
        '''try to get the name of the sender.'''
        try:
            return self.json['Sender']['EmailAddress']['Name']
        except (KeyError, TypeError):
            # Sender block or name missing -- fall back to an empty string.
            return ''

    def getSubject(self):
        '''get email subject line.'''
        return self.json['Subject']

    def getBody(self):
        '''get email body.'''
        return self.json['Body']['Content']

    def setRecipients(self, val, r_type="To"):
        '''
        set the recipient list.

        val: the one argument this method takes can be very flexible. you can send:
            a dictionary: this must to be a dictionary formated as such:
                {"EmailAddress":{"Address":"[email protected]"}}
                with other options such ass "Name" with address. but at minimum
                it must have this.
            a list: this must to be a list of libraries formatted the way
                specified above, or it can be a list of dictionary objects of
                type Contact or it can be an email address as string. The
                method will sort out the libraries from the contacts.
            a string: this is if you just want to throw an email address.
            a contact: type Contact from this dictionary.
            a group: type Group, which is a list of contacts.
        For each of these argument types the appropriate action will be taken
        to fit them to the needs of the library.
        '''
        log.debug("Entered SET_RECIPIENTS function with type: {}".format(r_type))
        # Reset the list first; this is a "set", not an "append".
        self.json[r_type + 'Recipients'] = []

        if isinstance(val, list):
            for con in val:
                if isinstance(con, Contact):
                    self.addRecipient(con, r_type=r_type)
                elif isinstance(con, str):
                    if '@' in con:
                        self.addRecipient(con, r_type=r_type)
                elif isinstance(con, dict):
                    # Pre-formatted recipient dict: store as-is.
                    self.json[r_type + 'Recipients'].append(con)
        elif isinstance(val, dict):
            self.json[r_type + 'Recipients'] = [val]
        elif isinstance(val, str):
            if '@' in val:
                self.addRecipient(val, r_type=r_type)
        elif isinstance(val, Contact):
            self.addRecipient(val, r_type=r_type)
        elif isinstance(val, Group):
            for person in val:
                self.addRecipient(person, r_type=r_type)
        else:
            return False
        return True

    def addRecipient(self, address, name=None, r_type="To"):
        '''
        Adds a recipient to the recipients list.

        Arguments:
            address -- the email address of the person you are sending to. <<< Important that.
                Address can also be of type Contact or type Group.
            name -- the name of the person you are sending to. mostly just a decorator. If you
                send an email address for the address arg, this will give you the ability
                to set the name properly, other wise it uses the email address up to the
                at sign for the name. But if you send a type Contact or type Group, this
                argument is completely ignored.
            r_type -- which recipient list to append to: "To", "Cc" or "Bcc".
        '''
        if isinstance(address, Contact):
            self.json[r_type + 'Recipients'].append(address.getFirstEmailAddress())
        elif isinstance(address, Group):
            # BUGFIX: previously appended the group's own address once per
            # member instead of each member's address.
            for con in address.contacts:
                self.json[r_type + 'Recipients'].append(con.getFirstEmailAddress())
        else:
            if name is None:
                # Default the display name to the local part of the address.
                name = address[:address.index('@')]
            self.json[r_type + 'Recipients'].append(
                {'EmailAddress': {'Address': address, 'Name': name}})

    def setSubject(self, val):
        '''Sets the subject line of the email.'''
        self.json['Subject'] = val

    def setBody(self, val):
        '''Sets the body content of the email (plain text).'''
        # setdefault replaces the original retry loop: create the Body dict
        # when missing, then fill it in.
        body = self.json.setdefault('Body', {})
        body['Content'] = val
        body['ContentType'] = 'Text'

    def setBodyHTML(self, val=None):
        '''
        Sets the body content type to HTML for your pretty emails.

        arguments:
            val -- Default: None. The content of the body you want set. If you don't pass a
                value it is just ignored.
        '''
        body = self.json.setdefault('Body', {})
        body['ContentType'] = 'HTML'
        if val:
            body['Content'] = val

# To the King!
| roycem90/python-o365 | O365/message.py | Python | apache-2.0 | 9,031 |
from __future__ import unicode_literals
from frappe import _

# Basic app metadata consumed by the Frappe framework.
app_name = "erpnext"
app_title = "ERPNext"
app_publisher = "Frappe Technologies Pvt. Ltd."
app_description = """ERP made simple"""
app_icon = "fa fa-th"
app_color = "#e74c3c"
app_email = "[email protected]"
app_license = "GNU General Public License (v3)"
source_link = "https://github.com/frappe/erpnext"
develop_version = '12.x.x-develop'
staging_version = '11.0.3-beta.37'

# error_report_email = "[email protected]"

docs_app = "foundation"

# Bundled JS/CSS assets injected into desk and website pages.
app_include_js = "assets/js/erpnext.min.js"
app_include_css = "assets/css/erpnext.css"
web_include_js = "assets/js/erpnext-web.min.js"
web_include_css = "assets/css/erpnext-web.css"

# Extra client scripts attached to core (framework) doctypes.
doctype_js = {
	"Communication": "public/js/communication.js",
	"Event": "public/js/event.js"
}

welcome_email = "erpnext.setup.utils.welcome_email"

# setup wizard
setup_wizard_requires = "assets/erpnext/js/setup_wizard.js"
setup_wizard_stages = "erpnext.setup.setup_wizard.setup_wizard.get_setup_stages"
setup_wizard_test = "erpnext.setup.setup_wizard.test_setup_wizard.run_setup_wizard_test"

# Install / boot / notification hooks (dotted-path strings resolved by Frappe).
before_install = "erpnext.setup.install.check_setup_wizard_not_completed"
after_install = "erpnext.setup.install.after_install"
boot_session = "erpnext.startup.boot.boot_session"
notification_config = "erpnext.startup.notifications.get_notification_config"
get_help_messages = "erpnext.utilities.activation.get_help_messages"
get_user_progress_slides = "erpnext.utilities.user_progress.get_user_progress_slides"
update_and_get_user_progress = "erpnext.utilities.user_progress_utils.update_default_domain_actions_and_get_state"

# Shopping-cart session bookkeeping.
on_session_creation = "erpnext.shopping_cart.utils.set_cart_count"
on_logout = "erpnext.shopping_cart.utils.clear_cart_count"

# Doctypes rendered with a tree view on desk.
treeviews = ['Account', 'Cost Center', 'Warehouse', 'Item Group', 'Customer Group', 'Sales Person', 'Territory', 'Assessment Group']

# website
update_website_context = "erpnext.shopping_cart.utils.update_website_context"
my_account_context = "erpnext.shopping_cart.utils.update_my_account_context"

# Doctypes that accept replies appended from incoming email.
email_append_to = ["Job Applicant", "Lead", "Opportunity", "Issue"]

# Doctypes exposed in the calendar view.
calendars = ["Task", "Work Order", "Leave Application", "Sales Order", "Holiday List", "Course Schedule"]

# Vertical domains selectable during setup; values are the module paths
# holding each domain's configuration.
domains = {
	'Agriculture': 'erpnext.domains.agriculture',
	'Distribution': 'erpnext.domains.distribution',
	'Education': 'erpnext.domains.education',
	'Healthcare': 'erpnext.domains.healthcare',
	'Hospitality': 'erpnext.domains.hospitality',
	'Manufacturing': 'erpnext.domains.manufacturing',
	'Non Profit': 'erpnext.domains.non_profit',
	'Retail': 'erpnext.domains.retail',
	'Services': 'erpnext.domains.services',
}

# Doctypes that generate their own public website pages.
website_generators = ["Item Group", "Item", "BOM", "Sales Partner",
	"Job Opening", "Student Admission"]

website_context = {
	"favicon": 	"/assets/erpnext/images/favicon.png",
	"splash_image": "/assets/erpnext/images/erp-icon.svg"
}
# Website route rewrites: list routes (e.g. /orders) map straight to a
# doctype list view; detail routes map to the generic "order" template
# with breadcrumb "parents" metadata.
website_route_rules = [
	{"from_route": "/orders", "to_route": "Sales Order"},
	{"from_route": "/orders/<path:name>", "to_route": "order",
		"defaults": {
			"doctype": "Sales Order",
			"parents": [{"label": _("Orders"), "route": "orders"}]
		}
	},
	{"from_route": "/invoices", "to_route": "Sales Invoice"},
	{"from_route": "/invoices/<path:name>", "to_route": "order",
		"defaults": {
			"doctype": "Sales Invoice",
			"parents": [{"label": _("Invoices"), "route": "invoices"}]
		}
	},
	{"from_route": "/supplier-quotations", "to_route": "Supplier Quotation"},
	{"from_route": "/supplier-quotations/<path:name>", "to_route": "order",
		"defaults": {
			"doctype": "Supplier Quotation",
			"parents": [{"label": _("Supplier Quotation"), "route": "supplier-quotations"}]
		}
	},
	{"from_route": "/quotations", "to_route": "Quotation"},
	{"from_route": "/quotations/<path:name>", "to_route": "order",
		"defaults": {
			"doctype": "Quotation",
			"parents": [{"label": _("Quotations"), "route": "quotations"}]
		}
	},
	{"from_route": "/shipments", "to_route": "Delivery Note"},
	{"from_route": "/shipments/<path:name>", "to_route": "order",
		"defaults": {
			"doctype": "Delivery Note",
			"parents": [{"label": _("Shipments"), "route": "shipments"}]
		}
	},
	{"from_route": "/rfq", "to_route": "Request for Quotation"},
	{"from_route": "/rfq/<path:name>", "to_route": "rfq",
		"defaults": {
			"doctype": "Request for Quotation",
			"parents": [{"label": _("Request for Quotation"), "route": "rfq"}]
		}
	},
	{"from_route": "/addresses", "to_route": "Address"},
	{"from_route": "/addresses/<path:name>", "to_route": "addresses",
		"defaults": {
			"doctype": "Address",
			"parents": [{"label": _("Addresses"), "route": "addresses"}]
		}
	},
	{"from_route": "/jobs", "to_route": "Job Opening"},
	{"from_route": "/admissions", "to_route": "Student Admission"},
	{"from_route": "/boms", "to_route": "BOM"},
	{"from_route": "/timesheets", "to_route": "Timesheet"},
]
# Items shown in the portal sidebar for logged-in website users; a
# "role" key restricts the item to users holding that role.
# FIX: removed the "Timesheets" entry that was listed twice.
standard_portal_menu_items = [
	{"title": _("Personal Details"), "route": "/personal-details", "reference_doctype": "Patient", "role": "Patient"},
	{"title": _("Projects"), "route": "/project", "reference_doctype": "Project"},
	{"title": _("Request for Quotations"), "route": "/rfq", "reference_doctype": "Request for Quotation", "role": "Supplier"},
	{"title": _("Supplier Quotation"), "route": "/supplier-quotations", "reference_doctype": "Supplier Quotation", "role": "Supplier"},
	{"title": _("Quotations"), "route": "/quotations", "reference_doctype": "Quotation", "role":"Customer"},
	{"title": _("Orders"), "route": "/orders", "reference_doctype": "Sales Order", "role":"Customer"},
	{"title": _("Invoices"), "route": "/invoices", "reference_doctype": "Sales Invoice", "role":"Customer"},
	{"title": _("Shipments"), "route": "/shipments", "reference_doctype": "Delivery Note", "role":"Customer"},
	{"title": _("Issues"), "route": "/issues", "reference_doctype": "Issue", "role":"Customer"},
	{"title": _("Addresses"), "route": "/addresses", "reference_doctype": "Address"},
	{"title": _("Timesheets"), "route": "/timesheets", "reference_doctype": "Timesheet", "role":"Customer"},
	{"title": _("Lab Test"), "route": "/lab-test", "reference_doctype": "Lab Test", "role":"Patient"},
	{"title": _("Prescription"), "route": "/prescription", "reference_doctype": "Patient Encounter", "role":"Patient"},
	{"title": _("Patient Appointment"), "route": "/patient-appointments", "reference_doctype": "Patient Appointment", "role":"Patient"},
	{"title": _("Fees"), "route": "/fees", "reference_doctype": "Fees", "role":"Student"},
	{"title": _("Newsletter"), "route": "/newsletters", "reference_doctype": "Newsletter"},
	{"title": _("Admission"), "route": "/admissions", "reference_doctype": "Student Admission"},
	{"title": _("Certification"), "route": "/certification", "reference_doctype": "Certification Application"},
]
# Roles granted automatically to website users linked through the given
# doctype/email field.
default_roles = [
	{'role': 'Customer', 'doctype':'Contact', 'email_field': 'email_id'},
	{'role': 'Supplier', 'doctype':'Contact', 'email_field': 'email_id'},
	{'role': 'Student', 'doctype':'Student', 'email_field': 'student_email_id'},
]

# Per-doctype permission callbacks for portal list/detail pages.
has_website_permission = {
	"Sales Order": "erpnext.controllers.website_list_for_contact.has_website_permission",
	"Quotation": "erpnext.controllers.website_list_for_contact.has_website_permission",
	"Sales Invoice": "erpnext.controllers.website_list_for_contact.has_website_permission",
	"Supplier Quotation": "erpnext.controllers.website_list_for_contact.has_website_permission",
	"Delivery Note": "erpnext.controllers.website_list_for_contact.has_website_permission",
	"Issue": "erpnext.support.doctype.issue.issue.has_website_permission",
	"Timesheet": "erpnext.controllers.website_list_for_contact.has_website_permission",
	"Lab Test": "erpnext.healthcare.web_form.lab_test.lab_test.has_website_permission",
	"Patient Encounter": "erpnext.healthcare.web_form.prescription.prescription.has_website_permission",
	"Patient Appointment": "erpnext.healthcare.web_form.patient_appointments.patient_appointments.has_website_permission",
	"Patient": "erpnext.healthcare.web_form.personal_details.personal_details.has_website_permission"
}

dump_report_map = "erpnext.startup.report_data_map.data_map"

before_tests = "erpnext.setup.utils.before_tests"

# Custom link-field search queries.
standard_queries = {
	"Customer": "erpnext.selling.doctype.customer.customer.get_customer_list",
	"Healthcare Practitioner": "erpnext.healthcare.doctype.healthcare_practitioner.healthcare_practitioner.get_practitioner_list"
}

# Document lifecycle hooks; a key may be a single doctype or a tuple of
# doctypes sharing the same handlers.
doc_events = {
	"Stock Entry": {
		"on_submit": "erpnext.stock.doctype.material_request.material_request.update_completed_and_requested_qty",
		"on_cancel": "erpnext.stock.doctype.material_request.material_request.update_completed_and_requested_qty"
	},
	"User": {
		"after_insert": "frappe.contacts.doctype.contact.contact.update_contact",
		"validate": "erpnext.hr.doctype.employee.employee.validate_employee_role",
		"on_update": ["erpnext.hr.doctype.employee.employee.update_user_permissions",
			"erpnext.portal.utils.set_default_role"]
	},
	("Sales Taxes and Charges Template", 'Price List'): {
		"on_update": "erpnext.shopping_cart.doctype.shopping_cart_settings.shopping_cart_settings.validate_cart_settings"
	},
	"Website Settings": {
		"validate": "erpnext.portal.doctype.products_settings.products_settings.home_page_is_products"
	},
	"Sales Invoice": {
		"on_submit": "erpnext.regional.france.utils.create_transaction_log",
		"on_trash": "erpnext.regional.check_deletion_permission"
	},
	"Payment Entry": {
		"on_submit": ["erpnext.regional.france.utils.create_transaction_log", "erpnext.accounts.doctype.payment_request.payment_request.make_status_as_paid"],
		"on_trash": "erpnext.regional.check_deletion_permission"
	},
	'Address': {
		'validate': 'erpnext.regional.india.utils.validate_gstin_for_india'
	},
	('Sales Invoice', 'Purchase Invoice', 'Delivery Note'): {
		'validate': 'erpnext.regional.india.utils.set_place_of_supply'
	},
	"Contact":{
		"on_trash": "erpnext.support.doctype.issue.issue.update_issue"
	}
}
scheduler_events = {
"all": [
"erpnext.projects.doctype.project.project.project_status_update_reminder"
],
"hourly": [
'erpnext.hr.doctype.daily_work_summary_group.daily_work_summary_group.trigger_emails',
"erpnext.accounts.doctype.subscription.subscription.process_all",
"erpnext.erpnext_integrations.doctype.amazon_mws_settings.amazon_mws_settings.schedule_get_order_details",
"erpnext.projects.doctype.project.project.hourly_reminder",
"erpnext.projects.doctype.project.project.collect_project_status"
],
"daily": [
"erpnext.stock.reorder_item.reorder_item",
"erpnext.setup.doctype.email_digest.email_digest.send",
"erpnext.support.doctype.issue.issue.auto_close_tickets",
"erpnext.crm.doctype.opportunity.opportunity.auto_close_opportunity",
"erpnext.controllers.accounts_controller.update_invoice_status",
"erpnext.accounts.doctype.fiscal_year.fiscal_year.auto_create_fiscal_year",
"erpnext.hr.doctype.employee.employee.send_birthday_reminders",
"erpnext.projects.doctype.task.task.set_tasks_as_overdue",
"erpnext.assets.doctype.asset.depreciation.post_depreciation_entries",
"erpnext.hr.doctype.daily_work_summary_group.daily_work_summary_group.send_summary",
"erpnext.stock.doctype.serial_no.serial_no.update_maintenance_status",
"erpnext.buying.doctype.supplier_scorecard.supplier_scorecard.refresh_scorecards",
"erpnext.setup.doctype.company.company.cache_companies_monthly_sales_history",
"erpnext.assets.doctype.asset.asset.update_maintenance_status",
"erpnext.assets.doctype.asset.asset.make_post_gl_entry",
"erpnext.crm.doctype.contract.contract.update_status_for_contracts",
"erpnext.projects.doctype.project.project.update_project_sales_billing",
"erpnext.projects.doctype.project.project.send_project_status_email_to_users"
],
"daily_long": [
"erpnext.manufacturing.doctype.bom_update_tool.bom_update_tool.update_latest_price_in_all_boms"
],
"monthly": [
"erpnext.accounts.deferred_revenue.convert_deferred_revenue_to_income",
"erpnext.accounts.deferred_revenue.convert_deferred_expense_to_expense",
"erpnext.hr.utils.allocate_earned_leaves"
]
}
email_brand_image = "assets/erpnext/images/erpnext-logo.jpg"
default_mail_footer = """
<span>
Sent via
<a class="text-muted" href="https://erpnext.com?source=via_email_footer" target="_blank">
ERPNext
</a>
</span>
"""
get_translated_dict = {
("doctype", "Global Defaults"): "frappe.geo.country_info.get_translated_dict"
}
bot_parsers = [
'erpnext.utilities.bot.FindItemBot',
]
get_site_info = 'erpnext.utilities.get_site_info'
payment_gateway_enabled = "erpnext.accounts.utils.create_payment_gateway_account"
regional_overrides = {
'France': {
'erpnext.tests.test_regional.test_method': 'erpnext.regional.france.utils.test_method'
},
'India': {
'erpnext.tests.test_regional.test_method': 'erpnext.regional.india.utils.test_method',
'erpnext.controllers.taxes_and_totals.get_itemised_tax_breakup_header': 'erpnext.regional.india.utils.get_itemised_tax_breakup_header',
'erpnext.controllers.taxes_and_totals.get_itemised_tax_breakup_data': 'erpnext.regional.india.utils.get_itemised_tax_breakup_data',
'erpnext.accounts.party.get_regional_address_details': 'erpnext.regional.india.utils.get_regional_address_details',
'erpnext.hr.utils.calculate_annual_eligible_hra_exemption': 'erpnext.regional.india.utils.calculate_annual_eligible_hra_exemption',
'erpnext.hr.utils.calculate_hra_exemption_for_period': 'erpnext.regional.india.utils.calculate_hra_exemption_for_period'
},
'United Arab Emirates': {
'erpnext.controllers.taxes_and_totals.update_itemised_tax_data': 'erpnext.regional.united_arab_emirates.utils.update_itemised_tax_data'
},
'Saudi Arabia': {
'erpnext.controllers.taxes_and_totals.update_itemised_tax_data': 'erpnext.regional.united_arab_emirates.utils.update_itemised_tax_data'
}
}
| ESS-LLP/erpnext-medical | erpnext/hooks.py | Python | gpl-3.0 | 13,893 |
import logging
from waitlist.storage.database import Waitlist, Character, HistoryEntry
from waitlist.base import db
import flask
from waitlist.data.sse import GongSSE, send_server_sent_event
from waitlist.utility.history_utils import create_history_object
from flask_login import current_user
from waitlist.ts3.connection import send_poke
from ts3.query import TS3QueryError
from waitlist.utility import config
logger = logging.getLogger(__name__)
def send_notification(player_id: int, waitlist_id: int, message: str = "You are invited to fleet as {0}") -> None:
    """Notify a waitlisted character that he is wanted in fleet.

    Pushes a "gong" server-sent event to the player's browser, pokes the
    character on TeamSpeak (unless disabled globally or by the character),
    and records a history entry attributed to the current user.

    ``message`` is a format string; ``{0}`` is replaced with the waitlist
    name before being sent as the TeamSpeak poke text.

    Aborts the current request with HTTP 400 when player_id is None or
    waitlist_id does not exist.
    """
    if player_id is None:
        logger.error("Tried to send notification to player with None ID.")
        flask.abort(400, "Tried to send notification to player with None ID")

    # lets check that the given wl exists
    waitlist = db.session.query(Waitlist).get(waitlist_id)
    if waitlist is None:
        logger.error("Given waitlist id %s is not valid.", str(waitlist_id))
        flask.abort(400, f"Given waitlist id {waitlist_id} is not valid.")

    # don't remove from queue
    # queue = db.session.query(Waitlist).filter(Waitlist.name == WaitlistNames.xup_queue).first()
    # db.session.query(WaitlistEntry).filter((WaitlistEntry.user == playerId)
    # & (WaitlistEntry.waitlist_id != queue.id)).delete()
    # db.session.commit()

    # Push the browser-side gong notification.
    event = GongSSE(player_id)
    send_server_sent_event(event)
    # publish(event)

    character = db.session.query(Character).filter(Character.id == player_id).first()
    if not config.disable_teamspeak and character.poke_me:  # only poke if he didn't disable it
        try:
            message = message.format(waitlist.name)
            send_poke(character.eve_name, message)
        except TS3QueryError:
            pass  # ignore it a user that is not on TS

    # Record who notified whom, tagged with the waitlist's group id.
    h_entry = create_history_object(character.get_eve_id(), HistoryEntry.EVENT_COMP_NOTI_PL, current_user.id)
    h_entry.exref = waitlist.group.groupID
    db.session.add(h_entry)
    db.session.commit()
    logger.info("%s send notification to %s.", current_user.username, character.eve_name)
| SpeedProg/eve-inc-waitlist | waitlist/utility/notifications.py | Python | mit | 2,084 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <[email protected]>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:module: watchdog.utils.dirsnapshot
:synopsis: Directory snapshots and comparison.
:author: [email protected] (Yesudeep Mangalapilly)
.. ADMONITION:: Where are the moved events? They "disappeared"
This implementation does not take partition boundaries
into consideration. It will only work when the directory
tree is entirely on the same file system. More specifically,
any part of the code that depends on inode numbers can
break if partition boundaries are crossed. In these cases,
the snapshot diff will represent file/directory movement as
created and deleted events.
Windows does not have any concept of ``inodes``, which prevents
this snapshotter from determining file or directory renames/movement
on it. The snapshotter does not try to handle this on Windows.
File or directory movement will show up as creation and deletion
events.
Please do not use this on a virtual file system mapped to
a network share.
Classes
-------
.. autoclass:: DirectorySnapshot
:members:
:show-inheritance:
.. autoclass:: DirectorySnapshotDiff
:members:
:show-inheritance:
"""
import os
import sys
import stat
from pathtools.path import walk as path_walk, absolute_path
# Use an insertion-ordered set (for deterministic diff ordering) where
# available.  BUG FIX: ``sys.version`` is a *string*; comparing it with a
# tuple is always False on Python 2 (str sorts before tuple, so OrderedSet
# was never imported) and raises TypeError on Python 3.  Compare the
# ``sys.version_info`` tuple instead.
if sys.version_info >= (2, 6, 0):
    from watchdog.utils.bricks import OrderedSet as set
class DirectorySnapshotDiff(object):
    """
    Compares two directory snapshots and creates an object that represents
    the difference between the two snapshots.

    :param ref_dirsnap:
        The reference directory snapshot.
    :type ref_dirsnap:
        :class:`DirectorySnapshot`
    :param dirsnap:
        The directory snapshot which will be compared
        with the reference snapshot.
    :type dirsnap:
        :class:`DirectorySnapshot`
    """

    def __init__(self, ref_dirsnap, dirsnap):
        """Compute the diff; results are exposed via the read-only properties."""
        # Result buckets, filled in below.
        self._files_deleted = list()
        self._files_modified = list()
        self._files_created = list()
        self._files_moved = list()

        self._dirs_modified = list()
        self._dirs_moved = list()
        self._dirs_deleted = list()
        self._dirs_created = list()

        # Detect all the modifications.
        # A path is "modified" when it exists in both snapshots with the
        # same inode but a different modification time.
        for path, stat_info in dirsnap.stat_snapshot.items():
            if path in ref_dirsnap.stat_snapshot:
                ref_stat_info = ref_dirsnap.stat_info(path)
                if stat_info.st_ino == ref_stat_info.st_ino and stat_info.st_mtime != ref_stat_info.st_mtime:
                    if stat.S_ISDIR(stat_info.st_mode):
                        self._dirs_modified.append(path)
                    else:
                        self._files_modified.append(path)

        paths_deleted = ref_dirsnap.paths - dirsnap.paths
        paths_created = dirsnap.paths - ref_dirsnap.paths

        # Detect all the moves/renames.
        # Doesn't work on Windows (no inodes), so exclude on Windows.
        # A created/deleted pair sharing an inode is reported as a move;
        # we iterate over copies because the sets shrink during the loop.
        if not sys.platform.startswith('win'):
            for created_path in paths_created.copy():
                created_stat_info = dirsnap.stat_info(created_path)
                for deleted_path in paths_deleted.copy():
                    deleted_stat_info = ref_dirsnap.stat_info(deleted_path)
                    if created_stat_info.st_ino == deleted_stat_info.st_ino:
                        paths_deleted.remove(deleted_path)
                        paths_created.remove(created_path)
                        if stat.S_ISDIR(created_stat_info.st_mode):
                            self._dirs_moved.append((deleted_path, created_path))
                        else:
                            self._files_moved.append((deleted_path, created_path))

        # Now that we have renames out of the way, enlist the deleted and
        # created files/directories.
        for path in paths_deleted:
            stat_info = ref_dirsnap.stat_info(path)
            if stat.S_ISDIR(stat_info.st_mode):
                self._dirs_deleted.append(path)
            else:
                self._files_deleted.append(path)
        for path in paths_created:
            stat_info = dirsnap.stat_info(path)
            if stat.S_ISDIR(stat_info.st_mode):
                self._dirs_created.append(path)
            else:
                self._files_created.append(path)

    @property
    def files_created(self):
        """List of files that were created."""
        return self._files_created

    @property
    def files_deleted(self):
        """List of files that were deleted."""
        return self._files_deleted

    @property
    def files_modified(self):
        """List of files that were modified."""
        return self._files_modified

    @property
    def files_moved(self):
        """
        List of files that were moved.

        Each event is a two-tuple the first item of which is the path
        that has been renamed to the second item in the tuple.
        """
        return self._files_moved

    @property
    def dirs_modified(self):
        """
        List of directories that were modified.
        """
        return self._dirs_modified

    @property
    def dirs_moved(self):
        """
        List of directories that were moved.

        Each event is a two-tuple the first item of which is the path
        that has been renamed to the second item in the tuple.
        """
        return self._dirs_moved

    @property
    def dirs_deleted(self):
        """
        List of directories that were deleted.
        """
        return self._dirs_deleted

    @property
    def dirs_created(self):
        """
        List of directories that were created.
        """
        return self._dirs_created
class DirectorySnapshot(object):
    """
    A snapshot of stat information of files in a directory.

    :param path:
        The directory path for which a snapshot should be taken.
    :type path:
        ``str``
    :param recursive:
        ``True`` if the entire directory tree should be included in the
        snapshot; ``False`` otherwise.
    :type recursive:
        ``bool``
    :param walker_callback:
        A function with the signature ``walker_callback(path, stat_info)``
        which will be called for every entry in the directory tree.
    """

    def __init__(self,
                 path,
                 recursive=True,
                 walker_callback=(lambda p, s: None),
                 _copying=False):
        # _copying=True skips the directory walk; used by copy() to build
        # an empty snapshot that is then filled from an existing one.
        self._path = absolute_path(path)
        self._stat_snapshot = {}
        self._inode_to_path = {}
        self.is_recursive = recursive

        if not _copying:
            stat_info = os.stat(self._path)
            self._stat_snapshot[self._path] = stat_info
            self._inode_to_path[stat_info.st_ino] = self._path
            walker_callback(self._path, stat_info)

            for root, directories, files in path_walk(self._path, recursive):
                for directory_name in directories:
                    try:
                        directory_path = os.path.join(root, directory_name)
                        stat_info = os.stat(directory_path)
                        self._stat_snapshot[directory_path] = stat_info
                        self._inode_to_path[stat_info.st_ino] = directory_path
                        walker_callback(directory_path, stat_info)
                    except OSError:
                        # Entry vanished or is unreadable; skip it.
                        continue

                for file_name in files:
                    try:
                        file_path = os.path.join(root, file_name)
                        stat_info = os.stat(file_path)
                        self._stat_snapshot[file_path] = stat_info
                        self._inode_to_path[stat_info.st_ino] = file_path
                        walker_callback(file_path, stat_info)
                    except OSError:
                        continue

    def __sub__(self, previous_dirsnap):
        """Allow subtracting a DirectorySnapshot object instance from
        another.

        :returns:
            A :class:`DirectorySnapshotDiff` object.
        """
        return DirectorySnapshotDiff(previous_dirsnap, self)

    def copy(self, from_pathname=None):
        """Return a new snapshot restricted to entries under *from_pathname*.

        NOTE(review): ``from_pathname`` must be an existing path — the
        default ``None`` would fail inside ``absolute_path``; confirm
        intended usage.
        """
        snapshot = DirectorySnapshot(path=from_pathname,
                                     recursive=self.is_recursive,
                                     _copying=True)
        for pathname, stat_info in self._stat_snapshot.items():
            # BUG FIX: the original called ``pathname.starts_with(...)``,
            # which does not exist on str (AttributeError on every call);
            # the correct method is ``str.startswith``.
            if pathname.startswith(from_pathname):
                snapshot._stat_snapshot[pathname] = stat_info
                snapshot._inode_to_path[stat_info.st_ino] = pathname
        return snapshot

    @property
    def stat_snapshot(self):
        """
        Returns a dictionary of stat information with file paths being keys.
        """
        return self._stat_snapshot

    def stat_info(self, path):
        """
        Returns a stat information object for the specified path from
        the snapshot.

        :param path:
            The path for which stat information should be obtained
            from a snapshot.
        """
        return self._stat_snapshot[path]

    def path_for_inode(self, inode):
        """
        Determines the path that an inode represents in a snapshot.

        :param inode:
            inode number.
        """
        return self._inode_to_path[inode]

    def stat_info_for_inode(self, inode):
        """
        Determines stat information for a given inode.

        :param inode:
            inode number.
        """
        return self.stat_info(self.path_for_inode(inode))

    @property
    def paths(self):
        """
        List of file/directory paths in the snapshot.
        """
        return set(self._stat_snapshot)

    def __str__(self):
        return self.__repr__()

    def __repr__(self):
        return str(self._stat_snapshot)
| austinwagner/sublime-sourcepawn | watchdog/utils/dirsnapshot.py | Python | mit | 9,600 |
# use the files in this folder first!
#import os, sys
#sys.path.insert(0, os.path.abspath("."))

# Freezes the UPnP-Inspector GUI into a stand-alone "build" directory
# using bbfreeze.
from bbfreeze import Freezer

# Modules bbfreeze's dependency scanner would otherwise miss (the
# GTK/cairo GUI stack plus coherence's dynamically imported parts).
includes = ['coherence', 'setuptools', 'cairo',
            'pango', 'gtk', 'pangocairo', 'atk', 'xml',
            'coherence.base',
            #'netifaces'
            ]

f = Freezer("build", includes=includes)
f.addScript("upnp-inspector.py", gui_only=True)  # gui_only: no console window on win32
f.include_py = True       # bundle the Python interpreter
f.use_compression = True  # compress the bundled library archive
f()  # run the freeze
| ismaelgaudioso/UPnP-Inspector | win32/compile.py | Python | mit | 437 |
#!/usr/bin/env python
# This file is part of VoltDB.
# Copyright (C) 2008-2015 VoltDB Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with VoltDB. If not, see <http://www.gnu.org/licenses/>.
gpl_header = \
"""/* This file is part of VoltDB.
* Copyright (C) 2008-2015 VoltDB Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with VoltDB. If not, see <http://www.gnu.org/licenses/>.
*/
"""
auto_gen_warning = \
"""/* WARNING: THIS FILE IS AUTO-GENERATED
DO NOT MODIFY THIS SOURCE
ALL CHANGES MUST BE MADE IN THE CATALOG GENERATOR */
"""
| wolffcm/voltdb | src/catgen/catalog_utils/strings.py | Python | agpl-3.0 | 1,677 |
import sys
import os
import os.path as osp
import re
import fnmatch
import json
import hashlib
import codecs
import traceback
from time import time
from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE, S_IFMT
from stat import S_IFREG, S_ISDIR, S_ISREG
# Optional GUI dependency: PySide is only required by the Qt-related
# helpers; fall back gracefully when it is not installed.
PYSIDE_FOUND = True
try:
    from PySide import QtGui, QtCore
except ImportError:
    PYSIDE_FOUND = False
from pytd.util.external import parse
from pytd.util.sysutils import toUnicode, argToList, toStr, qtGuiApp, timer
from pytd.util.logutils import logMsg
from pytd.util.systypes import MemSize
from pytd.util.sysutils import SYSTEM_ENCODING, hostApp
from pytd.util.qtutils import getTopWidget
def isDirStat(statobj):
    """Tell whether the given os.stat() result describes a directory."""
    mode = statobj.st_mode
    return S_ISDIR(mode)
def isFileStat(statobj):
    """Tell whether the given os.stat() result describes a regular file."""
    mode = statobj.st_mode
    return S_ISREG(mode)
def statSig(statobj):
    """Build a lightweight (file type, size, mtime) signature from a stat result."""
    mode = statobj.st_mode
    return (S_IFMT(mode), statobj.st_size, statobj.st_mtime)
# Cache of past sha1 comparisons, keyed by (path1, path2, sig1, sig2);
# stale stat signatures naturally invalidate old entries.
_FILE_EQUAL_CACHE = {}

def sameContent(p, p2, checksum="", shallow=True):
    """Compare two files.

    Arguments:
    p -- First file path
    p2 -- Second file path
    checksum -- First file's sha1 checksum, if already known (computed
                lazily otherwise).
    shallow -- Just check stat signature (do not read the files).
               defaults to True.

    Return value:
    A 3-tuple ``(equal, checksum2, checksum)``: whether the files are
    considered the same, the second file's sha1 (empty string when it was
    not computed), and the first file's (possibly newly computed) sha1.

    This function uses a cache for past comparisons and the results,
    with a cache invalidation mechanism relying on stale signatures.
    """
    p = pathNormAll(p)
    p2 = pathNormAll(p2)

    st1 = statSig(os.stat(p))
    st2 = statSig(os.stat(p2))

    # Only regular files can be compared for content.
    if st1[0] != S_IFREG or st2[0] != S_IFREG:
        return False, "", checksum

    # Identical (type, size, mtime) signature suffices in shallow mode.
    if shallow and st1 == st2:
        return True, "", checksum

    # Different sizes can never hold the same content.
    if st1[1] != st2[1]:
        return False, "", checksum

    res = _FILE_EQUAL_CACHE.get((p, p2, st1, st2))
    if res is None:
        if not checksum:
            checksum = sha1HashFile(p)
        checksum2 = sha1HashFile(p2)
        res = ((checksum == checksum2), checksum2, checksum)

        if len(_FILE_EQUAL_CACHE) > 100: # limit the maximum size of the cache
            _FILE_EQUAL_CACHE.clear()

        _FILE_EQUAL_CACHE[p, p2, st1, st2] = res

    return res
def pathEqual(p, p2):
    """Case- and separator-insensitive test that two paths are the same."""
    normalized1 = pathNormAll(p)
    normalized2 = pathNormAll(p2)
    return normalized1 == normalized2
def pathNorm(p, case=False, keepEndSlash=False):
    """Normalize a path to forward slashes.

    case -- additionally apply OS-specific case folding (osp.normcase).
    keepEndSlash -- preserve a trailing slash that normpath would strip.
    """
    hadTrailingSlash = keepEndSlash and p.replace("\\", "/").endswith("/")
    p = osp.normpath(p)
    if hadTrailingSlash:
        p = addEndSlash(p)
    if case:
        p = osp.normcase(p)
    return p.replace("\\", "/")
def pathNormAll(p):
    """Fully normalize a path: normpath + normcase + forward slashes."""
    folded = osp.normcase(osp.normpath(p))
    return folded.replace("\\", "/")
def normCase(p):
    """Apply OS-specific case normalization, keeping forward slashes."""
    folded = osp.normcase(p)
    return folded.replace("\\", "/")
def pathAbs(p):
    """Absolute version of a path, with forward slashes."""
    absolute = osp.abspath(p)
    return absolute.replace("\\", "/")
def pathJoin(*args):
    """os.path.join wrapper that retries with unicode-converted arguments
    on UnicodeDecodeError (Python 2 mixed str/unicode inputs) and returns
    the result normalized to forward slashes, keeping any trailing slash.
    """
    try:
        p = osp.join(*args)
    except UnicodeDecodeError:
        # Python 2: mixing encoded str and unicode can fail; coerce all.
        p = osp.join(*tuple(toUnicode(arg) for arg in args))

    return pathNorm(p, case=False, keepEndSlash=True)
def pathResolve(p, recursive=True):
    """Expand '~' and environment variables in a path.

    A drive prefix such as ``X:`` is additionally redirected through the
    ``X_DRIVE_UNC_PATH`` environment variable when it is defined.  When
    *recursive* is True, expansion repeats while '%' or '$' markers
    remain and progress is still being made.
    """
    expanded = osp.expanduser(osp.expandvars(p))

    if ":" in expanded:
        drive, tail = expanded.split(":", 1)
        uncRoot = os.environ.get(drive.upper() + "_DRIVE_UNC_PATH")
        if uncRoot:
            expanded = pathNorm(uncRoot) + tail

    stillUnresolved = bool(re.findall(r'[%$]', expanded))
    if recursive and (expanded != p) and stillUnresolved:
        return pathResolve(expanded)

    return expanded
def pathSuffixed(sFileNameOrPath, *suffixes):
    """Insert the given suffixes between a path's root and its extension."""
    root, ext = osp.splitext(sFileNameOrPath)
    return root + "".join(suffixes) + ext
def pathRelativeTo(*args):
    """osp.relpath with the result normalized to forward slashes
    (a trailing slash is preserved)."""
    relative = osp.relpath(*args)
    return pathNorm(relative, keepEndSlash=True)
def pathParse(sPathFormat, sPath, log=False):
    """Match sPath against sPathFormat (a ``parse``-style pattern).

    Both paths are split into directory components and truncated to the
    depth of the shorter one before matching, and the drive/root component
    (index 0) is dropped so differing drives do not break the match.
    Returns the ``parse.Result`` (or None when there is no match).
    """
    fmtDirs = pathSplitDirs(sPathFormat)
    pathDirs = pathSplitDirs(sPath)

    numFmtDirs = len(fmtDirs)
    numPathDirs = len(pathDirs)

    minLen = min(numFmtDirs, numPathDirs)
    fmt = pathJoin(*fmtDirs[1:minLen])
    s = pathJoin(*pathDirs[1:minLen])

    res = parse.parse(fmt, s)
    if log:
        # NOTE: Python 2 print statements — this module targets Python 2.
        print "\n", fmt, sPathFormat
        print s, sPath
        print res

    return res
def pathRedir(sInPath, sFromDir, sToDir, fail=True):
    """Re-root sInPath from sFromDir onto sToDir.

    Raises ValueError when nothing was substituted and *fail* is True.
    """
    if pathEqual(sFromDir, sInPath):
        return pathNorm(sToDir, keepEndSlash=True)

    inPath = pathNorm(sInPath, keepEndSlash=True)
    fromDir = addEndSlash(pathNorm(sFromDir))
    toDir = addEndSlash(pathNorm(sToDir))

    # Substitute only a prefix match (case-insensitive on Windows).
    outPath = pathReSub('^' + re.escape(fromDir), toDir, inPath)
    if fail and pathEqual(inPath, outPath):
        msgLines = ["",
                    "Failed to redirect '{}'".format(inPath),
                    "    from '{}'".format(fromDir),
                    "      to '{}'".format(toDir),
                    ""]
        raise ValueError("\n".join(msgLines))

    return outPath
def pathReSub(pattern, repl, string, count=0, flags=0):
    """re.sub wrapper that is case-insensitive on Windows (paths there
    compare case-insensitively)."""
    effectiveFlags = (flags | re.IGNORECASE) if os.name == "nt" else flags
    return re.sub(pattern, repl, string, count, effectiveFlags)
def pathStartsWith(p, sDirPath, pathSplits=None, log=False):
    """Return True when path *p* lies under directory *sDirPath*.

    pathSplits -- optional pre-computed ``pathSplitDirs(pathNormAll(p))``,
                  useful to avoid recomputation when calling in a loop.
    """
    if pathSplits:
        sPathDirList = pathSplits
    else:
        sPathDirList = pathSplitDirs(pathNormAll(p))

    sDirPath = addEndSlash(pathNormAll(sDirPath))
    numDirs = len(pathSplitDirs(sDirPath))
    if numDirs > len(sPathDirList):
        # p is shallower than sDirPath, so it cannot be a sub-path.
        sAlignedPath = p
        bSubDir = False
    else:
        # Rebuild p truncated to sDirPath's depth and compare directly.
        sAlignedPath = addEndSlash(pathJoin(*sPathDirList[:numDirs]))
        bSubDir = (sAlignedPath == sDirPath)

    if log:
        # NOTE: Python 2 print statements — this module targets Python 2.
        print "\n", p
        print sAlignedPath
        print sDirPath
        print bSubDir

    return bSubDir
def pathStripDrive(p):
    """Drop the drive/root component from a path."""
    components = pathSplitDirs(p)
    return pathJoin(*components[1:])
def pathSplitDirs(p):
    """Split a path into ``[root, dir1, dir2, ...]`` components.

    The root element keeps a trailing slash (e.g. "C:/", "//host/", "/x/").
    """
    p = pathNorm(p)

    if p.startswith("//"):
        # UNC share.  NOTE(review): osp.splitunc is Python-2/ntpath only
        # (absent from posixpath, removed in Python 3) — confirm the
        # supported platforms for this branch.
        root, p = osp.splitunc(p)
    elif p.startswith("/"):
        # Absolute posix-style path: treat the first directory as root.
        dirs = p.split("/", 2)
        root, p = (dirs[1], "") if len(dirs) == 2 else dirs[1:]
        root = "/" + root
    else:
        root, p = osp.splitdrive(p)

    p = p.strip("/")
    res = [root + "/"] if root else []
    if p:
        res.extend(p.split("/"))

    return res
def pathRename(sSrcPath, sDstPath):
    """os.rename wrapper that adds the offending source path to
    WindowsError messages (re-encoded for Maya's script editor when
    running inside Maya).

    NOTE: WindowsError only exists on Windows/Python 2.
    """
    try:
        os.rename(sSrcPath, sDstPath)
    except WindowsError as e:
        if hostApp() == "maya":
            raise WindowsError(toUnicode("code {} - {}: {}".format(e.args[0], e.strerror , sSrcPath)))
        else:
            raise WindowsError(e.args[0], "{}: {}".format(e.strerror , sSrcPath))
def ignorePatterns(*patterns):
    """Factory usable as an iterPaths() ignore callback.

    *patterns* is a sequence of glob-style patterns; the returned callable
    takes ``(dirPath, names)`` and yields the set of names matching any
    pattern (i.e. the names to exclude).
    """
    def _ignore_patterns(p, names):
        return set(name
                   for pattern in patterns
                   for name in fnmatch.filter(names, pattern))
    return _ignore_patterns
def iterPaths(sStartDirPath, **kwargs):
    """Generator walking `sStartDirPath` and yielding file and/or
    directory paths according to keyword options.

    Options:
        files, dirs, emptyDirs (bool, default True)
        intermediateDirs, relative (bool, default False)
        recursive (bool, default True)
        ignoreDirs, ignoreFiles, onlyFiles: callables taking
            (dirPath, names) and returning names to exclude (or keep,
            for onlyFiles) -- see ignorePatterns().
    Yielded directories carry a trailing slash.
    """
    if not osp.isdir(sStartDirPath):
        raise ValueError('No such directory found: "{0}"'.format(sStartDirPath))
    bFiles = kwargs.pop("files", True)
    bDirs = kwargs.pop("dirs", True)
    bEmptyDirs = kwargs.pop("emptyDirs", True)
    # "intermeDirs" kept as a legacy alias of "intermediateDirs".
    bInterDirs = kwargs.pop("intermediateDirs", kwargs.pop("intermeDirs", False))
    bRelPath = kwargs.pop("relative", False)
    bRecursive = kwargs.pop("recursive", True)
    ignoreDirsFunc = kwargs.get("ignoreDirs", None)
    ignoreFilesFunc = kwargs.get("ignoreFiles", None)
    onlyFilesFunc = kwargs.get("onlyFiles", None)
    for sDirPath, sDirList, sFileList in os.walk(sStartDirPath):
        sDirPath = sDirPath.replace("\\", "/")
        if not bRecursive:
            del sDirList[:] # don't walk further
        if ignoreDirsFunc is not None:
            # Prune ignored sub-directories in place so os.walk skips them.
            sIgnoredDirs = ignoreDirsFunc(sDirPath, sDirList)
            for sDir in sIgnoredDirs:
                try: sDirList.remove(sDir)
                except ValueError: pass
        bOnly = False
        sOnlyFiles = []
        if onlyFilesFunc is not None:
            sOnlyFiles = onlyFilesFunc(sDirPath, sFileList)
            #print "sOnlyFiles", sOnlyFiles, sFileList
            bOnly = True
        sIgnoredFiles = []
        if ignoreFilesFunc is not None:
            sIgnoredFiles = ignoreFilesFunc(sDirPath, sFileList)
            #print "sIgnoredFiles", sIgnoredFiles
        # sKeptFileList tracks files surviving the filters, so directory
        # emptiness can be judged after exclusions.
        sKeptFileList = sFileList[:]
        for sFileName in sFileList:
            if bOnly and (sFileName not in sOnlyFiles):
                if bEmptyDirs:
                    sKeptFileList.remove(sFileName)
                continue
            if sFileName in sIgnoredFiles:
                if bEmptyDirs:
                    sKeptFileList.remove(sFileName)
                continue
            if bFiles:
                p = pathJoin(sDirPath, sFileName)
                yield p if not bRelPath else pathRelativeTo(p, sStartDirPath)
        if bDirs:
            p = pathNorm(sDirPath)
            if bRelPath:
                p = pathRelativeTo(p, sStartDirPath)
            bYieldDir = True
            if p == ".":
                # Never yield the start directory itself (relative form).
                bYieldDir = False
            elif not bInterDirs:
                # Only leaf directories (and, optionally, empty ones) qualify.
                bIsLeaf = (not sDirList)
                bIsEmpty = bIsLeaf and (not sKeptFileList)
                bYieldDir = bIsEmpty if bEmptyDirs else bIsLeaf
                #print sDirPath, bIsLeaf, bIsEmpty
            if bYieldDir:
                yield addEndSlash(p)
def addEndSlash(p):
    """Return `p` with a trailing "/" appended, unless it is empty or
    already ends with a slash of either kind."""
    if not p:
        return p
    if p.replace("\\", "/").endswith("/"):
        return p
    return p + "/"
def delEndSlash(p):
    """Return `p` without its trailing "/" if one is present."""
    if p.endswith("/"):
        return p[:-1]
    return p
def commonDir(sPathList):
    """Return the common directory (slash-terminated) of the given paths.

    osp.commonprefix() works character-wise, so the raw prefix may stop
    in the middle of a path component; in that case we fall back to its
    dirname. Returns "" when there is no common prefix (e.g. empty input).
    """
    sDir = osp.commonprefix(sPathList)
    if not sDir:
        # BUGFIX: previously sDir[-1] raised IndexError on an empty
        # prefix (empty list or no shared characters).
        return ""
    return sDir if (sDir[-1] in ("\\", "/")) else (osp.dirname(sDir) + "/")
# Human-readable action label per link mode (legacy of the distutils
# code copyFile() was adapted from; only '' / plain copying is used now).
_copy_action = {
    '': 'copying',
    'hard': 'hard linking',
    'symb': 'symbolically linking'}
def copyFile(sSrcPath, sDstPath, preserve_mode=True, preserve_times=True, in_place=False,
             update=False, link="", verbose=1, dry_run=False, buffer_size=512 * 1024, showProgress=True):
    """Copy a file 'sSrcPath' to 'sDstPath'. (Stolen and customized from distutils.file_util.copy_file)
    If 'sDstPath' is a directory, then 'sSrcPath' is copied there with the same name;
    otherwise, it must be a filename. (If the file exists, it will be
    ruthlessly clobbered.) If 'preserve_mode' is true (the default),
    the file's mode (type and permission bits, or whatever is analogous on
    the current platform) is copied. If 'preserve_times' is true (the
    default), the last-modified and last-access times are copied as well.
    If 'update' is true, 'sSrcPath' will only be copied if 'sDstPath' does not exist,
    or if 'sDstPath' does exist but is older than 'sSrcPath'.
    'link' allows you to make hard links (os.link) or symbolic links
    (os.symlink) instead of copying: set it to "hard" or "sym"; if it is
    None (the default), files are copied. Don't set 'link' on systems that
    don't support it: 'copy_file()' doesn't check if hard or symbolic
    linking is available.
    Unless 'in_place' is true, the data is first copied to a temporary
    '.tmpcopy' sibling and then swapped in, so an interrupted copy never
    leaves a half-written destination.
    Return a tuple (dest_name, copied): 'dest_name' is the actual name of
    the output file, and 'copied' is true if the file was copied (or would
    have been copied, if 'dry_run' true).
    """
    # XXX if the destination file already exists, we clobber it if
    # copying, but blow up if linking. Hmmm. And I don't know what
    # macostools.copyfile() does. Should definitely be consistent, and
    # should probably blow up if destination exists and we would be
    # changing it (ie. it's not already a hard/soft link to sSrcPath OR
    # (not update) and (sSrcPath newer than sDstPath).
    sSrcPath = toStr(sSrcPath)
    sDstPath = toStr(sDstPath)
#    try:
#        sAction = _copy_action[link].capitalize()
#    except KeyError:
#        raise ValueError("Invalid value for 'link' argument: '{}'. Expected one of {}."
#                         .format(link, _copy_action.keys()))
    sAction = "Copying"
    srcStat = os.stat(sSrcPath)
    if not S_ISREG(srcStat.st_mode):
        raise EnvironmentError("Source file NOT found: '{}'.".format(sSrcPath))
    if osp.isdir(sDstPath):
        # Destination is a directory: copy into it with the source name.
        sDirPath = sDstPath
        sDstPath = osp.join(sDstPath, osp.basename(sSrcPath))
    else:
        sDirPath = osp.dirname(sDstPath)
    if update and (not pathNewer(sSrcPath, sDstPath)):
        if verbose >= 1:
            logMsg("Not copying (output up-to-date): '{}'".format(sSrcPath), log="debug")
        return sDstPath, False
    if verbose >= 1:
        if osp.normcase(osp.basename(sDstPath)) == osp.normcase(osp.basename(sSrcPath)):
            logMsg("{} {}\n to {}".format(sAction, sSrcPath, sDirPath))
        else:
            logMsg("{} {}\n as {}".format(sAction, sSrcPath, sDstPath))
    if dry_run:
        return (sDstPath, True)
#    # If linking (hard or symbolic), use the appropriate system call
#    # (Unix only, of course, but that's the caller's responsibility)
#    if link == 'hard':
#        if not (osp.exists(sDstPath) and osp.samefile(sSrcPath, sDstPath)):
#            os.link(sSrcPath, sDstPath)
#    elif link == 'symb':
#        if not (osp.exists(sDstPath) and osp.samefile(sSrcPath, sDstPath)):
#            os.symlink(sSrcPath, sDstPath)
#
#    # Otherwise (non-Mac, not linking), copy the file contents and
#    # (optionally) copy the times and mode.
#    else:
    if sameFile(sSrcPath, sDstPath):
        # BUGFIX: these lines used to do `sMsg += "...", path`, which
        # attempts to concatenate a str with a tuple and raised TypeError
        # instead of producing the intended error message.
        sMsg = "Source and destination files are the same:"
        sMsg += "\n    source: {}".format(sSrcPath)
        sMsg += "\n    destination: {}".format(sDstPath)
        raise EnvironmentError(sMsg)
    sTmpPath = ""
    try:
        dstStat = os.stat(sDstPath)
    except OSError:
        pass
    else:# destination path exists
        if not S_ISREG(dstStat.st_mode):
            raise EnvironmentError("Path already exists but NOT a regular file: '{}'."
                                   .format(sDstPath))
        if not in_place:
            # Copy to a temporary sibling first, swap it in afterwards.
            #pathRename(sDstPath, sDstPath)
            sTmpPath = sDstPath + ".tmpcopy"
    sCopyPath = sTmpPath if sTmpPath else sDstPath
    try:
        copyFileData(sSrcPath, sCopyPath,
                     preserve_mode=preserve_mode, preserve_times=preserve_times,
                     buffer_size=buffer_size, sourceStat=srcStat, showProgress=showProgress)
        if sTmpPath:
            if os.name == "nt": # on nt platform, destination must be removed first
                os.remove(sDstPath)
            pathRename(sTmpPath, sDstPath)
    finally:
        # Never leave the temporary file behind, even on failure.
        if sTmpPath and osp.exists(sTmpPath):
            os.remove(sTmpPath)
    return (sDstPath, True)
def copyFileData(sSrcPath, sDstPath, preserve_mode=True, preserve_times=True,
                 buffer_size=512 * 1024, sourceStat=None, showProgress=False):
    """Copy file contents (and optionally mode/times) from sSrcPath to
    sDstPath in chunks, with optional CopyProgress reporting.

    `sourceStat` may carry a pre-fetched os.stat() result for the source.
    Raises IOError if the destination size does not match the source.
    Returns True on success.
    """
    srcStat = sourceStat if sourceStat else os.stat(sSrcPath)
    srcSize = srcStat.st_size
    # Optimize the buffer for small files
    bufferSize = min(buffer_size, srcSize)
    if bufferSize == 0:
        # Zero-byte source: dummy buffer size, single chunk.
        bufferSize = 1024
        numChunks = 1
    else:
        # NOTE(review): written for Python 2 integer division; under
        # Python 3 this yields a float, which still compares fine below.
        numChunks = srcSize / bufferSize
    # Only bother showing progress for copies of at least ~100 chunks.
    showProgress = (showProgress and (numChunks >= 100))
    if showProgress:
        progress = CopyProgress(srcSize, sSrcPath)
    copiedSize = 0
    with open(sSrcPath, 'rb') as srcFile:
        with open(sDstPath, 'wb') as dstFile:
            while True:
                try:
                    buf = srcFile.read(bufferSize)
                    if not buf:
                        break
                    dstFile.write(buf)
                    if showProgress:
                        copiedSize += len(buf)
                        try:
                            progress.update(copiedSize)
                        except:
                            # Progress reporting must never abort the copy.
                            traceback.print_exc()
                            showProgress = False
                except:
                    if showProgress:
                        progress.closeDialog()
                    raise
    if preserve_mode or preserve_times:
        # According to David Ascher <[email protected]>, utime() should be done
        # before chmod() (at least under NT).
        if preserve_times:
            os.utime(sDstPath, (srcStat[ST_ATIME], srcStat[ST_MTIME]))
        if preserve_mode:
            os.chmod(sDstPath, S_IMODE(srcStat[ST_MODE]))
    dstStat = os.stat(sDstPath)
    if dstStat.st_size != srcStat.st_size:
        srcSize = MemSize(srcStat.st_size)
        dstSize = MemSize(dstStat.st_size)
        raise IOError("Incomplete copy: {}/{} bytes copied.".format(dstSize, srcSize))
    return True
# Presumably runs on module (re)load inside a Qt host application:
# close and delete any progress dialog left over from a previous import
# of this module before the CopyProgress class is redefined below.
if PYSIDE_FOUND and "CopyProgress" in globals():
    tmpDlg = globals()["CopyProgress"].dialog
    if tmpDlg:
        tmpDlg.setAttribute(QtCore.Qt.WA_DeleteOnClose)
        tmpDlg.done(0)
    del tmpDlg
class CopyProgress(object):
    """Reports file-copy progress, either through a Qt progress dialog
    (when a QApplication is running) or as dots printed to stdout.

    The dialog is shared at class level so successive copies reuse it.
    `latency` is the delay (seconds) before progress display kicks in;
    pass 0.0 to show progress immediately.
    """
    # Shared, lazily-created QProgressDialog (or None outside Qt).
    dialog = None
    def __init__(self, sourceSize, sourcePath, latency=3.0):
        self.sourceSize = sourceSize
        self.sourcePath = sourcePath
        self.latency = latency
        self.progressLineTmp = ""
        self.percentage = 0
        self.showProgress = (latency == 0.0)
        self.progressShown = False
        self.startTime = None
    def __start(self):
        # Lazily create (or reuse) the class-level progress dialog and
        # start the copy timer.
        dialog = None
        qApp = qtGuiApp()
        if qApp:
            dialog = self.__class__.dialog
            bDialogCreated = False
            if not dialog:
                bDialogCreated = True
                dialog = QtGui.QProgressDialog(getTopWidget(qApp))
                dialog.setWindowModality(QtCore.Qt.WindowModal)
                dialog.setCancelButton(None)
            dialog.setLabelText("Copying {}".format(self.sourcePath))
            dialog.setMinimum(0)
            dialog.setMaximum(self.sourceSize)
            if not bDialogCreated:
                dialog.reset()
        self.__class__.dialog = dialog
        self.startTime = time()
    def __update(self, copiedSize):
        # Core logic: decide whether the copy is long enough to be worth
        # displaying (after `latency` seconds), then feed the dialog
        # and/or stdout dots.
        if self.startTime is None:
            self.__start()
        sourceSize = self.sourceSize
        bDone = (copiedSize == sourceSize)
        startTime = self.startTime
        dialog = self.__class__.dialog
        bShowProgress = self.showProgress
        bProgressShown = self.progressShown
        sProgessTmp = self.progressLineTmp
        if (not bDone) and (not bShowProgress):
            elapsed = (time() - startTime)
            if (elapsed > self.latency):
                # Estimate remaining time from the measured throughput.
                copySpeed = copiedSize / elapsed
                estimTime = (sourceSize - copiedSize) / copySpeed
                #print 'estim = {:f} sec'.format(estimTime)
                if estimTime >= 5.0:
                    bShowProgress = True
        if dialog:
            if bDone:
                dialog.setValue(sourceSize)
            elif bShowProgress:
                dialog.setValue(copiedSize)
                bProgressShown = True
        if bDone:
            if bProgressShown:
                # Final summary line (Python 2 print statement).
                copySpeed = MemSize(copiedSize / (time() - startTime))
                print " {:.2cM} at {:.2cM}/sec".format(MemSize(copiedSize),
                                                       copySpeed)
        else:
            curPercent = copiedSize * 100 / sourceSize
            if curPercent > self.percentage:
                self.percentage = curPercent
                if bShowProgress:
                    bProgressShown = True
                    if sProgessTmp:
                        # Flush dots buffered before display was enabled.
                        sys.stdout.write(sProgessTmp + '.')
                        self.progressLineTmp = ""
                    else:
                        sys.stdout.write('.')
                    sys.stdout.flush()
                else:
                    # Not displaying yet: buffer the dot for later.
                    self.progressLineTmp += "."
        self.showProgress = bShowProgress
        self.progressShown = bProgressShown
    def update(self, *args, **kwargs):
        # Public wrapper: ensure the dialog is closed if updating fails.
        try:
            self.__update(*args, **kwargs)
        except:
            self.closeDialog()
            raise
    def closeDialog(self):
        # Setting the value to the maximum closes a QProgressDialog.
        dlg = self.__class__.dialog
        if dlg:
            dlg.setValue(dlg.maximum())
def pathNewer(sSrcPath, sDstPath):
    """Return True if `sSrcPath` needs to be (re)copied over `sDstPath`.

    True when `sSrcPath` exists and is more recently modified than
    `sDstPath`, or when `sDstPath` does not exist at all. False when
    both exist and `sDstPath` is the same age or younger. Raises
    EnvironmentError when `sSrcPath` does not exist. Files created
    within the same second compare as the same age.
    """
    if not osp.exists(sSrcPath):
        raise EnvironmentError("No such file: '{}'.".format(osp.abspath(sSrcPath)))
    if not osp.exists(sDstPath):
        return True
    srcMTime = os.stat(sSrcPath)[ST_MTIME]
    dstMTime = os.stat(sDstPath)[ST_MTIME]
    return srcMTime > dstMTime
def sameFile(sSrcPath, sDestPath):
    """Return True if both paths refer to the same file on disk."""
    samefileFunc = getattr(osp, 'samefile', None)
    if samefileFunc is not None:
        # Macintosh, Unix: compare device/inode; any OSError (e.g. a
        # missing path) means "not the same file".
        try:
            return samefileFunc(sSrcPath, sDestPath)
        except OSError:
            return False
    # All other platforms: compare normalized absolute pathnames.
    return pathEqual(osp.abspath(sSrcPath), osp.abspath(sDestPath))
def distribTree(in_sSrcRootDir, in_sDestRootDir, **kwargs):
    """Copy a list of files from one directory tree to another,
    recreating the source directory structure under the destination root
    (Python 2 code: print statements and `raise E, msg` syntax).

    Keyword options: filePaths (required list of source paths),
    replaceExtensions/replaceExts (dict: old ext -> new ext),
    encryptExtensions/encryptExts (removed feature, must stay empty),
    printSourceOnly, plus any copyFile() keyword (dry_run, update, ...).
    Returns the list of copied destination paths.
    """
    bDryRun = kwargs.get("dry_run", False)
    bPrintSrcOnly = kwargs.pop("printSourceOnly", False)
    sFilePathList = kwargs.pop("filePaths", "NoEntry")
    sReplaceExtDct = kwargs.pop("replaceExtensions", kwargs.pop("replaceExts", {}))
    if not isinstance(sReplaceExtDct, dict):
        raise TypeError('"replaceExtensions" kwarg expects {0} but gets {1}.'
                        .format(dict, type(sReplaceExtDct)))
    sEncryptExtList = kwargs.pop("encryptExtensions", kwargs.pop("encryptExts", []))
    if not isinstance(sEncryptExtList, list):
        raise TypeError('"encryptExtensions" kwarg expects {0} but gets {1}.'
                        .format(list, type(sEncryptExtList)))
    if sEncryptExtList:
        # Encryption support was dropped; the two lines below are
        # unreachable dead code kept for reference.
        raise NotImplementedError, "Sorry, feature has been removed."
        # import cryptUtil
        sEncryptExtList = list(e.strip(".") for e in sEncryptExtList)
    sSrcRootDir = addEndSlash(pathNorm(in_sSrcRootDir))
    sDestRootDir = addEndSlash(pathNorm(in_sDestRootDir))
    if not osp.isdir(sSrcRootDir):
        raise ValueError, 'No such directory found: "{0}"'.format(sSrcRootDir)
    if not osp.isdir(sDestRootDir):
        print 'Creating destination directory: "{0}"'.format(sDestRootDir)
        if not bDryRun:
            os.makedirs(sDestRootDir)
    sCopiedFileList = []
    if sFilePathList == "NoEntry":
        sMsg = "Sorry, but for now, you must provide a list of file paths to copy."
        raise NotImplementedError(sMsg)
    else:
        sFilePathList = argToList(sFilePathList)
        sFilePathList.sort()
    # Case-insensitive patterns anchored on each root, used to re-root
    # source paths under the destination.
    srcRootDirRexp = re.compile("^" + sSrcRootDir, re.I)
    destRootDirRexp = re.compile("^" + sDestRootDir, re.I)
    # building destination directories
    sDestDirList = sFilePathList[:]
    iMaxPathLen = 0
    for i, sFilePath in enumerate(sFilePathList):
        sSrcDir = addEndSlash(pathNorm(osp.dirname(sFilePath)))
        sRexpList = srcRootDirRexp.findall(sSrcDir)
        if not sRexpList:
            raise RuntimeError, "File outside of source directory: {0}.".format(sSrcDir)
        sDestDirList[i] = sSrcDir.replace(sRexpList[0], sDestRootDir)
        # Track the longest root-relative path for print alignment.
        iPathLen = len(srcRootDirRexp.split(sFilePath, 1)[1])
        if iPathLen > iMaxPathLen:
            iMaxPathLen = iPathLen
    iNumFiles = len(sFilePathList)
    iDoneFileCount = 0
    iCountLen = len(str(iNumFiles)) * 2 + 5
    sPrintFormat = "{0:^{width1}} {1:<{width2}} >> {2}"
    sPrintFormat = sPrintFormat if not bPrintSrcOnly else sPrintFormat.split(">>", 1)[0]
    def endCopy(sFilePath, sDestPath, bCopied, iDoneFileCount):
        # Log one finished copy and collect the destination path.
        iDoneFileCount += 1
        if bCopied:
            sCount = "{0}/{1}".format(iDoneFileCount, iNumFiles)
            print sPrintFormat.format(sCount,
                                      srcRootDirRexp.split(sFilePath, 1)[1],
                                      destRootDirRexp.split(sDestPath, 1)[1],
                                      width1=iCountLen,
                                      width2=iMaxPathLen)
            sCopiedFileList.append(sDestPath)
        return iDoneFileCount
    print '{0} files to copy from "{1}" to "{2}":'.format(iNumFiles, sSrcRootDir, sDestRootDir)
    # creating directories
    for sDestDir in sorted(set(sDestDirList)):
        if (not osp.isdir(sDestDir)) and (not bDryRun):
            os.makedirs(sDestDir)
    # copying files
    if sReplaceExtDct:
        for sFilePath, sDestDir in zip(sFilePathList, sDestDirList):
            sPath, sExt = osp.splitext(sFilePath); sExt = sExt.strip(".")
            sNewExt = sReplaceExtDct.get(sExt, "")
            if sNewExt:
                # Swap the extension as requested by replaceExtensions.
                sDestPath = pathJoin(sDestDir, osp.basename(sPath)) + "." + sNewExt.strip(".")
            else:
                sDestPath = pathJoin(sDestDir, osp.basename(sFilePath))
            bCopied = True
            if sExt in sEncryptExtList:
                pass# bCopied = cryptUtil.encryptFile(sFilePath, sDestPath, **kwargs)
            else:
                sDestPath, bCopied = copyFile(sFilePath, sDestPath, **kwargs)
            iDoneFileCount = endCopy(sFilePath, sDestPath, bCopied, iDoneFileCount)
    elif sEncryptExtList:
        for sFilePath, sDestDir in zip(sFilePathList, sDestDirList):
            sExt = osp.splitext(sFilePath)[1].strip(".")
            # print "\t{0} >> {1}".format( srcRootDirRexp.split( sFilePath, 1 )[1], destRootDirRexp.split( sDestDir, 1 )[1] )
            sDestPath = pathJoin(sDestDir, osp.basename(sFilePath))
            bCopied = True
            if sExt in sEncryptExtList:
                pass# bCopied = cryptUtil.encryptFile(sFilePath, sDestPath, **kwargs)
            else:
                _, bCopied = copyFile(sFilePath, sDestPath, **kwargs)
            iDoneFileCount = endCopy(sFilePath, sDestPath, bCopied, iDoneFileCount)
    else:
        for sFilePath, sDestDir in zip(sFilePathList, sDestDirList):
            sDestPath = pathJoin(sDestDir, osp.basename(sFilePath))
            _, bCopied = copyFile(sFilePath, sDestPath, **kwargs)
            iDoneFileCount = endCopy(sFilePath, sDestPath, bCopied, iDoneFileCount)
    return sCopiedFileList
def jsonWrite(p, pyobj, ensure_ascii=False, indent=2, encoding=SYSTEM_ENCODING, **kwargs):
    """Serialize `pyobj` as JSON into a UTF-8 encoded file at `p`.

    NOTE(review): the `encoding` kwarg of json.dump() is Python-2-only;
    under Python 3 this call would raise TypeError -- confirm targets.
    """
    with codecs.open(p, 'wb', 'utf_8') as fileobj:
        json.dump(pyobj, fileobj, ensure_ascii=ensure_ascii,
                  indent=indent, encoding=encoding, **kwargs)
def jsonRead(p, **kwargs):
    """Parse the JSON file at `p` and return the resulting object.

    Extra keyword arguments are forwarded to json.load().
    """
    with open(p, 'rb') as fileobj:
        return json.load(fileobj, **kwargs)
def sha1HashFile(sFilePath, chunk_size=32 * 1024):
    """Compute the SHA-1 hex digest of the file at `sFilePath`,
    reading it in chunks of `chunk_size` bytes."""
    hasher = hashlib.sha1()
    with open(sFilePath, "rb") as fileobj:
        for chunk in iter(lambda: fileobj.read(chunk_size), b""):
            hasher.update(chunk)
    return hasher.hexdigest()
def topmostFoundDir(sPath):
    """Walk up from `sPath` and return the deepest existing ancestor
    path, or "" if nothing along the way exists."""
    sCurPath = sPath
    while not osp.exists(sCurPath):
        sParent = osp.split(sCurPath)[0]
        if sParent == sCurPath:
            # Reached the top without finding an existing path.
            return ""
        sCurPath = sParent
    return sCurPath
def parseDirContent(sInDirPath):
    """Recursively scan `sInDirPath` (skipping dot-entries and *.db
    files) and return a dict with keys:
        "dir_size": cumulated MemSize of all kept files,
        "dir_subfiles": file paths relative to `sInDirPath`,
        "dir_subdirs": directory paths relative to `sInDirPath`.
    """
    ignoreFunc = ignorePatterns(".*", "*.db")
    sAllDirList = []
    sAllFileList = []
    for sCurDirPath, sDirList, sFileList in os.walk(sInDirPath):
        # Prune ignored directories in place so os.walk skips them.
        sIgnoredDirs = ignoreFunc(sCurDirPath, sDirList)
        for sDir in sIgnoredDirs:
            try: sDirList.remove(sDir)
            except ValueError: pass
        sIgnoredFiles = ignoreFunc(sCurDirPath, sFileList)
        sCurDirPath = sCurDirPath.replace("\\", "/")
        sAllDirList.extend(pathRelativeTo(pathJoin(sCurDirPath, s), sInDirPath)
                           for s in sDirList)
        sAllFileList.extend(pathRelativeTo(pathJoin(sCurDirPath, s), sInDirPath)
                            for s in sFileList if s not in sIgnoredFiles)
    dirSize = MemSize(sum(osp.getsize(pathJoin(sInDirPath, p)) for p in sAllFileList))
    return {"dir_size":dirSize, "dir_subfiles":sAllFileList, "dir_subdirs":sAllDirList}
| sebcourtois/pypeline-tool-devkit | pytd/util/fsutils.py | Python | gpl-3.0 | 28,594 |
# -*- coding: utf-8 -*-
from django.conf import settings
import mock
import urlparse
import amo
import amo.tests
from addons.models import Addon
from applications.models import AppVersion
from files.utils import make_xpi
from versions.compare import version_int
from zadmin import tasks
def RequestMock(response='', headers=None):
    """Mocks the request objects of urllib2 and requests modules.

    Returns a Mock whose read()/text/contents/readlines()/iter_lines()/
    iter_content() members all serve `response`. NOTE: a `headers` dict
    passed by the caller is stored as-is and gains a 'content-length'
    entry (the caller's dict is mutated) -- existing callers read it back.
    """
    res = mock.Mock()
    res.read.return_value = response
    res.contents = response
    res.text = response
    res.iter_content.side_effect = lambda chunk_size=1: (response,).__iter__()
    def lines():
        # Each line of `response`, newline-terminated; the trailing empty
        # fragment after the final '\n' is dropped.
        return [l + '\n' for l in response.split('\n')[:-1]]
    res.readlines.side_effect = lines
    # BUGFIX: iter_lines.side_effect used to be assigned twice; the first
    # (chunk_size-taking) lambda was dead code, immediately overwritten
    # by this one. Only the effective assignment is kept.
    res.iter_lines.side_effect = lambda: lines().__iter__()
    res.headers = headers or {}
    res.headers['content-length'] = len(response)
    return res
def make_langpack(version):
    """Build an in-memory language-pack XPI targeting Firefox `version`.

    Also get_or_creates the AppVersion rows it targets (`version` and
    'version.*') so validation of the upload can succeed. Returns the
    XPI file contents as bytes.
    """
    versions = (version, '%s.*' % version)
    for version in versions:
        AppVersion.objects.get_or_create(application=amo.FIREFOX.id,
                                         version=version,
                                         version_int=version_int(version))
    # em:type="8" marks the add-on as a language pack.
    return make_xpi({
        'install.rdf': """<?xml version="1.0"?>
            <RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
                 xmlns:em="http://www.mozilla.org/2004/em-rdf#">
                <Description about="urn:mozilla:install-manifest"
                             em:id="[email protected]"
                             em:name="Foo Language Pack"
                             em:version="{0}"
                             em:type="8"
                             em:creator="mozilla.org">
                    <em:targetApplication>
                        <Description>
                            <em:id>{{ec8030f7-c20a-464f-9b0e-13a3a9e97384}}</em:id>
                            <em:minVersion>{0}</em:minVersion>
                            <em:maxVersion>{1}</em:maxVersion>
                        </Description>
                    </em:targetApplication>
                </Description>
            </RDF>
        """.format(*versions)
    }).read()
class TestLangpackFetcher(amo.tests.TestCase):
    """Tests for the zadmin fetch_langpacks task, with requests.get
    mocked to serve a fake listing and XPI."""
    fixtures = ['zadmin/users']
    # Fake manifest listing served for the langpack directory.
    LISTING = 'pretend-this-is-a-sha256-sum win32/xpi/de-DE.xpi\n'
    def setUp(self):
        super(TestLangpackFetcher, self).setUp()
        # Patch requests.get for the whole test; undone via addCleanup.
        request_patch = mock.patch('zadmin.tasks.requests.get')
        self.mock_request = request_patch.start()
        self.addCleanup(request_patch.stop)
    def get_langpacks(self):
        # Language packs owned by the configured langpack account.
        return (Addon.objects.no_cache()
                .filter(addonuser__user__email=settings.LANGPACK_OWNER_EMAIL,
                        type=amo.ADDON_LPAPP))
    def fetch_langpacks(self, version):
        # Simulate the remote listing and XPI download for `version`,
        # run the fetch task, and verify both URLs were requested with
        # certificate verification enabled.
        path = settings.LANGPACK_PATH_DEFAULT % ('firefox', version)
        base_url = urlparse.urljoin(settings.LANGPACK_DOWNLOAD_BASE, path)
        list_url = urlparse.urljoin(base_url, settings.LANGPACK_MANIFEST_PATH)
        langpack_url = urlparse.urljoin(base_url, 'de-DE.xpi')
        responses = {list_url: RequestMock(self.LISTING),
                     langpack_url: RequestMock(make_langpack(version))}
        self.mock_request.reset_mock()
        self.mock_request.side_effect = lambda url, **kw: responses.get(url)
        tasks.fetch_langpacks(path)
        self.mock_request.assert_has_calls(
            [mock.call(list_url, verify=settings.CA_CERT_BUNDLE_PATH),
             mock.call(langpack_url, verify=settings.CA_CERT_BUNDLE_PATH)])
    @mock.patch('zadmin.tasks.sign_file')
    def test_fetch_new_langpack(self, mock_sign_file):
        # A first fetch creates a public, signed de-DE langpack addon.
        assert self.get_langpacks().count() == 0
        self.fetch_langpacks(amo.FIREFOX.latest_version)
        langpacks = self.get_langpacks()
        assert langpacks.count() == 1
        addon = langpacks[0]
        assert addon.default_locale == 'de-DE'
        assert addon.target_locale == 'de-DE'
        assert addon._current_version
        assert addon.current_version.version == amo.FIREFOX.latest_version
        assert addon.status == amo.STATUS_PUBLIC
        assert addon.current_version.files.all()[0].status == amo.STATUS_PUBLIC
        mock_sign_file.assert_called_once_with(
            addon.current_version.files.get(), settings.SIGNING_SERVER)
    @mock.patch('zadmin.tasks.sign_file')
    def test_fetch_updated_langpack(self, mock_sign_file):
        # Fetching a newer version adds a second Version to the addon.
        versions = ('16.0', '17.0')
        self.fetch_langpacks(versions[0])
        assert self.get_langpacks().count() == 1
        self.fetch_langpacks(versions[1])
        langpacks = self.get_langpacks()
        assert langpacks.count() == 1
        addon = langpacks[0]
        assert addon.versions.count() == 2
        version = addon.versions.get(version=versions[1])
        assert version.files.all()[0].status == amo.STATUS_PUBLIC
        mock_sign_file.assert_called_with(
            version.files.get(), settings.SIGNING_SERVER)
    @mock.patch('zadmin.tasks.sign_file')
    def test_fetch_duplicate_langpack(self, mock_sign_file):
        # Re-fetching the same version must not create a new Version
        # nor re-sign the file.
        self.fetch_langpacks(amo.FIREFOX.latest_version)
        langpacks = self.get_langpacks()
        assert langpacks.count() == 1
        assert langpacks[0].versions.count() == 1
        assert (langpacks[0].versions.all()[0].version ==
                amo.FIREFOX.latest_version)
        self.fetch_langpacks(amo.FIREFOX.latest_version)
        langpacks = self.get_langpacks()
        assert langpacks.count() == 1
        addon = langpacks[0]
        assert addon.versions.count() == 1
        assert (addon.versions.all()[0].version ==
                amo.FIREFOX.latest_version)
        mock_sign_file.assert_called_once_with(
            addon.current_version.files.get(), settings.SIGNING_SERVER)
    @mock.patch('zadmin.tasks.sign_file')
    def test_fetch_updated_langpack_beta(self, mock_sign_file):
        # Beta versions ('a' suffix) get STATUS_BETA and are signed on
        # the preliminary signing server.
        versions = ('16.0', '16.0a2')
        self.fetch_langpacks(versions[0])
        assert self.get_langpacks().count() == 1
        self.fetch_langpacks(versions[1])
        langpacks = self.get_langpacks()
        assert langpacks.count() == 1
        addon = langpacks[0]
        assert addon.versions.count() == 2
        version = addon.versions.get(version=versions[1])
        assert version.files.all()[0].status == amo.STATUS_BETA
        mock_sign_file.assert_called_with(
            version.files.get(), settings.PRELIMINARY_SIGNING_SERVER)
    @mock.patch('zadmin.tasks.sign_file')
    def test_fetch_new_langpack_beta(self, mock_sign_file):
        # A beta for a langpack we don't already track is ignored.
        self.fetch_langpacks('16.0a2')
        assert self.get_langpacks().count() == 0
        assert not mock_sign_file.called
    @mock.patch('zadmin.tasks.sign_file')
    def test_fetch_langpack_wrong_owner(self, mock_sign_file):
        # An addon with the same GUID but a different owner is not touched.
        Addon.objects.create(guid='[email protected]',
                             type=amo.ADDON_LPAPP)
        self.fetch_langpacks(amo.FIREFOX.latest_version)
        assert self.get_langpacks().count() == 0
        assert not mock_sign_file.called
    @mock.patch('zadmin.tasks.sign_file')
    def test_fetch_langpack_invalid_path_fails(self, mock_sign_file):
        # Path traversal attempts are rejected before any request is made.
        self.mock_request.return_value = None
        with self.assertRaises(ValueError) as exc:
            tasks.fetch_langpacks('../foo/')
        assert str(exc.exception) == 'Invalid path'
        assert not mock_sign_file.called
| mdaif/olympia | apps/zadmin/tests/test_tasks.py | Python | bsd-3-clause | 7,541 |
import json
from twisted.web import http, resource
from Tribler.community.tunnel.tunnel_community import TunnelCommunity
class DebugEndpoint(resource.Resource):
    """
    This endpoint is responsible for handling requests regarding debug information in Tribler.
    """
    def __init__(self, session):
        resource.Resource.__init__(self)
        # Mount one child resource per debug sub-endpoint.
        child_handler_dict = {"circuits": DebugCircuitsEndpoint}
        # NOTE: dict.iteritems() is Python-2-only.
        for path, child_cls in child_handler_dict.iteritems():
            self.putChild(path, child_cls(session))
class DebugCircuitsEndpoint(resource.Resource):
    """
    This class handles requests regarding the tunnel community debug information.
    """
    def __init__(self, session):
        resource.Resource.__init__(self)
        self.session = session
    def get_tunnel_community(self):
        """
        Search for the tunnel community in the dispersy communities.
        Returns the first TunnelCommunity instance found, or None.
        """
        for community in self.session.get_dispersy_instance().get_communities():
            if isinstance(community, TunnelCommunity):
                return community
        return None
    def render_GET(self, request):
        """
        .. http:get:: /debug/circuits
        A GET request to this endpoint returns information about the built circuits in the tunnel community.
            **Example request**:
            .. sourcecode:: none
                curl -X GET http://localhost:8085/debug/circuits
            **Example response**:
            .. sourcecode:: javascript
                {
                    "circuits": [
                        "id": 1234,
                        "state": "EXTENDING",
                        "goal_hops": 4,
                        "bytes_up": 45,
                        "bytes_down": 49,
                        "created": 1468176257,
                        "hops": [{
                            "host": "unknown"
                        }, {
                            "host": "39.95.147.20:8965"
                        }],
                        ...
                    ]
                }
        """
        tunnel_community = self.get_tunnel_community()
        if not tunnel_community:
            # Tunnel community not loaded: report 404 with a JSON error.
            request.setResponseCode(http.NOT_FOUND)
            return json.dumps({"error": "tunnel community not found"})
        circuits_json = []
        # NOTE: dict.iteritems() is Python-2-only.
        for circuit_id, circuit in tunnel_community.circuits.iteritems():
            item = {'id': circuit_id, 'state': str(circuit.state), 'goal_hops': circuit.goal_hops,
                    'bytes_up': circuit.bytes_up, 'bytes_down': circuit.bytes_down, 'created': circuit.creation_time}
            hops_array = []
            for hop in circuit.hops:
                # Report "host:port", anonymizing unresolved hosts.
                hops_array.append({'host': 'unknown' if 'UNKNOWN HOST' in hop.host else '%s:%s' % (hop.host, hop.port)})
            item['hops'] = hops_array
            circuits_json.append(item)
        return json.dumps({'circuits': circuits_json})
| vandenheuvel/tribler | Tribler/Core/Modules/restapi/debug_endpoint.py | Python | lgpl-3.0 | 2,937 |
"""
Many-to-one relationships that can be null
To define a many-to-one relationship that can have a null foreign key, use
``ForeignKey()`` with ``null=True``.
"""
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Reporter(models.Model):
    # Referenced by Article through a nullable ForeignKey.
    name = models.CharField(max_length=30)
    def __str__(self):
        return self.name
@python_2_unicode_compatible
class Article(models.Model):
    headline = models.CharField(max_length=100)
    # Nullable FK: an article may exist without an assigned reporter.
    reporter = models.ForeignKey(Reporter, null=True)
    class Meta:
        # Default queryset ordering.
        ordering = ('headline',)
    def __str__(self):
        return self.headline
class Car(models.Model):
    # Unique, nullable field targeted by Driver via to_field.
    make = models.CharField(max_length=100, null=True, unique=True)
class Driver(models.Model):
    # Nullable FK targeting a non-PK unique column (Car.make).
    car = models.ForeignKey(Car, to_field='make', null=True, related_name='drivers')
| BrotherPhil/django | tests/many_to_one_null/models.py | Python | bsd-3-clause | 885 |
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
| vialink/skd | keys/tests.py | Python | bsd-2-clause | 184 |
import os
import subprocess
from flask import Flask, abort, make_response
import yaml
app = Flask(__name__)
# Command used by the test endpoint to run the reference solution; its
# stdout must contain the flag from /etc/config.yml.
TEST_ARGS = ['python2', '/opt/solution.py']
@app.errorhandler(500)
def _handle_exception(error):
    # Return the abort() payload as the response body with status 500.
    # NOTE(review): `error` is an HTTPException instance here -- confirm
    # Flask renders it as intended when returned in a (body, status) tuple.
    return error, 500
def _read_static_flag():
    """Read and return the static flag from /etc/config.yml.

    Aborts the request with HTTP 500 and a descriptive message when the
    file is missing, unparsable, or lacks a 'flag' key.
    """
    try:
        with open('/etc/config.yml') as f:
            # SECURITY FIX: use safe_load instead of load -- plain
            # yaml.load() without a Loader can instantiate arbitrary
            # Python objects via YAML tags and is deprecated.
            config = yaml.safe_load(f)
        return config['flag']
    except KeyError:
        abort(500, 'Missing flag from config.yml. You have to implement a dynamic flag generation in the controller.')
    except FileNotFoundError as e:
        abort(500, 'Could not find /etc/config.yml. Please make sure it is in place.')
    except Exception as e:
        abort(500, 'An error occurred while loading config.yml. Details %s' % e)
@app.route('/%s/test' % os.environ['SECRET'], methods=['GET'])
def test():
    """
    This function is invoked automatically upon deployment to
    test if the challenge is working properly.
    Send HTTP 200 on success and HTTP 500 on failure. Use logging
    for error and information reporting.
    Please adjust if necessary.
    """
    try:
        # Run the reference solution and load the expected flag.
        output = subprocess.check_output(TEST_ARGS, stderr=subprocess.STDOUT, universal_newlines=True)
        flag = _read_static_flag()
    except OSError as e:
        # Interpreter or solution script missing / not executable.
        abort(500, 'File is not accessible: %s' % ' '.join(TEST_ARGS))
    except subprocess.CalledProcessError:
        # The solution exited with a non-zero status.
        abort(500, 'Failed to invoke %s' % ' '.join(TEST_ARGS))
    except Exception as e:
        abort(500, e)
    else:
        if output.find(flag) == -1:
            abort(500, 'The flag in config.yml does not match output.')
    return make_response('OK', 200)
if __name__ == '__main__':
    # Bind on all interfaces; port and debug mode come from the environment.
    app.run(host='0.0.0.0', port=int(os.environ['CONTROLLER_PORT']),
            debug=(os.environ['DEBUG'].lower() == 'true'))
| avatao/challenge-engine | templates/telnet/controller/opt/server.py | Python | apache-2.0 | 1,810 |
from typing import List
from ..data_store_encryptor import get_datastore_encryptor
from . import IFieldEncryptor
class StringListEncryptor(IFieldEncryptor):
    """IFieldEncryptor for list-of-string fields: each element is passed
    individually through the datastore encryptor, preserving order."""
    @staticmethod
    def encrypt(value: List[str]) -> List[str]:
        # Encrypt element-wise.
        return [get_datastore_encryptor().encrypt(string) for string in value]
    @staticmethod
    def decrypt(value: List[str]) -> List[str]:
        # Inverse of encrypt(): decrypt element-wise.
        return [get_datastore_encryptor().decrypt(string) for string in value]
| guardicore/monkey | monkey/monkey_island/cc/server_utils/encryption/field_encryptors/string_list_encryptor.py | Python | gpl-3.0 | 425 |
from itertools import permutations
import sys
def main(filepath):
    """For each non-empty line of the input file, print all character
    permutations of the stripped line, sorted and comma-separated
    (Python 2 print statement)."""
    with open(filepath, 'r') as f:
        for line in f.readlines():
            if line:
                line = line.strip()
                perms = permutations(line)
                results = [''.join(x) for x in perms]
                print ','.join(sorted(results))
if __name__ == '__main__':
    # Usage: python script.py <input-file>
    main(sys.argv[1])
| tdsymonds/codeeval | python/hard/(14) string-permutations.py | Python | mit | 403 |
# -*- coding: utf-8 -*-
import datetime
import json
import re
import urllib.parse
import httmock
import jwt
import requests
from girder.exceptions import ValidationException
from girder.models.setting import Setting
from girder.models.token import Token
from girder.models.user import User
from girder.settings import SettingKey
import girder.events
from tests import base
from girder_oauth.providers.base import ProviderBase
from girder_oauth.providers.google import Google
from girder_oauth.settings import PluginSettings
def setUpModule():
    # Enable the oauth plugin before the girder test server starts.
    base.enabledPlugins.append('oauth')
    base.startServer()
def tearDownModule():
    # Shut down the girder test server started in setUpModule().
    base.stopServer()
class OauthTest(base.TestCase):
    def setUp(self):
        super().setUp()
        # Admin user used to change OAuth-related system settings.
        self.adminUser = User().createUser(
            email='[email protected]',
            login='rocky',
            firstName='Robert',
            lastName='Balboa',
            password='adrian',
            admin=True
        )
        # Specifies which test account (typically 'new' or 'existing') a
        # redirect to a provider will simulate authentication for
        self.accountType = None
    def testDeriveLogin(self):
        """
        Unit tests the _deriveLogin method of the provider classes.
        """
        # Logins derive from the e-mail local part, lowercased and with
        # non-alphanumeric characters stripped.
        login = ProviderBase._deriveLogin('[email protected]', 'John', 'Doe')
        self.assertEqual(login, 'johndoe')
        login = ProviderBase._deriveLogin('hello#world#[email protected]', 'A', 'B')
        self.assertEqual(login, 'helloworldfoo')
        # An explicit preferred login wins over derivation.
        login = ProviderBase._deriveLogin('[email protected]', 'A', 'B', 'user2')
        self.assertEqual(login, 'user2')
        # This should conflict with the saved admin user
        login = ProviderBase._deriveLogin('[email protected]', 'Robert', 'Balboa', 'rocky')
        self.assertEqual(login, 'rocky1')
    def _testSettings(self, providerInfo):
        # Shared helper: exercise the provider settings endpoints --
        # empty provider listing, provider enabled without credentials
        # (server error), then a fully-configured provider.
        Setting().set(SettingKey.REGISTRATION_POLICY, 'closed')
        self.accountType = 'new'
        # We should get an empty listing when no providers are set up
        params = {
            'key': PluginSettings.PROVIDERS_ENABLED,
            'value': []
        }
        resp = self.request('/system/setting', user=self.adminUser, method='PUT', params=params)
        self.assertStatusOk(resp)
        resp = self.request('/oauth/provider', exception=True, params={
            'redirect': 'http://localhost/#foo/bar',
            'list': True
        })
        self.assertStatusOk(resp)
        self.assertFalse(resp.json)
        # Turn on provider, but don't set other settings
        params = {
            'list': json.dumps([{
                'key': PluginSettings.PROVIDERS_ENABLED,
                'value': [providerInfo['id']]
            }])
        }
        resp = self.request('/system/setting', user=self.adminUser, method='PUT', params=params)
        self.assertStatusOk(resp)
        resp = self.request('/oauth/provider', exception=True, params={
            'redirect': 'http://localhost/#foo/bar'
        })
        self.assertStatus(resp, 500)
        # Set up provider normally
        params = {
            'list': json.dumps([
                {
                    'key': PluginSettings.PROVIDERS_ENABLED,
                    'value': [providerInfo['id']]
                }, {
                    'key': providerInfo['client_id']['key'],
                    'value': providerInfo['client_id']['value']
                }, {
                    'key': providerInfo['client_secret']['key'],
                    'value': providerInfo['client_secret']['value']
                }
            ])
        }
        resp = self.request('/system/setting', user=self.adminUser, method='PUT', params=params)
        self.assertStatusOk(resp)
        # No need to re-fetch and test all of these settings values; they will
        # be implicitly tested later
    def _testOauthEventHandling(self, providerInfo):
        """
        Verify that 'oauth.auth_callback.before'/'.after' events fire and
        that calling event.preventDefault() in either handler suppresses
        the login (no girderToken cookie is set).

        Assumes the provider's HTTP endpoints are already mocked by the
        caller (run inside an httmock.HTTMock context).
        """
        self.accountType = 'existing'

        def _getCallbackParams(providerInfo, redirect):
            # Follow the provider's (mocked) authorization URL without
            # following redirects, and pull the query parameters it would
            # send back to our /oauth/<provider>/callback endpoint.
            resp = self.request('/oauth/provider', params={
                'redirect': redirect,
                'list': True
            })
            providerResp = resp.json[0]
            resp = requests.get(providerResp['url'], allow_redirects=False)
            callbackLoc = urllib.parse.urlparse(resp.headers['location'])
            callbackLocQuery = urllib.parse.parse_qs(callbackLoc.query)
            callbackParams = {
                key: val[0] for key, val in callbackLocQuery.items()
            }
            return callbackParams

        redirect = 'http://localhost/#foo/bar?token={girderToken}'

        class EventHandler:
            # Records which handler ran last, so the test can assert on it.
            def __init__(self):
                self.state = ''

            def _oauth_before_stop(self, event):
                # 'before' handler that also cancels the login
                self.state = 'been in "before"'
                event.preventDefault()

            def _oauth_before(self, event):
                # 'before' handler that lets the login proceed
                self.state = 'been in "before"'

            def _oauth_after(self, event):
                # 'after' handler that cancels the cookie/redirect step
                self.state = 'been in "after"'
                event.preventDefault()

        event_handler = EventHandler()
        params = _getCallbackParams(providerInfo, redirect)
        # Case 1: preventDefault() in the 'before' handler stops the login,
        # so the 'after' handler never runs and no token cookie is set.
        with girder.events.bound(
            'oauth.auth_callback.before',
            'oauth_before',
            event_handler._oauth_before_stop
        ), girder.events.bound(
            'oauth.auth_callback.after',
            'oauth_after',
            event_handler._oauth_after
        ):
            resp = self.request(
                '/oauth/%s/callback' % providerInfo['id'], params=params, isJson=False)
            self.assertStatus(resp, 303)
            self.assertTrue('girderToken' not in resp.cookie)
            self.assertEqual(event_handler.state, 'been in "before"')
        params = _getCallbackParams(providerInfo, redirect)
        # Case 2: 'before' passes through, 'after' runs and preventDefault()s
        # there, which still suppresses the cookie.
        with girder.events.bound(
            'oauth.auth_callback.before',
            'oauth_before',
            event_handler._oauth_before
        ), girder.events.bound(
            'oauth.auth_callback.after',
            'oauth_after',
            event_handler._oauth_after
        ):
            resp = self.request(
                '/oauth/%s/callback' % providerInfo['id'], params=params, isJson=False)
            self.assertStatus(resp, 303)
            self.assertTrue('girderToken' not in resp.cookie)
            self.assertEqual(event_handler.state, 'been in "after"')
    def _testOauthTokenAsParam(self, providerInfo):
        """
        Verify the '{girderToken}' placeholder in the redirect URL: when
        present, the callback substitutes the real token value into the
        Location header; an unknown placeholder is left untouched.

        Assumes the provider's HTTP endpoints are already mocked by the
        caller (run inside an httmock.HTTMock context).
        """
        self.accountType = 'existing'

        def _getCallbackParams(providerInfo, redirect):
            # Same flow as in _testOauthEventHandling, but with sanity
            # assertions on each intermediate response.
            resp = self.request('/oauth/provider', params={
                'redirect': redirect,
                'list': True
            })
            self.assertStatusOk(resp)
            providerResp = resp.json[0]
            resp = requests.get(providerResp['url'], allow_redirects=False)
            self.assertEqual(resp.status_code, 302)
            callbackLoc = urllib.parse.urlparse(resp.headers['location'])
            self.assertEqual(
                callbackLoc.path, r'/api/v1/oauth/%s/callback' % providerInfo['id'])
            callbackLocQuery = urllib.parse.parse_qs(callbackLoc.query)
            self.assertNotHasKeys(callbackLocQuery, ('error',))
            callbackParams = {
                key: val[0] for key, val in callbackLocQuery.items()
            }
            return callbackParams

        # Recognized placeholder: expanded to the actual girderToken value
        redirect = 'http://localhost/#foo/bar?token={girderToken}'
        params = _getCallbackParams(providerInfo, redirect)
        resp = self.request(
            '/oauth/%s/callback' % providerInfo['id'], params=params, isJson=False)
        self.assertStatus(resp, 303)
        self.assertTrue('girderToken' in resp.cookie)
        self.assertEqual(
            resp.headers['Location'],
            redirect.format(girderToken=resp.cookie['girderToken'].value))
        # Unrecognized placeholder: passed through verbatim
        redirect = 'http://localhost/#foo/bar?token={foobar}'
        params = _getCallbackParams(providerInfo, redirect)
        resp = self.request(
            '/oauth/%s/callback' % providerInfo['id'], params=params, isJson=False)
        self.assertStatus(resp, 303)
        self.assertTrue('girderToken' in resp.cookie)
        self.assertEqual(resp.headers['Location'], redirect)
    def _testOauth(self, providerInfo):
        """
        Full end-to-end OAuth test for a single provider.

        Drives settings setup, the provider listing (old and new formats),
        every callback failure mode (bad provider, missing params, provider
        error, bad/expired CSRF token, missing redirect, bad code, closed
        registration), then a successful login for both an existing user and
        a newly-registered one, and finally password-reset behavior for the
        OAuth-only new user.

        Must be run inside an httmock.HTTMock context mocking the provider.

        :param providerInfo: provider description dict built by the caller.
        :returns: (existing, new) user documents from the two logins.
        """
        # Close registration to start off, and simulate a new user
        self._testSettings(providerInfo)
        # Make sure that if no list param is passed, we receive the old format
        resp = self.request('/oauth/provider', params={
            'redirect': 'http://localhost/#foo/bar'
        })
        self.assertStatusOk(resp)
        self.assertIsInstance(resp.json, dict)
        self.assertEqual(len(resp.json), 1)
        self.assertIn(providerInfo['name'], resp.json)
        self.assertRegex(resp.json[providerInfo['name']], providerInfo['url_re'])

        # This will need to be called several times, to get fresh tokens
        def getProviderResp():
            resp = self.request('/oauth/provider', params={
                'redirect': 'http://localhost/#foo/bar',
                'list': True
            })
            self.assertStatusOk(resp)
            self.assertIsInstance(resp.json, list)
            self.assertEqual(len(resp.json), 1)
            providerResp = resp.json[0]
            self.assertSetEqual(set(providerResp.keys()), {'id', 'name', 'url'})
            self.assertEqual(providerResp['id'], providerInfo['id'])
            self.assertEqual(providerResp['name'], providerInfo['name'])
            self.assertRegex(providerResp['url'], providerInfo['url_re'])
            redirectParams = urllib.parse.parse_qs(
                urllib.parse.urlparse(providerResp['url']).query)
            # 'state' is '<csrfTokenId>.<redirect>'; validate both halves
            csrfTokenParts = redirectParams['state'][0].partition('.')
            token = Token().load(csrfTokenParts[0], force=True, objectId=False)
            # The CSRF token must be short-lived
            self.assertLess(
                token['expires'],
                datetime.datetime.utcnow() + datetime.timedelta(days=0.30))
            self.assertEqual(csrfTokenParts[2], 'http://localhost/#foo/bar')
            return providerResp

        # Try the new format listing
        getProviderResp()
        # Try callback, for a nonexistent provider
        resp = self.request('/oauth/foobar/callback')
        self.assertStatus(resp, 400)
        # Try callback, without providing any params
        resp = self.request('/oauth/%s/callback' % providerInfo['id'])
        self.assertStatus(resp, 400)
        # Try callback, providing params as though the provider failed
        resp = self.request(
            '/oauth/%s/callback' % providerInfo['id'],
            params={
                'code': None,
                'error': 'some_custom_error',
            }, exception=True)
        self.assertStatus(resp, 502)
        self.assertEqual(resp.json['message'], "Provider returned error: 'some_custom_error'.")

        # This will need to be called several times, to use fresh tokens
        def getCallbackParams(providerResp):
            # Hit the mocked provider authorization URL and capture the
            # query params of the redirect back to our callback endpoint.
            resp = requests.get(providerResp['url'], allow_redirects=False)
            self.assertEqual(resp.status_code, 302)
            callbackLoc = urllib.parse.urlparse(resp.headers['location'])
            self.assertEqual(
                callbackLoc.path, r'/api/v1/oauth/%s/callback' % providerInfo['id'])
            callbackLocQuery = urllib.parse.parse_qs(callbackLoc.query)
            self.assertNotHasKeys(callbackLocQuery, ('error',))
            callbackParams = {
                key: val[0] for key, val in callbackLocQuery.items()
            }
            return callbackParams

        # Call (simulated) external provider
        getCallbackParams(getProviderResp())
        # Try callback, with incorrect CSRF token
        params = getCallbackParams(getProviderResp())
        params['state'] = 'something_wrong'
        resp = self.request('/oauth/%s/callback' % providerInfo['id'], params=params)
        self.assertStatus(resp, 403)
        self.assertTrue(
            resp.json['message'].startswith('Invalid CSRF token'))
        # Try callback, with expired CSRF token
        params = getCallbackParams(getProviderResp())
        token = Token().load(params['state'].partition('.')[0], force=True, objectId=False)
        token['expires'] -= datetime.timedelta(days=1)
        Token().save(token)
        resp = self.request('/oauth/%s/callback' % providerInfo['id'], params=params)
        self.assertStatus(resp, 403)
        self.assertTrue(resp.json['message'].startswith('Expired CSRF token'))
        # Try callback, with a valid CSRF token but no redirect
        params = getCallbackParams(getProviderResp())
        params['state'] = params['state'].partition('.')[0]
        resp = self.request('/oauth/%s/callback' % providerInfo['id'], params=params)
        self.assertStatus(resp, 400)
        self.assertTrue(resp.json['message'].startswith('No redirect location'))
        # Try callback, with incorrect code
        params = getCallbackParams(getProviderResp())
        params['code'] = 'something_wrong'
        resp = self.request('/oauth/%s/callback' % providerInfo['id'], params=params)
        self.assertStatus(resp, 502)
        # Try callback, with real parameters from provider, but still for the
        # 'new' account
        params = getCallbackParams(getProviderResp())
        resp = self.request('/oauth/%s/callback' % providerInfo['id'], params=params)
        self.assertStatus(resp, 400)
        self.assertTrue(
            resp.json['message'].startswith('Registration on this instance is closed.'))

        # This will need to be called several times, and will do a normal login
        def doOauthLogin(accountType):
            # self.accountType steers which mocked account the provider
            # endpoints return for this login attempt.
            self.accountType = accountType
            params = getCallbackParams(getProviderResp())
            resp = self.request(
                '/oauth/%s/callback' % providerInfo['id'], params=params, isJson=False)
            self.assertStatus(resp, 303)
            self.assertEqual(resp.headers['Location'], 'http://localhost/#foo/bar')
            self.assertTrue('girderToken' in resp.cookie)
            resp = self.request('/user/me', token=resp.cookie['girderToken'].value)
            user = resp.json
            self.assertStatusOk(resp)
            self.assertEqual(
                user['email'], providerInfo['accounts'][accountType]['user']['email'])
            self.assertEqual(
                user['login'], providerInfo['accounts'][accountType]['user']['login'])
            self.assertEqual(
                user['firstName'], providerInfo['accounts'][accountType]['user']['firstName'])
            self.assertEqual(
                user['lastName'], providerInfo['accounts'][accountType]['user']['lastName'])
            return user

        # Try callback for the 'existing' account, which should succeed
        existing = doOauthLogin('existing')
        # Hit validation exception on ignore registration policy setting
        with self.assertRaises(ValidationException):
            Setting().set(PluginSettings.IGNORE_REGISTRATION_POLICY, 'foo')
        # Try callback for the 'new' account, with registration policy ignored
        Setting().set(PluginSettings.IGNORE_REGISTRATION_POLICY, True)
        new = doOauthLogin('new')
        # Password login for 'new' OAuth-only user should fail gracefully
        newUser = providerInfo['accounts']['new']['user']
        resp = self.request('/user/authentication', basicAuth='%s:mypasswd' % newUser['login'])
        self.assertStatus(resp, 400)
        self.assertTrue(resp.json['message'].startswith("You don't have a password."))
        # Reset password for 'new' OAuth-only user should work
        self.assertTrue(base.mockSmtp.isMailQueueEmpty())
        resp = self.request(
            '/user/password/temporary', method='PUT', params={
                'email': providerInfo['accounts']['new']['user']['email']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['message'], 'Sent temporary access email.')
        self.assertTrue(base.mockSmtp.waitForMail())
        msg = base.mockSmtp.getMail(parse=True)
        # Pull out the auto-generated token from the email
        body = msg.get_payload(decode=True).decode('utf8')
        search = re.search('<a href="(.*)">', body)
        link = search.group(1)
        linkParts = link.split('/')
        userId = linkParts[-3]
        tokenId = linkParts[-1]
        tempToken = Token().load(tokenId, force=True, objectId=False)
        resp = self.request(
            '/user/password/temporary/' + userId, method='GET', params={'token': tokenId})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['user']['login'], newUser['login'])
        # We should now be able to change the password
        resp = self.request(
            '/user/password', method='PUT', user=resp.json['user'], params={
                'old': tokenId,
                'new': 'mypasswd'
            })
        self.assertStatusOk(resp)
        # The temp token should get deleted on password change
        token = Token().load(tempToken, force=True, objectId=False)
        self.assertEqual(token, None)
        # Password login for 'new' OAuth-only user should now succeed
        resp = self.request('/user/authentication', basicAuth='%s:mypasswd' % newUser['login'])
        self.assertStatusOk(resp)
        return existing, new
@httmock.all_requests
def mockOtherRequest(self, url, request):
raise Exception('Unexpected url %s' % str(request.url))
def testGoogleOauth(self): # noqa
providerInfo = {
'id': 'google',
'name': 'Google',
'client_id': {
'key': PluginSettings.GOOGLE_CLIENT_ID,
'value': 'google_test_client_id'
},
'client_secret': {
'key': PluginSettings.GOOGLE_CLIENT_SECRET,
'value': 'google_test_client_secret'
},
'allowed_callback_re': r'^http://127\.0\.0\.1(?::\d+)?/api/v1/oauth/google/callback$',
'url_re': r'^https://accounts\.google\.com/o/oauth2/v2/auth',
'accounts': {
'existing': {
'auth_code': 'google_existing_auth_code',
'access_token': 'google_existing_test_token',
'user': {
'login': self.adminUser['login'],
'email': self.adminUser['email'],
'firstName': self.adminUser['firstName'],
'lastName': self.adminUser['lastName'],
'oauth': {
'provider': 'google',
'id': '5326'
}
}
},
'new': {
'auth_code': 'google_new_auth_code',
'access_token': 'google_new_test_token',
'user': {
# this login is not provided by Google, but will be
# created internally by _deriveLogin
'login': 'creed',
'email': '[email protected]',
'firstName': 'Apollo',
'lastName': 'Creed',
'oauth': {
'provider': 'google',
'id': 'the1best'
}
}
}
}
}
# Test inclusion of custom scope
Google.addScopes(['custom_scope', 'foo'])
@httmock.urlmatch(scheme='https', netloc=r'^accounts\.google\.com$',
path=r'^/o/oauth2/v2/auth$', method='GET')
def mockGoogleRedirect(url, request):
try:
params = urllib.parse.parse_qs(url.query)
self.assertEqual(params['response_type'], ['code'])
self.assertEqual(params['access_type'], ['online'])
self.assertEqual(params['scope'], ['openid profile email custom_scope foo'])
except (KeyError, AssertionError) as e:
return {
'status_code': 400,
'content': json.dumps({
'error': repr(e)
})
}
try:
self.assertEqual(params['client_id'], [providerInfo['client_id']['value']])
except (KeyError, AssertionError) as e:
return {
'status_code': 401,
'content': json.dumps({
'error': repr(e)
})
}
try:
self.assertRegex(
params['redirect_uri'][0], providerInfo['allowed_callback_re'])
state = params['state'][0]
# Nothing to test for state, since provider doesn't care
except (KeyError, AssertionError) as e:
return {
'status_code': 400,
'content': json.dumps({
'error': repr(e)
})
}
returnQuery = urllib.parse.urlencode({
'state': state,
'code': providerInfo['accounts'][self.accountType]['auth_code']
})
return {
'status_code': 302,
'headers': {
'Location': '%s?%s' % (params['redirect_uri'][0], returnQuery)
}
}
@httmock.urlmatch(scheme='https', netloc=r'^oauth2\.googleapis\.com$',
path=r'^/token$', method='POST')
def mockGoogleToken(url, request):
try:
params = urllib.parse.parse_qs(request.body)
self.assertEqual(params['client_id'], [providerInfo['client_id']['value']])
except (KeyError, AssertionError) as e:
return {
'status_code': 401,
'content': json.dumps({
'error': repr(e)
})
}
try:
self.assertEqual(params['grant_type'], ['authorization_code'])
self.assertEqual(params['client_secret'], [providerInfo['client_secret']['value']])
self.assertRegex(
params['redirect_uri'][0], providerInfo['allowed_callback_re'])
for account in providerInfo['accounts'].values():
if account['auth_code'] == params['code'][0]:
break
else:
self.fail()
except (KeyError, AssertionError) as e:
return {
'status_code': 400,
'content': json.dumps({
'error': repr(e)
})
}
return json.dumps({
'token_type': 'Bearer',
'access_token': account['access_token'],
'expires_in': 3546,
'id_token': jwt.encode({
'sub': account['user']['oauth']['id'],
'email': account['user']['email'],
}, 'secret').decode()
})
@httmock.urlmatch(scheme='https', netloc=r'^accounts\.google\.com$',
path=r'^/.well-known/openid-configuration$', method='GET')
def mockGoogleDiscovery(url, request):
return json.dumps({
'userinfo_endpoint': 'https://openidconnect.googleapis.com/v1/userinfo'
})
@httmock.urlmatch(scheme='https', netloc=r'^openidconnect\.googleapis\.com$',
path=r'^/v1/userinfo$', method='GET')
def mockGoogleApi(url, request):
for account in providerInfo['accounts'].values():
if 'Bearer %s' % account['access_token'] == request.headers['Authorization']:
break
else:
self.fail()
return json.dumps({
'sub': account['user']['oauth']['id'],
'given_name': account['user']['firstName'],
'family_name': account['user']['lastName'],
'email': account['user']['email']
})
with httmock.HTTMock(
mockGoogleRedirect,
mockGoogleToken,
mockGoogleDiscovery,
mockGoogleApi,
# Must keep 'mockOtherRequest' last
self.mockOtherRequest
):
self._testOauth(providerInfo)
    def testGithubOauth(self):  # noqa
        """
        Test the GitHub OAuth provider end-to-end against mocked GitHub
        endpoints, then re-run the flow with a profile that has no display
        name (login is used for both first and last name).
        """
        providerInfo = {
            'id': 'github',
            'name': 'GitHub',
            'client_id': {
                'key': PluginSettings.GITHUB_CLIENT_ID,
                'value': 'github_test_client_id'
            },
            'client_secret': {
                'key': PluginSettings.GITHUB_CLIENT_SECRET,
                'value': 'github_test_client_secret'
            },
            'allowed_callback_re':
                r'^http://127\.0\.0\.1(?::\d+)?/api/v1/oauth/github/callback$',
            'url_re': r'^https://github\.com/login/oauth/authorize',
            'accounts': {
                'existing': {
                    'auth_code': 'github_existing_auth_code',
                    'access_token': 'github_existing_test_token',
                    'user': {
                        'login': self.adminUser['login'],
                        'email': self.adminUser['email'],
                        'firstName': self.adminUser['firstName'],
                        'lastName': self.adminUser['lastName'],
                        'oauth': {
                            'provider': 'github',
                            'id': '2399'
                        }
                    }
                },
                'new': {
                    'auth_code': 'github_new_auth_code',
                    'access_token': 'github_new_test_token',
                    'user': {
                        # login may be provided externally by GitHub; for
                        # simplicity here, do not use a username with whitespace
                        # or underscores
                        'login': 'drago',
                        'email': '[email protected]',
                        'firstName': 'Ivan',
                        'lastName': 'Drago',
                        'oauth': {
                            'provider': 'github',
                            'id': 1985
                        }
                    }
                }
            }
        }

        @httmock.urlmatch(scheme='https', netloc='^github.com$',
                          path='^/login/oauth/authorize$', method='GET')
        def mockGithubRedirect(url, request):
            # Validate the authorization request; on success redirect back to
            # the girder callback with an auth code, on scope/state failure
            # redirect back with an error query (GitHub-style).
            redirectUri = None
            try:
                params = urllib.parse.parse_qs(url.query)
                # Check redirect_uri first, so other errors can still redirect
                redirectUri = params['redirect_uri'][0]
                self.assertEqual(params['client_id'], [providerInfo['client_id']['value']])
            except (KeyError, AssertionError) as e:
                return {
                    'status_code': 404,
                    'content': json.dumps({
                        'error': repr(e)
                    })
                }
            try:
                self.assertRegex(redirectUri, providerInfo['allowed_callback_re'])
                state = params['state'][0]
                # Nothing to test for state, since provider doesn't care
                self.assertEqual(params['scope'], ['user:email'])
            except (KeyError, AssertionError) as e:
                returnQuery = urllib.parse.urlencode({
                    'error': repr(e),
                })
            else:
                returnQuery = urllib.parse.urlencode({
                    'state': state,
                    'code': providerInfo['accounts'][self.accountType]['auth_code']
                })
            return {
                'status_code': 302,
                'headers': {
                    'Location': '%s?%s' % (redirectUri, returnQuery)
                }
            }

        @httmock.urlmatch(scheme='https', netloc='^github.com$',
                          path='^/login/oauth/access_token$', method='POST')
        def mockGithubToken(url, request):
            # Validate the code-for-token exchange; errors are reported in the
            # JSON body (GitHub returns 200 even on token-exchange failure).
            try:
                self.assertEqual(request.headers['Accept'], 'application/json')
                params = urllib.parse.parse_qs(request.body)
                self.assertEqual(params['client_id'], [providerInfo['client_id']['value']])
            except (KeyError, AssertionError) as e:
                return {
                    'status_code': 404,
                    'content': json.dumps({
                        'error': repr(e)
                    })
                }
            try:
                for account in providerInfo['accounts'].values():
                    if account['auth_code'] == params['code'][0]:
                        break
                else:
                    self.fail()
                self.assertEqual(params['client_secret'], [providerInfo['client_secret']['value']])
                self.assertRegex(
                    params['redirect_uri'][0], providerInfo['allowed_callback_re'])
            except (KeyError, AssertionError) as e:
                returnBody = json.dumps({
                    'error': repr(e),
                    'error_description': repr(e)
                })
            else:
                returnBody = json.dumps({
                    'token_type': 'bearer',
                    'access_token': account['access_token'],
                    'scope': 'user:email'
                })
            return {
                'status_code': 200,
                'headers': {
                    'Content-Type': 'application/json'
                },
                'content': returnBody
            }

        @httmock.urlmatch(scheme='https', netloc='^api.github.com$', path='^/user$', method='GET')
        def mockGithubApiUser(url, request):
            # Resolve the mock account by 'token <...>' Authorization header.
            try:
                for account in providerInfo['accounts'].values():
                    if 'token %s' % account['access_token'] == request.headers['Authorization']:
                        break
                else:
                    self.fail()
            except AssertionError as e:
                return {
                    'status_code': 401,
                    'content': json.dumps({
                        'message': repr(e)
                    })
                }
            return json.dumps({
                'id': account['user']['oauth']['id'],
                'login': account['user']['login'],
                'name': '%s %s' % (account['user']['firstName'], account['user']['lastName'])
            })

        @httmock.urlmatch(scheme='https', netloc='^api.github.com$',
                          path='^/user/emails$', method='GET')
        def mockGithubApiEmail(url, request):
            # Return a non-primary plus the primary (expected) email, so the
            # provider must pick the primary verified address.
            try:
                for account in providerInfo['accounts'].values():
                    if 'token %s' % account['access_token'] == request.headers['Authorization']:
                        break
                else:
                    self.fail()
            except AssertionError as e:
                return {
                    'status_code': 401,
                    'content': json.dumps({
                        'message': repr(e)
                    })
                }
            return json.dumps([
                {
                    'primary': False,
                    'email': '[email protected]',
                    'verified': True
                }, {
                    'primary': True,
                    'email': account['user']['email'],
                    'verified': True
                }
            ])

        with httmock.HTTMock(
            mockGithubRedirect,
            mockGithubToken,
            mockGithubApiUser,
            mockGithubApiEmail,
            # Must keep 'mockOtherRequest' last
            self.mockOtherRequest
        ):
            self._testOauth(providerInfo)

        @httmock.urlmatch(scheme='https', netloc='^api.github.com$', path='^/user$', method='GET')
        def mockGithubUserWithoutName(url, request):
            # Same as mockGithubApiUser, but with a null display name.
            try:
                for account in providerInfo['accounts'].values():
                    if 'token %s' % account['access_token'] == request.headers['Authorization']:
                        break
                else:
                    self.fail()
            except AssertionError as e:
                return {
                    'status_code': 401,
                    'content': json.dumps({
                        'message': repr(e)
                    })
                }
            return json.dumps({
                'id': account['user']['oauth']['id'],
                'login': account['user']['login'],
                'name': None
            })

        self.setUp()  # Call to reset everything so we can call _testOauth again
        # If no name is provided, we expect to use the github login for both
        providerInfo['accounts']['existing']['user']['lastName'] = 'rocky'
        providerInfo['accounts']['existing']['user']['firstName'] = 'rocky'
        providerInfo['accounts']['new']['user']['lastName'] = 'drago'
        providerInfo['accounts']['new']['user']['firstName'] = 'drago'
        with httmock.HTTMock(
            mockGithubRedirect,
            mockGithubToken,
            mockGithubUserWithoutName,
            mockGithubApiEmail,
            # Must keep 'mockOtherRequest' last
            self.mockOtherRequest
        ):
            self._testOauth(providerInfo)
    def testGlobusOauth(self):  # noqa
        """
        Test the Globus OAuth provider end-to-end against mocked Globus Auth
        endpoints; also exercises the token-as-redirect-param behavior and
        the auth-callback event hooks for this provider.
        """
        providerInfo = {
            'id': 'globus',
            'name': 'Globus',
            'client_id': {
                'key': PluginSettings.GLOBUS_CLIENT_ID,
                'value': 'globus_test_client_id'
            },
            'client_secret': {
                'key': PluginSettings.GLOBUS_CLIENT_SECRET,
                'value': 'globus_test_client_secret'
            },
            'scope': 'urn:globus:auth:scope:auth.globus.org:view_identities openid profile email',
            'allowed_callback_re':
                r'^http://127\.0\.0\.1(?::\d+)?/api/v1/oauth/globus/callback$',
            'url_re': r'^https://auth.globus.org/v2/oauth2/authorize',
            'accounts': {
                'existing': {
                    'auth_code': 'globus_existing_auth_code',
                    'access_token': 'globus_existing_test_token',
                    'id_token': 'globus_exisiting_id_token',
                    'user': {
                        'login': self.adminUser['login'],
                        'email': self.adminUser['email'],
                        'firstName': self.adminUser['firstName'],
                        'lastName': self.adminUser['lastName'],
                        'oauth': {
                            'provider': 'globus',
                            'id': '2399'
                        }
                    }
                },
                'new': {
                    'auth_code': 'globus_new_auth_code',
                    'access_token': 'globus_new_test_token',
                    'id_token': 'globus_new_id_token',
                    'user': {
                        'login': 'metaphor',
                        'email': '[email protected]',
                        'firstName': 'Ivan',
                        'lastName': 'Drago',
                        'oauth': {
                            'provider': 'globus',
                            'id': 1985
                        }
                    }
                }
            }
        }

        @httmock.urlmatch(scheme='https', netloc='^auth.globus.org$',
                          path='^/v2/oauth2/authorize$', method='GET')
        def mockGlobusRedirect(url, request):
            # Validate the authorization request, then redirect back to the
            # girder callback with a provider auth code.
            try:
                params = urllib.parse.parse_qs(url.query)
                self.assertEqual(params['response_type'], ['code'])
                self.assertEqual(params['access_type'], ['online'])
                self.assertEqual(params['scope'], [providerInfo['scope']])
            except (KeyError, AssertionError) as e:
                return {
                    'status_code': 400,
                    'content': json.dumps({
                        'error': repr(e)
                    })
                }
            try:
                self.assertEqual(params['client_id'], [providerInfo['client_id']['value']])
            except (KeyError, AssertionError) as e:
                return {
                    'status_code': 401,
                    'content': json.dumps({
                        'error': repr(e)
                    })
                }
            try:
                self.assertRegex(
                    params['redirect_uri'][0], providerInfo['allowed_callback_re'])
                state = params['state'][0]
                # Nothing to test for state, since provider doesn't care
            except (KeyError, AssertionError) as e:
                return {
                    'status_code': 400,
                    'content': json.dumps({
                        'error': repr(e)
                    })
                }
            returnQuery = urllib.parse.urlencode({
                'state': state,
                'code': providerInfo['accounts'][self.accountType]['auth_code']
            })
            return {
                'status_code': 302,
                'headers': {
                    'Location': '%s?%s' % (params['redirect_uri'][0], returnQuery)
                }
            }

        @httmock.urlmatch(scheme='https', netloc='^auth.globus.org$',
                          path='^/v2/oauth2/userinfo$', method='GET')
        def mockGlobusUserInfo(url, request):
            # Resolve the mock account by bearer token and return its OpenID
            # userinfo document.
            try:
                for account in providerInfo['accounts'].values():
                    if 'Bearer %s' % account['access_token'] == \
                            request.headers['Authorization']:
                        break
                else:
                    self.fail()
            except AssertionError as e:
                return {
                    'status_code': 401,
                    'content': json.dumps({
                        'message': repr(e)
                    })
                }
            user = account['user']
            return json.dumps({
                'email': user['email'],
                'preferred_username': user['email'],
                'sub': user['oauth']['id'],
                'name': '{firstName} {lastName}'.format(**user),
            })

        @httmock.urlmatch(scheme='https', netloc='^auth.globus.org$',
                          path='^/v2/oauth2/token$', method='POST')
        def mockGlobusToken(url, request):
            # Validate the code-for-token exchange; errors are reported in the
            # 200-status JSON body.
            try:
                self.assertEqual(request.headers['Accept'], 'application/json')
                params = urllib.parse.parse_qs(request.body)
                self.assertEqual(params['client_id'], [providerInfo['client_id']['value']])
            except (KeyError, AssertionError) as e:
                return {
                    'status_code': 404,
                    'content': json.dumps({
                        'error': repr(e)
                    })
                }
            try:
                for account in providerInfo['accounts'].values():
                    if account['auth_code'] == params['code'][0]:
                        break
                else:
                    self.fail()
                self.assertEqual(params['client_secret'], [providerInfo['client_secret']['value']])
                self.assertRegex(
                    params['redirect_uri'][0], providerInfo['allowed_callback_re'])
            except (KeyError, AssertionError) as e:
                returnBody = json.dumps({
                    'error': repr(e),
                    'error_description': repr(e)
                })
            else:
                returnBody = json.dumps({
                    'access_token': account['access_token'],
                    'resource_server': 'auth.globus.org',
                    'expires_in': 3600,
                    'token_type': 'bearer',
                    'scope': 'urn:globus:auth:scope:auth.globus.org:monitor_ongoing',
                    'refresh_token': 'blah',
                    'id_token': account['id_token'],
                    'state': 'provided_by_client_to_prevent_replay_attacks',
                    'other_tokens': [],
                })
            return {
                'status_code': 200,
                'headers': {
                    'Content-Type': 'application/json'
                },
                'content': returnBody
            }

        with httmock.HTTMock(
            mockGlobusRedirect,
            mockGlobusUserInfo,
            mockGlobusToken,
            # Must keep 'mockOtherRequest' last
            self.mockOtherRequest
        ):
            self._testOauth(providerInfo)
            self._testOauthTokenAsParam(providerInfo)
            self._testOauthEventHandling(providerInfo)
    def testLinkedinOauth(self):  # noqa
        """
        Test the LinkedIn OAuth provider end-to-end against mocked LinkedIn
        endpoints (authorization, accessToken, and v1 people API).
        """
        providerInfo = {
            'id': 'linkedin',
            'name': 'LinkedIn',
            'client_id': {
                'key': PluginSettings.LINKEDIN_CLIENT_ID,
                'value': 'linkedin_test_client_id'
            },
            'client_secret': {
                'key': PluginSettings.LINKEDIN_CLIENT_SECRET,
                'value': 'linkedin_test_client_secret'
            },
            'allowed_callback_re':
                r'^http://127\.0\.0\.1(?::\d+)?/api/v1/oauth/linkedin/callback$',
            'url_re': r'^https://www\.linkedin\.com/uas/oauth2/authorization',
            'accounts': {
                'existing': {
                    'auth_code': 'linkedin_existing_auth_code',
                    'access_token': 'linkedin_existing_test_token',
                    'user': {
                        'login': self.adminUser['login'],
                        'email': self.adminUser['email'],
                        'firstName': self.adminUser['firstName'],
                        'lastName': self.adminUser['lastName'],
                        'oauth': {
                            'provider': 'linkedin',
                            'id': '42kD-5H'
                        }
                    }
                },
                'new': {
                    'auth_code': 'linkedin_new_auth_code',
                    'access_token': 'linkedin_new_test_token',
                    'user': {
                        # this login is not provided by LinkedIn, but will be
                        # created internally by _deriveLogin
                        'login': 'clubber',
                        'email': '[email protected]',
                        'firstName': 'James',
                        'lastName': 'Lang',
                        'oauth': {
                            'provider': 'linkedin',
                            'id': '634pity-fool4'
                        }
                    }
                }
            }
        }

        @httmock.urlmatch(scheme='https', netloc='^www.linkedin.com$',
                          path='^/uas/oauth2/authorization$', method='GET')
        def mockLinkedinRedirect(url, request):
            # Validate the authorization request; on failure redirect back
            # with an error query, on success with state + auth code.
            try:
                params = urllib.parse.parse_qs(url.query)
                self.assertEqual(params['client_id'], [providerInfo['client_id']['value']])
                self.assertRegex(
                    params['redirect_uri'][0], providerInfo['allowed_callback_re'])
            except (KeyError, AssertionError) as e:
                return {
                    'status_code': 200,
                    'content': json.dumps({
                        'error': repr(e)
                    })
                }
            try:
                self.assertEqual(params['response_type'], ['code'])
                self.assertEqual(
                    params['scope'][0].split(' '), ['r_basicprofile', 'r_emailaddress'])
                state = params['state'][0]
                # Nothing to test for state, since provider doesn't care
            except (KeyError, AssertionError) as e:
                returnQuery = urllib.parse.urlencode({
                    'error': repr(e),
                    'error_description': repr(e)
                })
            else:
                returnQuery = urllib.parse.urlencode({
                    'state': state,
                    'code': providerInfo['accounts'][self.accountType]['auth_code']
                })
            return {
                'status_code': 302,
                'headers': {
                    'Location': '%s?%s' % (params['redirect_uri'][0], returnQuery)
                }
            }

        @httmock.urlmatch(scheme='https', netloc='^www.linkedin.com$',
                          path='^/uas/oauth2/accessToken$', method='POST')
        def mockLinkedinToken(url, request):
            # Validate the code-for-token exchange (form-encoded POST).
            try:
                self.assertEqual(
                    request.headers['Content-Type'], 'application/x-www-form-urlencoded')
                params = urllib.parse.parse_qs(request.body)
                self.assertEqual(params['grant_type'], ['authorization_code'])
                self.assertEqual(params['client_id'], [providerInfo['client_id']['value']])
                for account in providerInfo['accounts'].values():
                    if account['auth_code'] == params['code'][0]:
                        break
                else:
                    self.fail()
                self.assertRegex(
                    params['redirect_uri'][0], providerInfo['allowed_callback_re'])
            except (KeyError, AssertionError) as e:
                return {
                    'status_code': 400,
                    'content': json.dumps({
                        'error': repr(e),
                        'error_description': repr(e)
                    })
                }
            try:
                self.assertEqual(params['client_secret'], [providerInfo['client_secret']['value']])
            except (KeyError, AssertionError) as e:
                return {
                    'status_code': 401,
                    'content': json.dumps({
                        'error': repr(e),
                        'error_description': repr(e)
                    })
                }
            return json.dumps({
                'access_token': account['access_token'],
                'expires_in': datetime.timedelta(days=60).seconds
            })

        @httmock.urlmatch(scheme='https', netloc='^api.linkedin.com$',
                          path=r'^/v1/people/~(?::\(.+\)?)$', method='GET')
        def mockLinkedinApi(url, request):
            # Resolve the mock account by bearer token, then validate the
            # requested field selector and format before returning a profile.
            try:
                for account in providerInfo['accounts'].values():
                    if 'Bearer %s' % account['access_token'] == request.headers['Authorization']:
                        break
                else:
                    self.fail()
            except AssertionError as e:
                return {
                    'status_code': 401,
                    'content': json.dumps({
                        'errorCode': 0,
                        'message': repr(e)
                    })
                }
            try:
                fieldsRe = re.match(r'^.+:\((.+)\)$', url.path)
                self.assertTrue(fieldsRe)
                self.assertSetEqual(
                    set(fieldsRe.group(1).split(',')),
                    {'id', 'emailAddress', 'firstName', 'lastName'})
                params = urllib.parse.parse_qs(url.query)
                self.assertEqual(params['format'], ['json'])
            except AssertionError as e:
                return {
                    'status_code': 400,
                    'content': json.dumps({
                        'errorCode': 0,
                        'message': repr(e)
                    })
                }
            return json.dumps({
                'id': account['user']['oauth']['id'],
                'firstName': account['user']['firstName'],
                'lastName': account['user']['lastName'],
                'emailAddress': account['user']['email']
            })

        with httmock.HTTMock(
            mockLinkedinRedirect,
            mockLinkedinToken,
            mockLinkedinApi,
            # Must keep 'mockOtherRequest' last
            self.mockOtherRequest
        ):
            self._testOauth(providerInfo)
def testBitbucketOauth(self): # noqa
providerInfo = {
'id': 'bitbucket',
'name': 'Bitbucket',
'client_id': {
'key': PluginSettings.BITBUCKET_CLIENT_ID,
'value': 'bitbucket_test_client_id'
},
'client_secret': {
'key': PluginSettings.BITBUCKET_CLIENT_SECRET,
'value': 'bitbucket_test_client_secret'
},
'allowed_callback_re':
r'^http://127\.0\.0\.1(?::\d+)?'
r'/api/v1/oauth/bitbucket/callback$',
'url_re': r'^https://bitbucket\.org/site/oauth2/authorize',
'accounts': {
'existing': {
'auth_code': 'bitbucket_existing_auth_code',
'access_token': 'bitbucket_existing_test_token',
'user': {
'login': self.adminUser['login'],
'email': self.adminUser['email'],
'firstName': self.adminUser['firstName'],
'lastName': self.adminUser['lastName'],
'oauth': {
'provider': 'bitbucket',
'id': '2399'
}
}
},
'new': {
'auth_code': 'bitbucket_new_auth_code',
'access_token': 'bitbucket_new_test_token',
'user': {
# login may be provided externally by Bitbucket; for
# simplicity here, do not use a username with whitespace
# or underscores
'login': 'drago',
'email': '[email protected]',
'firstName': 'Ivan',
'lastName': 'Drago',
'oauth': {
'provider': 'bitbucket',
'id': 1983
}
}
}
}
}
@httmock.urlmatch(scheme='https', netloc='^bitbucket.org$',
path='^/site/oauth2/authorize$', method='GET')
def mockBitbucketRedirect(url, request):
redirectUri = None
try:
params = urllib.parse.parse_qs(url.query)
# Check redirect_uri first, so other errors can still redirect
redirectUri = params['redirect_uri'][0]
self.assertEqual(params['client_id'], [providerInfo['client_id']['value']])
except (KeyError, AssertionError) as e:
return {
'status_code': 404,
'content': json.dumps({
'error': repr(e)
})
}
try:
self.assertRegex(redirectUri, providerInfo['allowed_callback_re'])
state = params['state'][0]
# Nothing to test for state, since provider doesn't care
self.assertEqual(params['scope'], ['account'])
except (KeyError, AssertionError) as e:
returnQuery = urllib.parse.urlencode({
'error': repr(e),
'error_description': repr(e)
})
else:
returnQuery = urllib.parse.urlencode({
'state': state,
'code': providerInfo['accounts'][self.accountType]['auth_code']
})
return {
'status_code': 302,
'headers': {
'Location': '%s?%s' % (redirectUri, returnQuery)
}
}
@httmock.urlmatch(scheme='https', netloc='^bitbucket.org$',
path='^/site/oauth2/access_token$', method='POST')
def mockBitbucketToken(url, request):
try:
self.assertEqual(request.headers['Accept'], 'application/json')
params = urllib.parse.parse_qs(request.body)
self.assertEqual(params['grant_type'], ['authorization_code'])
except (KeyError, AssertionError) as e:
return {
'status_code': 400,
'content': json.dumps({
'error': repr(e),
'error_description': repr(e)
})
}
try:
for account in providerInfo['accounts'].values():
if account['auth_code'] == params['code'][0]:
break
else:
self.fail()
self.assertEqual(params['client_secret'], [providerInfo['client_secret']['value']])
self.assertRegex(
params['redirect_uri'][0], providerInfo['allowed_callback_re'])
except (KeyError, AssertionError) as e:
returnBody = json.dumps({
'error': repr(e),
'error_description': repr(e)
})
else:
returnBody = json.dumps({
'token_type': 'bearer',
'access_token': account['access_token'],
'scope': 'account'
})
return {
'status_code': 200,
'headers': {
'Content-Type': 'application/json'
},
'content': returnBody
}
@httmock.urlmatch(scheme='https', netloc='^api.bitbucket.org$',
path='^/2.0/user$', method='GET')
def mockBitbucketApiUser(url, request):
try:
for account in providerInfo['accounts'].values():
if 'Bearer %s' % account['access_token'] == request.headers['Authorization']:
break
else:
self.fail()
except AssertionError as e:
return {
'status_code': 401,
'content': json.dumps({
'message': repr(e)
})
}
return json.dumps({
'created_on': '2011-12-20T16:34:07.132459+00:00',
'uuid': account['user']['oauth']['id'],
'location': 'Santa Monica, CA',
'links': {},
'website': 'https://tutorials.bitbucket.org/',
'username': account['user']['login'],
'display_name': '%s %s' % (
account['user']['firstName'], account['user']['lastName'])
})
@httmock.urlmatch(scheme='https', netloc='^api.bitbucket.org$',
path='^/2.0/user/emails$', method='GET')
def mockBitbucketApiEmail(url, request):
try:
for account in providerInfo['accounts'].values():
if 'Bearer %s' % account['access_token'] == request.headers['Authorization']:
break
else:
self.fail()
except AssertionError as e:
return {
'status_code': 401,
'content': json.dumps({
'message': repr(e)
})
}
return json.dumps({
'page': 1,
'pagelen': 10,
'size': 1,
'values': [{
'is_primary': True,
'is_confirmed': True,
'email': account['user']['email'],
'links': {},
'type': 'email'
}]
})
with httmock.HTTMock(
mockBitbucketRedirect,
mockBitbucketToken,
mockBitbucketApiUser,
mockBitbucketApiEmail,
# Must keep 'mockOtherRequest' last
self.mockOtherRequest
):
self._testOauth(providerInfo)
    def testBoxOauth(self): # noqa
        """
        Exercise the full Box OAuth2 login flow against mocked Box
        endpoints, for both the pre-existing admin user and a brand-new
        user (which account is used is chosen via self.accountType
        inside self._testOauth).
        """
        # Static description of the mocked provider: client credentials,
        # expected callback/authorize URL patterns, and the two test accounts.
        providerInfo = {
            'id': 'box',
            'name': 'Box',
            'client_id': {
                'key': PluginSettings.BOX_CLIENT_ID,
                'value': 'box_test_client_id'
            },
            'client_secret': {
                'key': PluginSettings.BOX_CLIENT_SECRET,
                'value': 'box_test_client_secret'
            },
            'allowed_callback_re':
                r'^http://127\.0\.0\.1(?::\d+)?/api/v1/oauth/box/callback$',
            'url_re': r'^https://account\.box\.com/api/oauth2/authorize',
            'accounts': {
                'existing': {
                    'auth_code': 'box_existing_auth_code',
                    'access_token': 'box_existing_test_token',
                    'user': {
                        'login': self.adminUser['login'],
                        'email': self.adminUser['email'],
                        'firstName': self.adminUser['firstName'],
                        'lastName': self.adminUser['lastName'],
                        'oauth': {
                            'provider': 'box',
                            'id': '2481632'
                        }
                    }
                },
                'new': {
                    'auth_code': 'box_new_auth_code',
                    'access_token': 'box_new_test_token',
                    'user': {
                        # this login is not provided by Box, but will be
                        # created internally by _deriveLogin
                        'login': 'metaphor',
                        'email': '[email protected]',
                        'firstName': 'Ivan',
                        'lastName': 'Drago',
                        'oauth': {
                            'provider': 'box',
                            'id': '1985'
                        }
                    }
                }
            }
        }
        # Mock of Box's authorization page: validates client_id and
        # redirect_uri, then 302-redirects back to Girder's callback with
        # either an auth code or an error query string.
        @httmock.urlmatch(scheme='https', netloc='^account.box.com$',
                          path='^/api/oauth2/authorize$', method='GET')
        def mockBoxRedirect(url, request):
            redirectUri = None
            try:
                params = urllib.parse.parse_qs(url.query)
                # Check redirect_uri first, so other errors can still redirect
                redirectUri = params['redirect_uri'][0]
                self.assertEqual(params['client_id'], [providerInfo['client_id']['value']])
            except (KeyError, AssertionError) as e:
                # Without a usable redirect_uri the error must be returned
                # directly instead of via redirect.
                return {
                    'status_code': 404,
                    'content': json.dumps({
                        'error': repr(e)
                    })
                }
            try:
                self.assertRegex(redirectUri, providerInfo['allowed_callback_re'])
                state = params['state'][0]
                # Nothing to test for state, since provider doesn't care
            except (KeyError, AssertionError) as e:
                returnQuery = urllib.parse.urlencode({
                    'error': repr(e),
                })
            else:
                returnQuery = urllib.parse.urlencode({
                    'state': state,
                    'code': providerInfo['accounts'][self.accountType]['auth_code']
                })
            return {
                'status_code': 302,
                'headers': {
                    'Location': '%s?%s' % (redirectUri, returnQuery)
                }
            }
        # Mock of Box's token endpoint: exchanges a known auth code for the
        # matching account's access token.
        @httmock.urlmatch(scheme='https', netloc='^api.box.com$',
                          path='^/oauth2/token$', method='POST')
        def mockBoxToken(url, request):
            try:
                self.assertEqual(request.headers['Accept'], 'application/json')
                params = urllib.parse.parse_qs(request.body)
                self.assertEqual(params['client_id'], [providerInfo['client_id']['value']])
            except (KeyError, AssertionError) as e:
                return {
                    'status_code': 404,
                    'content': json.dumps({
                        'error': repr(e)
                    })
                }
            try:
                # Find which test account issued this auth code.
                for account in providerInfo['accounts'].values():
                    if account['auth_code'] == params['code'][0]:
                        break
                else:
                    self.fail()
                self.assertEqual(params['client_secret'], [providerInfo['client_secret']['value']])
            except (KeyError, AssertionError) as e:
                returnBody = json.dumps({
                    'error': repr(e),
                    'error_description': repr(e)
                })
            else:
                returnBody = json.dumps({
                    'token_type': 'bearer',
                    'access_token': account['access_token'],
                    'scope': 'user:email'
                })
            return {
                'status_code': 200,
                'headers': {
                    'Content-Type': 'application/json'
                },
                'content': returnBody
            }
        # Mock of Box's user-info API: resolves the bearer token back to the
        # mock account and returns its profile as Box would.
        @httmock.urlmatch(scheme='https', netloc='^api.box.com$',
                          path='^/2.0/users/me$', method='GET')
        def mockBoxApiUser(url, request):
            try:
                for account in providerInfo['accounts'].values():
                    if 'Bearer %s' % account['access_token'] == request.headers['Authorization']:
                        break
                else:
                    self.fail()
            except AssertionError as e:
                return {
                    'status_code': 401,
                    'content': json.dumps({
                        'message': repr(e)
                    })
                }
            return json.dumps({
                'id': account['user']['oauth']['id'],
                'login': account['user']['email'],
                'name': '%s %s' % (account['user']['firstName'], account['user']['lastName'])
            })
        with httmock.HTTMock(
                mockBoxRedirect,
                mockBoxToken,
                mockBoxApiUser,
                # Must keep 'mockOtherRequest' last
                self.mockOtherRequest
        ):
            self._testOauth(providerInfo)
| girder/girder | plugins/oauth/plugin_tests/oauth_test.py | Python | apache-2.0 | 63,218 |
import socket
from socksv.socksvSocketServer import socksvSocketServer
def main():
    """Manual integration test for the innoSocks local SOCKSv5 server.

    Performs a SOCKS5 handshake with a server listening on
    127.0.0.1:1234, asks it to CONNECT to go.microsoft.com:80, sends a
    plain HTTP request through the tunnel and prints the response until
    the remote side closes the connection.
    """
    s = socket.socket()
    try:
        s.connect(("127.0.0.1", 1234))
        # SOCKS5 greeting: version 5, one auth method offered, "no auth".
        s.send(b'\x05\x01\x00')
        rply = s.recv(2)
        print("innoSocks local-server reply:", rply)
        # CONNECT request: version 5, cmd CONNECT, reserved, IPv4 addr type.
        b = bytearray(b'\x05\x01\x00\x01')
        h = socket.gethostbyname("go.microsoft.com")
        h = socksvSocketServer.IPV4AddrToBytes(h)
        h.extend(bytearray([0, 80]))  # destination port 80, big-endian
        b.extend(h)
        s.send(b)
        print("innoSocks local-server reply:", s.recv(10))
        s.send(b"GET /fwlink/?linkid=66138&clcid=0x409 HTTP/1.1\r\nconnection: close\r\nhost: go.microsoft.com\r\ncontent-length: 0\r\n\r\n")
        while True:
            data = s.recv(5000)
            if not data:
                # recv() returning b'' means the peer closed the tunnel;
                # the previous version looped forever printing empty strings.
                break
            print(str(data, 'utf-8'))
    finally:
        s.close()
if __name__ == "__main__":
main() | securesocketimproved/ssi | tests/test_socksv_client.py | Python | gpl-3.0 | 756 |
#!/usr/bin/env python
# Copyright 2016 Criteo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import print_function
import unittest
import mock
from biggraphite.drivers import _utils
class CountDownTest(unittest.TestCase):
    """Tests for _utils.CountDown, a latch that fires a callback at zero."""

    _COUNT = 42

    def setUp(self):
        self.on_zero = mock.Mock()
        self.count_down = _utils.CountDown(self._COUNT, self.on_zero)

    def test_on_failure(self):
        """A failure should trigger the callback exactly once."""
        exc = Exception()
        self.count_down.on_failure(exc)
        self.on_zero.assert_called_once()
        # Failing again should not call the callback again.
        self.count_down.on_failure(exc)
        self.on_zero.assert_called_once()

    def test_on_result(self):
        """The callback fires with None only when the count reaches zero."""
        result = "whatever this is not used"
        # Fixed: was `xrange`, which does not exist on Python 3 (and the
        # module does not import six.moves.range); `range` is correct on
        # both Python 2 and 3 for this small count.
        for _ in range(self._COUNT - 1):
            self.count_down.on_result(result)
        self.on_zero.assert_not_called()
        self.count_down.on_result(result)
        self.on_zero.assert_called_with(None)
if __name__ == "__main__":
unittest.main()
| natbraun/biggraphite | tests/test_drivers_utils.py | Python | apache-2.0 | 1,554 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
# Settings handed to djangocms-helper's test runner: a minimal Django
# configuration sufficient to exercise the meta_mixin app.
HELPER_SETTINGS = dict(
    ROOT_URLCONF='example_app.urls',
    INSTALLED_APPS=[
        'sekizai',
        'meta_mixin',
        'example_app',
    ],
    # django-meta options: enable every supported meta-tag family.
    META_SITE_PROTOCOL='http',
    META_USE_SITES=True,
    META_USE_OG_PROPERTIES=True,
    META_USE_TWITTER_PROPERTIES=True,
    META_USE_GOOGLEPLUS_PROPERTIES=True,
    NOSE_ARGS=['-s'],
    TEMPLATE_CONTEXT_PROCESSORS=[
        'sekizai.context_processors.sekizai',
    ],
)
def run():
    """Run the meta_mixin test suite via djangocms-helper."""
    # Imported lazily so that merely importing this module has no
    # Django/djangocms-helper side effects.
    from djangocms_helper import runner
    runner.run('meta_mixin')
if __name__ == '__main__':
run()
| motleytech/django-meta-mixin | cms_helper.py | Python | bsd-3-clause | 673 |
"""
This test tests that i18n extraction (`paver i18n_extract -v`) works properly.
"""
import os
import random
import re
import string
import subprocess
import sys
from datetime import datetime, timedelta
from unittest import TestCase
from i18n import config, dummy, extract, generate
from polib import pofile
from pytz import UTC
from six.moves import range
class TestGenerate(TestCase):
    """
    Tests functionality of i18n/generate.py
    """
    # Partial .po files that extraction produces and merge consumes.
    generated_files = ('django-partial.po', 'djangojs-partial.po', 'mako.po')
    @classmethod
    def setUpClass(cls):
        # String extraction and dummy-translation generation are slow,
        # so they run once for the whole class.
        super(TestGenerate, cls).setUpClass()
        sys.stderr.write(
            "\nThis test tests that i18n extraction (`paver i18n_extract`) works properly. "
            "If you experience failures, please check that all instances of `gettext` and "
            "`ngettext` are used correctly. You can also try running `paver i18n_extract -v` "
            "locally for more detail.\n"
        )
        sys.stderr.write(
            "\nExtracting i18n strings and generating dummy translations; "
            "this may take a few minutes\n"
        )
        sys.stderr.flush()
        extract.main(verbose=0)
        dummy.main(verbose=0)
    @classmethod
    def tearDownClass(cls):
        # Clear the Esperanto & RTL directories of any test artifacts
        cmd = "git checkout conf/locale/eo conf/locale/rtl"
        sys.stderr.write("Cleaning up dummy language directories: " + cmd)
        sys.stderr.flush()
        returncode = subprocess.call(cmd, shell=True)
        assert returncode == 0
        super(TestGenerate, cls).tearDownClass()
    def setUp(self):
        super(TestGenerate, self).setUp()
        self.configuration = config.Configuration()
        # Subtract 1 second to help comparisons with file-modify time succeed,
        # since os.path.getmtime() is not millisecond-accurate
        self.start_time = datetime.now(UTC) - timedelta(seconds=1)
    def test_merge(self):
        """
        Tests merge script on English source files.
        """
        # Merge into a uniquely-named temp file and clean it up afterwards.
        filename = os.path.join(self.configuration.source_messages_dir, random_name())
        generate.merge(self.configuration, self.configuration.source_locale, target=filename)
        self.assertTrue(os.path.exists(filename))
        os.remove(filename)
    def test_main(self):
        """
        Runs generate.main() which should merge source files,
        then compile all sources in all configured languages.
        Validates output by checking all .mo files in all configured languages.
        .mo files should exist, and be recently created (modified
        after start of test suite)
        """
        # Change dummy_locales to not have Esperanto present.
        self.configuration.dummy_locales = ['fake2']
        generate.main(verbosity=0, strict=False)
        for locale in self.configuration.translated_locales:
            for filename in ('django', 'djangojs'):
                mofile = filename + '.mo'
                path = os.path.join(self.configuration.get_messages_dir(locale), mofile)
                exists = os.path.exists(path)
                self.assertTrue(exists, msg=u'Missing file in locale %s: %s' % (locale, mofile))
                self.assertGreaterEqual(
                    datetime.fromtimestamp(os.path.getmtime(path), UTC),
                    self.start_time,
                    msg=u'File not recently modified: %s' % path
                )
            # Segmenting means that the merge headers don't work they way they
            # used to, so don't make this check for now. I'm not sure if we'll
            # get the merge header back eventually, or delete this code eventually.
            # self.assert_merge_headers(locale)
    def assert_merge_headers(self, locale):
        """
        This is invoked by test_main to ensure that it runs after
        calling generate.main().
        There should be exactly three merge comment headers
        in our merged .po file. This counts them to be sure.
        A merge comment looks like this:
        # #-#-#-#-#  django-partial.po (0.1a)  #-#-#-#-#
        """
        path = os.path.join(self.configuration.get_messages_dir(locale), 'django.po')
        pof = pofile(path)
        pattern = re.compile('^#-#-#-#-#', re.M)
        match = pattern.findall(pof.header)
        self.assertEqual(
            len(match),
            3,
            msg=u"Found %s (should be 3) merge comments in the header for %s" % (len(match), path)
        )
def random_name(size=6):
    """Return a random filename string, e.g. 'test-4BZ81W'."""
    alphabet = string.ascii_uppercase + string.digits
    suffix = []
    for _ in range(size):
        suffix.append(random.choice(alphabet))
    return 'test-' + ''.join(suffix)
| cpennington/edx-platform | pavelib/paver_tests/test_extract_and_generate.py | Python | agpl-3.0 | 4,737 |
# -*- coding: ISO-8859-1 -*-
# Copyright 2010 Dirk Holtwick, holtwick.it
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "$Revision: 176 $"
__author__ = "$Author: holtwick $"
__date__ = "$Date: 2008-03-15 00:11:47 +0100 (Sa, 15 Mrz 2008) $"
"""
HTML/CSS to PDF converter
Most people know how to write a page with HTML and CSS. Why not using these skills to dynamically generate PDF documents using it? The "pisa" project http://www.htmltopdf.org enables you to to this quite simple.
"""
import cStringIO
import ho.pisa as pisa
import os
# Shortcut for dumping all logs to the screen
pisa.showLogging()
def HTML2PDF(data, filename, open=False):
    """
    Simple test showing how to create a PDF file from
    PML Source String. Also shows errors and tries to start
    the resulting PDF

    NOTE(review): the ``open`` parameter shadows the ``open`` builtin,
    which is why the output file below is created with the Python 2
    ``file`` builtin instead of ``open``.
    """
    pdf = pisa.CreatePDF(
        cStringIO.StringIO(data),
        file(filename, "wb"))
    if open and (not pdf.err):
        # Only launch a viewer when requested and conversion succeeded.
        pisa.startViewer(filename)
    # True on success, False if pisa reported errors.
    return not pdf.err
if __name__=="__main__":
    # Minimal HTML document used to smoke-test the converter: a paragraph,
    # a rule and a styled table.
    HTMLTEST = """
    <html><body>
    <p>Hello <strong style="color: #f00;">World</strong>
    <hr>
    <table border="1" style="background: #eee; padding: 0.5em;">
        <tr>
            <td>Amount</td>
            <td>Description</td>
            <td>Total</td>
        </tr>
        <tr>
            <td>1</td>
            <td>Good weather</td>
            <td>0 EUR</td>
        </tr>
        <tr style="font-weight: bold">
            <td colspan="2" align="right">Sum</td>
            <td>0 EUR</td>
        </tr>
    </table>
    </body></html>
    """
    # Convert and immediately open the result in a PDF viewer.
    HTML2PDF(HTMLTEST, "test.pdf", open=True)
| alanjds/xhtml2pdf | test/cookbook.py | Python | apache-2.0 | 2,140 |
# -*- coding: utf-8 -*-
# Copyright(C) 2014 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import requests.cookies
try:
import cookielib
except ImportError:
import http.cookiejar as cookielib
__all__ = ['WeboobCookieJar']
class WeboobCookieJar(requests.cookies.RequestsCookieJar):
    """Cookie jar with import/export helpers and lazy cookie extraction.

    Extends requests' cookie jar so cookies can be merged from any
    cookielib jar, persisted to disk unconditionally, and extracted from
    a response as a generator (make_cookies yields instead of returning
    a list).
    """
    @classmethod
    def from_cookiejar(klass, cj):
        """
        Create a WeboobCookieJar from another CookieJar instance.
        """
        return requests.cookies.merge_cookies(klass(), cj)
    def export(self, filename):
        """
        Export all cookies to a file, regardless of expiration, etc.
        """
        # LWPCookieJar supplies the save() implementation; merging copies
        # this jar's cookies into it first.
        cj = requests.cookies.merge_cookies(cookielib.LWPCookieJar(), self)
        cj.save(filename, ignore_discard=True, ignore_expires=True)
    def _cookies_from_attrs_set(self, attrs_set, request):
        # Yield Cookie objects built from pre-parsed Set-Cookie attributes.
        for tup in self._normalized_cookie_tuples(attrs_set):
            cookie = self._cookie_from_cookie_tuple(tup, request)
            if cookie:
                yield cookie
    def make_cookies(self, response, request):
        """Return sequence of Cookie objects extracted from response object."""
        # get cookie-attributes for RFC 2965 and Netscape protocols
        # NOTE(review): headers.getheaders() is the Python 2 httplib API;
        # confirm against the response objects actually passed in.
        headers = response.info()
        rfc2965_hdrs = headers.getheaders("Set-Cookie2")
        ns_hdrs = headers.getheaders("Set-Cookie")
        rfc2965 = self._policy.rfc2965
        netscape = self._policy.netscape
        if netscape:
            for cookie in self._cookies_from_attrs_set(cookielib.parse_ns_headers(ns_hdrs), request):
                self._process_rfc2109_cookies([cookie])
                yield cookie
        if rfc2965:
            for cookie in self._cookies_from_attrs_set(cookielib.split_header_words(rfc2965_hdrs), request):
                yield cookie
    def copy(self):
        """Return a new jar of the same class containing the same cookies."""
        new_cj = type(self)()
        new_cj.update(self)
        return new_cj
| Boussadia/weboob | weboob/tools/browser2/cookies.py | Python | agpl-3.0 | 2,534 |
# -*- coding: UTF-8 -*-
"""
Copyright 2000, 2001, 2002, 2003, 2004, 2005 Dataprev - Empresa de Tecnologia e Informações da Previdência Social, Brasil
Este arquivo é parte do programa CACIC - Configurador Automático e Coletor de Informações Computacionais
O CACIC é um software livre; você pode redistribui-lo e/ou modifica-lo dentro dos termos da Licença Pública Geral GNU como
publicada pela Fundação do Software Livre (FSF); na versão 2 da Licença, ou (na sua opnião) qualquer versão.
Este programa é distribuido na esperança que possa ser util, mas SEM NENHUMA GARANTIA; sem uma garantia implicita de ADEQUAÇÂO a qualquer
MERCADO ou APLICAÇÃO EM PARTICULAR. Veja a Licença Pública Geral GNU para maiores detalhes.
Você deve ter recebido uma cópia da Licença Pública Geral GNU, sob o título "LICENCA.txt", junto com este programa, se não, escreva para a Fundação do Software
Livre(FSF) Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Modulo url
Modulo com finalidade de efetuar a comunicacao
HTTP com o servidor e resgatar as informacoes
retornadas por ele.
Tem como funcao tambem baixar os arquivos do
servidor por FTP.
@author: Dataprev - ES
"""
import re
import sys
import base64
import urllib, urllib2
import ftplib
from coletores.coletor import *
from xml.dom import minidom, Node
class URL :
    """Responsible for performing HTTP and FTP accesses to the server."""
    def __init__(self) :
        # FTP connection handle; empty string until ftpConecta() is called.
        self.ftp = ''
    def enviaRecebeDados(self, dados, url, user, pwd, defaults):
        """
        Send the given data to the server via the POST method (with HTTP
        basic authentication) and return the content of the generated page.
        @param dados: dict
        """
        # returns string
        for key, value in defaults.items():
            dados[key] = value
        query = urllib.urlencode(dados)
        base64string = base64.encodestring('%s:%s' % (user,pwd))[:-1]
        passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
        passman.add_password(None, url, user, pwd)
        # Empty proxy dict forces urllib2 to bypass any system proxy.
        """ Lista de proxies vazia para forçar nenhuma """
        proxy_handler = urllib2.ProxyHandler({})
        auth_handler = urllib2.HTTPBasicAuthHandler(passman)
        opener = urllib2.build_opener(proxy_handler, auth_handler)
        urllib2.install_opener(opener)
        request = urllib2.Request(url)
        request.add_header("Authorization", "Basic %s" % base64string)
        opener.addheaders = [('User-Agent', defaults['agent']), ('Accept','text/html, */*')]
        data = opener.open(request, data=query).read()
        opener.close()
        return data
    def isOK(self, xml):
        """Return whether the generated XML reports success (1) or not (0)."""
        try:
            xml = minidom.parseString(xml)
            status = xml.getElementsByTagName('STATUS')[0].firstChild.nodeValue
            if status != "OK":
                return 0 # False
            return 1 # True
        except:
            # Any parse error or missing STATUS element counts as failure.
            return 0
    def ftpConecta(self, server, login, senha):
        """Connect and log in to an FTP server."""
        try:
            self.ftp = ftplib.FTP(server)
            self.ftp.login(login, senha)
        except Exception, e:
            raise Exception('Tentando conectar ao FTP. %s' % e.message)
    def ftpAlteraDir(self, dir):
        # Change the current working directory on the FTP server.
        self.ftp.cwd(dir)
    def ftpDesconecta(self):
        """Disconnect from the FTP server."""
        try:
            self.ftp.close()
        except Exception, e:
            raise Exception('Tentando disconectar. %s' % e.message)
    def getFile(self, arquivo, destino) :
        """ Save a file fetched over FTP to the local disk. """
        # returns void
        try:
            self.ftp.retrbinary("RETR " + arquivo, open(destino, 'wb').write)
        except Exception, e:
            raise Exception('Tentando baixar arquivo: %s. Causa: %s' % (arquivo, e))
| lightbase/pycacic | coletores/lib/url.py | Python | gpl-2.0 | 4,034 |
# RiveScript-Python
#
# This code is released under the MIT License.
# See the "LICENSE" file for more information.
#
# https://www.rivescript.com/
from __future__ import unicode_literals
import re
"""Common regular expressions used in RiveScript."""
# Common regular expressions.
class RE(object):
    """Precompiled regular expressions used throughout RiveScript.

    All patterns are written as raw strings so backslash sequences such
    as ``\\s`` reach the regex engine intact. The previous non-raw
    literals (e.g. ``'\\s*=\\s*'``) relied on Python passing unknown
    escapes through unchanged, which is deprecated (SyntaxWarning, and
    slated to become an error) in modern Python; the compiled patterns
    are byte-for-byte identical to before.
    """
    equals = re.compile(r'\s*=\s*')
    ws = re.compile(r'\s+')
    # Matches the literal two-character token "\s" in source text.
    space = re.compile(r'\\s')
    objend = re.compile(r'^\s*<\s*object')
    weight = re.compile(r'\s*\{weight=(\d+)\}\s*')
    inherit = re.compile(r'\{inherits=(\d+)\}')
    wilds_and_optionals = re.compile(r'[\s\*\#\_\[\]()]+')
    nasties = re.compile(r'[^A-Za-z0-9 ]')
    crlf = re.compile(r'<crlf>')
    literal_w = re.compile(r'\\w')
    array = re.compile(r'\@(.+?)\b')
    reply_array = re.compile(r'\(@([A-Za-z0-9_]+)\)')
    ph_array = re.compile(r'\x00@([A-Za-z0-9_]+)\x00')
    def_syntax = re.compile(r'^.+(?:\s+.+|)\s*=\s*.+?$')
    name_syntax = re.compile(r'[^a-z0-9_\-\s]')
    obj_syntax = re.compile(r'[^A-Za-z0-9_\-\s]')
    utf8_trig = re.compile(r'[A-Z\\.]')
    trig_syntax = re.compile(r'[^a-z0-9(\|)\[\]*_#@{}<>=\s]')
    cond_syntax = re.compile(r'^.+?\s*(?:==|eq|!=|ne|<>|<|<=|>|>=)\s*.+?=>.+?$')
    utf8_meta = re.compile(r'[\\<>]')
    utf8_punct = re.compile(r'[.?,!;:@#$%^&*()]')
    cond_split = re.compile(r'\s*=>\s*')
    cond_parse = re.compile(r'^(.+?)\s+(==|eq|!=|ne|<>|<|<=|>|>=)\s+(.+?)$')
    topic_tag = re.compile(r'\{topic=(.+?)\}')
    set_tag = re.compile(r'<set (.+?)=(.+?)>')
    bot_tag = re.compile(r'<bot (.+?)>')
    get_tag = re.compile(r'<get (.+?)>')
    star_tags = re.compile(r'<star(\d+)>')
    botstars = re.compile(r'<botstar(\d+)>')
    input_tags = re.compile(r'<input([1-9])>')
    reply_tags = re.compile(r'<reply([1-9])>')
    random_tags = re.compile(r'\{random\}(.+?)\{/random\}')
    redir_tag = re.compile(r'\{@(.+?)\}')
    tag_search = re.compile(r'<([^<]+?)>')
    placeholder = re.compile(r'\x00(\d+)\x00')
    zero_star = re.compile(r'^\*$')
    optionals = re.compile(r'\[(.+?)\]')
    empty_pipe = re.compile(r'\|\s*\||\[\s*\||\|\s*\]|\(\s*\||\|\s*\)')  # ||, [|, |], (|, |)
| Dinh-Hung-Tu/rivescript-python | rivescript/regexp.py | Python | mit | 2,209 |
# Copyright 2008-2016 Free Software Foundation, Inc.
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
from __future__ import absolute_import
import os
from mako.template import Template
from .hier_block import HierBlockGenerator, QtHierBlockGenerator
from .top_block import TopBlockGenerator
from .cpp_top_block import CppTopBlockGenerator
from .cpp_hier_block import CppHierBlockGenerator
# Directory containing this module; used to locate the Mako template
# that renders generated Python flow graphs.
DATA_DIR = os.path.dirname(__file__)
FLOW_GRAPH_TEMPLATE = os.path.join(DATA_DIR, "flow_graph.py.mako")
# Pre-compiled template shared by the generators in this package.
flow_graph_template = Template(filename=FLOW_GRAPH_TEMPLATE)
class Generator(object):
    """Adaptor for various generators (uses generate_options)"""

    def __init__(self, flow_graph, file_path):
        """
        Initialize the generator object.
        Determine the file to generate.

        Args:
            flow_graph: the flow graph object
            file_path: the path to the grc file

        Raises:
            ValueError: if the output language / generate options
                combination has no generator (e.g. C++ hier blocks with
                a Qt GUI, which are not implemented).
        """
        self.generate_options = flow_graph.get_option("generate_options")
        self.output_language = flow_graph.get_option("output_language")

        generator_cls = None
        if self.output_language == "python":
            if self.generate_options == "hb":
                generator_cls = HierBlockGenerator
            elif self.generate_options == "hb_qt_gui":
                generator_cls = QtHierBlockGenerator
            else:
                generator_cls = TopBlockGenerator
        elif self.output_language == "cpp":
            if self.generate_options == "hb":
                generator_cls = CppHierBlockGenerator
            elif self.generate_options == "hb_qt_gui":
                # C++ hierarchical blocks with a Qt GUI are not implemented.
                pass
            else:
                generator_cls = CppTopBlockGenerator

        if generator_cls is None:
            # Previously this fell through to an opaque NameError on the
            # unbound 'generator_cls'; fail with a clear message instead.
            raise ValueError(
                "No generator available for output_language=%r, "
                "generate_options=%r"
                % (self.output_language, self.generate_options))

        self._generator = generator_cls(flow_graph, file_path)

    def __getattr__(self, item):
        """get all other attrib from actual generator object"""
        return getattr(self._generator, item)
| skoslowski/gnuradio | grc/core/generator/Generator.py | Python | gpl-3.0 | 1,914 |
"""
What : Internet Group Management Protocol, Version 2
How : http://www.ietf.org/rfc/rfc2236.txt
Who : jesse @ housejunkie . ca
"""
from binascii import unhexlify
from construct import Byte, Enum,Struct, UBInt16
from construct.protocols.layer3.ipv4 import IpAddress
# IGMPv2 message types (RFC 2236, section 2.1).
igmp_type = Enum(Byte("igmp_type"),
    MEMBERSHIP_QUERY = 0x11,
    MEMBERSHIP_REPORT_V1 = 0x12,
    MEMBERSHIP_REPORT_V2 = 0x16,
    LEAVE_GROUP = 0x17,
    )
# Fixed 8-byte IGMPv2 header: type, max response time, checksum and the
# multicast group address (RFC 2236, section 2).
igmpv2_header = Struct("igmpv2_header",
    igmp_type,
    Byte("max_resp_time"),
    UBInt16("checksum"),
    IpAddress("group_address"),
)
if __name__ == '__main__':
    # Sample capture: a v2 Membership Report (0x16) for group
    # 239.255.255.253 (0xEFFFFFFD), checksum 0xFA01.
    capture = unhexlify(b"1600FA01EFFFFFFD")
    print (igmpv2_header.parse(capture))
| gkonstantyno/construct | construct/protocols/layer3/igmpv2.py | Python | mit | 688 |
#!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# create four cursors configured differently
# Cursor 1: axes + bounding outline, no center point (drawn red).
cursor = vtk.vtkCursor2D()
cursor.SetModelBounds(15, 45, 15, 45, 0, 0)
cursor.SetFocalPoint(30, 30, 0)
cursor.AllOff()
cursor.AxesOn()
cursor.OutlineOn()
cursorMapper = vtk.vtkPolyDataMapper2D()
cursorMapper.SetInputConnection(cursor.GetOutputPort())
cursorActor = vtk.vtkActor2D()
cursorActor.SetMapper(cursorMapper)
cursorActor.GetProperty().SetColor(1, 0, 0)
# Cursor 2: axes + outline + center point (drawn green).
cursor2 = vtk.vtkCursor2D()
cursor2.SetModelBounds(75, 105, 15, 45, 0, 0)
cursor2.SetFocalPoint(90, 30, 0)
cursor2.AllOff()
cursor2.AxesOn()
cursor2.OutlineOn()
cursor2.PointOn()
cursor2Mapper = vtk.vtkPolyDataMapper2D()
cursor2Mapper.SetInputConnection(cursor2.GetOutputPort())
cursor2Actor = vtk.vtkActor2D()
cursor2Actor.SetMapper(cursor2Mapper)
cursor2Actor.GetProperty().SetColor(0, 1, 0)
# Cursor 3: axes + point with radius 3, no outline (drawn green).
cursor3 = vtk.vtkCursor2D()
cursor3.SetModelBounds(15, 45, 75, 105, 0, 0)
cursor3.SetFocalPoint(30, 90, 0)
cursor3.AllOff()
cursor3.AxesOn()
cursor3.OutlineOff()
cursor3.PointOn()
cursor3.SetRadius(3)
cursor3Mapper = vtk.vtkPolyDataMapper2D()
cursor3Mapper.SetInputConnection(cursor3.GetOutputPort())
cursor3Actor = vtk.vtkActor2D()
cursor3Actor.SetMapper(cursor3Mapper)
cursor3Actor.GetProperty().SetColor(0, 1, 0)
# Cursor 4: axes only with zero radius (drawn red).
cursor4 = vtk.vtkCursor2D()
cursor4.SetModelBounds(75, 105, 75, 105, 0, 0)
cursor4.SetFocalPoint(90, 90, 0)
cursor4.AllOff()
cursor4.AxesOn()
cursor4.SetRadius(0.0)
cursor4Mapper = vtk.vtkPolyDataMapper2D()
cursor4Mapper.SetInputConnection(cursor4.GetOutputPort())
cursor4Actor = vtk.vtkActor2D()
cursor4Actor.SetMapper(cursor4Mapper)
cursor4Actor.GetProperty().SetColor(1, 0, 0)
# rendering support
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# read data
ren1.AddActor(cursorActor)
ren1.AddActor(cursor2Actor)
ren1.AddActor(cursor3Actor)
ren1.AddActor(cursor4Actor)
ren1.SetBackground(0, 0, 0)
renWin.SetSize(150, 150)
renWin.SetMultiSamples(0)
renWin.Render()
iren.Initialize()
#iren.Start()
| HopeFOAM/HopeFOAM | ThirdParty-0.1/ParaView-5.0.1/VTK/Filters/General/Testing/Python/cursor2D.py | Python | gpl-3.0 | 2,161 |
"""
Django settings for djdjdj project.
Generated by 'django-admin startproject' using Django 1.8.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'bq$4v+^n)-%pae@q92vjp5brd3gz@x^u)*a9l07_46_^a@ud05'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # 3rd-party
    'pipeline',
    # own
    'demo_project',
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'demo_project.urls'
# Template loaders are wrapped by pyjade's Loader so .jade templates are
# compiled before the standard filesystem/app loaders handle them.
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
            'loaders': [
                ('pyjade.ext.django.Loader', (
                    'django.template.loaders.filesystem.Loader',
                    'django.template.loaders.app_directories.Loader',
                ))
            ],
            'builtins': ['pyjade.ext.django.templatetags'],
        },
    },
]
WSGI_APPLICATION = 'demo_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
# STATIC_ROOT = 'static'
STATIC_ROOT = os.path.join(BASE_DIR, "static")
# STATICFILES_DIRS = [
#     os.path.join(BASE_DIR, "static"),
# ]
MEDIA_URL = '/media/'
MEDIA_ROOT = 'media/'
STATICFILES_STORAGE = 'pipeline.storage.PipelineStorage'
# STATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage'
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'pipeline.finders.FileSystemFinder',
    # 'pipeline.finders.AppDirectoriesFinder',
    'pipeline.finders.PipelineFinder',
    # 'pipeline.finders.CachedFileFinder',
)
# django-pipeline asset bundles: Stylus -> CSS and CoffeeScript -> JS.
# PIPELINE_ENABLED is False here so assets are served uncompiled in dev.
PIPELINE = {
    'PIPELINE_ENABLED': False,
    'STYLESHEETS': {
        'base': {
            'source_filenames': (
                'site/stylus/base.styl',
            ),
            'output_filename': 'css/base.css',
            # 'extra_context': {
            #     'media': 'screen,projection',
            # },
        },
    },
    'JAVASCRIPT': {
        'base': {
            'source_filenames': (
                'site/coffee/base.coffee',
            ),
            'output_filename': 'js/base.js',
        }
    },
    'COMPILERS': (
        'pipeline.compilers.coffee.CoffeeScriptCompiler',
        'pipeline.compilers.stylus.StylusCompiler',
    )
}
| SimplyCo/django-jade-stylus-coffeescript | demo_project/settings.py | Python | mit | 4,283 |
import unittest
import sys
import _ast
from test import support
class TestSpecifics(unittest.TestCase):
    def test_debug_assignment(self):
        # catch assignments to __debug__
        self.assertRaises(SyntaxError, compile, '__debug__ = 1', '?', 'single')
        # setattr() bypasses the compiler check; restore the previous value.
        import builtins
        prev = builtins.__debug__
        setattr(builtins, '__debug__', 'sure')
        setattr(builtins, '__debug__', prev)
    def test_argument_handling(self):
        # detect duplicate positional and keyword arguments
        self.assertRaises(SyntaxError, eval, 'lambda a,a:0')
        self.assertRaises(SyntaxError, eval, 'lambda a,a=1:0')
        self.assertRaises(SyntaxError, eval, 'lambda a=1,a=1:0')
        self.assertRaises(SyntaxError, exec, 'def f(a, a): pass')
        self.assertRaises(SyntaxError, exec, 'def f(a = 0, a = 1): pass')
        self.assertRaises(SyntaxError, exec, 'def f(a): global a; a = 1')
    def test_syntax_error(self):
        # malformed expression must raise at compile time
        self.assertRaises(SyntaxError, compile, "1+*3", "filename", "exec")
    def test_none_keyword_arg(self):
        # None is not a valid keyword-argument name
        self.assertRaises(SyntaxError, compile, "f(None=1)", "<string>", "exec")
    def test_duplicate_global_local(self):
        self.assertRaises(SyntaxError, exec, 'def f(a): global a; a = 1')
    def test_exec_with_general_mapping_for_locals(self):
        class M:
            "Test mapping interface versus possible calls from eval()."
            def __getitem__(self, key):
                if key == 'a':
                    return 12
                raise KeyError
            def __setitem__(self, key, value):
                self.results = (key, value)
            def keys(self):
                return list('xyz')
        m = M()
        g = globals()
        exec('z = a', g, m)
        self.assertEqual(m.results, ('z', 12))
        # NOTE(review): exec of 'z = b' raises NameError below; the failure
        # message text says KeyError (the mapping's KeyError is translated).
        try:
            exec('z = b', g, m)
        except NameError:
            pass
        else:
            self.fail('Did not detect a KeyError')
        exec('z = dir()', g, m)
        self.assertEqual(m.results, ('z', list('xyz')))
        exec('z = globals()', g, m)
        self.assertEqual(m.results, ('z', g))
        exec('z = locals()', g, m)
        self.assertEqual(m.results, ('z', m))
        # locals must be a mapping; a plain object is rejected
        self.assertRaises(TypeError, exec, 'z = b', m)
        class A:
            "Non-mapping"
            pass
        m = A()
        self.assertRaises(TypeError, exec, 'z = a', g, m)
        # Verify that dict subclasses work as well
        class D(dict):
            def __getitem__(self, key):
                if key == 'a':
                    return 12
                return dict.__getitem__(self, key)
        d = D()
        exec('z = a', g, d)
        self.assertEqual(d['z'], 12)
    def test_extended_arg(self):
        # Build a function body long enough to require EXTENDED_ARG opcodes.
        longexpr = 'x = x or ' + '-x' * 2500
        g = {}
        code = '''
def f(x):
            %s
            %s
            %s
            %s
            %s
            %s
            %s
            %s
            %s
            %s
        # the expressions above have no effect, x == argument
        while x:
            x -= 1
        # EXTENDED_ARG/JUMP_ABSOLUTE here
        return x
''' % ((longexpr,)*10)
        exec(code, g)
        self.assertEqual(g['f'](5), 0)
    def test_argument_order(self):
        # non-default parameter may not follow a default parameter
        self.assertRaises(SyntaxError, exec, 'def f(a=1, b): pass')
    def test_float_literals(self):
        # testing bad float literals
        self.assertRaises(SyntaxError, eval, "2e")
        self.assertRaises(SyntaxError, eval, "2.0e+")
        self.assertRaises(SyntaxError, eval, "1e-")
        self.assertRaises(SyntaxError, eval, "3-4e/21")
def test_indentation(self):
# testing compile() of indented block w/o trailing newline"
s = """
if 1:
if 2:
pass"""
compile(s, "<string>", "exec")
# This test is probably specific to CPython and may not generalize
# to other implementations. We are trying to ensure that when
# the first line of code starts after 256, correct line numbers
# in tracebacks are still produced.
def test_leading_newlines(self):
s256 = "".join(["\n"] * 256 + ["spam"])
co = compile(s256, 'fn', 'exec')
self.assertEqual(co.co_firstlineno, 257)
self.assertEqual(co.co_lnotab, bytes())
def test_literals_with_leading_zeroes(self):
for arg in ["077787", "0xj", "0x.", "0e", "090000000000000",
"080000000000000", "000000000000009", "000000000000008",
"0b42", "0BADCAFE", "0o123456789", "0b1.1", "0o4.2",
"0b101j2", "0o153j2", "0b100e1", "0o777e1", "0777",
"000777", "000000000000007"]:
self.assertRaises(SyntaxError, eval, arg)
self.assertEqual(eval("0xff"), 255)
self.assertEqual(eval("0777."), 777)
self.assertEqual(eval("0777.0"), 777)
self.assertEqual(eval("000000000000000000000000000000000000000000000000000777e0"), 777)
self.assertEqual(eval("0777e1"), 7770)
self.assertEqual(eval("0e0"), 0)
self.assertEqual(eval("0000e-012"), 0)
self.assertEqual(eval("09.5"), 9.5)
self.assertEqual(eval("0777j"), 777j)
self.assertEqual(eval("000"), 0)
self.assertEqual(eval("00j"), 0j)
self.assertEqual(eval("00.0"), 0)
self.assertEqual(eval("0e3"), 0)
self.assertEqual(eval("090000000000000."), 90000000000000.)
self.assertEqual(eval("090000000000000.0000000000000000000000"), 90000000000000.)
self.assertEqual(eval("090000000000000e0"), 90000000000000.)
self.assertEqual(eval("090000000000000e-0"), 90000000000000.)
self.assertEqual(eval("090000000000000j"), 90000000000000j)
self.assertEqual(eval("000000000000008."), 8.)
self.assertEqual(eval("000000000000009."), 9.)
self.assertEqual(eval("0b101010"), 42)
self.assertEqual(eval("-0b000000000010"), -2)
self.assertEqual(eval("0o777"), 511)
self.assertEqual(eval("-0o0000010"), -8)
def test_unary_minus(self):
# Verify treatment of unary minus on negative numbers SF bug #660455
if sys.maxsize == 2147483647:
# 32-bit machine
all_one_bits = '0xffffffff'
self.assertEqual(eval(all_one_bits), 4294967295)
self.assertEqual(eval("-" + all_one_bits), -4294967295)
elif sys.maxsize == 9223372036854775807:
# 64-bit machine
all_one_bits = '0xffffffffffffffff'
self.assertEqual(eval(all_one_bits), 18446744073709551615)
self.assertEqual(eval("-" + all_one_bits), -18446744073709551615)
else:
self.fail("How many bits *does* this machine have???")
# Verify treatment of contant folding on -(sys.maxsize+1)
# i.e. -2147483648 on 32 bit platforms. Should return int, not long.
self.assertTrue(isinstance(eval("%s" % (-sys.maxsize - 1)), int))
self.assertTrue(isinstance(eval("%s" % (-sys.maxsize - 2)), int))
if sys.maxsize == 9223372036854775807:
def test_32_63_bit_values(self):
a = +4294967296 # 1 << 32
b = -4294967296 # 1 << 32
c = +281474976710656 # 1 << 48
d = -281474976710656 # 1 << 48
e = +4611686018427387904 # 1 << 62
f = -4611686018427387904 # 1 << 62
g = +9223372036854775807 # 1 << 63 - 1
h = -9223372036854775807 # 1 << 63 - 1
for variable in self.test_32_63_bit_values.__code__.co_consts:
if variable is not None:
self.assertTrue(isinstance(variable, int))
def test_sequence_unpacking_error(self):
# Verify sequence packing/unpacking with "or". SF bug #757818
i,j = (1, -1) or (-1, 1)
self.assertEqual(i, 1)
self.assertEqual(j, -1)
def test_none_assignment(self):
stmts = [
'None = 0',
'None += 0',
'__builtins__.None = 0',
'def None(): pass',
'class None: pass',
'(a, None) = 0, 0',
'for None in range(10): pass',
'def f(None): pass',
]
for stmt in stmts:
stmt += "\n"
self.assertRaises(SyntaxError, compile, stmt, 'tmp', 'single')
self.assertRaises(SyntaxError, compile, stmt, 'tmp', 'exec')
def test_import(self):
succeed = [
'import sys',
'import os, sys',
'import os as bar',
'import os.path as bar',
'from __future__ import nested_scopes, generators',
'from __future__ import (nested_scopes,\ngenerators)',
'from __future__ import (nested_scopes,\ngenerators,)',
'from sys import stdin, stderr, stdout',
'from sys import (stdin, stderr,\nstdout)',
'from sys import (stdin, stderr,\nstdout,)',
'from sys import (stdin\n, stderr, stdout)',
'from sys import (stdin\n, stderr, stdout,)',
'from sys import stdin as si, stdout as so, stderr as se',
'from sys import (stdin as si, stdout as so, stderr as se)',
'from sys import (stdin as si, stdout as so, stderr as se,)',
]
fail = [
'import (os, sys)',
'import (os), (sys)',
'import ((os), (sys))',
'import (sys',
'import sys)',
'import (os,)',
'import os As bar',
'import os.path a bar',
'from sys import stdin As stdout',
'from sys import stdin a stdout',
'from (sys) import stdin',
'from __future__ import (nested_scopes',
'from __future__ import nested_scopes)',
'from __future__ import nested_scopes,\ngenerators',
'from sys import (stdin',
'from sys import stdin)',
'from sys import stdin, stdout,\nstderr',
'from sys import stdin si',
'from sys import stdin,'
'from sys import (*)',
'from sys import (stdin,, stdout, stderr)',
'from sys import (stdin, stdout),',
]
for stmt in succeed:
compile(stmt, 'tmp', 'exec')
for stmt in fail:
self.assertRaises(SyntaxError, compile, stmt, 'tmp', 'exec')
def test_for_distinct_code_objects(self):
# SF bug 1048870
def f():
f1 = lambda x=1: x
f2 = lambda x=2: x
return f1, f2
f1, f2 = f()
self.assertNotEqual(id(f1.__code__), id(f2.__code__))
## def test_unicode_encoding(self):
## code = "# -*- coding: utf-8 -*-\npass\n"
## self.assertRaises(SyntaxError, compile, code, "tmp", "exec")
def test_subscripts(self):
# SF bug 1448804
# Class to make testing subscript results easy
class str_map(object):
def __init__(self):
self.data = {}
def __getitem__(self, key):
return self.data[str(key)]
def __setitem__(self, key, value):
self.data[str(key)] = value
def __delitem__(self, key):
del self.data[str(key)]
def __contains__(self, key):
return str(key) in self.data
d = str_map()
# Index
d[1] = 1
self.assertEqual(d[1], 1)
d[1] += 1
self.assertEqual(d[1], 2)
del d[1]
self.assertEqual(1 in d, False)
# Tuple of indices
d[1, 1] = 1
self.assertEqual(d[1, 1], 1)
d[1, 1] += 1
self.assertEqual(d[1, 1], 2)
del d[1, 1]
self.assertEqual((1, 1) in d, False)
# Simple slice
d[1:2] = 1
self.assertEqual(d[1:2], 1)
d[1:2] += 1
self.assertEqual(d[1:2], 2)
del d[1:2]
self.assertEqual(slice(1, 2) in d, False)
# Tuple of simple slices
d[1:2, 1:2] = 1
self.assertEqual(d[1:2, 1:2], 1)
d[1:2, 1:2] += 1
self.assertEqual(d[1:2, 1:2], 2)
del d[1:2, 1:2]
self.assertEqual((slice(1, 2), slice(1, 2)) in d, False)
# Extended slice
d[1:2:3] = 1
self.assertEqual(d[1:2:3], 1)
d[1:2:3] += 1
self.assertEqual(d[1:2:3], 2)
del d[1:2:3]
self.assertEqual(slice(1, 2, 3) in d, False)
# Tuple of extended slices
d[1:2:3, 1:2:3] = 1
self.assertEqual(d[1:2:3, 1:2:3], 1)
d[1:2:3, 1:2:3] += 1
self.assertEqual(d[1:2:3, 1:2:3], 2)
del d[1:2:3, 1:2:3]
self.assertEqual((slice(1, 2, 3), slice(1, 2, 3)) in d, False)
# Ellipsis
d[...] = 1
self.assertEqual(d[...], 1)
d[...] += 1
self.assertEqual(d[...], 2)
del d[...]
self.assertEqual(Ellipsis in d, False)
# Tuple of Ellipses
d[..., ...] = 1
self.assertEqual(d[..., ...], 1)
d[..., ...] += 1
self.assertEqual(d[..., ...], 2)
del d[..., ...]
self.assertEqual((Ellipsis, Ellipsis) in d, False)
def test_annotation_limit(self):
# 16 bits are available for # of annotations, but only 8 bits are
# available for the parameter count, hence 255
# is the max. Ensure the result of too many annotations is a
# SyntaxError.
s = "def f(%s): pass"
s %= ', '.join('a%d:%d' % (i,i) for i in range(256))
self.assertRaises(SyntaxError, compile, s, '?', 'exec')
# Test that the max # of annotations compiles.
s = "def f(%s): pass"
s %= ', '.join('a%d:%d' % (i,i) for i in range(255))
compile(s, '?', 'exec')
def test_mangling(self):
class A:
def f():
__mangled = 1
__not_mangled__ = 2
import __mangled_mod
import __package__.module
self.assertTrue("_A__mangled" in A.f.__code__.co_varnames)
self.assertTrue("__not_mangled__" in A.f.__code__.co_varnames)
self.assertTrue("_A__mangled_mod" in A.f.__code__.co_varnames)
self.assertTrue("__package__" in A.f.__code__.co_varnames)
def test_compile_ast(self):
fname = __file__
if fname.lower().endswith(('pyc', 'pyo')):
fname = fname[:-1]
with open(fname, 'r') as f:
fcontents = f.read()
sample_code = [
['<assign>', 'x = 5'],
['<ifblock>', """if True:\n pass\n"""],
['<forblock>', """for n in [1, 2, 3]:\n print(n)\n"""],
['<deffunc>', """def foo():\n pass\nfoo()\n"""],
[fname, fcontents],
]
for fname, code in sample_code:
co1 = compile(code, '%s1' % fname, 'exec')
ast = compile(code, '%s2' % fname, 'exec', _ast.PyCF_ONLY_AST)
self.assertTrue(type(ast) == _ast.Module)
co2 = compile(ast, '%s3' % fname, 'exec')
self.assertEqual(co1, co2)
# the code object's filename comes from the second compilation step
self.assertEqual(co2.co_filename, '%s3' % fname)
# raise exception when node type doesn't match with compile mode
co1 = compile('print(1)', '<string>', 'exec', _ast.PyCF_ONLY_AST)
self.assertRaises(TypeError, compile, co1, '<ast>', 'eval')
# raise exception when node type is no start node
self.assertRaises(TypeError, compile, _ast.If(), '<ast>', 'exec')
# raise exception when node has invalid children
ast = _ast.Module()
ast.body = [_ast.BoolOp()]
self.assertRaises(TypeError, compile, ast, '<ast>', 'exec')
def test_main():
    """Entry point used by CPython's regrtest harness: run the whole suite."""
    support.run_unittest(TestSpecifics)
if __name__ == "__main__":
    test_main()
| mancoast/CPythonPyc_test | fail/313_test_compile.py | Python | gpl-3.0 | 15,716 |
from abc import ABCMeta, abstractmethod
from core.statements import Statement
from enum import Enum
from typing import Dict, List, Set, Tuple, Generator, Union
class Node(metaclass=ABCMeta):
    """Abstract node of a control flow graph.

    Equality and hashing are based solely on the node identifier, so two
    nodes with the same identifier compare equal regardless of their
    statements.
    """

    def __init__(self, identifier: int, stmts: List[Statement]):
        """Create a node.

        :param identifier: identifier associated with the node
        :param stmts: list of statements stored in the node
        """
        self._identifier = identifier
        self._stmts = stmts

    @property
    def identifier(self):
        return self._identifier

    @property
    def stmts(self):
        return self._stmts

    def __eq__(self, other: 'Node'):
        return self.identifier == other.identifier

    def __ne__(self, other: 'Node'):
        return not self == other

    def __hash__(self):
        return hash(self.identifier)

    def __repr__(self):
        return str(self)

    @abstractmethod
    def __str__(self):
        """Return the string representation of the node."""

    def size(self):
        """Return the number of statements stored in the node."""
        return len(self._stmts)
class Basic(Node):
    """Basic (straight-line) node of a control flow graph."""

    def __init__(self, identifier: int, stmts: List[Statement] = None):
        """Create a basic node.

        :param identifier: identifier associated with the node
        :param stmts: list of statements stored in the node; any falsy
            value (``None`` or an empty list) is replaced by a fresh list
        """
        super().__init__(identifier, stmts or [])

    def __str__(self):
        return str(self._identifier)
class Loop(Node):
    """Loop-head node of a control flow graph."""

    def __init__(self, identifier: int, stmts: List[Statement] = None):
        """Create a loop-head node.

        :param identifier: identifier associated with the node
        :param stmts: list of statements stored in the node; any falsy
            value (``None`` or an empty list) is replaced by a fresh list
        """
        super().__init__(identifier, stmts or [])

    def __str__(self):
        return str(self._identifier)
class Edge(metaclass=ABCMeta):
    """Abstract edge of a control flow graph.

    Equality and hashing are based on the (source, target) node pair.
    """

    class Kind(Enum):
        """Kind of an edge of a control flow graph."""
        IF_OUT = -2    # if exit edge
        LOOP_OUT = -1  # loop exit edge
        DEFAULT = 0
        LOOP_IN = 1    # loop entry edge
        IF_IN = 2      # if entry edge

    def __init__(self, source: Node, target: Node, kind: Kind = Kind.DEFAULT):
        """Create an edge.

        :param source: source node of the edge
        :param target: target node of the edge
        :param kind: kind of the edge
        """
        self._source = source
        self._target = target
        self._kind = kind

    @property
    def source(self):
        return self._source

    @property
    def target(self):
        return self._target

    @property
    def kind(self):
        return self._kind

    def __eq__(self, other: 'Edge'):
        return (self.source, self.target) == (other.source, other.target)

    def __ne__(self, other: 'Edge'):
        return not self == other

    def __hash__(self):
        return hash((self.source, self.target))

    def __repr__(self):
        return str(self)

    @abstractmethod
    def __str__(self):
        """Return the string representation of the edge."""
class Unconditional(Edge):
    """Edge of a control flow graph that is always taken."""

    def __init__(self, source: Union[Node, None], target: Union[Node, None], kind=Edge.Kind.DEFAULT):
        """Create an unconditional edge.

        :param source: source node of the edge
        :param target: target node of the edge
        :param kind: kind of the edge
        """
        super().__init__(source, target, kind)

    def __str__(self):
        return "{} -- {}".format(self.source, self.target)
class Conditional(Edge):
    """Edge of a control flow graph that is taken only when a condition holds."""

    def __init__(self, source: Union[Node, None], condition: Statement, target: Union[Node, None],
                 kind=Edge.Kind.DEFAULT):
        """Create a conditional edge.

        :param source: source node of the edge
        :param condition: condition associated with the edge
        :param target: target node of the edge
        :param kind: kind of the edge
        """
        super().__init__(source, target, kind)
        self._condition = condition

    @property
    def condition(self):
        return self._condition

    def __str__(self):
        return "{} -- {} -- {}".format(self.source, self.condition, self.target)
class ControlFlowGraph:
    """Control flow graph: nodes plus directed edges, with distinguished
    entry and exit nodes.

    Nodes are indexed by identifier and edges by (source, target) pair.
    """

    def __init__(self, nodes: Set[Node], in_node: Node, out_node: Node, edges: Set[Edge]):
        """Build the control flow graph.

        :param nodes: set of nodes of the control flow graph
        :param in_node: entry node of the control flow graph
        :param out_node: exit node of the control flow graph
        :param edges: set of edges of the control flow graph
        """
        self._nodes = {n.identifier: n for n in nodes}
        self._in_node = in_node
        self._out_node = out_node
        self._edges = {(e.source, e.target): e for e in edges}

    @property
    def nodes(self) -> Dict[int, Node]:
        return self._nodes

    @property
    def in_node(self) -> Node:
        return self._in_node

    @property
    def out_node(self) -> Node:
        return self._out_node

    @property
    def edges(self) -> Dict[Tuple[Node, Node], Edge]:
        return self._edges

    def nodes_forward(self) -> Generator[Node, None, None]:
        """Yield nodes reachable from the entry node.

        The worklist is popped from the back while successors are enqueued
        at the front, preserving the original visit order.
        """
        pending = [self.in_node]
        visited = set()
        while pending:
            node = pending.pop()
            if node in visited:
                continue
            visited.add(node)
            yield node
            for successor in self.successors(node):
                pending.insert(0, successor)

    def nodes_backward(self) -> Generator[Node, None, None]:
        """Yield nodes that reach the exit node, starting from it."""
        pending = [self.out_node]
        visited = set()
        while pending:
            node = pending.pop()
            if node in visited:
                continue
            visited.add(node)
            yield node
            for predecessor in self.predecessors(node):
                pending.insert(0, predecessor)

    def in_edges(self, node: Node) -> Set[Edge]:
        """Return the set of edges whose target is the given node."""
        return {edge for (_, dst), edge in self.edges.items() if dst == node}

    def predecessors(self, node: Node) -> Set[Node]:
        """Return the set of nodes with an edge into the given node."""
        return {edge.source for edge in self.in_edges(node)}

    def out_edges(self, node: Node) -> Set[Edge]:
        """Return the set of edges whose source is the given node."""
        return {edge for (src, _), edge in self.edges.items() if src == node}

    def successors(self, node: Node) -> Set[Node]:
        """Return the set of nodes with an edge out of the given node."""
        return {edge.target for edge in self.out_edges(node)}
| gitsimon/spadup-lyra | core/cfg.py | Python | mpl-2.0 | 7,292 |
#!/usr/bin/env python2.7
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
""" Basic selenium test runner
This script is used for running all selenium tests against the server defined
in the configuration yaml file. The script will wait a defined time for the
server to start before running the test. If the server fails to start before
its grace time is up, the script will return with an error code of 3. Error
codes 1 and 2 are reserved by pytest and status 0 is returned only if all the
tests pass.
"""
import os
import sys
import time
import urllib
import pytest # pylint: disable=import-error
# add src to path so that we can do imports from our src
PROJECT_ROOT_PATH = os.path.dirname(os.path.abspath(__file__)) + "/../"
sys.path.append(PROJECT_ROOT_PATH + "src")
from lib import file_ops # NOQA
from lib import environment # NOQA
def wait_for_server():
""" Wait for the server to return a 200 response
"""
sys.stdout.write("Wating on server: ")
for _ in xrange(environment.SERVER_WAIT_TIME):
try:
if urllib.urlopen(environment.APP_URL).getcode() == 200:
print "[Done]"
return True
except IOError:
sys.stdout.write(".")
sys.stdout.flush()
time.sleep(1)
print "[Failed]"
return False
if __name__ == "__main__":
    # Exit code 3 means the server never became reachable
    # (codes 1 and 2 are reserved by pytest for failures/errors).
    if not wait_for_server():
        sys.exit(3)
    # Start every run with an existing, empty log directory.
    file_ops.create_directory(environment.LOG_PATH)
    file_ops.delete_directory_contents(environment.LOG_PATH)
    # Propagate pytest's exit status (0 only when all tests pass).
    sys.exit(pytest.main())
| AleksNeStu/ggrc-core | test/selenium/bin/run_selenium.py | Python | apache-2.0 | 1,534 |
from django.test import TestCase
from nav.models.arnold import Justification
from nav.auditlog import find_modelname
from nav.auditlog.models import LogEntry
from nav.auditlog.utils import get_auditlog_entries
class AuditlogModelTestCase(TestCase):
    """Exercise the LogEntry creation helpers against a trivial model."""

    def setUp(self):
        # This specific model is used because it is very simple
        self.justification = Justification.objects.create(name='testarossa')

    def test_str(self):
        LogEntry.add_log_entry(self.justification, u'str test', 'foo')
        entry = LogEntry.objects.filter(verb='str test').get()
        self.assertEqual(str(entry), 'foo')
        entry.delete()

    def test_add_log_entry_bad_template(self):
        LogEntry.add_log_entry(self.justification, u'bad template test',
                               u'this is a {bad} template')
        entry = LogEntry.objects.filter(verb='bad template test').get()
        self.assertEqual(entry.summary, u'Error creating summary - see error log')
        entry.delete()
        # TODO when on python3: use self.assertLogs(level='ERROR') around the
        # call above and check that a single record mentioning
        # 'KeyError when creating summary:' was emitted.

    def test_add_log_entry_actor_only(self):
        LogEntry.add_log_entry(self.justification, u'actor test',
                               u'actor "{actor}" only is tested')
        entry = LogEntry.objects.filter(verb='actor test').get()
        self.assertEqual(entry.summary, u'actor "testarossa" only is tested')
        entry.delete()

    def test_add_create_entry(self):
        LogEntry.add_create_entry(self.justification, self.justification)
        entry = LogEntry.objects.filter(verb=u'create-justification').get()
        self.assertEqual(entry.summary, u'testarossa created testarossa')
        entry.delete()

    def test_add_delete_entry(self):
        LogEntry.add_delete_entry(self.justification, self.justification)
        entry = LogEntry.objects.filter(verb=u'delete-justification').get()
        self.assertEqual(entry.summary, u'testarossa deleted testarossa')
        entry.delete()

    def test_compare_objects(self):
        j1 = Justification.objects.create(name='ferrari', description='Psst!')
        j2 = Justification.objects.create(name='lambo', description='Hush')
        LogEntry.compare_objects(self.justification, j1, j2,
                                 ('name', 'description'),
                                 ('description',))
        entry = LogEntry.objects.filter(verb=u'edit-justification-name').get()
        self.assertEqual(entry.summary,
                         u'testarossa edited lambo: name changed'
                         u" from 'ferrari' to 'lambo'")
        entry.delete()
        entry = LogEntry.objects.filter(
            verb=u'edit-justification-description'
        ).get()
        self.assertEqual(entry.summary,
                         u'testarossa edited lambo: description changed')
        entry.delete()

    def test_addLog_entry_before(self):
        LogEntry.add_log_entry(self.justification, u'actor test',
                               u'blbl', before=1)
        entry = LogEntry.objects.filter(verb='actor test').get()
        self.assertEqual(entry.before, u'1')
        entry.delete()

    def test_find_name(self):
        # find_modelname resolves a model instance to its db_table name
        self.assertEqual(find_modelname(self.justification), 'blocked_reason')
class AuditlogUtilsTestCase(TestCase):
    """Exercise the query helpers in nav.auditlog.utils."""

    def setUp(self):
        # This specific model is used because it is very simple
        self.justification = Justification.objects.create(name='testarossa')

    def test_get_auditlog_entries(self):
        modelname = 'blocked_reason'  # Justification's db_table
        j1 = Justification.objects.create(name='j1')
        j2 = Justification.objects.create(name='j2')
        LogEntry.add_create_entry(self.justification, j1)
        LogEntry.add_log_entry(self.justification, u'greet',
                               u'{actor} greets {object}',
                               object=j2, subsystem="hello")
        LogEntry.add_log_entry(self.justification, u'deliver',
                               u'{actor} delivers {object} to {target}',
                               object=j1, target=j2, subsystem='delivery')
        # All three entries touch the model...
        result = get_auditlog_entries(modelname=modelname)
        self.assertEqual(result.count(), 3)
        # ...one is in the "hello" subsystem...
        result = get_auditlog_entries(modelname=modelname, subsystem='hello')
        self.assertEqual(result.count(), 1)
        # ...and two involve j1.
        result = get_auditlog_entries(modelname=modelname, pks=[j1.pk])
        self.assertEqual(result.count(), 2)
| UNINETT/nav | tests/integration/auditlog_test.py | Python | gpl-2.0 | 4,631 |
#!/usr/bin/env python
# Copyright (C) 2014 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#
# Authors:
# Daniel Izquierdo <[email protected]>
# Alvaro del Castillo <[email protected]>
import logging, os, re
from vizgrimoire.data_source import DataSource
from vizgrimoire.GrimoireUtils import createJSON
from vizgrimoire.filter import Filter
from vizgrimoire.GrimoireUtils import GetPercentageDiff, GetDates, getPeriod, createJSON, completePeriodIds
from vizgrimoire.metrics.metrics_filter import MetricFilters
class EventsDS(DataSource):
    """Data source for meetup-style events stored in the eventizer database."""

    @staticmethod
    def get_db_name():
        return "db_eventizer"

    @staticmethod
    def get_query_builder():
        """Class used to build queries to get metrics"""
        from vizgrimoire.metrics.query_builder import EventizerQuery
        return EventizerQuery

    @staticmethod
    def get_name():
        return "eventizer"

    @staticmethod
    def get_supported_filters():
        # return ['group','category','city']
        return ['repository']

    @staticmethod
    def get_metrics_core_agg():
        return ['events','members','cities','rsvps','groups', 'rsvps_event']

    @staticmethod
    def get_metrics_core_ts():
        return ['events','members','cities','rsvps','groups']

    @staticmethod
    def get_metrics_core_trends():
        return ['events','members','cities','rsvps','groups']

    @staticmethod
    def get_evolutionary_data(period, startdate, enddate, i_db, filter_=None):
        """Return time-series metrics (plus studies when unfiltered)."""
        logging.warn("evolutionary_data")
        if filter_ is not None:
            sf = EventsDS.get_supported_filters()
            if filter_.get_name() not in sf:
                logging.warn("EventsDS only supports " + ",".join(sf))
                return {}
        metrics = EventsDS.get_metrics_data(period, startdate, enddate, i_db, filter_, True)
        if filter_ is not None:
            studies = {}
        else:
            studies = DataSource.get_studies_data(EventsDS, period, startdate, enddate, True)
        return dict(metrics.items()+studies.items())

    @staticmethod
    def create_evolutionary_report(period, startdate, enddate, destdir, i_db, type_analysis=None):
        """Write the evolutionary metrics JSON file into destdir."""
        data = EventsDS.get_evolutionary_data(period, startdate, enddate, i_db, type_analysis)
        filename = EventsDS().get_evolutionary_filename()
        createJSON(data, os.path.join(destdir, filename))

    @staticmethod
    def get_agg_data(period, startdate, enddate, identities_db, filter_=None):
        """Return aggregated metrics (plus studies when unfiltered)."""
        if filter_ is not None:
            sf = EventsDS.get_supported_filters()
            if filter_.get_name() not in sf:
                logging.warn("EventsDS only supports " + ",".join(sf))
                return {}
        metrics = EventsDS.get_metrics_data(period, startdate, enddate, identities_db, filter_, False)
        if filter_ is not None:
            studies = {}
        else:
            studies = DataSource.get_studies_data(EventsDS, period, startdate, enddate, False)
        return dict(metrics.items()+studies.items())

    @staticmethod
    def create_agg_report(period, startdate, enddate, destdir, i_db, type_analysis=None):
        """Write the aggregated metrics JSON file into destdir."""
        data = EventsDS.get_agg_data(period, startdate, enddate, i_db, type_analysis)
        filename = EventsDS().get_agg_filename()
        createJSON(data, os.path.join(destdir, filename))

    @staticmethod
    def get_filter_items(filter_, startdate, enddate, identities_db):
        """Return the item list for a filter (only 'repository' supported)."""
        items = None
        filter_name = filter_.get_name()
        if filter_name == "repository":
            metric = DataSource.get_metrics("groups", EventsDS)
            items = metric.get_list()
        else:
            logging.error("EventsDS " + filter_name + " not supported")
        return items

    @staticmethod
    def create_filter_report_top(filter_, period, startdate, enddate, destdir, npeople, identities_db):
        """Write per-item top-contributor JSON files for the given filter."""
        from vizgrimoire.report import Report
        items = Report.get_items()
        if items is None:
            items = EventsDS.get_filter_items(filter_, startdate, enddate, identities_db)
        if items is None:  # was "(items == None)"
            return
        items = items['name']
        filter_name = filter_.get_name()
        if not isinstance(items, list):
            items = [items]
        fn = os.path.join(destdir, filter_.get_filename(EventsDS()))
        createJSON(items, fn)
        for item in items:
            item_name = "'"+ item+ "'"
            logging.info(item_name)
            filter_item = Filter(filter_name, item)
            # BUG FIX: was ``in ("repository")`` which is a *substring* test
            # against a plain string, not tuple membership
            if filter_name == "repository":
                logging.warn("Filter 'repository' detected for top info")
                top_authors = EventsDS.get_top_data(startdate, enddate, identities_db, filter_item, npeople)
                logging.warn(filter_item.get_top_filename(EventsDS()))
                fn = os.path.join(destdir, filter_item.get_top_filename(EventsDS()))
                createJSON(top_authors, fn)

    @staticmethod
    def create_filter_report(filter_, period, startdate, enddate, destdir, npeople, identities_db):
        """Write evolution/aggregate/top JSON files for every filter item."""
        from vizgrimoire.report import Report
        items = Report.get_items()
        if items is None:
            items = EventsDS.get_filter_items(filter_, startdate, enddate, identities_db)
        if items is None:  # was "(items == None)"
            return
        filter_name = filter_.get_name()
        items = items['name']
        if not isinstance(items, list):
            items = [items]
        file_items = []
        for item in items:
            # hidden items (leading dot) are prefixed so that they produce
            # valid file names; raw string avoids the invalid "\." escape
            if re.match(r"^\..*", item) is not None:
                item = "_"+item
            file_items.append(item)
        fn = os.path.join(destdir, filter_.get_filename(EventsDS()))
        createJSON(file_items, fn)
        if filter_name == "repository":  # was the buggy ``in ("repository")``
            items_list = {'name' : [], 'events_365' : [], 'rsvps_365' : []}
        else:
            items_list = items
        for item in items:
            logging.info(item)
            filter_item = Filter(filter_.get_name(), item)
            evol_data = EventsDS.get_evolutionary_data(period, startdate, enddate, identities_db, filter_item)
            fn = os.path.join(destdir, filter_item.get_evolutionary_filename(EventsDS()))
            createJSON(completePeriodIds(evol_data, period, startdate, enddate), fn)
            agg = EventsDS.get_agg_data(period, startdate, enddate, identities_db, filter_item)
            fn = os.path.join(destdir, filter_item.get_static_filename(EventsDS()))
            createJSON(agg, fn)
            if filter_name == "repository":
                items_list['name'].append(item.replace('/', '_'))
                items_list['events_365'].append(agg['events_365'])
                items_list['rsvps_365'].append(agg['rsvps_365'])
        EventsDS.create_filter_report_top(filter_, period, startdate, enddate, destdir, npeople, identities_db)
        fn = os.path.join(destdir, filter_.get_filename(EventsDS()))
        createJSON(items_list, fn)

    @staticmethod
    def get_top_metrics():
        return ["rsvps","groups"]

    @staticmethod
    def get_top_data(startdate, enddate, identities_db, filter_, npeople):
        """Return top rsvps/events (and groups when unfiltered) for several
        time windows (all time, last month, last year)."""
        top = {}
        attendees = DataSource.get_metrics("rsvps", EventsDS)
        period = attendees.filters.period
        type_analysis = None
        mfilter = attendees.filters  # updated filters
        filters = None  # original filters
        if filter_ is not None:
            type_analysis = filter_.get_type_analysis()
            mfilter = MetricFilters(period, startdate, enddate, type_analysis, npeople)
            # temporarily swap in the filtered MetricFilters
            filters = attendees.filters
            attendees.filters = mfilter
        top['rsvps.'] = attendees.get_list(mfilter, 0)
        top['rsvps.last month'] = attendees.get_list(mfilter, 31)
        top['rsvps.last year'] = attendees.get_list(mfilter, 365)
        if filter_ is not None:
            attendees.filters = filters
        events = DataSource.get_metrics("events", EventsDS)
        if filter_ is not None:
            filters = events.filters
            events.filters = mfilter
        top['events.'] = events.get_list(mfilter, 0)
        top['events.last month'] = events.get_list(mfilter,31)
        top['events.last year'] = events.get_list(mfilter, 365)
        if filter_ is not None:
            events.filters = filters
        if filter_ is not None:
            groups = DataSource.get_metrics("groups", EventsDS)
            filters = groups.filters
            groups.filters = mfilter
            # "!=" replaces the deprecated Python 2 "<>" operator
            if filter_.get_name() != 'repository':
                top['groups.'] = groups.get_list(mfilter, 0)
                top['groups.last month'] = groups.get_list(mfilter, 31)
                top['groups.last year'] = groups.get_list(mfilter, 365)
            groups.filters = filters
        return top

    @staticmethod
    def create_top_report(startdate, enddate, destdir, npeople, i_db):
        """Write the global top JSON file into destdir."""
        data = EventsDS.get_top_data(startdate, enddate, i_db, None, npeople)
        top_file = destdir+"/"+EventsDS().get_top_filename()
        createJSON(data, top_file)

    @staticmethod
    def get_top_people(startdate, enddate, identities_db, npeople):
        """Top people is disabled for this data source: always returns [].

        The previous implementation kept the full computation after an
        unconditional ``return []``; that unreachable dead code has been
        removed.
        """
        return []
| jalonsob/Informes | vizgrimoire/EventsDS.py | Python | gpl-3.0 | 10,162 |
from haystack.generic_views import FacetedSearchView
from haystack.generic_views import FacetedSearchMixin
from hs_core.discovery_form import DiscoveryForm, FACETS_TO_SHOW
from haystack.query import SearchQuerySet
from django.conf import settings
class DiscoveryView(FacetedSearchView):
    """Faceted search view for resource discovery.

    Facet counts are cached in the session so that repeated requests with
    an unchanged query against an unchanged index do not recompute them.
    ``form_valid`` records whether the query text changed; that flag plus
    the total index size decide whether the cached counts may be reused.
    """

    facet_fields = FACETS_TO_SHOW  # interpreted by FacetedSearchView; must be attribute
    form_class = DiscoveryForm

    def form_valid(self, form):
        """Run the search, track query changes in the session and render.

        Sets ``session['query_changed']`` / ``session['current_query']`` for
        use by :meth:`get_context_data`.
        """
        self.queryset = form.search()

        # Detect whether the query text differs from the last one seen in
        # this session.  An empty stored query counts as "no previous query".
        query_text = self.request.GET.get('q', '')
        session = self.request.session
        previous_query = session.get('current_query', None)
        if previous_query:
            query_changed = previous_query != query_text
        else:
            query_changed = bool(len(query_text))
        session['query_changed'] = query_changed
        if query_changed:
            session['current_query'] = query_text

        sortfield = self.request.GET.get('sort_order')
        sortdir = self.request.GET.get('sort_direction')
        # must use exact match or SOLR will use stemmed words with unpredictable results!
        if sortfield is not None and sortdir is not None:
            self.queryset = self.queryset.order_by(sortdir + sortfield)

        maps_key = settings.MAPS_KEY if hasattr(settings, 'MAPS_KEY') else ''
        context_kwargs = {
            self.form_name: form,
            'query': form.cleaned_data.get(self.search_field),
            'object_list': self.queryset,
            'maps_key': maps_key,
        }
        if form.parse_error is not None:
            # if not None, the template shows an error message
            context_kwargs['parse_error'] = form.parse_error
        context = self.get_context_data(**context_kwargs)
        return self.render_to_response(context)

    def get_context_data(self, **kwargs):
        """Attach facet counts to the context, reusing the session cache.

        The cached counts are reused only when the total number of indexed
        documents is unchanged AND the query text did not change; otherwise
        the counts are recomputed once and re-cached (the original code
        recomputed ``facet_counts()`` twice per branch).
        """
        # Deliberately skips FacetedSearchMixin in the MRO, as the mixin's
        # own facet handling is replaced by the session cache below.
        context = super(FacetedSearchMixin, self).get_context_data(**kwargs)
        session = self.request.session
        total_results = SearchQuerySet().all().count()

        cached_facets = session.get('facets_items', None)
        # NOTE: session['total_results'] is tested for truthiness on purpose:
        # a stored count of 0 is treated the same as "not set yet".
        cache_is_valid = (
            session.get('total_results', None) and
            session['total_results'] == total_results and
            not session.get('query_changed', True) and
            cached_facets
        )
        if cache_is_valid:
            context.update({'facets': cached_facets})
        else:
            facets = self.queryset.facet_counts()
            session['total_results'] = total_results
            session['facets_items'] = facets
            context.update({'facets': facets})
        return context

    def get_queryset(self):
        """Return the queryset with all discovery facets attached.

        With an empty query, browse the whole index instead of searching.
        """
        if len(self.request.GET.get('q', '')):
            # Skip FacetedSearchMixin.get_queryset: facets are added
            # explicitly below for exactly self.facet_fields.
            qs = super(FacetedSearchMixin, self).get_queryset()
        else:
            qs = SearchQuerySet().all()
        for field in self.facet_fields:
            qs = qs.facet(field)
        return qs
| hydroshare/hydroshare | hs_core/views/discovery_view.py | Python | bsd-3-clause | 4,275 |
#
# Copyright (c) 2017 Juniper Networks, Inc. All rights reserved.
#
"""
VNC Ingress management for kubernetes
"""
import copy
import uuid

from config_db import *
from vnc_api.vnc_api import *
from kube_manager.common.kube_config_db import IngressKM
from kube_manager.common.kube_config_db import NamespaceKM
from kube_manager.vnc.loadbalancer import ServiceLbManager
from kube_manager.vnc.loadbalancer import ServiceLbListenerManager
from kube_manager.vnc.loadbalancer import ServiceLbPoolManager
from kube_manager.vnc.loadbalancer import ServiceLbMemberManager
from vnc_kubernetes_config import VncKubernetesConfig as vnc_kube_config
from vnc_common import VncCommon
from kube_manager.common.utils import get_fip_pool_fq_name_from_dict_string
from cStringIO import StringIO
from cfgm_common.utils import cgitb_hook
class VncIngress(VncCommon):
    """Manage Contrail loadbalancer objects for Kubernetes Ingress resources.

    Each Ingress is realised as a loadbalancer: one listener per
    protocol/port (HTTP and, when TLS is configured, TERMINATED_HTTPS),
    one pool per listener and one member per backend service.  Ingress
    updates are applied by diffing the desired backend list (from the
    Ingress spec) against the backend list reconstructed from the existing
    loadbalancer objects.
    """

    def __init__(self):
        self._k8s_event_type = 'Ingress'
        super(VncIngress,self).__init__(self._k8s_event_type)
        self._name = type(self).__name__
        self._args = vnc_kube_config.args()
        self._queue = vnc_kube_config.queue()
        self._vnc_lib = vnc_kube_config.vnc_lib()
        self._logger = vnc_kube_config.logger()
        self._kube = vnc_kube_config.kube()
        self._label_cache = vnc_kube_config.label_cache()
        self._service_fip_pool = vnc_kube_config.service_fip_pool()
        # Maps a 'service:<ns>-<name>' label key to the set of ingress uuids
        # that reference that service as a backend.
        self._ingress_label_cache = {}
        self._default_vn_obj = None
        self._fip_pool_obj = None
        self.service_lb_mgr = ServiceLbManager()
        self.service_ll_mgr = ServiceLbListenerManager()
        self.service_lb_pool_mgr = ServiceLbPoolManager()
        self.service_lb_member_mgr = ServiceLbMemberManager()

    def _get_project(self, ns_name):
        """Read and return the Project object for a namespace, or None."""
        proj_fq_name = vnc_kube_config.cluster_project_fq_name(ns_name)
        try:
            proj_obj = self._vnc_lib.project_read(fq_name=proj_fq_name)
        except NoIdError:
            self._logger.error("%s - %s Not Found" %(self._name, proj_fq_name))
            return None
        return proj_obj

    def _get_namespace(self, ns_name):
        """Return the cached NamespaceKM entry for the namespace."""
        return NamespaceKM.find_by_name_or_uuid(ns_name)

    def _is_network_isolated(self, ns_name):
        """True if the namespace uses an isolated virtual network."""
        return self._get_namespace(ns_name).is_isolated()

    def _get_network(self, ns_name):
        """Return the virtual network for the namespace.

        Isolated namespaces get their own network; otherwise the cluster
        default network is used (and cached in self._default_vn_obj).
        """
        ns = self._get_namespace(ns_name)
        if ns.is_isolated():
            vn_fq_name = ns.get_isolated_network_fq_name()
        else:
            if self._default_vn_obj:
                return self._default_vn_obj
            vn_fq_name = vnc_kube_config.cluster_default_network_fq_name()
        try:
            vn_obj = self._vnc_lib.virtual_network_read(fq_name=vn_fq_name)
        except NoIdError:
            self._logger.error("%s - %s Not Found" %(self._name, vn_fq_name))
            return None
        # Only cache the shared default network, never an isolated one.
        if not ns.is_isolated():
            self._default_vn_obj = vn_obj
        return vn_obj

    def _get_pod_ipam_subnet_uuid(self, vn_obj):
        """Return the uuid of the pod IPAM subnet on the given network."""
        pod_ipam_subnet_uuid = None
        fq_name = vnc_kube_config.pod_ipam_fq_name()
        vn = VirtualNetworkKM.find_by_name_or_uuid(vn_obj.get_uuid())
        pod_ipam_subnet_uuid = vn.get_ipam_subnet_uuid(fq_name)
        if pod_ipam_subnet_uuid is None:
            self._logger.error("%s - %s Not Found" %(self._name, fq_name))
        return pod_ipam_subnet_uuid

    def _get_cluster_service_fip(self, name, ns_name, lb_obj):
        """Allocate a cluster-service floating IP for an isolated namespace.

        Attaches an egress FIP from the cluster service FIP pool to the
        loadbalancer's VMI so pods in the isolated network can be reached.
        """
        if not self._service_fip_pool:
            return None
        # Build a skeleton pool object; only uuid/fq_name/name are needed
        # as the parent reference for the new FloatingIp.
        fip_pool = FloatingIpPool()
        fip_pool.uuid = self._service_fip_pool.uuid
        fip_pool.fq_name = self._service_fip_pool.fq_name
        fip_pool.name = self._service_fip_pool.name
        fip_uuid = str(uuid.uuid4())
        fip_name = VncCommon.make_name(name, fip_uuid)
        display_name=VncCommon.make_display_name(ns_name, name)
        fip_obj = FloatingIp(name="cluster-svc-fip-%s"% (fip_name),
                      parent_obj=fip_pool,
                      floating_ip_traffic_direction='egress',
                      display_name=display_name)
        fip_obj.uuid = fip_uuid
        proj_obj = self._get_project(ns_name)
        fip_obj.set_project(proj_obj)
        vmi_id = lb_obj.virtual_machine_interface_refs[0]['uuid']
        vmi_obj = self._vnc_lib.virtual_machine_interface_read(id=vmi_id)
        if vmi_obj:
            fip_obj.set_virtual_machine_interface(vmi_obj)
        FloatingIpKM.add_annotations(self, fip_obj, name, ns_name)
        try:
            self._vnc_lib.floating_ip_create(fip_obj)
            fip = FloatingIpKM.locate(fip_obj.uuid)
        except Exception as e:
            string_buf = StringIO()
            cgitb_hook(file=string_buf, format="text")
            err_msg = string_buf.getvalue()
            self._logger.error("%s - %s" %(self._name, err_msg))
        return

    def _get_public_fip_pool(self, fip_pool_fq_name):
        """Read (and cache) the public floating IP pool object."""
        if self._fip_pool_obj:
            return self._fip_pool_obj
        try:
            fip_pool_obj = self._vnc_lib. \
                           floating_ip_pool_read(fq_name=fip_pool_fq_name)
        except NoIdError:
            self._logger.error("%s - %s Not Found" \
                 %(self._name, fip_pool_fq_name))
            return None
        self._fip_pool_obj = fip_pool_obj
        return fip_pool_obj

    def _get_floating_ip(self, name,
            proj_obj, external_ip=None, vmi_obj=None):
        """Return an existing public FIP on the VMI or create a new one.

        Returns None when no public FIP pool is configured or creation
        fails.
        """
        if not vnc_kube_config.is_public_fip_pool_configured():
            return None
        try:
            fip_pool_fq_name = get_fip_pool_fq_name_from_dict_string(
                self._args.public_fip_pool)
        except Exception as e:
            string_buf = StringIO()
            cgitb_hook(file=string_buf, format="text")
            err_msg = string_buf.getvalue()
            self._logger.error("%s - %s" %(self._name, err_msg))
            return None
        if vmi_obj:
            # NOTE(review): only the first back-ref is examined; any further
            # FIPs on the VMI are ignored by design of this loop.
            fip_refs = vmi_obj.get_floating_ip_back_refs()
            for ref in fip_refs or []:
                fip = FloatingIpKM.get(ref['uuid'])
                if fip and fip.fq_name[:-1] == fip_pool_fq_name:
                    return fip
                else:
                    break
        fip_pool = self._get_public_fip_pool(fip_pool_fq_name)
        if fip_pool is None:
            return None
        fip_uuid = str(uuid.uuid4())
        fip_name = VncCommon.make_name(name, fip_uuid)
        fip_obj = FloatingIp(fip_name, fip_pool)
        fip_obj.uuid = fip_uuid
        fip_obj.set_project(proj_obj)
        if vmi_obj:
            fip_obj.set_virtual_machine_interface(vmi_obj)
        if external_ip:
            fip_obj.floating_ip_address = external_ip
        try:
            self._vnc_lib.floating_ip_create(fip_obj)
            fip = FloatingIpKM.locate(fip_obj.uuid)
        except Exception as e:
            string_buf = StringIO()
            cgitb_hook(file=string_buf, format="text")
            err_msg = string_buf.getvalue()
            self._logger.error("%s - %s" %(self._name, err_msg))
            return None
        return fip

    def _allocate_floating_ip(self, lb_obj, name, proj_obj, external_ip):
        """Allocate a public FIP bound to the loadbalancer's VMI."""
        vmi_id = lb_obj.virtual_machine_interface_refs[0]['uuid']
        vmi_obj = self._vnc_lib.virtual_machine_interface_read(id=vmi_id)
        if vmi_obj is None:
            self._logger.error("%s - %s Vmi %s Not Found" \
                 %(self._name, lb_obj.name, vmi_id))
            return None
        fip = self._get_floating_ip(name, proj_obj, external_ip, vmi_obj)
        return fip

    def _deallocate_floating_ip(self, lb, delete_svc_fip=True):
        """Release all FIPs on the loadbalancer's VMI.

        With delete_svc_fip=False, the cluster-service FIP (from the
        service FIP pool) is kept.
        """
        vmi_id = list(lb.virtual_machine_interfaces)[0]
        vmi = VirtualMachineInterfaceKM.get(vmi_id)
        if vmi is None:
            self._logger.error("%s - %s Vmi %s Not Found" \
                 %(self._name, lb.name, vmi_id))
            return
        fip_list = vmi.floating_ips.copy()
        for fip_id in fip_list or []:
            fip_obj = self._vnc_lib.floating_ip_read(id=fip_id)
            if delete_svc_fip == False and \
               self._service_fip_pool.fq_name == fip_obj.fq_name[:-1]:
                continue
            # Detach from the VMI before deleting the FIP object.
            fip_obj.set_virtual_machine_interface_list([])
            self._vnc_lib.floating_ip_update(fip_obj)
            self._vnc_lib.floating_ip_delete(id=fip_obj.uuid)
            FloatingIpKM.delete(fip_obj.uuid)

    def _update_floating_ip(self, name, ns_name, external_ip, lb_obj):
        """Allocate a FIP for the LB and record the externalIP annotation."""
        proj_obj = self._get_project(ns_name)
        fip = self._allocate_floating_ip(lb_obj,
                    name, proj_obj, external_ip)
        if fip:
            lb_obj.add_annotations(
                KeyValuePair(key='externalIP', value=external_ip))
            self._vnc_lib.loadbalancer_update(lb_obj)
        return fip

    def _update_kube_api_server(self, name, ns_name, lb_obj, fip):
        """Patch the Ingress status in kube-apiserver with the LB VIP(s)."""
        vip_dict_list = []
        if fip:
            vip_dict = {}
            vip_dict['ip'] = fip.address
            vip_dict_list.append(vip_dict)
        vip_dict = {}
        vip_dict['ip'] = lb_obj._loadbalancer_properties.vip_address
        vip_dict_list.append(vip_dict)
        patch = {'status': {'loadBalancer': {'ingress': vip_dict_list}}}
        self._kube.patch_resource("ingresses", name, patch,
                                  ns_name, beta=True, sub_resource_name='status')

    def _find_ingress(self, ingress_cache, ns_name, service_name):
        """Return the set of ingress uuids that reference the service."""
        if not ns_name or not service_name:
            return
        key = 'service'
        value = '-'.join([ns_name, service_name])
        labels = {key:value}
        result = set()
        for label in labels.items():
            key = self._label_cache._get_key(label)
            ingress_ids = ingress_cache.get(key, set())
            #no matching label
            if not ingress_ids:
                return ingress_ids
            if not result:
                result = ingress_ids.copy()
            else:
                result.intersection_update(ingress_ids)
        return result

    def _clear_ingress_cache_uuid(self, ingress_cache, ingress_uuid):
        """Remove an ingress uuid from every label entry in the cache."""
        if not ingress_uuid:
            return
        key_list = [k for k,v in ingress_cache.items() if ingress_uuid in v]
        for key in key_list or []:
            label = tuple(key.split(':'))
            self._label_cache._remove_label(key, ingress_cache, label, ingress_uuid)

    def _clear_ingress_cache(self, ingress_cache,
            ns_name, service_name, ingress_uuid):
        """Remove the ingress uuid from the cache entry of one service."""
        if not ns_name or not service_name:
            return
        key = 'service'
        value = '-'.join([ns_name, service_name])
        labels = {key:value}
        for label in labels.items() or []:
            key = self._label_cache._get_key(label)
            self._label_cache._remove_label(key,
                ingress_cache, label, ingress_uuid)

    def _update_ingress_cache(self, ingress_cache,
            ns_name, service_name, ingress_uuid):
        """Record that the ingress uuid references the given service."""
        if not ns_name or not service_name:
            return
        key = 'service'
        value = '-'.join([ns_name, service_name])
        labels = {key:value}
        for label in labels.items() or []:
            key = self._label_cache._get_key(label)
            self._label_cache._locate_label(key,
                ingress_cache, label, ingress_uuid)

    def _vnc_create_member(self, pool, address, port, annotations):
        """Create a loadbalancer member in VNC and return the object."""
        pool_obj = self.service_lb_pool_mgr.read(pool.uuid)
        member_obj = self.service_lb_member_mgr.create(pool_obj,
                          address, port, annotations)
        return member_obj

    def _vnc_update_member(self, member_id, address, port, annotations):
        """Update a loadbalancer member in VNC and return the object."""
        member_obj = self.service_lb_member_mgr.update(member_id,
                          address, port, annotations)
        return member_obj

    def _vnc_create_pool(self, ns_name, ll, port, lb_algorithm, annotations):
        """Create a loadbalancer pool for a listener and return the object."""
        proj_obj = self._get_project(ns_name)
        ll_obj = self.service_ll_mgr.read(ll.uuid)
        pool_obj = self.service_lb_pool_mgr.create(ll_obj, proj_obj,
                                            port, lb_algorithm, annotations)
        return pool_obj

    def _vnc_create_listeners(self, ns_name, lb, port):
        """Create a loadbalancer listener and return the object."""
        proj_obj = self._get_project(ns_name)
        lb_obj = self.service_lb_mgr.read(lb.uuid)
        ll_obj = self.service_ll_mgr.create(lb_obj, proj_obj, port)
        return ll_obj

    def _vnc_create_lb(self, uid, name, ns_name, annotations):
        """Create the loadbalancer object for an Ingress.

        Also allocates the cluster-service FIP for isolated namespaces,
        the public FIP (honoring an 'externalIP' annotation) and patches
        the Ingress status in kube-apiserver.
        """
        proj_obj = self._get_project(ns_name)
        vn_obj = self._get_network(ns_name)
        if proj_obj is None or vn_obj is None:
            return None

        vip_address = None
        pod_ipam_subnet_uuid = self._get_pod_ipam_subnet_uuid(vn_obj)
        lb_obj = self.service_lb_mgr.create(self._k8s_event_type, ns_name, uid,
                        name, proj_obj, vn_obj, vip_address, pod_ipam_subnet_uuid)
        if lb_obj:
            if self._is_network_isolated(ns_name):
                self._get_cluster_service_fip(name, ns_name, lb_obj)
            external_ip = None
            if annotations and 'externalIP' in annotations:
                external_ip = annotations['externalIP']
            fip = self._update_floating_ip(name,
                            ns_name, external_ip, lb_obj)
            self._update_kube_api_server(name, ns_name, lb_obj, fip)
        else:
            self._logger.error("%s - %s LB Not Created" %(self._name, name))

        return lb_obj

    def _vnc_delete_member(self, member_id):
        """Delete a loadbalancer member from VNC."""
        self.service_lb_member_mgr.delete(member_id)

    def _vnc_delete_pool(self, pool_id):
        """Delete a loadbalancer pool from VNC."""
        self.service_lb_pool_mgr.delete(pool_id)

    def _vnc_delete_listener(self, ll_id):
        """Delete a loadbalancer listener from VNC."""
        self.service_ll_mgr.delete(ll_id)

    def _vnc_delete_lb(self, lb):
        """Release the LB's floating IPs and delete the loadbalancer."""
        self._deallocate_floating_ip(lb)
        self.service_lb_mgr.delete(lb.uuid)

    def _get_old_backend_list(self, lb):
        """Reconstruct the backend list from the existing LB objects.

        The returned entries have the same shape as those produced by
        _get_new_backend_list so the two lists can be diffed.
        """
        backend_list = []
        listener_list = lb.loadbalancer_listeners
        for ll_id in listener_list:
            backend = {}
            backend['listener_id'] = ll_id
            ll = LoadbalancerListenerKM.get(ll_id)
            backend['listener'] = {}
            backend['listener']['protocol'] = ll.params['protocol']
            # BUG FIX: the protocol was previously compared against the
            # misspelled 'TERMINTED_HTTPS', so the TLS container params were
            # never copied and HTTPS listeners were recreated on every update.
            if backend['listener']['protocol'] == 'TERMINATED_HTTPS':
                if ll.params['default_tls_container']:
                    backend['listener']['default_tls_container'] = \
                        ll.params['default_tls_container']
                if ll.params['sni_containers']:
                    backend['listener']['sni_containers'] = \
                        ll.params['sni_containers']
            pool_id = ll.loadbalancer_pool
            if pool_id:
                pool = LoadbalancerPoolKM.get(pool_id)
                # The KM cache may not hold annotations yet; fall back to a
                # direct VNC read and normalise into the same dict shape.
                if pool.annotations is None:
                    annotations = {}
                    kvps = []
                    pool_obj = self._vnc_lib.loadbalancer_pool_read(id=pool_id)
                    pool_obj_kvp = pool_obj.annotations.key_value_pair
                    kvps_len = len(pool_obj_kvp)
                    for count in range(0, kvps_len):
                        kvp = {}
                        kvp['key'] = pool_obj_kvp[count].key
                        kvp['value'] = pool_obj_kvp[count].value
                        kvps.append(kvp)
                    annotations['key_value_pair'] = kvps
                else:
                    annotations = pool.annotations
                backend['pool_id'] = pool_id
                backend['annotations'] = {}
                for kvp in annotations['key_value_pair'] or []:
                    key = kvp['key']
                    value = kvp['value']
                    backend['annotations'][key] = value
                backend['pool'] = {}
                backend['pool']['protocol'] = pool.params['protocol']
                backend['member'] = {}
                if len(pool.members) == 0:
                    continue
                # Each ingress pool is expected to carry a single member.
                member_id = list(pool.members)[0]
                member = LoadbalancerMemberKM.get(member_id)
                if member.annotations is None:
                    annotations = {}
                    kvps = []
                    member_obj = self._vnc_lib. \
                                 loadbalancer_member_read(id=member_id)
                    member_obj_kvp = member_obj.annotations.key_value_pair
                    kvps_len = len(member_obj_kvp)
                    for count in range(0, kvps_len):
                        kvp = {}
                        kvp['key'] = member_obj_kvp[count].key
                        kvp['value'] = member_obj_kvp[count].value
                        kvps.append(kvp)
                    annotations['key_value_pair'] = kvps
                else:
                    annotations = member.annotations
                backend['member_id'] = member_id
                protocol_port = member.params['protocol_port']
                for kvp in annotations['key_value_pair'] or []:
                    if kvp['key'] == 'serviceName':
                        backend['member']['serviceName'] = kvp['value']
                        backend['member']['servicePort'] = protocol_port
                        break
            backend_list.append(backend)
        return backend_list

    def _get_tls_dict(self, spec, ns_name):
        """Map TLS hosts to namespaced secret names from the Ingress spec.

        Hosts not listed under a tls entry map through the 'ALL' key.
        """
        tls_dict = {}
        if 'tls' in spec:
            tls_list = spec['tls']
            for tls in tls_list:
                if not 'secretName' in tls:
                    continue
                if 'hosts' in tls:
                    hosts = tls['hosts']
                else:
                    hosts = ['ALL']
                for host in hosts:
                    tls_dict[host] = ns_name + '__' + tls['secretName']
        return tls_dict

    def _get_new_backend_list(self, spec, ns_name):
        """Build the desired backend list from an Ingress spec.

        Produces one HTTP backend per rule path (plus one per default
        backend) and mirrors each TLS-covered backend with a
        TERMINATED_HTTPS variant.
        """
        tls_dict = self._get_tls_dict(spec, ns_name)
        backend_list = []
        rules = []
        if 'rules' in spec:
            rules = spec['rules']
            for rule in rules:
                if 'http' not in rule:
                    continue
                paths = rule['http']['paths']
                for path in paths or []:
                    backend = {}
                    backend['annotations'] = {}
                    backend['listener'] = {}
                    backend['pool'] = {}
                    backend['member'] = {}
                    backend['listener']['protocol'] = 'HTTP'
                    backend['pool']['protocol'] = 'HTTP'
                    secretname = ""
                    virtual_host = False
                    if 'host' in rule:
                        host = rule['host']
                        backend['annotations']['host'] = host
                        if host in tls_dict.keys():
                            secretname = tls_dict[host]
                            virtual_host = True
                    if 'path' in path:
                        backend['annotations']['path'] = path['path']
                    if virtual_host == False and 'ALL' in tls_dict.keys():
                        secretname = 'ALL'
                    service = path['backend']
                    backend['annotations']['type'] = 'acl'
                    backend['member']['serviceName'] = service['serviceName']
                    backend['member']['servicePort'] = service['servicePort']
                    backend_list.append(backend)
                    if secretname:
                        # Mirror this backend on the TLS listener.
                        backend_https = copy.deepcopy(backend)
                        backend_https['listener']['protocol'] = 'TERMINATED_HTTPS'
                        if virtual_host:
                            backend_https['listener']['sni_containers'] = [secretname]
                        else:
                            backend_https['listener']['default_tls_container'] = tls_dict['ALL']
                        backend_list.append(backend_https)
        if 'backend' in spec:
            service = spec['backend']
            backend = {}
            backend['annotations'] = {}
            backend['listener'] = {}
            backend['pool'] = {}
            backend['member'] = {}
            backend['listener']['protocol'] = 'HTTP'
            backend['pool']['protocol'] = 'HTTP'
            backend['annotations']['type'] = 'default'
            backend['member']['serviceName'] = service['serviceName']
            backend['member']['servicePort'] = service['servicePort']
            backend_list.append(backend)
            if 'ALL' in tls_dict.keys():
                backend_https = copy.deepcopy(backend)
                backend_https['listener']['protocol'] = 'TERMINATED_HTTPS'
                backend_https['listener']['default_tls_container'] = tls_dict['ALL']
                backend_list.append(backend_https)
        return backend_list

    def _create_member(self, ns_name, backend_member, pool):
        """Create (or reuse) the member for a backend service in a pool.

        Resolves the service's clusterIP via kube-apiserver; returns the
        member KM entry or None on failure.
        """
        resource_type = "services"
        service_name = backend_member['serviceName']
        service_port = backend_member['servicePort']
        service_info = self._kube.get_resource(resource_type,
                                service_name, ns_name)
        member = None
        if service_info and 'clusterIP' in service_info['spec']:
            service_ip = service_info['spec']['clusterIP']
            self._logger.debug("%s - clusterIP for service %s - %s" \
                 %(self._name, service_name, service_ip))
            member_match = False
            annotations = {}
            annotations['serviceName'] = service_name
            for member_id in pool.members:
                member = LoadbalancerMemberKM.get(member_id)
                if member and member.params['address'] == service_ip \
                   and member.params['protocol_port'] == service_port:
                    member_match = True
                    break
            if not member_match:
                member_obj = self._vnc_create_member(pool,
                                  service_ip, service_port, annotations)
                if member_obj:
                    member = LoadbalancerMemberKM.locate(member_obj.uuid)
                else:
                    self._logger.error(
                         "%s - (%s %s) Member Not Created for Pool %s" \
                         %(self._name, service_name,
                           str(service_port), pool.name))
        else:
            self._logger.error("%s - clusterIP for Service %s Not Found" \
                 %(self._name, service_name))
            self._logger.error(
                 "%s - (%s %s) Member Not Created for Pool %s" \
                 %(self._name, service_name,
                   str(service_port), pool.name))
        return member

    def _update_member(self, ns_name, backend_member, pool):
        """Re-point an existing member at a new service name/port.

        If the new service cannot be resolved the stale member is deleted
        and None is returned.
        """
        resource_type = "services"
        member_id = backend_member['member_id']
        new_service_name = backend_member['serviceName']
        new_service_port = backend_member['servicePort']
        member = LoadbalancerMemberKM.get(member_id)
        annotations = member.annotations
        for kvp in annotations['key_value_pair'] or []:
            if kvp['key'] == 'serviceName':
                old_service_name = kvp['value']
                break
        old_service_port = member.params['protocol_port']
        service_ip = None
        if new_service_name != old_service_name:
            service_info = self._kube.get_resource(resource_type,
                               new_service_name, ns_name)
            if service_info and 'clusterIP' in service_info['spec']:
                service_ip = service_info['spec']['clusterIP']
            else:
                self._logger.error("%s - clusterIP for Service %s Not Found" \
                     %(self._name, new_service_name))
                self._logger.error(
                     "%s - (%s %s) Member Not Updated for Pool %s" \
                     %(self._name, new_service_name,
                       str(new_service_port), pool.name))
                self._vnc_delete_member(member_id)
                LoadbalancerMemberKM.delete(member_id)
                self._logger.error(
                     "%s - (%s %s) Member Deleted for Pool %s" \
                     %(self._name, old_service_name,
                       str(old_service_port), pool.name))
                return None
        else:
            service_ip = member.params['address']
        annotations = {}
        annotations['serviceName'] = new_service_name
        member_obj = self._vnc_update_member(member_id,
                          service_ip, new_service_port, annotations)
        # NOTE(review): KM.update is called with the cached entry rather
        # than the freshly updated member_obj - confirm the KM refreshes
        # from the database internally.
        member = LoadbalancerMemberKM.update(member)
        return member

    def _create_pool(self, ns_name, ll, port, lb_algorithm, annotations):
        """Create (or reuse) the pool attached to a listener."""
        pool_id = ll.loadbalancer_pool
        pool = LoadbalancerPoolKM.get(pool_id)
        if pool is None:
            pool_obj = self._vnc_create_pool(ns_name, ll,
                            port, lb_algorithm, annotations)
            pool_id = pool_obj.uuid
            pool = LoadbalancerPoolKM.locate(pool_id)
        else:
            self._logger.error("%s - %s Pool Not Created" \
                 %(self._name, ll.name))
        return pool

    def _create_listener(self, ns_name, lb, port):
        """Create a listener on the loadbalancer for the given port spec."""
        ll = None
        ll_obj = self._vnc_create_listeners(ns_name, lb, port)
        if ll_obj:
            ll = LoadbalancerListenerKM.locate(ll_obj.uuid)
        else:
            self._logger.error("%s - %s Listener for Port %s Not Created" \
                 %(self._name, lb.name, str(port)))
        return ll

    def _create_listener_pool_member(self, ns_name, lb, backend):
        """Create the listener/pool/member chain for one backend entry.

        If the member cannot be created, the listener and pool are rolled
        back so no half-configured chain is left behind.
        """
        pool_port = {}
        listener_port = {}
        listener_port['port'] = '80'
        listener_port['protocol'] = backend['listener']['protocol']
        if listener_port['protocol'] == 'TERMINATED_HTTPS':
            listener_port['port'] = '443'
            if 'default_tls_container' in backend['listener']:
                listener_port['default_tls_container'] = backend['listener']['default_tls_container']
            if 'sni_containers' in backend['listener']:
                listener_port['sni_containers'] = backend['listener']['sni_containers']
        ll = self._create_listener(ns_name, lb, listener_port)
        annotations = {}
        for key in backend['annotations']:
            annotations[key] = backend['annotations'][key]
        lb_algorithm = "ROUND_ROBIN"
        pool_port['port'] = '80'
        pool_port['protocol'] = backend['pool']['protocol']
        pool = self._create_pool(ns_name, ll, pool_port, lb_algorithm, annotations)
        backend_member = backend['member']
        member = self._create_member(ns_name, backend_member, pool)
        if member is None:
            self._logger.error("%s - Deleting Listener %s and Pool %s" \
                 %(self._name, ll.name, pool.name))
            self._vnc_delete_pool(pool.uuid)
            LoadbalancerPoolKM.delete(pool.uuid)
            self._vnc_delete_listener(ll.uuid)
            LoadbalancerListenerKM.delete(ll.uuid)

    def update_ingress_backend(self, ns_name, service_name, oper):
        """React to a backend Service being added or removed.

        On ADD, (re)create the chains of every ingress that references the
        service; otherwise tear down the listeners that point at it.
        """
        ingress_ids = self._find_ingress(
            self._ingress_label_cache, ns_name, service_name)
        for ingress_id in ingress_ids or []:
            ingress = IngressKM.get(ingress_id)
            lb = LoadbalancerKM.get(ingress_id)
            if not ingress or not lb:
                continue
            if oper == 'ADD':
                new_backend_list = self._get_new_backend_list(ingress.spec, ns_name)
                for new_backend in new_backend_list[:] or []:
                    if new_backend['member']['serviceName'] == service_name:
                        self._create_listener_pool_member(
                            ns_name, lb, new_backend)
            else:
                old_backend_list = self._get_old_backend_list(lb)
                for old_backend in old_backend_list[:] or []:
                    if old_backend['member']['serviceName'] == service_name:
                        self._delete_listener(old_backend['listener_id'])

    def _create_lb(self, uid, name, ns_name, event):
        """Create or reconcile the loadbalancer for an Ingress event.

        Diffs the desired backends (from the spec) against the existing
        ones: unchanged backends are kept, backends differing only in the
        member are updated in place, the rest are deleted/created.
        """
        annotations = event['object']['metadata'].get('annotations')
        lb = LoadbalancerKM.get(uid)
        if not lb:
            lb_obj = self._vnc_create_lb(uid, name, ns_name, annotations)
            if lb_obj is None:
                return
            lb = LoadbalancerKM.locate(uid)
        else:
            # Existing LB: re-apply the externalIP annotation if it changed.
            external_ip = None
            if annotations and 'externalIP' in annotations:
                external_ip = annotations['externalIP']
            if external_ip != lb.external_ip:
                self._deallocate_floating_ip(lb, delete_svc_fip=False)
                lb_obj = self._vnc_lib.loadbalancer_read(id=lb.uuid)
                fip = self._update_floating_ip(name, ns_name,
                                external_ip, lb_obj)
                if fip:
                    lb.external_ip = external_ip
                self._update_kube_api_server(name, ns_name, lb_obj, fip)

        self._clear_ingress_cache_uuid(self._ingress_label_cache, uid)

        spec = event['object']['spec']
        new_backend_list = self._get_new_backend_list(spec, ns_name)
        old_backend_list = self._get_old_backend_list(lb)

        # find the unchanged backends
        for new_backend in new_backend_list[:] or []:
            self._update_ingress_cache(self._ingress_label_cache,
                ns_name, new_backend['member']['serviceName'], uid)
            for old_backend in old_backend_list[:] or []:
                if new_backend['annotations'] == old_backend['annotations'] \
                    and new_backend['listener'] == old_backend['listener'] \
                    and new_backend['pool'] == old_backend['pool'] \
                    and new_backend['member'] == old_backend['member']:
                    old_backend_list.remove(old_backend)
                    new_backend_list.remove(new_backend)
                    break
        if len(old_backend_list) == 0 and len(new_backend_list) == 0:
            return lb

        # find the updated backends and update
        # NOTE(review): unlike the loop above, there is no break after a
        # match here; verify duplicate matches cannot occur.
        backend_update_list = []
        for new_backend in new_backend_list[:] or []:
            for old_backend in old_backend_list[:] or []:
                if new_backend['annotations'] == old_backend['annotations'] \
                    and new_backend['listener'] == old_backend['listener'] \
                    and new_backend['pool'] == old_backend['pool']:
                    backend = old_backend
                    backend['member']['member_id'] = \
                                       old_backend['member_id']
                    backend['member']['serviceName'] = \
                                       new_backend['member']['serviceName']
                    backend['member']['servicePort'] = \
                                       new_backend['member']['servicePort']
                    backend_update_list.append(backend)
                    old_backend_list.remove(old_backend)
                    new_backend_list.remove(new_backend)
        for backend in backend_update_list or []:
            ll = LoadbalancerListenerKM.get(backend['listener_id'])
            pool = LoadbalancerPoolKM.get(backend['pool_id'])
            backend_member = backend['member']
            member = self._update_member(ns_name, backend_member, pool)
            if member is None:
                self._logger.error("%s - Deleting Listener %s and Pool %s" \
                     %(self._name, ll.name, pool.name))
                self._vnc_delete_pool(pool.uuid)
                LoadbalancerPoolKM.delete(pool.uuid)
                self._vnc_delete_listener(ll.uuid)
                LoadbalancerListenerKM.delete(ll.uuid)
        if len(old_backend_list) == 0 and len(new_backend_list) == 0:
            return lb

        # delete the old backends
        for backend in old_backend_list or []:
            self._delete_listener(backend['listener_id'])

        # create the new backends
        for backend in new_backend_list:
            self._create_listener_pool_member(ns_name, lb, backend)

        return lb

    def _delete_all_listeners(self, lb):
        """Tear down every listener (with its pool and members) on the LB."""
        listener_list = lb.loadbalancer_listeners.copy()
        for ll_id in listener_list:
            ll = LoadbalancerListenerKM.get(ll_id)
            pool_id = ll.loadbalancer_pool
            if pool_id:
                pool = LoadbalancerPoolKM.get(pool_id)
                member_list = pool.members.copy()
                for member_id in member_list:
                    self._vnc_delete_member(member_id)
                    LoadbalancerMemberKM.delete(member_id)
                self._vnc_delete_pool(pool_id)
                LoadbalancerPoolKM.delete(pool_id)
            self._vnc_delete_listener(ll_id)
            LoadbalancerListenerKM.delete(ll_id)

    def _delete_listener(self, ll_id):
        """Tear down one listener together with its pool and members."""
        ll = LoadbalancerListenerKM.get(ll_id)
        pool_id = ll.loadbalancer_pool
        if pool_id:
            pool = LoadbalancerPoolKM.get(pool_id)
            member_list = pool.members.copy()
            for member_id in member_list:
                self._vnc_delete_member(member_id)
                LoadbalancerMemberKM.delete(member_id)
            self._vnc_delete_pool(pool_id)
            LoadbalancerPoolKM.delete(pool_id)
        self._vnc_delete_listener(ll_id)
        LoadbalancerListenerKM.delete(ll_id)

    def _delete_lb(self, uid):
        """Delete the loadbalancer (and all its listeners) for an ingress."""
        lb = LoadbalancerKM.get(uid)
        if not lb:
            return
        self._delete_all_listeners(lb)
        self._vnc_delete_lb(lb)
        LoadbalancerKM.delete(uid)

    def _update_ingress(self, name, uid, event):
        """Handle an Ingress ADDED/MODIFIED event."""
        ns_name = event['object']['metadata'].get('namespace')
        self._create_lb(uid, name, ns_name, event)

    def _delete_ingress(self, uid):
        """Handle an Ingress DELETED event."""
        self._delete_lb(uid)
        self._clear_ingress_cache_uuid(self._ingress_label_cache, uid)

    def _create_ingress_event(self, event_type, ingress_id, lb):
        """Enqueue a synthetic Ingress event (currently only deletes)."""
        event = {}
        object = {}
        object['kind'] = 'Ingress'
        object['spec'] = {}
        object['metadata'] = {}
        object['metadata']['uid'] = ingress_id
        if event_type == 'delete':
            event['type'] = 'DELETED'
            event['object'] = object
            self._queue.put(event)
        return

    def _sync_ingress_lb(self):
        """Queue deletion for LBs whose Ingress no longer exists.

        NOTE(review): the loop breaks after queuing the first stale LB, so
        at most one is reclaimed per timer tick - confirm this throttling
        is intentional.
        """
        lb_uuid_set = set(LoadbalancerKM.keys())
        ingress_uuid_set = set(IngressKM.keys())
        deleted_ingress_set = lb_uuid_set - ingress_uuid_set
        for uuid in deleted_ingress_set:
            lb = LoadbalancerKM.get(uuid)
            if not lb:
                continue
            if not lb.annotations:
                continue
            owner = None
            kind = None
            for kvp in lb.annotations['key_value_pair'] or []:
                if kvp['key'] == 'owner':
                    owner = kvp['value']
                elif kvp['key'] == 'kind':
                    kind = kvp['value']
            # Only reclaim LBs that this module created for Ingresses.
            if owner == 'k8s' and kind == self._k8s_event_type:
                self._create_ingress_event('delete', uuid, lb)
                break
        return

    def ingress_timer(self):
        """Periodic reconciliation entry point."""
        self._sync_ingress_lb()

    def process(self, event):
        """Dispatch a kubernetes Ingress watch event."""
        event_type = event['type']
        kind = event['object'].get('kind')
        ns_name = event['object']['metadata'].get('namespace')
        name = event['object']['metadata'].get('name')
        uid = event['object']['metadata'].get('uid')

        self._logger.debug("%s - Got %s %s %s:%s:%s"
            %(self._name, event_type, kind, ns_name, name, uid))

        if event['type'] == 'ADDED' or event['type'] == 'MODIFIED':
            self._update_ingress(name, uid, event)
        elif event['type'] == 'DELETED':
            self._delete_ingress(uid)
        else:
            self._logger.warning(
                'Unknown event type: "{}" Ignoring'.format(event['type']))
| nischalsheth/contrail-controller | src/container/kube-manager/kube_manager/vnc/vnc_ingress.py | Python | apache-2.0 | 36,050 |
import Deck
class AuctionException(Exception):
    """Error raised for illegal auction setup or manipulation.

    The offending value is kept on the instance; str() renders its repr.
    """

    def __init__(self, value):
        # Deliberately no super().__init__(...) call: Exception.__new__
        # already records the constructor args, and calling __init__ with
        # no args here would reset them.
        self.value = value

    def __str__(self):
        return repr(self.value)
class Auction(object):
    """An auction round; it may only start with exactly 8 cards face up."""

    def __init__(self, deck, cardsOnTable):
        # Guard clause: refuse to start with anything other than 8 cards.
        table_size = len(cardsOnTable)
        if table_size != 8:
            raise AuctionException("Auction must begin with 8 cards on the table")
| straemer/PowerGrid | src/gamemaster/Auction.py | Python | gpl-3.0 | 353 |
#!/usr/bin/env python
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
### BEGIN LICENSE
# Copyright (C) 2010 Leszek Lesner [email protected]
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
### END LICENSE
###################### DO NOT TOUCH THIS (HEAD TO THE SECOND PART) ######################
import os
import sys
# python-distutils-extra is a hard build dependency; bail out early with a
# pointer to where it can be obtained.  (Python-2-only `print >>` syntax:
# this setup script predates Python 3.)
try:
    import DistUtilsExtra.auto
except ImportError:
    print >> sys.stderr, 'To build lxkeymap you need https://launchpad.net/python-distutils-extra'
    sys.exit(1)
# NOTE(review): this is a lexicographic string comparison, not a version
# comparison - e.g. '10.0' would compare as < '2.18'.  Confirm acceptable
# for the version range DistUtilsExtra actually ships.
assert DistUtilsExtra.auto.__version__ >= '2.18', 'needs DistUtilsExtra.auto >= 2.18'
def update_data_path(prefix, oldvalue=None):
    """Rewrite __lxkeymap_data_directory__ in lxkeymap/lxkeymapconfig.py.

    Dual mode: called without *oldvalue*, it sets the variable to *prefix*
    and returns the previous raw value; called with *oldvalue*, it restores
    that value (and returns it unchanged).  The file is rewritten via a
    temporary '.new' sibling and an atomic rename.  Exits the process if
    the config file cannot be read or written.
    """
    try:
        # Python-2 `file()` builtin; equivalent to open().
        fin = file('lxkeymap/lxkeymapconfig.py', 'r')
        fout = file(fin.name + '.new', 'w')

        for line in fin:
            fields = line.split(' = ') # Separate variable from value
            if fields[0] == '__lxkeymap_data_directory__':
                # update to prefix, store oldvalue
                if not oldvalue:
                    # oldvalue keeps the raw right-hand side, including the
                    # trailing newline, so the restore branch writes no '\n'.
                    oldvalue = fields[1]
                    line = "%s = '%s'\n" % (fields[0], prefix)
                else: # restore oldvalue
                    line = "%s = %s" % (fields[0], oldvalue)
            fout.write(line)

        fout.flush()
        fout.close()
        fin.close()
        os.rename(fout.name, fin.name)
    except (OSError, IOError), e:
        print ("ERROR: Can't find lxkeymap/lxkeymapconfig.py")
        sys.exit(1)
    return oldvalue
def update_desktop_file(datadir):
    """Point the Icon= entry of lxkeymap.desktop.in at *datadir*/media/icon.png.

    Rewrites the file in place (via a temporary ``.new`` copy); exits the
    process with status 1 if the file cannot be processed.
    """
    # Bug fix vs. original: `file()` and `except ..., e` are Python-2-only
    # and fail to parse on Python 3; use open() / modern except syntax.
    try:
        fin = open('lxkeymap.desktop.in', 'r')
        fout = open(fin.name + '.new', 'w')
        for line in fin:
            if 'Icon=' in line:
                line = "Icon=%s\n" % (datadir + 'media/icon.png')
            fout.write(line)
        fout.flush()
        fout.close()
        fin.close()
        # Atomically replace the original with the rewritten copy.
        os.rename(fout.name, fin.name)
    except (OSError, IOError):
        print("ERROR: Can't find lxkeymap.desktop.in")
        sys.exit(1)
class InstallAndUpdateDataDirectory(DistUtilsExtra.auto.install_auto):
    """Install command that rewrites data paths for the install prefix.

    Temporarily points the data directory and .desktop icon at the final
    install location, runs the stock install, then restores the source tree.
    """

    def run(self):
        datadir = self.prefix + '/local/share/lxkeymap/'
        # Remember the original value so the checkout is left untouched.
        saved = update_data_path(datadir)
        update_desktop_file(datadir)
        DistUtilsExtra.auto.install_auto.run(self)
        update_data_path(self.prefix, saved)
##################################################################################
###################### YOU SHOULD MODIFY ONLY WHAT IS BELOW ######################
##################################################################################
# Drive the build/install through DistUtilsExtra's auto-discovery, hooking
# our custom install command so data paths are fixed up at install time.
DistUtilsExtra.auto.setup(
    name='lxkeymap',
    version='0.1',
    license='GPL-3',
    #author='Your Name',
    #author_email='[email protected]',
    #description='UI for managing …',
    #long_description='Here a longer description',
    #url='https://launchpad.net/lxkeymap',
    cmdclass={'install': InstallAndUpdateDataDirectory}
    )
| raspberrypi/lxkeymap | setup.py | Python | gpl-3.0 | 3,508 |
'''
Created on 24.05.2015
@author: vvladych
'''
from MDO import MDO
class ForecastPublication(MDO):
    """Link object joining a forecast to a publication.

    Maps one row of the ``fc_forecast_publication`` table; persistence is
    handled by the generic MDO machinery driven by ``sql_dict``.
    """

    # SQL statements consumed by the MDO base class, keyed by operation.
    sql_dict = {
        "get_all": "SELECT sid, forecast_sid, publication_sid FROM fc_forecast_publication",
        "get_all_foreign_key": "SELECT sid, forecast_sid, publication_sid FROM fc_forecast_publication WHERE forecast_sid=%s",
        "delete": "DELETE FROM fc_forecast_publication WHERE sid=%s",
        "insert": "INSERT INTO fc_forecast_publication(forecast_sid, publication_sid) VALUES(%s, %s) RETURNING sid",
        "load": "SELECT sid, forecast_sid, publication_sid FROM fc_forecast_publication WHERE sid=%s",
    }

    def __init__(self, sid=None, uuid=None, forecast_sid=None, publication_sid=None):
        super(ForecastPublication, self).__init__(ForecastPublication.sql_dict, sid, uuid)
        self.forecast_sid = forecast_sid
        self.publication_sid = publication_sid

    def load_object_from_db(self, rec):
        # Populate this instance from a fetched database record.
        self.forecast_sid = rec.forecast_sid
        self.publication_sid = rec.publication_sid

    def get_insert_data(self):
        # Values bound to the "insert" statement, in placeholder order.
        return (self.forecast_sid, self.publication_sid)

    def fabric_method(self, rec):
        # Factory: build a fresh instance from a database record.
        return ForecastPublication(
            rec.sid, None, rec.forecast_sid, rec.publication_sid
        )
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Sum the integers 1 + 2 + 3 + ... + 100 with a while loop:
sum1 = 0
n = 1
while n <= 100:
    sum1 = sum1 + n
    n = n + 1
# Bug fix: the original printed the builtin `sum` (i.e. the function
# object) instead of the accumulator `sum1`.
print("1+2+3+...+100 = ", sum1)

# Multiply 1 x 2 x 3 x ... x 100 (100 factorial):
acc = 1
n = 1
while n <= 100:
    acc = acc * n
    n = n + 1
print("1x2x3x...x100 = ", acc)
| felix9064/python | Demo/liaoxf/do_while.py | Python | mit | 284 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.