code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1
value | license stringclasses 15
values | size int64 3 1.05M |
|---|---|---|---|---|---|
#
# -*- coding: utf-8 -*-#
# Copyright (c) 2010 Red Hat, Inc.
#
# Authors: Jeff Ortel <[email protected]>
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import gettext
from iniparse import RawConfigParser as ConfigParser
import logging
import os
import string
import subscription_manager.injection as inj
from subscription_manager.cache import OverrideStatusCache, WrittenOverrideCache
from subscription_manager import utils
from subscription_manager import model
from subscription_manager.model import ent_cert
from rhsm.config import initConfig
# FIXME: local imports
from subscription_manager.certlib import ActionReport, BaseActionInvoker
from subscription_manager.certdirectory import Path
# Module-level logger under the rhsm application logging namespace.
log = logging.getLogger('rhsm-app.' + __name__)
# Parsed rhsm configuration, shared by everything in this module.
CFG = initConfig()
# Only yum-type content is rendered into redhat.repo.
ALLOWED_CONTENT_TYPES = ["yum"]
# Conventional alias for gettext translations.
_ = gettext.gettext
class RepoActionInvoker(BaseActionInvoker):
    """Invoker for yum repo updating related actions."""
    def __init__(self, cache_only=False, locker=None):
        super(RepoActionInvoker, self).__init__(locker=locker)
        # When set, update commands read cached override data instead of
        # contacting the server.
        self.cache_only = cache_only
        self.identity = inj.require(inj.IDENTITY)
    def _do_update(self):
        """Run a repo update and return its report."""
        return RepoUpdateActionCommand(cache_only=self.cache_only).perform()
    def is_managed(self, repo):
        """Return True if 'repo' is the label of content we manage."""
        command = RepoUpdateActionCommand(cache_only=self.cache_only)
        return any(content.label == repo for content in command.matching_content())
    def get_repos(self, apply_overrides=True):
        """Return the set of Repo objects built from entitlement content,
        merged with any sections already present in the repo file."""
        command = RepoUpdateActionCommand(cache_only=self.cache_only,
                                          apply_overrides=apply_overrides)
        fresh = command.get_unique_content()
        merged = set()
        # Fold in the current on-disk repo data.
        existing_file = RepoFile()
        existing_file.read()
        for candidate in fresh:
            on_disk = existing_file.section(candidate.id)
            if on_disk is None:
                merged.add(candidate)
            else:
                command.update_repo(on_disk, candidate)
                merged.add(on_disk)
        return merged
    def get_repo_file(self):
        """Return the path of the managed repo file."""
        return RepoFile().path
    @classmethod
    def delete_repo_file(cls):
        """Remove the repo file (if present) and the written-override tracker."""
        path = RepoFile().path
        if os.path.exists(path):
            os.unlink(path)
        # When the repo is removed, also remove the override tracker
        WrittenOverrideCache.delete_cache()
# This is $releasever specific, but expanding other vars would be similar,
# just the marker, and get_expansion would change
#
# For example, for full craziness, we could expand facts in urls...
class YumReleaseverSource(object):
    """
    Contains a ReleaseStatusCache and releasever helpers.
    get_expansion() gets 'release' from consumer info from server,
    using the cache as required.
    """
    marker = "$releasever"
    # If everything fails, the default is to leave the marker unexpanded.
    default = marker
    def __init__(self):
        self.release_status_cache = inj.require(inj.RELEASE_STATUS_CACHE)
        # Memoized expansion; lives for this object's lifetime.
        self._expansion = None
        self.identity = inj.require(inj.IDENTITY)
        self.cp_provider = inj.require(inj.CP_PROVIDER)
        self.uep = self.cp_provider.get_consumer_auth_cp()
    # FIXME: these guys are really more of model helpers for the object
    # represent a release.
    @staticmethod
    def is_not_empty(expansion):
        """True when 'expansion' is neither None nor zero-length."""
        return not (expansion is None or len(expansion) == 0)
    @staticmethod
    def is_set(result):
        """Check result for existing, and having a non empty value.
        Return True if result has a non empty, non null result['releaseVer']
        False indicates we don't know or it is not set.
        """
        if result is None:
            return False
        try:
            return YumReleaseverSource.is_not_empty(result['releaseVer'])
        except Exception:
            return False
    def get_expansion(self):
        """Return the release version string, or the unexpanded default."""
        if self._expansion:
            return self._expansion
        status = self.release_status_cache.read_status(self.uep,
                                                       self.identity.uuid)
        # NOTE: the _expansion cache exists for the lifetime of the object,
        # so a newly created YumReleaseverSource is needed whenever the
        # release may have changed.  We assume it stays the same for the
        # lifetime of a RepoUpdateActionCommand.
        if self.is_set(status):
            self._expansion = status['releaseVer']
        else:
            # A missing/unknown release (server error or simply unset):
            # fall back to the default, i.e. keep the marker as-is.
            self._expansion = self.default
        return self._expansion
class RepoUpdateActionCommand(object):
    """UpdateAction for yum repos.
    Update yum repos when triggered. Generates yum repo config
    based on:
    - entitlement certs
    - repo overrides
    - rhsm config
    - yum config
    - manual changes made to "redhat.repo".
    Returns an RepoActionReport.
    """
    def __init__(self, cache_only=False, apply_overrides=True):
        # cache_only: read override data from the local cache instead of
        # asking the server.
        # apply_overrides: when False, server-side overrides are not
        # applied to the generated repo definitions.
        self.identity = inj.require(inj.IDENTITY)
        # These should probably move closer their use
        self.ent_dir = inj.require(inj.ENT_DIR)
        self.prod_dir = inj.require(inj.PROD_DIR)
        self.ent_source = ent_cert.EntitlementDirEntitlementSource()
        self.cp_provider = inj.require(inj.CP_PROVIDER)
        self.uep = self.cp_provider.get_consumer_auth_cp()
        # Repo management is on by default; rhsm config can disable it.
        self.manage_repos = 1
        self.apply_overrides = apply_overrides
        if CFG.has_option('rhsm', 'manage_repos'):
            self.manage_repos = CFG.get_int('rhsm', 'manage_repos')
        self.release = None
        # {contentLabel: {name: value}} of server-side content overrides.
        self.overrides = {}
        # Overrides need a valid consumer identity and server support.
        self.override_supported = bool(self.identity.is_valid() and self.uep and self.uep.supports_resource('content_overrides'))
        self.written_overrides = WrittenOverrideCache()
        # FIXME: empty report at the moment, should be changed to include
        # info about updated repos
        self.report = RepoActionReport()
        self.report.name = "Repo updates"
        # If we are not registered, skip trying to refresh the
        # data from the server
        if not self.identity.is_valid():
            return
        # NOTE: if anything in the RepoActionInvoker init blocks, and it
        # could, yum could still block. The closest thing to an
        # event loop we have is the while True: sleep() in lock.py:Lock.acquire()
        # Only attempt to update the overrides if they are supported
        # by the server.
        if self.override_supported:
            self.written_overrides._read_cache()
            try:
                override_cache = inj.require(inj.OVERRIDE_STATUS_CACHE)
            except KeyError:
                override_cache = OverrideStatusCache()
            if cache_only:
                status = override_cache._read_cache()
            else:
                status = override_cache.load_status(self.uep, self.identity.uuid)
            # Flatten the server's list of override dicts into the
            # {label: {name: value}} mapping built above.
            for item in status or []:
                # Don't iterate through the list
                if item['contentLabel'] not in self.overrides:
                    self.overrides[item['contentLabel']] = {}
                self.overrides[item['contentLabel']][item['name']] = item['value']
    def perform(self):
        """Regenerate the repo file; returns the RepoActionReport,
        or 0 when manage_repos is disabled."""
        # Load the RepoFile from disk, this contains all our managed yum repo sections:
        repo_file = RepoFile()
        # the [rhsm] manage_repos can be overridden to disable generation of the
        # redhat.repo file:
        if not self.manage_repos:
            log.debug("manage_repos is 0, skipping generation of: %s" %
                      repo_file.path)
            if repo_file.exists():
                log.info("Removing %s due to manage_repos configuration." %
                         repo_file.path)
                RepoActionInvoker.delete_repo_file()
            return 0
        repo_file.read()
        valid = set()
        # Iterate content from entitlement certs, and create/delete each section
        # in the RepoFile as appropriate:
        for cont in self.get_unique_content():
            valid.add(cont.id)
            existing = repo_file.section(cont.id)
            if existing is None:
                repo_file.add(cont)
                self.report_add(cont)
            else:
                # Updates the existing repo with new content
                self.update_repo(existing, cont)
                repo_file.update(existing)
                self.report_update(existing)
        # Sections no longer backed by an entitlement are removed.
        for section in repo_file.sections():
            if section not in valid:
                self.report_delete(section)
                repo_file.delete(section)
        # Write new RepoFile to disk:
        repo_file.write()
        if self.override_supported:
            # Update with the values we just wrote
            self.written_overrides.overrides = self.overrides
            self.written_overrides.write_cache()
        log.info("repos updated: %s" % self.report)
        return self.report
    def get_unique_content(self):
        """Return the deduplicated set of Repo objects to manage."""
        # FIXME Shouldn't this skip all of the repo updating?
        if not self.manage_repos:
            return []
        # baseurl and ca_cert could be "CDNInfo" or
        # bundle with "ConnectionInfo" etc
        baseurl = CFG.get('rhsm', 'baseurl')
        ca_cert = CFG.get('rhsm', 'repo_ca_cert')
        content_list = self.get_all_content(baseurl, ca_cert)
        # assumes items in content_list are hashable
        return set(content_list)
    # Expose as public API for RepoActionInvoker.is_managed, since that
    # is used by openshift tooling.
    # See https://bugzilla.redhat.com/show_bug.cgi?id=1223038
    def matching_content(self):
        """All yum-type content provided by the entitlement source."""
        return model.find_content(self.ent_source,
                                  content_type="yum")
    def get_all_content(self, baseurl, ca_cert):
        """Build Repo objects for all matching content, expanding
        $releasever and (optionally) applying server overrides."""
        matching_content = self.matching_content()
        content_list = []
        # avoid checking for release/etc if there is no matching_content
        if not matching_content:
            return content_list
        # wait until we know we have content before fetching
        # release. We could make YumReleaseverSource understand
        # cache_only as well.
        release_source = YumReleaseverSource()
        for content in matching_content:
            repo = Repo.from_ent_cert_content(content, baseurl, ca_cert,
                                              release_source)
            # overrides are yum repo only at the moment, but
            # content sources will likely need to learn how to
            # apply overrides as well, perhaps generically
            if self.override_supported and self.apply_overrides:
                repo = self._set_override_info(repo)
            content_list.append(repo)
        return content_list
    def _set_override_info(self, repo):
        """Apply any server-side overrides recorded for this repo's id."""
        # In the disconnected case, self.overrides will be an empty list
        for name, value in self.overrides.get(repo.id, {}).items():
            repo[name] = value
        return repo
    def _is_overridden(self, repo, key):
        """True when the server currently overrides 'key' for this repo."""
        return key in self.overrides.get(repo.id, {})
    def _was_overridden(self, repo, key, value):
        """True when 'value' matches an override we previously wrote."""
        written_value = self.written_overrides.overrides.get(repo.id, {}).get(key)
        # Compare values as strings to avoid casting problems from io
        return written_value is not None and value is not None and str(written_value) == str(value)
    def _build_props(self, old_repo, new_repo):
        """Map every key present in either repo to its (mutable, default)
        tuple; keys unknown to Repo.PROPERTIES default to mutable."""
        result = {}
        # NOTE(review): concatenating dict.keys() results is Python 2
        # only; under Python 3 this needs list(...) conversions.
        all_keys = old_repo.keys() + new_repo.keys()
        for key in all_keys:
            result[key] = Repo.PROPERTIES.get(key, (1, None))
        return result
    def update_repo(self, old_repo, new_repo):
        """
        Checks an existing repo definition against a potentially updated
        version created from most recent entitlement certificates and
        configuration. Creates, updates, and removes properties as
        appropriate and returns the number of changes made. (if any)
        """
        changes_made = 0
        for key, (mutable, default) in self._build_props(old_repo, new_repo).items():
            new_val = new_repo.get(key)
            # Mutable properties should be added if not currently defined,
            # otherwise left alone. However if we see that the property was overridden
            # but that override has since been removed, we need to revert to the default
            # value.
            if mutable and not self._is_overridden(old_repo, key) \
                    and not self._was_overridden(old_repo, key, old_repo.get(key)):
                # Only fill in a mutable value when the old one is unset/empty.
                if (new_val is not None) and (not old_repo.get(key)):
                    if old_repo.get(key) == new_val:
                        continue
                    old_repo[key] = new_val
                    changes_made += 1
            # Immutable properties should be always be added/updated,
            # and removed if undefined in the new repo definition.
            else:
                if new_val is None or (str(new_val).strip() == ""):
                    # Immutable property should be removed:
                    if key in old_repo.keys():
                        del old_repo[key]
                        changes_made += 1
                    continue
                # Unchanged:
                if old_repo.get(key) == new_val:
                    continue
                old_repo[key] = new_val
                changes_made += 1
        return changes_made
    def report_update(self, repo):
        # Record an updated repo in the report.
        self.report.repo_updates.append(repo)
    def report_add(self, repo):
        # Record a newly added repo in the report.
        self.report.repo_added.append(repo)
    def report_delete(self, section):
        # Record a deleted section name (string) in the report.
        self.report.repo_deleted.append(section)
class RepoActionReport(ActionReport):
    """Report class for reporting yum repo updates."""
    name = "Repo Updates"
    def __init__(self):
        super(RepoActionReport, self).__init__()
        # Repo objects updated/added, plus deleted section names (strings).
        self.repo_updates = []
        self.repo_added = []
        self.repo_deleted = []
    def updates(self):
        """How many repos were updated"""
        return sum(len(bucket) for bucket in
                   (self.repo_updates, self.repo_added, self.repo_deleted))
    def format_repos_info(self, repos, formatter):
        """Render each entry with 'formatter', one indented line apiece."""
        indent = ' '
        if not repos:
            return '%s<NONE>' % indent
        return '\n'.join('%s%s' % (indent, formatter(entry))
                         for entry in repos)
    def repo_format(self, repo):
        """'[id:<id> <name>]' encoded as utf8 (py2-era formatting)."""
        return ("[id:%s %s]" % (repo.id, repo['name'])).encode('utf8')
    def section_format(self, section):
        """'[<section>]' encoded as utf8."""
        return ("[%s]" % section).encode('utf8')
    def format_repos(self, repos):
        return self.format_repos_info(repos, self.repo_format)
    def format_sections(self, sections):
        return self.format_repos_info(sections, self.section_format)
    def __str__(self):
        chunks = [_('Repo updates') + '\n',
                  _('Total repo updates: %d') % self.updates(),
                  _('Updated'),
                  self.format_repos(self.repo_updates),
                  _('Added (new)'),
                  self.format_repos(self.repo_added),
                  _('Deleted'),
                  # deleted are former repo sections, but they are the same type
                  self.format_sections(self.repo_deleted)]
        return '\n'.join(chunks)
class Repo(dict):
    # name -> (mutable, default). The mutability information is only used
    # in disconnected cases.
    PROPERTIES = {
        'name': (0, None),
        'baseurl': (0, None),
        'enabled': (1, '1'),
        'gpgcheck': (1, '1'),
        'gpgkey': (0, None),
        'sslverify': (1, '1'),
        'sslcacert': (0, None),
        'sslclientkey': (0, None),
        'sslclientcert': (0, None),
        'metadata_expire': (1, None),
        'proxy': (0, None),
        'proxy_username': (0, None),
        'proxy_password': (0, None),
        'ui_repoid_vars': (0, None)}
    def __init__(self, repo_id, existing_values=None):
        """Create a repo definition; 'existing_values' is a list of
        (key, value) 2-tuples read from an existing repo file section."""
        values = existing_values or []
        self.id = self._clean_id(repo_id)
        # Remembers key insertion order, so the section is written out in
        # the same order it was read from the config.
        self._order = []
        for key, val in values:
            # Only keep keys with a non-empty value, to avoid cluttering
            # the file.
            if val:
                self[key] = val
        # NOTE: This sets the above properties to the default values even if
        # they are not defined on disk. i.e. these properties will always
        # appear in this dict, but their values may be None.
        for prop, (mutable, default) in self.PROPERTIES.items():
            if prop not in self.keys():
                self[prop] = default
    @classmethod
    def from_ent_cert_content(cls, content, baseurl, ca_cert, release_source):
        """Create an instance of Repo() from an ent_cert.EntitlementCertContent().
        And the other out of band info we need including baseurl, ca_cert, and
        the release version string.
        """
        repo = cls(content.label)
        repo['name'] = content.name
        repo['enabled'] = "1" if content.enabled else "0"
        expanded_path = Repo._expand_releasever(release_source, content.url)
        repo['baseurl'] = utils.url_base_join(baseurl, expanded_path)
        # Record the yum variables ($foo components) used in the url.
        url_pieces = repo['baseurl'].split("/")
        variables = [piece[1:] for piece in url_pieces if piece.startswith("$")]
        if variables:
            repo['ui_repoid_vars'] = " ".join(variables)
        gpg_url = content.gpg
        if gpg_url:
            repo['gpgkey'] = utils.url_base_join(baseurl, gpg_url)
            # Leave gpgcheck as the default of 1
        else:
            # No GPG key URL specified: turn gpgcheck off.
            repo['gpgkey'] = ""
            repo['gpgcheck'] = '0'
        repo['sslclientkey'] = content.cert.key_path()
        repo['sslclientcert'] = content.cert.path
        repo['sslcacert'] = ca_cert
        repo['metadata_expire'] = content.metadata_expire
        return Repo._set_proxy_info(repo)
    @staticmethod
    def _set_proxy_info(repo):
        """Copy proxy settings from rhsm config into the repo definition."""
        # Worth passing in proxy config info to from_ent_cert_content()?
        # That would decouple Repo some
        proxy_host = CFG.get('server', 'proxy_hostname')
        # proxy_port as string is fine here
        proxy_port = CFG.get('server', 'proxy_port')
        proxy = ""
        if proxy_host != "":
            proxy = "https://%s" % proxy_host
        if proxy_port != "":
            proxy = "%s:%s" % (proxy, proxy_port)
        # These could be empty string, in which case they will not be
        # set in the yum repo file:
        repo['proxy'] = proxy
        repo['proxy_username'] = CFG.get('server', 'proxy_user')
        repo['proxy_password'] = CFG.get('server', 'proxy_password')
        return repo
    @staticmethod
    def _expand_releasever(release_source, contenturl):
        """Replace the $releasever marker in 'contenturl', if present."""
        if release_source.marker not in contenturl:
            # no $releasever to expand
            return contenturl
        # NOTE: This is building a url from external info
        # so likely needs more validation. In our case, the
        # external source is trusted (release list from tls
        # mutually authed cdn, or a tls mutual auth api)
        # NOTE: The on disk cache is more vulnerable, since it is
        # trusted.
        return contenturl.replace(release_source.marker,
                                  release_source.get_expansion())
    def _clean_id(self, repo_id):
        """
        Format the config file id to contain only characters that yum expects
        (we'll just replace 'bad' chars with -)
        """
        allowed = string.ascii_letters + string.digits + "-_.:"
        return "".join(ch if ch in allowed else '-' for ch in repo_id)
    def items(self):
        """
        Called when we fetch the items for this yum repo to write to disk.
        """
        # Skip anything set to 'None' or empty string, as this is likely
        # not intended for a yum repo file. None can result here if the
        # default is None, or the entitlement certificate did not have the
        # value set.
        #
        # all values will be in _order, since the key has to have been set
        # to get into our dict.
        return tuple((key, self[key]) for key in self._order
                     if key in self and self[key])
    def __setitem__(self, key, value):
        # Track first-insertion order for items().
        if key not in self._order:
            self._order.append(key)
        dict.__setitem__(self, key, value)
    def __str__(self):
        chunks = ['[%s]' % self.id]
        for prop in self.PROPERTIES:
            val = self.get(prop)
            if val is not None:
                chunks.append('%s=%s' % (prop, val))
        return '\n'.join(chunks)
    def __eq__(self, other):
        # Identity is the repo id alone.
        return self.id == other.id
    def __hash__(self):
        return hash(self.id)
class TidyWriter:
    """
    ini file writer wrapper that collapses runs of blank lines into one,
    and guarantees the output ends with a trailing newline.
    Used to keep our repo file clean after removals and additions of
    new sections, as iniparser's tidy function is not available in all
    versions.
    """
    def __init__(self, backing_file):
        self.backing_file = backing_file
        self.ends_with_newline = False
        # True while inside a run of blank lines; persists across write()
        # calls so runs spanning two writes still collapse.
        self.writing_empty_lines = False
    def write(self, line):
        pieces = line.split("\n")
        last = len(pieces) - 1
        for idx, piece in enumerate(pieces):
            if piece == "":
                # A blank line (the final empty piece is just the split
                # artifact of a trailing newline and is never written).
                if idx != last:
                    if not self.writing_empty_lines:
                        self.backing_file.write("\n")
                    self.writing_empty_lines = True
            else:
                self.writing_empty_lines = False
                self.backing_file.write(piece)
                if idx != last:
                    self.backing_file.write("\n")
        self.ends_with_newline = (pieces[-1] == "")
    def close(self):
        # Ensure the file ends with exactly one newline.
        if not self.ends_with_newline:
            self.backing_file.write("\n")
class RepoFile(ConfigParser):
    # Base directory for yum repo files; expanded via Path below.
    PATH = 'etc/yum.repos.d/'
    def __init__(self, name='redhat.repo'):
        """Parser for the managed repo file 'name' under PATH; creates
        the file (with a header) on disk when repo management is on."""
        ConfigParser.__init__(self)
        # note PATH get's expanded with chroot info, etc
        self.path = Path.join(self.PATH, name)
        self.repos_dir = Path.abs(self.PATH)
        self.manage_repos = 1
        if CFG.has_option('rhsm', 'manage_repos'):
            self.manage_repos = CFG.get_int('rhsm', 'manage_repos')
        # Simulate manage repos turned off if no yum.repos.d directory exists.
        # This indicates yum is not installed so clearly no need for us to
        # manage repos.
        if not self.path_exists(self.repos_dir):
            log.warn("%s does not exist, turning manage_repos off." %
                     self.repos_dir)
            self.manage_repos = 0
        self.create()
    # Easier than trying to mock/patch os.path.exists
    def path_exists(self, path):
        "wrapper around os.path.exists"
        return os.path.exists(path)
    def exists(self):
        # True when the managed repo file exists on disk.
        return self.path_exists(self.path)
    def read(self):
        # Load the repo file from disk into this parser.
        ConfigParser.read(self, self.path)
    def _configparsers_equal(self, otherparser):
        """True when 'otherparser' holds the same sections and items."""
        if set(otherparser.sections()) != set(self.sections()):
            return False
        for section in self.sections():
            # Sometimes we end up with ints, but values must be strings to compare
            current_items = dict([(str(k), str(v)) for (k, v) in self.items(section)])
            if current_items != dict(otherparser.items(section)):
                return False
        return True
    def _has_changed(self):
        '''
        Check if the version on disk is different from what we have loaded
        '''
        on_disk = ConfigParser()
        on_disk.read(self.path)
        return not self._configparsers_equal(on_disk)
    def write(self):
        """Write the file through TidyWriter, but only when the content
        actually changed and repo management is enabled."""
        if not self.manage_repos:
            log.debug("Skipping write due to manage_repos setting: %s" %
                      self.path)
            return
        if self._has_changed():
            f = open(self.path, 'w')
            tidy_writer = TidyWriter(f)
            ConfigParser.write(self, tidy_writer)
            tidy_writer.close()
            f.close()
    def add(self, repo):
        # Add a new section for 'repo' and populate it.
        self.add_section(repo.id)
        self.update(repo)
    def delete(self, section):
        # Remove an entire section from the file.
        return self.remove_section(section)
    def update(self, repo):
        """Replace the options of repo's section with repo's current items."""
        # Need to clear out the old section to allow unsetting options:
        # don't use remove section though, as that will reorder sections,
        # and move whitespace around (resulting in more and more whitespace
        # as time progresses).
        for (k, v) in self.items(repo.id):
            self.remove_option(repo.id, k)
        for k, v in repo.items():
            ConfigParser.set(self, repo.id, k, v)
    def section(self, section):
        # Return the named section as a Repo, or None if absent.
        if self.has_section(section):
            return Repo(section, self.items(section))
    def create(self):
        """Write the initial file with its explanatory header, unless it
        already exists or repo management is disabled."""
        if self.path_exists(self.path) or not self.manage_repos:
            return
        f = open(self.path, 'w')
        s = []
        s.append('#')
        s.append('# Certificate-Based Repositories')
        s.append('# Managed by (rhsm) subscription-manager')
        s.append('#')
        s.append('# *** This file is auto-generated. Changes made here will be over-written. ***')
        s.append('# *** Use "subscription-manager repo-override --help" if you wish to make changes. ***')
        s.append('#')
        s.append('# If this file is empty and this system is subscribed consider ')
        s.append('# a "yum repolist" to refresh available repos')
        s.append('#')
        f.write('\n'.join(s))
        f.close()
| alikins/subscription-manager | src/subscription_manager/repolib.py | Python | gpl-2.0 | 27,398 |
from ..core import Eq, Ge, Gt, Integer, Le, Lt, Ne, diff, nan, oo, sympify
from ..core.compatibility import is_sequence, ordered
from ..functions import Min
from ..matrices import eye, zeros
from ..series import limit
from ..sets import Interval
from ..solvers import reduce_inequalities, solve
from .singularities import singularities
__all__ = 'minimize', 'maximize'
def minimize(f, *v):
    """Minimizes `f` with respect to given variables `v`.
    Examples
    ========
    >>> minimize(x**2, x)
    (0, {x: 0})
    >>> minimize([x**2, x >= 1], x)
    (1, {x: 1})
    >>> minimize([-x**2, x >= -2, x <= 1], x)
    (-4, {x: -2})
    See Also
    ========
    maximize
    """
    f = set(map(sympify, f if is_sequence(f) else [f]))
    # Split the input into relational constraints and the one objective.
    constraints = {c for c in f if c.is_Relational}
    assert len(f - constraints) == 1
    obj = (f - constraints).pop()
    if not v:
        v = obj.free_symbols
    if not v:
        # Constant objective: its value is the minimum.
        return obj, {}
    v = list(ordered(v))
    dim = len(v)
    assert all(x.is_Symbol for x in v)
    # Canonicalize constraints, Ne -> pair Lt, Eq -> pair Le
    constraints |= {Lt(*c.args) for c in constraints if isinstance(c, Ne)}
    constraints |= {Lt(c.rhs, c.lhs) for c in constraints if isinstance(c, Ne)}
    constraints |= {Le(*c.args) for c in constraints if isinstance(c, Eq)}
    constraints |= {Le(c.rhs, c.lhs) for c in constraints if isinstance(c, Eq)}
    constraints -= {c for c in constraints if isinstance(c, (Ne, Eq))}
    # Gt/Ge -> Lt, Le
    constraints = {c.reversed if c.func in (Gt, Ge) else c
                   for c in constraints}
    # Now we have only Lt/Le, each normalized to (lhs - rhs) ? 0.
    constraints = list(ordered(c.func(c.lhs - c.rhs, 0)
                               for c in constraints))
    if dim == 1:
        # Univariate case: reduce constraints to a real set and scan it.
        if constraints:
            dom = reduce_inequalities(constraints, *v).as_set()
        else:
            dom = Interval(-oo, oo, True, True)**len(v)
        return minimize_univariate(obj, v[0], dom)
    # Multivariate: only the linear-programming case is implemented.
    polys = [obj.as_poly(*v)] + [c.lhs.as_poly(*v) for c in constraints]
    is_polynomial = all(p is not None for p in polys)
    is_linear = is_polynomial and all(p.is_linear for p in polys)
    if is_linear:
        # Quick exit for strict forms
        if any(isinstance(c, Lt) for c in constraints):
            return
        # Transform to the standard form: maximize cᵀx with m⋅x≤b, x≥0.
        # We replace original vector of unrestricted variables v with
        # x of doubled size, so e.g. for the first component of v we
        # will have v₁ = x₁⁺ - x₁⁻, where x₁⁺≥0 and x₁⁻≥0.
        c = [-polys[0].coeff_monomial(x) for x in v]
        c.extend([-_ for _ in c])
        m = [([+p.coeff_monomial(x) for x in v] +
              [-p.coeff_monomial(x) for x in v])
             for p in polys[1:]]
        b = [-p.coeff_monomial(1) for p in polys[1:]]
        res, sol = simplex(c, m, b)
        # Undo the constant shift and recombine x⁺ - x⁻ back into v.
        res -= polys[0].coeff_monomial(1)
        sol = map(lambda x, y: x - y, sol[:dim], sol[dim:])
        return -res, dict(zip(v, sol))
    raise NotImplementedError
def maximize(f, *v):
    """
    Maximizes `f` with respect to given variables `v`.
    Implemented as minimize(-objective): relational constraints pass
    through unchanged and the optimum is negated back.
    See Also
    ========
    minimize
    """
    f = set(map(sympify, f if is_sequence(f) else [f]))
    negated = [e if e.is_Relational else -e for e in f]
    best, assignment = minimize(negated, *v)
    return -best, assignment
def minimize_univariate(f, x, dom):
    """Minimize the univariate expression ``f`` in ``x`` over domain ``dom``.
    Returns ``(minimum, {x: argmin})``.  Handles Union, Interval and
    FiniteSet domains; raises NotImplementedError for anything else.
    Returns None implicitly when no candidate points are found.
    """
    extr = {}
    if dom.is_Union:
        # Minimize over each part and keep all partial optima as candidates.
        for d in dom.args:
            fp, r = minimize_univariate(f, x, d)
            extr[r[x]] = fp
    elif dom.is_Interval:
        # Closed endpoints are candidates; evaluated as limits so a
        # removable singularity at the boundary does not break things.
        if not dom.left_open:
            extr[dom.start] = limit(f, x, dom.start)
        if not dom.right_open:
            extr[dom.end] = limit(f, x, dom.end, dir='-')
        # Singularities inside the domain: if f tends to -oo there, the
        # problem is unbounded below.
        for s in singularities(f, x):
            if s in dom:
                m = Min(limit(f, x, s), limit(f, x, s, dir='-'))
                if m == -oo:
                    return -oo, {x: s}
                else:
                    extr[s] = m
        # Stationary points of f.
        for p in solve(diff(f, x), x):
            p = p[x]
            if p in dom:
                extr[p] = f.subs({x: p})
    elif dom.is_FiniteSet:
        for p in dom.args:
            extr[p] = f.subs({x: p})
    else:
        raise NotImplementedError
    if extr:
        # Pick the candidate with the smallest value.  ('best' instead of
        # the original local named 'min', which shadowed the builtin.)
        best, point = oo, nan
        for p, fp in sorted(extr.items()):
            if fp < best:
                point, best = p, fp
        return best, {x: point}
class InfeasibleProblem(Exception):
    """Raised when a linear program has no feasible solution."""
def simplex(c, m, b):
    """
    Simplex algorithm for linear programming.
    Find a vector x with nonnegative elements, that maximizes
    quantity `c^T x`, subject to the constraints `m x <= b`.
    Examples
    ========
    >>> simplex([2, 3, 4], [[3, 2, 1], [2, 5, 3]], [10, 15])
    (20, (0, 0, 5))
    References
    ==========
    * Paul R. Thie, Gerard E. Keough, An Introduction to Linear
    Programming and Game Theory, Third edition, 2008, Ch. 3.
    """
    rows, cols = len(b), len(c)
    if len(m) != rows or any(len(_) != cols for _ in m):
        raise ValueError("The dimensions doesn't match")
    # Reorder the constraints so the b values are ascending.  The previous
    # sorted(m, key=lambda v: b[m.index(v)]) broke when m contained
    # duplicate rows (list.index always returns the first match); sorting
    # the row indices instead avoids the lookup entirely.
    order = sorted(range(rows), key=lambda i: b[i])
    m = [m[i] for i in order]
    b = [b[i] for i in order]
    # build full tableau: objective row at the bottom, slack identity
    # columns, rhs in the last column.
    tableau = zeros(rows + 1, cols + rows + 1)
    tableau[-1, :-1] = [[-_ for _ in c] + [0]*rows]
    tableau[:-1, :cols] = m
    tableau[:-1, cols:-1] = eye(rows)
    tableau[:, -1] = b + [0]
    def pivot_col(obj):
        """First column with a negative objective entry (Bland's rule)."""
        for i in range(len(obj) - 1):  # pragma: no branch
            if obj[i] < 0:
                return i
    def pivot_row(lhs, rhs):
        """Row minimizing rhs/lhs over positive lhs entries (ratio test)."""
        ratio, idx = oo, 0
        for i in range(len(lhs)):
            if lhs[i] > 0:
                r = rhs[i]/lhs[i]
                if r < ratio:
                    ratio, idx = r, i
        return idx
    def solve_simplex(tableau, basis, phase1=False):
        """Pivot until optimal; returns 1 if unbounded, else 0."""
        while min(tableau[-1, :-1]) < 0:
            col = pivot_col(tableau[-1, :])
            row = pivot_row(tableau[:-1 - phase1, col], tableau[:, -1])
            if tableau[row, col] <= 0:
                return 1
            else:
                basis[row] = col
                tableau[row, :] /= tableau[row, col]
                for r in range(tableau.rows):
                    if r != row:
                        tableau[r, :] -= tableau[r, col]*tableau[row, :]
        return 0
    # Now solve.  Rows with negative b need artificial variables and a
    # phase-1 solve to find an initial feasible basis.  (Enumerate indices
    # directly: the old [b.index(_) for _ in b if _ < 0] repeated the first
    # index when b held duplicate negative values.)
    neg_idx = [i for i, _ in enumerate(b) if _ < 0]
    nneg = len(neg_idx)
    basis = list(range(cols + nneg - 1, cols + nneg + rows - 1))
    if neg_idx:
        tableau = tableau.col_insert(-1, zeros(tableau.rows, nneg))
        tableau = tableau.row_insert(tableau.cols, zeros(1, tableau.cols))
        j = tableau.cols - nneg - 1
        for i in neg_idx:
            tableau[i, :] *= -1
            tableau[i, j] = 1
            tableau[-1, :-1 - nneg] -= tableau[i, :-1 - nneg]
            tableau[-1, -1] -= tableau[i, -1]
            j += 1
        status = solve_simplex(tableau, basis, phase1=True)
        assert status == 0
        if tableau[-1, -1].is_nonzero:
            # phase 1 couldn't drive the artificial objective to zero
            raise InfeasibleProblem
        # Drop the artificial objective row and columns.
        del tableau[-1, :]
        for i in range(nneg):
            del tableau[:, -2]
        # Pivot any artificial variables still in the basis out of it.
        for row in [_ for _ in range(rows) if basis[_] > cols + rows - 1]:
            for col in range(tableau.cols - 1):  # pragma: no branch
                if tableau[row, col] != 0:
                    break
            basis[row] = col
            tableau[row, :] /= tableau[row, col]
            for r in range(tableau.rows):
                if r != row:
                    tableau[r, :] -= tableau[r, col]*tableau[row, :]
    status = solve_simplex(tableau, basis)
    if status == 1:
        # unbounded problem
        return oo, (oo,)*cols
    # Read the solution off the final basis.
    ans = [Integer(0)]*cols
    for c, b in enumerate(basis):
        if b < cols:
            ans[b] = tableau[:-1, -1][c]
    return tableau[-1, -1], tuple(ans)
| skirpichev/omg | diofant/calculus/optimization.py | Python | bsd-3-clause | 7,844 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014 GNS3 Technologies Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Base class (interface) for modules.
"""
from ..qt import QtCore
import logging
log = logging.getLogger(__name__)
class Module(QtCore.QObject):
    """
    Module interface.
    """
    # Emitted as (title, message) to notify the user.
    notification_signal = QtCore.Signal(str, str)
    def __init__(self):
        super(Module, self).__init__()
    @staticmethod
    def nodes():
        """
        Returns all nodes supported by this module.
        Must be overloaded.
        :returns: list of node classes
        """
        # NOTE: previously declared as a @staticmethod that took `self`,
        # so instance calls (module.nodes()) raised TypeError instead of
        # the intended NotImplementedError; the bogus parameter is removed
        # to match preferencePages().
        raise NotImplementedError()
    @staticmethod
    def preferencePages():
        """
        Returns all the preference pages used by this module.
        Must be overloaded.
        :returns: list of preference page classes
        """
        raise NotImplementedError()
| noplay/gns3-gui | gns3/modules/module.py | Python | gpl-3.0 | 1,482 |
# Generated by Creer at 08:40PM on November 07, 2015 UTC, git hash: '1b69e788060071d644dd7b8745dca107577844e1'
# This is a simple class to represent the Building object in the game. You can extend it by adding utility functions here in this file.
from games.anarchy.game_object import GameObject
# <<-- Creer-Merge: imports -->> - Code you add between this comment and the end comment will be preserved between Creer re-runs.
# you can add addtional import(s) here
# <<-- /Creer-Merge: imports -->>
class Building(GameObject):
    """ The class representing the Building in the Anarchy game.
    A basic building. It does nothing besides burn down. Other Buildings inherit from this class.
    """
    def __init__(self):
        """ initializes a Building with basic logic as provided by the Creer code generator
        """
        GameObject.__init__(self)
        # private attributes to hold the properties so they appear read only
        self._bribed = False
        self._building_east = None
        self._building_north = None
        self._building_south = None
        self._building_west = None
        self._fire = 0
        self._health = 0
        self._is_headquarters = False
        self._owner = None
        self._x = 0
        self._y = 0
        # cache slot for adjacent buildings; not populated in this class
        self._sides = None
    @property
    def bribed(self):
        """when true this building has already been bribed this turn and cannot be bribed again this turn.
        """
        return self._bribed
    @property
    def building_east(self):
        """The Building directly to the east of this building, or None if not present.
        """
        return self._building_east
    @property
    def building_north(self):
        """The Building directly to the north of this building, or None if not present.
        """
        return self._building_north
    @property
    def building_south(self):
        """The Building directly to the south of this building, or None if not present.
        """
        return self._building_south
    @property
    def building_west(self):
        """The Building directly to the west of this building, or None if not present.
        """
        return self._building_west
    @property
    def fire(self):
        """How much fire is currently burning the building, and thus how much damage it will take at the end of its owner's turn. 0 means no fire.
        """
        return self._fire
    @property
    def health(self):
        """How much health this building currently has. When this reaches 0 the Building has been burned down
        """
        return self._health
    @property
    def is_headquarters(self):
        """true if this is the Headquarters of the owning player, false otherwise. Burning this down wins the game for the other Player.
        """
        return self._is_headquarters
    @property
    def owner(self):
        """The player that owns this building. If it burns down (health reaches 0) that player gets an additional bribe(s).
        """
        return self._owner
    @property
    def x(self):
        """The location of the Building along the x-axis
        """
        return self._x
    @property
    def y(self):
        """The location of the Building along the y-axis
        """
        return self._y
    @property
    def is_alive(self):
        """True while the building still has health left."""
        return self.health > 0
    @property
    def is_usable(self):
        """True when the building is alive and has not been bribed this turn."""
        return self.is_alive and not self.bribed
    @property
    def time_until_death(self):
        """Estimated turns until fire destroys this building.
        Returns 0 when already dead and a large sentinel (1000000000)
        when there is no fire.
        """
        if self.health <= 0:
            return 0
        if self.fire == 0:
            return 1000000000
        # NOTE(review): '/' is true division on Python 3, so this may
        # return a float; the original trailing comment suggests '//'
        # may have been intended -- confirm before changing.
        return self.health/self.fire # integer division?
    def get_sides(self):
        """List of adjacent buildings, omitting empty (None) sides.
        """
        return [x for x in [self._building_east,self._building_north,self._building_south,self._building_west] if x is not None]
    def get_sides_true(self):
        """List of all four adjacent slots, including None entries.
        """
        return [x for x in [self._building_east,self._building_north,self._building_south,self._building_west]]
    def get_building_by_dir(self, dir):
        # Neighbor in compass direction 'dir' ('north'/'south'/'east'/
        # 'west'); falls through to an implicit None for anything else.
        if dir == "north":
            return self.building_north
        if dir == "south":
            return self.building_south
        if dir == "west":
            return self.building_west
        if dir == "east":
            return self.building_east
    def get_building_by_wind(self, dir):
        # Returns the neighbor on the OPPOSITE side of 'dir'.
        # NOTE(review): presumably 'dir' is the wind direction and fire
        # spreads from the opposite side -- confirm against game rules.
        if dir == "north":
            return self.building_south
        if dir == "south":
            return self.building_north
        if dir == "west":
            return self.building_east
        if dir == "east":
            return self.building_west
    # <<-- Creer-Merge: functions -->> - Code you add between this comment and the end comment will be preserved between Creer re-runs.
    # if you want to add any client side logic (such as state checking functions) this is where you can add them
    # <<-- /Creer-Merge: functions -->>
def put_out_fire(self, ai):
""" Finds the first available fire department and calls extinguish on current building
Args:
fire_departments: List of player owned fire departments
"""
if self.is_headquarters:
return
for dep in ai.player.fire_departments:
if self.fire > 0 and dep.is_usable\
and ai.player.bribes_remaining > 0:
dep.extinguish(self)
| brhoades/megaminer16-anarchy | games/anarchy/building.py | Python | mit | 5,438 |
# -*- coding: utf-8 -*-
#import the app and the login manager
from app import app
from flask import g, request, render_template, redirect, url_for, flash, send_file, abort
from flask import jsonify
from flask.ext.login import login_user, logout_user, current_user, login_required
from app.structures.models.user import *
from app.structures.models.gradebook import *
from app.structures.models.course import *
from app.plugins.latework import getLateCalculators
from app.helpers.gradebook import getStudentAssignmentScores, getStudentAuxScores
@app.route('/student/grades/<cid>')
@login_required
def viewGrades(cid):
  '''
  Function Type: View Function
  Purpose: Render the grade-overview page for the current student in the
  course identified by cid.
  '''
  selected_course = Course.objects.get(id=cid)
  return render_template(
      'student/viewgrades.html',
      course=selected_course,
      cid=cid)
def createHighlight(gradeSpec):
  '''Return a Bootstrap contextual class attribute for a grade cell.

  Maps the optional 'highlight' color in gradeSpec to the matching table
  cell class string. Always returns a string: previously a gradeSpec
  without a 'highlight' key fell off the end and returned None, which
  callers interpolated into their HTML as the literal text "None".
  '''
  colorClasses = {
      'red': "class='danger'",
      'yellow': "class='warning'",
      'blue': "class='info'",
      'green': "class='success'",
  }
  # Unknown colors and a missing 'highlight' key both yield no class.
  return colorClasses.get(gradeSpec.get('highlight'), "")
@app.route('/student/renderGrade', methods=['POST'])
@login_required
def studentRenderGrades():
  '''
  Function Type: View Function (AJAX)
  Purpose: Build one HTML table row holding the current student's score
  for every problem and auxillary grade column of the requested course,
  plus the running course total, and return it as JSON.
  '''
  try:
    content = request.get_json()
    c = Course.objects.get(id=content['cid'])
    u = User.objects.get(id=current_user.id)

    assignmentScores = getStudentAssignmentScores(c, u)
    userCourseScore = 0
    outString = "<tr><th>Your Scores</th>"
    for assignment, a in zip(assignmentScores, c.assignments):
      # If this assignment doesn't have any problems we put a blank column in
      if len(assignment) == 0:
        outString += "<td class='active'></td>"
        continue
      for problem, p in zip(assignment, a.problems):
        if problem is None:
          # No submission: render an inert 0.00 cell
          outString += "<td class='active'>"
          outString += "0.00"
          outString += "</td>"
        else:
          highlight = createHighlight(problem)
          # Prefer the adjusted final score when the grader set one
          if 'finalTotalScore' in problem:
            points = problem['finalTotalScore']
          else:
            points = problem['rawTotalScore']
          userCourseScore += points
          outString += "<td %s>%.2f</td>" % (highlight, points)
    # Auxillary (non-assignment) grade columns
    for group in c.gradeBook.auxillaryGrades:
      if len(group.columns) == 0:
        outString += "<td class='active'></td>"
        continue
      for col in group.columns:
        # NOTE(review): setdefault inserts a None entry into col.scores as
        # a side effect; .get() would avoid mutating the gradebook. Kept
        # for behavioral compatibility -- confirm before changing.
        score = col.scores.setdefault(u.keyOfUsername(), None)
        if score:
          outString += "<td>%.2f</td>" % (score.totalScore())
          userCourseScore += score.totalScore()
        else:
          outString += "<td>%.2f</td>" % (0)
    outString += "<td>%.2f</td></tr>" % (userCourseScore)
    return jsonify(res=outString, cid=str(c.id))
  except (Course.DoesNotExist, User.DoesNotExist):
    abort(404)
  except Exception as e:
    # Surface the error text to the AJAX caller instead of a 500 page.
    return jsonify(res=str(e))
| CSGreater-Developers/HMC-Grader | app/userViews/student/viewGrades.py | Python | mit | 3,237 |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class BashCompletion(AutotoolsPackage):
    """Programmable completion functions for bash."""

    homepage = "https://github.com/scop/bash-completion"
    url      = "https://github.com/scop/bash-completion/archive/2.3.tar.gz"
    git      = "https://github.com/scop/bash-completion.git"

    version('develop', branch='master')
    version('2.7', sha256='dba2b88c363178622b61258f35d82df64dc8d279359f599e3b93eac0375a416c')
    version('2.3', sha256='d92fcef5f6e3bbc68a84f0a7b063a1cd07b4000cc6e275cd1ff83863ab3b322a')

    # Build dependencies
    depends_on('automake', type='build')
    depends_on('autoconf', type='build')
    depends_on('libtool', type='build')
    # Other dependencies
    # NOTE(review): this spec looks email-redaction-mangled; upstream Spack
    # pins a bash version spec here (bash@<version>:) -- verify.
    depends_on('[email protected]:', type='run')

    @run_before('install')
    def create_install_directory(self):
        # "make install" expects the completions directory to already exist
        # under the install prefix.
        mkdirp(join_path(self.prefix.share, 'bash-completion', 'completions'))

    @run_after('install')
    def show_message_to_user(self):
        # Print shell-profile snippet the user must add by hand; Spack does
        # not modify dotfiles itself.
        prefix = self.prefix
        # Guidelines for individual user as provided by the author at
        # https://github.com/scop/bash-completion
        print('=====================================================')
        print('Bash completion has been installed. To use it, please')
        print('include the following lines in your ~/.bash_profile :')
        print('')
        print('# Use bash-completion, if available')
        print('[[ $PS1 && -f %s/share/bash-completion/bash_completion ]] && \ ' % prefix)  # NOQA: ignore=E501
        print('    . %s/share/bash-completion/bash_completion' % prefix)
        print('')
        print('=====================================================')
| iulian787/spack | var/spack/repos/builtin/packages/bash-completion/package.py | Python | lgpl-2.1 | 1,869 |
from imports import *
from textures import *
class Obstaculo:
    """A single obstacle (traffic cone texture) drawn as a textured quad."""

    def __init__(self,posx,posy,rad):
        self.posx = posx
        self.posy = posy
        # NOTE(review): the rad parameter is ignored and the radius is
        # hard-coded to 5 (callers pass 15) -- confirm whether this is a
        # deliberate tweak or a bug before changing it.
        self.rad = 5
        self.escalax = 30  # horizontal draw scale
        self.escalay = 30  # vertical draw scale
        self.renderlist = []
        self.texture = generarTex("cono.png", True)
        # Pre-compile the display list once at construction time.
        self.dibujar(self.texture)

    def dibujar(self, texture):
        """Compile a GL display list that draws a unit textured quad.

        NOTE(review): the texture parameter is unused; self.texture is
        bound instead -- confirm intent.
        """
        self.renderlist = glGenLists(1)
        glNewList(self.renderlist, GL_COMPILE)
        glEnable(GL_TEXTURE_2D)
        glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE)
        glBindTexture(GL_TEXTURE_2D, self.texture)
        glBegin(GL_QUADS)
        # Unit quad; texture coordinates match each corner.
        glTexCoord2f(0.0, 0.0)
        glVertex2f(0.0, 0.0)
        glTexCoord2f(1.0, 0.0)
        glVertex2f(1.0, 0.0)
        glTexCoord2f(1.0, 1.0)
        glVertex2f(1.0, 1.0)
        glTexCoord2f(0.0, 1.0)
        glVertex2f(0.0, 1.0)
        glEnd()
        glDisable(GL_TEXTURE_2D)
        glEndList()

    def update(self):
        """Draw the obstacle at its position, scaled and centered on posx."""
        glPushMatrix()
        glTranslatef(self.posx, self.posy, 0.0)
        # Shift left by half the width so posx is the horizontal center.
        glTranslatef(-self.escalax / 2, 0.0, 0.0)
        glScalef(self.escalax, self.escalay, 0.0)
        glCallList(self.renderlist)
        glPopMatrix()

    def getpos(self):
        """Position as an [x, y] list."""
        return [self.posx,self.posy]

    def dist(self,par1,par2):
        """Euclidean distance between two [x, y] points."""
        dist = math.sqrt((par1[0]-par2[0])**2 + (par1[1]- par2[1])**2)
        return dist

    def colicionando(self,auto):
        """True when the car is within 30 + rad units of this obstacle."""
        posA = auto.getpos()
        if self.dist(posA,self.getpos()) <= 30 + self.rad:
            # collision detected (old debug print removed)
            return True
##        if (posA[1] >= self.posy and posA[1] <= self.posy + self.rad) and (abs(posA[0] - self.posx)<= self.rad):
##            return True
        return False
return False
class Obstaculos:
    """Collection of Obstaculo objects with bulk draw/collision helpers."""

    def __init__(self, L=None):
        """L: optional initial list of obstacles (adopted, not copied).

        The original signature used a mutable default (L=[]), which is
        shared by every instance constructed without arguments; a None
        sentinel avoids that aliasing bug while keeping call sites valid.
        """
        self.obstaculos = [] if L is None else L

    def update(self):
        """Draw every obstacle."""
        for obs in self.obstaculos:
            obs.update()

    def colicionando(self, auto, jugador):
        """Remove every obstacle the car collides with, charging the
        player 100 points and 10 HP per hit.
        """
        # Iterate over a snapshot: the original removed from the list it
        # was iterating, which silently skips the element following each
        # removal.
        for obs in list(self.obstaculos):
            if obs.colicionando(auto):
                self.obstaculos.remove(obs)
                jugador.updatePoints(-100)
                jugador.looseHp(10)

    def adRandom(self):
        """Append a new obstacle at a random on-screen position."""
        self.obstaculos.append(Obstaculo(randint(100, 600), randint(100, 500), 15))
| bsubercaseaux/dcc | Modelación y Computación Gráfica/graficaAutos/obstaculo.py | Python | mit | 2,245 |
#
# Electric Brain is an easy to use platform for machine learning.
# Copyright (C) 2016 Electric Brain Software Corporation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def eprint(*args, **kwargs):
import sys
import pprint
import traceback
import re
caller = traceback.format_list(traceback.extract_stack())[-2]
filename = caller.split("\"")[1].split("/")[-1]
lineNumber = re.search('line (\\d+)', caller).group(1)
message = filename + ":" + lineNumber + " " + " ".join([pprint.pformat(arg) for arg in args])
print(message, file = sys.stderr)
sys.stderr.flush()
| electricbrainio/electric-brain | lib/python/utils.py | Python | agpl-3.0 | 1,219 |
#!/usr/bin/env python
#coding: utf-8
from collections import deque
def yanghui(k):
    """Return row k (0-indexed) of Yang Hui's (Pascal's) triangle.

    Builds the row iteratively with a deque: each pass turns row i into
    row i + 1 by summing adjacent pairs, then appending the trailing 1.
    """
    # row 0 -> row 1 -> row 2 -> ... -> row k
    q = deque([1])
    # range instead of xrange: identical iteration semantics on Python 2
    # and also valid on Python 3.
    for i in range(k):
        for _ in range(i):
            q.append(q.popleft() + q[0])
        q.append(1)
    return list(q)
print yanghui(4) | libchaos/algorithm-python | 05/code/queue_yanghui.py | Python | mit | 249 |
class HelpMixin:
    """Mixin that exposes a `help` property backed by a HelpCell wrapper
    and surfaces its value through `__doc__`.
    """

    @property
    def help(self):
        """The HelpCell wrapping this object's help text."""
        from .Help import HelpCell
        # Objects whose path starts with "HELP" are themselves help cells;
        # help-on-help is disallowed.
        if self._path[:1] == ("HELP",):
            raise AttributeError("Help cells can't have help")
        return HelpCell(self)

    @help.setter
    def help(self, value):
        # Assignment routes the value through the HelpCell wrapper's set().
        from .Help import HelpCell
        wrapper = HelpCell(self)
        return wrapper.set(value)

    def __getattribute__(self, name: str):
        # __dict__ must use the default lookup to avoid infinite recursion
        # with the `name in self.__dict__` branch below.
        if name == "__dict__":
            return super().__getattribute__(name)
        elif name == "__doc__":
            # NOTE(review): relies on the host object exposing a `.self`
            # proxy attribute (seamless-style accessor) -- confirm on every
            # class that uses this mixin.
            return self.self.help.value
        elif name in self.__dict__:
            # Instance attributes take precedence over class descriptors.
            return self.__dict__[name]
        return super().__getattribute__(name)
| sjdv1982/seamless | seamless/highlevel/HelpMixin.py | Python | mit | 688 |
"""
WSGI config for discover project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Default the settings module; a value already present in the environment
# takes precedence over this fallback.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "discover.settings")

# Module-level WSGI callable that application servers look up.
application = get_wsgi_application()
| martinskou/training | discover/discover/wsgi.py | Python | apache-2.0 | 393 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations
def update_site_forward(apps, schema_editor):
    """Set site domain and name."""
    site_model = apps.get_model("sites", "Site")
    site_model.objects.update_or_create(
        id=settings.SITE_ID,
        defaults={"domain": "repork.com", "name": "repork"},
    )
def update_site_backward(apps, schema_editor):
    """Revert site domain and name to default."""
    site_model = apps.get_model("sites", "Site")
    site_model.objects.update_or_create(
        id=settings.SITE_ID,
        defaults={"domain": "example.com", "name": "example.com"},
    )
class Migration(migrations.Migration):
    """Data migration pointing the default Site at the real domain."""

    # Runs after the sites app has created the initial Site row.
    dependencies = [
        ('sites', '0001_initial'),
    ]

    operations = [
        # Reversible: forward writes the production domain/name, backward
        # restores Django's example.com defaults.
        migrations.RunPython(update_site_forward, update_site_backward),
    ]
| jvosk/repork | repork_project/contrib/sites/migrations/0002_set_site_domain_and_name.py | Python | bsd-3-clause | 937 |
# ----------------------------------------------------------------------------
# Copyright (c) 2016-2018, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-08-04 20:58
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adjust field options (help_text, verbose_name, digits/places) on the
    payments models' order, rate and workshop fields.
    """

    dependencies = [
        ('payments', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='order',
            name='billed_datetime',
            field=models.DateTimeField(help_text='This is the confirmed date and time of payment', null=True, verbose_name='billed date & time'),
        ),
        migrations.AlterField(
            model_name='order',
            name='billed_total',
            field=models.DecimalField(decimal_places=2, help_text='This is the confirmed paid amount from NAU', max_digits=7, null=True, verbose_name='billed total (USD)'),
        ),
        migrations.AlterField(
            model_name='order',
            name='order_total',
            field=models.DecimalField(decimal_places=2, max_digits=7, verbose_name='order total (USD)'),
        ),
        migrations.AlterField(
            model_name='rate',
            name='price',
            field=models.DecimalField(decimal_places=2, max_digits=6, verbose_name='price (USD)'),
        ),
        migrations.AlterField(
            model_name='workshop',
            name='slug',
            field=models.SlugField(help_text='This is the unique identifier for the URL (i.e. title-YYYY-MM-DD)'),
        ),
        migrations.AlterField(
            model_name='workshop',
            name='url',
            field=models.URLField(max_length=2000, verbose_name='URL'),
        ),
    ]
| jakereps/qiime-workshops | payments/migrations/0002_helptext.py | Python | bsd-3-clause | 1,971 |
from django.db.models.query import QuerySet
class PublisherQuerySet(QuerySet):
    """Added publisher specific filters to queryset.
    """
    def drafts(self):
        # Draft (editable) copies only.
        return self.filter(publisher_is_draft=True)

    def public(self):
        # Published (live) copies only.
        return self.filter(publisher_is_draft=False)
import asyncio
from .log import internal_logger
class BaseProtocol(asyncio.Protocol):
    """Base asyncio protocol implementing write flow-control bookkeeping.

    Tracks whether the transport has paused writing and whether the
    connection has been lost, so a coroutine can wait in _drain_helper()
    until the transport is ready for more data.
    """

    __slots__ = ('_loop', '_paused', '_drain_waiter',
                 '_connection_lost', 'transport')

    def __init__(self, loop=None):
        # loop: event loop to use; defaults to the current event loop.
        if loop is None:
            self._loop = asyncio.get_event_loop()
        else:
            self._loop = loop
        self._paused = False            # transport asked us to stop writing
        self._drain_waiter = None       # future awaited by _drain_helper()
        self._connection_lost = False
        self.transport = None

    def pause_writing(self):
        # Called by the transport when its write buffer grows too large
        # (asyncio flow-control callback).
        assert not self._paused
        self._paused = True
        if self._loop.get_debug():
            internal_logger.debug("%r pauses writing", self)

    def resume_writing(self):
        # Called by the transport when its buffer has drained; wake the
        # coroutine blocked in _drain_helper(), if any.
        assert self._paused
        self._paused = False
        if self._loop.get_debug():
            internal_logger.debug("%r resumes writing", self)

        waiter = self._drain_waiter
        if waiter is not None:
            self._drain_waiter = None
            if not waiter.done():
                waiter.set_result(None)

    def connection_made(self, transport):
        self.transport = transport

    def connection_lost(self, exc):
        # exc is None on a clean close, otherwise the error that killed
        # the connection.
        self._connection_lost = True
        # Wake up the writer if currently paused.
        self.transport = None
        if not self._paused:
            return
        waiter = self._drain_waiter
        if waiter is None:
            return
        self._drain_waiter = None
        if waiter.done():
            return
        # Resolve the drain waiter so the drainer either continues (clean
        # close) or sees the connection error.
        if exc is None:
            waiter.set_result(None)
        else:
            waiter.set_exception(exc)

    async def _drain_helper(self):
        # Fast paths: a dead connection raises immediately; an unpaused
        # transport means there is nothing to wait for.
        if self._connection_lost:
            raise ConnectionResetError('Connection lost')
        if not self._paused:
            return
        # At most one live drain waiter at a time (a leftover waiter must
        # have been cancelled).
        waiter = self._drain_waiter
        assert waiter is None or waiter.cancelled()
        waiter = self._loop.create_future()
        self._drain_waiter = waiter
        await waiter
| rutsky/aiohttp | aiohttp/base_protocol.py | Python | apache-2.0 | 1,948 |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Tasks used in OAI harvesting together with repository information."""
import os
import random
import re
from functools import wraps
from invenio.base.globals import cfg
REGEXP_AUTHLIST = re.compile(
"<collaborationauthorlist.*?>.*?</collaborationauthorlist>", re.DOTALL)
REGEXP_REFS = re.compile(
"<record.*?>.*?<controlfield .*?>.*?</controlfield>(.*?)</record>",
re.DOTALL)
def _attach_files_to_obj(obj, new_ffts):
"""Given a SmartJSON representation, add any missing fft entries to obj."""
if not new_ffts or new_ffts.get("fft") is None:
obj.log.error("No files to add")
return
if "fft" not in obj.data:
obj.data['fft'] = new_ffts["fft"]
return
if not isinstance(new_ffts["fft"], list):
new_ffts["fft"] = [new_ffts["fft"]]
if not isinstance(obj.data["fft"], list):
obj.data["fft"] = [obj.data["fft"]]
for element in new_ffts["fft"]:
if element.get("url", "") in obj.data.get("fft.url", []):
continue
obj.data['fft'].append(element)
def post_process_selected(post_process):
"""Check if post process is selected."""
@wraps(post_process_selected)
def _post_process_selected(obj, eng):
try:
post_process_list = obj.extra_data["repository"]["postprocess"]
except KeyError:
# No post process list, we return False
eng.log.info("No post-process for {0}".format(post_process))
return False
if post_process in post_process_list:
eng.log.info("Post-process found for {0}".format(post_process))
return True
return False
return _post_process_selected
def convert_record_with_repository(stylesheet=""):
"""Convert a MARC record to another one thanks to the stylesheet.
This function converts a record to a marcxml representation by using a
style sheet which should be in parameter or which should have been stored
into extra data of the object.
The priority is given to the stylesheet into the extra data of the object.
The parameter should be used in case the stylesheet is missing from extra data
or when you want to do a simple workflow which doesn't need to be dynamic.
:param stylesheet: it is the name of the stylesheet that you want to use
to convert a oai record to a marcxml one
:type stylesheet: str
"""
@wraps(convert_record_with_repository)
def _convert_record(obj, eng):
from invenio.modules.workflows.tasks.marcxml_tasks import convert_record
if not stylesheet:
repository = obj.extra_data.get("repository", {})
arguments = repository.get("arguments", {})
stylesheet_to_use = arguments.get('c_stylesheet')
else:
stylesheet_to_use = stylesheet
convert_record(stylesheet_to_use)(obj, eng)
return _convert_record
def arxiv_fulltext_download(obj, eng):
"""Perform the fulltext download step for arXiv records.
:param obj: Bibworkflow Object to process
:param eng: BibWorkflowEngine processing the object
"""
from invenio.utils.plotextractor.api import get_pdf_from_arxiv
if "result" not in obj.extra_data:
obj.extra_data["_result"] = {}
if "pdf" not in obj.extra_data["_result"]:
extract_path = os.path.join(
cfg.get('OAIHARVESTER_STORAGEDIR', cfg.get('CFG_TMPSHAREDDIR')),
str(eng.uuid)
)
pdf = get_pdf_from_arxiv(
obj.data.get(cfg.get('OAIHARVESTER_RECORD_ARXIV_ID_LOOKUP')),
extract_path
)
arguments = obj.extra_data["repository"]["arguments"]
try:
if not arguments['t_doctype'] == '':
doctype = arguments['t_doctype']
else:
doctype = 'arXiv'
except KeyError:
eng.log.error("WARNING: HASARDOUS BEHAVIOUR EXPECTED, "
"You didn't specified t_doctype in argument"
" for fulltext_download,"
"try to recover by using the default one!")
doctype = 'arXiv'
if pdf:
obj.extra_data["_result"]["pdf"] = pdf
new_dict_representation = {
"fft": [
{
"url": pdf,
"docfile_type": doctype
}
]
}
_attach_files_to_obj(obj, new_dict_representation)
fileinfo = {
"type": "fulltext",
"filename": os.path.basename(pdf),
"full_path": pdf,
}
obj.update_task_results(
"PDF",
[{
"name": "PDF",
"result": fileinfo,
"template": "workflows/results/fft.html"
}]
)
else:
obj.log.info("No PDF found.")
else:
eng.log.info("There was already a pdf register for this record,"
"perhaps a duplicate task in you workflow.")
def plot_extract(plotextractor_types=("latex",)):
"""Perform the plotextraction step.
Download tarball for each harvested/converted record,
then run plotextrator.
Update converted xml files with generated xml or add it for upload.
:param plotextractor_types:
:return: :raise workflows_error.WorkflowError:
"""
@wraps(plot_extract)
def _plot_extract(obj, eng):
from invenio.utils.plotextractor.api import (
get_tarball_from_arxiv,
get_marcxml_plots_from_tarball
)
from invenio.modules.workflows.utils import convert_marcxml_to_bibfield
from invenio.utils.shell import Timeout
if "_result" not in obj.extra_data:
obj.extra_data["_result"] = {}
repository = obj.extra_data.get("repository", {})
arguments = repository.get("arguments", {})
chosen_type = plotextractor_types
if not chosen_type:
chosen_type = arguments.get('p_extraction-source', [])
if not isinstance(chosen_type, list):
chosen_type = [chosen_type]
if 'latex' in chosen_type:
# Run LaTeX plotextractor
if "tarball" not in obj.extra_data["_result"]:
extract_path = os.path.join(
cfg.get('OAIHARVESTER_STORAGEDIR', cfg.get('CFG_TMPSHAREDDIR')),
str(eng.uuid)
)
tarball = get_tarball_from_arxiv(
obj.data.get(cfg.get('OAIHARVESTER_RECORD_ARXIV_ID_LOOKUP')),
extract_path
)
if tarball is None:
obj.log.error("No tarball found")
return
obj.extra_data["_result"]["tarball"] = tarball
else:
tarball = obj.extra_data["_result"]["tarball"]
try:
marcxml = get_marcxml_plots_from_tarball(tarball)
except Timeout:
eng.log.error(
'Timeout during tarball extraction on {0}'.format(tarball)
)
if marcxml:
# We store the path to the directory the tarball contents lives
new_dict = convert_marcxml_to_bibfield(marcxml)
_attach_files_to_obj(obj, new_dict)
obj.update_task_results(
"Plots",
[{
"name": "Plots",
"result": new_dict["fft"],
"template": "workflows/results/plots.html"
}]
)
return _plot_extract
def refextract(obj, eng):
"""Perform the reference extraction step.
:param obj: Bibworkflow Object to process
:param eng: BibWorkflowEngine processing the object
"""
from invenio.legacy.refextract.api import extract_references_from_file_xml
from invenio.utils.plotextractor.api import get_pdf_from_arxiv
from invenio.modules.workflows.utils import convert_marcxml_to_bibfield
if "_result" not in obj.extra_data:
obj.extra_data["_result"] = {}
try:
pdf = obj.extra_data["_result"]["pdf"]
except KeyError:
pdf = None
if not pdf:
extract_path = os.path.join(
cfg.get('OAIHARVESTER_STORAGEDIR', cfg.get('CFG_TMPSHAREDDIR')),
str(eng.uuid)
)
pdf = get_pdf_from_arxiv(
obj.data.get(cfg.get('OAIHARVESTER_RECORD_ARXIV_ID_LOOKUP')),
extract_path
)
obj.extra_data["_result"]["pdf"] = pdf
if pdf and os.path.isfile(pdf):
references_xml = extract_references_from_file_xml(
obj.extra_data["_result"]["pdf"]
)
if references_xml:
updated_xml = '<?xml version="1.0" encoding="UTF-8"?>\n' \
'<collection>\n' + references_xml + \
"\n</collection>"
new_dict_representation = convert_marcxml_to_bibfield(updated_xml)
obj.data["reference"] = new_dict_representation["reference"]
obj.log.info("Extracted {0} references".format(len(obj.data["reference"])))
obj.update_task_results(
"References",
[{"name": "References",
"result": new_dict_representation['reference'],
"template": "workflows/results/refextract.html"}]
)
else:
obj.log.info("No references extracted")
else:
obj.log.error("Not able to download and process the PDF")
def author_list(obj, eng):
"""Perform the special authorlist extraction step.
:param obj: Bibworkflow Object to process
:param eng: BibWorkflowEngine processing the object
"""
from invenio.legacy.bibrecord import create_records, record_xml_output
from invenio.legacy.bibconvert.xslt_engine import convert
from invenio.utils.plotextractor.api import get_tarball_from_arxiv
from invenio.utils.plotextractor.cli import get_defaults
from invenio.modules.workflows.utils import convert_marcxml_to_bibfield
from invenio.utils.plotextractor.converter import untar
from invenio.utils.shell import Timeout
from ..utils import find_matching_files
identifiers = obj.data.get(cfg.get('OAIHARVESTER_RECORD_ARXIV_ID_LOOKUP'), "")
if "_result" not in obj.extra_data:
obj.extra_data["_result"] = {}
if "tarball" not in obj.extra_data["_result"]:
extract_path = os.path.join(
cfg.get('OAIHARVESTER_STORAGEDIR', cfg.get('CFG_TMPSHAREDDIR')),
str(eng.uuid)
)
tarball = get_tarball_from_arxiv(
obj.data.get(cfg.get('OAIHARVESTER_RECORD_ARXIV_ID_LOOKUP')),
extract_path
)
if tarball is None:
obj.log.error("No tarball found")
return
else:
tarball = obj.extra_data["_result"]["tarball"]
# FIXME
tarball = str(tarball)
sub_dir, dummy = get_defaults(tarball,
cfg['CFG_TMPDIR'], "")
try:
untar(tarball, sub_dir)
obj.log.info("Extracted tarball to: {0}".format(sub_dir))
except Timeout:
eng.log.error('Timeout during tarball extraction on %s' % (
obj.extra_data["_result"]["tarball"]))
xml_files_list = find_matching_files(sub_dir, ["xml"])
obj.log.info("Found xmlfiles: {0}".format(xml_files_list))
authors = ""
for xml_file in xml_files_list:
xml_file_fd = open(xml_file, "r")
xml_content = xml_file_fd.read()
xml_file_fd.close()
match = REGEXP_AUTHLIST.findall(xml_content)
if match:
obj.log.info("Found a match for author extraction")
a_stylesheet = obj.extra_data["repository"]["arguments"].get(
"a_stylesheet"
) or "authorlist2marcxml.xsl"
authors = convert(xml_content, a_stylesheet)
authorlist_record = create_records(authors)
if len(authorlist_record) == 1:
if authorlist_record[0][0] is None:
eng.log.error("Error parsing authorlist record for id: %s" % (
identifiers,))
authorlist_record = authorlist_record[0][0]
author_xml = record_xml_output(authorlist_record)
if author_xml:
updated_xml = '<?xml version="1.0" encoding="UTF-8"?>\n<collection>\n' \
+ record_xml_output(authorlist_record) + '</collection>'
new_dict_representation = convert_marcxml_to_bibfield(updated_xml)
obj.data["authors"] = new_dict_representation["authors"]
obj.update_task_results(
"authors",
[{
"name": "authors",
"results": new_dict_representation["authors"]
}]
)
obj.update_task_results(
"number_of_authors",
[{
"name": "number_of_authors",
"results": new_dict_representation["number_of_authors"]
}]
)
break
def upload_step(obj, eng):
"""Perform the upload step.
:param obj: BibWorkflowObject to process
:param eng: BibWorkflowEngine processing the object
"""
from invenio_records.api import Record
from invenio.legacy.bibsched.bibtask import task_low_level_submission
repository = obj.extra_data.get("repository", {})
sequence_id = random.randrange(1, 60000)
arguments = repository.get("arguments", {})
default_args = []
default_args.extend(['-I', str(sequence_id)])
if arguments.get('u_name', ""):
default_args.extend(['-N', arguments.get('u_name', "")])
if arguments.get('u_priority', 5):
default_args.extend(['-P', str(arguments.get('u_priority', 5))])
extract_path = os.path.join(
cfg.get('OAIHARVESTER_STORAGEDIR', cfg.get('CFG_TMPSHAREDDIR')),
str(eng.uuid)
)
if not os.path.exists(extract_path):
os.makedirs(extract_path)
filepath = extract_path + os.sep + str(obj.id)
if "f" in repository.get("postprocess", []):
# We have a filter.
file_uploads = [
("{0}.insert.xml".format(filepath), ["-i"]),
("{0}.append.xml".format(filepath), ["-a"]),
("{0}.correct.xml".format(filepath), ["-c"]),
("{0}.holdingpen.xml".format(filepath), ["-o"]),
]
else:
# We do not, so we get the data from the record
marcxml_value = Record(obj.data.dumps()).legacy_export_as_marc()
file_fd = open(filepath, 'w')
file_fd.write(marcxml_value)
file_fd.close()
file_uploads = [(filepath, ["-r", "-i"])]
task_id = None
for location, mode in file_uploads:
if os.path.exists(location):
try:
args = mode + [filepath] + default_args
task_id = task_low_level_submission("bibupload",
"oaiharvest",
*tuple(args))
except Exception as msg:
eng.log.error(
"An exception during submitting oaiharvest task occured : %s " % (
str(msg)))
if task_id is None:
eng.log.error("an error occurred while uploading %s from %s" %
(filepath, repository.get("name", "Unknown")))
else:
eng.log.info(
"material harvested from source %s was successfully uploaded" %
(repository.get("name", "Unknown"),))
eng.log.info("end of upload")
def filter_step(obj, eng):
"""Run an external python script."""
from invenio_records.api import Record
from invenio.utils.shell import run_shell_command
repository = obj.extra_data.get("repository", {})
arguments = repository.get("arguments", {})
script_name = arguments.get("f_filter-file")
if script_name:
marcxml_value = Record(obj.data.dumps()).legacy_export_as_marc()
extract_path = os.path.join(
cfg.get('OAIHARVESTER_STORAGEDIR', cfg.get('CFG_TMPSHAREDDIR')),
str(eng.uuid)
)
if not os.path.exists(extract_path):
os.makedirs(extract_path)
# Now we launch BibUpload tasks for the final MARCXML files
marcxmlfile = extract_path + os.sep + str(obj.id)
file_fd = open(marcxmlfile, 'w')
file_fd.write(marcxml_value)
file_fd.close()
exitcode, cmd_stdout, cmd_stderr = run_shell_command(
cmd="%s '%s'",
args=(str(script_name),
str(marcxmlfile)))
if exitcode != 0 or cmd_stderr != "":
obj.log.error(
"Error while running filtering script on %s\nError:%s"
% (marcxmlfile, cmd_stderr)
)
else:
obj.log.info(cmd_stdout)
else:
obj.log.error("No script file found!")
def check_record(obj, eng):
"""Check if there is a valid record in the data.
If not, skip this object.
"""
try:
assert obj.data
assert obj.data != '<?xml version="1.0"?>\n<collection/>\n'
except AssertionError as e:
obj.log.info("No data found in record. Skipping: {0}".format(str(e)))
eng.continueNextToken()
| dset0x/invenio | invenio/modules/oaiharvester/tasks/postprocess.py | Python | gpl-2.0 | 18,387 |
"""
Virtstrap
=========
A bootstrapping mechanism for virtualenv, buildout, and shell scripts.
"""
from setuptools import setup, find_packages
import sys
# Installation requirements
REQUIREMENTS = [
    'virtualenv',
    'pyyaml',
]

# argparse joined the standard library in Python 2.7; older interpreters
# need the PyPI backport.
if sys.version_info < (2, 7):
    REQUIREMENTS.append('argparse>=1.2.1')

setup(
    name="virtstrap",
    version="0.3.0-alpha",
    license="MIT",
    author="Reuven V. Gonzales",
    url="https://github.com/ravenac95/virtstrap",
    author_email="[email protected]",
    description="A bootstrapping mechanism for virtualenv+pip and shell scripts",
    # Ship every package except the test suite.
    packages=find_packages(exclude=['tests', 'tests.*']),
    include_package_data=True,
    zip_safe=False,
    platforms='*nix',
    install_requires=REQUIREMENTS,
    entry_points={
        # Installs the `vstrap` command-line tool.
        'console_scripts': [
            'vstrap = virtstrap.runner:main',
        ],
    },
    classifiers = [
        'License :: OSI Approved :: MIT License',
        'Intended Audience :: Developers',
        'Programming Language :: Python',
        'Operating System :: POSIX',
        'Topic :: Software Development :: Build Tools',
    ],
)
| ravenac95/testvirtstrapdocs | setup.py | Python | mit | 1,125 |
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Short label used in build-target labels for each supported ARM core
# (see Target.get_labels); note "Cortex-M4" and "Cortex-M4F" both
# collapse to the "M4" label.
CORE_LABELS = {
    "ARM7TDMI-S": "ARM7",
    "Cortex-M0" : "M0",
    "Cortex-M0+": "M0P",
    "Cortex-M3" : "M3",
    "Cortex-M4" : "M4",
    "Cortex-M4F" : "M4"
}
import os
import shutil
class Target:
    """Base description of a build target: core, labels, toolchains, macros."""

    def __init__(self):
        # ARM core name; must be a key of CORE_LABELS.
        self.core = None
        # True when the interface chip exposes an emulated (virtual) disk.
        self.is_disk_virtual = False
        # Toolchains supported by the mbed SDK for this target.
        self.supported_toolchains = None
        # Additional target-specific labels.
        self.extra_labels = []
        # Preprocessor macros (-D) to define for this target.
        self.macros = []
        # Toolchain the online compiler uses by default.
        self.default_toolchain = "ARM"
        # A target is identified by its class name.
        self.name = self.__class__.__name__

    def program_cycle_s(self):
        """Seconds to wait after flashing before the program starts running."""
        if self.is_disk_virtual:
            return 4
        return 1.5

    def get_labels(self):
        """Return every label for this target: name, core label, extras."""
        labels = [self.name, CORE_LABELS[self.core]]
        labels.extend(self.extra_labels)
        return labels

    def init_hooks(self, hook, toolchain_name):
        """Hook-installation point; the base target installs nothing."""
        pass
# --- NXP LPC17xx/23xx/11Uxx/8xx and Freescale Kinetis targets ---------------
class LPC2368(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "ARM7TDMI-S"
        self.extra_labels = ['NXP', 'LPC23XX']
        self.supported_toolchains = ["ARM", "GCC_ARM", "GCC_CR"]
class LPC1768(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M3"
        self.extra_labels = ['NXP', 'LPC176X', 'MBED_LPC1768']
        self.supported_toolchains = ["ARM", "uARM", "GCC_ARM", "GCC_CS", "GCC_CR", "IAR"]
class LPC11U24(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M0"
        self.extra_labels = ['NXP', 'LPC11UXX', 'LPC11U24_401']
        self.supported_toolchains = ["ARM", "uARM", "GCC_ARM"]
        self.default_toolchain = "uARM"
class LPC11U24_301(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M0"
        self.extra_labels = ['NXP', 'LPC11UXX']
        self.supported_toolchains = ["ARM", "uARM", "GCC_ARM"]
class KL05Z(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M0+"
        self.extra_labels = ['Freescale', 'KLXX']
        self.supported_toolchains = ["ARM", "uARM", "GCC_ARM"]
        self.default_toolchain = "uARM"
        # NOTE(review): supported_form_factors is not initialised in Target;
        # only some targets define it -- TODO confirm consumers guard for that.
        self.supported_form_factors = ["ARDUINO"]
        self.is_disk_virtual = True
class KL25Z(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M0+"
        self.extra_labels = ['Freescale', 'KLXX']
        self.supported_toolchains = ["ARM", "GCC_CW_EWL", "GCC_CW_NEWLIB", "GCC_ARM"]
        self.supported_form_factors = ["ARDUINO"]
        self.is_disk_virtual = True
class KL46Z(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M0+"
        self.extra_labels = ['Freescale', 'KLXX']
        self.supported_toolchains = ["GCC_ARM", "ARM"]
        self.supported_form_factors = ["ARDUINO"]
        self.is_disk_virtual = True
class K20D50M(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M4"
        self.extra_labels = ['Freescale']
        self.supported_toolchains = ["GCC_ARM", "ARM"]
        self.is_disk_virtual = True
class K64F(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M4F"
        self.extra_labels = ['Freescale', 'KPSDK_MCUS', 'KPSDK_CODE']
        # Macros consumed by the Freescale KPSDK sources.
        self.macros = ["CPU_MK64FN1M0VMD12", "FSL_RTOS_MBED"]
        self.supported_toolchains = ["ARM", "GCC_ARM"]
        self.supported_form_factors = ["ARDUINO"]
        self.is_disk_virtual = True
class LPC812(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M0+"
        self.extra_labels = ['NXP', 'LPC81X']
        self.supported_toolchains = ["uARM"]
        self.default_toolchain = "uARM"
        self.supported_form_factors = ["ARDUINO"]
        self.is_disk_virtual = True
class LPC810(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M0+"
        self.extra_labels = ['NXP', 'LPC81X']
        self.supported_toolchains = ["uARM"]
        self.default_toolchain = "uARM"
        self.is_disk_virtual = True
class LPC4088(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M4F"
        self.extra_labels = ['NXP', 'LPC408X']
        self.supported_toolchains = ["ARM", "GCC_CR", "GCC_ARM"]
        self.is_disk_virtual = True
    def init_hooks(self, hook, toolchain_name):
        # The post-build hook is only needed for the ARM toolchains, which
        # emit the image as a directory of load regions (see binary_hook).
        if toolchain_name in ['ARM_STD', 'ARM_MICRO']:
            hook.hook_add_binary("post", self.binary_hook)
    @staticmethod
    def binary_hook(t_self, resources, elf, binf):
        """Merge the two load regions (internal flash + SPIFI) into one binary."""
        if not os.path.isdir(binf):
            # Regular binary file, nothing to do
            return
        outbin = open(binf + ".temp", "wb")
        partf = open(os.path.join(binf, "ER_IROM1"), "rb")
        # Pad the first part (internal flash) with 0xFF to 512k
        data = partf.read()
        outbin.write(data)
        # NOTE(review): writing a str to a "wb" file assumes Python 2; under
        # Python 3 this would need bytes (b'\xFF') -- TODO confirm.
        outbin.write('\xFF' * (512*1024 - len(data)))
        partf.close()
        # Read and append the second part (external flash) in chunks of fixed size
        chunksize = 128 * 1024
        partf = open(os.path.join(binf, "ER_IROM2"), "rb")
        while True:
            data = partf.read(chunksize)
            outbin.write(data)
            if len(data) < chunksize:
                break
        partf.close()
        outbin.close()
        # Remove the directory with the binary parts and rename the temporary
        # file to 'binf'
        shutil.rmtree(binf, True)
        os.rename(binf + '.temp', binf)
        t_self.debug("Generated custom binary file (internal flash + SPIFI)")
class LPC4330_M4(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M4F"
        self.extra_labels = ['NXP', 'LPC43XX']
        self.supported_toolchains = ["ARM", "GCC_CR", "IAR", "GCC_ARM"]
class LPC4330_M0(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M0"
        self.extra_labels = ['NXP', 'LPC43XX']
        self.supported_toolchains = ["ARM", "GCC_CR", "IAR"]
class LPC1800(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M3"
        # NOTE(review): labelled 'LPC43XX' although this is the LPC18xx --
        # presumably the families share code; confirm this is intentional.
        self.extra_labels = ['NXP', 'LPC43XX']
        self.supported_toolchains = ["ARM", "GCC_CR", "IAR"]
class STM32F407(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M4F"
        self.extra_labels = ['STM', 'STM32F4XX']
        self.supported_toolchains = ["ARM", "GCC_ARM"]
# --- ST Nucleo boards: the online (uARM) toolchain is the default -----------
class NUCLEO_F030R8(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M0"
        self.extra_labels = ['STM', 'STM32F0', 'STM32F030R8']
        self.supported_toolchains = ["ARM", "uARM"]
        self.default_toolchain = "uARM"
        self.supported_form_factors = ["ARDUINO", "MORPHO"]
class NUCLEO_F072RB(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M0"
        self.extra_labels = ['STM', 'STM32F0', 'STM32F072RB']
        self.supported_toolchains = ["ARM", "uARM"]
        self.default_toolchain = "uARM"
        self.supported_form_factors = ["ARDUINO", "MORPHO"]
class NUCLEO_F103RB(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M3"
        self.extra_labels = ['STM', 'STM32F1', 'STM32F103RB']
        self.supported_toolchains = ["ARM", "uARM", "GCC_ARM"]
        self.default_toolchain = "uARM"
        self.supported_form_factors = ["ARDUINO", "MORPHO"]
class NUCLEO_F302R8(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M4F"
        self.extra_labels = ['STM', 'STM32F3', 'STM32F302R8']
        self.supported_toolchains = ["ARM", "uARM"]
        self.default_toolchain = "uARM"
        self.supported_form_factors = ["ARDUINO", "MORPHO"]
class NUCLEO_F334R8(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M4F"
        self.extra_labels = ['STM', 'STM32F3', 'STM32F334R8']
        self.supported_toolchains = ["ARM", "uARM"]
        self.default_toolchain = "uARM"
        self.supported_form_factors = ["ARDUINO", "MORPHO"]
class NUCLEO_F401RE(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M4F"
        self.extra_labels = ['STM', 'STM32F4', 'STM32F401RE']
        self.supported_toolchains = ["ARM", "uARM", "GCC_ARM"]
        self.default_toolchain = "uARM"
        self.supported_form_factors = ["ARDUINO", "MORPHO"]
class NUCLEO_F411RE(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M4"
        self.extra_labels = ['STM', 'STM32F4', 'STM32F411RE']
        self.supported_toolchains = ["ARM", "uARM"]
        self.default_toolchain = "uARM"
        self.supported_form_factors = ["ARDUINO", "MORPHO"]
class NUCLEO_L053R8(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M0+"
        self.extra_labels = ['STM', 'STM32L0', 'STM32L053R8']
        self.supported_toolchains = ["ARM", "uARM"]
        self.default_toolchain = "uARM"
        self.supported_form_factors = ["ARDUINO", "MORPHO"]
class NUCLEO_L152RE(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M3"
        self.extra_labels = ['STM', 'STM32L1', 'STM32L152RE']
        self.supported_toolchains = ["ARM", "uARM"]
        self.default_toolchain = "uARM"
        self.supported_form_factors = ["ARDUINO", "MORPHO"]
class STM32F3XX(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M4"
        self.extra_labels = ['STM', 'STM32F3XX']
        self.supported_toolchains = ["ARM", "uARM", "GCC_ARM"]
        self.default_toolchain = "uARM"
class LPC1347(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M3"
        self.extra_labels = ['NXP', 'LPC13XX']
        self.supported_toolchains = ["ARM", "GCC_ARM"]
class LPC1114(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M0"
        self.extra_labels = ['NXP', 'LPC11XX_11CXX', 'LPC11XX']
        self.supported_toolchains = ["ARM", "uARM", "GCC_ARM", "GCC_CR"]
        self.default_toolchain = "uARM"
class LPC11C24(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M0"
        self.extra_labels = ['NXP', 'LPC11XX_11CXX', 'LPC11CXX']
        self.supported_toolchains = ["ARM", "uARM", "GCC_ARM"]
class LPC11U35_401(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M0"
        self.extra_labels = ['NXP', 'LPC11UXX']
        self.supported_toolchains = ["ARM", "uARM", "GCC_ARM", "GCC_CR"]
        self.default_toolchain = "uARM"
class LPC11U35_501(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M0"
        self.extra_labels = ['NXP', 'LPC11UXX']
        self.supported_toolchains = ["ARM", "uARM", "GCC_ARM", "GCC_CR"]
        self.default_toolchain = "uARM"
class LPC11U37_501(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M0"
        self.extra_labels = ['NXP', 'LPC11UXX']
        self.supported_toolchains = ["GCC_ARM", "GCC_CR"]
        # NOTE(review): default "uARM" is not in supported_toolchains above --
        # TODO confirm whether uARM support was dropped deliberately.
        self.default_toolchain = "uARM"
class UBLOX_C027(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M3"
        self.extra_labels = ['NXP', 'LPC176X']
        self.supported_toolchains = ["ARM", "uARM", "GCC_ARM", "GCC_CS", "GCC_CR", "IAR"]
        # Builds with the LPC1768 target code.
        self.macros = ['TARGET_LPC1768']
        self.supported_form_factors = ["ARDUINO"]
class NRF51822(Target):
    # Softdevice image that must be present among the project's hex files.
    EXPECTED_SOFTDEVICE = 's110_nrf51822_7.0.0_softdevice.hex'
    OUTPUT_EXT = '.hex'
    # Application code is linked after the softdevice region.
    APPCODE_OFFSET = 0x16000
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M0"
        self.extra_labels = ["NORDIC", "NRF51822_MKIT"]
        self.supported_toolchains = ["ARM", "GCC_ARM"]
        self.is_disk_virtual = True
    def program_cycle_s(self):
        # Flashing through the virtual disk is slow on this board.
        return 6
    def init_hooks(self, hook, toolchain_name):
        if toolchain_name in ['ARM_STD', 'ARM_MICRO']:
            hook.hook_add_binary("post", self.binary_hook)
    @staticmethod
    def binary_hook(t_self, resources, elf, binf):
        """Merge the application binary with the expected softdevice hex."""
        # for/else idiom: "hexf" deliberately leaks out of the loop for use
        # below; the else branch runs only when no softdevice was found.
        for hexf in resources.hex_files:
            if hexf.find(NRF51822.EXPECTED_SOFTDEVICE) != -1:
                break
        else:
            t_self.debug("Hex file not found. Aborting.")
            return
        # Merge user code with softdevice
        from intelhex import IntelHex
        binh = IntelHex()
        binh.loadbin(binf, offset=NRF51822.APPCODE_OFFSET)
        sdh = IntelHex(hexf)
        sdh.merge(binh)
        with open(binf.replace(".bin", ".hex"), "w") as f:
            sdh.tofile(f, format='hex')
class LPC1549(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M3"
        self.extra_labels = ['NXP', 'LPC15XX']
        self.supported_toolchains = ["uARM", "GCC_CR"]
        self.default_toolchain = "uARM"
        self.supported_form_factors = ["ARDUINO"]
class LPC11U68(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M0+"
        self.extra_labels = ['NXP', 'LPC11U6X']
        self.supported_toolchains = ["uARM", "GCC_CR", "GCC_ARM"]
        self.default_toolchain = "uARM"
        self.supported_form_factors = ["ARDUINO"]
class DISCO_F100RB(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M3"
        self.extra_labels = ['STM', 'STM32F1', 'STM32F100RB']
        self.supported_toolchains = ["GCC_ARM"]
        # NOTE(review): for the DISCO_* boards the default toolchain "uARM"
        # is not listed in supported_toolchains -- TODO confirm.
        self.default_toolchain = "uARM"
class DISCO_F051R8(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M0"
        self.extra_labels = ['STM', 'STM32F0', 'STM32F051', 'STM32F051R8']
        self.supported_toolchains = ["GCC_ARM"]
        self.default_toolchain = "uARM"
class DISCO_F407VG(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M4F"
        self.extra_labels = ['STM', 'STM32F4', 'STM32F407', 'STM32F407VG']
        self.supported_toolchains = ["GCC_ARM"]
        self.default_toolchain = "uARM"
class DISCO_F303VC(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M4F"
        self.extra_labels = ['STM', 'STM32F3', 'STM32F303', 'STM32F303VC']
        self.supported_toolchains = ["GCC_ARM"]
        self.default_toolchain = "uARM"
class XADOW_M0(LPC11U35_501):
    def __init__(self):
        LPC11U35_501.__init__(self)
        self.extra_labels = ['NXP', 'LPC11UXX', 'LPC11U35_501']
        # Builds with the LPC11U35_501 target code.
        self.macros = ['TARGET_LPC11U35_501']
class ARCH_BLE(NRF51822):
    def __init__(self):
        NRF51822.__init__(self)
        self.extra_labels = ['NORDIC', 'NRF51822']
        self.macros = ['TARGET_NRF51822']
class ARCH_PRO(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M3"
        self.extra_labels = ['NXP', 'LPC176X']
        self.supported_toolchains = ["ARM", "uARM", "GCC_ARM", "GCC_CS", "GCC_CR", "IAR"]
        # Builds with the LPC1768 target code.
        self.macros = ['TARGET_LPC1768']
        self.supported_form_factors = ["ARDUINO"]
class LPCCAPPUCCINO(LPC11U37_501):
    # Identical to LPC11U37_501; only the target name differs.
    def __init__(self):
        LPC11U37_501.__init__(self)
class HRM1017(NRF51822):
    def __init__(self):
        NRF51822.__init__(self)
        self.extra_labels = ['NORDIC', 'NRF51822']
        self.macros = ['TARGET_NRF51822']
class ARM_MPS2(Target):
    def __init__(self):
        Target.__init__(self)
        self.core = "Cortex-M4F"
        self.macros = ['CMSDK_CM4']
        self.supported_toolchains = ["ARM", "GCC_ARM"]
        self.default_toolchain = "ARM"
# Get a single instance for each target
TARGETS = [
    LPC2368(),
    LPC1768(),
    LPC11U24(),
    LPC11U24_301(),
    KL05Z(),
    KL25Z(),
    KL46Z(),
    K20D50M(),
    K64F(),
    LPC812(),
    LPC810(),
    LPC4088(),
    LPC4330_M4(),
    STM32F3XX(),
    STM32F407(),
    NUCLEO_F030R8(),
    NUCLEO_F072RB(),
    NUCLEO_F103RB(),
    NUCLEO_F302R8(),
    NUCLEO_F334R8(),
    NUCLEO_F401RE(),
    NUCLEO_F411RE(),
    NUCLEO_L053R8(),
    NUCLEO_L152RE(),
    LPC1347(),
    LPC1114(),
    LPC11C24(),
    LPC11U35_401(),
    LPC11U35_501(),
    NRF51822(),
    UBLOX_C027(),
    LPC1549(),
    LPC11U68(),
    DISCO_F051R8(),
    DISCO_F100RB(),
    DISCO_F303VC(),
    DISCO_F407VG(),
    XADOW_M0(),
    ARCH_BLE(),
    ARCH_PRO(),
    LPCCAPPUCCINO(),
    HRM1017(),
    ARM_MPS2(),
]
# NOTE(review): LPC4330_M0 and LPC1800 are defined above but not listed
# here, so they are absent from TARGET_MAP -- presumably intentional; confirm.
# Map each target name to its unique instance
TARGET_MAP = {}
for t in TARGETS:
    TARGET_MAP[t.name] = t
TARGET_NAMES = TARGET_MAP.keys()
# Some targets with different name have the same exporters
EXPORT_MAP = {}
| NordicSemiconductor/mbed | workspace_tools/targets.py | Python | apache-2.0 | 17,540 |
# Single-source package version string (PEP 396).
__version__ = '0.1.43'
| hexgis/authldap | authldap/__init__.py | Python | agpl-3.0 | 23 |
# Copyright 2011 Jamie Norrish ([email protected])
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing tests against the `Typed` interface.
Most if not all of these tests are ported from the public domain tests
that come with the TMAPI 2.0 distribution (http://www.tmapi.org/2.0/).
"""
from tmapi.exceptions import ModelConstraintException
from tmapi_test_case import TMAPITestCase
class TypedTest(TMAPITestCase):
    """Tests of the `Typed` interface shared by several TMAPI constructs."""

    def _test_typed(self, typed):
        """Run the common `Typed` checks against `typed`."""
        original_type = typed.get_type()
        self.assertNotEqual(None, original_type)
        # Swap in a fresh type, then restore the original one.
        replacement_type = self.create_topic()
        typed.set_type(replacement_type)
        self.assertEqual(replacement_type, typed.get_type(),
                         'Expected another type')
        typed.set_type(original_type)
        self.assertEqual(original_type, typed.get_type(),
                         'Expected the previous type')
        # Setting the type to None must be rejected.
        self.assertRaises(ModelConstraintException, typed.set_type, None)

    def test_association(self):
        """Typed tests against an association."""
        self._test_typed(self.create_association())

    def test_role(self):
        """Typed tests against a role."""
        self._test_typed(self.create_role())

    def test_occurrence(self):
        """Typed tests against an occurrence."""
        self._test_typed(self.create_occurrence())

    def test_name(self):
        """Typed tests against a name."""
        self._test_typed(self.create_name())
#! /usr/bin/env python
import rospy, math
import numpy as np
import sys, termios, tty, select, os
from geometry_msgs.msg import Twist
from std_msgs.msg import Bool
class KeyTeleop(object):
    """Keyboard teleoperation node.

    Reads single keystrokes from a raw-mode terminal and publishes velocity
    commands on /cmd_vel (geometry_msgs/Twist), plus a /using_telop flag
    broadcast at a fixed 10 Hz by a timer.
    """

    # Key -> [linear, angular] direction multipliers.
    cmd_bindings = {'q':np.array([1,1]),
                    'w':np.array([1,0]),
                    'e':np.array([1,-1]),
                    'a':np.array([0,1]),
                    'd':np.array([0,-1]),
                    'z':np.array([-1,-1]),
                    'x':np.array([-1,0]),
                    'c':np.array([-1,1])
                    }
    # Key -> [linear, angular] speed-increment signs.
    set_bindings = { 't':np.array([1,1]),
                     'b':np.array([-1,-1]),
                     'y':np.array([1,0]),
                     'n':np.array([-1,0]),
                     'u':np.array([0,1]),
                     'm':np.array([0,-1])
                     }

    def init(self):
        """Initialise terminal state, speeds, publishers and the status timer."""
        # Save terminal settings so fini() can restore them.
        self.settings = termios.tcgetattr(sys.stdin)
        # Initial values
        self.inc_ratio = 0.1
        self.speed = np.array([0.5, 1.0])
        self.command = np.array([0, 0])
        self.update_rate = 10  # Hz
        self.alive = True
        self.using_teleop = False
        # Setup publishers. queue_size is passed explicitly: omitting it is
        # deprecated in rospy and falls back to synchronous publishing.
        self.pub_twist = rospy.Publisher('/cmd_vel', Twist, queue_size=1)
        self.pub_using_telop = rospy.Publisher('/using_telop', Bool, queue_size=1)
        # Publish the teleop status at a fixed 10 Hz (rate hard-coded on purpose).
        self.timer = rospy.Timer(rospy.Duration(1.0/10.0), self.using_teleop_timer)

    def fini(self):
        """Restore the terminal and stop the status timer."""
        termios.tcsetattr(sys.stdin, termios.TCSADRAIN, self.settings)
        # Stop the timer (it may already be shut down by process_key('g');
        # calling shutdown() again is harmless).
        if hasattr(self, 'timer'):
            self.timer.shutdown()

    def using_teleop_timer(self, event):
        """Timer callback: broadcast whether teleop currently owns the robot."""
        if not rospy.is_shutdown():
            self.pub_using_telop.publish(Bool(self.using_teleop))

    def run(self):
        """Main loop: read keys and dispatch them until ROS shuts down."""
        try:
            self.init()
            self.print_usage()
            r = rospy.Rate(self.update_rate)  # Hz
            while not rospy.is_shutdown():
                ch = self.get_key()
                self.process_key(ch)
                r.sleep()
        except rospy.exceptions.ROSInterruptException:
            pass
        finally:
            self.fini()

    def print_usage(self):
        """Print the help banner and the current speed settings."""
        msg = """
Keyboard Teleop that Publish to /cmd_vel (geometry_msgs/Twist)
Copyright (C) 2013
Released under BSD License
--------------------------------------------------
H: Print this menu
Moving around:
Q W E
A S D
Z X Z
T/B : increase/decrease max speeds 10%
Y/N : increase/decrease only linear speed 10%
U/M : increase/decrease only angular speed 10%
anything else : stop
G : Quit
--------------------------------------------------
"""
        self.loginfo(msg)
        self.show_status()

    # Print text while the terminal is in raw mode: temporarily restore the
    # saved (cooked) settings, print, then switch back to raw.
    def loginfo(self, msg):
        termios.tcsetattr(sys.stdin, termios.TCSADRAIN, self.settings)
        print(msg)
        tty.setraw(sys.stdin.fileno())

    # Used to print teleop status
    def show_status(self):
        msg = 'Status:\tlinear %.2f\tangular %.2f' % (self.speed[0],self.speed[1])
        self.loginfo(msg)

    # For everything that can't be a binding, use if/elif instead
    def process_key(self, ch):
        """Dispatch a single (lower-cased) key press."""
        if ch == 'h':
            self.print_usage()
        elif ch in self.cmd_bindings.keys():
            self.command = self.cmd_bindings[ch]
            self.update()
            self.using_teleop = True
        elif ch in self.set_bindings.keys():
            # Scale the speed limits up/down by inc_ratio per axis.
            self.speed = self.speed * (1 + self.set_bindings[ch]*self.inc_ratio)
            self.update()
            self.show_status()
        elif ch == 'g':
            self.loginfo('Quitting')
            # Stop the robot
            twist = Twist()
            self.pub_twist.publish(twist)
            self.timer.shutdown()
            rospy.sleep(0.5)
            rospy.signal_shutdown('Shutdown')
        else:
            # Unknown key: stop the robot and release teleop ownership.
            self.using_teleop = False
            self.command = np.array([0, 0])
            self.update()

    def update(self):
        """Publish the current command scaled by the speed limits."""
        if rospy.is_shutdown():
            return
        twist = Twist()
        cmd = self.speed*self.command
        twist.linear.x = cmd[0]
        twist.angular.z = cmd[1]
        self.pub_twist.publish(twist)

    # Read a single key from the terminal (blocking), lower-cased.
    def get_key(self):
        tty.setraw(sys.stdin.fileno())
        select.select([sys.stdin], [], [], 0)
        key = sys.stdin.read(1)
        return key.lower()
if __name__ == '__main__':
    # Entry point: create the ROS node and hand control to the teleop loop.
    rospy.init_node('keyboard_teleop')
    teleop = KeyTeleop()
    teleop.run()
| jajberni/pi2go_ros | pi2go_control/scripts/key_teleop.py | Python | mit | 4,261 |
from selenium import webdriver
from time import sleep
class Login():
    """Selenium page helpers: log the test user in and out."""

    def user_login(self, driver):
        """Fill in the credentials and submit the login form."""
        # Same call sequence as before: clear each field, then type into it.
        credentials = (("username", "yidishui"), ("password", "123456"))
        for field_name, text in credentials:
            driver.find_element_by_name(field_name).clear()
            driver.find_element_by_name(field_name).send_keys(text)
        driver.find_element_by_css_selector(".inputSub").click()

    def user_logot(self, driver):
        """Click the logout link, confirm the alert, and quit the browser."""
        driver.find_element_by_link_text("退出").click()
        sleep(2)
        driver.switch_to.alert.accept()
        sleep(2)
        driver.quit()
if __name__=='__main__':
    # Manual smoke test: drive a local instance through login and logout.
    driver = webdriver.Firefox()
    driver.get("http://localhost")
    driver.implicitly_wait(10) # implicit wait: up to 10 s for element lookups
    Login().user_login(driver)
    Login().user_logot(driver)
| 1065865483/0python_script | Five/Test_Module/Login_Class.py | Python | mit | 813 |
from django.test import RequestFactory
from mock import patch
from nose.tools import ok_, eq_
from mozillians.common.tests import TestCase
from mozillians.groups.models import Group
from mozillians.groups.tests import GroupFactory
from mozillians.groups.views import _list_groups
from mozillians.users.tests import UserFactory
@patch('mozillians.groups.views.settings.ITEMS_PER_PAGE', 1)
@patch('mozillians.groups.views.render')
class ListTests(TestCase):
    """Tests for the _list_groups helper: sorting and pagination.

    ITEMS_PER_PAGE is patched to 1 so two groups span two pages; render is
    mocked class-wide, so every test receives it as `render_mock`.
    """
    def setUp(self):
        # Two groups; the user is a member of group_2 only, so the groups
        # differ in member count (used by the sort tests).
        self.user = UserFactory.create()
        self.group_1 = GroupFactory.create()
        self.group_2 = GroupFactory.create()
        self.group_2.add_member(self.user.userprofile)
        self.query = Group.objects.filter(pk__in=[self.group_1.pk, self.group_2.pk])
        self.template = 'groups/index.html'
        self.request = RequestFactory()
        self.request.GET = {}
        self.request.user = self.user
    def test_list_groups(self, render_mock):
        """Default call paginates the query at one group per page."""
        _list_groups(self.request, self.template, self.query)
        ok_(render_mock.called)
        request, template, data = render_mock.call_args[0]
        eq_(template, self.template)
        eq_(data['groups'].paginator.count, 2)
        eq_(data['groups'].paginator.num_pages, 2)
        eq_(data['groups'].number, 1)
        eq_(data['groups'].object_list[0], self.group_1)
    def test_sort_by_name(self, render_mock):
        self.request.GET = {'sort': 'name'}
        _list_groups(self.request, self.template, self.query)
        ok_(render_mock.called)
        request, template, data = render_mock.call_args[0]
        eq_(data['groups'].object_list[0], self.group_1)
    def test_sort_by_most_members(self, render_mock):
        self.request.GET = {'sort': '-member_count'}
        _list_groups(self.request, self.template, self.query)
        ok_(render_mock.called)
        request, template, data = render_mock.call_args[0]
        eq_(data['groups'].object_list[0], self.group_2)
    def test_sort_by_fewest_members(self, render_mock):
        self.request.GET = {'sort': 'member_count'}
        _list_groups(self.request, self.template, self.query)
        ok_(render_mock.called)
        request, template, data = render_mock.call_args[0]
        eq_(data['groups'].object_list[0], self.group_1)
    def test_invalid_sort(self, render_mock):
        """An unknown sort key falls back to the default ordering."""
        self.request.GET = {'sort': 'invalid'}
        _list_groups(self.request, self.template, self.query)
        ok_(render_mock.called)
        request, template, data = render_mock.call_args[0]
        eq_(data['groups'].object_list[0], self.group_1)
    def test_second_page(self, render_mock):
        self.request.GET = {'page': '2'}
        _list_groups(self.request, self.template, self.query)
        ok_(render_mock.called)
        request, template, data = render_mock.call_args[0]
        eq_(data['groups'].number, 2)
    def test_empty_page(self, render_mock):
        """A page number past the end returns the last page."""
        self.request.GET = {'page': '20000'}
        _list_groups(self.request, self.template, self.query)
        ok_(render_mock.called)
        request, template, data = render_mock.call_args[0]
        eq_(data['groups'].number, 2)
    def test_invalid_page(self, render_mock):
        """A non-numeric page number returns the first page."""
        self.request.GET = {'page': 'invalid'}
        _list_groups(self.request, self.template, self.query)
        ok_(render_mock.called)
        request, template, data = render_mock.call_args[0]
        eq_(data['groups'].number, 1)
| ChristineLaMuse/mozillians | mozillians/groups/tests/test_views/test_list.py | Python | bsd-3-clause | 3,419 |
#!/usr/bin/env python
'''
CADET_00001 is one of the challenge released by DARPA for the Cyber Grand Challenge:
https://github.com/CyberGrandChallenge/samples/tree/master/examples/CADET_00001
The binary can run in the DECREE VM (http://repo.cybergrandchallenge.com/boxes/)
CADET_00001.adapted (by Jacopo Corbetta) is the same program, modified to be runnable in an Intel x86 Linux machine.
The binary contains an easter egg and a stack buffer overflow.
'''
import angr
def main():
    """Find the CADET_00001 buffer overflow and easter egg (Python 2 script).

    Returns (crashing_input, solution1, stdout1, solution2, stdout2).
    """
    project= angr.Project("./CADET_00001")
    #let's find the buffer overflow (overwriting the return address)
    #overwriting the return pointer with user-controllable data will generate
    #an "unconstrained" state: the symbolic executor does not know how to proceed
    #since the instruction pointer can assume any value
    #by default angr discards unconstrained paths, so we need to specify the
    #save_unconstrained option
    print "finding the buffer overflow..."
    sm = project.factory.simgr(save_unconstrained=True)
    #symbolically execute the binary until an unconstrained path is reached
    while len(sm.unconstrained)==0:
        sm.step()
    unconstrained_state = sm.unconstrained[0]
    #stdin (fd 0) contents that drive execution to the unconstrained state
    crashing_input = unconstrained_state.posix.dumps(0)
    #cat crash_input.bin | ./CADET_00001.adapted will segfault
    unconstrained_state.posix.dump(0,"crash_input.bin")
    print "buffer overflow found!"
    print repr(crashing_input)
    #let's now find the easter egg (it takes about 2 minutes)
    #now we want angr to avoid "unfeasible" paths
    #by default, "lazy solving" is enabled, this means that angr will not
    #automatically discard unfeasible paths
    #to disable "lazy solving" we generate a blank path and we change its options,
    #then we specify this path as the initial path of the path group
    print "finding the easter egg..."
    sm = project.factory.simgr(project.factory.entry_state())
    #at this point we just ask angr to reach the basic block where the easter egg
    #text is printed
    sm.explore(find=0x804833E)
    found = sm.found[0]
    solution1 = found.posix.dumps(0)
    print "easter egg found!"
    print repr(solution1)
    found.posix.dump(0,"easteregg_input1.bin")
    #you can even check if the easter egg has been found by checking stdout
    stdout1 = found.posix.dumps(1)
    print repr(stdout1)
    #an alternative way to avoid unfeasible paths (paths that contain an unsatisfiable set
    #of constraints) is to "manually" step the path group execution and call prune()
    print "finding the easter egg (again)..."
    sm = project.factory.simgr()
    while True:
        sm.step()
        sm.prune() #we "manually" ask angr to remove unfeasible paths
        found_list = [active for active in sm.active if active.addr == 0x804833E]
        if len(found_list) > 0:
            break
    found = found_list[0]
    solution2 = found.posix.dumps(0)
    print "easter egg found!"
    print repr(solution2)
    found.posix.dump(0,"easteregg_input2.bin")
    #you can even check if the easter egg has been found by checking stdout
    stdout2 = found.posix.dumps(1)
    print repr(stdout2)
    return (crashing_input, solution1, stdout1, solution2, stdout2)
def test():
    # Regression check used by the angr examples test-suite.
    crashing_input, solution1, stdout1, solution2, stdout2 = main()
    assert len(crashing_input) >= 92 and solution1.startswith("^") and solution2.startswith("^") and \
        "EASTER EGG!" in stdout1 and "EASTER EGG!" in stdout2
if __name__ == '__main__':
    print(repr(main()))
| Ruide/angr-dev | angr-doc/examples/CADET_00001/solve.py | Python | bsd-2-clause | 3,534 |
# -*- coding: utf-8 -*-
"""
Liquid is a form management tool for web frameworks.
Copyright (C) 2014, Bence Faludi ([email protected])
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, <see http://www.gnu.org/licenses/>.
"""
class WidgetExpectedError( Exception ):
    """
    Raised when no Widget object is defined in the related Element.
    """
    # A docstring is a sufficient class body; the redundant `pass` is removed.
class TypeConversionError( Exception ):
    """
    Raised when the Element's setValue() function fails because an invalid
    value is arriving.
    """
    # A docstring is a sufficient class body; the redundant `pass` is removed.
class ValidationError(Exception):
    """
    Raised when a validation condition fails; carries the error message.
    """

    def __init__(self, msg):
        """
        @param msg: Error message.
        @type msg: unicode
        """
        # Stored on the instance so callers can render it later.
        self.msg = msg
class ValidationCollectionError( Exception ):
    """
    Raised when some FieldSet validation condition fails. The exception
    contains every child's error message as well.
    """
    # void
    def __init__( self, errors ):
        """
        Raised when some FieldSet validation condition fails. The exception
        contains every child's error message as well.

        @param errors: Collected error messages.
        @type errors: list<tuple<unicode,unicode>>
        """
        # Fix: the docstring previously documented the parameter as ``msg``
        # although the actual parameter and attribute are ``errors``.
        self.errors = errors
| bfaludi/liquid4m | liquid4m/exceptions.py | Python | gpl-3.0 | 1,990 |
# Generated by Django 3.2.8 on 2021-12-03 09:58
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
    # Adds User.language (default "de") and updates help/verbose text on the
    # organization fields. Generated migration: code left byte-identical.
    dependencies = [
        ("account", "0031_auto_20210517_1421"),
    ]
    operations = [
        migrations.AddField(
            model_name="user",
            name="language",
            field=models.CharField(
                blank=False, choices=settings.LANGUAGES, default="de", max_length=10
            ),
        ),
        migrations.AlterField(
            model_name="user",
            name="organization",
            field=models.CharField(
                blank=True,
                help_text="Optional. Affiliation will be shown next to your name",
                max_length=255,
                verbose_name="Organization",
            ),
        ),
        migrations.AlterField(
            model_name="user",
            name="organization_url",
            field=models.URLField(
                blank=True, max_length=255, verbose_name="Organization Website"
            ),
        ),
    ]
| fin/froide | froide/account/migrations/0032_auto_20211203_1058.py | Python | mit | 1,087 |
import os
import arrow
import tarfile
import xml.etree.ElementTree as ET
from dateutil import tz
from common import Event
class MoodleActivity(Event):
"""Describes an XML Moodle event with key based access"""
event_keys = [
'timeopen',
'timeclose'
]
# for preview
event_pretty_names = [
'opens',
'closes'
]
def __init__(self, path):
self.modified = False
self.path = path
self.tree = ET.parse(path)
self.activity = self.tree.getroot()
if len(self.activity) != 1:
raise Exception('An activity can only have one event.')
self.event = self.activity[0]
def __getitem__(self, k):
if k == 'id':
return self.event.attrib[k]
if k == 'moduleid':
return int(self.activity.attrib[k])
return self.event.find(k).text
def __setitem__(self, k, v):
if k == 'id' or k == 'moduleid':
raise Exception('Not allowed')
self.event.find(k).text = v
self.modified = True
def is_activity(self=None):
return True
def set_start_datetime(self, datetime):
self._set_date_at_index(datetime, 0)
def set_end_datetime(self, datetime):
self._set_date_at_index(datetime, 1)
def get_start_datetime(self):
return self._get_arrow_at_index(0).datetime
def get_start_timestamp(self):
return self._get_arrow_at_index(0).timestamp
def get_end_datetime(self):
return self._get_arrow_at_index(1).datetime
def get_end_timestamp(self):
return self._get_arrow_at_index(1).timestamp
def get_timestamp_at_index(self, index):
return self._get_arrow_at_index(index).timestamp
def get_title(self):
return self.__getitem__('name')
def write(self):
if not self.modified:
return
self.tree.write(self.path, short_empty_elements=False, encoding='UTF-8',
xml_declaration=True)
self._write_calendar()
def is_visible(self):
module_xml_path = (os.path.join(self.global_path, 'module.xml'))
module_xml = ET.parse(module_xml_path).getroot()
return int(module_xml.find('visible').text) == 1
def _write_calendar(self):
moodle_cal_path = os.path.join(self.global_path, 'calendar.xml')
cal_tree = ET.parse(moodle_cal_path)
events = cal_tree.getroot()
if len(events) > 0:
events[0].find('timestart').text = str(self.get_start_timestamp())
events[0].find('timeduration').text = 0
if len(events) > 1:
events[0].find('timeduration').text = str(
self.get_end_timestamp() - self.get_start_timestamp())
events[1].find('timeduration').text = 0
events[1].find('timestart').text = str(self.get_end_timestamp())
cal_tree.write(moodle_cal_path, short_empty_elements=False,
encoding='UTF-8', xml_declaration=True)
def _set_date_at_index(self, datetime, index):
k = self.event_keys[index]
timestamp = str(arrow.get(datetime).to('utc').timestamp)
self.__setitem__(k, timestamp)
def _get_datetime_at_index(self, index):
return self._get_arrow_at_index(index).datetime
def _get_start_arrow(self):
"""Returns end as arrow object"""
return self._get_arrow_at_index(0)
    def _get_end_arrow(self):
        """Returns end (close event) as arrow object"""
        return self._get_arrow_at_index(1)
    def _get_arrow_at_index(self, index):
        """Gets the arrow object representation of the start or close event.

        NOTE(review): the stored value is an epoch string, but it is parsed
        with a Montreal tzinfo -- confirm arrow treats the epoch as absolute
        time and only localizes the display.
        """
        k = self.event_keys[index]
        epoch = self.event.find(k).text
        return arrow.get(epoch, tzinfo=tz.gettz('America/Montreal'))
class MoodleQuiz(MoodleActivity):
    """Describes an XML Moodle quiz with key based access"""
    key = 'MQ'
    name = 'Quiz'
    def __init__(self, path):
        # `path` is the activity's backup directory; its dates live in quiz.xml.
        self.global_path = path
        super().__init__(os.path.join(path, 'quiz.xml'))
class MoodleChoice(MoodleActivity):
    """Describes an XML Moodle choice with key based access"""
    key = 'MC'
    name = 'Choice'
    def __init__(self, path):
        # `path` is the activity's backup directory; its dates live in choice.xml.
        self.global_path = path
        super().__init__(os.path.join(path, 'choice.xml'))
class MoodleFeedback(MoodleActivity):
    """Describes an XML Moodle feedback with key based access"""
    key = 'MF'
    name = 'Feedback'
    def __init__(self, path):
        # `path` is the activity's backup directory; its dates live in feedback.xml.
        self.global_path = path
        super().__init__(os.path.join(path, 'feedback.xml'))
class MoodleLesson(MoodleActivity):
    """Describes an XML Moodle lesson with key based access"""
    # Lesson XML stores its two dates under these element names.
    event_keys = [
        'available',
        'deadline'
    ]
    # for preview
    event_pretty_names = [
        'opens',
        'closes'
    ]
    key = 'ML'
    name = 'Lesson'
    def __init__(self, path):
        self.global_path = path
        super().__init__(os.path.join(path, 'lesson.xml'))
    def get_pretty_name(self=None):
        """Human-readable name; callable with or without an instance."""
        return 'Lesson'
    @staticmethod
    def get_key():
        """Short interpolation key.

        FIX: declared static so it also works when called on an instance;
        the original bare function raised TypeError for `instance.get_key()`.
        """
        return 'L'
class MoodleHomework(MoodleActivity):
    """Describes an XML Moodle assignment (homework) with key based access"""
    maximum_dates_count = 3
    # Assignment XML stores its three dates under these element names.
    event_keys = [
        'allowsubmissionsfromdate',
        'duedate',
        'cutoffdate',
    ]
    # for preview
    event_pretty_names = [
        'opens',
        'is due',
        'closes'
    ]
    key = 'MH'
    name = 'Homework'
    def __init__(self, path):
        self.global_path = path
        super().__init__(os.path.join(path, 'assign.xml'))
    def _write_calendar(self):
        """Sync the homework due date into its single calendar.xml event."""
        moodle_cal_path = os.path.join(self.global_path, 'calendar.xml')
        cal_tree = ET.parse(moodle_cal_path)
        events = cal_tree.getroot()
        if len(events) != 1:
            raise Exception('Unimplemented')
        events[0].find('timestart').text = str(self.get_end_timestamp())
        # FIX: Element.text must be a string -- ElementTree raises on
        # serialization when an int (0) is assigned.
        events[0].find('timeduration').text = '0'
        cal_tree.write(moodle_cal_path, short_empty_elements=False,
                       encoding='UTF-8', xml_declaration=True)
class MoodleCourse():
    """\
    Describes a complete Moodle course from an unpacked archive on the disk"""
    # Maps the <modulename> found in moodle_backup.xml to wrapper classes.
    modname_to_class = {
        'quiz': MoodleQuiz,
        'assign': MoodleHomework,
        'feedback': MoodleFeedback,
        'lesson': MoodleLesson,
        'choice': MoodleChoice,
    }
    def __init__(self, moodle_archive_path):
        """Parse moodle_backup.xml; a falsy path yields an empty course."""
        self.path = moodle_archive_path
        if not moodle_archive_path:
            return
        self.fullpath = os.path.join(self.path, 'moodle_backup.xml')
        self.backup = ET.parse(self.fullpath)
        self._load_activities_and_sequence()
    def replace_event(self, activity):
        """Replace the stored activity with the (edited) given one."""
        self.activities[type(activity)][activity.rel_id - 1] = activity
    def get_activity_by_type_and_num(self, type, relative_number):
        """Return the n-th (1-based) activity of the given wrapper type."""
        return self.activities[type][relative_number - 1]
    def write(self, output_path):
        """Write all activities back to disk and repack the archive."""
        self._write_activities_to_disk()
        # Moodle archives require special care !
        # Archive must be created like this `tar -cf archive.mbz *`
        ogwd = os.getcwd()
        os.chdir(self.path)
        try:
            full_output_path = os.path.join(ogwd, output_path)
            # The context manager closes the archive; no explicit close needed.
            with tarfile.open(full_output_path, "w:gz") as archive:
                for name in os.listdir(self.path):
                    archive.add(name)
        finally:
            # FIX: always restore the working directory, even when packing
            # fails, so callers are not left in the archive directory.
            os.chdir(ogwd)
    def _load_activity_sequence(self):
        """Read the activity sequence from moodle_backup.xml.
        Returns a list of the module_ids in order of the course.
        """
        o = []
        activities = self.backup.getroot().find('information') \
            .find('contents').find('activities')
        for activity in activities:
            o.append(int(activity.find('moduleid').text))
        return o
    def _load_activities_and_sequence(self):
        """Load the course ordering, then the activities themselves."""
        self.activity_sequence = self._load_activity_sequence()
        self.activities = self._load_activites()
    def _load_activites(self):
        """Build {wrapper class: [activities]} for supported, visible modules."""
        activities = {}
        for clazz in self.modname_to_class.values():
            activities[clazz] = []
        for a in self.backup.getroot().find('information').find('contents'). \
                find('activities'):
            module_name = a.find('modulename').text
            directory = a.find('directory').text
            if module_name not in self.modname_to_class:
                continue  # Ignore incompatible activity
            clazz = self.modname_to_class[module_name]
            activity_instance = clazz(os.path.join(self.path, directory))
            # Ignore invisible activities
            if activity_instance.is_visible():
                activities[clazz].append(activity_instance)
        for activity_type, items in activities.items():
            activities[activity_type] = self._sort_activity_type(items)
        return activities
    def _sort_activity_type(self, activities):
        """Sort by course order (moduleid sequence) and assign 1-based rel_id."""
        s = sorted(activities, key=lambda activity:
                   self.activity_sequence.index(activity['moduleid']))
        # Set relative id of activity
        for i, activity in enumerate(s):
            activity.rel_id = i + 1
        return s
    def _write_activities_to_disk(self):
        """Delegate write() to every activity of every type."""
        for activities in self.activities.values():
            for activity in activities:
                activity.write()
| fuhrmanator/course-activity-planner | python/moodle.py | Python | gpl-3.0 | 9,456 |
""" The layer module contains a Layer class to help when working with layers."""
class Layer(object):
    """A map layer built from a definition dict.

    Required keys: 'name', 'path'.
    Optional keys: 'style' (a filename in the project's directory),
    'visible' (defaults to True), 'definition_query' (defaults to None).
    """

    def __init__(self, definition):
        """Validate *definition* and populate the layer attributes.

        Raises:
            KeyError: if 'name' or 'path' is missing.
        """
        # Name -- Required
        if 'name' not in definition:
            raise KeyError('The "name" key is required for the Layer object.')
        self.name = definition['name']
        # Path -- Required
        if 'path' not in definition:
            raise KeyError('The "path" key is required for the Layer object.')
        self.path = definition['path']
        # Style - Optional - If provided it should be a filename in the
        # project's directory.
        self.style = definition.get('style')
        # Set visible status for layers
        self.visible = definition.get('visible', True)
        # Definition query allows filtering layers based on data attributes
        self.definition_query = definition.get('definition_query')
| rustprooflabs/MapBuilder | mapbuilder/layer.py | Python | mit | 1,152 |
#!/usr/bin/env python
# coding: utf-8
"""A python bulk editor class to apply the same code to many files."""
# Copyright (c) 2012, 2013 Jérôme Lecomte
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import unicode_literals
__version__ = '0.66' # UPDATE setup.py when changing version.
__author__ = 'Jérôme Lecomte'
__license__ = 'MIT'
import os
import shutil
import sys
import logging
import argparse
import difflib
# Most manip will involve re so we include it here for convenience.
import re # pylint: disable=W0611
import fnmatch
import io
import subprocess
log = logging.getLogger(__name__)
try:
unicode
except NameError:
unicode = str # pylint: disable=C0103
def get_function(fn_name):
    """Retrieve the function defined by the function_name.

    Arguments:
    fn_name: specification of the type module:function_name. With an empty
    module part ('myfunc:'), the callable is looked up in this module's
    globals.
    """
    module_name, callable_name = fn_name.split(':')
    if not callable_name:
        callable_name = module_name
        current = globals()
    else:
        import importlib
        try:
            current = importlib.import_module(module_name)
        except ImportError:
            log.error("failed to import {}".format(module_name))
            raise
    for level in callable_name.split('.'):
        # FIX: globals() is a dict, so attribute access (getattr) always
        # failed on the no-module path; use item access for mappings.
        if isinstance(current, dict):
            current = current[level]
        else:
            current = getattr(current, level)
    return current
class MassEdit(object):
    """Mass edit lines of files.

    Holds three kinds of processors that edit_file() applies in order:
    an optional os executable, compiled line expressions, then content
    functions.
    """
    def __init__(self, **kwds):
        """Initialize MassEdit object.
        Args:
        - code (byte code object): code to execute on input file.
        - function (str or callable): function to call on input file.
        - module (str): module name where to find the function.
        - executable (str): executable file name to execute on input file.
        - dry_run (bool): skip actual modification of input file if True.
        """
        self.code_objs = dict()
        self._codes = []
        self._functions = []
        self._executables = []
        self.dry_run = None
        if 'module' in kwds:
            self.import_module(kwds['module'])
        if 'code' in kwds:
            self.append_code_expr(kwds['code'])
        if 'function' in kwds:
            self.append_function(kwds['function'])
        if 'executable' in kwds:
            self.append_executable(kwds['executable'])
        if 'dry_run' in kwds:
            self.dry_run = kwds['dry_run']
    def __edit_line(self, line, code, code_obj):  # pylint: disable=R0201
        """Edit a line with one code object built in the ctor.

        The expression sees the current line as the local name `line`.
        """
        try:
            result = eval(code_obj, globals(), locals())
        except TypeError as ex:
            message = "failed to execute {}: {}".format(code, ex)
            log.error(message)
            raise
        if result is None:
            # FIX: a bare `raise` with no active exception is itself an
            # error (RuntimeError in Py3); raise an explicit exception with
            # the diagnostic instead.
            message = "cannot process line '{}' with {}".format(line, code)
            log.error(message)
            raise ValueError(message)
        elif isinstance(result, list) or isinstance(result, tuple):
            line = ' '.join([unicode(res_element) for res_element in result])
        else:
            line = unicode(result)
        return line
    def edit_line(self, line):
        """Edit a single line using the code expression."""
        for code, code_obj in self.code_objs.items():
            line = self.__edit_line(line, code, code_obj)
        return line
    def edit_content(self, lines):
        """Process a file contents.
        First processes the contents line by line applying the registered
        expressions, then process the resulting contents using the
        registered functions.
        Arguments:
        lines (list): file content.
        """
        lines = [self.edit_line(line) for line in lines]
        for function in self._functions:
            try:
                lines = function(lines)
            except Exception as err:
                msg = "failed to execute code: {}".format(err)
                log.error(msg)
                raise  # Let the exception be handled at a higher level.
        return lines
    def edit_file(self, file_name):
        """Edit file in place, returns a list of modifications (unified diff).
        Arguments:
        file_name: The name of the file.
        dry_run: only return differences, but do not edit the file.
        """
        with io.open(file_name, "r", encoding='utf-8') as from_file:
            from_lines = from_file.readlines()
        if self._executables:
            nb_execs = len(self._executables)
            if nb_execs > 1:
                # FIX: log.warn is a deprecated alias of log.warning.
                log.warning("found {} executables; only the first one is used".
                            format(nb_execs))
            exec_list = self._executables[0].split()
            exec_list.append(file_name)
            try:
                log.info("running {}".format(" ".join(exec_list)))
                output = subprocess.check_output(exec_list,
                                                 universal_newlines=True)
            except Exception as err:
                msg = "failed to execute {}: {}"
                log.error(msg.format(" ".join(exec_list), err))
                raise  # Let the exception be handled at a higher level.
            to_lines = output.split("\n")
        else:
            to_lines = from_lines
        # unified_diff wants structure of known length. Convert to a list.
        to_lines = list(self.edit_content(to_lines))
        diffs = difflib.unified_diff(from_lines, to_lines,
                                     fromfile=file_name, tofile='<new>')
        if not self.dry_run:
            bak_file_name = file_name + ".bak"
            if os.path.exists(bak_file_name):
                msg = "{} already exists".format(bak_file_name)
                if sys.version_info < (3, 3):
                    raise OSError(msg)
                else:
                    raise FileExistsError(msg)
            try:
                os.rename(file_name, bak_file_name)
                with io.open(file_name, "w", encoding='utf-8') as new_file:
                    new_file.writelines(to_lines)
                # Keeps mode of original file.
                shutil.copymode(bak_file_name, file_name)
            except Exception as err:
                msg = "failed to write output to {}: {}"
                log.error(msg.format(file_name, err))
                # Try to recover...
                try:
                    os.rename(bak_file_name, file_name)
                except Exception as err:  # pylint: disable=W0703
                    msg = "failed to restore {} from {}: {}"
                    log.error(msg.format(file_name, bak_file_name, err))
                raise
            try:
                os.unlink(bak_file_name)
            except Exception as err:  # pylint: disable=W0703
                msg = "failed to remove backup {}: {}"
                log.warning(msg.format(bak_file_name, err))
        return list(diffs)
    def append_code_expr(self, code):
        """Compile argument and adds it to the list of code objects."""
        if not isinstance(code, str):  # expects a string.
            raise TypeError("string expected")
        log.debug("compiling code {}...".format(code))
        try:
            code_obj = compile(code, '<string>', 'eval')
            self.code_objs[code] = code_obj
        except SyntaxError as syntax_err:
            log.error("cannot compile {0}: {1}".format(
                code, syntax_err))
            raise
        log.debug("compiled code {}".format(code))
    def append_function(self, function):
        """Append the function to the list of functions to be called.
        If the function is already a callable, use it. If it's a type str
        try to interpret it as [module]:?<callable>, load the module
        if there is one and retrieve the callable.
        Argument:
        function (str or callable): function to call on input.
        """
        if not hasattr(function, '__call__'):
            function = get_function(function)
        if not hasattr(function, '__call__'):
            raise ValueError("function is expected to be callable")
        self._functions.append(function)
        log.debug("registered {}".format(function.__name__))
    def append_executable(self, executable):
        """Append an executable os command to the list to be called.
        Argument:
        executable (str): os callable executable.
        """
        if not isinstance(executable, str):
            raise TypeError("expected executable name as str, not {}".
                            format(executable.__class__.__name__))
        self._executables.append(executable)
    def set_code_exprs(self, codes):
        """Convenience: sets all the code expressions at once."""
        self.code_objs = dict()
        self._codes = []
        for code in codes:
            self.append_code_expr(code)
    def set_functions(self, functions):
        """Convenience: sets all functions to be called."""
        for func in functions:
            try:
                self.append_function(func)
            except (ValueError, AttributeError) as ex:
                msg = "'{}' is not a callable function: {}"
                log.error(msg.format(func, ex))
                raise
    def set_executables(self, executables):  # pylint: disable=W0613
        """Convenience: sets all the executables to be called."""
        for exc in executables:
            self.append_executable(exc)
    def import_module(self, module):  # pylint: disable=R0201
        """Import module that are needed for the code expr to compile.
        Argument:
        module (str or list): module(s) to import.
        """
        if isinstance(module, list):
            all_modules = module
        else:
            all_modules = [module]
        for mod in all_modules:
            globals()[mod] = __import__(mod.strip())
def parse_command_line(argv):
    """Parse command line argument. See -h option.

    Arguments:
    argv: arguments on the command line must include caller file name.

    Returns the argparse namespace; exits with an error when none of
    --expression/--function/--executable is given.
    """
    import textwrap
    example = textwrap.dedent("""
    Examples:
    # Simple string substitution (-e). Will show a diff. No changes applied.
    {0} -e "re.sub('failIf', 'assertFalse', line)" *.py
    # File level modifications (-f). Overwrites the files in place (-w).
    {0} -w -f fixer:main *.py
    # Will change all test*.py in subdirectories of tests.
    {0} -e "re.sub('failIf', 'assertFalse', line)" -s tests test*.py
    """).format(os.path.basename(argv[0]))
    formatter_class = argparse.RawDescriptionHelpFormatter
    # Python 2's argparse accepted a version= constructor argument;
    # Python 3 requires a separate --version action (added below).
    if sys.version_info[0] < 3:
        parser = argparse.ArgumentParser(description="Python mass editor",
                                         version=__version__,
                                         epilog=example,
                                         formatter_class=formatter_class)
    else:
        parser = argparse.ArgumentParser(description="Python mass editor",
                                         epilog=example,
                                         formatter_class=formatter_class)
    parser.add_argument("-v", "--version", action="version",
                        version="%(prog)s {}".format(__version__))
    parser.add_argument("-w", "--write", dest="dry_run",
                        action="store_false", default=True,
                        help="modify target file(s) in place. "
                        "Shows diff otherwise.")
    parser.add_argument("-V", "--verbose", dest="verbose_count",
                        action="count", default=0,
                        help="increases log verbosity (can be specified "
                        "multiple times)")
    parser.add_argument('-e', "--expression", dest="expressions", nargs=1,
                        help="Python expressions applied to target files. "
                        "Use the line variable to reference the current line.")
    parser.add_argument('-f', "--function", dest="functions", nargs=1,
                        help="Python function to apply to target file. "
                        "Takes file content as input and yield lines. "
                        "Specify function as [module]:?<function name>.")
    parser.add_argument('-x', "--executable", dest="executables", nargs=1,
                        help="Python executable to apply to target file.")
    parser.add_argument("-s", "--start", dest="start_dir",
                        help="Directory from which to look for target files.")
    parser.add_argument('-m', "--max-depth-level", type=int, dest="max_depth",
                        help="Maximum depth when walking subdirectories.")
    parser.add_argument('-o', '--output', metavar="output",
                        type=argparse.FileType('w'), default=sys.stdout,
                        help="redirect output to a file")
    parser.add_argument('patterns', metavar="pattern",
                        nargs='+',  # argparse.REMAINDER,
                        help="shell-like file name patterns to process.")
    arguments = parser.parse_args(argv[1:])
    if not (arguments.expressions or
            arguments.functions or
            arguments.executables):
        parser.error(
            '--expression, --function, or --executable must be specified')
    # Sets log level to WARN going more verbose for each new -V.
    log.setLevel(max(3 - arguments.verbose_count, 0) * 10)
    return arguments
def get_paths(patterns, start_dir=None, max_depth=1):
    """Yield paths of files that match any of the shell patterns.

    Arguments:
    patterns: list of fnmatch-style patterns.
    start_dir: root of the search (defaults to the current directory).
    max_depth: maximum directory depth relative to start_dir (1 = only
    start_dir itself); None means unlimited.
    """
    # Shortcut: if there is only one pattern, make sure we process just that.
    if len(patterns) == 1 and not start_dir:
        pattern = patterns[0]
        directory = os.path.dirname(pattern)
        if directory:
            patterns = [os.path.basename(pattern)]
            start_dir = directory
            max_depth = 1
    if not start_dir:
        start_dir = os.getcwd()
    for root, dirs, files in os.walk(start_dir):  # pylint: disable=W0612
        if max_depth is not None:
            relpath = os.path.relpath(root, start=start_dir)
            depth = len(relpath.split(os.sep))
            if depth > max_depth:
                continue
            if depth >= max_depth:
                # Prune: os.walk honors in-place edits of `dirs`, so clear it
                # to stop descending once the depth limit is reached instead
                # of walking (and skipping) the whole deeper tree.
                dirs[:] = []
        names = []
        for pattern in patterns:
            names += fnmatch.filter(files, pattern)
        for name in names:
            path = os.path.join(root, name)
            yield path
def edit_files(patterns, expressions=None,  # pylint: disable=R0913, R0914
               functions=None, executables=None,
               start_dir=None, max_depth=1, dry_run=True,
               output=sys.stdout):
    """Process patterns with MassEdit.
    Arguments:
    patterns: file pattern to identify the files to be processed.
    expressions: single python expression to be applied line by line.
    functions: functions to process files contents.
    executables: os executables to execute on the argument files.
    Keyword arguments:
    max_depth: maximum recursion level when looking for file matches.
    start_dir: directory where to start the file search.
    dry_run: only display differences if True. Save modified file otherwise.
    output: handle where the output should be redirected.
    Return:
    list of files processed.
    """
    # FIX: mutable ([]) default arguments are shared across calls; use None
    # and treat it as "no processors of this kind".
    # Makes for a better diagnostic because str are also iterable.
    if not iter(patterns) or isinstance(patterns, str):
        raise TypeError("patterns should be a list")
    if expressions and (not iter(expressions) or isinstance(expressions, str)):
        raise TypeError("expressions should be a list of exec expressions")
    if functions and (not iter(functions) or isinstance(functions, str)):
        raise TypeError("functions should be a list of functions")
    if executables and (not iter(executables) or isinstance(executables, str)):
        raise TypeError("executables should be a list of program names")
    editor = MassEdit(dry_run=dry_run)
    if expressions:
        editor.set_code_exprs(expressions)
    if functions:
        editor.set_functions(functions)
    if executables:
        editor.set_executables(executables)
    processed_paths = []
    for path in get_paths(patterns, start_dir=start_dir, max_depth=max_depth):
        diffs = list(editor.edit_file(path))
        if dry_run:
            output.write("".join(diffs))
        processed_paths.append(os.path.abspath(path))
    return processed_paths
def command_line(argv):
    """Instantiate an editor and process arguments.

    Arguments:
    argv: full argument vector including the program name.
    Returns the list of processed paths.
    """
    arguments = parse_command_line(argv)
    paths = edit_files(arguments.patterns,
                       expressions=arguments.expressions,
                       functions=arguments.functions,
                       executables=arguments.executables,
                       start_dir=arguments.start_dir,
                       max_depth=arguments.max_depth,
                       dry_run=arguments.dry_run,
                       output=arguments.output)
    # If the output is not sys.stdout, we need to close it because
    # argparse.FileType does not do it for us.
    # FIX: sys.stdout itself is an io.IOBase instance, so the previous
    # isinstance() test closed the interpreter's stdout when no -o file was
    # given; compare identity with sys.stdout instead.
    if arguments.output is not sys.stdout:
        arguments.output.close()
    return paths
def main():
    """Main program.

    Configures logging to stderr and delegates to command_line();
    logging is shut down even when processing raises.
    """
    # basicConfig sets the handler threshold; the effective module level is
    # tightened later by parse_command_line() from the -V count.
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    try:
        command_line(sys.argv)
    finally:
        logging.shutdown()
if __name__ == "__main__":
    sys.exit(main())
| imoldman/AdditionalLogger | third_party/massedit.py | Python | mit | 18,540 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
=====================================================================================
Copyright (c) 2016-2018 Université de Lorraine & Luleå tekniska universitet
Author: Luca Di Stasio <[email protected]>
<[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
=====================================================================================
DESCRIPTION
Tested with Python 2.7 Anaconda 2.4.1 (64-bit) distribution in Windows 7.
'''
from os.path import isfile, join, exists
from os import makedirs
from datetime import datetime
from time import strftime, sleep
from platform import platform,system
import getopt
import subprocess
import numpy
from odbAccess import *
from abaqusConstants import *
from odbMaterial import *
from odbSection import *
import re
#===============================================================================#
#===============================================================================#
# Data extraction functions
#===============================================================================#
#===============================================================================#
def writeLineToLogFile(logFileFullPath,mode,line,toScreen):
    """Write a single line (newline-terminated) to the log file and
    optionally echo it to the screen."""
    text = line + '\n'
    with open(logFileFullPath,mode) as logHandle:
        logHandle.write(text)
    if toScreen:
        print(text)
def skipLineToLogFile(logFileFullPath,mode,toScreen):
    """Write a blank line to the log file and optionally echo one."""
    with open(logFileFullPath,mode) as logHandle:
        logHandle.write('\n')
    if toScreen:
        print('\n')
def writeTitleSepLineToLogFile(logFileFullPath,mode,toScreen):
    """Write the '=' section separator line to the log file and optionally
    echo it to the screen."""
    sepLine = '===============================================================================================\n'
    with open(logFileFullPath,mode) as logHandle:
        logHandle.write(sepLine)
    if toScreen:
        print(sepLine)
def writeTitleSecToLogFile(logFileFullPath,mode,title,toScreen):
    """Write a banner section: separators, the title, timestamp and platform.

    Only the first call uses *mode*; subsequent writes append ('a').
    """
    writeTitleSepLineToLogFile(logFileFullPath,mode,toScreen)
    writeTitleSepLineToLogFile(logFileFullPath,'a',toScreen)
    skipLineToLogFile(logFileFullPath,'a',toScreen)
    writeLineToLogFile(logFileFullPath,'a',title,toScreen)
    skipLineToLogFile(logFileFullPath,'a',toScreen)
    writeLineToLogFile(logFileFullPath,'a','Starting on ' + datetime.now().strftime('%Y-%m-%d') + ' at ' + datetime.now().strftime('%H:%M:%S'),toScreen)
    skipLineToLogFile(logFileFullPath,'a',toScreen)
    writeLineToLogFile(logFileFullPath,'a','Platform: ' + platform(),toScreen)
    skipLineToLogFile(logFileFullPath,'a',toScreen)
    writeTitleSepLineToLogFile(logFileFullPath,'a',toScreen)
    writeTitleSepLineToLogFile(logFileFullPath,'a',toScreen)
    skipLineToLogFile(logFileFullPath,'a',toScreen)
def writeErrorToLogFile(logFileFullPath,mode,exc,err,toScreen):
    """Write a formatted error report (exception and message) to the log file
    and optionally echo the same block to the screen.

    NOTE(review): the file and screen banners are duplicated by hand; keep
    them in sync if either is edited.
    """
    with open(logFileFullPath,mode) as log:
        log.write('!!! ----------------------------------------------------------------------------------------!!!\n')
        log.write('\n')
        log.write('    AN ERROR OCCURED\n')
        log.write('\n')
        log.write('    -------------------------\n')
        log.write('\n')
        log.write(str(exc) + '\n')
        log.write(str(err) + '\n')
        log.write('\n')
        log.write('Terminating program\n')
        log.write('\n')
        log.write('!!! ----------------------------------------------------------------------------------------!!!\n')
        log.write('\n')
    if toScreen:
        print('!!! ----------------------------------------------------------------------------------------!!!\n')
        print('\n')
        print('    AN ERROR OCCURED\n')
        print('\n')
        print('    -------------------------\n')
        print('\n')
        print(str(exc) + '\n')
        print(str(err) + '\n')
        print('\n')
        print('Terminating program\n')
        print('\n')
        print('!!! ----------------------------------------------------------------------------------------!!!\n')
        print('\n')
def getPerfs(wd,sims):
    """Collect solver performance data for each simulation under *wd*.

    For every project name in *sims*, parses (when present):
      <wd>/<sim>/solver/<sim>.dat -- job time summary, memory estimate,
                                     problem size;
      <wd>/<sim>/solver/<sim>.msg -- number of CPUs used by the solver;
      <wd>/<sim>/input/<sim>.inp  -- crack angular aperture (debond).
    Returns a list of rows, the first being the CSV header.
    """
    perf = []
    perf.append(['PROJECT NAME','DEBOND [°]','NUMBER OF CPUS [-]','USER TIME [s]','SYSTEM TIME [s]','USER TIME/TOTAL CPU TIME [%]','SYSTEM TIME/TOTAL CPU TIME [%]','TOTAL CPU TIME [s]','WALLCLOCK TIME [s]','WALLCLOCK TIME [m]','WALLCLOCK TIME [h]','WALLCLOCK TIME/TOTAL CPU TIME [%]','ESTIMATED FLOATING POINT OPERATIONS PER ITERATION [-]','MINIMUM REQUIRED MEMORY [MB]','MEMORY TO MINIMIZE I/O [MB]','TOTAL NUMBER OF ELEMENTS [-]','NUMBER OF ELEMENTS DEFINED BY THE USER [-]','NUMBER OF ELEMENTS DEFINED BY THE PROGRAM [-]','TOTAL NUMBER OF NODES [-]','NUMBER OF NODES DEFINED BY THE USER [-]','NUMBER OF NODES DEFINED BY THE PROGRAM [-]','TOTAL NUMBER OF VARIABLES [-]'])
    print('')
    for sim in sims:
        print('Extracting data from project: ' + sim)
        # Defaults remain 0 when the corresponding file/section is missing.
        usertime = 0
        systemtime = 0
        totalcpu = 0
        wallclock = 0
        floatops = 0
        minMemory = 0
        minIOmemory = 0
        totEl = 0
        userEl = 0
        progEl = 0
        totN = 0
        userN = 0
        progN = 0
        totVar = 0
        cpus = 0
        debond = 0
        if exists(join(wd,sim,'solver',sim+'.dat')):
            with open(join(wd,sim,'solver',sim+'.dat'),'r') as dat:
                lines = dat.readlines()
            for l,line in enumerate(lines):
                if 'JOB TIME SUMMARY' in line:
                    # Times appear as 'NAME = value' lines after the marker.
                    for subline in lines[l:]:
                        if 'USER TIME' in subline:
                            usertime = float(subline.split('=')[1])
                        elif 'SYSTEM TIME' in subline:
                            systemtime = float(subline.split('=')[1])
                        elif 'TOTAL CPU TIME' in subline:
                            totalcpu = float(subline.split('=')[1])
                        elif 'WALLCLOCK TIME' in subline:
                            wallclock = float(subline.split('=')[1])
                elif 'M E M O R Y   E S T I M A T E' in line:
                    # Fixed layout: numbers live 6 lines below the banner.
                    values = lines[l+6].replace('\n','').split(' ')
                    while '' in values:
                        values.remove('')
                    floatops = float(values[1])
                    minMemory = float(values[2])
                    minIOmemory = float(values[3])
                elif 'P R O B L E M   S I Z E' in line:
                    # Fixed layout: counts are the last token of each of the
                    # following lines (elements, nodes, variables).
                    words = lines[l+3].replace('\n','').split(' ')
                    while '' in words:
                        words.remove('')
                    totEl = int(words[-1])
                    words = lines[l+4].split(' ')
                    while '' in words:
                        words.remove('')
                    userEl = int(words[-1])
                    words = lines[l+5].split(' ')
                    while '' in words:
                        words.remove('')
                    progEl = int(words[-1])
                    words = lines[l+6].split(' ')
                    while '' in words:
                        words.remove('')
                    totN = int(words[-1])
                    words = lines[l+7].split(' ')
                    while '' in words:
                        words.remove('')
                    userN = int(words[-1])
                    words = lines[l+8].split(' ')
                    while '' in words:
                        words.remove('')
                    progN = int(words[-1])
                    words = lines[l+9].split(' ')
                    while '' in words:
                        words.remove('')
                    totVar = int(words[-1])
        if exists(join(wd,sim,'solver',sim+'.msg')):
            with open(join(wd,sim,'solver',sim+'.msg'),'r') as msg:
                lines = msg.readlines()
            for line in lines:
                if 'USING THE DIRECT SOLVER WITH' in line:
                    # CPU count is the token right before 'PROCESSORS'.
                    words = line.replace('\n','').split(' ')
                    while '' in words:
                        words.remove('')
                    cpus = int(words[words.index('PROCESSORS')-1])
        if exists(join(wd,sim,'input',sim+'.inp')):
            with open(join(wd,sim,'input',sim+'.inp'),'r') as inp:
                lines = inp.readlines()
            for line in lines:
                if 'Crack Angular Aperture' in line:
                    debond = numpy.round(float(line.replace('\n','').replace('*','').replace('-','').split(':')[-1].replace('deg','')))
                    break
        # NOTE(review): ratios divide by totalcpu -- a missing .dat file
        # (totalcpu == 0) raises ZeroDivisionError here; confirm inputs.
        perf.append([sim,debond,cpus,usertime,systemtime,usertime/totalcpu,systemtime/totalcpu,totalcpu,wallclock,wallclock/60.,wallclock/3600.,wallclock/totalcpu,floatops,minMemory,minIOmemory,totEl,userEl,progEl,totN,userN,progN,totVar])
    return perf
def writePerfToFile(od,outfile,performanceslist):
    """Write performance records to <od>/<outfile> as CSV, one row per record.

    Arguments:
    od: output directory.
    outfile: output file name.
    performanceslist: iterable of records (iterables of values).
    """
    with open(join(od,outfile),'w') as csv:
        for performances in performanceslist:
            # str.join replaces the manual index-tracking concatenation loop.
            csv.write(','.join(str(performance) for performance in performances) + '\n')
def getFrame(odbObj,step,frame):
    """Return frame number *frame* of the step at index *step* (negative
    indices allowed, e.g. -1 for the last step/frame).

    NOTE(review): indexing steps.keys() relies on the Abaqus Python 2
    repository API where keys() returns a sequence.
    """
    return odbObj.steps[odbObj.steps.keys()[step]].frames[frame]
def getFirstAndLastFrame(odbObj,step):
    """Return the (first, last) frames of the step at index *step*."""
    return getFrame(odbObj,step,0),getFrame(odbObj,step,-1)
def getFirstAndLastFrameLastStep(odbObj):
    """Return the (first, last) frames of the last step of the odb."""
    first, last = getFirstAndLastFrame(odbObj,-1)
    return first, last
def getSingleNodeSet(odbObj,part,nodeSet):
    """Return the named node set of the given part instance."""
    return odbObj.rootAssembly.instances[part].nodeSets[nodeSet]
def getSingleElementSet(odbObj,part,elementSet):
    """Return the named element set of the given part instance."""
    return odbObj.rootAssembly.instances[part].elementSets[elementSet]
def getSingleSetNodeCoordinates(odbObj,step,frame,part,nodeSet):
    """Return the nodal COORD field of one node set at the given step/frame."""
    frameObj = getFrame(odbObj,step,frame)
    allCoords = frameObj.fieldOutputs['COORD'].getSubset(position=NODAL)
    coords = allCoords.getSubset(region=odbObj.rootAssembly.instances[part].nodeSets[nodeSet])
    return coords
def getMultipleSetsNodeCoordinates(odbObj,nodeSets):
    """Return {(step, frame, part, setName): coord field} for every entry of
    *nodeSets*, each entry being a (step, frame, part, setName) tuple."""
    coords = {}
    for nodeSet in nodeSets:
        stepIdx, frameIdx, partName, setName = \
            nodeSet[0], nodeSet[1], nodeSet[2], nodeSet[3]
        coords[nodeSet] = getSingleSetNodeCoordinates(
            odbObj, stepIdx, frameIdx, partName, setName)
    return coords
def extractAndSaveNodesCoordinates(odbObj,nodeSetsData,folder,filename,ext):
    """Extract node coordinates for each requested node set and save as CSV.

    Arguments:
    nodeSetsData: list of (step, frame, part, nodeSetName) tuples.
    folder, filename, ext: output location (<folder>/<filename><ext>).
    """
    nodeSets = getMultipleSetsNodeCoordinates(odbObj,nodeSetsData)
    with open(join(folder,filename + ext),'w') as csv:
        # FIX: getMultipleSetsNodeCoordinates keys its result by the whole
        # (step, frame, part, setName) tuple -- indexing with the set name
        # alone (nodeSet[3]) raised KeyError.
        firstSet = nodeSets[nodeSetsData[0]]
        if len(firstSet.values[0].data)==1:
            string = 'X'
        elif len(firstSet.values[0].data)==2:
            string = 'X, Y'
        elif len(firstSet.values[0].data)==3:
            string = 'X, Y, Z'
        csv.write('DATA\n')
        csv.write('NODE SET' + ', ' + 'NODE TYPE, NODE LABEL, ' + string + '\n')
        for nodeSet in nodeSetsData:
            for value in nodeSets[nodeSet].values:
                line = ''
                line = nodeSet[3] + ', ' + 'NODAL' + ', ' + str(value.nodeLabel)
                for datum in value.data:
                    line += ', ' + str(datum)
                csv.write(line + '\n')
def getAllNodes(odbObj,step,frameN):
    """Return {node label (str): [coordinate components]} for every node of
    the given step/frame, taken from the nodal COORD field."""
    frame = getFrame(odbObj,step,frameN)
    nodesCoords = frame.fieldOutputs['COORD'].getSubset(position=NODAL)
    allNodes = {}
    for value in nodesCoords.values:
        allNodes[str(value.nodeLabel)] = [component for component in value.data]
    return allNodes
def getAndSaveAllNodes(odbObj,step,frameN,folder,filename,ext):
    """Like getAllNodes, but also dump the coordinates as CSV to
    <folder>/<filename><ext>. Returns the same dict."""
    allNodes = {}
    frame = getFrame(odbObj,step,frameN)
    nodesCoords = frame.fieldOutputs['COORD'].getSubset(position=NODAL)
    for value in nodesCoords.values:
        components = []
        for component in value.data:
            components.append(component)
        allNodes[str(value.nodeLabel)] = components
    with open(join(folder,filename + ext),'w') as csv:
        # Header depends on the dimensionality of the coordinate data.
        if len(nodesCoords.values[0].data)==1:
            string = 'X'
        elif len(nodesCoords.values[0].data)==2:
            string = 'X, Y'
        elif len(nodesCoords.values[0].data)==3:
            string = 'X, Y, Z'
        csv.write('DATA\n')
        csv.write('NODE TYPE, NODE LABEL, ' + string + '\n')
        for value in nodesCoords.values:
            line = ''
            line = 'NODAL' + ', ' + str(value.nodeLabel)
            for datum in value.data:
                line += ', ' + str(datum)
            csv.write(line + '\n')
    return allNodes
def getAllIntPoints(odbObj,step,frameN):
    """Return {'<elementLabel>-<integrationPoint>': [coordinate components]}
    for every integration point of the given step/frame."""
    frame = getFrame(odbObj,step,frameN)
    intpointCoords = frame.fieldOutputs['COORD'].getSubset(position=INTEGRATION_POINT)
    allIntPoints = {}
    for value in intpointCoords.values:
        label = str(value.elementLabel) + '-' + str(value.integrationPoint)
        allIntPoints[label] = [component for component in value.data]
    return allIntPoints
def getAndSaveAllIntPoints(odbObj,step,frameN,folder,filename,ext):
    """Extract all integration-point coordinates of a frame and save to CSV.

    Parameters:
        odbObj   -- open odb object
        step     -- step index passed through to getFrame
        frameN   -- frame index passed through to getFrame
        folder   -- destination directory for the CSV file
        filename -- CSV file name without extension
        ext      -- file extension (e.g. '.csv')

    Returns a dict mapping 'elementLabel-integrationPoint' (string) to the
    coordinate list.
    """
    allIntPoints = {}
    frame = getFrame(odbObj,step,frameN)
    intpointCoords = frame.fieldOutputs['COORD'].getSubset(position=INTEGRATION_POINT)
    for value in intpointCoords.values:
        key = str(value.elementLabel) + '-' + str(value.integrationPoint)
        allIntPoints[key] = [component for component in value.data]
    # Map coordinate dimension to header labels. The previous if/elif chain
    # left the header variable unbound (NameError) for any dimension other
    # than 1-3; default to an empty header instead.
    coordHeaders = {1: 'X', 2: 'X, Y', 3: 'X, Y, Z'}
    coordHeader = coordHeaders.get(len(intpointCoords.values[0].data), '')
    # 'csvFile' avoids shadowing the stdlib csv module name used elsewhere.
    with open(join(folder,filename + ext),'w') as csvFile:
        csvFile.write('DATA\n')
        csvFile.write('NODE TYPE, NODE LABEL, ' + coordHeader + '\n')
        for value in intpointCoords.values:
            line = 'INTEGRATION_POINT' + ', ' + str(value.elementLabel)+'-'+str(value.integrationPoint)
            for datum in value.data:
                line += ', ' + str(datum)
            csvFile.write(line + '\n')
    return allIntPoints
def getFieldOutput(odbObj,step,frame,fieldOutput,subset=None,pos=None):
    """Return a field output, optionally restricted to a region/position.

    pos selects the sampling position when a subset is given:
        1 -- INTEGRATION_POINT
        2 -- NODAL
        3 -- ELEMENT_NODAL
        4 -- CENTROID
    Any other pos (or None) restricts only by region; with no subset the
    full field output is returned.
    """
    currentFrame = getFrame(odbObj,step,frame)
    field = currentFrame.fieldOutputs[fieldOutput]
    if subset!=None:
        positionByCode = {1: INTEGRATION_POINT,
                          2: NODAL,
                          3: ELEMENT_NODAL,
                          4: CENTROID}
        if pos in positionByCode:
            return field.getSubset(region=subset,position=positionByCode[pos])
        return field.getSubset(region=subset)
    return field
def extractAndSaveFieldOutput(odbObj,step,frameN,folder,filename,ext,fieldOutput,subset=None,pos=None):
    """Extract a field output and save it, with coordinates, to a CSV file.

    Parameters:
        odbObj      -- open odb object
        step, frameN-- step/frame indices passed through to getFrame
        folder      -- destination directory for the CSV file
        filename    -- CSV file name without extension
        ext         -- file extension (e.g. '.csv')
        fieldOutput -- field output key (e.g. 'U', 'RF', 'S', 'EE')
        subset      -- optional region (node/element set) to restrict to
        pos         -- optional position code (see getFieldOutput)

    Writes one line per value, prefixed by its coordinates taken from the
    nodal / integration-point coordinate maps of the same frame.
    """
    nodes = getAllNodes(odbObj,step,frameN)
    intpoints = getAllIntPoints(odbObj,step,frameN)
    # Reuse the shared subset/position dispatch instead of duplicating the
    # whole if/elif chain from getFieldOutput here.
    out = getFieldOutput(odbObj,step,frameN,fieldOutput,subset,pos)
    # Build the column header from the field type and component count.
    # Default to '' so an unrecognized fieldOutput or component count no
    # longer raises NameError on the unbound header variable.
    string = ''
    nComponents = len(out.values[0].data)
    if fieldOutput== 'U' or fieldOutput=='RF':
        # vector outputs: one component per coordinate direction
        if nComponents==1:
            string = 'X, ' + fieldOutput + '1'
        elif nComponents==2:
            string = 'X, Y, ' + fieldOutput + '1' + ', ' + fieldOutput + '2'
        elif nComponents==3:
            string = 'X, Y, Z, ' + fieldOutput + '1' + ', ' + fieldOutput + '2' + ', ' + fieldOutput + '3'
    elif fieldOutput== 'S' or fieldOutput=='EE':
        # symmetric tensor outputs: 2, 4 or 6 components (1D/2D/3D)
        if nComponents==2:
            string = 'X, ' + fieldOutput + '11' + ', ' + fieldOutput + '12'
        elif nComponents==4:
            string = 'X, Y, ' + fieldOutput + '11' + ', ' + fieldOutput + '22' + ', ' + fieldOutput + '33' + ', ' + fieldOutput + '12'
        elif nComponents==6:
            string = 'X, Y, Z, ' + fieldOutput + '11' + ', ' + fieldOutput + '22' + ', ' + fieldOutput + '33' + ', ' + fieldOutput + '12' + ', ' + fieldOutput + '13' + ', ' + fieldOutput + '23'
    with open(join(folder,filename + ext),'w') as csvFile:
        csvFile.write('HEAT MAP\n')
        csvFile.write('NODE TYPE, NODE LABEL, ' + string + '\n')
        for value in out.values:
            if 'NODAL' in str(value.position):
                line = 'NODAL' + ', ' + str(value.nodeLabel)
                # undeformed/deformed coordinates first, then the data
                for datum in nodes[str(value.nodeLabel)]:
                    line += ', ' + str(datum)
                for datum in value.data:
                    line += ', ' + str(datum)
                csvFile.write(line + '\n')
            elif 'INTEGRATION_POINT' in str(value.position):
                line = 'INTEGRATION_POINT' + ', ' + str(value.elementLabel)+'-'+str(value.integrationPoint)
                for datum in intpoints[str(value.elementLabel)+'-'+str(value.integrationPoint)]:
                    line += ', ' + str(datum)
                for datum in value.data:
                    line += ', ' + str(datum)
                csvFile.write(line + '\n')
def getDispVsReactionOnBoundarySubset(odbObj,step,frame,part,subset,component):
    """Return (mean displacement, total reaction force) on a boundary set.

    Parameters:
        odbObj    -- open odb object
        step      -- step index passed through to getFieldOutput
        frame     -- frame index passed through to getFieldOutput
        part      -- part instance name for the node-set lookup
        subset    -- node-set name on the boundary
        component -- component index (0-based) of U and RF to use

    Bug fix: the step and frame arguments were previously ignored and
    hardcoded to -1,-1 in both field lookups; they are now honored. The
    visible callers pass -1,-1, so their behavior is unchanged.

    Raises ZeroDivisionError if the node set has no displacement values
    (same as the original behavior).
    """
    nodeSet = getSingleNodeSet(odbObj,part,subset)
    disp = getFieldOutput(odbObj,step,frame,'U',nodeSet)
    countdisp = 0
    meandisp = 0
    for value in disp.values:
        countdisp += 1
        meandisp += value.data[component]
    meandisp /= countdisp
    force = getFieldOutput(odbObj,step,frame,'RF',nodeSet)
    totalforce = 0
    for value in force.values:
        totalforce += value.data[component]
    return meandisp,totalforce
#===============================================================================#
#===============================================================================#
# Data extraction sets
#===============================================================================#
#===============================================================================#
#===============================================================================#
# extractFromODBoutputSet01
#
# For Single Fiber RVE model
#
# Full analysis
# Extract coordinates of nodes and integration points, displacements, strains,
# stresses, displacements and reactions at boundaries, displacements and stresses
# at interfaces, compute VCCT and VCCI
#
#===============================================================================#
def extractFromODBoutputSet01(wd,project,matdatafolder,codedir,settings,logfile,logfilename):
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Reading settings and assigning to variables...',True)
nEl0 = int(settings['nEl0'])
NElMax = int(settings['NElMax'])
DeltaEl = int(settings['DeltaEl'])
deltapsi = float(settings['deltapsi'])
nl = int(settings['nl'])
nSegsOnPath = int(settings['nSegsOnPath'])
tol = float(settings['tol'])
writeLineToLogFile(logfile,'a','... done.',True)
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Starting post-processing on simulation ' + project,True)
skipLineToLogFile(logfile,'a',True)
# define database name
odbname = project + '.odb'
odbfullpath = join(wd,project,'solver',odbname)
writeLineToLogFile(logfile,'a','ODB: ' + odbfullpath,True)
# define input file name
inpname = project + '.inp'
inpfullpath = join(wd,project,'input',inpname)
writeLineToLogFile(logfile,'a','INPUT file: ' + inpfullpath,True)
# define csv output folder and create if it does not exist
csvfolder = join(wd,project,'csv')
if not os.path.exists(csvfolder):
os.makedirs(csvfolder)
writeLineToLogFile(logfile,'a','CSV folder: ' + csvfolder,True)
# define dat output folder and create if it does not exist
datfolder = join(wd,project,'dat')
if not os.path.exists(datfolder):
os.makedirs(datfolder)
writeLineToLogFile(logfile,'a','DAT folder: ' + datfolder,True)
#=======================================================================
# BEGIN - extract performances
#=======================================================================
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Extract performances...',True)
try:
writePerfToFile(csvfolder,'performances.csv',getPerfs(wd,[project]))
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - extract performances
#=======================================================================
#=======================================================================
# BEGIN - open odb
#=======================================================================
writeLineToLogFile(logfile,'a','Open odb ' + odbname + ' in folder ' + join(wd,project,'solver') + ' ...',True)
try:
odb = openOdb(path=odbfullpath)
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - open odb
#=======================================================================
#=======================================================================
# BEGIN - get first and last frame
#=======================================================================
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Get first and last frame...',True)
try:
firstFrame,lastFrame = getFirstAndLastFrameLastStep(odb)
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - get first and last frame
#=======================================================================
#=======================================================================
# BEGIN - get deformed nodes
#=======================================================================
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Get deformed nodes ..',True)
try:
nodes = getAndSaveAllNodes(odb,-1,-1,csvfolder,'defnodesCoords','.csv')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
writeLineToLogFile(logfile,'a','Get deformed integration points ...',True)
try:
intpoints = getAndSaveAllIntPoints(odb,-1,-1,csvfolder,'defintpointCoords','.csv')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
boundaryNodeSetsData = [[-1,-1,'PART-1-1','SW-CORNERNODE'],
[-1,-1,'PART-1-1','SE-CORNERNODE'],
[-1,-1,'PART-1-1','NE-CORNERNODE'],
[-1,-1,'PART-1-1','NW-CORNERNODE'],
[-1,-1,'PART-1-1','LOWERSIDE-NODES-WITHOUT-CORNERS'],
[-1,-1,'PART-1-1','RIGHTSIDE-NODES-WITHOUT-CORNERS'],
[-1,-1,'PART-1-1','UPPERSIDE-NODES-WITHOUT-CORNERS'],
[-1,-1,'PART-1-1','LEFTSIDE-NODES-WITHOUT-CORNERS']]
writeLineToLogFile(logfile,'a','Extract and save deformed nodes coordinates at the boundary...',True)
try:
extractAndSaveNodesCoordinates(odb,boundaryNodeSetsData,csvfolder,'defboundaryNodesCoords','.csv')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
interfaceNodeSetsData = [[-1,-1,'PART-1-1','FIBERSURFACE-NODES'],
[-1,-1,'PART-1-1','MATRIXSURFACEATFIBERINTERFACE-NODES']]
writeLineToLogFile(logfile,'a','Extract and save deformed nodes coordinates at the interface...',True)
try:
extractAndSaveNodesCoordinates(odb,interfaceNodeSetsData,csvfolder,'deffiberInterfaceNodesCoords','.csv')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - get deformed nodes
#=======================================================================
#=======================================================================
# BEGIN - get undeformed nodes
#=======================================================================
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Get undeformed nodes...',True)
try:
undefNodes = getAndSaveAllNodes(odb,-1,0,csvfolder,'undefnodesCoords','.csv')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
try:
undefIntpoints = getAndSaveAllIntPoints(odb,-1,0,csvfolder,'undefintpointCoords','.csv')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
undefBoundaryNodeSetsData = [[-1,0,'PART-1-1','SW-CORNERNODE'],
[-1,0,'PART-1-1','SE-CORNERNODE'],
[-1,0,'PART-1-1','NE-CORNERNODE'],
[-1,0,'PART-1-1','NW-CORNERNODE'],
[-1,0,'PART-1-1','LOWERSIDE-NODES-WITHOUT-CORNERS'],
[-1,0,'PART-1-1','RIGHTSIDE-NODES-WITHOUT-CORNERS'],
[-1,0,'PART-1-1','UPPERSIDE-NODES-WITHOUT-CORNERS'],
[-1,0,'PART-1-1','LEFTSIDE-NODES-WITHOUT-CORNERS']]
try:
extractAndSaveNodesCoordinates(odb,undefBoundaryNodeSetsData,csvfolder,'undefboundaryNodesCoords','.csv')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
undefInterfaceNodeSetsData = [[-1,0,'PART-1-1','FIBERSURFACE-NODES'],
[-1,0,'PART-1-1','MATRIXSURFACEATFIBERINTERFACE-NODES']]
try:
extractAndSaveNodesCoordinates(odb,undefInterfaceNodeSetsData,csvfolder,'undeffiberInterfaceNodesCoords','.csv')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - get undeformed nodes
#=======================================================================
#=======================================================================
# BEGIN - get fiber and matrix elements and nodes subsets
#=======================================================================
writeLineToLogFile(logfile,'a','... done.',True)
try:
fiberNodes = getSingleNodeSet(odb,'PART-1-1','FIBER-NODES')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
try:
matrixNodes = getSingleNodeSet(odb,'PART-1-1','MATRIX-NODES')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
try:
fiberElements = getSingleElementSet(odb,'PART-1-1','FIBER-ELEMENTS')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
try:
matrixElements = getSingleElementSet(odb,'PART-1-1','MATRIX-ELEMENTS')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - get fiber and matrix elements and nodes subsets
#=======================================================================
#=======================================================================
# BEGIN - get displacements
#=======================================================================
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Get displacements in the entire model...',True)
try:
extractAndSaveFieldOutput(odb,-1,-1,csvfolder,'all-displacements','.csv','U')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Get displacements in fiber subset...',True)
try:
extractAndSaveFieldOutput(odb,-1,-1,csvfolder,'fibersubset-displacements','.csv','U',fiberNodes)
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Get displacements in matrix subset...',True)
try:
extractAndSaveFieldOutput(odb,-1,-1,csvfolder,'matrixsubset-displacements','.csv','U',matrixNodes)
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - get displacements
#=======================================================================
#=======================================================================
# BEGIN - get strains
#=======================================================================
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Get strains in the entire model...',True)
try:
extractAndSaveFieldOutput(odb,-1,-1,csvfolder,'all-elasticstrains','.csv','EE')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Get strains in fiber subset...',True)
try:
extractAndSaveFieldOutput(odb,-1,-1,csvfolder,'fibersubset-elasticstrains','.csv','EE',fiberElements)
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Get strains in matrix subset...',True)
try:
extractAndSaveFieldOutput(odb,-1,-1,csvfolder,'matrixsubset-elasticstrains','.csv','EE',matrixElements)
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - get strains
#=======================================================================
#=======================================================================
# BEGIN - get stresses
#=======================================================================
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Get stresses in the entire model...',True)
try:
extractAndSaveFieldOutput(odb,-1,-1,csvfolder,'all-elasticstresses','.csv','S')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Get stresses in fiber subset...',True)
try:
extractAndSaveFieldOutput(odb,-1,-1,csvfolder,'fibersubset-elasticstresses','.csv','S',fiberElements)
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Get stresses in matrix subset...',True)
try:
extractAndSaveFieldOutput(odb,-1,-1,csvfolder,'matrixsubset-elasticstresses','.csv','S',matrixElements)
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - get stresses
#=======================================================================
#=======================================================================
# BEGIN - get displacement and reaction force at boundary
#=======================================================================
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Get displacement and reaction force at boundary...',True)
try:
meanleftdisp,totalleftforce = getDispVsReactionOnBoundarySubset(odb,-1,-1,'PART-1-1','LEFTSIDE-NODES-WITH-CORNERS',0)
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
try:
meanrightdisp,totalrightforce = getDispVsReactionOnBoundarySubset(odb,-1,-1,'PART-1-1','RIGHTSIDE-NODES-WITH-CORNERS',0)
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
try:
with open(join(csvfolder,'dispVSreactionforce.csv'),'w') as csv:
csv.write('TABLE\n')
csv.write('SIDE, U1, RF1\n')
csv.write('RIGHT, ' + str(meanrightdisp) + ', ' + str(totalrightforce) + '\n')
csv.write('LEFT, ' + str(meanleftdisp) + ', ' + str(totalleftforce) + '\n')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - get displacement and reaction force at boundary
#=======================================================================
#=======================================================================
# BEGIN - get interfaces
#=======================================================================
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Get interfaces...',True)
try:
master = getSingleNodeSet(odb,'PART-1-1','FIBERSURFACE-NODES')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
try:
slave = getSingleNodeSet(odb,'PART-1-1','MATRIXSURFACEATFIBERINTERFACE-NODES')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - get interfaces
#=======================================================================
#=======================================================================
# BEGIN - get stresses at interface (on slave and master)
#=======================================================================
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Get stresses at interface (on slave and master)...',True)
# on master
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','...on master...',True)
# get values
try:
cstatusOnMaster = getFieldOutput(odb,-1,-1,'CSTATUS',master)
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
try:
cpressOnMaster = getFieldOutput(odb,-1,-1,'CPRESS',master)
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
try:
cshearOnMaster = getFieldOutput(odb,-1,-1,'CSHEARF',master)
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
try:
cshearfOnMaster = getFieldOutput(odb,-1,-1,'CSHEAR1',master)
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
# write to file
try:
toWrite = []
for value in cstatusOnMaster.values:
if 'NODAL' in str(value.position):
pos = nodes
typeOfVar = 'NODAL'
elif 'INTEGRATION_POINT' in str(value.position):
pos = intpoints
typeOfVar = 'INTEGRATION_POINT'
toWrite.append([typeOfVar,str(value.nodeLabel),str(pos[str(value.nodeLabel)][0]),str(pos[str(value.nodeLabel)][1]),str(numpy.sqrt(numpy.power(pos[str(value.nodeLabel)][0],2)+numpy.power(pos[str(value.nodeLabel)][1],2))),str(numpy.arctan2(pos[str(value.nodeLabel)][1],pos[str(value.nodeLabel)][0])* 180/numpy.pi),str(value.data),'0','0','0','0'])
for value in cpressOnMaster.values:
posit = -1
for k,item in enumerate(toWrite):
if item[1]==str(value.nodeLabel):
posit = k
break
if posit>-1:
toWrite[posit][7] = str(value.data)
else:
if 'NODAL' in str(value.position):
pos = nodes
typeOfVar = 'NODAL'
elif 'INTEGRATION_POINT' in str(value.position):
pos = intpoints
typeOfVar = 'INTEGRATION_POINT'
toWrite.append([typeOfVar,str(value.nodeLabel),str(pos[str(value.nodeLabel)][0]),str(pos[str(value.nodeLabel)][1]),str(numpy.sqrt(numpy.power(pos[str(value.nodeLabel)][0],2)+numpy.power(pos[str(value.nodeLabel)][1],2))),str(numpy.arctan2(pos[str(value.nodeLabel)][1],pos[str(value.nodeLabel)][0])* 180/numpy.pi),'0',str(value.data),'0','0','0'])
for value in cshearOnMaster.values:
posit = -1
for k,item in enumerate(toWrite):
if item[1]==str(value.nodeLabel):
posit = k
break
if posit>-1:
toWrite[posit][8] = str(value.data[0])
toWrite[posit][9] = str(value.data[1])
else:
if 'NODAL' in str(value.position):
pos = nodes
typeOfVar = 'NODAL'
elif 'INTEGRATION_POINT' in str(value.position):
pos = intpoints
typeOfVar = 'INTEGRATION_POINT'
toWrite.append([typeOfVar,str(value.nodeLabel),str(pos[str(value.nodeLabel)][0]),str(pos[str(value.nodeLabel)][1]),str(numpy.sqrt(numpy.power(pos[str(value.nodeLabel)][0],2)+numpy.power(pos[str(value.nodeLabel)][1],2))),str(numpy.arctan2(pos[str(value.nodeLabel)][1],pos[str(value.nodeLabel)][0])* 180/numpy.pi),'0','0',str(value.data[0]),str(value.data[1]),'0'])
for value in cshearfOnMaster.values:
posit = -1
for k,item in enumerate(toWrite):
if item[1]==str(value.nodeLabel):
posit = k
break
if posit>-1:
toWrite[posit][10] = str(value.data)
else:
if 'NODAL' in str(value.position):
pos = nodes
typeOfVar = 'NODAL'
elif 'INTEGRATION_POINT' in str(value.position):
pos = intpoints
typeOfVar = 'INTEGRATION_POINT'
toWrite.append([typeOfVar,str(value.nodeLabel),str(pos[str(value.nodeLabel)][0]),str(pos[str(value.nodeLabel)][1]),str(numpy.sqrt(numpy.power(pos[str(value.nodeLabel)][0],2)+numpy.power(pos[str(value.nodeLabel)][1],2))),str(numpy.arctan2(pos[str(value.nodeLabel)][1],pos[str(value.nodeLabel)][0])* 180/numpy.pi),'0','0','0','0',str(value.data)])
with open(join(csvfolder,'stressesOnMaster.csv'),'w') as csv:
csv.write('SCATTER PLOT\n')
csv.write('NODE TYPE, NODE LABEL, X, Y, R, THETA [°], CSTATUS, CPRESS, CSHEAR1, CSHEAR2, CSHEARFRIC1\n')
for item in toWrite:
csv.write(item[0] + ', ' + item[1] + ', ' + item[2] + ', ' + item[3] + ', ' + item[4] + ', ' + item[5] + ', ' + item[6] + ', ' + item[7] + ', ' + item[8] + ', ' + item[9] + ', ' + item[10] + '\n')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','...done...',True)
# on slave
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','...on slave...',True)
# get values
try:
cstatusOnSlave = getFieldOutput(odb,-1,-1,'CSTATUS',slave)
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','...done...',True)
try:
cpressOnSlave = getFieldOutput(odb,-1,-1,'CPRESS',slave)
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','...done...',True)
try:
cshearOnSlave = getFieldOutput(odb,-1,-1,'CSHEARF',slave)
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','...done...',True)
try:
cshearfOnSlave = getFieldOutput(odb,-1,-1,'CSHEAR1',slave)
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','...done...',True)
# write to file
try:
toWrite = []
for value in cstatusOnSlave.values:
if 'NODAL' in str(value.position):
pos = nodes
typeOfVar = 'NODAL'
elif 'INTEGRATION_POINT' in str(value.position):
pos = intpoints
typeOfVar = 'INTEGRATION_POINT'
toWrite.append([typeOfVar,str(value.nodeLabel),str(pos[str(value.nodeLabel)][0]),str(pos[str(value.nodeLabel)][1]),str(numpy.sqrt(numpy.power(pos[str(value.nodeLabel)][0],2)+numpy.power(pos[str(value.nodeLabel)][1],2))),str(numpy.arctan2(pos[str(value.nodeLabel)][1],pos[str(value.nodeLabel)][0])* 180/numpy.pi),str(value.data),'0','0','0','0'])
for value in cpressOnSlave.values:
posit = -1
for k,item in enumerate(toWrite):
if item[1]==str(value.nodeLabel):
posit = k
break
if posit>-1:
toWrite[posit][7] = str(value.data)
else:
if 'NODAL' in str(value.position):
pos = nodes
typeOfVar = 'NODAL'
elif 'INTEGRATION_POINT' in str(value.position):
pos = intpoints
typeOfVar = 'INTEGRATION_POINT'
toWrite.append([typeOfVar,str(value.nodeLabel),str(pos[str(value.nodeLabel)][0]),str(pos[str(value.nodeLabel)][1]),str(numpy.sqrt(numpy.power(pos[str(value.nodeLabel)][0],2)+numpy.power(pos[str(value.nodeLabel)][1],2))),str(numpy.arctan2(pos[str(value.nodeLabel)][1],pos[str(value.nodeLabel)][0])* 180/numpy.pi),'0',str(value.data),'0','0','0'])
for value in cshearOnSlave.values:
posit = -1
for k,item in enumerate(toWrite):
if item[1]==str(value.nodeLabel):
posit = k
break
if posit>-1:
toWrite[posit][8] = str(value.data[0])
toWrite[posit][9] = str(value.data[1])
else:
if 'NODAL' in str(value.position):
pos = nodes
typeOfVar = 'NODAL'
elif 'INTEGRATION_POINT' in str(value.position):
pos = intpoints
typeOfVar = 'INTEGRATION_POINT'
toWrite.append([typeOfVar,str(value.nodeLabel),str(pos[str(value.nodeLabel)][0]),str(pos[str(value.nodeLabel)][1]),str(numpy.sqrt(numpy.power(pos[str(value.nodeLabel)][0],2)+numpy.power(pos[str(value.nodeLabel)][1],2))),str(numpy.arctan2(pos[str(value.nodeLabel)][1],pos[str(value.nodeLabel)][0])* 180/numpy.pi),'0','0',str(value.data[0]),str(value.data[1]),'0'])
for value in cshearfOnSlave.values:
posit = -1
for k,item in enumerate(toWrite):
if item[1]==str(value.nodeLabel):
posit = k
break
if posit>-1:
toWrite[posit][10] = str(value.data)
else:
if 'NODAL' in str(value.position):
pos = nodes
typeOfVar = 'NODAL'
elif 'INTEGRATION_POINT' in str(value.position):
pos = intpoints
typeOfVar = 'INTEGRATION_POINT'
toWrite.append([typeOfVar,str(value.nodeLabel),str(pos[str(value.nodeLabel)][0]),str(pos[str(value.nodeLabel)][1]),str(numpy.sqrt(numpy.power(pos[str(value.nodeLabel)][0],2)+numpy.power(pos[str(value.nodeLabel)][1],2))),str(numpy.arctan2(pos[str(value.nodeLabel)][1],pos[str(value.nodeLabel)][0])* 180/numpy.pi),'0','0','0','0',str(value.data)])
with open(join(csvfolder,'stressesOnSlave.csv'),'w') as csv:
csv.write('SCATTER PLOT\n')
csv.write('NODE TYPE, NODE LABEL, X, Y, R, THETA [°], CSTATUS, CPRESS, CSHEAR1, CSHEAR2, CSHEARFRIC1\n')
for item in toWrite:
csv.write(item[0] + ', ' + item[1] + ', ' + item[2] + ', ' + item[3] + ', ' + item[4] + ', ' + item[5] + ', ' + item[6] + ', ' + item[7] + ', ' + item[8] + ', ' + item[9] + ', ' + item[10] + '\n')
except Exception,e:
writeErrorToLogFile(logfile,'a',Exception,e,True)
sys.exc_clear()
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - get stresses at interface (on slave and master)
#=======================================================================
#=======================================================================
# BEGIN - get displacements at interface (on slave only)
#=======================================================================
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Get displacements at interface (on slave only)...',True)
# get values
copenOnSlave = getFieldOutput(odb,-1,-1,'COPEN',slave)
cslipOnSlave = getFieldOutput(odb,-1,-1,'CSLIP1',slave)
# write to file
toWrite = []
for value in copenOnSlave.values:
if 'NODAL' in str(value.position):
pos = nodes
typeOfVar = 'NODAL'
elif 'INTEGRATION_POINT' in str(value.position):
pos = intpoints
typeOfVar = 'INTEGRATION_POINT'
toWrite.append([typeOfVar,str(value.nodeLabel),str(pos[str(value.nodeLabel)][0]),str(pos[str(value.nodeLabel)][1]),str(numpy.sqrt(numpy.power(pos[str(value.nodeLabel)][0],2)+numpy.power(pos[str(value.nodeLabel)][1],2))),str(numpy.arctan2(pos[str(value.nodeLabel)][1],pos[str(value.nodeLabel)][0])* 180/numpy.pi),str(value.data),'0'])
for value in cslipOnSlave.values:
posit = -1
for k,item in enumerate(toWrite):
if item[1]==str(value.nodeLabel):
posit = k
break
if posit>-1:
toWrite[posit][7] = str(value.data)
else:
if 'NODAL' in str(value.position):
pos = nodes
typeOfVar = 'NODAL'
elif 'INTEGRATION_POINT' in str(value.position):
pos = intpoints
typeOfVar = 'INTEGRATION_POINT'
toWrite.append([typeOfVar,str(value.nodeLabel),str(pos[str(value.nodeLabel)][0]),str(pos[str(value.nodeLabel)][1]),str(numpy.sqrt(numpy.power(pos[str(value.nodeLabel)][0],2)+numpy.power(pos[str(value.nodeLabel)][1],2))),str(numpy.arctan2(pos[str(value.nodeLabel)][1],pos[str(value.nodeLabel)][0])* 180/numpy.pi),'0',str(value.data)])
with open(join(csvfolder,'displacementsOnSlave.csv'),'w') as csv:
csv.write('SCATTER PLOT\n')
csv.write('NODE TYPE, NODE LABEL, X, Y, R, THETA [°], COPEN, CSLIP\n')
for item in toWrite:
csv.write(item[0] + ', ' + item[1] + ', ' + item[2] + ', ' + item[3] + ', ' + item[4] + ', ' + item[5] + ', ' + item[6] + ', ' + item[7] + '\n')
toWrite = []
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - get displacements at interface (on slave only)
#=======================================================================
#=======================================================================
# BEGIN - get stresses at boundaries
#=======================================================================
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Get stresses at boundaries...',True)
leftSide = getSingleNodeSet(odb,'PART-1-1','LEFTSIDE-NODES-WITH-CORNERS')
rightSide = getSingleNodeSet(odb,'PART-1-1','RIGHTSIDE-NODES-WITH-CORNERS')
extractAndSaveFieldOutput(odb,-1,-1,csvfolder,'StressesOnRightSide','.csv','S',rightSide,3)
extractAndSaveFieldOutput(odb,-1,-1,csvfolder,'StressesOnLeftSide','.csv','S',leftSide,3)
rightStresses = getFieldOutput(odb,-1,-1,'S',rightSide,3)
leftStresses = getFieldOutput(odb,-1,-1,'S',leftSide,3)
maxRight = rightStresses.values[0].data[0]
minRight = rightStresses.values[0].data[0]
meanRight = 0
countRight = 0
for value in rightStresses.values:
if value.data[0]>maxRight:
maxRight = value.data[0]
elif value.data[0]<minRight:
minRight = value.data[0]
meanRight += value.data[0]
countRight += 1
meanRight /=countRight
maxLeft = leftStresses.values[0].data[0]
minLeft = leftStresses.values[0].data[0]
meanLeft = 0
countLeft = 0
for value in leftStresses.values:
if value.data[0]>maxLeft:
maxLeft = value.data[0]
elif value.data[0]<minLeft:
minLeft = value.data[0]
meanLeft += value.data[0]
countLeft += 1
meanLeft /=countLeft
sigmaInf = 0.5*(meanRight+meanLeft)
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - get stresses at boundaries
#=======================================================================
#=======================================================================
# BEGIN - get simulation units of measurement and material and geometry
#=======================================================================
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Get simulation''s units of measurement, material and geometry...',True)
with open(join(csvfolder,project + '.csv')) as csv:
lines = csv.readlines()
for l,line in enumerate(lines):
if 'Fiber radius Rf' in line:
Rf = float(line.replace('\n','').replace('-','').replace('*','').split(',')[1]);
elif 'Applied Axial Strain' in line:
epsxx = float(line.replace('\n','').replace('-','').replace('*','').split(',')[1])
elif 'Matrix' in line:
matrixType = line.replace('\n','').replace('-','').replace('*','').split(',')[1]
elif 'length, SI' in line:
lengthFactor = 1.0/float(line.replace('**','').replace('--','').replace('\n','').split(',')[2])
elif 'energy release rate, SI' in line:
enrrtFactor = 1.0/float(line.replace('**','').replace('--','').replace('\n','').split(',')[2])
elif 'force, SI' in line:
forceFactor = 1.0/float(line.replace('**','').replace('--','').replace('\n','').split(',')[2])
elif 'pressure/stress, SI' in line:
stressFactor = 1.0/float(line.replace('**','').replace('--','').replace('\n','').split(',')[2])
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - get simulation units of measurement and material and geometry
#=======================================================================
#=======================================================================
# BEGIN - compute G0
#=======================================================================
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Compute G0...',True)
if 'Epoxy' in matrixType:
matrix = 'EP'
elif 'HDPE' in matrixType:
matrix = 'HDPE'
with open(join(matdatafolder,matrix + '.csv')) as mat:
lines = mat.readlines()
factors = lines[1].replace('\n','').split(',')
elprops = lines[2].replace('\n','').split(',')
Em = float(factors[1])*float(elprops[1])
num = float(factors[4])*float(elprops[4])
Gm = float(factors[3])*float(elprops[3])
Rf *= lengthFactor #recast in SI units
sigmaInf *= stressFactor #recast in SI units
G0 = numpy.pi*Rf*sigmaInf*sigmaInf*(1+(3.0-4.0*num))/(8.0*Gm)
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - compute G0
#=======================================================================
#=======================================================================
# BEGIN - get J integrals
#=======================================================================
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Get J-integrals...',True)
isJINTcomputed = False
with open(join(wd,project,'solver',project + '.dat'),'r') as dat:
lines = dat.readlines()
lineIndex = 0
for l,line in enumerate(lines):
if 'J - I N T E G R A L E S T I M A T E S' in line:
isJINTcomputed = True
lineIndex = l
break
if isJINTcomputed:
JINTs = []
JINToverG0s = []
startIndex = 0
for l,line in enumerate(lines[lineIndex:]):
if 'INCREMENT' in line and 'SUMMARY' in line:
startIndex = lineIndex + l
for l,line in enumerate(lines[startIndex:]):
if 'J - I N T E G R A L E S T I M A T E S' in line:
temp1 = filter(lambda x: x!=' ' and x!='', lines[startIndex+l+11].replace('\n','').split(' '))[2:]
temp2 = filter(lambda x: x!=' ' and x!='', lines[startIndex+l+12].replace('\n','').split(' '))
temp3 = filter(lambda x: x!=' ' and x!='', lines[startIndex+l+13].replace('\n','').split(' '))
temp4 = filter(lambda x: x!=' ' and x!='', lines[startIndex+l+14].replace('\n','').split(' '))
try:
setName = filter(lambda x: x!=' ' and x!='', lines[startIndex+l+20].replace('\n','').split(' '))[-1]
except Exception:
setName = filter(lambda x: x!=' ' and x!='', lines[startIndex+l+24].replace('\n','').split(' '))[-1]
if setName=='' or setName ==' ':
setName = filter(lambda x: x!=' ' and x!='', lines[startIndex+l+24].replace('\n','').split(' '))[-1]
values = []
valuesOverG0 = []
values.append(setName)
valuesOverG0.append(setName)
for value in temp1:
values.append(enrrtFactor*float(value))
valuesOverG0.append(enrrtFactor*float(value)/G0)
for value in temp2:
values.append(enrrtFactor*float(value))
valuesOverG0.append(enrrtFactor*float(value)/G0)
for value in temp3:
values.append(enrrtFactor*float(value))
valuesOverG0.append(enrrtFactor*float(value)/G0)
for value in temp4:
values.append(enrrtFactor*float(value))
valuesOverG0.append(enrrtFactor*float(value)/G0)
JINTs.append(values)
JINToverG0s.append(valuesOverG0)
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - get J integrals
#=======================================================================
#=======================================================================
# BEGIN - VCCT in forces
#=======================================================================
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Compute energy release rates with VCCT in forces...',True)
crackTips = []
with open(join(csvfolder,project + '.csv')) as csv:
lines = csv.readlines()
for line in lines:
if 'Nalpha' in line:
N1 = int(line.replace('\n','').split(',')[-1])
elif 'Nbeta' in line:
N2 = int(line.replace('\n','').split(',')[-1])
elif 'Ngamma' in line:
N3 = int(line.replace('\n','').split(',')[-1])
elif 'Ndelta' in line:
N4 = int(line.replace('\n','').split(',')[-1])
elif 'Neps' in line:
N5 = int(line.replace('\n','').split(',')[-1])
elif 'Fiber radius Rf' in line:
Rf = float(line.replace('\n','').split(',')[-1])
elif 'Angular discretization at interface' in line:
deltaC = float(line.replace('\n','').replace('deg','').split(',')[-1])*numpy.pi/180.0
elif 'Crack Angular Aperture' in line:
deltaTheta = float(line.replace('\n','').replace('deg','').split(',')[-1])
if deltaTheta>0 and deltaTheta<180:
matrixCrackTip1 = getSingleNodeSet(odb,'PART-1-1','MATRIXCRACKTIP1-NODE')
fiberCrackTip1 = getSingleNodeSet(odb,'PART-1-1','FIBERCRACKTIP1-NODE')
matrixCrackTip2 = getSingleNodeSet(odb,'PART-1-1','MATRIXCRACKTIP2-NODE')
fiberCrackTip2 = getSingleNodeSet(odb,'PART-1-1','FIBERCRACKTIP2-NODE')
gamma2 = getSingleNodeSet(odb,'PART-1-1','GAMMA4-NODES')
matrixCrackTip1Label = int(lastFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=matrixCrackTip1).values[0].nodeLabel)
matrixCrackTip2Label = int(lastFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=matrixCrackTip2).values[0].nodeLabel)
gamma2Labels = []
for value in lastFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=gamma2).values:
gamma2Labels.append(int(value.nodeLabel))
if matrixCrackTip1Label==4*N1*(N4+N5+1):
if 4*N1*(N4+N5) in gamma2Labels:
preMatrixCrackTip1Label = 4*N1*(N4+N5)
else:
preMatrixCrackTip1Label = matrixCrackTip1Label-1
elif matrixCrackTip1Label==4*N1*(N4+N5):
if matrixCrackTip1Label+1 in gamma2Labels:
preMatrixCrackTip1Label = matrixCrackTip1Label+1
else:
preMatrixCrackTip1Label = 4*N1*(N4+N5+1)
else:
if matrixCrackTip1Label+1 in gamma2Labels:
preMatrixCrackTip1Label = matrixCrackTip1Label+1
else:
preMatrixCrackTip1Label = matrixCrackTip1Label-1
if matrixCrackTip2Label==4*N1*(N4+N5+1):
if matrixCrackTip2Label-1 in gamma2Labels:
preMatrixCrackTip1Label = matrixCrackTip2Label-1
else:
preMatrixCrackTip1Label = 4*N1*(N4+N5)
elif matrixCrackTip2Label==4*N1*(N4+N5):
if 4*N1*(N4+N5+1) in gamma2Labels:
preMatrixCrackTip1Label = 4*N1*(N4+N5+1)
else:
preMatrixCrackTip1Label = matrixCrackTip2Label+1
else:
if matrixCrackTip2Label-1 in gamma2Labels:
preMatrixCrackTip2Label = matrixCrackTip2Label-1
else:
preMatrixCrackTip2Label = matrixCrackTip2Label+1
preMatrixCrackTip1 = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(preMatrixCrackTip1Label)
preMatrixCrackTip2 = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(preMatrixCrackTip2Label)
preFiberCrackTip1 = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(preMatrixCrackTip1Label+4*N1)
preFiberCrackTip2 = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(preMatrixCrackTip2Label+4*N1)
undeftip1 = firstFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=matrixCrackTip1).values[0]
deftip1 = lastFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=matrixCrackTip1).values[0]
undeftip2 = firstFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=matrixCrackTip2).values[0]
deftip2 = lastFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=matrixCrackTip2).values[0]
#undefmatrixpretip1 = firstFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=preMatrixCrackTip1).values[0]#unused
#undefmatrixpretip2 = firstFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=preMatrixCrackTip2).values[0]#unused
undeffiberpretip1 = firstFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=preFiberCrackTip1).values[0]
undeffiberpretip2 = firstFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=preFiberCrackTip2).values[0]
#defmatrixpretip1 = lastFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=preMatrixCrackTip1).values[0]#unused
defmatrixpretip2 = lastFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=preMatrixCrackTip2).values[0]
deffiberpretip1 = lastFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=preFiberCrackTip1).values[0]
deffiberpretip2 = lastFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=preFiberCrackTip2).values[0]
dispmatrixpretip1 = lastFrame.fieldOutputs['U'].getSubset(position=NODAL).getSubset(region=preMatrixCrackTip1).values[0]
dispmatrixpretip2 = lastFrame.fieldOutputs['U'].getSubset(position=NODAL).getSubset(region=preMatrixCrackTip2).values[0]
dispfiberpretip1 = lastFrame.fieldOutputs['U'].getSubset(position=NODAL).getSubset(region=preFiberCrackTip1).values[0]
dispfiberpretip2 = lastFrame.fieldOutputs['U'].getSubset(position=NODAL).getSubset(region=preFiberCrackTip2).values[0]
beta1 = numpy.arctan2(undeftip1.data[1],undeftip1.data[0])
beta2 = numpy.arctan2(undeftip2.data[1],undeftip2.data[0])
#writeLineToLogFile(logfile,'a','orientation defined')
# xdispcracktip1 = (defmatrixpretip1.data[0]-undefmatrixpretip1.data[0]) - (deffiberpretip1.data[0]-undeffiberpretip1.data[0])
# zdispcracktip1 = (defmatrixpretip1.data[1]-undefmatrixpretip1.data[1]) - (deffiberpretip1.data[1]-undeffiberpretip1.data[1])
#
# xdispcracktip2 = (defmatrixpretip2.data[0]-undefmatrixpretip2.data[0]) - (deffiberpretip2.data[0]-undeffiberpretip2.data[0])
# zdispcracktip2 = (defmatrixpretip2.data[1]-undefmatrixpretip2.data[1]) - (deffiberpretip2.data[1]-undeffiberpretip2.data[1])
xdispcracktip1 = dispmatrixpretip1.data[0] - dispfiberpretip1.data[0]
zdispcracktip1 = dispmatrixpretip1.data[1] - dispfiberpretip1.data[1]
xdispcracktip2 = dispmatrixpretip2.data[0] - dispfiberpretip2.data[0]
zdispcracktip2 = dispmatrixpretip2.data[1] - dispfiberpretip2.data[1]
rdispcracktip1 = numpy.cos(beta1)*xdispcracktip1 + numpy.sin(beta1)*zdispcracktip1
thetadispcracktip1 = -numpy.sin(beta1)*xdispcracktip1 + numpy.cos(beta1)*zdispcracktip1
rdispcracktip2 = numpy.cos(beta2)*xdispcracktip2 + numpy.sin(beta2)*zdispcracktip2
thetadispcracktip2 = -numpy.sin(beta2)*xdispcracktip2 + numpy.cos(beta2)*zdispcracktip2
#writeLineToLogFile(logfile,'a','disps rotated:')
try:
dummy1Node = odb.rootAssembly.instances['PART-1-1'].nodeSets['DUMMY1-NODE']
dummy2Node = odb.rootAssembly.instances['PART-1-1'].nodeSets['DUMMY2-NODE']
isDummy = True
#writeLineToLogFile(logfile,'a','is dummy')
except Exception,error:
#writeLineToLogFile(logfile,'a',str(Exception))
#print(str(error))
isDummy = False
#print('is not dummy')
sys.exc_clear()
#sys.exit(2)
if isDummy:
xRFcracktip1 = lastFrame.fieldOutputs['RF'].getSubset(region=dummy1Node,position=NODAL).values[0].data[0]
zRFcracktip1 = lastFrame.fieldOutputs['RF'].getSubset(region=dummy1Node,position=NODAL).values[0].data[1]
xRFcracktip2 = lastFrame.fieldOutputs['RF'].getSubset(region=dummy2Node,position=NODAL).values[0].data[0]
zRFcracktip2 = lastFrame.fieldOutputs['RF'].getSubset(region=dummy2Node,position=NODAL).values[0].data[1]
#print('got reaction forces')
else:
connectorElcracktip1 = odb.rootAssembly.instances['PART-1-1'].elementSets['CONNECTORCRACKTIP1-ELEMENT']
connectorElcracktip1label = connectorElcracktip1.elements[0].label
connectorElcracktip2 = odb.rootAssembly.instances['PART-1-1'].elementSets['CONNECTORCRACKTIP2-ELEMENT']
connectorElcracktip2label = connectorElcracktip2.elements[0].label
for region in odb.steps[odb.steps.keys()[-1]].historyRegions.keys():
if 'Node' not in region:
if str(connectorElcracktip1label) in region:
connectorElcracktip1histregion = region
elif str(connectorElcracktip2label) in region:
connectorElcracktip2histregion = region
crf1key = ''
crf2key = ''
for key in odb.steps[odb.steps.keys()[-1]].historyRegions[connectorElcracktip1histregion].historyOutputs.keys():
if 'CRF1' in key:
crf1key = key
elif 'CRF2' in key:
crf2key = key
if len(crf1key)>0 and len(crf2key)>0:
crf1Hist = odb.steps[odb.steps.keys()[-1]].historyRegions[connectorElcracktip1histregion].historyOutputs[crf1key]
crf2Hist = odb.steps[odb.steps.keys()[-1]].historyRegions[connectorElcracktip1histregion].historyOutputs[crf2key]
xRFcracktip1 = crf1Hist.data[-1][1]
zRFcracktip1 = crf2Hist.data[-1][1]
crf1key = ''
crf2key = ''
for key in odb.steps[odb.steps.keys()[-1]].historyRegions[connectorElcracktip2histregion].historyOutputs.keys():
if 'CRF1' in key:
crf1key = key
elif 'CRF2' in key:
crf2key = key
if len(crf1key)>0 and len(crf2key)>0:
crf1Hist = odb.steps[odb.steps.keys()[-1]].historyRegions[connectorElcracktip2histregion].historyOutputs[crf1key]
crf2Hist = odb.steps[odb.steps.keys()[-1]].historyRegions[connectorElcracktip2histregion].historyOutputs[crf2key]
xRFcracktip2 = crf1Hist.data[-1][1]
zRFcracktip2 = crf2Hist.data[-1][1]
rRFcracktip1 = numpy.cos(beta1)*xRFcracktip1 + numpy.sin(beta1)*zRFcracktip1
thetaRFcracktip1 = -numpy.sin(beta1)*xRFcracktip1 + numpy.cos(beta1)*zRFcracktip1
rRFcracktip2 = numpy.cos(beta2)*xRFcracktip2 + numpy.sin(beta2)*zRFcracktip2
thetaRFcracktip2 = -numpy.sin(beta2)*xRFcracktip2 + numpy.cos(beta2)*zRFcracktip2
#writeLineToLogFile(logfile,'a','forces rotated')
G1cracktip1 = enrrtFactor*numpy.abs(0.5*(rRFcracktip1*rdispcracktip1)/(Rf*deltaC))
G2cracktip1 = enrrtFactor*numpy.abs(0.5*(thetaRFcracktip1*thetadispcracktip1)/(Rf*deltaC))
G1cracktip2 = enrrtFactor*numpy.abs(0.5*(rRFcracktip2*rdispcracktip2)/(Rf*deltaC))
G2cracktip2 = enrrtFactor*numpy.abs(0.5*(thetaRFcracktip2*thetadispcracktip2)/(Rf*deltaC))
#writeLineToLogFile(logfile,'a','Gs calculated')
crackTip1 = [undeftip1.nodeLabel,
lengthFactor*undeftip1.data[0], lengthFactor*undeftip1.data[1],
lengthFactor*numpy.sqrt(numpy.power(undeftip1.data[0],2)+numpy.power(undeftip1.data[1],2)), numpy.arctan2(undeftip1.data[1],undeftip1.data[0])*180/numpy.pi,
lengthFactor*deftip1.data[0], lengthFactor*deftip1.data[1],
lengthFactor*numpy.sqrt(numpy.power(deftip1.data[0],2)+numpy.power(deftip1.data[1],2)), numpy.arctan2(deftip1.data[1],deftip1.data[0])*180/numpy.pi,
num, Gm, deltaC*180/numpy.pi, lengthFactor*rdispcracktip1, lengthFactor*thetadispcracktip1, forceFactor*rRFcracktip1, forceFactor*thetaRFcracktip1,
epsxx*Em/(1-num*num), sigmaInf, numpy.pi*lengthFactor*Rf*(epsxx*Em/(1-num*num))*(epsxx*Em/(1-num*num))*(1+(3.0-4.0*num))/(8.0*Gm), G0,
G1cracktip1, G2cracktip1, G1cracktip1+G2cracktip1, G1cracktip1/G0, G2cracktip1/G0, (G1cracktip1+G2cracktip1)/G0]
crackTip2 = [undeftip2.nodeLabel,
lengthFactor*undeftip2.data[0], lengthFactor*undeftip2.data[1],
lengthFactor*numpy.sqrt(numpy.power(undeftip2.data[0],2)+numpy.power(undeftip2.data[1],2)), numpy.arctan2(undeftip2.data[1],undeftip2.data[0])*180/numpy.pi,
lengthFactor*deftip2.data[0], lengthFactor*deftip2.data[1],
lengthFactor*numpy.sqrt(numpy.power(deftip2.data[0],2)+numpy.power(deftip2.data[1],2)), numpy.arctan2(deftip2.data[1],deftip2.data[0])*180/numpy.pi,
num, Gm, deltaC*180/numpy.pi, lengthFactor*rdispcracktip2, lengthFactor*thetadispcracktip2, forceFactor*rRFcracktip2, forceFactor*thetaRFcracktip2,
epsxx*Em/(1-num*num), sigmaInf, numpy.pi*lengthFactor*Rf*(epsxx*Em/(1-num*num))*(epsxx*Em/(1-num*num))*(1+(3.0-4.0*num))/(8.0*Gm), G0,
G1cracktip2, G2cracktip2, G1cracktip2+G2cracktip2, G1cracktip2/G0, G2cracktip2/G0, (G1cracktip2+G2cracktip2)/G0]
for tip in JINTs:
if 'CONTOURINTEGRALCRACKTIP1-NODES' in tip[0]:
for value in tip[1:]:
crackTip1.append(value)
else:
for value in tip[1:]:
crackTip2.append(value)
for tip in JINToverG0s:
if 'CONTOURINTEGRALCRACKTIP1-NODES' in tip[0]:
for value in tip[1:]:
crackTip1.append(value)
else:
for value in tip[1:]:
crackTip2.append(value)
crackTips.append(crackTip1)
crackTips.append(crackTip2)
writeLineToLogFile(logfile,'a','data saved in list',True)
line = 'NODE LABEL, X0 [m], Y0 [m], R0 [m], THETA0 [°], X [m], Y [m], R [m], THETA [°], nu [-], mu [Pa], deltaC [°], Disp_R, Disp_theta, RF_R, RF_theta, sigma_Inf_UNDAMAGED [Pa], sigma_Inf_DAMAGED [Pa], G0_UNDAMAGED [J/m^2], G0_DAMAGED [J/m^2], GI_M-VCCT [J/m^2], GII_M-VCCT [J/m^2], GTOT_M-VCCT [J/m^2], GI_M-VCCT/G0 [-], GII_M-VCCT/G0 [-], GTOT_M-VCCT/G0 [-]'
if len(JINTs)>0 and len(JINTs[0])>0:
numJINTs = len(JINTs[0])-1
secondline = ', , , , , , , , , , , , , , , , , , , , , , , , , '
line += ', '
secondline += ', '
line += 'GTOT_ABQ-JINT [J/m^2]'
secondline += 'Contour 1'
for j in range(1,numJINTs):
secondline += ', '
secondline += 'Contour ' + str(j)
line += ', '
line += ', '
secondline += ', '
line += 'GTOT_ABQ-JINT/G0 [-]'
secondline += 'Contour 1'
for j in range(1,numJINTs):
secondline += ', '
secondline += 'Contour ' + str(j)
line += ', '
with open(join(csvfolder,'ENRRTs-Summary.csv'),'w') as csv:
csv.write(line + '\n')
csv.write(secondline + '\n')
for tip in crackTips:
line = ''
for v,value in enumerate(tip):
if v>0:
line += ','
line += str(value)
csv.write(line + '\n')
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - VCCT in forces
#=======================================================================
#=======================================================================
# BEGIN - VCCI in stresses (trapezoidal integration for elements of equal length at the interface in the undeformed configuration)
#=======================================================================
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Compute energy release rates with VCCT in stresses...',True)
crackTips = []
if deltaTheta>0 and deltaTheta<180:
# get crack tips' node sets
matrixCrackTip1 = getSingleNodeSet(odb,'PART-1-1','MATRIXCRACKTIP1-NODE')
fiberCrackTip1 = getSingleNodeSet(odb,'PART-1-1','FIBERCRACKTIP1-NODE')
matrixCrackTip2 = getSingleNodeSet(odb,'PART-1-1','MATRIXCRACKTIP2-NODE')
fiberCrackTip2 = getSingleNodeSet(odb,'PART-1-1','FIBERCRACKTIP2-NODE')
#writeLineToLogFile(logfile,'a','crack tips node sets')
# get surface sections' node sets
gamma2 = getSingleNodeSet(odb,'PART-1-1','GAMMA4-NODES')
#writeLineToLogFile(logfile,'a','gamma4 node set')
# get crack tips' node labels
matrixCrackTip1Label = int(lastFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=matrixCrackTip1).values[0].nodeLabel)
matrixCrackTip2Label = int(lastFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=matrixCrackTip2).values[0].nodeLabel)
#writeLineToLogFile(logfile,'a','crack tips node labels')
# get labels of nodes just before the crack tip on matrix side
gamma2Labels = []
for value in lastFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=gamma2).values:
gamma2Labels.append(int(value.nodeLabel))
if matrixCrackTip1Label==4*N1*(N4+N5+1):
if 4*N1*(N4+N5) in gamma2Labels:
preMatrixCrackTip1Label = 4*N1*(N4+N5)
else:
preMatrixCrackTip1Label = matrixCrackTip1Label-1
elif matrixCrackTip1Label==4*N1*(N4+N5):
if matrixCrackTip1Label+1 in gamma2Labels:
preMatrixCrackTip1Label = matrixCrackTip1Label+1
else:
preMatrixCrackTip1Label = 4*N1*(N4+N5+1)
else:
if matrixCrackTip1Label+1 in gamma2Labels:
preMatrixCrackTip1Label = matrixCrackTip1Label+1
else:
preMatrixCrackTip1Label = matrixCrackTip1Label-1
if matrixCrackTip2Label==4*N1*(N4+N5+1):
if matrixCrackTip2Label-1 in gamma2Labels:
preMatrixCrackTip1Label = matrixCrackTip2Label-1
else:
preMatrixCrackTip1Label = 4*N1*(N4+N5)
elif matrixCrackTip2Label==4*N1*(N4+N5):
if 4*N1*(N4+N5+1) in gamma2Labels:
preMatrixCrackTip1Label = 4*N1*(N4+N5+1)
else:
preMatrixCrackTip1Label = matrixCrackTip2Label+1
else:
if matrixCrackTip2Label-1 in gamma2Labels:
preMatrixCrackTip2Label = matrixCrackTip2Label-1
else:
preMatrixCrackTip2Label = matrixCrackTip2Label+1
#writeLineToLogFile(logfile,'a','node labels of crack tips on matrix side')
# get crack tips' coordinates
undeftip1 = firstFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=matrixCrackTip1).values[0]
deftip1 = lastFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=matrixCrackTip1).values[0]
undeftip2 = firstFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=matrixCrackTip2).values[0]
deftip2 = lastFrame.fieldOutputs['COORD'].getSubset(position=NODAL).getSubset(region=matrixCrackTip2).values[0]
#writeLineToLogFile(logfile,'a','crack tips coordinates')
# define the orientation at crack tips
beta1 = numpy.arctan2(undeftip1.data[1],undeftip1.data[0])
beta2 = numpy.arctan2(undeftip2.data[1],undeftip2.data[0])
#writeLineToLogFile(logfile,'a','direction at crack tips')
# compute energy release rates for crack tip 1
dataMatrixSideCrackTip1 = []
dataFiberSideCrackTip1 = []
for elN in range(nEl0,NElMax+DeltaEl,DeltaEl):
psMatrix = []
psFiber = []
if preMatrixCrackTip1Label<matrixCrackTip1Label:
for n in range(0,elN+1,1):
# get matrix node before and after the crack tip
preMatrixNode = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(matrixCrackTip1Label-(elN-n))
postMatrixNode = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(matrixCrackTip1Label+n)
# get matrix node before and after the crack tip
preFiberNode = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(matrixCrackTip1Label-(elN-n)+4*N1)
postFiberNode = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(matrixCrackTip1Label+n+4*N1)
# get displacements on matrix and fiber
dispPreMatrixNode = lastFrame.fieldOutputs['U'].getSubset(position=NODAL).getSubset(region=preMatrixNode).values[0]
dispPreFiberNode = lastFrame.fieldOutputs['U'].getSubset(position=NODAL).getSubset(region=preFiberNode).values[0]
# calculate crack face displacement
xdisp = dispPreMatrixNode.data[0] - dispPreFiberNode.data[0]
zdisp = dispPreMatrixNode.data[1] - dispPreFiberNode.data[1]
# rotate displacements to crack tip local system
rdisp = numpy.cos(beta1)*xdisp + numpy.sin(beta1)*zdisp
thetadisp = -numpy.sin(beta1)*xdisp + numpy.cos(beta1)*zdisp
# get stresses on matrix and fiber
postMatrixStresses = getFieldOutput(odb,-1,-1,'S',postMatrixNode,3)
postFiberStresses = getFieldOutput(odb,-1,-1,'S',postFiberNode,3)
# define stress components on matrix
sxxMatrix = postMatrixStresses.values[0].data[0]
szzMatrix = postMatrixStresses.values[0].data[1]
sxzMatrix = postMatrixStresses.values[0].data[3]
# define stress components on matrix
sxxFiber = postFiberStresses.values[0].data[0]
szzFiber = postFiberStresses.values[0].data[1]
sxzFiber = postFiberStresses.values[0].data[3]
# rotate stress components on matrix
srrMatrix = numpy.power(numpy.cos(beta1),2)*sxxMatrix + 2*numpy.sin(beta1)*numpy.cos(beta1)*sxzMatrix + numpy.power(numpy.sin(beta1),2)*szzMatrix
srthetaMatrix = (sxxMatrix+szzMatrix)*numpy.cos(beta1)*numpy.sin(beta1) + sxzMatrix*(numpy.power(numpy.cos(beta1),2)-numpy.power(numpy.sin(beta1),2))
# rotate stress components on fiber
srrFiber = numpy.power(numpy.cos(beta1),2)*sxxFiber + 2*numpy.sin(beta1)*numpy.cos(beta1)*sxzFiber + numpy.power(numpy.sin(beta1),2)*szzFiber
srthetaFiber = (sxxFiber+szzFiber)*numpy.cos(beta1)*numpy.sin(beta1) + sxzFiber*(numpy.power(numpy.cos(beta1),2)-numpy.power(numpy.sin(beta1),2))
# compute products on matrix
prrMatrix = srrMatrix*rdisp
prthetaMatrix = srthetaMatrix*thetadisp
# compute products on fiber
prrFiber = srrFiber*rdisp
prthetaFiber = srthetaFiber*thetadisp
#save products to array
psMatrix.append([prrMatrix,prthetaMatrix])
psFiber.append([prrFiber,prthetaFiber])
else:
for n in range(0,elN+1,1):
# get matrix node before and after the crack tip
postMatrixNode = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(matrixCrackTip1Label-(elN-n))
preMatrixNode = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(matrixCrackTip1Label+n)
# get matrix node before and after the crack tip
postFiberNode = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(matrixCrackTip1Label-(elN-n)+4*N1)
preFiberNode = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(matrixCrackTip1Label+n+4*N1)
# get displacements on matrix and fiber
dispPreMatrixNode = lastFrame.fieldOutputs['U'].getSubset(position=NODAL).getSubset(region=preMatrixNode).values[0]
dispPreFiberNode = lastFrame.fieldOutputs['U'].getSubset(position=NODAL).getSubset(region=preFiberNode).values[0]
# calculate crack face displacement
xdisp = dispPreMatrixNode.data[0] - dispPreFiberNode.data[0]
zdisp = dispPreMatrixNode.data[1] - dispPreFiberNode.data[1]
# rotate displacements to crack tip local system
rdisp = numpy.cos(beta1)*xdisp + numpy.sin(beta1)*zdisp
thetadisp = -numpy.sin(beta1)*xdisp + numpy.cos(beta1)*zdisp
# get stresses on matrix and fiber
postMatrixStresses = getFieldOutput(odb,-1,-1,'S',postMatrixNode,3)
postFiberStresses = getFieldOutput(odb,-1,-1,'S',postFiberNode,3)
# define stress components on matrix
sxxMatrix = postMatrixStresses.values[0].data[0]
szzMatrix = postMatrixStresses.values[0].data[1]
sxzMatrix = postMatrixStresses.values[0].data[3]
# define stress components on matrix
sxxFiber = postFiberStresses.values[0].data[0]
szzFiber = postFiberStresses.values[0].data[1]
sxzFiber = postFiberStresses.values[0].data[3]
# rotate stress components on matrix
srrMatrix = numpy.power(numpy.cos(beta1),2)*sxxMatrix + 2*numpy.sin(beta1)*numpy.cos(beta1)*sxzMatrix + numpy.power(numpy.sin(beta1),2)*szzMatrix
srthetaMatrix = (sxxMatrix+szzMatrix)*numpy.cos(beta1)*numpy.sin(beta1) + sxzMatrix*(numpy.power(numpy.cos(beta1),2)-numpy.power(numpy.sin(beta1),2))
# rotate stress components on fiber
srrFiber = numpy.power(numpy.cos(beta1),2)*sxxFiber + 2*numpy.sin(beta1)*numpy.cos(beta1)*sxzFiber + numpy.power(numpy.sin(beta1),2)*szzFiber
srthetaFiber = (sxxFiber+szzFiber)*numpy.cos(beta1)*numpy.sin(beta1) + sxzFiber*(numpy.power(numpy.cos(beta1),2)-numpy.power(numpy.sin(beta1),2))
# compute products on matrix
prrMatrix = srrMatrix*rdisp
prthetaMatrix = srthetaMatrix*thetadisp
# compute products on fiber
prrFiber = srrFiber*rdisp
prthetaFiber = srthetaFiber*thetadisp
#save products to array
psMatrix.append([prrMatrix,prthetaMatrix])
psFiber.append([prrFiber,prthetaFiber])
GI = 0
GII = 0
for e,element in enumerate(psMatrix):
if e>0 and e<len(psMatrix)-1:
GI += 2*abs(psMatrix[e][0])
GII += 2*abs(psMatrix[e][1])
else:
GI += abs(psMatrix[e][0])
GII += abs(psMatrix[e][1])
GI *= 0.25/elN
GII *= 0.25/elN
dataMatrixSideCrackTip1.append([elN, enrrtFactor*GI, enrrtFactor*GII, enrrtFactor*(GI+GII), enrrtFactor*GI/G0, enrrtFactor*GII/G0, enrrtFactor*(GI+GII)/G0])
GI = 0
GII = 0
for e,element in enumerate(psFiber):
if e>0 and e<len(psFiber)-1:
GI += 2*abs(psFiber[e][0])
GII += 2*abs(psFiber[e][1])
else:
GI += abs(psFiber[e][0])
GII += abs(psFiber[e][1])
GI *= 0.25/elN
GII *= 0.25/elN
dataFiberSideCrackTip1.append([elN, enrrtFactor*GI, enrrtFactor*GII, enrrtFactor*(GI+GII), enrrtFactor*GI/G0, enrrtFactor*GII/G0, enrrtFactor*(GI+GII)/G0])
#writeLineToLogFile(logfile,'a','errt crack tip 1 calculated')
# compute energy release rates for crack tip 2
dataMatrixSideCrackTip2 = []
dataFiberSideCrackTip2 = []
for elN in range(nEl0,NElMax+DeltaEl,DeltaEl):
psMatrix = []
psFiber = []
if preMatrixCrackTip2Label<matrixCrackTip2Label:
for n in range(0,elN+1,1):
# get matrix node before and after the crack tip
preMatrixNode = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(matrixCrackTip2Label-(elN-n))
postMatrixNode = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(matrixCrackTip2Label+n)
# get matrix node before and after the crack tip
preFiberNode = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(matrixCrackTip2Label-(elN-n)+4*N1)
postFiberNode = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(matrixCrackTip2Label+n+4*N1)
# get displacements on matrix and fiber
dispPreMatrixNode = lastFrame.fieldOutputs['U'].getSubset(position=NODAL).getSubset(region=preMatrixNode).values[0]
dispPreFiberNode = lastFrame.fieldOutputs['U'].getSubset(position=NODAL).getSubset(region=preFiberNode).values[0]
# calculate crack face displacement
xdisp = dispPreMatrixNode.data[0] - dispPreFiberNode.data[0]
zdisp = dispPreMatrixNode.data[1] - dispPreFiberNode.data[1]
# rotate displacements to crack tip local system
rdisp = numpy.cos(beta2)*xdisp + numpy.sin(beta2)*zdisp
thetadisp = -numpy.sin(beta2)*xdisp + numpy.cos(beta2)*zdisp
# get stresses on matrix and fiber
postMatrixStresses = getFieldOutput(odb,-1,-1,'S',postMatrixNode,3)
postFiberStresses = getFieldOutput(odb,-1,-1,'S',postFiberNode,3)
# define stress components on matrix
sxxMatrix = postMatrixStresses.values[0].data[0]
szzMatrix = postMatrixStresses.values[0].data[1]
sxzMatrix = postMatrixStresses.values[0].data[3]
# define stress components on matrix
sxxFiber = postFiberStresses.values[0].data[0]
szzFiber = postFiberStresses.values[0].data[1]
sxzFiber = postFiberStresses.values[0].data[3]
# rotate stress components on matrix
srrMatrix = numpy.power(numpy.cos(beta2),2)*sxxMatrix + 2*numpy.sin(beta2)*numpy.cos(beta2)*sxzMatrix + numpy.power(numpy.sin(beta2),2)*szzMatrix
srthetaMatrix = (sxxMatrix+szzMatrix)*numpy.cos(beta2)*numpy.sin(beta2) + sxzMatrix*(numpy.power(numpy.cos(beta2),2)-numpy.power(numpy.sin(beta2),2))
# rotate stress components on fiber
srrFiber = numpy.power(numpy.cos(beta2),2)*sxxFiber + 2*numpy.sin(beta2)*numpy.cos(beta2)*sxzFiber + numpy.power(numpy.sin(beta2),2)*szzFiber
srthetaFiber = (sxxFiber+szzFiber)*numpy.cos(beta2)*numpy.sin(beta2) + sxzFiber*(numpy.power(numpy.cos(beta2),2)-numpy.power(numpy.sin(beta2),2))
# compute products on matrix
prrMatrix = srrMatrix*rdisp
prthetaMatrix = srthetaMatrix*thetadisp
# compute products on fiber
prrFiber = srrFiber*rdisp
prthetaFiber = srthetaFiber*thetadisp
#save products to array
psMatrix.append([prrMatrix,prthetaMatrix])
psFiber.append([prrFiber,prthetaFiber])
else:
for n in range(0,elN+1,1):
# get matrix node before and after the crack tip
postMatrixNode = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(matrixCrackTip2Label-(elN-n))
preMatrixNode = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(matrixCrackTip2Label+n)
# get matrix node before and after the crack tip
postFiberNode = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(matrixCrackTip2Label-(elN-n)+4*N1)
preFiberNode = odb.rootAssembly.instances['PART-1-1'].getNodeFromLabel(matrixCrackTip2Label+n+4*N1)
# get displacements on matrix and fiber
dispPreMatrixNode = lastFrame.fieldOutputs['U'].getSubset(position=NODAL).getSubset(region=preMatrixNode).values[0]
dispPreFiberNode = lastFrame.fieldOutputs['U'].getSubset(position=NODAL).getSubset(region=preFiberNode).values[0]
# calculate crack face displacement
xdisp = dispPreMatrixNode.data[0] - dispPreFiberNode.data[0]
zdisp = dispPreMatrixNode.data[1] - dispPreFiberNode.data[1]
# rotate displacements to crack tip local system
rdisp = numpy.cos(beta2)*xdisp + numpy.sin(beta2)*zdisp
thetadisp = -numpy.sin(beta2)*xdisp + numpy.cos(beta2)*zdisp
# get stresses on matrix and fiber
postMatrixStresses = getFieldOutput(odb,-1,-1,'S',postMatrixNode,3)
postFiberStresses = getFieldOutput(odb,-1,-1,'S',postFiberNode,3)
# define stress components on matrix
sxxMatrix = postMatrixStresses.values[0].data[0]
szzMatrix = postMatrixStresses.values[0].data[1]
sxzMatrix = postMatrixStresses.values[0].data[3]
# define stress components on matrix
sxxFiber = postFiberStresses.values[0].data[0]
szzFiber = postFiberStresses.values[0].data[1]
sxzFiber = postFiberStresses.values[0].data[3]
# rotate stress components on matrix
srrMatrix = numpy.power(numpy.cos(beta2),2)*sxxMatrix + 2*numpy.sin(beta2)*numpy.cos(beta2)*sxzMatrix + numpy.power(numpy.sin(beta2),2)*szzMatrix
srthetaMatrix = (sxxMatrix+szzMatrix)*numpy.cos(beta2)*numpy.sin(beta2) + sxzMatrix*(numpy.power(numpy.cos(beta2),2)-numpy.power(numpy.sin(beta2),2))
# rotate stress components on fiber
srrFiber = numpy.power(numpy.cos(beta2),2)*sxxFiber + 2*numpy.sin(beta2)*numpy.cos(beta2)*sxzFiber + numpy.power(numpy.sin(beta2),2)*szzFiber
srthetaFiber = (sxxFiber+szzFiber)*numpy.cos(beta2)*numpy.sin(beta2) + sxzFiber*(numpy.power(numpy.cos(beta2),2)-numpy.power(numpy.sin(beta2),2))
# compute products on matrix
prrMatrix = srrMatrix*rdisp
prthetaMatrix = srthetaMatrix*thetadisp
# compute products on fiber
prrFiber = srrFiber*rdisp
prthetaFiber = srthetaFiber*thetadisp
#save products to array
psMatrix.append([prrMatrix,prthetaMatrix])
psFiber.append([prrFiber,prthetaFiber])
GI = 0
GII = 0
for e,element in enumerate(psMatrix):
if e>0 and e<len(psMatrix)-1:
GI += 2*abs(psMatrix[e][0])
GII += 2*abs(psMatrix[e][1])
else:
GI += abs(psMatrix[e][0])
GII += abs(psMatrix[e][1])
GI *= 0.25/elN
GII *= 0.25/elN
dataMatrixSideCrackTip2.append([elN, enrrtFactor*GI, enrrtFactor*GII, enrrtFactor*(GI+GII), enrrtFactor*GI/G0, enrrtFactor*GII/G0, enrrtFactor*(GI+GII)/G0])
GI = 0
GII = 0
for e,element in enumerate(psFiber):
if e>0 and e<len(psFiber)-1:
GI += 2*abs(psFiber[e][0])
GII += 2*abs(psFiber[e][1])
else:
GI += abs(psFiber[e][0])
GII += abs(psFiber[e][1])
GI *= 0.25/elN
GII *= 0.25/elN
dataFiberSideCrackTip2.append([elN, enrrtFactor*GI, enrrtFactor*GII, enrrtFactor*(GI+GII), enrrtFactor*GI/G0, enrrtFactor*GII/G0, enrrtFactor*(GI+GII)/G0])
#writeLineToLogFile(logfile,'a','errts crack tip 2 calculated')
#writeLineToLogFile(logfile,'a','Gs calculated')
crackTip1 = [undeftip1.nodeLabel,
lengthFactor*undeftip1.data[0], lengthFactor*undeftip1.data[1],
lengthFactor*numpy.sqrt(numpy.power(undeftip1.data[0],2)+numpy.power(undeftip1.data[1],2)), numpy.arctan2(undeftip1.data[1],undeftip1.data[0])*180/numpy.pi,
lengthFactor*deftip1.data[0], lengthFactor*deftip1.data[1],
lengthFactor*numpy.sqrt(numpy.power(deftip1.data[0],2)+numpy.power(deftip1.data[1],2)), numpy.arctan2(deftip1.data[1],deftip1.data[0])*180/numpy.pi,
num, Gm, deltaC*180/numpy.pi,
epsxx*Em/(1-num*num), sigmaInf, numpy.pi*lengthFactor*Rf*(epsxx*Em/(1-num*num))*(epsxx*Em/(1-num*num))*(1+(3.0-4.0*num))/(8.0*Gm), G0]
crackTip2 = [undeftip2.nodeLabel,
lengthFactor*undeftip2.data[0], lengthFactor*undeftip2.data[1],
lengthFactor*numpy.sqrt(numpy.power(undeftip2.data[0],2)+numpy.power(undeftip2.data[1],2)), numpy.arctan2(undeftip2.data[1],undeftip2.data[0])*180/numpy.pi,
lengthFactor*deftip2.data[0], lengthFactor*deftip2.data[1],
lengthFactor*numpy.sqrt(numpy.power(deftip2.data[0],2)+numpy.power(deftip2.data[1],2)), numpy.arctan2(deftip2.data[1],deftip2.data[0])*180/numpy.pi,
num, Gm, deltaC*180/numpy.pi,
epsxx*Em/(1-num*num), sigmaInf, numpy.pi*lengthFactor*Rf*(epsxx*Em/(1-num*num))*(epsxx*Em/(1-num*num))*(1+(3.0-4.0*num))/(8.0*Gm), G0]
for v in range(1,len(dataMatrixSideCrackTip1[0])):
for data in dataMatrixSideCrackTip1:
crackTip1.append(data[v])
for v in range(1,len(dataFiberSideCrackTip1[0])):
for data in dataFiberSideCrackTip1:
crackTip1.append(data[v])
for v in range(1,len(dataMatrixSideCrackTip2[0])):
for data in dataMatrixSideCrackTip2:
crackTip2.append(data[v])
for v in range(1,len(dataFiberSideCrackTip2[0])):
for data in dataFiberSideCrackTip2:
crackTip2.append(data[v])
crackTips.append(crackTip1)
crackTips.append(crackTip2)
#writeLineToLogFile(logfile,'a','data saved in list')
line = 'NODE LABEL, X0 [m], Y0 [m], R0 [m], THETA0 [°], X [m], Y [m], R [m], THETA [°], nu [-], mu [Pa], deltaC [°], sigma_Inf_UNDAMAGED [Pa], sigma_Inf_DAMAGED [Pa], G0_UNDAMAGED [J/m^2], G0_DAMAGED [J/m^2], '
secondline = ' , , , , , , , , , , , , , , , , '
numGs = (NElMax+DeltaEl-nEl0)/DeltaEl
line += ', '
secondline += ', '
line += 'GI_M-SoM-VCCT [J/m^2]'
secondline += 'N Int El ' + str(nEl0)
for j in range(1,numGs):
secondline += ', '
secondline += 'N Int El ' + str(nEl0 + j*DeltaEl)
line += ', '
line += ', '
secondline += ', '
line += 'GII_M-SoM-VCCT [J/m^2]'
secondline += 'N Int El ' + str(nEl0)
for j in range(1,numGs):
secondline += ', '
secondline += 'N Int El ' + str(nEl0 + j*DeltaEl)
line += ', '
line += ', '
secondline += ', '
line += 'GTOT_M-SoM-VCCT [J/m^2]'
secondline += 'N Int El ' + str(nEl0)
for j in range(1,numGs):
secondline += ', '
secondline += 'N Int El ' + str(nEl0 + j*DeltaEl)
line += ', '
line += ', '
secondline += ', '
line += 'GI_M-SoM-VCCT/G0 [J/m^2]'
secondline += 'N Int El ' + str(nEl0)
for j in range(1,numGs):
secondline += ', '
secondline += 'N Int El ' + str(nEl0 + j*DeltaEl)
line += ', '
line += ', '
secondline += ', '
line += 'GII_M-SoM-VCCT/G0 [J/m^2]'
secondline += 'N Int El ' + str(nEl0)
for j in range(1,numGs):
secondline += ', '
secondline += 'N Int El ' + str(nEl0 + j*DeltaEl)
line += ', '
line += ', '
secondline += ', '
line += 'GTOT_M-SoM-VCCT/G0 [J/m^2]'
secondline += 'N Int El ' + str(nEl0)
for j in range(1,numGs):
secondline += ', '
secondline += 'N Int El ' + str(nEl0 + j*DeltaEl)
line += ', '
line += ', '
secondline += ', '
line += 'GI_M-SoF-VCCT [J/m^2]'
secondline += 'N Int El ' + str(nEl0)
for j in range(1,numGs):
secondline += ', '
secondline += 'N Int El ' + str(nEl0 + j*DeltaEl)
line += ', '
line += ', '
secondline += ', '
line += 'GII_M-SoF-VCCT [J/m^2]'
secondline += 'N Int El ' + str(nEl0)
for j in range(1,numGs):
secondline += ', '
secondline += 'N Int El ' + str(nEl0 + j*DeltaEl)
line += ', '
line += ', '
secondline += ', '
line += 'GTOT_M-SoF-VCCT [J/m^2]'
secondline += 'N Int El ' + str(nEl0)
for j in range(1,numGs):
secondline += ', '
secondline += 'N Int El ' + str(nEl0 + j*DeltaEl)
line += ', '
line += ', '
secondline += ', '
line += 'GI_M-SoF-VCCT/G0 [J/m^2]'
secondline += 'N Int El ' + str(nEl0)
for j in range(1,numGs):
secondline += ', '
secondline += 'N Int El ' + str(nEl0 + j*DeltaEl)
line += ', '
line += ', '
secondline += ', '
line += 'GII_M-SoF-VCCT/G0 [J/m^2]'
secondline += 'N Int El ' + str(nEl0)
for j in range(1,numGs):
secondline += ', '
secondline += 'N Int El ' + str(nEl0 + j*DeltaEl)
line += ', '
line += ', '
secondline += ', '
line += 'GTOT_M-SoF-VCCT/G0 [J/m^2]'
secondline += 'N Int El ' + str(nEl0)
for j in range(1,numGs):
secondline += ', '
secondline += 'N Int El ' + str(nEl0 + j*DeltaEl)
line += ', '
with open(join(csvfolder,'ENRRTs-VCCTinStresses-Summary.csv'),'w') as csv:
csv.write(line + '\n')
csv.write(secondline + '\n')
for tip in crackTips:
line = ''
for v,value in enumerate(tip):
if v>0:
line += ','
line += str(value)
csv.write(line + '\n')
#writeLineToLogFile(logfile,'a','data written to file')
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - VCCT in stresses (trapezoidal integration for elements of equal length at the interface in the undeformed configuration)
#=======================================================================
#=======================================================================
# BEGIN - extract data on paths
#=======================================================================
templateFile = join(codedir,'templateExtractABQpathData.py')
extractor = join(wd,'pathextractor.py')
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Reading template file ' + templateFile,True)
with open(templateFile,'r') as template:
lines = template.readlines()
writeLineToLogFile(logfile,'a','... done.',True)
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Writing file for path extraction:' + extractor,True)
with open(extractor,'w') as post:
for line in lines:
post.write(line)
post.write('' + '\n')
post.write('' + '\n')
post.write('def main(argv):' + '\n')
post.write('' + '\n')
post.write(' workdir = \'' + wd + '\'' + '\n')
post.write(' matdir = \'' + matdatafolder + '\'' + '\n')
post.write(' proj = \'' + project + '\'' + '\n')
post.write(' logfile = \'' + logfilename + '\'' + '\n')
post.write(' logfilePath = join(workdir,logfile)' + '\n')
post.write('' + '\n')
post.write(' settingsData = {}' + '\n')
post.write(' settingsData[\'nEl0\'] = ' + str(settings['nEl0']) + '\n')
post.write(' settingsData[\'NElMax\'] = ' + str(settings['NElMax']) + '\n')
post.write(' settingsData[\'DeltaEl\'] = ' + str(settings['DeltaEl']) + '\n')
post.write(' settingsData[\'deltapsi\'] = ' + str(settings['deltapsi']) + '\n')
post.write(' settingsData[\'nl\'] = ' + str(settings['nl']) + '\n')
post.write(' settingsData[\'nSegsOnPath\'] = ' + str(settings['nSegsOnPath']) + '\n')
post.write(' settingsData[\'tol\'] = ' + str(settings['tol']) + '\n')
post.write('' + '\n')
post.write(' skipLineToLogFile(logfilePath,\'a\',True)' + '\n')
post.write(' writeLineToLogFile(logfilePath,\'a\',\'Calling function extractPathsfromODBoutputSet01 ...\',True)' + '\n')
post.write(' try:' + '\n')
post.write(' extractPathsfromODBoutputSet01(workdir,proj,float(settingsData[\'deltapsi\']),int(settingsData[\'nl\']),int(settingsData[\'nSegsOnPath\']),float(settingsData[\'tol\']),logfilePath)' + '\n')
post.write(' except Exception, error:' + '\n')
post.write(' writeErrorToLogFile(logfilePath,\'a\',Exception,error,True)' + '\n')
post.write('' + '\n')
post.write('if __name__ == "__main__":' + '\n')
post.write(' main(sys.argv[1:])' + '\n')
writeLineToLogFile(logfile,'a','... done.',True)
skipLineToLogFile(logfilename,'a',True)
if 'Windows' in system():
cmdfile = join(wd,'runextractor.cmd')
writeLineToLogFile(logfilename,'a','Working in Windows',True)
writeLineToLogFile(logfilename,'a','Writing Windows command file ' + cmdfile + ' ...',True)
with open(cmdfile,'w') as cmd:
cmd.write('\n')
cmd.write('CD ' + wd + '\n')
cmd.write('\n')
cmd.write('abaqus viewer noGUI=' + extractor + '\n')
writeLineToLogFile(logfilename,'a','... done.',True)
writeLineToLogFile(logfilename,'a','Running extractor ... ',True)
try:
#subprocess.call('cmd.exe /C ' + cmdfile,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
p = subprocess.Popen(cmdfile,shell=True,stderr=subprocess.PIPE)
while True:
output = p.stderr.read(1)
if output == '' and p.poll()!= None:
break
if output != '':
sys.stdout.write(output)
sys.stdout.flush()
except Exception, error:
writeErrorToLogFile(logfilename,'a',Exception,error,True)
sys.exc_clear()
writeLineToLogFile(logfilename,'a','... done.',True)
elif 'Linux' in system():
bashfile = join(wd,'runextractor.sh')
writeLineToLogFile(logfilename,'a','Working in Linux',True)
writeLineToLogFile(logfilename,'a','Writing bash file ' + bashfile + ' ...',True)
with open(bashfile,'w') as bash:
bash.write('#!/bin/bash\n')
bash.write('\n')
bash.write('cd ' + wd + '\n')
bash.write('\n')
bash.write('abaqus viewer noGUI=' + extractor + '\n')
writeLineToLogFile(logfilename,'a','... done.',True)
writeLineToLogFile(logfilename,'a','Changing permissions to ' + bashfile + ' ...',True)
os.chmod(bashfile, 0o755)
writeLineToLogFile(logfilename,'a','... done.',True)
writeLineToLogFile(logfilename,'a','Running extractor ... ',True)
rc = call('.' + bashfile)
writeLineToLogFile(logfilename,'a','... done.',True)
#=======================================================================
# END - extract data on paths
#=======================================================================
#=======================================================================
# BEGIN - close database
#=======================================================================
skipLineToLogFile(logfile,'a',True)
writeLineToLogFile(logfile,'a','Close database...',True)
odb.close()
writeLineToLogFile(logfile,'a','... done.',True)
#=======================================================================
# END - close database
#======================================================================= | LucaDiStasio/thinPlyMechanics | python/templateAnalyzeABQoutputData.py | Python | apache-2.0 | 109,123 |
# Copyright 2021 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Mozaik Mass Mailing Access Rights",
"summary": """
New group: Mass Mailing Manager. Managers can edit
and unlink mass mailings.""",
"version": "14.0.1.0.0",
"license": "AGPL-3",
"author": "ACSONE SA/NV",
"website": "https://github.com/OCA/mozaik",
"depends": [
"mass_mailing",
],
"data": [
"security/groups.xml",
"security/ir.model.access.csv",
"views/mailing_mailing.xml",
"views/mail_template.xml",
],
"demo": [],
}
| mozaik-association/mozaik | mozaik_mass_mailing_access_rights/__manifest__.py | Python | agpl-3.0 | 625 |
#!/usr/bin/env
"""
GOA_Winds_NARR_3hr.py
Compare NARR Winds with NCEP V2 (with Mooring Winds)
Using Anaconda packaged Python
"""
#System Stack
import datetime
#Science Stack
import numpy as np
# User Stack
import general_utilities.date2doy as date2doy
import general_utilities.haversine as sphered
from utilities import ncutilities as ncutil
# Visual Stack
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap, shiftgrid
from matplotlib.dates import MonthLocator, DateFormatter, DayLocator
__author__ = 'Shaun Bell'
__email__ = '[email protected]'
__created__ = datetime.datetime(2014, 01, 13)
__modified__ = datetime.datetime(2014, 01, 13)
__version__ = "0.1.0"
__status__ = "Development"
__keywords__ = 'NARR','GLOBEC3','3hr comparison', 'Winds', 'Gulf of Alaska'
"""------------------------General Modules-------------------------------------------"""
def date2pydate(file_time, file_time2=None, file_flag='EPIC'):
    """Convert file timestamps to python ordinal days (float).

    Parameters
    ----------
    file_time : scalar or sequence
        EPIC julian day, or hours since 1800-01-01 for NARR/NCEP.
    file_time2 : scalar or sequence, optional
        EPIC milliseconds-of-day companion field (required for 'EPIC').
    file_flag : str
        One of 'EPIC', 'NARR', 'NCEP'.

    Returns
    -------
    numpy.ndarray
        Python ordinal date(s), fractional days included.

    Raises
    ------
    ValueError
        If *file_flag* is not one of the recognized formats.
    """
    if file_flag == 'EPIC':
        # EPIC julian day 2440000 corresponds to 1968-05-23.
        ref_time_py = datetime.datetime.toordinal(datetime.datetime(1968, 5, 23))
        ref_time_epic = 2440000
        offset = ref_time_epic - ref_time_py
        try:  # sequence input: convert element-wise
            python_time = [None] * len(file_time)
            for i, val in enumerate(file_time):
                pyday = file_time[i] - offset
                # file_time2 is milliseconds in a day
                pyfrac = file_time2[i] / (1000. * 60. * 60. * 24.)
                python_time[i] = (pyday + pyfrac)
        except TypeError:  # scalar input: len()/indexing raise TypeError
            pyday = file_time - offset
            pyfrac = file_time2 / (1000. * 60. * 60. * 24.)
            python_time = (pyday + pyfrac)
    elif file_flag in ('NARR', 'NCEP'):
        # Both reanalysis products use "hours since 1800-01-01".
        base_date = datetime.datetime.strptime('1800-01-01', '%Y-%m-%d').toordinal()
        python_time = file_time / 24. + base_date
    else:
        raise ValueError("time flag not recognized: %r" % (file_flag,))
    return np.array(python_time)
def hourly2daily(ltbound, utbound, time, data):
    """Collapse an hourly series into daily mean and standard deviation.

    Samples are grouped by the integer part of their ordinal time, so every
    day between *ltbound* and *utbound* (inclusive) yields one mean/std pair.
    Returns a dict with keys 'daily_mean', 'daily_std', 'daily_time'.
    """
    days = np.arange(ltbound, utbound + 1, 1.)
    means = np.zeros_like(days)
    stds = np.zeros_like(days)
    for k, day in enumerate(days):
        # all samples whose ordinal date falls on this day
        in_day = np.floor(time) == day
        means[k] = data[in_day].mean()
        stds[k] = data[in_day].std()
    return {'daily_mean': means, 'daily_std': stds, 'daily_time': days}
def hourly_2_3hrly(ltbound, utbound, time, data):
    """Average an hourly series into 3-hourly bins.

    Bins start at *ltbound* and advance in 3-hour (0.125-day) steps up to,
    but excluding, *utbound*; each bin is [left, left + 3h).
    Returns a dict with keys 'mean', 'std', 'time'.
    """
    step = 3 / 24.
    bins = np.arange(ltbound, utbound, step)
    means = np.zeros_like(bins)
    stds = np.zeros_like(bins)
    for k, left in enumerate(bins):
        # half-open window [left, left + step)
        in_bin = (time >= left) & (time < left + step)
        means[k] = data[in_bin].mean()
        stds[k] = data[in_bin].std()
    return {'mean': means, 'std': stds, 'time': bins}
def cart2wind(cart_angle):
    """Convert a math-convention angle (degrees CCW from +x) to wind
    convention: 0 deg is North, increasing clockwise, wrapped to [0, 360)."""
    return (90. - cart_angle) % 360.
def rotate_coord(angle_rot, mag, dir):
    """Rotate a (magnitude, direction) vector into along-/cross-shelf parts.

    + onshore / along coast with land to right (right handed)
    - offshore / along coast with land to left
    Todo: convert met standard for winds (left handed coordinate system)

    Returns the tuple (along, cross).
    """
    # Work in the rotated frame: subtract the coastline bearing once.
    theta = np.deg2rad(dir - angle_rot)
    return (mag * np.sin(theta), mag * np.cos(theta))
def from_netcdf(infile):
    """Read every variable of a netcdf file.

    Opens *infile*, pulls the full variable list, reads all of them into a
    dictionary via ncreadfile_dic, closes the handle, and returns the tuple
    (data_dict, variable_names).
    """
    handle = ncutil.ncopen(infile)
    varnames = ncutil.get_vars(handle)  # every variable in the file
    data = ncutil.ncreadfile_dic(handle, varnames)
    ncutil.ncclose(handle)
    return (data, varnames)
"""---------------------------- Plotting Modules --------------------------------------"""
def quiver_timeseries(time,ucomp,vcomp,magnitude,data_source):
    """Plot a two-panel wind time series (stick/quiver plot on top, u and v
    component traces below) and save it to
    images/Globec_<data_source>_timeseries.png.

    NOTE(review): samples whose magnitude is NaN or >= 100 are zeroed
    *in place* in ucomp/vcomp/magnitude -- the caller's arrays are modified.
    """
    # Mask invalid samples (NaN or unphysically large) by zeroing them.
    t_ind = ~(~np.isnan(magnitude) & (magnitude < 100))
    ucomp[t_ind] = 0.
    vcomp[t_ind] = 0.
    magnitude[t_ind] = 0.

    fig1, (ax1, ax2) = plt.subplots(2,1)
    # Plot quiver
    ax1.set_ylim(-magnitude.max(), magnitude.max())
    fill1 = ax1.fill_between(time, magnitude, 0, color='k', alpha=0.1)
    # Fake 'box' to be able to insert a legend for 'Magnitude'
    p = ax1.add_patch(plt.Rectangle((1,1),1,1,fc='k',alpha=0.1))
    leg1 = ax1.legend([p], ["Wind magnitude [m/s]"],loc='lower right')
    leg1._drawFrame=False
    # 1D Quiver plot: arrows anchored on the y=0 axis
    q = ax1.quiver(time,0,ucomp,vcomp,color='r',units='y',scale_units='y',
                   scale = 1,headlength=1,headaxislength=1,width=0.04,alpha=.95)
    qk = plt.quiverkey(q,0.2, 0.05, 5,r'$5 \frac{m}{s}$',labelpos='W',
                       fontproperties={'weight': 'bold'})
    # Plot u and v components
    ax1.axes.get_xaxis().set_visible(False)
    ax1.set_xlim(time.min(),time.max()+0.5)
    ax1.set_ylabel("Velocity (m/s)")
    ax2.plot(time, vcomp, 'b-')
    ax2.plot(time, ucomp, 'g-')
    ax2.set_xlim(time.min(),time.max()+0.5)
    ax2.set_xlabel("Date (UTC)")
    ax2.set_ylabel("Velocity (m/s)")
    # Date axis: major ticks at month boundaries, minor ticks per day.
    ax2.xaxis.set_major_locator(MonthLocator())
    ax2.xaxis.set_major_formatter(DateFormatter('%Y-%m-%d'))
    ax2.xaxis.set_minor_locator(DayLocator())
    fig1.autofmt_xdate()
    # Set legend location - See: http://matplotlib.org/users/legend_guide.html#legend-location
    leg2 = plt.legend(['v','u'],loc='upper left')
    leg2._drawFrame=False
    # Join the two panels visually by hiding the facing spines.
    ax1.spines['bottom'].set_visible(False)
    ax2.spines['top'].set_visible(False)
    ax2.xaxis.set_ticks_position('bottom')
    ax2.yaxis.set_ticks_position('both')
    DefaultSize = fig1.get_size_inches()
    # Double the width for a readable year-long time axis.
    fig1.set_size_inches( (DefaultSize[0]*2, DefaultSize[1]) )
    fig1.suptitle("3hr ave Wind data for: " + data_source, fontsize=12)
    # Save figure (without 'white' borders)
    plt.savefig('images/Globec_' + data_source + '_timeseries.png', bbox_inches='tight', dpi = (100))
    plt.close(fig1)
"""---------------------------- Main Routine-------------------------------------------"""
"""------Ingest Data--------"""
NARR = '/Users/bell/Data_Local/Reanalysis_Files/NARR/3hourly/'
NARR_uwind, NARR_uparams = from_netcdf(NARR + 'uwnd.10m.2003.nc')
NARR_vwind, NARR_vparams = from_netcdf(NARR + 'vwnd.10m.2003.nc')
NARRTime = date2pydate(NARR_uwind['time'], file_flag='NCEP')
### NARR Data has the following boundary corners:
# 12.2N;133.5W, 54.5N; 152.9W, 57.3N; 49.4W ,14.3N;65.1W
# Lambert Conformal
#lat/lon is ~ 59N, 149W
MooringFile = '/Users/bell/Data_Local/FOCI/Mooring/2003/globec3/03gbm3a_wpak.nc'
MooringMetData, Mooring_params = from_netcdf(MooringFile)
MooringTime = date2pydate(MooringMetData['time'], MooringMetData['time2'], file_flag='EPIC')
MooringDaily_uwnd = hourly_2_3hrly(NARRTime.min(),NARRTime.max(), MooringTime, MooringMetData['WU_422'])
MooringDaily_vwnd = hourly_2_3hrly(NARRTime.min(),NARRTime.max(), MooringTime, MooringMetData['WV_423'])
sta_lat = MooringMetData['latitude'][0]
sta_long = MooringMetData['longitude'][0]
#-----> user set to force mooring location instead of using built in location (useful if you want
# to specify lat/lon for model comparison purposes
#sta_lat = 58.
#sta_long = 148.
"""---------------------------- Data Manipulation Routines-----------------------------"""
#Find NCEP and NARR nearest point to mooring
narrpt = sphered.nearest_point([sta_lat,-1 * sta_long],NARR_uwind['lat'],NARR_uwind['lon'], '2d')
try: #some data gives extra paramter and puts data in a dict structure... others does not
NARR_u = NARR_uwind['uwnd'].data[:,narrpt[3],narrpt[4]]
NARR_v = NARR_vwind['vwnd'].data[:,narrpt[3],narrpt[4]]
except TypeError: #no .data parameter
NARR_u = NARR_uwind['uwnd'][:,narrpt[3],narrpt[4]]
NARR_v = NARR_vwind['vwnd'][:,narrpt[3],narrpt[4]]
NARR_wind_mag = np.sqrt(NARR_u**2. + NARR_u**2.)
NARR_wind_dir_math = np.rad2deg(np.arctan2(NARR_v , NARR_u))
NARR_wind_dir = cart2wind(NARR_wind_dir_math)
Mooring_wind_mag = np.sqrt(MooringDaily_uwnd['mean']**2. + MooringDaily_vwnd['mean']**2.)
Mooring_wind_dir_math = np.rad2deg(np.arctan2(MooringDaily_vwnd['mean'] , MooringDaily_uwnd['mean']))
Mooring_wind_dir = cart2wind(Mooring_wind_dir_math)
# mask when mooring wasn't available
t_ind = ~np.isnan(Mooring_wind_mag) & (Mooring_wind_mag < 100)
### Calculate +-flow and x-flow rotating along coast (~43 degrees bearing near Globec3 )
(NARRalong, NARRcross) = rotate_coord(137., NARR_wind_mag, NARR_wind_dir_math)
(MOORalong, MOORcross) = rotate_coord(137., Mooring_wind_mag, Mooring_wind_dir_math)
"""---------------------------- Plotting Routines--------------------------------------"""
### standard wind / time plots
# NARR
quiver_timeseries(NARRTime,NARR_u,NARR_v,NARR_wind_mag,'NARR')
quiver_timeseries(MooringDaily_uwnd['time'],MooringDaily_uwnd['mean'],MooringDaily_vwnd['mean'],Mooring_wind_mag,'GLOBEC3')
### Along/Cross Shore comparisons Mooring vs NARR/NCEP
# for entire year (mark mooring specific times)
fig = plt.figure(6)
ax = plt.subplot(121)
p1 = ax.plot(MOORalong[t_ind], NARRalong[t_ind])
plt.setp(p1,color='r', marker='+', linestyle='')
p3 = ax.plot(np.arange(-15,16,5),np.arange(-15,16,5))
plt.setp(p3,'color','k','linestyle','--')
ax.set_xticks(np.arange(-15,16,5))
ax.set_yticks(np.arange(-15,16,5))
ax.set_xlabel('3hr Globec3 Alongshore Flow (m/s)')
ax.set_ylabel('3hr NARR Alongshore Flow (m/s)')
ax = plt.subplot(122)
p1 = ax.plot(MOORcross[t_ind], NARRcross[t_ind])
plt.setp(p1,color='r', marker='+', linestyle='')
p3 = ax.plot(np.arange(-15,16,5),np.arange(-15,16,5))
plt.setp(p3,'color','k','linestyle','--')
ax.set_xticks(np.arange(-15,16,5))
ax.set_yticks(np.arange(-15,16,5))
ax.set_xlabel('3hr Globec3 AcrossShore Flow (m/s)')
ax.set_ylabel('3hr NARR AcrossShore Flow (m/s)')
DefaultSize = fig.get_size_inches()
fig.set_size_inches( (DefaultSize[0]*2, DefaultSize[1]) )
plt.savefig('images/Globec_alongacross_comp.png', bbox_inches='tight', dpi = (100))
plt.close()
| shaunwbell/FOCI_Analysis | ReanalysisRetreival_orig/GOA_Winds/depricated/GOA_Winds_NARR_3hr.py | Python | mit | 10,276 |
# -*- coding: utf-8 -*-
# Copyright (c) 2012 Fabian Barkhau <[email protected]>
# License: MIT (see LICENSE.TXT file)
from django.dispatch import Signal
# Fired after a new team is created; receivers get the ``team`` and the
# ``creator`` user.
team_created = Signal(providing_args=["team", "creator"])
# Fired when a user asks to join a team / when that request is decided.
join_request_created = Signal(providing_args=["join_request"])
join_request_processed = Signal(providing_args=["join_request"])
# Fired when a removal of a member is requested / when that request is decided.
remove_request_created = Signal(providing_args=["remove_request"])
remove_request_processed = Signal(providing_args=["remove_request"])
| F483/bikesurf.org | apps/team/signals.py | Python | mit | 507 |
from common.forms import ModelFormWithHelper
from common.helpers import SubmitCancelFormHelper
from blog.models import News, Resource, Tag
from users.models import SystersUser
class AddNewsForm(ModelFormWithHelper):
    """Form for creating a Community News entry.

    The view must supply two extra keyword arguments on instantiation:

    * author - the currently logged in user
    * community - the community resolved from the URL slug
    """
    class Meta:
        model = News
        fields = ['slug', 'title', 'content', 'is_public', 'is_monitored',
                  'tags']
        helper_class = SubmitCancelFormHelper
        helper_cancel_href = "{% url 'view_community_news_list' " \
                             "community.slug %}"

    def __init__(self, *args, **kwargs):
        # Pull out the view-supplied kwargs before ModelForm sees them.
        self.author = kwargs.pop('author')
        self.community = kwargs.pop('community')
        super(AddNewsForm, self).__init__(*args, **kwargs)

    def save(self, commit=True):
        """Attach community and author to the instance before saving."""
        news = super(AddNewsForm, self).save(commit=False)
        news.community = self.community
        news.author = SystersUser.objects.get(user=self.author)
        if commit:
            news.save()
        return news
class EditNewsForm(ModelFormWithHelper):
    """Form to edit Community News.

    Unlike AddNewsForm, no extra kwargs are needed: author and community
    are already set on the instance being edited.
    """
    class Meta:
        model = News
        fields = ['slug', 'title', 'content', 'is_public', 'is_monitored',
                  'tags']
        helper_class = SubmitCancelFormHelper
        # Cancel returns to the detail page of the news item being edited.
        helper_cancel_href = "{% url 'view_community_news' community.slug " \
                             "object.slug %}"
class AddResourceForm(ModelFormWithHelper):
    """Form for creating a Community Resource.

    The view must supply two extra keyword arguments on instantiation:

    * author - the currently logged in user
    * community - the community resolved from the URL slug
    """
    class Meta:
        model = Resource
        fields = ['slug', 'title', 'content', 'is_public', 'is_monitored',
                  'tags', 'resource_type']
        helper_class = SubmitCancelFormHelper
        helper_cancel_href = "{% url 'view_community_resource_list' " \
                             "community.slug %}"

    def __init__(self, *args, **kwargs):
        # Pull out the view-supplied kwargs before ModelForm sees them.
        self.author = kwargs.pop('author')
        self.community = kwargs.pop('community')
        super(AddResourceForm, self).__init__(*args, **kwargs)

    def save(self, commit=True):
        """Attach community and author to the instance before saving."""
        resource = super(AddResourceForm, self).save(commit=False)
        resource.community = self.community
        resource.author = SystersUser.objects.get(user=self.author)
        if commit:
            resource.save()
        return resource
class EditResourceForm(ModelFormWithHelper):
    """Form to edit Community Resource.

    Unlike AddResourceForm, no extra kwargs are needed: author and
    community are already set on the instance being edited.
    """
    class Meta:
        model = Resource
        fields = ['slug', 'title', 'content', 'is_public', 'is_monitored',
                  'tags', 'resource_type']
        helper_class = SubmitCancelFormHelper
        # Cancel returns to the detail page of the resource being edited.
        helper_cancel_href = "{% url 'view_community_resource' " \
                             "community.slug object.slug %}"
class TagForm(ModelFormWithHelper):
    """Form to create or edit a tag (name field only)."""
    class Meta:
        model = Tag
        fields = ['name']
        helper_class = SubmitCancelFormHelper
        # Cancel returns to the community news listing.
        helper_cancel_href = "{% url 'view_community_news_list' " \
                             "community.slug %}"
| willingc/portal | systers_portal/blog/forms.py | Python | gpl-2.0 | 3,574 |
import os
import unittest
import vtk, qt, ctk, slicer
import math
import sys
#
# AstroMomentMapsSelfTest
#
class AstroMomentMapsSelfTest:
    """Scripted-module descriptor: fills in the module metadata and registers
    this self test with Slicer's SelfTests framework."""

    def __init__(self, parent):
        # Metadata shown in Slicer's module finder.
        parent.title = "Astro MomentMaps SelfTest"
        parent.categories = ["Testing.TestCases"]
        parent.dependencies = ["AstroVolume"]
        parent.contributors = ["""
    Davide Punzo (Kapteyn Astronomical Institute) and
    Thijs van der Hulst (Kapteyn Astronomical Institute)."""]
        parent.helpText = """
    This module was developed as a self test to perform the operations needed for generating moment maps.
    """
        parent.acknowledgementText = """
    """ # replace with organization, grant and thanks.
        self.parent = parent

        # Add this test to the SelfTest module's list for discovery when the module
        # is created. Since this module may be discovered before SelfTests itself,
        # create the list if it doesn't already exist.
        try:
            slicer.selfTests
        except AttributeError:
            slicer.selfTests = {}
        slicer.selfTests['Astro MomentMaps SelfTest'] = self.runTest

    def runTest(self):
        # Entry point invoked by the SelfTests module.
        tester = AstroMomentMapsSelfTestTest()
        tester.runTest()
#
# qAstroMomentMapsSelfTestWidget
#
class AstroMomentMapsSelfTestWidget:
    """GUI widget for the self-test module: exposes developer-only
    Reload and Reload-and-Test buttons."""

    def __init__(self, parent = None):
        # When instantiated standalone, build our own MRML-aware parent widget.
        if not parent:
            self.parent = slicer.qMRMLWidget()
            self.parent.setLayout(qt.QVBoxLayout())
            self.parent.setMRMLScene(slicer.mrmlScene)
        else:
            self.parent = parent
        self.layout = self.parent.layout()
        if not parent:
            self.setup()
            self.parent.show()

    def setup(self):
        # Instantiate and connect widgets ...

        # reload button
        # (use this during development, but remove it when delivering
        # your module to users)
        self.reloadButton = qt.QPushButton("Reload")
        self.reloadButton.toolTip = "Reload this module."
        self.reloadButton.name = "AstroMomentMapsSelfTest Reload"
        self.layout.addWidget(self.reloadButton)
        self.reloadButton.connect('clicked()', self.onReload)

        # reload and test button
        # (use this during development, but remove it when delivering
        # your module to users)
        self.reloadAndTestButton = qt.QPushButton("Reload and Test")
        self.reloadAndTestButton.toolTip = "Reload this module and then run the self tests."
        self.layout.addWidget(self.reloadAndTestButton)
        self.reloadAndTestButton.connect('clicked()', self.onReloadAndTest)

        # Add vertical spacer
        self.layout.addStretch(1)

    def cleanup(self):
        # Nothing to release; kept for the scripted-module interface.
        pass

    def onReload(self,moduleName="AstroMomentMapsSelfTest"):
        """Generic reload method for any scripted module.
        ModuleWizard will subsitute correct default moduleName.
        """
        # Rebind the module global so the freshly reloaded module is used.
        globals()[moduleName] = slicer.util.reloadScriptedModule(moduleName)

    def onReloadAndTest(self,moduleName="AstroMomentMapsSelfTest"):
        # Reload first so the test runs against the latest source.
        self.onReload()
        # NOTE(review): eval on an internally-built string; moduleName comes
        # from a default/caller, not user input.
        evalString = 'globals()["%s"].%sTest()' % (moduleName, moduleName)
        tester = eval(evalString)
        tester.runTest()
#
# AstroMomentMapsSelfTestLogic
#
class AstroMomentMapsSelfTestLogic:
"""This class should implement all the actual
computation done by your module. The interface
should be such that other python code can import
this class and make use of the functionality without
requiring an instance of the Widget
"""
def __init__(self):
pass
def hasImageData(self,volumeNode):
"""This is a dummy logic method that
returns true if the passed in volume
node has valid image data
"""
if not volumeNode:
print('no volume node')
return False
if volumeNode.GetImageData() is None:
print('no image data')
return False
return True
class AstroMomentMapsSelfTestTest(unittest.TestCase):
"""
This is the test case for your scripted module.
"""
    def delayDisplay(self,message,msec=100):
        """This utility method displays a small dialog and waits.

        This does two things: 1) it lets the event loop catch up
        to the state of the test so that rendering and widget updates
        have all taken place before the test continues and 2) it
        shows the user/developer/tester the state of the test
        so that we'll know when it breaks.
        """
        print(message)
        self.info = qt.QDialog()
        self.infoLayout = qt.QVBoxLayout()
        self.info.setLayout(self.infoLayout)
        self.label = qt.QLabel(message,self.info)
        self.infoLayout.addWidget(self.label)
        # Auto-close after msec milliseconds; exec_() blocks until then.
        qt.QTimer.singleShot(msec, self.info.close)
        self.info.exec_()
    def setUp(self):
        """Reset the MRML scene so the test starts from a clean state."""
        slicer.mrmlScene.Clear(0)
    def runTest(self):
        """Run the scenario: clean the scene, then the moment-maps check."""
        self.setUp()
        self.test_AstroMomentMapsSelfTest()
def test_AstroMomentMapsSelfTest(self):
print("Running AstroMomentMapsSelfTest Test case:")
self.downloadWEIN069()
astroVolume = slicer.util.getNode("WEIN069")
rms = astroVolume.GetDisplayThreshold()
mainWindow = slicer.util.mainWindow()
mainWindow.moduleSelector().selectModule('AstroVolume')
mainWindow.moduleSelector().selectModule('AstroMomentMaps')
astroMomentMapsModule = module = slicer.modules.astromomentmaps
astroMomentMapsModuleWidget = astroMomentMapsModule.widgetRepresentation()
AstroMomentMapsParameterNode = slicer.util.getNode("AstroMomentMapsParameters")
AstroMomentMapsParameterNode.SetIntensityMin(rms * 3)
QPushButtonList = astroMomentMapsModuleWidget.findChildren(qt.QPushButton)
for QPushButton in (QPushButtonList):
if QPushButton.name == "ApplyButton":
ApplyPushButton = QPushButton
self.delayDisplay('Calculating moment maps', 700)
ApplyPushButton.click()
ZeroMomentMapVolume = slicer.mrmlScene.GetNodeByID(AstroMomentMapsParameterNode.GetZeroMomentVolumeNodeID())
pixelValue0 = ZeroMomentMapVolume.GetImageData().GetScalarComponentAsFloat(56, 68, 0, 0)
FirstMomentMapVolume = slicer.mrmlScene.GetNodeByID(AstroMomentMapsParameterNode.GetFirstMomentVolumeNodeID())
pixelValue1 = FirstMomentMapVolume.GetImageData().GetScalarComponentAsFloat(56, 68, 0, 0)
SecondMomentMapVolume = slicer.mrmlScene.GetNodeByID(AstroMomentMapsParameterNode.GetSecondMomentVolumeNodeID())
pixelValue2 = SecondMomentMapVolume.GetImageData().GetScalarComponentAsFloat(56, 68, 0, 0)
if (math.fabs(pixelValue0 - 0.511788547039) < 1.e-6 and \
math.fabs(pixelValue1 - 5231.70947266) < 1.e-6 and \
math.fabs(pixelValue2 - 28.8058509827) < 1.e-6):
self.delayDisplay('Test passed', 700)
else:
self.delayDisplay('Test failed', 700)
# if run from Slicer interface remove the followinf exit
sys.exit()
def downloadWEIN069(self):
import AstroSampleData
astroSampleDataLogic = AstroSampleData.AstroSampleDataLogic()
self.delayDisplay('Getting WEIN069 Astro Volume')
WEIN069Volume = astroSampleDataLogic.downloadSample("WEIN069")
return WEIN069Volume
| Punzo/SlicerAstro | AstroMomentMaps/Testing/Python/AstroMomentMapsSelfTest.py | Python | bsd-3-clause | 6,817 |
# Copyright 2015 CloudFounders NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This package contains Linux OS distribution extensions
"""
| tcpcloud/openvstorage | ovs/extensions/os/__init__.py | Python | apache-2.0 | 644 |
##
# Copyright (c) 2008-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
Directory-backed address book service resource and operations.
"""
__all__ = [
"DirectoryBackedAddressBookResource",
]
from twext.python.log import Logger
from twext.who.expression import Operand, MatchType, MatchFlags, \
MatchExpression, CompoundExpression
from twext.who.idirectory import FieldName
from twisted.internet.defer import deferredGenerator
from twisted.internet.defer import succeed, inlineCallbacks, maybeDeferred, \
returnValue
from twisted.python.constants import NamedConstant
from twistedcaldav import carddavxml
from twistedcaldav.config import config
from twistedcaldav.resource import CalDAVResource
from txdav.carddav.datastore.query.filter import IsNotDefined, TextMatch, \
ParameterFilter
from txdav.who.idirectory import FieldName as CalFieldName
from txdav.who.vcard import vCardKindToRecordTypeMap, recordTypeToVCardKindMap, \
vCardPropToParamMap, vCardConstantProperties, vCardFromRecord
from txdav.xml import element as davxml
from txdav.xml.base import twisted_dav_namespace, dav_namespace, parse_date, \
twisted_private_namespace
from txweb2 import responsecode
from txweb2.dav.resource import DAVPropertyMixIn
from txweb2.dav.resource import TwistedACLInheritable
from txweb2.dav.util import joinURL
from txweb2.http import HTTPError, StatusResponse
from txweb2.http_headers import MimeType, generateContentType, ETag
from xmlrpclib import datetime
import hashlib
import uuid
log = Logger()

# Zero-valued MatchFlags ("no flags"), built by and-ing NOT with its own
# complement because the MatchFlags_none constant itself can't be imported.
MatchFlags_none = MatchFlags.NOT & ~MatchFlags.NOT  # can't import MatchFlags_none
class DirectoryBackedAddressBookResource (CalDAVResource):
"""
Directory-backed address book
"""
def __init__(self, principalCollections, principalDirectory, uri):
CalDAVResource.__init__(self, principalCollections=principalCollections)
self.principalDirectory = principalDirectory
self.uri = uri
self.directory = None
def makeChild(self, name):
from twistedcaldav.simpleresource import SimpleCalDAVResource
return SimpleCalDAVResource(principalCollections=self.principalCollections())
return self.directory
def provisionDirectory(self):
if self.directory is None:
log.info(
"Setting search directory to {principalDirectory}",
principalDirectory=self.principalDirectory
)
self.directory = self.principalDirectory
# future: instantiate another directory based on /Search/Contacts (?)
return succeed(None)
def defaultAccessControlList(self):
if config.AnonymousDirectoryAddressBookAccess:
# DAV:Read for all principals (includes anonymous)
accessPrincipal = davxml.All()
else:
# DAV:Read for all authenticated principals (does not include anonymous)
accessPrincipal = davxml.Authenticated()
return succeed(
davxml.ACL(
davxml.ACE(
davxml.Principal(accessPrincipal),
davxml.Grant(
davxml.Privilege(davxml.Read()),
davxml.Privilege(davxml.ReadCurrentUserPrivilegeSet())
),
davxml.Protected(),
TwistedACLInheritable(),
),
)
)
def supportedReports(self):
result = super(DirectoryBackedAddressBookResource, self).supportedReports()
if config.EnableSyncReport:
# Not supported on the directory backed address book
result.remove(davxml.Report(davxml.SyncCollection(),))
return result
def resourceType(self):
return davxml.ResourceType.directory
def resourceID(self):
if self.directory:
resource_id = uuid.uuid5(uuid.UUID("5AAD67BF-86DD-42D7-9161-6AF977E4DAA3"), self.directory.guid).urn
else:
resource_id = "tag:unknown"
return resource_id
def isDirectoryBackedAddressBookCollection(self):
return True
def isAddressBookCollection(self):
return True
def isCollection(self):
return True
def accessControlList(self, request, inheritance=True, expanding=False, inherited_aces=None):
# Permissions here are fixed, and are not subject to inheritance rules, etc.
return self.defaultAccessControlList()
@inlineCallbacks
def renderHTTP(self, request):
if not self.directory:
raise HTTPError(StatusResponse(responsecode.SERVICE_UNAVAILABLE, "Service is starting up"))
response = (yield maybeDeferred(super(DirectoryBackedAddressBookResource, self).renderHTTP, request))
returnValue(response)
@inlineCallbacks
def doAddressBookDirectoryQuery(self, addressBookFilter, addressBookQuery, maxResults, defaultKind="individual"):
"""
Get vCards for a given addressBookFilter and addressBookQuery
"""
log.debug(
"doAddressBookDirectoryQuery: directory={directory} addressBookFilter={addressBookFilter}, addressBookQuery={addressBookQuery}, maxResults={maxResults}",
directory=self.directory, addressBookFilter=addressBookFilter, addressBookQuery=addressBookQuery, maxResults=maxResults
)
results = []
limited = False
maxQueryRecords = 0
vcardPropToRecordFieldMap = {
"FN": FieldName.fullNames,
"N": FieldName.fullNames,
"EMAIL": FieldName.emailAddresses,
"UID": FieldName.uid,
"ADR": (
CalFieldName.streetAddress,
# CalFieldName.floor,
),
"KIND": FieldName.recordType,
# LATER "X-ADDRESSBOOKSERVER-MEMBER": FieldName.membersUIDs,
}
propNames, expression = expressionFromABFilter(
addressBookFilter, vcardPropToRecordFieldMap, vCardConstantProperties,
)
if expression:
queryRecordType = None
if "KIND" not in propNames:
queryRecordType = vCardKindToRecordTypeMap.get(defaultKind)
# if CompoundExpression of MatchExpression: recordsWithFieldValue() else recordsMatchingType()
fields = []
if expression is not True:
def fieldForMatchExpression(match):
return (
match.fieldName.name,
match.fieldValue,
match.flags,
match.matchType,
)
if isinstance(expression, CompoundExpression):
operand = expression.operand
for match in expression.expressions:
if isinstance(match, MatchExpression):
if match.fieldName != FieldName.recordType:
fields.append(fieldForMatchExpression(match))
# else optimize: collect record type list for query
else:
# do all record types query
fields = []
break
elif isinstance(expression, MatchExpression):
operand = Operand.OR
if expression.fieldName != FieldName.recordType:
fields.append(fieldForMatchExpression(expression))
else:
recordType = expression.fieldValue
maxRecords = int(maxResults * 1.2)
# keep trying query till we get results based on filter. Especially when doing "all results" query
while True:
queryLimited = False
log.debug("doAddressBookDirectoryQuery: expression={expression!r}, propNames={propNames}", expression=expression, propNames=propNames)
allRecords = set()
if fields:
records = yield self.directory.recordsMatchingFields(fields, operand, queryRecordType)
log.debug(
"doAddressBookDirectoryQuery: recordsMatchingFields({f}, {o}): #records={n}, records={records!r}",
f=fields, o=operand, n=len(records), records=records
)
allRecords = set(records)
else:
recordTypes = set([queryRecordType]) if queryRecordType else set(self.directory.recordTypes()) & set(recordTypeToVCardKindMap.keys())
for recordType in recordTypes:
records = yield self.directory.recordsWithRecordType(recordType)
log.debug("doAddressBookDirectoryQuery: #records={n}, records={records!r}", n=len(records), records=records)
allRecords |= set(records)
vCardsResults = [(yield ABDirectoryQueryResult(self).generate(record)) for record in allRecords]
filteredResults = set()
for vCardResult in vCardsResults:
if addressBookFilter.match(vCardResult.vCard()):
log.debug("doAddressBookDirectoryQuery: vCard did match filter:\n{vcard}", vcard=vCardResult.vCard())
filteredResults.add(vCardResult)
else:
log.debug("doAddressBookDirectoryQuery: vCard did not match filter:\n{vcard}", vcard=vCardResult.vCard())
# no more results
if not queryLimited:
break
# more than requested results
if maxResults and len(filteredResults) >= maxResults:
break
# more than max report results
if len(filteredResults) >= config.MaxQueryWithDataResults:
break
# more than self limit
if maxQueryRecords and maxRecords >= maxQueryRecords:
break
# try again with 2x
maxRecords *= 2
if maxQueryRecords and maxRecords > maxQueryRecords:
maxRecords = maxQueryRecords
results = sorted(filteredResults, key=lambda result: result.vCard().propertyValue("UID"))
limited = maxResults and len(results) >= maxResults
log.info("limited={l} #results={n}", l=limited, n=len(results))
returnValue((results, limited,))
def propertiesInAddressBookQuery(addressBookQuery):
    """
    Return the vCard properties requested by a given query.

    @param addressBookQuery: the DAV:prop element of an addressbook query.
    @return: (etagRequested, propertyNames) tuple, where propertyNames is
        None when no specific vCard properties were requested.
    """
    etagRequested = False
    propertyNames = []
    if addressBookQuery.qname() == ("DAV:", "prop"):
        for prop in addressBookQuery.children:
            if isinstance(prop, carddavxml.AddressData):
                for addressProperty in prop.children:
                    if isinstance(addressProperty, carddavxml.Property):
                        propertyNames.append(addressProperty.attributes["name"])

            elif prop.qname() == ("DAV:", "getetag"):
                # Fixed: this previously tested the *builtin* ``property``
                # instead of the loop variable ``prop``, raising
                # AttributeError on any non-AddressData child.
                # for a real etag == md5(vCard), we need all properties
                etagRequested = True

    return (etagRequested, propertyNames if len(propertyNames) else None)
def expressionFromABFilter(addressBookFilter, vcardPropToSearchableFieldMap, constantProperties={}):
    """
    Convert the supplied addressbook-query into a ds expression tree.

    @param addressBookFilter: the L{Filter} for the addressbook-query to convert.
    @param vcardPropToSearchableFieldMap: a mapping from vcard properties to searchable query attributes.
    @param constantProperties: a mapping of constant properties. A query on a constant property will return all or None
    @return: (filterPropertyNames, expression) tuple. expression==True means list all results, expression==False means no results
    """
    def propFilterListQuery(filterAllOf, propFilters):
        """
        Create an expression for a list of prop-filter elements.

        @param filterAllOf: C{True} if the parent filter test is "allof"
        @param propFilters: the C{list} of L{ComponentFilter} elements.
        @return: (filterPropertyNames, expression) tuple. expression==True means list all results, expression==False means no results
        """
        def combineExpressionLists(expressionList, allOf, addedExpressions):
            """
            Deal with the 4-state logic:
                addedExpressions=None means ignore
                addedExpressions=True means all records
                addedExpressions=False means no records
                addedExpressions=[expressionlist] add to expression list
            """
            if expressionList is None:
                expressionList = addedExpressions
            elif addedExpressions is not None:
                if addedExpressions is True:
                    if not allOf:
                        expressionList = True  # expressionList or True is True
                    # else expressionList and True is expressionList
                elif addedExpressions is False:
                    if allOf:
                        expressionList = False  # expressionList and False is False
                    # else expressionList or False is expressionList
                else:
                    if expressionList is False:
                        if not allOf:
                            expressionList = addedExpressions  # False or addedExpressions is addedExpressions
                        # else False and addedExpressions is False
                    elif expressionList is True:
                        if allOf:
                            expressionList = addedExpressions  # True and addedExpressions is addedExpressions
                        # else True or addedExpressions is True
                    else:
                        expressionList.extend(addedExpressions)
            return expressionList

        def propFilterExpression(filterAllOf, propFilter):
            """
            Create an expression for a single prop-filter element.

            @param propFilter: the L{PropertyFilter} element.
            @return: None (unmapped property), True/False (all/no records),
                or a list of match expressions.
            """
            def matchExpression(fieldName, matchString, matchType, matchFlags):
                # special case recordType field
                if fieldName == FieldName.recordType:
                    # change kind to record type
                    matchValue = vCardKindToRecordTypeMap.get(matchString.lower())
                    if matchValue is None:
                        # unknown kind: use a fresh constant that matches nothing
                        matchValue = NamedConstant()
                        matchValue.description = u""
                    # change types and flags
                    matchFlags &= ~MatchFlags.caseInsensitive
                    matchType = MatchType.equals
                else:
                    matchValue = matchString.decode("utf-8")
                return MatchExpression(fieldName, matchValue, matchType, matchFlags)

            def definedExpression(defined, allOf):
                if constant or propFilter.filter_name in ("N", "FN", "UID", "KIND",):
                    return defined  # all records have this property so no records do not have it
                else:
                    # FIXME: The startsWith expression below, which works with LDAP and OD, is not currently supported
                    return True
                '''
                # this may generate inefficient LDAP query string
                matchFlags = MatchFlags_none if defined else MatchFlags.NOT
                matchList = [matchExpression(fieldName, "", MatchType.startsWith, matchFlags) for fieldName in searchableFields]
                return andOrExpression(allOf, matchList)
                '''

            def andOrExpression(propFilterAllOf, matchList):
                matchList = list(set(matchList))
                if propFilterAllOf and len(matchList) > 1:
                    # add OR expression because parent will AND
                    return [CompoundExpression(matchList, Operand.OR), ]
                else:
                    return matchList

            def paramFilterElementExpression(propFilterAllOf, paramFilterElement):  # @UnusedVariable
                params = vCardPropToParamMap.get(propFilter.filter_name.upper())
                defined = params and paramFilterElement.filter_name.upper() in params

                # defined test
                if defined != paramFilterElement.defined:
                    return False

                # parameter value text match
                if defined and paramFilterElement.filters:
                    paramValues = params[paramFilterElement.filter_name.upper()]
                    if paramValues and paramFilterElement.filters[0].text.upper() not in paramValues:
                        return False

                return True

            def textMatchElementExpression(propFilterAllOf, textMatchElement):

                # preprocess text match strings for ds query
                def getMatchStrings(propFilter, matchString):

                    if propFilter.filter_name in ("REV", "BDAY",):
                        # strip date/time punctuation for comparison
                        rawString = matchString
                        matchString = ""
                        for c in rawString:
                            if c not in "TZ-:":
                                matchString += c
                    elif propFilter.filter_name == "GEO":
                        matchString = ",".join(matchString.split(";"))

                    if propFilter.filter_name in ("N", "ADR", "ORG",):
                        # for structured properties, change into multiple strings for ds query
                        if propFilter.filter_name == "ADR":
                            # split by newline and comma
                            rawStrings = ",".join(matchString.split("\n")).split(",")
                        else:
                            # split by space
                            rawStrings = matchString.split(" ")

                        # remove empty strings
                        matchStrings = []
                        for oneString in rawStrings:
                            if len(oneString):
                                matchStrings += [oneString, ]
                        return matchStrings

                    elif len(matchString):
                        return [matchString, ]
                    else:
                        return []
                # end getMatchStrings

                if constant:
                    # FIXME: match is not implemented in twisteddaldav.query.Filter.TextMatch so use _match for now
                    return textMatchElement._match([constant, ])
                else:
                    matchStrings = getMatchStrings(propFilter, textMatchElement.text)

                    if not len(matchStrings):
                        # no searching text in binary ds attributes, so change to defined/not defined case
                        if textMatchElement.negate:
                            return definedExpression(False, propFilterAllOf)
                        # else fall through to attribute exists case below
                    else:
                        # use match_type where possible depending on property/attribute mapping
                        # FIXME: case-sensitive negate will not work. This should return all records in that case
                        matchType = MatchType.contains
                        if propFilter.filter_name in ("NICKNAME", "TITLE", "NOTE", "UID", "URL", "N", "ADR", "ORG", "REV", "LABEL",):
                            if textMatchElement.match_type == "equals":
                                matchType = MatchType.equals
                            elif textMatchElement.match_type == "starts-with":
                                matchType = MatchType.startsWith
                            elif textMatchElement.match_type == "ends-with":
                                matchType = MatchType.endsWith

                        matchList = []
                        for matchString in matchStrings:
                            if textMatchElement.collation == "i;unicode-casemap" and textMatchElement.negate:
                                matchFlags = MatchFlags.caseInsensitive | MatchFlags.NOT
                            elif textMatchElement.collation == "i;unicode-casemap":
                                matchFlags = MatchFlags.caseInsensitive
                            elif textMatchElement.negate:
                                matchFlags = MatchFlags.NOT
                            else:
                                matchFlags = MatchFlags_none

                            # Fixed: accumulate one expression per searchable
                            # field for EVERY match string.  The previous code
                            # rebuilt matchList each iteration and then
                            # extended the list with itself, which duplicated
                            # every expression and discarded the expressions
                            # built for all but the last match string.
                            matchList.extend(
                                matchExpression(fieldName, matchString, matchType, matchFlags)
                                for fieldName in searchableFields
                            )
                        return andOrExpression(propFilterAllOf, matchList)

                # attribute exists search
                return definedExpression(True, propFilterAllOf)
            # end textMatchElementExpression()

            # searchableFields are attributes to be used by this propfilter's expression
            searchableFields = vcardPropToSearchableFieldMap.get(propFilter.filter_name, [])
            if isinstance(searchableFields, NamedConstant):
                searchableFields = (searchableFields,)

            constant = constantProperties.get(propFilter.filter_name)
            if not searchableFields and not constant:
                # propFilter.filter_name is not mapped;
                # return None to try to match all items if this is the only property filter
                return None

            # create a textMatchElement for the IsNotDefined qualifier
            if isinstance(propFilter.qualifier, IsNotDefined):
                textMatchElement = TextMatch(carddavxml.TextMatch.fromString(""))
                textMatchElement.negate = True
                propFilter.filters.append(textMatchElement)

            # if only one propFilter, then use filterAllOf as propFilterAllOf to reduce subexpressions and simplify generated query string
            if len(propFilter.filters) == 1:
                propFilterAllOf = filterAllOf
            else:
                propFilterAllOf = propFilter.propfilter_test == "allof"

            propFilterExpressions = None
            for propFilterElement in propFilter.filters:
                propFilterExpression = None
                if isinstance(propFilterElement, ParameterFilter):
                    propFilterExpression = paramFilterElementExpression(propFilterAllOf, propFilterElement)
                elif isinstance(propFilterElement, TextMatch):
                    propFilterExpression = textMatchElementExpression(propFilterAllOf, propFilterElement)
                propFilterExpressions = combineExpressionLists(propFilterExpressions, propFilterAllOf, propFilterExpression)
                # early exit once the combined result is already decided
                if isinstance(propFilterExpressions, bool) and propFilterAllOf != propFilterExpression:
                    break

            if isinstance(propFilterExpressions, list):
                propFilterExpressions = list(set(propFilterExpressions))
                if propFilterExpressions and (filterAllOf != propFilterAllOf):
                    propFilterExpressions = [CompoundExpression(propFilterExpressions, Operand.AND if propFilterAllOf else Operand.OR)]

            return propFilterExpressions
        # end propFilterExpression

        expressions = None
        for propFilter in propFilters:
            propExpressions = propFilterExpression(filterAllOf, propFilter)
            expressions = combineExpressionLists(expressions, filterAllOf, propExpressions)

            # early loop exit
            if isinstance(expressions, bool) and filterAllOf != expressions:
                break

        # convert expression list to a single expression to return
        # log.debug("expressionFromABFilter: expressions={q!r}", q=expressions,)
        if isinstance(expressions, list):
            expressions = list(set(expressions))
            if len(expressions) > 1:
                expr = CompoundExpression(expressions, Operand.AND if filterAllOf else Operand.OR)
            elif len(expressions):
                expr = expressions[0]
            else:
                expr = not filterAllOf  # empty expression list. should not happen
        elif expressions is None:
            expr = not filterAllOf
        else:
            # True or False
            expr = expressions

        properties = [propFilter.filter_name for propFilter in propFilters]
        return (tuple(set(properties)), expr)

    # Top-level filter contains zero or more prop-filters
    properties = tuple()
    expression = None
    if addressBookFilter:
        filterAllOf = addressBookFilter.filter_test == "allof"
        if len(addressBookFilter.children):
            properties, expression = propFilterListQuery(filterAllOf, addressBookFilter.children)
        else:
            expression = not filterAllOf

    # log.debug("expressionFromABFilter: expression={q!r}, properties={pn}", q=expression, pn=properties)
    return ((properties, expression))
class ABDirectoryQueryResult(DAVPropertyMixIn):
    """
    Result from ab query report or multiget on directory: wraps the vCard
    generated for one directory record and serves its WebDAV properties.
    """

    def __init__(self, directoryBackedAddressBook,):
        self._directoryBackedAddressBook = directoryBackedAddressBook
        # self._vCard is populated by generate()

    def __repr__(self):
        # Fixed: the format string referenced the undefined field {rn}
        # while the keyword argument was named fn=, so repr() raised
        # KeyError whenever it was called.
        return "<{self.__class__.__name__}[{fn}({uid})]>".format(
            self=self,
            fn=self.vCard().propertyValue("FN"),
            uid=self.vCard().propertyValue("UID")
        )

    '''
    def __hash__(self):
        s = "".join([
            "{attr}:{values}".format(attr=attribute, values=self.valuesForAttribute(attribute),)
            for attribute in self.attributes
        ])
        return hash(s)
    '''

    @inlineCallbacks
    def generate(self, record, forceKind=None, addProps=None,):
        """Build this result's vCard from a directory record; returns self."""
        self._vCard = yield vCardFromRecord(record, forceKind, addProps, None)
        returnValue(self)

    def vCard(self):
        return self._vCard

    def vCardText(self):
        return str(self._vCard)

    def uri(self):
        """Resource name of this result within the collection."""
        return self.vCard().propertyValue("UID") + ".vcf"

    def hRef(self, parentURI=None):
        return davxml.HRef.fromString(joinURL(parentURI if parentURI else self._directoryBackedAddressBook.uri, self.uri()))

    def readProperty(self, property, request):
        """Serve live DAV properties computed from the vCard; delegate
        anything unrecognized to the parent address book resource."""
        if type(property) is tuple:
            qname = property
        else:
            qname = property.qname()
        namespace, name = qname

        if namespace == dav_namespace:
            if name == "resourcetype":
                result = davxml.ResourceType.empty  # @UndefinedVariable
                return result
            elif name == "getetag":
                # real etag == md5 of the full vCard text
                result = davxml.GETETag(ETag(hashlib.md5(self.vCardText()).hexdigest()).generate())
                return result
            elif name == "getcontenttype":
                mimeType = MimeType('text', 'vcard', {})
                result = davxml.GETContentType(generateContentType(mimeType))
                return result
            elif name == "getcontentlength":
                result = davxml.GETContentLength.fromString(str(len(self.vCardText())))
                return result
            elif name == "getlastmodified":
                if self.vCard().hasProperty("REV"):
                    modDatetime = parse_date(self.vCard().propertyValue("REV"))
                else:
                    modDatetime = datetime.datetime.utcnow()

                # strip time zone because time zones are unimplemented in davxml.GETLastModified.fromDate
                d = modDatetime.date()
                t = modDatetime.time()
                modDatetimeNoTZ = datetime.datetime(d.year, d.month, d.day, t.hour, t.minute, t.second, t.microsecond, None)
                result = davxml.GETLastModified.fromDate(modDatetimeNoTZ)
                return result
            elif name == "creationdate":
                if self.vCard().hasProperty("REV"):  # use modification date property if it exists
                    creationDatetime = parse_date(self.vCard().propertyValue("REV"))
                else:
                    creationDatetime = datetime.datetime.utcnow()
                result = davxml.CreationDate.fromDate(creationDatetime)
                return result
            elif name == "displayname":
                # AddressBook.app uses N. Use FN or UID instead?
                result = davxml.DisplayName.fromString(self.vCard().propertyValue("N"))
                return result

        elif namespace == twisted_dav_namespace:
            return super(ABDirectoryQueryResult, self).readProperty(property, request)

        return self._directoryBackedAddressBook.readProperty(property, request)

    def listProperties(self, request):  # @UnusedVariable
        """Yield the set of property qnames available on this resource."""
        qnames = set(self.liveProperties())

        # Remove dynamic live properties that do not apply here
        dynamicLiveProperties = (
            (dav_namespace, "quota-available-bytes"),
            (dav_namespace, "quota-used-bytes"),
        )
        for dqname in dynamicLiveProperties:
            qnames.remove(dqname)
        for qname in self.deadProperties().list():
            if (qname not in qnames) and (qname[0] != twisted_private_namespace):
                qnames.add(qname)
        yield qnames
    listProperties = deferredGenerator(listProperties)
| macosforge/ccs-calendarserver | twistedcaldav/directorybackedaddressbook.py | Python | apache-2.0 | 30,423 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010-2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# HTTPSClientAuthConnection code comes courtesy of ActiveState website:
# http://code.activestate.com/recipes/
# 577548-https-httplib-client-connection-with-certificate-v/
import collections
import copy
import errno
import functools
import httplib
import os
import re
import urllib
import urlparse
try:
from eventlet.green import socket, ssl
except ImportError:
import socket
import ssl
try:
import sendfile
SENDFILE_SUPPORTED = True
except ImportError:
SENDFILE_SUPPORTED = False
from glance.common import auth
from glance.common import exception, utils
import glance.openstack.common.log as logging
from glance.openstack.common import strutils
LOG = logging.getLogger(__name__)

# common chunk size for get and put (64 KiB)
CHUNKSIZE = 65536

# Matches a leading API version path segment such as "/v1" or "/v2.0".
VERSION_REGEX = re.compile(r"/?v[0-9\.]+")
def handle_unauthenticated(func):
    """
    Decorator: when the wrapped call fails with NotAuthenticated, force a
    re-authentication and retry the call exactly once.
    """
    @functools.wraps(func)
    def retry_after_reauth(self, *args, **kwargs):
        try:
            return func(self, *args, **kwargs)
        except exception.NotAuthenticated:
            # Token stale or missing: refresh credentials, then try again.
            self._authenticate(force_reauth=True)
            return func(self, *args, **kwargs)
    return retry_after_reauth
def handle_redirects(func):
    """
    Decorator for _do_request: follow HTTP redirects, giving up after
    MAX_REDIRECTS hops.
    """
    MAX_REDIRECTS = 5

    @functools.wraps(func)
    def follow_redirects(self, method, url, body, headers):
        remaining = MAX_REDIRECTS
        while remaining:
            remaining -= 1
            try:
                return func(self, method, url, body, headers)
            except exception.RedirectException as redirect:
                if redirect.url is None:
                    raise exception.InvalidRedirect()
                # Retry against the location the server pointed us at.
                url = redirect.url
        raise exception.MaxRedirectsExceeded(redirects=MAX_REDIRECTS)
    return follow_redirects
class HTTPSClientAuthConnection(httplib.HTTPSConnection):
    """
    Class to make a HTTPS connection, with support for
    full client-based SSL Authentication

    :see http://code.activestate.com/recipes/
        577548-https-httplib-client-connection-with-certificate-v/
    """

    def __init__(self, host, port, key_file, cert_file,
                 ca_file, timeout=None, insecure=False):
        # key_file/cert_file provide client-side SSL authentication;
        # ca_file is used to verify the server certificate unless
        # insecure is True.
        httplib.HTTPSConnection.__init__(self, host, port, key_file=key_file,
                                         cert_file=cert_file)
        self.key_file = key_file
        self.cert_file = cert_file
        self.ca_file = ca_file
        self.timeout = timeout
        self.insecure = insecure

    def connect(self):
        """
        Connect to a host on a given (SSL) port.
        If ca_file is pointing somewhere, use it to check Server Certificate.

        Redefined/copied and extended from httplib.py:1105 (Python 2.6.x).
        This is needed to pass cert_reqs=ssl.CERT_REQUIRED as parameter to
        ssl.wrap_socket(), which forces SSL to check server certificate against
        our client certificate.
        """
        sock = socket.create_connection((self.host, self.port), self.timeout)
        # Tunnel through an HTTP proxy first when one is configured;
        # wrapping in SSL must happen after the tunnel is established.
        if self._tunnel_host:
            self.sock = sock
            self._tunnel()
        # Check CA file unless 'insecure' is specified
        if self.insecure is True:
            # Skip server certificate verification entirely.
            self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
                                        cert_reqs=ssl.CERT_NONE)
        else:
            # Verify the server certificate against ca_file.
            self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
                                        ca_certs=self.ca_file,
                                        cert_reqs=ssl.CERT_REQUIRED)
class BaseClient(object):
"""A base client class"""
DEFAULT_PORT = 80
DEFAULT_DOC_ROOT = None
# Standard CA file locations for Debian/Ubuntu, RedHat/Fedora,
# Suse, FreeBSD/OpenBSD
DEFAULT_CA_FILE_PATH = ('/etc/ssl/certs/ca-certificates.crt:'
'/etc/pki/tls/certs/ca-bundle.crt:'
'/etc/ssl/ca-bundle.pem:'
'/etc/ssl/cert.pem')
OK_RESPONSE_CODES = (
httplib.OK,
httplib.CREATED,
httplib.ACCEPTED,
httplib.NO_CONTENT,
)
REDIRECT_RESPONSE_CODES = (
httplib.MOVED_PERMANENTLY,
httplib.FOUND,
httplib.SEE_OTHER,
httplib.USE_PROXY,
httplib.TEMPORARY_REDIRECT,
)
def __init__(self, host, port=None, timeout=None, use_ssl=False,
             auth_tok=None, creds=None, doc_root=None, key_file=None,
             cert_file=None, ca_file=None, insecure=False,
             configure_via_auth=True):
    """
    Creates a new client to some service.

    :param host: The host where service resides
    :param port: The port where service resides
    :param timeout: Connection timeout.
    :param use_ssl: Should we use HTTPS?
    :param auth_tok: The auth token to pass to the server
    :param creds: The credentials to pass to the auth plugin
    :param doc_root: Prefix for all URLs we request from host
    :param key_file: Optional PEM-formatted file that contains the private
                     key.
                     If use_ssl is True, and this param is None (the
                     default), then an environ variable
                     GLANCE_CLIENT_KEY_FILE is looked for. If no such
                     environ variable is found, ClientConnectionError
                     will be raised.
    :param cert_file: Optional PEM-formatted certificate chain file.
                      If use_ssl is True, and this param is None (the
                      default), then an environ variable
                      GLANCE_CLIENT_CERT_FILE is looked for. If no such
                      environ variable is found, ClientConnectionError
                      will be raised.
    :param ca_file: Optional CA cert file to use in SSL connections
                    If use_ssl is True, and this param is None (the
                    default), then an environ variable
                    GLANCE_CLIENT_CA_FILE is looked for.
    :param insecure: Optional. If set then the server's certificate
                     will not be verified.
    :param configure_via_auth: Optional. Defaults to True. If set, the
                    URL returned from the service catalog for the image
                    endpoint will **override** the URL supplied to in
                    the host parameter.
    """
    self.host = host
    self.port = port or self.DEFAULT_PORT
    self.timeout = timeout
    # A value of '0' implies never timeout
    if timeout == 0:
        self.timeout = None
    self.use_ssl = use_ssl
    self.auth_tok = auth_tok
    self.creds = creds or {}
    # Lazily opened; see get_connection()/connect-time helpers.
    self.connection = None
    self.configure_via_auth = configure_via_auth
    # doc_root can be a nullstring, which is valid, and why we
    # cannot simply do doc_root or self.DEFAULT_DOC_ROOT below.
    self.doc_root = (doc_root if doc_root is not None
                     else self.DEFAULT_DOC_ROOT)
    self.key_file = key_file
    self.cert_file = cert_file
    self.ca_file = ca_file
    self.insecure = insecure
    # NOTE: order matters below -- the auth plugin reads self.creds and
    # self.insecure, and get_connect_kwargs() reads the SSL file attrs
    # assigned above.
    self.auth_plugin = self.make_auth_plugin(self.creds, self.insecure)
    self.connect_kwargs = self.get_connect_kwargs()
    def get_connect_kwargs(self):
        """
        Build the keyword arguments handed to the connection class.

        Always includes ``timeout``.  When ``use_ssl`` is set, also resolves
        key/cert/CA files (falling back to the GLANCE_CLIENT_*_FILE
        environment variables) and validates them before adding
        ``key_file``, ``cert_file``, ``ca_file`` and ``insecure``.

        :returns: dict of kwargs for the connection constructor
        :raises exception.ClientConnectionError: if only one of key/cert is
            supplied, or a supplied key/cert/CA path does not exist
        """
        connect_kwargs = {}

        # Both secure and insecure connections have a timeout option
        connect_kwargs['timeout'] = self.timeout

        if self.use_ssl:
            # Environment variables are only a fallback; explicit
            # constructor arguments win.
            if self.key_file is None:
                self.key_file = os.environ.get('GLANCE_CLIENT_KEY_FILE')
            if self.cert_file is None:
                self.cert_file = os.environ.get('GLANCE_CLIENT_CERT_FILE')
            if self.ca_file is None:
                self.ca_file = os.environ.get('GLANCE_CLIENT_CA_FILE')

            # Check that key_file/cert_file are either both set or both unset
            if self.cert_file is not None and self.key_file is None:
                msg = _("You have selected to use SSL in connecting, "
                        "and you have supplied a cert, "
                        "however you have failed to supply either a "
                        "key_file parameter or set the "
                        "GLANCE_CLIENT_KEY_FILE environ variable")
                raise exception.ClientConnectionError(msg)

            if self.key_file is not None and self.cert_file is None:
                msg = _("You have selected to use SSL in connecting, "
                        "and you have supplied a key, "
                        "however you have failed to supply either a "
                        "cert_file parameter or set the "
                        "GLANCE_CLIENT_CERT_FILE environ variable")
                raise exception.ClientConnectionError(msg)

            if (self.key_file is not None and
                    not os.path.exists(self.key_file)):
                msg = _("The key file you specified %s does not "
                        "exist") % self.key_file
                raise exception.ClientConnectionError(msg)
            connect_kwargs['key_file'] = self.key_file

            if (self.cert_file is not None and
                    not os.path.exists(self.cert_file)):
                msg = _("The cert file you specified %s does not "
                        "exist") % self.cert_file
                raise exception.ClientConnectionError(msg)
            connect_kwargs['cert_file'] = self.cert_file

            if (self.ca_file is not None and
                    not os.path.exists(self.ca_file)):
                msg = _("The CA file you specified %s does not "
                        "exist") % self.ca_file
                raise exception.ClientConnectionError(msg)

            # No CA supplied anywhere: probe the platform default bundle
            # locations and use the first one that exists.
            if self.ca_file is None:
                for ca in self.DEFAULT_CA_FILE_PATH.split(":"):
                    if os.path.exists(ca):
                        self.ca_file = ca
                        break

            connect_kwargs['ca_file'] = self.ca_file
            connect_kwargs['insecure'] = self.insecure

        return connect_kwargs
def set_auth_token(self, auth_tok):
"""
Updates the authentication token for this client connection.
"""
# FIXME(sirp): Nova image/glance.py currently calls this. Since this
# method isn't really doing anything useful[1], we should go ahead and
# rip it out, first in Nova, then here. Steps:
#
# 1. Change auth_tok in Glance to auth_token
# 2. Change image/glance.py in Nova to use client.auth_token
# 3. Remove this method
#
# [1] http://mail.python.org/pipermail/tutor/2003-October/025932.html
self.auth_tok = auth_tok
def configure_from_url(self, url):
"""
Setups the connection based on the given url.
The form is:
<http|https>://<host>:port/doc_root
"""
LOG.debug(_("Configuring from URL: %s"), url)
parsed = urlparse.urlparse(url)
self.use_ssl = parsed.scheme == 'https'
self.host = parsed.hostname
self.port = parsed.port or 80
self.doc_root = parsed.path.rstrip('/')
# We need to ensure a version identifier is appended to the doc_root
if not VERSION_REGEX.match(self.doc_root):
if self.DEFAULT_DOC_ROOT:
doc_root = self.DEFAULT_DOC_ROOT.lstrip('/')
self.doc_root += '/' + doc_root
msg = (_("Appending doc_root %(doc_root)s to URL %(url)s") %
{'doc_root': doc_root, 'url': url})
LOG.debug(msg)
# ensure connection kwargs are re-evaluated after the service catalog
# publicURL is parsed for potential SSL usage
self.connect_kwargs = self.get_connect_kwargs()
def make_auth_plugin(self, creds, insecure):
"""
Returns an instantiated authentication plugin.
"""
strategy = creds.get('strategy', 'noauth')
plugin = auth.get_plugin_from_strategy(strategy, creds, insecure,
self.configure_via_auth)
return plugin
def get_connection_type(self):
"""
Returns the proper connection type
"""
if self.use_ssl:
return HTTPSClientAuthConnection
else:
return httplib.HTTPConnection
def _authenticate(self, force_reauth=False):
"""
Use the authentication plugin to authenticate and set the auth token.
:param force_reauth: For re-authentication to bypass cache.
"""
auth_plugin = self.auth_plugin
if not auth_plugin.is_authenticated or force_reauth:
auth_plugin.authenticate()
self.auth_tok = auth_plugin.auth_token
management_url = auth_plugin.management_url
if management_url and self.configure_via_auth:
self.configure_from_url(management_url)
@handle_unauthenticated
def do_request(self, method, action, body=None, headers=None,
params=None):
"""
Make a request, returning an HTTP response object.
:param method: HTTP verb (GET, POST, PUT, etc.)
:param action: Requested path to append to self.doc_root
:param body: Data to send in the body of the request
:param headers: Headers to send with the request
:param params: Key/value pairs to use in query string
:returns: HTTP response object
"""
if not self.auth_tok:
self._authenticate()
url = self._construct_url(action, params)
# NOTE(ameade): We need to copy these kwargs since they can be altered
# in _do_request but we need the originals if handle_unauthenticated
# calls this function again.
return self._do_request(method=method, url=url,
body=copy.deepcopy(body),
headers=copy.deepcopy(headers))
    def _construct_url(self, action, params=None):
        """
        Create a URL object we can use to pass to _do_request().

        :param action: path component appended to self.doc_root (quoted)
        :param params: optional dict of query parameters; keys with a
            None value are dropped, all other values are stringified and
            safely encoded
        :returns: urlparse.ParseResult for the request
        """
        action = urllib.quote(action)
        path = '/'.join([self.doc_root or '', action.lstrip('/')])
        scheme = "https" if self.use_ssl else "http"
        netloc = "%s:%d" % (self.host, self.port)

        if isinstance(params, dict):
            # NOTE: deleting keys while iterating .items() is safe here
            # only because Python 2's .items() returns a list snapshot --
            # this would raise under Python 3's dict views.
            for (key, value) in params.items():
                if value is None:
                    del params[key]
                    continue
                if not isinstance(value, basestring):
                    value = str(value)
                params[key] = strutils.safe_encode(value)
            query = urllib.urlencode(params)
        else:
            query = None

        url = urlparse.ParseResult(scheme, netloc, path, '', query, '')
        log_msg = _("Constructed URL: %s")
        LOG.debug(log_msg, url.geturl())
        return url
def _encode_headers(self, headers):
"""
Encodes headers.
Note: This should be used right before
sending anything out.
:param headers: Headers to encode
:returns: Dictionary with encoded headers'
names and values
"""
to_str = strutils.safe_encode
return dict([(to_str(h), to_str(v)) for h, v in headers.iteritems()])
    @handle_redirects
    def _do_request(self, method, url, body, headers):
        """
        Connects to the server and issues a request.  Handles converting
        any returned HTTP error status codes to OpenStack/Glance exceptions
        and closing the server connection. Returns the result data, or
        raises an appropriate exception.

        :param method: HTTP method ("GET", "POST", "PUT", etc...)
        :param url: urlparse.ParsedResult object with URL information
        :param body: data to send (as string, filelike or iterable),
                     or None (default)
        :param headers: mapping of key/value pairs to add as headers

        :note

        If the body param has a read attribute, and method is either
        POST or PUT, this method will automatically conduct a chunked-transfer
        encoding and use the body as a file object or iterable, transferring
        chunks of data using the connection's send() method. This allows large
        objects to be transferred efficiently without buffering the entire
        body in memory.
        """
        if url.query:
            path = url.path + "?" + url.query
        else:
            path = url.path

        try:
            connection_type = self.get_connection_type()
            headers = self._encode_headers(headers or {})

            # Attach the auth token unless the caller already supplied one.
            if 'x-auth-token' not in headers and self.auth_tok:
                headers['x-auth-token'] = self.auth_tok

            c = connection_type(url.hostname, url.port, **self.connect_kwargs)

            def _pushing(method):
                # Only POST/PUT carry a request body worth streaming.
                return method.lower() in ('post', 'put')

            def _simple(body):
                return body is None or isinstance(body, basestring)

            def _filelike(body):
                return hasattr(body, 'read')

            def _sendbody(connection, iter):
                connection.endheaders()
                for sent in iter:
                    # iterator has done the heavy lifting
                    pass

            def _chunkbody(connection, iter):
                # Manual HTTP/1.1 chunked transfer encoding: each chunk is
                # "<hex length>\r\n<data>\r\n", terminated by a zero chunk.
                connection.putheader('Transfer-Encoding', 'chunked')
                connection.endheaders()
                for chunk in iter:
                    connection.send('%x\r\n%s\r\n' % (len(chunk), chunk))
                connection.send('0\r\n\r\n')

            # Do a simple request or a chunked request, depending
            # on whether the body param is file-like or iterable and
            # the method is PUT or POST
            #
            if not _pushing(method) or _simple(body):
                # Simple request...
                c.request(method, path, body, headers)
            elif _filelike(body) or self._iterable(body):
                c.putrequest(method, path)

                use_sendfile = self._sendable(body)

                # According to HTTP/1.1, Content-Length and Transfer-Encoding
                # conflict.
                for header, value in headers.items():
                    if use_sendfile or header.lower() != 'content-length':
                        c.putheader(header, str(value))

                iter = self.image_iterator(c, headers, body)

                if use_sendfile:
                    # send actual file without copying into userspace
                    _sendbody(c, iter)
                else:
                    # otherwise iterate and chunk
                    _chunkbody(c, iter)
            else:
                raise TypeError('Unsupported image type: %s' % body.__class__)

            res = c.getresponse()

            def _retry(res):
                # Retry-After header is forwarded into throttling exceptions.
                return res.getheader('Retry-After')

            # Map HTTP status codes onto Glance exception types; redirects
            # are raised so the @handle_redirects decorator can re-issue.
            status_code = self.get_status_code(res)
            if status_code in self.OK_RESPONSE_CODES:
                return res
            elif status_code in self.REDIRECT_RESPONSE_CODES:
                raise exception.RedirectException(res.getheader('Location'))
            elif status_code == httplib.UNAUTHORIZED:
                raise exception.NotAuthenticated(res.read())
            elif status_code == httplib.FORBIDDEN:
                raise exception.Forbidden(res.read())
            elif status_code == httplib.NOT_FOUND:
                raise exception.NotFound(res.read())
            elif status_code == httplib.CONFLICT:
                raise exception.Duplicate(res.read())
            elif status_code == httplib.BAD_REQUEST:
                raise exception.Invalid(res.read())
            elif status_code == httplib.MULTIPLE_CHOICES:
                raise exception.MultipleChoices(body=res.read())
            elif status_code == httplib.REQUEST_ENTITY_TOO_LARGE:
                raise exception.LimitExceeded(retry=_retry(res),
                                              body=res.read())
            elif status_code == httplib.INTERNAL_SERVER_ERROR:
                raise exception.ServerError()
            elif status_code == httplib.SERVICE_UNAVAILABLE:
                raise exception.ServiceUnavailable(retry=_retry(res))
            else:
                raise exception.UnexpectedStatus(status=status_code,
                                                 body=res.read())

        except (socket.error, IOError) as e:
            raise exception.ClientConnectionError(e)
def _seekable(self, body):
# pipes are not seekable, avoids sendfile() failure on e.g.
# cat /path/to/image | glance add ...
# or where add command is launched via popen
try:
os.lseek(body.fileno(), 0, os.SEEK_CUR)
return True
except OSError as e:
return (e.errno != errno.ESPIPE)
def _sendable(self, body):
return (SENDFILE_SUPPORTED and
hasattr(body, 'fileno') and
self._seekable(body) and
not self.use_ssl)
    def _iterable(self, body):
        # True when the body can be iterated for chunked transfer.
        # NOTE(review): collections.Iterable is the Python 2 spelling; on
        # Python 3 this lives in collections.abc (removed from the
        # collections top level in 3.10) -- fine here since this module
        # targets Python 2 (it uses iteritems/basestring/httplib).
        return isinstance(body, collections.Iterable)
def image_iterator(self, connection, headers, body):
if self._sendable(body):
return SendFileIterator(connection, body)
elif self._iterable(body):
return utils.chunkreadable(body)
else:
return ImageBodyIterator(body)
def get_status_code(self, response):
"""
Returns the integer status code from the response, which
can be either a Webob.Response (used in testing) or httplib.Response
"""
if hasattr(response, 'status_int'):
return response.status_int
else:
return response.status
def _extract_params(self, actual_params, allowed_params):
"""
Extract a subset of keys from a dictionary. The filters key
will also be extracted, and each of its values will be returned
as an individual param.
:param actual_params: dict of keys to filter
:param allowed_params: list of keys that 'actual_params' will be
reduced to
:retval subset of 'params' dict
"""
try:
# expect 'filters' param to be a dict here
result = dict(actual_params.get('filters'))
except TypeError:
result = {}
for allowed_param in allowed_params:
if allowed_param in actual_params:
result[allowed_param] = actual_params[allowed_param]
return result
| cloudbau/glance | glance/common/client.py | Python | apache-2.0 | 23,522 |
#!/usr/bin/python
# Matt's MYUW Mobile test
# What these tests will check for:
# Landing page:
# * Correct number of critical notices
# * Correct number of unread notices
# * Correct email link, or lack thereof
# * Presense of registration card, if expected
# * Correct link names and URLs of registration resource links
# * Correct course card titles
# * Correct visual schedule course titles
# * Correct HFS husky card names
# * Library card present if expected
# * Correct number of library holds ready, if expected
# * Correct number of checked out items, if expected
# * Correct value of library fine, if expected
# * Presense of each expected future quarter
# * Lack of error messages
# * Check "No registration found" if expected
# * Check name and URLs of resource page links
# TODO: Notices page, tuition
import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
import selenium
import time
import sys
import os
from myuw_selenium.platforms import on_platforms, SeleniumLiveServerTestCase
# Myuw specific stuff
# Test user classes
from myuw_selenium.test.myuw_user import testUser, testUserDate
# General default settings
from myuw_selenium.test.musettings import *
# Expected data
from myuw_selenium.test.mudata import *
from myuw_selenium.test.resourcelinks import resLinks
from myuw_selenium.test.records import records
from myuw_selenium.test.academic_card import academic_card_values
from myuw_selenium.test.grade_card import grade_card_values
# Test scenario classes
from muwm_cases import myuw_user_scenario, myuw_date_scenario
# Mock data user scenarios
# Format for a scenario:
# @on_platforms() # Creates a copy of the test for every browser we test on
# class class_name(base_scenario, SeleniumLiveServerTestCase):
# # base_scenario is the base test case, such as myuw_user_scenario,
# # or myuw_date_scenario
# def postsetup
# self.user = testUser(self.driver, self, args...) # See myuw_user.py for docs
# self.username = 'netidGoesHere'
# self.setDate('yyyy-mm-dd') # Optional
'''
# jbothell user scenario
@on_platforms()
class myuw_jbothell(myuw_user_scenario, SeleniumLiveServerTestCase):
def postsetup(self):
self.user = testUser(self.driver, self,
critical = 4,
unread = 10,
email = emails['live'],
regcard = ['Registration: Summer 2013'],
regholds = 1,
reglinks = (links['bts'], links['reg']),
schedule = True,
vSchedule = True,
courses = ('BCWRIT 500 A', 'BISSEB 259 A', 'BESS 102 A', 'BESS 102 AB'),
tuition = {'balance' : True, 'due' : 'future'},
HFS = ('stu',),
library = True,
libraryholds = 1,
textbooks = ('BISSEB 259 A', 'BCWRIT 500', 'BESS 102 A', 'BESS 102 AB'),
resources = resLinks['bothell'],
record = records['jbothell'],
academic_card = academic_card_values['jbothell']
)
self.username = 'jbothell'
def test_blah(self):
pass
# javerage user scenario
@on_platforms()
class myuw_javerage(myuw_user_scenario, SeleniumLiveServerTestCase):
def postsetup(self):
self.user = testUser(self.driver, self,
critical = 5,
unread = 11,
email = emails['gmail'],
regcard = False,
regholds = 1,
reglinks = (links['sts'], links['reg']),
schedule = True,
vSchedule = True,
courses = ('PHYS 121 A', 'PHYS 121 AC', 'PHYS 121 AQ', 'TRAIN 100 A', 'TRAIN 101 A'),
#tuition = 'duefuture',
HFS = ('stu', 'staff', 'din'),
library = True,
libraryholds = 1,
libraryout = 1,
fq_fall = ('ENGL 207 A',),
fq_summera = ('ELCBUS 451',),
fq_summerb = ('TRAIN 101',),
resources = resLinks['seattle'],
record = records['javerage'],
academic_card = academic_card_values['javerage'],
#grade_card = grade_card_values['javerage']
)
# TODO: Add textbooks
self.username = 'javerage'
@on_platforms()
class myuw_jinter(myuw_user_scenario, SeleniumLiveServerTestCase):
def postsetup(self):
self.user = testUser(self.driver, self,
critical = 4,
unread = 13,
email = False,
regcard = ['Registration: Summer 2013'],
schedule = False,
vSchedule = False,
noregfound = True,
#tuition = 'nofuture',
HFS = ('staff',),
library = True,
libraryout = 1 ,
libraryfine = '$3.25',
resources = resLinks['seattle'],
record = records['jinter']
)
self.username = 'jinter'
@on_platforms()
class myuw_jnew(myuw_user_scenario, SeleniumLiveServerTestCase):
def postsetup(self):
self.user = testUser(self.driver, self,
critical = 2,
unread = 18,
email = False,
regcard = ['Registration: Summer 2013'],
schedule = True,
vSchedule = True,
courses = ('TRAIN 101 A',),
#tuition = 'nopast',
HFS = ('stu', 'din'),
library = True,
libraryfine = '$10.00',
resources = resLinks['seattle'],
record = records['jnew']
)
self.username = 'jnew'
@on_platforms()
class myuw_none(myuw_user_scenario, SeleniumLiveServerTestCase):
def postsetup(self):
self.user = testUser(self.driver, self,
regcard = ['Registration: Summer 2013'],
noregfound = True,
HFS = (),
record = records['none']
)
self.username = 'none'
'''
@on_platforms()
class myuw_jbothell_date1(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: Bothell student 'jbothell' frozen at 2013-04-12; expects the
    # full card set (registration, schedule, HFS, library, textbooks, record).
    def postsetup(self):
        self.user = testUser(self.driver, self,
            critical = 4,
            unread = 10,
            email = emails['live'],
            regcard = True,
            regholds = 1,
            reglinks = (links['bts'], links['reg']),
            schedule = True,
            vSchedule = True,
            courses = ('BCWRIT 500 A', 'BISSEB 259 A', 'BESS 102 A', 'BESS 102 AB'),
            tuition = {'balance' : True, 'due' : 'future'},
            HFS = ('stu',),
            library = True,
            libraryholds = 1,
            textbooks = ('BISSEB 259 A', 'BCWRIT 500', 'BESS 102 A', 'BESS 102 AB'),
            resources = resLinks['bothell'],
            record = records['jbothell'],
            academic_card = academic_card_values['jbothell']
            )
        self.username = 'jbothell'
        self.setDate('2013-04-12')
@on_platforms()
class myuw_none_date1(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: user 'none' at 2013-02-01; expects a Spring 2013 registration
    # card, no balance, and no registration found.
    def postsetup(self):
        self.user = testUser(self.driver, self,
            regcard = 'Registration: Spring 2013',
            tuition = {'balance' : False, 'due' : 'future'},
            HFS = (),
            # FIX: was 'records='; every other scenario passes 'record=',
            # so the expected record data was silently ignored.
            record = records['none'],
            noregfound = True,
            )
        self.username = 'none'
        self.setDate('2013-02-01')
@on_platforms()
class myuw_none_date2(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: user 'none' at 2013-02-15; same expectations as date1.
    def postsetup(self):
        self.user = testUser(self.driver, self,
            regcard = 'Registration: Spring 2013',
            tuition = {'balance' : False, 'due' : 'future'},
            HFS = (),
            # FIX: 'records=' -> 'record=' for consistency with siblings.
            record = records['none'],
            noregfound = True,
            )
        self.username = 'none'
        self.setDate('2013-02-15')
@on_platforms()
class myuw_none_date3(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: user 'none' at 2013-03-04; Spring 2013 registration card.
    def postsetup(self):
        self.user = testUser(self.driver, self,
            regcard = 'Registration: Spring 2013',
            tuition = {'balance' : False, 'due' : 'future'},
            HFS = (),
            # FIX: 'records=' -> 'record=' for consistency with siblings.
            record = records['none'],
            noregfound = True,
            )
        self.username = 'none'
        self.setDate('2013-03-04')
@on_platforms()
class myuw_none_date4(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: user 'none' at 2013-03-11; no registration card expected.
    def postsetup(self):
        self.user = testUser(self.driver, self,
            regcard = False,
            tuition = {'balance' : False, 'due' : 'future'},
            HFS = (),
            # FIX: 'records=' -> 'record=' for consistency with siblings.
            record = records['none'],
            noregfound = True,
            )
        self.username = 'none'
        self.setDate('2013-03-11')
@on_platforms()
class myuw_none_date5(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: user 'none' at 2013-04-01; no registration card expected.
    def postsetup(self):
        self.user = testUser(self.driver, self,
            regcard = False,
            tuition = {'balance' : False, 'due' : 'future'},
            HFS = (),
            # FIX: 'records=' -> 'record=' for consistency with siblings.
            record = records['none'],
            noregfound = True,
            )
        self.username = 'none'
        self.setDate('2013-04-01')
@on_platforms()
class myuw_none_date6(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: user 'none' at 2013-04-08; Summer 2013 registration card.
    def postsetup(self):
        self.user = testUser(self.driver, self,
            regcard = 'Registration: Summer 2013',
            tuition = {'balance' : False, 'due' : 'future'},
            HFS = (),
            # FIX: 'records=' -> 'record=' for consistency with siblings.
            record = records['none'],
            noregfound = True,
            )
        self.username = 'none'
        self.setDate('2013-04-08')
@on_platforms()
class myuw_none_date7(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: user 'none' at 2013-04-22; Summer 2013 registration card.
    def postsetup(self):
        self.user = testUser(self.driver, self,
            regcard = ['Registration: Summer 2013'],
            tuition = {'balance' : False, 'due' : 'future'},
            HFS = (),
            # FIX: 'records=' -> 'record=' for consistency with siblings.
            record = records['none'],
            noregfound = True,
            )
        self.username = 'none'
        self.setDate('2013-04-22')
@on_platforms()
class myuw_none_date8(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: user 'none' at 2013-05-23; Autumn and Summer 2013 cards.
    def postsetup(self):
        self.user = testUser(self.driver, self,
            regcard = ['Registration: Autumn 2013','Registration: Summer 2013'],
            tuition = {'balance' : False, 'due' : 'future'},
            HFS = (),
            # FIX: 'records=' -> 'record=' for consistency with siblings.
            record = records['none'],
            noregfound = True,
            )
        self.username = 'none'
        self.setDate('2013-05-23')
@on_platforms()
class myuw_none_date9(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: user 'none' at 2013-05-30; Autumn 2013 registration card.
    def postsetup(self):
        self.user = testUser(self.driver, self,
            regcard = 'Registration: Autumn 2013',
            tuition = {'balance' : False, 'due' : 'future'},
            HFS = (),
            # FIX: 'records=' -> 'record=' for consistency with siblings.
            record = records['none'],
            noregfound = True,
            )
        self.username = 'none'
        self.setDate('2013-05-30')
@on_platforms()
class myuw_none_date10(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: user 'none' at 2013-07-01; no registration card expected.
    def postsetup(self):
        self.user = testUser(self.driver, self,
            regcard = False,
            tuition = {'balance' : False, 'due' : 'future'},
            HFS = (),
            # FIX: 'records=' -> 'record=' for consistency with siblings.
            record = records['none'],
            noregfound = True,
            )
        self.username = 'none'
        self.setDate('2013-07-01')
@on_platforms()
class myuw_none_date11(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: user 'none' at 2013-10-25; Winter 2014 registration card.
    def postsetup(self):
        self.user = testUser(self.driver, self,
            regcard = ['Registration: Winter 2014'],
            tuition = {'balance' : False, 'due' : 'future'},
            HFS = (),
            # FIX: 'records=' -> 'record=' for consistency with siblings.
            record = records['none'],
            noregfound = True,
            )
        self.username = 'none'
        self.setDate('2013-10-25')
@on_platforms()
class myuw_none_date12(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: user 'none' at 2013-12-02; no registration card expected.
    def postsetup(self):
        self.user = testUser(self.driver, self,
            regcard = False,
            tuition = {'balance' : False, 'due' : 'future'},
            HFS = (),
            # FIX: 'records=' -> 'record=' for consistency with siblings.
            record = records['none'],
            noregfound = True,
            )
        self.username = 'none'
        self.setDate('2013-12-02')
@on_platforms()
class myuw_javerage_date1(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: 'javerage' at 2013-07-01; no registration card and no
    # "no registration found" message expected.
    def postsetup(self):
        self.user = testUser(self.driver, self,
            regcard = False,
            noregfound = False
            )
        self.username = 'javerage'
        self.setDate('2013-07-01')
@on_platforms()
class myuw_javerage_date2(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: 'javerage' at 2013-02-15; Spring 2013 is the only future
    # quarter shown.
    def postsetup(self):
        self.user = testUser(self.driver, self,
            regcard = False,
            noregfound = False,
            futureQtrs = ['Spring 2013']
            )
        self.username = 'javerage'
        self.setDate('2013-02-15')
@on_platforms()
class myuw_javerage_date3(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: 'javerage' at 2013-04-01; no future quarters expected.
    def postsetup(self):
        self.user = testUser(self.driver, self,
            regcard = False,
            noregfound = False,
            futureQtrs = [],
            )
        self.username = 'javerage'
        self.setDate('2013-04-01')
@on_platforms()
class myuw_javerage_date4(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: 'javerage' at 2013-04-08; both summer terms and autumn
    # appear as future quarters.
    def postsetup(self):
        self.user = testUser(self.driver, self,
            regcard = False,
            noregfound = False,
            futureQtrs = ['Summer 2013 A-Term', 'Summer 2013 B-Term', 'Autumn 2013']
            )
        self.username = 'javerage'
        self.setDate('2013-04-08')
@on_platforms()
class myuw_javerage_date5(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: 'javerage' at 2013-06-24; only Autumn 2013 is a future
    # quarter.
    def postsetup(self):
        self.user = testUser(self.driver, self,
            regcard = False,
            noregfound = False,
            futureQtrs = ['Autumn 2013',]
            )
        self.username = 'javerage'
        self.setDate('2013-06-24')
@on_platforms()
class myuw_javerage_date6(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: 'javerage' at 2013-09-25; no future quarters expected.
    def postsetup(self):
        self.user = testUser(self.driver, self,
            regcard = False,
            noregfound = False,
            futureQtrs = []
            )
        self.username = 'javerage'
        self.setDate('2013-09-25')
@on_platforms()
class myuw_javerage_date7(myuw_date_scenario, SeleniumLiveServerTestCase):
    # Scenario: 'javerage' at 2013-03-28 (end of winter quarter); the
    # grade card should be visible.
    def postsetup(self):
        self.user = testUser(self.driver, self,
            regcard = False,
            noregfound = False,
            futureQtrs = [],
            grade_card = True
            )
        self.username = 'javerage'
        self.setDate('2013-03-28')
# Commenting this out for now, there seems to be a mismatch
# of data somewhere.
'''
@on_platforms()
class myuw_eight(myuw_user_scenario, SeleniumLiveServerTestCase):
def postsetup(self):
self.user = testUser(self.driver, self,
critical = 7,
unread = 10,
email = emails['gmail'],
regcard = False,
reglinks = (links['tts'], links['tqs'], links['reg']),
schedule = True,
vSchedule = True,
courses = ('PHYS 121 A', 'PHYS 121 AC', 'PHYS 121 AQ', 'TRAIN 100 A', 'TRAIN 101 A', 'ASL 101 A', 'ROLING 310 A', 'ARCTIC 200 A'),
tuition = {'balance' : True, 'due' : 'today'},
HFS = ('stu', 'din'),
library = True,
libraryholds = False,
libraryout = 2,
libraryfine = '$5.00',
fq_summera = ('ELCBUS 451 A', 'B BIO 180 A'),
fq_summerb = ('TRAIN 101 A', 'B BIO 180 A'),
fq_fall = ('ENGL 207 A',),
textbooks = ('PHYS 121 A', 'PHYS 121 AC', 'PHYS 121 AQ', 'TRAIN 100 A', 'TRAIN 101 A', 'ASL 101 A', 'ROLING 310 A', 'ARCTIC 200 A'),
resources = resLinks['tacoma'],
record = records['eight'],
academic_card = academic_card_values['eight'],
#grade_card = grade_card_values['eight']
)
self.username = 'eight'
# TODO: tuition due date stuff
'''
if __name__ == "__main__":
    #unittest.main(warnings = 'ignore')
    # Discover and run every scenario class defined above; on_platforms()
    # has already fanned each one out per configured browser.
    unittest.main()
| mattventura/myuw-selenium | myuw_selenium/test/muwm_testing.py | Python | apache-2.0 | 16,214 |
#!/usr/bin/env python
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "test/TEX/newglossary.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
"""
Validate that use of \newglossary in TeX source files causes SCons to
be aware of the necessary created glossary files.
Test configuration contributed by Robert Managan.
"""
import os
import TestSCons
test = TestSCons.TestSCons()

latex = test.where_is('latex')
if not latex:
    test.skip_test("Could not find latex; skipping test(s).\n")

gloss = os.system('kpsewhich glossaries.sty')
if not gloss == 0:
    test.skip_test("glossaries.sty not installed; skipping test(s).\n")

test.write('SConstruct', """\
import os
env = Environment()
env.PDF('newglossary', 'newglossary.tex')
""")

test.write('newglossary.tex', r"""
\documentclass{report}

% for glossary
\newlength{\symcol}
\newlength{\symw}
\newcommand{\symtab}[1]{\setlength{\symcol}{1.3cm}\settowidth{\symw}{\ensuremath{#1}}\advance\symcol by -\symw\hspace{\symcol}}
\newcommand{\newsym}[5]{\newglossaryentry{#1}{name=\ensuremath{#2},description={\symtab{#2}{#4}},parent={#5},sort={#3}}}
\newcommand{\newacronymf}[3]{\newglossaryentry{#1}{name={#2},description={#3},first={#2}}}
\usepackage[acronym]{glossaries}
\newglossary[symlog]{symbol}{symi}{symo}{Symbols}

\newglossaryentry{nix}{
  name={Nix},
  description={Version 5}
}
\newglossary[deflog]{definition}{defi}{defo}{Definitions}
\newglossaryentry{defPower}{name=Ddyn,type={definition},description={def of 1 dynamic power consumption},sort={DP}}

\newacronym{gnu}{GNU}{GNU's Not UNIX}

\makeglossaries

\glstoctrue
%\loadglsentries[\acronymtype]{chapters/acronyms}
\loadglsentries[symbol]{symbols}
%\loadglsentries[definition]{defns}

\begin{document}

Here is a symbol: \gls{dynPower} and a glossary entry \gls{mel}

Acronyms \gls{gnu} and glossary entries \gls{nix}.

a definition \gls{defPower}

\glossarystyle{index}
\printglossary[type=symbol]
\printglossary[type=acronym]
\printglossary[type=main]
\printglossary[type=definition]
\glossarystyle{super}

\end{document}""")

test.write('symbols.tex', r"""
\newglossaryentry{mel}{name={Microelectronic Fundamentals},description={\nopostdesc},sort=d}
\newsym{dynPower}{P_{dyn}}{P}{Dynamic power consumption}{mel}
%\newcommand{\newsym}[5]{\newglossaryentry{#1}{name=\ensuremath{#2},description={\symtab{#2}{#4}},parent={#5},sort={#3}}}
""")

# Every file extension the glossaries tool chain generates for this
# document; the build must create all of them and -c must remove all
# of them.
generated_suffixes = [
    'acn', 'acr', 'alg', 'aux', 'defi', 'deflog', 'defo', 'fls',
    'glg', 'glo', 'gls', 'ist', 'log', 'pdf', 'symi', 'symlog', 'symo',
]

test.run(arguments = '.', stderr=None)

for suffix in generated_suffixes:
    test.must_exist(test.workpath('newglossary.' + suffix))

test.run(arguments = '-c .')

# The clean pass must not complain about files it already removed.
x = "Could not remove 'newglossary.aux': No such file or directory"
test.must_not_contain_any_line(test.stdout(), [x])

for suffix in generated_suffixes:
    test.must_not_exist(test.workpath('newglossary.' + suffix))

test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| EmanueleCannizzaro/scons | test/TEX/newglossary.py | Python | mit | 5,563 |
#!/usr/bin/python
# Development entry point: start the transient API server directly.
from transient import api
if __name__ == "__main__":
    api.run()
| smilledge/transient | runserver.py | Python | mit | 86 |
import time
from openstates.utils import LXMLMixin
from billy.scrape.legislators import LegislatorScraper, Legislator
from .util import get_client, get_url, backoff
import lxml
# Per-chamber member homepage URL templates; 'code' is the member GUID
# and 'sid' the session GUID (filled in by scrape_homepage).
HOMEPAGE_URLS = {
    "lower": ("http://www.house.ga.gov/Representatives/en-US/"
              "member.aspx?Member={code}&Session={sid}"),
    "upper": ("http://www.senate.ga.gov/SENATORS/en-US/"
              "member.aspx?Member={code}&Session={sid}")
}
class GALegislatorScraper(LegislatorScraper, LXMLMixin):
    """Scrape Georgia General Assembly members from the state SOAP service.

    Member records come from the "Members" SOAP endpoint; each member's
    homepage URL and portrait are scraped from the chamber websites.
    """
    jurisdiction = 'ga'
    # SOAP client and source URL for the "Members" service (resolved at class
    # definition time).
    sservice = get_client("Members").service
    ssource = get_url("Members")

    def clean_list(self, dirty_list):
        """Return *dirty_list* with each string stripped; None passes through."""
        new_list = []
        for x in dirty_list:
            if x is None:
                new_list.append(x)
            else:
                new_list.append(x.strip())
        return new_list

    def scrape_homepage(self, url, kwargs):
        """Format *url* with *kwargs*, fetch it, and return (url, portrait URL).

        Raises if the page does not contain exactly one portrait image.
        """
        url = url.format(**kwargs)
        page = self.lxmlize(url)
        images = page.xpath("//img[contains(@src, 'SiteCollectionImages')]")
        if len(images) != 1:
            raise Exception("Expected exactly one member photo on %s" % url)
        return url, images[0].attrib['src']

    def scrape_session(self, term, chambers, session):
        """Scrape and save every member serving in *session*."""
        session = self.metadata['session_details'][session]
        sid = session['_guid']
        members = backoff(
            self.sservice.GetMembersBySession,
            sid
        )['MemberListing']

        for member in members:
            guid = member['Id']
            member_info = backoff(self.sservice.GetMember, guid)

            # Check to see if the member has vacated; skip if so:
            try:
                legislative_service = next(
                    service for service
                    in member_info['SessionsInService']['LegislativeService']
                    if service['Session']['Id'] == sid)
            except StopIteration:
                # BUG FIX: next() on an exhausted generator raises
                # StopIteration; the original caught IndexError, which never
                # fires here, so the "no service" case crashed uncaught.
                raise Exception("Something very bad is going on with the "
                                "Legislative service")

            if legislative_service['DateVacated']:
                continue

            nick_name, first_name, middle_name, last_name = (
                member_info['Name'][x] for x in [
                    'Nickname', 'First', 'Middle', 'Last'
                ]
            )
            # Prefer the nickname as the displayed first name when present.
            first_name = nick_name if nick_name else first_name
            if middle_name:
                full_name = "%s %s %s" % (first_name, middle_name, last_name)
            else:
                full_name = "%s %s" % (first_name, last_name)

            party = legislative_service['Party']
            if party == 'Democrat':
                party = 'Democratic'
            elif party.strip() == '':
                party = 'other'

            chamber, district = (
                legislative_service['District'][x] for x in [
                    'Type', 'Number'
                ]
            )
            chamber = {
                "House": 'lower',
                "Senate": 'upper'
            }[chamber]

            url, photo = self.scrape_homepage(HOMEPAGE_URLS[chamber],
                                              {"code": guid, "sid": sid})

            legislator = Legislator(
                term,
                chamber,
                str(district),
                full_name,
                party=party,
                last_name=last_name,
                first_name=first_name,
                url=url,
                photo_url=photo,
                _guid=guid
            )

            capital_address = self.clean_list([
                member_info['Address'][x] for x in [
                    'Street', 'City', 'State', 'Zip'
                ]
            ])
            capital_address = (" ".join(
                addr_component for addr_component
                in capital_address if addr_component
            )).strip()

            capital_contact_info = self.clean_list([
                member_info['Address'][x] for x in [
                    'Email', 'Phone', 'Fax'
                ]
            ])
            # Sometimes email is set to a long cryptic string.
            # If it doesn't have a @ character, simply set it to None
            # examples:
            #  01X5dvct3G1lV6RQ7I9o926Q==&c=xT8jBs5X4S7ZX2TOajTx2W7CBprTaVlpcvUvHEv78GI=
            #  01X5dvct3G1lV6RQ7I9o926Q==&c=eSH9vpfdy3XJ989Gpw4MOdUa3n55NTA8ev58RPJuzA8=
            if capital_contact_info[0] and '@' not in capital_contact_info[0]:
                capital_contact_info[0] = None

            # Record the office if we have more than 2 chars of address
            # (e.g. just a state abbreviation) or any phone/fax/email.
            if len(capital_address) > 2 or not capital_contact_info.count(None) == 3:
                if (capital_contact_info[0]
                        and '[email protected]' in capital_contact_info[0]):
                    self.warning("XXX: GA SITE WAS HACKED.")
                    capital_contact_info[1] = None
                if capital_address.strip() != "":
                    legislator.add_office(
                        'capitol',
                        'Capitol Address',
                        address=capital_address,
                        phone=capital_contact_info[1],
                        fax=capital_contact_info[2],
                        email=capital_contact_info[0]
                    )

            district_address = self.clean_list([
                member_info['DistrictAddress'][x] for x in [
                    'Street', 'City', 'State', 'Zip'
                ]
            ])
            district_contact_info = self.clean_list([
                member_info['DistrictAddress'][x] for x in [
                    'Email', 'Phone', 'Fax'
                ]
            ])
            # Same issue with district email. See above comment
            if district_contact_info[0] and '@' not in district_contact_info[0]:
                district_contact_info[0] = None

            district_address = (
                " ".join(
                    addr_component for addr_component
                    in district_address if addr_component
                )).strip()

            # BUG FIX: the original guard re-tested the *capitol* variables
            # here, so district offices were added or skipped based on the
            # wrong office's data.
            if len(district_address) > 2 or not district_contact_info.count(None) == 3:
                # BUG FIX: mirror the capitol-office check above -- the
                # original looked for the hacked e-mail marker in the phone
                # field ([1]) instead of the email field ([0]).
                if (district_contact_info[0]
                        and '[email protected]' in district_contact_info[0]):
                    self.warning("XXX: GA SITE WAS HACKED.")
                    district_contact_info[1] = None
                if district_address.strip() != "":
                    legislator.add_office(
                        'district',
                        'District Address',
                        address=district_address,
                        phone=district_contact_info[1],
                        fax=district_contact_info[2],
                        email=district_contact_info[0]
                    )

            legislator.add_source(self.ssource)
            legislator.add_source(HOMEPAGE_URLS[chamber].format(
                **{"code": guid, "sid": sid}))
            self.save_legislator(legislator)

    def scrape(self, term, chambers):
        """Entry point: scrape every session belonging to *term*."""
        for t in self.metadata['terms']:
            if t['name'] == term:
                for session in t['sessions']:
                    self.scrape_session(term, chambers, session)
| cliftonmcintosh/openstates | openstates/ga/legislators.py | Python | gpl-3.0 | 7,273 |
import pipy

# Package directory this script operates on (relative path).
packpath = "pipy"

# Register the metadata pipy needs for doc generation / PyPI upload.
pipy.define_upload(packpath,
                   author="Karim Bahgat",
                   author_email="[email protected]",
                   license="MIT",
                   name="Pipy",
                   description="Blabla",
                   url="http://github.com/karimbahgat/Pipy",
                   keywords="bla bla",
                   classifiers=["License :: OSI Approved",
                                "Programming Language :: Python",
                                "Development Status :: 4 - Beta",
                                "Intended Audience :: Developers",
                                "Intended Audience :: Science/Research",
                                'Intended Audience :: End Users/Desktop'],
                   changes=["testing",
                            "testing2"],
                   )

# Follow-up steps, intentionally left disabled:
#pipy.generate_docs(packpath)
#pipy.upload_test(packpath)
#pipy.upload(packpath)
| karimbahgat/PyPi | upload.py | Python | mit | 964 |
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
    # Static metadata for the "Dungeons & Denizens" comic.
    name = 'Dungeons & Denizens'
    language = 'en'
    url = 'http://dungeond.com/'
    start_date = '2005-08-23'
    end_date = '2014-03-05'
    # Publication ended in 2014, so the crawler is disabled.
    active = False
    rights = 'Graveyard Greg'
class Crawler(CrawlerBase):
    def crawl(self, pub_date):
        # Intentionally a no-op: the comic ended, so there is never a new
        # release to fetch; returning None signals "no image found".
        pass # Comic no longer published
| datagutten/comics | comics/comics/dungeond.py | Python | agpl-3.0 | 443 |
"""Tests for RH Cloud - Inventory, also known as Insights Inventory Upload
:Requirement: RH Cloud - Inventory
:CaseAutomation: Automated
:CaseLevel: System
:CaseComponent: RHCloud-Inventory
:Assignee: jpathan
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from datetime import datetime
from datetime import timedelta
import pytest
from nailgun import entities
from robottelo.api.utils import wait_for_tasks
from robottelo.rh_cloud_utils import get_local_file_data
from robottelo.rh_cloud_utils import get_remote_report_checksum
from robottelo.rh_cloud_utils import get_report_data
def setting_update(name, value):
    """Set the Satellite setting *name* to *value* through the API."""
    matches = entities.Setting().search(query={'search': f'name="{name}"'})
    target = matches[0]
    target.value = value
    target.update({'value'})
def disable_inventory_settings():
    """Turn off every inventory obfuscation/exclusion setting."""
    for setting_name in (
        'obfuscate_inventory_hostnames',
        'obfuscate_inventory_ips',
        'exclude_installed_packages',
    ):
        setting_update(setting_name, False)
@pytest.fixture
def inventory_settings():
    """Reset the RH Cloud inventory settings before and after a test."""
    disable_inventory_settings()
    yield
    # Teardown: tests may have re-enabled settings; reset them again.
    disable_inventory_settings()
def common_assertion(report_path, inventory_data, org):
    """Shared sanity checks for a generated inventory report archive."""
    archive_info = get_local_file_data(report_path)
    success_msg = (
        f'Done: /var/lib/foreman/red_hat_inventory/uploads/report_for_{org.id}.tar.xz'
    )
    # Generation and upload must both have succeeded.
    assert 'Successfully generated' in inventory_data['generating']['terminal']
    assert success_msg in inventory_data['uploading']['terminal']
    assert 'x-rh-insights-request-id' in inventory_data['uploading']['terminal'].lower()
    for failure_marker in ('NSS error', 'Permission denied'):
        assert failure_marker not in inventory_data['uploading']['terminal']
    # Local archive must match the server copy and be a well-formed tarball.
    assert archive_info['checksum'] == get_remote_report_checksum(org.id)
    assert archive_info['size'] > 0
    assert archive_info['extractable']
    assert archive_info['json_files_parsable']
    # metadata.json must list exactly the slice files present in the tar,
    # with matching host counts.
    assert set(archive_info['metadata_counts'].keys()) == set(archive_info['slices_counts'].keys())
    for slice_name, host_count in archive_info['metadata_counts'].items():
        assert host_count == archive_info['slices_counts'][slice_name]
@pytest.mark.run_in_one_thread
@pytest.mark.tier3
def test_rhcloud_inventory_e2e(
    inventory_settings, organization_ak_setup, registered_hosts, session
):
    """Generate report and verify its basic properties
    :id: 833bd61d-d6e7-4575-887a-9e0729d0fa76
    :customerscenario: true
    :expectedresults:
        1. Report can be generated
        2. Report can be downloaded
        3. Report has non-zero size
        4. Report can be extracted
        5. JSON files inside report can be parsed
        6. metadata.json lists all and only slice JSON files in tar
        7. Host counts in metadata matches host counts in slices
        8. Assert Hostnames, IP addresses, and installed packages are present in report.
    :BZ: 1807829, 1926100
    """
    org, ak = organization_ak_setup
    virtual_host, baremetal_host = registered_hosts
    with session:
        session.organization.select(org_name=org.name)
        # Timestamp (2 min grace) bounds the task search below to this run.
        timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
        session.cloudinventory.generate_report(org.name)
        wait_for_tasks(
            search_query='label = ForemanInventoryUpload::Async::GenerateReportJob'
            f' and started_at >= "{timestamp}"',
            search_rate=15,
            max_tries=10,
        )
        report_path = session.cloudinventory.download_report(org.name)
        inventory_data = session.cloudinventory.read(org.name)
        common_assertion(report_path, inventory_data, org)
        json_data = get_report_data(report_path)
        # Both registered hosts must appear by FQDN and by IP address.
        hostnames = [host['fqdn'] for host in json_data['hosts']]
        assert virtual_host.hostname in hostnames
        assert baremetal_host.hostname in hostnames
        ip_addresses = [
            host['system_profile']['network_interfaces'][0]['ipv4_addresses'][0]
            for host in json_data['hosts']
        ]
        ipv4_addresses = [host['ip_addresses'][0] for host in json_data['hosts']]
        assert virtual_host.ip_addr in ip_addresses
        assert baremetal_host.ip_addr in ip_addresses
        assert virtual_host.ip_addr in ipv4_addresses
        assert baremetal_host.ip_addr in ipv4_addresses
        # Packages are included by default (exclusion settings are off).
        all_host_profiles = [host['system_profile'] for host in json_data['hosts']]
        for host_profiles in all_host_profiles:
            assert 'installed_packages' in host_profiles
            assert len(host_profiles['installed_packages']) > 1
@pytest.mark.stubbed
# Stub: manual scenario only; the docstring is the whole test until automated.
def test_hits_synchronization():
    """Synchronize hits data from cloud and verify it is displayed in Satellite
    :id: c3d1edf5-f43a-4f85-bd80-825bde58f6b2
    :Steps:
        1. Prepare misconfigured machine and upload its data to Insights
        2. Add Cloud API key in Satellite
        3. In Satellite UI, Configure -> Insights -> Sync now
        4. Go to Hosts -> All Hosts and assert there is "Insights" column with content
        5. Open host page and assert there is new Insights recommendation tab
    :expectedresults:
        1. There's Insights column with number of recommendations and link to cloud
        2. Recommendations are listed on single host page
    :CaseAutomation: NotAutomated
    """
@pytest.mark.stubbed
# Stub: manual scenario only; the docstring is the whole test until automated.
def test_hosts_synchronization():
    """Synchronize list of available hosts from cloud and mark them in Satellite
    :id: 2f1bdd42-140d-46f8-bad5-299c54620ee8
    :Steps:
        1. Prepare machine and upload its data to Insights
        2. Add Cloud API key in Satellite
        3. In Satellite UI, Configure -> Inventory upload -> Sync inventory status
        4. Assert content of toast message once synchronization finishes
        5. Go to Hosts -> All Hosts and assert content of status popover
        6. Open host page and assert status on "Properties" tab
    :expectedresults:
        1. Toast message contains number of hosts synchronized and missed
        2. Presence in cloud is displayed in popover status of host
        3. Presence in cloud is displayed in "Properties" tab on single host page
    :CaseAutomation: NotAutomated
    """
@pytest.mark.run_in_one_thread
@pytest.mark.tier3
def test_obfuscate_host_names(inventory_settings, organization_ak_setup, registered_hosts, session):
    """Test whether `Obfuscate host names` setting works as expected.
    :id: 3c3a36b6-6566-446b-b803-3f8f9aab2511
    :Steps:
        1. Prepare machine and upload its data to Insights
        2. Add Cloud API key in Satellite
        3. Go to Configure > Inventory upload > enable “Obfuscate host names” setting.
        4. Generate report after enabling the setting.
        5. Check if host names are obfuscated in generated reports.
        6. Disable previous setting.
        7. Go to Administer > Settings > RH Cloud and enable "Obfuscate host names" setting.
        8. Generate report after enabling the setting.
        9. Check if host names are obfuscated in generated reports.
    :expectedresults:
        1. Obfuscated host names in reports generated.
    :CaseAutomation: Automated
    """
    org, ak = organization_ak_setup
    virtual_host, baremetal_host = registered_hosts
    with session:
        session.organization.select(org_name=org.name)
        # Enable obfuscate_hostnames setting on inventory page.
        session.cloudinventory.update({'obfuscate_hostnames': True})
        # Timestamp (2 min grace) bounds the task search below to this run.
        timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
        session.cloudinventory.generate_report(org.name)
        # wait_for_tasks report generation task to finish.
        wait_for_tasks(
            search_query='label = ForemanInventoryUpload::Async::GenerateReportJob'
            f' and started_at >= "{timestamp}"',
            search_rate=15,
            max_tries=10,
        )
        report_path = session.cloudinventory.download_report(org.name)
        inventory_data = session.cloudinventory.read(org.name)
        # Assert that obfuscate_hostnames is enabled.
        assert inventory_data['obfuscate_hostnames'] is True
        # Assert that generated archive is valid.
        common_assertion(report_path, inventory_data, org)
        # Get report data for assertion
        json_data = get_report_data(report_path)
        # Hostnames must be obfuscated, but IPs remain in clear text.
        hostnames = [host['fqdn'] for host in json_data['hosts']]
        assert virtual_host.hostname not in hostnames
        assert baremetal_host.hostname not in hostnames
        # Assert that host ip_addresses are present in the report.
        ipv4_addresses = [host['ip_addresses'][0] for host in json_data['hosts']]
        assert virtual_host.ip_addr in ipv4_addresses
        assert baremetal_host.ip_addr in ipv4_addresses
        # Disable obfuscate_hostnames setting on inventory page.
        session.cloudinventory.update({'obfuscate_hostnames': False})
        # Enable obfuscate_hostnames setting.
        # Second pass: the backend setting alone should have the same effect.
        setting_update('obfuscate_inventory_hostnames', True)
        timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
        session.cloudinventory.generate_report(org.name)
        # wait_for_tasks report generation task to finish.
        wait_for_tasks(
            search_query='label = ForemanInventoryUpload::Async::GenerateReportJob'
            f' and started_at >= "{timestamp}"',
            search_rate=15,
            max_tries=10,
        )
        report_path = session.cloudinventory.download_report(org.name)
        inventory_data = session.cloudinventory.read(org.name)
        # The backend setting should be reflected in the UI toggle.
        assert inventory_data['obfuscate_hostnames'] is True
        json_data = get_report_data(report_path)
        hostnames = [host['fqdn'] for host in json_data['hosts']]
        assert virtual_host.hostname not in hostnames
        assert baremetal_host.hostname not in hostnames
        ipv4_addresses = [host['ip_addresses'][0] for host in json_data['hosts']]
        assert virtual_host.ip_addr in ipv4_addresses
        assert baremetal_host.ip_addr in ipv4_addresses
@pytest.mark.run_in_one_thread
@pytest.mark.tier3
def test_obfuscate_host_ipv4_addresses(
    inventory_settings, organization_ak_setup, registered_hosts, session
):
    """Test whether `Obfuscate host ipv4 addresses` setting works as expected.
    :id: c0fc4ee9-a6a1-42c0-83f0-0f131ca9ab41
    :customerscenario: true
    :Steps:
        1. Prepare machine and upload its data to Insights
        2. Add Cloud API key in Satellite
        3. Go to Configure > Inventory upload > enable “Obfuscate host ipv4 addresses” setting.
        4. Generate report after enabling the setting.
        5. Check if hosts ipv4 addresses are obfuscated in generated reports.
        6. Disable previous setting.
        7. Go to Administer > Settings > RH Cloud and enable "Obfuscate IPs" setting.
        8. Generate report after enabling the setting.
        9. Check if hosts ipv4 addresses are obfuscated in generated reports.
    :expectedresults:
        1. Obfuscated host ipv4 addresses in generated reports.
    :BZ: 1852594, 1889690
    :CaseAutomation: Automated
    """
    org, ak = organization_ak_setup
    virtual_host, baremetal_host = registered_hosts
    with session:
        session.organization.select(org_name=org.name)
        # Enable obfuscate_ips setting on inventory page.
        session.cloudinventory.update({'obfuscate_ips': True})
        # Timestamp (2 min grace) bounds the task search below to this run.
        timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
        session.cloudinventory.generate_report(org.name)
        # wait_for_tasks report generation task to finish.
        wait_for_tasks(
            search_query='label = ForemanInventoryUpload::Async::GenerateReportJob'
            f' and started_at >= "{timestamp}"',
            search_rate=15,
            max_tries=10,
        )
        report_path = session.cloudinventory.download_report(org.name)
        inventory_data = session.cloudinventory.read(org.name)
        # Assert that obfuscate_ips is enabled.
        assert inventory_data['obfuscate_ips'] is True
        # Assert that generated archive is valid.
        common_assertion(report_path, inventory_data, org)
        # Get report data for assertion
        json_data = get_report_data(report_path)
        # Hostnames stay in clear text; only IPs are obfuscated.
        hostnames = [host['fqdn'] for host in json_data['hosts']]
        assert virtual_host.hostname in hostnames
        assert baremetal_host.hostname in hostnames
        # Assert that ip_addresses are obfuscated from report.
        ip_addresses = [
            host['system_profile']['network_interfaces'][0]['ipv4_addresses'][0]
            for host in json_data['hosts']
        ]
        ipv4_addresses = [host['ip_addresses'][0] for host in json_data['hosts']]
        assert virtual_host.ip_addr not in ip_addresses
        assert baremetal_host.ip_addr not in ip_addresses
        assert virtual_host.ip_addr not in ipv4_addresses
        assert baremetal_host.ip_addr not in ipv4_addresses
        # Disable obfuscate_ips setting on inventory page.
        session.cloudinventory.update({'obfuscate_ips': False})
        # Enable obfuscate_inventory_ips setting.
        # Second pass: the backend setting alone should have the same effect.
        setting_update('obfuscate_inventory_ips', True)
        timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
        session.cloudinventory.generate_report(org.name)
        # wait_for_tasks report generation task to finish.
        wait_for_tasks(
            search_query='label = ForemanInventoryUpload::Async::GenerateReportJob'
            f' and started_at >= "{timestamp}"',
            search_rate=15,
            max_tries=10,
        )
        report_path = session.cloudinventory.download_report(org.name)
        inventory_data = session.cloudinventory.read(org.name)
        # The backend setting should be reflected in the UI toggle.
        assert inventory_data['obfuscate_ips'] is True
        # Get report data for assertion
        json_data = get_report_data(report_path)
        hostnames = [host['fqdn'] for host in json_data['hosts']]
        assert virtual_host.hostname in hostnames
        assert baremetal_host.hostname in hostnames
        ip_addresses = [
            host['system_profile']['network_interfaces'][0]['ipv4_addresses'][0]
            for host in json_data['hosts']
        ]
        ipv4_addresses = [host['ip_addresses'][0] for host in json_data['hosts']]
        assert virtual_host.ip_addr not in ip_addresses
        assert baremetal_host.ip_addr not in ip_addresses
        assert virtual_host.ip_addr not in ipv4_addresses
        assert baremetal_host.ip_addr not in ipv4_addresses
@pytest.mark.run_in_one_thread
@pytest.mark.tier3
def test_exclude_packages_setting(
    inventory_settings, organization_ak_setup, registered_hosts, session
):
    """Test whether `Exclude Packages` setting works as expected.
    :id: 646093fa-fdd6-4f70-82aa-725e31fa3f12
    :customerscenario: true
    :Steps:
        1. Prepare machine and upload its data to Insights
        2. Add Cloud API key in Satellite
        3. Go to Configure > Inventory upload > enable “Exclude Packages” setting.
        4. Generate report after enabling the setting.
        5. Check if packages are excluded from generated reports.
        6. Disable previous setting.
        7. Go to Administer > Settings > RH Cloud and enable
            "Don't upload installed packages" setting.
        8. Generate report after enabling the setting.
        9. Check if packages are excluded from generated reports.
    :expectedresults:
        1. Packages are excluded from reports generated.
    :BZ: 1852594
    :CaseAutomation: Automated
    """
    org, ak = organization_ak_setup
    virtual_host, baremetal_host = registered_hosts
    with session:
        session.organization.select(org_name=org.name)
        # Enable exclude_packages setting on inventory page.
        session.cloudinventory.update({'exclude_packages': True})
        # Timestamp (2 min grace) bounds the task search below to this run.
        timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
        session.cloudinventory.generate_report(org.name)
        wait_for_tasks(
            search_query='label = ForemanInventoryUpload::Async::GenerateReportJob'
            f' and started_at >= "{timestamp}"',
            search_rate=15,
            max_tries=10,
        )
        report_path = session.cloudinventory.download_report(org.name)
        inventory_data = session.cloudinventory.read(org.name)
        assert inventory_data['exclude_packages'] is True
        # Disable exclude_packages setting on inventory page.
        session.cloudinventory.update({'exclude_packages': False})
        # Assert that generated archive is valid.
        common_assertion(report_path, inventory_data, org)
        # Get report data for assertion
        json_data = get_report_data(report_path)
        # Assert that right hosts are present in report.
        hostnames = [host['fqdn'] for host in json_data['hosts']]
        assert virtual_host.hostname in hostnames
        assert baremetal_host.hostname in hostnames
        # Assert that packages are excluded from report
        all_host_profiles = [host['system_profile'] for host in json_data['hosts']]
        for host_profiles in all_host_profiles:
            assert 'installed_packages' not in host_profiles
        # Enable exclude_installed_packages setting.
        # Second pass: the backend setting alone should have the same effect.
        setting_update('exclude_installed_packages', True)
        timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
        session.cloudinventory.generate_report(org.name)
        wait_for_tasks(
            search_query='label = ForemanInventoryUpload::Async::GenerateReportJob'
            f' and started_at >= "{timestamp}"',
            search_rate=15,
            max_tries=10,
        )
        report_path = session.cloudinventory.download_report(org.name)
        inventory_data = session.cloudinventory.read(org.name)
        # The backend setting should be reflected in the UI toggle.
        assert inventory_data['exclude_packages'] is True
        json_data = get_report_data(report_path)
        hostnames = [host['fqdn'] for host in json_data['hosts']]
        assert virtual_host.hostname in hostnames
        assert baremetal_host.hostname in hostnames
        all_host_profiles = [host['system_profile'] for host in json_data['hosts']]
        for host_profiles in all_host_profiles:
            assert 'installed_packages' not in host_profiles
| rplevka/robottelo | tests/foreman/ui/test_rhcloud_inventory.py | Python | gpl-3.0 | 18,489 |
import os.path, re, simplejson
from util import *
from extract_strings import load_strings_file, untranslated_count_for_lang
from extract_strings import extract_strings_from_c_files, get_missing_for_language
from extract_strings import dump_missing_per_language, write_out_strings_files
from extract_strings import key_sort_func, load_lang_index
# True only when config has AWS creds AND boto imports successfully.
g_can_upload = False
# Repository's src/ directory, resolved relative to this script.
g_src_dir = os.path.join(os.path.split(__file__)[0], "..", "src")

config = load_config()
if not config.HasAwsCreds():
    print("aws creds not present in config.py")
else:
    try:
        import boto.s3
        from boto.s3.key import Key
        g_can_upload = True
    except:
        print("You need boto library (http://code.google.com/p/boto/)")
        print("svn checkout http://boto.googlecode.com/svn/trunk/ boto")
        print("cd boto; python setup.py install")

# S3 key the generated language-status javascript is uploaded under.
S3_JS_NAME = "blog/sumatrapdf-langs.js"

# number of missing translations for a language to be considered
# incomplete (will be excluded from Translations_txt.cpp)
INCOMPLETE_MISSING_THRESHOLD = 40

# %-format template for the generated Translations_txt.cpp file; filled via
# locals() in gen_c_code() (langs_count, translations_count, lang_data,
# translations).
TRANSLATIONS_TXT_C = """\
/* Generated by scripts\\update_translations.py
DO NOT EDIT MANUALLY */
#ifndef MAKELANGID
#include <windows.h>
#endif
#define LANGS_COUNT %(langs_count)d
#define STRINGS_COUNT %(translations_count)d
typedef struct {
    const char *code;
    const char *fullName;
    LANGID id;
    BOOL isRTL;
} LangDef;
#define _LANGID(lang) MAKELANGID(lang, SUBLANG_NEUTRAL)
LangDef gLangData[LANGS_COUNT] = {
%(lang_data)s
};
#undef _LANGID
const char *gTranslations[LANGS_COUNT * STRINGS_COUNT] = {
%(translations)s
};
"""
# use octal escapes because hexadecimal ones can consist of
# up to four characters, e.g. \xABc isn't the same as \253c
def c_oct(c):
    """Return character *c* as a 3-digit C octal escape, e.g. 'A' -> '\\101'."""
    # BUG FIX: %03o zero-pads to exactly three digits on both Python 2 and 3.
    # The old ("00" + oct(ord(c)))[-3:] relied on Python 2's oct() format
    # ('07'); under Python 3 oct() returns '0o7', yielding garbage like
    # '\\0o7' for ordinals below 64.
    return "\\%03o" % ord(c)
def c_escape(txt, encode_to_utf=False):
    """Return *txt* as a double-quoted C string literal, or "NULL" for None.

    Escapes embedded quotes and replaces every byte >= 0x80 with an octal
    escape (see c_oct).  NOTE(review): this is Python 2 code -- under
    Python 3, indexing the match of re.sub over encoded bytes yields an int,
    so c_oct(m.group(0)[0]) would fail; verify before porting.
    """
    if txt is None:
        return "NULL"
    # escape all quotes
    txt = txt.replace('"', r'\"')
    # and all non-7-bit characters of the UTF-8 encoded string
    #print(txt)
    if encode_to_utf:
        # the old, pre-apptranslator translation system required encoding to utf8,
        txt = re.sub(r"[\x80-\xFF]", lambda m: c_oct(m.group(0)[0]), txt.encode("utf-8"))
    else:
        # the new apptranslator-based translation system already has txt in utf8 at this point
        txt = re.sub(r"[\x80-\xFF]", lambda m: c_oct(m.group(0)[0]), txt)
    return '"%s"' % txt
def get_trans_for_lang(strings_dict, keys, lang_arg):
    """Return the translation of each key for *lang_arg*, in *keys* order.

    A slot is None when the language has no entry for the key or when the
    translation is identical to the default-language string.
    """
    result = []
    for key in keys:
        found = None
        for (lang, translated) in strings_dict[key]:
            if lang == lang_arg:
                # skip translations identical to the default-language text
                if translated != key:
                    found = translated
                break
        result.append(found)
    return result
# ISO code of the default (source) language; always sorts first.
DEFAULT_LANG = "en"

# Python 2 cmp-style comparator for (iso_code, full_name, ...) language rows:
# the default language first, the rest alphabetical by full name.
# NOTE(review): relies on the Python 2 builtin cmp(); a key= based rewrite is
# required for Python 3.
def lang_sort_func(x,y):
    # special case: default language is first
    if x[0] == DEFAULT_LANG: return -1
    if y[0] == DEFAULT_LANG: return 1
    return cmp(x[1], y[1])
def make_lang_ids(langs, lang_index):
    """Map language codes to Win32 LANGID initializer expressions.

    langs      -- language rows; only row[0] (the ISO code) and row[1]
                  (the full name, for the warning) are used here
    lang_index -- rows of [code, LANG_* suffix, SUBLANG_* suffix, direction]
    Returns {code: C-expression string}, "-1" when no LANGID is known.
    """
    lang_ids = {}
    for cols in lang_index:
        if cols[1] and cols[2]:
            id = "MAKELANGID(LANG_%s, SUBLANG_%s_%s)" % (cols[1], cols[1], cols[2].replace(" ", "_"))
        elif cols[1]:
            id = "_LANGID(LANG_%s)" % (cols[1])
        else:
            id = "-1" # invalid LANGID
        lang_ids[cols[0]] = id.upper()
    for lang in langs:
        if lang[0] not in lang_ids:
            # tuple(lang[:2]) keeps the %-format working even when the row is
            # a list or longer than two columns
            print("Warning: Missing LANGID for %s (%s)" % tuple(lang[:2]))
            # BUG FIX: key by the language code.  The original did
            # lang_ids[lang] = "-1", which keyed by the whole row -- a list
            # is unhashable, and even for tuples the later lang_ids[lang[0]]
            # lookup in gen_c_code() would raise KeyError.
            lang_ids[lang[0]] = "-1"
    return lang_ids
def make_lang_layouts(lang_index):
    """Map each language code to 1 (RTL layout) or 0 (LTR)."""
    layouts = {}
    for row in lang_index:
        # column 3 holds the text direction marker
        layouts[row[0]] = 1 if row[3] == "RTL" else 0
    return layouts
def gen_c_code(langs_ex, strings_dict, file_name, lang_index, encode_to_utf=False):
    """Generate the C translations file (*file_name*) from the string table.

    NOTE(review): Python 2 only -- keys() must be a list for .sort(cmp=...),
    and file() is the Python 2 open() alias; port both before moving to 3.
    """
    langs = [cols[0] for cols in langs_ex]
    # the default language must come first so gTranslations rows line up
    assert DEFAULT_LANG == langs[0]
    langs_count = len(langs)
    translations_count = len(strings_dict)
    keys = strings_dict.keys()
    keys.sort(cmp=key_sort_func)
    lines = []
    for lang in langs:
        if DEFAULT_LANG == lang:
            # default language: the keys themselves are the strings
            trans = keys
        else:
            trans = get_trans_for_lang(strings_dict, keys, lang)
        lines.append("")
        lines.append(" /* Translations for language %s */" % lang)
        lines += [" %s," % c_escape(t, encode_to_utf=encode_to_utf) for t in trans]
    translations = "\n".join(lines)
    lang_ids = make_lang_ids(langs_ex, lang_index)
    lang_layouts = make_lang_layouts(lang_index)
    lang_data = ['{ "%s", %s, %s, %d },' % (lang[0], c_escape(lang[1], encode_to_utf=encode_to_utf), lang_ids[lang[0]], lang_layouts[lang[0]]) for lang in langs_ex]
    lang_data = "\n ".join(lang_data)
    # the template pulls langs_count, translations_count, lang_data and
    # translations out of locals()
    file_content = TRANSLATIONS_TXT_C % locals()
    file(file_name, "wb").write(file_content)
def contributors_for_lang(contributors, lang):
    """Return an alphabetically sorted contributor list for *lang* ([] if none)."""
    names = contributors.get(lang, [])
    return sorted(names)
def gen_js_data(strings_dict, langs, contributors):
    """Build per-language status rows for the website javascript.

    Each row: [iso_code, short_name, untranslated_count, svn_url,
    contributors]; sorted by untranslated count descending, then by name.
    NOTE(review): Python 2 only -- the lambda is passed as sorted()'s
    positional cmp argument, which Python 3 removed.
    """
    res = []
    for (lang_iso, lang_name) in langs:
        if DEFAULT_LANG == lang_iso: continue
        # drop any parenthesized qualifier, e.g. "German (Deutsch)" -> "German"
        lang_name = lang_name.split(" (")[0]
        count = untranslated_count_for_lang(strings_dict, lang_iso)
        svnurl = "http://sumatrapdf.googlecode.com/svn/trunk/strings/" + lang_iso + ".txt"
        c = contributors_for_lang(contributors, lang_iso)
        res.append([lang_iso, lang_name, count, svnurl, c])
    return sorted(res, lambda x, y: cmp(y[2], x[2]) or cmp(x[1], y[1]))
# Generate json data as array of arrays in the format:
# [langname, lang-iso-code, untranslated_strings_count, svn_url, [contributors]]
# sorted by untranslated string count (biggest at the top)
def gen_and_upload_js(strings_dict, langs, contributors):
    """Serialize gen_js_data() into a JS snippet and upload it to S3.

    No-op (with a message) when AWS credentials or boto are unavailable.
    """
    if not g_can_upload:
        print("Can't upload javascript to s3")
        return
    data = gen_js_data(strings_dict, langs, contributors)
    js = simplejson.dumps(data)
    # wrap as an assignment so the blog page can just <script src=...> it
    js = "var g_langsData = " + js + ";\n"
    #print(js)
    s3UploadDataPublic(js, S3_JS_NAME)
def get_untranslated_as_list(untranslated_dict):
    """Flatten the per-language missing-string lists into one de-duplicated list."""
    flattened = []
    for missing_strings in untranslated_dict.values():
        flattened.extend(missing_strings)
    return uniquify(flattened)
def remove_incomplete_translations(langs, strings, strings_dict, threshold=INCOMPLETE_MISSING_THRESHOLD):
    """Remove (in place) languages missing >= *threshold* translations.

    Iterates over the slice langs[1:], which is a copy, so removing from
    *langs* inside the loop is safe.  The default language (index 0) is
    never removed.
    """
    assert langs[0][0] == DEFAULT_LANG
    for lang in langs[1:]:
        missing = get_missing_for_language(strings, strings_dict, lang[0])
        if len(missing) >= threshold:
            langs.remove(lang)
def main():
    """Legacy (pre-AppTranslator) path: regenerate all translation artifacts."""
    (strings_dict, langs, contributors) = load_strings_file()
    strings = extract_strings_from_c_files()
    # Drop translations for strings no longer present in the sources.
    # Python 2: .keys() returns a list copy, so deleting while iterating is
    # safe; under Python 3 this would raise RuntimeError.
    for s in strings_dict.keys():
        if s not in strings:
            del strings_dict[s]
    untranslated_dict = dump_missing_per_language(strings, strings_dict)
    write_out_strings_files(strings_dict, langs, contributors, untranslated_dict)
    # Register brand-new strings with an empty translation list.
    untranslated = get_untranslated_as_list(untranslated_dict)
    for s in untranslated:
        if s not in strings_dict:
            strings_dict[s] = []
    # Python 2 list.sort(cmp) with the cmp-style comparator above.
    langs.sort(lang_sort_func)
    c_file_name = os.path.join(g_src_dir, "Translations_txt.cpp")
    gen_and_upload_js(strings_dict, langs, contributors)
    remove_incomplete_translations(langs, strings, strings_dict)
    gen_c_code(langs, strings_dict, c_file_name, load_lang_index(), encode_to_utf=True)
def main_new():
    """Current entry point: pull fresh translations from the AppTranslator server."""
    # local import keeps the legacy main() usable without this module
    import apptransdl
    changed = apptransdl.downloadAndUpdateTranslationsIfChanged()
    if changed:
        print("\nNew translations received from the server, checkin Translations_txt.cpp and translations.txt")
if __name__ == "__main__":
main_new()
#main() | Erls-Corporation/SumatraPDF-2.2.1 | scripts/update_translations.py | Python | gpl-3.0 | 7,537 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import Any, Optional, TYPE_CHECKING
from azure.core.rest import HttpRequest, HttpResponse
from azure.mgmt.core import ARMPipelineClient
from msrest import Deserializer, Serializer
from . import models
from ._configuration import ContainerRegistryManagementClientConfiguration
from .operations import ConnectedRegistriesOperations, ExportPipelinesOperations, ImportPipelinesOperations, Operations, PipelineRunsOperations, PrivateEndpointConnectionsOperations, RegistriesOperations, ReplicationsOperations, ScopeMapsOperations, TokensOperations, WebhooksOperations
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
class ContainerRegistryManagementClient:
"""ContainerRegistryManagementClient.
:ivar connected_registries: ConnectedRegistriesOperations operations
:vartype connected_registries:
azure.mgmt.containerregistry.v2020_11_01_preview.operations.ConnectedRegistriesOperations
:ivar export_pipelines: ExportPipelinesOperations operations
:vartype export_pipelines:
azure.mgmt.containerregistry.v2020_11_01_preview.operations.ExportPipelinesOperations
:ivar registries: RegistriesOperations operations
:vartype registries:
azure.mgmt.containerregistry.v2020_11_01_preview.operations.RegistriesOperations
:ivar import_pipelines: ImportPipelinesOperations operations
:vartype import_pipelines:
azure.mgmt.containerregistry.v2020_11_01_preview.operations.ImportPipelinesOperations
:ivar operations: Operations operations
:vartype operations: azure.mgmt.containerregistry.v2020_11_01_preview.operations.Operations
:ivar pipeline_runs: PipelineRunsOperations operations
:vartype pipeline_runs:
azure.mgmt.containerregistry.v2020_11_01_preview.operations.PipelineRunsOperations
:ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations
:vartype private_endpoint_connections:
azure.mgmt.containerregistry.v2020_11_01_preview.operations.PrivateEndpointConnectionsOperations
:ivar replications: ReplicationsOperations operations
:vartype replications:
azure.mgmt.containerregistry.v2020_11_01_preview.operations.ReplicationsOperations
:ivar scope_maps: ScopeMapsOperations operations
:vartype scope_maps:
azure.mgmt.containerregistry.v2020_11_01_preview.operations.ScopeMapsOperations
:ivar tokens: TokensOperations operations
:vartype tokens: azure.mgmt.containerregistry.v2020_11_01_preview.operations.TokensOperations
:ivar webhooks: WebhooksOperations operations
:vartype webhooks:
azure.mgmt.containerregistry.v2020_11_01_preview.operations.WebhooksOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: The Microsoft Azure subscription ID.
:type subscription_id: str
:param base_url: Service URL. Default value is 'https://management.azure.com'.
:type base_url: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
"""
    def __init__(
        self,
        credential: "TokenCredential",
        subscription_id: str,
        base_url: str = "https://management.azure.com",
        **kwargs: Any
    ) -> None:
        """Build the ARM pipeline, the (de)serializers, and one
        operations group per service area of the 2020-11-01-preview
        container registry API.

        :param credential: Credential needed for the client to connect to Azure.
        :param subscription_id: The Microsoft Azure subscription ID.
        :param base_url: Service URL.
        """
        self._config = ContainerRegistryManagementClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs)
        self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
        # Collect every model class exported by this API version for the serializers.
        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)
        # Skip client-side validation of request models.
        self._serialize.client_side_validation = False
        # One operations group per service area; all share the same pipeline and serializers.
        self.connected_registries = ConnectedRegistriesOperations(self._client, self._config, self._serialize, self._deserialize)
        self.export_pipelines = ExportPipelinesOperations(self._client, self._config, self._serialize, self._deserialize)
        self.registries = RegistriesOperations(self._client, self._config, self._serialize, self._deserialize)
        self.import_pipelines = ImportPipelinesOperations(self._client, self._config, self._serialize, self._deserialize)
        self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
        self.pipeline_runs = PipelineRunsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.private_endpoint_connections = PrivateEndpointConnectionsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.replications = ReplicationsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.scope_maps = ScopeMapsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.tokens = TokensOperations(self._client, self._config, self._serialize, self._deserialize)
        self.webhooks = WebhooksOperations(self._client, self._config, self._serialize, self._deserialize)
    def _send_request(
        self,
        request, # type: HttpRequest
        **kwargs: Any
    ) -> HttpResponse:
        """Runs the network request through the client's chained policies.

        >>> from azure.core.rest import HttpRequest
        >>> request = HttpRequest("GET", "https://www.example.org/")
        <HttpRequest [GET], url: 'https://www.example.org/'>
        >>> response = client._send_request(request)
        <HttpResponse: 200 OK>

        For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart

        :param request: The network request you want to make. Required.
        :type request: ~azure.core.rest.HttpRequest
        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
        :return: The response of your network call. Does not do error handling on your response.
        :rtype: ~azure.core.rest.HttpResponse
        """
        # Copy first so that rewriting the URL does not mutate the caller's request.
        request_copy = deepcopy(request)
        request_copy.url = self._client.format_url(request_copy.url)
        return self._client.send_request(request_copy, **kwargs)
    def close(self):
        # type: () -> None
        """Close the underlying pipeline client and its transport."""
        self._client.close()
    def __enter__(self):
        # type: () -> ContainerRegistryManagementClient
        """Enter the pipeline client's context and return self."""
        self._client.__enter__()
        return self
    def __exit__(self, *exc_details):
        # type: (Any) -> None
        """Delegate context exit (and transport shutdown) to the pipeline client."""
        self._client.__exit__(*exc_details)
| Azure/azure-sdk-for-python | sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/v2020_11_01_preview/_container_registry_management_client.py | Python | mit | 7,097 |
# *-* coding: utf-8 *-*
import urllib
import pandas as pd
import re
import time
from nltk.corpus import stopwords
from nltk import WordNetLemmatizer, word_tokenize
from nltk.stem.porter import PorterStemmer
import numpy as np
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.metrics import classification_report
from sklearn.naive_bayes import BernoulliNB, MultinomialNB
from sklearn.linear_model import LogisticRegression
#from sklearn.svm import LinearSVC
from prob_svm import LinearSVC_proba as LinearSVC
import random
import matplotlib.pyplot as plt
# Part a)
# Data import.
# Note: the download calls are commented out to avoid re-fetching the data on every run.
train_data_url = "http://www.inf.utfsm.cl/~jnancu/stanford-subset/polarity.train"
test_data_url = "http://www.inf.utfsm.cl/~jnancu/stanford-subset/polarity.dev"
#train_data_f = urllib.urlretrieve(train_data_url, "polarity.train")
#test_data_f = urllib.urlretrieve(test_data_url, "polarity.dev")
# Load label/text pairs; the first whitespace-separated token of each line is the sentiment label.
ftr = open("polarity.train", "r")
fts = open("polarity.dev", "r")
rows = [line.split(" ", 1) for line in ftr.readlines()]
train_df = pd.DataFrame(rows, columns=['Sentiment', 'Text'])
train_df['Sentiment'] = pd.to_numeric(train_df['Sentiment'])
rows = [line.split(" ", 1) for line in fts.readlines()]
test_df = pd.DataFrame(rows, columns=['Sentiment', 'Text'])
test_df['Sentiment'] = pd.to_numeric(test_df['Sentiment'])
# Report the class balance of both splits (Python 2 print statements).
print "\n a) Forma de los datos: Sentimientos negativos (-1) y positivos (+1)\n"
print "Datos training:"
print train_df['Sentiment'].value_counts()
print "\nDatos test:"
print test_df['Sentiment'].value_counts()
print ""
#Parte b)
#Definición de funciones
def word_preprocessing(text):
    """Lowercase, de-elongate and strip English stopwords from *text*.

    Letter runs of length > 2 (e.g. "baaaad") are squeezed down to two
    occurrences, the result is tokenized, and every token whose
    lowercase form is an English stopword is discarded.
    """
    decoded = text.decode('utf-8', 'ignore')
    squeezed = re.sub(r'([a-z])\1+', r'\1\1', decoded)
    blacklist = stopwords.words('english')
    kept = []
    for token in word_tokenize(squeezed):
        lowered = token.lower()
        if lowered not in blacklist:
            kept.append(lowered)
    return ' '.join(kept)
def word_extractor(text):
    """Tokenize *text* and Porter-stem every token.

    Returns the stemmed tokens joined back into a single
    space-separated string.  No stopword filtering happens here;
    callers that want it must run word_preprocessing() first.
    """
    text = text.decode('utf-8', 'ignore')
    porter = PorterStemmer()
    # Fix: the original also built a stopword list here but never used it.
    words = [porter.stem(word).encode('utf-8') for word in word_tokenize(text)]
    return ' '.join(words)
# Sample sentences used to demo each preprocessing variant.
teststring = ["I love to eat cake"
    , "I love eating cake"
    , "I loved eating the cake"
    , "I do not love eating cake"
    , "I don't love eating cake"
    , "Those are stupid dogs"
    , "It wasn't really a baaaad movie"]
teststring_preproc = []
# Requested output for part b).
print "b)\n"
print "Preprocesado SIN stemming:"
for i in range(0,len(teststring),1):
    teststring_preproc.insert(i, word_preprocessing(teststring[i]) )
    print teststring_preproc[i]
print "\nPreprocesado CON stemming:"
for i in range(0,len(teststring),1):
    print word_extractor(teststring_preproc[i])
#Parte c)
#Definición de lematizador
def word_extractor2(text):
    """Tokenize *text* and WordNet-lemmatize each token.

    Returns the lemmas joined into a single space-separated string.
    """
    lemmatizer = WordNetLemmatizer()
    tokens = word_tokenize(text.decode('utf-8', 'ignore'))
    lemmas = [lemmatizer.lemmatize(token) for token in tokens]
    return ' '.join(lemmas)
# Requested output for part c): lemmatization applied to the preprocessed samples.
print "\nc)\n"
print "Preprocesado CON lematizar:"
for i in range(0,len(teststring),1):
    print word_extractor2(teststring_preproc[i])
# Part d)
# Pre-allocated 4-slot containers, one slot per preprocessing variant.
texts_train=[0,0,0,0]
texts_test=[0,0,0,0]
vectorizer=[0,0,0,0]
features_train=[0,0,0,0]
features_test=[0,0,0,0]
vocab=[0,0,0,0]
dist_train=[0,0,0,0]
count_train=[0,0,0,0]
dist_test=[0,0,0,0]
count_test=[0,0,0,0]
# Slot meaning: [0] stemming, [1] stemming + preprocessing, [2] lemmatizing, [3] lemmatizing + preprocessing.
# Map sentiment labels {-1, +1} onto {0.0, 1.0}.
labels_train = np.asarray((train_df.Sentiment.astype(float)+1)/2.0)
labels_test = np.asarray((test_df.Sentiment.astype(float)+1)/2.0)
# Switch: 2 = stemming variants only (faster); 4 = also run the (costlier) lemmatizer variants.
my_switch = 4
texts_train[0] = [word_extractor(text) for text in train_df.Text]
texts_test[0] = [word_extractor(text) for text in test_df.Text]
texts_train[1] = [word_extractor(word_preprocessing(text)) for text in train_df.Text]
texts_test[1] = [word_extractor(word_preprocessing(text)) for text in test_df.Text]
if (my_switch >= 3):
    texts_train[2] = [word_extractor2(text) for text in train_df.Text]
    texts_test[2] = [word_extractor2(text) for text in test_df.Text]
    texts_train[3] = [word_extractor2(word_preprocessing(text)) for text in train_df.Text]
    texts_test[3] = [word_extractor2(word_preprocessing(text)) for text in test_df.Text]
print "\nd)\n"
# Word counts after each preprocessing variant.
# NOTE(review): binary='False' is a truthy *string*, so CountVectorizer
# actually runs with binary=True; pass the boolean False if raw counts
# were intended.
for i in range(0,my_switch ,1):
    vectorizer[i] = CountVectorizer(ngram_range=(1, 1), binary='False')
    vectorizer[i].fit(np.asarray(texts_train[i]))
    features_train[i] = vectorizer[i].transform(texts_train[i])
    features_test[i] = vectorizer[i].transform(texts_test[i])
    vocab[i] = vectorizer[i].get_feature_names()
    dist_train[i] = list(np.array(features_train[i].sum(axis=0)).reshape(-1,))
    count_train[i] = zip(vocab[i], dist_train[i])
    if (i==0):
        print "Top10 palabras:STEMMING:\n"
    if (i==1):
        print "Top10 palabras:STEMMING SIN STOPWORDS (PREPROCESADO):\n"
    if (i==2):
        print "Top10 palabras:LEMMATIZING:\n"
    if (i==3):
        print "Top10 palabras: LEMMATIZING SIN STOPWORDS (PREPROCESADO):\n"
    # NOTE(review): the headers say "Top10" but the slices below print the top 100.
    print "\tTraining data:"
    print "\t"+str(sorted(count_train[i], key=lambda x: x[1], reverse=True)[:100])
    dist_test[i] = list(np.array(features_test[i].sum(axis=0)).reshape(-1,))
    count_test[i] = zip(vocab[i], dist_test[i])
    print "\tTest data:"
    print "\t"+str(sorted(count_test[i], key=lambda x: x[1], reverse=True)[:100])
# Part e)
global_accuracies=[]; # <- module-level accumulator; score_the_model() appends to it (consumed by the plot in part j)
print "\n /*Parte e) no imprime nada*/ \n"
def score_the_model(model, x, y, xt, yt, text):
    """Print train/test accuracy and a per-class report for *model*,
    and append (train_acc, test_acc) to the global_accuracies list."""
    acc_tr = model.score(x, y)
    # NOTE(review): the last test row is sliced off here — presumably a
    # trailing-blank-line artifact of the data file; confirm against the data.
    acc_test = model.score(xt[:-1], yt[:-1])
    print "Training Accuracy %s: %f" % (text, acc_tr)
    print "Test Accuracy %s: %f" % (text, acc_test)
    print "Detailed Analysis Testing Results ..."
    print(classification_report(yt, model.predict(xt), target_names=['+', '-']))
    global_accuracies.append((acc_tr,acc_test)) # side effect: feeds the accuracy plot in part j
#Parte f)
def do_NAIVE_BAYES(x, y, xt, yt):
    """Fit a Bernoulli naive Bayes classifier on (x, y), report its
    train/test accuracy, and return the fitted model."""
    classifier = BernoulliNB().fit(x, y)
    score_the_model(classifier, x, y, xt, yt, "BernoulliNB")
    return classifier
# Part f): Bernoulli naive Bayes per preprocessing variant, plus 15 random test predictions.
print "\n f) Naive Bayes \n"
for i in range(0,my_switch ,1):
    if (i==0):
        print "STEMMING:\n"
    if (i==1):
        print "STEMMING SIN STOPWORDS (PREPROCESADO):\n"
    if (i==2):
        print "LEMMATIZING:\n"
    if (i==3):
        print "LEMMATIZING SIN STOPWORDS (PREPROCESADO):\n"
    model = do_NAIVE_BAYES(features_train[i], labels_train, features_test[i], labels_test)
    test_pred = model.predict_proba(features_test[i])
    # Show the predicted class probabilities for 15 randomly sampled test sentences.
    spl = random.sample(xrange(len(test_pred)), 15)
    for text, sentiment in zip(test_df.Text[spl], test_pred[spl]):
        print sentiment, text
#Parte g)
def do_MULTINOMIAL(x, y, xt, yt):
    """Fit a multinomial naive Bayes classifier on (x, y), report its
    train/test accuracy, and return the fitted model."""
    classifier = MultinomialNB().fit(x, y)
    score_the_model(classifier, x, y, xt, yt, "MULTINOMIAL")
    return classifier
# Part g): multinomial naive Bayes per preprocessing variant, plus 15 random test predictions.
print "\n g) Naive Bayes Multinomial\n"
for i in range(0,my_switch ,1):
    if (i==0):
        print "STEMMING:\n"
    if (i==1):
        print "STEMMING SIN STOPWORDS (PREPROCESADO):\n"
    if (i==2):
        print "LEMMATIZING:\n"
    if (i==3):
        print "LEMMATIZING SIN STOPWORDS (PREPROCESADO):\n"
    model = do_MULTINOMIAL(features_train[i], labels_train, features_test[i], labels_test)
    test_pred = model.predict_proba(features_test[i])
    spl = random.sample(xrange(len(test_pred)), 15)
    for text, sentiment in zip(test_df.Text[spl], test_pred[spl]):
        print sentiment, text
# Part h)
# Regularization strengths; kept at module level because the reporting loops below index into it.
Cs = [0.01, 0.1, 10, 100, 1000]
def do_LOGIT(x, y, xt, yt):
start_t = time.time()
Cs = [0.01, 0.1, 10, 100, 1000]
model = []
for i in range(0,len(Cs),1):
print "Usando C= %f" % Cs[i]
model.append ( LogisticRegression(penalty='l2', C=Cs[i]) )
model[i] = model[i].fit(x, y)
score_the_model(model[i], x, y, xt, yt, "LOGISTIC")
return model
# Part h): L2-regularized logistic regression, one model per C value, per preprocessing variant.
print "\n h) Regresión logística regularizada con penalizador norma l_2\n"
for i in range(0,my_switch ,1):
    if (i==0):
        print "STEMMING:\n"
    if (i==1):
        print "STEMMING SIN STOPWORDS (PREPROCESADO):\n"
    if (i==2):
        print "LEMMATIZING:\n"
    if (i==3):
        print "LEMMATIZING SIN STOPWORDS (PREPROCESADO):\n"
    models = do_LOGIT(features_train[i], labels_train, features_test[i], labels_test)
    for j, model in enumerate(models):
        print "\tC = "+str(Cs[j])+" :\n"
        test_pred = model.predict_proba(features_test[i])
        spl = random.sample(xrange(len(test_pred)), 15)
        for text, sentiment in zip(test_df.Text[spl], test_pred[spl]):
            print sentiment, text
#Parte i)
def do_SVM(x, y, xt, yt):
Cs = [0.01, 0.1, 10, 100, 1000]
model = []
for i in range (0,len(Cs),1):
print "El valor de C que se esta probando: %f" % Cs[i]
model.append( LinearSVC(C=Cs[i]) )
model[i] = model[i].fit(x, y)
score_the_model(model[i], x, y, xt, yt, "SVM")
return model
# Part i): linear SVM, one model per C value, per preprocessing variant.
print "\n i) Support Vector Machine\n"
for i in range(0,my_switch ,1):
    if (i==0):
        print "STEMMING:\n"
    if (i==1):
        print "STEMMING SIN STOPWORDS (PREPROCESADO):\n"
    if (i==2):
        print "LEMMATIZING:\n"
    if (i==3):
        print "LEMMATIZING SIN STOPWORDS (PREPROCESADO):\n"
    models = do_SVM(features_train[i], labels_train, features_test[i], labels_test)
    for j, model in enumerate(models):
        print "\tC = "+str(Cs[j])+" :\n"
        test_pred = model.predict_proba(features_test[i])
        spl = random.sample(xrange(len(test_pred)), 15)
        for text, sentiment in zip(test_df.Text[spl], test_pred[spl]):
            print sentiment, text
# Part j): accuracy plot.
# Label scheme: <model>_<preprocessing>[_<exponent>], where the model is
# NB (Bernoulli NB), MN (multinomial NB), LO (logistic regression) or
# SV (linear SVM); preprocessing is S/SP/L/LP (stemming, stemming +
# preprocessing, lemmatizing, lemmatizing + preprocessing); the trailing
# -2..+3 is log10 of the five C values [0.01, 0.1, 10, 100, 1000].
plot_labels = [
    "NB_S"
    , "NB_SP"
    , "NB_L"
    , "NB_LP"
    , "MN_S"
    , "MN_SP"
    , "MN_L"
    , "MN_LP"
    , "LO_S_-2"
    , "LO_S_-1"
    , "LO_S_+1"
    , "LO_S_+2"
    , "LO_S_+3"
    , "LO_SP_-2"
    , "LO_SP_-1"
    , "LO_SP_+1"
    , "LO_SP_+2"
    , "LO_SP_+3"
    , "LO_L_-2"
    , "LO_L_-1"
    , "LO_L_+1"
    , "LO_L_+2"
    , "LO_L_+3"
    , "LO_LP_-2"
    , "LO_LP_-1"
    , "LO_LP_+1"
    , "LO_LP_+2"
    , "LO_LP_+3"
    , "SV_S_-2"
    , "SV_S_-1"
    , "SV_S_+1"
    , "SV_S_+2"
    , "SV_S_+3"
    , "SV_SP_-2"
    , "SV_SP_-1"
    , "SV_SP_+1"
    , "SV_SP_+2"
    , "SV_SP_+3"
    , "SV_L_-2"
    , "SV_L_-1"
    , "SV_L_+1"
    , "SV_L_+2"
    , "SV_L_+3"
    , "SV_LP_-2"
    , "SV_LP_-1"
    , "SV_LP_+1"
    , "SV_LP_+2"
    , "SV_LP_+3"
    ]
# score_the_model() appended one (train, test) accuracy pair per fitted model, in run order.
tr_accuracy = [item[0] for item in global_accuracies]
test_accuracy = [item[1] for item in global_accuracies]
ayuda=[]
for i in range(1,len(test_accuracy)+1):
    ayuda.append(i)
    print i,test_accuracy[i-1], tr_accuracy[i-1]
ax=plt.plot( ayuda, test_accuracy)
ax=plt.plot( ayuda, tr_accuracy)
ax=plt.xticks(ayuda, plot_labels, rotation=90)
ax=plt.legend(['Test accuracies', 'Training Accuracies'], loc='upper left')
plt.show()
| topotech/AID_tarea3 | Parte2/parte2.py | Python | unlicense | 11,260 |
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.gbm import H2OGradientBoostingEstimator
def swpreds_gbm():
    """Train H2O GBMs on swpreds_1000x3 with and without the noise column.

    Training set has two predictor columns:
      X1: 10 categorical levels, 100 observations per level; X2: Unif(0,1) noise.
    Ratio of y = 1 per level: cat01 = 1.0 (strong predictor), cat02 to cat10 = 0.5 (weak predictors).
    """
    swpreds = h2o.import_file(path=pyunit_utils.locate("smalldata/gbm_test/swpreds_1000x3.csv"))
    swpreds["y"] = swpreds["y"].asfactor()
    #Log.info("Summary of swpreds_1000x3.csv from H2O:\n")
    #swpreds.summary()
    # Train H2O GBM without the noise column.
    h2o_gbm_model1 = H2OGradientBoostingEstimator(distribution="bernoulli", ntrees=50, max_depth=20, nbins=500)
    h2o_gbm_model1.train(x="X1", y="y", training_frame=swpreds)
    h2o_gbm_model1.show()
    h2o_gbm_perf1 = h2o_gbm_model1.model_performance(swpreds)
    h2o_auc1 = h2o_gbm_perf1.auc()
    # Train H2O GBM including the noise column.
    # BUG FIX: build a fresh estimator here. The original aliased model1
    # (h2o_gbm_model2 = h2o_gbm_model1), so the second train() clobbered
    # the first model and no with/without-noise comparison was possible.
    h2o_gbm_model2 = H2OGradientBoostingEstimator(distribution="bernoulli", ntrees=50, max_depth=20, nbins=500)
    h2o_gbm_model2.train(x=["X1", "X2"], y="y", training_frame=swpreds)
    h2o_gbm_model2.show()
    h2o_gbm_perf2 = h2o_gbm_model2.model_performance(swpreds)
    h2o_auc2 = h2o_gbm_perf2.auc()
    print("AUC without noise column: %f, with noise column: %f" % (h2o_auc1, h2o_auc2))
if __name__ == "__main__":
    # Run under the pyunit harness when executed directly.
    pyunit_utils.standalone_test(swpreds_gbm)
else:
    # Imported by the test runner: execute immediately.
    swpreds_gbm()
| YzPaul3/h2o-3 | h2o-py/tests/testdir_algos/gbm/pyunit_swpreds_gbm.py | Python | apache-2.0 | 1,295 |
import os, sys, gevent, json, pickle, traceback, ctypes, numpy as np
import itertools, re
from time import time
from pathlib import Path
from gevent.queue import Queue
from gevent.subprocess import run, PIPE, STDOUT
from sakura.common.errors import IOReadException, IOWriteException
from base64 import b64encode, b64decode
from itertools import count
# Module-wide debug switch.
DEBUG = True

def print_debug(*args, **kwargs):
    """Forward all arguments to print() only when DEBUG is set."""
    if not DEBUG:
        return
    print(*args, **kwargs)
# iterate over "names" ensuring they are all different.
# if 2 names match, add suffix "(2)", then "(3)", etc.
def iter_uniq(names, conflict_pattern='%s(%d)'):
    """Yield the given names with duplicates disambiguated.

    The first occurrence of a name passes through unchanged; each later
    duplicate is rewritten with *conflict_pattern* (default appends
    "(2)", "(3)", ...) using the smallest counter not emitted yet.
    """
    emitted = set()
    for candidate in names:
        if candidate in emitted:
            suffix = 2
            while conflict_pattern % (candidate, suffix) in emitted:
                suffix += 1
            candidate = conflict_pattern % (candidate, suffix)
        emitted.add(candidate)
        yield candidate
def camelcase(name):
    """Convert *name* to CamelCase.

    Characters other than letters, digits, spaces and underscores act
    as word separators; each resulting word gets its first letter
    upper-cased (the rest keeps its original case) and the words are
    concatenated.  Underscores inside a word are preserved.
    """
    cleaned = re.sub('[^ _a-zA-Z0-9]', ' ', name)
    pieces = []
    for word in cleaned.split():
        pieces.append(word[0].upper() + word[1:])
    return ''.join(pieces)
def snakecase(name):
    """Convert *name* to snake_case.

    Non-alphanumeric characters (underscores included) become word
    separators, a lower-to-upper case transition splits camelCase
    words, and the lowercased words are joined with underscores.
    """
    spaced = re.sub('[^ a-zA-Z0-9]', ' ', name)
    spaced = re.sub('([a-z])([A-Z])', r'\1 \2', spaced)  # split camelCase
    words = [word.lower() for word in spaced.split()]
    return '_'.join(words)
def create_names_dict(named_objects, name_format = None):
    """Build {unique_name: obj} from an iterable of (name, obj) pairs.

    Names are optionally transformed with *name_format*, then made
    unique by suffixing duplicates with "_2", "_3", ... (see iter_uniq).
    """
    pairs = list(named_objects)
    raw_names = (pair[0] for pair in pairs)
    if name_format is not None:
        raw_names = (name_format(n) for n in raw_names)
    unique_names = iter_uniq(raw_names, conflict_pattern='%s_%d')
    return dict(zip(unique_names, (pair[1] for pair in pairs)))
class StdoutProxy(object):
    """Wrap a stream so every write() is immediately flushed.

    Attributes not defined on the proxy are delegated to the wrapped
    stream, which lets the proxy stand in for sys.stdout.
    """
    def __init__(self, stdout):
        self.stdout = stdout
    def write(self, s):
        """Write *s* then flush the underlying stream."""
        target = self.stdout
        target.write(s)
        target.flush()
    def __getattr__(self, attr):
        # Only invoked for attributes missing on the proxy itself.
        return getattr(self.stdout, attr)
def set_unbuffered_stdout():
    # Replace sys.stdout with a proxy that flushes after every write,
    # effectively making stdout unbuffered process-wide.
    sys.stdout = StdoutProxy(sys.stdout)
def wait_greenlets(*greenlets):
    # Block until at least one of the given greenlets finishes (count=1).
    gevent.joinall(greenlets, count=1)
class SimpleAttrContainer:
    """Recursive attribute bag built from keyword arguments.

    Nested dicts become SimpleAttrContainer instances; tuples and lists
    are rebuilt with their elements converted the same way.
    """
    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, self.load_val(value))
    def load_val(self, v):
        """Return *v* converted recursively (dicts become containers)."""
        if isinstance(v, dict):
            return SimpleAttrContainer(**v)
        if isinstance(v, tuple):
            return tuple(self.load_val(item) for item in v)
        if isinstance(v, list):
            return [self.load_val(item) for item in v]
        return v
    def _asdict(self):
        """Shallow copy of the attribute dict."""
        return dict(self.__dict__)
class MonitoredFunc(object):
    """Callable wrapper: run *func* while funneling any exception it
    raises into a queue, so a monitoring greenlet can re-raise it via
    catch_issues()."""
    def __init__(self, func, out_queue):
        self.func = func
        if out_queue is None:
            self.out_queue = Queue()
        else:
            self.out_queue = out_queue
    def __call__(self, *args, **kwargs):
        # Run the wrapped function; exceptions are queued instead of
        # propagating in this greenlet.
        try:
            res = self.func(*args, **kwargs)
        except BaseException as e:
            # propagate exception to monitoring greenlet
            self.out_queue.put(e)
        # A trailing None is always sent, even after an exception;
        # catch_issues() only reacts to exception objects.
        self.out_queue.put(None)
    def catch_issues(self):
        # wait for end or exception
        while True:
            out = self.out_queue.get()
            if isinstance(out, KeyboardInterrupt):
                break
            elif isinstance(out, BaseException):
                raise out
            # NOTE(review): a plain None does not end the loop, so this
            # keeps blocking after a normal completion — confirm intended.
# decorator allowing to catch exceptions in children greenlets
def monitored(func, out_queue = None):
    # Wrap func so exceptions it raises end up on a queue and can be
    # re-raised later with MonitoredFunc.catch_issues().
    return MonitoredFunc(func, out_queue)
# Cache of generated proxy classes, keyed by (override class, obj class).
OverriddenObjectClasses = {}
def override_object(obj, override):
    """Return a proxy combining *obj* with *override*.

    Method resolution favours *override* (it comes first in the MRO),
    and attribute lookup falls back from the proxy to *override*, then
    to the original *obj*.  The generated proxy class is cached per
    class pair so repeated calls reuse it.
    """
    bases = override.__class__, obj.__class__
    if bases not in OverriddenObjectClasses:
        # Favour methods of override over original object
        # (thus the subclassing)
        # Favour attributes of override over original object
        # (thus the __getattr__ method)
        class OverriddenObject(override.__class__, obj.__class__):
            def __init__(self, obj, override):
                self.override = override
                self.obj = obj
            def __getattr__(self, attr):
                # Only reached for attributes missing on the proxy itself.
                if hasattr(self.override, attr):
                    return getattr(self.override, attr)
                else:
                    return getattr(self.obj, attr)
        OverriddenObjectClasses[bases] = OverriddenObject
    cls = OverriddenObjectClasses[bases]
    return cls(obj, override)
class Enum:
    """Minimal enumeration: each word becomes an attribute whose value
    is its position in the word sequence."""
    def __init__(self, words):
        self._words = words
        index = 0
        for word in words:
            setattr(self, word, index)
            index += 1
    def name(self, val):
        """Return the word registered at integer *val*."""
        return self._words[val]
    def value(self, word):
        """Return the integer registered for *word*."""
        return getattr(self, word)
    def __len__(self):
        return len(self._words)
def make_enum(*words):
    # Convenience wrapper: make_enum('a', 'b') is Enum(('a', 'b')).
    return Enum(words)
# only load libraries if they are needed
class LazyFuncCaller:
    """Callable proxy for a C function.

    The shared library is only loaded (via ctypes.CDLL) on the first
    call, and is cached in a class-level dict shared by all instances.
    """
    libs = {}
    def __init__(self, lib_name, func_name):
        self.lib_name = lib_name
        self.func_name = func_name
    def __call__(self, *args, **kwargs):
        cache = LazyFuncCaller.libs
        if self.lib_name not in cache:
            cache[self.lib_name] = ctypes.CDLL(self.lib_name)
        native_func = getattr(cache[self.lib_name], self.func_name)
        return native_func(*args, **kwargs)
# provide rollback capability to classes
class TransactionMixin:
    """Mixin recording rollback callbacks.

    rollback() runs the callbacks in reverse registration order and
    then clears them.  Instances also work as context managers: leaving
    the with-block triggers the rollback.
    """
    def __init__(self):
        self.rollback_cbs = []
    def add_rollback_cb(self, cb):
        """Register *cb* to be run on rollback."""
        self.rollback_cbs.append(cb)
    def rollback(self):
        """Run every registered callback, most recent first, then clear."""
        for callback in reversed(self.rollback_cbs):
            callback()
        self.rollback_cbs = []
    def __enter__(self):
        return self
    def __exit__(self, type, value, traceback):
        self.rollback()
class ObservableEvent:
    """Simple pub/sub event.

    subscribe() returns an id usable with unsubscribe(); notify() calls
    every subscribed callback and silently drops the ones that raise
    (they are assumed to be obsolete).
    """
    def __init__(self):
        self.observer_callbacks = {}
        self.prev_id = -1
    def subscribe(self, cb):
        """Attach a callback; return its subscription id."""
        self.prev_id += 1
        self.observer_callbacks[self.prev_id] = cb
        return self.prev_id
    def unsubscribe(self, cb_id):
        """Detach the callback registered under *cb_id* (no-op if gone)."""
        self.observer_callbacks.pop(cb_id, None)
    def notify(self, *args, **kwargs):
        """Invoke every subscribed callback with the given arguments."""
        # Iterate over a snapshot: a callback may recursively
        # subscribe/unsubscribe while we are notifying.
        dead = set()
        for cb_id, cb in tuple(self.observer_callbacks.items()):
            try:
                cb(*args, **kwargs)
            except Exception:
                # probably an obsolete callback
                print_debug('Exception in event callback (can probably be ignored):')
                print_debug(traceback.format_exc())
                print_debug('Removing this obsolete event callback.')
                dead.add(cb_id)
        for cb_id in dead:
            self.unsubscribe(cb_id)
def debug_ending_greenlets():
    """Debug helper: print the current stack of every live greenlet
    found through the garbage collector."""
    import gc, traceback, greenlet
    for ob in gc.get_objects():
        if not isinstance(ob, greenlet.greenlet):
            continue
        if not ob:
            # bool(greenlet) is falsy for dead or not-yet-started greenlets.
            continue
        print()
        print('GREENLET:')
        print(ob)
        print(''.join(traceback.format_stack(ob.gr_frame)))
class StatusMixin:
    """Mixin serializing optional status attributes into a dict."""
    def pack_status_info(self):
        """Return a dict with 'enabled', 'disabled_message' and/or
        'warning_message', depending on which attributes exist.

        'disabled_message' is included only when enabled is falsy;
        'warning_message' only when the object is (implicitly) enabled.
        """
        info = {}
        if hasattr(self, 'enabled'):
            info['enabled'] = self.enabled
            if not self.enabled:
                info['disabled_message'] = self.disabled_message
        currently_enabled = getattr(self, 'enabled', True)
        if currently_enabled and hasattr(self, 'warning_message'):
            info['warning_message'] = self.warning_message
        return info
def run_cmd(cmd, cwd=None, **options):
    """Run *cmd* through the shell and return its decoded output.

    If *cwd* is given, the process-wide working directory is changed
    for the duration of the call.  stdout and stderr are captured
    together; a non-zero exit status raises an Exception after echoing
    the captured output.
    """
    if cwd is not None:
        saved_cwd = Path.cwd()
        os.chdir(str(cwd))
    try:
        print(str(Path.cwd()) + ': ' + cmd)
        status = run(cmd, shell=True, stdout=PIPE, stderr=STDOUT, **options)
        if status.returncode != 0:
            print(status.stdout)
            raise Exception(cmd + ' failed!')
    finally:
        # BUG FIX: restore the working directory even when the command
        # fails; the original leaked the chdir on error.
        if cwd is not None:
            os.chdir(str(saved_cwd))
    return status.stdout.decode(sys.getdefaultencoding())
def yield_operator_subdirs(repo_dir):
    """Yield directories under *repo_dir* that look like operators.

    A directory qualifies when it holds both an operator.py and an
    icon.svg, and its path relative to *repo_dir* is neither inside a
    virtualenv ('/venv') nor inside a hidden directory ('/.').
    """
    for op_py_file in repo_dir.glob('**/operator.py'):
        op_dir = op_py_file.parent
        # verify we also have the icon file
        has_icon = (op_dir / 'icon.svg').exists()
        # discard probably unwanted ones (virtual env, hidden files)
        rel = str(op_dir.relative_to(repo_dir))
        wanted = '/venv' not in rel and '/.' not in rel
        if has_icon and wanted:
            yield op_dir
class JsonProtocol:
    """JSON (de)serialization with a side channel for bytes values:
    bytes are encoded as strings of the form '__bytes_<hex>'."""
    def adapt(self, obj):
        """Recursively decode '__bytes_...' strings back into bytes.

        Note: JSON arrays deliberately come back as tuples, not lists.
        """
        if isinstance(obj, str) and obj.startswith('__bytes_'):
            return bytes.fromhex(obj[8:])
        if isinstance(obj, (tuple, list)):
            return tuple(self.adapt(item) for item in obj)
        elif isinstance(obj, dict):
            return { self.adapt(k): self.adapt(v) for k, v in obj.items() }
        else:
            return obj
    def load(self, f):
        """Read one JSON document from *f* and adapt() it."""
        try:
            s = json.load(f)
        except Exception:
            raise IOReadException("Could not read JSON message.")
        return self.adapt(s)
    def fallback_handler(self, obj):
        """json.dumps default= hook: encode bytes and numpy scalars."""
        if isinstance(obj, bytes):
            return '__bytes_' + obj.hex()
        elif isinstance(obj, np.ndarray):
            return obj.tolist()
        elif isinstance(obj, np.floating):
            return float(obj)
        elif isinstance(obj, np.integer):
            return int(obj)
        elif isinstance(obj, np.dtype):
            return str(obj)
        else:
            import traceback; traceback.print_stack()
            raise TypeError(
                "Unserializable object {} of type {}".format(obj, type(obj))
            )
    def dump(self, obj, f):
        # json.dump() performs many small writes on f, which hurts
        # performance, so we json-encode to one string (json.dumps)
        # and write it in a single call.
        try:
            res_json = json.dumps(obj,
                    separators=(',', ':'),
                    default=self.fallback_handler)
        except BaseException:
            print('FAILED to serialize an object, re-raise exception.')
            print('object is', obj)
            raise
        try:
            f.write(res_json)
        except Exception:
            # if f is closed, then the reason is obvious. Otherwise it's not.
            if f.closed:
                msg = "Failed to write JSON message (closed)."
            else:
                print('Unexpected failure while writing json message:')
                # BUG FIX: the traceback was formatted but never printed.
                print(traceback.format_exc())
                msg = "Failed to write JSON message (unexpected)."
            raise IOWriteException(msg)
class ChunkedIO:
    """File-like wrapper that caps every underlying read/write at
    CHUNK_SIZE bytes and translates low-level failures into sakura
    I/O exceptions."""
    CHUNK_SIZE = 4096
    def __init__(self, f):
        self.f = f
    def write(self, data):
        """Write *data* to the wrapped file in CHUNK_SIZE slices."""
        try:
            offset = 0
            while offset < len(data):
                self.f.write(data[offset:offset + self.CHUNK_SIZE])
                offset += self.CHUNK_SIZE
        except:
            raise IOWriteException("Could not write pickle message.")
    def read(self, n = None):
        """Read *n* bytes (everything when n is None), chunk by chunk."""
        try:
            if n is None:
                return self.f.read()
            pieces = []
            remaining = n
            while remaining > 0:
                piece = self.f.read(min(remaining, self.CHUNK_SIZE))
                if not piece:
                    break  # EOF before n bytes were available
                remaining -= len(piece)
                pieces.append(piece)
            return b''.join(pieces)
        except:
            raise IOReadException("Could not read pickle message.")
    def readline(self):
        """Delegate readline to the wrapped file."""
        try:
            return self.f.readline()
        except:
            raise IOReadException("Could not read pickle message.")
    def flush(self):
        self.f.flush()
# Shared module-level JsonProtocol instance.
JSON_PROTOCOL = JsonProtocol()
class FastChunkedPickle:
    """Pickle serialization routed through a ChunkedIO wrapper that is
    cached directly on the target file object."""
    def _get_chunked_io(self, f):
        """Return the ChunkedIO wrapper for *f*, creating and caching
        it on the file object the first time."""
        wrapper = getattr(f, '_chunked_io', None)
        if wrapper is None:
            wrapper = ChunkedIO(f)
            setattr(f, '_chunked_io', wrapper)
        return wrapper
    def load(self, f):
        """Unpickle one object from *f*."""
        return pickle.load(self._get_chunked_io(f))
    def dump(self, obj, f):
        # Pickle protocol 4 (Python >= 3.4) adds framing, which notably
        # improves performance when reading from a stream; the default
        # protocol since Python 3 is version 3.
        return pickle.dump(obj, self._get_chunked_io(f), protocol = 4)
# Shared module-level FastChunkedPickle instance.
FAST_PICKLE = FastChunkedPickle()
# websocket messages are self delimited, thus their read and write
# methods do not specify a length, and they cannot be interfaced
# with pickle dump / load interface. We use loads / dumps instead.
class WsockPickle:
    """Pickle transport over a text websocket: the pickled bytes are
    base64-encoded into one ASCII string per message."""
    def load(self, f):
        """Read one whole message from *f* and unpickle it."""
        payload = b64decode(f.read().encode('ascii'))
        return pickle.loads(payload)
    def dump(self, obj, f):
        """Pickle *obj* and write it as one base64 ASCII message."""
        encoded = b64encode(pickle.dumps(obj)).decode('ascii')
        return f.write(encoded)
# Shared module-level WsockPickle instance.
WSOCK_PICKLE = WsockPickle()
class MonitoredList:
    """Wrapper around the 'list' class that can notify about changes.

    Mutating operations fire self.on_change (an ObservableEvent) after
    delegating to the backing list; read-only operations just delegate.
    """
    def __init__(self, *args):
        # Forwarding methods are attached lazily, on first instantiation.
        MonitoredList._class_init()
        self.backend = list(*args)
        self.on_change = ObservableEvent()
    @classmethod
    def _class_init(cls):
        if not hasattr(cls, 'append'): # if not done yet
            # Mutators: delegate, then notify observers.
            for method_name in ('append clear extend insert pop remove reverse sort ' +
                    '__setitem__ __delitem__').split():
                cls._attach_method(method_name, True)
            # Read-only accessors: plain delegation.
            for method_name in ('index ' +
                    '__contains__ __getitem__ __iter__ __len__ __repr__ __reversed__').split():
                cls._attach_method(method_name, False)
    @classmethod
    def _attach_method(cls, method_name, alter):
        # Build a proxy bound to *method_name* that forwards to the
        # backend list and fires on_change when *alter* is True.
        def mlist_method(self, *args, **kwargs):
            backend_method = getattr(self.backend, method_name)
            res = backend_method(*args, **kwargs)
            if alter:
                self.on_change.notify()
            return res
        setattr(cls, method_name, mlist_method)
class Timer:
    """Tracks whether *period* seconds have elapsed since construction
    or the last reset()."""
    def __init__(self, period):
        self.period = period
        self.startup = time()
    def reset(self):
        """Restart the countdown from now."""
        self.startup = time()
    def late(self):
        """Return True once more than *period* seconds have passed."""
        return time() > self.startup + self.period
| eduble/panteda | sakura/common/tools.py | Python | gpl-3.0 | 14,228 |
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import copy
import logging
import botocore.serialize
import botocore.validate
from botocore import waiter, xform_name
from botocore.auth import AUTH_TYPE_MAPS
from botocore.awsrequest import prepare_request_dict
from botocore.config import Config
from botocore.docs.docstring import ClientMethodDocstring
from botocore.docs.docstring import PaginatorDocstring
from botocore.endpoint import EndpointCreator
from botocore.exceptions import ClientError, DataNotFoundError
from botocore.exceptions import OperationNotPageableError
from botocore.exceptions import UnknownSignatureVersionError
from botocore.hooks import first_non_none_response
from botocore.model import ServiceModel
from botocore.paginate import Paginator
from botocore.signers import RequestSigner
from botocore.utils import CachedProperty
from botocore.utils import fix_s3_host
from botocore.utils import get_service_module_name
from botocore.utils import switch_to_virtual_host_style
from botocore.utils import switch_host_s3_accelerate
from botocore.utils import S3_ACCELERATE_ENDPOINT
logger = logging.getLogger(__name__)
class ClientCreator(object):
"""Creates client objects for a service."""
    def __init__(self, loader, endpoint_resolver, user_agent, event_emitter,
                 retry_handler_factory, retry_config_translator,
                 response_parser_factory=None):
        # All collaborators are injected so the creator stays generic
        # across services: the loader reads model/retry JSON from disk,
        # the resolver maps (service, region) to endpoints, and the
        # event emitter lets plugins customize the generated classes.
        self._loader = loader
        self._endpoint_resolver = endpoint_resolver
        self._user_agent = user_agent
        self._event_emitter = event_emitter
        self._retry_handler_factory = retry_handler_factory
        self._retry_config_translator = retry_config_translator
        self._response_parser_factory = response_parser_factory
    def create_client(self, service_name, region_name, is_secure=True,
                      endpoint_url=None, verify=None,
                      credentials=None, scoped_config=None,
                      api_version=None,
                      client_config=None):
        """Create a ready-to-use client instance for *service_name*:
        load the model, build the client class, then instantiate it
        with the resolved endpoint/signing arguments."""
        service_model = self._load_service_model(service_name, api_version)
        cls = self._create_client_class(service_name, service_model)
        client_args = self._get_client_args(
            service_model, region_name, is_secure, endpoint_url,
            verify, credentials, scoped_config, client_config)
        return cls(**client_args)
    def create_client_class(self, service_name, api_version=None):
        """Build (but do not instantiate) the client class for a service."""
        service_model = self._load_service_model(service_name, api_version)
        return self._create_client_class(service_name, service_model)
    def _create_client_class(self, service_name, service_model):
        # One Python method per API operation, plus the mapping from
        # snake_case method names back to operation names.
        class_attributes = self._create_methods(service_model)
        py_name_to_operation_name = self._create_name_mapping(service_model)
        class_attributes['_PY_TO_OP_NAME'] = py_name_to_operation_name
        bases = [BaseClient]
        # Third parties may mutate class_attributes/bases via this event
        # before the class object is created.
        self._event_emitter.emit('creating-client-class.%s' % service_name,
                                 class_attributes=class_attributes,
                                 base_classes=bases)
        class_name = get_service_module_name(service_model)
        cls = type(str(class_name), tuple(bases), class_attributes)
        return cls
    def _load_service_model(self, service_name, api_version=None):
        """Load the 'service-2' JSON model and wrap it in a ServiceModel.

        Retry handlers for the service are registered as a side effect.
        """
        json_model = self._loader.load_service_model(service_name, 'service-2',
                                                     api_version=api_version)
        service_model = ServiceModel(json_model, service_name=service_name)
        self._register_retries(service_model)
        return service_model
    def _register_retries(self, service_model):
        """Hook the service's retry config into the 'needs-retry' event."""
        endpoint_prefix = service_model.endpoint_prefix
        # First, we load the entire retry config for all services,
        # then pull out just the information we need.
        original_config = self._loader.load_data('_retry')
        if not original_config:
            return
        retry_config = self._retry_config_translator.build_retry_config(
            endpoint_prefix, original_config.get('retry', {}),
            original_config.get('definitions', {}))
        logger.debug("Registering retry handlers for service: %s",
                     service_model.service_name)
        handler = self._retry_handler_factory.create_retry_handler(
            retry_config, endpoint_prefix)
        unique_id = 'retry-config-%s' % endpoint_prefix
        self._event_emitter.register('needs-retry.%s' % endpoint_prefix,
                                     handler, unique_id=unique_id)
    def _inject_s3_configuration(self, config_kwargs, scoped_config,
                                 client_config):
        """Merge s3-specific settings from the scoped config and the
        client config into config_kwargs['s3'] (client config wins)."""
        s3_configuration = None
        # Check the scoped config first.
        if scoped_config is not None:
            s3_configuration = scoped_config.get('s3')
            # Until we have proper validation of the config file (including
            # nested types), we have to account for the fact that the s3
            # key could be parsed as a string, e.g 's3 = foo'.
            # In that case we'll ignore the key for now.
            if not isinstance(s3_configuration, dict):
                logger.debug("The s3 config key is not a dictionary type, "
                             "ignoring its value of: %s", s3_configuration)
                s3_configuration = None
            # Convert logic for s3 accelerate options in the scoped config
            # so that the various strings map to the appropriate boolean value.
            if s3_configuration and \
                    'use_accelerate_endpoint' in s3_configuration:
                # Make sure any further modifications to the s3 section will
                # not affect the scoped config by making a copy of it.
                s3_configuration = s3_configuration.copy()
                # Normalize on different possible values of True
                if s3_configuration['use_accelerate_endpoint'] in [
                        True, 'True', 'true']:
                    s3_configuration['use_accelerate_endpoint'] = True
                else:
                    s3_configuration['use_accelerate_endpoint'] = False
        # Next, specific client config values take precedence over
        # specific values in the scoped config.
        if client_config is not None:
            if client_config.s3 is not None:
                if s3_configuration is None:
                    s3_configuration = client_config.s3
                else:
                    # The current s3_configuration dictionary may be
                    # from a source that only should be read from so
                    # we want to be safe and just make a copy of it to modify
                    # before it actually gets updated.
                    s3_configuration = s3_configuration.copy()
                    s3_configuration.update(client_config.s3)
        config_kwargs['s3'] = s3_configuration
    def _get_client_args(self, service_model, region_name, is_secure,
                         endpoint_url, verify, credentials,
                         scoped_config, client_config):
        """Build the kwargs dict used to construct a service client.

        Resolves the endpoint, signature version, user agent, timeouts and
        s3-specific options from the service model plus the scoped (file)
        config and the client config, and returns the collaborators the
        client constructor needs.
        """
        service_name = service_model.endpoint_prefix
        protocol = service_model.metadata['protocol']
        parameter_validation = True
        if client_config:
            parameter_validation = client_config.parameter_validation
        serializer = botocore.serialize.create_serializer(
            protocol, parameter_validation)
        # Each client gets its own copy of the emitter so per-client
        # handler registrations do not leak across clients.
        event_emitter = copy.copy(self._event_emitter)
        response_parser = botocore.parsers.create_parser(protocol)
        endpoint_bridge = ClientEndpointBridge(
            self._endpoint_resolver, scoped_config, client_config,
            service_signing_name=service_model.metadata.get('signingName'))
        endpoint_config = endpoint_bridge.resolve(
            service_name, region_name, endpoint_url, is_secure)
        # Override the user agent if specified in the client config.
        user_agent = self._user_agent
        if client_config is not None:
            if client_config.user_agent is not None:
                user_agent = client_config.user_agent
            if client_config.user_agent_extra is not None:
                user_agent += ' %s' % client_config.user_agent_extra
        signer = RequestSigner(
            service_name, endpoint_config['signing_region'],
            endpoint_config['signing_name'],
            endpoint_config['signature_version'],
            credentials, event_emitter)
        # Create a new client config to be passed to the client based
        # on the final values. We do not want the user to be able
        # to try to modify an existing client with a client config.
        config_kwargs = dict(
            region_name=endpoint_config['region_name'],
            signature_version=endpoint_config['signature_version'],
            user_agent=user_agent)
        if client_config is not None:
            config_kwargs.update(
                connect_timeout=client_config.connect_timeout,
                read_timeout=client_config.read_timeout)
        # Next, specific client config values take precedence over
        # specific values in the scoped config.
        # Add any additional s3 configuration for client
        self._inject_s3_configuration(
            config_kwargs, scoped_config, client_config)
        new_config = Config(**config_kwargs)
        endpoint_creator = EndpointCreator(event_emitter)
        endpoint = endpoint_creator.create_endpoint(
            service_model, region_name=endpoint_config['region_name'],
            endpoint_url=endpoint_config['endpoint_url'], verify=verify,
            response_parser_factory=self._response_parser_factory,
            timeout=(new_config.connect_timeout, new_config.read_timeout))
        return {
            'serializer': serializer,
            'endpoint': endpoint,
            'response_parser': response_parser,
            'event_emitter': event_emitter,
            'request_signer': signer,
            'service_model': service_model,
            'loader': self._loader,
            'client_config': new_config
        }
def _create_methods(self, service_model):
op_dict = {}
for operation_name in service_model.operation_names:
py_operation_name = xform_name(operation_name)
op_dict[py_operation_name] = self._create_api_method(
py_operation_name, operation_name, service_model)
return op_dict
def _create_name_mapping(self, service_model):
# py_name -> OperationName, for every operation available
# for a service.
mapping = {}
for operation_name in service_model.operation_names:
py_operation_name = xform_name(operation_name)
mapping[py_operation_name] = operation_name
return mapping
def _create_api_method(self, py_operation_name, operation_name,
service_model):
def _api_call(self, *args, **kwargs):
# We're accepting *args so that we can give a more helpful
# error message than TypeError: _api_call takes exactly
# 1 argument.
if args:
raise TypeError(
"%s() only accepts keyword arguments." % py_operation_name)
# The "self" in this scope is referring to the BaseClient.
return self._make_api_call(operation_name, kwargs)
_api_call.__name__ = str(py_operation_name)
# Add the docstring to the client method
operation_model = service_model.operation_model(operation_name)
docstring = ClientMethodDocstring(
operation_model=operation_model,
method_name=operation_name,
event_emitter=self._event_emitter,
method_description=operation_model.documentation,
example_prefix='response = client.%s' % py_operation_name,
include_signature=False
)
_api_call.__doc__ = docstring
return _api_call
class ClientEndpointBridge(object):
    """Bridges endpoint data and client creation

    This class handles taking out the relevant arguments from the endpoint
    resolver and determining which values to use, taking into account any
    client configuration options and scope configuration options.

    This class also handles determining what, if any, region to use if no
    explicit region setting is provided. For example, Amazon S3 client will
    utilize "us-east-1" by default if no region can be resolved."""

    DEFAULT_ENDPOINT = '{service}.{region}.amazonaws.com'

    def __init__(self, endpoint_resolver, scoped_config=None,
                 client_config=None, default_endpoint=None,
                 service_signing_name=None):
        self.service_signing_name = service_signing_name
        self.endpoint_resolver = endpoint_resolver
        self.scoped_config = scoped_config
        self.client_config = client_config
        self.default_endpoint = default_endpoint or self.DEFAULT_ENDPOINT

    def resolve(self, service_name, region_name=None, endpoint_url=None,
                is_secure=True):
        """Resolve endpoint/region/signing data for ``service_name``.

        Returns a dict (see :meth:`_create_result`).  When the resolver has
        no data for the service, an endpoint is assumed from the default
        hostname template instead.
        """
        region_name = self._check_default_region(service_name, region_name)
        resolved = self.endpoint_resolver.construct_endpoint(
            service_name, region_name)
        if resolved:
            return self._create_endpoint(
                resolved, service_name, region_name, endpoint_url, is_secure)
        else:
            return self._assume_endpoint(service_name, region_name,
                                         endpoint_url, is_secure)

    def _check_default_region(self, service_name, region_name):
        # Returns None when neither an explicit region nor a client config
        # region is available.
        if region_name is not None:
            return region_name
        # Use the client_config region if no explicit region was provided.
        if self.client_config and self.client_config.region_name is not None:
            return self.client_config.region_name

    def _create_endpoint(self, resolved, service_name, region_name,
                         endpoint_url, is_secure):
        region_name, signing_region = self._pick_region_values(
            resolved, region_name, endpoint_url)
        if endpoint_url is None:
            # Use the sslCommonName over the hostname for Python 2.6 compat.
            hostname = resolved.get('sslCommonName', resolved.get('hostname'))
            endpoint_url = self._make_url(hostname, is_secure,
                                          resolved.get('protocols', []))
        signature_version = self._resolve_signature_version(
            service_name, resolved)
        signing_name = self._resolve_signing_name(service_name, resolved)
        return self._create_result(
            service_name=service_name, region_name=region_name,
            signing_region=signing_region, signing_name=signing_name,
            endpoint_url=endpoint_url, metadata=resolved,
            signature_version=signature_version)

    def _assume_endpoint(self, service_name, region_name, endpoint_url,
                         is_secure):
        if endpoint_url is None:
            # Expand the default hostname URI template.
            hostname = self.default_endpoint.format(
                service=service_name, region=region_name)
            endpoint_url = self._make_url(hostname, is_secure,
                                          ['http', 'https'])
        logger.debug('Assuming an endpoint for %s, %s: %s',
                     service_name, region_name, endpoint_url)
        # We still want to allow the user to provide an explicit version.
        signature_version = self._resolve_signature_version(
            service_name, {'signatureVersions': ['v4']})
        signing_name = self._resolve_signing_name(service_name, resolved={})
        return self._create_result(
            service_name=service_name, region_name=region_name,
            signing_region=region_name, signing_name=signing_name,
            signature_version=signature_version, endpoint_url=endpoint_url,
            metadata={})

    def _create_result(self, service_name, region_name, signing_region,
                       signing_name, endpoint_url, signature_version,
                       metadata):
        # Single place that defines the shape of the resolved-endpoint dict.
        return {
            'service_name': service_name,
            'region_name': region_name,
            'signing_region': signing_region,
            'signing_name': signing_name,
            'endpoint_url': endpoint_url,
            'signature_version': signature_version,
            'metadata': metadata
        }

    def _make_url(self, hostname, is_secure, supported_protocols):
        # Prefer https only when requested *and* supported by the endpoint.
        if is_secure and 'https' in supported_protocols:
            scheme = 'https'
        else:
            scheme = 'http'
        return '%s://%s' % (scheme, hostname)

    def _resolve_signing_name(self, service_name, resolved):
        # CredentialScope overrides everything else.
        if 'credentialScope' in resolved \
                and 'service' in resolved['credentialScope']:
            return resolved['credentialScope']['service']
        # Use the signingName from the model if present.
        if self.service_signing_name:
            return self.service_signing_name
        # Just assume it is the same as the service name.
        return service_name

    def _pick_region_values(self, resolved, region_name, endpoint_url):
        signing_region = region_name
        if endpoint_url is None:
            # Do not use the region name or signing name from the resolved
            # endpoint if the user explicitly provides an endpoint_url. This
            # would happen if we resolve to an endpoint where the service has
            # a "defaults" section that overrides all endpoint with a single
            # hostname and credentialScope. This has been the case historically
            # for how STS has worked. The only way to resolve an STS endpoint
            # was to provide a region_name and an endpoint_url. In that case,
            # we would still resolve an endpoint, but we would not use the
            # resolved endpointName or signingRegion because we want to allow
            # custom endpoints.
            region_name = resolved['endpointName']
            signing_region = region_name
            if 'credentialScope' in resolved \
                    and 'region' in resolved['credentialScope']:
                signing_region = resolved['credentialScope']['region']
        return region_name, signing_region

    def _resolve_signature_version(self, service_name, resolved):
        # Client config overrides everything.
        client = self.client_config
        if client and client.signature_version is not None:
            return client.signature_version
        # Scoped config overrides picking from the endpoint metadata.
        scoped = self.scoped_config
        if scoped is not None:
            service_config = scoped.get(service_name)
            if service_config is not None and isinstance(service_config, dict):
                version = service_config.get('signature_version')
                if version:
                    logger.debug(
                        "Switching signature version for service %s "
                        "to version %s based on config file override.",
                        service_name, version)
                    return version
        # Pick a signature version from the endpoint metadata if present.
        if 'signatureVersions' in resolved:
            potential_versions = resolved['signatureVersions']
            if service_name == 's3':
                # We currently prefer s3 over s3v4.
                if 's3' in potential_versions:
                    return 's3'
                elif 's3v4' in potential_versions:
                    return 's3v4'
            if 'v4' in potential_versions:
                return 'v4'
            # Now just iterate over the signature versions in order until we
            # find the first one that is known to Botocore.
            for known in AUTH_TYPE_MAPS:
                if known in potential_versions:
                    return known
        raise UnknownSignatureVersionError(
            signature_version=resolved.get('signatureVersions'))
class BaseClient(object):
    """Base class for service clients; the concrete operation methods are
    attached at runtime by the client creator."""
    # This is actually reassigned with the py->op_name mapping
    # when the client creator creates the subclass. This value is used
    # because calls such as client.get_paginator('list_objects') use the
    # snake_case name, but we need to know the ListObjects form.
    # xform_name() does the ListObjects->list_objects conversion, but
    # we need the reverse mapping here.
    _PY_TO_OP_NAME = {}
    def __init__(self, serializer, endpoint, response_parser,
                 event_emitter, request_signer, service_model, loader,
                 client_config):
        """Wire together the collaborators produced by the client creator."""
        self._serializer = serializer
        self._endpoint = endpoint
        self._response_parser = response_parser
        self._request_signer = request_signer
        # Lazily-populated cache for the paginator ('page_config') and
        # waiter ('waiter_config') models; see can_paginate() and
        # _get_waiter_config().
        self._cache = {}
        self._loader = loader
        self._client_config = client_config
        self.meta = ClientMeta(event_emitter, self._client_config,
                               endpoint.host, service_model,
                               self._PY_TO_OP_NAME)
        self._register_handlers()
    def _register_handlers(self):
        # Register the handler required to sign requests.
        self.meta.events.register('request-created.%s' %
                                  self.meta.service_model.endpoint_prefix,
                                  self._request_signer.handler)
        self._register_s3_specific_handlers()
    def _register_s3_specific_handlers(self):
        # Register all of the s3 specific handlers
        if self.meta.config.s3 is None:
            s3_addressing_style = None
            s3_accelerate = None
        else:
            s3_addressing_style = self.meta.config.s3.get('addressing_style')
            s3_accelerate = self.meta.config.s3.get('use_accelerate_endpoint')
        # Enable accelerate if the configuration is set to true or the
        # endpoint being used matches one of the Accelerate endpoints.
        if s3_accelerate or S3_ACCELERATE_ENDPOINT in self._endpoint.host:
            # When Amazon S3 accelerate is being used, always use the virtual
            # style of addressing because it is required.
            self._force_virtual_style_s3_addressing()
            # Also make sure that the hostname gets switched to
            # s3-accelerate.amazonaws.com
            self.meta.events.register_first(
                'request-created.s3', switch_host_s3_accelerate)
        elif s3_addressing_style:
            # Otherwise go ahead with the style the user may have specified.
            if s3_addressing_style == 'path':
                self._force_path_style_s3_addressing()
            elif s3_addressing_style == 'virtual':
                self._force_virtual_style_s3_addressing()
    def _force_path_style_s3_addressing(self):
        # Do not try to modify the host if path is specified. The
        # ``fix_s3_host`` usually switches the addressing style to virtual.
        self.meta.events.unregister('before-sign.s3', fix_s3_host)
    def _force_virtual_style_s3_addressing(self):
        # If the virtual host addressing style is being forced,
        # switch the default fix_s3_host handler for the more general
        # switch_to_virtual_host_style handler that does not have opt out
        # cases (other than throwing an error if the name is DNS incompatible)
        self.meta.events.unregister('before-sign.s3', fix_s3_host)
        self.meta.events.register(
            'before-sign.s3', switch_to_virtual_host_style)
    @property
    def _service_model(self):
        # Convenience accessor for the service model stored on ``meta``.
        return self.meta.service_model
    def _make_api_call(self, operation_name, api_params):
        """Serialize ``api_params``, emit the before/after-call events,
        perform the request and return the parsed response.

        Raises ClientError for any HTTP status code >= 300.
        """
        request_context = {}
        operation_model = self._service_model.operation_model(operation_name)
        request_dict = self._convert_to_request_dict(
            api_params, operation_model, context=request_context)
        # A before-call handler may short-circuit the request by returning
        # an (http, parsed) response tuple.
        handler, event_response = self.meta.events.emit_until_response(
            'before-call.{endpoint_prefix}.{operation_name}'.format(
                endpoint_prefix=self._service_model.endpoint_prefix,
                operation_name=operation_name),
            model=operation_model, params=request_dict,
            request_signer=self._request_signer, context=request_context)
        if event_response is not None:
            http, parsed_response = event_response
        else:
            http, parsed_response = self._endpoint.make_request(
                operation_model, request_dict)
        self.meta.events.emit(
            'after-call.{endpoint_prefix}.{operation_name}'.format(
                endpoint_prefix=self._service_model.endpoint_prefix,
                operation_name=operation_name),
            http_response=http, parsed=parsed_response,
            model=operation_model, context=request_context
        )
        if http.status_code >= 300:
            raise ClientError(parsed_response, operation_name)
        else:
            return parsed_response
    def _convert_to_request_dict(self, api_params, operation_model,
                                 context=None):
        """Serialize user-supplied parameters into a request dict."""
        # Given the API params provided by the user and the operation_model
        # we can serialize the request to a request_dict.
        operation_name = operation_model.name
        # Emit an event that allows users to modify the parameters at the
        # beginning of the method. It allows handlers to modify existing
        # parameters or return a new set of parameters to use.
        responses = self.meta.events.emit(
            'provide-client-params.{endpoint_prefix}.{operation_name}'.format(
                endpoint_prefix=self._service_model.endpoint_prefix,
                operation_name=operation_name),
            params=api_params, model=operation_model, context=context)
        api_params = first_non_none_response(responses, default=api_params)
        event_name = (
            'before-parameter-build.{endpoint_prefix}.{operation_name}')
        self.meta.events.emit(
            event_name.format(
                endpoint_prefix=self._service_model.endpoint_prefix,
                operation_name=operation_name),
            params=api_params, model=operation_model, context=context)
        request_dict = self._serializer.serialize_to_request(
            api_params, operation_model)
        prepare_request_dict(request_dict, endpoint_url=self._endpoint.host,
                             user_agent=self._client_config.user_agent)
        return request_dict
    def get_paginator(self, operation_name):
        """Create a paginator for an operation.
        :type operation_name: string
        :param operation_name: The operation name. This is the same name
            as the method name on the client. For example, if the
            method name is ``create_foo``, and you'd normally invoke the
            operation as ``client.create_foo(**kwargs)``, if the
            ``create_foo`` operation can be paginated, you can use the
            call ``client.get_paginator("create_foo")``.
        :raise OperationNotPageableError: Raised if the operation is not
            pageable. You can use the ``client.can_paginate`` method to
            check if an operation is pageable.
        :rtype: L{botocore.paginate.Paginator}
        :return: A paginator object.
        """
        if not self.can_paginate(operation_name):
            raise OperationNotPageableError(operation_name=operation_name)
        else:
            actual_operation_name = self._PY_TO_OP_NAME[operation_name]
            # Create a new paginate method that will serve as a proxy to
            # the underlying Paginator.paginate method. This is needed to
            # attach a docstring to the method.
            def paginate(self, **kwargs):
                return Paginator.paginate(self, **kwargs)
            paginator_config = self._cache['page_config'][
                actual_operation_name]
            # Add the docstring for the paginate method.
            paginate.__doc__ = PaginatorDocstring(
                paginator_name=actual_operation_name,
                event_emitter=self.meta.events,
                service_model=self.meta.service_model,
                paginator_config=paginator_config,
                include_signature=False
            )
            # Rename the paginator class based on the type of paginator.
            paginator_class_name = str('%s.Paginator.%s' % (
                get_service_module_name(self.meta.service_model),
                actual_operation_name))
            # Create the new paginator class
            documented_paginator_cls = type(
                paginator_class_name, (Paginator,), {'paginate': paginate})
            paginator = documented_paginator_cls(
                getattr(self, operation_name),
                paginator_config)
            return paginator
    def can_paginate(self, operation_name):
        """Check if an operation can be paginated.
        :type operation_name: string
        :param operation_name: The operation name. This is the same name
            as the method name on the client. For example, if the
            method name is ``create_foo``, and you'd normally invoke the
            operation as ``client.create_foo(**kwargs)``, if the
            ``create_foo`` operation can be paginated, you can use the
            call ``client.get_paginator("create_foo")``.
        :return: ``True`` if the operation can be paginated,
            ``False`` otherwise.
        """
        if 'page_config' not in self._cache:
            try:
                page_config = self._loader.load_service_model(
                    self._service_model.service_name,
                    'paginators-1',
                    self._service_model.api_version)['pagination']
                self._cache['page_config'] = page_config
            except DataNotFoundError:
                # No paginator model for this service: cache an empty dict
                # so the load is not retried on every call.
                self._cache['page_config'] = {}
        actual_operation_name = self._PY_TO_OP_NAME[operation_name]
        return actual_operation_name in self._cache['page_config']
    def _get_waiter_config(self):
        # Load (and cache) the waiters-2 model for this service; an empty
        # dict is cached when the service defines no waiters.
        if 'waiter_config' not in self._cache:
            try:
                waiter_config = self._loader.load_service_model(
                    self._service_model.service_name,
                    'waiters-2',
                    self._service_model.api_version)
                self._cache['waiter_config'] = waiter_config
            except DataNotFoundError:
                self._cache['waiter_config'] = {}
        return self._cache['waiter_config']
    def get_waiter(self, waiter_name):
        """Return a waiter object for ``waiter_name`` (snake_case name).

        :raises ValueError: if the service defines no waiter by that name.
        """
        config = self._get_waiter_config()
        if not config:
            raise ValueError("Waiter does not exist: %s" % waiter_name)
        model = waiter.WaiterModel(config)
        mapping = {}
        for name in model.waiter_names:
            mapping[xform_name(name)] = name
        if waiter_name not in mapping:
            raise ValueError("Waiter does not exist: %s" % waiter_name)
        return waiter.create_waiter_with_client(
            mapping[waiter_name], model, self)
    @CachedProperty
    def waiter_names(self):
        """Returns a list of all available waiters."""
        config = self._get_waiter_config()
        if not config:
            return []
        model = waiter.WaiterModel(config)
        # Waiter configs is a dict, we just want the waiter names
        # which are the keys in the dict.
        return [xform_name(name) for name in model.waiter_names]
class ClientMeta(object):
    """Holds additional client methods.

    This class holds additional information for clients. It exists for
    two reasons:

        * To give advanced functionality to clients
        * To namespace additional client attributes from the operation
          names which are mapped to methods at runtime. This avoids
          ever running into collisions with operation names.

    """

    def __init__(self, events, client_config, endpoint_url, service_model,
                 method_to_api_mapping):
        self.events = events
        self._client_config = client_config
        self._endpoint_url = endpoint_url
        self._service_model = service_model
        self._method_to_api_mapping = method_to_api_mapping

    @property
    def config(self):
        """The (read-only) client configuration object."""
        return self._client_config

    @property
    def endpoint_url(self):
        """The resolved endpoint URL the client talks to."""
        return self._endpoint_url

    @property
    def method_to_api_mapping(self):
        """Mapping of python method names to API operation names."""
        return self._method_to_api_mapping

    @property
    def region_name(self):
        """The region the client is configured for."""
        return self._client_config.region_name

    @property
    def service_model(self):
        """The service model describing the client's API."""
        return self._service_model
| morissette/devopsdays-hackathon-2016 | venv/lib/python2.7/site-packages/botocore/client.py | Python | gpl-3.0 | 33,345 |
# -*- coding: utf-8 -*-
'''
Copyright (c) 2015 Heidelberg University Library
Distributed under the GNU GPL v3. For full terms see the file
LICENSE.md
'''
from omptables import define_omp_tables
#########################################################################
# This scaffolding model makes your app work on Google App Engine too
# File is released under public domain and you can use without limitations
#########################################################################
# if SSL/HTTPS is properly configured and you want all HTTP requests to
# be redirected to HTTPS, uncomment the line below:
# request.requires_https()
# app configuration made easy. Look inside private/appconfig.ini
from gluon.contrib.appconfig import AppConfig
from gluon.tools import Auth, Service, PluginManager, Crud
# once in production, remove reload=True to gain full speed
myconf = AppConfig(reload=True)
if not request.env.web2py_runtime_gae:
    # if NOT running on Google App Engine use SQLite or other DB
    db = DAL(
        myconf.take('db.uri'),
        pool_size=myconf.take(
            'db.pool_size',
            cast=int),
        check_reserved=None,lazy_tables=True)
else:
    # connect to Google BigTable (optional 'google:datastore://namespace')
    db = DAL('google:datastore+ndb')
    # store sessions and tickets there
    session.connect(request, response, db=db)
    # or store session in Memcache, Redis, etc.
    # from gluon.contrib.memdb import MEMDB
    # from google.appengine.api.memcache import Client
    # session.connect(request, response, db = MEMDB(Client()))
# by default give a view/generic.extension to all actions from localhost
# none otherwise. a pattern can be 'controller/function.extension'
response.generic_patterns = ['*'] if request.is_local else []
# choose a style for forms
# or 'bootstrap3_stacked' or 'bootstrap2' or other
response.formstyle = myconf.take('forms.formstyle')
response.form_label_separator = myconf.take('forms.separator')
# (optional) optimize handling of static files
# response.optimize_css = 'concat,minify,inline'
# response.optimize_js = 'concat,minify,inline'
# (optional) static assets folder versioning
# response.static_version = '0.0.0'
#########################################################################
# Here is sample code if you need for
# - email capabilities
# - authentication (registration, login, logout, ... )
# - authorization (role based authorization)
# - services (xml, csv, json, xmlrpc, jsonrpc, amf, rss)
# - old style crud actions
# (more options discussed in gluon/tools.py)
#########################################################################
auth = Auth(db)
service = Service()
plugins = PluginManager()
crud = Crud(db)
# before define_tables()
# NOTE(review): the hmac key is hard-coded in source; consider moving it
# to private/appconfig.ini so it is not committed to version control.
auth.settings.hmac_key = 'sha512:2674c3a8-8bdd-4f1c-8eb1-efb9ec41ac24'
# create all tables needed by auth if not custom tables
# NOTE(review): auth.define_tables is commented out -- presumably the auth
# tables are created by define_omp_tables(db) below; verify.
# auth.define_tables(username=False, signature=False)
# configure email
mail = auth.settings.mailer
mail.settings.server = 'logging' if request.is_local else myconf.take(
    'smtp.sender')
mail.settings.sender = myconf.take('smtp.sender')
mail.settings.login = myconf.take('smtp.login')
auth.settings.mailer = mail  # for user email verification
auth.settings.registration_requires_verification = True
auth.settings.registration_requires_approval = True
auth.messages.verify_email = 'Click on the link http://' + request.env.http_host + \
    URL('default', 'user', args=['verify_email']
        ) + '/%(key)s to verify your email'
auth.settings.reset_password_requires_verification = True
auth.messages.reset_password = 'Click on the link http://' + request.env.http_host + \
    URL('default', 'user', args=['reset_password']
        ) + '/%(key)s to reset your password'
# configure auth policy
# NOTE(review): these lines override the verification/approval settings
# assigned above (True -> False); keep only one block to avoid confusion.
auth.settings.registration_requires_verification = False
auth.settings.registration_requires_approval = False
auth.settings.reset_password_requires_verification = True
#########################################################################
# Define your tables below (or better in another model file) for example
#
# >>> db.define_table('mytable',Field('myfield','string'))
##
# Fields can be 'string','text','password','integer','double','boolean'
# 'date','time','datetime','blob','upload', 'reference TABLENAME'
# There is an implicit 'id integer autoincrement' field
# Consult manual for more options, validators, etc.
#
# More API examples for controllers:
#
# >>> db.mytable.insert(myfield='value')
# >>> rows=db(db.mytable.myfield=='value').select(db.mytable.ALL)
# >>> for row in rows: print row.id, row.myfield
#########################################################################
# after defining tables, uncomment below to enable auditing
# auth.enable_record_versioning(db)
# NOTE(review): ``settings`` is not defined in this file -- presumably it
# is provided by another model file loaded earlier; otherwise the three
# lines below raise NameError at request time. Verify.
mail.settings.server = settings.email_server
mail.settings.sender = settings.email_sender
mail.settings.login = settings.email_login
# Create the application's OMP tables on the configured database.
define_omp_tables(db)
| UB-Heidelberg/UBHD-OMPArthistorikum | models/db.py | Python | gpl-3.0 | 4,966 |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# Written by: David Lanstein ( dlanstein gmail com )
from base import SforceBaseClient
class SforceEnterpriseClient(SforceBaseClient):
    """Salesforce client for the Enterprise (strongly-typed) WSDL.

    sObjects are marshalled to XML before being handed to the base client,
    because the Enterprise WSDL declares concrete sObject types.
    """
    def __init__(self, wsdl, **kwargs):
        super(SforceEnterpriseClient, self).__init__(wsdl, **kwargs)

    # Core calls
    def convertLead(self, leadConverts):
        """Convert lead(s); marshals the LeadConvert sObjects to XML first."""
        xml = self._marshallSObjects(leadConverts)
        return super(SforceEnterpriseClient, self).convertLead(xml)

    def create(self, sObjects):
        """Create the given sObject(s)."""
        xml = self._marshallSObjects(sObjects)
        return super(SforceEnterpriseClient, self).create(xml)

    def merge(self, sObjects):
        """Merge the given sObject(s)."""
        xml = self._marshallSObjects(sObjects)
        return super(SforceEnterpriseClient, self).merge(xml)

    def process(self, sObjects):
        """Submit the given sObject(s) for workflow processing."""
        xml = self._marshallSObjects(sObjects)
        return super(SforceEnterpriseClient, self).process(xml)

    def retrieve(self, fieldList, sObjectType, ids):
        '''
        Currently, this uses query() to emulate the retrieve() functionality, as suds' unmarshaller
        borks on the sf: prefix that Salesforce prepends to all fields other than Id and type (any
        fields not defined in the 'sObject' section of the Enterprise WSDL)
        '''
        # HACK HACK HACKITY HACK
        if not isinstance(ids, (list, tuple)):
            ids = (ids, )
        # The only way to make sure we return objects in the correct order, and return None where an
        # object can't be retrieved by Id, is to query each ID individually
        sObjects = []
        for id in ids:
            # Escape characters that could break out of the quoted SOQL string
            # literal -- guards against SOQL injection through ``ids``.
            # (fieldList and sObjectType are assumed to be trusted,
            # developer-supplied values -- TODO confirm.)
            safe_id = id.replace('\\', '\\\\').replace("'", "\\'")
            queryString = 'SELECT Id, ' + fieldList + ' FROM ' + sObjectType + \
                ' WHERE Id = \'' + safe_id + '\' LIMIT 1'
            queryResult = self.query(queryString)
            if queryResult.size == 0:
                # Id not found: keep positional alignment with ``ids``.
                sObjects.append(None)
                continue
            # There will be exactly one record in queryResult.records[] at this point
            record = queryResult.records[0]
            sObject = self.generateObject(sObjectType)
            for (k, v) in record:
                setattr(sObject, k, v)
            sObjects.append(sObject)
        return self._handleResultTyping(sObjects)

    def search(self, searchString):
        """Run a SOSL search, normalizing the empty-result edge case."""
        searchResult = super(SforceEnterpriseClient, self).search(searchString)
        # HACK <result/> gets unmarshalled as '' instead of an empty SearchResult
        # return an empty SearchResult instead
        if searchResult == '':
            return self._sforce.factory.create('SearchResult')
        return searchResult

    def update(self, sObjects):
        """Update the given sObject(s)."""
        xml = self._marshallSObjects(sObjects)
        return super(SforceEnterpriseClient, self).update(xml)

    def upsert(self, externalIdFieldName, sObjects):
        """Upsert the given sObject(s) keyed on ``externalIdFieldName``."""
        xml = self._marshallSObjects(sObjects)
        return super(SforceEnterpriseClient, self).upsert(externalIdFieldName, xml)

    # Utility calls
    def sendEmail(self, sObjects):
        """Send the given email message sObject(s)."""
        xml = self._marshallSObjects(sObjects)
        return super(SforceEnterpriseClient, self).sendEmail(xml)
| clearcare/salesforce-python-toolkit | sforce/enterprise.py | Python | lgpl-3.0 | 3,804 |
import functools

import errors
def validate_num_arguments_eq(num_args):
    """Decorator factory: require exactly ``num_args`` items in args[1].

    The wrapped callable is expected to receive the argument list as its
    second positional parameter (e.g. ``handler(self, arg_list)``).

    :raises errors.InvalidArgumentError: on a length mismatch.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapped_func(*args, **kwargs):
            if len(args[1]) != num_args:
                raise errors.InvalidArgumentError
            # Bug fix: propagate the wrapped function's return value
            # (it was previously discarded, always returning None).
            return func(*args, **kwargs)
        return wrapped_func
    return decorator
def validate_num_arguments_lt(num_args):
    """Decorator factory: reject calls where args[1] has more than
    ``num_args`` items.

    NOTE(review): despite the ``lt`` name, a length *equal* to ``num_args``
    is accepted (the check is ``len > num_args``); kept as-is for
    backward compatibility.

    :raises errors.InvalidArgumentError: when too many items are supplied.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapped_func(*args, **kwargs):
            if len(args[1]) > num_args:
                raise errors.InvalidArgumentError
            # Bug fix: propagate the wrapped function's return value
            # (it was previously discarded, always returning None).
            return func(*args, **kwargs)
        return wrapped_func
    return decorator
def validate_num_arguments_gt(num_args):
    """Decorator factory: reject calls where args[1] has fewer than
    ``num_args`` items.

    NOTE(review): despite the ``gt`` name, a length *equal* to ``num_args``
    is accepted (the check is ``len < num_args``); kept as-is for
    backward compatibility.

    :raises errors.InvalidArgumentError: when too few items are supplied.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapped_func(*args, **kwargs):
            if len(args[1]) < num_args:
                raise errors.InvalidArgumentError
            # Bug fix: propagate the wrapped function's return value
            # (it was previously discarded, always returning None).
            return func(*args, **kwargs)
        return wrapped_func
    return decorator
def parse_index(lst, id):
    """Validate that ``id`` is a 1-based digit index into ``lst`` and
    return the corresponding 0-based position."""
    if not id.isdigit():
        raise errors.ExpectedItemError
    position = int(id) - 1
    if not 0 <= position < len(lst):
        raise errors.InvalidItemError
    return position
| dansackett/Todooo | todooo/validators.py | Python | mit | 1,462 |
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from models import Estudiante
# Define an inline admin descriptor for Estudiante model
# which acts a bit like a singleton
class EstudianteInline(admin.StackedInline):
    """Inline admin descriptor for the Estudiante profile model.

    Acts a bit like a singleton: each User gets one inline Estudiante
    section on its admin change page.
    """
    model = Estudiante
    can_delete = False
    verbose_name_plural = 'estudiantes'
# Define a new User admin that shows the Estudiante profile inline.
class UserAdmin(UserAdmin):
    """django.contrib.auth's UserAdmin extended with the student inline.

    NOTE(review): the class deliberately shadows the imported ``UserAdmin``
    base class name; importing the base as ``BaseUserAdmin`` would be
    clearer, but the behavior is identical.
    """
    inlines = (EstudianteInline, )

# Re-register the User model with the customized admin class.
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
# -*- coding:utf-8 -*-
__author__ = 'Qian'
import tornado.web
import os
from MyRedisSession.RedisSession import SessionManager
# Tornado application settings.
setting = dict(
    # Filesystem locations for templates and static assets.
    template_path=os.path.join(os.path.dirname(__file__), "MyTemplate"),
    static_path=os.path.join(os.path.dirname(__file__), "Static"),
    debug = True,
    # NOTE(review): cookie/session secrets are hard-coded in source;
    # consider loading them from the environment or a config file.
    cookie_secret = "SGF0YWtlQDIwMTUtMTAtMDV8Y29va2llfDkxNDY2NzdkLTZiMWQtMTFlNS04NTljLWE0NWU2MGQ3MzRmOXw=",
    session_secret = "SGF0YWtlQDIwMTUtMTAtMDV8c2Vzc2lvbnw5MTRiNzcyOC02YjFkLTExZTUtYjEwNC1hNDVlNjBkNzM0Zjl8",
    login_url = "/Login/",
    xsrf_cookies = True,
    # Session lifetime in seconds.
    session_timeout =600,
)
# Example of constructing a plain tornado Application with these settings:
#application = tornado.web.Application(
#        handlers=url,
#        **setting
#        )
class MyApplication(tornado.web.Application):
    """Tornado application that attaches a Redis-backed session manager."""
    def __init__(self, handlers, **settings):
        # Initialize the parent tornado.web.Application.
        tornado.web.Application.__init__(self, handlers, **settings)
        # Initialize this application's session manager from the configured
        # secret and timeout (both required keys in ``settings``).
        self.session_manager = SessionManager(settings["session_secret"], settings["session_timeout"])
from rest_auth.registration.serializers import (
RegisterSerializer as RARegisterSerializer
)
from rest_auth.serializers import (
LoginSerializer as RALoginSerializer,
PasswordResetSerializer as RAPasswordResetSerializer
)
from rest_framework import serializers
from .fields import TimezoneField
class LoginSerializer(RALoginSerializer):
    """Own serializer for login.

    Drops the username field: authentication is by e-mail only.
    """
    username = None  # DO NOT REMOVE THIS LINE.
class RegisterSerializer(RARegisterSerializer):
    """Registration serializer: e-mail/password signup plus profile fields.

    ``name`` and ``tz`` are collected here and handed to the
    user-creation flow through :meth:`get_cleaned_data`.
    """

    # DO NOT REMOVE the line below (username).
    # rest_auth's base serializer otherwise demands a ``username`` param
    # that must be non-blank yet no longer than 0 characters — setting
    # the field to None removes it entirely.
    username = None

    name = serializers.CharField(required=True, max_length=25)
    tz = TimezoneField(required=True)

    def get_cleaned_data(self):
        """Return the validated values consumed by user creation."""
        validated = self.validated_data
        return {
            target: validated.get(source, '')
            for target, source in (('email', 'email'),
                                   ('password', 'password1'),
                                   ('name', 'name'),
                                   ('tz', 'tz'))
        }
class PasswordResetSerializer(RAPasswordResetSerializer):
    """Own serializer for reset password."""

    def get_email_options(self):
        """Override this method to change default e-mail options"""
        # Point django.contrib.auth's password-reset mail at this app's
        # subject/body templates instead of the rest_auth defaults.
        return {
            'subject_template_name': 'auth/password_reset_subject.txt',
            'email_template_name': 'auth/password_reset_email.html',
        }
| item4/item4.net | api/auth/serializers.py | Python | agpl-3.0 | 1,576 |
from collections import OrderedDict
from django.conf import settings
from django.http import JsonResponse
from django.utils.datastructures import MultiValueDictKeyError
from rest_framework.viewsets import ViewSet
from rest_framework.decorators import list_route
from rest_api.tools import set_ikeys, split_cols
from rest_api.exceptions import (JasminSyntaxError, JasminError,
UnknownError, MissingKeyError,
MutipleValuesRequiredKeyError, ObjectNotFoundError)
# Prompt strings printed by jasmin's telnet CLI; used as pexpect patterns.
STANDARD_PROMPT = settings.STANDARD_PROMPT
INTERACTIVE_PROMPT = settings.INTERACTIVE_PROMPT
class MORouterViewSet(ViewSet):
    """Viewset for managing MO Routes via jasmin's telnet CLI.

    ``request.telnet`` (a pexpect-style session attached upstream) is
    used to drive the ``morouter`` jCli command.
    """
    lookup_field = 'order'

    def _list(self, telnet):
        "List MO router as python dict"
        telnet.sendline('morouter -l')
        telnet.expect([r'(.+)\n' + STANDARD_PROMPT])
        result = telnet.match.group(0).strip().replace("\r", '').split("\n")
        if len(result) < 3:
            # Only the echoed command and prompt: routing table is empty.
            return {'morouters': []}
        # Drop header and footer lines, normalise comma separators and
        # strip the "(!)" marker jasmin appends to non-persisted entries.
        results = [l.replace(', ', ',').replace('(!)', '')
                   for l in result[2:-2] if l]
        routers = split_cols(results)
        return {
            'morouters':
            [
                {
                    # The order column is prefixed with '#' in CLI output.
                    'order': r[0].strip().lstrip('#'),
                    'type': r[1],
                    'connectors': [c.strip() for c in r[2].split(',')],
                    'filters': [c.strip() for c in ' '.join(r[3:]).split(',')
                                ] if len(r) > 3 else []
                } for r in routers
            ]
        }

    def list(self, request):
        "List MO routers. No parameters"
        return JsonResponse(self._list(request.telnet))

    def get_router(self, telnet, order):
        "Return data for one morouter as Python dict"
        morouters = self._list(telnet)['morouters']
        # next() with a default never raises StopIteration, so the old
        # ``except StopIteration`` was dead code and unknown orders leaked
        # out as {'morouter': None}; detect the missing case explicitly.
        router = next((m for m in morouters if m['order'] == order), None)
        if router is None:
            raise ObjectNotFoundError('No MoROuter with order: %s' % order)
        return {'morouter': router}

    def retrieve(self, request, order):
        "Details for one MORouter by order (integer)"
        return JsonResponse(self.get_router(request.telnet, order))

    @list_route(methods=['delete'])
    def flush(self, request):
        "Flush entire routing table"
        telnet = request.telnet
        telnet.sendline('morouter -f')
        telnet.expect([r'(.+)\n' + STANDARD_PROMPT])
        telnet.sendline('persist\n')
        telnet.expect(r'.*' + STANDARD_PROMPT)
        return JsonResponse({'morouters': []})

    def create(self, request):
        """Create MORouter.
        Required parameters: type, order, smppconnectors, httpconnectors
        More than one connector is allowed only for RandomRoundrobinMORoute
        ---
        # YAML
        omit_serializer: true
        parameters:
        - name: type
          description: One of DefaultRoute, StaticMORoute, RandomRoundrobinMORoute
          required: true
          type: string
          paramType: form
        - name: order
          description: Router order, also used to identify router
          required: true
          type: string
          paramType: form
        - name: smppconnectors
          description: List of SMPP connector ids.
          required: false
          type: array
          paramType: form
        - name: httpconnectors
          description: List of HTTP connector ids.
          required: false
          type: array
          paramType: form
        - name: filters
          description: List of filters, required except for DefaultRoute
          required: false
          type: array
          paramType: form
        """
        telnet = request.telnet
        data = request.data
        try:
            rtype, order = data['type'], data['order']
        except KeyError:
            # Mapping access raises KeyError (or its subclass
            # MultiValueDictKeyError) — not IndexError as previously caught.
            raise MissingKeyError(
                'Missing parameter: type or order required')
        rtype = rtype.lower()
        telnet.sendline('morouter -a')
        telnet.expect(r'Adding a new MO Route(.+)\n' + INTERACTIVE_PROMPT)
        ikeys = OrderedDict({'type': rtype})
        if rtype != 'defaultroute':
            # Every route type except DefaultRoute requires filters.
            try:
                filters = data['filters'].split(',')
            except KeyError:
                # Covers MultiValueDictKeyError (QueryDict) and plain dicts.
                raise MissingKeyError('%s router requires filters' % rtype)
            ikeys['filters'] = ';'.join(filters)
        ikeys['order'] = order
        smppconnectors = data.get('smppconnectors', '')
        httpconnectors = data.get('httpconnectors', '')
        connectors = ['smpps(%s)' % c.strip()
                      for c in smppconnectors.split(',') if c.strip()
                      ] + ['http(%s)' % c for c in httpconnectors.split(',') if c.strip()]
        if rtype == 'randomroundrobinmoroute':
            # A round-robin route only makes sense with 2+ connectors.
            if len(connectors) < 2:
                raise MutipleValuesRequiredKeyError(
                    'Round Robin route requires at least two connectors')
            ikeys['connectors'] = ';'.join(connectors)
        else:
            if len(connectors) != 1:
                raise MissingKeyError('one and only one connector required')
            ikeys['connector'] = connectors[0]
        set_ikeys(telnet, ikeys)
        telnet.sendline('persist\n')
        telnet.expect(r'.*' + STANDARD_PROMPT)
        # get_router() already wraps its payload in {'morouter': ...};
        # wrapping it again previously produced a doubly nested response.
        return JsonResponse(self.get_router(telnet, order))

    def simple_morouter_action(self, telnet, action, order, return_moroute=True):
        """Run ``morouter -<action> <order>`` and translate the CLI reply."""
        telnet.sendline('morouter -%s %s' % (action, order))
        matched_index = telnet.expect([
            r'.+Successfully(.+)' + STANDARD_PROMPT,
            r'.+Unknown MO Route: (.+)' + STANDARD_PROMPT,
            r'.+(.*)' + STANDARD_PROMPT,
        ])
        if matched_index == 0:
            telnet.sendline('persist\n')
            if return_moroute:
                telnet.expect(r'.*' + STANDARD_PROMPT)
                # Fixed NameError: this branch referenced an undefined
                # variable ``fid`` instead of ``order``.
                return JsonResponse(self.get_router(telnet, order))
            else:
                return JsonResponse({'order': order})
        elif matched_index == 1:
            raise UnknownError(detail='No router:' + order)
        else:
            raise JasminError(telnet.match.group(1))

    def destroy(self, request, order):
        """Delete a morouter. One parameter required, the router identifier (a string)

        HTTP codes indicate result as follows

        - 200: successful deletion
        - 404: nonexistent router
        - 400: other error
        """
        return self.simple_morouter_action(
            request.telnet, 'r', order, return_moroute=False)
| jookies/jasmin-api | jasmin_api/rest_api/views/morouter.py | Python | apache-2.0 | 6,695 |
#!/usr/bin/python
from os import walk
from pathFind import *
from pathDraw import *
# Map directories; point workingDirectory at testingPath to run only the
# coarse checkpoint pass (the fine-grained pass below is skipped then).
testingPath = './Assets/Mappings/Testing'
mappingsPath = './Assets/Mappings'
workingDirectory = mappingsPath
# Collect file names from the top level of the working directory only.
maps = []
for (dirpath, dirnames, filenames) in walk(workingDirectory):
    maps.extend(filenames)
    break
# Process every .txt mapping file (name taken without the extension).
for mappingName in [m[:-4] for m in maps if '.txt' in m]:
    f = open('%s/%s.txt' % (workingDirectory, mappingName), 'r')
    mapping = list(f)
    f.close()
    height = len(mapping)
    # NOTE(review): width counts the trailing newline of the first row —
    # confirm pathDraw expects that.
    width = len(mapping[0])
    visited = dict()
    paths = list()
    ladders = list()
    exits = list()
    # Scan the grid for ladders ('H') and exits ('E'); cells are [x, y].
    i = 0
    for line in mapping:
        j = 0
        for char in line:
            if char == 'H':
                ladders.append([j, i])
            elif char == 'E':
                exits.append([j,i])
            j += 1
        i += 1
    # Exit order doesn't matter here, we're just trying to find an ordering of checkpoints to break the map up
    checkpoints = list()
    checkpoints.append(exits[0])
    laddersCopy = list(ladders)
    # Coarse pass: approximate iterative-deepening search hop-by-hop from
    # the last checkpoint, until an exit is reached again.
    while True:
        x = checkpoints[-1][0]
        y = checkpoints[-1][1]
        visited['%d,%d' % (x, y)] = 1
        path = None
        for i in xrange(1, 1000):
            print "Currently on mapping %s, checkpoint %d, with %d iterations" % (mappingName, len(checkpoints), i)
            path = pathFindApproxIDDFS(mapping, dict(visited), x, y, 0, 0, i)
            if path != None:
                paths.append(path)
                checkpoint = list(path[-1])
                checkpoints.append(checkpoint)
                if checkpoint in ladders:
                    # Ladder reached: continue from the next unused ladder.
                    laddersCopy.remove(checkpoint)
                    checkpoints.append(laddersCopy[0])
                break
        if checkpoints[-1] in exits:
            break
    # Then do the fine-grained search.
    print "Checkpoints in the map: ", checkpoints
    if workingDirectory != testingPath:
        visited = dict()
        paths = list()
        # Exact IDDFS between each pair of consecutive checkpoints.
        while len(checkpoints) > 1:
            goal = checkpoints[1]
            x = checkpoints[0][0]
            y = checkpoints[0][1]
            visited['%d,%d' % (x, y)] = 1
            path = None
            for i in xrange(1, 1000):
                print "Currently on mapping %s, %d checkpoints left, with %d iterations" % (mappingName, len(checkpoints), i)
                path = pathFindIDDFS(mapping, goal, dict(visited), x, y, 0, 0, i)
                if path != None:
                    paths.append(path)
                    checkpoints = checkpoints[1:]
                    if goal in ladders:
                        # A ladder checkpoint was added in pairs above;
                        # skip its partner as well.
                        checkpoints = checkpoints[1:]
                    break
    # Convert tile coordinates to pixel waypoints (16px tiles) and mark
    # traversed tiles with '=' unless they hold a special glyph.
    directions = list()
    for path in paths:
        for tile in path:
            x = tile[0]
            y = tile[1]
            directions += [(x * 16 + 8, y * 16 + 7)]
            if mapping[y][x] not in ['E', 'H', 's']:
                mapping[y] = mapping[y][:x] + '=' + mapping[y][x + 1:]
    for row in mapping:
        print row
pathDraw(directions, mappingName, height, width) | alexandermueller/PathfindEmAll | pathFindEmAll.py | Python | mit | 3,093 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Nicira Networks, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Salvatore Orlando, Nicira, Inc
#
import stubout
import fixtures
import mock
from oslo.config import cfg
from webob import exc
from neutron.common import constants
from neutron.common.test_lib import test_config
from neutron.db import api as db_api
from neutron.db import l3_db
from neutron.db import l3_gwmode_db
from neutron.db import models_v2
from neutron.extensions import l3
from neutron.extensions import l3_ext_gw_mode
from neutron.openstack.common import uuidutils
from neutron.tests import base
from neutron.tests.unit import test_db_plugin
from neutron.tests.unit import test_l3_plugin
# Shorthand for generating uuids in the fixtures below.
_uuid = uuidutils.generate_uuid

# Fixed identifiers and MAC addresses shared by the fixtures so tests can
# assert against known values.
FAKE_GW_PORT_ID = _uuid()
FAKE_GW_PORT_MAC = 'aa:bb:cc:dd:ee:ff'
FAKE_FIP_EXT_PORT_ID = _uuid()
FAKE_FIP_EXT_PORT_MAC = '11:22:33:44:55:66'
FAKE_FIP_INT_PORT_ID = _uuid()
FAKE_FIP_INT_PORT_MAC = 'aa:aa:aa:aa:aa:aa'
FAKE_ROUTER_PORT_ID = _uuid()
FAKE_ROUTER_PORT_MAC = 'bb:bb:bb:bb:bb:bb'
class StuboutFixture(fixtures.Fixture):
    """Setup stubout and add unsetAll to cleanup."""

    def setUp(self):
        # Initialize the parent fixture machinery, then create the stub
        # registry and guarantee all stubs are removed on cleanup.
        super(StuboutFixture, self).setUp()
        self.stubs = stubout.StubOutForTesting()
        self.addCleanup(self.stubs.UnsetAll)
        self.addCleanup(self.stubs.SmartUnsetAll)
def stubout_floating_ip_calls(stubs, fake_count=0):
    """Stub L3_NAT_db_mixin.get_floatingips_count to return *fake_count*."""
    stubs.Set(l3_db.L3_NAT_db_mixin, 'get_floatingips_count',
              lambda _self, _context, filters: fake_count)
class TestExtensionManager(object):
    """Extension manager that layers the ext-gw-mode attributes onto L3."""

    def get_resources(self):
        # Simulate extension of L3 attribute map
        for key in l3.RESOURCE_ATTRIBUTE_MAP.keys():
            l3.RESOURCE_ATTRIBUTE_MAP[key].update(
                l3_ext_gw_mode.EXTENDED_ATTRIBUTES_2_0.get(key, {}))
        return l3.L3.get_resources()

    def get_actions(self):
        # This extension contributes no extra actions.
        return []

    def get_request_extensions(self):
        # This extension contributes no request extensions.
        return []
# A simple class for making a concrete class out of the mixin
class TestDbPlugin(test_l3_plugin.TestL3NatPlugin,
                   l3_gwmode_db.L3_NAT_db_mixin):
    # Advertise both the base router extension and ext-gw-mode.
    supported_extension_aliases = ["router", "ext-gw-mode"]
class TestL3GwModeMixin(base.BaseTestCase):
    """White-box tests for the ext-gw-mode DB mixin (enable_snat handling)."""

    def setUp(self):
        """Build an in-memory topology: an external network with a router
        gateway port, an internal subnet with a router interface, and a
        floating IP plus the compute port it can be associated with."""
        super(TestL3GwModeMixin, self).setUp()
        stubout_fixture = self.useFixture(StuboutFixture())
        self.stubs = stubout_fixture.stubs
        self.target_object = TestDbPlugin()
        # Patch the context
        ctx_patcher = mock.patch('neutron.context', autospec=True)
        mock_context = ctx_patcher.start()
        self.addCleanup(db_api.clear_db)
        self.addCleanup(ctx_patcher.stop)
        self.context = mock_context.get_admin_context()
        # This ensure also calls to elevated work in unit tests
        self.context.elevated.return_value = self.context
        self.context.session = db_api.get_session()
        # Create sample data for tests
        self.ext_net_id = _uuid()
        self.int_net_id = _uuid()
        self.int_sub_id = _uuid()
        self.tenant_id = 'the_tenant'
        self.network = models_v2.Network(
            id=self.ext_net_id,
            tenant_id=self.tenant_id,
            admin_state_up=True,
            status=constants.NET_STATUS_ACTIVE)
        self.net_ext = l3_db.ExternalNetwork(network_id=self.ext_net_id)
        self.context.session.add(self.network)
        # The following is to avoid complains from sqlite on
        # foreign key violations
        self.context.session.flush()
        self.context.session.add(self.net_ext)
        self.router = l3_db.Router(
            id=_uuid(),
            name=None,
            tenant_id=self.tenant_id,
            admin_state_up=True,
            status=constants.NET_STATUS_ACTIVE,
            enable_snat=True,
            gw_port_id=None)
        self.context.session.add(self.router)
        self.context.session.flush()
        self.router_gw_port = models_v2.Port(
            id=FAKE_GW_PORT_ID,
            tenant_id=self.tenant_id,
            device_id=self.router.id,
            device_owner=l3_db.DEVICE_OWNER_ROUTER_GW,
            admin_state_up=True,
            status=constants.PORT_STATUS_ACTIVE,
            mac_address=FAKE_GW_PORT_MAC,
            network_id=self.ext_net_id)
        self.router.gw_port_id = self.router_gw_port.id
        self.context.session.add(self.router)
        self.context.session.add(self.router_gw_port)
        self.context.session.flush()
        self.fip_ext_port = models_v2.Port(
            id=FAKE_FIP_EXT_PORT_ID,
            tenant_id=self.tenant_id,
            admin_state_up=True,
            device_id=self.router.id,
            device_owner=l3_db.DEVICE_OWNER_FLOATINGIP,
            status=constants.PORT_STATUS_ACTIVE,
            mac_address=FAKE_FIP_EXT_PORT_MAC,
            network_id=self.ext_net_id)
        self.context.session.add(self.fip_ext_port)
        self.context.session.flush()
        self.int_net = models_v2.Network(
            id=self.int_net_id,
            tenant_id=self.tenant_id,
            admin_state_up=True,
            status=constants.NET_STATUS_ACTIVE)
        self.int_sub = models_v2.Subnet(
            id=self.int_sub_id,
            tenant_id=self.tenant_id,
            ip_version=4,
            cidr='3.3.3.0/24',
            gateway_ip='3.3.3.1',
            network_id=self.int_net_id)
        self.router_port = models_v2.Port(
            id=FAKE_ROUTER_PORT_ID,
            tenant_id=self.tenant_id,
            admin_state_up=True,
            device_id=self.router.id,
            device_owner=l3_db.DEVICE_OWNER_ROUTER_INTF,
            status=constants.PORT_STATUS_ACTIVE,
            mac_address=FAKE_ROUTER_PORT_MAC,
            network_id=self.int_net_id)
        self.router_port_ip_info = models_v2.IPAllocation(
            port_id=self.router_port.id,
            network_id=self.int_net.id,
            subnet_id=self.int_sub_id,
            ip_address='3.3.3.1')
        self.context.session.add(self.int_net)
        self.context.session.add(self.int_sub)
        self.context.session.add(self.router_port)
        self.context.session.add(self.router_port_ip_info)
        self.context.session.flush()
        self.fip_int_port = models_v2.Port(
            id=FAKE_FIP_INT_PORT_ID,
            tenant_id=self.tenant_id,
            admin_state_up=True,
            device_id='something',
            device_owner='compute:nova',
            status=constants.PORT_STATUS_ACTIVE,
            mac_address=FAKE_FIP_INT_PORT_MAC,
            network_id=self.int_net_id)
        self.fip_int_ip_info = models_v2.IPAllocation(
            port_id=self.fip_int_port.id,
            network_id=self.int_net.id,
            subnet_id=self.int_sub_id,
            ip_address='3.3.3.3')
        self.fip = l3_db.FloatingIP(
            id=_uuid(),
            floating_ip_address='1.1.1.2',
            floating_network_id=self.ext_net_id,
            floating_port_id=FAKE_FIP_EXT_PORT_ID,
            fixed_port_id=None,
            fixed_ip_address=None,
            router_id=None)
        self.context.session.add(self.fip_int_port)
        self.context.session.add(self.fip_int_ip_info)
        self.context.session.add(self.fip)
        self.context.session.flush()
        self.fip_request = {'port_id': FAKE_FIP_INT_PORT_ID,
                            'tenant_id': self.tenant_id}

    def _reset_ext_gw(self):
        # Reset external gateway
        self.router.gw_port_id = None
        self.context.session.add(self.router)
        self.context.session.flush()

    def _test_update_router_gw(self, gw_info, expected_enable_snat):
        """Apply *gw_info* to the fixture router and verify the resulting
        gateway port and enable_snat flag."""
        self.target_object._update_router_gw_info(
            self.context, self.router.id, gw_info)
        router = self.target_object._get_router(
            self.context, self.router.id)
        try:
            self.assertEqual(FAKE_GW_PORT_ID,
                             router.gw_port.id)
            self.assertEqual(FAKE_GW_PORT_MAC,
                             router.gw_port.mac_address)
        except AttributeError:
            # gw_port is None when the router has no external gateway.
            self.assertIsNone(router.gw_port)
        self.assertEqual(expected_enable_snat, router.enable_snat)

    def test_update_router_gw_with_gw_info_none(self):
        self._test_update_router_gw(None, True)

    def test_update_router_gw_with_network_only(self):
        info = {'network_id': self.ext_net_id}
        self._test_update_router_gw(info, True)

    def test_update_router_gw_with_snat_disabled(self):
        info = {'network_id': self.ext_net_id,
                'enable_snat': False}
        self._test_update_router_gw(info, False)

    def test_make_router_dict_no_ext_gw(self):
        self._reset_ext_gw()
        router_dict = self.target_object._make_router_dict(self.router)
        self.assertEqual(None, router_dict[l3.EXTERNAL_GW_INFO])

    def test_make_router_dict_with_ext_gw(self):
        router_dict = self.target_object._make_router_dict(self.router)
        self.assertEqual({'network_id': self.ext_net_id,
                          'enable_snat': True},
                         router_dict[l3.EXTERNAL_GW_INFO])

    def test_make_router_dict_with_ext_gw_snat_disabled(self):
        self.router.enable_snat = False
        router_dict = self.target_object._make_router_dict(self.router)
        self.assertEqual({'network_id': self.ext_net_id,
                          'enable_snat': False},
                         router_dict[l3.EXTERNAL_GW_INFO])

    def test_build_routers_list_no_ext_gw(self):
        self._reset_ext_gw()
        router_dict = self.target_object._make_router_dict(self.router)
        routers = self.target_object._build_routers_list([router_dict], [])
        self.assertEqual(1, len(routers))
        router = routers[0]
        self.assertIsNone(router.get('gw_port'))
        self.assertIsNone(router.get('enable_snat'))

    def test_build_routers_list_with_ext_gw(self):
        router_dict = self.target_object._make_router_dict(self.router)
        routers = self.target_object._build_routers_list(
            [router_dict], [self.router.gw_port])
        self.assertEqual(1, len(routers))
        router = routers[0]
        self.assertIsNotNone(router.get('gw_port'))
        self.assertEqual(FAKE_GW_PORT_ID, router['gw_port']['id'])
        self.assertTrue(router.get('enable_snat'))

    def test_build_routers_list_with_ext_gw_snat_disabled(self):
        self.router.enable_snat = False
        router_dict = self.target_object._make_router_dict(self.router)
        routers = self.target_object._build_routers_list(
            [router_dict], [self.router.gw_port])
        self.assertEqual(1, len(routers))
        router = routers[0]
        self.assertIsNotNone(router.get('gw_port'))
        self.assertEqual(FAKE_GW_PORT_ID, router['gw_port']['id'])
        self.assertFalse(router.get('enable_snat'))
class ExtGwModeTestCase(test_db_plugin.NeutronDbPluginV2TestCase,
                        test_l3_plugin.L3NatTestCaseMixin):
    """API-level tests exercising external_gateway_info with enable_snat."""

    def setUp(self):
        # Store l3 resource attribute map as it's will be updated
        self._l3_attribute_map_bk = {}
        for item in l3.RESOURCE_ATTRIBUTE_MAP:
            self._l3_attribute_map_bk[item] = (
                l3.RESOURCE_ATTRIBUTE_MAP[item].copy())
        test_config['plugin_name_v2'] = (
            'neutron.tests.unit.test_extension_ext_gw_mode.TestDbPlugin')
        test_config['extension_manager'] = TestExtensionManager()
        # for these tests we need to enable overlapping ips
        cfg.CONF.set_default('allow_overlapping_ips', True)
        super(ExtGwModeTestCase, self).setUp()
        self.addCleanup(self.restore_l3_attribute_map)

    def restore_l3_attribute_map(self):
        # Undo the in-place mutation done by TestExtensionManager.
        l3.RESOURCE_ATTRIBUTE_MAP = self._l3_attribute_map_bk

    def tearDown(self):
        super(ExtGwModeTestCase, self).tearDown()

    def _set_router_external_gateway(self, router_id, network_id,
                                     snat_enabled=None,
                                     expected_code=exc.HTTPOk.code,
                                     neutron_context=None):
        """PUT the router's external_gateway_info, optionally including
        enable_snat, asserting the expected HTTP status code."""
        ext_gw_info = {'network_id': network_id}
        # Need to set enable_snat only when it was explicitly requested.
        if snat_enabled in (True, False):
            ext_gw_info['enable_snat'] = snat_enabled
        return self._update('routers', router_id,
                            {'router': {'external_gateway_info':
                                        ext_gw_info}},
                            expected_code=expected_code,
                            neutron_context=neutron_context)

    def test_router_create_show_no_ext_gwinfo(self):
        name = 'router1'
        tenant_id = _uuid()
        expected_value = [('name', name), ('tenant_id', tenant_id),
                          ('admin_state_up', True), ('status', 'ACTIVE'),
                          ('external_gateway_info', None)]
        with self.router(name=name, admin_state_up=True,
                         tenant_id=tenant_id) as router:
            res = self._show('routers', router['router']['id'])
            for k, v in expected_value:
                self.assertEqual(res['router'][k], v)

    def _test_router_create_show_ext_gwinfo(self, snat_input_value,
                                            snat_expected_value):
        """Create a router with a gateway, then verify the enable_snat
        value reported by a subsequent show."""
        name = 'router1'
        tenant_id = _uuid()
        with self.subnet() as s:
            ext_net_id = s['subnet']['network_id']
            self._set_net_external(ext_net_id)
            input_value = {'network_id': ext_net_id}
            if snat_input_value in (True, False):
                input_value['enable_snat'] = snat_input_value
            expected_value = [('name', name), ('tenant_id', tenant_id),
                              ('admin_state_up', True), ('status', 'ACTIVE'),
                              ('external_gateway_info',
                               {'network_id': ext_net_id,
                                'enable_snat': snat_expected_value})]
            with self.router(
                name=name, admin_state_up=True, tenant_id=tenant_id,
                external_gateway_info=input_value) as router:
                res = self._show('routers', router['router']['id'])
                for k, v in expected_value:
                    self.assertEqual(res['router'][k], v)

    def test_router_create_show_ext_gwinfo_default(self):
        self._test_router_create_show_ext_gwinfo(None, True)

    def test_router_create_show_ext_gwinfo_with_snat_enabled(self):
        self._test_router_create_show_ext_gwinfo(True, True)

    def test_router_create_show_ext_gwinfo_with_snat_disabled(self):
        self._test_router_create_show_ext_gwinfo(False, False)

    def _test_router_update_ext_gwinfo(self, snat_input_value,
                                       snat_expected_value):
        """Update a router's gateway info and verify the resulting
        enable_snat value, then remove the gateway again."""
        with self.router() as r:
            with self.subnet() as s:
                ext_net_id = s['subnet']['network_id']
                self._set_net_external(ext_net_id)
                self._set_router_external_gateway(
                    r['router']['id'], ext_net_id,
                    snat_enabled=snat_input_value)
                body = self._show('routers', r['router']['id'])
                res_gw_info = body['router']['external_gateway_info']
                self.assertEqual(res_gw_info['network_id'], ext_net_id)
                self.assertEqual(res_gw_info['enable_snat'],
                                 snat_expected_value)
                self._remove_external_gateway_from_router(
                    r['router']['id'], ext_net_id)

    def test_router_update_ext_gwinfo_default(self):
        self._test_router_update_ext_gwinfo(None, True)

    def test_router_update_ext_gwinfo_with_snat_enabled(self):
        self._test_router_update_ext_gwinfo(True, True)

    def test_router_update_ext_gwinfo_with_snat_disabled(self):
        self._test_router_update_ext_gwinfo(False, False)
| ykaneko/neutron | neutron/tests/unit/test_extension_ext_gw_mode.py | Python | apache-2.0 | 16,524 |
#!/usr/bin/env python
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""
This script is the wrapper that runs the low-bandwidth audio test.
After running the test, post-process steps for calculating audio quality of the
output files will be performed.
"""
import argparse
import collections
import logging
import os
import re
import shutil
import subprocess
import sys
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
SRC_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, os.pardir, os.pardir))
NO_TOOLS_ERROR_MESSAGE = (
'Could not find PESQ or POLQA at %s.\n'
'\n'
'To fix this run:\n'
' python %s %s\n'
'\n'
'Note that these tools are Google-internal due to licensing, so in order to '
'use them you will have to get your own license and manually put them in the '
'right location.\n'
'See https://cs.chromium.org/chromium/src/third_party/webrtc/tools_webrtc/'
'download_tools.py?rcl=bbceb76f540159e2dba0701ac03c514f01624130&l=13')
def _LogCommand(command):
  """Log *command* (a subprocess argv list) and return it unchanged.

  Lets calls be wrapped inline: subprocess.check_call(_LogCommand(cmd)).
  """
  logging.info('Running %r', command)
  return command
def _ParseArgs():
  """Parse and return the command-line arguments for this wrapper."""
  parser = argparse.ArgumentParser(description='Run low-bandwidth audio tests.')
  parser.add_argument('build_dir',
                      help='Path to the build directory (e.g. out/Release).')
  parser.add_argument('--remove', action='store_true',
                      help='Remove output audio files after testing.')
  parser.add_argument('--android', action='store_true',
                      help='Perform the test on a connected Android device instead.')
  parser.add_argument('--adb-path', help='Path to adb binary.', default='adb')
  parser.add_argument('--num-retries', default='0',
                      help='Number of times to retry the test on Android.')
  parser.add_argument('--isolated-script-test-perf-output', default=None,
                      help='Path to store perf results in histogram proto format.')
  parser.add_argument('--extra-test-args', default=[], action='append',
                      help='Extra args to path to the test binary.')

  # Ignore Chromium-specific flags
  parser.add_argument('--test-launcher-summary-output',
                      type=str, default=None)
  args = parser.parse_args()

  return args
def _GetPlatform():
if sys.platform == 'win32':
return 'win'
elif sys.platform == 'darwin':
return 'mac'
elif sys.platform.startswith('linux'):
return 'linux'
def _GetExtension():
return '.exe' if sys.platform == 'win32' else ''
def _GetPathToTools():
  """Locate the PESQ and POLQA binaries.

  Returns:
    (pesq_path, polqa_path), where either element is None if the
    corresponding binary is not present on disk. Logs a help message
    when the expected tools are missing.
  """
  tools_dir = os.path.join(SRC_DIR, 'tools_webrtc')
  toolchain_dir = os.path.join(tools_dir, 'audio_quality')

  platform = _GetPlatform()
  ext = _GetExtension()

  pesq_path = os.path.join(toolchain_dir, platform, 'pesq' + ext)
  if not os.path.isfile(pesq_path):
    pesq_path = None

  polqa_path = os.path.join(toolchain_dir, platform, 'PolqaOem64' + ext)
  if not os.path.isfile(polqa_path):
    polqa_path = None

  # POLQA is not expected on Mac, so only complain about a missing PESQ
  # there; on other platforms both tools are expected.
  if (platform != 'mac' and not polqa_path) or not pesq_path:
    logging.error(NO_TOOLS_ERROR_MESSAGE,
                  toolchain_dir,
                  os.path.join(tools_dir, 'download_tools.py'),
                  toolchain_dir)

  return pesq_path, polqa_path
def ExtractTestRuns(lines, echo=False):
  """Extracts information about tests from the output of a test runner.

  Produces tuples
  (android_device, test_name, reference_file, degraded_file, cur_perf_results).
  """
  # Output from Android has a prefix with the device name.
  android_prefix_re = r'(?:I\b.+\brun_tests_on_device\((.+?)\)\s*)?'
  # Compile the pattern once instead of rebuilding it for every line of
  # test-runner output.
  test_re = re.compile(r'^' + android_prefix_re + (r'TEST (\w+) ([^ ]+?) ([^\s]+)'
                                                  r' ?([^\s]+)?\s*$'))
  for line in lines:
    if echo:
      sys.stdout.write(line)

    match = test_re.search(line)
    if match:
      yield match.groups()
def _GetFile(file_path, out_dir, move=False,
             android=False, adb_prefix=('adb',)):
  """Copy, move or adb-pull *file_path* into *out_dir*.

  When *android* is set, the file is pulled from the connected device
  (and removed there if *move* is True). Otherwise a local copy or move
  is done, unless source and destination already are the same file.
  Returns the resulting path inside *out_dir*.
  """
  out_file_name = os.path.basename(file_path)
  out_file_path = os.path.join(out_dir, out_file_name)

  if android:
    # Pull the file from the connected Android device.
    adb_command = adb_prefix + ('pull', file_path, out_dir)
    subprocess.check_call(_LogCommand(adb_command))
    if move:
      # Remove that file.
      adb_command = adb_prefix + ('shell', 'rm', file_path)
      subprocess.check_call(_LogCommand(adb_command))
  elif os.path.abspath(file_path) != os.path.abspath(out_file_path):
    if move:
      shutil.move(file_path, out_file_path)
    else:
      shutil.copy(file_path, out_file_path)

  return out_file_path
def _RunPesq(executable_path, reference_file, degraded_file,
             sample_rate_hz=16000):
  """Run PESQ on a reference/degraded wav pair.

  Returns {'pesq_mos': (raw_mos, 'unitless')} on success, or an empty
  dict (after logging the tool's last output line) on failure.
  """
  directory = os.path.dirname(reference_file)
  # Both files must live in the same directory (see cwd workaround below).
  assert os.path.dirname(degraded_file) == directory

  # Analyze audio.
  command = [executable_path, '+%d' % sample_rate_hz,
             os.path.basename(reference_file),
             os.path.basename(degraded_file)]
  # Need to provide paths in the current directory due to a bug in PESQ:
  # On Mac, for some 'path/to/file.wav', if 'file.wav' is longer than
  # 'path/to', PESQ crashes.
  out = subprocess.check_output(_LogCommand(command),
                                cwd=directory, stderr=subprocess.STDOUT)

  # Find the scores in stdout of PESQ.
  match = re.search(
      r'Prediction \(Raw MOS, MOS-LQO\):\s+=\s+([\d.]+)\s+([\d.]+)', out)
  if match:
    raw_mos, _ = match.groups()
    return {'pesq_mos': (raw_mos, 'unitless')}
  else:
    logging.error('PESQ: %s', out.splitlines()[-1])
    return {}
def _RunPolqa(executable_path, reference_file, degraded_file):
  """Run POLQA on a reference/degraded wav pair.

  Returns {'polqa_mos_lqo': (score, 'unitless')} on success, or an empty
  dict on failure. A license error (exit code 2) is only a warning so
  unlicensed machines skip the POLQA tests instead of failing.
  """
  # Analyze audio.
  command = [executable_path, '-q', '-LC', 'NB',
             '-Ref', reference_file, '-Test', degraded_file]
  process = subprocess.Popen(_LogCommand(command),
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  out, err = process.communicate()

  # Find the scores in stdout of POLQA.
  match = re.search(r'\bMOS-LQO:\s+([\d.]+)', out)

  if process.returncode != 0 or not match:
    if process.returncode == 2:
      logging.warning('%s (2)', err.strip())
      logging.warning('POLQA license error, skipping test.')
    else:
      logging.error('%s (%d)', err.strip(), process.returncode)
    return {}

  mos_lqo, = match.groups()
  return {'polqa_mos_lqo': (mos_lqo, 'unitless')}
def _MergeInPerfResultsFromCcTests(histograms, run_perf_results_file):
  """Merge histogram-proto perf results emitted by the test binary into
  *histograms*; no-op when the file is empty."""
  # Imported lazily: the catapult tracing path is only added to sys.path
  # at runtime by _ConfigurePythonPath.
  from tracing.value import histogram_set

  cc_histograms = histogram_set.HistogramSet()
  with open(run_perf_results_file, 'rb') as f:
    contents = f.read()
    if not contents:
      return

    cc_histograms.ImportProto(contents)

  histograms.Merge(cc_histograms)
# Describes one audio-quality analyzer: its name (used as the test-case
# prefix), the callable that runs it, the path to its executable, and the
# sample rate it operates at.
Analyzer = collections.namedtuple('Analyzer', ['name', 'func', 'executable',
                                               'sample_rate_hz'])
def _ConfigurePythonPath(args):
  """Make the catapult tracing package and the generated histogram proto
  stubs importable, failing fast if the proto has not been built."""
  script_dir = os.path.dirname(os.path.realpath(__file__))
  checkout_root = os.path.abspath(
      os.path.join(script_dir, os.pardir, os.pardir))

  # TODO(https://crbug.com/1029452): Use a copy rule and add these from the out
  # dir like for the third_party/protobuf code.
  sys.path.insert(0, os.path.join(checkout_root, 'third_party', 'catapult',
                                  'tracing'))

  # The low_bandwidth_audio_perf_test gn rule will build the protobuf stub for
  # python, so put it in the path for this script before we attempt to import
  # it.
  histogram_proto_path = os.path.join(
      os.path.abspath(args.build_dir), 'pyproto', 'tracing', 'tracing', 'proto')
  sys.path.insert(0, histogram_proto_path)
  proto_stub_path = os.path.join(os.path.abspath(args.build_dir), 'pyproto')
  sys.path.insert(0, proto_stub_path)

  # Fail early in case the proto hasn't been built.
  try:
    import histogram_pb2
  except ImportError as e:
    logging.exception(e)
    raise ImportError('Could not import histogram_pb2. You need to build the '
                      'low_bandwidth_audio_perf_test target before invoking '
                      'this script. Expected to find '
                      'histogram_pb2.py in %s.' % histogram_proto_path)
def main():
  """Run the low-bandwidth audio test binary once per analyzer, score the
  produced audio files, and optionally write perf histograms.

  Returns the exit status of the last test-binary run, or 1 when the
  PESQ binary cannot be found.
  """
  # pylint: disable=W0101
  logging.basicConfig(level=logging.INFO)
  logging.info('Invoked with %s', str(sys.argv))

  args = _ParseArgs()

  _ConfigurePythonPath(args)

  # Import catapult modules here after configuring the pythonpath.
  from tracing.value import histogram_set
  from tracing.value.diagnostics import reserved_infos
  from tracing.value.diagnostics import generic_set

  pesq_path, polqa_path = _GetPathToTools()
  if pesq_path is None:
    return 1

  out_dir = os.path.join(args.build_dir, '..')
  if args.android:
    test_command = [os.path.join(args.build_dir, 'bin',
                                 'run_low_bandwidth_audio_test'),
                    '-v', '--num-retries', args.num_retries]
  else:
    test_command = [os.path.join(args.build_dir, 'low_bandwidth_audio_test')]

  analyzers = [Analyzer('pesq', _RunPesq, pesq_path, 16000)]
  # Check if POLQA can run at all, or skip the 48 kHz tests entirely.
  example_path = os.path.join(SRC_DIR, 'resources',
                              'voice_engine', 'audio_tiny48.wav')
  if polqa_path and _RunPolqa(polqa_path, example_path, example_path):
    analyzers.append(Analyzer('polqa', _RunPolqa, polqa_path, 48000))

  histograms = histogram_set.HistogramSet()
  for analyzer in analyzers:
    # Start the test executable that produces audio files.
    test_process = subprocess.Popen(
        _LogCommand(test_command + [
            '--sample_rate_hz=%d' % analyzer.sample_rate_hz,
            '--test_case_prefix=%s' % analyzer.name,
        ] + args.extra_test_args),
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    perf_results_file = None
    try:
      lines = iter(test_process.stdout.readline, '')
      for result in ExtractTestRuns(lines, echo=True):
        (android_device, test_name, reference_file, degraded_file,
         perf_results_file) = result

        adb_prefix = (args.adb_path,)
        if android_device:
          adb_prefix += ('-s', android_device)

        reference_file = _GetFile(reference_file, out_dir,
                                  android=args.android, adb_prefix=adb_prefix)
        degraded_file = _GetFile(degraded_file, out_dir, move=True,
                                 android=args.android, adb_prefix=adb_prefix)

        analyzer_results = analyzer.func(analyzer.executable,
                                         reference_file, degraded_file)
        for metric, (value, units) in analyzer_results.items():
          hist = histograms.CreateHistogram(metric, units, [value])
          user_story = generic_set.GenericSet([test_name])
          hist.diagnostics[reserved_infos.STORIES.name] = user_story

          # Output human readable results. Written as a print call so the
          # script stays valid under both Python 2 and Python 3 (this was
          # a py2-only print statement before).
          print('RESULT %s: %s= %s %s' % (metric, test_name, value, units))

        if args.remove:
          os.remove(reference_file)
          os.remove(degraded_file)
    finally:
      test_process.terminate()
    if perf_results_file:
      perf_results_file = _GetFile(perf_results_file, out_dir, move=True,
                                   android=args.android, adb_prefix=adb_prefix)
      _MergeInPerfResultsFromCcTests(histograms, perf_results_file)
      if args.remove:
        os.remove(perf_results_file)

  if args.isolated_script_test_perf_output:
    with open(args.isolated_script_test_perf_output, 'wb') as f:
      f.write(histograms.AsProto().SerializeToString())

  return test_process.wait()


if __name__ == '__main__':
  sys.exit(main())
| endlessm/chromium-browser | third_party/webrtc/audio/test/low_bandwidth_audio_test.py | Python | bsd-3-clause | 11,840 |
import blue_yellow_app.infrastructure.static_cache as static_cache
import pyramid.renderers
import pyramid.httpexceptions as exc
from blue_yellow_app.infrastructure.supressor import suppress
import blue_yellow_app.infrastructure.cookie_auth as cookie_auth
from blue_yellow_app.services.account_service import AccountService
class BaseController:
    """Common base class for the app's Pyramid view controllers.

    Provides the shared chameleon layout macro, the static-cache busting
    helper, merged request data, a redirect helper and cookie-based auth
    properties.
    """

    # This is better than @suppress,
    # see https://github.com/mikeckennedy/python-for-entrepreneurs-course-demos/issues/24
    __autoexpose__ = False

    def __init__(self, request):
        self.request = request
        self.build_cache_id = static_cache.build_cache_id

        # Expose the shared layout macro so each template can wrap itself.
        layout_render = pyramid.renderers.get_renderer(
            'blue_yellow_app:templates/shared/_layout.pt')
        impl = layout_render.implementation()
        self.layout = impl.macros['layout']

    @property
    def is_logged_in(self):
        # NOTE(review): hard-coded False; presumably a placeholder — confirm
        # whether this should be `self.logged_in_user_id is not None`.
        return False

    # noinspection PyMethodMayBeStatic
    @suppress()
    def redirect(self, to_url, permanent=False):
        """Abort the current request with an HTTP redirect to `to_url`.

        :raises exc.HTTPMovedPermanently: when permanent is True (301).
        :raises exc.HTTPFound: otherwise (302).
        """
        if permanent:
            raise exc.HTTPMovedPermanently(to_url)

        raise exc.HTTPFound(to_url)

    @property
    def data_dict(self):
        """Query string, form POST and URL-match data merged into one dict.

        Later sources win on key collisions (matchdict has highest priority).
        """
        data = dict()
        data.update(self.request.GET)
        data.update(self.request.POST)
        data.update(self.request.matchdict)
        return data

    @property
    def logged_in_user_id(self):
        """The user id from the auth cookie, or None when not authenticated."""
        return cookie_auth.get_user_id_via_auth_cookie(self.request)

    @property
    def logged_in_user(self):
        """The account of the authenticated user, or None if not logged in."""
        uid = self.logged_in_user_id
        if not uid:
            return None

        # Bug fix: the lookup result was previously discarded, so this
        # property always returned None even for logged-in users.
        return AccountService.find_account_by_id(uid)
| mikeckennedy/python-for-entrepreneurs-course-demos | 15-deployment/blue_yellow_app_deployment/blue_yellow_app/controllers/base_controller.py | Python | mit | 1,592 |
#!/usr/bin/env python3
###############################################################
# Copyright 2019 Lawrence Livermore National Security, LLC
# (c.f. AUTHORS, NOTICE.LLNS, COPYING)
#
# This file is part of the Flux resource manager framework.
# For details, see https://github.com/flux-framework.
#
# SPDX-License-Identifier: LGPL-3.0
###############################################################
import gc
import errno
import unittest
import flux
import flux.constants
from flux.core.inner import ffi
from flux.future import Future
from subflux import rerun_under_flux
def __flux_size():
    # Number of broker ranks the flux test instance should run with
    # (consumed by rerun_under_flux in the __main__ block).
    return 2
class TestHandle(unittest.TestCase):
    """Exercise flux RPC futures (get, then-callbacks, streaming, chaining)
    against a live broker instance."""

    @classmethod
    def setUpClass(self):
        """Create a handle, connect to flux"""
        self.f = flux.Flux()
        self.ping_payload = {"seq": 1, "pad": "stuff"}

    def assert_dict_contains(self, subset, dictionary):
        """Assert every (key, value) pair of `subset` occurs in `dictionary`.

        Replacement for unittest's assertDictContainsSubset, which was
        deprecated in Python 3.2 and removed in Python 3.12.
        """
        self.assertEqual(dictionary, {**dictionary, **subset})

    def test_01_rpc_get(self):
        future = self.f.rpc("broker.ping", self.ping_payload)
        resp_payload = future.get()
        self.assert_dict_contains(self.ping_payload, resp_payload)

    def test_02_get_flux(self):
        future = self.f.rpc("broker.ping", self.ping_payload)
        future.get_flux()
        # force a full garbage collection pass to test that the handle is not destructed
        gc.collect(2)
        resp_payload = future.get()
        self.assert_dict_contains(self.ping_payload, resp_payload)

    def test_02_future_wait_for(self):
        future = self.f.rpc("broker.ping", self.ping_payload)
        try:
            future.wait_for(5)
            resp_payload = future.get()
        except EnvironmentError as e:
            if e.errno == errno.ETIMEDOUT:
                self.fail(msg="future fulfillment timed out")
            else:
                raise
        self.assert_dict_contains(self.ping_payload, resp_payload)

    def test_03_future_then(self):
        """Register a 'then' cb and run the reactor to ensure it runs"""
        cb_ran = [False]

        def then_cb(future, arg):
            flux_handle = future.get_flux()
            reactor = future.get_reactor()
            try:
                resp_payload = future.get()
                cb_ran[0] = True
                self.assert_dict_contains(arg, resp_payload)
            finally:
                # ensure that reactor is always stopped, avoiding a hung test
                flux_handle.reactor_stop(reactor)

        self.f.rpc(b"broker.ping", self.ping_payload).then(
            then_cb, arg=self.ping_payload
        )
        # force a full garbage collection pass to test that our anonymous RPC doesn't disappear
        gc.collect(2)
        ret = self.f.reactor_run()
        self.assertGreaterEqual(ret, 0, msg="Reactor exited with {}".format(ret))
        self.assertTrue(cb_ran[0], msg="Callback did not run successfully")

    def test_03_future_then_exception(self):
        def then_cb(future):
            raise RuntimeError("this is a test")

        self.f.rpc("broker.ping", self.ping_payload).then(then_cb)
        # the callback's exception should propagate out of reactor_run
        with self.assertRaises(RuntimeError):
            self.f.reactor_run()

    def test_03_future_then_varargs(self):
        cb_ran = [False]

        def then_cb(future, one, two, three):
            cb_ran[0] = True
            try:
                self.assertEqual(one, "one")
                self.assertEqual(two, "two")
                self.assertEqual(three, "three")
            finally:
                future.get_flux().reactor_stop()

        self.f.rpc("broker.ping").then(then_cb, "one", "two", "three")
        gc.collect(2)
        ret = self.f.reactor_run()
        self.assertGreaterEqual(ret, 0, msg="Reactor exited with < 0")
        self.assertTrue(cb_ran[0], msg="Callback did not run successfully")

    def test_03_future_then_noargs(self):
        cb_ran = [False]

        def then_cb(future):
            cb_ran[0] = True
            future.get_flux().reactor_stop()

        self.f.rpc("broker.ping").then(then_cb)
        gc.collect(2)
        ret = self.f.reactor_run()
        self.assertGreaterEqual(ret, 0, msg="Reactor exited with < 0")
        self.assertTrue(cb_ran[0], msg="Callback did not run successfully")

    def test_03_future_then_default_args(self):
        cb_ran = [False]

        def then_cb(future, args=None):
            cb_ran[0] = True
            try:
                self.assertIsNone(args)
            finally:
                future.get_flux().reactor_stop()

        self.f.rpc("broker.ping").then(then_cb)
        gc.collect(2)
        ret = self.f.reactor_run()
        self.assertGreaterEqual(ret, 0, msg="Reactor exited with < 0")
        self.assertTrue(cb_ran[0], msg="Callback did not run successfully")

    def test_03_future_then_kwargs(self):
        cb_ran = [False]

        def then_cb(future, val1=None, val2=None, val3="default"):
            cb_ran[0] = True
            try:
                self.assertTrue(val1)
                self.assertTrue(val2)
                # val3 gets default value
                self.assertEqual(val3, "default")
            finally:
                future.get_flux().reactor_stop()

        self.f.rpc("broker.ping").then(then_cb, val2=True, val1=True)
        gc.collect(2)
        ret = self.f.reactor_run()
        self.assertGreaterEqual(ret, 0, msg="Reactor exited with < 0")
        self.assertTrue(cb_ran[0], msg="Callback did not run successfully")

    def test_04_double_future_then(self):
        """Register two 'then' cbs and ensure it throws an exception"""
        with self.assertRaises(EnvironmentError) as cm:
            rpc = self.f.rpc(b"broker.ping")
            rpc.then(lambda x: None)
            rpc.then(lambda x: None)
        self.assertEqual(cm.exception.errno, errno.EEXIST)

    def test_05_future_error_string(self):
        with self.assertRaises(EnvironmentError) as cm:
            payload = {"J": "", "urgency": -1000, "flags": 0}
            future = self.f.rpc("job-ingest.submit", payload=payload)
            future.get()
        self.assertEqual(cm.exception.errno, errno.EINVAL)
        # Ensure that the result of flux_future_error_string propagated up
        self.assertEqual(cm.exception.strerror, future.error_string())
        # assertRegex is the supported spelling (assertRegexpMatches was a
        # deprecated alias, removed in Python 3.12)
        self.assertRegex(cm.exception.strerror, "urgency range is .*")

    def test_06_blocking_methods(self):
        future = Future(self.f.future_create(ffi.NULL, ffi.NULL))

        self.assertFalse(future.is_ready())
        with self.assertRaises(EnvironmentError) as cm:
            future.wait_for(timeout=0)
        self.assertEqual(cm.exception.errno, errno.ETIMEDOUT)

        future.pimpl.fulfill(ffi.NULL, ffi.NULL)
        self.assertTrue(future.is_ready())
        try:
            future.wait_for(0)
        except EnvironmentError as e:
            self.fail("future.wait_for raised an unexpected exception: {}".format(e))

    def test_07_streaming_rpcs(self):
        def continuation_cb(future, arg):
            arg["count"] += 1
            if arg["count"] >= arg["target"]:
                self.f.reactor_stop()
            future.reset()

        def service_cb(fh, t, msg, arg):
            for x in range(msg.payload["count"]):
                fh.respond(msg, {"seq": x})

        self.f.service_register("rpctest").get()
        watcher = self.f.msg_watcher_create(
            service_cb, flux.constants.FLUX_MSGTYPE_REQUEST, "rpctest.multi"
        )
        self.assertIsNotNone(watcher)
        watcher.start()

        arg = {"count": 0, "target": 3}
        self.f.rpc(
            "rpctest.multi",
            {"count": arg["target"]},
            flags=flux.constants.FLUX_RPC_STREAMING,
        ).then(continuation_cb, arg=arg)
        self.f.reactor_run()
        self.assertEqual(arg["count"], arg["target"])

        watcher.stop()
        watcher.destroy()
        fut = self.f.service_unregister("rpctest")
        self.assertEqual(self.f.future_get(fut, ffi.NULL), 0)

    def test_08_future_from_future(self):
        # a future wrapping another future is fulfilled with it
        orig_fut = Future(self.f.future_create(ffi.NULL, ffi.NULL))
        new_fut = Future(orig_fut)
        self.assertFalse(new_fut.is_ready())
        orig_fut.pimpl.fulfill(ffi.NULL, ffi.NULL)
        self.assertTrue(new_fut.is_ready())

        # the wrapper must keep the original alive even after `del`
        orig_fut = self.f.rpc("broker.ping", payload=self.ping_payload)
        new_fut = Future(orig_fut)
        del orig_fut
        new_fut.get()
        # Future's `get` returns `None`, so just test that it is fulfilled
        self.assertTrue(new_fut.is_ready())

        # errors propagate through the wrapper as well
        orig_fut = self.f.rpc("foo.bar")
        new_fut = Future(orig_fut)
        del orig_fut
        with self.assertRaises(EnvironmentError):
            new_fut.get()
if __name__ == "__main__":
    # Re-exec this script inside a flux instance of the required size;
    # once inside, run the tests with TAP output for the test harness.
    if rerun_under_flux(__flux_size()):
        from pycotap import TAPTestRunner

        unittest.main(testRunner=TAPTestRunner())
| chu11/flux-core | t/python/t0012-futures.py | Python | lgpl-3.0 | 8,826 |
# Module author metadata.
__author__ = 'Amy'
| ptphp/PtServer | library/AppVersion.py | Python | bsd-3-clause | 19 |
from __future__ import print_function
import pprint
import traceback
import sys
# Announce plugin start-up on stdout (visible in the host's scripting console).
print("Starting plugin CircularZone")

try:
    from .CircularZone import *
    # Register the action plugin with the host application.
    # NOTE(review): looks like a KiCad action plugin (CircularZone presumably
    # subclasses pcbnew.ActionPlugin) — confirm against CircularZone.py.
    CircularZone().register()
except Exception as e:
    # A broken plugin must not take down the host: dump the traceback and the
    # exception object instead of propagating.
    traceback.print_exc(file=sys.stdout)
    pprint.pprint(e)
| jsreynaud/kicad-action-scripts | CircularZone/__init__.py | Python | gpl-3.0 | 271 |
#!/home/neale/.virtualenvs/aws_ssh_config/bin/python2
'''
@author Bommarito Consulting, LLC
@date 2012-12-23
Generate .ssh/config lines from EC2 instance information.
'''
# Imports
import boto.ec2
import os
import ConfigParser
from os.path import expanduser
import re
# Default SSH user for instances without a 'User' tag.
defaultUser = 'ec2-user'
# Region used when a profile has no 'region' option.
defaultRegion = 'eu-west-1'
# Directory holding SSH keys and the generated per-profile config files.
userHome = expanduser("~")
defaultKeyPath = os.path.join(userHome, '.ssh')
def generate_profile_config(region_name, profile_name):
    '''
    Write ~/.ssh/<profile_name>_config with one Host entry per reachable EC2
    instance (and per container listed in its 'MC Containers' tag) found in
    the given region under the given boto profile.
    '''
    ec2 = boto.ec2.connect_to_region(region_name=region_name,
                                     profile_name=profile_name)

    # Get list of reservations.
    reservationList = ec2.get_all_instances()

    # Collected as (host_alias, ip, key_name, user) tuples.
    instanceData = []

    try:
        # Iterate over reservations
        for reservation in reservationList:
            # Iterate over instances
            for instance in reservation.instances:
                user = instance.tags.get('User', defaultUser)
                name = instance.tags.get('Name', instance.id)
                # Only instances with a public IP are reachable over SSH.
                if instance.ip_address:
                    instanceData.append((name.replace(' ', '_'),
                                         instance.ip_address,
                                         instance.key_name, user))
                    # Containers hosted on the instance share its address,
                    # key and user.
                    if 'MC Containers' in instance.tags:
                        containers = instance.tags['MC Containers'].split()
                        for container in containers:
                            instanceData.append((container.replace(' ', '_'),
                                                 instance.ip_address,
                                                 instance.key_name, user))

        # Generate .ssh/config output
        config_file_name = defaultKeyPath + '/' + profile_name + '_config'
        with open(config_file_name, 'w') as f:
            print("Generating " + config_file_name)
            f.write("#============ GENERATED DATA START ==================\n")
            f.write("UserKnownHostsFile=/dev/null\n")
            f.write("StrictHostKeyChecking=no\n\n")

            for data in instanceData:
                f.write("Host {0}\n".format(data[0]))
                f.write(" HostName {host_name}\n".format(host_name=data[1]))
                f.write(" User {user}\n".format(user=data[3]))
                # key_name can legitimately be None (no keypair attached).
                if data[2] is not None:
                    f.write(" IdentityFile {identity_file}\n".format(
                        identity_file=os.path.join(defaultKeyPath,
                                                   "{key_name}".format(
                                                       key_name=data[2]))))
                f.write(" ControlPath ~/.ssh/ec2-{0}:%p.%r\n".format(
                    data[0]))
                f.write("\n")
            f.write("#============ GENERATED DATA END ==================\n")
            f.write("# vim: ft=sshconfig\n")
    except Exception as inst:
        # Report and continue with the next profile. Bug fix: the old code
        # printed dir(inst) debug junk and used the long-deprecated
        # `inst.message` attribute (absent on modern exceptions).
        print("Error... %s" % inst)
def main():
    '''
    Main method: generate one SSH config file per profile found in
    ~/.aws/config.
    '''
    config = ConfigParser.ConfigParser()
    # `with` ensures the config file handle is closed (was leaked before).
    with open(userHome + '/.aws/config') as config_file:
        config.readfp(config_file)
    for section in config.sections():
        profile_name = re.sub('^profile ', '', section)
        # Bug fix: ConfigParser.get() has no fallback positional parameter --
        # the third positional argument is `raw`, so the previous
        # `config.get(section, 'region', defaultRegion)` did NOT provide a
        # default and raised NoOptionError for profiles without a region.
        if config.has_option(section, 'region'):
            region_name = config.get(section, 'region')
        else:
            region_name = defaultRegion
        generate_profile_config(region_name, profile_name)
# Script entry point.
if __name__ == "__main__":
    main()
| sw1nn/dotfiles | bin/generate_aws_ssh_config.py | Python | epl-1.0 | 3,419 |
from os import mkdir, stat
from os.path import join, exists
import tempfile
import shutil
import sys
import hashlib
import bz2
from zipfile import ZipFile
from nose.tools import *
from pixiepatch import *
from pixiepatch.bz2compressor import BZ2Compressor
from pixiepatch.ziphandler import ZIPHandler
class Base(object):
    """Shared fixture: a temp dir holding two source trees and two dist
    trees, removed again on teardown."""

    def setUp(self):
        self.dir = tempfile.mkdtemp()
        self.sources = join(self.dir, 'source-1'), join(self.dir, 'source-2')
        self.dists = join(self.dir, 'dist-1'), join(self.dir, 'dist-2')
        for name in self.sources + self.dists:
            mkdir(name)

    def tearDown(self):
        shutil.rmtree(self.dir)
class TestPlainEmpty(Base):
    """A distribution built from an empty source tree still gets version
    and manifest files."""

    def setUp(self):
        Base.setUp(self)
        self.pp = PixiePatch()
        self.pp.make_distribution('1', self.sources[0], self.dists[0])

    def test_version(self):
        # The dist contains a plain-text version file ending in a newline.
        version_file = join(self.dists[0], 'version')
        assert exists(version_file)
        with open(version_file, 'r') as f:
            assert f.read() == '1\n'

    def test_manifest(self):
        # The manifest records the version and an empty file table.
        manifest_file = join(self.dists[0], 'manifest')
        assert exists(manifest_file)
        manifest = self.pp.read_manifest(manifest_file)
        assert manifest['version'] == '1'
        assert len(manifest['files']) == 0
class TestPlainSingle(Base):
    """Distribution of a single-file source tree: the file is copied and its
    hash/size recorded in the manifest."""

    def setUp(self):
        Base.setUp(self)
        self.pp = PixiePatch()
        with open(join(self.sources[0], 'a'), 'w') as f:
            f.write('test\n' * 100)
        self.pp.make_distribution('1', self.sources[0], self.dists[0])

    def test_version(self):
        version_file = join(self.dists[0], 'version')
        assert exists(version_file)
        with open(version_file, 'r') as f:
            assert f.read() == '1\n'

    def test_manifest(self):
        manifest_file = join(self.dists[0], 'manifest')
        assert exists(manifest_file)
        manifest = self.pp.read_manifest(manifest_file)
        assert manifest['version'] == '1'
        assert len(manifest['files']) == 1
        # The manifest entry carries the content hash, download size and
        # (absent here) delta information.
        a = manifest['files']['a']
        assert a['hash'] == hashlib.sha256('test\n' * 100).hexdigest()
        assert a['dlsize'] == 500
        assert a['delta'] is None
        # The file itself is copied verbatim into the dist tree.
        with open(join(self.dists[0], 'a'), 'r') as f:
            assert f.read() == 'test\n' * 100
class TestPlainMulti(Base):
    """Two successive distributions: changed files get new hashes, and files
    matching a registered ignore pattern are excluded."""

    def setUp(self):
        Base.setUp(self)
        self.pp = PixiePatch()
        # 'c' exists in source-2 but must not appear in the manifest.
        self.pp.register_ignore_pattern('^c$')
        with open(join(self.sources[0], 'a'), 'w') as f:
            f.write('test\n' * 100)
        with open(join(self.sources[0], 'b'), 'w') as f:
            f.write('v1\n' * 100)
        with open(join(self.sources[1], 'a'), 'w') as f:
            f.write('test\n' * 100)
        with open(join(self.sources[1], 'b'), 'w') as f:
            f.write('v2\n' * 100)
        with open(join(self.sources[1], 'c'), 'w') as f:
            f.write('ignored\n')
        self.pp.make_distribution('1', self.sources[0], self.dists[0])
        # Second dist is built with the first as its predecessor.
        self.pp.make_distribution('2', self.sources[1], self.dists[1], self.dists[0])

    def test_version(self):
        version_file = join(self.dists[1], 'version')
        assert exists(version_file)
        with open(version_file, 'r') as f:
            assert f.read() == '2\n'

    def test_manifest(self):
        manifest_file = join(self.dists[1], 'manifest')
        assert exists(manifest_file)
        manifest = self.pp.read_manifest(manifest_file)
        assert manifest['version'] == '2'
        # 'c' is ignored, so only 'a' and 'b' are listed.
        assert len(manifest['files']) == 2
        a = manifest['files']['a']
        assert a['hash'] == hashlib.sha256('test\n' * 100).hexdigest()
        assert a['dlsize'] == 500
        assert a['delta'] is None
        b = manifest['files']['b']
        assert b['hash'] == hashlib.sha256('v2\n' * 100).hexdigest()
        assert b['dlsize'] == 300
        assert b['delta'] is None
class SimpleSigner(Signer):
    """Toy Signer that appends a fixed signature string to the payload."""

    def __init__(self, sig):
        self.sig = sig

    def sign(self, contents):
        """Return the payload with the signature appended."""
        return contents + self.sig

    def verify(self, contents):
        """Strip and check the trailing signature.

        Raises VerificationError when the signature does not match.
        """
        if contents.endswith(self.sig):
            return contents[:-len(self.sig)]
        raise VerificationError()
class TestSigner(Base):
    """Manifests are signed on write and verified on read."""

    def setUp(self):
        Base.setUp(self)
        self.pp = PixiePatch(signer=SimpleSigner('valid'))
        self.pp.make_distribution('1', self.sources[0], self.dists[0])

    def test_manifest(self):
        # The stored manifest ends with the signature, and reading it back
        # verifies and strips it.
        manifest_file = join(self.dists[0], 'manifest')
        assert exists(manifest_file)
        with open(manifest_file, 'r') as f:
            assert f.read().endswith('valid')
        manifest = self.pp.read_manifest(manifest_file)
        assert manifest['version'] == '1'
        assert len(manifest['files']) == 0

    @raises(VerificationError)
    def test_verification(self):
        # Changing the expected signature must make verification fail;
        # restore it afterwards so other tests are unaffected.
        try:
            self.pp.signer.sig = 'invalid'
            manifest_file = join(self.dists[0], 'manifest')
            self.pp.read_manifest(manifest_file)
        finally:
            self.pp.signer.sig = 'valid'
class TestCompressor(Base):
    """Files are stored compressed and dlsize reflects the compressed size."""

    def setUp(self):
        Base.setUp(self)
        with open(join(self.sources[0], 'a'), 'w') as f:
            f.write('test\n' * 100)
        self.pp = PixiePatch(compressor=BZ2Compressor())
        self.pp.make_distribution('1', self.sources[0], self.dists[0])

    def test_compressed(self):
        # The dist file gets a .bz2 suffix and must be smaller than the
        # 500-byte (highly repetitive) original.
        file = join(self.dists[0], 'a.bz2')
        assert exists(file)
        file_size = stat(file).st_size
        assert file_size < 500
        # dlsize in the manifest is the on-disk (compressed) size.
        manifest_file = join(self.dists[0], 'manifest.bz2')
        manifest = self.pp.read_manifest(manifest_file)
        assert manifest['files']['a']['dlsize'] == file_size
class TestZipHandler(Base):
    """Archives with a registered handler are unpacked and distributed as
    individual member files."""

    def setUp(self):
        Base.setUp(self)
        with ZipFile(join(self.sources[0], 'a.zip'), 'w') as f:
            f.writestr('a', 'test\n' * 100)
            f.writestr('b', 'b\n' * 100)
        self.pp = PixiePatch()
        self.pp.register_archive_handler('.zip', ZIPHandler())
        self.pp.make_distribution('1', self.sources[0], self.dists[0])

    def test_archives(self):
        # The zip is exploded into a directory; members become plain files
        # and separate manifest entries.
        file = join(self.dists[0], 'a.zip', 'a')
        assert exists(file)
        with open(file, 'r') as f:
            assert f.read() == 'test\n' * 100
        manifest_file = join(self.dists[0], 'manifest')
        manifest = self.pp.read_manifest(manifest_file)
        assert len(manifest['files']) == 2
| dgym/pixiepatch | tests/test_distribution.py | Python | mit | 6,437 |
import os
import re
import sys
from coalib.misc import Constants
from coalib.output.ConfWriter import ConfWriter
from coalib.output.printers.LOG_LEVEL import LOG_LEVEL
from coalib.parsing.CliParsing import parse_cli, check_conflicts
from coalib.parsing.ConfParser import ConfParser
from coalib.settings.Section import Section
from coalib.settings.SectionFilling import fill_settings
from coalib.settings.Setting import Setting
def merge_section_dicts(lower, higher):
    """
    Merge ``higher``'s sections into ``lower`` in place.

    Settings from ``higher`` take precedence over those already present in
    ``lower``.

    :param lower:  A section dict; receives the merged result.
    :param higher: A section dict whose values win on conflicts.
    :return:       ``lower``, now holding the merged sections.
    """
    for key, high_section in higher.items():
        low_section = lower.get(key)
        if low_section is None:
            # No counterpart in lower: adopt the section (no deep copy needed).
            lower[key] = high_section
        else:
            low_section.update(high_section, ignore_defaults=True)
    return lower
def load_config_file(filename, log_printer, silent=False):
    """
    Load sections from a config file.

    When the file does not exist, a dict with a single empty default section
    is returned; depending on ``silent`` a warning is printed (for the
    default coafile) or an error is printed and the process exits.

    :param filename:    The file to load settings from.
    :param log_printer: The log printer to log the warning/error to (in case).
    :param silent:      Whether or not to warn the user/exit if the file
                        doesn't exist.
    :raises SystemExit: Exits when given filename is invalid and is not the
                        default coafile. Only raised when ``silent`` is
                        ``False``.
    """
    filename = os.path.abspath(filename)

    try:
        return ConfParser().parse(filename)
    except FileNotFoundError:
        pass

    # The file is missing: complain unless asked to stay quiet.
    if not silent:
        if os.path.basename(filename) == Constants.default_coafile:
            log_printer.warn("The default coafile " +
                            repr(Constants.default_coafile) + " was not "
                            "found. Ignoring it.")
        else:
            log_printer.err("The requested coafile " + repr(filename) +
                            " does not exist.")
            sys.exit(2)

    return {"default": Section("default")}
def save_sections(sections):
    """
    Saves the given sections if they are to be saved.

    :param sections: A section dict.
    """
    default_section = sections["default"]
    try:
        # `save` may be a boolean ("save back to the config file") ...
        if bool(default_section.get("save", "false")):
            conf_writer = ConfWriter(
                str(default_section.get("config", Constants.default_coafile)))
        else:
            return
    except ValueError:
        # ... or a file path, in which case bool() raises ValueError and we
        # save to that explicitly given file instead.
        conf_writer = ConfWriter(str(default_section.get("save", ".coafile")))

    conf_writer.write_sections(sections)
    conf_writer.close()
def warn_nonexistent_targets(targets, sections, log_printer):
    """
    Emit a warning for every requested target section that does not exist.

    :param targets:     The targets to check.
    :param sections:    The sections to search. (Dict.)
    :param log_printer: The log printer to warn to.
    """
    for target in targets:
        if target in sections:
            continue
        log_printer.warn(
            "The requested section '{section}' is not existent. "
            "Thus it cannot be executed.".format(section=target))
def load_configuration(arg_list, log_printer):
    """
    Parses the CLI args and loads the config file accordingly, taking
    default_coafile and the users .coarc into account.

    :param arg_list:    The list of command line arguments.
    :param log_printer: The LogPrinter object for logging.
    :return:            A tuple holding (sections: dict(str, Section),
                        targets: list(str)). (Types indicated after colon.)
    """
    cli_sections = parse_cli(arg_list=arg_list)
    check_conflicts(cli_sections)

    # --find-config without an explicit --config: locate the nearest .coafile
    # by walking up from the current directory.
    if (
            bool(cli_sections["default"].get("find_config", "False")) and
            str(cli_sections["default"].get("config")) == ""):
        cli_sections["default"].add_or_create_setting(
            Setting("config", re.escape(find_user_config(os.getcwd()))))

    targets = []
    # We don't want to store targets argument back to file, thus remove it
    for item in list(cli_sections["default"].contents.pop("targets", "")):
        targets.append(item.lower())

    if bool(cli_sections["default"].get("no_config", "False")):
        sections = cli_sections
    else:
        # Precedence (lowest to highest): system coafile, user .coarc,
        # project coafile, CLI arguments.
        default_sections = load_config_file(Constants.system_coafile,
                                            log_printer)

        user_sections = load_config_file(
            Constants.user_coafile,
            log_printer,
            silent=True)

        default_config = str(
            default_sections["default"].get("config", ".coafile"))
        user_config = str(user_sections["default"].get(
            "config", default_config))
        config = os.path.abspath(
            str(cli_sections["default"].get("config", user_config)))

        try:
            save = bool(cli_sections["default"].get("save", "False"))
        except ValueError:
            # A file is deposited for the save parameter, means we want to save
            # but to a specific file.
            save = True

        coafile_sections = load_config_file(config, log_printer, silent=save)

        sections = merge_section_dicts(default_sections, user_sections)

        sections = merge_section_dicts(sections, coafile_sections)

        sections = merge_section_dicts(sections, cli_sections)

    # Every non-default section inherits from the default section.
    for section in sections:
        if section != "default":
            sections[section].defaults = sections["default"]

    str_log_level = str(sections["default"].get("log_level", "")).upper()
    log_printer.log_level = LOG_LEVEL.str_dict.get(str_log_level,
                                                   LOG_LEVEL.INFO)

    return sections, targets
def find_user_config(file_path, max_trials=10):
    """
    Find the most suitable user config file for ``file_path`` by walking up
    the directory tree one level at a time, looking for a ``.coafile``.

    The walk stops at the filesystem root, at the home directory, or after
    ``max_trials`` levels — whichever comes first.

    :param file_path:  The path of the file whose user config needs to be found
    :param max_trials: The maximum number of directories to go down to.
    :return:           The config file's path, or "" if none was found.
    """
    file_path = os.path.normpath(os.path.abspath(os.path.expanduser(
        file_path)))
    current = (file_path if os.path.isdir(file_path)
               else os.path.dirname(file_path))
    home = os.path.expanduser("~")
    previous = None

    for _ in range(max_trials):
        # Stop once the walk no longer ascends, or the previous level was home.
        if current == previous or previous == home:
            break
        candidate = os.path.join(current, ".coafile")
        if os.path.isfile(candidate):
            return candidate
        previous, current = current, os.path.dirname(current)

    return ""
def get_config_directory(section):
    """
    Determine the directory the section's config file lives in.

    Falls back to the current working directory when no section is given, or
    when the section has no ``config`` setting but the cwd holds a
    ``.coafile``; returns ``None`` otherwise.
    """
    if section is None:
        return os.getcwd()

    try:
        config_path = str(section["config"])
    except IndexError:
        # No explicit config setting: use the cwd iff it contains a .coafile.
        if os.path.isfile(os.path.join(os.getcwd(), '.coafile')):
            return os.getcwd()
        return None

    if os.path.isdir(config_path):
        return config_path
    return os.path.dirname(config_path)
def gather_configuration(acquire_settings,
                         log_printer,
                         autoapply=None,
                         arg_list=None):
    """
    Loads all configuration files, retrieves bears and all needed
    settings, saves back if needed and warns about non-existent targets.

    This function:

    -  Reads and merges all settings in sections from
        -  Default config
        -  User config
        -  Configuration file
        -  CLI
    -  Collects all the bears
    -  Fills up all needed settings
    -  Writes back the new sections to the configuration file if needed
    -  Gives all information back to caller

    :param acquire_settings: The method to use for requesting settings. It will
                             get a parameter which is a dictionary with the
                             settings name as key and a list containing a
                             description in [0] and the names of the bears
                             who need this setting in all following indexes.
    :param log_printer:      The log printer to use for logging. The log level
                             will be adjusted to the one given by the section.
    :param autoapply:        Set whether to autoapply patches. This is
                             overridable via any configuration file/CLI.
    :param arg_list:         CLI args to use
    :return:                 A tuple with the following contents:
                              * A dictionary with the sections
                              * Dictionary of list of local bears for each
                                section
                              * Dictionary of list of global bears for each
                                section
                              * The targets list
    """
    # Note: arg_list can also be []. Hence we cannot use
    # `arg_list = arg_list or default_list`
    arg_list = sys.argv[1:] if arg_list is None else arg_list
    sections, targets = load_configuration(arg_list, log_printer)
    local_bears, global_bears = fill_settings(sections,
                                              acquire_settings,
                                              log_printer)
    save_sections(sections)
    warn_nonexistent_targets(targets, sections, log_printer)

    # The caller's autoapply=False only acts as a default: an explicit
    # `autoapply` setting in any config/CLI still wins.
    if autoapply is not None:
        if not autoapply and 'autoapply' not in sections['default']:
            sections['default']['autoapply'] = "False"

    return (sections,
            local_bears,
            global_bears,
            targets)
| MattAllmendinger/coala | coalib/settings/ConfigurationGathering.py | Python | agpl-3.0 | 10,084 |
#!/usr/bin/env python
import argparse
import sys
import time
class MemoryInfo2Comments:
    """Convert a ROM address info listing into `comment=` statements
    understood by 6809dasm.pl."""

    def __init__(self, rom_info_file):
        self.mem_info = self._get_rom_info(rom_info_file)

    def eval_addr(self, addr):
        """Parse a hex address like '$f0' (or 'f0') into an int."""
        return int(addr.strip("$"), 16)

    def _get_rom_info(self, rom_info_file):
        """Read '<start>[-<end>];<comment>' lines into a list of
        (start_addr, end_addr, comment) tuples; lines without ';' are
        skipped, and a missing end address defaults to the start."""
        sys.stderr.write(
            f"Read ROM Info file: {rom_info_file.name!r}\n"
        )
        entries = []
        next_update = time.time() + 0.5
        for line_no, line in enumerate(rom_info_file):
            # Throttled progress indicator for large listings.
            if time.time() > next_update:
                sys.stderr.write(
                    f"\rRead {line_no:d} lines..."
                )
                sys.stderr.flush()
                next_update = time.time() + 0.5

            addr_part, sep, comment = line.partition(";")
            if not sep:
                continue

            start_raw, _, end_raw = addr_part.partition("-")
            start_addr = self.eval_addr(start_raw)
            end_addr = self.eval_addr(end_raw) if end_raw else start_addr

            entries.append(
                (start_addr, end_addr, comment.strip())
            )
        sys.stderr.write(
            f"ROM Info file: {rom_info_file.name!r} readed.\n"
        )
        return entries

    def create_comments(self, outfile):
        """Write one escaped `comment=` continuation line per entry."""
        for start_addr, _end_addr, comment in self.mem_info:
            escaped = comment.replace('"', '\\"').replace('$', '\\$')
            outfile.write(
                f'\tcomment=0x{start_addr:x},"{escaped}" \\\n'
            )
def main(args):
    """Convert the ROM info from args.infile into 6809dasm comment lines
    written to args.outfile."""
    MemoryInfo2Comments(args.infile).create_comments(args.outfile)
def get_cli_args():
    """Build the argument parser and evaluate the command line.

    Both the input and the output file are optional positionals and default
    to stdin/stdout.
    """
    parser = argparse.ArgumentParser(
        description="create comment statements from rom info for 6809dasm.pl"
    )
    parser.add_argument(
        'infile', nargs='?', type=argparse.FileType('r'), default=sys.stdin,
        help="ROM Addresses info file or stdin"
    )
    parser.add_argument(
        'outfile', nargs='?', type=argparse.FileType('w'), default=sys.stdout,
        help="output file or stdout"
    )
    return parser.parse_args()
# Script entry point: parse the CLI and run the conversion.
if __name__ == '__main__':
    # sys.argv += ["../ROM Addresses/Dragon32.txt"]
    args = get_cli_args()
    main(args)
| jedie/DragonPy | dragonpy/Dragon64/6809dasm_comments.py | Python | gpl-3.0 | 2,755 |
import re, sys, time, os
import functools as fu
import sublime, sublime_plugin
from copy import copy
from .lib.misc import *
from .lib import kill_ring
from .lib import isearch
import Default.paragraph as paragraph
from . import sbp_layout as ll
# repeatable commands: built-ins that honor the emacs numeric (prefix)
# argument by being re-run N times via sbp_do_times (see CmdWatcher).
repeatable_cmds = set(['move', 'left_delete', 'right_delete', 'undo', 'redo'])

# built-in commands we need to do ensure_visible after being run
# REMIND: I think we can delete this.
built_in_ensure_visible_cmds = set(['move', 'move_to'])
class ViewWatcher(sublime_plugin.EventListener):
    """Tracks view lifecycle and selection events to keep sublemacspro state
    (incremental search, emacs active mark, pinned status) in sync with
    Sublime Text."""

    def __init__(self, *args, **kwargs):
        super(ViewWatcher, self).__init__(*args, **kwargs)
        # Number of dedup passes scheduled but not yet run (see on_post_save).
        self.pending_dedups = 0

    def on_close(self, view):
        # Let the per-view state cache drop its entry for this view.
        ViewState.on_view_closed(view)

    def on_activated(self, view):
        update_pinned_status(view)

    def on_deactivated(self, view):
        self.disable_empty_active_mark(view)

    def on_activated_async(self, view):
        info = isearch.info_for(view)
        if info and not view.settings().get("is_widget"):
            # stop the search if we activated a new view in this window
            info.done()

    def on_query_context(self, view, key, operator, operand, match_all):
        # Answer the custom keymap context queries used by our key bindings.
        def test(a):
            if operator == sublime.OP_EQUAL:
                return a == operand
            if operator == sublime.OP_NOT_EQUAL:
                return a != operand
            return False

        if key == "i_search_active":
            return test(isearch.info_for(view) is not None)
        if key == "sbp_has_active_mark":
            return test(CmdUtil(view).state.active_mark)
        if key == "sbp_has_visible_selection":
            return test(view.sel()[0].size() > 1)
        if key == "sbp_use_alt_bindings":
            return test(settings_helper.get("sbp_use_alt_bindings"))
        if key == "sbp_use_super_bindings":
            return test(settings_helper.get("sbp_use_super_bindings"))
        if key == "sbp_alt+digit_inserts":
            return test(settings_helper.get("sbp_alt+digit_inserts") or not settings_helper.get("sbp_use_alt_bindings"))
        if key == 'sbp_has_prefix_argument':
            return test(CmdUtil(view).has_prefix_arg())
        if key == "sbp_catchall":
            return True

    def on_post_save(self, view):
        # Schedule a dedup, but do not do it NOW because it seems to cause a crash if, say, we're
        # saving all the buffers right now. So we schedule it for the future.
        self.pending_dedups += 1

        def doit():
            # Only the last scheduled pass actually runs the dedup.
            self.pending_dedups -= 1
            if self.pending_dedups == 0:
                dedup_views(sublime.active_window())

        sublime.set_timeout(doit, 50)

    #
    # Turn off active mark mode in all the views related to this view.
    #
    # REMIND: Sadly this is called N times for the N views that are related to the specified view,
    # and then we iterate through all N views. So this is N-squared sadness, for usually 2 or fewer
    # views ...
    #
    def on_modified(self, view):
        self.disable_empty_active_mark(view, False)

    def disable_empty_active_mark(self, view, must_be_empty = True):
        # When must_be_empty is True only views whose regions are all empty
        # have their active mark mode turned off.
        for related_view in ViewState.most_recent_related_view(view):
            util = CmdUtil(related_view)
            selection = related_view.sel()
            regions = list(selection)
            if not must_be_empty or util.all_empty_regions(regions):
                util.toggle_active_mark_mode(False)
                ViewState.get(related_view).this_cmd = None
#
# CmdWatcher watches all the commands and tries to correctly process the following situations:
#
# - canceling i-search if another window command is performed or a mouse drag starts
# - override commands and run them N times if there is a numeric argument supplied
# - if transient mark mode, automatically extend the mark when using certain commands like forward
# word or character
#
class CmdWatcher(sublime_plugin.EventListener):
    """Watches all commands in order to:

    - cancel i-search when another window command runs or a mouse drag starts
    - rewrite commands so they run N times when a numeric (prefix) argument
      was supplied
    - automatically extend the selection while the emacs mark is active for
      certain built-in movement commands
    """

    def __init__(self, *args, **kwargs):
        super(CmdWatcher, self).__init__(*args, **kwargs)

    def on_post_window_command(self, window, cmd, args):
        # update_pinned_status(window.active_view())
        info = isearch.info_for(window)
        if info is None:
            return None

        # Some window commands take us to new view. Here's where we abort the isearch if that
        # happens.
        if window.active_view() != info.view:
            info.done()

    #
    # Override some commands to execute them N times if the numeric argument is supplied.
    #
    def on_text_command(self, view, cmd, args):
        # escape the current isearch if one is in progress, unless the command is already related
        # to isearch
        if isearch.info_for(view) is not None:
            if cmd not in ('sbp_inc_search', 'sbp_inc_search_escape', 'drag_select'):
                return ('sbp_inc_search_escape', {'next_cmd': cmd, 'next_args': args})
            return

        vs = ViewState.get(view)

        if args is None:
            args = {}

        # first keep track of this_cmd and last_cmd (if command starts with "sbp_" it's handled
        # elsewhere)
        if not cmd.startswith("sbp_"):
            vs.this_cmd = cmd

        #
        # Process events that create a selection. The hard part is making it work with the emacs
        # region.
        #
        if cmd == 'drag_select':
            # NOTE: This is called only when you click, NOT when you drag. So if you triple click
            # it's called three times.

            # NOTE: remember the view that performed the drag_select because of the
            # on_selection_modified bug of using the wrong view if the same view is displayed more
            # than once
            self.drag_select_view = view

            # cancel isearch if necessary
            info = isearch.info_for(view)
            if info:
                info.done()

            # Set drag_count to 0 when first drag_select command occurs.
            if 'by' not in args:
                vs.drag_count = 0
        else:
            self.drag_select_view = None

        if cmd in ('move', 'move_to') and vs.active_mark and not args.get('extend', False):
            # this is necessary or else the built-in commands (C-f, C-b) will not move when there
            # is an existing selection
            args['extend'] = True
            return (cmd, args)

        # now check for numeric argument and rewrite some commands as necessary
        if not vs.argument_supplied:
            return None

        if cmd in repeatable_cmds:
            # wrap the command in sbp_do_times so it runs |count| times
            count = vs.get_count()
            args.update({
                'cmd': cmd,
                '_times': abs(count),
            })
            # a negative count reverses direction-aware commands
            if count < 0 and 'forward' in args:
                args['forward'] = not args['forward']
            return ("sbp_do_times", args)
        elif cmd == 'scroll_lines':
            args['amount'] *= vs.get_count()
            return (cmd, args)

    #
    # Post command processing: deal with active mark and resetting the numeric argument.
    #
    def on_post_text_command(self, view, cmd, args):
        vs = ViewState.get(view)
        util = CmdUtil(view)
        if vs.active_mark and vs.this_cmd != 'drag_select' and vs.last_cmd == 'drag_select':
            # if we just finished a mouse drag, make sure active mark mode is off
            if cmd != "context_menu":
                util.toggle_active_mark_mode(False)

        # reset numeric argument (if command starts with "sbp_" this is handled elsewhere)
        if not cmd.startswith("sbp_"):
            vs.argument_value = 0
            vs.argument_supplied = False
            vs.last_cmd = cmd

        if vs.active_mark and cmd != 'drag_select':
            util.set_cursors(util.get_regions())

    #
    # Process the selection if it was created from a drag_select (mouse dragging) command.
    #
    # REMIND: This iterates all related views because sublime notifies for the same view N times,
    # if there are N separate views open on the same buffer.
    #
    def on_selection_modified(self, active_view):
        for view in ViewState.most_recent_related_view(active_view):
            vs = ViewState.get(view)
            selection = view.sel()
            if len(selection) == 1 and vs.this_cmd == 'drag_select':
                cm = CmdUtil(view, vs)
                # # REMIND: we cannot rely on drag_count unfortunately because if you have the same
                # # buffer in multiple views, they each get notified.
                # if vs.drag_count >= 2 and not vs.active_mark:
                #     # wait until selection is at least 1 character long before activating
                #     region = view.sel()[0]
                #     if region.size() >= 1:
                #         cm.set_mark([sublime.Region(region.a, region.b)], and_selection=False)
                #         vs.active_mark = True
                # elif vs.drag_count == 0:
                #     cm.toggle_active_mark_mode(False)
                # vs.drag_count += 1

                # update the mark ring: the drag anchor becomes the mark
                sel = selection[0]
                vs.mark_ring.set([sublime.Region(sel.a, sel.a)], True)
class WindowCmdWatcher(sublime_plugin.EventListener):
    """Watches window commands to pre-compute the target pane for
    sbp_pane_cmd move operations."""

    def __init__(self, *args, **kwargs):
        super(WindowCmdWatcher, self).__init__(*args, **kwargs)

    def on_window_command(self, window, cmd, args):
        # REMIND - JP: Why is this code here? Can't this be done in the SbpPaneCmd class?
        # Pre-compute the destination pane for a move so the command itself
        # does not recurse while working out where to go.
        if cmd == "sbp_pane_cmd" and args and args['cmd'] == 'move' and 'next_pane' not in args:
            manager = ll.LayoutManager(window.layout())
            if args["direction"] == 'next':
                target = manager.next(window.active_group())
            else:
                target = manager.next(window.active_group(), -1)
            args["next_pane"] = target
            return cmd, args
class SbpChainCommand(SbpTextCommand):
    """A command that easily runs a sequence of other commands."""

    def run_cmd(self, util, commands, ensure_point_visible=False):
        """Run each entry of ``commands`` in order; an entry is a dict with
        either a 'window_command' or a 'command' key plus its 'args'."""
        for step in commands:
            if 'window_command' in step:
                util.run_window_command(step['window_command'], step['args'])
            elif 'command' in step:
                util.run_command(step['command'], step['args'])
        if ensure_point_visible:
            util.ensure_visible(sublime.Region(util.get_point()))
#
# Calls run command a specified number of times.
#
class SbpDoTimesCommand(SbpTextCommand):
    """Calls a command the specified number of times."""

    def run_cmd(self, util, cmd, _times, **args):
        view = self.view
        window = view.window()
        visible_before = view.visible_region()

        def repeat():
            # for i in range(_times):
            #     window.run_command(cmd, args)
            # REMIND: window.run_command is much slower and I cannot remember why I used
            # window.run_command...
            for _ in range(_times):
                util.run_command(cmd, args)

        # undo/redo are deferred slightly; everything else runs synchronously
        if cmd in ('redo', 'undo'):
            sublime.set_timeout(repeat, 10)
        else:
            repeat()

        cursor = util.get_last_cursor()
        if not visible_before.contains(cursor.b):
            util.ensure_visible(cursor, True)
class SbpShowScopeCommand(SbpTextCommand):
    """Shows the syntax scope at point in the status bar and the console."""

    def run_cmd(self, util, direction=1):
        point = util.get_point()
        scope = self.view.scope_name(point)
        extent = self.view.extract_scope(point)
        message = "%d bytes: %s" % (extent.size(), scope)
        print(message)
        util.set_status(message)
#
# Implements moving by words, emacs style.
#
class SbpMoveWordCommand(SbpTextCommand):
    """Implements moving by words, emacs style."""
    is_ensure_visible_cmd = True

    def find_by_class_fallback(self, view, point, forward, classes, seperators):
        # Linear-scan fallback for view.find_by_class: step one character at
        # a time until a position whose classification matches ``classes`` is
        # found, or the buffer boundary is reached.
        # NOTE: ``seperators`` (sic) is accepted for signature compatibility
        # but is not consulted here.
        if forward:
            delta = 1
            end_position = self.view.size()
            if point > end_position:
                point = end_position
        else:
            delta = -1
            end_position = 0
            if point < end_position:
                point = end_position

        while point != end_position:
            # stop at the first position whose classification matches
            if view.classify(point) & classes != 0:
                return point
            point += delta
        return point

    def find_by_class_native(self, view, point, forward, classes, separators):
        # Thin wrapper over the built-in view.find_by_class.
        return view.find_by_class(point, forward, classes, separators)

    def run_cmd(self, util, direction=1):
        """Move every cursor by the prefix count of words; the sign of
        (count * direction) selects forward or backward motion."""
        view = self.view

        separators = settings_helper.get("sbp_word_separators", default_sbp_word_separators)

        # determine the direction
        count = util.get_count() * direction
        forward = count > 0
        count = abs(count)

        def call_find_by_class(point, classes, separators):
            '''
            This is a small wrapper that maps to the right find_by_class call
            depending on the version of ST installed
            '''
            return self.find_by_class_native(view, point, forward, classes, separators)

        def move_word0(cursor, first=False):
            point = cursor.b
            if forward:
                # ensure we're at a word start (unless already on a word
                # char on the first iteration), then skip to its end
                if not first or not util.is_word_char(point, True, separators):
                    point = call_find_by_class(point, sublime.CLASS_WORD_START, separators)
                point = call_find_by_class(point, sublime.CLASS_WORD_END, separators)
            else:
                if not first or not util.is_word_char(point, False, separators):
                    point = call_find_by_class(point, sublime.CLASS_WORD_END, separators)
                point = call_find_by_class(point, sublime.CLASS_WORD_START, separators)

            return sublime.Region(point, point)

        for c in range(count):
            util.for_each_cursor(move_word0, first=(c == 0))
#
# Advance to the beginning (or end if going backward) word unless already positioned at a word
# character. This can be used as setup for commands like upper/lower/capitalize words. This ignores
# the argument count.
#
class SbpMoveBackToIndentation(SbpTextCommand):
    """Move each cursor to hard beginning-of-line, then forward past any
    leading spaces and tabs."""

    def run_cmd(self, util, direction=1):
        view = self.view

        def skip_leading_whitespace(cursor):
            position = cursor.begin()
            while util.is_one_of(position, " \t"):
                position += 1
            return position

        util.run_command("move_to", {"to": "hardbol", "extend": False})
        util.for_each_cursor(skip_leading_whitespace)
#
# Perform the uppercase/lowercase/capitalize commands on all the current cursors. If use_region is
# true, the command will be applied to the regions, not to words. The regions are either existing
# visible selection, OR, the emacs region(s) which might not be visible. If there are no non-empty
# regions and use_region=True, this command is a no-op.
#
class SbpChangeCaseCommand(SbpTextCommand):
    """Perform upper/lower/title/underscore/camel case commands on all the
    current cursors.

    If use_region is true, the command is applied to the regions, not to
    words.  The regions are either the existing visible selection, OR the
    emacs region(s) which might not be visible.  If there are no non-empty
    regions and use_region=True, this command is a no-op.
    """

    # camelCase boundary -> position for inserting an underscore
    re_to_underscore = re.compile('((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))')
    # _x -> X when converting snake_case to camelCase
    re_to_camel = re.compile(r'(?!^)_([a-zA-Z])')
    # re_to_camel = re.compile('((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))')

    def underscore(self, text):
        """Convert camelCase ``text`` to snake_case."""
        return self.re_to_underscore.sub(r'_\1', text).lower()

    def camel(self, text):
        """Convert snake_case ``text`` to camelCase."""
        return self.re_to_camel.sub(lambda m: m.group(1).upper(), text)

    def run_cmd(self, util, mode, use_region=False, direction=1):
        view = self.view
        count = util.get_count(True)

        # If cursors are not empty (e.g., visible marks) then we use the selection and we're in
        # region mode. If the cursors are empty but the emacs regions are not, we use them as long
        # as mode="regions". Otherwise, we generate regions by applying a word motion command.
        selection = view.sel()
        regions = list(selection)
        empty_cursors = util.all_empty_regions(regions)
        if empty_cursors and use_region:
            emacs_regions = util.get_regions()
            if emacs_regions and not util.all_empty_regions(emacs_regions):
                empty_cursors = False
                selection.clear()
                selection.add_all(emacs_regions)

        if empty_cursors:
            if use_region:
                return

            # This works first by finding the bounds of the operation by executing a forward-word
            # command. Then it performs the case command. But only if there are no selections or
            # regions to operate on.

            # run the move-word command so we can create a region; a negative
            # count means the motion goes backwards.
            # FIX: the computed direction was previously ignored (1 was
            # hardcoded), so a negative prefix argument had no effect.
            direction = -1 if count < 0 else 1
            util.run_command("sbp_move_word", {"direction": direction})

            # now the selection is at the "other end" and so we create regions out of all the
            # cursors
            new_regions = []
            for r, s in zip(regions, selection):
                new_regions.append(r.cover(s))
            selection.clear()
            selection.add_all(new_regions)

        # perform the operation
        if mode in ('upper', 'lower'):
            util.run_command(mode + "_case", {})
        elif mode == "title":
            for r in selection:
                util.view.replace(util.edit, r, view.substr(r).title())
        elif mode in ("underscore", "camel"):
            fcn = self.underscore if mode == "underscore" else self.camel
            delta = 0
            for r, s in zip(regions, selection):
                orig = view.substr(s)
                replace = fcn(orig)
                this_delta = len(orig) - len(replace)
                util.view.replace(util.edit, s, replace)

                # We need to adjust the size of regions by this_delta, and the position of each
                # region by the accumulated delta for when we put the selection back at the end.
                if s.b > s.a:
                    r.b -= this_delta
                else:
                    r.a -= this_delta
                r.b -= delta
                r.a -= delta
                delta += this_delta
        else:
            print("Unknown case setting:", mode)
            return

        if empty_cursors and count > 0:
            # was a word-based execution: collapse each region to its end
            for r in new_regions:
                r.a = r.b = r.end()
            selection.clear()
            selection.add_all(new_regions)
        else:
            # we used the selection or the emacs regions
            selection.clear()
            selection.add_all(regions)
#
# A poor implementation of moving by s-expressions. The problem is it tries to use the built-in
# sublime capabilities for matching brackets, and it can be tricky getting that to work.
#
# The real solution is to figure out how to require/request the bracket highlighter code to be
# loaded and just use it.
#
class SbpMoveSexprCommand(SbpTextCommand):
    """A poor implementation of moving by s-expressions, built on the view's
    word classification and bracket-matching helpers."""
    is_ensure_visible_cmd = True
    should_reset_target_column = True

    def run_cmd(self, util, direction=1):
        view = self.view
        separators = settings_helper.get("sbp_sexpr_separators", default_sbp_sexpr_separators)

        # determine the direction
        count = util.get_count() * direction
        forward = count > 0
        count = abs(count)

        def advance(cursor, first):
            # Move one cursor across the next (or previous) sexpr: a word is
            # skipped whole; an opening/closing bracket or quote jumps to its
            # matching other end; anything else is stepped over.
            point = cursor.b
            if forward:
                limit = view.size()
                while point < limit:
                    if util.is_word_char(point, True, separators):
                        point = view.find_by_class(point, True, sublime.CLASS_WORD_END, separators)
                        break
                    else:
                        ch = view.substr(point)
                        if ch in "({[`'\"":
                            next_point = util.to_other_end(point, direction)
                            if next_point is not None:
                                point = next_point
                                break
                        point += 1
            else:
                while point > 0:
                    if util.is_word_char(point, False, separators):
                        point = view.find_by_class(point, False, sublime.CLASS_WORD_START, separators)
                        break
                    else:
                        ch = view.substr(point - 1)
                        if ch in ")}]`'\"":
                            next_point = util.to_other_end(point, direction)
                            if next_point is not None:
                                point = next_point
                                break
                        point -= 1
            cursor.a = cursor.b = point
            return cursor

        for c in range(count):
            util.for_each_cursor(advance, (c == 0))
# Move to paragraph depends on the functionality provided by the default
# plugin in ST. So for now we use this.
class SbpMoveToParagraphCommand(SbpTextCommand):
    """Move forward or backward by paragraphs, using ST's bundled
    ``paragraph`` plugin to locate paragraph boundaries."""

    def run_cmd(self, util, direction=1):
        view = self.view
        count = util.get_count() * direction
        forward = count > 0
        count = abs(count)

        def advance(cursor):
            whitespace = '\t\x0b\x0c\r \n'
            if not forward:
                # Remove whitespace and new lines for moving forward and backward paragraphs
                this_region_begin = max(0, cursor.begin() - 1)
                while this_region_begin > 0 and view.substr(this_region_begin) in whitespace:
                    this_region_begin -= 1
                point = paragraph.expand_to_paragraph(view, this_region_begin).begin()
            else:
                this_region_end = cursor.end()
                limit = self.view.size() - 1
                while this_region_end < limit and view.substr(this_region_end) in whitespace:
                    this_region_end += 1
                point = paragraph.expand_to_paragraph(self.view, this_region_end).end()
            return sublime.Region(point)

        for c in range(count):
            util.for_each_cursor(advance)

        # keep the outermost cursor visible
        s = view.sel()
        util.ensure_visible(s[-1] if forward else s[0])
#
# A class which implements all the hard work of performing a move and then delete/kill command. It
# keeps track of the cursors, then runs the command to move all the cursors, and then performs the
# kill. This is used by the generic SbpMoveThenDeleteCommand command, but also commands that require
# input from a panel and so are not synchronous.
#
class MoveThenDeleteHelper():
    """Does the hard work of a move-then-delete/kill command: it records the
    cursors, the caller runs a motion command, and then ``finish`` kills the
    text between the old and new positions.  Used by SbpMoveThenDeleteCommand
    and also by commands that read input from a panel asynchronously."""

    def __init__(self, util):
        self.util = util
        self.selection = util.view.sel()

        # assume forward kill direction
        self.forward = True

        # remember the current cursor positions
        self.orig_cursors = [s for s in self.selection]

        # Remember if previous was a kill command now, because if we check in self.finish() it's
        # too late and the answer is always yes (because of this command we're "helping").
        self.last_was_kill_cmd = util.state.last_was_kill_cmd()

    #
    # Finish the operation. Sometimes we're called later with a new util object, because the whole
    # thing was done asynchronously (see the zap code).
    #
    def finish(self, new_util=None):
        util = new_util if new_util else self.util
        view = util.view
        selection = self.selection
        orig_cursors = self.orig_cursors

        # extend all cursors so we can delete the bytes
        new_cursors = list(selection)

        # but first check to see how many regions collapsed as a result of moving the cursors
        # (e.g., if they pile up at the end of the buffer)
        collapsed_regions = len(orig_cursors) - len(new_cursors)
        if collapsed_regions == 0:
            # OK - so now check to see how many collapse after we combine the beginning and end
            # points of each region. We do that by creating the selection object, which disallows
            # overlapping regions by collapsing them.
            selection.clear()
            for old, new in zip(orig_cursors, new_cursors):
                if old < new:
                    selection.add(sublime.Region(old.begin(), new.end()))
                else:
                    selection.add(sublime.Region(new.begin(), old.end()))
            collapsed_regions = len(orig_cursors) - len(selection)

        # OK one final check to see if any regions will overlap each other after we perform the
        # kill.
        if collapsed_regions == 0:
            cursors = list(selection)
            for i, c in enumerate(cursors[1:]):
                # cursors[i] is the region immediately before c
                if cursors[i].contains(c.begin()):
                    collapsed_regions += 1

        if collapsed_regions != 0:
            # restore everything to previous state and display a popup error
            selection.clear()
            selection.add_all(orig_cursors)
            sublime.error_message("Couldn't perform kill operation because %d regions would have collapsed into adjacent regions!" % collapsed_regions)
            return

        # copy the text into the kill ring
        regions = [view.substr(r) for r in view.sel()]
        kill_ring.add(regions, forward=self.forward, join=self.last_was_kill_cmd)

        # erase the regions
        for region in selection:
            view.erase(util.edit, region)
#
# This command remembers all the current cursor positions, executes a command on all the cursors,
# and then deletes all the data between the two.
#
class SbpMoveThenDeleteCommand(SbpTextCommand):
    """Remembers the current cursors, runs a motion command on all of them,
    and then deletes all the text between the old and new positions."""
    is_ensure_visible_cmd = True
    is_kill_cmd = True

    def run_cmd(self, util, move_cmd, **kwargs):
        # prepare the helper before moving anything
        helper = MoveThenDeleteHelper(util)

        # peek at the count (adjusted by any explicit direction) so the kill
        # ring records the right direction
        count = util.get_count(True)
        if 'direction' in kwargs:
            count *= kwargs['direction']
        helper.forward = count > 0

        util.view.run_command(move_cmd, kwargs)
        helper.finish()
#
# Goto the Nth line as specified by the emacs arg count, or prompt for a line number if one
# isn't specified.
#
class SbpGotoLineCommand(SbpTextCommand):
    """Jump to the line given by the prefix argument, or open the goto
    overlay to prompt for one."""
    is_ensure_visible_cmd = True

    def run_cmd(self, util):
        if not util.has_prefix_arg():
            util.run_window_command("show_overlay", {"overlay": "goto", "text": ":"})
        else:
            util.goto_line(util.get_count())
class SbpUniversalArgumentCommand(SbpTextCommand):
    """Implements emacs universal-argument (prefix argument) handling.

    ``value`` is one of 'by_four' (C-u), 'negative' (M--), or an int digit.
    The elif chain below is order-sensitive: the first key press initializes
    the argument, subsequent presses modify it.
    """
    def run_cmd(self, util, value):
        state = util.state
        if not state.argument_supplied:
            # first prefix key: initialize the argument
            state.argument_supplied = True
            if value == 'by_four':
                state.argument_value = 4
            elif value == 'negative':
                state.argument_negative = True
            else:
                state.argument_value = value
        elif value == 'by_four':
            # each additional C-u multiplies the argument by 4
            state.argument_value *= 4
        elif isinstance(value, int):
            # append a decimal digit to the argument
            state.argument_value *= 10
            state.argument_value += value
        elif value == 'negative':
            state.argument_value = -state.argument_value
class SbpShiftRegionCommand(SbpTextCommand):
    """Shifts the emacs region left or right."""

    def run_cmd(self, util, direction):
        # direction is +1 (right) or -1 (left); the shift amount is the
        # prefix-arg count if supplied, else one tab stop.
        view = self.view
        state = util.state
        regions = util.get_regions()
        if not regions:
            # no emacs region: operate on the current cursors instead
            regions = util.get_cursors()
        if regions:
            util.save_cursors("shift")
            util.toggle_active_mark_mode(False)
            selection = self.view.sel()
            selection.clear()

            # figure out how far we're moving
            if state.argument_supplied:
                cols = direction * util.get_count()
            else:
                cols = direction * util.get_tab_size()

            # now we know which way and how far we're shifting, create a cursor for each line we
            # want to shift
            amount = abs(cols)
            count = 0
            shifted = 0
            for region in regions:
                for line in util.for_each_line(region):
                    count += 1
                    # when shifting left, skip lines that lack enough leading blanks
                    if cols < 0 and (line.size() < amount or not util.is_blank(line.a, line.a + amount)):
                        continue
                    selection.add(sublime.Region(line.a, line.a))
                    shifted += 1

            # shift the region
            if cols > 0:
                # shift right
                self.view.run_command("insert", {"characters": " " * cols})
            else:
                for i in range(amount):
                    self.view.run_command("right_delete")

            # restore the region
            util.restore_cursors("shift")
            util.set_status("Shifted %d of %d lines in the region" % (shifted, count))
# Enum definition
def enum(**enums):
    """Build a simple enum-like class whose attributes are the given
    keyword arguments, e.g. ``enum(A=1).A == 1``."""
    return type('Enum', (), dict(enums))

SCROLL_TYPES = enum(TOP=1, CENTER=0, BOTTOM=2)
class SbpCenterViewCommand(SbpTextCommand):
    '''
    Reposition the view so that the line containing the cursor is at the
    center of the viewport, if possible. Like the corresponding Emacs
    command, recenter-top-bottom, this command cycles through
    scrolling positions. If the prefix args are used it centers given an offset
    else the cycling command is used

    This command is frequently bound to Ctrl-l.
    '''
    # class-level state so successive invocations at the same spot can cycle
    last_sel = None
    last_scroll_type = None
    last_visible_region = None

    def rowdiff(self, start, end):
        """Return the number of rows between the two text points."""
        r1, c1 = self.view.rowcol(start)
        r2, c2 = self.view.rowcol(end)
        return r2 - r1

    def run_cmd(self, util, center_only=False):
        view = self.view
        point = util.get_point()
        if util.has_prefix_arg():
            # position the cursor's line N lines from the top of the viewport
            lines = util.get_count()
            line_height = view.line_height()
            ignore, point_offy = view.text_to_layout(point)
            offx, ignore = view.viewport_position()
            view.set_viewport_position((offx, point_offy - line_height * lines))
        elif center_only:
            self.view.show_at_center(util.get_point())
        else:
            self.cycle_center_view(view.sel()[0])

    def cycle_center_view(self, start):
        """Cycle center -> top -> bottom on repeated invocations at the same
        selection; any other selection restarts the cycle by centering."""
        if start != SbpCenterViewCommand.last_sel:
            SbpCenterViewCommand.last_visible_region = None
            SbpCenterViewCommand.last_scroll_type = SCROLL_TYPES.CENTER
            SbpCenterViewCommand.last_sel = start
            self.view.show_at_center(SbpCenterViewCommand.last_sel)
            return
        else:
            SbpCenterViewCommand.last_scroll_type = (SbpCenterViewCommand.last_scroll_type + 1) % 3
            SbpCenterViewCommand.last_sel = start

        # FIX: identity comparison with None (was "== None")
        if SbpCenterViewCommand.last_visible_region is None:
            SbpCenterViewCommand.last_visible_region = self.view.visible_region()

        # Now Scroll to position
        if SbpCenterViewCommand.last_scroll_type == SCROLL_TYPES.CENTER:
            self.view.show_at_center(SbpCenterViewCommand.last_sel)
        elif SbpCenterViewCommand.last_scroll_type == SCROLL_TYPES.TOP:
            row, col = self.view.rowcol(SbpCenterViewCommand.last_visible_region.end())
            diff = self.rowdiff(SbpCenterViewCommand.last_visible_region.begin(), SbpCenterViewCommand.last_sel.begin())
            self.view.show(self.view.text_point(row + diff - 2, 0), False)
        elif SbpCenterViewCommand.last_scroll_type == SCROLL_TYPES.BOTTOM:
            row, col = self.view.rowcol(SbpCenterViewCommand.last_visible_region.begin())
            diff = self.rowdiff(SbpCenterViewCommand.last_sel.begin(), SbpCenterViewCommand.last_visible_region.end())
            self.view.show(self.view.text_point(row - diff + 2, 0), False)
class SbpSetMarkCommand(SbpTextCommand):
    """Implements the emacs set-mark command."""
    def run_cmd(self, util):
        state = util.state
        if state.argument_supplied:
            # with a prefix argument: pop a mark off the mark ring and jump there
            cursors = state.mark_ring.pop()
            if cursors:
                util.set_cursors(cursors)
            state.this_cmd = 'sbp_pop_mark'
        elif state.this_cmd == state.last_cmd:
            # at least two set mark commands in a row: turn ON the highlight
            util.toggle_active_mark_mode()
        else:
            # set the mark
            util.set_mark()

        if settings_helper.get("sbp_active_mark_mode", False):
            util.set_active_mark_mode()
class SbpCancelMarkCommand(SbpTextCommand):
    """Deactivates the mark (if active) and empties the mark ring."""

    def run_cmd(self, util):
        state = util.state
        if state.active_mark:
            util.toggle_active_mark_mode()
        state.mark_ring.clear()
class SbpSwapPointAndMarkCommand(SbpTextCommand):
    """Exchanges point and mark; with a prefix argument (or the explicit
    flag) toggles active-mark mode instead."""

    def run_cmd(self, util, toggle_active_mark_mode=False):
        if not (util.state.argument_supplied or toggle_active_mark_mode):
            util.swap_point_and_mark()
        else:
            util.toggle_active_mark_mode()
class SbpEnableActiveMarkCommand(SbpTextCommand):
    """Explicitly enable or disable active-mark (highlight) mode."""
    def run_cmd(self, util, enabled):
        util.toggle_active_mark_mode(enabled)
class SbpMoveToCommand(SbpTextCommand):
    """Jump to a well-known place: beginning/end of file ('bof'/'eof') or
    beginning/end of the visible window ('bow'/'eow')."""
    is_ensure_visible_cmd = True

    def run_cmd(self, util, to, always_push_mark=False):
        if to == 'bof':
            util.push_mark_and_goto_position(0)
        elif to == 'eof':
            util.push_mark_and_goto_position(self.view.size())
        elif to in ('eow', 'bow'):
            window_region = self.view.visible_region()
            target = window_region.a if to == 'bow' else window_region.b
            if always_push_mark:
                util.push_mark_and_goto_position(target)
            else:
                util.set_cursors([sublime.Region(target)])
class SbpSelectAllCommand(SbpTextCommand):
    """Select the whole buffer: marks at point and at EOF, point at the top,
    optionally activating the mark highlight."""

    def run_cmd(self, util, activate_mark=True):
        # mark the current position so it's on the ring
        util.set_mark()
        # mark the end of the file
        util.set_mark(regions=[sublime.Region(self.view.size())])
        # jump to the top of the file
        util.set_point(0)
        if not activate_mark:
            util.ensure_visible(sublime.Region(0))
        else:
            util.toggle_active_mark_mode(True)
class SbpOpenLineCommand(SbpTextCommand):
    """Emacs open-line: insert N newlines at each cursor, leaving point
    before the inserted newlines."""

    def run_cmd(self, util):
        view = self.view
        count = util.get_count()
        if count <= 0:
            return
        for cursor in view.sel():
            view.insert(util.edit, cursor.b, "\n" * count)
        for _ in range(count):
            view.run_command("move", {"by": "characters", "forward": False})
class SbpKillRegionCommand(SbpTextCommand):
    """Kill (or, with is_copy=True, copy) the emacs region(s) into the kill
    ring, then deactivate the mark."""
    is_kill_cmd = True

    def run_cmd(self, util, is_copy=False):
        view = self.view
        regions = util.get_regions()
        if regions:
            data = [view.substr(r) for r in regions]
            kill_ring.add(data, True, False)
            if not is_copy:
                # erase back-to-front so earlier region offsets stay valid
                for r in reversed(regions):
                    view.erase(util.edit, r)
            else:
                # renamed from `bytes`, which shadowed the builtin
                byte_count = sum(len(d) for d in data)
                util.set_status("Copied %d bytes in %d regions" % (byte_count, len(data)))
            util.toggle_active_mark_mode(False)
class SbpPaneCmdCommand(SbpWindowCommand):
    """Dispatcher for pane (window group) operations: split, grow, destroy,
    and moving between panes or tabs."""

    def run_cmd(self, util, cmd, **kwargs):
        if cmd == 'split':
            self.split(self.window, util, **kwargs)
        elif cmd == 'grow':
            self.grow(self.window, util, **kwargs)
        elif cmd == 'destroy':
            self.destroy(self.window, **kwargs)
        elif cmd in ('move', 'switch_tab'):
            self.move(self.window, **kwargs)
        else:
            print("Unknown command")

    #
    # Grow the current selected window group (pane). Amount is usually 1 or -1 for grow and shrink.
    #
    def grow(self, window, util, direction):
        if window.num_groups() == 1:
            return

        # Prepare the layout
        layout = window.layout()
        lm = ll.LayoutManager(layout)
        rows = lm.rows()
        cols = lm.cols()
        cells = layout['cells']

        # calculate the width and height in pixels of all the views
        width = height = dx = dy = 0
        for g, cell in enumerate(cells):
            view = window.active_view_in_group(g)
            w, h = view.viewport_extent()
            width += w
            height += h
            dx += cols[cell[2]] - cols[cell[0]]
            dy += rows[cell[3]] - rows[cell[1]]
        # average pixel size of one layout unit
        width /= dx
        height /= dy

        current = window.active_group()
        view = util.view

        # Handle vertical moves
        count = util.get_count()
        if direction in ('g', 's'):
            # grow/shrink vertically: one line height per count unit
            unit = view.line_height() / height
        else:
            # grow/shrink horizontally: one em width per count unit
            unit = view.em_width() / width
        window.set_layout(lm.extend(current, direction, unit, count))

        # make sure point doesn't disappear in any active view - a delay is needed for this to work
        def ensure_visible():
            for g in range(window.num_groups()):
                view = window.active_view_in_group(g)
                util = CmdUtil(view)
                util.ensure_visible(util.get_last_cursor())
        sublime.set_timeout(ensure_visible, 50)

    #
    # Split the current pane in half. Clone the current view into the new pane. Refuses to split if
    # the resulting windows would be too small.
    #
    def split(self, window, util, stype):
        layout = window.layout()
        current = window.active_group()
        group_count = window.num_groups()

        view = window.active_view()
        extent = view.viewport_extent()

        # refuse to create panes shorter than 4 lines or narrower than 20 ems
        if stype == "h" and extent[1] / 2 <= 4 * view.line_height():
            return False
        if stype == "v" and extent[0] / 2 <= 20 * view.em_width():
            return False

        # Perform the layout
        lm = ll.LayoutManager(layout)
        if not lm.split(current, stype):
            return False
        window.set_layout(lm.build())

        # couldn't find an existing view so we have to clone the current one
        window.run_command("clone_file")

        # the cloned view becomes the new active view
        new_view = window.active_view()

        # move the new view into the new group (add the end of the list)
        window.set_view_index(new_view, group_count, 0)

        # make sure the original view is the focus in the original pane
        window.focus_view(view)

        # switch to new pane
        # NOTE(review): group indices are 0-based and the newly created group
        # should be index ``group_count`` — confirm the +1 here is intended.
        window.focus_group(group_count + 1)

        # after a short delay make sure the two views are looking at the same area
        def setup_views():
            selection = new_view.sel()
            selection.clear()
            selection.add_all([r for r in view.sel()])
            new_view.set_viewport_position(view.viewport_position(), False)

            point = util.get_point()
            new_view.show(point)
            view.show(point)

        sublime.set_timeout(setup_views, 10)
        return True

    #
    # Destroy the specified pane=self|others.
    #
    def destroy(self, window, pane):
        if window.num_groups() == 1:
            return
        view = window.active_view()
        layout = window.layout()
        current = window.active_group()
        lm = ll.LayoutManager(layout)
        if pane == "self":
            # remember the active view of every group, minus the one being killed
            views = [window.active_view_in_group(i) for i in range(window.num_groups())]
            del(views[current])
            lm.killSelf(current)
        else:
            lm.killOther(current)
            views = [window.active_view()]
        window.set_layout(lm.build())
        # restore the remembered active view in each surviving group
        for i in range(window.num_groups()):
            view = views[i]
            window.focus_group(i)
            window.focus_view(view)
        window.focus_group(max(0, current - 1))
        dedup_views(window)

    def move(self, window, **kwargs):
        # Move focus to another pane ('prev'/'next', wrapping) or to an
        # adjacent tab ('left'/'right', wrapping).  A pre-computed
        # 'next_pane' (set by WindowCmdWatcher) short-circuits the logic.
        if 'next_pane' in kwargs:
            window.focus_group(kwargs["next_pane"])
            return
        direction = kwargs['direction']
        if direction in ("prev", "next"):
            direction = 1 if direction == "next" else -1
            current = window.active_group()
            current += direction
            num_groups = window.num_groups()
            if current < 0:
                current = num_groups - 1
            elif current >= num_groups:
                current = 0
            window.focus_group(current)
        else:
            view = window.active_view()
            group, index = window.get_view_index(view)
            views = window.views_in_group(group)
            direction = 1 if direction == "right" else -1
            index += direction
            if index >= len(views):
                index = 0
            elif index < 0:
                index = len(views) - 1
            window.focus_view(views[index])
#
# Close the N least recently touched views, leaving at least one view remaining.
#
class SbpCloseStaleViewsCommand(SbpWindowCommand):
    """Close the N least recently touched views, leaving at least one view
    remaining.  Dirty and pinned views are never closed."""

    def run_cmd(self, util, n_windows=None):
        window = sublime.active_window()
        # most-recently-used first; stale views are at the end
        # (renamed from `sorted`, which shadowed the builtin)
        ranked_views = ViewState.sorted_views(window, window.active_group())
        if n_windows is None or util.has_prefix_arg():
            n_windows = util.get_count()
        while n_windows > 0 and len(ranked_views) > 1:
            view = ranked_views.pop()
            if view.is_dirty() or view.settings().get("pinned"):
                # never close unsaved or pinned views
                continue
            window.focus_view(view)
            window.run_command('close')
            n_windows -= 1

        # go back to the original view
        window.focus_view(util.view)
#
# Toggle the pinned state of the current view.
#
class SbpToggleViewPinnedCommand(SbpTextCommand):
    """Toggle the 'pinned' setting of the current view and refresh its
    pinned-status display."""

    def run_cmd(self, util):
        view = self.view
        settings = view.settings()
        currently_pinned = settings.get("pinned", False)
        settings.set("pinned", not currently_pinned)
        update_pinned_status(view)
#
# Closes the current view and selects the most recently used one in its place. This is almost like
# kill buffer in emacs but if another view is displaying this file, it will still exist there. In
# short, this is like closing a tab but rather than selecting an adjacent tab, it selects the most
# recently used "buffer".
#
class SbpCloseCurrentViewCommand(SbpWindowCommand):
    """Closes the current view and selects the most recently used one in its
    place.  This is almost like kill-buffer in emacs, but if another view is
    displaying this file it will still exist there — like closing a tab but
    selecting the most recently used "buffer" instead of an adjacent tab.

    ``n_windows`` is unused but kept for call compatibility.
    """

    def run_cmd(self, util, n_windows=10):
        window = sublime.active_window()
        # most-recently-used first (renamed from `sorted`, which shadowed the builtin)
        ranked_views = ViewState.sorted_views(window, window.active_group())
        if len(ranked_views) > 0:
            view = ranked_views.pop(0)
            window.focus_view(view)
            window.run_command('close')
            if len(ranked_views) > 0:
                # focus the next most recently used view
                window.focus_view(ranked_views[0])
        else:
            window.run_command('close')
#
# Exists only to support kill-line with multiple cursors.
#
class SbpMoveForKillLineCommand(SbpTextCommand):
    """Motion helper that exists only to support kill-line with multiple
    cursors: moves each cursor to the position kill-line should delete to."""

    def run_cmd(self, util, **kwargs):
        view = self.view
        state = util.state
        line_mode = state.argument_supplied
        count = util.get_count()

        def advance(cursor):
            start = cursor.b
            text, index, region = util.get_line_info(start)
            if line_mode:
                # kill N whole lines: go down (or up) N lines
                for i in range(abs(count)):
                    view.run_command("move", {"by": "lines", "forward": count > 0})
                end = util.get_point()
                if count != 0 and region.contains(end):
                    # same line we started on - must be on the last line of the file
                    end = region.end() if count > 0 else region.begin()
                else:
                    # beginning of the line we ended up on
                    end = view.line(util.get_point()).begin()
                util.set_cursors(sublime.Region(end))
            else:
                end = region.end()
                # check if line is blank from here to the end and if so, delete the \n as well
                # (uses the module-level `re` import; the per-call import was hoisted out)
                if re.match(r'[ \t]*$', text[index:]) and end < util.view.size():
                    end += 1
            return sublime.Region(end, end)

        util.for_each_cursor(advance)
#
# Emacs Yank and Yank Pop commands.
#
class SbpYankCommand(SbpTextCommand):
    """Emacs Yank and Yank Pop commands.

    ``pop`` != 0 replaces the result of the previous yank with another kill
    ring entry; ``index`` selects a specific entry directly.
    """
    def run_cmd(self, util, pop=0, index=None):
        # yank-pop only makes sense immediately after a yank
        if pop and util.state.last_cmd != 'sbp_yank':
            util.set_status("Previous command was not yank!")
            return

        view = self.view

        # Get the cursors as selection, because if there is a selection we want to replace it with
        # what we're yanking.
        cursors = list(view.sel())
        data = kill_ring.get_current(len(cursors), pop, index)
        if not data:
            return

        if pop != 0:
            # erase existing regions (the result of the previous yank)
            regions = util.get_regions()
            if not regions:
                return
            for r in reversed(regions):
                view.erase(util.edit, r)
            # fetch updated cursors
            cursors = util.get_cursors()

        # NOTE: zip is materialized before iterating, so rebinding `data` as
        # the loop variable does not disturb the pairing; replacing back to
        # front keeps earlier offsets valid.
        for region, data in reversed(list(zip(cursors, data))):
            view.replace(util.edit, region, data)

        # set the mark at the start of the yanked text and collapse the cursors
        util.state.mark_ring.set(util.get_cursors(begin=True), True)
        util.make_cursors_empty()
        util.ensure_visible(util.get_last_cursor())
#
# Like the yank command except it displays a menu of all the kills and lets you choose which one to
# yank.
#
class SbpChooseAndYank(SbpTextCommand):
    """Like yank, but shows a quick panel of all the kills and yanks the
    chosen one (optionally across all cursors)."""

    def run_cmd(self, util, all_cursors=False):
        # items is an array of (index, text) pairs
        items = kill_ring.get_popup_sample(util.view)

        def on_done(choice):
            if choice < 0:
                return
            kill_index = items[choice][0]
            command = "sbp_yank_all_cursors" if all_cursors else "sbp_yank"
            util.run_command(command, {"index": kill_index})

        if not items:
            util.set_status('Nothing in history')
        else:
            sublime.active_window().show_quick_panel([item[1] for item in items], on_done)
#
# Like the yank command except this automatically creates the number of cursors you need to handle
# the yanked text. For example, if there are 10 yanked regions in the most recent kill, this command
# will automatically create 10 cursors on 10 lines, and then perform the yank.
#
class SbpYankAllCursorsCommand(SbpTextCommand):
    """Yank the most recent kill, creating one cursor per yanked region.

    If the current kill holds N regions, insert N newlines at point, place N
    cursors (one at each new line start) and run the regular yank command so
    each cursor receives its own region.
    """

    def run_cmd(self, util, index=None):
        view = self.view
        # request the regions of text from the current kill
        texts = kill_ring.get_current(0, 0, index)
        if texts is None:
            util.set_status("Nothing to yank")
            # BUG FIX: the original fell through and crashed on len(None) below
            return
        # insert the right number of lines
        point = util.get_point()
        view.insert(util.edit, point, "\n" * len(texts))
        # one empty cursor at the start of each inserted line
        regions = (sublime.Region(point + p) for p in range(len(texts)))
        selection = view.sel()
        selection.clear()
        selection.add_all(regions)
        view.run_command("sbp_yank")
#
# A special command that allows us to invoke incremental-search commands from the menu.
#
class SbpIncSearchFromMenuCommand(SbpTextCommand):
    """Invoke incremental search from the menu by deferring it briefly."""

    def run_cmd(self, util, **kwargs):
        # defer so the menu has fully closed before the search UI opens
        def start_search():
            util.run_command("sbp_inc_search", kwargs)

        sublime.set_timeout(start_search, 50)
class SbpIncSearchCommand(SbpTextCommand):
    """Incremental-search driver.

    With no active search (or no cmd) a new ISearchInfo is created and its
    input panel is opened; otherwise `cmd` selects an operation on the
    active search.
    """

    def run_cmd(self, util, cmd=None, **kwargs):
        info = isearch.info_for(self.view)
        if info is None or cmd is None:
            # start a new search; a prefix argument flips regex mode
            use_regex = kwargs.get('regex', False)
            if util.state.argument_supplied:
                use_regex = not use_regex
            info = isearch.set_info_for(
                self.view, isearch.ISearchInfo(self.view, kwargs['forward'], use_regex))
            info.open()
            return

        def set_search():
            # replace the panel contents and move to the end
            panel = info.input_view
            panel.replace(util.edit, sublime.Region(0, panel.size()), kwargs['text'])
            panel.run_command("move_to", {"to": "eof"})

        handlers = {
            "next": lambda: info.next(**kwargs),
            "pop_one": lambda: info.pop(),
            "pop_group": lambda: info.pop(True),
            "append_from_cursor": info.append_from_cursor,
            "keep_all": info.keep_all,
            "done": info.done,
            "quit": info.quit,
            "yank": lambda: info.input_view.run_command("sbp_yank"),
            "set_search": set_search,
            "history": lambda: info.history(**kwargs),
        }
        handler = handlers.get(cmd)
        if handler is None:
            print("Not handling cmd", cmd, kwargs)
        else:
            handler()

    def is_visible(self, **kwargs):
        # REMIND: is it not possible to invoke isearch from the menu for some reason. I think the
        # problem is that a focus thing is happening and we're dismissing ourselves as a result. So
        # for now we hide it.
        return True
class SbpIncSearchEscapeCommand(SbpTextCommand):
    """Terminate an active incremental search, then forward to another command."""
    # unregistered = True

    def run_cmd(self, util, next_cmd, next_args):
        info = isearch.info_for(self.view)
        info.done()
        # Overlay commands must run on the window; everything else runs on
        # the view the search was started in.
        if next_cmd in ("show_overlay",):
            target = sublime.active_window()
        else:
            target = info.view
        target.run_command(next_cmd, next_args)
#
# Indent for tab command. If the cursor is not within the existing indent, just call reindent. If
# the cursor is within the indent, move to the start of the indent and call reindent. If the cursor
# was already at the indent didn't change after calling reindent, indent one more level.
#
class SbpTabCmdCommand(SbpTextCommand):
    """Emacs-like TAB: reindent, or indent one more level on repeat.

    If the cursor is beyond the existing indent (or a mark is active) this
    just reindents.  If the cursor is within the indent, move to the start
    of the line's text and reindent; if that leaves the indent unchanged,
    indent one more level.
    """

    def run_cmd(self, util, indent_on_repeat=False):
        point = util.get_point()
        # indent = width of the line's leading whitespace; cursor = offset of point in the line
        indent,cursor = util.get_line_indent(point)
        tab_size = util.get_tab_size()
        if util.state.active_mark or cursor > indent:
            util.run_command("reindent", {})
        else:
            if indent_on_repeat and util.state.last_cmd == util.state.this_cmd:
                # a repeated TAB keeps indenting one level at a time
                util.run_command("indent", {})
            else:
                # sublime gets screwy with indent if you're not currently a multiple of tab size
                if (indent % tab_size) != 0:
                    delta = tab_size - (indent % tab_size)
                    self.view.run_command("insert", {"characters": " " * delta})
                if cursor < indent:
                    util.run_command("move_to", {"to": "bol", "extend": False})
                # re-indent and then if we're in the same place, indent another level
                util.run_command("reindent", {})
                indent2, cursor2 = util.get_line_indent(point)
                if indent2 == indent:
                    util.run_command("indent", {})
#
# A quit command which is basically a no-op unless there are multiple cursors or a selection, in
# which case it tries to pick one end or the other to make the single selection.
#
class SbpQuitCommand(SbpTextCommand):
    """Emacs C-g: mostly a no-op, unless there is a selection or multiple cursors.

    If anything is selected, collapse to one end (chosen by favor_side).
    With multiple cursors or a spread-out selection, pick a single visible
    position (or the closest one to the viewport) and move there.  Finally,
    optionally cancel the mark.
    """

    def run_cmd(self, util, favor_side="start"):
        window = self.view.window()
        # get all the regions
        regions = list(self.view.sel())
        if not util.all_empty_regions(regions):
            # a real selection exists: collapse it and leave mark mode
            util.make_cursors_empty(to_start=favor_side == "start")
            util.toggle_active_mark_mode(False)
            return

        # If there is a selection or multiple cursors, set point to the end of it that is visible OR
        # if neither the start nor end is visible, go to whichever is closest.
        if regions and regions[0].begin() != regions[-1].end():
            start = regions[0].a
            end = regions[-1].b
            favor_start = favor_side == "start"
            favor_end = favor_side == "end"
            start_visible = util.is_visible(start)
            end_visible = util.is_visible(end)
            pos = None
            if not (start_visible or end_visible):
                # pick whichever side is closest to the viewport
                visible = self.view.visible_region()
                if abs(visible.begin() - start) < abs(visible.end() - end):
                    pos = start
                else:
                    pos = end
            elif len(regions) > 1:
                # prefer the favored side when it is visible, else any visible side
                if favor_start and start_visible:
                    pos = start
                elif favor_end and end_visible:
                    pos = end
                elif start_visible:
                    pos = start
                elif end_visible:
                    pos = end
            # default value for pos is the current end of the single selection
            if pos is None:
                pos = regions[-1].b
            else:
                regions = sublime.Region(pos)
            util.set_selection(regions)
            util.ensure_visible(regions)
            return
        #
        # Cancel the mark if it's visible and we're supposed to.
        #
        if settings_helper.get("sbp_cancel_mark_enabled", False):
            # if util.state.mark_ring.has_visible_mark():
            util.run_command("sbp_cancel_mark")
#
# A class which knows how to ask for a single character and then does something with it.
#
class AskCharOrStringBase(SbpTextCommand):
    """Prompt for a single character (or, in "string" mode, a whole string)
    and hand the input to a subclass's process_one() for every cursor."""

    def run_cmd(self, util, prompt="Type character"):
        self.util = util
        self.window = self.view.window()
        self.count = util.get_count()
        self.mode = "char"
        # kick things off by showing the input panel
        self.window.show_input_panel(prompt, "", self.on_done, self.on_change, None)

    def on_change(self, content):
        # Fires immediately when the panel opens (with empty content) and on
        # every keystroke; in "char" mode the first real character completes
        # the input.
        if self.mode != "string" and content:
            self.process_cursors(content)

    def process_cursors(self, content):
        self.window.run_command("hide_panel")
        repeat = abs(self.count)
        for iteration in range(repeat):
            self.last_iteration = iteration == repeat - 1
            self.util.for_each_cursor(self.process_one, content)

    def on_done(self, content):
        # only "string" mode waits for Enter
        if self.mode == "string":
            self.process_cursors(content)
#
# Jump to char command inputs one character and jumps to it. If include_char is True it goes just past
# the character in question, otherwise it stops just before it.
#
class SbpJumpToCharCommand(AskCharOrStringBase):
    """Read one character and jump each cursor to its next occurrence.

    With include_char the cursor lands just past the character; otherwise it
    stops just before it.
    """

    def run_cmd(self, util, *args, include_char=True, **kwargs):
        kwargs.setdefault('prompt', "Jump to char: ")
        super(SbpJumpToCharCommand, self).run_cmd(util, *args, **kwargs)
        self.include_char = include_char

    def process_one(self, cursor, ch):
        match = self.view.find(ch, cursor.end(), sublime.LITERAL)
        if not match:
            return None
        pos = match.begin()
        if self.include_char or not self.last_iteration:
            # advance one more if this is not the last iteration or else we'd
            # be stuck finding the same character forever
            pos += 1
        return pos
class SbpZapToCharCommand(SbpJumpToCharCommand):
    """Like jump-to-char, but kills the text crossed to get there."""
    is_kill_cmd = True

    def run_cmd(self, util, **kwargs):
        # remember where we started so the move can be turned into a kill
        self.helper = MoveThenDeleteHelper(util)
        kwargs['prompt'] = "Zap to char: "
        super(SbpZapToCharCommand, self).run_cmd(util, **kwargs)

    def process_cursors(self, content):
        # the superclass performs the jump ...
        super(SbpZapToCharCommand, self).process_cursors(content)
        # ... but we cannot delete here: this runs asynchronously, after the
        # original text command (and its edit object) has returned.  Stash
        # the helper on the view state and let a fresh command finish the job.
        ViewState.get(self.view).pending_move_then_delete_helper = self.helper
        self.window.run_command("sbp_finish_move_then_delete")
#
# A helper class which will simply finish what was started in a previous command that was using a
# MoveThenDeleteHelper class. Some commands return before they are finished (e.g., they pop up a
# panel) and so we need a new 'edit' instance to be able to perform any edit operations. This is how
# we do that.
#
class SbpFinishMoveThenDeleteCommand(SbpTextCommand):
    """Complete a pending MoveThenDeleteHelper operation.

    Commands that pop up a panel return before they are finished, so they
    stash a helper on the view state; this command supplies the fresh 'edit'
    object needed to actually perform the deletion.
    """
    is_kill_cmd = True

    def run_cmd(self, util):
        state = ViewState.get(self.view)
        pending = state.pending_move_then_delete_helper
        state.pending_move_then_delete_helper = None
        pending.finish(util)
#
# Jump to string command inputs a string and jumps to it (case sensitive).
# If include_string is True it jumps past the string being searched,
# otherwise it stops just before it.
#
class SbpJumpToStringCommand(AskCharOrStringBase):
    """Read a string (case sensitive) and jump each cursor to it.

    With include_string the cursor lands after the match; otherwise it stops
    at the match's start.
    """

    def run_cmd(self, util, *args, include_string=True, **kwargs):
        kwargs.setdefault('prompt', "Jump to string: ")
        super(SbpJumpToStringCommand, self).run_cmd(util, *args, **kwargs)
        self.mode = "string"
        self.include_string = include_string

    def process_one(self, cursor, word):
        match = self.view.find(word, cursor.end(), sublime.LITERAL)
        if not match:
            return None
        # land before or after the match depending on include_string
        return match.begin() if self.include_string is False else match.end()
# Largely unchanged from zap to char command besides calling jump to string
# Largely unchanged from zap to char command besides calling jump to string
class SbpZapToStringCommand(SbpJumpToStringCommand):
    """Like jump-to-string, but kills the text crossed to get there."""
    is_kill_cmd = True

    def run_cmd(self, util, **kwargs):
        # remember where we started so the move can be turned into a kill
        self.helper = MoveThenDeleteHelper(util)
        kwargs['prompt'] = "Zap to string: "
        super(SbpZapToStringCommand, self).run_cmd(util, **kwargs)

    def process_cursors(self, content):
        # the superclass performs the jump ...
        super(SbpZapToStringCommand, self).process_cursors(content)
        # ... but we cannot delete here: this runs asynchronously, after the
        # original text command (and its edit object) has returned.  Stash
        # the helper on the view state and let a fresh command finish the job.
        ViewState.get(self.view).pending_move_then_delete_helper = self.helper
        self.window.run_command("sbp_finish_move_then_delete")
#
# A single command that does both ensuring newline at end of file AND deleting trailing whitespace.
# If this is not a single command, blank spaces at the end of the file will cause an extra newline.
# It's important to delete end of line whitespace before doing the end of file newline check.
#
class SbpTrimTrailingWhiteSpaceAndEnsureNewlineAtEofCommand(sublime_plugin.TextCommand):
    """Trim trailing whitespace and/or ensure the file ends with a newline.

    Both jobs live in one command on purpose: whitespace must be trimmed
    BEFORE the end-of-file newline check, or blank space at the end of the
    file would produce an extra newline.
    """

    def run(self, edit, trim_whitespace, ensure_newline):
        if trim_whitespace:
            # erase back-to-front so earlier regions keep their offsets
            for region in reversed(self.view.find_all("[\t ]+$")):
                self.view.erase(edit, region)
        if ensure_newline:
            size = self.view.size()
            if size > 0 and self.view.substr(size - 1) != '\n':
                self.view.insert(edit, size, "\n")
class SbpPreSaveWhiteSpaceHook(sublime_plugin.EventListener):
    """On save, run the trim/ensure-newline command if either setting is on."""

    def on_pre_save(self, view):
        trim = settings_helper.get("sbp_trim_trailing_white_space_on_save") == True
        ensure = settings_helper.get("sbp_ensure_newline_at_eof_on_save") == True
        if not (trim or ensure):
            return
        view.run_command("sbp_trim_trailing_white_space_and_ensure_newline_at_eof",
                         {"trim_whitespace": trim, "ensure_newline": ensure})
#
# Function to dedup views in all the groups of the specified window. This does not close views that
# have changes because that causes a warning to popup. So we have a monitor which dedups views
# whenever a file is saved in order to dedup them then when it's safe.
#
def dedup_views(window):
    """Close duplicate views (same buffer shown more than once) in every
    group of `window`.

    Views with unsaved changes are never closed, because closing them would
    pop up a save warning; a save monitor re-runs dedup when it is safe.
    The most recently used view of each buffer survives.
    """
    # remember the current group so we can focus back to it when we're done
    group = window.active_group()
    for g in range(window.num_groups()):
        # get views for current group sorted by most recently used
        active = window.active_view_in_group(g)
        views = ViewState.sorted_views(window, g)
        view_by_buffer_id = dict()
        for v in views:
            if v.is_dirty():
                # we cannot nuke a dirty buffer or we'll get an annoying popup
                continue
            # renamed from `id`, which shadowed the builtin
            buffer_id = v.buffer_id()
            if buffer_id in view_by_buffer_id:
                # already have a view with this buffer - so nuke this one - it's older
                window.focus_view(v)
                window.run_command('close')
            else:
                view_by_buffer_id[buffer_id] = v
        window.focus_view(active)
    window.focus_group(group)
def plugin_loaded():
    # Sublime Text entry point: initialize plugin subsystems.
    kill_ring.initialize()
    isearch.initialize()
    # preprocess this module
    preprocess_module(sys.modules[__name__])
| grundprinzip/sublemacspro | jove.py | Python | bsd-3-clause | 60,708 |
#########################################################################
# Host.py
# 4.11.2014
# Author: A.T.
#########################################################################
""" Host - class for managing jobs on a host. Host objects are invoked
with LocalComputingElement or SSHComputingElement objects
"""
__RCSID__ = "$Id$"
import commands
import os
import glob
import shutil
import signal
import subprocess
import stat
import json
import multiprocessing
from datetime import datetime, timedelta
# Clean job info and output after so many days
CLEAN_DELAY = timedelta(7)
class Host(object):
    """Manage detached jobs on a host (used with Local/SSH computing elements).

    Jobs are started as detached processes via a generated helper script;
    per-job bookkeeping lives in small JSON ".info" files in InfoDir, keyed
    by the job stamp.
    """

    def __init__(self):
        # fall back to a single core if the count cannot be determined
        self.nCores = 1
        try:
            self.nCores = multiprocessing.cpu_count()
        except BaseException:
            pass

    def submitJob(self, **kwargs):
        """Launch NJobs detached processes running kwargs['Executable'].

        Returns a dict with 'Status' (0 on success, -1 on missing parameter,
        1 on launch failure), plus 'Jobs' (list of stamps) or 'Message'.
        """
        resultDict = {}
        args = dict(kwargs)
        MANDATORY_PARAMETERS = ['Executable', 'SharedDir', 'OutputDir', 'ErrorDir', 'WorkDir',
                                'InfoDir', 'ExecutionContext', 'JobStamps']
        for argument in MANDATORY_PARAMETERS:
            if argument not in args:
                resultDict['Status'] = -1
                resultDict['Message'] = 'No %s' % argument
                return resultDict

        nJobs = args.get('NJobs', 1)
        stamps = args['JobStamps']
        context = args.get('ExecutionContext', 'Local')
        # e.g. LOCAL_JOBID / SSH_JOBID, exported into the job's environment
        jobidName = context.upper() + '_JOBID'
        nCores = args.get('NCores', 1)

        # Prepare the executor command: a helper script that detaches the
        # executable ($1) with stdout/stderr redirected to $2/$3 and prints
        # the PID of the detached process.
        runFileName = os.path.join(args['SharedDir'], 'run_detached.sh')
        runFileName = os.path.expandvars(runFileName)
        if os.path.isfile(runFileName):
            os.unlink(runFileName)
        runFile = open(runFileName, 'w')
        runFile.write("""
( exec </dev/null
# echo $2
exec > $2
# echo $3
exec 2> $3
# echo $1
exec setsid $1
) &
kill -0 $! > /dev/null 2>&1 || exit 1
echo $!
exit 0
""")
        runFile.close()
        os.chmod(runFileName, stat.S_IXUSR | stat.S_IRUSR)

        jobs = []
        output = ''
        args['RunFile'] = runFileName
        for _i in range(int(nJobs)):
            args['Stamp'] = stamps[_i]
            # NOTE(review): this is a reference to os.environ, not a copy, so
            # the jobid variable also leaks into the parent environment --
            # confirm whether a copy (dict(os.environ)) was intended.
            envDict = os.environ
            envDict[jobidName] = stamps[_i]
            try:
                jobDir = '%(WorkDir)s/%(Stamp)s' % args
                jobDir = os.path.expandvars(jobDir)
                os.makedirs(jobDir)
                os.chdir(jobDir)
                popenObject = subprocess.Popen(
                    [
                        "%(RunFile)s %(Executable)s %(OutputDir)s/%(Stamp)s.out %(ErrorDir)s/%(Stamp)s.err" %
                        args],
                    stdout=subprocess.PIPE,
                    shell=True,
                    env=envDict)
                # the helper script prints the PID of the detached process
                pid = popenObject.communicate()[0]
            except OSError as x:
                output = str(x)
                break
            pid = int(pid)
            if pid:
                # Store the job info
                jobInfo = {'PID': pid,
                           'SubmissionTime': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
                           'JOBID': stamps[_i],
                           'NCores': nCores
                           }
                jobString = json.dumps(jobInfo)
                pidFileName = "%(InfoDir)s/%(Stamp)s.info" % args
                pidFileName = os.path.expandvars(pidFileName)
                pidFile = open(pidFileName, 'w')
                pidFile.write(jobString)
                pidFile.close()
                jobs.append(stamps[_i])
            else:
                break

        if jobs:
            resultDict['Status'] = 0
            resultDict['Jobs'] = jobs
        else:
            resultDict['Status'] = 1
            resultDict['Message'] = output
        return resultDict

    def __cleanJob(self, stamp, infoDir, workDir, outputDir=None, errorDir=None):
        # Remove every on-disk trace of a job: its work dir, .info file and
        # (optionally) its stdout/stderr files.
        jobDir = os.path.join(workDir, stamp)
        if os.path.isdir(jobDir):
            shutil.rmtree(jobDir)
        pidFile = os.path.join(infoDir, '%s.info' % stamp)
        if os.path.isfile(pidFile):
            os.unlink(pidFile)
        if outputDir:
            outFile = os.path.join(outputDir, '%s.out' % stamp)
            if os.path.isfile(outFile):
                os.unlink(outFile)
        if errorDir:
            errFile = os.path.join(errorDir, '%s.err' % stamp)
            if os.path.isfile(errFile):
                os.unlink(errFile)

    def __getJobInfo(self, infoDir, stamp):
        # Load the JSON job descriptor written at submission time; returns
        # an empty dict if the .info file no longer exists.
        jobInfo = {}
        infoFileName = os.path.join(infoDir, '%s.info' % stamp)
        infoFileName = os.path.expandvars(infoFileName)
        if os.path.exists(infoFileName):
            infoFile = open(infoFileName, 'r')
            jobInfo = infoFile.read().strip()
            infoFile.close()
            jobInfo = json.loads(jobInfo)
        return jobInfo

    def getCEStatus(self, **kwargs):
        """ Get the overall CE status

        Counts running jobs by probing each recorded PID with ps; jobs whose
        process is gone are cleaned up once older than CLEAN_DELAY.  Returns
        'Status', 'Running' and 'AvailableCores'.
        """
        resultDict = {'Running': 0, 'Waiting': 0}
        MANDATORY_PARAMETERS = ['InfoDir', 'WorkDir', 'OutputDir', 'ErrorDir', 'User']
        for argument in MANDATORY_PARAMETERS:
            if argument not in kwargs:
                resultDict['Status'] = -1
                resultDict['Message'] = 'No %s' % argument
                return resultDict

        user = kwargs.get('User')
        infoDir = kwargs.get('InfoDir')
        workDir = kwargs.get('WorkDir')
        outputDir = kwargs.get('OutputDir')
        errorDir = kwargs.get('ErrorDir')

        running = 0
        usedCores = 0
        infoDir = os.path.expandvars(infoDir)
        infoFiles = glob.glob('%s/*.info' % infoDir)
        for infoFileName in infoFiles:
            infoFileName = os.path.expandvars(infoFileName)
            infoFile = open(infoFileName, 'r')
            jobInfo = infoFile.read().strip()
            infoFile.close()
            jobInfo = json.loads(jobInfo)
            pid = jobInfo['PID']
            # a single output line means the process still exists
            cmd = 'ps -f -p %s --no-headers | wc -l' % pid
            status, output = commands.getstatusoutput(cmd)
            if status == 0:
                if output.strip() == '1':
                    running += 1
                    usedCores += jobInfo['NCores']
                else:
                    # process is gone: clean up artifacts once old enough
                    stamp = jobInfo['JOBID']
                    jobLife = datetime.utcnow() - datetime.strptime(jobInfo['SubmissionTime'], "%Y-%m-%d %H:%M:%S")
                    if jobLife > CLEAN_DELAY:
                        self.__cleanJob(stamp, infoDir, workDir, outputDir, errorDir)
            else:
                resultDict['Status'] = status
                return resultDict

        resultDict['Status'] = 0
        resultDict['Running'] = running
        availableCores = self.nCores - usedCores
        resultDict['AvailableCores'] = availableCores
        return resultDict

    def __checkPid(self, pid, user):
        # Classify a PID: "Running" if a matching process owned by `user`
        # exists, "Done" otherwise, "Unknown" when no PID was recorded.
        if pid == 0:
            return "Unknown"
        status, output = commands.getstatusoutput('ps -f -p %s | grep %s | wc -l' % (pid, user))
        if status == 0 and output.strip() == "1":
            return "Running"
        return "Done"

    def getJobStatus(self, **kwargs):
        """Return per-stamp status ("Running"/"Done"/"Unknown") for the jobs
        in kwargs['JobIDList']."""
        resultDict = {}
        MANDATORY_PARAMETERS = ['InfoDir', 'JobIDList', 'User']
        for argument in MANDATORY_PARAMETERS:
            if argument not in kwargs:
                resultDict['Status'] = -1
                resultDict['Message'] = 'No %s' % argument
                return resultDict

        user = kwargs.get('User')
        infoDir = kwargs.get('InfoDir')
        jobStamps = kwargs.get('JobIDList')

        jobDict = {}
        for stamp in jobStamps:
            pid = self.__getJobInfo(infoDir, stamp).get('PID', 0)
            jobDict[stamp] = self.__checkPid(pid, user)

        resultDict['Status'] = 0
        resultDict['Jobs'] = jobDict
        return resultDict

    def killJob(self, **kwargs):
        """SIGKILL every still-running job in kwargs['JobIDList'] and clean
        up its on-disk artifacts."""
        resultDict = {}
        MANDATORY_PARAMETERS = ['InfoDir', 'WorkDir', 'OutputDir',
                                'ErrorDir', 'JobIDList', 'User']
        for argument in MANDATORY_PARAMETERS:
            if argument not in kwargs:
                resultDict['Status'] = -1
                resultDict['Message'] = 'No %s' % argument
                return resultDict

        user = kwargs.get('User')
        infoDir = kwargs.get('InfoDir')
        workDir = kwargs.get('WorkDir')
        outputDir = kwargs.get('OutputDir')
        errorDir = kwargs.get('ErrorDir')
        jobStamps = kwargs.get('JobIDList')

        jobDict = {}
        for stamp in jobStamps:
            pid = self.__getJobInfo(infoDir, stamp).get('PID', 0)
            if self.__checkPid(pid, user) == 'Running':
                os.kill(pid, signal.SIGKILL)
                self.__cleanJob(stamp, infoDir, workDir, outputDir, errorDir)
                jobDict[stamp] = 'Killed'
            else:
                jobDict[stamp] = 'Done'

        resultDict['Status'] = 0
        resultDict['Successful'] = jobStamps
        resultDict['Failed'] = []
        resultDict['Jobs'] = jobDict
        return resultDict
| arrabito/DIRAC | Resources/Computing/BatchSystems/Host.py | Python | gpl-3.0 | 8,010 |
# -*- coding: utf-8 -*-
# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# Copyright 2008, Benjamin Kampmann <[email protected]>
"""
Another simple rss based Media Server, this time for TED.com content
"""
# I can reuse stuff. cool. But that also means we might want to refactor it into
# a base class to reuse
from coherence.backend import BackendItem
from coherence.backend import BackendStore
from coherence.backends.appletrailers_storage import Container
from coherence.backends.lolcats_storage import LolcatsStore
from coherence.upnp.core import DIDLLite
class TedTalk(BackendItem):
    """A single TED talk exposed as a UPnP video item."""

    def __init__(self, parent_id, id, title=None, url=None,
                 duration=None, size=None):
        self.parentid = parent_id
        self.update_id = 0
        self.id = id
        self.location = url
        self.name = title

        video_item = DIDLLite.VideoItem(id, parent_id, self.name)
        # FIXME should be video/x-m4a
        resource = DIDLLite.Resource(self.location, 'http-get:*:video/mp4:*')
        resource.size = size
        resource.duration = duration
        video_item.res.append(resource)
        self.item = video_item
class TEDStore(LolcatsStore):
    """Media server backend exposing the TED talks RSS feed via UPnP.

    Reuses the LolcatsStore fetch/refresh machinery; only parse_data is
    feed-specific.
    """

    implements = ['MediaServer']

    rss_url = "http://feeds.feedburner.com/tedtalks_video?format=xml"

    # id of the single root container; videos get ids starting at 1001
    ROOT_ID = 0

    def __init__(self, server, *args, **kwargs):
        BackendStore.__init__(self,server,**kwargs)

        self.name = kwargs.get('name', 'TEDtalks')
        # config value is in hours, stored in seconds
        self.refresh = int(kwargs.get('refresh', 1)) * (60 *60)

        self.next_id = 1001
        self.last_updated = None

        self.container = Container(None, self.ROOT_ID, self.name)
        self.videos = {}

        dfr = self.update_data()
        dfr.addCallback(self.init_completed)

    def get_by_id(self, id):
        # id 0 is the root container; everything else is a video item
        if int(id) == self.ROOT_ID:
            return self.container
        return self.videos.get(int(id), None)

    def upnp_init(self):
        # advertise the protocol we serve
        if self.server:
            self.server.connection_manager_server.set_variable( \
                0, 'SourceProtocolInfo', ['http-get:*:video/mp4:*'])

    def parse_data(self, xml_data):
        """Rebuild the container's children from a freshly fetched feed.

        Skips the rebuild when the feed's lastBuildDate is unchanged since
        the previous fetch.
        """
        root = xml_data.getroot()

        pub_date = root.find('./channel/lastBuildDate').text

        if pub_date == self.last_updated:
            return

        self.last_updated = pub_date

        self.container.children = []
        self.videos = {}

        # FIXME: move these to generic constants somewhere
        mrss = './{http://search.yahoo.com/mrss/}'
        itunes = './{http://www.itunes.com/dtds/podcast-1.0.dtd}'

        url_item = mrss + 'content'
        duration = itunes + 'duration'
        summary = itunes + 'summary'

        for item in root.findall('./channel/item'):
            data = {}
            data['parent_id'] = self.ROOT_ID
            data['id'] = self.next_id
            data['title'] = item.find('./title').text.replace('TEDTalks : ', '')
            # data ['summary'] = item.find(summary).text
            # data ['duration'] = item.find(duration).text

            try:
                media_entry = item.find(url_item)
                data['url'] = media_entry.get('url', None)
                data['size'] = media_entry.get('size', None)
            except IndexError:
                # entry without a media:content element
                continue

            video = TedTalk(**data)

            self.container.children.append(video)
            self.videos[self.next_id] = video

            self.next_id += 1

        self.container.update_id += 1
        self.update_id += 1

        # notify connected control points of the change
        if self.server:
            self.server.content_directory_server.set_variable(0, 'SystemUpdateID', self.update_id)
            value = (self.ROOT_ID,self.container.update_id)
            self.server.content_directory_server.set_variable(0, 'ContainerUpdateIDs', value)
| sreichholf/python-coherence | coherence/backends/ted_storage.py | Python | mit | 3,722 |
from foam.sfa.util.enumeration import Enum
# recognized top level rspec elements; RSpecElement (below) validates
# membership with the "in" operator
RSpecElements = Enum(
    AVAILABLE='AVAILABLE',
    BWLIMIT='BWLIMIT',
    EXECUTE='EXECUTE',
    NETWORK='NETWORK',
    COMPONENT_MANAGER='COMPONENT_MANAGER',
    HARDWARE_TYPE='HARDWARE_TYPE',
    INSTALL='INSTALL',
    INTERFACE='INTERFACE',
    INTERFACE_REF='INTERFACE_REF',
    LOCATION='LOCATION',
    LOGIN='LOGIN',
    LINK='LINK',
    LINK_TYPE='LINK_TYPE',
    NODE='NODE',
    PROPERTY='PROPERTY',
    SERVICES='SERVICES',
    SLIVER='SLIVER',
    SLIVER_TYPE='SLIVER_TYPE',
    LEASE='LEASE',
    GRANULARITY='GRANULARITY',
    SPECTRUM='SPECTRUM',
    CHANNEL='CHANNEL',
    POSITION_3D ='POSITION_3D',
)
class RSpecElement:
    """Associates a recognized RSpec element type with its path."""

    def __init__(self, element_type, path):
        # NOTE(review): InvalidRSpecElement is neither defined nor imported
        # in this module, so raising it would itself fail with a NameError --
        # confirm where it is expected to come from.
        if element_type not in RSpecElements:
            raise InvalidRSpecElement(element_type)
        self.type = element_type
        self.path = path
| dana-i2cat/felix | ofam/src/src/foam/sfa/rspecs/rspec_elements.py | Python | apache-2.0 | 935 |
# Copyright 2004 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import os
import types
import pprint
import warnings
from Synopsis import AST
from pygccxml import utils
from pygccxml.declarations import *
class scanner_t( AST.Visitor, object ):
    """Synopsis AST visitor that mirrors visited declarations into a
    pygccxml declarations tree rooted at a global ('::') namespace.

    NOTE: this is Python 2 code (print statement in visitEnum).
    """

    def __init__(self, ast, decl_factory ):
        self.logger = utils.loggers.cxx_parser
        self.ast = ast

        assert isinstance( decl_factory, decl_factory_t )
        self.__decl_factory = decl_factory

        #mapping from id -> decl
        self.__decl = self.__decl_factory.create_namespace( name='::' )
        self.global_ns = self.__decl

    def read_deaclaration( self, node, decl ):
        # (method name typo "deaclaration" kept: visitEnum calls it by this name)
        #this function should not be called for namespace
        decl.name = node.name()
        decl.location = location_t( file_name=node.file(), line=node.line() )

    def visitModule( self, node ):
        # create a nested namespace and make it the current insertion point
        ns = self.__decl_factory.create_namespace( name=node.name() )
        self.__decl.adopt_declaration( ns )
        self.__decl = ns
        super( scanner_t, self ).visitModule( node )

    def visitEnum( self, node ):
        values = []
        for enumerator in node.enumerators():
            # debug trace of each enumerator (Python 2 print statement)
            print enumerator.name(), ':', enumerator.value()
            values.append( ( enumerator.name(), enumerator.value() ) )
        enum = self.__decl_factory.create_enumeration( values=values )
        self.read_deaclaration( node, enum )
        self.__decl.adopt_declaration( enum )
        super( scanner_t, self ).visitEnum( node )
| jgresula/jagpdf | code/tools/external/python/pygccxml/parser/synopsis_scanner.py | Python | mit | 1,649 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from django.db import connection
from django.test import override_settings
from sqlalchemy.sql import (
and_, select, column, table,
)
from sqlalchemy.sql import compiler # type: ignore
from zerver.models import (
Realm, Recipient, Stream, Subscription, UserProfile, Attachment,
get_display_recipient, get_recipient, get_realm, get_stream, get_user_profile_by_email,
Reaction
)
from zerver.lib.message import (
MessageDict,
)
from zerver.lib.narrow import (
build_narrow_filter,
)
from zerver.lib.str_utils import force_bytes
from zerver.lib.sqlalchemy_utils import get_sqlalchemy_connection
from zerver.lib.test_helpers import (
POSTRequestMock,
TestCase,
get_user_messages, message_ids, queries_captured,
)
from zerver.lib.test_classes import (
ZulipTestCase,
)
from zerver.views.messages import (
exclude_muting_conditions,
get_old_messages_backend, ok_to_include_history,
NarrowBuilder, BadNarrowOperator, Query,
LARGER_THAN_MAX_MESSAGE_ID,
)
from typing import Mapping, Sequence, Tuple, Generic, Union, Any, Text
from six.moves import range
import os
import re
import ujson
def get_sqlalchemy_query_params(query):
    # type: (Text) -> Dict[Text, Text]
    """Compile `query` for our SQLAlchemy dialect and return its bind parameters."""
    dialect = get_sqlalchemy_connection().dialect  # type: ignore
    compiled = compiler.SQLCompiler(dialect, query)
    return compiled.params
def fix_ws(s):
    # type: (Text) -> Text
    """Collapse every run of whitespace in `s` to a single space and strip
    the ends (used to normalize generated SQL for comparison)."""
    # raw string fixes the invalid '\s' escape in a plain string literal
    return re.sub(r'\s+', ' ', str(s)).strip()
def get_recipient_id_for_stream_name(realm, stream_name):
    # type: (Realm, Text) -> Text
    """Look up the Recipient id for the stream named `stream_name` in `realm`."""
    stream = get_stream(stream_name, realm)
    recipient = get_recipient(Recipient.STREAM, stream.id)
    return recipient.id
def mute_stream(realm, user_profile, stream_name):
    # type: (Realm, Text, Text) -> None
    """Mark `user_profile`'s subscription to `stream_name` as muted
    (removed from the home view)."""
    stream = get_stream(stream_name, realm)
    recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
    sub = Subscription.objects.get(recipient=recipient, user_profile=user_profile)
    sub.in_home_view = False
    sub.save()
class NarrowBuilderTest(ZulipTestCase):
def setUp(self):
# type: () -> None
self.realm = get_realm('zulip')
self.user_profile = get_user_profile_by_email("[email protected]")
self.builder = NarrowBuilder(self.user_profile, column('id'))
self.raw_query = select([column("id")], None, table("zerver_message"))
def test_add_term_using_not_defined_operator(self):
# type: () -> None
term = dict(operator='not-defined', operand='any')
self.assertRaises(BadNarrowOperator, self._build_query, term)
def test_add_term_using_stream_operator(self):
# type: () -> None
term = dict(operator='stream', operand='Scotland')
self._do_add_term_test(term, 'WHERE recipient_id = :recipient_id_1')
def test_add_term_using_stream_operator_zephyr_hack_null(self):
# type: () -> None
"""This tests the _pg_re_escape code path with a null byte"""
null_stream_name = u'fun\u0000test'
s = Stream.objects.create(realm=get_realm("mit"), name=null_stream_name)
Recipient.objects.create(type=Recipient.STREAM, type_id=s.id)
user_profile = get_user_profile_by_email("[email protected]")
builder = NarrowBuilder(user_profile, column('id'))
term = dict(operator='stream', operand=null_stream_name)
result = str(builder.add_term(self.raw_query, term))
self.assertTrue("WHERE recipient_id IN (:recipient_id_1)" in result)
def test_add_term_using_stream_operator_and_negated(self): # NEGATED
# type: () -> None
term = dict(operator='stream', operand='Scotland', negated=True)
self._do_add_term_test(term, 'WHERE recipient_id != :recipient_id_1')
def test_add_term_using_stream_operator_and_non_existing_operand_should_raise_error(self): # NEGATED
# type: () -> None
term = dict(operator='stream', operand='NonExistingStream')
self.assertRaises(BadNarrowOperator, self._build_query, term)
def test_add_term_using_is_operator_and_private_operand(self):
# type: () -> None
term = dict(operator='is', operand='private')
self._do_add_term_test(term, 'WHERE type = :type_1 OR type = :type_2')
def test_add_term_using_is_operator_private_operand_and_negated(self): # NEGATED
# type: () -> None
term = dict(operator='is', operand='private', negated=True)
self._do_add_term_test(term, 'WHERE NOT (type = :type_1 OR type = :type_2)')
def test_add_term_using_is_operator_and_non_private_operand(self):
# type: () -> None
for operand in ['starred', 'mentioned', 'alerted']:
term = dict(operator='is', operand=operand)
self._do_add_term_test(term, 'WHERE (flags & :flags_1) != :param_1')
def test_add_term_using_is_operator_non_private_operand_and_negated(self): # NEGATED
# type: () -> None
for operand in ['starred', 'mentioned', 'alerted']:
term = dict(operator='is', operand=operand, negated=True)
self._do_add_term_test(term, 'WHERE (flags & :flags_1) = :param_1')
def test_add_term_using_non_supported_operator_should_raise_error(self):
# type: () -> None
term = dict(operator='is', operand='non_supported')
self.assertRaises(BadNarrowOperator, self._build_query, term)
def test_add_term_using_topic_operator_and_lunch_operand(self):
# type: () -> None
term = dict(operator='topic', operand='lunch')
self._do_add_term_test(term, 'WHERE upper(subject) = upper(:param_1)')
def test_add_term_using_topic_operator_lunch_operand_and_negated(self): # NEGATED
# type: () -> None
term = dict(operator='topic', operand='lunch', negated=True)
self._do_add_term_test(term, 'WHERE upper(subject) != upper(:param_1)')
def test_add_term_using_topic_operator_and_personal_operand(self):
# type: () -> None
term = dict(operator='topic', operand='personal')
self._do_add_term_test(term, 'WHERE upper(subject) = upper(:param_1)')
def test_add_term_using_topic_operator_personal_operand_and_negated(self): # NEGATED
# type: () -> None
term = dict(operator='topic', operand='personal', negated=True)
self._do_add_term_test(term, 'WHERE upper(subject) != upper(:param_1)')
def test_add_term_using_sender_operator(self):
# type: () -> None
term = dict(operator='sender', operand='[email protected]')
self._do_add_term_test(term, 'WHERE sender_id = :param_1')
def test_add_term_using_sender_operator_and_negated(self): # NEGATED
# type: () -> None
term = dict(operator='sender', operand='[email protected]', negated=True)
self._do_add_term_test(term, 'WHERE sender_id != :param_1')
def test_add_term_using_sender_operator_with_non_existing_user_as_operand(self): # NEGATED
# type: () -> None
term = dict(operator='sender', operand='[email protected]')
self.assertRaises(BadNarrowOperator, self._build_query, term)
    # Narrows by pm-with: a single other user compiles to a two-way
    # (sender, recipient) disjunction; narrowing to yourself collapses to
    # one conjunct; multiple emails compile to a huddle recipient check;
    # any unknown email raises BadNarrowOperator.
    def test_add_term_using_pm_with_operator_and_not_the_same_user_as_operand(self):
        # type: () -> None
        term = dict(operator='pm-with', operand='[email protected]')
        self._do_add_term_test(term, 'WHERE sender_id = :sender_id_1 AND recipient_id = :recipient_id_1 OR sender_id = :sender_id_2 AND recipient_id = :recipient_id_2')
    def test_add_term_using_pm_with_operator_not_the_same_user_as_operand_and_negated(self): # NEGATED
        # type: () -> None
        term = dict(operator='pm-with', operand='[email protected]', negated=True)
        self._do_add_term_test(term, 'WHERE NOT (sender_id = :sender_id_1 AND recipient_id = :recipient_id_1 OR sender_id = :sender_id_2 AND recipient_id = :recipient_id_2)')
    def test_add_term_using_pm_with_operator_the_same_user_as_operand(self):
        # type: () -> None
        term = dict(operator='pm-with', operand='[email protected]')
        self._do_add_term_test(term, 'WHERE sender_id = :sender_id_1 AND recipient_id = :recipient_id_1')
    def test_add_term_using_pm_with_operator_the_same_user_as_operand_and_negated(self): # NEGATED
        # type: () -> None
        term = dict(operator='pm-with', operand='[email protected]', negated=True)
        self._do_add_term_test(term, 'WHERE NOT (sender_id = :sender_id_1 AND recipient_id = :recipient_id_1)')
    def test_add_term_using_pm_with_operator_and_more_than_user_as_operand(self):
        # type: () -> None
        term = dict(operator='pm-with', operand='[email protected], [email protected]')
        self._do_add_term_test(term, 'WHERE recipient_id = :recipient_id_1')
    def test_add_term_using_pm_with_operator_more_than_user_as_operand_and_negated(self): # NEGATED
        # type: () -> None
        term = dict(operator='pm-with', operand='[email protected], [email protected]', negated=True)
        self._do_add_term_test(term, 'WHERE recipient_id != :recipient_id_1')
    def test_add_term_using_pm_with_operator_with_non_existing_user_as_operand(self):
        # type: () -> None
        term = dict(operator='pm-with', operand='[email protected]')
        self.assertRaises(BadNarrowOperator, self._build_query, term)
    def test_add_term_using_pm_with_operator_with_existing_and_non_existing_user_as_operand(self):
        # type: () -> None
        term = dict(operator='pm-with', operand='[email protected],[email protected]')
        self.assertRaises(BadNarrowOperator, self._build_query, term)
    # Narrows by message id compile to (in)equality checks on id.
    def test_add_term_using_id_operator(self):
        # type: () -> None
        term = dict(operator='id', operand=555)
        self._do_add_term_test(term, 'WHERE id = :param_1')
    def test_add_term_using_id_operator_and_negated(self): # NEGATED
        # type: () -> None
        term = dict(operator='id', operand=555, negated=True)
        self._do_add_term_test(term, 'WHERE id != :param_1')
    # Narrows by full-text search. The generated SQL depends on the
    # backend: Postgres tsearch (USING_PGROONGA=False) combines LIKE
    # prefilters with a tsvector match, while PGroonga uses its own
    # @@ operator.
    @override_settings(USING_PGROONGA=False)
    def test_add_term_using_search_operator(self):
        # type: () -> None
        term = dict(operator='search', operand='"french fries"')
        self._do_add_term_test(term, 'WHERE (lower(content) LIKE lower(:content_1) OR lower(subject) LIKE lower(:subject_1)) AND (search_tsvector @@ plainto_tsquery(:param_2, :param_3))')
    @override_settings(USING_PGROONGA=False)
    def test_add_term_using_search_operator_and_negated(self): # NEGATED
        # type: () -> None
        term = dict(operator='search', operand='"french fries"', negated=True)
        self._do_add_term_test(term, 'WHERE NOT (lower(content) LIKE lower(:content_1) OR lower(subject) LIKE lower(:subject_1)) AND NOT (search_tsvector @@ plainto_tsquery(:param_2, :param_3))')
    @override_settings(USING_PGROONGA=True)
    def test_add_term_using_search_operator_pgroonga(self):
        # type: () -> None
        term = dict(operator='search', operand='"french fries"')
        self._do_add_term_test(term, 'WHERE search_pgroonga @@ :search_pgroonga_1')
    @override_settings(USING_PGROONGA=True)
    def test_add_term_using_search_operator_and_negated_pgroonga(self): # NEGATED
        # type: () -> None
        term = dict(operator='search', operand='"french fries"', negated=True)
        self._do_add_term_test(term, 'WHERE NOT (search_pgroonga @@ :search_pgroonga_1)')
    # Narrows by 'has': each supported operand maps to a boolean column
    # (has_attachment/has_image/has_link); anything else is rejected.
    def test_add_term_using_has_operator_and_attachment_operand(self):
        # type: () -> None
        term = dict(operator='has', operand='attachment')
        self._do_add_term_test(term, 'WHERE has_attachment')
    def test_add_term_using_has_operator_attachment_operand_and_negated(self): # NEGATED
        # type: () -> None
        term = dict(operator='has', operand='attachment', negated=True)
        self._do_add_term_test(term, 'WHERE NOT has_attachment')
    def test_add_term_using_has_operator_and_image_operand(self):
        # type: () -> None
        term = dict(operator='has', operand='image')
        self._do_add_term_test(term, 'WHERE has_image')
    def test_add_term_using_has_operator_image_operand_and_negated(self): # NEGATED
        # type: () -> None
        term = dict(operator='has', operand='image', negated=True)
        self._do_add_term_test(term, 'WHERE NOT has_image')
    def test_add_term_using_has_operator_and_link_operand(self):
        # type: () -> None
        term = dict(operator='has', operand='link')
        self._do_add_term_test(term, 'WHERE has_link')
    def test_add_term_using_has_operator_link_operand_and_negated(self): # NEGATED
        # type: () -> None
        term = dict(operator='has', operand='link', negated=True)
        self._do_add_term_test(term, 'WHERE NOT has_link')
    def test_add_term_using_has_operator_non_supported_operand_should_raise_error(self):
        # type: () -> None
        term = dict(operator='has', operand='non_supported')
        self.assertRaises(BadNarrowOperator, self._build_query, term)
    # Narrows by 'in': 'home' excludes muted streams (hence muting Verona
    # first), 'all' adds no WHERE clause at all, and unknown operands are
    # rejected. Negation is deliberately ignored for this operator.
    def test_add_term_using_in_operator(self):
        # type: () -> None
        mute_stream(self.realm, self.user_profile, 'Verona')
        term = dict(operator='in', operand='home')
        self._do_add_term_test(term, 'WHERE recipient_id NOT IN (:recipient_id_1)')
    def test_add_term_using_in_operator_and_negated(self):
        # type: () -> None
        # negated = True should not change anything
        mute_stream(self.realm, self.user_profile, 'Verona')
        term = dict(operator='in', operand='home', negated=True)
        self._do_add_term_test(term, 'WHERE recipient_id NOT IN (:recipient_id_1)')
    def test_add_term_using_in_operator_and_all_operand(self):
        # type: () -> None
        mute_stream(self.realm, self.user_profile, 'Verona')
        term = dict(operator='in', operand='all')
        query = self._build_query(term)
        self.assertEqual(str(query), 'SELECT id \nFROM zerver_message')
    def test_add_term_using_in_operator_all_operand_and_negated(self):
        # type: () -> None
        # negated = True should not change anything
        mute_stream(self.realm, self.user_profile, 'Verona')
        term = dict(operator='in', operand='all', negated=True)
        query = self._build_query(term)
        self.assertEqual(str(query), 'SELECT id \nFROM zerver_message')
    def test_add_term_using_in_operator_and_not_defined_operand(self):
        # type: () -> None
        term = dict(operator='in', operand='not_defined')
        self.assertRaises(BadNarrowOperator, self._build_query, term)
def test_add_term_using_near_operator(self):
# type: () -> None
term = dict(operator='near', operand='operand')
query = self._build_query(term)
self.assertEqual(str(query), 'SELECT id \nFROM zerver_message')
    def _do_add_term_test(self, term, where_clause):
        # type: (Dict[str, Any], Text) -> None
        # Assert that compiling `term` onto the base query yields SQL
        # containing the expected WHERE fragment.
        self.assertTrue(where_clause in str(self._build_query(term)))
    def _build_query(self, term):
        # type: (Dict[str, Any]) -> Query
        # Apply a single narrow term to the raw base query.
        return self.builder.add_term(self.raw_query, term)
class BuildNarrowFilterTest(TestCase):
    """Exercise build_narrow_filter() against the shared narrow fixtures.

    Each fixture scenario lists a narrow plus events the resulting
    filter must accept and events it must reject.
    """
    def test_build_narrow_filter(self):
        # type: () -> None
        fixtures_path = os.path.join(os.path.dirname(__file__),
                                     '../fixtures/narrow.json')
        # Use a context manager so the fixture file handle is closed
        # promptly instead of being leaked until garbage collection.
        with open(fixtures_path, 'r') as f:
            scenarios = ujson.loads(f.read())
        # Guard against the fixture file silently gaining or losing scenarios.
        self.assertTrue(len(scenarios) == 8)
        for scenario in scenarios:
            narrow = scenario['narrow']
            accept_events = scenario['accept_events']
            reject_events = scenario['reject_events']
            narrow_filter = build_narrow_filter(narrow)
            for e in accept_events:
                self.assertTrue(narrow_filter(e))
            for e in reject_events:
                self.assertFalse(narrow_filter(e))
class IncludeHistoryTest(ZulipTestCase):
    """Check which narrows are allowed to include full stream history
    (messages the requesting user never personally received)."""
    def test_ok_to_include_history(self):
        # type: () -> None
        realm = get_realm('zulip')
        self.make_stream('public_stream', realm=realm)
        # Negated stream searches should not include history.
        narrow = [
            dict(operator='stream', operand='public_stream', negated=True),
        ]
        self.assertFalse(ok_to_include_history(narrow, realm))
        # Definitely forbid seeing history on private streams.
        narrow = [
            dict(operator='stream', operand='private_stream'),
        ]
        self.assertFalse(ok_to_include_history(narrow, realm))
        # History doesn't apply to PMs.
        narrow = [
            dict(operator='is', operand='private'),
        ]
        self.assertFalse(ok_to_include_history(narrow, realm))
        # If we are looking for something like starred messages, there is
        # no point in searching historical messages.
        narrow = [
            dict(operator='stream', operand='public_stream'),
            dict(operator='is', operand='starred'),
        ]
        self.assertFalse(ok_to_include_history(narrow, realm))
        # simple True case
        narrow = [
            dict(operator='stream', operand='public_stream'),
        ]
        self.assertTrue(ok_to_include_history(narrow, realm))
        # Additional topic/search terms on a public stream keep history OK.
        narrow = [
            dict(operator='stream', operand='public_stream'),
            dict(operator='topic', operand='whatever'),
            dict(operator='search', operand='needle in haystack'),
        ]
        self.assertTrue(ok_to_include_history(narrow, realm))
class GetOldMessagesTest(ZulipTestCase):
    def get_and_check_messages(self, modified_params):
        # type: (Dict[str, Union[str, int]]) -> Dict[str, Dict]
        # Fetch /json/messages with default anchor/num_before/num_after
        # merged with modified_params, sanity-check the payload shape,
        # and return the parsed JSON result.
        post_params = {"anchor": 1, "num_before": 1, "num_after": 1} # type: Dict[str, Union[str, int]]
        post_params.update(modified_params)
        payload = self.client_get("/json/messages", dict(post_params))
        self.assert_json_success(payload)
        result = ujson.loads(payload.content)
        self.assertIn("messages", result)
        self.assertIsInstance(result["messages"], list)
        # Every returned message must carry the full set of client-facing
        # fields.
        for message in result["messages"]:
            for field in ("content", "content_type", "display_recipient",
                          "avatar_url", "recipient_id", "sender_full_name",
                          "sender_short_name", "timestamp", "reactions"):
                self.assertIn(field, message)
            # TODO: deprecate soon in favor of avatar_url
            self.assertIn('gravatar_hash', message)
        return result
def get_query_ids(self):
# type: () -> Dict[Text, int]
hamlet_user = get_user_profile_by_email('[email protected]')
othello_user = get_user_profile_by_email('[email protected]')
query_ids = {} # type: Dict[Text, int]
scotland_stream = get_stream('Scotland', hamlet_user.realm)
query_ids['scotland_recipient'] = get_recipient(Recipient.STREAM, scotland_stream.id).id
query_ids['hamlet_id'] = hamlet_user.id
query_ids['othello_id'] = othello_user.id
query_ids['hamlet_recipient'] = get_recipient(Recipient.PERSONAL, hamlet_user.id).id
query_ids['othello_recipient'] = get_recipient(Recipient.PERSONAL, othello_user.id).id
return query_ids
def test_successful_get_old_messages_reaction(self):
# type: () -> None
"""
Test old `/json/messages` returns reactions.
"""
self.login("[email protected]")
messages = self.get_and_check_messages(dict())
message_id = messages['messages'][0]['id']
self.login("[email protected]")
reaction_name = 'simple_smile'
url = '/json/messages/{}/emoji_reactions/{}'.format(message_id, reaction_name)
payload = self.client_put(url)
self.assert_json_success(payload)
self.login("[email protected]")
messages = self.get_and_check_messages({})
message_to_assert = None
for message in messages['messages']:
if message['id'] == message_id:
message_to_assert = message
break
self.assertEqual(len(message_to_assert['reactions']), 1)
self.assertEqual(message_to_assert['reactions'][0]['emoji_name'],
reaction_name)
    def test_successful_get_old_messages(self):
        # type: () -> None
        """
        A call to GET /json/messages with valid parameters returns a list of
        messages.
        """
        self.login("[email protected]")
        self.get_and_check_messages(dict())
        # We have to support the legacy tuple style while there are old
        # clients around, which might include third party home-grown bots.
        self.get_and_check_messages(dict(narrow=ujson.dumps([['pm-with', '[email protected]']])))
        self.get_and_check_messages(dict(narrow=ujson.dumps([dict(operator='pm-with', operand='[email protected]')])))
    def test_get_old_messages_with_narrow_pm_with(self):
        # type: () -> None
        """
        A request for old messages with a narrow by pm-with only returns
        conversations with that user.
        """
        me = '[email protected]'
        def dr_emails(dr):
            # type: (Union[Text, List[Dict[str, Any]]]) -> Text
            # Canonical comma-joined, sorted set of participant emails
            # (always including the requesting user).
            assert isinstance(dr, list)
            return ','.join(sorted(set([r['email'] for r in dr] + [me])))
        personals = [m for m in get_user_messages(get_user_profile_by_email(me))
                     if m.recipient.type == Recipient.PERSONAL or
                     m.recipient.type == Recipient.HUDDLE]
        if not personals:
            # FIXME: This is bad. We should use test data that is guaranteed
            # to contain some personals for every user. See #617.
            return
        emails = dr_emails(get_display_recipient(personals[0].recipient))
        self.login(me)
        narrow = [dict(operator='pm-with', operand=emails)]
        result = self.get_and_check_messages(dict(narrow=ujson.dumps(narrow)))
        # Every returned message must be part of the narrowed conversation.
        for message in result["messages"]:
            self.assertEqual(dr_emails(message['display_recipient']), emails)
    def test_get_old_messages_with_narrow_stream(self):
        # type: () -> None
        """
        A request for old messages with a narrow by stream only returns
        messages for that stream.
        """
        self.login("[email protected]")
        # We need to subscribe to a stream and then send a message to
        # it to ensure that we actually have a stream message in this
        # narrow view.
        self.subscribe_to_stream("[email protected]", 'Scotland')
        self.send_message("[email protected]", "Scotland", Recipient.STREAM)
        messages = get_user_messages(get_user_profile_by_email("[email protected]"))
        stream_messages = [msg for msg in messages if msg.recipient.type == Recipient.STREAM]
        stream_name = get_display_recipient(stream_messages[0].recipient)
        stream_id = stream_messages[0].recipient.id
        narrow = [dict(operator='stream', operand=stream_name)]
        result = self.get_and_check_messages(dict(narrow=ujson.dumps(narrow)))
        for message in result["messages"]:
            self.assertEqual(message["type"], "stream")
            self.assertEqual(message["recipient_id"], stream_id)
    def test_get_old_messages_with_narrow_stream_mit_unicode_regex(self):
        # type: () -> None
        """
        A request for old messages for a user in the mit.edu realm with unicode
        stream name should be correctly escaped in the database query.
        """
        self.login("[email protected]")
        # We need to subscribe to a stream and then send a message to
        # it to ensure that we actually have a stream message in this
        # narrow view.
        lambda_stream_name = u"\u03bb-stream"
        self.subscribe_to_stream("[email protected]", lambda_stream_name)
        # The ".d" variant must NOT match the plain stream narrow below.
        lambda_stream_d_name = u"\u03bb-stream.d"
        self.subscribe_to_stream("[email protected]", lambda_stream_d_name)
        self.send_message("[email protected]", u"\u03bb-stream", Recipient.STREAM)
        self.send_message("[email protected]", u"\u03bb-stream.d", Recipient.STREAM)
        narrow = [dict(operator='stream', operand=u'\u03bb-stream')]
        result = self.get_and_check_messages(dict(num_after=2,
                                                  narrow=ujson.dumps(narrow)))
        messages = get_user_messages(get_user_profile_by_email("[email protected]"))
        stream_messages = [msg for msg in messages if msg.recipient.type == Recipient.STREAM]
        self.assertEqual(len(result["messages"]), 2)
        for i, message in enumerate(result["messages"]):
            self.assertEqual(message["type"], "stream")
            stream_id = stream_messages[i].recipient.id
            self.assertEqual(message["recipient_id"], stream_id)
    def test_get_old_messages_with_narrow_stream_mit_unicode_null_regex(self):
        # type: () -> None
        # A NUL byte embedded in a stream name must be escaped correctly
        # in the generated database query.
        self.login("[email protected]")
        null_stream_name = u"foo\000-stream"
        self.subscribe_to_stream("[email protected]", null_stream_name)
        self.send_message("[email protected]", null_stream_name, Recipient.STREAM)
        narrow = [dict(operator='stream', operand=null_stream_name)]
        result = self.get_and_check_messages(dict(num_before=2, num_after=2,
                                                  narrow=ujson.dumps(narrow)))
        messages = get_user_messages(get_user_profile_by_email("[email protected]"))
        stream_messages = [msg for msg in messages if msg.recipient.type == Recipient.STREAM]
        self.assertEqual(len(result["messages"]), 1)
        self.assertEqual(result["messages"][0]["type"], "stream")
        self.assertEqual(result["messages"][0]["recipient_id"], stream_messages[0].recipient.id)
    def test_get_old_messages_with_narrow_topic_mit_unicode_regex(self):
        # type: () -> None
        """
        A request for old messages for a user in the mit.edu realm with unicode
        topic name should be correctly escaped in the database query.
        """
        self.login("[email protected]")
        # We need to subscribe to a stream and then send a message to
        # it to ensure that we actually have a stream message in this
        # narrow view.
        self.subscribe_to_stream("[email protected]", "Scotland")
        # MIT-realm topic narrows group ".d" suffixed topics together, so
        # all five variants below should match the plain topic narrow.
        self.send_message("[email protected]", "Scotland", Recipient.STREAM,
                          subject=u"\u03bb-topic")
        self.send_message("[email protected]", "Scotland", Recipient.STREAM,
                          subject=u"\u03bb-topic.d")
        self.send_message("[email protected]", "Scotland", Recipient.STREAM,
                          subject=u"\u03bb-topic.d.d")
        self.send_message("[email protected]", "Scotland", Recipient.STREAM,
                          subject=u"\u03bb-topic.d.d.d")
        self.send_message("[email protected]", "Scotland", Recipient.STREAM,
                          subject=u"\u03bb-topic.d.d.d.d")
        narrow = [dict(operator='topic', operand=u'\u03bb-topic')]
        result = self.get_and_check_messages(dict(
            num_after=100,
            narrow=ujson.dumps(narrow)))
        messages = get_user_messages(get_user_profile_by_email("[email protected]"))
        stream_messages = [msg for msg in messages if msg.recipient.type == Recipient.STREAM]
        self.assertEqual(len(result["messages"]), 5)
        for i, message in enumerate(result["messages"]):
            self.assertEqual(message["type"], "stream")
            stream_id = stream_messages[i].recipient.id
            self.assertEqual(message["recipient_id"], stream_id)
    def test_get_old_messages_with_narrow_topic_mit_personal(self):
        # type: () -> None
        """
        We handle .d grouping for MIT realm personal messages correctly.
        """
        self.login("[email protected]")
        # We need to subscribe to a stream and then send a message to
        # it to ensure that we actually have a stream message in this
        # narrow view.
        self.subscribe_to_stream("[email protected]", "Scotland")
        # Topics equivalent to "personal" under MIT zephyr conventions
        # (case-insensitive, '(instance "")' aliases, and .d suffixes)
        # should all be grouped into the 'personal.d.d' narrow below.
        self.send_message("[email protected]", "Scotland", Recipient.STREAM,
                          subject=u".d.d")
        self.send_message("[email protected]", "Scotland", Recipient.STREAM,
                          subject=u"PERSONAL")
        self.send_message("[email protected]", "Scotland", Recipient.STREAM,
                          subject=u'(instance "").d')
        self.send_message("[email protected]", "Scotland", Recipient.STREAM,
                          subject=u".d.d.d")
        self.send_message("[email protected]", "Scotland", Recipient.STREAM,
                          subject=u"personal.d")
        self.send_message("[email protected]", "Scotland", Recipient.STREAM,
                          subject=u'(instance "")')
        self.send_message("[email protected]", "Scotland", Recipient.STREAM,
                          subject=u".d.d.d.d")
        narrow = [dict(operator='topic', operand=u'personal.d.d')]
        result = self.get_and_check_messages(dict(
            num_before=50,
            num_after=50,
            narrow=ujson.dumps(narrow)))
        messages = get_user_messages(get_user_profile_by_email("[email protected]"))
        stream_messages = [msg for msg in messages if msg.recipient.type == Recipient.STREAM]
        self.assertEqual(len(result["messages"]), 7)
        for i, message in enumerate(result["messages"]):
            self.assertEqual(message["type"], "stream")
            stream_id = stream_messages[i].recipient.id
            self.assertEqual(message["recipient_id"], stream_id)
    def test_get_old_messages_with_narrow_sender(self):
        # type: () -> None
        """
        A request for old messages with a narrow by sender only returns
        messages sent by that person.
        """
        self.login("[email protected]")
        # We need to send a message here to ensure that we actually
        # have a stream message in this narrow view.
        self.send_message("[email protected]", "Scotland", Recipient.STREAM)
        self.send_message("[email protected]", "Scotland", Recipient.STREAM)
        self.send_message("[email protected]", "[email protected]", Recipient.PERSONAL)
        self.send_message("[email protected]", "Scotland", Recipient.STREAM)
        narrow = [dict(operator='sender', operand='[email protected]')]
        result = self.get_and_check_messages(dict(narrow=ujson.dumps(narrow)))
        # Othello's and Iago's messages must be filtered out.
        for message in result["messages"]:
            self.assertEqual(message["sender_email"], "[email protected]")
    def _update_tsvector_index(self):
        # type: () -> None
        # We use brute force here and update our text search index
        # for the entire zerver_message table (which is small in test
        # mode). In production there is an async process which keeps
        # the search index up to date.
        with connection.cursor() as cursor:
            cursor.execute("""
            UPDATE zerver_message SET
            search_tsvector = to_tsvector('zulip.english_us_search',
            subject || rendered_content)
            """)
    @override_settings(USING_PGROONGA=False)
    def test_messages_in_narrow(self):
        # type: () -> None
        # /json/messages_in_narrow should return, from the supplied
        # msg_ids only, the subset matching the narrow, with highlighted
        # match_content.
        email = '[email protected]'
        self.login(email)
        def send(content):
            # type: (Text) -> int
            msg_id = self.send_message(
                sender_name=email,
                raw_recipients="Verona",
                message_type=Recipient.STREAM,
                content=content,
            )
            return msg_id
        good_id = send('KEYWORDMATCH and should work')
        bad_id = send('no match')
        msg_ids = [good_id, bad_id]
        # A matching message outside msg_ids must not be returned.
        send('KEYWORDMATCH but not in msg_ids')
        self._update_tsvector_index()
        narrow = [
            dict(operator='search', operand='KEYWORDMATCH'),
        ]
        raw_params = dict(msg_ids=msg_ids, narrow=narrow)
        params = {k: ujson.dumps(v) for k, v in raw_params.items()}
        result = self.client_post('/json/messages_in_narrow', params)
        self.assert_json_success(result)
        messages = ujson.loads(result.content)['messages']
        self.assertEqual(len(list(messages.keys())), 1)
        message = messages[str(good_id)]
        self.assertEqual(message['match_content'],
                         u'<p><span class="highlight">KEYWORDMATCH</span> and should work</p>')
    @override_settings(USING_PGROONGA=False)
    def test_get_old_messages_with_search(self):
        # type: () -> None
        # Full-text search (tsearch backend) should match both subject
        # and content and return highlighted match_subject/match_content.
        self.login("[email protected]")
        messages_to_search = [
            ('breakfast', 'there are muffins in the conference room'),
            ('lunch plans', 'I am hungry!'),
            ('meetings', 'discuss lunch after lunch'),
            ('meetings', 'please bring your laptops to take notes'),
            ('dinner', 'Anybody staying late tonight?'),
        ]
        for topic, content in messages_to_search:
            self.send_message(
                sender_name="[email protected]",
                raw_recipients="Verona",
                message_type=Recipient.STREAM,
                content=content,
                subject=topic,
            )
        self._update_tsvector_index()
        narrow = [
            dict(operator='sender', operand='[email protected]'),
            dict(operator='search', operand='lunch'),
        ]
        result = self.get_and_check_messages(dict(
            narrow=ujson.dumps(narrow),
            anchor=0,
            num_after=10,
        )) # type: Dict[str, Dict]
        self.assertEqual(len(result['messages']), 2)
        messages = result['messages']
        # Content-only match: subject is returned unhighlighted.
        meeting_message = [m for m in messages if m['subject'] == 'meetings'][0]
        self.assertEqual(
            meeting_message['match_subject'],
            'meetings')
        self.assertEqual(
            meeting_message['match_content'],
            '<p>discuss <span class="highlight">lunch</span> after ' +
            '<span class="highlight">lunch</span></p>')
        # Subject-only match: content is returned unhighlighted.
        meeting_message = [m for m in messages if m['subject'] == 'lunch plans'][0]
        self.assertEqual(
            meeting_message['match_subject'],
            '<span class="highlight">lunch</span> plans')
        self.assertEqual(
            meeting_message['match_content'],
            '<p>I am hungry!</p>')
        # Should not crash when multiple search operands are present
        multi_search_narrow = [
            dict(operator='search', operand='discuss'),
            dict(operator='search', operand='after'),
        ]
        multi_search_result = self.get_and_check_messages(dict(
            narrow=ujson.dumps(multi_search_narrow),
            anchor=0,
            num_after=10,
        )) # type: Dict[str, Dict]
        self.assertEqual(len(multi_search_result['messages']), 1)
        self.assertEqual(multi_search_result['messages'][0]['match_content'], '<p><span class="highlight">discuss</span> lunch <span class="highlight">after</span> lunch</p>')
    @override_settings(USING_PGROONGA=False)
    def test_get_old_messages_with_search_not_subscribed(self):
        # type: () -> None
        """Verify support for searching a stream you're not subscribed to"""
        # Only othello is subscribed to newstream; cordelia searches it.
        self.subscribe_to_stream("[email protected]", "newstream")
        self.send_message(
            sender_name="[email protected]",
            raw_recipients="newstream",
            message_type=Recipient.STREAM,
            content="Public special content!",
            subject="new",
        )
        self._update_tsvector_index()
        self.login("[email protected]")
        stream_search_narrow = [
            dict(operator='search', operand='special'),
            dict(operator='stream', operand='newstream'),
        ]
        stream_search_result = self.get_and_check_messages(dict(
            narrow=ujson.dumps(stream_search_narrow),
            anchor=0,
            num_after=10,
            num_before=10,
        )) # type: Dict[str, Dict]
        self.assertEqual(len(stream_search_result['messages']), 1)
        self.assertEqual(stream_search_result['messages'][0]['match_content'],
                         '<p>Public <span class="highlight">special</span> content!</p>')
    @override_settings(USING_PGROONGA=True)
    def test_get_old_messages_with_search_pgroonga(self):
        # type: () -> None
        # The PGroonga backend should support multi-byte (Japanese)
        # search terms in both subject and content, with highlighting.
        self.login("[email protected]")
        messages_to_search = [
            (u'日本語', u'こんにちは。今日はいい天気ですね。'),
            (u'日本語', u'今朝はごはんを食べました。'),
            (u'日本語', u'昨日、日本のお菓子を送りました。'),
            ('english', u'I want to go to 日本!'),
            ('english', 'Can you speak Japanese?'),
        ]
        for topic, content in messages_to_search:
            self.send_message(
                sender_name="[email protected]",
                raw_recipients="Verona",
                message_type=Recipient.STREAM,
                content=content,
                subject=topic,
            )
        # We use brute force here and update our text search index
        # for the entire zerver_message table (which is small in test
        # mode). In production there is an async process which keeps
        # the search index up to date.
        with connection.cursor() as cursor:
            cursor.execute("""
            UPDATE zerver_message SET
            search_pgroonga = subject || ' ' || rendered_content
            """)
        narrow = [
            dict(operator='search', operand=u'日本'),
        ]
        result = self.get_and_check_messages(dict(
            narrow=ujson.dumps(narrow),
            anchor=0,
            num_after=10,
        )) # type: Dict[str, Dict]
        self.assertEqual(len(result['messages']), 4)
        messages = result['messages']
        japanese_message = [m for m in messages if m['subject'] == u'日本語'][-1]
        self.assertEqual(
            japanese_message['match_subject'],
            u'<span class="highlight">日本</span>語')
        self.assertEqual(
            japanese_message['match_content'],
            u'<p>昨日、<span class="highlight">日本</span>の' +
            u'お菓子を送りました。</p>')
        english_message = [m for m in messages if m['subject'] == 'english'][0]
        self.assertEqual(
            english_message['match_subject'],
            'english')
        self.assertEqual(
            english_message['match_content'],
            u'<p>I want to go to <span class="highlight">日本</span>!</p>')
        # Should not crash when multiple search operands are present
        multi_search_narrow = [
            dict(operator='search', operand='can'),
            dict(operator='search', operand='speak'),
        ]
        multi_search_result = self.get_and_check_messages(dict(
            narrow=ujson.dumps(multi_search_narrow),
            anchor=0,
            num_after=10,
        )) # type: Dict[str, Dict]
        self.assertEqual(len(multi_search_result['messages']), 1)
        self.assertEqual(multi_search_result['messages'][0]['match_content'], '<p><span class="highlight">Can</span> you <span class="highlight">speak</span> Japanese?</p>')
    def test_get_old_messages_with_only_searching_anchor(self):
        # type: () -> None
        """
        Test that specifying an anchor but 0 for num_before and num_after
        returns at most 1 message.
        """
        self.login("[email protected]")
        anchor = self.send_message("[email protected]", "Verona", Recipient.STREAM)
        narrow = [dict(operator='sender', operand='[email protected]')]
        result = self.get_and_check_messages(dict(narrow=ujson.dumps(narrow),
                                                  anchor=anchor, num_before=0,
                                                  num_after=0)) # type: Dict[str, Dict]
        self.assertEqual(len(result['messages']), 1)
        # The anchor message does not match this narrow, so nothing is
        # returned at all.
        narrow = [dict(operator='is', operand='mentioned')]
        result = self.get_and_check_messages(dict(narrow=ujson.dumps(narrow),
                                                  anchor=anchor, num_before=0,
                                                  num_after=0))
        self.assertEqual(len(result['messages']), 0)
    def test_missing_params(self):
        # type: () -> None
        """
        anchor, num_before, and num_after are all required
        POST parameters for get_old_messages.
        """
        self.login("[email protected]")
        required_args = (("anchor", 1), ("num_before", 1), ("num_after", 1)) # type: Tuple[Tuple[Text, int], ...]
        # Drop each required argument in turn and check the error message.
        for i in range(len(required_args)):
            post_params = dict(required_args[:i] + required_args[i + 1:])
            result = self.client_get("/json/messages", post_params)
            self.assert_json_error(result,
                                   "Missing '%s' argument" % (required_args[i][0],))
def test_bad_int_params(self):
# type: () -> None
"""
num_before, num_after, and narrow must all be non-negative
integers or strings that can be converted to non-negative integers.
"""
self.login("[email protected]")
other_params = [("narrow", {}), ("anchor", 0)]
int_params = ["num_before", "num_after"]
bad_types = (False, "", "-1", -1)
for idx, param in enumerate(int_params):
for type in bad_types:
# Rotate through every bad type for every integer
# parameter, one at a time.
post_params = dict(other_params + [(param, type)] +
[(other_param, 0) for other_param in
int_params[:idx] + int_params[idx + 1:]]
)
result = self.client_get("/json/messages", post_params)
self.assert_json_error(result,
"Bad value for '%s': %s" % (param, type))
def test_bad_narrow_type(self):
# type: () -> None
"""
narrow must be a list of string pairs.
"""
self.login("[email protected]")
other_params = [("anchor", 0), ("num_before", 0), ("num_after", 0)] # type: list[Tuple[Text, Union[int, str, bool]]]
bad_types = (False, 0, '', '{malformed json,',
'{foo: 3}', '[1,2]', '[["x","y","z"]]') # type: Tuple[Union[int, str, bool], ...]
for type in bad_types:
post_params = dict(other_params + [("narrow", type)])
result = self.client_get("/json/messages", post_params)
self.assert_json_error(result,
"Bad value for 'narrow': %s" % (type,))
    def test_old_empty_narrow(self):
        # type: () -> None
        """
        '{}' is accepted to mean 'no narrow', for use by old mobile clients.
        """
        self.login("[email protected]")
        all_result = self.get_and_check_messages({}) # type: Dict[str, Dict]
        narrow_result = self.get_and_check_messages({'narrow': '{}'}) # type: Dict[str, Dict]
        self.assertEqual(message_ids(all_result), message_ids(narrow_result))
    def test_bad_narrow_operator(self):
        # type: () -> None
        """
        Unrecognized narrow operators are rejected.
        """
        self.login("[email protected]")
        for operator in ['', 'foo', 'stream:verona', '__init__']:
            narrow = [dict(operator=operator, operand='')]
            params = dict(anchor=0, num_before=0, num_after=0, narrow=ujson.dumps(narrow))
            result = self.client_get("/json/messages", params)
            self.assert_json_error_contains(result,
                                            "Invalid narrow operator: unknown operator")
    def test_non_string_narrow_operand_in_dict(self):
        # type: () -> None
        """
        We expect search operands to be strings, not integers.
        """
        self.login("[email protected]")
        not_a_string = 42
        narrow = [dict(operator='stream', operand=not_a_string)]
        params = dict(anchor=0, num_before=0, num_after=0, narrow=ujson.dumps(narrow))
        result = self.client_get("/json/messages", params)
        self.assert_json_error_contains(result, 'elem["operand"] is not a string')
    def exercise_bad_narrow_operand(self, operator, operands, error_msg):
        # type: (Text, Sequence, Text) -> None
        # Helper: each operand (in the legacy [operator, operand] tuple
        # form) must produce a response containing error_msg.
        other_params = [("anchor", 0), ("num_before", 0), ("num_after", 0)] # type: List
        for operand in operands:
            post_params = dict(other_params + [
                ("narrow", ujson.dumps([[operator, operand]]))])
            result = self.client_get("/json/messages", post_params)
            self.assert_json_error_contains(result, error_msg)
    def test_bad_narrow_stream_content(self):
        # type: () -> None
        """
        If an invalid stream name is requested in get_old_messages, an error is
        returned.
        """
        self.login("[email protected]")
        bad_stream_content = (0, [], ["x", "y"]) # type: Sequence
        self.exercise_bad_narrow_operand("stream", bad_stream_content,
                                         "Bad value for 'narrow'")
    def test_bad_narrow_one_on_one_email_content(self):
        # type: () -> None
        """
        If an invalid 'pm-with' is requested in get_old_messages, an
        error is returned.
        """
        self.login("[email protected]")
        bad_stream_content = (0, [], ["x", "y"]) # type: Tuple[int, List[None], List[Text]]
        self.exercise_bad_narrow_operand("pm-with", bad_stream_content,
                                         "Bad value for 'narrow'")
    def test_bad_narrow_nonexistent_stream(self):
        # type: () -> None
        # A well-formed but unknown stream name is its own error case.
        self.login("[email protected]")
        self.exercise_bad_narrow_operand("stream", ['non-existent stream'],
                                         "Invalid narrow operator: unknown stream")
    def test_bad_narrow_nonexistent_email(self):
        # type: () -> None
        # A well-formed but unknown email is its own error case.
        self.login("[email protected]")
        self.exercise_bad_narrow_operand("pm-with", ['[email protected]'],
                                         "Invalid narrow operator: unknown user")
    def test_message_without_rendered_content(self):
        # type: () -> None
        """Older messages may not have rendered_content in the database"""
        m = self.get_last_message()
        # Simulate a legacy row: no cached rendering, raw content only.
        m.rendered_content = m.rendered_content_version = None
        m.content = 'test content'
        # Use to_dict_uncached_helper directly to avoid having to deal with remote cache
        d = MessageDict.to_dict_uncached_helper(m, True)
        # The content should have been rendered on the fly.
        self.assertEqual(d['content'], '<p>test content</p>')
    def common_check_get_old_messages_query(self, query_params, expected):
        # type: (Dict[str, object], Text) -> None
        # Run get_old_messages_backend with query capture and assert the
        # tagged get_old_messages SQL matches `expected` exactly; fail if
        # no such query was issued at all.
        user_profile = get_user_profile_by_email("[email protected]")
        request = POSTRequestMock(query_params, user_profile)
        with queries_captured() as queries:
            get_old_messages_backend(request, user_profile)
        for query in queries:
            if "/* get_old_messages */" in query['sql']:
                sql = str(query['sql']).replace(" /* get_old_messages */", '')
                self.assertEqual(sql, expected)
                return
        self.fail("get_old_messages query not found")
    def test_use_first_unread_anchor_with_some_unread_messages(self):
        # type: () -> None
        """When unread messages exist, use_first_unread_anchor should anchor
        the query at the first unread message id (not at the
        LARGER_THAN_MAX_MESSAGE_ID sentinel)."""
        user_profile = get_user_profile_by_email("[email protected]")
        # Have Othello send messages to Hamlet that he hasn't read.
        self.send_message("[email protected]", "Scotland", Recipient.STREAM)
        last_message_id_to_hamlet = self.send_message("[email protected]", "[email protected]", Recipient.PERSONAL)
        # Add a few messages that help us test that our query doesn't
        # look at messages that are irrelevant to Hamlet.
        self.send_message("[email protected]", "[email protected]", Recipient.PERSONAL)
        self.send_message("[email protected]", "[email protected]", Recipient.PERSONAL)
        query_params = dict(
            use_first_unread_anchor='true',
            anchor=0,
            num_before=10,
            num_after=10,
            narrow='[]'
        )
        request = POSTRequestMock(query_params, user_profile)
        with queries_captured() as all_queries:
            get_old_messages_backend(request, user_profile)
        # Verify the query for old messages looks correct.
        queries = [q for q in all_queries if '/* get_old_messages */' in q['sql']]
        self.assertEqual(len(queries), 1)
        sql = queries[0]['sql']
        # The sentinel hack must NOT be used when real unread messages exist.
        self.assertNotIn('AND message_id = %s' % (LARGER_THAN_MAX_MESSAGE_ID,), sql)
        self.assertIn('ORDER BY message_id ASC', sql)
        # The "after" half of the query starts at the first unread message...
        cond = 'WHERE user_profile_id = %d AND message_id >= %d' % (user_profile.id, last_message_id_to_hamlet)
        self.assertIn(cond, sql)
        # ...and the "before" half ends just before it.
        cond = 'WHERE user_profile_id = %d AND message_id <= %d' % (user_profile.id, last_message_id_to_hamlet - 1)
        self.assertIn(cond, sql)
    def test_use_first_unread_anchor_with_no_unread_messages(self):
        # type: () -> None
        """Without any unread messages, use_first_unread_anchor falls back
        to the LARGER_THAN_MAX_MESSAGE_ID sentinel anchor."""
        user_profile = get_user_profile_by_email("[email protected]")
        query_params = dict(
            use_first_unread_anchor='true',
            anchor=0,
            num_before=10,
            num_after=10,
            narrow='[]'
        )
        request = POSTRequestMock(query_params, user_profile)
        with queries_captured() as all_queries:
            get_old_messages_backend(request, user_profile)
        # Next, verify the use_first_unread_anchor setting invokes
        # the `message_id = LARGER_THAN_MAX_MESSAGE_ID` hack.
        queries = [q for q in all_queries if '/* get_old_messages */' in q['sql']]
        self.assertEqual(len(queries), 1)
        self.assertIn('AND message_id <= %d' % (LARGER_THAN_MAX_MESSAGE_ID - 1,), queries[0]['sql'])
        # There should not be an after_query in this case, since it'd be useless
        self.assertNotIn('AND message_id >= %d' % (LARGER_THAN_MAX_MESSAGE_ID,), queries[0]['sql'])
    def test_use_first_unread_anchor_with_muted_topics(self):
        # type: () -> None
        """
        Test that our logic related to `use_first_unread_anchor`
        invokes the `message_id = LARGER_THAN_MAX_MESSAGE_ID` hack for
        the `/* get_old_messages */` query when relevant muting
        is in effect.
        This is a very arcane test on arcane, but very heavily
        field-tested, logic in get_old_messages_backend(). If
        this test breaks, be absolutely sure you know what you're
        doing.
        """
        realm = get_realm('zulip')
        self.make_stream('web stuff')
        user_profile = get_user_profile_by_email("[email protected]")
        # Mute a topic that is relevant to the Scotland narrow below.
        user_profile.muted_topics = ujson.dumps([['Scotland', 'golf'], ['web stuff', 'css'], ['bogus', 'bogus']])
        user_profile.save()
        query_params = dict(
            use_first_unread_anchor='true',
            anchor=0,
            num_before=0,
            num_after=0,
            narrow='[["stream", "Scotland"]]'
        )
        request = POSTRequestMock(query_params, user_profile)
        with queries_captured() as all_queries:
            get_old_messages_backend(request, user_profile)
        # Do some tests on the main query, to verify the muting logic
        # runs on this code path.
        queries = [q for q in all_queries if str(q['sql']).startswith("SELECT message_id, flags")]
        self.assertEqual(len(queries), 1)
        stream = get_stream('Scotland', realm)
        recipient_id = get_recipient(Recipient.STREAM, stream.id).id
        # The muted (Scotland, golf) topic must be excluded from the query.
        cond = '''AND NOT (recipient_id = {scotland} AND upper(subject) = upper('golf'))'''.format(scotland=recipient_id)
        self.assertIn(cond, queries[0]['sql'])
        # Next, verify the use_first_unread_anchor setting invokes
        # the `message_id = LARGER_THAN_MAX_MESSAGE_ID` hack.
        queries = [q for q in all_queries if '/* get_old_messages */' in q['sql']]
        self.assertEqual(len(queries), 1)
        self.assertIn('AND message_id = %d' % (LARGER_THAN_MAX_MESSAGE_ID,),
                      queries[0]['sql'])
    def test_exclude_muting_conditions(self):
        # type: () -> None
        """Exercise exclude_muting_conditions() directly: no conditions when
        nothing relevant is muted; NOT-clauses for muted topics; a
        recipient_id NOT IN clause for muted streams."""
        realm = get_realm('zulip')
        self.make_stream('web stuff')
        user_profile = get_user_profile_by_email("[email protected]")
        # Test the do-nothing case first.
        user_profile.muted_topics = ujson.dumps([['irrelevant_stream', 'irrelevant_topic']])
        user_profile.save()
        # If nothing relevant is muted, then exclude_muting_conditions()
        # should return an empty list.
        narrow = [
            dict(operator='stream', operand='Scotland'),
        ]
        muting_conditions = exclude_muting_conditions(user_profile, narrow)
        self.assertEqual(muting_conditions, [])
        # Ok, now set up our muted topics to include a topic relevant to our narrow.
        user_profile.muted_topics = ujson.dumps([['Scotland', 'golf'], ['web stuff', 'css'], ['bogus', 'bogus']])
        user_profile.save()
        # And verify that our query will exclude them.
        narrow = [
            dict(operator='stream', operand='Scotland'),
        ]
        muting_conditions = exclude_muting_conditions(user_profile, narrow)
        # Attach the conditions to a toy select so we can render the SQL.
        query = select([column("id").label("message_id")], None, table("zerver_message"))
        query = query.where(*muting_conditions)
        expected_query = '''
            SELECT id AS message_id
            FROM zerver_message
            WHERE NOT (recipient_id = :recipient_id_1 AND upper(subject) = upper(:upper_1))
            '''
        self.assertEqual(fix_ws(query), fix_ws(expected_query))
        params = get_sqlalchemy_query_params(query)
        self.assertEqual(params['recipient_id_1'], get_recipient_id_for_stream_name(realm, 'Scotland'))
        self.assertEqual(params['upper_1'], 'golf')
        # Now mute an entire stream and use an empty narrow: both the muted
        # stream and the muted topics must be excluded.
        mute_stream(realm, user_profile, 'Verona')
        narrow = []
        muting_conditions = exclude_muting_conditions(user_profile, narrow)
        query = select([column("id")], None, table("zerver_message"))
        query = query.where(and_(*muting_conditions))
        expected_query = '''
            SELECT id
            FROM zerver_message
            WHERE recipient_id NOT IN (:recipient_id_1)
            AND NOT
            (recipient_id = :recipient_id_2 AND upper(subject) = upper(:upper_1) OR
            recipient_id = :recipient_id_3 AND upper(subject) = upper(:upper_2))'''
        self.assertEqual(fix_ws(query), fix_ws(expected_query))
        params = get_sqlalchemy_query_params(query)
        self.assertEqual(params['recipient_id_1'], get_recipient_id_for_stream_name(realm, 'Verona'))
        self.assertEqual(params['recipient_id_2'], get_recipient_id_for_stream_name(realm, 'Scotland'))
        self.assertEqual(params['upper_1'], 'golf')
        self.assertEqual(params['recipient_id_3'], get_recipient_id_for_stream_name(realm, 'web stuff'))
        self.assertEqual(params['upper_2'], 'css')
    def test_get_old_messages_queries(self):
        # type: () -> None
        """Pin the exact SQL generated for the basic (no-narrow) fetches."""
        query_ids = self.get_query_ids()
        # Forward-only fetch from anchor 0.
        sql_template = 'SELECT anon_1.message_id, anon_1.flags \nFROM (SELECT message_id, flags \nFROM zerver_usermessage \nWHERE user_profile_id = {hamlet_id} AND message_id >= 0 ORDER BY message_id ASC \n LIMIT 11) AS anon_1 ORDER BY message_id ASC'
        sql = sql_template.format(**query_ids)
        self.common_check_get_old_messages_query({'anchor': 0, 'num_before': 0, 'num_after': 10}, sql)
        # Backward-only fetch from anchor 100.
        sql_template = 'SELECT anon_1.message_id, anon_1.flags \nFROM (SELECT message_id, flags \nFROM zerver_usermessage \nWHERE user_profile_id = {hamlet_id} AND message_id <= 100 ORDER BY message_id DESC \n LIMIT 11) AS anon_1 ORDER BY message_id ASC'
        sql = sql_template.format(**query_ids)
        self.common_check_get_old_messages_query({'anchor': 100, 'num_before': 10, 'num_after': 0}, sql)
        # Both directions: two half-queries combined via UNION ALL.
        sql_template = 'SELECT anon_1.message_id, anon_1.flags \nFROM ((SELECT message_id, flags \nFROM zerver_usermessage \nWHERE user_profile_id = {hamlet_id} AND message_id <= 99 ORDER BY message_id DESC \n LIMIT 10) UNION ALL (SELECT message_id, flags \nFROM zerver_usermessage \nWHERE user_profile_id = {hamlet_id} AND message_id >= 100 ORDER BY message_id ASC \n LIMIT 11)) AS anon_1 ORDER BY message_id ASC'
        sql = sql_template.format(**query_ids)
        self.common_check_get_old_messages_query({'anchor': 100, 'num_before': 10, 'num_after': 10}, sql)
    def test_get_old_messages_with_narrow_queries(self):
        # type: () -> None
        """Pin the exact SQL generated for the various narrow operators."""
        query_ids = self.get_query_ids()
        # pm-with another user: either direction of the conversation.
        sql_template = 'SELECT anon_1.message_id, anon_1.flags \nFROM (SELECT message_id, flags \nFROM zerver_usermessage JOIN zerver_message ON zerver_usermessage.message_id = zerver_message.id \nWHERE user_profile_id = {hamlet_id} AND (sender_id = {othello_id} AND recipient_id = {hamlet_recipient} OR sender_id = {hamlet_id} AND recipient_id = {othello_recipient}) AND message_id >= 0 ORDER BY message_id ASC \n LIMIT 10) AS anon_1 ORDER BY message_id ASC'
        sql = sql_template.format(**query_ids)
        self.common_check_get_old_messages_query({'anchor': 0, 'num_before': 0, 'num_after': 10,
                                                  'narrow': '[["pm-with", "[email protected]"]]'},
                                                 sql)
        # is:starred checks the starred bit in the flags bitmask.
        sql_template = 'SELECT anon_1.message_id, anon_1.flags \nFROM (SELECT message_id, flags \nFROM zerver_usermessage JOIN zerver_message ON zerver_usermessage.message_id = zerver_message.id \nWHERE user_profile_id = {hamlet_id} AND (flags & 2) != 0 AND message_id >= 0 ORDER BY message_id ASC \n LIMIT 10) AS anon_1 ORDER BY message_id ASC'
        sql = sql_template.format(**query_ids)
        self.common_check_get_old_messages_query({'anchor': 0, 'num_before': 0, 'num_after': 10,
                                                  'narrow': '[["is", "starred"]]'},
                                                 sql)
        # sender narrow.
        sql_template = 'SELECT anon_1.message_id, anon_1.flags \nFROM (SELECT message_id, flags \nFROM zerver_usermessage JOIN zerver_message ON zerver_usermessage.message_id = zerver_message.id \nWHERE user_profile_id = {hamlet_id} AND sender_id = {othello_id} AND message_id >= 0 ORDER BY message_id ASC \n LIMIT 10) AS anon_1 ORDER BY message_id ASC'
        sql = sql_template.format(**query_ids)
        self.common_check_get_old_messages_query({'anchor': 0, 'num_before': 0, 'num_after': 10,
                                                  'narrow': '[["sender", "[email protected]"]]'},
                                                 sql)
        # Public stream narrow queries zerver_message directly (no usermessage join).
        sql_template = 'SELECT anon_1.message_id \nFROM (SELECT id AS message_id \nFROM zerver_message \nWHERE recipient_id = {scotland_recipient} AND zerver_message.id >= 0 ORDER BY zerver_message.id ASC \n LIMIT 10) AS anon_1 ORDER BY message_id ASC'
        sql = sql_template.format(**query_ids)
        self.common_check_get_old_messages_query({'anchor': 0, 'num_before': 0, 'num_after': 10,
                                                  'narrow': '[["stream", "Scotland"]]'},
                                                 sql)
        # topic narrow (case-insensitive via upper()).
        sql_template = "SELECT anon_1.message_id, anon_1.flags \nFROM (SELECT message_id, flags \nFROM zerver_usermessage JOIN zerver_message ON zerver_usermessage.message_id = zerver_message.id \nWHERE user_profile_id = {hamlet_id} AND upper(subject) = upper('blah') AND message_id >= 0 ORDER BY message_id ASC \n LIMIT 10) AS anon_1 ORDER BY message_id ASC"
        sql = sql_template.format(**query_ids)
        self.common_check_get_old_messages_query({'anchor': 0, 'num_before': 0, 'num_after': 10,
                                                  'narrow': '[["topic", "blah"]]'},
                                                 sql)
        # stream + topic combined.
        sql_template = "SELECT anon_1.message_id \nFROM (SELECT id AS message_id \nFROM zerver_message \nWHERE recipient_id = {scotland_recipient} AND upper(subject) = upper('blah') AND zerver_message.id >= 0 ORDER BY zerver_message.id ASC \n LIMIT 10) AS anon_1 ORDER BY message_id ASC"
        sql = sql_template.format(**query_ids)
        self.common_check_get_old_messages_query({'anchor': 0, 'num_before': 0, 'num_after': 10,
                                                  'narrow': '[["stream", "Scotland"], ["topic", "blah"]]'},
                                                 sql)
        # Narrow to pms with yourself
        sql_template = 'SELECT anon_1.message_id, anon_1.flags \nFROM (SELECT message_id, flags \nFROM zerver_usermessage JOIN zerver_message ON zerver_usermessage.message_id = zerver_message.id \nWHERE user_profile_id = {hamlet_id} AND sender_id = {hamlet_id} AND recipient_id = {hamlet_recipient} AND message_id >= 0 ORDER BY message_id ASC \n LIMIT 10) AS anon_1 ORDER BY message_id ASC'
        sql = sql_template.format(**query_ids)
        self.common_check_get_old_messages_query({'anchor': 0, 'num_before': 0, 'num_after': 10,
                                                  'narrow': '[["pm-with", "[email protected]"]]'},
                                                 sql)
        # stream + is:starred combined.
        sql_template = 'SELECT anon_1.message_id, anon_1.flags \nFROM (SELECT message_id, flags \nFROM zerver_usermessage JOIN zerver_message ON zerver_usermessage.message_id = zerver_message.id \nWHERE user_profile_id = {hamlet_id} AND recipient_id = {scotland_recipient} AND (flags & 2) != 0 AND message_id >= 0 ORDER BY message_id ASC \n LIMIT 10) AS anon_1 ORDER BY message_id ASC'
        sql = sql_template.format(**query_ids)
        self.common_check_get_old_messages_query({'anchor': 0, 'num_before': 0, 'num_after': 10,
                                                  'narrow': '[["stream", "Scotland"], ["is", "starred"]]'},
                                                 sql)
    @override_settings(USING_PGROONGA=False)
    def test_get_old_messages_with_search_queries(self):
        # type: () -> None
        """Pin the exact SQL generated for full-text search narrows
        (tsearch backend; pgroonga explicitly disabled)."""
        query_ids = self.get_query_ids()
        # Plain search narrow over the user's messages.
        sql_template = "SELECT anon_1.message_id, anon_1.flags, anon_1.subject, anon_1.rendered_content, anon_1.content_matches, anon_1.subject_matches \nFROM (SELECT message_id, flags, subject, rendered_content, ts_match_locs_array('zulip.english_us_search', rendered_content, plainto_tsquery('zulip.english_us_search', 'jumping')) AS content_matches, ts_match_locs_array('zulip.english_us_search', escape_html(subject), plainto_tsquery('zulip.english_us_search', 'jumping')) AS subject_matches \nFROM zerver_usermessage JOIN zerver_message ON zerver_usermessage.message_id = zerver_message.id \nWHERE user_profile_id = {hamlet_id} AND (search_tsvector @@ plainto_tsquery('zulip.english_us_search', 'jumping')) AND message_id >= 0 ORDER BY message_id ASC \n LIMIT 10) AS anon_1 ORDER BY message_id ASC" # type: Text
        sql = sql_template.format(**query_ids)
        self.common_check_get_old_messages_query({'anchor': 0, 'num_before': 0, 'num_after': 10,
                                                  'narrow': '[["search", "jumping"]]'},
                                                 sql)
        # Search restricted to a public stream (no usermessage join).
        sql_template = "SELECT anon_1.message_id, anon_1.subject, anon_1.rendered_content, anon_1.content_matches, anon_1.subject_matches \nFROM (SELECT id AS message_id, subject, rendered_content, ts_match_locs_array('zulip.english_us_search', rendered_content, plainto_tsquery('zulip.english_us_search', 'jumping')) AS content_matches, ts_match_locs_array('zulip.english_us_search', escape_html(subject), plainto_tsquery('zulip.english_us_search', 'jumping')) AS subject_matches \nFROM zerver_message \nWHERE recipient_id = {scotland_recipient} AND (search_tsvector @@ plainto_tsquery('zulip.english_us_search', 'jumping')) AND zerver_message.id >= 0 ORDER BY zerver_message.id ASC \n LIMIT 10) AS anon_1 ORDER BY message_id ASC"
        sql = sql_template.format(**query_ids)
        self.common_check_get_old_messages_query({'anchor': 0, 'num_before': 0, 'num_after': 10,
                                                  'narrow': '[["stream", "Scotland"], ["search", "jumping"]]'},
                                                 sql)
        # Quoted phrase: adds ILIKE conditions on top of the tsquery.
        sql_template = 'SELECT anon_1.message_id, anon_1.flags, anon_1.subject, anon_1.rendered_content, anon_1.content_matches, anon_1.subject_matches \nFROM (SELECT message_id, flags, subject, rendered_content, ts_match_locs_array(\'zulip.english_us_search\', rendered_content, plainto_tsquery(\'zulip.english_us_search\', \'"jumping" quickly\')) AS content_matches, ts_match_locs_array(\'zulip.english_us_search\', escape_html(subject), plainto_tsquery(\'zulip.english_us_search\', \'"jumping" quickly\')) AS subject_matches \nFROM zerver_usermessage JOIN zerver_message ON zerver_usermessage.message_id = zerver_message.id \nWHERE user_profile_id = {hamlet_id} AND (content ILIKE \'%jumping%\' OR subject ILIKE \'%jumping%\') AND (search_tsvector @@ plainto_tsquery(\'zulip.english_us_search\', \'"jumping" quickly\')) AND message_id >= 0 ORDER BY message_id ASC \n LIMIT 10) AS anon_1 ORDER BY message_id ASC'
        sql = sql_template.format(**query_ids)
        self.common_check_get_old_messages_query({'anchor': 0, 'num_before': 0, 'num_after': 10,
                                                  'narrow': '[["search", "\\"jumping\\" quickly"]]'},
                                                 sql)
| samatdav/zulip | zerver/tests/test_narrow.py | Python | apache-2.0 | 65,887 |
# ex40.py
class Song(object):
    """A song: a list of lyric lines that can be printed ("sung")."""

    def __init__(self, lyrics):
        # lyrics: list of strings, one lyric line each.
        self.lyrics = lyrics

    def sing_me_a_song(self):
        """Print every lyric line on its own line."""
        for line in self.lyrics:
            # print(line): the parenthesized single-argument form behaves
            # identically on Python 2 and 3 (the original bare `print line`
            # statement is a SyntaxError on Python 3).
            print(line)
# Two Song instances, each built from a list of lyric lines.
happy_bday = Song (["Happy birthday to you",
                    "I don't want to get sued",
                    "So I'll stop right there"])
bulls_on_parade = Song (["They rally around the family",
                         "With pockets full of shells"])
# Print each song's lyrics line by line.
happy_bday.sing_me_a_song()
bulls_on_parade.sing_me_a_song() | CodeSheng/LPLHW | ex40.py | Python | apache-2.0 | 421 |
import craftbuildtools.app
from craftbuildtools.app import cli

# Run the CLI only when executed as the package entry point
# (``python -m craftbuildtools``), not when this module is merely imported.
if __name__ == '__main__':
    cli()
| TechnicalBro/CraftBuildTools | craftbuildtools/__main__.py | Python | mit | 70 |
from contentbase.json_renderer import json_renderer
from contentbase.util import get_root_request
from elasticsearch import Elasticsearch
from elasticsearch.connection import Urllib3HttpConnection
from elasticsearch.serializer import SerializationError
from pyramid.settings import (
asbool,
aslist,
)
from .interfaces import (
APP_FACTORY,
ELASTIC_SEARCH,
INDEXER,
)
import json
import sys
# True when running under Python 2; used below to skip the multiprocess
# indexer, which is only included when not on Python 2.
PY2 = sys.version_info.major == 2
def includeme(config):
    """Pyramid ``includeme``: configure the elasticsearch client, the
    per-request datastore selector, and the indexing subsystems."""
    settings = config.registry.settings
    settings.setdefault('contentbase.elasticsearch.index', 'contentbase')
    # request.datastore chooses 'database' vs 'elasticsearch' (reified per request).
    config.add_request_method(datastore, 'datastore', reify=True)
    addresses = aslist(settings['elasticsearch.server'])
    config.registry[ELASTIC_SEARCH] = Elasticsearch(
        addresses,
        # Serialize documents with the app's JSON renderer and record
        # per-request call timing via the custom connection class.
        serializer=PyramidJSONSerializer(json_renderer),
        connection_class=TimedUrllib3HttpConnection,
        retry_on_timeout=True,
    )
    config.include('.cached_views')
    # Non-indexer processes get ES-backed storage plus the indexer views;
    # a process started with the 'indexer' setting instead loads the
    # multiprocess indexer (not available on Python 2).
    if not config.registry.settings.get('indexer'):
        config.include('.esstorage')
        config.include('.indexer')
    if asbool(settings.get('indexer')) and not PY2:
        config.include('.mpindexer')
def datastore(request):
    """Pick the datastore ('database' or 'elasticsearch') for a request.

    Subrequests inherit the choice from their parent request.  Edit frames
    and mutating methods always hit the database; plain GET/HEAD requests
    may be served from elasticsearch, selectable via the ``datastore``
    query parameter, the ``X-Datastore`` header, or the
    ``collection_datastore`` registry setting (default 'elasticsearch').
    """
    parent = request.__parent__
    if parent is not None:
        return parent.datastore
    if request.params.get('frame') == 'edit':
        return 'database'
    if request.method not in ('HEAD', 'GET'):
        return 'database'
    return (request.params.get('datastore')
            or request.headers.get('X-Datastore')
            or request.registry.settings.get('collection_datastore', 'elasticsearch'))
class PyramidJSONSerializer(object):
    """elasticsearch-py serializer that delegates dumping to a Pyramid JSON
    renderer, so documents are serialized exactly like HTTP responses."""
    mimetype = 'application/json'

    def __init__(self, renderer):
        # Pyramid JSON renderer object exposing a ``dumps`` method.
        self.renderer = renderer

    def loads(self, s):
        """Deserialize *s*; wrap parse failures in SerializationError."""
        try:
            return json.loads(s)
        except (TypeError, ValueError) as exc:
            raise SerializationError(s, exc)

    def dumps(self, data):
        """Serialize *data* via the Pyramid renderer.

        Text values pass through untouched (elasticsearch-py convention:
        don't serialize strings).
        """
        if isinstance(data, (type(''), type(u''))):
            return data
        try:
            return self.renderer.dumps(data)
        except (TypeError, ValueError) as exc:
            raise SerializationError(data, exc)
class TimedUrllib3HttpConnection(Urllib3HttpConnection):
    """Urllib3 ES connection that accumulates per-request call counts and
    total elapsed time onto the current root request's ``_stats`` dict."""
    # Keys under which stats are accumulated on request._stats.
    stats_count_key = 'es_count'
    stats_time_key = 'es_time'
    def stats_record(self, duration):
        """Record one ES call of *duration* seconds against the root request."""
        request = get_root_request()
        if request is None:
            # No active request (e.g. a standalone script): nothing to record.
            return
        duration = int(duration * 1e6)  # seconds -> microseconds
        stats = request._stats
        stats[self.stats_count_key] = stats.get(self.stats_count_key, 0) + 1
        stats[self.stats_time_key] = stats.get(self.stats_time_key, 0) + duration
    def log_request_success(self, method, full_url, path, body, status_code, response, duration):
        # Piggy-back on elasticsearch-py's logging hook to time every call.
        self.stats_record(duration)
        return super(TimedUrllib3HttpConnection, self).log_request_success(
            method, full_url, path, body, status_code, response, duration)
    def log_request_fail(self, method, full_url, body, duration, status_code=None, exception=None):
        # Failed calls are timed too.
        self.stats_record(duration)
        return super(TimedUrllib3HttpConnection, self).log_request_fail(
            method, full_url, body, duration, status_code, exception)
| kidaa/encoded | src/contentbase/elasticsearch/__init__.py | Python | mit | 3,267 |
# Django app-config hook: points at the AppConfig class for this package.
default_app_config = 'wagtail.contrib.modeladmin.apps.WagtailModelAdminAppConfig'
| kaedroho/wagtail | wagtail/contrib/modeladmin/__init__.py | Python | bsd-3-clause | 82 |
import math
from quaternion import *
__doc__ = '''A module which implements a trackball class.'''
class Trackball:
    '''A virtual trackball. This is a deformed trackball which is a hyperbolic
    sheet of rotation away from the center. This particular function was chosen
    after trying out several variations. The current transformation matrix
    can be retrieved using the "matrix" attribute.'''
    def __init__(self, size = 0.8, scale = 2.0, renorm = 97):
        '''Create a Trackball object. "size" is the radius of the inner trackball
        sphere. "scale" is a multiplier applied to the mouse coordinates before
        mapping into the viewport. "renorm" is not currently used.'''
        self.size = size
        self.scale = scale
        self.renorm = renorm
        self.reset()
    def reset(self):
        '''Reset the trackball to the identity rotation.'''
        self.quat = quaternion([1, 0, 0, 0])
    def __track_project_to_sphere(self, px, py):
        # Project viewport point (px, py) onto the deformed sphere: a true
        # sphere near the center, a hyperbolic sheet further out.
        d2 = px**2 + py**2
        d = math.sqrt(d2)
        if d < self.size * 0.70710678118654752440:
            # Inside sphere
            return math.sqrt(self.size**2 - d2)
        # On hyperbola
        t = self.size/1.41421356237309504880
        return t**2/d
    def update(self, p1x, p1y, p2x, p2y, width, height, mat = 0):
        '''Update the quaternion with a new rotation position derived
        from the first point (p1) and the second point (p2). The
        the mat parameter is not currently used.'''
        if p1x == p2x and p1y == p2y:
            # NOTE(review): reset() passes a list to quaternion(); here four
            # scalars are passed -- verify quaternion() accepts both forms.
            self.quat = quaternion(1, 0, 0, 0)
        else:
            # First, figure out z-coordinates for projection of p1 and p2 to
            # deformed sphere
            p1x_u = self.scale*p1x/width - 1.0
            p1y_u = 1.0 - self.scale*p1y/height
            p2x_u = self.scale*p2x/width - 1.0
            p2y_u = 1.0 - self.scale*p2y/height
            P1 = (p1x_u, p1y_u, self.__track_project_to_sphere(p1x_u, p1y_u))
            P2 = (p2x_u, p2y_u, self.__track_project_to_sphere(p2x_u, p2y_u))
            # Axis of rotation: cross product of the two projected points.
            a = [(P2[1]*P1[2]) - (P2[2]*P1[1]),
                 (P2[2]*P1[0]) - (P2[0]*P1[2]),
                 (P2[0]*P1[1]) - (P2[1]*P1[0])]
            # Figure out how much to rotate around that axis.
            # List comprehensions instead of map(): on Python 3, map()
            # returns a non-indexable iterator, so d[0] below would crash.
            d = [x - y for x, y in zip(P1, P2)]
            t = math.sqrt(d[0]**2 + d[1]**2 + d[2]**2) / (2.0 * self.size)
            # Avoid problems with out-of-control values...
            t = max(min(t, 1.0), -1.0)
            scale = t*math.sqrt(a[0]**2 + a[1]**2 + a[2]**2)
            q = [x * scale for x in a] + [math.sqrt(1.0 - t**2)]
            self.quat = quaternion([q[3], q[0], q[1], q[2]]).normalized()
    def __getattr__(self, name):
        # Only the computed "matrix" attribute is synthesized here.
        if name != 'matrix':
            # Parenthesized raise: the old `raise AttributeError, msg`
            # comma syntax is a SyntaxError on Python 3.
            raise AttributeError('No attribute named "%s"' % name)
        return self.quat.asMatrix()
# Shared module-level Trackball instance (the "gl" prefix suggests use by
# the OpenGL viewer code -- confirm against callers).
glTrackball = Trackball()
| MDAnalysis/pyQuteMol | python/trackball.py | Python | gpl-2.0 | 2,898 |
#!/usr/bin/python3
# @begin:license
#
# Copyright (c) 2015-2019, Benjamin Niemann <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @end:license
import enum
import logging
import os.path
import time
from typing import Any, Dict, List, Iterable
from PyQt5.QtCore import Qt
from PyQt5 import QtCore
from PyQt5 import QtGui
from PyQt5 import QtWidgets
from noisicaa import constants
from noisicaa import core
from noisicaa import music
from noisicaa.ui import slots
from noisicaa.ui import dynamic_layout
from noisicaa.ui import int_dial
from noisicaa.ui import control_value_dial
from noisicaa.ui import property_connector
from noisicaa.ui import ui_base
from noisicaa.ui.graph import base_node
from . import model
logger = logging.getLogger(__name__)
class State(enum.IntEnum):
    """Trigger state machine of the oscilloscope display."""
    WAIT_FOR_TRIGGER = 1  # waiting for a rising edge through the trigger level (or timeout)
    RECORDING = 2  # sweep in progress: filling the plot left to right
    HOLD = 3  # sweep finished; frozen until the hold time elapses
class SignalPoint(object):
    """Aggregated sample statistics for one horizontal plot position."""

    def __init__(self, screen_pos: int) -> None:
        # x coordinate (in plot pixels) that this point is drawn at.
        self.screen_pos = screen_pos
        # Running statistics over all samples folded into this point.
        self.num_samples = 0
        self.signal_sum = 0.0
        self.max = None  # type: float
        self.min = None  # type: float

    @property
    def avg(self) -> float:
        """Mean of the accumulated samples (undefined before add_sample)."""
        return self.signal_sum / self.num_samples

    def add_sample(self, value: float) -> None:
        """Fold one sample into the running count/sum/min/max."""
        self.num_samples += 1
        self.signal_sum += value
        self.max = value if self.max is None else max(self.max, value)
        self.min = value if self.min is None else min(self.min, value)
class Oscilloscope(slots.SlotContainer, QtWidgets.QWidget):
timeScale, setTimeScale, timeScaleChanged = slots.slot(int, 'timeScale', default=-2)
yScale, setYScale, yScaleChanged = slots.slot(int, 'yScale', default=0)
yOffset, setYOffset, yOffsetChanged = slots.slot(float, 'yOffset', default=0.0)
paused, setPaused, pausedChanged = slots.slot(bool, 'paused', default=False)
holdTime, setHoldTime, holdTimeChanged = slots.slot(int, 'holdTime', default=0)
    def __init__(self, **kwargs: Any) -> None:
        """Set up signal buffers, trigger state, timing and paint resources."""
        super().__init__(**kwargs)
        self.setMinimumSize(20, 20)
        # Captured waveform: one SignalPoint per plotted x position.
        self.__signal = [] # type: List[SignalPoint]
        # Trigger state machine and sweep bookkeeping.
        self.__state = State.WAIT_FOR_TRIGGER
        self.__insert_pos = 0
        self.__screen_pos = 0
        self.__density = 3  # pixels advanced per plotted point
        self.__remainder = 0.0
        self.__prev_sample = 0.0
        self.__hold_begin = 0.0
        self.__trigger_begin = 0.0
        self.__trigger_found = False
        # Timing: seconds of signal per pixel / per input sample.
        self.__timePerPixel = 1.0
        self.__timePerSample = 1.0 / 44100
        # Colors, pens and fonts used for rendering.
        self.__bg_color = QtGui.QColor(0, 0, 0)
        self.__border_color = QtGui.QColor(100, 200, 100)
        self.__grid_color = QtGui.QColor(40, 60, 40)
        self.__center_color = QtGui.QColor(60, 100, 60)
        self.__plot_pen = QtGui.QPen(QtGui.QColor(255, 255, 255))
        self.__plot_pen.setWidth(1)
        self.__plot_fill_color = QtGui.QColor(200, 255, 200, 100)
        self.__label_color = QtGui.QColor(100, 200, 100)
        self.__label_font = QtGui.QFont(self.font())
        self.__label_font.setPointSizeF(0.8 * self.__label_font.pointSizeF())
        self.__label_font_metrics = QtGui.QFontMetrics(self.__label_font)
        self.__warning_pen = QtGui.QPen(QtGui.QColor(255, 255, 255))
        self.__warning_font = QtGui.QFont(self.font())
        self.__warning_font.setPointSizeF(0.9 * self.__warning_font.pointSizeF())
        self.__warning_font.setBold(True)
        self.__warning_font_metrics = QtGui.QFontMetrics(self.__warning_font)
        self.__warning_pixmap = QtGui.QIcon(
            os.path.join(constants.DATA_DIR, 'icons', 'warning.svg')).pixmap(
                4 + self.__warning_font_metrics.capHeight(),
                4 + self.__warning_font_metrics.capHeight())
        # Layout flags recomputed in resizeEvent() based on widget size.
        self.__show_minor_grid = False
        self.__show_major_grid = False
        self.__show_y_labels = False
        self.__show_x_labels = False
        self.__time_step_size = 100
        self.__plot_rect = None  # type: QtCore.QRect
        # Cached background pixmap (grid/labels); invalidated on changes.
        self.__bg_cache = None  # type: QtGui.QPixmap
        # Repaint at ~20 Hz while visible (started in showEvent).
        self.__update_timer = QtCore.QTimer(self)
        self.__update_timer.timeout.connect(self.update)
        self.__update_timer.setInterval(1000 // 20)
        self.timeScaleChanged.connect(self.__timeScaleChanged)
        self.timeScaleChanged.connect(lambda _: self.__invalidateBGCache())
        self.yScaleChanged.connect(lambda _: self.__invalidateBGCache())
        self.yOffsetChanged.connect(lambda _: self.__invalidateBGCache())
    def __setState(self, state: State) -> None:
        """Transition the trigger state machine, initializing per-state data."""
        if state == self.__state:
            return
        if state == State.RECORDING:
            # Start a fresh sweep from the left edge of the plot.
            self.__insert_pos = 0
            self.__screen_pos = 0
            self.__remainder = 0.0
        elif state == State.HOLD:
            # Remember when the hold started, so addValues() can time it out.
            self.__hold_begin = time.time()
        elif state == State.WAIT_FOR_TRIGGER:
            # Remember when we started waiting, to force a sweep eventually.
            self.__trigger_begin = time.time()
        self.__state = state
    def __timeScaleChanged(self, value: int) -> None:
        """Recompute per-pixel timing and (unless paused) restart the sweep."""
        if self.__plot_rect is None:
            # Widget too small to plot; nothing to recompute yet.
            return
        self.__timePerPixel = self.__density * self.absTimeScale() / self.__time_step_size
        if not self.paused():
            self.__setState(State.WAIT_FOR_TRIGGER)
def absTimeScale(self) -> float:
time_scale = self.timeScale()
return [1, 2, 5][time_scale % 3] * 10.0 ** (time_scale // 3)
def absYScale(self) -> float:
y_scale = self.yScale()
return [1, 2, 5][y_scale % 3] * 10.0 ** (y_scale // 3)
def absHoldTime(self) -> float:
hold_time = self.holdTime()
return [1, 2, 5][hold_time % 3] * 10.0 ** (hold_time // 3)
@classmethod
def formatTimeScale(cls, time_scale: int) -> str:
mul = [1, 2, 5][time_scale % 3]
time_scale //= 3
if time_scale <= -4:
return '%dµs' % (mul * 10 ** (time_scale + 6))
elif time_scale <= -1:
return '%dms' % (mul * 10 ** (time_scale + 3))
else:
return '%ds' % (mul * 10 ** time_scale)
@classmethod
def formatHoldTime(cls, hold_time: int) -> str:
mul = [1, 2, 5][hold_time % 3]
hold_time //= 3
if hold_time <= -4:
return '%dµs' % (mul * 10 ** (hold_time + 6))
elif hold_time <= -1:
return '%dms' % (mul * 10 ** (hold_time + 3))
else:
return '%ds' % (mul * 10 ** hold_time)
@classmethod
def formatYScale(cls, y_scale: int) -> str:
return '%g' % ([1, 2, 5][y_scale % 3] * 10.0 ** (y_scale // 3))
    def addValues(self, samples_per_value: int, values: Iterable[float]) -> None:
        """Feed audio samples into the scope, driving the trigger state
        machine and accumulating SignalPoints for the current sweep.

        Each entry of *values* represents *samples_per_value* consecutive
        samples of the same value.
        """
        if self.__plot_rect is None:
            # Widget too small to plot; drop the data.
            return
        # Trigger level in signal units (vertical offset maps to level 0).
        trigger_value = -self.yOffset() * self.absYScale()
        for value in values:
            for _ in range(samples_per_value):
                # HOLD expires after the configured hold time (unless paused).
                if self.__state == State.HOLD and not self.paused():
                    if time.time() - self.__hold_begin > self.absHoldTime():
                        self.__setState(State.WAIT_FOR_TRIGGER)
                if self.__state == State.WAIT_FOR_TRIGGER:
                    # Arm on a rising edge through the trigger level...
                    if self.__prev_sample < trigger_value and value >= trigger_value:
                        self.__trigger_found = True
                        self.__setState(State.RECORDING)
                    # ...or force a free-running sweep after a timeout.
                    elif time.time() - self.__trigger_begin > 10 * self.absTimeScale():
                        self.__setState(State.RECORDING)
                        self.__trigger_found = False
                self.__prev_sample = value
                if self.__state != State.RECORDING:
                    continue
                if self.__timePerPixel >= self.__timePerSample:
                    # Slow sweep: many samples per plotted point; start a new
                    # point only when a pixel's worth of time has elapsed.
                    self.__remainder += self.__timePerSample
                    if self.__remainder >= 0.0:
                        self.__remainder -= self.__timePerPixel
                        pnt = SignalPoint(self.__screen_pos)
                        pnt.add_sample(value)
                        self.__signal.insert(self.__insert_pos, pnt)
                        self.__insert_pos += 1
                        # Drop stale points from the previous sweep that the
                        # new point overwrites.
                        while (self.__insert_pos < len(self.__signal)
                               and (self.__signal[self.__insert_pos].screen_pos
                                    <= self.__screen_pos)):
                            del self.__signal[self.__insert_pos]
                        self.__screen_pos += self.__density
                    else:
                        # Same pixel: fold the sample into the current point.
                        pnt = self.__signal[self.__insert_pos - 1]
                        pnt.add_sample(value)
                else:
                    # Fast sweep: one point per sample, possibly advancing
                    # several pixels per sample.
                    pnt = SignalPoint(self.__screen_pos)
                    pnt.add_sample(value)
                    self.__signal.insert(self.__insert_pos, pnt)
                    self.__insert_pos += 1
                    while (self.__insert_pos < len(self.__signal)
                           and self.__signal[self.__insert_pos].screen_pos <= self.__screen_pos):
                        del self.__signal[self.__insert_pos]
                    self.__remainder += self.__timePerSample
                    while self.__remainder >= 0.0:
                        self.__remainder -= self.__timePerPixel
                        self.__screen_pos += self.__density
                # Sweep ran off the right edge (+ slack): freeze and trim.
                if self.__screen_pos >= self.__plot_rect.width() + 10:
                    self.__setState(State.HOLD)
                    del self.__signal[self.__insert_pos:]
def step(self) -> None:
if self.paused() and self.__state == State.HOLD:
self.__setState(State.WAIT_FOR_TRIGGER)
    def sizeHint(self) -> QtCore.QSize:
        """Preferred widget size."""
        return QtCore.QSize(100, 100)
    def minimumSizeHint(self) -> QtCore.QSize:
        """Smallest usable widget size."""
        return QtCore.QSize(60, 60)
    def resizeEvent(self, evt: QtGui.QResizeEvent) -> None:
        """Recompute which decorations fit and the inner plot rectangle."""
        # Progressively enable grid lines and labels as the widget grows.
        if evt.size().width() > 20 and evt.size().height() > 20:
            self.__show_major_grid = True
        else:
            self.__show_major_grid = False
        if evt.size().width() > 100 and evt.size().height() > 100:
            self.__show_minor_grid = True
        else:
            self.__show_minor_grid = False
        # Reserve space for the widest plausible y label.
        y_label_width = self.__label_font_metrics.boundingRect('500000').width() + 3
        if evt.size().width() >= y_label_width + 100 and evt.size().height() >= 60:
            self.__show_y_labels = True
        else:
            self.__show_y_labels = False
        x_label_height = self.__label_font_metrics.capHeight() + 2
        if evt.size().width() >= 100 and evt.size().height() >= x_label_height + 100:
            self.__show_x_labels = True
        else:
            self.__show_x_labels = False
        if evt.size().width() >= 60 and evt.size().height() >= 60:
            margin = 2
        else:
            margin = 0
        border_left = margin
        border_right = margin
        border_top = margin
        border_bottom = margin
        if self.__show_y_labels:
            border_left += y_label_width
        if self.__show_x_labels:
            border_bottom += x_label_height
        if (evt.size().width() >= border_left + border_right + 10
                and evt.size().height() >= border_top + border_bottom + 10):
            # NOTE(review): the second QRect argument (y) is border_right;
            # border_top looks intended. Currently harmless because both
            # always equal `margin`, but worth confirming/fixing.
            self.__plot_rect = QtCore.QRect(
                border_left, border_right,
                evt.size().width() - border_left - border_right,
                evt.size().height() - border_top - border_bottom)
            self.__time_step_size = self.__plot_rect.height() // 2
        else:
            # Too small to plot anything.
            self.__plot_rect = None
        self.__invalidateBGCache()
        # Re-derive per-pixel timing for the new plot geometry.
        self.__timeScaleChanged(self.timeScale())
        super().resizeEvent(evt)
    def showEvent(self, evt: QtGui.QShowEvent) -> None:
        """Start the periodic repaint timer while the widget is visible."""
        self.__update_timer.start()
        super().showEvent(evt)
    def hideEvent(self, evt: QtGui.QHideEvent) -> None:
        """Stop repainting and drop the cached background when hidden."""
        self.__update_timer.stop()
        self.__invalidateBGCache()
        super().hideEvent(evt)
    def __invalidateBGCache(self) -> None:
        # Force the grid/background pixmap to be re-rendered on next paint.
        self.__bg_cache = None
    def __renderBG(self) -> None:
        """Render the static background (grid, border, axis labels) into
        self.__bg_cache so paintEvent() only has to blit it.

        Assumes self.__plot_rect is not None — resizeEvent() computes it
        before the cache is invalidated (TODO confirm for the very first
        paint).
        """
        w = self.__plot_rect.width()
        h = self.__plot_rect.height()
        self.__bg_cache = QtGui.QPixmap(self.size())
        painter = QtGui.QPainter(self.__bg_cache)
        try:
            painter.fillRect(self.__bg_cache.rect(), self.__bg_color)
            painter.save()
            # Draw the grid in plot-local coordinates.
            painter.translate(self.__plot_rect.topLeft())
            if self.__show_minor_grid:
                # Horizontal minor lines at 1/10 steps; the multiples of 5
                # (0, 5, 10) are skipped because the major grid covers them.
                for g in (-4, -3, -2, -1, 1, 2, 3, 4, 6, 7, 8, 9, 11, 12, 13, 14):
                    tick_pos = int((g / 10 - 0.5 * self.yOffset()) * (h - 1))
                    if not 0 <= tick_pos < h:
                        continue
                    painter.fillRect(0, tick_pos, w, 1, self.__grid_color)
                # Vertical minor lines: four subdivisions per time step.
                x = 0
                while x < w:
                    for g in (1, 2, 3, 4):
                        painter.fillRect(
                            x + int(g * self.__time_step_size / 5), 0, 1, h,
                            self.__grid_color)
                    x += self.__time_step_size
            if self.__show_major_grid:
                # Horizontal major lines at whole-unit ticks of the Y axis.
                for tick in (-2.0, -1.0, 0.0, 1.0, 2.0):
                    tick_pos = int(0.5 * (1.0 - tick - self.yOffset()) * (h - 1))
                    if not 0 <= tick_pos < h:
                        continue
                    painter.fillRect(0, tick_pos, w, 1, self.__center_color)
                # Vertical major lines, one per time step.
                x = self.__time_step_size
                while x < w:
                    painter.fillRect(x, 0, 1, h, self.__center_color)
                    x += self.__time_step_size
            # 1px border around the plot area.
            painter.fillRect(0, 0, w, 1, self.__border_color)
            painter.fillRect(0, h - 1, w, 1, self.__border_color)
            painter.fillRect(0, 0, 1, h, self.__border_color)
            painter.fillRect(w - 1, 0, 1, h, self.__border_color)
            painter.restore()
            painter.setFont(self.__label_font)
            painter.setPen(self.__label_color)
            # X-axis label: the time-per-division value at the first step,
            # clamped so it never overflows the right edge of the plot.
            if self.__show_x_labels and self.__time_step_size <= w:
                t1 = self.formatTimeScale(self.timeScale())
                t1r = self.__label_font_metrics.boundingRect(t1)
                painter.drawText(
                    min(self.__plot_rect.left() + self.__time_step_size - t1r.width() // 2,
                        self.__plot_rect.right() - t1r.width()),
                    self.__plot_rect.bottom() + self.__label_font_metrics.capHeight() + 2,
                    t1)
            if self.__show_y_labels:
                # Tick marks plus numeric labels to the left of the plot,
                # clamped to stay within the plot's vertical extent.
                y_min = self.__plot_rect.top() + self.__label_font_metrics.capHeight()
                y_max = self.__plot_rect.bottom()
                for tick in (-2.0, -1.0, 0.0, 1.0, 2.0):
                    tick_pos = int(0.5 * (1.0 - tick - self.yOffset()) * (h - 1))
                    if not 0 <= tick_pos < h:
                        continue
                    painter.fillRect(
                        self.__plot_rect.left() - 3, self.__plot_rect.top() + tick_pos,
                        3, 1,
                        self.__border_color)
                    y1 = '%g' % (tick * self.absYScale())
                    y1r = self.__label_font_metrics.boundingRect(y1)
                    label_pos = (
                        self.__plot_rect.top()
                        + tick_pos
                        + self.__label_font_metrics.capHeight() // 2)
                    label_pos = max(y_min, min(y_max, label_pos))
                    painter.drawText(self.__plot_rect.left() - y1r.width() - 4, label_pos, y1)
        finally:
            painter.end()
    def paintEvent(self, evt: QtGui.QPaintEvent) -> None:
        """Blit the cached background, then draw the min/max signal envelope
        and, if no trigger was found, a warning overlay."""
        painter = QtGui.QPainter(self)
        try:
            if self.__bg_cache is None:
                self.__renderBG()
            painter.drawPixmap(0, 0, self.__bg_cache)
            w = self.__plot_rect.width()
            h = self.__plot_rect.height()
            painter.setClipRect(self.__plot_rect)
            painter.translate(self.__plot_rect.topLeft())
            y_scale = self.absYScale()
            y_offset = self.yOffset()
            # Each signal point carries a min and a max sample value; draw
            # both as polylines and fill the band between them.
            min_path = QtGui.QPolygon()
            max_path = QtGui.QPolygon()
            for pnt in self.__signal:
                if pnt.screen_pos >= w:
                    break
                x = pnt.screen_pos
                # Map sample values from [-1, 1] (scaled/offset) to pixels,
                # with +1 at the top and -1 at the bottom.
                min_value = pnt.min / y_scale + y_offset
                min_y = int((h - 1) * (1.0 - min_value) / 2.0)
                min_path.append(QtCore.QPoint(x, min_y))
                max_value = pnt.max / y_scale + y_offset
                max_y = int((h - 1) * (1.0 - max_value) / 2.0)
                max_path.append(QtCore.QPoint(x, max_y))
                if min_y > max_y + 1:
                    painter.fillRect(
                        x, max_y + 1,
                        self.__density, min_y - max_y - 1,
                        self.__plot_fill_color)
            painter.setPen(self.__plot_pen)
            painter.drawPolyline(min_path)
            painter.drawPolyline(max_path)
            # Warning overlay when no trigger was detected, only if there is
            # enough vertical room for it.
            if not self.__trigger_found and h > 5 * self.__warning_font_metrics.capHeight():
                x = (w - self.__warning_font_metrics.boundingRect("No Trigger").width()
                     - self.__warning_pixmap.width() - 8)
                if x > 0:
                    painter.drawPixmap(x, 3, self.__warning_pixmap)
                    x += self.__warning_pixmap.width() + 3
                    painter.setPen(self.__warning_pen)
                    painter.setFont(self.__warning_font)
                    painter.drawText(x, 5 + self.__warning_font_metrics.capHeight(), "No Trigger")
        finally:
            painter.end()
class OscilloscopeNodeWidget(ui_base.ProjectMixin, core.AutoCleanupMixin, QtWidgets.QWidget):
    """Control panel for an oscilloscope node.

    Shows the Oscilloscope plot with pause/step buttons and dials for time
    scale, hold time, Y zoom and Y offset.  The dials are two-way bound to
    the project node's properties via property connectors, and plot samples
    arrive as audioproc node messages.
    """
    def __init__(self, node: model.Oscilloscope, session_prefix: str, **kwargs: Any) -> None:
        # NOTE(review): session_prefix is accepted but not referenced in
        # this method body — confirm whether it is still needed.
        super().__init__(**kwargs)
        self.__node = node
        self.__listeners = core.ListenerMap[str]()
        self.add_cleanup_function(self.__listeners.cleanup)
        # Receive messages addressed to this node (id as 16-digit hex).
        self.__listeners['node-messages'] = self.audioproc_client.node_messages.add(
            '%016x' % self.__node.id, self.__nodeMessage)
        # Smaller font for the dial captions.
        label_font = QtGui.QFont(self.font())
        label_font.setPointSizeF(0.8 * label_font.pointSizeF())
        self.__plot = Oscilloscope()
        # Pause toggles the plot; Step is only enabled while paused.
        self.__paused = QtWidgets.QPushButton()
        self.__paused.setCheckable(True)
        self.__paused.setText("Pause")
        self.__paused.toggled.connect(self.__plot.setPaused)
        self.__step = QtWidgets.QPushButton()
        self.__step.setText("Step")
        self.__step.setEnabled(False)
        self.__step.clicked.connect(self.__plot.step)
        self.__paused.toggled.connect(self.__step.setEnabled)
        # Time scale dial, bound to the node's 'time_scale' property.
        self.__time_scale = int_dial.IntDial()
        self.__time_scale.setMinimumSize(56, 56)
        self.__time_scale.setRange(-12, 3)
        self.__time_scale.setDefault(-5)
        self.__time_scale.setDisplayFunc(Oscilloscope.formatTimeScale)
        self.__time_scale.valueChanged.connect(self.__plot.setTimeScale)
        self.__time_scale_connector = property_connector.IntDialConnector(
            self.__time_scale, self.__node, 'time_scale',
            mutation_name='%s: Change time scale' % self.__node.name,
            context=self.context)
        self.add_cleanup_function(self.__time_scale_connector.cleanup)
        self.__time_scale_label = QtWidgets.QLabel("Time", self)
        self.__time_scale_label.setFont(label_font)
        self.__time_scale_label.setAlignment(Qt.AlignHCenter)
        # Hold time dial, bound to 'hold_time'.
        self.__hold_time = int_dial.IntDial()
        self.__hold_time.setMinimumSize(56, 56)
        self.__hold_time.setRange(-6, 3)
        self.__hold_time.setDefault(-3)
        self.__hold_time.setDisplayFunc(Oscilloscope.formatHoldTime)
        self.__hold_time.valueChanged.connect(self.__plot.setHoldTime)
        self.__hold_time_connector = property_connector.IntDialConnector(
            self.__hold_time, self.__node, 'hold_time',
            mutation_name='%s: Change hold time' % self.__node.name,
            context=self.context)
        self.add_cleanup_function(self.__hold_time_connector.cleanup)
        self.__hold_time_label = QtWidgets.QLabel("Hold", self)
        self.__hold_time_label.setFont(label_font)
        self.__hold_time_label.setAlignment(Qt.AlignHCenter)
        # Y zoom dial, bound to 'y_scale'.
        self.__y_scale = int_dial.IntDial()
        self.__y_scale.setMinimumSize(56, 56)
        self.__y_scale.setRange(-18, 18)
        self.__y_scale.setDefault(1)
        self.__y_scale.setDisplayFunc(Oscilloscope.formatYScale)
        self.__y_scale.valueChanged.connect(self.__plot.setYScale)
        self.__y_scale_connector = property_connector.IntDialConnector(
            self.__y_scale, self.__node, 'y_scale',
            mutation_name='%s: Change Y scale' % self.__node.name,
            context=self.context)
        self.add_cleanup_function(self.__y_scale_connector.cleanup)
        self.__y_scale_label = QtWidgets.QLabel("Y Zoom", self)
        self.__y_scale_label.setFont(label_font)
        self.__y_scale_label.setAlignment(Qt.AlignHCenter)
        # Y offset dial (continuous value), bound to 'y_offset'.
        self.__y_offset = control_value_dial.ControlValueDial()
        self.__y_offset.setMinimumSize(56, 56)
        self.__y_offset.setRange(-1.0, 1.0)
        self.__y_offset.setDefault(0.0)
        self.__y_offset.valueChanged.connect(self.__plot.setYOffset)
        self.__y_offset_connector = property_connector.ControlValueDialConnector(
            self.__y_offset, self.__node, 'y_offset',
            mutation_name='%s: Change Y offset' % self.__node.name,
            context=self.context)
        self.add_cleanup_function(self.__y_offset_connector.cleanup)
        self.__y_offset_label = QtWidgets.QLabel("Y Off", self)
        self.__y_offset_label.setFont(label_font)
        self.__y_offset_label.setAlignment(Qt.AlignHCenter)
        # Plot on top, the controls (with their captions) in a row below.
        # Priorities control which widgets are dropped first when space is
        # tight.
        layout = dynamic_layout.DynamicLayout(
            dynamic_layout.VBox(
                dynamic_layout.Widget(self.__plot, stretch=1),
                dynamic_layout.HBox(
                    dynamic_layout.VBox(
                        dynamic_layout.Widget(self.__paused, priority=5),
                        dynamic_layout.Widget(self.__step, priority=5),
                    ),
                    dynamic_layout.VBox(
                        dynamic_layout.Widget(self.__time_scale, priority=3),
                        dynamic_layout.Widget(self.__time_scale_label, priority=4),
                        stretch=1,
                    ),
                    dynamic_layout.VBox(
                        dynamic_layout.Widget(self.__hold_time, priority=3),
                        dynamic_layout.Widget(self.__hold_time_label, priority=4),
                        stretch=1,
                    ),
                    dynamic_layout.VBox(
                        dynamic_layout.Widget(self.__y_scale, priority=3),
                        dynamic_layout.Widget(self.__y_scale_label, priority=4),
                        stretch=1,
                    ),
                    dynamic_layout.VBox(
                        dynamic_layout.Widget(self.__y_offset, priority=3),
                        dynamic_layout.Widget(self.__y_offset_label, priority=4),
                        stretch=1,
                    ),
                ),
                spacing=2,
            )
        )
        self.setLayout(layout)
    def __nodeMessage(self, msg: Dict[str, Any]) -> None:
        # Forward signal samples from the audio process to the plot widget.
        signal_uri = 'http://noisicaa.odahoda.de/lv2/processor_oscilloscope#signal'
        if signal_uri in msg:
            samples_per_value, signal = msg[signal_uri]
            self.__plot.addValues(samples_per_value, signal)
class OscilloscopeNode(base_node.Node):
    """Graph-node UI for the oscilloscope: renders the controls inline in
    the node box and can open the same controls in a separate window."""
    # Tell the base class this node type supports a detached window.
    has_window = True
    def __init__(self, *, node: music.BaseNode, **kwargs: Any) -> None:
        assert isinstance(node, model.Oscilloscope), type(node).__name__
        self.__widget = None  # type: QtWidgets.QWidget
        self.__node = node  # type: model.Oscilloscope
        super().__init__(node=node, **kwargs)
    def createBodyWidget(self) -> QtWidgets.QWidget:
        """Create (exactly once) the widget embedded in the node's box.

        The controls are wrapped in a frameless, resizable scroll area.
        """
        assert self.__widget is None
        body = OscilloscopeNodeWidget(
            node=self.__node,
            session_prefix='inline',
            context=self.context)
        self.add_cleanup_function(body.cleanup)
        body.setAutoFillBackground(False)
        body.setAttribute(Qt.WA_NoSystemBackground, True)
        self.__widget = QtWidgets.QScrollArea()
        self.__widget.setWidgetResizable(True)
        self.__widget.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.__widget.setWidget(body)
        return self.__widget
    def createWindow(self, **kwargs: Any) -> QtWidgets.QWidget:
        """Create a non-modal dialog hosting a second set of controls.

        WA_DeleteOnClose is disabled so closing the window only hides it.
        """
        window = QtWidgets.QDialog(**kwargs)
        window.setAttribute(Qt.WA_DeleteOnClose, False)
        window.setWindowTitle("Oscilloscope")
        body = OscilloscopeNodeWidget(
            node=self.__node,
            session_prefix='window',
            context=self.context)
        self.add_cleanup_function(body.cleanup)
        layout = QtWidgets.QVBoxLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        layout.addWidget(body)
        window.setLayout(layout)
        return window
# --- file boundary: noisicaa/builtin_nodes/oscilloscope/node_ui.py (odahoda/noisicaa, GPL-2.0) ---
"""
Tests of completion xblock runtime services
"""
import ddt
from completion.models import BlockCompletion
from completion.services import CompletionService
from completion.test_utils import CompletionWaffleTestMixin
from opaque_keys.edx.keys import CourseKey
from xmodule.library_tools import LibraryToolsService
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory, LibraryFactory
from xmodule.tests import get_test_system
from openedx.core.djangolib.testing.utils import skip_unless_lms
from common.djangoapps.student.tests.factories import UserFactory
@ddt.ddt
@skip_unless_lms
class CompletionServiceTestCase(CompletionWaffleTestMixin, SharedModuleStoreTestCase):
    """
    Test the data returned by the CompletionService.
    """
    @classmethod
    def setUpClass(cls):
        # Build a minimal course tree once for the whole class:
        # chapter > sequential > vertical, the vertical holding one html
        # block and five problems.
        super().setUpClass()
        cls.course = CourseFactory.create()
        with cls.store.bulk_operations(cls.course.id):
            cls.chapter = ItemFactory.create(
                parent=cls.course,
                category="chapter",
                publish_item=False,
            )
            cls.sequence = ItemFactory.create(
                parent=cls.chapter,
                category='sequential',
                publish_item=False,
            )
            cls.vertical = ItemFactory.create(
                parent=cls.sequence,
                category='vertical',
                publish_item=False,
            )
            cls.html = ItemFactory.create(
                parent=cls.vertical,
                category='html',
                publish_item=False,
            )
            cls.problem = ItemFactory.create(
                parent=cls.vertical,
                category="problem",
                publish_item=False,
            )
            cls.problem2 = ItemFactory.create(
                parent=cls.vertical,
                category="problem",
                publish_item=False,
            )
            cls.problem3 = ItemFactory.create(
                parent=cls.vertical,
                category="problem",
                publish_item=False,
            )
            cls.problem4 = ItemFactory.create(
                parent=cls.vertical,
                category="problem",
                publish_item=False,
            )
            cls.problem5 = ItemFactory.create(
                parent=cls.vertical,
                category="problem",
                publish_item=False,
            )
            cls.store.update_item(cls.course, UserFactory().id)
        cls.problems = [cls.problem, cls.problem2, cls.problem3, cls.problem4, cls.problem5]
    def setUp(self):
        # Per-test state: enable completion tracking and seed completions
        # for self.user plus noise for another user and another course.
        super().setUp()
        self.override_waffle_switch(True)
        self.user = UserFactory.create()
        self.other_user = UserFactory.create()
        self.course_key = self.course.id
        self.other_course_key = CourseKey.from_string("course-v1:ReedX+Hum110+1904")
        self.block_keys = [problem.location for problem in self.problems]
        self.completion_service = CompletionService(self.user, self.course_key)
        # Proper completions for the given runtime
        BlockCompletion.objects.submit_completion(
            user=self.user,
            block_key=self.html.location,
            completion=1.0,
        )
        # First three problems get completions 1.0, 0.8, 0.6.
        for idx, block_key in enumerate(self.block_keys[0:3]):
            BlockCompletion.objects.submit_completion(
                user=self.user,
                block_key=block_key,
                completion=1.0 - (0.2 * idx),
            )
        # Wrong user
        for idx, block_key in enumerate(self.block_keys[2:]):
            BlockCompletion.objects.submit_completion(
                user=self.other_user,
                block_key=block_key,
                completion=0.9 - (0.2 * idx),
            )
        # Wrong course
        BlockCompletion.objects.submit_completion(
            user=self.user,
            block_key=self.other_course_key.make_usage_key('problem', 'other'),
            completion=0.75,
        )
    def _bind_course_module(self, module):
        """
        Bind a module (part of self.course) so we can access student-specific data.
        """
        module_system = get_test_system(course_id=module.location.course_key)
        module_system.descriptor_runtime = module.runtime._descriptor_system  # pylint: disable=protected-access
        module_system._services['library_tools'] = LibraryToolsService(self.store, self.user.id)  # pylint: disable=protected-access
        def get_module(descriptor):
            """Mocks module_system get_module function"""
            sub_module_system = get_test_system(course_id=module.location.course_key)
            sub_module_system.get_module = get_module
            sub_module_system.descriptor_runtime = descriptor._runtime  # pylint: disable=protected-access
            descriptor.bind_for_student(sub_module_system, self.user.id)
            return descriptor
        module_system.get_module = get_module
        module.xmodule_runtime = module_system
    def test_completion_service(self):
        # Only the completions for the user and course specified for the CompletionService
        # are returned. Values are returned for all keys provided.
        assert self.completion_service.get_completions(self.block_keys) == {
            self.block_keys[0]: 1.0, self.block_keys[1]: 0.8,
            self.block_keys[2]: 0.6, self.block_keys[3]: 0.0,
            self.block_keys[4]: 0.0
        }
    @ddt.data(True, False)
    def test_enabled_honors_waffle_switch(self, enabled):
        self.override_waffle_switch(enabled)
        assert self.completion_service.completion_tracking_enabled() == enabled
    def test_vertical_completion(self):
        # A vertical is complete only when every child block is complete.
        assert self.completion_service.vertical_is_complete(self.vertical) is False
        for block_key in self.block_keys:
            BlockCompletion.objects.submit_completion(
                user=self.user,
                block_key=block_key,
                completion=1.0
            )
        assert self.completion_service.vertical_is_complete(self.vertical) is True
    def test_vertical_partial_completion(self):
        block_keys_count = len(self.block_keys)
        for i in range(block_keys_count - 1):
            # Mark all the child blocks completed except the last one
            BlockCompletion.objects.submit_completion(
                user=self.user,
                block_key=self.block_keys[i],
                completion=1.0
            )
        assert self.completion_service.vertical_is_complete(self.vertical) is False
    def test_can_mark_block_complete_on_view(self):
        # Only leaf html blocks are completable-on-view; containers and
        # problems (which complete on submission) are not.
        assert self.completion_service.can_mark_block_complete_on_view(self.course) is False
        assert self.completion_service.can_mark_block_complete_on_view(self.chapter) is False
        assert self.completion_service.can_mark_block_complete_on_view(self.sequence) is False
        assert self.completion_service.can_mark_block_complete_on_view(self.vertical) is False
        assert self.completion_service.can_mark_block_complete_on_view(self.html) is True
        assert self.completion_service.can_mark_block_complete_on_view(self.problem) is False
    def test_vertical_completion_with_library_content(self):
        library = LibraryFactory.create(modulestore=self.store)
        ItemFactory.create(parent=library, category='problem', publish_item=False, user_id=self.user.id)
        ItemFactory.create(parent=library, category='problem', publish_item=False, user_id=self.user.id)
        ItemFactory.create(parent=library, category='problem', publish_item=False, user_id=self.user.id)
        # Create a new vertical to hold the library content block
        # It is very important that we use parent_location=self.sequence.location (and not parent=self.sequence), since
        # sequence is a class attribute and passing it by value will update its .children=[] which will then leak into
        # other tests and cause errors if the children no longer exist.
        lib_vertical = ItemFactory.create(
            parent_location=self.sequence.location,
            category='vertical',
            publish_item=False,
        )
        library_content_block = ItemFactory.create(
            parent=lib_vertical,
            category='library_content',
            max_count=1,
            source_library_id=str(library.location.library_key),
            user_id=self.user.id,
        )
        library_content_block.refresh_children()
        lib_vertical = self.store.get_item(lib_vertical.location)
        self._bind_course_module(lib_vertical)
        # We need to refetch the library_content_block to retrieve the
        # fresh version from the call to get_item for lib_vertical
        library_content_block = [child for child in lib_vertical.get_children()
                                 if child.scope_ids.block_type == 'library_content'][0]
        ## Ensure the library_content_block is properly set up
        # This is needed so we can call get_child_descriptors
        self._bind_course_module(library_content_block)
        # Make sure the runtime knows that the block's children vary per-user:
        assert library_content_block.has_dynamic_children()
        assert len(library_content_block.children) == 3
        # Check how many children each user will see:
        assert len(library_content_block.get_child_descriptors()) == 1
        # No problems are complete yet
        assert not self.completion_service.vertical_is_complete(lib_vertical)
        for block_key in self.block_keys:
            BlockCompletion.objects.submit_completion(
                user=self.user,
                block_key=block_key,
                completion=1.0
            )
        # Library content problems aren't complete yet
        assert not self.completion_service.vertical_is_complete(lib_vertical)
        for child in library_content_block.get_child_descriptors():
            BlockCompletion.objects.submit_completion(
                user=self.user,
                block_key=child.scope_ids.usage_id,
                completion=1.0
            )
        assert self.completion_service.vertical_is_complete(lib_vertical)
    def test_vertical_completion_with_nested_children(self):
        # Create a new vertical.
        # It is very important that we use parent_location=self.sequence.location (and not parent=self.sequence), since
        # sequence is a class attribute and passing it by value will update its .children=[] which will then leak into
        # other tests and cause errors if the children no longer exist.
        parent_vertical = ItemFactory(parent_location=self.sequence.location, category='vertical')
        extra_vertical = ItemFactory(parent=parent_vertical, category='vertical')
        problem = ItemFactory(parent=extra_vertical, category='problem')
        parent_vertical = self.store.get_item(parent_vertical.location)
        # Nothing is complete
        assert not self.completion_service.vertical_is_complete(parent_vertical)
        for block_key in self.block_keys:
            BlockCompletion.objects.submit_completion(
                user=self.user,
                block_key=block_key,
                completion=1.0
            )
        # The nested child isn't complete yet
        assert not self.completion_service.vertical_is_complete(parent_vertical)
        BlockCompletion.objects.submit_completion(
            user=self.user,
            block_key=problem.location,
            completion=1.0
        )
        assert self.completion_service.vertical_is_complete(parent_vertical)
# --- file boundary: openedx/tests/completion_integration/test_services.py (eduNEXT/edx-platform, AGPL-3.0) ---
"""
GOCDBSyncCommand module
This command updates the downtime dates from the DowntimeCache table in case they changed
after being fetched from GOCDB. In other words, it ensures that all the downtime dates in
the database are current.
"""
import errno
import xml.dom.minidom as minidom
from DIRAC import S_OK, S_ERROR, gLogger
from DIRAC.Core.LCG.GOCDBClient import GOCDBClient
from DIRAC.Core.LCG.GOCDBClient import _parseSingleElement
from DIRAC.ResourceStatusSystem.Command.Command import Command
from DIRAC.ResourceStatusSystem.Client.ResourceManagementClient import ResourceManagementClient
class GOCDBSyncCommand(Command):
    """RSS command that keeps the locally cached GOCDB downtime dates in
    sync with the remote GOCDB service."""
    def __init__(self, args=None, clients=None):
        super(GOCDBSyncCommand, self).__init__(args, clients)
        # Reuse injected API clients when provided (eases testing);
        # otherwise instantiate the default ones.
        if "GOCDBClient" in self.apis:
            self.gClient = self.apis["GOCDBClient"]
        else:
            self.gClient = GOCDBClient()
        if "ResourceManagementClient" in self.apis:
            self.rmClient = self.apis["ResourceManagementClient"]
        else:
            self.rmClient = ResourceManagementClient()
        # Hostnames already synchronized during this doMaster() run.
        self.seenHostnames = set()
    def doNew(self, masterParams=None):
        """
        Gets the downtime IDs and dates of a given hostname from the local database and compares the results
        with the remote database of GOCDB. If the downtime dates have been changed it updates the local database.
        :param: `masterParams` - string
        :return: S_OK / S_ERROR
        """
        if masterParams:
            hostname = masterParams
        else:
            return S_ERROR(errno.EINVAL, "masterParams is not provided")
        result = self.rmClient.selectDowntimeCache(name=hostname)
        if not result["OK"]:
            return result
        for downtimes in result["Value"]:
            # Row layout: index 3 = downtime ID, 6/7 = start/end datetimes.
            localDBdict = {
                "DowntimeID": downtimes[3],
                "FORMATED_START_DATE": downtimes[6].strftime("%Y-%m-%d %H:%M"),
                "FORMATED_END_DATE": downtimes[7].strftime("%Y-%m-%d %H:%M"),
            }
            # NOTE(review): this GOCDB query is loop-invariant (same
            # hostname every iteration) and could be hoisted above the
            # loop to avoid one HTTP round-trip per cached row.
            response = self.gClient.getHostnameDowntime(hostname, ongoing=True)
            if not response["OK"]:
                return response
            doc = minidom.parseString(response["Value"])
            downtimeElements = doc.getElementsByTagName("DOWNTIME")
            for dtElement in downtimeElements:
                GOCDBdict = _parseSingleElement(
                    dtElement, ["PRIMARY_KEY", "ENDPOINT", "FORMATED_START_DATE", "FORMATED_END_DATE"]
                )
                # GOCDB identifies a downtime by "<primary key> <endpoint>".
                localDowntimeID = localDBdict["DowntimeID"]
                GOCDBDowntimeID = GOCDBdict["PRIMARY_KEY"] + " " + GOCDBdict["ENDPOINT"]
                if localDowntimeID == GOCDBDowntimeID:
                    # Update whichever of the two dates drifted.
                    if localDBdict["FORMATED_START_DATE"] != GOCDBdict["FORMATED_START_DATE"]:
                        result = self.rmClient.addOrModifyDowntimeCache(
                            downtimeID=localDBdict["DowntimeID"], startDate=GOCDBdict["FORMATED_START_DATE"]
                        )
                        gLogger.verbose("The start date of %s has been changed!" % downtimes[3])
                        if not result["OK"]:
                            return result
                    if localDBdict["FORMATED_END_DATE"] != GOCDBdict["FORMATED_END_DATE"]:
                        result = self.rmClient.addOrModifyDowntimeCache(
                            downtimeID=localDBdict["DowntimeID"], endDate=GOCDBdict["FORMATED_END_DATE"]
                        )
                        gLogger.verbose("The end date of %s has been changed!" % downtimes[3])
                        if not result["OK"]:
                            return result
        return S_OK()
    def doCache(self):
        # Nothing to cache for this command.
        return S_OK()
    def doMaster(self):
        """
        This method calls the doNew method for each hostname that exists
        in the DowntimeCache table of the local database.
        :return: S_OK / S_ERROR
        """
        # Query DB for all downtimes
        result = self.rmClient.selectDowntimeCache()
        if not result["OK"]:
            return result
        for data in result["Value"]:
            # If already processed don't do it again
            if data[0] in self.seenHostnames:
                continue
            # data[0] contains the hostname
            gLogger.verbose("Checking if the downtime of %s has been changed" % data[0])
            result = self.doNew(data[0])
            if not result["OK"]:
                return result
            self.seenHostnames.add(data[0])
        return S_OK()
# --- file boundary: src/DIRAC/ResourceStatusSystem/Command/GOCDBSyncCommand.py (DIRACGrid/DIRAC, GPL-3.0) ---
import sys
import decimal
import math
from importlib import import_module
from time import time
# Make the parent directory importable and load the local decmath module
# dynamically (it is not installed as a package).
sys.path.append("..")
decmath = import_module("decmath")
# High precision so DecMath results are more accurate (and slower) than
# the float-based math module being benchmarked against.
decimal.getcontext().prec = 70
def tts(args):
    """Format an iterable as a parenthesized, comma-separated string of
    reprs, e.g. ``('erf', 13)`` -> ``"('erf', 13)"``.

    The parameter was renamed from ``tuple``, which shadowed the builtin.
    """
    return '(%s)' % ', '.join(map(repr, args))
def Bench(func, *args):
    """Benchmark *func* (a function name) with *args*, once via the
    float-based stdlib ``math`` module and once via ``decmath``, printing
    each result and its wall-clock time.
    """
    call_repr = func + tts(args)
    start = time()
    print("Standard Math:")
    print(call_repr + " =", getattr(math, func)(*args))
    print("In", time() - start, "seconds.")
    print()
    start = time()
    print("DecMath:")
    print(call_repr + " =", getattr(decmath, func)(*args))
    print("In", time() - start, "seconds.")
    print("----------------------------------")
    print()
# Compare math.erf(13) against decmath.erf(13).
Bench("erf", 13)
# --- file boundary: util/Benchmarking.py (ElecProg/decmath, MIT) ---
#!/usr/bin/env python
#
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import time
from gnuradio import gr, gr_unittest, blocks
import pmt
class test_pdu(gr_unittest.TestCase):
    """QA tests for GNU Radio's PDU <-> tagged-stream conversion blocks."""
    def setUp(self):
        self.tb = gr.top_block()
    def tearDown(self):
        self.tb = None
    def test_000(self):
        # Post a PDU into pdu_to_tagged_stream and check the same bytes
        # come out both as a stream and, via tagged_stream_to_pdu, as a PDU.
        # Just run some data through and make sure it doesn't puke.
        src_data = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
        src = blocks.pdu_to_tagged_stream(blocks.byte_t)
        snk3 = blocks.tagged_stream_to_pdu(blocks.byte_t)
        snk2 = blocks.vector_sink_b()
        snk = blocks.tag_debug(1, "test")
        snk.set_display(False)
        dbg = blocks.message_debug()
        # Test that the right number of ports exist.
        pi = snk3.message_ports_in()
        po = snk3.message_ports_out()
        self.assertEqual(len(pi), 1)  #system port is defined automatically
        self.assertEqual(len(po), 1)
        self.tb.connect(src, snk)
        self.tb.connect(src, snk2)
        self.tb.connect(src, snk3)
        self.tb.msg_connect(snk3, "pdus", dbg, "store")
        # make our reference and message pmts
        port = "pdus"
        msg = pmt.cons( pmt.PMT_NIL, pmt.make_u8vector(16, 0xFF))
        # post the message
        src.to_basic_block().post(port, msg)
        src.to_basic_block().post("system",
                pmt.cons(pmt.intern("done"), pmt.from_long(1)))
        self.tb.start()
        self.tb.wait()
        # Get the vector of data from the vector sink
        result_data = snk2.data()
        # Get the vector of data from the message sink
        # Convert the message PMT as a pair into its vector
        result_msg = dbg.get_message(0)
        msg_vec = pmt.cdr(result_msg)
        #pmt.print(msg_vec)
        # Convert the PMT vector into a Python list
        msg_data = []
        for i in range(16):
            msg_data.append(pmt.u8vector_ref(msg_vec, i))
        actual_data = 16*[0xFF,]
        self.assertEqual(actual_data, list(result_data))
        self.assertEqual(actual_data, msg_data)
    def test_001(self):
        #Test the overflow buffer in pdu_to_tagged_stream
        src_data = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0]
        src = blocks.pdu_to_tagged_stream(blocks.float_t)
        snk = blocks.vector_sink_f()
        self.tb.connect(src, snk)
        port = "pdus"
        msg = pmt.cons( pmt.PMT_NIL, pmt.init_f32vector(10, src_data))
        src.to_basic_block().post(port, msg)
        src.to_basic_block().post("system",
                pmt.cons(pmt.intern("done"), pmt.from_long(1)))
        self.tb.start()
        self.tb.wait()
        self.assertEqual(src_data, list(snk.data()) )
    def test_002_tags_plus_data(self):
        # Stream tags inside a packet must end up in the PDU's metadata
        # dict, and the samples in the PDU's data vector.
        packet_len = 16
        src_data = list(range(packet_len))
        tag1 = gr.tag_t()
        tag1.offset = 0
        tag1.key = 'spam'
        tag1.value = pmt.from_long(23)
        tag2 = gr.tag_t()
        tag2.offset = 10  # Must be < packet_len
        tag2.key = 'eggs'
        tag2.value = pmt.from_long(42)
        src = blocks.vector_source_f(src_data, tags=(tag1, tag2))
        s2ts = blocks.stream_to_tagged_stream(gr.sizeof_float, vlen=1, packet_len=packet_len, len_tag_key="packet_len")
        ts2pdu = blocks.tagged_stream_to_pdu(blocks.float_t, "packet_len")
        dbg = blocks.message_debug()
        self.tb.connect(src, s2ts, ts2pdu)
        self.tb.msg_connect(ts2pdu, "pdus", dbg, "store")
        self.tb.start()
        self.tb.wait()
        result_msg = dbg.get_message(0)
        metadata = pmt.to_python(pmt.car(result_msg))
        vector = pmt.f32vector_elements(pmt.cdr(result_msg))
        self.assertEqual(metadata, {'eggs': 42, 'spam': 23})
        self.assertFloatTuplesAlmostEqual(tuple(vector), src_data)
if __name__ == '__main__':
    # Run the suite with GNU Radio's unittest wrapper (writes an XML report).
    gr_unittest.run(test_pdu, "test_pdu.xml")
# --- file boundary: gr-blocks/python/blocks/qa_pdu.py (bastibl/gnuradio, GPL-3.0) ---
# -*- coding: utf-8 -*-
# Copyright (C) 2018 Compassion CH
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import models, fields, api
class HrAttendanceRules(models.Model):
    """Rule mapping a range of worked hours to the break time that is due.

    Example: a rule "7 - 9" says that for 7 to 9 hours of work the employee
    is entitled to the configured minimum/total break.
    """
    _name = 'hr.attendance.rules'
    _description = "HR attendance break time rule"
    ##########################################################################
    #                                 FIELDS                                 #
    ##########################################################################
    # Display name computed from the integer hour range, e.g. "7 - 9".
    name = fields.Char('Name', compute='_compute_name')
    # Lower bound (in hours) of the worked-time range this rule covers.
    time_from = fields.Float(
        'From', help='Threshold in hours when the duration break change')
    # Upper bound (in hours) of the worked-time range.
    time_to = fields.Float('To', help='In hour')
    due_break = fields.Float('Minimum break', help='In hour')
    due_break_total = fields.Float('Total break', help='In hour')
    ##########################################################################
    #                             FIELDS METHODS                             #
    ##########################################################################
    @api.multi
    @api.depends('time_from', 'time_to')
    def _compute_name(self):
        # Truncates to whole hours: time_from=7.5, time_to=9.0 -> "7 - 9".
        for this in self:
            this.name = str(int(this.time_from)) + ' - ' + str(int(
                this.time_to))
# --- file boundary: hr_attendance_management/models/hr_attendance_rules.py (eicher31/compassion-modules, AGPL-3.0) ---
import game2
import othello
class benchPlayer(object):
"""
Reinforcement Learning in The Game Of Othello - By Michiel Van Der Ree and Marco Wiering(IEEE member)
http://www.ai.rug.nl/~mwiering/GROUP/ARTICLES/paper-othello.pdf
A better evaluation function which gives more preference to squares
on the edge of the board and on the corners.
"""
    def __init__(self):
        # Stateless evaluator: nothing to initialize.
        pass
def get_evaluation_score(self,game):
# if the game is over, give a 1000 point bonus to the winning player
if game.terminal_test():
score = game.score()
if score > 0:
return 1000
elif score < 0:
return -1000
else:
return 0
opp = -1 * game.player # the opponent
board_values = [[80,-26,24,-1,-5,28,-18,76],
[-23,-39,-18,-9,-6,-8,-39,-1],
[46,-16,4,1,-3,6,-20,52],
[-13,-5,2,-1,4,3,-12,-2],
[5,-6,1,-2,-3,0,-9,-5],
[48,-13,12,5,0,5,-24,41],
[-27,-53,-11,-1,-11,-16,-58,-15],
[87,-25,27,-1,5,36,-3,100]]
score = 0
for i in othello.range_size:
for j in othello.range_size:
# any piece gets a value of 1
# an edge piece gets a value of 6
# a corner piece gets a value of 11
# subtract 10 for the four diagonal square near the corners
# subtract 5 for the rows and cols near the edge
# TODO: only charge the penalty when the edge or corner is not
# occupied
delta = board_values[i][j]
if game.board[i][j] == game.player:
score += delta
elif game.board[i][j] == opp:
score -= delta
return score
def play_next_move(self, game_orig):
"""
Find the best move in the game
Returns a tuple (estimated value, operator)
The game must support the following functions:
copy() to make a deep copy of the game
terminal_test() to determine whether the game is over
"""
best = None
# try each move
for move in game_orig.generate_moves():
g = game_orig.copy()
g.play_move(move)
# evaluate the position and choose the best move
# NOTE: the minimax function computes the value for the current
# player which is the opponent so we need to invert the value
val = -1 * self.get_evaluation_score(g)
# update the best operator so far
if best is None or val > best[0]:
best = (val, move)
return best | shubhamjain0594/OthelloReinforcementLearning | bench.py | Python | gpl-2.0 | 2,751 |
#!/usr/bin/env python3
# coding=utf-8
# The arrow library is used to handle datetimes
import arrow
# The request library is used to fetch content through HTTP
import requests
# The BeautifulSoup library is used to parse HTML
from bs4 import BeautifulSoup
def fetch_production(zone_key='PA', session=None, target_datetime=None, logger=None):
    """Requests the last known production mix (in MW) of a given country
    Arguments:
    zone_key (optional) -- used in case a parser is able to fetch multiple countries
    session (optional) -- request session passed in order to re-use an existing session
    Return:
    A dictionary in the form:
    {
      'zoneKey': 'FR',
      'datetime': '2017-01-01T00:00:00Z',
      'production': {
          'biomass': 0.0,
          'coal': 0.0,
          'gas': 0.0,
          'hydro': 0.0,
          'nuclear': null,
          'oil': 0.0,
          'solar': 0.0,
          'wind': 0.0,
          'geothermal': 0.0,
          'unknown': 0.0
      },
      'storage': {
          'hydro': -10.0,
      },
      'source': 'mysource.com'
    }
    """
    if target_datetime:
        raise NotImplementedError('This parser is not yet able to parse past dates')
    r = session or requests.session()
    # Scrape the live generation pie chart from the Panamanian dispatch
    # center's mobile page.
    url = 'http://sitr.cnd.com.pa/m/pub/gen.html'
    response = r.get(url)
    response.encoding = 'utf-8'
    html_doc = response.text
    soup = BeautifulSoup(html_doc, 'html.parser')
    productions = soup.find('table', {'class': 'sitr-pie-layout'}).find_all('span')
    # Map the Spanish source labels to electricitymap production modes.
    map_generation = {
        'Hídrica': 'hydro',
        'Eólica': 'wind',
        'Solar': 'solar',
        'Biogas': 'gas',
        'Térmica': 'unknown'
    }
    data = {
        'zoneKey': 'PA',
        'production': {},
        'storage': {},
        'source': 'https://www.cnd.com.pa/',
    }
    # Each span looks like "Hídrica 1234.5": label followed by MW value.
    for prod in productions:
        prod_data = prod.string.split(' ')
        production_mean = map_generation[prod_data[0]]
        production_value = float(prod_data[1])
        data['production'][production_mean] = production_value
    # Parse the datetime and return a python datetime object
    spanish_date = soup.find('div', {'class': 'sitr-update'}).find('span').string
    date = arrow.get(spanish_date, 'DD-MMMM-YYYY H:mm:ss', locale="es", tzinfo="America/Panama")
    data['datetime'] = date.datetime
    return data
if __name__ == '__main__':
    """Main method, never used by the Electricity Map backend, but handy for testing."""
    # Prints the live production mix when run as a script.
    print('fetch_production() ->')
    print(fetch_production())
# --- file boundary: parsers/PA.py (tmrowco/electricitymap, GPL-3.0) ---
class VigenereCracker:
    """Break a repeating-key XOR (Vigenere-style) cipher.

    The key length is recovered by maximising the sum of squared symbol
    frequencies over every candidate period; each key byte is then found by
    trying all 255 non-zero values and keeping the one whose letter-frequency
    statistic is closest to the target language's expected value.
    """

    def __init__(self, language, minLen, maxLen):
        # Object exposing containsLetter()/getFrequency() for the language.
        self.LANGUAGE = language
        # Candidate key lengths to try; note range() excludes maxLen itself.
        self.KEYLENBOUNDS = range(minLen, maxLen)
        # Expected sum of q_i * p_i for correctly decrypted natural text.
        self.SUMQPSQUARE = 0.065
        self.KEYLENGTHFOUND = -1
        self.KEY = []
        self.CONTENT = None

    def setContent(self, _content):
        """Set the ciphertext (sequence of integer byte values) to analyse."""
        self.CONTENT = _content

    def reset(self):
        """Forget the ciphertext and any previously recovered key."""
        self.KEYLENGTHFOUND = -1
        self.CONTENT = None
        self.KEY = []

    def __FoundKeyLen(self):
        """Guess and store the key length; return it (None without content)."""
        if not self.CONTENT:
            return None
        _KEYLENDICT_ = {}
        for i in self.KEYLENBOUNDS:
            # Frequency table of every i-th ciphertext byte.
            retChar = self.takeCharEveryKPos(0, i, self.CONTENT)
            _KEYLENDICT_[i] = self.countOccurrenceAndFrequency(retChar)
        _kMAX = -1
        _sumQsquareMAX = 0
        for k in _KEYLENDICT_:
            # The correct period yields the most "peaked" distribution.
            _val = self.computeSumQiSquare(_KEYLENDICT_[k])
            if _val > _sumQsquareMAX:
                _sumQsquareMAX = _val
                _kMAX = k
        self.KEYLENGTHFOUND = _kMAX
        return _kMAX

    def getKeyLen(self):
        """Return the key length found so far (-1 if none)."""
        return self.KEYLENGTHFOUND

    def FoundKey(self):
        """Recover the whole key; return the key byte list or None on failure."""
        if not self.CONTENT:
            return None
        self.__FoundKeyLen()
        if self.KEYLENGTHFOUND == -1:
            return None
        for i in range(self.KEYLENGTHFOUND):
            _resultsDecrypt = {}
            _firstTryCrypt = self.takeCharEveryKPos(i, self.KEYLENGTHFOUND, self.CONTENT)
            # XOR the stripe with every candidate key byte (0 is skipped:
            # it would leave the ciphertext unchanged).
            for tryK in range(1, 256):
                _resultsDecrypt[tryK] = [el ^ tryK for el in _firstTryCrypt]
            # Keep only candidates that decrypt to printable ASCII.
            _candidateDecrypt = {}
            for tryK in _resultsDecrypt:
                if self.verifyDecrypt(_resultsDecrypt[tryK]):
                    _candidateDecrypt[tryK] = _resultsDecrypt[tryK]
            _maximizeK = 0
            _maximizeSum = 0
            for candidateK in _candidateDecrypt:
                _map = self.countOccurrenceAndFrequency(_candidateDecrypt[candidateK])
                _val = self.computeSumQPiSquareLowerCaseLetter(_map)
                # Pick the key byte whose statistic is closest to the
                # language's expected value.
                if abs(_val - self.SUMQPSQUARE) < abs(_maximizeSum - self.SUMQPSQUARE):
                    _maximizeK = candidateK
                    _maximizeSum = _val
            self.KEY.append(_maximizeK)
        return self.KEY

    def takeCharEveryKPos(self, start_pos, k_pos, content):
        """Return every k_pos-th element of content starting at start_pos.

        Bug fix: the previous version unconditionally appended
        content[start_pos] and raised IndexError when start_pos was past the
        end; slicing returns an empty list instead.
        """
        return list(content[start_pos::k_pos])

    def countOccurrenceAndFrequency(self, content):
        """Map each symbol to its occurrence count and relative frequency."""
        _map = {}
        if not content:
            # Bug fix: avoid ZeroDivisionError on an empty stripe.
            return _map
        for value in content:
            if value not in _map:
                # NOTE: 'Occurence' spelling is historical; kept because the
                # keys are part of this class's (semi-)public dict format.
                _map[value] = {'Occurence': 0, 'Frequency': 0}
            _map[value]['Occurence'] += 1
        total = float(len(content))
        for value in _map:
            _map[value]['Frequency'] = _map[value]['Occurence'] / total
        return _map

    def computeSumQiSquare(self, _map):
        """Sum of squared frequencies; high for monoalphabetic stripes."""
        _sum = 0.0
        for el in _map:
            _q = _map[el]['Frequency']
            _sum += _q * _q
        return _sum

    def computeSumQPiSquareLowerCaseLetter(self, _map):
        """Sum of observed frequency times expected language frequency,
        restricted to symbols the language recognises as letters."""
        _sum = 0.0
        for el in _map:
            if self.LANGUAGE.containsLetter(el):
                _sum += _map[el]['Frequency'] * self.LANGUAGE.getFrequency(el)
        return _sum

    def verifyDecrypt(self, content):
        """True when every byte is printable-range ASCII (32..127 inclusive)."""
        for el in content:
            if el < 32 or el > 127:
                return False
        return True
| JohnJakeChambers/break-the-vigenere | VigenereCracker.py | Python | gpl-3.0 | 3,920 |
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2015 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import fixtures
import logging
import os
import stat
import tempfile
from unittest.mock import ANY, call, patch
import snapcraft
from snapcraft import repo
from snapcraft import tests
class UbuntuTestCase(tests.TestCase):
    """Tests for the Ubuntu apt backend and artifact fix-ups in snapcraft.repo."""
    def setUp(self):
        super().setUp()
        # Silence log output below ERROR so test runs stay quiet.
        fake_logger = fixtures.FakeLogger(level=logging.ERROR)
        self.useFixture(fake_logger)
        # Scratch directory, cleaned up automatically after each test.
        tempdirObj = tempfile.TemporaryDirectory()
        self.addCleanup(tempdirObj.cleanup)
        self.tempdir = tempdirObj.name
    @patch('snapcraft.repo.apt')
    def test_get_package(self, mock_apt):
        # Fetching packages must configure apt, update the cache and only
        # request arch-qualified packages matching the project's deb arch.
        project_options = snapcraft.ProjectOptions(
            use_geoip=False)
        ubuntu = repo.Ubuntu(self.tempdir, project_options=project_options)
        ubuntu.get(['fake-package', 'fake-package-arch1:test',
                    'fake-package-arch2:%s' % project_options.deb_arch])
        # Expected apt configuration / cache interaction sequence.
        mock_apt.assert_has_calls([
            call.apt_pkg.config.set('Dir::Cache::Archives',
                                    os.path.join(self.tempdir, 'download')),
            call.apt_pkg.config.set('Apt::Install-Recommends', 'False'),
            call.apt_pkg.config.find_file('Dir::Etc::Trusted'),
            call.apt_pkg.config.set('Dir::Etc::Trusted', ANY),
            call.apt_pkg.config.find_file('Dir::Etc::TrustedParts'),
            call.apt_pkg.config.set('Dir::Etc::TrustedParts', ANY),
            call.apt_pkg.config.clear('APT::Update::Post-Invoke-Success'),
            call.progress.text.AcquireProgress(),
            call.Cache(memonly=True, rootdir=ANY),
            call.Cache().update(fetch_progress=ANY, sources_list=ANY),
            call.Cache(memonly=True, rootdir=self.tempdir),
            call.Cache().open(),
        ])
        mock_apt.assert_has_calls([
            call.Cache().fetch_archives(progress=ANY),
        ])
        # __getitem__ is tricky
        self.assertIn(
            call('fake-package'),
            mock_apt.Cache().__getitem__.call_args_list)
        # The ':test' arch qualifier does not match, so it must be skipped.
        self.assertNotIn(
            call('fake-package-arch1'),
            mock_apt.Cache().__getitem__.call_args_list)
        self.assertIn(
            call('fake-package-arch2'),
            mock_apt.Cache().__getitem__.call_args_list)
    @patch('snapcraft.repo._get_geoip_country_code_prefix')
    def test_sources_is_none_uses_default(self, mock_cc):
        # With geoip enabled, the default sources get the country prefix.
        mock_cc.return_value = 'ar'
        self.maxDiff = None
        sources_list = repo._format_sources_list(
            '', use_geoip=True, deb_arch='amd64')
        expected_sources_list = \
            '''deb http://ar.archive.ubuntu.com/ubuntu/ xenial main restricted
deb http://ar.archive.ubuntu.com/ubuntu/ xenial-updates main restricted
deb http://ar.archive.ubuntu.com/ubuntu/ xenial universe
deb http://ar.archive.ubuntu.com/ubuntu/ xenial-updates universe
deb http://ar.archive.ubuntu.com/ubuntu/ xenial multiverse
deb http://ar.archive.ubuntu.com/ubuntu/ xenial-updates multiverse
deb http://security.ubuntu.com/ubuntu xenial-security main restricted
deb http://security.ubuntu.com/ubuntu xenial-security universe
deb http://security.ubuntu.com/ubuntu xenial-security multiverse
'''
        self.assertEqual(sources_list, expected_sources_list)
    def test_no_geoip_uses_default_archive(self):
        # Without geoip the unprefixed archive.ubuntu.com mirror is used.
        sources_list = repo._format_sources_list(
            repo._DEFAULT_SOURCES, deb_arch='amd64', use_geoip=False)
        expected_sources_list = \
            '''deb http://archive.ubuntu.com/ubuntu/ xenial main restricted
deb http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted
deb http://archive.ubuntu.com/ubuntu/ xenial universe
deb http://archive.ubuntu.com/ubuntu/ xenial-updates universe
deb http://archive.ubuntu.com/ubuntu/ xenial multiverse
deb http://archive.ubuntu.com/ubuntu/ xenial-updates multiverse
deb http://security.ubuntu.com/ubuntu xenial-security main restricted
deb http://security.ubuntu.com/ubuntu xenial-security universe
deb http://security.ubuntu.com/ubuntu xenial-security multiverse
'''
        self.assertEqual(sources_list, expected_sources_list)
    @patch('snapcraft.repo._get_geoip_country_code_prefix')
    def test_sources_amd64_vivid(self, mock_cc):
        # An explicit release overrides the default (xenial) suite names.
        self.maxDiff = None
        mock_cc.return_value = 'ar'
        sources_list = repo._format_sources_list(
            repo._DEFAULT_SOURCES, deb_arch='amd64',
            use_geoip=True, release='vivid')
        expected_sources_list = \
            '''deb http://ar.archive.ubuntu.com/ubuntu/ vivid main restricted
deb http://ar.archive.ubuntu.com/ubuntu/ vivid-updates main restricted
deb http://ar.archive.ubuntu.com/ubuntu/ vivid universe
deb http://ar.archive.ubuntu.com/ubuntu/ vivid-updates universe
deb http://ar.archive.ubuntu.com/ubuntu/ vivid multiverse
deb http://ar.archive.ubuntu.com/ubuntu/ vivid-updates multiverse
deb http://security.ubuntu.com/ubuntu vivid-security main restricted
deb http://security.ubuntu.com/ubuntu vivid-security universe
deb http://security.ubuntu.com/ubuntu vivid-security multiverse
'''
        self.assertEqual(sources_list, expected_sources_list)
    @patch('snapcraft.repo._get_geoip_country_code_prefix')
    def test_sources_armhf_trusty(self, mock_cc):
        # Non-x86 arches use ports.ubuntu.com and never hit geoip.
        sources_list = repo._format_sources_list(
            repo._DEFAULT_SOURCES, deb_arch='armhf', release='trusty')
        expected_sources_list = \
            '''deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ trusty universe
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates universe
deb http://ports.ubuntu.com/ubuntu-ports/ trusty multiverse
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates multiverse
deb http://ports.ubuntu.com/ubuntu-ports trusty-security main restricted
deb http://ports.ubuntu.com/ubuntu-ports trusty-security universe
deb http://ports.ubuntu.com/ubuntu-ports trusty-security multiverse
'''
        self.assertEqual(sources_list, expected_sources_list)
        self.assertFalse(mock_cc.called)
    def test_fix_symlinks(self):
        # Absolute symlinks pointing inside the debdir must be rewritten to
        # relative ones; dangling absolute targets ('/b') are left alone.
        os.makedirs(self.tempdir + '/a')
        open(self.tempdir + '/1', mode='w').close()
        os.symlink('a', self.tempdir + '/rel-to-a')
        os.symlink('/a', self.tempdir + '/abs-to-a')
        os.symlink('/b', self.tempdir + '/abs-to-b')
        os.symlink('1', self.tempdir + '/rel-to-1')
        os.symlink('/1', self.tempdir + '/abs-to-1')
        repo._fix_artifacts(debdir=self.tempdir)
        self.assertEqual(os.readlink(self.tempdir + '/rel-to-a'), 'a')
        self.assertEqual(os.readlink(self.tempdir + '/abs-to-a'), 'a')
        self.assertEqual(os.readlink(self.tempdir + '/abs-to-b'), '/b')
        self.assertEqual(os.readlink(self.tempdir + '/rel-to-1'), '1')
        self.assertEqual(os.readlink(self.tempdir + '/abs-to-1'), '1')
    def test_fix_suid(self):
        # setuid/setgid bits must be stripped; the sticky bit is preserved.
        # Mapping: file name -> (mode before fix, expected mode after fix).
        files = {
            'suid_file': (0o4765, 0o0765),
            'guid_file': (0o2777, 0o0777),
            'suid_guid_file': (0o6744, 0o0744),
            'suid_guid_sticky_file': (0o7744, 0o1744),
        }
        for key in files:
            with self.subTest(key=key):
                file = os.path.join(self.tempdir, key)
                open(file, mode='w').close()
                os.chmod(file, files[key][0])
                repo._fix_artifacts(debdir=self.tempdir)
                self.assertEqual(
                    stat.S_IMODE(os.stat(file).st_mode), files[key][1])
    def test_fix_pkg_config(self):
        # The pkg-config prefix must be re-rooted into the unpack directory.
        pc_file = os.path.join(self.tempdir, 'granite.pc')
        with open(pc_file, 'w') as f:
            f.write('prefix=/usr\n')
            f.write('exec_prefix=${prefix}\n')
            f.write('libdir=${prefix}/lib\n')
            f.write('includedir=${prefix}/include\n')
            f.write('\n')
            f.write('Name: granite\n')
            f.write('Description: elementary\'s Application Framework\n')
            f.write('Version: 0.4\n')
            f.write('Libs: -L${libdir} -lgranite\n')
            f.write('Cflags: -I${includedir}/granite\n')
            f.write('Requires: cairo gee-0.8 glib-2.0 gio-unix-2.0 '
                    'gobject-2.0\n')
        repo._fix_artifacts(debdir=self.tempdir)
        with open(pc_file) as f:
            pc_file_content = f.read()
        expected_pc_file_content = """prefix={}/usr
exec_prefix=${{prefix}}
libdir=${{prefix}}/lib
includedir=${{prefix}}/include
Name: granite
Description: elementary's Application Framework
Version: 0.4
Libs: -L${{libdir}} -lgranite
Cflags: -I${{includedir}}/granite
Requires: cairo gee-0.8 glib-2.0 gio-unix-2.0 gobject-2.0
""".format(self.tempdir)
        self.assertEqual(pc_file_content, expected_pc_file_content)
    def test_fix_shebang(self):
        # Hard-coded '#!/usr/bin/python' in bin/sbin dirs becomes
        # '#!/usr/bin/env python'; other paths and python3 are untouched.
        rootdir = 'root'
        files = [
            {
                'path': os.path.join(rootdir, 'bin', 'a'),
                'content': '#!/usr/bin/python\nimport this',
                'expected': '#!/usr/bin/env python\nimport this',
            },
            {
                'path': os.path.join(rootdir, 'sbin', 'b'),
                'content': '#!/usr/bin/python\nimport this',
                'expected': '#!/usr/bin/env python\nimport this',
            },
            {
                'path': os.path.join(rootdir, 'usr', 'bin', 'c'),
                'content': '#!/usr/bin/python\nimport this',
                'expected': '#!/usr/bin/env python\nimport this',
            },
            {
                'path': os.path.join(rootdir, 'usr', 'sbin', 'd'),
                'content': '#!/usr/bin/python\nimport this',
                'expected': '#!/usr/bin/env python\nimport this',
            },
            {
                'path': os.path.join(rootdir, 'opt', 'bin', 'e'),
                'content': '#!/usr/bin/python\nraise Exception()',
                'expected': '#!/usr/bin/python\nraise Exception()',
            },
            {
                'path': os.path.join(rootdir, 'bin', 'd'),
                'content': '#!/usr/bin/python3\nraise Exception()',
                'expected': '#!/usr/bin/python3\nraise Exception()',
            },
        ]
        for f in files:
            with self.subTest(key=f['path']):
                os.makedirs(os.path.dirname(f['path']), exist_ok=True)
                with open(f['path'], 'w') as fd:
                    fd.write(f['content'])
                repo._fix_shebangs(rootdir)
                with open(f['path'], 'r') as fd:
                    self.assertEqual(fd.read(), f['expected'])
class BuildPackagesTestCase(tests.TestCase):
    """Tests for repo.install_build_packages error handling."""
    def test_invalid_package_requested(self):
        # Requesting a package absent from the apt cache must raise a
        # descriptive EnvironmentError naming the offending package.
        with self.assertRaises(EnvironmentError) as raised:
            repo.install_build_packages(['package-does-not-exist'])
        self.assertEqual(
            "Could not find a required package in 'build-packages': "
            '"The cache has no package named \'package-does-not-exist\'"',
            str(raised.exception))
| stgraber/snapcraft | snapcraft/tests/test_repo.py | Python | gpl-3.0 | 11,643 |
# -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
import os
import stat
import sys
# NOTE: this file is generated by catkin; manual edits will be overwritten
# on the next build.
# find the import for catkin's python package - either from source space or from an installed underlay
if os.path.exists(os.path.join('/opt/ros/kinetic/share/catkin/cmake', 'catkinConfig.cmake.in')):
    sys.path.insert(0, os.path.join('/opt/ros/kinetic/share/catkin/cmake', '..', 'python'))
try:
    from catkin.environment_cache import generate_environment_script
except ImportError:
    # search for catkin package in all workspaces and prepend to path
    for workspace in "/opt/ros/kinetic".split(';'):
        python_path = os.path.join(workspace, 'lib/python2.7/dist-packages')
        if os.path.isdir(os.path.join(python_path, 'catkin')):
            sys.path.insert(0, python_path)
            break
    from catkin.environment_cache import generate_environment_script
# Capture the environment produced by devel/env.sh and write it out as a
# shell script so later catkin invocations can source it quickly.
code = generate_environment_script('/home/pranay/ros_ws/kraken_3.0/resources/build/devel/env.sh')
output_filename = '/home/pranay/ros_ws/kraken_3.0/resources/build/catkin_generated/setup_cached.sh'
with open(output_filename, 'w') as f:
    #print('Generate script for cached setup "%s"' % output_filename)
    f.write('\n'.join(code))
# Make the cached setup script executable for the owning user.
mode = os.stat(output_filename).st_mode
os.chmod(output_filename, mode | stat.S_IXUSR)
| pranaypratyush/buoy_detect | resources/build/catkin_generated/generate_cached_setup.py | Python | mit | 1,316 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
import tvm
AllTypes = ["float32", "float64", "float16", "uint8", "int8", "int32", "int64"]
RealTypes = ["float32", "float64", "float16"]
def assign_by_req(a, req, otype=None):
    """Build the (placeholder, compute) pair implementing a req-style store.

    For req == "kAddTo" the result accumulates into the destination
    placeholder; otherwise the input is simply copied. When otype is given
    the input tensor is cast to that dtype first.
    """
    dst = tvm.te.placeholder(a.shape, name='assign_by_req_b', dtype=a.dtype)
    if req == "kAddTo":
        if otype:
            out = tvm.te.compute(a.shape, lambda *idx: a[idx].astype(otype) + dst[idx])
        else:
            out = tvm.te.compute(a.shape, lambda *idx: a[idx] + dst[idx])
    else:
        if otype:
            out = tvm.te.compute(a.shape, lambda *idx: a[idx].astype(otype))
        else:
            out = tvm.te.compute(a.shape, lambda *idx: a[idx])
    return dst, out
def reduce_axes(X, axes, reducer, atype=None):
    """Reduce X over the axes marked with 1 in *axes*.

    *axes* is a 0/1 mask over X's dimensions: 0 means the axis is kept,
    non-zero means it is reduced with *reducer*. When atype is given the
    elements are cast to that dtype before reduction.
    """
    def get_index(idx, ridx):
        # Interleave kept-axis indices (idx) and reduction-axis indices
        # (ridx) back into X's original axis order.
        full = []
        kept = 0
        reduced = 0
        for flag in axes:
            if flag == 0:
                full.append(idx[kept])
                kept += 1
            else:
                full.append(ridx[reduced])
                reduced += 1
        return tuple(full)

    ishape = X.shape
    odim = (len(ishape) + 1 - axes[0]) // 2
    oshape = [tvm.te.size_var() for _ in range(odim)]
    ridx = []
    for pos, flag in enumerate(axes):
        if flag == 1:
            ridx.append(tvm.te.reduce_axis((0, ishape[pos])))
    if atype:
        body = lambda *idx: reducer(X[get_index(idx, ridx)].astype(atype), axis=ridx)
    else:
        body = lambda *idx: reducer(X[get_index(idx, ridx)], axis=ridx)
    return tvm.te.compute(oshape, body, name='ret')
| leezu/mxnet | contrib/tvmop/utils.py | Python | apache-2.0 | 2,129 |
from paddle.trainer_config_helpers import *
settings(learning_rate=1e-4, batch_size=1000)
# Two identical 10-dimensional input layers.
a = data_layer(name='a', size=10)
b = data_layer(name='b', size=10)
# Element-wise sum of the two inputs (size stays 10).
result = addto_layer(input=[a, b])
# Concatenation of the raw layer outputs (size 20).
concat1 = concat_layer(input=[a, b])
# Same concatenation expressed through identity projections.
concat2 = concat_layer(input=[
    identity_projection(input=a),
    identity_projection(input=b)
])
outputs(result, concat1, concat2)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
from os import path
from .tools import accepted_extensions, get_setting
from ..libraries.readconfig import ReadConfig
from ..platformio.project_recognition import ProjectRecognition
from .quick_menu import QuickMenu
from ..api import deviot
logger = deviot.create_logger('Deviot')
class ProjectCheck(QuickMenu):
    """
    ProjectCheck handles the actions between sublime text and platformio.
    Before running a platformio command like initialize, compile or upload,
    this class checks if the project meets the requirements to proceed with
    the command, for example if the current file has been saved, or if it's
    saved in the src folder when the platformio structure option is marked
    """

    def __init__(self):
        super(ProjectCheck, self).__init__()
        deviot.set_logger_level()
        # Selected environment (board), serial port and init option; all
        # filled lazily by the check_* methods.
        self.board_id = None
        self.port_id = None
        self.init_option = None

    def is_iot(self):
        """IOT

        Checks if the file in the current view is in the list
        of the IOT types (accepted) or not

        Returns:
            bool -- true if is in the list false if not
        """
        ext = self.get_file_extension()
        accepted = accepted_extensions()
        if(ext not in accepted):
            return False
        return True

    def is_empty(self):
        """Empty File

        Checks if the file is empty or not

        Returns:
            bool -- true is if empty
        """
        size = self.view.size()
        if(size > 0):
            return False
        return True

    def is_unsaved(self):
        """Unsaved View

        Check if the view has unsaved changes

        Returns:
            bool -- True if it's unsaved
        """
        return self.view.is_dirty()

    def check_main_requirements(self):
        """Main Requirements

        If a sketch has been never unsaved and it's not empty,
        this method will save it with a randon name based in
        the time in the temp path. If the sketch is empty it
        will not allow the command (compile/uplaod/clean) to
        continue.

        It the sketch hasn't the an IOT extension, it will be
        considerated not IOT so, you can process the file.

        If the file has unsaved changes, it will save it before
        to process the file

        Returns:
            bool -- False if any of the requirements fails.
        """
        logger.debug("==============")
        logger.debug("check_main_requirements")
        if(self.is_empty()):
            logger.debug("empty sketch")
            self.print("not_empty_sketch")
            return False
        freeze = get_setting('freeze_sketch', None)
        if("Deviot" in self.view.name() and not freeze):
            logger.debug("file not iot (terminal, other)")
            self.print("not_iot_{0}", "")
            return False
        if(not self.get_file_name()):
            logger.debug("unsaved file")
            self.save_code_infile()
        self.cwd = self.get_working_project_path()
        if(not self.is_iot()):
            logger.debug("file not iot")
            self.print("not_iot_{0}", self.get_file_name())
            return False
        self.check_unsaved_changes()
        if(not path.exists(self.cwd)):
            from .tools import make_folder
            make_folder(self.cwd)
        logger.debug("requirements passed")
        return True

    def check_unsaved_changes(self):
        """Check unsaved changes

        Saves the changes if the view is dirty (with chages)
        """
        if(self.is_unsaved()):
            self.view.run_command('save')

    def structurize_project(self):
        """Structure Files

        If a project isn't initialized, it need to be checked
        if the open file is inside of the src folder, if it isn't
        the file need to be moved to the src folder
        """
        pio_structure = self.get_structure_option()
        if(pio_structure):
            file_path = self.get_file_path()
            dst = add_folder_to_filepath(file_path, 'src')
            if('src' not in file_path and not path.exists(dst)):
                from shutil import move
                from .tools import get_setting, save_setting
                move(file_path, dst)
                self.view.retarget(dst)
                if(get_setting('freeze_sketch', None)):
                    save_setting('freeze_sketch', dst)

    def override_src(self, wipe=False):
        """Adds src_dir

        When you don't want to keep the platformio file structure, you need
        to add the 'src_dir' flag in the platformio.ini with the path of your
        sketch/project. Here we add that option when platformio structure is
        not enabled
        """
        if(self.is_native()):
            return
        logger.debug("==============")
        logger.debug("override_src")
        logger.debug("wipe %s", wipe)
        write_file = False
        current_src = None
        platformio_head = 'platformio'
        pio_structure = self.get_structure_option()
        project_path = self.get_project_path()
        ini_path = self.get_ini_path()
        config = ReadConfig()
        config.read(ini_path)
        # get string if exists
        if(config.has_option(platformio_head, 'src_dir')):
            current_src = config.get(platformio_head, 'src_dir')[0]
        logger.debug("current_src %s", current_src)
        # remove option
        if(wipe and current_src):
            logger.debug("remove src_dir from ini")
            config.remove_option(platformio_head, 'src_dir')
            if(not config.options(platformio_head)):
                config.remove_section(platformio_head)
            write_file = True
        # check section
        if(not config.has_section(platformio_head)):
            logger.debug("added platformio header")
            config.add_section(platformio_head)
            write_file = True
        # change only in case it's different
        if(project_path != current_src):
            # Bug fix: the message previously had no placeholder, so logging
            # rejected the extra argument and the line was never emitted.
            logger.debug("update src_dir path to %s", project_path)
            config.set(platformio_head, 'src_dir', project_path)
            write_file = True
        if(write_file):
            logger.debug("writing ini file")
            with open(ini_path, 'w') as configfile:
                config.write(configfile)

    def close_file(self):
        """Close File Window

        Close the current focused windows in sublime text
        """
        self.window.run_command('close_file')

    def check_board_selected(self):
        """Checks Board Selection

        If an environment is stored in the preferences file, it will
        be loaded in the board_id object, if not, it will show the
        quick panel to select the board
        """
        logger.debug("==============")
        logger.debug("check_board_selected")
        self.board_id = self.get_environment()
        if(not self.board_id):
            selected_boards = self.get_selected_boards()
            if(not selected_boards):
                self.window.run_command('deviot_select_boards')
                return
            self.window.run_command('deviot_select_environment')
            return

    def check_port_selected(self):
        """Checks Serial Port Selection

        If the serial port is stored in the preferences file, it will
        be loaded in the port_id object, if not, it will show the
        quick panel to select the port
        """
        logger.debug("==============")
        logger.debug("check_port_selected")
        from re import search
        self.port_id = self.get_serial_port()
        if(self.port_id == 'not'):
            self.port_id = 'not'
            return
        ports_list = self.get_ports_list()
        ini_path = self.get_ini_path()
        if(ini_path):
            config = ReadConfig()
            config.read(ini_path)
            environment = 'env:{0}'.format(self.board_id)
            # A configured upload_protocol makes an explicit port optional.
            if(config.has_option(environment, 'upload_protocol')):
                self.port_id = True
                return
        port_ready = [port[2]
                      for port in ports_list if self.port_id == port[2]]
        # Accept a raw IPv4 address (OTA upload target) as a valid "port".
        ip_device = search(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$",
                           self.port_id) if self.port_id else None
        if(not port_ready and ip_device is None):
            self.window.run_command('deviot_select_port')
            self.port_id = None

    def check_serial_monitor(self):
        """Check monitor serial

        Checks if the monitor serial is currently running
        and close it.

        It will also stores a reference in the preferences (last_action)
        to run the serial monitor next time
        """
        from . import serial
        from .tools import save_setting
        port_id = self.port_id
        if(port_id in serial.serials_in_use):
            serial_monitor = serial.get_serial_monitor(port_id)
            serial_monitor.stop()
            del serial.serial_monitor_dict[port_id]
            save_setting('run_monitor', True)

    def check_auth_ota(self):
        """Check auth

        Checks if the selected port is a mdns service, and if it needs
        authentification to upload the sketch

        Returns:
            bool -- None when not auth, false when none pass is stored
        """
        logger.debug("==============")
        logger.debug("check_auth_ota")
        from .tools import get_setting
        from re import search
        ended = True
        platform = self.get_platform()
        if(not platform):
            return ended
        ip_device = search(
            r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", self.port_id)
        # OTA upload to an IP address is only supported on espressif.
        if(platform and 'espressif' not in platform and ip_device is not None):
            return False
        if(platform and 'espressif' not in platform):
            return ended
        auth = None
        ini_path = self.get_ini_path()
        config = ReadConfig()
        config.read(ini_path)
        ports_list = self.get_ports_list()
        for port in ports_list:
            if(self.port_id == port[2]):
                try:
                    auth = port[3]
                    break
                # Bug fix: was a bare `except:`, which also swallowed
                # SystemExit/KeyboardInterrupt; only the missing auth column
                # (IndexError from port[3]) is expected here.
                except IndexError:
                    return ended
        environment = 'env:{0}'.format(self.board_id)
        auth_pass = get_setting('auth_pass', None)
        if(auth is None):
            if(not auth_pass):
                if(config.has_option(environment, 'upload_flags')):
                    config.remove_option(environment, 'upload_flags')
                    logger.debug("writing upload_flags")
                    with open(ini_path, 'w') as configfile:
                        config.write(configfile)
            return ended
        if(auth != 'None' and not auth_pass):
            from .tools import save_sysetting
            self.window.run_command("deviot_set_password")
            save_sysetting('last_action', 3)
            return ended
        flag = '--auth={0}'.format(auth_pass)
        current = config.get(environment, 'upload_flags')
        if(current):
            current = current[0]
        if(current != flag):
            config.set(environment, 'upload_flags', flag)
            logger.debug("write auth ini")
            with open(ini_path, 'w') as configfile:
                config.write(configfile)
        return ended

    def save_code_infile(self):
        """Save Code

        If the sketch in the current view has been not saved, it generate
        a random name and stores in a temp folder.

        Arguments: view {ST Object} -- Object with multiples options of ST
        """
        from os import path
        from time import time
        from sublime import Region
        from ..libraries.file import File
        from ..libraries.tools import make_folder
        ext = '.ino'
        tmppath = self.get_temp_path()
        # Seconds-since-epoch makes a unique-enough scratch file name.
        filename = str(time()).split('.')[0]
        filepath = path.join(tmppath, filename, 'src')
        make_folder(filepath)
        fullpath = filename + ext
        fullpath = path.join(filepath, fullpath)
        region = Region(0, self.view.size())
        text = self.view.substr(region)
        file = File(fullpath)
        file.write(text)
        # Replace the unsaved scratch view with the saved file.
        self.view.set_scratch(True)
        self.window.run_command('close')
        self.view = self.window.open_file(fullpath)
def add_folder_to_filepath(src_path, new_folder):
    """Insert *new_folder* between the directory and file name of *src_path*.

    Arguments:
        src_path {str} -- initial path including the filename
        new_folder {str} -- folder name to insert before the file name

    Returns:
        str -- the rebuilt path containing the extra folder
    """
    return path.join(path.dirname(src_path), new_folder, path.basename(src_path))
| gepd/Deviot | libraries/project_check.py | Python | apache-2.0 | 13,120 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "huntnet.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| srohatgi/cloud | huntnet/manage.py | Python | apache-2.0 | 250 |
""" This module handles everything that is floor-related"""
from game_objects.serializable import Serializable
import logging
from error_stuff import log_error
class Curse(object):
    """Enumeration of the curses a floor can carry."""
    No_Curse = 0
    Blind = 1
    Darkness = 2
    Lost = 3
    Maze = 4
    Unknown = 5
    Labyrinth = 6
    Cursed = 7
class Floor(Serializable):
    """ This class represent a floor and handles everything related to its properties"""
    # Maps internal floor ids to on-screen labels. Ids ending in 'x' are the
    # XL (Curse of the Labyrinth) variants; ids ending in 'g' are Greed mode.
    __floor_id_to_label = {
        "f1": "B1",
        "f2": "B2",
        "f3": "C1",
        "f4": "C2",
        "f5": "D1",
        "f6": "D2",
        "f7": "W1",
        "f8": "W2",
        "f9": "SHEOL",
        "f10": "CATH",
        "f11": "DARK",
        "f12": "CHEST",
        "f1x": "BXL",
        "f3x": "CXL",
        "f5x": "DXL",
        "f7x": "WXL",
        "f1g": "B",
        "f2g": "C",
        "f3g": "D",
        "f4g": "W",
        "f5g": "SHEOL",
        "f6g": "SHOP",
        "f7g": "GREED",
    }
    # Serialization schema: floor_id is a string, curse an int.
    # (`basestring` -- this module targets Python 2.)
    serialize = [('floor_id', basestring), ('curse', int)]

    def __init__(self, floor_id, curse=Curse.No_Curse):
        self.floor_id = floor_id
        self.curse = curse

    def add_curse(self, curse):
        """Add a curse to this floor"""
        if curse is None:
            curse = Curse.No_Curse  # None is the same as no curse
        self.curse = curse
        if self.curse == Curse.Labyrinth:
            self.floor_id += 'x'  # If we are Curse of the Labyrinth, then we are XL

    def floor_has_curse(self, curse):
        """Return true if the floor has the curse"""
        return curse == self.curse

    def name(self, xl_disabled=False):
        """Return the floor name"""
        id = self.floor_id
        # When XL display is disabled, fall back to the non-XL label.
        if xl_disabled and id.endswith('x'):
            id = id[:-1]
        return Floor.__floor_id_to_label[id]

    def __eq__(self, other):
        # Equality is by floor id only; the curse is ignored.
        if not isinstance(other, Floor):
            return False
        return other is not None and self.floor_id == other.floor_id

    def __ne__(self, other):
        if not isinstance(other, Floor):
            return True
        return other is None or self.floor_id != other.floor_id

    @staticmethod
    def from_valid_json(json_dic, *args):
        """ Create a Floor from a type-checked dic """
        floor_id = json_dic['floor_id']
        curse = json_dic['curse']
        # NOTE(review): this range check rejects Curse.Cursed (7) as well as
        # negatives -- confirm that is intended.
        if (floor_id not in Floor.__floor_id_to_label or
                curse < Curse.No_Curse or
                curse > Curse.Labyrinth):
            # Bug fix: `curse` is an int, so the old "..." + curse string
            # concatenation raised TypeError instead of logging the error
            # (also fixes the "foor_id" typo).
            log_error("ERROR: Invalid floor_id or curse (%s, %s)" % (floor_id, curse))
            return None
        return Floor(floor_id, curse)
| Hyphen-ated/RebirthItemTrackerTest | src/game_objects/floor.py | Python | bsd-2-clause | 2,595 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2016 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
import json
from string import Template
import textwrap
def mywrap(s):
    """Wrap *s* at 60 columns for a Makefile-style continuation block.

    Continuation lines are indented 16 spaces and all lines are joined
    with a trailing backslash.
    """
    wrapped = textwrap.wrap(
        s, 60,
        initial_indent='',
        subsequent_indent=' ' * 16,
        break_on_hyphens=False,
    )
    return ' \\\n'.join(wrapped)
def format_packages(key, dependencies, result):
    """Collect the package names under *key*, grouped by purpose, and store
    the wrapped name lists in *result* (only non-empty groups are stored)."""
    buckets = {'main': [], 'doc': [], 'dev': []}
    for dep in dependencies:
        pkg = dep.get(key)
        if pkg is None:
            continue
        purpose = dep['purpose']
        group = purpose if purpose in ('doc', 'dev') else 'main'
        buckets[group].append(pkg)
    joined_main = ' '.join(buckets['main'])
    joined_doc = ' '.join(buckets['doc'])
    joined_dev = ' '.join(buckets['dev'])
    if joined_main:
        result['dependencies_%s' % key] = mywrap(joined_main)
    if joined_doc:
        result['doc_dependencies_%s' % key] = mywrap(joined_doc)
    if joined_dev:
        result['dev_dependencies_%s' % key] = mywrap(joined_dev)
def format_list(suffix, line_formatter, dependencies, result):
    """Render each dependency with *line_formatter*, group the lines by
    purpose and store the joined text in *result* (always all three keys)."""
    grouped = {'main': [], 'doc': [], 'dev': []}
    for dep in dependencies:
        rendered = line_formatter(dep)
        if rendered is None:
            # The formatter signals "not applicable" with None.
            continue
        purpose = dep['purpose']
        group = purpose if purpose in ('doc', 'dev') else 'main'
        grouped[group].append(rendered)
    result['dependencies_%s' % suffix] = '\n'.join(grouped['main'])
    result['doc_dependencies_%s' % suffix] = '\n'.join(grouped['doc'])
    result['dev_dependencies_%s' % suffix] = '\n'.join(grouped['dev'])
def formatter_rst(d):
    """Render one dependency as an RST bullet line, with optional comment."""
    bullet = '* %s %s: %s' % (d['title'], d['version_requirement'], d['homepage'])
    comment = d.get('comment')
    if comment is None:
        return bullet
    return bullet + ' (%s)' % comment
def formatter_macports_rst(d):
    """RST bullet for the MacPorts package, or None if the dependency has none."""
    name = d.get('macports_name')
    if name is None:
        return None
    return '* ``%s``: %s' % (name, d['macports_url'])
def formatter_macports_command(d):
    """Indented install command for the MacPorts package, or None if absent."""
    if d.get('macports_name') is None:
        return None
    return ' %s' % d['macports_command']
def dict_raise_on_duplicates(ordered_pairs):
    """Reject duplicate keys when loading JSON data."""
    out = {}
    for key, value in ordered_pairs:
        if key in out:
            raise ValueError("duplicate key: %r" % (key,))
        out[key] = value
    return out
def collect_fields():
    """Load ../dependencies.json and build the template substitution dict.

    Returns a dict mapping template field names (dependencies_*,
    doc_dependencies_*, dev_dependencies_*, the macports fields, and the
    custom libxc/libint install commands) to pre-formatted strings.

    Raises ValueError when dependencies.json contains duplicate keys.
    """
    # BUG FIX: `json` is used here but was never imported by this script;
    # import it locally so the function is self-contained.
    import json
    # Load the dependencies data, rejecting duplicate JSON keys.
    with open('../dependencies.json') as f:
        dependencies = json.load(f, object_pairs_hook=dict_raise_on_duplicates)
    result = {}
    # dependencies_rst and doc/dev_dependencies_rst bullet lists.
    format_list('rst', formatter_rst, dependencies, result)
    # Per-platform package name lists (dependencies_* / doc_* / dev_*).
    for key in ('fedora_24_rpm', 'fedora_24_pip',
                'fedora_22_rpm', 'fedora_22_pip',
                'ubuntu_16_deb', 'ubuntu_16_pip',
                'ubuntu_15_deb', 'ubuntu_15_pip',
                'macports_pip'):
        format_packages(key, dependencies, result)
    # MacPorts-specific bullet list and install command block.
    format_list('macports_rst', formatter_macports_rst, dependencies, result)
    format_list('macports_command', formatter_macports_command, dependencies, result)
    # Index dependencies by name to pull a few final custom fields.
    by_name = dict((d['name'], d) for d in dependencies)
    result['custom_install_libxc'] = by_name['libxc']['install_command']
    result['custom_install_libint'] = by_name['libint']['install_command']
    return result
big_fat_warning = """\
..
: THIS FILE IS AUTOMATICALLY GENERATED. CHANGES TO THIS FILE WILL BE OVERWRITTEN
: WHEN REBUILDING THE DOCUMENTATION. MAKE CHANGES IN
: %s
: OR
: ../dependencies.json
: INSTEAD.
"""
def substitute_file(fields, fn):
    """Fill the template *fn* and write the result next to it.

    *fn* must end in '.template'; the output file name is *fn* with that
    suffix stripped. As a side effect, injects the auto-generated-file
    warning header (with *fn* interpolated) into *fields* under the
    'big_fat_warning' key before substitution.
    """
    assert fn.endswith('.template')
    fields['big_fat_warning'] = big_fat_warning % fn
    with open(fn) as template_file:
        template = Template(template_file.read())
    output_fn = fn[:-len('.template')]
    with open(output_fn, 'w') as output_file:
        output_file.write(template.safe_substitute(fields))
def main():
    """Regenerate the Linux and Mac install pages from their templates."""
    fields = collect_fields()
    substitute_file(fields, 'user_download_and_install_linux.rst.template')
    substitute_file(fields, 'user_download_and_install_mac.rst.template')
if __name__ == '__main__':
    main()
| crisely09/horton | doc/update_install_doc.py | Python | gpl-3.0 | 5,435 |
"""Tests for Infoblox Plugin
:Requirement: Infoblox
:CaseLevel: System
:CaseComponent: Infobloxintegration
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from robottelo.decorators import stubbed, tier3, upgrade
from robottelo.test import TestCase
class InfobloxTestCase(TestCase):
    """Stubbed system-level scenarios for the Infoblox DNS/DHCP plugins.

    Every case below is a manual placeholder (@stubbed): the docstrings
    record the installer options to pass and the expected results, for
    future automation. No test here executes any logic yet.
    """

    @stubbed()
    @tier3
    @upgrade
    def test_set_dns_provider(self):
        """Check Infoblox DNS plugin is set as provider

        :id: 23f76fa8-79bb-11e6-a3d4-68f72889dc7f

        :Steps: Set infoblox as dns provider with options
            --foreman-proxy-dns=true
            --foreman-proxy-plugin-provider=infoblox
            --enable-foreman-proxy-plugin-dns-infoblox
            --foreman-proxy-plugin-dns-infoblox-dns-server=<ip>
            --foreman-proxy-plugin-dns-infoblox-username=<username>
            --foreman-proxy-plugin-dns-infoblox-password=<password>

        :expectedresults: Check infoblox is set as DNS provider

        :CaseLevel: System

        :CaseAutomation: notautomated
        """

    @stubbed()
    @tier3
    @upgrade
    def test_set_dhcp_provider(self):
        """Check Infoblox DHCP plugin is set as provider

        :id: 40783976-7e68-11e6-b728-68f72889dc7f

        :Steps: Set infoblox as dhcp provider with options
            --foreman-proxy-dhcp=true
            --foreman-proxy-plugin-dhcp-provider=infoblox
            --enable-foreman-proxy-plugin-dhcp-infoblox
            --foreman-proxy-plugin-dhcp-infoblox-dhcp-server=<ip>
            --foreman-proxy-plugin-dhcp-infoblox-username=<username>
            --foreman-proxy-plugin-dhcp-infoblox-password=<password>

        :expectedresults: Check infoblox is set as DHCP provider

        :CaseLevel: System

        :CaseAutomation: notautomated
        """

    @stubbed()
    @tier3
    def test_update_dns_appliance_credentials(self):
        """Check infoblox appliance credentials are updated

        :id: 2e84a8b4-79b6-11e6-8bf8-68f72889dc7f

        :Steps: Pass appliance credentials via installer options
            --foreman-proxy-plugin-dns-infoblox-username=<username>
            --foreman-proxy-plugin-dns-infoblox-password=<password>

        :expectedresults: config/dns_infoblox.yml should be updated with
            infoblox_hostname, username & password

        :CaseLevel: System

        :CaseAutomation: notautomated
        """

    @stubbed()
    @tier3
    @upgrade
    def test_enable_dns_plugin(self):
        """Check Infoblox DNS plugin can be enabled on server

        :id: f8be8c34-79b2-11e6-8992-68f72889dc7f

        :Steps: Enable Infoblox plugin via installer options
            --enable-foreman-proxy-plugin-dns-infoblox

        :CaseLevel: System

        :expectedresults: Check DNS plugin is enabled on host

        :CaseAutomation: notautomated
        """

    @stubbed()
    @tier3
    def test_disable_dns_plugin(self):
        """Check Infoblox DNS plugin can be disabled on host

        :id: c5f563c6-79b3-11e6-8cb6-68f72889dc7f

        :Steps: Disable Infoblox plugin via installer

        :expectedresults: Check DNS plugin is disabled on host

        :CaseLevel: System

        :CaseAutomation: notautomated
        """

    @stubbed()
    @tier3
    @upgrade
    def test_enable_dhcp_plugin(self):
        """Check Infoblox DHCP plugin can be enabled on host

        :id: 75650c06-79b6-11e6-ad91-68f72889dc7f

        :Steps: Enable Infoblox plugin via installer option
            --enable-foreman-proxy-plugin-dhcp-infoblox

        :expectedresults: Check DHCP plugin is enabled on host

        :CaseLevel: System

        :CaseAutomation: notautomated
        """

    @stubbed()
    @tier3
    def test_disable_dhcp_plugin(self):
        """Check Infoblox DHCP plugin can be disabled on host

        :id: ea347f34-79b7-11e6-bb03-68f72889dc7f

        :Steps: Disable Infoblox plugin via installer

        :expectedresults: Check DHCP plugin is disabled on host

        :CaseLevel: System

        :CaseAutomation: notautomated
        """

    @stubbed()
    @tier3
    @upgrade
    def test_dhcp_ip_range(self):
        """Check host get IP from Infoblox IP range while provisioning a host

        :id: ba957e82-79bb-11e6-94c5-68f72889dc7f

        :Steps: Provision a host with infoblox as dhcp provider

        :expectedresults: Check host ip is on infoblox range configured by
            option --foreman-proxy-plugin-dhcp-infoblox-use-ranges=true

        :CaseLevel: System

        :CaseAutomation: notautomated
        """

    @stubbed()
    @tier3
    @upgrade
    def test_dns_records(self):
        """Check DNS records are updated via infoblox DNS plugin

        :id: 007ad06e-79bc-11e6-885f-68f72889dc7f

        :Steps:

            1. Provision a host with infoblox as dns provider
            2. Update a DNS record on infoblox

        :expectedresults: Check host dns is updated accordingly to infoblox

        :CaseLevel: System

        :CaseAutomation: notautomated
        """
| pgagne/robottelo | tests/foreman/installer/test_infoblox.py | Python | gpl-3.0 | 5,003 |
def test_add_contact(app, db, json_contacts, check_ui):
    """Create a contact and verify it appears in the DB (and optionally UI)."""
    new_contact = json_contacts
    contacts_before = db.get_contact_list()
    created = app.contact.create(new_contact)
    # Cheap length check first via the app counter, before a full DB re-read.
    assert len(contacts_before) + 1 == app.contact.count()
    contacts_after = db.get_contact_list()
    assert sorted(contacts_before + [created]) == sorted(contacts_after)
    if check_ui:
        assert sorted(contacts_after) == sorted(app.contact.get_contact_list())
| evgeniy-shorgin/python_training | test/test_add_contact.py | Python | apache-2.0 | 454 |
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
import os
import random
def milsong_checkpoint():
milsong_train = h2o.upload_file(pyunit_utils.locate("bigdata/laptop/milsongs/milsongs-train.csv.gz"))
milsong_valid = h2o.upload_file(pyunit_utils.locate("bigdata/laptop/milsongs/milsongs-test.csv.gz"))
distribution = "gaussian"
# build first model
ntrees1 = random.sample(range(50,100),1)[0]
max_depth1 = random.sample(range(2,6),1)[0]
min_rows1 = random.sample(range(10,16),1)[0]
print "ntrees model 1: {0}".format(ntrees1)
print "max_depth model 1: {0}".format(max_depth1)
print "min_rows model 1: {0}".format(min_rows1)
from h2o.estimators.gbm import H2OGradientBoostingEstimator
model1 = H2OGradientBoostingEstimator(ntrees=ntrees1,
max_depth=max_depth1,
min_rows=min_rows1,
distribution=distribution)
model1.train(x=range(1,milsong_train.ncol),
y=0,
training_frame=milsong_train,
validation_frame=milsong_valid)
# save the model, then load the model
path = pyunit_utils.locate("results")
assert os.path.isdir(path), "Expected save directory {0} to exist, but it does not.".format(path)
model_path = h2o.save_model(model1, path=path, force=True)
assert os.path.isdir(model_path), "Expected load directory {0} to exist, but it does not.".format(model_path)
restored_model = h2o.load_model(model_path)
# continue building the model
ntrees2 = ntrees1 + 50
max_depth2 = max_depth1
min_rows2 = min_rows1
print "ntrees model 2: {0}".format(ntrees2)
print "max_depth model 2: {0}".format(max_depth2)
print "min_rows model 2: {0}".format(min_rows2)
model1 = H2OGradientBoostingEstimator(ntrees=ntrees2,
max_depth=max_depth2,
min_rows=min_rows2,
distribution=distribution)
model1.train(x=range(1,milsong_train.ncol),
y=0,
training_frame=milsong_train,
validation_frame=milsong_valid)
if __name__ == "__main__":
    # Run under the pyunit harness when executed directly; otherwise the
    # importing test runner triggers the test immediately on import.
    pyunit_utils.standalone_test(milsong_checkpoint)
else:
    milsong_checkpoint()
| pchmieli/h2o-3 | h2o-py/tests/testdir_algos/gbm/pyunit_milsongs_large_gbm.py | Python | apache-2.0 | 2,318 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.