Dataset columns (per record):

    repo_name   string   length 5 to 100
    path        string   length 4 to 231
    language    string   1 class
    license     string   15 classes
    size        int64    6 to 947k
    score       float64  0 to 0.34
    prefix      string   length 0 to 8.16k
    middle      string   length 3 to 512
    suffix      string   length 0 to 8.17k
alexis-roche/nipy
nipy/labs/statistical_mapping.py
Python
bsd-3-clause
15,304
0.000915
from __future__ import absolute_import
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
import numpy as np
import scipy.stats as sp_stats

# Use the nibabel image object
from nibabel import Nifti1Image as Image
from nibabel.affines import apply_affine

from ..io.nibcompat import get_affine
from ..algorithms.graph.field import field_from_graph_and_data
from ..algorithms.graph.graph import wgraph_from_3d_grid
from ..algorithms.statistics import empirical_pvalue
from .glm import glm
from .group.permutation_test import \
    permutation_test_onesample, permutation_test_twosample

# FIXME: rename permutation_test_onesample class
# so that name starts with upper case

###############################################################################
# Cluster statistics
###############################################################################


def bonferroni(p, n):
    return np.minimum(1., p * n)


def simulated_pvalue(t, simu_t):
    return 1 - np.searchsorted(simu_t, t) / float(np.size(simu_t))


def cluster_stats(zimg, mask, height_th, height_control='fpr',
                  cluster_th=0, nulls={}):
    """
    Return a list of clusters, each cluster being represented by a
    dictionary. Clusters are sorted by descending size order. Within
    each cluster, local maxima are sorted by descending depth order.

    Parameters
    ----------
    zimg: z-score image
    mask: mask image
    height_th: cluster forming threshold
    height_control: string
        false positive control meaning of cluster forming
        threshold: 'fpr'|'fdr'|'bonferroni'|'none'
    cluster_th: cluster size threshold
    nulls: cluster-level calibration method: None|'rft'|array

    Notes
    -----
    This works only with three dimensional data
    """
    # Masking
    if len(mask.shape) > 3:
        xyz = np.where((mask.get_data() > 0).squeeze())
        zmap = zimg.get_data().squeeze()[xyz]
    else:
        xyz = np.where(mask.get_data() > 0)
        zmap = zimg.get_data()[xyz]
    xyz = np.array(xyz).T
    nvoxels = np.size(xyz, 0)

    # Thresholding
    if height_control == 'fpr':
        zth = sp_stats.norm.isf(height_th)
    elif height_control == 'fdr':
        zth = empirical_pvalue.gaussian_fdr_threshold(zmap, height_th)
    elif height_control == 'bonferroni':
        zth = sp_stats.norm.isf(height_th / nvoxels)
    else:  ## Brute-force thresholding
        zth = height_th
    pth = sp_stats.norm.sf(zth)
    above_th = zmap > zth
    if len(np.where(above_th)[0]) == 0:
        return None, None  ## FIXME
    zmap_th = zmap[above_th]
    xyz_th = xyz[above_th]

    # Clustering
    ## Extract local maxima and connex components above some threshold
    ff = field_from_graph_and_data(wgraph_from_3d_grid(xyz_th, k=18), zmap_th)
    maxima, depth = ff.get_local_maxima(th=zth)
    labels = ff.cc()

    ## Make list of clusters, each cluster being a dictionary
    clusters = []
    for k in range(labels.max() + 1):
        s = np.sum(labels == k)
        if s >= cluster_th:
            in_cluster = labels[maxima] == k
            m = maxima[in_cluster]
            d = depth[in_cluster]
            sorted = d.argsort()[::-1]
            clusters.append({'size': s, 'maxima': m[sorted],
                             'depth': d[sorted]})

    ## Sort clusters by descending size order
    clusters.sort(key=lambda c: c['size'], reverse=True)

    # FDR-corrected p-values
    fdr_pvalue = empirical_pvalue.gaussian_fdr(zmap)[above_th]

    # Default "nulls"
    if not 'zmax' in nulls:
        nulls['zmax'] = 'bonferroni'
    if not 'smax' in nulls:
        nulls['smax'] = None
    if not 's' in nulls:
        nulls['s'] = None

    # Report significance levels in each cluster
    for c in clusters:
        maxima = c['maxima']
        zscore = zmap_th[maxima]
        pval = sp_stats.norm.sf(zscore)
        # Replace array indices with real coordinates
        c['maxima'] = apply_affine(get_affine(zimg), xyz_th[maxima])
        c['zscore'] = zscore
        c['pvalue'] = pval
        c['fdr_pvalue'] = fdr_pvalue[maxima]

        # Voxel-level corrected p-values
        p = None
        if nulls['zmax'] == 'bonferroni':
            p = bonferroni(pval, nvoxels)
        elif isinstance(nulls['zmax'], np.ndarray):
            p = simulated_pvalue(zscore, nulls['zmax'])
        c['fwer_pvalue'] = p

        # Cluster-level p-values (corrected)
        p = None
        if isinstance(nulls['smax'], np.ndarray):
            p = simulated_pvalue(c['size'], nulls['smax'])
        c['cluster_fwer_pvalue'] = p

        # Cluster-level p-values (uncorrected)
        p = None
        if isinstance(nulls['s'], np.ndarray):
            p = simulated_pvalue(c['size'], nulls['s'])
        c['cluster_pvalue'] = p

    # General info
    info = {'nvoxels': nvoxels,
            'threshold_z': zth,
            'threshold_p': pth,
            'threshold_pcorr': bonferroni(pth, nvoxels)}

    return clusters, info


###############################################################################
# Peak_extraction
###############################################################################


def get_3d_peaks(image, mask=None, threshold=0., nn=18, order_th=0):
    """
    Returns all the peaks of image that are within the mask and above the
    provided threshold.

    Parameters
    ----------
    image: (3d) test image
    mask=None: (3d) mask image
        By default no masking is performed
    threshold=0.: float, threshold value above which peaks are considered
    nn=18: int, number of neighbours of the topological spatial model
    order_th=0: int, threshold on topological order to validate the peaks

    Returns
    -------
    peaks, a list of dictionaries, where each dict has the fields:
    vals, map value at the peak
    order, topological order of the peak
    ijk, array of shape (1,3) grid coordinate of the peak
    pos, array of shape (n_maxima,3) mm coordinates (mapped by affine)
        of the peaks
    """
    # Masking
    if mask is not None:
        bmask = mask.get_data().ravel()
        data = image.get_data().ravel()[bmask > 0]
        xyz = np.array(np.where(bmask > 0)).T
    else:
        shape = image.shape
        data = image.get_data().ravel()
        xyz = np.reshape(np.indices(shape), (3, np.prod(shape))).T
    affine = get_affine(image)

    if not (data > threshold).any():
        return None

    # Extract local maxima and connex components above some threshold
    ff = field_from_graph_and_data(wgraph_from_3d_grid(xyz, k=18), data)
    maxima, order = ff.get_local_maxima(th=threshold)

    # retain only the maxima greater than the specified order
    maxima = maxima[order > order_th]
    order = order[order > order_th]

    n_maxima = len(maxima)
    if n_maxima == 0:
        # should not occur ?
        return None

    # reorder the maxima to have decreasing peak value
    vals = data[maxima]
    idx = np.argsort(- vals)
    maxima = maxima[idx]
    order = order[idx]

    vals = data[maxima]
    ijk = xyz[maxima]
    pos = np.dot(np.hstack((ijk, np.ones((n_maxima, 1)))), affine.T)[:, :3]
    peaks = [{'val': vals[k], 'order': order[k], 'ijk': ijk[k],
              'pos': pos[k]} for k in range(n_maxima)]

    return peaks


###############################################################################
# Statistical tests
###############################################################################


def prepare_arrays(data_images, vardata_images, mask_images):
    from .mask import intersect_masks
    # Compute mask intersection
    mask = intersect_masks(mask_images, threshold=1.)
    # Compute xyz coordinates from mask
    xyz = np.array(np.where(mask > 0))
    # Prepare data & vardata arrays
    data = np.array([(d.get_data()[xyz[0], xyz[1], xyz[2]]).squeeze()
                     for d in data_images]).squeeze()
    if vardata_images is None:
        vardata = None
    else:
        vardata = np.array([(d.get_data()[xyz[0], xyz[1], xyz[2]]).squeeze()
                            for d in vardata_images]).squeeze()
    return data
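A minimal usage sketch for the record above (added for illustration, not part of the source; `zimg` and `mask` are assumed to be nibabel images of matching shape):

    # Threshold at p < 0.001 (FPR control), keep clusters of >= 10 voxels
    clusters, info = cluster_stats(zimg, mask, height_th=0.001,
                                   height_control='fpr', cluster_th=10)
    if clusters is not None:
        largest = clusters[0]  # clusters come sorted by descending size
        print(largest['size'], largest['zscore'], largest['fwer_pvalue'])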
rodrigofaccioli/drugdesign
virtualscreening/vina/spark/prepare_receptor.py
Python
apache-2.0
2,232
0.002688
import ConfigParser as configparser
import os
import sys
from pyspark import SparkContext, SparkConf, SparkFiles
from pyspark.sql import SQLContext, Row
from datetime import datetime
from os_utils import make_directory, preparing_path, time_execution_log, check_file_exists
from subprocess import Popen, PIPE
from vina_utils import get_files_pdb, get_name_model_pdb

if __name__ == '__main__':
    sc = SparkContext()
    sqlCtx = SQLContext(sc)

    config = configparser.ConfigParser()
    config.read('config.ini')

    pythonsh = config.get('VINA', 'pythonsh')
    script_receptor4 = config.get('VINA', 'script_receptor4')
    pdb_path = config.get('DEFAULT', 'pdb_path')
    pdbqt_receptor_path = config.get('DEFAULT', 'pdbqt_receptor_path')
    path_spark_drugdesign = config.get('DRUGDESIGN', 'path_spark_drugdesign')

    make_directory(pdbqt_receptor_path)

    # Adding Python source files
    sc.addPyFile(os.path.join(path_spark_drugdesign, "vina_utils.py"))
    sc.addPyFile(os.path.join(path_spark_drugdesign, "json_utils.py"))
    sc.addPyFile(os.path.join(path_spark_drugdesign, "os_utils.py"))

    # Broadcast
    pythonsh = sc.broadcast(pythonsh)
    script_receptor4 = sc.broadcast(script_receptor4)
    pdbqt_receptor_path = sc.broadcast(pdbqt_receptor_path)

    def run_prepare_receptor_spark(receptor):
        receptor_pdbqt = os.path.join(pdbqt_receptor_path.value,
                                      get_name_model_pdb(receptor))
        command = ''.join([pythonsh.value, ' ', script_receptor4.value,
                           ' -r ', receptor,
                           ' -o ', receptor_pdbqt, '.pdbqt', ' -v '])
        proc = Popen(command, shell=True, stdout=PIPE)
        proc.communicate()

    start_time = datetime.now()

    list_receptor = get_files_pdb(pdb_path)
    vina_dockingRDD = sc.parallelize(list_receptor)
    vina_dockingRDD.foreach(run_prepare_receptor_spark)

    finish_time = datetime.now()
    time_execution_log(finish_time, start_time, "prepare_receptor_spark.log")
doubleDragon/QuantBot
quant/observers/t_bithumb.py
Python
mit
40,197
0.004183
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from __future__ import division

import logging
import time

from quant import config
from quant.brokers import broker_factory
from .basicbot import BasicBot
from quant.common import log

MESSAGE_TRY_AGAIN = 'Please try again'


class T_Bithumb(BasicBot):
    """
    bch:
    python -m quant.cli -mBithumb_BCH_KRW,Bitfinex_BCH_BTC,Bithumb_BTC_KRW -o=T_Bithumb_BCH -f=bithumb_bch -v

    Current limitations:
    """

    def __init__(self, base_pair, pair_1, pair_2, **kwargs):
        super(T_Bithumb, self).__init__()
        self.base_pair = base_pair
        self.pair_1 = pair_1
        self.pair_2 = pair_2
        self.monitor_only = kwargs['monitor_only']
        # Decimal precision: 2 for KRW-quoted pairs, 8 for BTC-quoted pairs
        self.precision = kwargs['precision']

        # Fees per exchange and currency, typically 1%, 2% or 2.5%
        self.fee_base = kwargs['fee_base']
        self.fee_pair1 = kwargs['fee_pair1']
        self.fee_pair2 = kwargs['fee_pair2']

        # Minimum trade size, determined jointly by the exchange and the currency
        self.min_stock_base = kwargs['min_stock_base']
        self.min_stock_1 = kwargs['min_stock_pair1']
        self.min_stock_2 = kwargs['min_stock_pair2']

        self.min_amount_market = max(self.min_stock_base, self.min_stock_1)
        self.min_amount_mid = self.min_stock_2
        self.last_update_min_stock = 0.0

        # Maximum and minimum amount for a single trade
        self.max_trade_amount = kwargs['max_trade_amount']
        self.min_trade_amount = kwargs['min_trade_amount']

        # Profit trigger point (price difference); would a percentage be more reliable?
        self.trigger_percent = 0.7
        self.last_trade = 0
        self.skip = False

        # just for count for chance profit
        self.count_forward = 0
        self.count_reverse = 0

        self.origin_assets = {}
        self.risk_count = 0
        self.logging_balance = True

        if not self.monitor_only:
            self.brokers = broker_factory.create_brokers([self.base_pair, self.pair_1, self.pair_2])
            self.update_min_stock()
            self.update_balance()

        self.logger_other = log.get_logger('log/bithumb_other.log')
        logging.debug("T_Bithumb params: " + str(kwargs))

    def is_depths_available(self, depths):
        if not depths:
            return False
        res = self.base_pair in depths and self.pair_1 in depths and self.pair_2 in depths
        if not res:
            return False
        if not depths[self.base_pair]['bids'] or not depths[self.base_pair]['asks']:
            return False
        if not depths[self.pair_1]['bids'] or not depths[self.pair_1]['asks']:
            return False
        if not depths[self.pair_2]['bids'] or not depths[self.pair_2]['asks']:
            return False

        base_bid_price = depths[self.base_pair]['bids'][0]['price']
        base_ask_price = depths[self.base_pair]['asks'][0]['price']
        if base_ask_price <= 0 or base_bid_price <= 0:
            return False

        pair1_bid_price = depths[self.pair_1]['bids'][0]['price']
        pair1_ask_price = depths[self.pair_1]['asks'][0]['price']
        if pair1_ask_price <= 0 or pair1_bid_price <= 0:
            return False

        pair2_bid_price = depths[self.pair_2]['bids'][0]['price']
        pair2_ask_price = depths[self.pair_2]['asks'][0]['price']
        if pair2_ask_price <= 0 or pair2_bid_price <= 0:
            return False

        return True

    def terminate(self):
        super(T_Bithumb, self).terminate()
        self.brokers[self.pair_1].cancel_all()

    def update_min_stock(self):
        # Update bfx's minimum trade size, refreshed once an hour
        now = time.time()
        diff = now - self.last_update_min_stock
        if diff > 3600:
            min_stock = self.brokers[self.pair_1].get_min_stock()
            if min_stock:
                self.min_stock_1 = min_stock
                self.min_amount_market = max(self.min_stock_base, self.min_stock_1)
                logging.debug('update %s min stock: %s' % (self.pair_1, min_stock))
            self.last_update_min_stock = now

    def update_other(self):
        if not self.monitor_only:
            self.update_min_stock()

    def tick(self, depths):
        if not self.is_depths_available(depths):
            return
        self.skip = False
        self.forward(depths)
        self.reverse(depths)

    def forward(self, depths):
        logging.debug("============== forward loop: buy base, sell synthetic ==============")
        base_pair_ask_amount = depths[self.base_pair]['asks'][0]['amount']
        base_pair_ask_price = depths[self.base_pair]['asks'][0]['price']
        base_pair_ask_price_real = base_pair_ask_price * (1 + self.fee_base)

        logging.debug("forward======>base_pair: %s ask_price:%s" %
                      (self.base_pair, base_pair_ask_price))

        # All "real" prices include fees
        pair1_bid_amount = depths[self.pair_1]['bids'][0]['amount']
        pair1_bid_price = depths[self.pair_1]['bids'][0]['price']
        pair1_bid_price_real = pair1_bid_price * (1 - self.fee_pair1)

        pair2_bid_amount = depths[self.pair_2]['bids'][0]['amount']
        pair2_bid_price = depths[self.pair_2]['bids'][0]['price']
        pair2_bid_price_real = pair2_bid_price * (1 - self.fee_pair2)

        synthetic_bid_price = round(pair1_bid_price * pair2_bid_price, self.precision)
        synthetic_bid_price_real = round(pair1_bid_price_real * pair2_bid_price_real, self.precision)

        # Price difference: diff = sell - buy
        p_diff = round(synthetic_bid_price - base_pair_ask_price, self.precision)

        logging.debug("forward======>%s bid_price: %s, %s bid_price: %s" %
                      (self.pair_1, pair1_bid_price, self.pair_2, pair2_bid_price))
        logging.debug("forward======>synthetic_bid_price: %s, p_diff: %s" %
                      (synthetic_bid_price, p_diff))

        if pair1_bid_price == 0:
            return

        pair_2to1_quote_amount = round(pair2_bid_amount / pair1_bid_price, 8)

        # Market limits: how many bch the base pair can buy and pair1 can sell,
        # bounded by the configured range [5, 0.05].
        # Take only 50% of the book, for two reasons: 1) it raises the odds of
        # a fill; 2) even with sufficient balance, the fee portion of an order
        # may fail to fill (extreme case).
        hedge_quote_amount_market = min(base_pair_ask_amount, pair1_bid_amount)
        hedge_quote_amount_market = min(hedge_quote_amount_market, pair_2to1_quote_amount)
        hedge_quote_amount_market = min(self.max_trade_amount, hedge_quote_amount_market)
        hedge_quote_amount_market = hedge_quote_amount_market / 2
        hedge_mid_amount_market = round(hedge_quote_amount_market * pair1_bid_price, 8)

        if self.monitor_only:
            hedge_quote_amount = hedge_quote_amount_market
            hedge_mid_amount = round(hedge_quote_amount * pair1_bid_price, 8)
            if hedge_quote_amount < self.min_amount_market:
                # Bitfinex sets the minimum tradable bch order size for bch_krw to 0.001
                logging.debug("forward======>hedge_quote_amount is too small! %s" % hedge_quote_amount)
                return
            if hedge_mid_amount < self.min_amount_mid:
                # Bitfinex sets the minimum tradable amount for btc_krw to 0.005 and
                # Liqui limits a single btc trade to 0.0001, so 0.005 is used here
                logging.debug("forward======>hedge_mid_amount is too small! %s" % hedge_mid_amount)
                return
        else:
            # Balance limits: how many bch the base pair can buy and pair1 can
            # sell, fees included
            hedge_quote_amount_balance = round(min(
                self.brokers[self.pair_1].bch_available,
                self.brokers[self.base_pair].krw_available / base_pair_ask_price_real), 8)
            hedge_mid_amount_balance = round(min(
                self.brokers[self.pair_2].btc_available,
                self.brokers[self.pair_1].bch_available * pair1_bid_price_real), 8)

            # Take the amount limited jointly by the market and the balances
            hedge_quote_amount = min(hedge_quote_amount_market,
                                     hedge_quote_amount_balance,
                                     self.min_trade_amount)
            hedge_mid_amount = hedge_quote_amount * pair1_bid_price

            logging.debug("forward======>balance allow quote: %s and mid: %s, market allow quote: %s and btc: %s " %
                          (hedge_quote_amount_balance, hedge_mid_amount_balance,
                           hedge_quote_amount_market, hedge_mid_amount_market))
mozilla-it/autocert
autocert/api/destination/factory.py
Python
mit
854
0.003513
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

'''
destination.factory
'''

from destination.zeus import ZeusDestination
from destination.aws import AwsDestination
from exceptions import AutocertError
from config import CFG
from app import app


class DestinationFactoryError(AutocertError):
    def __init__(self, destination):
        msg = f'destination factory error with {destination}'
        super(DestinationFactoryError, self).__init__(msg)


def create_destination(destination, ar, cfg, timeout, verbosity):
    d = None
    if destination == 'aws':
        d = AwsDestination(ar, cfg, verbosity)
    elif destination == 'zeus':
        d = ZeusDestination(ar, cfg, verbosity)
    else:
        raise DestinationFactoryError(destination)
    dests = list(CFG.destinations.zeus.keys())
    if d.has_connectivity(timeout, dests):
        return d
bradleyayers/django-revisionfield
django_revisionfield/models.py
Python
bsd-2-clause
828
0.001208
# -*- coding: utf8 -*-
from django.db import models
from django.db.models import F


class Revision(models.Model):
    """
    A blank model (except for ``id``) that is merely an implementation
    detail of django-revisionfield.
    """
    number = models.PositiveIntegerField()

    @staticmethod
    def next():
        """
        Returns the next revision.

        :returns: next available revision
        :rtype: ``int``
        """
        try:
            current = Revision.objects.get().number
        except Revision.DoesNotExist:
            revision, created = Revision.objects.get_or_create(number=1)
            current = revision.number
        while Revision.objects.filter(number=current).update(number=F('number') + 1) != 1:
            current = Revision.objects.get().number
        return current + 1
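The `while` loop above is an optimistic compare-and-increment: the filtered `UPDATE` reports exactly one affected row only when no concurrent caller has bumped the counter since it was read, so a losing caller re-reads and retries. Roughly the SQL issued per attempt (a sketch; the default Django table name `django_revisionfield_revision` is an assumption):

    UPDATE django_revisionfield_revision
       SET number = number + 1
     WHERE number = <current>;
    -- 1 row affected: our read was current; the next revision is current + 1
    -- 0 rows affected: another caller won the race; re-read and retry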
klmitch/nova
nova/tests/unit/compute/test_provider_config.py
Python
apache-2.0
17,242
0.000058
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import ddt
import fixtures
import microversion_parse
import os
from unittest import mock

from oslo_utils.fixture import uuidsentinel
from oslotest import base

from nova.compute import provider_config
from nova import exception as nova_exc


class SchemaValidationMixin(base.BaseTestCase):
    """This class provides the basic methods for running schema validation
    test cases. It can be used along with ddt.file_data to test a specific
    schema version using tests defined in yaml files. See
    SchemaValidationTestCasesV1 for an example of how this was done for
    schema version 1.

    Because decorators can only access class properties of the class they
    are defined in (even when overriding values in the subclass), the
    decorators need to be placed in the subclass. This is why there are
    test_ functions in the subclass that call the run_test_ methods in this
    class. This should keep things simple as more schema versions are added.
    """
    def setUp(self):
        super(SchemaValidationMixin, self).setUp()
        self.mock_load_yaml = self.useFixture(
            fixtures.MockPatchObject(
                provider_config, '_load_yaml_file')).mock
        self.mock_LOG = self.useFixture(
            fixtures.MockPatchObject(
                provider_config, 'LOG')).mock

    def set_config(self, config=None):
        data = config or {}
        self.mock_load_yaml.return_value = data
        return data

    def run_test_validation_errors(self, config, expected_messages):
        self.set_config(config=config)

        actual_msg = self.assertRaises(
            nova_exc.ProviderConfigException,
            provider_config._parse_provider_yaml, 'test_path').message

        for msg in expected_messages:
            self.assertIn(msg, actual_msg)

    def run_test_validation_success(self, config):
        reference = self.set_config(config=config)

        actual = provider_config._parse_provider_yaml('test_path')

        self.assertEqual(reference, actual)

    def run_schema_version_matching(
            self, min_schema_version, max_schema_version):
        # note _load_yaml_file is mocked so the value is not important
        # however it may appear in logs messages so changing it could
        # result in tests failing unless the expected_messages field
        # is updated in the test data.
        path = 'test_path'

        # test exactly min and max versions are supported
        self.set_config(config={
            'meta': {'schema_version': str(min_schema_version)}})
        provider_config._parse_provider_yaml(path)
        self.set_config(config={
            'meta': {'schema_version': str(max_schema_version)}})
        provider_config._parse_provider_yaml(path)

        self.mock_LOG.warning.assert_not_called()

        # test max major+1 raises
        higher_major = microversion_parse.Version(
            major=max_schema_version.major + 1,
            minor=max_schema_version.minor)
        self.set_config(config={'meta': {'schema_version': str(higher_major)}})

        self.assertRaises(nova_exc.ProviderConfigException,
                          provider_config._parse_provider_yaml, path)

        # test max major with max minor+1 is logged
        higher_minor = microversion_parse.Version(
            major=max_schema_version.major,
            minor=max_schema_version.minor + 1)
        expected_log_call = (
            "Provider config file [%(path)s] is at schema version "
            "%(schema_version)s. Nova supports the major version, but "
            "not the minor. Some fields may be ignored."
            % {"path": path, "schema_version": higher_minor})
        self.set_config(config={'meta': {'schema_version': str(higher_minor)}})

        provider_config._parse_provider_yaml(path)

        self.mock_LOG.warning.assert_called_once_with(expected_log_call)


@ddt.ddt
class SchemaValidationTestCasesV1(SchemaValidationMixin):
    MIN_SCHEMA_VERSION = microversion_parse.Version(1, 0)
    MAX_SCHEMA_VERSION = microversion_parse.Version(1, 0)

    @ddt.unpack
    @ddt.file_data('provider_config_data/v1/validation_error_test_data.yaml')
    def test_validation_errors(self, config, expected_messages):
        self.run_test_validation_errors(config, expected_messages)

    @ddt.unpack
    @ddt.file_data('provider_config_data/v1/validation_success_test_data.yaml')
    def test_validation_success(self, config):
        self.run_test_validation_success(config)

    def test_schema_version_matching(self):
        self.run_schema_version_matching(self.MIN_SCHEMA_VERSION,
                                         self.MAX_SCHEMA_VERSION)


@ddt.ddt
class ValidateProviderConfigTestCases(base.BaseTestCase):
    @ddt.unpack
    @ddt.file_data('provider_config_data/validate_provider_good_config.yaml')
    def test__validate_provider_good_config(self, sample):
        provider_config._validate_provider_config(sample, "fake_path")

    @ddt.unpack
    @ddt.file_data('provider_config_data/validate_provider_bad_config.yaml')
    def test__validate_provider_bad_config(self, sample, expected_messages):
        actual_msg = self.assertRaises(
            nova_exc.ProviderConfigException,
            provider_config._validate_provider_config,
            sample, 'fake_path').message

        self.assertIn(actual_msg, expected_messages)

    @mock.patch.object(provider_config, 'LOG')
    def test__validate_provider_config_one_noop_provider(self, mock_log):
        expected = {
            "providers": [
                {
                    "identification": {"name": "NAME1"},
                    "inventories": {
                        "additional": [
                            {"CUSTOM_RESOURCE_CLASS": {}}
                        ]
                    }
                },
                {
                    "identification": {"name": "NAME_453764"},
                    "inventories": {
                        "additional": []
                    },
                    "traits": {
                        "additional": []
                    }
                }
            ]
        }
        data = copy.deepcopy(expected)

        valid = provider_config._validate_provider_config(data, "fake_path")

        mock_log.warning.assert_called_once_with(
            "Provider NAME_453764 defined in "
            "fake_path has no additional "
            "inventories or traits and will be ignored."
        )

        # assert that _validate_provider_config does not mutate inputs
        self.assertEqual(expected, data)

        # assert that the first entry in the returned tuple is the full set
        # of providers not a copy and is equal to the expected providers.
        self.assertIs(data['providers'][0], valid[0])
        self.assertEqual(expected['providers'][0], valid[0])


class GetProviderConfigsTestCases(base.BaseTestCase):
    @mock.patch.object(provider_config, 'glob')
    def test_get_provider_configs_one_file(self, mock_glob):
        expected = {
            "$COMPUTE_NODE": {
                "__source_file": "example_provider.yaml",
                "identification": {
                    "name": "$COMPUTE_NODE"
                },
                "inventories": {
                    "additional": [
                        {
                            "CUSTOM_EXAMPLE_RESOURCE_CLASS": {
                                "total": 100,
                                "reserved": 0,
                                "min_unit": 1,
                                "max_unit": 10,
                                "step_size": 1,
EmreAtes/spack
lib/spack/spack/test/cmd/url.py
Python
lgpl-2.1
5,856
0.000342
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import re

import pytest

from spack.url import UndetectableVersionError
from spack.main import SpackCommand
from spack.cmd.url import name_parsed_correctly, version_parsed_correctly
from spack.cmd.url import url_summary

url = SpackCommand('url')


class MyPackage:
    def __init__(self, name, versions):
        self.name = name
        self.versions = versions


def test_name_parsed_correctly():
    # Expected True
    assert name_parsed_correctly(MyPackage('netcdf', []), 'netcdf')
    assert name_parsed_correctly(MyPackage('r-devtools', []), 'devtools')
    assert name_parsed_correctly(MyPackage('py-numpy', []), 'numpy')
    assert name_parsed_correctly(MyPackage('octave-splines', []), 'splines')
    assert name_parsed_correctly(MyPackage('imagemagick', []), 'ImageMagick')  # noqa
    assert name_parsed_correctly(MyPackage('th-data', []), 'TH.data')

    # Expected False
    assert not name_parsed_correctly(MyPackage('', []), 'hdf5')
    assert not name_parsed_correctly(MyPackage('hdf5', []), '')
    assert not name_parsed_correctly(MyPackage('yaml-cpp', []), 'yamlcpp')
    assert not name_parsed_correctly(MyPackage('yamlcpp', []), 'yaml-cpp')
    assert not name_parsed_correctly(MyPackage('r-py-parser', []), 'parser')
    assert not name_parsed_correctly(MyPackage('oce', []), 'oce-0.18.0')  # noqa


def test_version_parsed_correctly():
    # Expected True
    assert version_parsed_correctly(MyPackage('', ['1.2.3']), '1.2.3')
    assert version_parsed_correctly(MyPackage('', ['5.4a', '5.4b']), '5.4a')
    assert version_parsed_correctly(MyPackage('', ['5.4a', '5.4b']), '5.4b')
    assert version_parsed_correctly(MyPackage('', ['1.63.0']), '1_63_0')
    assert version_parsed_correctly(MyPackage('', ['0.94h']), '094h')

    # Expected False
    assert not version_parsed_correctly(MyPackage('', []), '1.2.3')
    assert not version_parsed_correctly(MyPackage('', ['1.2.3']), '')
    assert not version_parsed_correctly(MyPackage('', ['1.2.3']), '1.2.4')
    assert not version_parsed_correctly(MyPackage('', ['3.4a']), '3.4')
    assert not version_parsed_correctly(MyPackage('', ['3.4']), '3.4b')
    assert not version_parsed_correctly(MyPackage('', ['0.18.0']), 'oce-0.18.0')  # noqa


def test_url_parse():
    url('parse', 'http://zlib.net/fossils/zlib-1.2.10.tar.gz')


def test_url_with_no_version_fails():
    # No version in URL
    with pytest.raises(UndetectableVersionError):
        url('parse', 'http://www.netlib.org/voronoi/triangle.zip')


@pytest.mark.network
def test_url_list():
    out = url('list')
    total_urls = len(out.split('\n'))

    # The following two options should not change the number of URLs printed.
    out = url('list', '--color', '--extrapolation')
    colored_urls = len(out.split('\n'))
    assert colored_urls == total_urls

    # The following options should print fewer URLs than the default.
    # If they print the same number of URLs, something is horribly broken.
    # If they say we missed 0 URLs, something is probably broken too.
    out = url('list', '--incorrect-name')
    incorrect_name_urls = len(out.split('\n'))
    assert 0 < incorrect_name_urls < total_urls

    out = url('list', '--incorrect-version')
    incorrect_version_urls = len(out.split('\n'))
    assert 0 < incorrect_version_urls < total_urls

    out = url('list', '--correct-name')
    correct_name_urls = len(out.split('\n'))
    assert 0 < correct_name_urls < total_urls

    out = url('list', '--correct-version')
    correct_version_urls = len(out.split('\n'))
    assert 0 < correct_version_urls < total_urls


@pytest.mark.network
def test_url_summary():
    """Test the URL summary command."""
    # test url_summary, the internal function that does the work
    (total_urls, correct_names, correct_versions,
     name_count_dict, version_count_dict) = url_summary(None)

    assert 0 < correct_names <= sum(name_count_dict.values()) <= total_urls  # noqa
    assert 0 < correct_versions <= sum(version_count_dict.values()) <= total_urls  # noqa

    # make sure it agrees with the actual command.
    out = url('summary')
    out_total_urls = int(
        re.search(r'Total URLs found:\s*(\d+)', out).group(1))
    assert out_total_urls == total_urls

    out_correct_names = int(
        re.search(r'Names correctly parsed:\s*(\d+)', out).group(1))
    assert out_correct_names == correct_names

    out_correct_versions = int(
        re.search(r'Versions correctly parsed:\s*(\d+)', out).group(1))
    assert out_correct_versions == correct_versions
henniggroup/MPInterfaces
mpinterfaces/mat2d/intercalation/startup.py
Python
mit
3,088
0.000648
from __future__ import print_function, division, unicode_literals

import operator

from pymatgen.core.periodic_table import Element
from pymatgen.core.structure import Structure

from mpinterfaces.mat2d.intercalation.analysis import get_interstitial_sites

__author__ = "Michael Ashton"
__copyright__ = "Copyright 2017, Henniggroup"
__maintainer__ = "Michael Ashton"
__email__ = "[email protected]"
__status__ = "Production"
__date__ = "March 3, 2017"


def inject_ions(structure, ion, atomic_fraction):
    """
    Adds ions to a percentage of interstitial sites into a structure
    that results in an at% less than or equal to the specified
    atomic_fraction. Starts by filling interstitial sites with the
    largest voronoi radius, and then works downward.

    Args:
        structure (Structure): Pymatgen Structure object to
            intercalate into.
        ion (str): name of atom to intercalate, e.g. 'Li', or 'Mg'.
        atomic_fraction (int): This fraction of the final intercalated
            structure will be intercalated atoms. Must be < 1.0.

    Returns:
        structure. Includes intercalated atoms.

    TODO: Also require that if two interstitial sites are roughly the
        same size, then fill the one furthest from other intercalated
        ions.
    """
    specie = Element(ion)

    # If the structure isn't big enough to accommodate such a small
    # atomic fraction, multiply it into a supercell.
    n_ions = 1.
    while not n_ions / (structure.num_sites + n_ions) <= atomic_fraction:
        # A supercell in all 3 dimensions is not usually necessary,
        # but is the most reliable for finding interstitial sites.
        # Flat or narrow supercells give a poor triangulation.
        structure.make_supercell(2)

    if structure.num_sites * atomic_fraction > 3:
        print("The algorithm is working, but may take several minutes "
              "due to the relatively large number of ions to "
              "intercalate.")

    interstitial_sites = get_interstitial_sites(structure)["tetrahedral"]

    while n_ions / (structure.num_sites + 1) <= atomic_fraction:
        at_p = int(round(n_ions * 100. / (structure.num_sites + 1), 0))
        try:
            structure.append(species=specie,
                             coords=interstitial_sites[0][0],
                             validate_proximity=True,
                             properties={'velocities': [0.0, 0.0, 0.0]},
                             coords_are_cartesian=True)
            interstitial_sites = get_interstitial_sites(structure)["tetrahedral"]
            n_ions += 1
            print("Currently at ~{} at %".format(at_p))
        except ValueError:
            # The largest site is too close to another atom, so your
            # structure is already full!
            raise ValueError("The atomic fraction specified exceeds the "
                             "number of reasonably distant interstitial "
                             "sites in the structure. Please choose a "
                             "smaller atomic fraction and try again.")

    return structure
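A minimal usage sketch for inject_ions (illustrative only; the POSCAR path is hypothetical):

    from pymatgen.core.structure import Structure

    structure = Structure.from_file('POSCAR')       # hypothetical input file
    lithiated = inject_ions(structure, 'Li', 0.05)  # aim for <= 5 at% Li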
volpino/Yeps-EURAC
tools/filters/axt_to_lav_code.py
Python
mit
306
0.01634
def exec_after_process(app, inp_data, out_data, param_dict, tool, stdout, stderr):
    for name, data in out_data.items():
        if name == "seq_file2":
            data.dbkey = param_dict['dbkey_2']
            app.model.context.add(data)
            app.model.context.flush()
            break
crakensio/djph2
craken_project/manage.py
Python
mit
263
0.003802
#!/usr/bin/env python
import os
import sys

if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "craken_project.settings.local")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
MISP/misp-modules
tests/test.py
Python
agpl-3.0
34,783
0.002048
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import unittest
import requests
import base64
import json
import os
import io
import re
import zipfile
from hashlib import sha256
from email.mime.application import MIMEApplication
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from email.header import Header


class TestModules(unittest.TestCase):

    def setUp(self):
        self.maxDiff = None
        self.headers = {'Content-Type': 'application/json'}
        self.url = "http://127.0.0.1:6666/"

    def test_introspection(self):
        response = requests.get(self.url + "modules")
        print(response.json())
        response.connection.close()

    def test_cve(self):
        with open('tests/bodycve.json', 'r') as f:
            response = requests.post(self.url + "query", data=f.read())
            print(response.json())
            response.connection.close()

    def test_dns(self):
        with open('tests/body.json', 'r') as f:
            response = requests.post(self.url + "query", data=f.read())
            print(response.json())
            response.connection.close()
        with open('tests/body_timeout.json', 'r') as f:
            response = requests.post(self.url + "query", data=f.read())
            print(response.json())
            response.connection.close()

    def test_openioc(self):
        with open("tests/openioc.xml", "rb") as f:
            content = base64.b64encode(f.read())
            data = json.dumps({"module": "openiocimport",
                               "data": content.decode(),
                               })
            response = requests.post(self.url + "query", data=data).json()
            print(response)

            print("OpenIOC :: {}".format(response))
            values = [x["values"][0] for x in response["results"]]
            assert("mrxcls.sys" in values)
            assert("mdmcpq3.PNF" in values)

    @unittest.skip("Need Rewrite")
    def test_email_headers(self):
        query = {"module": "email_import"}
        query["config"] = {"unzip_attachments": None,
                           "guess_zip_attachment_passwords": None,
                           "extract_urls": None}
        message = get_base_email()
        text = """I am a test e-mail"""
        message.attach(MIMEText(text, 'plain'))
        query['data'] = decode_email(message)
        data = json.dumps(query)
        response = requests.post(self.url + "query", data=data)
        results = response.json()['results']
        values = [x["values"] for x in results]
        types = {}
        for i in results:
            types.setdefault(i["type"], 0)
            types[i["type"]] += 1

        # Check that there are the appropriate number of items
        # Check that all the items were correct
        self.assertEqual(types['target-email'], 1)
        self.assertIn('[email protected]', values)
        self.assertEqual(types['email-dst-display-name'], 4)
        self.assertIn('Last One', values)
        self.assertIn('Other Friend', values)
        self.assertIn('Second Person', values)
        self.assertIn('Testy Testerson', values)
        self.assertEqual(types['email-dst'], 4)
        self.assertIn('[email protected]', values)
        self.assertIn('[email protected]', values)
        self.assertIn('[email protected]', values)
        self.assertIn('[email protected]', values)
        self.assertEqual(types['email-src-display-name'], 2)
        self.assertIn("Innocent Person", values)
        self.assertEqual(types['email-src'], 2)
        self.assertIn("[email protected]", values)
        self.assertIn("[email protected]", values)
        self.assertEqual(types['email-thread-index'], 1)
        self.assertIn('AQHSR8Us3H3SoaY1oUy9AAwZfMF922bnA9GAgAAi9s4AAGvxAA==', values)
        self.assertEqual(types['email-message-id'], 1)
        self.assertIn("<[email protected]>", values)
        self.assertEqual(types['email-subject'], 1)
        self.assertIn("Example Message", values)
        self.assertEqual(types['email-header'], 1)
        self.assertEqual(types['email-x-mailer'], 1)
        self.assertIn("mlx 5.1.7", values)
        self.assertEqual(types['email-reply-to'], 1)
        self.assertIn("<CI7DgL-A6dm92s7gf4-88g@[email protected]>", values)

    @unittest.skip("Need Rewrite")
    def test_email_attachment_basic(self):
        query = {"module": "email_import"}
        query["config"] = {"unzip_attachments": None,
                           "guess_zip_attachment_passwords": None,
                           "extract_urls": None}
        message = get_base_email()
        text = """I am a test e-mail"""
        message.attach(MIMEText(text, 'plain'))
        with open("tests/EICAR.com", "rb") as fp:
            eicar_mime = MIMEApplication(fp.read(), 'com')
            eicar_mime.add_header('Content-Disposition', 'attachment',
                                  filename="EICAR.com")
            message.attach(eicar_mime)
        query['data'] = decode_email(message)
        data = json.dumps(query)
        response = requests.post(self.url + "query", data=data)
        values = [x["values"] for x in response.json()['results']]
        self.assertIn('EICAR.com', values)
        for i in response.json()['results']:
            if i["type"] == 'email-attachment':
                self.assertEqual(i["values"], "EICAR.com")
            if i['type'] == 'malware-sample':
                attch_data = base64.b64decode(i["data"])
                self.assertEqual(attch_data,
                                 b'X5O!P%@AP[4\\PZX54(P^)7CC)7}$EICAR-STANDARD-ANTIVIRUS-TEST-')

    @unittest.skip("Need Rewrite")
    def test_email_attachment_unpack(self):
        query = {"module": "email_import"}
        query["config"] = {"unzip_attachments": "true",
                           "guess_zip_attachment_passwords": None,
                           "extract_urls": None}
        message = get_base_email()
        text = """I am a test e-mail"""
        message.attach(MIMEText(text, 'plain'))
        with open("tests/EICAR.com.zip", "rb") as fp:
            eicar_mime = MIMEApplication(fp.read(), 'zip')
            eicar_mime.add_header('Content-Disposition', 'attachment',
                                  filename="EICAR.com.zip")
            message.attach(eicar_mime)
        query['data'] = decode_email(message)
        data = json.dumps(query)
        response = requests.post(self.url + "query", data=data)
        values = [x["values"] for x in response.json()["results"]]
        self.assertIn('EICAR.com', values)
        self.assertIn('EICAR.com.zip', values)
        for i in response.json()['results']:
            if i['type'] == 'malware-sample' and i["values"] == 'EICAR.com.zip':
                with zipfile.ZipFile(io.BytesIO(base64.b64decode(i["data"])), 'r') as zf:
                    with zf.open("EICAR.com") as ec:
                        attch_data = ec.read()
                self.assertEqual(attch_data,
                                 b'X5O!P%@AP[4\\PZX54(P^)7CC)7}$EICAR-STANDARD-ANTIVIRUS-TEST-')
            if i['type'] == 'malware-sample' and i["values"] == 'EICAR.com':
                attch_data = base64.b64decode(i["data"])
                self.assertEqual(attch_data,
                                 b'X5O!P%@AP[4\\PZX54(P^)7CC)7}$EICAR-STANDARD-ANTIVIRUS-TEST-')

    @unittest.skip("Need Rewrite")
    def test_email_dont_unpack_compressed_doc_attachments(self):
        """Ensures that compressed """
        query = {"module": "email_import"}
        query["config"] = {"unzip_attachments": "true",
                           "guess_zip_attachment_passwords": None,
                           "extract_urls": None}
        message = get_base_email()
        text = """I am a test e-mail"""
        message.attach(MIMEText(text, 'plain'))
        with open("tests/test_files/test.docx", "rb") as fp:
            eicar_mime = MIMEApplication(fp.read(), 'zip')
            eicar_mime.add_header('Content-Disposition', 'attachment',
                                  filename="test.docx")
            message.attach(eicar_mime)
        query['data'] = decode_email(message)
        data = json.dumps(query)
        response = requests.post(self.url +
jyi/ITSP
prophet-gpl/tools/rev-test.py
Python
mit
2,304
0.027344
#!/usr/bin/env python
from sys import argv
from os import system
from php_tester import php_tester

if __name__ == "__main__":
    assert(len(argv) > 3);
    repo_src = argv[1];
    repo_test = argv[2];
    revision = argv[3];
    if (len(argv) == 4):
        out_file = "revlog" + revision + ".txt";
        revision2 = revision + "^1";
        deps_dir = "php-deps";
    else:
        assert(len(argv) > 6);
        revision2 = argv[4];
        out_file = argv[5];
        deps_dir = argv[6];
    workdir = "__tmp" + revision;
    system("mkdir " + workdir);
    repo1 = workdir + "/tmp1";
    repo2 = workdir + "/tmp2";
    testdir = workdir + "/tests";
    system("cp -rf " + repo_src + " " + repo1);
    system("cp -rf " + repo_src + " " + repo2);
    system("cp -rf " + repo_test + " " + testdir);
    tester1 = php_tester(workdir, repo1, testdir);
    tester2 = php_tester(workdir, repo2, testdir);
    ret = tester1.set_revision(revision, deps_dir);
    assert(ret);
    ret = tester2.set_revision(revision2, deps_dir);
    assert(ret);
    s1 = tester1.test_all();
    s2 = tester2.test_all();
    s1.discard(7648);
    s1.discard(6551);
    s1.discard(6702);
    s1.discard(10982);
    s2.discard(7648);
    s2.discard(6551);
    s2.discard(6702);
    s2.discard(10982);
    diff12 = s1 - s2;
    diff21 = s2 - s1;
    common = s1 & s2;
    fout = open(out_file, "w");
    print >>fout, "-";
    print >>fout, "-";
    outdiff = [];
    for i in diff12:
        # repeat 5 times for non-determinism
        bad = False;
        for j in range(0, 5):
            tmp = set();
            tmp.add(i);
            r1 = tester1.test(tmp);
            r2 = tester2.test(tmp);
            if (len(r1) != 1):
                bad = True;
                break;
            if (len(r2) != 0):
                bad = True;
                break;
        if not bad:
            outdiff.append(i);
    print >>fout, "Diff Cases: Tot", len(outdiff);
    for i in outdiff:
        print >>fout, i,
    print >>fout;
    print >>fout, "Positive Cases: Tot", len(common);
    for i in common:
        print >>fout, i,
    print >>fout;
    print >>fout, "Regression Cases: Tot", len(diff21);
    for i in diff21:
        print >>fout, i,
    print >>fout;
    fout.close();
    system("rm -rf " + workdir);
unioslo/cerebrum
Cerebrum/modules/no/uio/PostmasterCommands.py
Python
gpl-2.0
4,574
0.001968
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011, 2012 University of Oslo, Norway
#
# This file is part of Cerebrum.
#
# Cerebrum is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Cerebrum is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cerebrum; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Cerebrum functionality for Postmaster's webservice."""

import cereconf
from Cerebrum import Errors
from Cerebrum.Utils import Factory


class Commands:
    """The available commands for the postmaster webservice.

    The public commands are in the Postmaster server, which calls this class
    for the Cerebrum functionality. This class is instantiated for each
    incoming call, and closed and destroyed after each call. Note that we
    should explicitly shut down db connections, since the server could run
    many connections in parallel.

    Note that this class should be independent of what server and
    communication form we are using.
    """
    def __init__(self):
        self.db = Factory.get('Database')()
        self.co = Factory.get('Constants')(self.db)

    def close(self):
        """Explicitly close the current instance of this class. This is to
        make sure that all is closed down correctly, even if the garbage
        collector can't destroy the instance.

        For now, this means the database link.
        """
        if hasattr(self, 'db'):
            try:
                self.db.close()
            except Exception, e:
                log.warning("Problems with db.close: %s" % e)

    def _get_aff_status(self, input):
        """Return a list of CerebrumCodes for given affiliation or
        affiliation status strings, e.g. 'STUDENT', 'STUDENT/aktiv' and
        'ANSATT/vitenskapelig'. Returned in two lists, affs and statuses.
        """
        affs = list()
        stats = list()
        for string in input:
            try:
                aff, status = string.split('/', 1)
            except ValueError:
                affs.append(self.co.PersonAffiliation(string))
            else:
                stats.append(self.co.PersonAffStatus(
                    self.co.PersonAffiliation(aff), status))
        return (affs, stats)

    def _get_ous(self, skos):
        """Return ou_ids for given skos. If the sko is not complete, its sub
        OUs are returned as well.
        """
        ou = Factory.get('OU')(self.db)
        ou_ids = []
        for sko in skos:
            ou_ids += [ous['ou_id'] for ous in
                       ou.get_stedkoder(fakultet=sko[:2] or None,
                                        institutt=sko[2:4] or None,
                                        avdeling=sko[4:6] or None)]
        return ou_ids

    def get_addresses_by_affiliation(self, status, source, skos=None):
        """Find persons that have the given affiliations/statuses from the
        given source systems and at the given stedkoder (SKOs), if any.
        Return a list of all the persons' primary e-mail addresses.

        Note that some persons might not have any user affiliations, thus
        having no *primary* affiliation, even if they have user accounts
        with e-mail addresses.
        """
        affs = stats = ou_ids = None
        if status:
            affs, stats = self._get_aff_status(status)
        if source:
            source = [self.co.AuthoritativeSystem(s) for s in source]
        if skos:
            ou_ids = self._get_ous(skos)
            if not ou_ids:
                raise Errors.CerebrumRPCException('OUs not found')

        pe = Factory.get('Person')(self.db)
        pe2email = dict(pe.list_primary_email_address(self.co.entity_person))

        rows = []
        if affs:
            rows += pe.list_affiliations(affiliation=affs, ou_id=ou_ids)
        if stats:
            rows += pe.list_affiliations(status=stats, ou_id=ou_ids)
        ret = set(pe2email[row['person_id']] for row in rows
                  if pe2email.has_key(row['person_id']))
        print 'DEBUG: Returning %d e-mail addresses' % len(ret)
        return ret
miurahr/translate
translate/tools/test_phppo2pypo.py
Python
gpl-2.0
1,682
0
# phppo2pypo unit tests
# Author: Wil Clouser <[email protected]>
# Date: 2009-12-03

from io import BytesIO

from translate.convert import test_convert
from translate.tools import phppo2pypo


class TestPhpPo2PyPo:
    def test_single_po(self):
        inputfile = b"""
# This user comment refers to: %1$s
#. This developer comment does too: %1$s
#: some/path.php:111
#, php-format
msgid "I have %2$s apples and %1$s oranges"
msgstr "I have %2$s apples and %1$s oranges"
"""
        outputfile = BytesIO()
        phppo2pypo.convertphp2py(inputfile, outputfile)
        output = outputfile.getvalue().decode("utf-8")
        assert "refers to: {0}" in output
        assert "does too: {0}" in output
        assert 'msgid "I have {1} apples and {0} oranges"' in output
        assert 'msgstr "I have {1} apples and {0} oranges"' in output

    def test_plural_po(self):
        inputfile = b"""
#. This developer comment refers to %1$s
#: some/path.php:111
#, php-format
msgid "I have %1$s apple"
msgid_plural "I have %1$s apples"
msgstr[0] "I have %1$s apple"
msgstr[1] "I have %1$s apples"
"""
        outputfile = BytesIO()
        phppo2pypo.convertphp2py(inputfile, outputfile)
        output = outputfile.getvalue().decode("utf-8")
        assert 'msgid "I have {0} apple"' in output
        assert 'msgid_plural "I have {0} apples"' in output
        assert 'msgstr[0] "I have {0} apple"' in output
        assert 'msgstr[1] "I have {0} apples"' in output


class TestPhpPo2PyPoCommand(test_convert.TestConvertCommand, TestPhpPo2PyPo):
    """Tests running actual phppo2pypo commands on files"""
    convertmodule = phppo2pypo
    defaultoptions = {}
lalinsky/acute-dbapi
setup.py
Python
mit
1,836
0.014161
#
# acute-dbapi setup
# Ken Kuhlman (acute at redlagoon dot net), 2007

from ez_setup import use_setuptools
use_setuptools()

from setuptools import setup, find_packages, Extension
import sys
import os

setup(
    name="acute-dbapi",
    version="0.1.0",
    description="Python DB-API testsuite",
    author="Ken Kuhlman",
    author_email="[email protected]",
    license="MIT License",
    url="http://code.google.com/p/acute-dbapi/",
    ##py_modules=[ 'acute' ],
    #install_requires=['nose>=0.10a1',],
    #extras_requires={
    #    'core-testing':["pysqlite", ]
    #},
    packages=['acute'],
    package_dir={'acute': 'acute'},
    #test_suite = 'nose.collector',
    #TODO: This is also the project summary page on google code. Keep in sync.
    long_description="""
Welcome to the home page for acute-dbapi, a DB-API compliance test suite.
Acute is still in its infancy, but it's reached the level of maturity that
it would benefit from community input. It currently contains 71 tests, and
many more will be added soon. Comments, suggestions, and patches are all
warmly welcome.

There are several TODOs listed in the [TODO] file, and many more generously
sprinkled throughout the code; if you'd like to help out but don't know
where to begin, feel free to take a crack at one of them!

Please read the project's [README] for an introduction to the suite. You'll
also find usage, architecture, and project philosophy information there.
If you just want to see the results, take a look at TestResults, and
DriverFeatures on the project wiki.
""",
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python",
        "Topic :: Database :: Front-Ends",
    ]
)
commshare/etna_viv
tools/etnaviv/parse_fdr.py
Python
gpl-3.0
9,109
0.004611
'''
Parse execution data log stream.

Allows access to selected parts of program memory at the time of recorded
events.
'''
# Copyright (c) 2012-2013 Wladimir J. van der Laan
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sub license,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the
# next paragraph) shall be included in all copies or substantial portions
# of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from __future__ import print_function, division, unicode_literals
import os, sys, struct
from collections import namedtuple
from bisect import bisect_right
from binascii import b2a_hex

LITTLE_ENDIAN = b'<'
BIG_ENDIAN = b'>'

# target architecture description
ENDIAN = LITTLE_ENDIAN
DEBUG = False
RECTYPE_CHAR = b'B'  # always 8 bit
MAGIC_CHAR = b'I'  # always 32 bit
WORD_CHAR = b'I'  # 32 bit
ADDR_CHAR = b'I'  # 32/64 bit
SHORT_STRING_SIZE_CHAR = b'B'

# struct specifiers for decoding
RECTYPE_SPEC = struct.Struct(ENDIAN + RECTYPE_CHAR)
HDR_SPEC = struct.Struct(ENDIAN + MAGIC_CHAR + WORD_CHAR)
WORD_SPEC = struct.Struct(ENDIAN + WORD_CHAR)
ADDR_SPEC = struct.Struct(ENDIAN + ADDR_CHAR)
RANGE_SPEC = struct.Struct(ENDIAN + ADDR_CHAR + ADDR_CHAR)
SHORT_STRING_SIZE_SPEC = struct.Struct(ENDIAN + SHORT_STRING_SIZE_CHAR)

FDR_MAGIC = 0x8e1aaa8f
FDR_VERSION = 1


class RTYPE:
    '''FDR record types'''
    RANGE_DATA = 0
    RANGE_TEMP_DATA = 1
    ADD_UPDATED_RANGE = 2
    REMOVE_UPDATED_RANGE = 3
    EVENT = 4
    COMMENT = 5


def read_spec(f, spec):
    return spec.unpack(f.read(spec.size))


def read_short_string(f):
    (size,) = read_spec(f, SHORT_STRING_SIZE_SPEC)
    return f.read(size)


Event = namedtuple('Event', ['event_type', 'parameters'])
Comment = namedtuple('Comment', ['data'])
Parameter = namedtuple('Parameter', ['name', 'value'])


class FDRLoader(object):
    '''
    High-level interface for playing back FDR files.

    The object is an iterable that returns event records:
    - Event(...) in case of an event
    - Comment(...) in case of an comment

    Also it can be subscripted to return the current contents of a memory
    range, like fdr[ptr:ptr+4] to return a range, or just fdr[ptr] to return
    one byte. An IndexError will be raised if either the start or stop is
    out of range (or not up to date at the time of this event).
    '''
    def __init__(self, input_file):
        self.f = open(input_file, 'rb')
        magic, version = read_spec(self.f, HDR_SPEC)
        if magic != FDR_MAGIC:
            raise ValueError('Magic value %08x not recognized (should be %08x)' % (magic, FDR_MAGIC))
        if version != FDR_VERSION:
            raise ValueError('Version %08x not recognized (should be %08x)' % (version, FDR_VERSION))
        # Stored memory ranges
        self.stored = []
        # Active memory ranges
        self.updated_ranges = []
        # Temporary data
        self.temp_ranges = []
        # Cached list of starting addresses for bisection
        self.updated_ranges_start = []
        self.temp_ranges_start = []
        # IMPORTANT precondition: all ranges must be non-overlapping

    def _flush_temps(self):
        self.temp_ranges = []
        self.temp_ranges_start = []

    def __iter__(self):
        f = self.f
        while True:
            try:
                rt, = read_spec(f, RECTYPE_SPEC)
            except struct.error:
                # could not parse entire structure; end of file allowed here
                break
            if rt == RTYPE.RANGE_DATA:
                addr_start, addr_end = read_spec(f, RANGE_SPEC)
                data = f.read(addr_end - addr_start)
                if DEBUG:
                    print('RANGE_DATA 0x%08x 0x%08x %s...' % (addr_start, addr_end, b2a_hex(data[0:16])))
                # TODO update self.stored
                self.update(addr_start, addr_end, data)
            elif rt == RTYPE.RANGE_TEMP_DATA:
                addr_start, addr_end = read_spec(f, RANGE_SPEC)
                data = f.read(addr_end - addr_start)
                if DEBUG:
                    print('RANGE_TEMP_DATA 0x%08x 0x%08x %s...' % (addr_start, addr_end, b2a_hex(data[0:16])))
                self.temp_ranges.append((addr_start, addr_end, data))
            elif rt == RTYPE.ADD_UPDATED_RANGE:
                addr_start, addr_end = read_spec(f, RANGE_SPEC)
                if DEBUG:
                    print('ADD_UPDATED_RANGE 0x%08x 0x%08x' % (addr_start, addr_end))
                self.updated_ranges.append((addr_start, addr_end, bytearray(addr_end - addr_start)))
                self.updated_ranges.sort()
                self.updated_ranges_start = [r[0] for r in self.updated_ranges]
            elif rt == RTYPE.REMOVE_UPDATED_RANGE:
                addr_start, addr_end = read_spec(f, RANGE_SPEC)
                i = bisect_right(self.updated_ranges_start, addr_start) - 1
                if DEBUG:
                    print('REMOVE_UPDATED_RANGE 0x%08x 0x%08x (%i)' % (addr_start, addr_end, i))
                assert(self.updated_ranges[i][0] == addr_start and self.updated_ranges[i][1] == addr_end)
                del self.updated_ranges[i]
                # keep cached list of ranges up-to-date
                self.updated_ranges_start = [r[0] for r in self.updated_ranges]
                #self.updated_ranges.remove((addr_start, addr_end))
            elif rt == RTYPE.EVENT:
                event_type = read_short_string(f)
                num_parameters, = read_spec(f, WORD_SPEC)
                parameters = {}
                for i in range(num_parameters):
                    par = Parameter(
                        name=read_short_string(f),
                        value=read_spec(f, ADDR_SPEC)[0])
                    parameters[par.name] = par
                parstr = ' '.join([('%s=0x%x' % par) for par in parameters.itervalues()])
                self.temp_ranges.sort()
                self.temp_ranges_start = [r[0] for r in self.temp_ranges]
                if DEBUG:
                    print('EVENT %s %s' % (event_type, parstr))
                yield Event(event_type, parameters)
                self._flush_temps()
            elif rt == RTYPE.COMMENT:
                size, = read_spec(f, ADDR_SPEC)
                comment = f.read(size)
                if DEBUG:
                    print('COMMENT')
                yield Comment(comment)
            else:
                raise ValueError('Unexpected record type %i' % rt)

    def __getitem__(self, key):
        '''Get one byte or a range of bytes from this memory map.'''
        # Support slicing as well as single lookups
        if isinstance(key, slice):
            start = key.start
            stop = key.stop
            if key.step is not None:
                raise KeyError('Extended slices not supported')
        else:
            start = key
            stop = key + 1
        try:
            return self.fetch(self.temp_ranges_start, self.temp_ranges, start, stop)
        except IndexError, e:
            # need to convert to str explicitly because struct won't work with bytearray
            return str(self.fetch(self.updated_ranges_start, self.updated_ranges, start, stop))

    def fetch(self, ranges_start, ranges, start, stop):
        '''Look up in stored or temp ranges'''
        # XXX we don't handle the case of a request spanning multiple co
derivationBud/prez
source/greenparadise.py
Python
mit
548
0.032847
import re, json

def isValid(number):
    match = re.match(r'[456]\d{3}-?\d{4}-?\d{4}-?\d{4}$', number)
    if not match:
        return False
    else:
        digits = number.replace("-", "")
        result = re.search(r'(\d)\1\1\1', digits)
        if result:
            return False
    return True

records = json.load(open("greenparadise.json"))
for record in records:
    valid = isValid(record["card"])
    if not valid:
        print("Invalid card:", record["card"])
    if valid != record["valid"]:
        print("Unmatched valid tag:", record["card"], valid, record["valid"])
wangyangjun/RealtimeStreamBenchmark
script/pull-updates.py
Python
apache-2.0
715
0.018182
#!/bin/python
from __future__ import print_function
import subprocess
import sys
import os
import json
from util import appendline, get_ip_address

if __name__ == "__main__":
    path = os.path.dirname(os.path.realpath(__file__))
    config = json.load(open(path + '/cluster-config.json'))

    for node in config['nodes']:
        files = subprocess.check_output(
            ["ssh", "cloud-user@" + node['ip'], 'ls /home/cloud-user']).split('\n')
        if 'StreamBench' not in files:
            p = subprocess.Popen('ssh cloud-user@' + node['ip'] +
                                 ' "git clone https://github.com/wangyangjun/StreamBench.git"',
                                 shell=True)
        else:
            p = subprocess.Popen('ssh cloud-user@' + node['ip'] +
                                 ' "cd /home/cloud-user/StreamBench;git checkout .;git pull;"',
                                 shell=True)
Reimilia/Privacy_Server
resources/common/json_parser.py
Python
mit
8,021
0.012093
import json
import copy

formtable = {"name": [u'name'],
             "gender": [u'gender'],
             "contact": [u'contact'],
             "address": [u'address']}


def is_reserved_layer(dict, reserved_word):
    for key in dict:
        if len(reserved_word) <= len(key) and reserved_word == key[:len(reserved_word)]:
            return True
    return False


def json_reduce_layer(source, reserved_word):
    if type(source) == list:
        if is_reserved_layer(source[0], reserved_word):
            #print source
            for i in range(len(source)):
                temp_dict = source.pop(0)
                for temp_key in temp_dict:
                    source.append(temp_dict[temp_key][0])
            #print source
            json_reduce_layer(source, reserved_word)
        else:
            for item in source:
                json_reduce_layer(item, reserved_word)
    elif type(source) == dict:
        for key in source:
            print source[key]
            json_reduce_layer(source[key], reserved_word)

'''
def json_reduce_layer(source, reserved_word):
    if type(source)==dict:
        for key in source:
            if(type(source[key])==list and len(source[key])==1):
                if(type(source[key][0])==dict and is_reserved_layer(source[key][0],reserved_word)):
                    temp_dict = source[key].pop()
                    for temp_key in temp_dict:
                        source[key].append(temp_dict[temp_key])
            for item in source[key]:
                json_reduce_layer(item, reserved_word)
    elif type(source)==list:
        for item in source:
            if(type(item)==dict):
                if is_reserved_layer(item,reserved_word):
                    #this item is a reserved_layer
                    temp_dict = source.pop(obj=list[0])
                    for key in temp_dict:
                        source.append(temp_dict[key])
        for item in source:
            json_reduce_layer(item, reserved_word)
'''


def json_reduce_structure(source):
    if type(source) == dict:
        for key in source:
            if(type(source[key]) == list and len(source[key]) == 1):
                source[key] = source[key][0]
            json_reduce_structure(source[key])
    elif type(source) == list:
        for item in source:
            if(type(item) == dict):
                json_reduce_structure(item)


def json_write(source, list, reserved_word):
    if(len(source) == 1):
        # if the source is the list item in the source list, append it to the dest list
        list.append(source[0])
    else:
        if(len(list) == 0):
            # the list is empty, append a new dict to it
            dict = {}
            dict[source[0]] = []
            list.append(dict)
        else:
            # there already have a dict in the list: list[0]
            if not list[0].has_key(source[0]):
                # add key source[0] in the dict
                list[0][source[0]] = []
        json_write(source[1:], list[0][source[0]], reserved_word)


def list2json(source, reserved_word):
    '''
    :param source: a list of list
    :return: a dict which can be converted into json str use json.dumps()
    '''
    dest = json_gene(source, reserved_word)
    json_reduce_layer(dest, reserved_word)
    json_reduce_structure(dest)
    return dest


def json_gene(list, reserved_word):
    proto = {}
    for item in list:
        if not proto.has_key(item[0]):
            proto[item[0]] = []
        json_write(item[1:], proto[item[0]], reserved_word)
    return proto


def listequal(list1, list2):
    '''
    compare the elements in these two list
    :param list1:
    :param list2:
    :return: True if two list are equal
    '''
    if len(list1) != len(list2):
        return False
    else:
        for i in range(len(list1)):
            if list1[i] != list2[i]:
                return False
    return True


def extend(prefix, extendlist, raw):
    '''
    :param prefix: list of key, there maybe more than one item corresponding to it
    :param extendlist: extended item will append to this list
    :param raw: patient's info comes from
    :return:
    '''
    for item in raw:
        if listequal(prefix, item[:len(prefix)]):
            extendlist.append(item)


def form2list(form, formtable, raw):
    extendlist = []
    for item in form:
        extend(formtable[item], extendlist, raw)
    return extendlist


def retrieve(policy, raw):
    '''
    :param policy: a list to identify a item of patient's info, the policy[-1] is the attribute of the item
    :param raw: result of json2list()
    :return: return processed patient's info
    '''
    newlist = policy
    not_found_flag = True
    for item in raw:
        if listequal(policy[:-1], item[:-1]):
            not_found_flag = False
            newlist[-1] = 'Mask'
            return newlist, item[:-1]
    if not_found_flag:
        newlist[-1] = 'Not Found'
        return newlist, 0


def conver(item, templist, result, reserved_word):
    '''
    :param item: list or dict to be convert
    :param templist: a temp list
    :param result: every item in result is a convert result
    :return:
    '''
    if type(item) == dict:
        for key in item:
            templist.append(key)
            conver(item[key], templist, result, reserved_word)
            templist.pop()
    elif type(item) == list:
        for i in range(len(item)):
            tempkey = reserved_word + str(i)
            templist.append(tempkey)
            conver(item[i], templist, result, reserved_word)
            templist.pop()
        #for arg in item:
            #conver(arg, templist, result)
    elif type(item) == unicode:
        templist.append(item)
        resultitem = copy.deepcopy(templist)
        result.append(resultitem)
        #print item
        templist.pop()


def json2list(jsonfile, reserved_word):
    '''
    :param jsonfile: dict come from json.dumps
    :return: a list, every item in this list is a list [key1,key2,...,keyn,value],
             it show the position of value in original json file
    '''
    result = []
    templist = []
    conver(jsonfile, templist, result, reserved_word)
    return result


# For Test Purpose
def simplejsontest():
    reserved_word = 'PRIVACY_POLICY_JSON_PARSOR_LAYER_MARK'
    s = json.loads(
        '''
        {
          "resourceType": "Patient",
          "text": {
            "status": "generated",
            "div": "<div><p>Freda Penn</p ></div>"
          },
          "name": {
            "text": "Protected data due to privacy policy"
          },
          "gender": "female"
        }
        '''
    )
    newlist = json2list(s, reserved_word)
    for item in newlist:
        print item
    thelist = [[u'Patient', 'PRIVACY_POLICY_JSON_PARSOR_LAYER_MARK0', 'PRIVACY_POLICY_JSON_PARSOR_LAYER_MARK0', u'name'],
               [u'Patient', 'PRIVACY_POLICY_JSON_PARSOR_LAYER_MARK0', 'PRIVACY_POLICY_JSON_PARSOR_LAYER_MARK2', u'text'],
               [u'Patient', 'PRIVACY_POLICY_JSON_PARSOR_LAYER_MARK0', 'PRIVACY_POLICY_JSON_PARSOR_LAYER_MARK3', u'Freda Penn'],
               [u'Patient', 'PRIVACY_POLICY_JSON_PARSOR_LAYER_MARK1', 'PRIVACY_POLICY_JSON_PARSOR_LAYER_MARK0', u'gender'],
               [u'Patient', 'PRIVACY_POLICY_JSON_PARSOR_LAYER_MARK1', 'PRIVACY_POLICY_JSON_PARSOR_LAYER_MARK1', u'female']]
    testlist = []
    testlist.append([u'name', 'parallel_dict0', u'use', u'official'])
    testlist.append([u'name', 'parallel_dict0', u'given', 'parallel_dict0', u'Peter'])
    testlist.append([u'name', 'parallel_dict0', u'given', 'parallel_dict1', u'James'])
    testlist.append([u'name', 'parallel_dict0', u'fhir_comments', 'parallel_dict0', u" Peter James Chalmers, but called 'Jim' "])
    testlist.append([u'name', 'parallel_dict0', u'family', 'parallel_dict0', u'Chalmers'])
    testlist.append([u'name', 'parallel_dict1', u'use', u'usual'])
    testlist.append([u'name', 'parallel_dict1', u'given', 'parallel_dict0', u'Jim'])
    result = list2json(newlist, reserved_word)
    print result
    print json.dumps(result, indent=4)


if __name__ == '__main__':
    #test()
    #jsontest()
    simplejsontest()
tensorflow/tensorflow
tensorflow/python/tools/optimize_for_inference.py
Python
apache-2.0
5,141
0.005057
# pylint: disable=g-bad-file-header
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Removes parts of a graph that are only needed for training.

There are several common transformations that can be applied to GraphDefs
created to train a model, that help reduce the amount of computation needed
when the network is used only for inference. These include:

 - Removing training-only operations like checkpoint saving.
 - Stripping out parts of the graph that are never reached.
 - Removing debug operations like CheckNumerics.
 - Folding batch normalization ops into the pre-calculated weights.
 - Fusing common operations into unified versions.

This script takes either a frozen binary GraphDef file (where the weight
variables have been converted into constants by the freeze_graph script), or a
text GraphDef proto file (the weight variables are stored in a separate
checkpoint file), and outputs a new GraphDef with the optimizations applied.

If the input graph is a text graph file, make sure to include the node that
restores the variable weights in output_names. That node is usually named
"restore_all".

An example of command-line usage is:

bazel build tensorflow/python/tools:optimize_for_inference && \
bazel-bin/tensorflow/python/tools/optimize_for_inference \
--input=frozen_inception_graph.pb \
--output=optimized_inception_graph.pb \
--frozen_graph=True \
--input_names=Mul \
--output_names=softmax
"""

import argparse
import os
import sys

from absl import app
from google.protobuf import text_format

from tensorflow.core.framework import graph_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import graph_io
from tensorflow.python.platform import gfile
from tensorflow.python.tools import optimize_for_inference_lib

FLAGS = None


def main(unused_args):
  if not gfile.Exists(FLAGS.input):
    print("Input graph file '" + FLAGS.input + "' does not exist!")
    return -1

  input_graph_def = graph_pb2.GraphDef()
  with gfile.Open(FLAGS.input, "rb") as f:
    data = f.read()
    if FLAGS.frozen_graph:
      input_graph_def.ParseFromString(data)
    else:
      text_format.Merge(data.decode("utf-8"), input_graph_def)

  output_graph_def = optimize_for_inference_lib.optimize_for_inference(
      input_graph_def,
      FLAGS.input_names.split(","),
      FLAGS.output_names.split(","),
      _parse_placeholder_types(FLAGS.placeholder_type_enum),
      FLAGS.toco_compatible)

  if FLAGS.frozen_graph:
    f = gfile.GFile(FLAGS.output, "w")
    f.write(output_graph_def.SerializeToString())
  else:
    graph_io.write_graph(output_graph_def,
                         os.path.dirname(FLAGS.output),
                         os.path.basename(FLAGS.output))
  return 0


def _parse_placeholder_types(values):
  """Extracts placeholder types from a comma separate list."""
  values = [int(value) for value in values.split(",")]
  return values if len(values) > 1 else values[0]


def parse_args():
  """Parses command line arguments."""
  parser = argparse.ArgumentParser()
  parser.register("type", "bool", lambda v: v.lower() == "true")
  parser.add_argument(
      "--input",
      type=str,
      default="",
      help="TensorFlow 'GraphDef' file to load.")
  parser.add_argument(
      "--output",
      type=str,
      default="",
      help="File to save the output graph to.")
  parser.add_argument(
      "--input_names",
      type=str,
      default="",
      help="Input node names, comma separated.")
  parser.add_argument(
      "--output_names",
      type=str,
      default="",
      help="Output node names, comma separated.")
  parser.add_argument(
      "--frozen_graph",
      nargs="?",
      const=True,
      type="bool",
      default=True,
      help="""\
      If true, the input graph is a binary frozen GraphDef
      file; if false, it is a text GraphDef proto file.\
      """)
  parser.add_argument(
      "--placeholder_type_enum",
      type=str,
      default=str(dtypes.float32.as_datatype_enum),
      help="""\
      The AttrValue enum to use for placeholders.
      Or a comma separated list, one value for each placeholder.\
      """)
  parser.add_argument(
      "--toco_compatible",
      type=bool,
      default=False,
      help="""\
      If true, only use ops compatible with Tensorflow
      Lite Optimizing Converter.\
      """)
  return parser.parse_known_args()


if __name__ == "__main__":
  FLAGS, unparsed = parse_args()
  app.run(main=main, argv=[sys.argv[0]] + unparsed)
lukehsiao/RobotSoccer
MotionControl/scripts/kalman_filter/Sample.py
Python
mit
2,688
0.011161
import math

from param import *


class Sample:
    def __init__(self):
        self.time = 0
        self.home1_x = 0.0
        self.home1_y = 0.0
        self.home1_theta = 0.0
        self.home2_x = 0.0
        self.home2_y = 0.0
        self.home2_theta = 0.0
        self.away1_x = 0.0
        self.away1_y = 0.0
        self.away1_theta = 0.0
        self.away2_x = 0.0
        self.away2_y = 0.0
        self.away2_theta = 0.0
        self.ball_x = 0.0
        self.ball_y = 0.0
        self.kill = 0.0

    def setDataFromSample(self, data):
        self.time = round(timeToInt(data.header.stamp), 2)
        self.home1_theta = round(degreeToRadian(data.home1_theta), 3)

        # Correct for camera parallax: project the measured position of the
        # robot down to the field plane using the camera and robot heights.
        home1_x = pixelToMeter(data.home1_x)
        home1_y = pixelToMeter(data.home1_y)
        angleField = math.atan2(home1_y, home1_x)
        mag = math.sqrt(home1_x**2 + home1_y**2)
        angleCamera = math.atan(HEIGHT_CAMERA/mag)
        offset = HEIGHT_ROBOT / math.tan(angleCamera)
        home1_x = home1_x - offset * math.cos(angleField)
        home1_y = home1_y - offset * math.sin(angleField)
        self.home1_x = round(home1_x, 3)
        self.home1_y = round(home1_y, 3)

        self.home2_x = pixelToMeter(data.home2_x)
        self.home2_y = pixelToMeter(data.home2_y)
        self.home2_theta = degreeToRadian(data.home2_theta)
        self.away1_x = pixelToMeter(data.away1_x)
        self.away1_y = pixelToMeter(data.away1_y)
        self.away1_theta = degreeToRadian(data.away1_theta)
        self.away2_x = pixelToMeter(data.away2_x)
        self.away2_y = pixelToMeter(data.away2_y)
        self.away2_theta = degreeToRadian(data.away2_theta)
        self.ball_x = pixelToMeter(data.ball_x)
        self.ball_y = pixelToMeter(data.ball_y)

    def getDiscreteSample(self):
        home1_x = meterToPixel(self.home1_x)
        home1_y = meterToPixel(self.home1_y)
        home1_theta = radianToDegree(self.home1_theta)
        home2_x = meterToPixel(self.home2_x)
        home2_y = meterToPixel(self.home2_y)
        home2_theta = radianToDegree(self.home2_theta)
        away1_x = meterToPixel(self.away1_x)
        away1_y = meterToPixel(self.away1_y)
        away1_theta = radianToDegree(self.away1_theta)
        away2_x = meterToPixel(self.away2_x)
        away2_y = meterToPixel(self.away2_y)
        away2_theta = radianToDegree(self.away2_theta)
        ball_x = meterToPixel(self.ball_x)
        ball_y = meterToPixel(self.ball_y)
        return (home1_x, home1_y, home1_theta,
                home2_x, home2_y, home2_theta,
                away1_x, away1_y, away1_theta,
                away2_x, away2_y, away2_theta,
                ball_x, ball_y)
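# A minimal worked example (illustrative, not part of the original file):
# setDataFromSample() above corrects for camera parallax. A robot of height
# HEIGHT_ROBOT seen by a camera at height HEIGHT_CAMERA appears displaced away
# from the point below the camera by offset = HEIGHT_ROBOT / tan(angleCamera),
# which simplifies to HEIGHT_ROBOT * mag / HEIGHT_CAMERA. The constants below
# are assumptions for the demo, not the values from the project's param module.
import math

DEMO_HEIGHT_CAMERA = 2.5  # assumed camera height above the field, meters
DEMO_HEIGHT_ROBOT = 0.1   # assumed robot height, meters

def demo_correct_parallax(x, y):
    mag = math.sqrt(x**2 + y**2)
    angleField = math.atan2(y, x)
    angleCamera = math.atan(DEMO_HEIGHT_CAMERA / mag)
    offset = DEMO_HEIGHT_ROBOT / math.tan(angleCamera)
    return (x - offset * math.cos(angleField),
            y - offset * math.sin(angleField))

# The corrected point lies slightly closer to the point below the camera:
print(demo_correct_parallax(1.0, 0.5))  # roughly (0.96, 0.48)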
pymelibre/ecolepo
agrostore/stores/admin.py
Python
gpl-3.0
135
0
from django.contrib import admin

from .models import (
    Store,
    Seller
)

admin.site.register(Store)
admin.site.register(Seller)
HamsterHuey/py_matlab_funcs
histfit.py
Python
mit
2,005
0.009476
# -*- coding: utf-8 -*-
"""
Created on Mon May 05 23:10:12 2014

@author: Sudeep Mandal
"""

import numpy as np
from scipy.optimize import curve_fit
import matplotlib.pyplot as plt


def histfit(data, ax=None, **kwargs):
    """
    Emulates MATLAB histfit() function. Plots histogram & Gaussian fit to data
    Currently only implements Gaussian fit

    Parameters:
    -----------
    data : 1D array
        data to be plotted as histogram
    ax : axes handle to use for plotting (eg: pass handle for using histfit
        to plot in a subplot of an existing figure)
    kwargs:
        bins - Number of bins, defaults to sqrt(N)
        color - Color of histogram bars. eg: 'r','g','b', etc. Default = 'g'

    Returns:
    --------
    ax : axes handle of histfit plot
    fit_coeffs : Gaussian fit parameters from fitting routine
        fit_coeffs[0] is the Amplitude, fit_coeffs[1] = mu, fit_coeffs[2] = sigma
    """
    bins = kwargs['bins'] if 'bins' in kwargs else np.sqrt(len(data))
    color = kwargs['color'] if 'color' in kwargs else 'g'

    if ax is None:
        ax = plt.gca()

    # Plot histogram
    n, bins, patches = ax.hist(data, bins=bins, alpha=0.6, color=color)
    bin_centers = (bins[:-1] + bins[1:])/2

    # Define model function to be used to fit to the data above:
    def gauss(x, *p):
        A, mu, sigma = p
        return A*np.exp(-(x-mu)**2/(2.*sigma**2))

    # p0 is the initial guess for the fitting coefficients (A, mu and sigma above)
    p0 = [1., np.mean(data), np.std(data)]

    fit_coeffs, var_matrix = curve_fit(gauss, bin_centers, n, p0=p0)
    mu, sigma = fit_coeffs[1], fit_coeffs[2]
    xnormfit = np.arange(mu-4*sigma, mu+4*sigma, (8*sigma/100))
    ynormfit = gauss(xnormfit, *fit_coeffs)

    # Plot Fit
    ax.plot(xnormfit, ynormfit, 'r-', linewidth=2,)
    ax.axis('tight')
    ax.set_ylim(np.array(ax.get_ylim()) * 1.1)

    return ax, fit_coeffs
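# A minimal usage sketch (illustrative, not part of the original file):
# calling histfit() as defined above on synthetic normal data; the fitted mu
# and sigma should land near the generating values 5 and 2.
if __name__ == '__main__':
    data = np.random.normal(loc=5.0, scale=2.0, size=2000)
    ax, fit_coeffs = histfit(data, bins=40, color='b')
    print('A=%.2f, mu=%.2f, sigma=%.2f' % tuple(fit_coeffs))
    plt.show()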
liosha2007/plone-groupdocs-signature-source
src/groupdocs/signature/portlets/__init__.py
Python
apache-2.0
294
0.006803
from zope.i18nmessageid import MessageFactory
PloneMessageFactory = MessageFactory('plone')

from Products.CMFCore.permissions import setDefaultRoles
setDefaultRoles('signature.portlets.gdsignature: Add GroupDocs Signature portlet',
                ('Manager', 'Site Administrator', 'Owner',))
justusc/Elemental
examples/interface/SOC.py
Python
bsd-3-clause
4,486
0.041685
#
#  Copyright (c) 2009-2015, Jack Poulson
#  All rights reserved.
#
#  This file is part of Elemental and is under the BSD 2-Clause License,
#  which can be found in the LICENSE file in the root directory, or at
#  http://opensource.org/licenses/BSD-2-Clause
#
import El, math, time

m = 10
cutoff = 1000
output = True

worldRank = El.mpi.WorldRank()
worldSize = El.mpi.WorldSize()

# Construct s and z in the (product) cone
# =======================================
def ConstructPrimalDual(m):
  s = El.DistMultiVec()
  z = El.DistMultiVec()
  orders = El.DistMultiVec(El.iTag)
  firstInds = El.DistMultiVec(El.iTag)
  sampleRad = 1./math.sqrt(1.*m)
  El.Uniform( s, 3*m, 1, 0, sampleRad )
  El.Uniform( z, 3*m, 1, 0, sampleRad )
  s.Set( 0,   0, 2. )
  s.Set( m,   0, 3. )
  s.Set( 2*m, 0, 4. )
  z.Set( 0,   0, 5. )
  z.Set( m,   0, 6. )
  z.Set( 2*m, 0, 7. )
  El.Zeros( orders, 3*m, 1 )
  El.Zeros( firstInds, 3*m, 1 )
  for i in xrange(m):
    orders.Set( i,     0, m )
    orders.Set( i+m,   0, m )
    orders.Set( i+2*m, 0, m )
    firstInds.Set( i,     0, 0 )
    firstInds.Set( i+m,   0, m )
    firstInds.Set( i+2*m, 0, 2*m )
  return s, z, orders, firstInds

s, z, orders, firstInds = ConstructPrimalDual(m)
n = s.Height()
if output:
  El.Print( s, "s" )
  El.Print( z, "z" )
  El.Print( orders, "orders" )
  El.Print( firstInds, "firstInds" )

# Compute the (Jordan) determinants and number of non-positive SOC members
# ========================================================================
sDets = El.SOCDets( s, orders, firstInds, cutoff )
zDets = El.SOCDets( z, orders, firstInds, cutoff )
sDetsBcast = El.DistMultiVec()
zDetsBcast = El.DistMultiVec()
El.Copy( sDets, sDetsBcast )
El.Copy( zDets, zDetsBcast )
El.SOCBroadcast( sDetsBcast, orders, firstInds, cutoff )
El.SOCBroadcast( zDetsBcast, orders, firstInds, cutoff )
sNumNonPos = El.NumNonSOC( s, orders, firstInds, cutoff )
zNumNonPos = El.NumNonSOC( z, orders, firstInds, cutoff )
if output:
  El.Print( sDets, "det(s)" )
  El.Print( zDets, "det(z)" )
  El.Print( sDetsBcast, "Broadcasted det(s)" )
  El.Print( zDetsBcast, "Broadcasted det(z)" )
  if worldRank == 0:
    print "# non-SOC in s:", sNumNonPos
    print "# non-SOC in z:", zNumNonPos

# Compute the square-roots of s and z
# ===================================
sRoot = El.SOCSquareRoot( s, orders, firstInds, cutoff )
zRoot = El.SOCSquareRoot( z, orders, firstInds, cutoff )
sRootSquared = El.SOCApply( sRoot, sRoot, orders, firstInds, cutoff )
zRootSquared = El.SOCApply( zRoot, zRoot, orders, firstInds, cutoff )
if output:
  El.Print( sRoot, "sqrt(s)" )
  El.Print( zRoot, "sqrt(z)" )
  El.Print( sRootSquared, "(sqrt(s))^2" )
  El.Print( zRootSquared, "(sqrt(z))^2" )

# Compute the inverses of s and z
# ===============================
sInv = El.SOCInverse( s, orders, firstInds, cutoff )
zInv = El.SOCInverse( z, orders, firstInds, cutoff )
sInv_s = El.SOCApply( sInv, s, orders, firstInds, cutoff )
zInv_z = El.SOCApply( zInv, z, orders, firstInds, cutoff )
s_sInv = El.SOCApply( s, sInv, orders, firstInds, cutoff )
z_zInv = El.SOCApply( z, zInv, orders, firstInds, cutoff )
if output:
  El.Print( sInv, "inv(s)" )
  El.Print( zInv, "inv(z)" )
  El.Print( sInv_s, "s o inv(s)" )
  El.Print( zInv_z, "z o inv(z)" )
  El.Print( s_sInv, "inv(s) o s" )
  El.Print( z_zInv, "inv(z) o z" )

# Compute the Nesterov-Todd scaling point of (s,z)
# ================================================
w = El.SOCNesterovTodd( s, z, orders, firstInds, cutoff )
wRoot = El.SOCSquareRoot( w, orders, firstInds, cutoff )
wRootInv = El.SOCInverse( wRoot, orders, firstInds, cutoff )
sNT = El.SOCApplyQuadratic( wRootInv, s, orders, firstInds, cutoff )
zNT = El.SOCApplyQuadratic( wRoot, z, orders, firstInds, cutoff )
if output:
  El.Print( w, "w" )
  El.Print( sNT, "s_NT" )
  El.Print( zNT, "z_NT" )

# Compute the minimum non-negative step length, alpha, such that s + alpha y
# touches the boundary of the product cone
y = El.DistMultiVec()
El.Uniform( y, n, 1 )
upperBound = 100.
alpha = El.MaxStepInSOC( s, y, orders, firstInds, upperBound, cutoff )
p = El.DistMultiVec()
El.Copy( s, p )
El.Axpy( alpha, y, p )
pDets = El.SOCDets( p, orders, firstInds, cutoff )
if output:
  El.Print( y, "y" )
  if worldRank == 0:
    print "maximum step in cone is:", alpha
  El.Print( p, "s + alpha y" )
  El.Print( pDets, "det(s + alpha y)" )

# Require the user to press a button before the figures are closed
El.Finalize()
if worldSize == 1:
  raw_input('Press Enter to exit')
mariosky/evo-drawings
venv/lib/python2.7/site-packages/py2neo/packages/httpstream/__init__.py
Python
agpl-3.0
842
0
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright 2013-2014, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


""" HTTPStream
"""

__author__ = "Nigel Small"
__copyright__ = "2013-2014, Nigel Small"
__email__ = "[email protected]"
__license__ = "Apache License, Version 2.0"
__version__ = "1.2.0"


from .http import *
chiara-paci/baskerville
baskervilleweb/bibliography/apps.py
Python
gpl-3.0
177
0.011299
# rock_n_roll/apps.py
from django.apps import AppConfig


class BibliographyConfig(AppConfig):
    name = 'bibliography'
    verbose_name = "bibliography"
    #pippio = "pippo"
keisetsu/joblist
indeed.py
Python
mit
5,528
0.000543
#!/usr/bin/env python

import dateutil.parser
import dateutil.tz
import feedparser
import re

from datetime import datetime, timedelta

from joblist import JobList


class FilterException(Exception):
    pass


class IndeedJobList(JobList):
    '''Joblist class for Indeed

    This joblist is for the indeed.com rss feed. Indeed has an API, but
    it requires registration and is more suited to companies repackaging
    their data. The RSS feed works just fine for the kind of search I'm
    interested in.
    '''
    base_url = ('http://www.indeed.{domain}/rss?q={keywords}&l={location}'
                '&sort=date&start={offset}')
    page_size = 20

    def collect_results(self, keywords, location, radius, filter_location=(),
                        filter_title=(), country='us', max_results=1000,
                        oldest=None):
        '''Collect results for indeed.com (.ca, etc)

        The feeds site is "indeed.com/rss?" plus these parameters:

        * q: a set of keywords, combined with "+"
        * l: the location (a zip code, "city, state", "remote", or just a state)
        * sort: "date" or "relevance", I guess
        * offset: The rss returns up to 20 results, you can page through
          them using this parameter

        :param keywords: str
            A space-separated list of keywords, arguments to the "q" operator
        :param location: str
            a zip code, "city, state" combination, "remote", or state code.
            Argument to "l"
        :param radius: int
            radius around a location. Argument to "r". May use 0 to limit
            to the location exactly.
        :param filter_location: str
            an iterable of locations to be removed from results. Any
            location that contains any of the strings will be ignored.
        :param filter_title: str
            an iterable of strings to filter titles. A title will be
            ignored if it contains any of the strings.
        :param country: str
            A two-letter country code. Defaults to "us", which will try
            indeed.com; will try any other code if provided, but there is
            no guarantee other codes will be handled well.
        :param max_results: int
            A maximum number of results. The results may be less than
            this, but the function will stop querying if this number is
            reached.
        :param oldest: timedelta
            Anything older than today - oldest will be ignored.

        :returns: A generator which when called will yield a dict of the
            following format:
            {
             'date': The reported date of the entry,
             'id': 'indeed$' + indeed's id for the job entry,
             'link': a link to indeed's page about the entry,
             'location': the entry's reported location,
             'source': the reported author of the post,
             'title': the reported title
            }
        '''
        domain = 'com'
        if country != 'us':
            domain = country
        if oldest is None:
            oldest = timedelta(weeks=52)
        oldest_cutoff = datetime.now(tz=dateutil.tz.tzlocal()) - oldest
        pages = 0
        found = 0
        cutoff = False
        previous = ()
        while found < max_results:
            # Get a page of feed results (sorted by date), and process
            # it until either a date older than *oldest_cutoff*
            # appears or all the entries have been processed
            offset = pages * self.page_size
            feed = feedparser.parse(
                self.base_url.format(domain=domain,
                                     keywords=keywords,
                                     location=location,
                                     radius=radius,
                                     offset=offset)
            )
            new = []
            for entry in feed['entries']:
                # We've seen this before, skip it.
                if entry['id'] in previous:
                    continue
                new.append(entry['id'])
                entry_date = dateutil.parser.parse(entry['published'])
                if oldest_cutoff > entry_date:
                    return None
                entry_title = entry['title']
                entry_location = 'Unspecified'
                try:
                    entry_location = entry_title.split(' - ')[-1]
                except IndexError:
                    pass
                try:
                    for location_filter in filter_location:
                        if re.search(location_filter, entry_location,
                                     re.IGNORECASE):
                            raise FilterException
                    for title_filter in filter_title:
                        if re.search(title_filter, entry_title,
                                     re.IGNORECASE):
                            raise FilterException
                except FilterException:
                    continue
                found += 1
                yield {
                    'date': entry_date,
                    'id': 'indeed$' + entry['id'],
                    'link': entry['link'],
                    'location': entry_location,
                    'source': entry['source']['title'],
                    'title': entry_title,
                }
            if not new:
                # The assumption is that if none of the entries are new,
                # indeed is just repeating and the current group
                # of jobs is ended
                return None
            previous = tuple(new)
            pages += 1
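# A minimal usage sketch (illustrative, not part of the original file):
# iterating the generator returned by collect_results(). The search terms and
# filters below are made up; the parameter names come from the docstring
# above, and the no-argument constructor is an assumption about JobList.
if __name__ == '__main__':
    joblist = IndeedJobList()
    for job in joblist.collect_results('python developer', 'remote', 0,
                                       filter_title=('senior',),
                                       max_results=50,
                                       oldest=timedelta(weeks=4)):
        print(job['date'], job['title'], job['location'])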
marco-lilek/musiClr
src/utils/runner.py
Python
mit
1,645
0.021884
import ntpath

import mp3parser
from modifyTag import TagWrapper
from glob import glob
from os.path import join


def pathLeaf(path):
    head, tail = ntpath.split(path)
    return tail or ntpath.basename(head)


class Runner():
    def __init__(self, configs):
        # We do this here to make it clear what data we need
        self.targetDir = configs['targetDir']
        self.artistList = configs['artists']
        self.useLastFm = configs['settings']['useLastFM']
        self.overwriteTags = configs['settings']['overwriteTags']

        self.files = glob(join(self.targetDir, "*.mp3"))
        self.results = []

    def run(self, dlg):
        # They now share the same information, so we shouldn't need to
        # reassign at the end
        mp3parser.artistList = self.artistList

        finished = 0
        for fileName in self.files:
            finished += 1
            raw = pathLeaf(fileName)[:-4]
            dlg.Update(finished, "Current Song: " + raw)
            with TagWrapper(fileName) as tag:
                if tag.tag is None:
                    self.results.append(["Error", raw, "", "", fileName])
                elif not self.overwriteTags and tag.hasTags():
                    mp3parser.addToArtistList(tag.tag.artist)
                    self.results.append(["Skipped", raw, tag.tag.artist,
                                         tag.tag.name, fileName])
                else:
                    artist, name = mp3parser.parse(raw, self.useLastFm)
                    if artist is None:
                        self.results.append(["Bad", raw, "", "", fileName])
                    else:
                        tag.modify(artist, name)
                        self.results.append(["Good", raw, artist, name,
                                             fileName])
appop/bitcoin
qa/rpc-tests/rpcbind_test.py
Python
mit
4,449
0.004271
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The nealcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test running nealcoind with the -rpcbind and -rpcallowip options."""

from test_framework.test_framework import nealcoinTestFramework
from test_framework.util import *
from test_framework.netutil import *


class RPCBindTest(nealcoinTestFramework):

    def __init__(self):
        super().__init__()
        self.setup_clean_chain = True
        self.num_nodes = 1

    def setup_network(self):
        pass

    def setup_nodes(self):
        pass

    def run_bind_test(self, allow_ips, connect_to, addresses, expected):
        '''
        Start a node with requested rpcallowip and rpcbind parameters,
        then try to connect, and check if the set of bound addresses
        matches the expected set.
        '''
        expected = [(addr_to_hex(addr), port) for (addr, port) in expected]
        base_args = ['-disablewallet', '-nolisten']
        if allow_ips:
            base_args += ['-rpcallowip=' + x for x in allow_ips]
        binds = ['-rpcbind='+addr for addr in addresses]
        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, [base_args + binds], connect_to)
        pid = nealcoind_processes[0].pid
        assert_equal(set(get_bind_addrs(pid)), set(expected))
        stop_nodes(self.nodes)

    def run_allowip_test(self, allow_ips, rpchost, rpcport):
        '''
        Start a node with rpcallow IP, and request getnetworkinfo
        at a non-localhost IP.
        '''
        base_args = ['-disablewallet', '-nolisten'] + ['-rpcallowip='+x for x in allow_ips]
        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, [base_args])
        # connect to node through non-loopback interface
        node = get_rpc_proxy(rpc_url(0, "%s:%d" % (rpchost, rpcport)), 0)
        node.getnetworkinfo()
        stop_nodes(self.nodes)

    def run_test(self):
        # due to OS-specific network stats queries, this test works only on Linux
        assert(sys.platform.startswith('linux'))
        # find the first non-loopback interface for testing
        non_loopback_ip = None
        for name, ip in all_interfaces():
            if ip != '127.0.0.1':
                non_loopback_ip = ip
                break
        if non_loopback_ip is None:
            assert(not 'This test requires at least one non-loopback IPv4 interface')
        print("Using interface %s for testing" % non_loopback_ip)

        defaultport = rpc_port(0)

        # check default without rpcallowip (IPv4 and IPv6 localhost)
        self.run_bind_test(None, '127.0.0.1', [],
            [('127.0.0.1', defaultport), ('::1', defaultport)])
        # check default with rpcallowip (IPv6 any)
        self.run_bind_test(['127.0.0.1'], '127.0.0.1', [],
            [('::0', defaultport)])
        # check only IPv4 localhost (explicit)
        self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1'],
            [('127.0.0.1', defaultport)])
        # check only IPv4 localhost (explicit) with alternative port
        self.run_bind_test(['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171'],
            [('127.0.0.1', 32171)])
        # check only IPv4 localhost (explicit) with multiple alternative ports on same host
        self.run_bind_test(['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171', '127.0.0.1:32172'],
            [('127.0.0.1', 32171), ('127.0.0.1', 32172)])
        # check only IPv6 localhost (explicit)
        self.run_bind_test(['[::1]'], '[::1]', ['[::1]'],
            [('::1', defaultport)])
        # check both IPv4 and IPv6 localhost (explicit)
        self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1', '[::1]'],
            [('127.0.0.1', defaultport), ('::1', defaultport)])
        # check only non-loopback interface
        self.run_bind_test([non_loopback_ip], non_loopback_ip, [non_loopback_ip],
            [(non_loopback_ip, defaultport)])

        # Check that with invalid rpcallowip, we are denied
        self.run_allowip_test([non_loopback_ip], non_loopback_ip, defaultport)
        try:
            self.run_allowip_test(['1.1.1.1'], non_loopback_ip, defaultport)
            assert(not 'Connection not denied by rpcallowip as expected')
        except JSONRPCException:
            pass


if __name__ == '__main__':
    RPCBindTest().main()
bfontaine/jinja2_maps
tests/test_base.py
Python
mit
718
0.005571
# -*- coding: UTF-8 -*-

from base import TestCase

from jinja2_maps.base import _normalize_location


class Location(object):
    def __init__(self, **kw):
        for k, v in kw.items():
            setattr(self, k, v)


class TestBase(TestCase):

    def test_normalize_location_dict(self):
        d = {"latitude": 42.0, "longitude": 17.0}
        self.assertEquals(d, _normalize_location(d))

    def test_normalize_location_nondict(self):
        self.assertEquals({"latitude": 42.3, "longitude": 17.1},
                          _normalize_location(Location(latitude=42.3, longitude=17.1)))
        self.assertEquals({"latitude": 41.3, "longitude": 16.1},
                          _normalize_location(Location(lat=41.3, lng=16.1)))
pez2001/sVimPy
test_scripts/test_nested_function.py
Python
gpl-2.0
46
0.086957
def f2():
    def f3():
        print("f3")
    f3()

f2()
BNBLORD/bookingsynclord
bookingsynclord/data_store/ChangeOverStore.py
Python
gpl-3.0
346
0.008671
from GenericStore import GenericStore


class ChangeOverStore(GenericStore):
    """Store used to manage Source entities.

    BookingSync doc :
    http://developers.bookingsync.com/reference/endpoints/change_overs/
    """

    def __init__(self, credential_manager):
        super(ChangeOverStore, self).__init__(credential_manager, "change_overs")
littlecodersh/EasierLife
Scripts/LogInput&Output/py3.py
Python
mit
642
0.012461
import sys

class outPip(object):
    def __init__(self, fileDir):
        self.fileDir = fileDir
        self.console = sys.stdout

    def write(self, s):
        self.console.write(s)
        with open(self.fileDir, 'a') as f:
            f.write(s)

    def flush(self):
        self.console.flush()

new_input = input

def inPip(fileDir):
    def _input(hint):
        s = new_input(hint)
        with open(fileDir, 'a') as f:
            f.write(s)
        return s
    return _input

sys.stdout = outPip('out.log')
input = inPip('out.log')

print('This will appear on your console and your file.')
print('So is this line.')
input('yo')
cgalleguillosm/accasim
accasim/base/scheduler_class.py
Python
mit
26,009
0.009574
""" MIT License Copyright (c) 2017 cgalleguillosm, AlessioNetti Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import logging from sys import maxsize from random import seed from abc import abstractmethod, ABC from sortedcontainers.sortedlist import SortedListWithKey from enum import Enum from copy import deepcopy from accasim.base.resource_manager_class import ResourceManager from accasim.base.allocator_class import AllocatorBase class DispatcherError(Exception): pass class JobVerification(Enum): REJECT = -1 # All jobs are rejected NO_CHECK = 0 # No verification CHECK_TOTAL = 1 # Total requested resources are verified CHECK_REQUEST = 2 # Each node x resources are verified class SchedulerBase(ABC): """ This class allows to implement dispatching methods by integrating with an implementation of this class an allocator (:class:`accasim.base.allocator_class.AllocatorBase`). An implementation of this class could also serve as a entire dispatching method if the allocation class is not used as default (:class:`.allocator` = None), but the resource manager must be set on the allocator using :func:`accasim.base.allocator_class.AllocatorBase.set_resource_manager`. """ MAXSIZE = maxsize ALLOW_MAPPING_SAME_NODE = True def __init__(self, _seed, allocator=None, job_check=Job
Verification.CHECK_REQUEST, **kwargs): """ Construct a scheduler :param seed: Seed for the random state :param resource_manager: A Resource Manager object for dealing with system resources. :param allocator: Allocator object to be used by the scheduler to allocater after schedule generation. If
an allocator isn't defined, the scheduler class must generate the entire dispatching plan. :param job_check: A job may be rejected if it doesnt comply with: - JobVerification.REJECT: Any job is rejected - JobVerification.NO_CHECK: All jobs are accepted - JobVerification.CHECK_TOTAL: If the job requires more resources than the available in the system. - JobVerification.CHECK_REQUEST: if an individual request by node requests more resources than the available one. :param kwargs: - skip_jobs_on_allocation: If the allocator is predefined and this parameter is true, the allocator will try to allocate jobs as much as possible. Otherwise, the allocation will stop after the first fail. """ seed(_seed) self._counter = 0 self.allocator = None self._logger = logging.getLogger('accasim') self._system_capacity = None self._nodes_capacity = None self.resource_manager = None if allocator: assert isinstance(allocator, AllocatorBase), 'Allocator not valid for scheduler' self.allocator = allocator # self.set_resource_manager(resource_manager) assert(isinstance(job_check, JobVerification)), 'job_check invalid type. {}'.format(job_check.__class__) if job_check == JobVerification.REJECT: print('All jobs will be rejected, and for performance purposes the rejection messages will be omitted.') self._job_check = job_check # Check resources self._min_required_availability = kwargs.pop('min_resources', None) # ['core', 'mem']s # Skip jobs during allocation self.skip_jobs_on_allocation = kwargs.pop('skip_jobs_on_allocation', False) @property def name(self): """ Name of the schedulign method """ raise NotImplementedError @abstractmethod def get_id(self): """ Must return the full ID of the scheduler, including policy and allocator. :return: the scheduler's id. """ raise NotImplementedError @abstractmethod def scheduling_method(self, cur_time, es_dict, es): """ This function must map the queued events to available nodes at the current time. :param cur_time: current time :param es_dict: dictionary with full data of the job events :param es: events to be scheduled :return a tuple of (time to schedule, event id, list of assigned nodes), an array jobs id of rejected jobs """ raise Exception('This function must be implemented!!') def set_resource_manager(self, resource_manager): """ Set a resource manager. :param resource_manager: An instantiation of a resource_manager class or None """ if resource_manager: if self.allocator: self.allocator.set_resource_manager(resource_manager) assert isinstance(resource_manager, ResourceManager), 'Resource Manager not valid for scheduler' self.resource_manager = resource_manager else: self.resource_manager = None def schedule(self, cur_time, es_dict, es): """ Method for schedule. It calls the specific scheduling method. :param cur_time: current time :param es_dict: dictionary with full data of the events :param es: events to be scheduled :return: a tuple of (time to schedule, event id, list of assigned nodes), array of rejected job ids. """ assert(self.resource_manager is not None), 'The resource manager is not defined. It must defined prior to run the simulation.' 
self._counter += 1 self._logger.debug("{} Dispatching: #{} decision".format(cur_time, self._counter)) self._logger.debug('{} Dispatching: {} queued jobs'.format(cur_time, len(es))) self._logger.debug('{} Dispatching: {}'.format(cur_time, self.resource_manager.current_usage)) rejected = [] # At least a job need 1 core and 1 kb/mb/gb of mem to run if self._min_required_availability and any([self.resource_manager.resources.full[res] for res in self._min_required_availability]): self._logger.debug("There is no availability of one of the min required resource to run a job. The dispatching process will be delayed until there is enough resources.") return [(None, e, []) for e in es], rejected accepted = [] # Verify jobs with the defined Job Policy for e in es: job = es_dict[e] if not job.get_checked() and not self._check_job_request(job): if self._job_check != JobVerification.REJECT: self._logger.warning('{} has been rejected by the dispatcher. ({})'.format(e, self._job_check)) rejected.append(e)
shoopio/shoop
shuup_tests/simple_cms/test_custom_templates.py
Python
agpl-3.0
1,062
0.000942
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import datetime

import pytest

from shuup.simple_cms.models import Page
from shuup.simple_cms.views import PageView
from shuup.testing.factories import get_default_shop
from shuup.testing.utils import apply_request_middleware
from shuup_tests.simple_cms.utils import create_page


@pytest.mark.django_db
@pytest.mark.parametrize("template_name", ["page.jinja", "page_sidebar.jinja"])
def test_superuser_can_see_invisible_page(rf, template_name):
    template_path = "shuup/simple_cms/" + template_name
    page = create_page(template_name=template_path, available_from=datetime.date(1988, 1, 1), shop=get_default_shop())
    view_func = PageView.as_view()
    request = apply_request_middleware(rf.get("/"))
    response = view_func(request, url=page.url)
    response.render()
    assert response.template_name[0] == template_path
Zincr0/pyscrap
setup.py
Python
apache-2.0
1,541
0.012979
# -*- coding=utf-8 -*-

#Copyright 2012 Daniel Osvaldo Mondaca Seguel
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.

import sys
import os
from setuptools import setup

reload(sys)
sys.setdefaultencoding('utf-8')

files = ["pyscrap/*"]

def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()

setup(
    scripts=["bin/wscrap"],
    name="pyscrap",
    version="0.0.9",
    author="Daniel Mondaca",
    author_email="[email protected]",
    description=("micro framework for web scraping"),
    license = "Apache 2.0 License",
    keywords = "web scraping",
    url = "http://github.com/Nievous/pyscrap",
    packages=["pyscrap"],
    install_requires = ["lxml", "simplejson"],
    long_description=read("README.txt"),
    package_data = {"package": files},
    classifiers=[
        "Development Status :: 4 - Beta",
        "Topic :: Software Development",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: POSIX :: Linux",
        "Programming Language :: Python",
    ],
)
alexhayes/django-psi
psi/__init__.py
Python
mit
22
0
__version__ = '0.3.0'
andb0t/Fuxenpruefung
src/i18n.py
Python
gpl-3.0
9,386
0.003525
import files

CURRENT_LANGUAGE = 'ger'


class dictionary:
    def __init__(self):
        pass
    score = {'ger': 'Punkte',
             'bay': 'Punkte',
             }
    rank = {'ger': 'Platz',
            'bay': 'Bloz',
            }
    time = {'ger': 'Datum',
            'bay': 'Datum',
            }
    username = {'ger': 'Name',
                'bay': 'Nom',
                }


def lang():
    return CURRENT_LANGUAGE


def translate(word):
    try:
        return getattr(dictionary, word.lower())[lang()]
    except KeyError:
        return word


def lang_button_image():
    return files.resource_path('', 'images\\' + lang() + '_' + switch_language(False) + '.png')


def switch_language(doSwitch=True):
    global CURRENT_LANGUAGE
    if CURRENT_LANGUAGE == 'ger':
        if doSwitch:
            CURRENT_LANGUAGE = 'eng'
        return 'eng'
    elif CURRENT_LANGUAGE == 'eng':
        if doSwitch:
            CURRENT_LANGUAGE = 'bay'
        return 'bay'
    elif CURRENT_LANGUAGE == 'bay':
        if doSwitch:
            CURRENT_LANGUAGE = 'ger'
        return 'ger'


longNames = {'ger': ['Kleine Frage', 'Mittlere Frage', 'Große Frage', 'Permanente Frage', 'Scherzfrage', 'Archiv'],
             'eng': ['Small question', 'Medium question', 'Hard question', 'Permanent question', 'Joke question', 'Archive'],
             'bay': ['Gloane Frog', 'Normale Frog', 'Gscheide Frog', 'Dauerfrog', 'Schmarrnfrog', 'Oide Frogn'],
             }

shortNames = {'ger': ['S', 'M', 'H', 'P', 'J', 'A'],
              'eng': ['S', 'M', 'H', 'P', 'J', 'A'],
              'bay': ['S', 'M', 'H', 'P', 'J', 'A'],
              }

startButtonText = {'ger': ['Start', 'Schließen'],
                   'eng': ['Go!', 'Close'],
                   'bay': ["Af geht's!", 'A Rua is!'],
                   }

linkLabelText = {'ger': 'AGV Webseite',
                 'eng': 'AGV website',
                 'bay': "Webseitn",
                 }

errorTitle = {'ger': 'Bierjunge!',
              'eng': 'Error!',
              'bay': 'Foisch!',
              }

errorText = {'ger': ['Keine Fragensammlung ausgewaehlt! Nochmal!', 'Falsches Passwort! Nochmal!', 'Fehler in Fragensammlung: '],
             'eng': ['No question file selected. Retry!', 'Bad password. Retry!', 'Error in question file: '],
             'bay': ['Koa Frognkatalog gfunna. Nomoi!', 'Posswoat foisch. Nomoi!', 'Foische Form im Frognkatalog: '],
             }

dictInit = {'ger': ['Erstelle neue Fuxenprüfung', 'Zeige Fragenstatistik', 'Zeige alle Fragen', 'Interaktives Quiz', 'Fuxensnake'],
            'eng': ['Compile new exam', 'Show question statistics', 'Show all questions', 'Interactive quiz', 'Fox Snake'],
            'bay': ['Gib ma a neie Pruefung', "Zeig ma d'Statistik", "Zeig olle Frogn her", 'Machma a Quiz', 'Fuxnspui'],
            }

examTitle = {'ger': ['Fuxenprüfung', 'Fuxenlösung'],
             'eng': ['Exam', 'Solution'],
             'bay': ["Afgobntext fia'd Fuxn", 'Loesung fian FM'],
             }

statisticsHeader = {'ger': ['Fragenpool nach Schwierigkeit', 'Fragenpool nach Thema'],
                    'eng': ['Question pool by difficulty', 'Question pool by topic'],
                    'bay': ["Frognkatalog noch wia schwar s'is", 'Frognkatalog noch Thema'],
                    }

statisticsColHeader = {'ger': ['Kategorie', 'Anzahl', 'Anteil'],
                       'eng': ['Category', 'Number', 'Fraction'],
                       'bay': ['Kategorie', 'Zohl', 'Brozent'],
                       }

allquestionsHeader = {'ger': 'Alle verfügbaren Fragen und Antworten',
                      'eng': 'All available questions and answers',
                      'bay': "Olle Frogn und Antwortn di wo's gibt",
                      }

allquestionsColHeader = {'ger': ['Frage', 'Antwort', 'Kateg.', 'Schw.', 'Platz'],
                         'eng': ['Question', 'Answer', 'Categ.', 'Diff.', 'Space'],
                         'bay': ['Frog', 'Antwort', 'Kateg.', 'Schw.', 'Ploz'],
                         }

appHeader = {'ger': 'Fuxenprüfungsgenerator',
             'eng': 'Exam generator',
             'bay': "Afgobnautomat fia'd Fuxn",
             }

passwordText = {'ger': ['Passwort', 'Datei ist verschlüsselt! Bitte Passwort eingeben:'],
                'eng': ['Password', 'File is encrypted! Please enter password:'],
                'bay': ['Posswoat', "Abgsperrt! Schlisse eigebn bitt'schen:"],
                }

examFile = {'ger': ['fuxenpruefung.txt', 'fuxenloesung.txt'],
            'eng': ['exam.txt', 'solution.txt'],
            'bay': ['fuxenpruefung.txt', 'fuxenloesung.txt'],
            }

quizTitle = {'ger': 'Fuxenquiz',
             'eng': 'Interactive quiz',
             'bay': "Quiz",
             }

yesNo = {'ger': ['Klar!', 'Nein'],
         'eng': ['Yes!', 'No'],
         'bay': ['No halle', 'Na'],
         }

quizButton = {'ger': ['Weiss ich', 'Keine Ahnung', 'Überspringen', 'Abbruch'],
              'eng': ['I know it', "I don't know", 'Skip', 'Cancel'],
              'bay': ['Woass I', 'Woas I ned', 'Andere Frog', 'Etz reichts!'],
              }

quizHeader = {'ger': ['Testergebnis', 'Interpretation'],
              'eng': ['Test result', 'Interpretation'],
              'bay': ["Zeignis", 'Wos hoast des etz'],
              }

quizInterpretation = {'ger': ['Durchgefallen', 'Knapp bestanden', 'Befriedigend', 'Gut gemacht', 'Perfekt'],
                      'eng': ['Insufficient', 'Barely passed', 'Sufficient', 'Well done', 'Perfect'],
                      'bay': ['A totaler Depp', 'Do homma scho bessane ghabt', 'Ned schlecht', 'Sauber, du woast echt vui', 'Du host gspickt'],
                      }

quizCorrect = {'ger': 'der Antworten wurden richtig beantwortet',
               'eng': 'of the questions were answered correctly',
               'bay': 'hom gstimmt',
               }

answerCorrect = {'ger': ['Richtig', 'Falsch', 'Übersprungen'],
                 'eng': ['Correct', 'Wrong', 'Skipped'],
                 'bay': ['Hod gstimmt', 'Foisch', 'Übersprunga'],
                 }

snakeWelcome = {'ger': 'Fuxensnake',
                'eng': 'Fox Snake',
                'bay': 'Fuxnspui',
                }

snakeInstruction = {'ger': ['Kurzanleitung', 'Trink ein Bier!', 'Der Fux gibt einen Punkt pro getrunkenes Bier', 'Der Goldfux! {0} x so viel wert, beeil dich!', 'Jägermeister! Achtung, so stark wie {0} Bier!', 'Der Eimer für Notfälle'],
                    'eng': ['Instructions', 'Have a pint!', 'A fox gives you a point for every pint', 'The golden Fox! {0} times the value, hurry up!', 'Jaegermeister! Strong as {0} beer, can you handle it?', 'The bucket for emergencies'],
                    'bay': ['Afgom', 'Dring a Bier!', 'Fia a jeds Bier gibts bei am Fux an Punkt', 'Da Goidfux! {0} moi so vui wert aber glei wieder weg!', 'A Jager, stoak wia {0} Bier. Hostas im Kreiz?', "Da Kibe, wann's zvui werd"],
                    }

snakeScore = {'ger': 'Punktestand',
              'eng': 'Score',
              'bay': 'Punkte',
              }

hitKey = {'ger': ['Zum Start Pfeiltaste drücken!', 'Spiel vorbei', 'Escape Taste für Abbruch', 'Enter Taste für Neustart'],
          'eng': ['Press an arrow key to start!', 'Game over', '<esc> to continue', '<return> for restart'],
          'bay': ['Druck af a Pfeiltastn zum Spuin!', "Aus is'", 'Druck af Escape wannst gnua host!', 'Nomoi mit da Enter Tastn'],
          }

snakeUserNameRequest = {'ger': ['Name eingeben', 'zum Ändern klicken', 'Name', 'Zeichen maximal'],
                        'eng': ['Enter y
VulcanTechnologies/oauth2lib
oauth2lib/utils.py
Python
mit
2,411
0.005807
from __future__ import absolute_import, division, print_function, unicode_literals

import string
import urllib

try:
    from urllib.parse import urlparse, urlencode, urljoin, parse_qsl, urlunparse
    from urllib.request import urlopen, Request
    from urllib.error import HTTPError
except ImportError:
    from urlparse import urlparse, urljoin, urlunparse, parse_qsl
    from urllib import urlencode
    from urllib2 import urlopen, Request, HTTPError

from random import SystemRandom

try:
    UNICODE_ASCII_CHARACTERS = (string.ascii_letters +
                                string.digits)
except AttributeError:
    UNICODE_ASCII_CHARACTERS = (string.ascii_letters.decode('ascii') +
                                string.digits.decode('ascii'))


def random_ascii_string(length):
    random = SystemRandom()
    return ''.join([random.choice(UNICODE_ASCII_CHARACTERS) for x in range(length)])


def url_query_params(url):
    """Return query parameters as a dict from the specified URL.

    :param url: URL.
    :type url: str
    :rtype: dict
    """
    return dict(parse_qsl(urlparse(url).query, True))


def url_dequery(url):
    """Return a URL with the query component removed.

    :param url: URL to dequery.
    :type url: str
    :rtype: str
    """
    url = urlparse(url)
    return urlunparse((url.scheme,
                       url.netloc,
                       url.path,
                       url.params,
                       '',
                       url.fragment))


def build_url(base, additional_params=None):
    """Construct a URL based off of base containing all parameters in
    the query portion of base plus any additional parameters.

    :param base: Base URL
    :type base: str
    :param additional_params: Additional query parameters to include.
    :type additional_params: dict
    :rtype: str
    """
    url = urlparse(base)
    query_params = {}
    query_params.update(parse_qsl(url.query, True))
    if additional_params is not None:
        query_params.update(additional_params)

        for k, v in additional_params.items():
            if v is None:
                query_params.pop(k)

    return urlunparse((url.scheme,
                       url.netloc,
                       url.path,
                       url.params,
                       urlencode(query_params),
                       url.fragment))
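# A minimal usage sketch (illustrative, not part of the original file):
# build_url() merges parameters already present in the base URL with the
# additional ones, and a None value removes a parameter. The URL below is
# made up for the demo.
if __name__ == '__main__':
    print(build_url('https://api.example.com/authorize?scope=read',
                    {'client_id': 'abc123', 'scope': None}))
    # -> https://api.example.com/authorize?client_id=abc123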
namccart/pybombs
pybombs/utils/sysutils.py
Python
gpl-3.0
4,522
0.002654
#
# Copyright 2015 Free Software Foundation, Inc.
#
# This file is part of PyBOMBS
#
# PyBOMBS is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# PyBOMBS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PyBOMBS; see the file COPYING.  If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
"""
System Utils
"""

from __future__ import print_function
import re
import os
import os.path as op
from pybombs.pb_exception import PBException

def which(program, env=None):
    """
    Equivalent to Unix' `which` command.
    Returns None if the executable `program` can't be found.

    If a full path is given (e.g. /usr/bin/foo), it will return the path
    if the executable can be found, or None otherwise.

    If no path is given, it will search PATH.
    """
    def is_exe(fpath):
        " Check fpath is an executable "
        return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
    if env is None:
        env = os.environ
    if os.path.split(program)[0] and is_exe(program):
        return program
    else:
        for path in os.environ.get("PATH", "").split(os.pathsep):
            exe_file = os.path.join(path, program)
            if is_exe(exe_file):
                return exe_file
    return None

def dir_is_writable(dir_path):
    " Returns True if dir_path is a writable directory "
    return op.isdir(dir_path) and os.access(dir_path, os.W_OK|os.X_OK)

def mkdir_writable(dir_path, log=None):
    """
    Create a directory if it doesn't yet exist.
    Returns True if that worked and the dir is writable.
    Throws a PBException if the parent path does not exist.
    Calls os.mkdir(), which can also throw.
    """
    parent_dir = os.path.split(os.path.normpath(dir_path))[0]
    if len(parent_dir) and not dir_is_writable(parent_dir):
        raise PBException("Can't create dir `{0}'. Parent directory does not exist.".format(dir_path))
    if not op.isdir(dir_path):
        if log is not None:
            log.info("Creating directory `{0}'".format(dir_path))
        os.mkdir(dir_path)
    return dir_is_writable(dir_path)

def mkdirp_writable(dir_path, log=None):
    """
    Like mkdir_writable(), but creates all parents if necessary (like mkdir -p)
    """
    if dir_is_writable(dir_path):
        return True
    parent = os.path.split(dir_path)[0]
    if len(parent) != 0:
        if not mkdirp_writable(parent, log):
            return False
    return mkdir_writable(dir_path, log)

def require_subdirs(base_path, subdirs, log=None):
    """
    subdirs is a list of subdirectories that need to exist inside path.
    If this is satisfied, returns True.
    """
    if not dir_is_writable(base_path):
        if log:
            log.error("Base path {0} does not exist".format(base_path))
        return False
    common_prefix = os.path.commonprefix(
        [os.path.normpath(os.path.join(base_path, x)) for x in subdirs]
    )
    if not op.normpath(common_prefix) in op.normpath(base_path):
        raise PBException("Invalid subdir list (going outside base path)")
    return all([mkdirp_writable(os.path.join(base_path, subdir), log) for subdir in subdirs])

def write_file_in_subdir(base_path, file_path, content):
    """
    Write 'content' to a file. The absolute path to the file comes from
    joining base_path and file_path. However, if file_path tries to go
    outside base_path, an exception is raised.
    """
    abs_file_path = os.path.join(base_path, file_path)
    if not op.normpath(base_path) in op.normpath(abs_file_path):
        raise PBException("Attempting write to file outside base_path")
    open(abs_file_path, 'w').write(content)

def is_virtualenv(path):
    " Returns True if path is actually a Python virtualenv (False if not) "
    venv_test_file = op.join(path, 'bin', 'activate')
    if not op.isfile(venv_test_file):
        return False
    try:
        is_venv = re.search("VIRTUAL_ENV", open(venv_test_file).read()) is not None
        return is_venv
    except IOError:
        return False

if __name__ == "__main__":
    print(which("vim"))
danstoner/python_experiments
pgu/examples/gui5.py
Python
gpl-2.0
1,996
0.021042
"""Tables, Widgets, and Groups! An example of tables and most of the included widgets. """ import pygame from pygame.locals import * # the following line is not needed if pgu is installed import sys; sys.path.insert(0, "..") from pgu import gui # Load an alternate theme to show how it is done. You can also # specify a path (absolute or relative) to your own custom theme: # # app = gui.Desktop(theme=gui.Theme("path/to/theme")) # app = gui.Desktop() app.connect(gui.QUIT,app.quit,None) ##The table code is entered much like HTML. ##:: c = gui.Table() c.tr() c.td(gui.Label("Gui Widgets"),colspan=4) def cb(): print("Clicked!") btn = gui.Button("Click Me!") btn.connect(gui.CLICK, cb) c.tr() c.td(gui.Label("Button")) c.td(btn,colspan=3) ## c.tr() c.td(gui.Label("Switch")) c.td(gui.Switch(False),colspan=3) c.tr() c.td(gui.Label("Checkbox")) ##Note how Groups are used for Radio buttons, Checkboxes, and Tools. ##:: g = gui.Group(value=[1,3]) c.td(gui.Checkbox(g,value=1)
) c.td(gui.Checkbox(g,value=2)) c.td(gui.Checkbox(g,value=3)) ## c.tr() c.td(gui.Label("Radio")) g = gui.Group() c.td(gui.Radio(g,value=1)) c.td(gui.Radio(g,value=2)) c.td(gui.Radio(g,value=3)) c.tr() c.td(gui.Label("Select")) e = gui.Select() e.add("Goat",'goat') e.add("Horse",'hor
se') e.add("Dog",'dog') e.add("Pig",'pig') c.td(e,colspan=3) c.tr() c.td(gui.Label("Tool")) g = gui.Group(value='b') c.td(gui.Tool(g,gui.Label('A'),value='a')) c.td(gui.Tool(g,gui.Label('B'),value='b')) c.td(gui.Tool(g,gui.Label('C'),value='c')) c.tr() c.td(gui.Label("Input")) def cb(): print("Input received") w = gui.Input(value='Cuzco',size=8) w.connect("activate", cb) c.td(w,colspan=3) c.tr() c.td(gui.Label("Slider")) c.td(gui.HSlider(value=23,min=0,max=100,size=20,width=120),colspan=3) c.tr() c.td(gui.Label("Keysym")) c.td(gui.Keysym(),colspan=3) c.tr() c.td(gui.Label("Text Area"), colspan=4, align=-1) c.tr() c.td(gui.TextArea(value="Cuzco the Goat", width=150, height=70), colspan=4) app.run(c)
maweis1981/hey001
mayversion/mayversion/accounts/models.py
Python
lgpl-3.0
4,215
0.024061
#!/usr/bin/env python
# encoding: utf-8

from django.db import models, connection
from django.contrib.auth.models import User, UserManager


class UserProfile(models.Model):
    GENDER_CHOICES = (
        (1, '男'),
        (2, '女'),
    )
    PROVINCE_CHOICES = (
        ('JiangSu', '江苏'),
        ('ShangHai', '上海'),
        ('ShangHai', '上海'),
        ('ShangHai', '上海'),
        ('ShangHai', '上海'),
        ('ShangHai', '上海'),
    )
    user = models.ForeignKey(User, unique=True)
    gender = models.IntegerField(max_length=1, choices=GENDER_CHOICES, default=1)
    birthday = models.DateField(blank=True, null=True)
    province = models.CharField(max_length=20, choices=PROVINCE_CHOICES, default=1)
    city = models.CharField(max_length=10)
    livecity = models.CharField(max_length=10)
    regist_date = models.DateField(auto_now_add=True)

    def __unicode__(self):
        return self.user.username

    def set_user_id(self, raw_user_id):
        raw_user = User.objects.get(pk=raw_user_id)
        if raw_user:
            self.user = raw_user
        else:
            raise Exception('User not exist')

    def get_more_profile(self):
        u = UserMoreProfile.objects.get(user=self)
        return u

User.profile = property(lambda u: UserProfile.objects.get_or_create(user=u)[0])


class UserMoreProfile(models.Model):
    NATIONAL_CHOICES = (
        ('Han', '汉族'),
        ('Hui', '回族'),
    )
    COUNTRY_CHOICES = (
        ('China', '中国'),
        ('America', 'AMERICA'),
    )
    INCOMING_CHOICES = (
        (1, '<2000'),
        (2, '2000-5000'),
        (3, '5000-10000'),
        (4, '10000-20000'),
        (5, '>20000'),
    )
    BLOOD_CHOICES = (
        ('A', 'A型'),
        ('B', 'B型'),
        ('AB', 'AB型'),
        ('O', 'O型'),
    )
    BODY_CHOICES = (
        ('A', '较瘦'),
        ('AA', '适中'),
        ('AAA', '强壮'),
        ('AAA', '较胖'),
    )
    DEGREE_CHOICES = (
        ('never', '从不'),
        ('little', '有时'),
        ('normal', '经常'),
    )
    user = models.ForeignKey(UserProfile, unique=True)
    height = models.IntegerField("身高", null=True)
    weight = models.IntegerField("体重", null=True)
    blood = models.CharField("血型", max_length=10, choices=BLOOD_CHOICES, default=1)
    body = models.CharField("身材", max_length=10, choices=BODY_CHOICES, default=1)
    national = models.CharField("民族", max_length=10, choices=NATIONAL_CHOICES, default=1)
    country = models.CharField("国籍", max_length=10, choices=COUNTRY_CHOICES, default=1)
    gradute = models.CharField("学历", max_length=10)
    industry = models.CharField("行业", max_length=10)
    incoming = models.IntegerField("收入", choices=INCOMING_CHOICES, default=1, null=True)
    smoking = models.CharField("抽烟", max_length=10, choices=DEGREE_CHOICES, default=1)
    drinking = models.CharField("喝酒", max_length=10, choices=DEGREE_CHOICES, default=1)
    family = models.CharField("家庭情况", max_length=10)
    language = models.CharField("语言", max_length=10)
    hobby = models.CharField("兴趣", max_length=10)
    short = models.SlugField("座右铭")

    def set_user_id(self, raw_user_id):
        raw_user = UserProfile.objects.get(pk=raw_user_id)
        if raw_user:
            self.user = raw_user
        else:
            raise Exception('no user exist')


class WhoVisitMe(models.Model):
    master = models.ForeignKey(User, related_name="masters")
    visitor = models.ForeignKey(User, related_name="visitors")
    visit_time = models.DateTimeField(auto_now_add=True)

    def whoVisitMe(self):
        m_user = self.master
        visitor_list = WhoVisitMe.objects.filter(master=m_user).order_by('-visit_time')
        visitor_user_list = []
        for v in visitor_list:
            visitor_user_list.append(v.visitor)
        return visitor_user_list

    def __unicode__(self):
        return '%s visit %s ' % (self.visitor, self.master)


def listWhoVisitMe(user):
    query = ("select distinct visitor_id from accounts_whovisitme "
             "where master_id='%(user_id)s'") % {
        'user_id': user.id,
    }
    cursor = connection.cursor()
    visitor_ids = cursor.execute(query)
    visitorList = []
    for v in cursor.fetchall():
        print v[0]
        visitorList.append(User.objects.get(pk=int(v[0])))
    return visitorList
ostravaTokyo/hfls
python/readFetiData.py
Python
unlicense
14,274
0.027533
import numpy as np
from scipy import sparse
import scipy.sparse.linalg as spla
import pylab as plt
from scipy.linalg import block_diag
#
#
nSub = 2

def load_matrix_basic(pathToFile, makeSparse, makeSymmetric, offset):
    # Read a dump file: one header line, then a dimensions row,
    # then 1-based (row, col, value) triplets.
    f0 = open(pathToFile).readlines()
    firstLine = f0.pop(0)  # removes the header line
    tmp = np.zeros((len(f0), 3), dtype=float)
    for i in range(len(f0)):
        line = f0[i]
        k = line.split()
        tmp[i, 0] = float(k[0])
        tmp[i, 1] = float(k[1])
        tmp[i, 2] = float(k[2])
    if (tmp.shape[0] == 1):
        tmp = []
    else:
        n = np.int32(tmp[0, 0])
        m = np.int32(tmp[0, 1])
        I = tmp[1::, 0] - offset
        J = tmp[1::, 1] - offset
        V = tmp[1::, 2]
        if (makeSymmetric):
            # Mirror the off-diagonal entries; keep the original index
            # arrays so the boolean mask still lines up after growing I.
            logInd = J != I
            I0, J0 = I, J
            I = np.concatenate((I0, J0[logInd]))
            J = np.concatenate((J0, I0[logInd]))
            V = np.concatenate((V, V[logInd]))
        if (makeSparse):
            tmp = sparse.csc_matrix((V, (I, J)), shape=(n, m)).tocoo()
        else:
            if (m == 1):
                tmp = V
            else:
                tmp = sparse.csc_matrix((V, (I, J)), shape=(n, m)).toarray()
    return tmp

def load_matrix(path, str0, i, j, makeSparse, makeSymmetric, offset):
    pathToFile = path + '/' + str(i) + '/' + str0 + str(j) + '.txt'
    tmp = load_matrix_basic(pathToFile, makeSparse, makeSymmetric, offset)
    return tmp

path0 = "../data"

if 1:
    K = []
    K_reg = []
    Fc = []
    R = []
    Rf = []
    Bc = []
    Bf = []
    BcT_dense = []
    Gc = []
#    Gf = []
    Gf_p = []
    Gc = []
    Fc_p = []
    rhs = []
    xx = []
    Kplus_f_test = []
KplusBcT_p = [] Bc_nonzRow = []
KplusBcT = [] BcKplus_tmp = [] # BcK_dense = [] K_UT = [] # x_out = [] # x_out_p = [] # Lumped = [] # Lumped = [] for i in range(nSub): K.append(load_matrix(path0,"dump_K_","",str(i),False,True,1)) K_UT.append(load_matrix(path0,"dump_K_","",str(i),False,False,1)) K_reg.append(load_matrix(path0,"dump_K_reg_","",str(i),False,True,1)) Fc.append(load_matrix(path0,"dump_Fc_","",str(i),False,False,1)) R.append(load_matrix(path0,"dump_R_","",str(i),False,False,1)) Rf.append(load_matrix(path0,"dump_Rf_","",str(i),False,False,1)) Bc.append(load_matrix(path0,"dump_Bc_","",str(i),False,False,1)) Bf.append(load_matrix(path0,"dump_Bf_","",str(i),False,False,1)) Gf_p.append(np.dot(Bf[i],Rf[i])) # Lumped.append(load_matrix(path0,"dump_Lumped_","",str(i),False,False,1)) BcT_dense.append(load_matrix(path0,"dump_BcT_dense_","",str(i),False,False,1)) Gc.append(load_matrix(path0,"dump_Gc_","",str(i),False,False,1)) # Gf.append(load_matrix(path0,"dump_Gf_","",str(i),False,False,1)) indBc = np.abs(Bc[i]).sum(axis=1)>0 Bc_nonzRow.append( Bc[i][indBc,:]) # Fc.append( np.dot(Bc_nonzRow[i], np.linalg.solve(K_reg[i],Bc_nonzRow[i].T))) # Lumped.append( np.dot(Bc_nonzRow[i], np.dot(K[i],Bc_nonzRow[i].T))) rhs.append(load_matrix(path0,"dump_rhs_","",str(i),False,False,1)) # xx.append(load_matrix(path0,"dump_xxTest_","",str(i),False,False,1)) # Kplus_f_test.append(load_matrix(path0,"dump_Kplus_f_test_","",str(i),False,False,1)) # KplusBcT_p = BcKplus_List[i] # BcK_dense.append(load_matrix(path0,"dump_BcK_dense_","",str(i),False,False,1)) # BcK_dense.append(np.dot(K[i],Bc_nonzRow[i].T).T) Gc.append(np.dot(Bc[i], R[i])) KplusBcT.append(load_matrix(path0,"dump_KplusBcT_","",str(i),False,False,1)) KplusBcT_p.append(np.linalg.solve(K_reg[i],Bc_nonzRow[i].T)) # BcKplus_tmp.append(np.linalg.solve(K_reg[i],Bc[i].T).T) # x_out.append(load_matrix(path0,"dump_x_out_","",str(i),False,False,1)) Fc_p.append(np.dot(Bc_nonzRow[i],KplusBcT_p[i])) # iK_K = np.linalg.solve(K_reg[i],K[i]) # K_iK_K = np.dot(K[i],iK_K) # del_ = np.linalg.norm(K_iK_K - K[i] ) / np.linalg.norm(K[i]) # print(del_) # tmp_g = np.dot(Bc[i],np.linalg.solve(K_reg[i], rhs[i])) tmp_e = -np.dot(R[i].T,rhs[i]) if (i == 0): g_p = tmp_g e_p = tmp_e; else: g_p += tmp_g; e_p = np.concatenate((e_p,tmp_e)) print(' ...%d '%(i)) # gc_p = np.concatenate((g_p,e_p)) # gc_p = np.concatenate((gc_p,np.zeros(6))) Gc_clust = load_matrix(path0,"dump_Gc_clust_","",str(0),False,False,1) Ac_clust = load_matrix(path0,"dump_Ac_clust_","",str(0),False,True,1) Fc_clust = load_matrix(path0,"dump_Fc_clust_","",str(0),False,True,1) ker_GcTGc = load_matrix(path0,"dump_kerGc_","",str(0),False,False,1) # gc = load_matrix(path0,"dump_gc_","",str(0),False,False,1) # lam_alpha = load_matrix(path0,"dump_lam_alpha_","",str(0),False,False,1) # lam_alpha_p = np.linalg.solve(Ac_clust, gc) # nLam = Bc[0].shape[0] # lam_p = lam_alpha_p[0:nLam] ## alpha_p = lam_alpha[nLam:] # for i in range(nSub): # print (" ! 
%d " % (i)) # x10 = np.linalg.solve(K_reg[i],rhs[i]) # x11 = np.linalg.solve(K_reg[i],np.dot(Bc[i].T,lam_p)) # # print alpha_p[(6*i):(6*(i+1))] # x2 = np.dot(R[i],alpha_p[(6*i):(6*(i+1))]) # # x_out_p.append(x10 - x11 + x2) # print( "||x_out - x_out_p || = %e " % np.linalg.norm(x_out[i] - x_out_p[i])) Ac_clust_python = np.hstack((Fc_clust,Gc_clust)) Z = np.zeros((Gc_clust.shape[1],Ac_clust_python.shape[1])) print ( Z.shape) Ac_clust_python = np.vstack((Ac_clust_python,Z)) Gf_clust = load_matrix(path0,"dump_Gf_clust_","",str(0),False,False,1) # test = load_matrix(path0,"dump_testXYZ_","",str(0),False,False,1) # KpOnes= load_matrix(path0,"dump_KplusONES_","",str(0),False,False,1) #K_regD = K_reg[0] #frhs = rhs[0] #xxD = xx[0] #RD = R[0] #for i in range(1,nSub): # K_regD = block_diag(K_regD,K_reg[i]); # RD = block_diag(RD,R[i]); # frhs = np.concatenate((frhs,rhs[i])) # xxD = np.concatenate((xxD,xx[i])) # for i in range(nSub - 1): if (i == 0): Bc_g = np.hstack((Bc[0],Bc[1])) else: Bc_g = np.hstack((Bc_g,Bc[i+1])) for i in range(nSub - 1): if (i == 0): Bf_g = np.hstack((Bf[0],Bf[1])) else: Bf_g = np.hstack((Bf_g,Bf[i+1])) for i in range(nSub - 1): if (i == 0): Gf_g = Gf_p[0]+ Gf_p[1] else: Gf_g += Gf_p[i+1] weigth = np.loadtxt(path0+'/dump_weigth.txt') #Fc__ = np.dot(Bc_g,np.linalg.solve(K_regD,Bc_g.T)) # # #gc__ = np.dot(Bc_g,np.linalg.solve(K_regD,frhs)) #ec__ = - np.dot(RD.T,frhs) # #gc__ = np.concatenate((gc__,ec__)) #H = ker_GcTGc #AA0 = np.hstack((Fc__,Gc_clust)) #AB1 = # # #ZZ1 = np.zeros((Gc_clust.shape[0], H.shape[1])) #AA1 = np.vstack((ZZ1,H)) #AA01 = np.hstack((AA0,AA1)) #A0 = np.hstack((K_regD,Bc_g.T)) # #nB = Bc_g.shape[0] #Bc_Z = np.hstack((Bc_g,np.zeros((nB,nB)))) # #crhs = np.zeros(nB); # #A = np.vstack((A0,Bc_Z)) # #b = np.concatenate((frhs,crhs)) # #x = np.linalg.solve(A,b) # #xxD = np.concatenate((xxD,crhs)) #Bc_g = np.hstack((Bc_g,Bc[2])) #Bc_g = np.hstack((Bc_g,Bc[2])) #BcT_dense = load_matrix(path0,"dump_BcT_dense_","",str(0),True,True,1) #Fc_clust = load_matrix(path0,"dump_Fc_clust_","",str(0),True,True,1) #Ac_clust = load_matrix(path0,"dump_Ac_clust_","",str(0),True,True,1) #GcTGc = load_matrix(path0,"dump_GcTGc_clust_","",str(0),False,True,1) #GfTGf = load_matrix(path0,"dump_GfTGf_","",str(0),False,False,1) #iGfTGf = load_matrix(path0,"dump_iGfTGf_","",str(0),False,False,1) #ker_Ac = load_matrix(path0,"dump_ker_Ac_","",str(0),False,False,1) ##KpBcT0 = load_matrix(path0,"dump_KplusBcT_","",str(0),False,False,1) ##KpBcT1 = load_matrix(path0,"dump_KplusBcT_","",str(1),False,False,1) # # #dFc_eig = load_matrix(path0,"dump_Fc_clust_","",str(444),False,False,1) ##dFc_svd = load_matrix(path0,"dump_Fc_clust_","",str(555),False,False,1) #dAc_eig = load_matrix(path0,"dump_Ac_clust_","",str(444),False,False,1) ##dAc_svd = load_mat
tundish/volcasample
volcasample/project.py
Python
gpl-3.0
8,341
0.000959
#!/usr/bin/env python3 # encoding: UTF-8 # This file is part of volcasample. # # volcasample is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # volcasample is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with volcasample. If not, see <http://www.gnu.org/licenses/>. import bisect from collections import OrderedDict from collections import namedtuple import functools import glob import json import os import sys import wave from volcasample.audio import Audio import volcasample.syro __doc__ = """ This module provides a workflow for a Volca Sample project. """ class Project: plot = functools.partial( print, sep="", end="", file=sys.stdout, flush=True ) @staticmethod def scale(pos=0, n=100): Project.plot(*[i // 10 for i in range(pos, n)]) Project.plot("\n") Project.plot(*[i % 10 for i in range(pos, n)]) Project.plot("\n") @staticmethod def progress_point(n=None, clear=2, quiet=False, file_=sys.stderr): if quiet: return elif isinstance(n, int): msg = "." if n % 10 else n // 10 end = "" elif n is None: end = "\n" * clear msg = " OK." else: msg = n end = "" if len(n) == 1 else "\n" * clear print(msg, end=end, file=file_, flush=True) @staticmethod def parse_initial(text): return OrderedDict([ (int(f + s), {" ": None, "x": False}.get(t.lower(), True)) for f, s, t in zip(*text.splitlines()) ]) @staticmethod def optimise(targets, initial:list, vote): jobs = OrderedDict([( tgt["slot"], (volcasample.syro.DataType.Sample_Erase, tgt["path"])) for tgt, keep in zip(targets, initial) if "path" in tgt and keep is False ]) jobs.update(OrderedDict([( tgt["slot"], (volcasample.syro.DataType.Sample_Compress, tgt["path"])) for tgt, keep in zip(targets, initial) if "path" in tgt and keep is True ])) return jobs @staticmethod def create(path, start=0, span=None, quiet=False): stop = min(100, (start + span) if span is not None else 101) Project.progress_point( "Creating project tree at {0}".format(path), quiet=quiet ) for i in range(start, stop): os.makedirs( os.path.join(path, "{0:02}".format(i)), exist_ok=True, ) Project.progress_point(i, quiet=quiet) Project.progress_point(quiet=quiet) return len(os.listdir(path)) @staticmethod def refresh(path, start=0, span=None, quiet=False): stop = min(100, (start + span) if span is not None else 101) Project.progress_point( "Refreshing project at {0}".format(path), quiet=quiet ) tgts = ( os.path.join(path, "{0:02}".format(i), "*.wav") for i in range(start, stop) ) for n, tgt in zip(range(start, stop), tgts): # Metadata defaults metadata = OrderedDict([("slot", n), ("vote", 0)]) # Try to load previous metadata fP = os.path.join( path, "{0:02}".format(n), "metadata.json" ) try: with open(fP, "r") as prev: metadata.update(json.load(prev)) except FileNotFoundError: pass # Use default values for metadata try: src = next(iter(glob.glob(tgt))) w = wave.open(src, "rb") params = w.getparams() metadata.update(Audio.metadata(params, src)) except (FileNotFoundError, StopIteration): pass # Use default values for metadata Project.progress_point(n, quiet=quiet) with open(fP, "w") as new: json.dump(metadata, new, indent=0, sort_keys=True) yield metadata 
Project.progress_point(quiet=quiet) @staticmethod def vote(path, val=None, incr=0, start=0, span=None, quiet=False): stop = min(100, (start + span) if span is not None else 101) tgts = list(Project.refresh(path, start, span, quiet)) for n, tgt in zip(range(start, stop), tgts): tgt["vote"] = val if isinstance(val, int) else tgt["vote"] + incr Project.progress_point( "{0} vote{1} for slot {2:02}. Value is {3}".format( "Checked" if not (val or incr) else "Applied", " increment" if val is None and incr else "", n, tgt.get("vote", 0) ), quiet=quiet
) metadata = os.
path.join(path, "{0:02}".format(n), "metadata.json") with open(metadata, "w") as new: json.dump(tgt, new, indent=0, sort_keys=True) yield tgt @staticmethod def check(path, start=0, span=None, quiet=False): stop = min(100, (start + span) if span is not None else 101) tgts = list(Project.refresh(path, start, span, quiet=True)) for n, tgt in zip(range(start, stop), tgts): if tgt.get("nchannels", 0) > 1 or tgt.get("sampwidth", 0) > 2: fP = os.path.splitext(tgt["path"])[0] + ".ref" try: os.replace(tgt["path"], fP) with wave.open(fP, "rb") as wav: Audio.wav_to_mono(wav, tgt["path"]) except FileNotFoundError: pass yield from Project.refresh(path, n, span=1, quiet=True) Project.progress_point(n, quiet=quiet) Project.progress_point(quiet=quiet) def audition(path, start=0, span=None, quiet=False, silent=False): def grade( nFrames, breaks=[1, 20 * 1024, 100 * 1024, 500 * 1024, 2 * 1024 * 1024], ramp=" .:iI#" ): return ramp[bisect.bisect(breaks, nFrames)] stop = min(100, (start + span) if span is not None else 101) Project.progress_point( "Auditioning project at {0}".format(path), quiet=quiet ) tgts = list(Project.refresh(path, start, span, quiet=True)) Project.scale(start, stop) # 4 MB, 65s grades = [grade(tgt.get("nframes", 0)) for tgt in tgts] for n, grd, tgt in zip(range(start, stop), grades, tgts): if "path" in tgt: Project.plot(grd) wav = wave.open(tgt["path"], "rb") if not silent: rv = Audio.play(wav) if rv is None: return else: rv.wait_done() yield wav else: Project.plot(" ") yield None Project.progress_point(quiet=quiet) def __init__(self,path, start, span, quiet=True): self.path, self.start, self.span = path, start, span self.quiet = quiet self._assets = [] def __enter__(self): self._assets = [] for metadata in self.check( self.path, self.start, self.span, quiet=self.quiet ): self._assets.append(metadata) return self def __exit__(self, exc_type, exc_val, exc_tb): self._assets = [] return False def assemble(self, locn, initial=[], vote=0, optimiser=None): optimiser = optimiser or Project.optimise jobs = optimiser(self._assets, initial, vote) if job
abramhindle/UnnaturalCodeFork
python/testdata/launchpad/cronscripts/rosetta-approve-imports.py
Python
agpl-3.0
523
0
#!/usr/bin/python -S # # Copyright 2009 Canonical Ltd. This software is licensed under the # GNU
Affero General Public License version 3 (see the file LICENSE). """Perform auto-approvals and auto-blocks on translation import queue""" import _pythonpath from lp.translations.scripts.import_queue_ga
rdener import ImportQueueGardener if __name__ == '__main__': script = ImportQueueGardener( 'translations-import-queue-gardener', dbuser='translations_import_queue_gardener') script.lock_and_run()
marioharper182/OptionsPricing
Accelerate/pnlEuropean.py
Python
apache-2.0
3,084
0.027562
__author__ = 'Mario' import wx import wx.xrc ########################################################################### ## Class MainPanel ########################################################################### class MainPanel ( wx.Panel ): def __init__( self, parent ): wx.Panel.__init__ ( self, parent, id = wx.ID_ANY, pos = wx.DefaultPosition, size = wx.Size( 500,300 ), style = wx.TAB_TRAVERSAL ) txtCtrlSizer = wx.BoxSizer( wx.VERTICAL ) self.StockPrice = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 ) txtCtrlSizer.Add( self.StockPrice, 0, wx.ALL, 5 ) self.StockPriceText = wx.StaticText(self, -1, 'Stock Price', pos = wx.Point(125, 10)) self.OptionPrice = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 ) txtCtrlSizer.Add( self.OptionPrice, 0, wx.ALL, 5 ) self.OptionStrikeText = wx.StaticText(self, -1, 'Option Strike Price', pos = wx.Point(125, 42)) self.OptionYears = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyStrin
g, wx.DefaultPosition, wx.DefaultSize, 0 ) txtCtrlSizer.Add( self.OptionYears, 0, wx.ALL, 5 ) self.OptionYearsText = wx.StaticText(self, -1, 'Option
Time Length', pos = wx.Point(125, 75)) self.RiskFree = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 ) txtCtrlSizer.Add( self.RiskFree, 0, wx.ALL, 5 ) self.RiskFreeText = wx.StaticText(self, -1, 'Risk Free Rate', pos = wx.Point(125, 110)) self.Volatility = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 ) txtCtrlSizer.Add( self.Volatility, 0, wx.ALL, 5 ) self.VolatilityText = wx.StaticText(self, -1, 'Input Volatility', pos = wx.Point(125, 142)) buttonSizer = wx.BoxSizer( wx.HORIZONTAL ) self.computeButton = wx.Button( self, wx.ID_ANY, u"Compute", wx.DefaultPosition, wx.DefaultSize, 0 ) buttonSizer.Add( self.computeButton, 0, wx.ALL, 5 ) self.clearButton = wx.Button( self, wx.ID_ANY, u"Clear", wx.DefaultPosition, wx.DefaultSize, 0 ) buttonSizer.Add( self.clearButton, 0, wx.ALL, 5 ) ## Bindings self.computeButton.Bind(wx.EVT_BUTTON, self.OnCompute) self.clearButton.Bind(wx.EVT_BUTTON, self.OnClear) txtCtrlSizer.Add( buttonSizer, 1, wx.EXPAND, 5 ) self.SetSizer( txtCtrlSizer ) self.Layout() def OnCompute(self, event): stockPrice = self.StockPrice.GetValue() optionStrike = self.OptionPrice.GetValue() optionYears = self.OptionYears.GetValue() Riskfree = self.RiskFree.GetValue() Volatility = self.Volatility.GetValue() # print(stockPrice, optionStrike, optionYears, Riskfree, Volatility) # def OnClear(self, event): self.StockPrice.Clear() self.OptionPrice.Clear() self.OptionYears.Clear() self.RiskFree.Clear() self.Volatility.Clear() # pass def __del__( self ): pass
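# --- Editor's sketch (an assumption, not in the original file): a pricing
# step OnCompute could call with the five inputs gathered above. Standard
# Black-Scholes European call; the helper name is illustrative.
import math
from scipy.stats import norm

def black_scholes_call(S, K, T, r, sigma):
    # d1/d2 terms of the Black-Scholes formula
    d1 = (math.log(S / K) + (r + 0.5 * sigma ** 2) * T) / (sigma * math.sqrt(T))
    d2 = d1 - sigma * math.sqrt(T)
    # discounted risk-neutral expectation of the call payoff
    return S * norm.cdf(d1) - K * math.exp(-r * T) * norm.cdf(d2)

# e.g. black_scholes_call(100.0, 95.0, 1.0, 0.05, 0.2) -> ~13.35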
citrix-openstack-build/debtcollector
debtcollector/removals.py
Python
apache-2.0
3,792
0
# Copyright 2014 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import functools import inspect from oslo_utils import reflection import wrapt from debtcollector import _utils def remove(f=None, message=None, version=None, removal_version=None, stacklevel=3): """Decorates a function, method, or class to emit a deprecation warning :param str message: A message to include in the deprecation warning :param str version: Specify what version the removed function is present in :param str removal_version: What version the function will be removed. If '?' is used this implies an undefined future version :param int stacklevel: How many entries deep in the call stack before ignoring """ if f is None: return functools.partial(remove, message=message, version=version, removal_version=removal_version, stacklevel=stacklevel) @wrapt.decorator def wrapper(f, instance, args, kwargs): try: # Prefer the py3.x name (if we can get at it...) f_name = f.__qualname__ qualified = True if inspect.isclass(f): _prefix_pre = "Using class" else: _pr
efix_pre = "Using function/method" except AttributeError: f_name = f.__name__ qualified = False if not qualified: _prefix_pre = "Using function/method" if instance is None: # Decorator was used on a class if inspect.isclass(f): _prefix_pre = "Using class" module_name = inspect.getmodule(f
).__name__
                    if module_name == '__main__':
                        f_name = reflection.get_class_name(
                            f, fully_qualified=False)
                    else:
                        f_name = reflection.get_class_name(
                            f, fully_qualified=True)
                    base_name = None
                # Decorator was used on a function
                else:
                    module_name = inspect.getmodule(f).__name__
                    if module_name != '__main__':
                        f_name = reflection.get_callable_name(f)
                    base_name = None
            # Decorator was used on a classmethod or instancemethod
            else:
                base_name = reflection.get_class_name(instance,
                                                      fully_qualified=False)
            if base_name:
                function_name = ".".join([base_name, f_name])
            else:
                function_name = f_name
        else:
            function_name = f_name
        _prefix = _prefix_pre + " %s is deprecated" % function_name
        out_message = _utils.generate_message(
            _prefix, version=version,
            removal_version=removal_version,
            message=message)
        _utils.deprecation(out_message, stacklevel)
        return f(*args, **kwargs)
    return wrapper(f)
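# --- Usage sketch for the decorator above (module alias and version strings
# are illustrative):
#
# from debtcollector import removals
#
# @removals.remove(message="use new_helper() instead",
#                  version="1.0", removal_version="2.0")
# def old_helper():
#     pass
#
# Calling old_helper() then emits a deprecation warning assembled from the
# prefix, version, and removal_version handled in the wrapper above.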
F5Networks/f5-common-python
f5/bigip/tm/sys/test/functional/test_folder.py
Python
apache-2.0
3,258
0
# Copyright 2016 F5 Networks Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from requests import HTTPError TESTDESCRIPTION = "TESTDESCRIPTION" def setup_folder_test(request, mgmt_root, name, subpath): def teardown(): '''Remove the f1 folder only. We don't want to delete all folders because some of them are system folders that we didn't create. ''' try: f1.delete() except HTTPError as err: if err.response.status_code != 404: raise request.addfinalizer(teardown) fc1 = mgmt_root.tm.sys.folders f1 = fc1.folder.create(name=name, subPath=subpath) return f1, fc1 class TestFolder(object): def test_CURDL(self, request, mgmt_root): # Create f1, fc1 = setup_folder_test(request, mgmt_root, 'testfolder', '/') assert f1.name == 'testfolder' assert f1.subPath == '/' assert f1.fullPath == '/testfolder' # Load - Test with the various partition/name combinations f2 = fc1.folder.load(partition='testfolder') f3 = fc1.folder.load(name='testfolder') for f in [f2, f3]: assert f.name == f1.name assert f.generation == f1.generation # Update - Make sure that the deviceGroup logic is working f1.description = TESTDESCRIPTION f1.update() assert f1.description == TESTDESCRIPTION assert f1.deviceGroup == 'none' assert f1.inheritedDevicegroup == 'true' assert f1.generation > f2.generation # Refresh f2.refresh() assert f1.generation == f2.generation # We assume delete is taken care of by teardown def test_load_root_folder_by_name(self, mgmt_root): fc = mgmt_root.tm.sys.folders f = fc.folder.load(name='/') assert f.name == '/'
assert f.fullPath == '/' def test_load_root_folder_by_partition(self, mgmt_root): fc = mgmt_root.tm.sys.folders f = fc.folder.load(partition='/') assert f.name == '/' assert f.fullPath == '/' def test_load_root_no_attributes(self, mgmt_root): fc = mgmt_root.tm.sys.folders f = fc.folder.load() asse
rt f.name == '/' assert f.fullPath == '/' class TestFolderCollection(object): def test_get_collection(self, request, mgmt_root): setup_folder_test(request, mgmt_root, 'testfolder', '/') fc = mgmt_root.tm.sys.folders folders = fc.get_collection() assert len(folders) found_folder = False for folder in folders: if folder.__dict__.get('name', '') == 'testfolder': found_folder = True break assert found_folder
mluo613/osf.io
osf/migrations/0026_preprintservice_license.py
Python
apache-2.0
583
0.001715
# -*- coding: utf-8 -*- # Generated by
Django 1.9 on 2016-12-09 21:59 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('osf', '0025_preprintprovider_social_instagram'), ] operations = [ migrations.AddField( model_name='preprintservice',
name='license', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='osf.NodeLicenseRecord'), ), ]
huntxu/neutron
neutron/tests/functional/agent/test_dhcp_agent.py
Python
apache-2.0
17,256
0.000174
# Copyright (c) 2015 Red Hat, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy import os.path import eventlet import fixtures import mock import netaddr from neutron_lib import constants as lib_const from oslo_config import fixture as fixture_config from oslo_utils import uuidutils from neutron.agent.common import ovs_lib from neutron.agent.dhcp import agent from neutron.agent import dhcp_agent from neutron.agent.linux import dhcp from neutron.agent.linux import external_process from neutron.agent.linux import interface from neutron.agent.linux import ip_lib from neutron.agent.linux import utils from neutron.common import utils as common_utils from neutron.conf.agent import common as config from neutron.tests.common import net_helpers from neutron.tests.functional.agent.linux import helpers from neutron.tests.functional import base class DHCPAgentOVSTestFramework(base.BaseSudoTestCase): _DHCP_PORT_MAC_ADDRESS = netaddr.EUI("24:77:03:7d:00:4c") _DHCP_PORT_MAC_ADDRESS.dialect = netaddr.mac_unix _TENANT_PORT_MAC_ADDRESS = netaddr.EUI("24:77:03:7d:00:3a") _TENANT_PORT_MAC_ADDRESS.dialect = netaddr.mac_unix _IP_ADDRS = {
4: {'addr': '192.168.10.11', 'cidr': '192.168.10.0/24', 'gateway': '192.168.10.1'}, 6: {'addr': '2001:db8:0:1::c0a8:a0b', 'cidr': '20
01:db8:0:1::c0a8:a00/120', 'gateway': '2001:db8:0:1::c0a8:a01'}, } def setUp(self): super(DHCPAgentOVSTestFramework, self).setUp() config.setup_logging() self.conf_fixture = self.useFixture(fixture_config.Config()) self.conf = self.conf_fixture.conf dhcp_agent.register_options(self.conf) # NOTE(cbrandily): TempDir fixture creates a folder with 0o700 # permissions but agent dir must be readable by dnsmasq user (nobody) agent_config_dir = self.useFixture(fixtures.TempDir()).path self.useFixture( helpers.RecursivePermDirFixture(agent_config_dir, 0o555)) self.conf.set_override("dhcp_confs", agent_config_dir) self.conf.set_override( 'interface_driver', 'neutron.agent.linux.interface.OVSInterfaceDriver') self.conf.set_override('report_interval', 0, 'AGENT') br_int = self.useFixture(net_helpers.OVSBridgeFixture()).bridge self.conf.set_override('ovs_integration_bridge', br_int.br_name) self.mock_plugin_api = mock.patch( 'neutron.agent.dhcp.agent.DhcpPluginApi').start().return_value mock.patch('neutron.agent.rpc.PluginReportStateAPI').start() self.agent = agent.DhcpAgentWithStateReport('localhost') self.ovs_driver = interface.OVSInterfaceDriver(self.conf) self.conf.set_override('check_child_processes_interval', 1, 'AGENT') def network_dict_for_dhcp(self, dhcp_enabled=True, ip_version=4, prefix_override=None): net_id = uuidutils.generate_uuid() subnet_dict = self.create_subnet_dict( net_id, dhcp_enabled, ip_version, prefix_override) port_dict = self.create_port_dict( net_id, subnet_dict.id, mac_address=str(self._DHCP_PORT_MAC_ADDRESS), ip_version=ip_version) port_dict.device_id = common_utils.get_dhcp_agent_device_id( net_id, self.conf.host) net_dict = self.create_network_dict( net_id, [subnet_dict], [port_dict]) return net_dict def create_subnet_dict(self, net_id, dhcp_enabled=True, ip_version=4, prefix_override=None): cidr = self._IP_ADDRS[ip_version]['cidr'] if prefix_override is not None: cidr = '/'.join((cidr.split('/')[0], str(prefix_override))) sn_dict = dhcp.DictModel({ "id": uuidutils.generate_uuid(), "network_id": net_id, "ip_version": ip_version, "cidr": cidr, "gateway_ip": (self. 
_IP_ADDRS[ip_version]['gateway']), "enable_dhcp": dhcp_enabled, "dns_nameservers": [], "host_routes": [], "ipv6_ra_mode": None, "ipv6_address_mode": None}) if ip_version == 6: sn_dict['ipv6_address_mode'] = lib_const.DHCPV6_STATEFUL return sn_dict def create_port_dict(self, network_id, subnet_id, mac_address, ip_version=4, ip_address=None): ip_address = (self._IP_ADDRS[ip_version]['addr'] if not ip_address else ip_address) port_dict = dhcp.DictModel({ "id": uuidutils.generate_uuid(), "name": "foo", "mac_address": mac_address, "network_id": network_id, "admin_state_up": True, "device_id": uuidutils.generate_uuid(), "device_owner": "foo", "fixed_ips": [{"subnet_id": subnet_id, "ip_address": ip_address}], }) return port_dict def create_network_dict(self, net_id, subnets=None, ports=None, non_local_subnets=None): subnets = [] if not subnets else subnets ports = [] if not ports else ports non_local_subnets = [] if not non_local_subnets else non_local_subnets net_dict = dhcp.NetModel(d={ "id": net_id, "subnets": subnets, "non_local_subnets": non_local_subnets, "ports": ports, "admin_state_up": True, "tenant_id": uuidutils.generate_uuid(), }) return net_dict def get_interface_name(self, network, port): device_manager = dhcp.DeviceManager(conf=self.conf, plugin=mock.Mock()) return device_manager.get_interface_name(network, port) def configure_dhcp_for_network(self, network, dhcp_enabled=True): self.agent.configure_dhcp_for_network(network) self.addCleanup(self._cleanup_network, network, dhcp_enabled) def _cleanup_network(self, network, dhcp_enabled): self.mock_plugin_api.release_dhcp_port.return_value = None if dhcp_enabled: self.agent.call_driver('disable', network) def assert_dhcp_resources(self, network, dhcp_enabled): ovs = ovs_lib.BaseOVS() port = network.ports[0] iface_name = self.get_interface_name(network, port) self.assertEqual(dhcp_enabled, ovs.port_exists(iface_name)) self.assert_dhcp_namespace(network.namespace, dhcp_enabled) self.assert_accept_ra_disabled(network.namespace) self.assert_dhcp_device(network.namespace, iface_name, dhcp_enabled) def assert_dhcp_namespace(self, namespace, dhcp_enabled): self.assertEqual(dhcp_enabled, ip_lib.network_namespace_exists(namespace)) def assert_accept_ra_disabled(self, namespace): actual = ip_lib.IPWrapper(namespace=namespace).netns.execute( ['sysctl', '-b', 'net.ipv6.conf.default.accept_ra']) self.assertEqual('0', actual) def assert_dhcp_device(self, namespace, dhcp_iface_name, dhcp_enabled): dev = ip_lib.IPDevice(dhcp_iface_name, namespace) self.assertEqual(dhcp_enabled, ip_lib.device_exists( dhcp_iface_name, namespace)) if dhcp_enabled: self.assertEqual(self._DHCP_PORT_MAC_ADDRESS, dev.link.address) def _plug_port_for_dhcp_request(self, network, port): namespace = network.namespace vif_name = self.get_interface_name(network.id, port) self.ovs_driver.plug(network.id, port.id, vif_name, port.mac_address,
DedMemez/ODS-August-2017
building/DistributedSuitInterior.py
Python
apache-2.0
16,068
0.004232
# toontown.building.DistributedSuitInterior
from panda3d.core import Point3, Vec3, headsUp
from direct.interval.IntervalGlobal import *
from direct.distributed.ClockDelta import *
from ElevatorConstants import *
import ElevatorUtils
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import ToontownBattleGlobals
from direct.directnotify import DirectNotifyGlobal
from direct.fsm import ClassicFSM, State
from direct.distributed import DistributedObject
from direct.fsm import State
from toontown.battle import BattleBase
from toontown.hood import ZoneUtil
from toontown.suit import SuitDNA

class DistributedSuitInterior(DistributedObject.DistributedObject):
    id = 0

    def __init__(self, cr):
        DistributedObject.DistributedObject.__init__(self, cr)
        self.toons = []
        self.activeIntervals = {}
        self.openSfx = loader.loadSfx('phase_5/audio/sfx/elevator_door_open.ogg')
        self.closeSfx = loader.loadSfx('phase_5/audio/sfx/elevator_door_close.ogg')
        self.suits = []
        self.reserveSuits = []
        self.joiningReserves = []
        self.distBldgDoId = None
        self.currentFloor = -1
        self.numFloors = None
        self.elevatorName = self.__uniqueName('elevator')
        self.floorModel = None
        self.elevatorOutOpen = 0
        self.BottomFloor_SuitPositions = [Point3(0, 15, 0), Point3(10, 20, 0), Point3(-7, 24, 0), Point3(-10, 0, 0)]
        self.BottomFloor_SuitHs = [75, 170, -91, -44]
        self.Cubicle_SuitPositions = [Point3(0, 18, 0), Point3(10, 12, 0), Point3(-9, 11, 0), Point3(-3, 13, 0)]
        self.Cubicle_SuitHs = [170, 56, -52, 10]
        self.BossOffice_SuitPositions = [Point3(0, 15, 0), Point3(10, 20, 0), Point3(-10, 6, 0), Point3(-17, 34, 11)]
        self.BossOffice_SuitHs = [170, 120, 12, 38]
        self.waitMusic = loader.loadMusic('phase_7/audio/bgm/encntr_toon_winning_indoor.ogg')
        self.elevatorMusic = loader.loadMusic('phase_7/audio/bgm/tt_elevator.ogg')
        self.fsm = ClassicFSM.ClassicFSM('DistributedSuitInterior', [State.State('WaitForAllToonsInside', self.enterWaitForAllToonsInside, self.exitWaitForAllToonsInside, ['Elevator']),
         State.State('Elevator', self.enterElevator, self.exitElevator, ['Battle']),
         State.State('Battle', self.enterBattle, self.exitBattle, ['Resting', 'Reward', 'ReservesJoining']),
         State.State('ReservesJoining', self.enterReservesJoining, self.exitReservesJoining, ['Battle']),
         State.State('Resting', self.enterResting, self.exitResting, ['Elevator']),
         State.State('Reward', self.enterReward, self.exitReward, ['Off']),
         State.State('Off', self.enterOff, self.exitOff, ['Elevator', 'WaitForAllToonsInside', 'Battle'])], 'Off', 'Off')
        self.fsm.enterInitialState()
        return

    def __uniqueName(self, name):
        DistributedSuitInterior.id += 1
        return name + '%d' % DistributedSuitInterior.id

    def generate(self):
        DistributedObject.DistributedObject.generate(self)
        self.announceGenerateName = self.uniqueName('generate')
        self.accept(self.announceGenerateName, self.handleAnnounceGenerate)
        self.elevatorModelIn = loader.loadModel('phase_4/models/modules/elevator')
        self.leftDoorIn = self.elevatorModelIn.find('**/left-door')
        self.rightDoorIn = self.elevatorModelIn.find('**/right-door')
        self.elevatorModelOut = loader.loadModel('phase_4/models/modules/elevator')
        self.leftDoorOut = self.elevatorModelOut.find('**/left-door')
        self.rightDoorOut = self.elevatorModelOut.find('**/right-door')

    def setElevatorLights(self, elevatorModel):
        npc = elevatorModel.findAllMatches('**/floor_light_?;+s')
        for i in xrange(npc.getNumPaths()):
            np = npc.getPath(i)
            floor = int(np.getName()[-1:]) - 1
            if floor == self.currentFloor:
np.setColor(LIGHT_ON_COLOR) elif floor < self.numFloors: np.setColor(LIGHT_OFF_COLOR) else: np.hide() def handleAnnounceGenerate(self, obj): self.ignore(self.announceGenerateName) self.sendUpdate('setAvatarJoined', []) def disable(self): self.fsm.requestFinalState() self.__cleanupIntervals() self.ignoreAll() self.__cleanup() DistributedObject.DistributedObject.disable(self) def delete(self): del self.waitMusic del self.elevatorMusic del self.openSfx del self.closeSfx del self.fsm DistributedObject.DistributedObject.delete(self) def __cleanup(self): self.toons = [] self.suits = [] self.reserveSuits = [] self.joiningReserves = [] if self.elevatorMod
elIn != None:
self.elevatorModelIn.removeNode() if self.elevatorModelOut != None: self.elevatorModelOut.removeNode() if self.floorModel != None: self.floorModel.removeNode() self.leftDoorIn = None self.rightDoorIn = None self.leftDoorOut = None self.rightDoorOut = None return def __addToon(self, toon): self.accept(toon.uniqueName('disable'), self.__handleUnexpectedExit, extraArgs=[toon]) def __handleUnexpectedExit(self, toon): self.notify.warning('handleUnexpectedExit() - toon: %d' % toon.doId) self.__removeToon(toon, unexpected=1) def __removeToon(self, toon, unexpected = 0): if self.toons.count(toon) == 1: self.toons.remove(toon) self.ignore(toon.uniqueName('disable')) def __finishInterval(self, name): if name in self.activeIntervals: interval = self.activeIntervals[name] if interval.isPlaying(): interval.finish() def __cleanupIntervals(self): for interval in self.activeIntervals.values(): interval.finish() self.activeIntervals = {} def __closeInElevator(self): self.leftDoorIn.setPos(3.5, 0, 0) self.rightDoorIn.setPos(-3.5, 0, 0) def getZoneId(self): return self.zoneId def setZoneId(self, zoneId): self.zoneId = zoneId def getExtZoneId(self): return self.extZoneId def setExtZoneId(self, extZoneId): self.extZoneId = extZoneId def getDistBldgDoId(self): return self.distBldgDoId def setDistBldgDoId(self, distBldgDoId): self.distBldgDoId = distBldgDoId def setNumFloors(self, numFloors): self.numFloors = numFloors def setToons(self, toonIds, hack): self.toonIds = toonIds oldtoons = self.toons self.toons = [] for toonId in toonIds: if toonId != 0: if toonId in self.cr.doId2do: toon = self.cr.doId2do[toonId] toon.stopSmooth() self.toons.append(toon) if oldtoons.count(toon) == 0: self.__addToon(toon) else: self.notify.warning('setToons() - no toon: %d' % toonId) for toon in oldtoons: if self.toons.count(toon) == 0: self.__removeToon(toon) def setSuits(self, suitIds, reserveIds, values): oldsuits = self.suits self.suits = [] self.joiningReserves = [] for suitId in suitIds: if suitId in self.cr.doId2do: suit = self.cr.doId2do[suitId] self.suits.append(suit) suit.fsm.request('Battle') suit.buildingSuit = 1 suit.reparentTo(render) if oldsuits.count(suit) == 0: self.joinin
Xobb/fabric-bolt
src/fabric_bolt/projects/views.py
Python
mit
17,972
0.003561
""" Views for the Projects App """ import datetime import subprocess import sys from django.http import StreamingHttpResponse, HttpResponseRedirect from django.db.models.aggregates import Count from django.contrib import messages from django.views.generic import CreateView, UpdateView, DetailView, View, DeleteView, RedirectView from django.core.urlresolvers import reverse_lazy, reverse from django.shortcuts import get_object_or_404 from django.forms import CharField, PasswordInput, Select, FloatField, BooleanField from django.conf import settings from django_tables2 import RequestConfig from django_tables2.views import SingleTableView from fabric.main import find_fabfile, load_fabfile, _task_names from fabric_bolt.hosts.models import Host from fabric_bolt.projects import forms, tables, models # These options are passed to Fabric as: fab task --abort-on-prompts=True --user=root ... fabric_special_options = ['no_agent', 'forward-agent', 'config', 'disable-known-hosts', 'keepalive', 'password', 'parallel', 'no-pty', 'reject-unknown-hosts', 'skip-bad-hosts', 'timeout', 'command-timeout', 'user', 'warn-only', 'pool-size'] def get_fabric_tasks(request): """ Generate a list of fabric tasks that are available """ try: docstring, callables, default = load_fabfile(settings.FABFILE_PATH) all_tasks = _task_names(callables) dict_with_docs = {task: callables[task].__doc__ for task in all_tasks} except Exception as e: messages.error(request, 'Error loading fabfile: ' + str(e)) dict_with_docs = {} return dict_with_docs class BaseGetProjectCreateView(CreateView): """ Reusable class for create views that need the project pulled in """ def dispatch(self, request, *args, **kwargs): # Lets set the project so we can use it later project_id = kwargs.get('project_id') self.project = models
.Project.objects.get(pk=project_id) return super(BaseGetProjectCreateView, self).dispatch(request, *args, **kwargs) class ProjectList(SingleTableView): """ Project List page """ table_class = tables.ProjectTable model = models.Project queryset = models.Project.active_records.all() clas
s ProjectCreate(CreateView): """ Create a new project """ model = models.Project form_class = forms.ProjectCreateForm template_name_suffix = '_create' def form_valid(self, form): """After the form is valid lets let people know""" ret = super(ProjectCreate, self).form_valid(form) # Good to make note of that messages.add_message(self.request, messages.SUCCESS, 'Project %s created' % self.object.name) return ret class ProjectDetail(DetailView): """ Display the Project Detail/Summary page: Configurations, Stages, and Deployments """ model = models.Project def get_context_data(self, **kwargs): context = super(ProjectDetail, self).get_context_data(**kwargs) configuration_table = tables.ConfigurationTable(self.object.project_configurations(), prefix='config_') RequestConfig(self.request).configure(configuration_table) context['configurations'] = configuration_table stages = self.object.get_stages().annotate(deployment_count=Count('deployment')) context['stages'] = stages stage_table = tables.StageTable(stages, prefix='stage_') RequestConfig(self.request).configure(stage_table) context['stage_table'] = stage_table deployment_table = tables.DeploymentTable(models.Deployment.objects.filter(stage__in=stages).select_related('stage', 'task'), prefix='deploy_') RequestConfig(self.request).configure(deployment_table) context['deployment_table'] = deployment_table return context class ProjectUpdate(UpdateView): """ Update a project """ model = models.Project form_class = forms.ProjectUpdateForm template_name_suffix = '_update' success_url = reverse_lazy('projects_project_list') class ProjectDelete(DeleteView): """ Deletes a project by setting the Project's date_deleted. We save projects for historical tracking. """ model = models.Project def delete(self, request, *args, **kwargs): self.object = self.get_object() self.object.date_deleted = datetime.datetime.now() self.object.save() messages.add_message(request, messages.WARNING, 'Project {} Successfully Deleted'.format(self.object)) return HttpResponseRedirect(reverse('projects_project_list')) class ProjectConfigurationCreate(BaseGetProjectCreateView): """ Create a Project Configuration. These are used to set the Fabric env object for a task. 
""" model = models.Configuration template_name_suffix = '_create' form_class = forms.ConfigurationCreateForm def form_valid(self, form): """Set the project on this configuration after it's valid""" self.object = form.save(commit=False) self.object.project = self.project if self.kwargs.get('stage_id', None): current_stage = models.Stage.objects.get(pk=self.kwargs.get('stage_id')) self.object.stage = current_stage self.object.save() # Good to make note of that messages.add_message(self.request, messages.SUCCESS, 'Configuration %s created' % self.object.key) return super(ProjectConfigurationCreate, self).form_valid(form) def get_success_url(self): success_url = super(ProjectConfigurationCreate, self).get_success_url() if self.object.stage: success_url = reverse('projects_stage_view', args=(self.object.pk, self.object.stage.pk)) return success_url class ProjectConfigurationUpdate(UpdateView): """ Update a Project Configuration """ model = models.Configuration template_name_suffix = '_update' form_class = forms.ConfigurationUpdateForm class ProjectConfigurationDelete(DeleteView): """ Delete a project configuration from a project """ model = models.Configuration def dispatch(self, request, *args, **kwargs): return super(ProjectConfigurationDelete, self).dispatch(request, *args, **kwargs) def get_success_url(self): """Get the url depending on what type of configuration I deleted.""" if self.stage_id: url = reverse('projects_stage_view', args=(self.project_id, self.stage_id)) else: url = reverse('projects_project_view', args=(self.project_id,)) return url def delete(self, request, *args, **kwargs): obj = self.get_object() # Save where I was before I go and delete myself self.project_id = obj.project.pk self.stage_id = obj.stage.pk if obj.stage else None messages.success(self.request, 'Configuration {} Successfully Deleted'.format(self.get_object())) return super(ProjectConfigurationDelete, self).delete(self, request, *args, **kwargs) class DeploymentCreate(CreateView): """ Form to create a new Deployment for a Project Stage. POST will kick off the DeploymentOutputStream view. """ model = models.Deployment form_class = forms.DeploymentForm def dispatch(self, request, *args, **kwargs): #save the stage for later self.stage = get_object_or_404(models.Stage, pk=int(kwargs['pk'])) all_tasks = get_fabric_tasks(self.request) if self.kwargs['task_name'] not in all_tasks: messages.error(self.request, '"{}" is not a valid task.'. format(self.kwargs['task_name'])) return HttpResponseRedirect(reverse('projects_stage_view', kwargs={'project_id': self.stage.project_id, 'pk': self.stage.pk })) self.task_name = self.kwargs['task_name'] self.task_description = all_tasks.get(self.task_name, None) return super(DeploymentCreate, self).dispatch(request, *args, **kwargs) def get_form(self, form_class): stage_configurations = self.stage.get_queryset_configurations(prompt_me_f
coxmediagroup/django-shorturls
src/shorturls/urls.py
Python
bsd-3-clause
255
0.039216
from django.conf import s
ettings
from django.conf.urls.defaults import *

urlpatterns = patterns('',
    url(
        regex = r'^(?P<prefix>%s)(?P<tiny>\w+)$' % '|'.join(settings.SHORTEN_MODELS.
keys()), view = 'shorturls.views.redirect', ), )
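# --- Editor's sketch (assumed settings; prefixes and model labels are
# illustrative). The pattern above ORs the SHORTEN_MODELS keys together:
#
# SHORTEN_MODELS = {
#     'B': 'blog.entry',
#     'P': 'media.photo',
# }
#
# yields the regex ^(?P<prefix>B|P)(?P<tiny>\w+)$, so /Babc is handled by
# shorturls.views.redirect with prefix='B' and tiny='abc'.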
the-blue-alliance/the-blue-alliance
src/backend/common/helpers/rankings_helper.py
Python
mit
1,985
0.001008
from typing import List, Optional from backend.common.consts.ranking_sort_orders import SORT_ORDER_INFO from backend.common.models.event_details import EventDetails from backend.common.models.event_ranking import EventRanking from backend.common.models.event_team_status import WLTRecord from backend.common.models.keys import TeamKey, Year from backend.common.models.ranking_sort_order_info import RankingSortOrderInfo class RankingsHelper: NO_RECORD_YEARS = {2010, 2015, 2021} QUAL_AVERAGE_YEARS = {2015} @classmethod def build_ranking( cls, year: Year, rank: int, team_key: TeamKey, wins: int, losses: int, ties: int, qual_average: Optional[float], matches_played: int, dq: int, sort_orders: List[float], ) -> EventRanking: record: Optional[WLTRecord] = None if year not in cls.NO_RECORD_YEARS: record = { "wins": int(wins), "losses": int(losses), "ties": int(ties),
} if year not in cls.QUAL_AVERAGE_YEARS: qual_average = None sort_orders_sanitized = [] for so in sort_orders: try: sort_orders_sanitized.append(float(so)) except Exception: sort_orders_sanitized.append(0.0) return { "rank": int(rank),
"team_key": team_key, "record": record, # None if record doesn't affect rank (e.g. 2010, 2015) "qual_average": qual_average, # None if qual_average doesn't affect rank (all years except 2015) "matches_played": int(matches_played), "dq": int(dq), "sort_orders": sort_orders_sanitized, } @classmethod def get_sort_order_info( cls, event_details: EventDetails ) -> Optional[List[RankingSortOrderInfo]]: return SORT_ORDER_INFO.get(event_details.game_year)
d53dave/cgopt
csaopt/instancemanager/awstools.py
Python
mit
13,839
0.004119
import boto3 import logging import time from string import Template from pyhocon import ConfigTree from botocore.exceptions import ClientError from typing import List, Any, Tuple, Dict from . import Instance from .instancemanager import InstanceManager from ..utils import random_str, random_int log = logging.getLogger() def _
interpolate_userscript_template_vals(script: bytes, **kwargs: str) -> bytes: return Template(script.decode('utf-8')).substitute(kwargs).encode() def _has_exit_status(instance) -> bool: instance.reload() return instance.state['Name'] == 'shutting-down' or instance.state['Name'] == 'terminated' class AWSTools(InstanceManager): """The AWSTools class provides an abstraction over boto3 and EC2 for the use with CSAOpt This is a context manager and crea
tes required instances on `__enter__()`, disposing of the managed instances in `__exit__()`. These two methods as well as :meth:`instancemanager.awstools.AWSTools.get_running_instances` are the only methods called by the Runner (i.e. the only public methods). This class will use boto3 to (1) create a security group, (2) configure ingress to the broker backend (currently Redis, as used by Dramatiq). It then (3) creates as many worker instances as requested and runs 'user-data' scripts after startup, which is to say, bash scripts that set up and the required software (Redis, CSAOpt Worker, etc.). After the run AWSTools (4) terminates all managed instances and removes the security group. Note: If the AWS credentials are not provided in the config file, boto3 will look into the following environment variables: `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` How to create IAM credentials (i.e. AWS keys): * Create (or reuse) IAM user with programmatic access * Assign to a (potentially new) group with AmazonEC2FullAccess * Store the access key and secret key Args: config: Configuration for current optimization run internal_conf: Internal CSAOpt configuration """ def __init__(self, config: ConfigTree, internal_conf: ConfigTree) -> None: self.region = config.get('remote.aws.region', internal_conf['remote.aws.default_region']) if config.get('remote.aws.secret_key', False) and config.get('remote.aws.access_key', False): self.ec2_resource: boto3.session.Session.resource = boto3.resource( 'ec2', aws_access_key_id=config['remote.aws.access_key'], aws_secret_access_key=config['remote.aws.secret_key'], region_name=self.region) else: # This will look for the env variables self.ec2_resource: boto3.session.Session.resource = boto3.resource('ec2', region_name=self.region) self.ec2_client = self.ec2_resource.meta.client # ec2.Instance is of <class 'boto3.resources.factory.ec2.Instance'> but this cannot be # used as a type hint here because it is generated by the factory at runtime, I assume. 
self.workers: List[Any] = [] self.broker: Any = None self.security_group_prefix: str = internal_conf.get('remote.aws.security_group_prefix', 'csaopt_') self.security_group_id: str = '' self.worker_count: int = config['remote.aws.worker_count'] worker_ami_key = 'remote.aws.worker_ami' broker_ami_key = 'remote.aws.broker_ami' self.broker_ami = config.get(broker_ami_key, internal_conf[broker_ami_key]) self.worker_ami = config.get(worker_ami_key, internal_conf[worker_ami_key]) self.timeout_provision = config.get('remote.aws.timeout_provision', internal_conf['remote.aws.timeout_provision']) self.timeout_startup = config.get('remote.aws.timeout_startup', internal_conf['remote.aws.timeout_startup']) self.broker_port = internal_conf.get('broker.defaults.remote_port') self.broker_password = config.get('remote.aws.instances.broker_password', None) if self.broker_password is None: self.broker_password = random_str(32) self.debug_on_cpu = config.get('debug.gpu_simulator', '') self.terminate_on_exit = config.get('remote.terminate_on_exit', False) self.use_existing_instances = False existing_instances = config.get('remote.aws.instances', None) if existing_instances is not None: self.use_existing_instances = True self.existing_instances = existing_instances self.provision_args: Dict[str, str] = { 'broker_image': config.get('remote.aws.broker_ami', internal_conf['remote.aws.broker_ami']), 'worker_image': config.get('remote.aws.worker_ami', internal_conf['remote.aws.worker_ami']), 'broker_instance_type': config.get('remote.aws.broker_instance_type', internal_conf['remote.aws.broker_instance_type']), 'worker_instance_type': config.get('remote.aws.worker_instance_type', internal_conf['remote.aws.worker_instance_type']) } data_base = internal_conf['remote.aws.userdata_rel_path'] with open(data_base + '-broker.sh', 'rb') as broker_data, open(data_base + '-worker.sh', 'rb') as worker_data: self.user_data_scripts: Dict[str, bytes] = {'broker': broker_data.read(), 'worker': worker_data.read()} def _get_from_ids(self, broker_id: str, worker_ids: List[str]) -> Tuple[Any, Any]: broker = self.ec2_resource.Instance(broker_id) workers = map(lambda worker_id: self.ec2_resource.Instance(worker_id), worker_ids) return broker, list(workers) def _provision_instances(self, timeout_ms: int, count: int = 2, **kwargs: str) -> Tuple[Any, Any]: """Start and configure instances Args: timeout_ms: General timeout for the provisioning of requested instances count: number of worker instances to be created kwargs: Any other parameters that are required for startup """ broker_userdata = _interpolate_userscript_template_vals( self.user_data_scripts['broker'], external_port=self.broker_port, redis_password=self.broker_password) broker = self.ec2_resource.create_instances( ImageId=kwargs['broker_image'], MinCount=1, MaxCount=1, UserData=broker_userdata, SecurityGroupIds=[self.security_group_id], InstanceType=kwargs['broker_instance_type'])[0] worker_userdata = _interpolate_userscript_template_vals( self.user_data_scripts['worker'], debug='1' if self.debug_on_cpu else 'off', redis_host=broker.private_ip_address, redis_port=self.broker_port, redis_password=self.broker_password) workers = self.ec2_resource.create_instances( ImageId=kwargs['worker_image'], MinCount=count, MaxCount=count, InstanceType=kwargs['worker_instance_type'], UserData=worker_userdata, SecurityGroupIds=[self.security_group_id]) return broker, workers def __map_ec2_instance(self, instance: Any, is_broker: bool = False, **kwargs: Any) -> Instance: """Maps a boto/EC2 
instance to the internal Instance type Args: instance: Instance object returned by boto3 (which has a runtime type and therefore untyped here) is_broker: Flag indicating whether a given instance is a broker or not kwargs: Any other parameters that should be available on the produced object Returns: An abstract instance object """ return Instance(instance.id, instance.public_ip_address, is_broker=is_broker, **kwargs) def get_running_instances(self) -> Tuple[Instance, List[Instance]]: """Update and get currently managed instances Returns: A tuple of broker, [worker] """ self.broker.reload() for worker in self.workers: worker.reload() broker_ins
SUSE/azure-sdk-for-python
azure-servicefabric/azure/servicefabric/version.py
Python
mit
494
0
# coding=utf-8 # --------------------------------------
------------------------------------ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is
# regenerated. # -------------------------------------------------------------------------- VERSION = "5.6.130"
rdmilligan/SaltwashAR
scripts/robot.py
Python
gpl-3.0
3,499
0.003715
# Copyright (C) 2015 Ross D Milligan # GNU GENERAL PUBLIC LICENSE Version 3 (full notice can be found at https://github.com/rdmilligan/SaltwashAR) from rockyrobotframes import * from sportyrobotframes import * from constants import * class Robot: def __init__(self): self.body_frame = None self.head_passive_frames = None self.head_speaking_frames = None self.head_happy_frames = None self.head_sad_frames = None self.head_angry_frames = None self.degrees_90_frame = None self.degrees_180_frame = None self.degrees_270_frame = None self.head_frame_index = 0 self.is_rendered = False self.is_facing = False # reset robot def reset(self): self.is_rendered = False self.is_facing = False # get next frame def next_frame(self, rotation, is_speaking, emotion): self.is_rendered = True # handle any rotation if rotation != 0: self.is_facing = False if rotation == 1: glCallList(self.degrees_90_frame) elif rotation == 2: glCallList(self.degrees_180_frame) elif rotation == 3: glCallList(self.degrees_270_frame) return # otherwise handle facing robot self.is_facing = True glCallList(self.body_frame) if is_speaking: self._render_head(self.head_speaking_frames) elif emotion == HAPPY: self._render_head(self.head_happy_frames) elif emotion == SAD: self._render_head(self.head_sad_frames) elif emotion == ANGRY: self._render_head(
self.head_angry_frames) else: self._render_head(self.head_passive_frames) # render the robot's head def _render_head(self, frames): self.head_frame_index += 1 if self.head_frame_index >= len(frames): sel
f.head_frame_index = 0 glCallList(frames[self.head_frame_index]) class RockyRobot(Robot): # load frames def load_frames(self, is_animated): self.body_frame = rocky_robot_body_frame() self.head_passive_frames = rocky_robot_head_passive_frames(is_animated) self.head_speaking_frames = rocky_robot_head_speaking_frames(is_animated) self.head_happy_frames = rocky_robot_head_happy_frames(is_animated) self.head_sad_frames = rocky_robot_head_sad_frames(is_animated) self.head_angry_frames = rocky_robot_head_angry_frames(is_animated) self.degrees_90_frame = rocky_robot_degrees_90_frame() self.degrees_180_frame = rocky_robot_degrees_180_frame() self.degrees_270_frame = rocky_robot_degrees_270_frame() class SportyRobot(Robot): # load frames def load_frames(self, is_animated): self.body_frame = sporty_robot_body_frame() self.head_passive_frames = sporty_robot_head_passive_frames(is_animated) self.head_speaking_frames = sporty_robot_head_speaking_frames(is_animated) self.head_happy_frames = sporty_robot_head_happy_frames(is_animated) self.head_sad_frames = sporty_robot_head_sad_frames(is_animated) self.head_angry_frames = sporty_robot_head_angry_frames(is_animated) self.degrees_90_frame = sporty_robot_degrees_90_frame() self.degrees_180_frame = sporty_robot_degrees_180_frame() self.degrees_270_frame = sporty_robot_degrees_270_frame()
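# --- Editor's usage sketch (assumes an active OpenGL context and the frame
# assets loaded by the *robotframes modules; HAPPY comes from constants):
#
# robot = RockyRobot()
# robot.load_frames(is_animated=True)
# robot.next_frame(rotation=0, is_speaking=False, emotion=HAPPY)  # facing, happy head
# robot.next_frame(rotation=2, is_speaking=False, emotion=HAPPY)  # 180-degree frame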
arsenovic/clifford
clifford/g3c.py
Python
bsd-3-clause
240
0
from . import Cl, conformalize layout_orig, blades_orig = Cl(3) layout, bla
des, stuff = conformalize(layout_orig) locals().update(blades) locals().update(stuff) # for shorter reprs layout.__name__ = 'layout' layout.__module__ =
__name__
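# --- Usage sketch: conformalize() exposes the up/down maps in `stuff`, so
# after the star-imports above a Euclidean point can be embedded in the
# conformal model and recovered (point values illustrative):
#
# from clifford.g3c import *
# P = up(2*e1 + e2)       # embed as a conformal null vector
# assert down(P) == 2*e1 + e2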
ahtn/keyplus
host-software/keyplus/constants/settings.py
Python
mit
3,115
0.004815
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright 2018 [email protected]
# Licensed under the MIT license (http://opensource.org/licenses/MIT)

from keyplus.utility import inverse_map

AES_KEY_LEN = 16

EP_VENDOR_SIZE = 64
VENDOR_REPORT_LEN = 64
FLASH_WRITE_PACKET_LEN = EP_VENDOR_SIZE - 5

SETTINGS_RF_INFO_SIZE = 64
SETTINGS_RF_INFO_HEADER_SIZE = (SETTINGS_RF_INFO_SIZE - AES_KEY_LEN*2)
SETTINGS_SIZE = 512

LAYOUT_HEADER_SIZE = 1

MAX_NUMBER_KEYBOARDS = 64
MAX_NUMBER_LAYOUTS = MAX_NUMBER_KEYBOARDS
MAX_NUMBER_DEVICES = 64
MAX_MATRIX_SIZE = 32

LAYOUT_ID_NONE = 0xfe
LAYOUT_ID_INVALID = 0xff

# Report
KEYBOARD_REPORT_MODE_AUTO = 0  # 6kro -> nkro if more than 6 keys pressed
KEYBOARD_REPORT_MODE_NKRO = 1  # nkro
KEYBOARD_REPORT_MODE_6KRO = 2  # 6kro

REPORT_MODE_STR_MAP = {
    KEYBOARD_REPORT_MODE_AUTO: "Auto NKRO",
    KEYBOARD_REPORT_MODE_6KRO: "6KRO",
    KEYBOARD_REPORT_MODE_NKRO: "NKRO",
}

def report_mode_to_str(mode):
    if mode in REPORT_MODE_STR_MAP:
        return REPORT_MODE_STR_MAP[mode]
    else:
        return "Unknown({})".format(mode)

# FEATURE_CTRL bit mask values
FEATURE_CTRL_USB_DISABLE      = (1 << 0)
FEATURE_CTRL_WIRED_DISABLE    = (1 << 1)
FEATURE_CTRL_RF_DISABLE       = (1 << 2)
FEATURE_CTRL_RF_MOUSE_DISABLE = (1 << 3)
FEATURE_CTRL_BT_DISABLE       = (1 << 4)
FEATURE_CTRL_RESERVED_0       = (1 << 5)
FEATURE_CTRL_RESERVED_1       = (1 << 6)
FEATURE_CTRL_RESERVED_2       = (1 << 7)

###############################################################################
#                          firmware info constants                           #
###############################################################################

SUPPORT_SCANNING_MASK = 0x01
SUPPORT_SCANNING_COL_ROW_MASK = 0x02
SUPPORT_SCANNING_ROW_COL_MASK = 0x04
SUPPORT_SCANNING_PINS_MASK = 0x08
SUPPORT_SCANNING_ARBITRARY_MASK = 0x10
SUPPORT_SCANNING_BUILT_IN_MASK = 0x20

SUPPORT_KEY_MEDIA = 0x01
SUPPORT_KEY_MOUSE = 0x02
SUPPORT_KEY_LAYERS = 0x04
SUPPORT_KEY_STICKY = 0x08
SUPPORT_KEY_TAP = 0x10
SUPPORT_KEY_HOLD = 0x20

SUPPORT_KRO_N = 0x01
SUPPORT_KRO_6 = 0x02

SUPPORT_LED_INDICATORS = 0x01
SUPPORT_LED_BACKLIGHTING = 0x02
SUPPORT_LED_WS2812 = 0x04

SUPPORT_NRF24 = 0x01
SUPPORT_I2C = 0x02
SUPPORT_UNIFYING = 0x04
SUPPORT_USB = 0x08
SUPPORT_BT = 0x10

VERSION_IS_STABLE = 0x01
VERSION_RESERVED_1 = 0x02
VERSION_RESERVED_2 = 0x04
VERSION_RESERVED_3 = 0x08

SUPPORT_MOUSE = 0x01
SUPPORT_MOUSE_GESTURE = 0x02

MATRIX_SCANNER_INTERNAL_NONE = 0x00
MATRIX_SCANNER_INTERNAL_FAST_ROW_COL = 0x01
MATRIX_SCANNER_INTERNAL_BASIC_SCAN = 0x02
MATRIX_SCANNER_INTERNAL_HARD_CODED = 0x03
MATRIX_SCANNER_INTERNAL_VIRTUAL = 0x04
MATRIX_SCANNER_INTERNAL_CUSTOM = 0xff

INTERNAL_SCAN_METHOD_NAME_TABLE = {
    "none": MATRIX_SCANNER_INTERNAL_NONE,
    "fast_row_col": MATRIX_SCANNER_INTERNAL_FAST_ROW_COL,
    "basic_scan": MATRIX_SCANNER_INTERNAL_BASIC_SCAN,
    "hard_coded": MATRIX_SCANNER_INTERNAL_HARD_CODED,
    "virtual": MATRIX_SCANNER_INTERNAL_VIRTUAL,
    "custom": MATRIX_SCANNER_INTERNAL_CUSTOM,
}
INTERNAL_SCAN_METHOD_TABLE = inverse_map(INTERNAL_SCAN_METHOD_NAME_TABLE)

VIRTUAL_MAP_TABLE_SIZE = 0x300
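A small sketch of how the bit masks and helpers above combine (pure Python, no hardware required):

# Build a FEATURE_CTRL byte that disables RF and Bluetooth, then test a flag.
feature_ctrl = FEATURE_CTRL_RF_DISABLE | FEATURE_CTRL_BT_DISABLE

if feature_ctrl & FEATURE_CTRL_RF_DISABLE:
    print("RF disabled")

print(report_mode_to_str(KEYBOARD_REPORT_MODE_AUTO))  # -> 'Auto NKRO'
print(report_mode_to_str(99))                         # -> 'Unknown(99)'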
mozilla/captain
captain/wsgi.py
Python
mpl-2.0
1,653
0.00121
""" WSGI config for captain project. This module contains the WSGI application used by Django's development server and any production WSGI deployments. It should expose a module-level variable named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover this application via the ``WSGI_APPLICATION`` setting. Usually you will have the standard Django WSGI application here, but it also might make sense to replace the whole Django WSGI application with a custom one that later delegates to the Django one
. For example, you could introduce WSGI middleware here, or combine a Django application with an application of another framework. """ import os import site # We defer to a DJANGO_SETTINGS_MOD
ULE already in the environment. This breaks # if running multiple sites in the same mod_wsgi process. To fix this, use # mod_wsgi daemon mode with each site in its own daemon process, or use # os.environ["DJANGO_SETTINGS_MODULE"] = "captain.settings" os.environ.setdefault("DJANGO_SETTINGS_MODULE", "captain.settings") # Add the app dir to the python path so we can import manage. wsgidir = os.path.dirname(__file__) site.addsitedir(os.path.abspath(os.path.join(wsgidir, '../'))) # Manage adds /vendor to the Python path. import manage # This application object is used by any WSGI server configured to use this # file. This includes Django's development server, if the WSGI_APPLICATION # setting points here. from django.core.wsgi import get_wsgi_application application = get_wsgi_application() # Apply WSGI middleware here. # from helloworld.wsgi import HelloWorldApplication # application = HelloWorldApplication(application)
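The closing comment invites middleware wrapping; a minimal sketch of what that looks like (this LoggingMiddleware is hypothetical, not part of captain):

class LoggingMiddleware(object):
    """Log each request path, then delegate to the wrapped WSGI app."""
    def __init__(self, app):
        self.app = app

    def __call__(self, environ, start_response):
        print(environ.get('PATH_INFO'))
        return self.app(environ, start_response)

# application = LoggingMiddleware(application)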
ANR-kamoulox/Telemeta
telemeta/models/corpus.py
Python
agpl-3.0
2,752
0.002544
# -*- coding: utf-8 -*-
# Copyright (C) 2010 Samalyse SARL
# Copyright (C) 2010-2014 Parisson SARL

# This file is part of Telemeta.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Authors: Olivier Guilyardi <[email protected]>
#          David LIPSZYC <[email protected]>
#          Guillaume Pellerin <[email protected]>

from __future__ import division
from django.utils.translation import ugettext_lazy as _
from telemeta.models.core import *
from telemeta.models.resource import *
from telemeta.models.collection import *


class MediaCorpus(MediaBaseResource):
    "Describe a corpus"

    element_type = 'corpus'
    children_type = 'collections'

    children = models.ManyToManyField(MediaCollection, related_name="corpus",
                                      verbose_name=_('collections'), blank=True)
    recorded_from_year = IntegerField(_('recording year (from)'), help_text=_('YYYY'))
    recorded_to_year = IntegerField(_('recording year (until)'), help_text=_('YYYY'))

    objects = MediaCorpusManager()

    permissions = (("can_download_corpus_epub", "Can download corpus EPUB"),)

    @property
    def public_id(self):
        return self.code

    @property
    def has_mediafile(self):
        for child in self.children.all():
            if child.has_mediafile:
                return True
        return False

    def computed_duration(self):
        duration = Duration()
        for child in self.children.all():
            duration += child.computed_duration()
        return duration
    computed_duration.verbose_name = _('total available duration')

    class Meta(MetaCore):
        db_table = 'media_corpus'
        verbose_name = _('corpus')
        verbose_name_plural = _('corpus')
        ordering = ['code']


class MediaCorpusRelated(MediaRelated):
    "Corpus related media"

    resource = ForeignKey(MediaCorpus, related_name="related", verbose_name=_('corpus'))

    class Meta(MetaCore):
        db_table = 'media_corpus_related'
        verbose_name = _('corpus related media')
        verbose_name_plural = _('corpus related media')
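computed_duration above is a simple fold over the corpus' children; the same aggregation pattern with the standard library's timedelta, for illustration (Telemeta's Duration behaves analogously for +=):

from datetime import timedelta

def total_duration(children):
    duration = timedelta()          # zero duration as the fold seed
    for child in children:
        duration += child.computed_duration()
    return duration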
mhotwagner/backstage
foti/serializers.py
Python
mit
343
0
from rest_framework import serializers

from .models import Foto


class FotoSerializer(serializers.HyperlinkedModelSerializer):
    class Meta:
        model = Foto
        fields = (
            'id',
            'name',
            'location',
            'date',
            'image',
            'created',
            'updated',
        )
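A usage sketch: HyperlinkedModelSerializer builds absolute URLs, so serializing outside a ViewSet needs the request passed via context (standard DRF behavior; the view variables here are hypothetical):

# Hypothetical view code, assuming a Foto row exists and `request` is in scope.
foto = Foto.objects.first()
data = FotoSerializer(foto, context={'request': request}).data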
krafczyk/spack
var/spack/repos/builtin/packages/leveldb/package.py
Python
lgpl-2.1
2,768
0.000723
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import glob

from spack import *


class Leveldb(MakefilePackage):
    """LevelDB is a fast key-value storage library written at Google
    that provides an ordered mapping from string keys to string values."""

    homepage = "https://github.com/google/leveldb"
    url      = "https://github.com/google/leveldb/archive/v1.20.tar.gz"

    version('1.20', '298b5bddf12c675d6345784261302252')
    version('1.18', '73770de34a2a5ab34498d2e05b2b7fa0')

    depends_on("snappy")

    def install(self, spec, prefix):
        mkdirp(prefix.lib.pkgconfig)

        libraries  = glob.glob('out-shared/libleveldb.*')
        libraries += glob.glob('out-static/libleveldb.*')
        for library in libraries:
            install(library, prefix.lib)

        install_tree('include/leveldb', prefix.include)

        with open(join_path(prefix.lib, 'pkgconfig', 'leveldb.pc'), 'w') as f:
            f.write('prefix={0}\n'.format(prefix))
            f.write('exec_prefix=${prefix}\n')
            f.write('libdir={0}\n'.format(prefix.lib))
            f.write('includedir={0}\n'.format(prefix.include))
            f.write('\n')
            f.write('Name: leveldb\n')
            f.write('Description: LevelDB is a fast key-value storage library'
                    ' written at Google that provides an ordered mapping from'
                    ' string keys to string values.\n')
            f.write('Version: {0}\n'.format(spec.version))
            f.write('Cflags: -I${includedir}\n')
            f.write('Libs: -L${libdir} -lleveldb\n')
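One subtlety in the pkg-config template above: the lines carrying literal ${prefix}, ${includedir} and ${libdir} are written without .format(), because str.format() treats any braced token as a replacement field. A quick illustration:

print('prefix={0}'.format('/opt/leveldb'))  # -> prefix=/opt/leveldb
try:
    'exec_prefix=${prefix}'.format()        # '{prefix}' is a named field here
except KeyError as err:
    print('str.format would consume it:', err)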
1ukash/horizon
horizon/dashboards/project/networks/forms.py
Python
apache-2.0
2,056
0.000486
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import logging

from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _

from horizon import api
from horizon import exceptions
from horizon import forms
from horizon import messages


LOG = logging.getLogger(__name__)


class UpdateNetwork(forms.SelfHandlingForm):
    name = forms.CharField(label=_("Name"), required=False)
    tenant_id = forms.CharField(widget=forms.HiddenInput)
    network_id = forms.CharField(label=_("ID"),
                                 widget=forms.TextInput(
                                     attrs={'readonly': 'readonly'}))
    failure_url = 'horizon:project:networks:index'

    def handle(self, request, data):
        try:
            network = api.quantum.network_modify(request, data['network_id'],
                                                 name=data['name'])
            msg = _('Network %s was successfully updated.') % data['name']
            LOG.debug(msg)
            messages.success(request, msg)
            return network
        except:
            msg = _('Failed to update network %s') % data['name']
            LOG.info(msg)
            redirect = reverse(self.failure_url)
            exceptions.handle(request, msg, redirect=redirect)
microsoft/task_oriented_dialogue_as_dataflow_synthesis
tests/test_dataflow/multiwoz/test_create_programs.py
Python
mit
10,600
0.002075
# Copyright (c) Microsoft Corporation. # Licensed under the MIT license. import json import os from typing import Any, Dict, Iterator, List from dataflow.core.utterance_tokenizer import UtteranceTokenizer from dataflow.multiwoz.create_programs import create_programs_for_trade_dialogue from dataflow.multiwoz.salience_model import DummySalienceModel, VanillaSalienceModel def load_test_trade_dialogues(data_dir: str) -> Iterator[Dict[str, Any]]: """Returns selected test dialogues. To extract a dialogue from the TRADE processed json file: $ jq '.[] | select (.dialogue_idx == "MUL1626.json")' dev_dials.json """ multiwoz_2_1_dir = os.path.join(data_dir, "multiwoz_2_1") for dialogue_id in [ "MUL1626.json", "PMUL3166.json", "MUL2258.json", "MUL2199.json", "MUL2096.json", "PMUL3470.json", "PMUL4478.json", ]: trade_dialogue_file = os.path.join(multiwoz_2_1_dir, dialogue_id) trade_dialogue = json.load(open(trade_dialogue_file)) yield trade_dialogue def test_create_programs_with_dummy_salience_model(data_dir: str): """Tests creating programs with a dummy salience model.""" utterance_tokenizer = UtteranceTokenizer() salience_model = DummySalienceModel() expected_num_refer_calls = { "MUL1626.json": 0, "PMUL3166.json": 0, "MUL2258.json": 0, "MUL2199.json": 0, "MUL2096.json": 0, "PMUL3470.json": 0, "PMUL4478.json": 0, } for trade_dialogue in load_test_trade_dialogues(data_dir): dataflow_dialogue, num_refer_calls, _ = create_programs_for_trade_dialogue( trade_dialogue=trade_dialogue, keep_all_domains=True, remove_none=False, fill_none=False, salience_model=salience_model, no_revise=False, avoid_empty_plan=False, utterance_tokenizer=utterance_tokenizer, ) dialogue_id = dataflow_dialogue.dialogue_id assert ( num_refer_calls == expected_num_refer_calls[dialogue_id] ), "{} failed".format(dialogue_id) def test_create_programs_without_revise(data_dir: str): """Tests creating programs without revise calls. It should not use refer calls even with a valid salience model. """ utterance_tokenizer = UtteranceTokenizer() salience_model = VanillaSalienceModel() for trade_dialogue in load_test_trade_dialogues(data_dir): for avoid_empty_plan in [True, False]: _, num_refer_calls, _ = create_programs_for_trade_dialogue( trade_dialogue=trade_dialogue, keep_all_domains=True, remove_none=False, fill_none=False, salience_model=salience_model, no_revise=True, avoid_empty_plan=avoid_empty_plan, utterance_tokenizer=utterance_tokenizer, ) assert num_refer_calls == 0 def test_create_programs_with_vanilla_salience_model(data_dir: str): """Tests creating programs with a vanilla salience model. 
""" utterance_tokenizer = UtteranceTokenizer() salience_model = VanillaSalienceModel() expected_num_refer_calls = { "MUL1626.json": 1, "PMUL3166.json": 0, "MUL2258.json": 1, "MUL2199.json": 1, "MUL2096.json": 0, "PMUL3470.json": 0, "PMUL4478.json": 0, } for trade_dialogue in load_test_trade_dialogues(data_dir): dataflow_dialogue, num_refer_calls, _ = create_programs_for_trade_dialogue( trade_dialogue=trade_dialogue, keep_all_domains=True, remove_none=False, fill_none=False, salience_model=salience_model, no_revise=False, avoid_empty_plan=False, utterance_tokenizer=utterance_tokenizer, ) dialogue_id = dataflow_dialogue.dialogue_id assert ( num_refer_calls == expected_num_refer_calls[dialogue_id] ), "{} failed".format(dialogue_id) def test_create_programs_with_revise(trade_dialogue_1: Dict[str, Any]): utterance_tokenizer = UtteranceTokenizer() salience_model = VanillaSalienceModel() expected_plans: List[str] = [ # turn 1 """(find (Constraint[Hotel] :name (?= "none") :type (?= "none")))""", # turn 2 """(ReviseConstraint :new (Constraint[Hotel] :name (?= "hilton") :pricerange (?= "cheap") :type (?= "guest house")) :oldLocation (Constraint[Constraint[Hotel]]) :rootLocation (roleConstraint #(Path "output")))""", # turn 3 """(ReviseConstraint :new (Constraint[Hotel] :name (?= "none")) :oldLocation (Constraint[Constraint[Hotel]]) :rootLocation (roleConstraint #(Path "output")))""", # turn 4 """(abandon (Constraint[Hotel]))""", # turn 5 """(find (Constraint[Hotel] :area (?= "west")))""", # turn 6 """(find (Constraint[Restaurant] :area (refer (Constraint[Area]))))""", # turn 7 """(ReviseConstraint :new (Constraint[Restaurant] :pricerange (refer (Constraint[Pricerange]))) :oldLocation (Constraint[Constraint[Restaurant]]) :rootLocation (roleConstraint #(Path "output")))""", # turn 8 "()", # turn 9 """(find (Constraint[Taxi] :departure (?= "none")))""", # turn 10 "()", ] dataflow_dialogue, _, _ = create_programs_for_trade_dialogue( trade_dialogue=trade_dialogue_1, keep_all_domains=True, remove_none=False, fill_none=False, salience_model=salience_model, no_revise=False, avoid_empty_plan=False, utterance_tokenizer=utterance_tokenizer, ) for turn, expected_lispress in zip(dataflow_dialogue.turns, expected_plans): lispress = turn.lispress assert lispress == expected_lispress def test_create_programs_with_revise_with_fill_none(trade_dialogue_1:
Dict[
str, Any]): utterance_tokenizer = UtteranceTokenizer() salience_model = VanillaSalienceModel() expected_plans: List[str] = [ # turn 1 """(find (Constraint[Hotel] :area (?= "none") :book-day (?= "none") :book-people (?= "none") :book-stay (?= "none") :internet (?= "none") :name (?= "none") :parking (?= "none") :pricerange (?= "none") :stars (?= "none") :type (?= "none")))""", # turn 2 """(ReviseConstraint :new (Constraint[Hotel] :name (?= "hilton") :pricerange (?= "cheap") :type (?= "guest house")) :oldLocation (Constraint[Constraint[Hotel]]) :rootLocation (roleConstraint #(Path "output")))""", # turn 3 """(ReviseConstraint :new (Constraint[Hotel] :name (?= "none")) :oldLocation (Constraint[Constraint[Hotel]]) :rootLocation (roleConstraint #(Path "output")))""", # turn 4 """(abandon (Constraint[Hotel]))""", # turn 5 """(find (Constraint[Hotel] :area (?= "west") :book-day (?= "none") :book-people (?= "none") :book-stay (?= "none") :internet (?= "none") :name (?= "none") :parking (?= "none") :pricerange (?= "none") :stars (?= "none") :type (?= "none")))""", # turn 6 """(find (Constraint[Restaurant] :area (refer (Constraint[Area])) :book-day (?= "none") :book-people (?= "none") :book-time (?= "none") :food (?= "none") :name (?= "none") :pricerange (?= "none")))""", # turn 7 """(ReviseConstraint :new (Constraint[Restaurant] :pricerange (refer (Constraint[Pricerange]))) :oldLocation (Constraint[Constraint[Restaurant]]) :rootLocation (roleConstraint #(Path "output")))""", # turn 8 "()", # turn 9 """(find (Constraint[Taxi] :arriveby (?= "none") :departure (?= "none") :destination (?= "none") :leaveat (?= "none")))""", # turn 10 "()", ] dataflow_dialogue, _, _ = create_programs_for_trade_dialogue( trade_dialogue=trade_dialogue_1, keep_all_domains=True, remove_none=False, fill_none=True, salience_model=salience_model, no_revise=False, avoid_empty_plan=False, utterance_tokenizer=utterance_tok
RoyalTS/econ-python-environment
waf.py
Python
bsd-3-clause
1,785
0
#!/usr/bin/env python
# Thomas Nagy, 2005-2013
# Modifications by Hans-Martin von Gaudecker for econ-project-templates

"""
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:

1. Redistributions of source code must retain the above copyright
   notice, this list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright
   notice, this list of conditions and the following disclaimer in the
   documentation and/or other materials provided with the distribution.

3. The name of the author may not be used to endorse or promote products
   derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""

import os
import sys

VERSION = "1.7.16"

cwd = os.getcwd()
join = os.path.join

name = sys.argv[0]
base = os.path.dirname(os.path.abspath(name))
wafdir = join(base, '.mywaflib')
sys.path.insert(0, wafdir)

if __name__ == '__main__':
    from waflib import Scripting
    Scripting.waf_entry_point(cwd, VERSION, wafdir)
jwesstrom/cleverMirror
main.py
Python
gpl-3.0
1,864
0.004299
# -*- coding: utf-8 -*-
import datetime
from kivy.app import App
from kivy.uix.widget import Widget
import random
from kivy.clock import Clock
from kivy.properties import StringProperty, NumericProperty
from webScrape import webScraper


class MirrorWindow(Widget):

    dayPrint = ['Sön', 'Mån', 'Tis', 'Ons', 'Tors', 'Fre', 'Lör']

    secondsAnim = NumericProperty(0)
    minute = NumericProperty(0)
    time = StringProperty('')
    day = StringProperty('')
    date = StringProperty('')
    weather1 = StringProperty('')
    weather2 = StringProperty('')
    weather3 = StringProperty('')
    seconds = StringProperty('')

    def update(self, dt):
        self.time = datetime.datetime.today().strftime("%H:%M")
        self.day = self.dayPrint[int(datetime.date.today().strftime('%w'))]
        self.date = datetime.date.today().strftime('%y%m%d')
        #self.seconds = str (( int (datetime.datetime.today().strftime('%f')) / 1000 ) )
        #self.seconds = ( int (datetime.datetime.today().strftime('%f')) / 1000 )
        self.seconds = str(datetime.datetime.today().strftime('%S'))

        # self.weather1 = (' ').join(webScraper().weather()[0][:3])
        # self.weather2 = (' ').join(webScraper().weather()[1][:3])
        # self.weather3 = (' ').join(webScraper().weather()[2][:3])

        #60 000 000
        if self.secondsAnim < 360:
            self.secondsAnim = self.secondsAnim + 6
        else:
            self.secondsAnim = 0

        #self.minute = int (datetime.datetime.today().strftime('%S') )
        if self.minute < 360:
            self.minute = self.minute + 0.1
        else:
            self.minute = 0.1


class MirrorApp(App):
    def build(self):
        mirrorWindow = MirrorWindow()
        Clock.schedule_interval(mirrorWindow.update, 0.01)
        return mirrorWindow


if __name__ == '__main__':
    MirrorApp().run()
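The sweep logic above advances secondsAnim by 6 degrees per scheduled update and wraps at 360, mirroring a 60-second dial; the mapping in isolation:

def seconds_to_degrees(seconds):
    # 60 s -> 360 degrees, i.e. 6 degrees per second on the dial.
    return (seconds * 6) % 360

assert seconds_to_degrees(15) == 90
assert seconds_to_degrees(60) == 0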
fp7-ofelia/pypelib
src/pypelib/persistence/backends/django/RuleTableModel.py
Python
lgpl-3.0
1,069
0.034612
import os
import sys
import time

from django.db import models

'''
@author: lbergesio,omoya,CarolinaFernandez
@organization: i2CAT, OFELIA FP7

Django RuleTable Model class
'''

#Django is required to run this model

class PolicyRuleTableModel(models.Model):

    class Meta:
        """RuleTable model class"""
        app_label = 'pypelib'
        db_table = 'pypelib_RuleTableModel'

    type = models.CharField(max_length = 16, default="") #terminal/non terminal
    uuid = models.CharField(max_length = 512, default="") # uuid
    name = models.TextField(max_length = 120, default="") # name
    # FIXME: set 'name' to 'unique', but that seems only possible with 'CharField'
    #name = models.CharField(max_length = 120, default="", unique=True) # name
    defaultParser = models.CharField(max_length = 64, default="", blank =True, null =True)
    defaultPersistence = models.CharField(max_length = 64, default="", blank =True, null =True)
    defaultPersistenceFlag = models.BooleanField()
pinterest/teletraan
deploy-agent/tests/unit/deploy/utils/test_exec.py
Python
apache-2.0
5,911
0.001184
# Copyright 2016 Pinterest, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import time
import signal
import tempfile
import unittest

import mock
import tests

from deployd.common.executor import Executor
from deployd.common.types import AgentStatus, PingStatus


class TestUtilsFunctions(tests.TestCase):

    @classmethod
    def setUpClass(cls):
        cls.fdout, cls.fdout_fn = tempfile.mkstemp()
        cls.pingServer = mock.Mock()
        cls.pingServer.__call__ = mock.Mock(return_value=False)
        cls.executor = Executor(callback=cls.pingServer)
        cls.executor.LOG_FILENAME = cls.fdout_fn

    @classmethod
    def tearDownClass(cls):
        os.close(cls.fdout)
        os.remove(cls.fdout_fn)

    def test_run_bad_script(self):
        fdout_fn = tempfile.mkstemp()[1]
        with open(fdout_fn, 'w') as f:
            f.write('echo hello')
        os.chmod(fdout_fn, 0o755)
        ping_server = mock.Mock(return_value=True)
        executor = Executor(callback=ping_server)
        executor.LOG_FILENAME = self.fdout_fn
        executor.MAX_RUNNING_TIME = 4
        executor.MIN_RUNNING_TIME = 2
        executor.DEFAULT_TAIL_LINES = 1
        executor.PROCESS_POLL_INTERVAL = 2
        executor.MAX_RETRY = 3
        deploy_report = executor.run_cmd(cmd=fdout_fn)
        self.assertTrue(ping_server.called)
        self.assertEqual(deploy_report.status_code, AgentStatus.ABORTED_BY_SERVER)
        os.remove(fdout_fn)

    def test_run_command(self):
        cmd = ['echo', 'hello world']
        self.executor.MAX_RUNNING_TIME = 5
        self.executor.MAX_RETRY = 3
        self.executor.PROCESS_POLL_INTERVAL = 2
        self.executor.MIN_RUNNING_TIME = 2
        self.executor.BACK_OFF = 2
        self.executor.MAX_SLEEP_INTERVAL = 60
        self.executor.MAX_TAIL_BYTES = 10240
        self.executor.LOG_FILENAME = self.fdout_fn
        deploy_report = self.executor.run_cmd(cmd=cmd)
        self.assertEqual(deploy_report.status_code, AgentStatus.SUCCEEDED)

    def test_run_command_with_big_output(self):
        cmd = ['seq', '1000000']
        self.executor.MIN_RUNNING_TIME = 2
        deploy_report = self.executor.run_cmd(cmd=cmd)
        self.assertEqual(deploy_report.status_code, AgentStatus.SUCCEEDED)
        self.assertIsNotNone(deploy_report.output_msg)

    def test_run_command_with_max_retry(self):
        cmd = ['ls', '-ltr', '/abc']
        ping_server = mock.Mock(return_value=False)
        executor = Executor(callback=ping_server)
        executor.LOG_FILENAME = self.fdout_fn
        executor.MAX_RUNNING_TIME = 5
        executor.MIN_RUNNING_TIME = 2
        executor.MAX_RETRY = 3
        executor.DEFAULT_TAIL_LINES = 1
        executor.PROCESS_POLL_INTERVAL = 2
        executor.BACK_OFF = 2
        executor.MAX_SLEEP_INTERVAL = 60
        executor.MAX_TAIL_BYTES = 10240
        deploy_report = executor.run_cmd(cmd=cmd)
        self.assertEqual(deploy_report.status_code, AgentStatus.TOO_MANY_RETRY)
        # in ubuntu: error message is 'ls: cannot access /abc: No such file or directory'
        # in mac osx: error message is 'ls: /abc: No such file or directory'
        self.assertEqual(deploy_report.retry_times, 3)

    def test_run_command_with_timeout(self):
        cmd = ['ls', '-ltr', '/abc']
        ping_server = mock.Mock(return_value=True)
        executor = Executor(callback=ping_server)
        executor.LOG_FILENAME = self.fdout_fn
        executor.MAX_RUNNING_TIME = 4
        executor.MIN_RUNNING_TIME = 2
        executor.DEFAULT_TAIL_LINES = 1
        executor.MAX_RETRY = 3
        executor.PROCESS_POLL_INTERVAL = 2
        executor.MAX_TAIL_BYTES = 10240
        deploy_report = executor.run_cmd(cmd=cmd)
        self.assertEqual(deploy_report.status_code, AgentStatus.ABORTED_BY_SERVER)

    def test_run_command_with_timeout_error(self):
        cmd = ['sleep', '20']
        ping_server = mock.Mock(return_value=False)
        executor = Executor(callback=ping_server)
        executor.LOG_FILENAME = self.fdout_fn
        executor.MAX_RUNNING_TIME = 4
        executor.MIN_RUNNING_TIME = 2
        executor.DEFAULT_TAIL_LINES = 1
        executor.MAX_RETRY = 3
        executor.PROCESS_POLL_INTERVAL = 2
        executor.BACK_OFF = 2
        executor.MAX_SLEEP_INTERVAL = 60
        executor.MAX_TAIL_BYTES = 10240
        deploy_report = executor.run_cmd(cmd=cmd)
        self.assertTrue(ping_server.called)
        self.assertEqual(deploy_report.status_code, AgentStatus.SCRIPT_TIMEOUT)

    def test_run_command_with_shutdown_timeout(self):
        cmd = ['sleep', '5m']
        ping_server = mock.Mock(return_value=PingStatus.PLAN_CHANGED)
        os.killpg = mock.Mock()
        executor = Executor(callback=ping_server)
        executor.LOG_FILENAME = self.fdout_fn
        executor.MAX_RUNNING_TIME = 4
        executor.MIN_RUNNING_TIME = 2
        executor.DEFAULT_TAIL_LINES = 1
        executor.MAX_RETRY = 3
        executor.PROCESS_POLL_INTERVAL = 2
        executor.BACK_OFF = 2
        executor.MAX_SLEEP_INTERVAL = 5
        executor.MAX_TAIL_BYTES = 10240
        executor.TERMINATE_TIMEOUT = 0
        deploy_report = executor.run_cmd(cmd=cmd)
        self.assertEqual(os.killpg.call_count, 2)
        calls = [mock.call(mock.ANY, signal.SIGTERM), mock.call(mock.ANY, signal.SIGKILL)]
        os.killpg.assert_has_calls(calls)


if __name__ == '__main__':
    unittest.main()
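The common scaffolding in these tests: a mock.Mock return value stands in for the ping server, and its answer drives Executor into the abort, retry, or timeout path. Distilled (log path is hypothetical):

import mock
from deployd.common.executor import Executor

# return_value=True -> server requests abort; False -> command keeps retrying.
abort_ping = mock.Mock(return_value=True)
executor = Executor(callback=abort_ping)
executor.LOG_FILENAME = '/tmp/deploy.log'   # hypothetical log path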
abathur/dbg_census
dbg_census/eq2.py
Python
apache-2.0
128
0.03125
from . import census


class Stats(census.Stats):
    namespace = "eq2"

    def __str__(self):
        return "EVERQUEST 2 STATS API"
PascualArroyo/Domotics
Server/myconfig.py
Python
gpl-2.0
9,785
0.006847
#Es necesario cambiar estos datos por los parametros de nuestro servidor, usuarios, password userDb = "userDb" passDb = "passDb" mail = "*********@gmail.com" passMail = "passMail" nameDb = "domotics_db" urlDb = "urlDb" serverPort = 8080 #Security Code Device updateCode = "UPDATE device SET code = '%s' WHERE id = '%s' AND (code = '%s' OR connectionStatus = 0)" updateCodeRemote = "UPDATE device SET code = '%s' WHERE idDevice = '%s'" #manage Port selectGetPort = "SELECT port FROM device WHERE id = '%s' AND code ='%s'" #Remotes selectGetDevicesRemote = "SELECT deviceRemote.id AS id, deviceRemote.pipeSend AS pipeSend, deviceRemote.pipeRecv AS pipeRecv, deviceRemote.type AS type FROM device deviceRemote, device deviceCentral WHERE deviceRemote.idDevice = deviceCentral.id AND deviceCentral.id = '%s' AND deviceCentral.code = '%s'" #Type device selectGetTypeDevice = "SELECT type FROM device WHERE id = '%s' AND code ='%s'" #Get User id selectUserId = "SELECT id FROM user WHERE login = '%s' AND password = '%s'" #Check users and mails selectUserExists = "SELECT login FROM user WHERE login = '%s'" selectMailExists = "SELECT login FROM user WHERE mail = '%s'" selectUserExistsCheck = "SELECT login FROM user WHERE login = '%s' AND active = '1'" selectMailExistsWithoutCheck = "SELECT login FROM user WHERE mail = '%s' AND active != '1'" #SignIn user insertSignIn = "INSERT INTO user (login, name, mail, password, active) VALUES ('%s', '%s', '%s', '%s', '%d')" updateSignIn = "UPDATE user SET login = '%s', name = '%s', password = '%s', active = '%d' WHERE mail = '%s'" #Check SignIn updateCheckSignIn = "UPDATE user SET active = 1 WHERE login = '%s' AND password = '%s' AND active = '%s'" #LogIn selectLogIn = "SELECT id, name, active FROM user WHERE login = '%s' AND password = '%s' AND active = '1'" #List locations of user selectLocationsUser = "SELECT location.id AS id, location.name AS name, location.security AS security FROM user, location, userLocation WHERE userLocation.idUser = user.id AND userLocation.idLocation = location.id AND user.id = '%s'" #Check Device User checkDeviceUser = "SELECT device.id AS idDevice FROM user, device, userLocation, locationDevice WHERE device.id = locationDevice.idDevice AND locationDevice.idLocation = userLocation.idLocation AND userLocation.idUser = user.id AND user.id = '%s' AND device.id = '%s'" #Check Location User checkLocationUser = "SELECT userLocation.idLocation AS idLocation FROM userLocation WHERE userLocation.idUser = '%s' AND userLocation.idLocation = '%s'" #list devices of locations and user selectDevicesLocation = "SELECT device.id AS id, device.name AS name, device.publicIp AS publicIp, device.privateIp AS privateIp, device.port AS port, DATE_FORMAT(device.timeStamp,'%%d/%%m/%%Y %%H:%%i:%%s') AS timeStamp, device.connectionStatus AS connectionStatus, device.RIPMotion AS RIPMotion, device.alarm AS alarm, device.type AS type, device.idDevice AS idDevice, device.pipeSend AS pipeSend, device.pipeRecv AS pipeRecv, device.code AS code, device.connectionMode AS connectionMode, device.version AS version FROM user, location, device, userLocation, locationDevice WHERE device.id = locationDevice.idDevice AND locationDevice.idLocation = location.id AND location.id = '%s' AND location.id = userLocation.idLocation AND userLocation.idUser = user.id AND user.id = '%s'" #create new location selectCheckLocationUser = "SELECT location.name AS name FROM user, location, userLocation WHERE userLocation.idUser = user.id AND userLocation.idLocation = location.id AND user.id = '%s' AND 
location.name = '%s'" insertLocation = "INSERT INTO location (name, security) VALUES ('%s','1')" insertLocationUser = "INSERT INTO userLocation (idUser, idLocation) VALUES ('%s','%s')" #edit location selectCheckUpdateLocationUser = "SELECT location.name AS name FROM user, location, userLocation WHERE userLocation.idUser = user.id AND userLocation.idLocation = location.id AND user.id = '%s' AND location.name = '%s' AND location.id != '%s'" updateLocation = "UPDATE location SET name = '%s' WHERE id = '%s'" updateLocationSecurity = "UPDATE location SET security = '%s' WHERE id = '%s'" #delete location deleteUserLocation = "DELETE FROM userLocation WHERE idLocation = '%s'" deleteLocation = "DELETE FROM location WHERE id = '%s'" #insert device insertDeviceServer = "INSERT INTO device (name, port, timeStamp, type, idDevice) VALUES ('%s', '%s', NOW(), '%s', '%s')" insertLocationDevice = "INSERT INTO locationDevice (idLocation, idDevice) VALUES ('%s', '%s')" #Update Devices updateDevice = "UPDATE device SET name = '%s', port = '%s', connectionMode = '%s', RIPMotion = '%s' WHERE id = '%s'" updateDevicePipes = "UPDATE device SET pipeSend = '%s', pipeRecv = '%s' WHERE id = '%s'" updateIpDevice = "UPDATE device SET publicIp = '%s', privateIp = '%s' WHERE id = '%s' AND code = '%s'" updateNotOnline = "UPDATE device SET connectionStatus = '0' WHERE connectionStatus != '0' AND TIMEDIFF(NOW(), device.timeStamp) > TIME('00:01:00')" updateOnline = "UPDATE device SET connectionStatus = '%s', devic
e.timeStamp = NOW() WHERE id = '%s' AND code = '%s'" #Check Device Remote for Delete checkDeviceRemote = "SELECT id FROM de
vice WHERE idDevice = '%s'" #Delete devices deleteTimerDevice = "DELETE FROM timer WHERE idDevice = '%s'" deleteAlertDevice = "DELETE FROM alert WHERE idDevice = '%s'" deleteSensorsData = "DELETE FROM sensors WHERE idDevice = '%s'" deleteLocationDevice = "DELETE FROM locationDevice WHERE idDevice = '%s'" deleteDevice = "DELETE FROM device WHERE id = '%s'" #Security selectLocationSecurity = "SELECT user.mail AS email, user.name AS nameUser, location.id AS idLocation, location.security AS security, location.name AS nameLocation, device.name AS nameDevice, device.RIPMotion AS RIPMotion, device.alarm AS alarm FROM location, device, locationDevice, userLocation, user WHERE device.id = locationDevice.idDevice AND locationDevice.idLocation = location.id AND device.id ='%s' AND device.code = '%s' AND userLocation.idLocation = location.id AND userLocation.idUser = user.id" updateAlarm = "UPDATE device SET alarm = '%s' WHERE id = '%s'" selectDevicesLocationOpenPort = "SELECT device.id AS id, device.publicIp AS publicIp, device.port AS port, device.name AS name, device.code AS code FROM device, locationDevice WHERE locationDevice.idLocation = '%s' AND locationDevice.idDevice = device.id AND device.connectionStatus = '1' AND device.RIPMotion = '1'" selectDevicesLocationUserOpenPort = "SELECT device.publicIp AS publicIp, device.port AS port, device.name AS name, device.code AS code FROM device, locationDevice, userLocation WHERE locationDevice.idLocation = '%s' AND locationDevice.idDevice = device.id AND device.connectionStatus = '1' AND userLocation.idLocation = locationDevice.idLocation AND userLocation.idUser = '%s'" selectDevicesOtherLocationOpenPort = "SELECT device.publicIp AS publicIp, device.port AS port, device.name AS name, device.code AS code FROM device, locationDevice WHERE locationDevice.idLocation <> '%s' AND locationDevice.idDevice = device.id AND device.connectionStatus = '1'" selectDevicesLocationOpenPortCameras = "SELECT device.publicIp AS publicIp, device.port AS port, device.name AS name, device.code AS code FROM device, locationDevice WHERE locationDevice.idLocation = '%s' AND locationDevice.idDevice = device.id AND device.connectionStatus = '1' AND device.type = '2'" checkDeviceAlarmStatus = "SELECT alarm FROM device WHERE id = '%s' AND code ='%s'" #Alert insertAlert = "INSERT INTO alert (date, time, type, idDevice) VALUES (CURRENT_DATE(), CURRENT_TIME(), '%s', '%s')" checkInsertAlert = "SELECT id FROM alert WHERE alert.type = '%s' AND alert.idDevice = '%s' AND alert.date = CURRENT_DATE() AND CURRENT_TIME()-alert.time < TIME('00:02:00')" selectAlert = "SELECT DATE_FORMAT(alert.date,'%%d/%%m/%%Y') AS date, DATE_FORMAT(alert.time,'%%H:%%i') AS time, alert.type AS type FROM device, alert, locationDevice, userLocation WHERE device.id = alert.idDevice AND device.id = '%s' AND alert.date = STR_TO_DATE('%s','%%d/%%m/%%Y') AND locationDevice.idDevice = device.id AND locationDevice.idLocation = userLocation.idLocation AN
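These templates are consumed with Python's % operator (hence the doubled %% inside the DATE_FORMAT patterns, which must survive interpolation). A hedged sketch of the intended use, assuming a DB-API-style cursor on the server side:

# Hypothetical: fill a template and run it (cursor is a MySQLdb-style cursor).
query = selectUserId % ('alice', 'hashed-password')
cursor.execute(query)
# Note: %-interpolating untrusted input is injection-prone; DB-API parameter
# binding (cursor.execute(sql, params)) is the safer form.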
Yelp/paasta
paasta_tools/check_services_replication_tools.py
Python
apache-2.0
8,925
0.000896
# Copyright 2015-2019 Yelp Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import argparse import logging import sys from typing import Any from typing import Callable from typing import List from typing
import Optional from typing import Sequence from typing import Tuple from typing
import Type from typing import Union import a_sync from marathon import MarathonClient from marathon.models.task import MarathonTask from mypy_extensions import Arg from mypy_extensions import NamedArg from paasta_tools.kubernetes_tools import get_all_nodes from paasta_tools.kubernetes_tools import get_all_pods from paasta_tools.kubernetes_tools import KubeClient from paasta_tools.kubernetes_tools import V1Node from paasta_tools.kubernetes_tools import V1Pod from paasta_tools.marathon_tools import get_marathon_clients from paasta_tools.marathon_tools import get_marathon_servers from paasta_tools.mesos_tools import get_slaves from paasta_tools.monitoring_tools import ReplicationChecker from paasta_tools.paasta_service_config_loader import PaastaServiceConfigLoader from paasta_tools.smartstack_tools import KubeSmartstackEnvoyReplicationChecker from paasta_tools.smartstack_tools import MesosSmartstackEnvoyReplicationChecker from paasta_tools.utils import DEFAULT_SOA_DIR from paasta_tools.utils import InstanceConfig_T from paasta_tools.utils import list_services from paasta_tools.utils import load_system_paasta_config from paasta_tools.utils import SPACER from paasta_tools.utils import SystemPaastaConfig try: import yelp_meteorite except ImportError: yelp_meteorite = None log = logging.getLogger(__name__) CheckServiceReplication = Callable[ [ Arg(InstanceConfig_T, "instance_config"), Arg(Sequence[Union[MarathonTask, V1Pod]], "all_tasks_or_pods"), Arg(Any, "replication_checker"), NamedArg(bool, "dry_run"), ], Optional[bool], ] def parse_args() -> argparse.Namespace: parser = argparse.ArgumentParser() parser.add_argument( "-d", "--soa-dir", dest="soa_dir", metavar="SOA_DIR", default=DEFAULT_SOA_DIR, help="define a different soa config directory", ) parser.add_argument( "--crit", dest="under_replicated_crit_pct", type=float, default=10, help="The percentage of under replicated service instances past which " "the script will return a critical status", ) parser.add_argument( "--min-count-critical", dest="min_count_critical", type=int, default=5, help="The script will not return a critical status if the number of " "under replicated service instances is below this number, even if the " "percentage is above the critical percentage.", ) parser.add_argument( "service_instance_list", nargs="*", help="The list of service instances to check", metavar="SERVICE%sINSTANCE" % SPACER, ) parser.add_argument( "-v", "--verbose", action="store_true", dest="verbose", default=False ) parser.add_argument( "--dry-run", action="store_true", dest="dry_run", help="Print Sensu alert events and metrics instead of sending them", ) options = parser.parse_args() return options def check_services_replication( soa_dir: str, cluster: str, service_instances: Sequence[str], instance_type_class: Type[InstanceConfig_T], check_service_replication: CheckServiceReplication, replication_checker: ReplicationChecker, all_tasks_or_pods: Sequence[Union[MarathonTask, V1Pod]], dry_run: bool = False, ) -> Tuple[int, int]: service_instances_set = set(service_instances) replication_statuses: List[bool] = [] for service in list_services(soa_dir=soa_dir): service_config = PaastaServiceConfigLoader(service=service, soa_dir=soa_dir) for instance_config in service_config.instance_configs( cluster=cluster, instance_type_class=instance_type_class ): if ( service_instances_set and f"{service}{SPACER}{instance_config.instance}" not in service_instances_set ): continue if instance_config.get_docker_image(): is_well_replicated = check_service_replication( 
instance_config=instance_config, all_tasks_or_pods=all_tasks_or_pods, replication_checker=replication_checker, dry_run=dry_run, ) if is_well_replicated is not None: replication_statuses.append(is_well_replicated) else: log.debug( "%s is not deployed. Skipping replication monitoring." % instance_config.job_id ) num_under_replicated = len( [status for status in replication_statuses if status is False] ) return num_under_replicated, len(replication_statuses) def emit_cluster_replication_metrics( pct_under_replicated: float, cluster: str, scheduler: str, dry_run: bool = False, ) -> None: metric_name = "paasta.pct_services_under_replicated" if dry_run: print(f"Would've sent value {pct_under_replicated} for metric '{metric_name}'") else: meteorite_dims = {"paasta_cluster": cluster, "scheduler": scheduler} gauge = yelp_meteorite.create_gauge(metric_name, meteorite_dims) gauge.set(pct_under_replicated) def main( instance_type_class: Type[InstanceConfig_T], check_service_replication: CheckServiceReplication, namespace: str, mesos: bool = False, ) -> None: args = parse_args() if args.verbose: logging.basicConfig(level=logging.DEBUG) else: logging.basicConfig(level=logging.WARNING) system_paasta_config = load_system_paasta_config() cluster = system_paasta_config.get_cluster() replication_checker: ReplicationChecker if mesos: tasks_or_pods, slaves = get_mesos_tasks_and_slaves(system_paasta_config) replication_checker = MesosSmartstackEnvoyReplicationChecker( mesos_slaves=slaves, system_paasta_config=system_paasta_config, ) else: tasks_or_pods, nodes = get_kubernetes_pods_and_nodes(namespace) replication_checker = KubeSmartstackEnvoyReplicationChecker( nodes=nodes, system_paasta_config=system_paasta_config, ) count_under_replicated, total = check_services_replication( soa_dir=args.soa_dir, cluster=cluster, service_instances=args.service_instance_list, instance_type_class=instance_type_class, check_service_replication=check_service_replication, replication_checker=replication_checker, all_tasks_or_pods=tasks_or_pods, dry_run=args.dry_run, ) pct_under_replicated = 0 if total == 0 else 100 * count_under_replicated / total if yelp_meteorite is not None: emit_cluster_replication_metrics( pct_under_replicated, cluster, scheduler="mesos" if mesos else "kubernetes", dry_run=args.dry_run, ) if ( pct_under_replicated >= args.under_replicated_crit_pct and count_under_replicated >= args.min_count_critical ): log.critical( f"{pct_under_replicated}% of instances ({count_under_replicated}/{total}) " f"are under replicated (past {args.under_replicated_crit_pct} is critical)!" ) sys.exit(2) else: sys.exit(0) def get_m
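The alerting decision in main() boils down to a guarded percentage plus a minimum-count floor; the arithmetic in isolation:

def pct_under_replicated(count_under_replicated, total):
    # Same guard as main(): an empty check run counts as 0% under-replicated.
    return 0 if total == 0 else 100 * count_under_replicated / total

assert pct_under_replicated(0, 0) == 0
assert pct_under_replicated(1, 4) == 25.0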
edsfault/Edsfault-processing.js
tools/packer.py
Python
mit
1,163
0.009458
#!/usr/bin/env python
import sys, os, os.path, signal
import jsshellhelper
from optparse import OptionParser
from subprocess import Popen, PIPE, STDOUT

# Uses jsshell https://developer.mozilla.org/en/Introduction_to_the_JavaScript_shell
class Packer(object):
    toolsdir = os.path.dirname(os.path.abspath(__file__))

    def run(self, jsshell, filename):
        tmpFile = jsshellhelper.createEscapedFile(filename)
        cmd = [jsshell,
               '-f', os.path.join(self.toolsdir, 'packer.js'),
               '-f', os.path.join(self.toolsdir, 'cleaner.js'),
               '-f', tmpFile,
               '-e', "var input = __unescape_string(); print(pack(input, 62, 1, 0));"]
        proc = Popen(cmd, stdout=PIPE, stderr=PIPE)
        stdout, stderr = proc.communicate()
        if stdout:
            print stdout
        else:
            print stderr
        tmpFile = jsshellhelper.cleanUp(tmpFile)

def main():
    parser = OptionParser()
    options, args = parser.parse_args()

    if len(args) < 2:
        print >>sys.stderr, """Usage: %s <path to jsshell> <js file>""" % sys.argv[0]
        sys.exit(1)

    packer = Packer()
    packer.run(args[0], args[1])

if __name__ == '__main__':
    main()
antmicro/distant-rec
tools/shebang-replace.py
Python
apache-2.0
1,211
0.004129
#!/usr/bin/env python3

import sys
from os import listdir, chdir
from os.path import isfile, abspath

UNTIL = '/build/'
REPLACE_WITH = '/b/f/w'

def bangchange(file_path):
    script = File(file_path)

    if script.flist[0].find("#!") == 0:
        if script.flist[0].find(UNTIL) > 0:
            print("\033[92m" + "[MOD] {}".format(file_path))
            where_divide = script.flist[0].find(UNTIL)
            script.flist[0] = "#!" + REPLACE_WITH + script.flist[0][where_divide:]
            script.flush()

class File:
    def __init__(self, path):
        self.fh = open(path, "r+")
        try:
            self.fstring = self.fh.read()
        except UnicodeDecodeError:
            print("\033[94m" + "[SKP] {}".format(path))
            self.fstring = ""
        self.flist = self.fstring.split("\n")

    def flush(self):
        self.fstring = "\n".join(self.flist)
        self.fh.seek(0)
        self.fh.write(self.fstring)
        self.fh.close()

def main():
    if len(sys.argv) != 2:
        print("\033[91m" + "[FAIL] Invalid arguments")
        return 1

    chdir(sys.argv[1])

    for filename in listdir("."):
        if isfile(abspath(filename)):
            bangchange(filename)

main()
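A worked example of the rewrite bangchange performs — everything before the first '/build/' in a shebang is swapped for REPLACE_WITH, keeping the '/build/...' tail:

line = "#!/home/user/project/build/venv/bin/python3"
where = line.find('/build/')
print("#!" + "/b/f/w" + line[where:])
# -> #!/b/f/w/build/venv/bin/python3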
MyRobotLab/myrobotlab
src/main/resources/resource/Clock/clock_6_clock_stopped.py
Python
apache-2.0
122
0.016393
clock.addListener("clockStopped", "python", "clock_stopped")

def clock_stopped():
    print("The clock has been stopped")
n0mjs710/DMRlink
ipsc/dmrlink_config.py
Python
gpl-3.0
11,481
0.004703
#!/usr/bin/env python # ############################################################################### # Copyright (C) 2016-2018 Cortney T. Buffington, N0MJS <[email protected]> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA ############################################################################### import ConfigParser import sys from socket import getaddrinfo, IPPROTO_UDP # Does anybody read this stuff? There's a PEP somewhere that says I should do this. __author__ = 'Cortney T. Buffington, N0MJS' __copyright__ = 'Copyright (c) 2016-2018 Cortney T. Buffington, N0MJS and the K0USY Group' __license__ = 'GNU GPLv3' __maintainer__ = 'Cort Buffington, N0MJS' __email__ = '[email protected]' def get_address(_config): ipv4 = '' ipv6 = '' socket_info = getaddrinfo(_config, None, 0, 0, IPPROTO_UDP) for item in socket_info: if item[0] == 2: ipv4 = item[4][0] elif item[0] == 30: ipv6 = item[4][0] if ipv4: return ipv4 if ipv6: return ipv6 return 'invalid address' def build_config(_config_file): config = ConfigParser.ConfigParser() if not config.read(_config_file): sys.exit('Configuration file \''+_config_file+'\' is not a valid configuration file! 
Exiting...') CONFIG = {} CONFIG['GLOBAL'] = {} CONFIG['REPORTS'] = {} CONFIG['LOGGER'] = {} CONFIG['ALIASES'] = {} CONFIG['SYSTEMS'] = {} try: for section in config.sections(): if section == 'GLOBAL': CONFIG['GLOBAL'].update({ 'PATH': config.get(section, 'PATH') }) elif section == 'REPORTS': CONFIG['REPORTS'].update({ 'REPORT_NETWORKS': config.get(section, 'REPORT_NETWORKS'), 'REPORT_RCM': config.get(section, 'REPORT_RCM'), 'REPORT_INTERVAL': config.getint(section, 'REPORT_INTERVAL'), 'REPORT_PORT': config.get(section, 'REPORT_PORT'), 'REPORT_CLIENTS': config.get(section, 'REPORT_CLIENTS').split(','), 'PRINT_PEERS_INC_MODE': config.getboolean(section, 'PRINT_PEERS_INC_MODE'), 'PRINT_PEERS_INC_FLAGS': config.getboolean(section, 'PRINT_PEERS_INC_FLAGS') }) if CONFIG['REPORTS']['REPORT_PORT']: CONFIG['REPORTS']['REPORT_PORT'] = int(CONFIG['REPORTS']['REPORT_PORT']) if CONFIG['REPORTS']['REPORT_RCM']: CONFIG['REPORTS']['REPORT_RCM'] = bool(CONFIG['REPORTS']['REPORT_RCM']) elif section == 'LOGGER': CONFIG['LOGGER'].update({ 'LOG_FILE': config.get(section, 'LOG_FILE'), 'LOG_HANDLERS': config.get(section, 'LOG_HANDLERS'), 'LOG_LEVEL': config.get(section, 'LOG_LEVEL'), 'LOG_NAME': config.get(section, 'LOG_NAME') }) elif section == 'ALIASES': CONFIG['ALIASES'].update({ 'TRY_DOWNLOAD': config.getboolean(section, 'TRY_DOWNLOAD'), 'PATH': config.get(section, 'PATH'), 'PEER_FILE': config.get(section, 'PEER_FILE'), 'SUBSCRIBER_FILE': config.get(section, 'SUBSCRIBER_FILE'), 'TGID_FILE': config.get(section, 'TGID_FILE'), 'LOCAL_FILE': config.get(section, 'LOCAL_FILE'), 'PEER_URL': config.get(section, 'PEER_URL'), 'SUBSCRIBER_URL': config.get(section, 'SUBSCRIBER_URL'), 'STALE_TIME': config.getint(section, 'STALE_DAYS') * 86400, }) elif config.getboolean(section, 'ENABLED'): CONFIG['SYSTEMS'].update({section: {'LOCAL': {}, 'MASTER': {}, 'PEERS': {}}}) CONFIG['SYSTEMS'][section]['LOCAL'].update({ # In case we want to keep config, but not actually connect to the network 'ENABLED': config.getboolean(section, 'ENABLED'), # These items are used to create the MODE byte 'PEER_OPER': config.getboolean(section, 'PEER_OPER'),
'IPSC_MODE': config.get(section, 'IPSC_MODE'), 'TS1_LINK': config.getboolean(section, 'TS1_LINK'), 'TS2_LINK': config.getboolean(section, 'TS2_LINK'), 'MODE': '', # These items are used to create the multi-byte FLAGS field 'AUTH_ENABLED': config.getboolean(section, 'AUTH_ENABLED'), 'CSBK_CALL': config.getboolean(section, 'CSBK_CA
LL'), 'RCM': config.getboolean(section, 'RCM'), 'CON_APP': config.getboolean(section, 'CON_APP'), 'XNL_CALL': config.getboolean(section, 'XNL_CALL'), 'XNL_MASTER': config.getboolean(section, 'XNL_MASTER'), 'DATA_CALL': config.getboolean(section, 'DATA_CALL'), 'VOICE_CALL': config.getboolean(section, 'VOICE_CALL'), 'MASTER_PEER': config.getboolean(section, 'MASTER_PEER'), 'FLAGS': '', # Things we need to know to connect and be a peer in this IPSC 'RADIO_ID': hex(int(config.get(section, 'RADIO_ID')))[2:].rjust(8,'0').decode('hex'), 'IP': config.get(section, 'IP'), 'PORT': config.getint(section, 'PORT'), 'ALIVE_TIMER': config.getint(section, 'ALIVE_TIMER'), 'MAX_MISSED': config.getint(section, 'MAX_MISSED'), 'AUTH_KEY': (config.get(section, 'AUTH_KEY').rjust(40,'0')).decode('hex'), 'GROUP_HANGTIME': config.getint(section, 'GROUP_HANGTIME'), 'NUM_PEERS': 0, }) # Master means things we need to know about the master peer of the network CONFIG['SYSTEMS'][section]['MASTER'].update({ 'RADIO_ID': '\x00\x00\x00\x00', 'MODE': '\x00', 'MODE_DECODE': '', 'FLAGS': '\x00\x00\x00\x00', 'FLAGS_DECODE': '', 'STATUS': { 'CONNECTED': False, 'PEER_LIST': False, 'KEEP_ALIVES_SENT': 0, 'KEEP_ALIVES_MISSED': 0, 'KEEP_ALIVES_OUTSTANDING': 0, 'KEEP_ALIVES_RECEIVED': 0, 'KEEP_ALIVE_RX_TIME': 0 }, 'IP': '', 'PORT': '' }) if not CONFIG['SYSTEMS'][section]['LOCAL']['MASTER_PEER']: CONFIG['SYSTEMS'][section]['MASTER'].update({ 'IP': get_address(config.get(section, 'MASTER_IP')), 'PORT': config.getint(section, 'MASTER_PORT') }) # Temporary locations for building MODE and FLAG data MODE_BYTE = 0 FLAG_1 =
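The RADIO_ID line above is the densest expression in this (Python 2) file; the same round trip in isolation:

# Python 2: pack a decimal radio ID into four raw big-endian bytes.
radio_id = '12345'
packed = hex(int(radio_id))[2:].rjust(8, '0').decode('hex')
# hex(12345) -> '0x3039'; [2:] -> '3039'; rjust(8, '0') -> '00003039'
# .decode('hex') -> the four bytes 00 00 30 39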
pluradj/incubator-tinkerpop
gremlin-python/src/main/jython/gremlin_python/process/strategies.py
Python
apache-2.0
5,953
0.001008
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied.  See the License for the
specific language governing permissions and limitations
under the License.
'''

__author__ = 'Marko A. Rodriguez (http://markorodriguez.com)'

from gremlin_python.process.traversal import TraversalStrategy

#########################
# DECORATION STRATEGIES #
#########################


class ConnectiveStrategy(TraversalStrategy):
    def __init__(self):
        TraversalStrategy.__init__(self)


class ElementIdStrategy(TraversalStrategy):
    def __init__(self):
        TraversalStrategy.__init__(self)


# EventStrategy doesn't make sense outside JVM traversal machine

class HaltedTraverserStrategy(TraversalStrategy):
    def __init__(self, halted_traverser_factory=None):
        TraversalStrategy.__init__(self)
        if halted_traverser_factory is not None:
            self.configuration["haltedTraverserFactory"] = halted_traverser_factory


class OptionsStrategy(TraversalStrategy):
    def __init__(self, options=None):
        TraversalStrategy.__init__(self, configuration=options)


class PartitionStrategy(TraversalStrategy):
    def __init__(self, partition_key=None, write_partition=None, read_partitions=None,
                 include_meta_properties=None):
        TraversalStrategy.__init__(self)
        if partition_key is not None:
            self.configuration["partitionKey"] = partition_key
        if write_partition is not None:
            self.configuration["writePartition"] = write_partition
        if read_partitions is not None:
            self.configuration["readPartitions"] = read_partitions
        if include_meta_properties is not None:
            self.configuration["includeMetaProperties"] = include_meta_properties


class SubgraphStrategy(TraversalStrategy):
    def __init__(self, vertices=None, edges=None, vertex_properties=None):
        TraversalStrategy.__init__(self)
        if vertices is not None:
            self.configuration["vertices"] = vertices
        if edges is not None:
            self.configuration["edges"] = edges
        if vertex_properties is not None:
            self.configuration["vertexProperties"] = vertex_properties


class VertexProgramStrategy(TraversalStrategy):
    def __init__(self, graph_computer=None, workers=None, persist=None, result=None,
                 vertices=None, edges=None, configuration=None):
        TraversalStrategy.__init__(self)
        if graph_computer is not None:
            self.configuration["graphComputer"] = graph_computer
        if workers is not None:
            self.configuration["workers"] = workers
        if persist is not None:
            self.configuration["persist"] = persist
        if result is not None:
            self.configuration["result"] = result
        if vertices is not None:
            self.configuration["vertices"] = vertices
        if edges is not None:
            self.configuration["edges"] = edges
        if configuration is not None:
            self.configuration.update(configuration)


###########################
# FINALIZATION STRATEGIES #
###########################

class MatchAlgorithmStrategy(TraversalStrategy):
    def __init__(self, match_algorithm=None):
        TraversalStrategy.__init__(self)
        if match_algorithm is not None:
            self.configuration["matchAlgorithm"] = match_algorithm


###########################
# OPTIMIZATION STRATEGIES #
###########################

class AdjacentToIncidentStrategy(TraversalStrategy):
    def __init__(self):
        TraversalStrategy.__init__(self)


class FilterRankingStrategy(TraversalStrategy):
    def __init__(self):
        TraversalStrategy.__init__(self)


class IdentityRemovalStrategy(TraversalStrategy):
    def __init__(self):
        TraversalStrategy.__init__(self)


class IncidentToAdjacentStrategy(TraversalStrategy):
    def __init__(self):
        TraversalStrategy.__init__(self)


class InlineFilterStrategy(TraversalStrategy):
    def __init__(self):
        TraversalStrategy.__init__(self)


class LazyBarrierStrategy(TraversalStrategy):
    def __init__(self):
        TraversalStrategy.__init__(self)


class MatchPredicateStrategy(TraversalStrategy):
    def __init__(self):
        TraversalStrategy.__init__(self)


class OrderLimitStrategy(TraversalStrategy):
    def __init__(self):
        TraversalStrategy.__init__(self)


class PathProcessorStrategy(TraversalStrategy):
    def __init__(self):
        TraversalStrategy.__init__(self)


class PathRetractionStrategy(TraversalStrategy):
    def __init__(self):
        TraversalStrategy.__init__(self)


class CountStrategy(TraversalStrategy):
    def __init__(self):
        TraversalStrategy.__init__(self)


class RepeatUnrollStrategy(TraversalStrategy):
    def __init__(self):
        TraversalStrategy.__init__(self)


class GraphFilterStrategy(TraversalStrategy):
    def __init__(self):
        TraversalStrategy.__init__(self)


class EarlyLimitStrategy(TraversalStrategy):
    def __init__(self):
        TraversalStrategy.__init__(self)


###########################
# VERIFICATION STRATEGIES #
###########################

class LambdaRestrictionStrategy(TraversalStrategy):
    def __init__(self):
        TraversalStrategy.__init__(self)


class ReadOnlyStrategy(TraversalStrategy):
    def __init__(self):
        TraversalStrategy.__init__(self)
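These classes are applied by attaching them to a traversal source; a sketch, assuming an already-constructed GraphTraversalSource g (e.g. from a remote connection):

from gremlin_python.process.graph_traversal import __

# Reject mutating steps, or restrict traversals to 'person' vertices.
g_readonly = g.withStrategies(ReadOnlyStrategy())
g_people = g.withStrategies(SubgraphStrategy(vertices=__.hasLabel('person')))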
fr34k8/Archipel
ArchipelAgent/archipel-agent-hypervisor-geolocalization/archipelagenthypervisorgeolocalization/geoloc.py
Python
agpl-3.0
7,356
0.006661
# -*- coding: utf-8 -*- # # geoloc.py # # Copyright (C) 2010 Antoine Mercadal <[email protected]> # Copyright, 2011 - Franck Villaume <[email protected]> # This file is part of ArchipelProject # http://archipelproject.org # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import httplib import xmpp from archipelcore.archipelPlugin import TNArchipelPlugin from archipelcore.utils import build_error_iq ARCHIPEL_NS_HYPERVISOR_GEOLOC = "archipel:hypervisor:geolocalization" ARCHIPEL_ERROR_CODE_LOCALIZATION_GET = -9001 class TNHypervisorGeolocalization (TNArchipelPlugin): """ This plugin allow to geolocalize the hypervisor. """ def __init__(self, configuration, entity, entry_point_group): """ Initialize the module. @type configuration: Configuration object @param configuration: the configuration @type entity: L{TNArchipelEntity} @param entity: the entity that owns the plugin @type entry_point_group: string @param entry_point_group: the group name of plugin entry_point """ TNArchipelPlugin.__init__(self, configuration=configuration, entity=entity, entry_point_group=entry_point_group) self.plugin_deactivated = False try: mode = self.configuration.get("GEOLOCALIZATION", "localization_mode") lat = "" lon = "" if mode == "auto": service = self.configuration.get("GEOLOCALIZATION", "localization_service_url") request = self.configuration.get("GEOLOCALIZATION", "localization_service_request") method = self.configuration.get("GEOLOCALIZATION", "localization_service_method") conn = httplib.HTTPConnection(service) conn.request(method, request) data_node = xmpp.simplexml.NodeBuilder(data=str(conn.getresponse().read())).getDom() lat = data_node.getTagData("Latitude") lon = data_node.getTagData("Longitude") else: lat = self.configuration.getfloat("GEOLOCALIZATION", "localization_latitude") lon = self.configuration.getfloat("GEOLOCALIZATION", "localization_longitude") string = "<gelocalization><Latitude>"+str(lat)+"</Latitude>\n<Longitude>"+str(lon)+"</Longitude></gelocalization>" self.localization_information = xmpp.simplexml.NodeBuilder(data=string).getDom() registrar_item = { "commands" : ["where are you", "localize"], "parameters": {}, "method": self.message_get, "permissions": ["geolocalization_get"], "description": "give my the latitude and longitude." } self.entity.add_message_registrar_item(registrar_item) # permissions self.entity.permission_center.create_permission("geolocalization_get", "Authorizes user to get the entity location coordinates", False) except Exception as ex: self.plugin_deactivated = True; self.entity.log.error("Cannot initialize geolocalization. plugin deactivated. Exception: %s" % str(ex)) ### Plugin interface def register_handler
s(self): """ This method will be called by the plugin user when it will be necessary to register module for listening to stanza. """ if self.plugin_deactivated: return self.entity.xmppclient.RegisterHandler('iq', self.process_iq, ns=ARCHIPEL_NS_HYPERVISOR_GEOLOC) def unregister_handlers(self): """ Unregister the handlers. """
if self.plugin_deactivated: return self.entity.xmppclient.UnregisterHandler('iq', self.process_iq, ns=ARCHIPEL_NS_HYPERVISOR_GEOLOC) @staticmethod def plugin_info(): """ Return informations about the plugin. @rtype: dict @return: dictionary contaning plugin informations """ plugin_friendly_name = "Hypervisor Geolocalization" plugin_identifier = "geolocalization" plugin_configuration_section = "GEOLOCALIZATION" plugin_configuration_tokens = [ "localization_mode", "localization_latitude", "localization_longitude", "localization_service_url", "localization_service_request", "localization_service_method", "localization_service_response_root_node"] return { "common-name" : plugin_friendly_name, "identifier" : plugin_identifier, "configuration-section" : plugin_configuration_section, "configuration-tokens" : plugin_configuration_tokens } ### XMPP Processing def process_iq(self, conn, iq): """ This method is invoked when a ARCHIPEL_NS_HYPERVISOR_GEOLOC IQ is received. It understands IQ of type: - get @type conn: xmpp.Dispatcher @param conn: ths instance of the current connection that send the stanza @type iq: xmpp.Protocol.Iq @param iq: the received IQ """ reply = None action = self.entity.check_acp(conn, iq) self.entity.check_perm(conn, iq, action, -1, prefix="geolocalization_") if action == "get": reply = self.iq_get(iq) if reply: conn.send(reply) raise xmpp.protocol.NodeProcessed def iq_get(self, iq): """ Return the geolocalization information. @type iq: xmpp.Protocol.Iq @param iq: the received IQ """ reply = iq.buildReply("result") try: reply.setQueryPayload([self.localization_information]) except Exception as ex: reply = build_error_iq(self, ex, iq, ARCHIPEL_ERROR_CODE_LOCALIZATION_GET) return reply def message_get(self, msg): """ Return the geolocalization information asked by message. @type msg: xmpp.Protocol.Message @param msg: the received message @rtype: string @return: string containing the answer to send """ lat = self.localization_information.getTagData("Latitude") lon = self.localization_information.getTagData("Longitude") return "I'm localized at longitude: %s latitude: %s" % (lon, lat)
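For illustration, a minimal Python 3 sketch of the "auto" localization mode above: one GET request to an XML geolocation service, then read the Latitude/Longitude tags from the reply. The host and path here are placeholders, not a real endpoint.

import http.client
import xml.etree.ElementTree as ET

def fetch_coordinates(host="geoloc.example.com", path="/api/locate"):
    # Mirrors the plugin's auto mode: fetch the body, parse it as XML,
    # and pull out the coordinate tags (findtext plays the role of getTagData).
    conn = http.client.HTTPConnection(host)
    conn.request("GET", path)
    body = conn.getresponse().read()
    root = ET.fromstring(body)
    lat = root.findtext(".//Latitude")
    lon = root.findtext(".//Longitude")
    return float(lat), float(lon)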
jorgehog/Deux-kMC
scripts/autocorr/run.py
Python
gpl-3.0
881
0.014756
import sys import os import numpy as np sys.path.append(os.path.join(os.getcwd(), "..")) from run_utils import run_kmc, parse_input from ParameterJuggler import ParameterSet def main(): controller, path, app, cfg, n_procs = parse_input(sys.argv) alpha_values = ParameterSet(cfg, "alpha\s*=\s*(.*)\;") alpha_values.initialize_set(np.linspace(0.5, 2, 16)) heights = ParameterSet(cfg, "confiningSurfaceHeight\s*=\s*(.*)\;") heights.initialize_set([20.]) diffusions = ParameterSet(cfg, "diffuse\s*=\s*(.*)\;") diffusions.initialize_set([3]) controller.register_parameter_set(alpha_values) controller.register_parameter_set(heights) controller.register_parameter_set(diffusions) controller.set_repeats(
20) controller.run(run_kmc, path
, app, cfg, ask=False, n_procs=n_procs, shuffle=True) if __name__ == "__main__": main()
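ParameterJuggler itself is not shown here; as a hedged sketch, the regex handed to ParameterSet (e.g. alpha\s*=\s*(.*)\; above) reads as "match a name = value; config line and capture the value", which a runner could substitute like this:

import re

def set_config_value(text, pattern, new_value):
    # group(1) captures the current value; swap it for the new one
    regex = re.compile(pattern)
    def repl(match):
        return match.group(0).replace(match.group(1), str(new_value))
    return regex.sub(repl, text, count=1)

cfg = "alpha = 1.0;\nconfiningSurfaceHeight = 10.;"
print(set_config_value(cfg, r"alpha\s*=\s*(.*)\;", 1.5))
# alpha = 1.5;
# confiningSurfaceHeight = 10.;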
kustomzone/Rusthon
regtests/c++/try_except_finally.py
Python
bsd-3-clause
301
0.059801
''' c++ finally ''
' def myfunc(): b = False try: print('trying something that will fail...') print('some call that fails at runtime') f = open('/tmp/nosuchfile') except: print('got exception') finally: print('finally cleanup') b = True TestError( b == True ) def main(): myfun
c()
xiaoyuanW/gem5
configs/common/MemConfig.py
Python
bsd-3-clause
8,019
0.00212
# Copyright (c) 2013 ARM Limited # All rights reserved. # # The license below extends only to copyright in the software and shall # not be construed as granting a license to any other intellectual # property including but not limited to intellectual property relating # to a hardware implementation of the functionality of the software # licensed hereunder. You may use the software subject to the license # terms below provided that you ensure that this notice is replicated # unmodified and in its entirety in all distributions of the software, # modified or unmodified, in source code or in binary form. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Authors: Andreas Sandberg # Andreas Hansson import m5.objects import inspect import sys from textwrap import TextWrapper # Dictionary of mapping names of real memory controller models to # classes. _mem_classes = {} # Memory aliases. We make sure they exist before we add them to the # fina; list. A target may be specified as a tuple, in which case the # first available memory controller model in the tuple will be used. _mem_aliases_all = [ ("simple_mem", "SimpleMemory"), ("ddr3_1600_x64", "DDR3_1600_x64"), ("lpddr2_s4_1066_x32", "LPDDR2_S4_1066_x32"), ("lpddr3_1600_x32", "LPDDR3_1600_x32"), ("wio_200_x128", "WideIO_200_x128"), ("dramsim2", "DRAMSim2") ] # Filtered list of aliases. Only aliases for existing memory # controllers exist in this list. _mem_aliases = {} def is_mem_class(cls): """Determine if a class is a memory controller that can be instantiated""" # We can't use the normal inspect.isclass because the ParamFactory # and ProxyFactory classes have a tendency to confuse it. try: return issubclass(cls, m5.objects.AbstractMemory) and \ not cls.abstract except TypeError: return False def get(name): """Get a memory class from a user provided class name or alias.""" real_name = _mem_aliases.get(name, name) try: mem_class = _mem_classes[real_name] return mem_class except KeyError: print "%s is not a valid memory controller." 
% (name,) sys.exit(1) def print_mem_list(): """Print a list of available memory classes including their aliases.""" print "Available memory classes:" doc_wrapper = TextWrapper(initial_indent="\t\t", subsequent_indent="\t\t") for name, cls in _mem_classes.items(): print "\t%s" % name # Try to extract the class documentation from the class help # string. doc = inspect.getdoc(cls) if doc: for line in doc_wrapper.wrap(doc): print line if _mem_aliases: print "\nMemory aliases:" for alias, target in _mem_aliases.items(): print "\t%s => %s" % (alias, target) def mem_names(): """Return a list of valid memory names.""" return _mem_classes.keys() + _mem_aliases.keys() # Add all memory controllers in the object hierarchy. for name, cls in inspect.getmembers(m5.objects, is_mem_class): _mem_classes[name] = cls for alias, target in _mem_aliases_all: if isinstance(target, tuple): # Some aliases contain a list of memory controller models # sorted in priority order. Use the first target that's # available. for t in target: if t in _mem_classes: _mem_aliases[alias] = t break elif target in _mem_classes: # Normal alias _mem_aliases[alias] = target def create_mem_ctrl(cls, r, i, nbr_mem_ctrls, intlv_bits, cache_line_size): """ Helper function for creating a single memoy controller from the given options. This function is invoked multiple times in config_mem function to create an array of controllers. """ import math # The default behaviour is to interleave on cache line granularity cache_line_bit = int(math.log(cache_line_size, 2)) - 1 intlv_low_bit = cache_line_bit # Create an instance so we can figure out the address # mapping and row-buffer size ctrl = cls() # Only do this for DRAMs if issubclass(cls, m5.objects.DRAMCtrl): # Inform each controller how many channels to account # for ctrl.channels = nbr_mem_ctrls # If the channel bits are appearing after the column # bits, we need to add the appropriate number of bits # for the row buffer size if ctrl.addr_mapping.value == 'RoRaBaChCo': # This computation only really needs to happen # once, but as we rely on having an instance we # end up having to repeat it for each and every # one rowbuffer_size = ctrl.device_rowbuffer_size.value * \ ctrl.devices_per_rank.value intlv_low_bit = int(math.log(rowbuffer_size, 2)) - 1 # We got all we need to configure the appropriate address # range ctrl.range = m5.objects.AddrRange(r.start, size = r.size(), intlvHighBit = \ intlv_low_bit + intlv_bits, intlvBits = intlv_bits, intlvMatch = i) return ctrl def config_mem(options, system): """ Create the memory controllers based o
n the options and attach them. If requested, we make a multi-channel configuration of the selected memory controller class by creating multiple instances of the specific class. The individual controllers have their parameters set such that the address range is interleaved between them. """ nbr_mem_ctrls = options.mem_channels import math from m5.util import fatal intlv_bits
= int(math.log(nbr_mem_ctrls, 2)) if 2 ** intlv_bits != nbr_mem_ctrls: fatal("Number of memory channels must be a power of 2") cls = get(options.mem_type) mem_ctrls = [] # For every range (most systems will only have one), create an # array of controllers and set their parameters to match their # address mapping in the case of a DRAM for r in system.mem_ranges: for i in xrange(nbr_mem_ctrls): mem_ctrls.append(create_mem_ctrl(cls, r, i, nbr_mem_ctrls, intlv_bits, system.cache_line_size.value)) system.mem_ctrls = mem_ctrls # Connect the controllers to the membus for i in xrange(len(system.mem_ctrls)): system.mem_ctrls[i].port = system.membus.master
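The interleaving arithmetic in config_mem, isolated as a standalone sketch: the number of interleave bits is log2 of the channel count, which is why a non power-of-two channel count is fatal.

import math

def interleave_bits(nbr_mem_ctrls):
    # log2 of the channel count; must be exact for the address bits to line up
    bits = int(math.log(nbr_mem_ctrls, 2))
    if 2 ** bits != nbr_mem_ctrls:
        raise ValueError("Number of memory channels must be a power of 2")
    return bits

assert interleave_bits(1) == 0
assert interleave_bits(4) == 2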
mredar/ucldc_oai_harvest
oai_harvester/read_sqs_queue.py
Python
bsd-3-clause
1,363
0.007337
#!/usr/bin/env python '''Check a queue and get times and info for messages in q''' import sys import os import logging import json import datetime import boto.sqs as sqs QUEUE_OAI_HARVEST = os.environ.get('QUEUE_OAI_HARVEST', 'OAI_harvest') QUEUE_OAI_HARVEST_ERR = os.environ.get('QUEUE_OAI_HARVEST_ERR', 'OAI_harvest_error') QUEUE_OAI_HARVEST_HARVESTING = os.environ.get('QUEUE_OAI_HARVEST_HARVESTING', 'OAI_harvest_harvesting') SQS_CONNECTION = sqs.connect_to_region('us-east-1') logging.basicConfig(level=logging.INFO) def main(args): q_name = args[1] q_harvesting = SQS_CONNECTION.get_queue(q_name) msgs = q_harvesting.get_messages(num_messages=10, visibility_timeout=10, attributes='All') #copied from boto implementation of save_to_file while(msgs): for m in msgs: msg_dict = json.loads(m.get_body()) print 'ID:', m.id, '\n' sent_dt = datetime.datetime.fromtimestamp(float(m.attributes['SentTimestamp'])/1000) print 'SENT AT: ', sent_dt print 'IN QUEUE FOR:', datetime.datetime.now()-sent_dt print msg_dict print '\n\n' msgs = q_harvesting.get_messages(num_messages=10, visibility_timeout=10, attributes='All') if __name__=="__main_
_": if len(sys.argv) < 2: print '
Usage: read_sqs_queue.py <queue_name>' sys.exit(1) main(sys.argv)
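The script above uses the legacy Python 2 boto API; for comparison, the same dwell-time calculation with boto3 (queue name and region as configured above):

import datetime
import boto3

sqs = boto3.resource("sqs", region_name="us-east-1")
queue = sqs.get_queue_by_name(QueueName="OAI_harvest_harvesting")
for message in queue.receive_messages(MaxNumberOfMessages=10,
                                      AttributeNames=["SentTimestamp"]):
    # SentTimestamp is epoch milliseconds, hence the /1000
    sent = datetime.datetime.fromtimestamp(
        float(message.attributes["SentTimestamp"]) / 1000)
    print("in queue for", datetime.datetime.now() - sent)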
sihrc/tornado-boilerplate
indico/routes/auth.py
Python
mit
1,532
0.001305
""" IndicoService Authentication Route Creating and Maintaining Users """ from indico.utils.auth.auth_utils import auth, user_hash from indico.utils.auth.facebook_utils import check_access_token from indico.error import FacebookTokenError from indico.utils import unpack, mongo_callback, type_check from indico.routes.handler import IndicoHandler from indico.db import current_time import indico.db.user_db as UserDB import indico.db.auth_db as AuthDB class AuthHandler(IndicoHandler):
# Create User if doesn't exist @unpack("access_tok
en", "oauth_id", "user") @type_check(str, str, dict) def login(self, access_token, oauth_id, user): if not check_access_token(access_token): raise FacebookTokenError() @mongo_callback(self) def sync_callback(result): self.respond({ "user": result, "indico_key": indico_key }) @mongo_callback(self) def find_callback(result): if not result: user["created"] = current_time() UserDB.sync_user(user, "facebook" + oauth_id, sync_callback) @mongo_callback(self) def save_key_callback(result): UserDB.find_user(user_id, find_callback) user_id = "facebook" + oauth_id indico_key = user_hash(user_id) AuthDB.save_key(indico_key, user_id, save_key_callback) @auth def check(self, data): self.respond(True) AuthRoute = (r"/auth/(?P<action>[a-zA-Z]+)?", AuthHandler)
ifduyue/sentry
src/sentry/south_migrations/0373_backfill_projectteam.py
Python
bsd-3-clause
95,648
0.007935
# -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import DataMigration from django.db import IntegrityError, models, transaction from sentry.utils.query import RangeQuerySetWrapperWithProgressBar class Migration(DataMigration): # Flag to indicate if this migration is too risky # to run online and needs to be coordinated for offline is_dangerous = True def forwards(self, orm): db.commit_transaction() try: self._forwards(orm) except Exception: db.start_transaction() raise db.start_transaction() def _forwards(self, orm): "Write your forwards methods here." projects = orm.Project.objects.all().select_related('team') for project in RangeQuerySetWrapperWithProgressBar(projects): try: with transaction.atomic(): orm.ProjectTeam.objects.create( project=project, team=project.team, ) except IntegrityError: pass def backwards(self, orm): "Write your backwards methods here." models = { 'sentry.activity': { 'Meta': {'object_name': 'Activity'}, 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}), 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}), 'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}) }, 'sentry.apiapplication': { 'Meta': {'object_name': 'ApiApplication'}, 'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'client_id': ('django.db.models.fields.CharField', [], {'default': "'8c2727381d5f41eda719ebab3ab8bd920ecb60c63bd1417e892dba232b472b65'", 'unique': 'True', 'max_length': '64'}), 'client_secret': ('sentry.db.models.fields.encrypted.EncryptedTextField', [], {'default': "'12f0b0da1ec14b8588328a66b0bf5458327473702e7c400a8865d1c78b4233d8'"}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'homepage_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'default': "'Famous Goose'", 'max_length': '64', 'blank': 'True'}), 'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}), 'privacy_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}), 'redirect_uris': ('django.db.models.fields.TextField', [], {}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}), 'terms_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}) }, 'sentry.apiauthorization': { 'Meta': {'unique_together': "(('user', 'application'),)", 'object_name': 'ApiAuthorization'}, 'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': 
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}), 'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}) }, 'sentry.apigrant': { 'Meta': {'object_name': 'ApiGrant'}, 'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']"}), 'code': ('django.db.models.fields.CharField', [], {'default':
"'315c19be5328426e81e059c18e6064b1'", 'max_length': '64', 'db_index': 'True'}), 'expires_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2018, 1, 4, 0, 0)', 'db_index': 'True'}), 'id': ('sentry.db.models.fie
lds.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'redirect_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}), 'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}) }, 'sentry.apikey': { 'Meta': {'object_name': 'ApiKey'}, 'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}), 'label': ('django.db.models.fields.CharField', [], {'default': "'Default'", 'max_length': '64', 'blank': 'True'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Organization']"}), 'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}), 'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}) }, 'sentry.apitoken': { 'Meta': {'object_name': 'ApiToken'}, 'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'expires_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2018, 2, 3, 0, 0)', 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'refresh_token': ('django.db.models.fields.CharField', [], {'default': "'4db40bfa5d9b417ab64c308c09e866473e3e1cfa6e3c4853a687c278992b2ac6'", 'max_length': '64', 'unique': 'True', 'null': 'True'}), 'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}), 'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}), 'token': ('django.db.models.fields.CharField', [], {'default': "'478a2cc485184b599ed1f05fd17eb33483d617d9d3234e8dbccc2f3f3767ba19'", 'unique': 'True', 'max_length': '64'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}) }, 'sentry.auditlogentry': { 'Me
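The backfill in _forwards above reduces to a small, re-runnable pattern: wrap each insert in its own atomic block and treat IntegrityError as "row already exists". A sketch with the migration machinery stripped away:

from django.db import IntegrityError, transaction

def backfill_project_teams(projects, ProjectTeam):
    for project in projects:
        try:
            # per-row savepoint, so one duplicate doesn't poison the rest
            with transaction.atomic():
                ProjectTeam.objects.create(project=project, team=project.team)
        except IntegrityError:
            pass  # already backfilled; safe to skip on re-run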
fperignon/siconos
CI/machinery/ci_task.py
Python
apache-2.0
11,193
0.000893
"""CI task management Note : this should remain independant of siconos. """ import os import shutil from subprocess import check_call, CalledProcessError import time import multiprocessing class TimeoutException(Exception): pass class RunableProcessing(multiprocessing.Process): def __init__(self, func, *args, **kwargs): self.queue = multiprocessing.Queue(maxsize=1) args = (func,) + args multiprocessing.Process.__init__(self, target=self.run_func, args=args, kwargs=kwargs) def run_func(self, func, *args, **kwargs): try: result = func(*args, **kwargs) self.queue.put((True, result)) except Exception as e: self.queue.put((False, e)) def done(self): return self.queue.full() def result(self): return self.queue.get() def timeout(seconds, forc
e_kill=True): if seconds == 0: def wrapper(function): return function return wrapper else: def wrapper(function): def inner(*args, **kwargs): now = time.time() proc = RunableProcessing(function, *args, **kwargs) proc.start() proc.join(seconds) if proc.is_alive(): if force_kill: proc.terminate() runtime = in
t(time.time() - now) raise TimeoutException( 'timed out after {0} seconds'.format(runtime)) if not proc.done(): proc.terminate() assert proc.done() success, result = proc.result() if success: # return time.time() - now, result return result else: # raise time.time() - now, result raise result return inner return wrapper # Some builds take really a long time @timeout(10000) def call(*args, **kwargs): try: return_code = check_call(*args, **kwargs) except CalledProcessError as error: print(error) return_code = 1 return return_code class CiTask(object): def __init__(self, mode='Continuous', build_configuration='Release', docker=False, distrib=None, ci_config=None, fast=True, pkgs=None, srcs=None, targets=dict(), cmake_cmd='cmake', cmake_args=[], make_cmd='make', make_args=[], directories=[]): """Create a task, see examples in tasks.py. """ self._docker = docker self._fast = fast self._distrib = distrib self._mode = mode self._build_configuration = build_configuration self._ci_config = ci_config self._pkgs = pkgs self._srcs = srcs self._targets = targets self._cmake_cmd = cmake_cmd self._make_cmd = make_cmd self._cmake_args = cmake_args self._make_args = make_args self._directories = directories def template_maker(self): assert False return '-'.join(self._pkgs) def build_dir(self, src): """Return a name depending on src, distrib and task name """ if self._distrib is None: assert self._docker is False import platform distrib = platform.platform() else: distrib = self._distrib if isinstance(self._ci_config, str): ci_config_name = self._ci_config else: ci_config_name = '-'.join(self._ci_config) return ('build-' + src.replace('.', '-') + distrib.replace(':', '-') + '-' + ci_config_name).replace('/', '-') def templates(self): return ','.join(self._pkgs) def copy(self): """ duplicate a task and possibly extend configuration of the result. 
""" def init(mode=self._mode, docker=self._docker, build_configuration=self._build_configuration, distrib=self._distrib, ci_config=self._ci_config, fast=self._fast, pkgs=self._pkgs, srcs=self._srcs, targets=self._targets, cmake_cmd=self._cmake_cmd, make_cmd=self._make_cmd, cmake_args=self._cmake_args, make_args=self._make_args, add_directories=None, add_pkgs=None, remove_pkgs=None, add_srcs=None, remove_srcs=None, add_targets=None, remove_targets=None): # WARNING: remember that default arg are mutable in python # http://docs.python-guide.org/en/latest/writing/gotchas/ new_targets = self._targets new_distrib = None new_srcs = srcs if type(distrib) == list: new_distrib = ':'.join(distrib) else: if distrib is not None: assert type(distrib) == str new_distrib = distrib if add_pkgs is not None: pkgs = self._pkgs + add_pkgs if remove_pkgs is not None: pkgs = list(filter(lambda p: p not in remove_pkgs, pkgs)) if add_srcs is not None: new_srcs = self._srcs + add_srcs if remove_srcs is not None: new_srcs = list(filter(lambda p: p not in remove_srcs, srcs)) if add_targets is not None: new_targets[add_targets[0]] += add_targets[1:] if remove_targets is not None: new_targets[remove_targets[0]] = list( filter(lambda p: p not in remove_targets[1:], new_targets[remove_targets[0]])) if add_directories is not None: directories = self._directories + add_directories else: directories = self._directories if type(targets) == list: new_targets[targets[0]] = targets[1:] else: assert type(targets) == dict new_targets = targets from copy import deepcopy new_task = deepcopy(self) new_task.__init__(mode=mode, build_configuration=build_configuration, docker=docker, distrib=new_distrib, ci_config=ci_config, fast=fast, pkgs=pkgs, srcs=new_srcs, targets=new_targets, cmake_cmd=cmake_cmd, cmake_args=cmake_args, make_cmd=make_cmd, make_args=make_args, directories=directories) return new_task return init def run(self, root_dir, dry_run=False): return_code = 0 for src in self._srcs: # --- Path to CMakeLists.txt --- full_src = os.path.join(root_dir, src) # --- Create build dir for src config --- bdir = self.build_dir(src) if not dry_run: if os.path.exists(bdir): shutil.rmtree(bdir, ignore_errors=True) os.makedirs(bdir) # hm python is so lovely if isinstance(self._ci_config, str): ci_config_args = self._ci_config else: ci_config_args = ','.join(self._ci_config) # --- List of arguments for cmake command --- cmake_args = self._cmake_args make_args = self._make_args if self._docker: cmake_args += ['-DMODE={0}'.format(self._mode), '-DCI_CONFIG={0}'.format(ci_config_args), '-DWITH_DOCKER=1',
ubuntu-core/snapcraft
tests/unit/plugins/v2/test_meson.py
Python
gpl-3.0
3,544
0.000847
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*- # # Copyright (C) 2020 Canonical Ltd # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from testtools.matchers import Equals from testtools import TestCase from snapcraft.plugins.v2.meson import MesonPlugin class MesonPluginTest(TestCase): def test_schema(self): schema = MesonPlugin.get_schema() self.assertThat( schema, Equals( { "$schema": "http://json-schema.org/draft-04/schema#", "additionalProperties": False, "properties": {
"meson-parameters": { "default": [], "items": {"type": "string"}, "type": "array", "uniqueItems": True, }, "meson-version": {"default": "", "type": "string"}, },
"required": ["source"], "type": "object", } ), ) def test_get_build_packages(self): plugin = MesonPlugin(part_name="my-part", options=lambda: None) self.assertThat( plugin.get_build_packages(), Equals( { "ninja-build", "gcc", "python3-pip", "python3-setuptools", "python3-wheel", } ), ) def test_get_build_environment(self): plugin = MesonPlugin(part_name="my-part", options=lambda: None) self.assertThat(plugin.get_build_environment(), Equals(dict())) def test_get_build_commands(self): class Options: meson_parameters = list() meson_version = "" plugin = MesonPlugin(part_name="my-part", options=Options()) self.assertThat( plugin.get_build_commands(), Equals( [ "/usr/bin/python3 -m pip install -U meson", "[ ! -d .snapbuild ] && meson .snapbuild", "(cd .snapbuild && ninja)", '(cd .snapbuild && DESTDIR="${SNAPCRAFT_PART_INSTALL}" ninja install)', ] ), ) def test_get_build_commands_with_options(self): class Options: meson_parameters = ["--buildtype=release"] meson_version = "2.2" plugin = MesonPlugin(part_name="my-part", options=Options()) self.assertThat( plugin.get_build_commands(), Equals( [ "/usr/bin/python3 -m pip install -U meson==2.2", "[ ! -d .snapbuild ] && meson --buildtype=release .snapbuild", "(cd .snapbuild && ninja)", '(cd .snapbuild && DESTDIR="${SNAPCRAFT_PART_INSTALL}" ninja install)', ] ), )
ajhalme/kbsim
lib/Vec2d.py
Python
gpl-3.0
10,798
0.007409
import operator import math class Vec2d(object): """2d vector class, supports vector and scalar operators, and also provides a bunch of high level functions """ __slots__ = ['x', 'y'] def __init__(self, x_or_pair, y = None): if y == None: self.x = x_or_pair[0] self.y = x_or_pair[1] else: self.x = x_or_pair self.y = y def __len__(self): return 2 def __getitem__(self, key): if key == 0: return self.x elif key == 1: return self.y else: raise IndexError("Invalid subscript "+str(key)+" to Vec2d") def __setitem__(self, key, value): if key == 0: self.x = value elif key == 1: self.y = value else: raise IndexError("Invalid subscript "+str(key)+" to Vec2d") # String representaion (for debugging) def __repr__(self): return 'Vec2d(%s, %s)' % (self.x, self.y) # Comparison def __eq__(self, other): if hasattr(other, "__getitem__") and len(other) == 2: return self.x == other[0] and self.y == other[1] else: return False def __ne__(self, other): if hasattr(other, "__getitem__") and len(other) == 2: return self.x != other[0] or self.y != other[1] else: return True def __nonzero__(self): return self.x or self.y # Generic operator handlers def _o2(self, other, f): "Any two-operator operation where the left operand is a Vec2d" if isinstance(other, Vec2d): return Vec2d(f(self.x, other.x), f(self.y, other.y)) elif (hasattr(other, "__getitem__")): return Vec2d(f(self.x, other[0]), f(self.y, other[1])) else: return Vec2d(f(self.x, other), f(self.y, other)) def _r_o2(self, other, f): "Any two-operator operation where the right operand is a Vec2d" if (hasattr(other, "__getitem__")): return Vec2d(f(other[0], self.x), f(other[1], self.y)) else: return Vec2d(f(other, self.x), f(other, self.y)) def _io(self, other, f): "inplace operator" if (hasattr(other, "__getitem__")): self.x = f(self.x, other[0]) self.y = f(self.y, other[1]) else: self.x = f(self.x, other) self.y = f(self.y, other) return self # Addition def __add__(self, other): if isinstance(other, Vec2d): return Vec2d(self.x + other.x, self.y + other.y) elif hasattr(other, "__getitem__"): return Vec2d(self.x + other[0], self.y + other[1]) else: return Vec2d(self.x + other, self.y + other) __radd__ = __add__ def __iadd__(self, other): if isinstance(other, Vec2d): self.x += other.x self.y += other.y elif hasattr(other, "__getitem__"): self.x += other[0] self.y += other[1] else: self.x += other self.y += other return self # Subtraction def __sub__(self, other): if isinstance(other, Vec2d): return Vec2d(self.x - other.x, self.y - other.y) elif (hasattr(other, "__getitem__")): return Vec2d(self.x - other[0], self.y - other[1]) else: return Vec2d(self.x - other, self.y - other) def __rsub__(self, other): if isinstance(other, Vec2d): return Vec2d(other.x - self.x, other.y - self.y) if (hasattr(other, "__getitem__")): return Vec2d(other[0] - self.x, other[1] - self.y) else: return Vec2d(other - self.x, other - self.y) def __isub__(self, other): if isinstance(other, Vec2d): self.x -= other.x self.y -= other.y elif (hasattr(other, "__getitem__")): self.x -= other[0] self.y -= other[1] else: self.x -= other self.y -= other return self # Multiplication def __mul__(self, other): if isinstance(other, Vec2d): return Vec2d(self.x*other.y, self.y*other.y) if (hasattr(other, "__getitem__")): return Vec2d(self.x*other[0], self.y*other[1]) else: return Vec2d(self.x*other, self.y*other) __rmul__ = __mul__ def __imul__(self, other): if isinstance(other, Vec2d): self.x *= other.x self.y *= other.y elif (hasattr(other, "__getitem__")): self.x *= other[0] self.y *= other[1] else: self.x 
*= other self.y *= other return self # Division def __div__(self, other): return self._o2(other, operator.div) def __rdiv__(self, other): return self._r_o2(other, operator.div) def __idiv__(self, other): return self._io(other, operator.div) def __floordiv__(self, other): return self._o2(other, operator.floordiv) def __rfloordiv__(self, other): return self._r_o2(other, operator.floordiv) def __ifloordiv__(self, other): return self._io(other, operator
.floordiv) def __truediv__(self, other): return self._o2(other, operator.truediv) def __rtruediv__(self, other): return self._r_o2(other, operator.truediv) def __itruediv__(self, other): return self._io(other, operator.truediv) # Modulo def __mod__(self, o
ther): return self._o2(other, operator.mod) def __rmod__(self, other): return self._r_o2(other, operator.mod) def __divmod__(self, other): return self._o2(other, operator.divmod) def __rdivmod__(self, other): return self._r_o2(other, operator.divmod) # Exponentation def __pow__(self, other): return self._o2(other, operator.pow) def __rpow__(self, other): return self._r_o2(other, operator.pow) # Bitwise operators def __lshift__(self, other): return self._o2(other, operator.lshift) def __rlshift__(self, other): return self._r_o2(other, operator.lshift) def __rshift__(self, other): return self._o2(other, operator.rshift) def __rrshift__(self, other): return self._r_o2(other, operator.rshift) def __and__(self, other): return self._o2(other, operator.and_) __rand__ = __and__ def __or__(self, other): return self._o2(other, operator.or_) __ror__ = __or__ def __xor__(self, other): return self._o2(other, operator.xor) __rxor__ = __xor__ # Unary operations def __neg__(self): return Vec2d(operator.neg(self.x), operator.neg(self.y)) def __pos__(self): return Vec2d(operator.pos(self.x), operator.pos(self.y)) def __abs__(self): return Vec2d(abs(self.x), abs(self.y)) def __invert__(self): return Vec2d(-self.x, -self.y) # vectory functions def get_length_sqrd(self): return self.x**2 + self.y**2 def get_length(self): return math.sqrt(self.x**2 + self.y**2) def __setlength(self, value): length = self.get_length() self.x *= value/length self.y *= value/length length = property(get_length, __setlength, None, "gets or sets the magnitude of the vector") def rotate(self, angle_degrees): radians = math.radians(angle_degrees) cos = math.cos(radians) sin = math.sin(radians) x = self.x*cos - self.y*sin y = self.x*sin + self.y*cos self.x = x self.y = y def rotated(self, angle_degrees): radians = mat
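A quick usage sketch for Vec2d (assuming the class above is importable):

v = Vec2d(3, 4)
print(v.get_length())   # 5.0
print(v + (1, 1))       # Vec2d(4, 5); plain tuples work via __getitem__
v.rotate(90)
print(v)                # approximately Vec2d(-4.0, 3.0)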
suutari-ai/shoop
shuup_tests/simple_cms/test_plugins.py
Python
agpl-3.0
2,133
0.000469
# This file is part of Shuup. # # Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved. # # This source code is licensed under the OSL-3.0 license found in the # LICENSE file in the root directory of this source tree. import pytest from shuup.simple_cms.plugins import PageLinksPlugin from shuup_tests.front.fixtures import get_jinja_context from .utils import create_page @pytest.mark.django_db @pytest.mark.parametrize("show_all_pages", [True, False]) def test_page_links_plugin_hide_expired(show_all_pages): """ Make sure plugin correctly filters out expired pages based on plugin configuration """ context = get_jinja_context() page = create_page(eternal=True, visible_in_menu=True) another_page = create_page(eternal=True, visible_in_menu=True) plugin = PageLinksPlugin({"pages": [page.pk, another_page.pk], "show_all_pages": show_all_pages}) assert page in plugin.get_context_data(context)["pages"] page.available_from = None page.available_to = None page.save() assert page in plugin.get_context_data(context)["pages"] plugin.config["hide_expired"] = True pages_in_context = plugin.get_context_data(context)["pages"] assert page not in pages_in_context assert another_page in pages_in_context @pytest.mark.django_db def test_page_links_plugin_show_all(): """ Test that show_all_pages forces plugin to return all visible pages """ context = get_jinja_context() page = create_page(eternal=True, visible_in_menu=True) plugin = PageLinksPlugin({"show_all_pages": False}) assert not plu
gin.get_context_data(context)["pages"] plugin = PageLinksPlugin({"show_all_pages": True}) assert page in plugin.get_context_data(context)["pages"] @pytest.mark.django_db def test_plugin_renders_absolute_links(): """ Test that the plugin renders only absolute links. """ context = get_jinja_context() page = create_page(eternal=True,
visible_in_menu=True) absolute_link = "/%s" % page.url plugin = PageLinksPlugin({"show_all_pages": True}) assert absolute_link in plugin.render(context)
Matt-Stammers/Python-Foundations
Simple Functions/Reversing_split_words.py
Python
gpl-3.0
450
0.011111
# What if you j
ust wanted to split a sentence into words and return the words in reverse order: # You could do the following: def reverse(st): words = list(st.split(' ')) print(words) rev_word = words[::-1] print(rev_word) return ' '.join(rev_word) # but this can be condensed to one line: def reverse(st): return ' '.join(st.split(' ')[::-1]) # or like this: def reverse(st): return ' '.join(reversed
(st.split(' ')))
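For example (any of the three definitions above behaves the same):

print(reverse("hello world"))  # world hello
assert reverse("the quick brown fox") == "fox brown quick the"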
jllivermont/hotjar-task
survey/notifier.py
Python
apache-2.0
457
0
import os import pusher CHANNEL =
"response-updates" client = pusher.Pusher( app_id=os.environ.get("PUSHER_APP_ID"), key=os.environ.get("PUSHER_KEY"), secret=os.environ.get("PUSHER_SECRET"), cluster='eu', ssl=True ) if "IS_PROD" in os.environ else None def notify(msg_type, payload): """Notifies that a SurveyResponse has been create
d or modified""" if client is not None: client.trigger(CHANNEL, msg_type, payload)
ccc-ffm/christian
modules/hq.py
Python
gpl-3.0
3,293
0.00334
import os from datetime import datetime class HQ(object): def __init__(self, fpath, kpath): self.people_in_hq = 0 self.keys_in_hq = 0 self.joined_users = [] self.hq_status = 'unknown' self.status_since = datetime.now().strftime('%Y-%m-%d %H:%M') self.is_clean = True self.joined_keys = [] self.status = None self.fpath = fpath if os.path.isfile(fpath) and os.path.getsize(fpath) > 0: with open(fpath, 'r') as userfile: self.joined_users = [line.strip() for line in userfile] self.is_clean = True if self.joined_users[0] == "clean" else False self.joined_users.pop(0) self.joined_users = list(set(self.joined_users)) self.people_in_hq = len(self.joined_users) if os.path.isfile(kpath) and os.path.getsize(kpath) > 0: with open(kpath, 'r') as statefile: keys = [line.strip() for line in statefile] keys = list(set(keys)) for user in self.joined_users: if user in keys: self.joined_keys.append(user) self.keys_in_hq = len(self.joined_keys) def hq_set(self, status): if status == "open": self.hq_open(False) elif status == "closed": self.hq_close(False) elif status == "private": self.hq_private(False) def update_time(self): self.status_since = datetime.now().strftime('%Y-%m-%d %H:%M') def hq_open(self, setStatus=True): self.hq_status = 'open' self.update_time() if setStatus: self.status.setStatus('open') def hq_close(self, setStatus=True): self.hq_status = 'closed' self.update_time() self.people_in_hq = 0 self.keys_in_hq = 0 del(self.joined_users[:]) del(self.joined_keys[:]) if setStatus: self.status.setStatus('closed') def hq_private(self, setStatus=True): self.hq_status = 'private' self.update_time() if setStatus: self.status.setStatus('private') def hq_clean(self): self.is_cle
an = True self.savestates() def hq_dirty(self): self.is_clean = False self.savestates() def hq_join(self,user): self.people_in_hq += 1 self.joined_users.append(user) self.savestates() def hq_leave(self,user): if user in self.joined_users: self.people_in_hq -=1 self.joined_u
sers.remove(user) self.savestates() def hq_keyjoin(self,user): self.keys_in_hq +=1 self.joined_keys.append(user) self.hq_join(user) def hq_keyleave(self,user): if user in self.joined_keys: self.keys_in_hq -=1 self.joined_keys.remove(user) self.hq_leave(user) def get_hq_status(self): return self.hq_status, self.status_since def get_hq_clean(self): return self.is_clean def savestates(self): userfile=open(self.fpath,'w+') userfile.write("clean\n" if self.is_clean else "dirty\n") for user in set(self.joined_users): userfile.write("%s\n" % user) userfile.close()
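Usage sketch for HQ (paths are illustrative; hq_set is used here because it skips the status backend, which stays None until wired up):

hq = HQ("/tmp/hq_users", "/tmp/hq_keys")
hq.hq_set("open")       # bypasses self.status, which is still None
hq.hq_join("alice")
hq.hq_keyjoin("bob")    # bob joins and brings a key
print(hq.people_in_hq, hq.keys_in_hq)  # 2 1
print(hq.get_hq_status())              # ('open', '<timestamp>')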
felinx/poweredsites
poweredsites/libs/decorators.py
Python
apache-2.0
3,016
0.002984
# -*- coding: utf-8 -*- # # Copyright(c) 2010 poweredsites.org # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import functools import urllib from torna
do.web import HTTPError from tornado.options import options from poweredsites.libs import cache # cache decorator alias def admin(method): """Decorate methods with this to restrict access to site admins.""" @functools.wraps(method) def wrapper(self, *args, **kwargs): if no
t self.current_user: if self.request.method == "GET": url = self.get_login_url() if "?" not in url: url += "?" + urllib.urlencode(dict(next=self.request.full_url())) self.redirect(url) return raise HTTPError(403) elif not self.is_admin: if self.request.method == "GET": self.redirect(options.home_url) return raise HTTPError(403) else: return method(self, *args, **kwargs) return wrapper def staff(method): """Decorate with this method to restrict to site staff.""" @functools.wraps(method) def wrapper(self, *args, **kwargs): if not self.current_user: if self.request.method == "GET": url = self.get_login_url() if "?" not in url: url += "?" + urllib.urlencode(dict(next=self.request.full_url())) self.redirect(url) return raise HTTPError(403) elif not self.is_staff: if self.request.method == "GET": self.redirect(options.home_url) return raise HTTPError(403) else: return method(self, *args, **kwargs) return wrapper def authenticated(method): """Decorate methods with this to require that the user be logged in. Fix the redirect url with full_url. Tornado use uri by default. """ @functools.wraps(method) def wrapper(self, *args, **kwargs): if not self.current_user: if self.request.method == "GET": url = self.get_login_url() if "?" not in url: url += "?" + urllib.urlencode(dict(next=self.request.full_url())) self.redirect(url) return raise HTTPError(403) return method(self, *args, **kwargs) return wrapper
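Usage sketch: the decorators expect a tornado.web.RequestHandler exposing current_user plus the is_admin / is_staff flags (the handler below is hypothetical).

import tornado.web
from poweredsites.libs.decorators import admin

class AdminDashboardHandler(tornado.web.RequestHandler):
    # the decorator consults self.current_user and self.is_admin,
    # so the handler (or a base class) must provide both
    @admin
    def get(self):
        self.write("admins only")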
mpetyx/pyrif
3rdPartyLibraries/FuXi-master/test/additionalDLPTests.py
Python
mit
8,179
0.002078
# import copy # import sys import unittest from rdflib.graph import Graph # from rdflib.namespace import NamespaceManager from rdflib import ( # RDF, # RDFS, Namespace, # Variable, # Literal, # URIRef, # BNode ) from rdflib.util import first from FuXi.Rete.RuleStore import ( # N3RuleStore, SetupRuleStore ) # from FuXi.Rete import ReteNetwork # from FuXi.Horn.PositiveConditions import PredicateExtentFactory # from FuXi.Rete.RuleStore import N3RuleStore # from FuXi.Rete.Util import generateTokenSet from FuXi.Syntax.InfixOWL import * from FuXi.DLP import ( SKOLEMIZED_CLASS_NS, # MapDLPtoNetwork, # MalformedDLPFormulaError ) EX_NS = Namespace('http://example.com/') EX = ClassNamespaceFactory(EX_NS) class AdditionalDescriptionLogicTests(unittest.TestCase): def setUp(self): self.ontGraph = Graph() self.ontGraph.bind('ex', EX_NS) self.ontGraph.bind('owl', OWL_NS) Individual.factoryGraph = self.ontGraph def testGCIConDisjunction(self): conjunct = EX.Foo & (EX.Omega | EX.Alpha) (EX.Bar) += conjunct ruleStore, ruleGraph, network = SetupRuleStore( makeNetwork=True) rules = network.setupDescriptionLogicProgramming( self.ontGraph, derivedPreds=[EX_NS.Bar], addPDSemantics=False, constructNetwork=False) self.assertEqual( repr(rules), 'set([Forall ?X ( ex:Bar(?X) :- And( ex:Foo(?X) ex:Alpha(?X) ) ), Forall ?X ( ex:Bar(?X) :- And( ex:Foo(?X) ex:Omega(?X) ) )])') self.assertEqual(len(rules), 2, "There should be 2 rules") # def testMalformedUnivRestriction(self): # someProp = Property(EX_NS.someProp) # conjunct = EX.Foo & (someProp|only|EX.Omega) # conjunct.identifier = EX_NS.Bar # ruleStore,ruleGraph,network=SetupRuleStore(makeNetwork=True) # self.failUnlessRaises(MalformedDLPFormulaError, # network.setupDescriptionLogicProgramming, # self.ontGraph, # derivedPreds=[EX_NS.Bar], # addPDSemantics=False, # constructNetwork=False) def testBasePredicateEquivalence(self): (EX.Foo).equivalentClass = [EX.Bar] self.assertEqual(repr(Class(EX_NS.Foo)), "Class: ex:Foo EquivalentTo: ex:Bar") ruleStore, ruleGraph, network = SetupRuleStore( makeNetwork=True) rules = network.setupDescriptionLogicProgramming( self.ontGraph, addPDSemantics=False, constructNetwork=False) self.assertEqual( repr(rules), 'set([Forall ?X ( ex:Bar(?X) :- ex:Foo(?X) ), Forall ?X ( ex:Foo(?X) :- ex:Bar(?X) )])') self.assertEqual(len(rules), 2, "There should be 2 rules") def testExistentialInRightOfGCI(self): someProp = Property(EX_NS.someProp) existential = someProp | some | EX.Omega existential += EX.Foo self.assertEqual( repr(Class(EX_NS.Foo)), "Class: ex:Foo SubClassOf: ( ex:someProp SOME ex:Omega )") ruleStore, ruleGraph, network = SetupRuleStore( makeNetwork=True) rules = network.setupDescriptionLogicProgramming( self.ontGraph, addPDSemantics=False, constructNetwork=False) # self.assertEqual(len(rules), # 1, # "There should be 1 rule: %s"%rules) # rule=rules[0] # self.assertEqual(repr(rule.formula.body), # "ex:Foo(?X)") # self.assertEqual(len(rule.formula.head.formula), # 2) def testValueRestrictionInLeftOfGCI(self): someProp = Property(EX_NS.someProp) leftGCI = (someProp | value | EX.fish) & EX.Bar foo = EX.Foo foo += leftGCI self.assertEqual(repr(leftGCI), 'ex:Bar THAT ( ex:someProp VALUE <http://example.com/fish> )') ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True) rules = network.setu
pDescriptionLogicProgramming( self.ontGraph, addPDSemantics=False, constructNetwork=False) self.assertEqual( repr(rules), "set([Forall ?X ( ex:Foo(?X) :- " + \ "And( ex:someProp(?X ex
:fish) ex:Bar(?X) ) )])") def testNestedConjunct(self): nestedConj = (EX.Foo & EX.Bar) & EX.Baz (EX.Omega) += nestedConj ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True) rules = network.setupDescriptionLogicProgramming( self.ontGraph, addPDSemantics=False, constructNetwork=False) for rule in rules: if rule.formula.head.arg[-1] == EX_NS.Omega: self.assertEqual(len(rule.formula.body), 2) skolemPredicate = [term.arg[-1] for term in rule.formula.body if term.arg[-1].find(SKOLEMIZED_CLASS_NS) != -1] self.assertEqual(len(skolemPredicate), 1, "Couldn't find skolem unary predicate!") else: self.assertEqual(len(rule.formula.body), 2) skolemPredicate = rule.formula.head.arg[-1] self.failUnless( skolemPredicate.find(SKOLEMIZED_CLASS_NS) != -1, "Head should be a unary skolem predicate") skolemPredicate = skolemPredicate[0] def testOtherForm(self): contains = Property(EX_NS.contains) locatedIn = Property(EX_NS.locatedIn) topConjunct = ( EX.Cath & (contains | some | (EX.MajorStenosis & (locatedIn | value | EX_NS.LAD))) & (contains | some | (EX.MajorStenosis & (locatedIn | value | EX_NS.RCA)))) (EX.NumDisV2D) += topConjunct from FuXi.DLP.DLNormalization import NormalFormReduction NormalFormReduction(self.ontGraph) ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True) rules = network.setupDescriptionLogicProgramming( self.ontGraph, derivedPreds=[EX_NS.NumDisV2D], addPDSemantics=False, constructNetwork=False) from FuXi.Rete.Magic import PrettyPrintRule for rule in rules: PrettyPrintRule(rule) def testOtherForm2(self): hasCoronaryBypassConduit = Property(EX_NS.hasCoronaryBypassConduit) ITALeft = EX.ITALeft ITALeft += ( hasCoronaryBypassConduit | some | EnumeratedClass(members=[ EX_NS.CoronaryBypassConduit_internal_thoracic_artery_left_insitu, EX_NS.CoronaryBypassConduit_internal_thoracic_artery_left_free])) from FuXi.DLP.DLNormalization import NormalFormReduction self.assertEquals( repr(Class(first(ITALeft.subSumpteeIds()))), "Some Class SubClassOf: Class: ex:ITALeft ") NormalFormReduction(self.ontGraph) self.assertEquals( repr(Class(first(ITALeft.subSumpteeIds()))), 'Some Class SubClassOf: Class: ex:ITALeft . EquivalentTo: ( ( ex:hasCoronaryBypassConduit VALUE <http://example.com/CoronaryBypassConduit_internal_thoracic_artery_left_insitu> ) OR ( ex:hasCoronaryBypassConduit VALUE <http://example.com/CoronaryBypassConduit_internal_thoracic_artery_left_free> ) )') if __name__ == '__main__': unittest.main()
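These tests lean heavily on FuXi's InfixOWL operator overloading; a compact sketch of the idiom against a throwaway graph (same imports as the test): '&' builds an intersection, '| some |' an existential restriction, and '+=' asserts the right-hand expression as a subclass of the left-hand class.

from rdflib.graph import Graph
from rdflib import Namespace
from FuXi.Syntax.InfixOWL import *

EX_NS = Namespace('http://example.com/')
EX = ClassNamespaceFactory(EX_NS)
Individual.factoryGraph = Graph()   # axioms are written into this graph

someProp = Property(EX_NS.someProp)
conjunct = EX.Foo & (someProp | some | EX.Omega)  # Foo AND (someProp SOME Omega)
bar = EX.Bar
bar += conjunct   # GCI: the conjunct is a subclass of Bar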
ooovector/qtlab_replacement
plotting_scripts/gain_compression_upd.py
Python
gpl-3.0
892
0.045964
import tables from numpy import * from matplotlib.pyplot import * from matplotlib.widgets import Button #Open HDF5 data file db = lambda x: 20*log10(x) ax1 = sub
plot(2,2,1) ax2 = subplot(2,2,2) ax3 = subplot(2,2,3) def update(event): f = tables.open_file('data.h5', mode='r') data_2d = array(f.root.data) c
_coord = array(f.root.column_coordinate) r_coord = array(f.root.row_coordinate) ind = int(len(c_coord)/2)+2 ref = array(f.root.ref) data_2d = data_2d/ref ax1.clear() m1=ax1.pcolormesh( c_coord, r_coord, db(abs(data_2d)) ) ax2.clear() m2 = ax2.plot( c_coord, db(abs(data_2d[0])) ) ax2.grid() ax3.clear() m3 = ax3.plot( r_coord, db(abs(data_2d[::,ind] ))) ax3.grid() gcf().canvas.draw() f.close() return m1, m2, m3 m1,m2,m3 = update(0) colorbar(m1, ax=ax1) ax_upd = axes([0.81, 0.05, 0.1, 0.075]) b_upd = Button(ax_upd, 'Update') b_upd.on_clicked(update) show()
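The db helper defined above converts a linear amplitude ratio to decibels; a worked check:

from numpy import log10

db = lambda x: 20 * log10(x)   # amplitude ratio -> decibels
assert db(10) == 20.0          # a 10x voltage gain is 20 dB
assert db(1) == 0.0            # unity gain is 0 dB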
arborh/tensorflow
tensorflow/python/keras/mixed_precision/experimental/layer_correctness_test.py
Python
apache-2.0
5,874
0.003745
# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests various Layer subclasses have correct outputs with mixed precision.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.distribute import mirrored_strategy from tensorflow.python.eager import context from tensorflow.python.framework import test_util from tensorflow.python.keras import keras_parameterized from tensorflow.python.keras import layers from tensorflow.python.keras import models from tensorflow.python.keras import testing_utils from tensorflow.python.keras.layers import recurrent from tensorflow.python.keras.layers import recurrent_v2 from tensorflow.python.keras.mixed_precision.experimental import policy from tensorflow.python.platform import test def create_mirrored_strategy(): if context.num_gpus() >= 1: return mirrored_strategy.MirroredStrategy(['cpu:0', 'gpu:0']) else: return mirrored_strategy.MirroredStrategy(['cpu:0']) @test_util.run_all_in_graph_and_eager_modes class LayerCorrectnessTest(keras_parameterized.TestCase): def _create_model_from_layer(self, layer, input_shape): x = layers.Input(batch_input_shape=input_shape) y = layer(x) model = models.Model(x, y) model.compile('sgd', 'mse') return model def _test_layer(self, f32_layer, input_shape): """Tests a layer by comparing the float32 and mixed precision weights. A float32 layer, a mixed precision layer, a distributed float32 layer, and a distributed mixed precision layer are run. The four layers are identical other than their dtypes and distribution strategies. The weights after running fit() are asserted to be close. Running the distributed float32 layer does not test mixed precision but we still test it for debugging purposes. If the distributed mixed precision layer fails, it's easier to debug if you know whether the issue also occurs in the distributed float32 layer. A
rgs: f32_layer: A float32 layer. The other three layers will automatically be created
from this input_shape: The shape of the inputs to the layer, including the batch dimension. """ strategy = create_mirrored_strategy() # Create the layers assert f32_layer.dtype == f32_layer._compute_dtype == 'float32' config = f32_layer.get_config() distributed_f32_layer = f32_layer.__class__.from_config(config) config['dtype'] = policy.Policy('mixed_float16') mp_layer = f32_layer.__class__.from_config(config) distributed_mp_layer = f32_layer.__class__.from_config(config) # Compute per_replica_input_shape for the distributed models global_batch_size = input_shape[0] assert global_batch_size % strategy.num_replicas_in_sync == 0 per_replica_batch_size = ( global_batch_size // strategy.num_replicas_in_sync) per_replica_input_shape = list(input_shape) per_replica_input_shape[0] = per_replica_batch_size # Create the models f32_model = self._create_model_from_layer(f32_layer, input_shape) mp_model = self._create_model_from_layer(mp_layer, input_shape) with strategy.scope(): distributed_f32_model = self._create_model_from_layer( distributed_f32_layer, per_replica_input_shape) distributed_mp_model = self._create_model_from_layer( distributed_mp_layer, per_replica_input_shape) # Set all model weights to the same values f32_weights = f32_model.get_weights() for model in mp_model, distributed_f32_model, distributed_mp_model: model.set_weights(f32_weights) # Run fit() on models x = np.random.normal(size=input_shape) y = np.random.normal(size=input_shape) for model in (f32_model, mp_model, distributed_f32_model, distributed_mp_model): model.fit(x, y, batch_size=global_batch_size) # Assert all models have close weights f32_weights = f32_model.get_weights() self.assertAllClose( mp_model.get_weights(), f32_weights, rtol=1e-2, atol=1e-4) self.assertAllClose( distributed_f32_model.get_weights(), f32_weights, rtol=1e-2, atol=1e-4) self.assertAllClose( distributed_mp_model.get_weights(), f32_weights, rtol=1e-2, atol=1e-4) # Note: There is no need to test every layer subclass here, as otherwise this # test would take too long. Only layers which do something special or are # unusual in regards to mixed precision need to be tested. # We test RNNs as some RNNs use the implementation_selector grappler pass, # which can cause issues with AutoCastVariables. @testing_utils.enable_v2_dtype_behavior def test_simple_rnn(self): self._test_layer(recurrent.SimpleRNN(units=4, return_sequences=True), input_shape=(4, 4, 4)) @testing_utils.enable_v2_dtype_behavior def test_gru(self): self._test_layer(recurrent_v2.GRU(units=4, return_sequences=True), input_shape=(4, 4, 4)) @testing_utils.enable_v2_dtype_behavior def test_lstm(self): self._test_layer(recurrent_v2.LSTM(units=4, return_sequences=True), input_shape=(4, 4, 4)) if __name__ == '__main__': test.main()
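The heart of the cloning step above, in isolation. This sketch uses the modern tf.keras spelling (TF 2.4+); the test itself targets the older experimental policy module. Round-tripping the layer config with a new dtype policy yields a mixed-precision twin.

import tensorflow as tf

f32_layer = tf.keras.layers.Dense(4)
config = f32_layer.get_config()
config['dtype'] = tf.keras.mixed_precision.Policy('mixed_float16')
mp_layer = f32_layer.__class__.from_config(config)

assert mp_layer.compute_dtype == 'float16'  # math runs in float16
assert mp_layer.dtype == 'float32'          # variables stay float32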
foxmask/orotangi
setup.py
Python
bsd-3-clause
1,297
0
from setuptools import setup, find_packages from orotangi import __version__ as version install_requires = [ 'Django==1.11.18', 'djangorestframework==3.6.2', 'django-cors-headers==2.0.2', 'django-filter==1.0.2', 'python-dateutil==2.6.0' ] setup( name='orotangi', version=version, description='Your Thoughts, Everywhere', author='FoxMaSk', maintainer='FoxMaSk', author_email='[email protected]', maintainer_email='[email protected]', url='https://github.com/foxmask/orotangi', download_url="https://github.com/f
oxmask/orotangi/" "archive/orotangi-" + version + ".zip", packages
=find_packages(exclude=['orotangi/local_settings']), classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Framework :: Django', 'Framework :: Django :: 1.11', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3.6', 'Topic :: Internet', 'Topic :: Communications', 'Topic :: Database', ], install_requires=install_requires, include_package_data=True, )
roadmapper/ansible
test/units/modules/cloud/amazon/test_aws_acm.py
Python
gpl-3.0
4,581
0.000873
# (c) 2019 Telstra Corporation Limited
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.modules.cloud.amazon.aws_acm import pem_chain_split, chain_compare
from ansible.module_utils._text import to_bytes, to_text
from pprint import pprint


def test_chain_compare():

    # The functions we're testing take module as an argument
    # just so they can call module.fail_json.
    # Let's just use None for the unit tests,
    # because they shouldn't fail,
    # and if they do, fail_json is not applicable.
    module = None

    fixture_suffix = 'test/units/modules/cloud/amazon/fixtures/certs'

    # Test the chain split function on super simple (invalid) certs
    expected = ['aaa', 'bbb', 'ccc']
    for fname in ['simple-chain-a.cert', 'simple-chain-b.cert']:
        path = fixture_suffix + '/' + fname
        with open(path, 'r') as f:
            pem = to_text(f.read())
        actual = pem_chain_split(module, pem)
        actual = [a.strip() for a in actual]
        if actual != expected:
            print("Expected:")
            pprint(expected)
            print("Actual:")
            pprint(actual)
            raise AssertionError("Failed to properly split %s" % fname)

    # Now test real chains;
    # chains with the same 'same_as' value should be considered equal
    test_chains = [
        {  # Original cert chain
            'path': fixture_suffix + '/chain-1.0.cert',
            'same_as': 1,
            'length': 3
        },
        {  # Same as 1.0, but longer PEM lines
            'path': fixture_suffix + '/chain-1.1.cert',
            'same_as': 1,
            'length': 3
        },
        {  # Same as 1.0, but without the stuff before each --------
            'path': fixture_suffix + '/chain-1.2.cert',
            'same_as': 1,
            'length': 3
        },
        {  # Same certs as 1.0, but in a different order, so should be considered different
            'path': fixture_suffix + '/chain-1.3.cert',
            'same_as': 2,
            'length': 3
        },
        {  # Same as 1.0, but with the last link missing
            'path': fixture_suffix + '/chain-1.4.cert',
            'same_as': 3,
            'length': 2
        },
        {  # Completely different cert chain to all the others
            'path': fixture_suffix + '/chain-4.cert',
            'same_as': 4,
            'length': 3
        },
        {  # Single cert
            'path': fixture_suffix + '/a.pem',
            'same_as': 5,
            'length': 1
        },
        {  # A different, single cert
            'path': fixture_suffix + '/b.pem',
            'same_as': 6,
            'length': 1
        }
    ]

    for chain in test_chains:
        with open(chain['path'], 'r') as f:
            chain['pem_text'] = to_text(f.read())

        # Test to make sure our regex isn't too greedy
        chain['split'] = pem_chain_split(module, chain['pem_text'])
        if len(chain['split']) != chain['length']:
            print("Cert before split")
            print(chain['pem_text'])
            print("Cert after split")
            pprint(chain['split'])
            print("path: %s" % chain['path'])
            print("Expected chain length: %d" % chain['length'])
            print("Actual chain length: %d" % len(chain['split']))
            raise AssertionError("Chain %s was not split properly" % chain['path'])

    for chain_a in test_chains:
        for chain_b in test_chains:
            expected = (chain_a['same_as'] == chain_b['same_as'])

            # Now test the comparison function
            actual = chain_compare(module, chain_a['pem_text'], chain_b['pem_text'])
            if expected != actual:
                print("Error, unexpected comparison result between \n%s\nand\n%s"
                      % (chain_a['path'], chain_b['path']))
                print("Expected %s got %s" % (str(expected), str(actual)))
            assert expected == actual
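# For illustration only: a minimal regex-based splitter in the spirit of the
# pem_chain_split function under test. The real implementation lives in
# ansible.modules.cloud.amazon.aws_acm and may differ; this sketch just shows
# why the "isn't too greedy" check above matters - a greedy '.*' would swallow
# several certificates into a single match.
import re


def split_pem_bundle(pem_text):
    # Non-greedy body match, so each PEM block yields exactly one entry
    pattern = r'-----BEGIN [A-Z ]+-----.*?-----END [A-Z ]+-----'
    return re.findall(pattern, pem_text, flags=re.DOTALL)


bundle = ("-----BEGIN CERTIFICATE-----\naaa\n-----END CERTIFICATE-----\n"
          "-----BEGIN CERTIFICATE-----\nbbb\n-----END CERTIFICATE-----\n")
assert len(split_pem_bundle(bundle)) == 2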
uvbs/steam-limiter
updateapp/main.py
Python
bsd-2-clause
18,251
0.011506
#!/usr/bin/env python
#
# Copyright (C) 2011 Nigel Bree
# All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#-----------------------------------------------------------------------------
# This application mainly provides a simple way for installations of the Steam
# Limiter application from http://steam-limiter.googlecode.com to determine
# whether an updated version of the application has been made available; that
# is just a simple matter of fetching, from a known URL, the current version
# number and a download URL for the latest installer.
#
# Given the way that web access to the source repository in Google Code works,
# in principle that could be used instead, but a service like this has a
# couple of advantages, in addition to the fact that (as with the NSIS
# installer for the limiter client) it's a handy example of how to do such
# things.
#
# For instance, one thing I could add to this is an installer extension for
# the limiter client app which retrieves the client's real IP and thus
# suggests the ideal server (or server list) to set as the filter default,
# instead of assuming TelstraClear - TC is the default ISP at present since
# that's my ISP, but the filter app is usable by other New Zealand ISPs and
# it would be nice to make that seamless.

import jinja2
import os
import webapp2
import logging

from google.appengine.ext.webapp import template
from google.appengine.ext import db
from google.appengine.api import users, xmpp, mail

# These will most likely eventually become datastore items in future, but
# making them static will do just to start the update support off.
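# A minimal sketch of the kind of version-check endpoint the comments above
# describe. The handler name and route here are hypothetical - the real app's
# handlers and response layout may differ - but the webapp2 calls are the
# standard public API.
class UpdateCheckSketch(webapp2.RequestHandler):
    def get(self):
        # Report the current version string and installer URL as plain text,
        # which is all a client needs to decide whether to offer an upgrade.
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write('0.7.1.0\n')
        self.response.out.write('http://steam-limiter.appspot.com/files/steamlimit-0.7.1.0.exe\n')

sketch_app = webapp2.WSGIApplication([('/sketch/latest', UpdateCheckSketch)])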
self_base = 'http://steam-limiter.appspot.com'
if 'Development' in os.environ['SERVER_SOFTWARE']:
    self_base = 'http://localhost:8080'

old_defaults = {
    'latest': '0.7.1.0',
    'download': 'http://steam-limiter.googlecode.com/files/steamlimit-0.7.1.0.exe'
}
new_defaults = {
    'latest': '0.7.1.0',
    'download': self_base + '/files/steamlimit-0.7.1.0.exe',
    'proxyfilter': 'content*.steampowered.com=%(proxy)s;*.cs.steampowered.com=%(proxy)s',
    'proxyallow': '//%(proxy)s=*;//content*.steampowered.com=%(proxy)s;//cs.steampowered.com=%(proxy)s'
}

import app_common
import old_isps

# Data on unmetering for ISPs, for the expanded rule types available in v2 of
# the configuration API, for steam-limiter 0.7.1.0 and later where I can now
# handle rewriting "CS" type servers to normal ones, and where I can thus
# afford to use a simpler rule format for ISPs running proxy servers to do
# unmetering (which is most of them).
#
# For this version, I'm also completely dropping all the old port 27030 rules.

new_isps = {
    - 1: {
        'name': 'Unknown',
        'server': '0.0.0.0',
        'filter': '# No specific content server for your ISP'
    },

    # Note that most NZ Universities appear to have peering with and/or student
    # internet provided via Snap! - most I've lumped in as part of Snap!, but
    # Waikato is a special case, having an old netblock with a full class B,
    # and it is being set up as its own case, just using the same rules as
    # Snap! for now. I'll call it Lightstream (which is a semi-commercial
    # spinoff used for student internet) since that's probably most useful.

    # Note that aside from most NZ ISPs not generally understanding the concept
    # of giving things like servers DNS names, pretty much all of these are
    # filtered so I can't detect whether they support port 80 or not, and none
    # of the ISPs document this properly.

    # TelstraClear have made wlgwpstmcon01.telstraclear.co.nz go away, but the
    # new name is steam.cdn.vodafone.co.nz - Valve have actually just started
    # to advertise this server as steam.cdn.vodafone.co.nz, so I'm also
    # allowing it that way, and it appears to not be blocking requests via the
    # content*.steampowered.com name, so it's all good! It appears that
    # despite no official announcement, they've actually done something right.
    0: {
        'name': 'TelstraClear New Zealand',
        'proxy': 'steam.cdn.vodafone.co.nz'
    },
    10: {
        'name': 'Telstra BigPond Australia',
        'proxy': 'steam.content.bigpondgames.com'
    },
    11: {
        'name': 'Internode Australia',
        'proxy': 'steam.cdn.on.net'
    },
    12: {
        'name': 'iiNet Australia',
        'proxy': 'steam.cdn.on.net'
    },

    # iPrimus evidently support a small list of Steam servers hosted in some
    # regional peering exchanges, and that's it (no proxy servers).
    14: {
        'name': 'iPrimus Australia',
        'filter': 'content*.steampowered.com=steam.ix.asn.au,steam01.qld.ix.asn.au,steam01.vic.ix.asn.au;' +
                  '*.cs.steampowered.com=valve217.cs.steampowered.com',
        'allow': '//*.ix.asn.au=*;//*.steampowered.com=*'
    },

    # Similarly to iPrimus, these small regional ISPs don't document what they
    # do, and some of this data may be out of date due to acquisitions, since
    # the iiNet group has acquired a lot of regional ISPs.
    15: {
        'name': 'Westnet Internet Services (Perth, WA)',
        'filter': 'content*.steampowered.com=valve217.cs.steampowered.com,files-oc-syd.games.on.net',
        'allow': '//steam.cdn.on.net=*'
    },
    16: {
        'name': 'Adam Internet (Adelaide, SA)',
        'filter': '*:27030=valve217.cs.steampowered.com,files-oc-syd.games.on.net;' +
                  'content*.steampowered.com=valve217.cs.steampowered.com,files-oc-syd.games.on.net',
        'allow': '//steam.cdn.on.net=*'
    },
    17: {
        'name': 'EAccess Broadband, Australia',
        'filter': '# No known unmetered Steam server'
    },

    # Slots 18-29 are reserved for future Australian ISPs or tertiary
    # institutions.

    # Because customers with dual ISP accounts seem to be common in South
    # Africa (along with a large fraction of the retail ISPs being pure
    # resellers), detection in ZA needs extra work from the client side to
    # be sure of what connectivity is present, so there are rule extensions
    # to detect dual-ISP situations and prefer the WebAfrica unmetered server
    # if there's connectivity to the WebAfrica customer side.
    30: {
        'name': 'Internet Solutions (Johannesburg, South Africa)',
        'server': '196.38.180.3',
        'filter': '*:27030=steam.isgaming.co.za',
        'allow': '//steam.isgaming.co.za=*',
        'test': {
            'report': True,
            'steam.wa.co.za icmp *.wa.co.za': {
                0: {
                    'ispname': 'WebAfrica/IS dual ISP',
                    'filterrule': '*:27030=steam.wa.co.za,steam2.wa.co.za;content*.steampowered.com=steam.wa.co.za,steam2.wa.co.za',
                    'allow': '//*.wa.co.za=*;//content*.steampowered.com=*'
                }
            }
        }
    },
    31: {
        'name': 'webafrica (Cape Town, South Africa)',
        'server': '41.185.24.21',
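# A standalone illustration of how the '%(proxy)s' placeholders in
# new_defaults pair with a per-ISP 'proxy' entry: Python's %-formatting with
# a dict expands the template into the concrete filter rule handed to a
# client. The proxy host used here is one of the values from the table above.
proxy_template = 'content*.steampowered.com=%(proxy)s;*.cs.steampowered.com=%(proxy)s'
example_rule = proxy_template % {'proxy': 'steam.cdn.on.net'}
assert example_rule == ('content*.steampowered.com=steam.cdn.on.net;'
                        '*.cs.steampowered.com=steam.cdn.on.net')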
stianvi/ansible-modules-core
cloud/openstack/_quantum_router.py
Python
gpl-3.0
7,042
0.007242
#!/usr/bin/python
#coding: utf-8 -*-

# (c) 2013, Benno Joy <[email protected]>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.

try:
    try:
        from neutronclient.neutron import client
    except ImportError:
        from quantumclient.quantum import client
    from keystoneclient.v2_0 import client as ksclient
    HAVE_DEPS = True
except ImportError:
    HAVE_DEPS = False

DOCUMENTATION = '''
---
module: quantum_router
version_added: "1.2"
author: "Benno Joy (@bennojoy)"
deprecated: Deprecated in 2.0. Use os_router instead
short_description: Create or remove a router from OpenStack
description:
   - Create or delete routers from OpenStack
options:
   login_username:
     description:
        - Login username to authenticate to keystone
     required: true
     default: admin
   login_password:
     description:
        - Password of login user
     required: true
     default: 'yes'
   login_tenant_name:
     description:
        - The tenant name of the login user
     required: true
     default: 'yes'
   auth_url:
     description:
        - The keystone URL for authentication
     required: false
     default: 'http://127.0.0.1:35357/v2.0/'
   region_name:
     description:
        - Name of the region
     required: false
     default: None
   state:
     description:
        - Indicate desired state of the resource
     choices: ['present', 'absent']
     default: present
   name:
     description:
        - Name to be given to the router
     required: true
     default: None
   tenant_name:
     description:
        - Name of the tenant for which the router has to be created;
          if none, the router will be created for the login tenant.
     required: false
     default: None
   admin_state_up:
     description:
        - Desired admin state of the created router.
     required: false
     default: true
requirements:
    - "python >= 2.6"
    - "python-neutronclient or python-quantumclient"
    - "python-keystoneclient"
'''

EXAMPLES = '''
# Creates a router for tenant admin
- quantum_router: state=present
                  login_username=admin
                  login_password=admin
                  login_tenant_name=admin
                  name=router1
'''

_os_keystone = None
_os_tenant_id = None


def _get_ksclient(module, kwargs):
    try:
        kclient = ksclient.Client(username=kwargs.get('login_username'),
                                  password=kwargs.get('login_password'),
                                  tenant_name=kwargs.get('login_tenant_name'),
                                  auth_url=kwargs.get('auth_url'))
    except Exception as e:
        module.fail_json(msg="Error authenticating to keystone: %s" % e.message)
    global _os_keystone
    _os_keystone = kclient
    return kclient


def _get_endpoint(module, ksclient):
    try:
        endpoint = ksclient.service_catalog.url_for(service_type='network',
                                                    endpoint_type='publicURL')
    except Exception as e:
        module.fail_json(msg="Error getting network endpoint: %s" % e.message)
    return endpoint


def _get_neutron_client(module, kwargs):
    _ksclient = _get_ksclient(module, kwargs)
    token = _ksclient.auth_token
    endpoint = _get_endpoint(module, _ksclient)
    kwargs = {
        'token': token,
        'endpoint_url': endpoint
    }
    try:
        neutron = client.Client('2.0', **kwargs)
    except Exception as e:
        module.fail_json(msg="Error in connecting to neutron: %s" % e.message)
    return neutron


def _set_tenant_id(module):
    global _os_tenant_id
    if not module.params['tenant_name']:
        _os_tenant_id = _os_keystone.tenant_id
    else:
        tenant_name = module.params['tenant_name']
        for tenant in _os_keystone.tenants.list():
            if tenant.name == tenant_name:
                _os_tenant_id = tenant.id
                break
    if not _os_tenant_id:
        module.fail_json(msg="The tenant id cannot be found, please check the parameters")


def _get_router_id(module, neutron):
    kwargs = {
        'name': module.params['name'],
        'tenant_id': _os_tenant_id,
    }
    try:
        routers = neutron.list_routers(**kwargs)
    except Exception as e:
        module.fail_json(msg="Error in getting the router list: %s" % e.message)
    if not routers['routers']:
        return None
    return routers['routers'][0]['id']


def _create_router(module, neutron):
    router = {
        'name': module.params['name'],
        'tenant_id': _os_tenant_id,
        'admin_state_up': module.params['admin_state_up'],
    }
    try:
        new_router = neutron.create_router(dict(router=router))
    except Exception as e:
        module.fail_json(msg="Error in creating router: %s" % e.message)
    return new_router['router']['id']


def _delete_router(module, neutron, router_id):
    try:
        neutron.delete_router(router_id)
    except Exception:
        # fail_json takes the error text as the 'msg' keyword argument
        module.fail_json(msg="Error in deleting the router")
    return True


def main():
    argument_spec = openstack_argument_spec()
    argument_spec.update(dict(
        name=dict(required=True),
        tenant_name=dict(default=None),
        state=dict(default='present', choices=['absent', 'present']),
        admin_state_up=dict(type='bool', default=True),
    ))
    module = AnsibleModule(argument_spec=argument_spec)
    if not HAVE_DEPS:
        module.fail_json(msg='python-keystoneclient and either python-neutronclient or python-quantumclient are required')

    neutron = _get_neutron_client(module, module.params)
    _set_tenant_id(module)

    if module.params['state'] == 'present':
        router_id = _get_router_id(module, neutron)
        if not router_id:
            router_id = _create_router(module, neutron)
            module.exit_json(changed=True, result="Created", id=router_id)
        else:
            module.exit_json(changed=False, result="success", id=router_id)
    else:
        router_id = _get_router_id(module, neutron)
        if not router_id:
            module.exit_json(changed=False, result="success")
        else:
            _delete_router(module, neutron, router_id)
            module.exit_json(changed=True, result="deleted")

# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *

if __name__ == '__main__':
    main()