| repo_name (string, 5–100 chars) | path (string, 4–231 chars) | language (1 class) | license (15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, 0–8.16k chars) | middle (string, 3–512 chars) | suffix (string, 0–8.17k chars) |
|---|---|---|---|---|---|---|---|---|
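The columns describe fill-in-the-middle (FIM) samples: each row is a Python source file cut at two points and stored as a `prefix`, a masked `middle`, and a `suffix`. A minimal loading sketch, assuming the Hugging Face `datasets` library; the dataset path is a placeholder, not a confirmed published name:

```python
# Sketch: reconstruct a full source file from one FIM row.
# "path/to/this-fim-dataset" is a placeholder for wherever this dump lives.
from datasets import load_dataset

ds = load_dataset("path/to/this-fim-dataset", split="train")

row = ds[0]
# A FIM sample is the original file cut at two points, so concatenating
# the three pieces recovers the file verbatim.
full_file = row["prefix"] + row["middle"] + row["suffix"]
print(row["repo_name"], row["path"], row["license"], len(full_file))
```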
duynguyen/incubator-openwhisk | tests/dat/actions/malformed.py | Python | apache-2.0 | 124 | 0
"""Invalid Python comment test."""
// invalid python comment # noqa -- tell linters to ignore the intentional syntax error
ncoevoet/ChanReg | test.py | Python | mit | 1,737 | 0.000576
###
# Copyright (c) 2013, Nicolas Coevoet
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from supybot.test import *
class ChanRegTestCase(PluginTestCase):
plugins = ('ChanReg',)
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
ging/horizon | openstack_dashboard/dashboards/idm/home/views.py | Python | apache-2.0 | 3,184 | 0.000942
# Copyright (C) 2014 Universidad Politecnica de Madrid
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.shortcuts import redirect
from horizon import exceptions
from horizon import tables
from openstack_dashboard import api
from openstack_dashboard import fiware_api
from openstack_dashboard.dashboards.idm import utils as idm_utils
from openstack_dashboard.dashboards.idm.home import tables as home_tables
LOG = logging.getLogger('idm_logger')
class IndexView(tables.MultiTableView):
table_classes = (home_tables.OrganizationsTable,
home_tables.ApplicationsTable)
template_name = 'idm/home/index.html'
def dispatch(self, request, *args, **kwargs):
if request.organization.id != request.user.default_project_id:
return redirect("/idm/home_orgs/")
return super(IndexView, self).dispatch(request, *args, **kwargs)
def has_more_data(self, table):
return False
def get_organizations_data(self):
organizations = []
# try:
# organizations = fiware_api.keystone.project_list(
# self.request,
# user=self.request.user.id)
# switchable_organizations = [org.id for org
# in self.request.organizations]
# organizations = sorted(organizations, key=lambda x: x.name.lower())
# for org in organizations:
# if org.id in switchable_organizations:
# setattr(org, 'switchable', True)
# except Exception:
# exceptions.handle(self.request,
# ("Unable to retrieve organization list."))
return idm_utils.filter_default(organizations)
def get_applications_data(self):
applications = []
# try:
# # TODO(garcianavalon) extract to fiware_api
# all_apps = fiware_api.keystone.application_list(self.request)
# apps_with_roles = [a.application_id for a
# in fiware_api.keystone.user_role_assignments(
# self.request,
# user=self.request.user.id,
# organization=self.request.organization)]
# applications = [app for app in all_apps
# if app.id in apps_with_roles]
# applications = sorted(applications, key=lambda x: x.name.lower())
# except Exception:
# exceptions.handle(self.request,
# ("Unable to retrieve application list."))
return idm_utils.filter_default(applications)
alexadusei/ProjectEuler | q008.py | Python | mit | 1,143 | 0.006999
#-------------------------------------------------------------------------------
# Name: Largest Product in a Series
# Purpose: The four adjacent digits in the 1000-digit number (provided in
# textfile 'q008.txt') that have the greatest product are
# 9 x 9 x 8 x 9 = 5832. Find the thirteen adjacent digits in the
# 1000-digit number that have the greatest product. What is the
# value of this product?
# Answer: 23514624000
# Author: Alex Adusei
#-------------------------------------------------------------------------------
# Helper function to read large number from textfile
def readNums():
file = open("q008.txt")
nums = []
for line in file:
line = line.strip()
for i in range (len(line)):
nums += [int(line[i])]
return nums
numbers = readNums()
currentSum = 0
DIGITS = 13
for i in range(len(numbers)):
digitSum = 1
    if i <= (len(numbers) - DIGITS):  # <= so the final 13-digit window is included
for k in range(DIGITS):
digitSum *= numbers[i+k]
if digitSum > currentSum:
currentSum = digitSum
print currentSum
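The scan above works, but the window bounds are easy to get wrong; a compact equivalent in modern Python (a sketch, not the author's code; `math.prod` needs Python 3.8+):

```python
# Sliding-window maximum product over the digit list from q008.txt.
from math import prod

def largest_window_product(digits, width=13):
    # range end is len(digits) - width + 1 so the final window is included
    return max(prod(digits[i:i + width]) for i in range(len(digits) - width + 1))

# Expected: largest_window_product(numbers) == 23514624000
```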
TornikeNatsvlishvili/skivri.ge | backend/backend/extensions.py | Python | mit | 94 | 0.010638
from flask_pymongo import PyMongo
from flask_cors import CORS
mongo = PyMongo()
cors = CORS()
crunchmail/munch-core | src/munch/core/celery.py | Python | agpl-3.0 | 4,888 | 0
import os
import sys
import logging
from functools import wraps
import celery
from celery.signals import celeryd_after_setup
from kombu import Queue
from django.conf import settings
from munch.core.utils import get_worker_types
log = logging.getLogger('munch')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'munch.settings')
def catch_exception(f):
@wraps(f)
def wrapper(*args, **kwargs):
try:
f(*args, **kwargs)
except Exception as err:
sys.stderr.write(str(err))
raise
return wrapper
class Celery(celery.Celery):
def on_configure(self):
if hasattr(settings, 'RAVEN_CONFIG'):
from raven import Client
from raven.contrib.celery import register_signal
from raven.contrib.celery import register_logger_signal
client = Client(settings.RAVEN_CONFIG.get('dsn'))
register_logger_signal(client)
register_signal(client)
class CeleryRouteMap(object):
def __init__(self, app):
self.app = app
self.exchange = settings.CELERY_DEFAULT_EXCHANGE
self.exchange_type = settings.CELERY_DEFAULT_EXCHANGE_TYPE
self.queues = {
'default': {
'name': settings.CELERY_DEFAULT_QUEUE,
'routing_key': settings.CELERY_DEFAULT_ROUTING_KEY
}
}
self.routes = {}
def add_queue(self, worker_type, queue):
self.app.amqp.queues.add(
Queue(
queue, routing_key='{}.#'.format(queue),
queue_arguments={'x-max-priority': 100}
)
)
self.queues.update({
worker_type: {'name': queue, 'routing_key': queue}})
log.debug('Added queue {} for {} workers'.format(
queue, worker_type.upper()))
def register_route(self, task, worker_type, munch_app):
if worker_type not in self.queues:
raise ValueError(
'Can not register celery route. '
'No queue defined for worker_type {}'.format(
worker_type.upper()))
        self.routes.update({task: {'worker': worker_type, 'key': munch_app}})
log.debug(
'Registered route for {} on {} workers'.format(
task, worker_type.upper()))
def import_tasks_map(self, tasks_map, munch_app):
for worker, tasks in tasks_map.items():
for task in tasks:
self.register_route(task, worker, munch_app)
    def lookup_route(self, task):
if task in self.routes:
worker = self.routes.get(task)['worker']
key = self.routes.get(task)['key']
queue = self.queues.get(worker)
return {
'queue': queue['name'],
'exchange': self.exchange,
'exchange_type': self.exchange_type,
'routing_key': '{}.{}'.format(queue['routing_key'], key)
}
return None
def register_to_queue(self, queue):
self.app.amqp.queues.select_add(
queue, routing_key='{}.#'.format(queue),
queue_arguments={'x-max-priority': 100})
def register_as_worker(self, worker_type):
if worker_type not in self.queues:
raise ValueError(
'Can not register as worker {}. '
'No queue defined for this worker_type'.format(
worker_type.upper()))
self.register_to_queue(self.queues[worker_type]['name'])
def get_queue_for(self, worker_type):
return self.queues.get(worker_type, 'default')['name']
def get_workers_map(self):
workers_map = {}
for k, v in self.routes.items():
workers_map.setdefault(v['worker'], []).append(k)
return workers_map
class CeleryRouter(object):
def route_for_task(self, task, args=None, kwargs=None):
return munch_tasks_router.lookup_route(task)
# Celery App initialization
app = Celery('munch', broker=settings.BROKER_URL)
app.config_from_object('django.conf:settings')
munch_tasks_router = CeleryRouteMap(app)
# Queues, Tasks and worker registration methods for munch.core
def add_queues():
munch_tasks_router.add_queue('core', 'munch.core')
munch_tasks_router.add_queue('status', 'munch.status')
munch_tasks_router.add_queue('gc', 'munch.gc')
def register_tasks():
tasks_map = {
'gc': ['munch.core.mail.tasks.purge_raw_mail']
}
munch_tasks_router.import_tasks_map(tasks_map, 'munch')
@celeryd_after_setup.connect
@catch_exception
def configure_worker(instance, **kwargs):
if any([t in get_worker_types() for t in ['gc', 'all']]):
from .mail.tasks import purge_raw_mail # noqa
sys.stdout.write(
'[core-app] Registering worker as GARBAGE COLLECTOR...')
munch_tasks_router.register_as_worker('gc')
dwang159/iris-api | src/iris/role_lookup/mailing_list.py | Python | bsd-2-clause | 2,203 | 0.002724
# Copyright (c) LinkedIn Corporation. All rights reserved. Licensed under the BSD-2 Clause license.
# See LICENSE in the project root for license information.
from iris import db
from iris.role_lookup import IrisRoleLookupException
import logging
logger = logging.getLogger(__name__)
class mailing_list(object):
def __init__(self, config):
self.max_list_names = config.get('ldap_lists', {}).get('max_unrolled_users', 0)
def get(self, role, target):
if role == 'mailing-list':
return self.unroll_mailing_list(target)
else:
return None
def unroll_mailing_list(self, list_name):
connection = db.engine.raw_connection()
cursor = connection.cursor()
cursor.execute('''
SELECT `mailing_list`.`target_id`,
`mailing_list`.`count`
FROM `mailing_list`
JOIN `target` on `target`.`id` = `mailing_list`.`target_id`
WHERE `target`.`name` = %s
''', list_name)
list_info = cursor.fetchone()
if not list_info:
logger.warn('Invalid mailing list %s', list_name)
cursor.close()
connection.close()
return None
list_id, list_count = list_info
if self.max_list_names > 0 and list_count >= self.max_list_names:
            logger.warn('Not returning any results for list group %s as it contains too many members (%s >= %s)',
                        list_name, list_count, self.max_list_names)
cursor.close()
connection.close()
raise IrisRoleLookupException('List %s contains too many members to safely expand (%s >= %s)' % (list_name, list_count, self.max_list_names))
cursor.execute('''SELECT `target`.`name`
FROM `mailing_list_membership`
JOIN `target` on `target`.`id` = `mailing_list_membership`.`user_id`
WHERE `mailing_list_membership`.`list_id` = %s''', [list_id])
names = [row[0] for row in cursor]
cursor.close()
connection.close()
logger.info('Unfurled %s people from list %s', len(names), list_name)
return names
teampopong/crawlers | twitter/setup.py | Python | agpl-3.0 | 1,997 | 0.002504
#! /usr/bin/python2.7
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import requests
from requests_oauthlib import OAuth1
from urlparse import parse_qs
REQUEST_TOKEN_URL = "https://api.twitter.com/oauth/request_token"
AUTHORIZE_URL = "https://api.twitter.com/oauth/authorize?oauth_token="
ACCESS_TOKEN_URL = "https://api.twitter.com/oauth/access_token"
def setup_oauth1(keys):
"""Authorize your app via identifier."""
# Request token
oauth = OAuth1(keys['consumer_key'], client_secret=keys['consumer_secret'])
r = requests.post(url=REQUEST_TOKEN_URL, auth=oauth)
credentials = parse_qs(r.content)
resource_owner_key = credentials.get('oauth_token')[0]
resource_owner_secret = credentials.get('oauth_token_secret')[0]
# Authorize
authorize_url = AUTHORIZE_URL + resource_owner_key
print 'Please go here and authorize: ' + authorize_url
verifier = raw_input('Please input the verifier: ')
oauth = OAuth1(keys['consumer_key'],
client_secret=keys['consumer_secret'],
resource_owner_key=resource_owner_key,
resource_owner_secret=resource_owner_secret,
verifier=verifier)
# Finally, obtain the Access Token
    r = requests.post(url=ACCESS_TOKEN_URL, auth=oauth)
credentials = parse_qs(r.content)
token = credentials.get('oauth_token')[0]
secret = credentials.get('oauth_token_secret')[0]
return token, secret
def get_oauth1(keys):
if not keys['oauth_token']:
keys['oauth_token'], keys['oauth_token_secret']\
= setup_oauth1(keys)
        print '\nInput the keys below to twitter/settings.py'
import pprint; pprint.pprint(keys)
import sys; sys.exit()
oauth = OAuth1(keys['consumer_key'],
client_secret=keys['consumer_secret'],
resource_owner_key=keys['oauth_token'],
resource_owner_secret=keys['oauth_token_secret'])
return oauth
keegancsmith/MCP | setup.py | Python | bsd-2-clause | 772 | 0
#!/usr/bin/env python
from setuptools import setup
setup(
name='MCP',
version='0.2',
author='Keegan Carruthers-Smith',
author_email='[email protected]',
url='https://github.com/keegancsmith/MCP',
license='BSD',
py_modules=['mcp'],
    description='A program to orchestrate Entellect Challenge bot matches.',
    long_description=file('README.rst').read(),
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Utilities',
],
entry_points={'console_scripts': ['mcp = mcp:main']},
)
uwosh/Campus_Directory_web_service | getCurrentOrNextSemesterCX.py | Python | gpl-2.0 | 1,292 | 0.006192
# returns the current PeopleSoft semester code, as of today
# if today is between semesters, returns the next semester code
import cx_Oracle
def getCurrentOrNextSemesterCX (self):
file = open('/opt/Plone-2.5.5/zeocluster/client1/Extensions/Oracle_Database_Connection_NGUYEN_PRD.txt', 'r')
for line in file.readlines():
if line <> "" and not line.startswith('#'):
connString = line
file.close()
connection = cx_Oracle.connect(connString)
cursor = connection.cursor()
# get the current semester code if we are within a semester
cursor.execute("""select strm from ps_term_tbl where institution = 'UWOSH' and acad_career = 'UGRD' and term_begin_dt <= sysdate and term_end_dt >= sysdate""")
for column_1 in cursor:
try:
return column_1[0]
except:
pass
    # otherwise get the next semester code
    cursor.execute("""select t1.strm, t1.descr from ps_term_tbl t1 where t1.institution = 'UWOSH' and t1.acad_career = 'UGRD' and t1.term_begin_dt = (select min(term_begin_dt) from ps_term_tbl t2 where t2.institution = t1.institution and t2.acad_career = t1.acad_career and term_begin_dt > sysdate)""")
for column_1 in cursor:
try:
return column_1[0]
except:
pass
yaoshengzhe/vitess | test/backup.py | Python | bsd-3-clause | 5,848 | 0.007011
#!/usr/bin/env python
import warnings
# Dropping a table inexplicably produces a warning despite
# the "IF EXISTS" clause. Squelch these warnings.
warnings.simplefilter('ignore')
import logging
import unittest
import environment
import tablet
import utils
use_mysqlctld = True
tablet_master = tablet.Tablet(use_mysqlctld=use_mysqlctld)
tablet_replica1 = tablet.Tablet(use_mysqlctld=use_mysqlctld)
tablet_replica2 = tablet.Tablet(use_mysqlctld=use_mysqlctld)
setup_procs = []
def setUpModule():
try:
environment.topo_server().setup()
# start mysql instance external to the test
global setup_procs
setup_procs = [
tablet_master.init_mysql(),
tablet_replica1.init_mysql(),
tablet_replica2.init_mysql(),
]
if use_mysqlctld:
tablet_master.wait_for_mysqlctl_socket()
tablet_replica1.wait_for_mysqlctl_socket()
tablet_replica2.wait_for_mysqlctl_socket()
else:
utils.wait_procs(setup_procs)
except:
tearDownModule()
raise
def tearDownModule():
if utils.options.skip_teardown:
return
if use_mysqlctld:
# Try to terminate mysqlctld gracefully, so it kills its mysqld.
for proc in setup_procs:
utils.kill_sub_process(proc, soft=True)
teardown_procs = setup_procs
else:
teardown_procs = [
tablet_master.teardown_mysql(),
tablet_replica1.teardown_mysql(),
tablet_replica2.teardown_mysql(),
]
utils.wait_procs(teardown_procs, raise_on_error=False)
environment.topo_server().teardown()
utils.kill_sub_processes()
utils.remove_tmp_files()
tablet_master.remove_tree()
tablet_replica1.remove_tree()
tablet_replica2.remove_tree()
class TestBackup(unittest.TestCase):
def tearDown(self):
tablet.Tablet.check_vttablet_count()
environment.topo_server().wipe()
for t in [tablet_master, tablet_replica1, tablet_replica2]:
t.reset_replication()
t.clean_dbs()
_create_vt_insert_test = '''create table vt_insert_test (
id bigint auto_increment,
msg varchar(64),
primary key (id)
) Engine=InnoDB'''
def _insert_master(self, index):
tablet_master.mquery(
'vt_test_keyspace',
"insert into vt_insert_test (msg) values ('test %s')" %
index, write=True)
def test_backup(self):
"""Test backup flow.
test_backup will:
- create a shard with master and replica1 only
- run InitShardMaster
- insert some data
- take a backup
- insert more data on the master
- bring up tablet_replica2 after the fact, let it restore the backup
- check all data is right (before+after backup data)
- list the backup, remove it
"""
for t in tablet_master, tablet_replica1:
t.create_db('vt_test_keyspace')
tablet_master.init_tablet('master', 'test_keyspace', '0', start=True,
supports_backups=True)
tablet_replica1.init_tablet('replica', 'test_keyspace', '0', start=True,
supports_backups=True)
    utils.run_vtctl(['InitShardMaster', 'test_keyspace/0',
tablet_master.tablet_alias])
# insert data on master, wait for slave to get it
tablet_master.mquery('vt_test_keyspace', self._create_vt_insert_test)
self._insert_master(1)
timeout = 10
while True:
try:
result = tablet_replica1.mquery(
'vt_test_keyspace', 'select count(*) from vt_insert_test')
if result[0][0] == 1:
break
except:
# ignore exceptions, we'll just timeout (the tablet creation
# can take some time to replicate, and we get a 'table vt_insert_test
# does not exist exception in some rare cases)
logging.exception('exception waiting for data to replicate')
timeout = utils.wait_step('slave tablet getting data', timeout)
# backup the slave
utils.run_vtctl(['Backup', tablet_replica1.tablet_alias], auto_log=True)
# insert more data on the master
self._insert_master(2)
# now bring up the other slave, health check on, init_tablet on, restore on
tablet_replica2.start_vttablet(wait_for_state='SERVING',
target_tablet_type='replica',
init_keyspace='test_keyspace',
init_shard='0',
supports_backups=True)
# check the new slave has the data
timeout = 10
while True:
result = tablet_replica2.mquery(
'vt_test_keyspace', 'select count(*) from vt_insert_test')
if result[0][0] == 2:
break
timeout = utils.wait_step('new slave tablet getting data', timeout)
# list the backups
backups, _ = utils.run_vtctl(tablet.get_backup_storage_flags() +
['ListBackups', 'test_keyspace/0'],
mode=utils.VTCTL_VTCTL, trap_output=True)
backups = backups.splitlines()
logging.debug('list of backups: %s', backups)
self.assertEqual(len(backups), 1)
self.assertTrue(backups[0].endswith(tablet_replica1.tablet_alias))
# remove the backup
utils.run_vtctl(
tablet.get_backup_storage_flags() +
['RemoveBackup', 'test_keyspace/0', backups[0]],
auto_log=True, mode=utils.VTCTL_VTCTL)
# make sure the list of backups is empty now
backups, err = utils.run_vtctl(tablet.get_backup_storage_flags() +
['ListBackups', 'test_keyspace/0'],
mode=utils.VTCTL_VTCTL, trap_output=True)
backups = backups.splitlines()
logging.debug('list of backups after remove: %s', backups)
self.assertEqual(len(backups), 0)
for t in tablet_master, tablet_replica1, tablet_replica2:
t.kill_vttablet()
if __name__ == '__main__':
utils.main()
kawamon/hue | desktop/core/ext-py/Django-1.11.29/tests/gis_tests/geos_tests/test_geos.py | Python | apache-2.0 | 54,041 | 0.001129
from __future__ import unicode_literals
import ctypes
import json
import random
from binascii import a2b_hex, b2a_hex
from io import BytesIO
from unittest import skipUnless
from django.contrib.gis import gdal
from django.contrib.gis.geos import (
HAS_GEOS, GeometryCollection, GEOSException, GEOSGeometry, LinearRing,
LineString, MultiLineString, MultiPoint, MultiPolygon, Point, Polygon,
fromfile, fromstr,
)
from django.contrib.gis.geos.libgeos import geos_version_info
from django.contrib.gis.shortcuts import numpy
from django.template import Context
from django.template.engine import Engine
from django.test import SimpleTestCase, ignore_warnings, mock
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_bytes
from django.utils.six.moves import range
from ..test_data import TestDataMixin
@skipUnless(HAS_GEOS, "Geos is required.")
class GEOSTest(SimpleTestCase, TestDataMixin):
def test_wkt(self):
"Testing WKT output."
for g in self.geometries.wkt_out:
geom = fromstr(g.wkt)
if geom.hasz:
self.assertEqual(g.ewkt, geom.wkt)
def test_hex(self):
"Testing HEX output."
for g in self.geometries.hex_wkt:
geom = fromstr(g.wkt)
self.assertEqual(g.hex, geom.hex.decode())
def test_hexewkb(self):
"Testing (HEX)EWKB output."
# For testing HEX(EWKB).
ogc_hex = b'01010000000000000000000000000000000000F03F'
ogc_hex_3d = b'01010000800000000000000000000000000000F03F0000000000000040'
# `SELECT ST_AsHEXEWKB(ST_GeomFromText('POINT(0 1)', 4326));`
hexewkb_2d = b'0101000020E61000000000000000000000000000000000F03F'
# `SELECT ST_AsHEXEWKB(ST_GeomFromEWKT('SRID=4326;POINT(0 1 2)'));`
hexewkb_3d = b'01010000A0E61000000000000000000000000000000000F03F0000000000000040'
pnt_2d = Point(0, 1, srid=4326)
pnt_3d = Point(0, 1, 2, srid=4326)
# OGC-compliant HEX will not have SRID value.
self.assertEqual(ogc_hex, pnt_2d.hex)
self.assertEqual(ogc_hex_3d, pnt_3d.hex)
# HEXEWKB should be appropriate for its dimension -- have to use an
# a WKBWriter w/dimension set accordingly, else GEOS will insert
# garbage into 3D coordinate if there is none.
self.assertEqual(hexewkb_2d, pnt_2d.hexewkb)
self.assertEqual(hexewkb_3d, pnt_3d.hexewkb)
self.assertIs(GEOSGeometry(hexewkb_3d).hasz, True)
# Same for EWKB.
self.assertEqual(six.memoryview(a2b_hex(hexewkb_2d)), pnt_2d.ewkb)
self.assertEqual(six.memoryview(a2b_hex(hexewkb_3d)), pnt_3d.ewkb)
# Redundant sanity check.
self.assertEqual(4326, GEOSGeometry(hexewkb_2d).srid)
def test_kml(self):
"Testing KML output."
for tg in self.geometries.wkt_out:
geom = fromstr(tg.wkt)
kml = getattr(tg, 'kml', False)
if kml:
self.assertEqual(kml, geom.kml)
def test_errors(self):
"Testing the Error handlers."
# string-based
for err in self.geometries.errors:
with self.assertRaises((GEOSException, ValueError)):
fromstr(err.wkt)
# Bad WKB
with self.assertRaises(GEOSException):
GEOSGeometry(six.memoryview(b'0'))
class NotAGeometry(object):
pass
# Some other object
with self.assertRaises(TypeError):
GEOSGeometry(NotAGeometry())
# None
with self.assertRaises(TypeError):
GEOSGeometry(None)
def test_wkb(self):
"Testing WKB output."
for g in self.geometries.hex_wkt:
geom = fromstr(g.wkt)
wkb = geom.wkb
self.assertEqual(b2a_hex(wkb).decode().upper(), g.hex)
def test_create_hex(self):
"Testing creation from HEX."
for g in self.geometries.hex_wkt:
geom_h = GEOSGeometry(g.hex)
# we need to do this so decimal places get normalized
geom_t = fromstr(g.wkt)
self.assertEqual(geom_t.wkt, geom_h.wkt)
def test_create_wkb(self):
"Testing creation from WKB."
for g in self.geometries.hex_wkt:
wkb = six.memoryview(a2b_hex(g.hex.encode()))
geom_h = GEOSGeometry(wkb)
# we need to do this so decimal places get normalized
geom_t = fromstr(g.wkt)
self.assertEqual(geom_t.wkt, geom_h.wkt)
def test_ewkt(self):
"Testing EWKT."
srids = (-1, 32140)
for srid in srids:
for p in self.geometries.polygons:
ewkt = 'SRID=%d;%s' % (srid, p.wkt)
poly = fromstr(ewkt)
self.assertEqual(srid, poly.srid)
self.assertEqual(srid, poly.shell.srid)
self.assertEqual(srid, fromstr(poly.ewkt).srid) # Checking export
def test_json(self):
"Testing GeoJSON input/output (via GDAL)."
for g in self.geometries.json_geoms:
geom = GEOSGeometry(g.wkt)
if not hasattr(g, 'not_equal'):
# Loading jsons to prevent decimal differences
self.assertEqual(json.loads(g.json), json.loads(geom.json))
self.assertEqual(json.loads(g.json), json.loads(geom.geojson))
self.assertEqual(GEOSGeometry(g.wkt), GEOSGeometry(geom.json))
def test_fromfile(self):
"Testing the fromfile() factory."
ref_pnt = GEOSGeometry('POINT(5 23)')
wkt_f = BytesIO()
wkt_f.write(force_bytes(ref_pnt.wkt))
wkb_f = BytesIO()
wkb_f.write(bytes(ref_pnt.wkb))
# Other tests use `fromfile()` on string filenames so those
# aren't tested here.
for fh in (wkt_f, wkb_f):
fh.seek(0)
pnt = fromfile(fh)
self.assertEqual(ref_pnt, pnt)
def test_eq(self):
"Testing equivalence."
p = fromstr('POINT(5 23)')
self.assertEqual(p, p.wkt)
self.assertNotEqual(p, 'foo')
ls = fromstr('LINESTRING(0 0, 1 1, 5 5)')
self.assertEqual(ls, ls.wkt)
self.assertNotEqual(p, 'bar')
# Error shouldn't be raise on equivalence testing with
# an invalid type.
for g in (p, ls):
self.assertNotEqual(g, None)
self.assertNotEqual(g, {'foo': 'bar'})
self.assertNotEqual(g, False)
def test_eq_with_srid(self):
"Testing non-equivalence with different srids."
p0 = Point(5, 23)
p1 = Point(5, 23, srid=4326)
p2 = Point(5, 23, srid=32632)
# GEOS
self.assertNotEqual(p0, p1)
self.assertNotEqual(p1, p2)
# EWKT
self.assertNotEqual(p0, p1.ewkt)
self.assertNotEqual(p1, p0.ewkt)
self.assertNotEqual(p1, p2.ewkt)
# Equivalence with matching SRIDs
self.assertEqual(p2, p2)
self.assertEqual(p2, p2.ewkt)
# WKT contains no SRID so will not equal
self.assertNotEqual(p2, p2.wkt)
# SRID of 0
self.assertEqual(p0, 'SRID=0;POINT (5 23)')
self.assertNotEqual(p1, 'SRID=0;POINT (5 23)')
def test_points(self):
"Testing Point objects."
prev = fromstr('POINT(0 0)')
for p in self.geometries.points:
# Creating the point from the WKT
pnt = fromstr(p.wkt)
self.assertEqual(pnt.geom_type, 'Point')
self.assertEqual(pnt.geom_typeid, 0)
self.assertEqual(pnt.dims, 0)
self.assertEqual(p.x, pnt.x)
self.assertEqual(p.y, pnt.y)
self.assertEqual(pnt, fromstr(p.wkt))
self.assertEqual(False, pnt == prev) # Use assertEqual to test __eq__
            # Making sure that the point's X, Y components are what we expect
self.assertAlmostEqual(p.x, pnt.tuple[0], 9)
            self.assertAlmostEqual(p.y, pnt.tuple[1], 9)
# Testing the third dimension, and getting the tuple arguments
if hasattr(p, 'z'):
self.assertIs(pnt.hasz, True)
sasha-gitg/python-aiplatform | google/cloud/aiplatform_v1/types/dataset_service.py | Python | apache-2.0 | 14,664 | 0.00075
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.cloud.aiplatform_v1.types import annotation
from google.cloud.aiplatform_v1.types import data_item
from google.cloud.aiplatform_v1.types import dataset as gca_dataset
from google.cloud.aiplatform_v1.types import operation
from google.protobuf import field_mask_pb2 # type: ignore
__protobuf__ = proto.module(
package="google.cloud.aiplatform.v1",
manifest={
"CreateDatasetRequest",
"CreateDatasetOperationMetadata",
"GetDatasetRequest",
"UpdateDatasetRequest",
"ListDatasetsRequest",
"ListDatasetsResponse",
"DeleteDatasetRequest",
"ImportDataRequest",
"ImportDataResponse",
"ImportDataOperationMetadata",
"ExportDataRequest",
"ExportDataResponse",
"ExportDataOperationMetadata",
"ListDataItemsRequest",
"ListDataItemsResponse",
"GetAnnotationSpecRequest",
"ListAnnotationsRequest",
"ListAnnotationsResponse",
},
)
class CreateDatasetRequest(proto.Message):
r"""Request message for
[DatasetService.CreateDataset][google.cloud.aiplatform.v1.DatasetService.CreateDataset].
Attributes:
parent (str):
Required. The resource name of the Location to create the
Dataset in. Format:
``projects/{project}/locations/{location}``
dataset (google.cloud.aiplatform_v1.types.Dataset):
Required. The Dataset to create.
"""
parent = proto.Field(proto.STRING, number=1,)
dataset = proto.Field(proto.MESSAGE, number=2, message=gca_dataset.Dataset,)
class CreateDatasetOperationMetadata(proto.Message):
r"""Runtime operation information for
[DatasetService.CreateDataset][google.cloud.aiplatform.v1.DatasetService.CreateDataset].
Attributes:
generic_metadata (google.cloud.aiplatform_v1.types.GenericOperationMetadata):
The operation generic information.
"""
generic_metadata = proto.Field(
proto.MESSAGE, number=1, message=operation.GenericOperationMetadata,
)
class GetDatasetRequest(proto.Message):
r"""Request message for
[DatasetService.GetDataset][google.cloud.aiplatform.v1.DatasetService.GetDataset].
Attributes:
name (str):
Required. The name of the Dataset resource.
read_mask (google.protobuf.field_mask_pb2.FieldMask):
Mask specifying which fields to read.
"""
name = proto.Field(proto.STRING, number=1,)
read_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask,)
class UpdateDatasetRequest(proto.Message):
r"""Request message for
[DatasetService.UpdateDataset][google.cloud.aiplatform.v1.DatasetService.UpdateDataset].
Attributes:
dataset (google.cloud.aiplatform_v1.types.Dataset):
Required. The Dataset which replaces the
resource on the server.
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Required. The update mask applies to the resource. For the
``FieldMask`` definition, see
[google.protobuf.FieldMask][google.protobuf.FieldMask].
Updatable fields:
- ``display_name``
- ``description``
- ``labels``
"""
dataset = proto.Field(proto.MESSAGE, number=1, message=gca_dataset.Dataset,)
update_mask = proto.Field(
proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask,
)
class ListDatasetsRequest(proto.Message):
r"""Request message for
[DatasetService.ListDatasets][google.cloud.aiplatform.v1.DatasetService.ListDatasets].
Attributes:
parent (str):
Required. The name of the Dataset's parent resource. Format:
``projects/{project}/locations/{location}``
filter (str):
An expression for filtering the results of the request. For
field names both snake_case and camelCase are supported.
- ``display_name``: supports = and !=
- ``metadata_schema_uri``: supports = and !=
- ``labels`` supports general map functions that is:
        - ``labels.key=value`` - key:value equality
- \`labels.key:\* or labels:key - key existence
- A key including a space must be quoted.
``labels."a key"``.
Some examples:
- ``displayName="myDisplayName"``
- ``labels.myKey="myValue"``
page_size (int):
            The standard list page size.
page_token (str):
The standard list page token.
read_mask (google.protobuf.field_mask_pb2.FieldMask):
Mask specifying which fields to read.
order_by (str):
A comma-separated list of fields to order by, sorted in
ascending order. Use "desc" after a field name for
descending. Supported fields:
- ``display_name``
- ``create_time``
- ``update_time``
"""
parent = proto.Field(proto.STRING, number=1,)
filter = proto.Field(proto.STRING, number=2,)
page_size = proto.Field(proto.INT32, number=3,)
page_token = proto.Field(proto.STRING, number=4,)
read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask_pb2.FieldMask,)
order_by = proto.Field(proto.STRING, number=6,)
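The filter grammar documented above can be exercised directly on the request type; a minimal sketch, with a placeholder project and only fields defined in this module:

```python
# Hypothetical usage of ListDatasetsRequest; parent is a placeholder resource name.
from google.cloud.aiplatform_v1.types import dataset_service

request = dataset_service.ListDatasetsRequest(
    parent="projects/my-project/locations/us-central1",
    filter='labels.myKey="myValue"',  # one of the documented filter forms
    page_size=50,                     # standard list pagination
    order_by="create_time desc",      # descending sort, as documented
)
```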
class ListDatasetsResponse(proto.Message):
r"""Response message for
[DatasetService.ListDatasets][google.cloud.aiplatform.v1.DatasetService.ListDatasets].
Attributes:
datasets (Sequence[google.cloud.aiplatform_v1.types.Dataset]):
A list of Datasets that matches the specified
filter in the request.
next_page_token (str):
The standard List next-page token.
"""
@property
def raw_page(self):
return self
datasets = proto.RepeatedField(
proto.MESSAGE, number=1, message=gca_dataset.Dataset,
)
next_page_token = proto.Field(proto.STRING, number=2,)
class DeleteDatasetRequest(proto.Message):
r"""Request message for
[DatasetService.DeleteDataset][google.cloud.aiplatform.v1.DatasetService.DeleteDataset].
Attributes:
name (str):
Required. The resource name of the Dataset to delete.
Format:
``projects/{project}/locations/{location}/datasets/{dataset}``
"""
name = proto.Field(proto.STRING, number=1,)
class ImportDataRequest(proto.Message):
r"""Request message for
[DatasetService.ImportData][google.cloud.aiplatform.v1.DatasetService.ImportData].
Attributes:
name (str):
Required. The name of the Dataset resource. Format:
``projects/{project}/locations/{location}/datasets/{dataset}``
import_configs (Sequence[google.cloud.aiplatform_v1.types.ImportDataConfig]):
Required. The desired input locations. The
contents of all input locations will be imported
in one batch.
"""
name = proto.Field(proto.STRING, number=1,)
import_configs = proto.RepeatedField(
proto.MESSAGE, number=2, message=gca_dataset.ImportDataConfig,
)
class ImportDataResponse(proto.Message):
r"""Response message for
[DatasetService.ImportData][google.cloud.aiplatform.v1.DatasetService.ImportData].
"""
class ImportDataOperationMetadata(proto.Message):
r"""Runtime operation information for
[DatasetService.ImportData][google.cloud.aiplatform.v1.DatasetService.ImportData].
Attributes:
rven/odoo | addons/website_event/models/__init__.py | Python | agpl-3.0 | 296 | 0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from . import event_event
from . import event_registration
from . import event_type
from . import website
from . import website_event_menu
from . import website_menu
from . import website_visitor
bhell/jimi | jimi/jimi/catalog/urls.py | Python | bsd-3-clause | 178 | 0.005618
from django.conf.urls.defaults import patterns
urlpatterns = patterns("jimi.catalog.views",
(r"^/?$", "all_categories"),
(r"^(?P<slug>[-\w]+)/$", "node", {}, "node")
|
,
)
factorlibre/l10n-spain | l10n_es_facturae/models/__init__.py | Python | agpl-3.0 | 405 | 0
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from . import payment_mode
from . import res_company
from . import res_partner
from . import account_tax_template
from . import account_tax
from . import res_currency
from . import account_invoice_integration
from . import account_invoice_integration_method
from . import account_invoice_integration_log
from . import account_invoice
windskyer/mvpn | mvpn/openstack/common/excutils.py | Python | gpl-2.0 | 655 | 0.001527
# The sample starts mid-file; the oslo-incubator original needs at least these
# imports (the gettextutils path for _() is an assumption based on this repo layout):
import logging
import sys
import traceback

import six

from mvpn.openstack.common.gettextutils import _


class save_and_reraise_exception(object):
def __init__(self):
self.reraise = True
def __enter__(self):
self.type_, self.value, self.tb, = sys.exc_info()
return self
    def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is not None:
logging.error(_('Original exception being dropped: %s'),
traceback.format_exception(self.type_,
self.value,
self.tb))
return False
if self.reraise:
six.reraise(self.type_, self.value, self.tb)
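This is the familiar oslo-incubator save-and-reraise pattern: run cleanup inside an `except` block without losing the original traceback. A usage sketch, where `do_work` and `cleanup` are placeholders:

```python
# If cleanup() raises, the original exception is logged and the new one
# propagates; otherwise the original is re-raised when the block exits.
try:
    do_work()
except Exception:
    with save_and_reraise_exception() as ctxt:
        cleanup()
        # set ctxt.reraise = False here to deliberately swallow the original
```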
fernandolobato/balarco | clients/migrations/0003_auto_20170221_0107.py | Python | mit | 804 | 0
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-02-21 01:07
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('clients', '0002_contact'),
]
operations = [
migrations.AlterField(
            model_name='contact',
name='alternate_email',
field=models.EmailField(blank=True, max_length=255),
),
migrations.AlterField(
model_name='contact',
name='alternate_phone',
            field=models.CharField(blank=True, max_length=50),
),
migrations.AlterField(
model_name='contact',
name='phone',
field=models.CharField(blank=True, max_length=50),
),
]
TheStackBox/xuansdk | SDKLibrary/com/cloudMedia/theKuroBox/sdk/paramTypes/kbxHSBColor.py | Python | gpl-3.0 | 2,217 | 0.00406
##############################################################################################
# Copyright 2014-2015 Cloud Media Sdn. Bhd.
#
# This file is part of Xuan Application Development SDK.
#
# Xuan Application Development SDK is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Xuan Application Development SDK is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Xuan Application Development SDK. If not, see <http://www.gnu.org/licenses/>.
##############################################################################################
from com.cloudMedia.theKuroBox.sdk.paramTypes.kbxObject import KBXObjectType
from com.cloudMedia.theKuroBox.sdk.paramTypes.kbxParamType import KBXParamType
from com.cloudMedia.theKuroBox.sdk.paramTypes.kbxParamWrapper import KBXParamWrapper
from com.cloudMedia.theKuroBox.sdk.util.logger import Logger
class KBXHSBColorType(KBXObjectType):
TYPE_NAME = "kbxHSBColor"
PROP_KBX_PARAM_OBJ_KEY_HUE = "h"
PROP_KBX_PARAM_OBJ_KEY_SATURATION = "s"
PROP_KBX_PARAM_OBJ_KEY_BRIGHTNESS = "b"
def __init__(self, kbxParamIsRequired=True):
pass
def cast(self, value):
pass
class DTO(dict):
@staticmethod
def build(h, s, b):
pass
def set_hue(self, value):
pass
def set_saturation(self, value):
pass
def set_brightness(self, value):
pass
def get_hue(self):
pass
def get_saturation(self):
pass
def get_brightness(self):
pass
class KBXHSBColor(KBXHSBColorType, KBXParamWrapper):
def __init__(self, kbxParamName, kbxParamIsRequired=True, **kbxParamProps):
pass
calancha/DIRAC | ResourceStatusSystem/Agent/CacheFeederAgent.py | Python | gpl-3.0 | 6,141 | 0.033708
# $HeadURL: $
''' CacheFeederAgent
This agent feeds the Cache tables with the outputs of the cache commands.
'''
from DIRAC import S_OK#, S_ERROR, gConfig
from DIRAC.AccountingSystem.Client.ReportsClient import ReportsClient
from DIRAC.Core.Base.AgentModule import AgentModule
from DIRAC.Core.DISET.RPCClient import RPCClient
from DIRAC.Core.LCG.GOCDBClient import GOCDBClient
from DIRAC.ResourceStatusSystem.Client.ResourceStatusClient import ResourceStatusClient
from DIRAC.ResourceStatusSystem.Command import CommandCaller
#from DIRAC.ResourceStatusSystem.Utilities import CSHelpers
from DIRAC.ResourceStatusSystem.Utilities import Utils
ResourceManagementClient = getattr(Utils.voimport( 'DIRAC.ResourceStatusSystem.Client.ResourceManagementClient' ),'ResourceManagementClient')
__RCSID__ = '$Id: $'
AGENT_NAME = 'ResourceStatus/CacheFeederAgent'
class CacheFeederAgent( AgentModule ):
'''
The CacheFeederAgent feeds the cache tables for the client and the accounting.
It runs periodically a set of commands, and stores it's results on the
tables.
'''
# Too many public methods
# pylint: disable-msg=R0904
def __init__( self, *args, **kwargs ):
AgentModule.__init__( self, *args, **kwargs )
self.commands = {}
self.clients = {}
self.cCaller = None
self.rmClient = None
def initialize( self ):
self.am_setOption( 'shifterProxy', 'DataManager' )
self.rmClient = ResourceManagementClient()
self.commands[ 'Downtime' ] = [ { 'Downtime' : {} } ]
self.commands[ 'SpaceTokenOccupancy' ] = [ { 'SpaceTokenOccupancy' : {} } ]
#PilotsCommand
# self.commands[ 'Pilots' ] = [
# { 'PilotsWMS' : { 'element' : 'Site', 'siteName' : None } },
# { 'PilotsWMS' : { 'element' : 'Resource', 'siteName' : None } }
# ]
#FIXME: do not forget about hourly vs Always ...etc
#AccountingCacheCommand
# self.commands[ 'AccountingCache' ] = [
# {'SuccessfullJobsBySiteSplitted' :{'hours' :24, 'plotType' :'Job' }},
# {'FailedJobsBySiteSplitted' :{'hours' :24, 'plotType' :'Job' }},
# {'SuccessfullPilotsBySiteSplitted' :{'hours' :24, 'plotType' :'Pilot' }},
# {'FailedPilotsBySiteSplitted' :{'hours' :24, 'plotType' :'Pilot' }},
# {'SuccessfullPilotsByCESplitted' :{'hours' :24, 'plotType' :'Pilot' }},
# {'FailedPilotsByCESplitted' :{'hours' :24, 'plotType' :'Pilot' }},
# {'RunningJobsBySiteSplitted' :{'hours' :24, 'plotType' :'Job' }},
## {'RunningJobsBySiteSplitted' :{'hours' :168, 'plotType' :'Job' }},
## {'RunningJobsBySiteSplitted' :{'hours' :720, 'plotType' :'Job' }},
## {'RunningJobsBySiteSplitted' :{'hours' :8760, 'plotType' :'Job' }},
# ]
#VOBOXAvailability
# self.commands[ 'VOBOXAvailability' ] = [
# { 'VOBOXAvailability' : {} }
#
#Reuse clients for the commands
self.clients[ 'GOCDBClient' ] = GOCDBClient()
self.clients[ 'ReportGenerator' ] = RPCClient( 'Accounting/ReportGenerator' )
self.clients[ 'ReportsClient' ] = ReportsClient()
self.clients[ 'ResourceStatusClient' ] = ResourceStatusClient()
self.clients[ 'ResourceManagementClient' ] = ResourceManagementClient()
    self.clients[ 'WMSAdministrator' ] = RPCClient( 'WorkloadManagement/WMSAdministrator' )
self.cCaller = CommandCaller
return S_OK()
def loadCommand( self, commandModule, commandDict ):
commandName = commandDict.keys()[ 0 ]
commandArgs = commandDict[ commandName ]
    commandTuple = ( '%sCommand' % commandModule, '%sCommand' % commandName )
commandObject = self.cCaller.commandInvocation( commandTuple, pArgs = commandArgs,
clients = self.clients )
if not commandObject[ 'OK' ]:
self.log.error( 'Error initializing %s' % commandName )
return commandObject
commandObject = commandObject[ 'Value' ]
# Set master mode
commandObject.masterMode = True
self.log.info( '%s/%s' % ( commandModule, commandName ) )
return S_OK( commandObject )
def execute( self ):
for commandModule, commandList in self.commands.items():
self.log.info( '%s module initialization' % commandModule )
for commandDict in commandList:
commandObject = self.loadCommand( commandModule, commandDict )
if not commandObject[ 'OK' ]:
self.log.error( commandObject[ 'Message' ] )
continue
commandObject = commandObject[ 'Value' ]
results = commandObject.doCommand()
if not results[ 'OK' ]:
self.log.error( results[ 'Message' ] )
continue
results = results[ 'Value' ]
if not results:
self.log.info( 'Empty results' )
continue
self.log.verbose( 'Command OK Results' )
self.log.verbose( results )
return S_OK()
################################################################################
#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF
dataplumber/edge | src/main/python/libraries/edge/opensearch/templateresponse.py | Python | apache-2.0 | 1,202 | 0.00416
import logging
from xml.dom.minidom import *
from jinja2 import Environment, Template
from edge.dateutility import DateUtility
from edge.opensearch.response import Response
class TemplateResponse(Response):
def __init__(self):
super(TemplateResponse, self).__init__()
self.env = Environment()
self.env.trim_blocks = True
self.env.autoescape = True
self.variables = {}
self.env.filters['convertISOTime'] = DateUtility.convertISOTime
def setTemplate(self, template):
self.template = self.env.from_string(template)
def generate(self, pretty=False):
logging.debug('TemplateResponse.generate is called.')
if pretty:
try :
xmlStr = self.template.render(self.variables).encode('utf-8').replace('\n', '')
except Exception as e:
                logging.debug("Problem generating template " + str(e))
xmlStr = self.template.render({}).encode('utf-8').replace('\n', '')
            document = parseString(xmlStr)  # from the minidom star import; the bare name `xml` is never bound here
return document.toprettyxml()
else:
return self.template.render(self.variables).replace('\n', '')
jeremiah-c-leary/vhdl-style-guide | vsg/tests/generate/test_rule_015.py | Python | gpl-3.0 | 1,158 | 0.004318
import os
import unittest
from vsg.rules import generate
from vsg import vhdlFile
from vsg.tests import utils
sTestDir = os.path.dirname(__file__)
lFile, eError = vhdlFile.utils.read_vhdlfile(os.path.join(sTestDir, 'rule_015_test_input.vhd'))
lExpected = []
lExpected.append('')
utils.read_file(os.path.join(sTestDir, 'rule_015_test_input.fixed.vhd'), lExpected)
class test_generate_rule(unittest.TestCase):
def setUp(self):
self.oFile = vhdlFile.vhdlFile(lFile)
self.assertIsNone(eError)
def test_rule_015(self):
oRule = generate.rule_015()
self.assertTrue(oRule)
self.assertEqual(oRule.name, 'generate')
        self.assertEqual(oRule.identifier, '015')
lExpected = [20, 25, 30]
oRule.analyze(self.oFile)
self.assertEqual(lExpected, utils.extract_violation_lines_from_violation_object(oRule.violations))
def test_fix_rule_015(self):
oRule = generate.rule_015()
oRule.fix(self.oFile)
lActual = self.oFile.get_lines()
self.assertEqual(lExpected, lActual)
oRule.analyze(self.oFile)
self.assertEqual(oRule.violations, [])
danggrianto/big-list-of-naughty-strings | setup.py | Python | mit | 458 | 0.002183
from setuptools import find_packages, setup
setup(
name='blns',
    version='0.1.7',
url='https://github.com/danggrianto/big-list-of-naughty-strings',
license='MIT',
author='Daniel Anggrianto',
author_email='[email protected]',
    description='Big List of Naughty String. Forked from https://github.com/minimaxir/big-list-of-naughty-strings',
keywords='Big List of Naughty String',
packages=['blns'],
platforms='any',
)
magicmilo/MiSynth-Wavetable-Generator | main.py | Python | apache-2.0 | 9,015 | 0.003439
#!/usr/bin/env python3
# coding: utf-8
#Initial test code for MiSynth Wave Generator
#Opens Wave Files And Cuts And Plays Them As The FPGA will
#Synth plays back 2048 samples at frequency of note
#Effective sample rate is 901,120Hz @ 440Hz
#CURRENTLY A DRAWING LOOP TO BE SOLVED, THANKS WX/PYTHON FOR YOUR
#COMPLETE LACK OF TRANSPARENCY
#ALWAYS USE TKINTER
import wave
import wx
import audiothread
import wavehandle
import sdisp
class MyFrame(wx.Frame):
def __init__(self, parent, title, wavehandle):
wx.Frame.__init__(self, parent, -1, title, size=(1024, 624))
self.wavehandle = wavehandle
self.scale = 8
self.shift = 0
self.drawcnt = 0
self.scope = [0]
# Create the menubar
menuBar = wx.MenuBar()
menu = wx.Menu()
menu.Append(wx.ID_OPEN, "Open\tAlt-O", "Open Wave")
menu.Append(wx.ID_EXIT, "E&xit\tAlt-X", "Exit")
# bind the menu event s
self.Bind(wx.EVT_MENU, self.OnOpenButton, id=wx.ID_OPEN)
        self.Bind(wx.EVT_MENU, self.OnQuitButton, id=wx.ID_EXIT)
menuBar.Append(menu, "&Actions")
self.SetMenuBar(menuBar)
self.wavepanel = WavePanel(self, self.getscale, self.setsector)
self.wavepanel.SetBackgroundColour(wx.Colour(32,55,91))
self.scopepanel = ScopePanel(self)
self.scopepanel.SetBackgroundColour(wx.Colour(20,25,20))
self.buttonpanel = wx.Panel(self, -1, pos=(0, 384), size=(1024, 40))
self.textpanel = sdisp.TextPanel(self)
self.timestamp = wx.StaticText(self.wavepanel, -1,
("Time: " + str(0.0)
+ "/" + str(0.0)),
pos=(2, 2),
style=wx.ALIGN_LEFT)
self.timestamp.SetForegroundColour((217, 66, 244))
btnOpen = wx.Button(self.buttonpanel, wx.ID_OPEN, "Open",
pos=(2, 0), size=(80, 40))
btnExport = wx.Button(self.buttonpanel, -1, "Export",
pos=(84, 0), size=(80, 40))
btnQuit = wx.Button(self.buttonpanel, wx.ID_EXIT, "Quit",
pos=(166, 0), size=(80, 40))
self.btnPlay = wx.ToggleButton(self.buttonpanel, -1, "Play",
pos=(943, 0), size=(80, 40))
# bind the button events to handlers
self.Bind(wx.EVT_BUTTON, self.OnOpenButton, btnOpen)
self.Bind(wx.EVT_BUTTON, self.OnExportButton, btnExport)
self.Bind(wx.EVT_BUTTON, self.OnQuitButton, btnQuit)
self.Bind(wx.EVT_TOGGLEBUTTON, self.OnPlayButton, self.btnPlay)
self.Bind(wx.EVT_MOUSEWHEEL, self.onMouseWheel)
self.wavepanel.Bind(wx.EVT_PAINT, self.onPaint)
self.contentNotSaved = False
self.fileloaded = False
self.quadrant = -1
self.Centre()
def setsector(self, sector):
self.quadrant = abs(sector)
self.Refresh()
def getscale(self):
return self.scale
def getSample(self, sector):
print("obtaining sample")
if self.quadrant == -1:
self.setsector(1)
sample = self.wavehandle.getaudiodata(self.shift, 0, sector)
return sample
def onPaint(self, event):
self.drawcnt += 1
#print("Drawing" + str(self.drawcnt))
dc = wx.PaintDC(self.wavepanel)
dc.Clear()
totalseconds = self.wavehandle.gettotaltime()
shiftseconds = self.wavehandle.framestoseconds(self.shift)
self.timestamp.SetLabel("Time: " + str(shiftseconds) + "/" + str(
totalseconds))
dc.SetBrush(wx.Brush(wx.Colour(16, 28, 45), wx.SOLID))
dc.DrawRectangle(256, 0, 512, 256)
# Centre Line
pointdata = self.wavehandle.getdrawpoints(self.shift)
for x in range(1, 1024): # Ugly
if (x > 256) and (x < 768):
dc.SetPen(wx.Pen((0, 255, 242), 1, wx.PENSTYLE_SOLID))
else:
dc.SetPen(wx.Pen((183, 204, 163), 1, wx.PENSTYLE_SOLID))
dc.DrawLine(x - 1, pointdata[x - 1], x, pointdata[x])
#dc.DrawPoint(x, pointdata[x])
if (x == 256) or (x == 768):
dc.SetPen(wx.Pen((0, 0, 0), 1, wx.PENSTYLE_DOT))
dc.DrawLine(x, 0, x, 256)
if (x == 496) or (x == 528):
dc.SetPen(wx.Pen((0, 0, 0), 1, wx.PENSTYLE_DOT))
dc.DrawLine(x, 0, x, 256)
dc = wx.PaintDC(self.scopepanel)
dc.Clear()
dc.SetPen(wx.Pen((256,0,0), 1, wx.PENSTYLE_SOLID))
for x in range(0, 1024):
if len(self.scope) > 1:
p = self.scope[x % len(self.scope)] + 64
else:
p = 64
dc.DrawPoint(x, p)
def OnPlayButton(self, event):
if self.btnPlay.GetValue():
self.audiohandle = audiothread.AudioHandler()
if self.fileloaded:
self.audiohandle.setsample(self.getSample(self.quadrant), 2048)
self.scope = self.audiohandle.getscopesample()
print("sample length: " + str(len(self.scope)))
self.audiohandle.start()
else:
self.audiohandle.stop()
self.audiohandle = None
def onMouseWheel(self, event):
if self.wavepanel.mouseOver:
if self.wavepanel.ctrlDown:
if event.GetWheelRotation() > 0:
if(self.scale > 1):
self.scale = self.scale >> 1
else:
if(self.scale < 2097151):
self.scale = self.scale << 1
self.Refresh()
else:
if event.GetWheelRotation() > 0:
if(self.shift > 0):
self.shift -= 2000
else:
if (self.shift < 10000000):
self.shift += 2000
self.Refresh()
if self.scopepanel.mouseOver:
if event.GetWheelRotation() > 0:
self.audiohandle.setshift(1)
else:
self.audiohandle.setshift(-1)
self.scope = self.audiohandle.getscopesample()
self.Refresh()
def OnOpenButton(self, evt):
#Open file
with wx.FileDialog(self, "Open .wav file.", wildcard="WAV files (*.wav)|*.wav",
style=wx.FD_OPEN | wx.FD_FILE_MUST_EXIST) as fileDialog:
if fileDialog.ShowModal() == wx.ID_CANCEL:
return # the user changed their mind
pathname = fileDialog.GetPath()
try:
with wave.open(pathname, 'r') as file:
self.wavehandle.loadwave(file)
self.Refresh()
self.fileloaded = True
except IOError:
wx.LogError("Cannot open file '%s'." % pathname)
def OnExportButton(self, evt):
print("Export")
def OnQuitButton(self, evt):
self.Close()
class WavePanel(wx.Panel): #just handles mouseover events
def __init__(self, parent, getter, sender):
wx.Panel.__init__(self, parent, pos=(0,0),size=(1024, 256))
self.mouseOver = False
self.ctrlDown = False
self.Bind(wx.EVT_ENTER_WINDOW, self.onMouseOver)
self.Bind(wx.EVT_LEAVE_WINDOW, self.onMouseLeave)
self.Bind(wx.EVT_KEY_DOWN, self.onKeyPress)
self.Bind(wx.EVT_KEY_UP, self.onKeyRelease)
self.Bind(wx.EVT_LEFT_DOWN, self.onMouseClick)
self.getter = getter
self.sender = sender
def onMouseClick(self, event):
if self.mouseOver:
x, y = self.ScreenToClient(wx.GetMousePosition())
sector = abs(x // (2048 / self.getter()))
self.sender(sector)
def onMouseOver(self, event):
self.mouseOver = True
def onMouseLeave(self, event):
self.mouseOver = False
def onKeyPress(self, event):
keycode = event.GetKeyCode()
if keycode == wx.WXK_CONTROL:
self.ctrlDown = True
def onKeyRelease(self, event):
keycode = event
lvh/txyoga | setup.py | Python | isc | 528 | 0.017045
import setuptools
setuptools.setup(name='txyoga',
version='0',
description='REST toolkit for Twisted',
url='https://github.com/lvh/txyoga',
author='Laurens Van Houtven',
author_email='[email protected]',
packages = setuptools.find_packages(),
requires=['twisted'],
license='ISC',
classifiers=[
"Development Status :: 3 - Alpha",
"Framework :: Twisted",
"License :: OSI Approved :: ISC License (ISCL)",
"Topic :: Internet :: WWW/HTTP",
])
0shimax/DL-vision | src/net/cifar10.py | Python | mit | 1,111 | 0
import chainer
import chainer.functions as F
import chainer.links as L
class Cifar10(chainer.Chain):
def __init__(self, n_class, in_ch):
super().__init__(
conv1=L.Convolution2D(in_ch, 32, 5, pad=2),
conv2=L.Convolution2D(32, 32, 5, pad=2),
conv3=L.Convolution2D(32, 64, 5, pad=2),
fc4=F.Linear(1344, 4096),
fc5=F.Linear(4096, n_class),
)
self.train = True
self.n_class = n_class
    def __call__(self, x, t):
x.volatile = True
h = F.max_pooling_2d(F.elu(self.conv1(x)), 3, stride=2)
h = F.max_pooling_2d(F.elu(self.conv2(h)), 3, stride=2)
h = F.elu(self.conv3(h))
h.volatile = False
h = F.spatial_pyramid_pooling_2d(h, 3, F.MaxPooling2D)
h = F.dropout(F.elu(self.fc4(h)), ratio=0.5, train=self.train)
h = self.fc5(h)
self.prob = F.softmax(h)
self.loss = F.softmax_cross_entropy(h, t)
self.accuracy = F.accuracy(h, t)
chainer.report({'loss': self.loss, 'accuracy': self.accuracy}, self)
return self.loss
tiangolo/fastapi | docs_src/extending_openapi/tutorial003.py | Python | mit | 205 | 0
from fastapi import FastAPI
app = FastAPI(swagger_ui_parameters={"syntaxHighlight": False})
@app.get("/users/{username}")
async def read_user(username: str):
    return {"message": f"Hello {username}"}
nens/githubinfo | githubinfo/commits.py | Python | gpl-3.0 | 11,594 | 0.000086
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from collections import defaultdict
# from pprint import pprint
import argparse # Note: python 2.7+
import datetime
import json
import logging
import os
import sys
import requests
from githubinfo import __version__
ORG_REPOS_URL = 'https://api.github.com/orgs/{organization}/repos'
COMMITS_URL = 'https://api.github.com/repos/{owner}/{project}/commits'
BRANCHES_URL = 'https://api.github.com/repos/{owner}/{project}/branches'
# Settings are global and can be modified by some setup/init method.
SETTINGS = {
'auth': None, # Set it to ('username', 'very_secret').
'days': 7,
'organizations': [
'ddsc',
'lizardsystem',
'nens',
],
'extra_projects': [
# ('organization', 'project'),
('reinout', 'buildout'),
('reinout', 'django-rest-framework'),
('reinout', 'serverinfo'),
('reinout', 'z3c.dependencychecker'),
('rvanlaar', 'djangorecipe'),
('zestsoftware', 'zest.releaser'),
],
}
SETTINGS_FILENAME = 'settings.json'
logger = logging.getLogger(__name__)
def since():
"""Return iso-formatted string for github from-that-date query."""
now = datetime.datetime.now()
a_while_ago = now - datetime.timedelta(days=SETTINGS['days'])
return a_while_ago.isoformat()
def grab_json(url, params=None, second_try=False):
"""Return json from URL, including handling pagination."""
auth = SETTINGS['auth']
if isinstance(auth, list):
auth = tuple(auth)
req = requests.get(url, auth=auth, params=params)
if req.status_code == 401 and not second_try:
# Unauthorized. Somehow this happens to me in rare cases.
# Retry it once.
logger.warn("Got a 401 unauthorized on %s, retrying it", url)
return grab_json(url, params=params, second_try=True)
result = req.json()
is_expected_type = (isinstance(result, list) or isinstance(result, dict))
if not is_expected_type and not second_try:
# Wrong type. String error message, probably.
# Retry it once.
logger.warn("Got a wrong type (%r) on %s, retrying it", result, url)
return grab_json(url, params=params, second_try=True)
if req.links.get('next'):
# Paginated content, so we want to grab the rest.
url = req.links['next']['url']
# The assumption is "paginated content means it is a list".
result += grab_json(url, params=params)
return result
def is_testfile(fileinfo):
filepath = fileinfo['filename']
if 'testsettings.py' in filepath:
# This one almost always doesn't have anything to do with
# an added test.
return False
if 'test' in filepath:
return True
if filepath.endswith('.rst') or filepath.endswith('.txt'):
# Possible doctest.
if '>>>' in fileinfo.get('patch', ''):
return True
return False
def load_custom_settings(settings_file=SETTINGS_FILENAME):
"""Update our default settings with the json found in the settings file.
"""
# Note: settings_file is only a kwarg to make it testable.
if os.path.exists(settings_file):
custom_settings = json.loads(open(settings_file).read())
SETTINGS.update(custom_settings)
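# A hypothetical settings.json showing the override mechanism above
# (keys mirror the SETTINGS defaults; the values here are made up):
#
#     {
#       "auth": ["username", "very_secret"],
#       "days": 14,
#       "organizations": ["myorg"]
#     }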
class Commit(object):
"""Wrapper around a commit dict from github's API."""
def __init__(self, the_dict):
self.num_testfiles_changed = 0
self.user = the_dict['commit']['committer']['name']
commit_url = the_dict['url']
commit_info = grab_json(commit_url)
for changed_file in commit_info.get('files', []):
if is_testfile(changed_file):
self.num_testfiles_changed += 1
logger.debug("Test file: {}".format(changed_file['filename']))
@property
def is_testcommit(self):
return bool(self.num_testfiles_changed)
class TestCommitCounter(object):
def __init__(self):
self.num_commits = 0
self.num_testcommits = 0
self.testfiles_changed = 0
def __cmp__(self, other):
return cmp((-self.num_testcommits, self.num_commits),
(-other.num_testcommits, other.num_commits))
def add_commit(self, commit):
self.num_commits += 1
if commit.is_testcommit:
self.num_testcommits += 1
self.testfiles_changed += commit.num_testfiles_changed
@property
def percentage(self):
"""Return percentage of test commits to total.
Return it as a string including parentheses.
If there are no test commits, omit the percentage.
"""
if not self.num_testcommits:
return ''
result = str(int(100.0 * self.num_testcommits / self.num_commits))
return '({}%)'.format(result)
def print_info(self):
msg = "{name}: {tested} {percentage}"
print(msg.format(name=self.name,
tested=self.num_testcommits,
percentage=self.percentage))
def as_dict(self):
percentage = self.percentage.replace('(', '').replace(')', '') # Sigh.
return dict(name=self.name,
num_testcommits=self.num_testcommits,
percentage=percentage)
class Project(TestCommitCounter):
def __init__(self, owner, project, users,
restrict_to_known_users=False):
super(Project, self).__init__()
self.owner = owner
self.name = project
self.users = users
self.restrict_to_known_users = restrict_to_known_users
def load(self):
        logger.debug("Loading project {}...".format(self.name))
self.branch_SHAs = self.load_branches()
self.commits = self.load_project_commits()
self.load_individual_commits()
def load_branches(self):
"""Return SHAs of commits for branches."""
url = BRANCHES_URL.format(owner=self.owner, project=self.name)
branches = grab_json(url)
if not isinstance(branches, list):
logger.warn("Expected list, got %r, retrying.", branches)
return self.load_branches()
return [branch['commit']['sha'] for branch in branches]
def load_project_commits(self):
result = []
url = COMMITS_URL.format(owner=self.owner, project=self.name)
for branch_SHA in self.branch_SHAs:
result += grab_json(url, params={'since': since(),
'sha': branch_SHA})
return result
def load_individual_commits(self):
for commit in self.commits:
if not isinstance(commit, dict):
logger.warn("dict in commit isn't a dict: %r" % commit)
logger.debug("the full list of commits:")
logger.debug(self.commits)
logger.warn("Continuing anyway...")
continue
the_commit = Commit(commit)
if self.restrict_to_known_users:
if the_commit.user not in self.users:
continue
self.users[the_commit.user].add_commit(the_commit)
self.add_commit(the_commit)
@property
def is_active(self):
return bool(self.num_commits)
class User(TestCommitCounter):
name = None # We set that from within the commits.
def add_commit(self, commit):
if not self.name:
self.name = commit.user
TestCommitCounter.add_commit(self, commit)
def show_config():
"""Print the current configuration
TODO: add some usage instructions.
"""
if not os.path.exists(SETTINGS_FILENAME):
logger.warn("""
%s does not exist. See https://pypi.python.org/pypi/githubinfo for
a configuration explanation.
The defaults are probably not what you want :-)""")
logger.info("The current settings are:")
print(json.dumps(SETTINGS, indent=2))
sys.exit(0)
def parse_commandline():
"""Parse commandline options and set up logging.
"""
parser = argparse.ArgumentParser(
description
|
kdart/pycopia3
|
process/setup.py
|
Python
|
apache-2.0
| 1,057
| 0
|
#!/usr/bin/python3.4
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
import sys
from glob import glob
from setuptools import setup
NAME = "pycopia3-process"
VERSION = "1.0"
if sys.platform not in ("win32", "cli"):
DATA_FILES = [
('/etc/pycopia', glob("etc/*")),
]
else:
DATA_FILES = []
setup(name=NAME, version=VERSION,
namespace_packages=["pycopia"],
packages=["pycopia"],
test_suite="test.Pro
|
cessTests",
# install_requires=['pycopia-core>=1.0.dev-r138,==dev'],
data_files=DATA_FILES,
description="Modules for running, interacting with, and managing processes.", # noqa
long_description=open("README.md").read(),
license="LGPL",
author="Keith Dart",
keywords="pycopia framework",
url="http://www.pycopia.net/",
classifiers=["Operating System :: POSIX",
"Topic :: Software Development :: Libraries :: Python Modules", # noqa
"Topic :: System :: Operating System",
"Intended Audience :: Developers"],
)
|
boto/s3transfer
|
tests/unit/test_subscribers.py
|
Python
|
apache-2.0
| 3,197
| 0
|
# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License'). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the 'license' file accompanying this file. This file is
# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from s3transfer.exceptions import InvalidSubscriberMethodError
from s3transfer.subscribers import BaseSubscriber
from tests import unittest
class ExtraMethodsSubscriber(BaseSubscriber):
def extra_method(self):
return 'called extra method'
class NotCallableSubscriber(BaseSubscriber):
on_done = 'foo'
class NoKwargsSubscriber(BaseSubscriber):
def on_done(self):
pass
class OverrideMethodSubscriber(BaseSubscriber):
def on_queued(self, **kwargs):
return kwargs
class OverrideConstructorSubscriber(BaseSubscriber):
def __init__(self, arg1, arg2):
self.arg1 = arg1
self.arg2 = arg2
class TestSubscribers(unittest.TestCase):
def test_can_instantiate_base_subscriber(self):
try:
BaseSubscriber()
except InvalidSubscriberMethodError:
self.fail('BaseSubscriber should be instantiable')
def test_can_call_base_subscriber_method(self):
subscriber = BaseSubscriber()
try:
subscriber.on_done(future=None)
except Exception as e:
self.fail(
'Should be able to call base class subscriber method. '
'instead got: %s' % e
)
def test_subclass_can_have_and_call_additional_methods(self):
subscriber = ExtraMethodsSubscriber()
self.assertEqual(subscriber.extra_method(), 'called extra method')
def test_can_subclass_and_override_method_from_base_subscriber(self):
subscriber = OverrideMethodSubscriber()
# Make sure that the overridden method is called
self.assertEqual(subscriber.on_queued(foo='bar'), {'foo': 'bar'})
def test_can_subclass_and_override_constructor_from_base_class(self):
subscriber = OverrideConstructorSubscriber('foo', arg2='bar')
# Make sure you can create a custom constructor.
self.assertEqual(subscriber.arg1, 'foo')
self.assertEqual(subscriber.arg2, 'bar')
def test_invalid_arguments_in_constructor_of_subclass_subscriber(self):
# The override constructor should still have validation of
# constructor args.
with self.assertRaises(TypeError):
OverrideConstructorSubscriber()
def test_not_callable_in_subclass_subscriber_method(self):
with self.assertRaisesRegex(
            InvalidSubscriberMethodError, 'must be callable'
):
NotCallableSubscriber()
def test_no_kwargs_in_subclass_subscriber_method(self):
with self.assertRaisesRegex(
InvalidSubscriberMethodError, 'must accept keyword'
):
NoKwargsSubscriber()
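# For contrast with the invalid subscribers exercised above, a sketch of a
# subscriber that passes BaseSubscriber's validation (the hook name and the
# **kwargs-accepting signature are the pattern these tests check for; the
# bytes_transferred argument is an assumption about the progress callback):
#
#     class ProgressSubscriber(BaseSubscriber):
#         def on_progress(self, future, bytes_transferred, **kwargs):
#             print('transferred %d bytes' % bytes_transferred)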
|
yuanagain/seniorthesis
|
src/intervals/newton.py
|
Python
|
mit
| 4,396
| 0.036624
|
from interval import interval, inf, imath, fpu
from complexinterval import ComplexInterval, _one, _zero
from complexpolynomial import ComplexPolynomial
class Newton:
def __init__(self, start, poly):
self.start = start
self.poly = poly
self.iterates = 0
self.deriv = poly.derive()
self.step = start
def iterate(self):
"""
Performs one Newton iteration, returns change between values.
"""
self.iterates += 1
x = self.step.midpoint()
fx = self.poly(x)
## iterate on derivative
## self.deriv = self.deriv.derive()
self.step = x - (fx / self.deriv(x))
## return the change
diff = x - self.step
return diff
def iterate_until(self, res = 10**-6, max_iterates = 20):
"""
Iterates until at resolution or until maximum number
of iterations has been reached. Returns True if convergence
achieved, returns False otherwise.
"""
res_box = ComplexInterval(interval([res, -res]), interval([res, -res]))
while (self.iterates < max_iterates - 1):
if self.iterate() in res_box:
return True
if self.iterate() in res_box:
return True
return False
def __str__(self):
"""
Returns string representation
"""
return "Newton's Iterator\n" + "Start: " + str(self.start) + "\nFunction: " + str(self.poly)
def main():
print("Testing Newton")
print("Testing Complex Polynomials")
print("----------------------------")
xa = interval([1, 2])
xb = interval([5, 6])
x = ComplexInterval(xa, xb)
ya = interval([4, 7])
yb = interval([2, 3])
y = ComplexInterval(ya, yb)
wa = interval([2, 2])
wb = interval([3, 3])
w = ComplexInterval(wa, wb)
za = interval([4, 4])
zb = interval([5, 5])
z = ComplexInterval(za, zb)
a_0_a = interval([1, 1])
a_0_b = interval([5, 5])
a_0 = ComplexInterval(a_0_a, a_0_b)
a_1_a = interval([1, 1])
a_1_b = interval([5, 5])
a_1 = ComplexInterval(a_1_a, a_1_b)
a_2_a = interval([3, 3])
a_2_b = interval([2, 2])
a_2 = ComplexInterval(a_2_a, a_2_b)
a_3_a = interval([7, 7])
a_3_b = interval([-4, -4])
a_3 = ComplexInterval(a_3_a, a_3_b)
a_4_a = interval([-6, -6])
a_4_b = interval([1, 1])
a_4 = ComplexInterval(a_4_a, a_4_b)
a_5 = ComplexInterval(interval([2]), interval([0]))
a_6 = ComplexInterval(interval([2]), interval([0]))
coeffs = [a_0, a_1, a_2, a_3, a_4, a_5, a_6]
print("Testing Complex Constructor")
print("----------------------------")
poly_1 = ComplexPolynomial(coeffs)
print(poly_1)
poly_2 = ComplexPolynomial([_zero(), a_4])
print(poly_2)
    poly_3 = ComplexPolynomial([a_5, a_6, a_3, a_1, a_0])
print(poly_3)
print("============================")
print("Testing Evaluation")
print("----------------------------")
print(poly_1(w))
print(poly_1(_one()))
print(poly_1(_zero()))
print("")
print(poly_2(w))
print(poly_2(_one()))
print(poly_2(_zero()))
print("")
print(poly_3(w))
print(poly_3(_one()))
print(poly_3(_zero()))
print("============================")
print("Derivation")
    print("----------------------------")
print(poly_1.derive())
print(poly_1.derive().derive())
print(poly_1.derive().derive().derive())
print("")
print(poly_2.derive())
print(poly_2.derive().derive())
print("")
print(poly_3.derive())
print(poly_3.derive().derive())
print("============================")
print("Newton's Method Constructor")
print("----------------------------")
start1 = ComplexInterval(interval([0]), interval([0]))
start2 = ComplexInterval(interval([1]), interval([1]))
start3 = ComplexInterval(interval([0]), interval([0]))
n_1 = Newton(start1, poly_1)
n_2 = Newton(start2, poly_2)
n_3 = Newton(start3, poly_3)
print(n_1)
print("")
print(n_2)
print("")
print(n_3)
print("")
print("============================")
print("Testing Iteration")
print("----------------------------")
for i in range(10):
print(n_1.iterate())
print("----------------------------")
for i in range(10):
print(n_2.iterate())
print("----------------------------")
for i in range(10):
print(n_3.iterate())
# print(fpu.isnan(n_3.iterate().a))
print("============================")
print("Testing convergence")
print("----------------------------")
print(n_1.iterate_until())
print("----------------------------")
print(n_2.iterate_until())
print("----------------------------")
print(n_3.iterate_until())
# print(fpu.isnan(n_3.iterate().a))
print("============================")
if __name__=="__main__":
main()
|
bengosney/rhgd3
|
gardens/migrations/0026_auto_20180308_0720.py
|
Python
|
gpl-3.0
| 653
| 0.001531
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-03-08 07:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('gardens', '0025_auto_20180216_1951'),
]
operations = [
migrations.AlterModelOptions(
name='worktype',
options={'ordering': ('position',), 'verbose_name': 'Work Type', 'verbose_name_plural': 'Work Types'},
),
migrations.AddField(
model_name='worktype',
name='position',
field=models.PositiveIntegerField(default=0),
),
]
|
mikewrock/phd_backup_full
|
devel/lib/python2.7/dist-packages/phd/srv/_calc_service.py
|
Python
|
apache-2.0
| 19,928
| 0.018567
|
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from phd/calc_serviceRequest.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class calc_serviceRequest(genpy.Message):
_md5sum = "504533770c671b8893346f8f23298fee"
_type = "phd/calc_serviceRequest"
_has_header = False #flag to mark the presence of a Header object
_full_text = """int32[] pre_ids
int32[] post_ids
string datum
string location
"""
__slots__ = ['pre_ids','post_ids','datum','location']
_slot_types = ['int32[]','int32[]','string','string']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
pre_ids,post_ids,datum,location
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(calc_serviceRequest, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.pre_ids is None:
self.pre_ids = []
if self.post_ids is None:
self.post_ids = []
if self.datum is None:
self.datum = ''
if self.location is None:
self.location = ''
else:
self.pre_ids = []
self.post_ids = []
self.datum = ''
self.location = ''
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
length = len(self.pre_ids)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.pack(pattern, *self.pre_ids))
length = len(self.post_ids)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.pack(pattern, *self.post_ids))
_x = self.datum
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self.location
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
self.pre_ids = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
self.post_ids = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.datum = str[start:end].decode('utf-8')
else:
self.datum = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.location = str[start:end].decode('utf-8')
else:
self.location = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
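    # Wire-format note (an editorial sketch, not part of the generated code):
    # every string above is length-prefixed with a little-endian uint32, e.g.
    # struct.pack('<I5s', 5, b'hello') == b'\x05\x00\x00\x00hello', which the
    # '<I' unpack calls in deserialize() reverse.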
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
length = len(self.pre_ids)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(self.pre_ids.tostring())
length = len(self.post_ids)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(self.post_ids.tostring())
_x = self.datum
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self.location
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
self.pre_ids = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
self.post_ids = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.datum = str[start:end].decode('utf-8')
else:
self.datum = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.location = str[start:end].decode('utf-8')
else:
self.location = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from phd/calc_serviceResponse.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import std_msgs.msg
import sensor_msgs.msg
class calc_serviceResponse(genpy.Message):
_md5sum = "d638895a709be2cef85df359cc39f0dc"
_type = "phd/calc_serviceResponse"
_has_header = False #flag to mark the presence of a Header object
_full_text = """sensor_msgs/PointCloud2 cloud_out
================================================================================
MSG: sensor_msgs/PointCloud2
# This message holds a collection of N-dimensional points, which may
# contain additional information such as normals, intensity, etc. The
# point data is stored as a binary blob, its layout described by the
# contents of the "fields" array.
# The point cloud data may be organized 2d (image-like) or 1d
# (unordered). Point clouds organized as 2d images may be produced by
# camera depth sensors such as
|
ESSolutions/ESSArch_Core
|
ESSArch_Core/auth/migrations/0016_auto_20181221_1716.py
|
Python
|
gpl-3.0
| 505
| 0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-12-21 16:16
from django.db import migrations, models
import django.db.models.deletion
import mptt.fields
import picklefield.fields
class Migration(migrations.Migration):
dependencies = [
('essauth', '0015_proxypermission'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='language',
            field=models.CharField(default='en', max_length=10),
),
]
|
databricks/spark-pr-dashboard
|
sparkprs/controllers/prs.py
|
Python
|
apache-2.0
| 3,838
| 0.002866
|
import google.appengine.ext.ndb as ndb
import json
import logging
import datetime
from flask import Blueprint
from flask import Response
from natsort import natsorted
from sparkprs import cache, app
from sparkprs.models import Issue, JIRAIssue
prs = Blueprint('prs', __name__)
@prs.route('/search-open-prs')
@cache.cached(timeout=60)
def search_open_prs():
prs = Issue.query(Issue.state == "open").order(-Issue.updated_at).fetch()
return search_prs(prs)
@prs.route('/search-stale-prs')
@cache.cached(timeout=60)
def search_stale_prs():
issueQuery = ndb.AND(Issue.state == "open",
Issue.updated_at < datetime.datetime.today() - datetime.timedelta(days=30))
stalePrs = Issue.query(issueQuery).order(-Issue.updated_at).fetch()
return search_prs(stalePrs)
def search_prs(prs):
json_dicts = []
for pr in prs:
try:
last_jenkins_comment_dict = None
if pr.last_jenkins_comment:
last_jenkins_comment_dict = {
'body': pr.last_jenkins_comment['body'],
'user': {'login': pr.last_jenkins_comment['user']['login']},
'html_url': pr.last_jenkins_comment['html_url'],
'date': [pr.last_jenkins_comment['created_at']],
}
d = {
'parsed_title': pr.parsed_title,
'number': pr.number,
'updated_at': str(pr.updated_at),
'user': pr.user,
'state': pr.state,
'components': pr.components,
'lines_added': pr.lines_added,
'lines_deleted': pr.lines_deleted,
'lines_changed': pr.lines_changed,
'is_mergeable': pr.is_mergeable,
'commenters': [
{
'username': u,
'data': d,
'is_committer': u in app.config.get('COMMITTER_GITHUB_USERNAMES', []),
} for (u, d) in pr.commenters],
'last_jenkins_outcome': pr.last_jenkins_outcome,
'last_jenkins_comment': last_jenkins_comment_dict,
}
# Use the first JIRA's information to populate the "Priority" and "Issue Type" columns:
jiras = pr.parsed_title["jiras"]
if jiras:
d['closed_jiras'] = []
first_jira = JIRAIssue.get_by_id("%s-%i" % (app.config['JIRA_PROJECT'], jiras[0]))
if first_jira:
d['jira_priority_name'] = first_jira.priority_name
d['jira_priority_icon_url'] = first_jira.priority_icon_url
d['jira_issuetype_name'] = first_jira.issuetype_name
d['jira_issuetype_icon_url'] = first_jira.issuetype_icon_url
d['jira_shepherd_display_name'] = first_jira.shepherd_display_name
# If a pull request is linked against multiple JIRA issues, then the target
# versions should be union of the individual issues' target versions:
                target_versions = set()
for jira_number in jiras:
jira = JIRAIssue.get_by_id("%s-%i" % (app.config['JIRA_PROJECT'], jira_number))
if jira:
target_versions.update(jira.target_versions)
if jira.is_closed:
d['closed_jiras'].append(jira_number)
if target_versions:
d['jira_target_versions'] = natsorted(target_versions)
json_dicts.append(d)
except:
logging.error("Exception while processing PR #%i", pr.number)
raise
response = Response(json.dumps(json_dicts), mimetype='application/json')
return response
|
chancegrissom/qmk_firmware
|
lib/python/qmk/cli/list/__init__.py
|
Python
|
gpl-2.0
| 24
| 0
|
from . import keyboards
| |
jnewland/home-assistant
|
homeassistant/components/ring/binary_sensor.py
|
Python
|
apache-2.0
| 3,663
| 0
|
"""This component provides HA sensor support for Ring Door Bell/Chimes."""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.binary_sensor import (
PLATFORM_SCHEMA, BinarySensorDevice)
from homeassistant.const import (
ATTR_ATTRIBUTION, CONF_ENTITY_NAMESPACE, CONF_MONITORED_CONDITIONS)
import homeassistant.helpers.config_validation as cv
from . import ATTRIBUTION, DATA_RING, DEFAULT_ENTITY_NAMESPACE
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=10)
# Sensor types: Name, category, device_class
SENSOR_TYPES = {
'ding': ['Ding', ['doorbell'], 'occupancy'],
'motion': ['Motion', ['doorbell', 'stickup_cams'], 'motion'],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_ENTITY_NAMESPACE, default=DEFAULT_ENTITY_NAMESPACE):
cv.string,
vol.Required(CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES)):
vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]),
})
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up a sensor for a Ring device."""
ring = hass.data[DATA_RING]
sensors = []
for device in ring.doorbells: # ring.doorbells is doing I/O
for sensor_type in config[CONF_MONITORED_CONDITIONS]:
if 'doorbell' in SENSOR_TYPES[sensor_type][1]:
sensors.append(RingBinarySensor(hass, device, sensor_type))
for device in ring.stickup_cams: # ring.stickup_cams is doing I/O
for sensor_type in config[CONF_MONITORED_CONDITIONS]:
if 'stickup_cams' in SENSOR_TYPES[sensor_type][1]:
sensors.append(RingBinarySensor(hass, device, sensor_type))
add_entities(sensors, True)
class RingBinarySensor(BinarySensorDevice):
"""A binary sensor implementation for Ring device."""
def __init__(self, hass, data, sensor_type):
"""Initialize a sensor for Ring device."""
super(RingBinarySensor, self).__init__()
self._sensor_type = sensor_type
self._data = data
self._name = "{0} {1}".format(
self._data.name, SENSOR_TYPES.get(self._sensor_type)[0])
self._device_class = SENSOR_TYPES.get(self._sensor_type)[2]
self._state = None
self._unique_id = '{}-{}'.format(self._data.id, self._sensor_type)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def is_on(self):
"""Return True if the binary sensor is on."""
return self._state
@property
def device_class(self):
"""Return the class of the binary sensor."""
return self._device_class
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
@property
def device_state_attributes(self):
"""Return the state attributes."""
attrs = {}
attrs[ATTR_ATTRIBUTION] = ATTRIBUTION
        attrs['device_id'] = self._data.id
attrs['firmware'] = self._data.firmware
attrs['timezone'] = self._data.timezone
if self._data.alert and self._data.alert_expires_at:
attrs['expires_at'] = self._data.alert_expires_at
attrs['state'] = self._data.alert.get('state')
return attrs
def update(self):
"""Get the latest data and updates the state."""
self._data.check_alerts()
if self._data.alert:
if self._sensor_type == self._data.alert.get('kind') and \
self._data.account_id == self._data.alert.get('doorbot_id'):
self._state = True
else:
self._state = False
|
elliotf/appenginewiki
|
wiki.py
|
Python
|
lgpl-2.1
| 9,987
| 0.003304
|
#!/usr/bin/env python
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A simple Google App Engine wiki application.
The main distinguishing feature is that editing is in a WYSIWYG editor
rather than a text editor with special syntax. This application uses
google.appengine.api.datastore to access the datastore. This is a
lower-level API on which google.appengine.ext.db depends.
"""
#__author__ = 'Bret Taylor'
__author__ = 'Elliot Foster'
import cgi
import datetime
import os
import re
import sys
import urllib
import urlparse
import logging
import wikimarkup
from google.appengine.api import datastore
from google.appengine.api import datastore_types
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
# for Data::Dumper-like stuff
#import pprint
#pp = pprint.PrettyPrinter(indent=4)
#lib_path = os.path.join(os.path.dirname(__file__), 'lib')
#sys.path.append(lib_path)
_DEBUG = True
class BaseRequestHandler(webapp.RequestHandler):
def generate(self, template_name, template_values={}):
values = {
'request': self.request,
'user': users.get_current_user(),
'login_url': users.create_login_url(self.request.uri),
'logout_url': users.create_logout_url(self.request.uri),
'application_name': 'lilwiki',
}
values.update(template_values)
directory = os.path.dirname(__file__)
path = os.path.join(directory, os.path.join('templates', template_name))
self.response.out.write(template.render(path, values, debug=_DEBUG))
def head(self, *args):
pass
def get(self, *args):
pass
def post(self, *args):
pass
class MainPageHandler(BaseRequestHandler):
def get(self):
        user = users.get_current_user()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
query = datastore.Query('Page')
query['owner'] = user
        query.Order(('modified', datastore.Query.DESCENDING))
page_list = []
for entity in query.Get(100):
page_list.append(Page(entity['name'], entity))
        self.generate('index.html', {
'pages': page_list,
})
class PageRequestHandler(BaseRequestHandler):
def get(self, page_name):
# if we don't have a user, we won't know which namespace to use (for now)
user = users.get_current_user()
if not user:
self.redirect(users.create_login_url(self.request.uri))
page_name = urllib.unquote(page_name)
page = Page.load(page_name, user)
modes = ['view', 'edit']
mode = self.request.get('mode')
if not page.entity:
logging.debug('page "' + page_name + '" not found, creating new instance.')
mode = 'edit'
if not mode in modes:
logging.debug('defaulting mode to view')
mode = 'view'
self.generate(mode + '.html', {
'page': page,
})
def post(self, page_name):
user = users.get_current_user()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
page_name = urllib.unquote(page_name)
page = Page.load(page_name, user)
page.content = self.request.get('content')
page.save()
self.redirect(page.view_url())
class Page(object):
""" A wiki page, has attributes:
name
content
owner
is_public -- implement later
"""
def __init__(self, name, entity=None):
self.name = name
self.entity = entity
if entity:
self.content = entity['content']
self.owner = entity['owner']
self.modified = entity['modified']
else:
self.content = '= ' + self.name + " =\n\nStarting writing about " + self.name + ' here.'
def entity(self):
return self.entity
def edit_url(self):
return '/%s?mode=edit' % (urllib.quote(self.name))
def view_url(self):
name = self.name
name = urllib.quote(name)
return '/' + name
def save(self):
if self.entity:
entity = self.entity
logging.debug('saving existing page ' + self.name)
else:
logging.debug('saving new page ' + self.name)
entity = datastore.Entity('Page')
entity['owner'] = users.get_current_user()
entity['name'] = self.name
entity['content'] = datastore_types.Text(self.content)
entity['modified'] = datetime.datetime.now()
datastore.Put(entity)
def wikified_content(self):
# TODO: check memcache for rendered page?
# replacements here
transforms = [
AutoLink(),
WikiWords(),
HideReferers(),
]
content = self.content
content = wikimarkup.parse(content)
for transform in transforms:
content = transform.run(content, self)
return content
@staticmethod
def load(name, owner):
if not owner:
owner = users.get_current_user()
query = datastore.Query('Page')
query['name'] = name
query['owner'] = owner
entities = query.Get(1)
if len(entities) < 1:
return Page(name)
else:
return Page(name, entities[0])
@staticmethod
def exists(name, owner):
logging.debug('looking up ' + name)
if not owner:
            logging.debug("We're not given a user when looking up " + name)
owner = users.get_current_user()
return Page.load(name, owner).entity
class Transform(object):
"""Abstraction for a regular expression transform.
Transform subclasses have two properties:
regexp: the regular expression defining what will be replaced
replace(MatchObject): returns a string replacement for a regexp match
We iterate over all matches for that regular expression, calling replace()
on the match to determine what text should replace the matched text.
The Transform class is more expressive than regular expression replacement
because the replace() method can execute arbitrary code to, e.g., look
up a WikiWord to see if the page exists before determining if the WikiWord
should be a link.
"""
def run(self, content, page):
"""Runs this transform over the given content.
Args:
content: The string data to apply a transformation to.
Returns:
A new string that is the result of this transform.
"""
self.page = page
parts = []
offset = 0
for match in self.regexp.finditer(content):
parts.append(content[offset:match.start(0)])
parts.append(self.replace(match))
offset = match.end(0)
parts.append(content[offset:])
return ''.join(parts)
class WikiWords(Transform):
"""Translates WikiWords to links.
"""
def __init__(self):
self.regexp = re.compile(r'(?<![A-Za-z])[A-Z][a-z]*([A-Z][a-z]+/?)+(?P<link_close>[^<]*</[Aa]>)?')
def replace(self, match):
wikiword = match.group(0)
if (match.group('link_close')):
# we're inside a link element, so don't rewrite
return wikiword
if wikiword == self.page.name:
# don't link to the current page
return wikiword
if Page.exists(wikiword, self.page.owner):
# link to that page
return '<a class="wikiword" href="/
|
pablo-the-programmer/Registration
|
conference/conference/wsgi.py
|
Python
|
mit
| 1,415
| 0.000707
|
"""
WSGI config for p1 project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "p1.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "conference.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
MSeifert04/numpy
|
numpy/core/tests/test_deprecations.py
|
Python
|
bsd-3-clause
| 24,541
| 0.001222
|
"""
Tests related to deprecation warnings. Also a convenient place
to document how deprecations should eventually be turned into errors.
"""
from __future__ import division, absolute_import, print_function
import datetime
import sys
import operator
import warnings
import pytest
import shutil
import tempfile
import numpy as np
from numpy.testing import (
assert_raises, assert_warns, assert_, assert_array_equal
)
from numpy.core._multiarray_tests import fromstring_null_term_c_api
try:
import pytz
_has_pytz = True
except ImportError:
_has_pytz = False
class _DeprecationTestCase(object):
    # Just as a warning: warnings uses re.match, so the start of this message
# must match.
message = ''
warning_cls = DeprecationWarning
def setup(self):
self.warn_ctx = warnings.catch_warnings(record=True)
self.log = self.warn_ctx.__enter__()
# Do *not* ignore other DeprecationWarnings. Ignoring warnings
# can give very confusing results because of
# https://bugs.python.org/issue4180 and it is probably simplest to
# try to keep the tests cleanly giving only the right warning type.
# (While checking them set to "error" those are ignored anyway)
# We still have them show up, because otherwise they would be raised
warnings.filterwarnings("always", category=self.warning_cls)
warnings.filterwarnings("always", message=self.message,
category=self.warning_cls)
def teardown(self):
self.warn_ctx.__exit__()
def assert_deprecated(self, function, num=1, ignore_others=False,
function_fails=False,
exceptions=np._NoValue,
args=(), kwargs={}):
"""Test if DeprecationWarnings are given and raised.
This first checks if the function when called gives `num`
DeprecationWarnings, after that it tries to raise these
DeprecationWarnings and compares them with `exceptions`.
The exceptions can be different for cases where this code path
is simply not anticipated and the exception is replaced.
Parameters
----------
function : callable
The function to test
num : int
Number of DeprecationWarnings to expect. This should normally be 1.
ignore_others : bool
Whether warnings of the wrong type should be ignored (note that
the message is not checked)
function_fails : bool
If the function would normally fail, setting this will check for
warnings inside a try/except block.
exceptions : Exception or tuple of Exceptions
Exception to expect when turning the warnings into an error.
The default checks for DeprecationWarnings. If exceptions is
empty the function is expected to run successfully.
args : tuple
Arguments for `function`
kwargs : dict
Keyword arguments for `function`
"""
# reset the log
self.log[:] = []
if exceptions is np._NoValue:
exceptions = (self.warning_cls,)
try:
function(*args, **kwargs)
except (Exception if function_fails else tuple()):
pass
# just in case, clear the registry
num_found = 0
for warning in self.log:
if warning.category is self.warning_cls:
num_found += 1
elif not ignore_others:
raise AssertionError(
"expected %s but got: %s" %
(self.warning_cls.__name__, warning.category))
if num is not None and num_found != num:
msg = "%i warnings found but %i expected." % (len(self.log), num)
lst = [str(w) for w in self.log]
raise AssertionError("\n".join([msg] + lst))
with warnings.catch_warnings():
warnings.filterwarnings("error", message=self.message,
category=self.warning_cls)
try:
function(*args, **kwargs)
if exceptions != tuple():
raise AssertionError(
"No error raised during function call")
except exceptions:
if exceptions == tuple():
raise AssertionError(
"Error raised during function call")
def assert_not_deprecated(self, function, args=(), kwargs={}):
"""Test that warnings are not raised.
This is just a shorthand for:
self.assert_deprecated(function, num=0, ignore_others=True,
exceptions=tuple(), args=args, kwargs=kwargs)
"""
self.assert_deprecated(function, num=0, ignore_others=True,
exceptions=tuple(), args=args, kwargs=kwargs)
class _VisibleDeprecationTestCase(_DeprecationTestCase):
warning_cls = np.VisibleDeprecationWarning
class TestNonTupleNDIndexDeprecation(object):
def test_basic(self):
a = np.zeros((5, 5))
with warnings.catch_warnings():
warnings.filterwarnings('always')
assert_warns(FutureWarning, a.__getitem__, [[0, 1], [0, 1]])
assert_warns(FutureWarning, a.__getitem__, [slice(None)])
warnings.filterwarnings('error')
assert_raises(FutureWarning, a.__getitem__, [[0, 1], [0, 1]])
assert_raises(FutureWarning, a.__getitem__, [slice(None)])
        # a[[0, 1]] always was advanced indexing, so no error/warning
a[[0, 1]]
class TestComparisonDeprecations(_DeprecationTestCase):
"""This tests the deprecation, for non-element-wise comparison logic.
This used to mean that when an error occurred during element-wise comparison
(i.e. broadcasting) NotImplemented was returned, but also in the comparison
itself, False was given instead of the error.
Also test FutureWarning for the None comparison.
"""
message = "elementwise.* comparison failed; .*"
def test_normal_types(self):
for op in (operator.eq, operator.ne):
# Broadcasting errors:
self.assert_deprecated(op, args=(np.zeros(3), []))
a = np.zeros(3, dtype='i,i')
# (warning is issued a couple of times here)
self.assert_deprecated(op, args=(a, a[:-1]), num=None)
# Element comparison error (numpy array can't be compared).
a = np.array([1, np.array([1,2,3])], dtype=object)
b = np.array([1, np.array([1,2,3])], dtype=object)
self.assert_deprecated(op, args=(a, b), num=None)
def test_string(self):
# For two string arrays, strings always raised the broadcasting error:
a = np.array(['a', 'b'])
b = np.array(['a', 'b', 'c'])
assert_raises(ValueError, lambda x, y: x == y, a, b)
# The empty list is not cast to string, and this used to pass due
# to dtype mismatch; now (2018-06-21) it correctly leads to a
# FutureWarning.
assert_warns(FutureWarning, lambda: a == [])
def test_void_dtype_equality_failures(self):
class NotArray(object):
def __array__(self):
raise TypeError
# Needed so Python 3 does not raise DeprecationWarning twice.
def __ne__(self, other):
return NotImplemented
self.assert_deprecated(lambda: np.arange(2) == NotArray())
self.assert_deprecated(lambda: np.arange(2) != NotArray())
struct1 = np.zeros(2, dtype="i4,i4")
struct2 = np.zeros(2, dtype="i4,i4,i4")
assert_warns(FutureWarning, lambda: struct1 == 1)
assert_warns(FutureWarning, lambda: struct1 == struct2)
assert_warns(FutureWarning, lambda: struct1 != 1)
assert_warns(FutureWarning, lambda: struct1 != struct2)
def test_array_richcompare_legacy_weirdness(self):
# It doesn't really work to use assert_deprecated here, b/c part of
# the point of assert_deprecated is to check that when warnings ar
|
Peter-Slump/mahjong
|
tests/mahjong/models/test_tabel.py
|
Python
|
mit
| 1,613
| 0
|
import unittest
from mahjong.models import Table, ALL_WINDS, WIND_EAST, WIND_NORTH, WIND_SOUTH
import mahjong.services.stone
class MahjongTabelModelTestCase(unittest.TestCase):
def setUp(self):
self.table = Table(stones=mahjong.services.stone.get_all_shuffled())
self.table.wall_wind = WIND_EAST # Open the table
def test_walls_are_created(self):
"""
Case: A table is initialized
Expected: The walls are created
"""
self.assertEqual(len(self.table.walls), 4)
for wind, wall in self.table.walls.items():
self.assertEqual(len(wall), 36)
self.assertIn(wind, ALL_WINDS)
def test_get_current_wall(self):
"""
Case: current wall get requested
Expected: The wall of the wall wind is returned
"""
self.assertEqual(
self.table.walls[self.table.wall_wind],
self.table.current_wall
)
    def test_stone_iteration(self):
        """
        Case: we iterate through the stones of the table
        Expected: we get the same stones as the list we give
        """
stones = mahjong.services.stone.get_all_shuffled()
table = Table(stones=stones)
table.wall_wind = WIND_NORTH # Last wind
table.wall_index = 35 # Last stone
for stone in table:
self.assertEqual(stone, stones.pop())
def test_number_stones_returned(self):
self.table.wall_wind = WIND_NORTH
self.table.wall_index = 35
stones = self.table.get_stones(count=3)
self.assertEqual(len(stones), 3)
|
savi-dev/horizon
|
horizon/dashboards/syspanel/projects/forms.py
|
Python
|
apache-2.0
| 5,991
| 0.001335
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django import shortcuts
from django.contrib import messages
from django.utils.translation import ugettext_lazy as _
from horizon import api
from horizon import exceptions
from horizon import forms
LOG = logging.getLogger(__name__)
class AddUser(forms.SelfHandlingForm):
tenant_id = forms.CharField(widget=forms.widgets.HiddenInput())
user_id = forms.CharField(widget=forms.widgets.HiddenInput())
role_id = forms.ChoiceField(label=_("Role"))
def __init__(self, *args, **kwargs):
roles = kwargs.pop('roles')
super(AddUser, self).__init__(*args, **kwargs)
role_choices = [(role.id, role.name) for role in roles]
self.fields['role_id'].choices = role_choices
def handle(self, request, data):
try:
api.add_tenant_user_role(request,
data['tenant_id'],
data['user_id'],
data['role_id'])
messages.success(request, _('Successfully added user to project.'))
except:
exceptions.handle(request, _('Unable to add user to project.'))
return shortcuts.redirect('horizon:syspanel:projects:users',
tenant_id=data['tenant_id'])
class CreateTenant(forms.SelfHandlingForm):
name = forms.CharField(label=_("Name"))
description = forms.CharField(
widget=forms.widgets.Textarea(),
label=_("Description"))
enabled = forms.BooleanField(label=_("Enabled"), required=False,
initial=True)
def handle(self, request, data):
try:
LOG.info('Creating project with name "%s"' % data['name'])
            api.tenant_create(request,
data['name'],
data['description'],
data['enabled'])
messages.success(request,
_('%s was successfully created.')
% data['name'])
except:
exceptions.handle(request, _('Unable to create project.'))
        return shortcuts.redirect('horizon:syspanel:projects:index')
class UpdateTenant(forms.SelfHandlingForm):
id = forms.CharField(label=_("ID"),
widget=forms.TextInput(attrs={'readonly': 'readonly'}))
name = forms.CharField(label=_("Name"))
description = forms.CharField(
widget=forms.widgets.Textarea(),
label=_("Description"))
enabled = forms.BooleanField(required=False, label=_("Enabled"))
def handle(self, request, data):
try:
LOG.info('Updating project with id "%s"' % data['id'])
api.tenant_update(request,
data['id'],
data['name'],
data['description'],
data['enabled'])
messages.success(request,
_('%s was successfully updated.')
% data['name'])
except:
exceptions.handle(request, _('Unable to update project.'))
return shortcuts.redirect('horizon:syspanel:projects:index')
class UpdateQuotas(forms.SelfHandlingForm):
tenant_id = forms.CharField(label=_("ID (name)"),
widget=forms.TextInput(attrs={'readonly': 'readonly'}))
metadata_items = forms.IntegerField(label=_("Metadata Items"))
injected_files = forms.IntegerField(label=_("Injected Files"))
injected_file_content_bytes = forms.IntegerField(label=_("Injected File "
"Content Bytes"))
cores = forms.IntegerField(label=_("VCPUs"))
instances = forms.IntegerField(label=_("Instances"))
volumes = forms.IntegerField(label=_("Volumes"))
gigabytes = forms.IntegerField(label=_("Gigabytes"))
ram = forms.IntegerField(label=_("RAM (in MB)"))
floating_ips = forms.IntegerField(label=_("Floating IPs"))
def handle(self, request, data):
ifcb = data['injected_file_content_bytes']
try:
api.nova.tenant_quota_update(request,
data['tenant_id'],
metadata_items=data['metadata_items'],
injected_file_content_bytes=ifcb,
volumes=data['volumes'],
gigabytes=data['gigabytes'],
ram=data['ram'],
floating_ips=data['floating_ips'],
instances=data['instances'],
injected_files=data['injected_files'],
cores=data['cores'])
messages.success(request,
_('Quotas for %s were successfully updated.')
% data['tenant_id'])
except:
exceptions.handle(request, _('Unable to update quotas.'))
return shortcuts.redirect('horizon:syspanel:projects:index')
|
epage/telepathy-bluewire
|
hand_tests/generic.py
|
Python
|
lgpl-2.1
| 17,072
| 0.034149
|
#!/usr/bin/env python
import sys
import gobject
import dbus.mainloop.glib
dbus.mainloop.glib.DBusGMainLoop(set_as_default = True)
import telepathy
DBUS_PROPERTIES = 'org.freedesktop.DBus.Properties'
def get_registry():
reg = telepathy.client.ManagerRegistry()
reg.LoadManagers()
return reg
def get_connection_manager(reg):
cm = reg.GetManager('bluewire')
return cm
class Action(object):
def __init__(self):
self._action = None
def queue_action(self):
pass
def append_action(self, action):
assert self._action is None
self._action = action
def get_next_action(self):
assert self._action is not None
return self._action
def _on_done(self):
if self._action is None:
return
self._action.queue_action()
def _on_error(self, error):
print error
def _on_generic_message(self, *args):
pass
class DummyAction(Action):
def queue_action(self):
gobject.idle_add(self._on_done)
class QuitLoop(Action):
def __init__(self, loop):
super(QuitLoop, self).__init__()
self._loop = loop
def queue_action(self):
self._loop.quit()
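# Chaining sketch (hypothetical wiring of the Action API above: each action's
# _on_done queues whatever was appended to it):
#
#     loop = gobject.MainLoop()
#     first = DummyAction()
#     first.append_action(QuitLoop(loop))
#     first.queue_action()  # DummyAction finishes, QuitLoop stops the loop
#     loop.run()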
class DisplayParams(Action):
def __init__(self, cm):
super(DisplayParams, self).__init__()
self._cm = cm
def queue_action(self):
self._cm[telepathy.interfaces.CONN_MGR_INTERFACE].GetParameters(
            'bluetooth',
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, params):
print "Connection Parameters:"
for name, flags, signature, default in params:
print "\t%s (%s)" % (name, signature),
if flags & telepathy.constants.CONN_MGR_PARAM_FLAG_REQUIRED:
print "required",
if flags & telepathy.constants.CONN_MGR_PARAM_FLAG_REGISTER:
print "register",
if flags & telepathy.constants.CONN_MGR_PARAM_FLAG_SECRET:
print "secret",
if flags & telepathy.constants.CONN_MGR_PARAM_FLAG_DBUS_PROPERTY:
print "dbus-property",
if flags & telepathy.constants.CONN_MGR_PARAM_FLAG_HAS_DEFAULT:
print "has-default(%s)" % default,
print ""
super(DisplayParams, self)._on_done()
class RequestConnection(Action):
def __init__(self, cm, username, password, forward):
super(RequestConnection, self).__init__()
self._cm = cm
self._conn = None
self._serviceName = None
self._username = username
self._password = password
self._forward = forward
@property
def conn(self):
return self._conn
@property
def serviceName(self):
return self._serviceName
def queue_action(self):
self._cm[telepathy.server.CONNECTION_MANAGER].RequestConnection(
'bluetooth",
{
'account': self._username,
'password': self._password,
'forward': self._forward,
},
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, busName, objectPath):
self._serviceName = busName
self._conn = telepathy.client.Connection(busName, objectPath)
super(RequestConnection, self)._on_done()
class Connect(Action):
def __init__(self, connAction):
super(Connect, self).__init__()
self._connAction = connAction
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION].connect_to_signal(
'StatusChanged',
self._on_change,
)
        self._connAction.conn[telepathy.server.CONNECTION].Connect(
reply_handler = self._on_generic_message,
error_handler = self._on_error,
)
def _on_done(self):
super(Connect, self)._on_done()
def _on_change(self, status, reason):
if status == telepathy.constants.CONNECTION_STATUS_DISCONNECTED:
print "Disconnected!"
            self._conn = None
elif status == telepathy.constants.CONNECTION_STATUS_CONNECTED:
print "Connected"
self._on_done()
elif status == telepathy.constants.CONNECTION_STATUS_CONNECTING:
print "Connecting"
else:
print "Status: %r" % status
class SimplePresenceOptions(Action):
def __init__(self, connAction):
super(SimplePresenceOptions, self).__init__()
self._connAction = connAction
def queue_action(self):
self._connAction.conn[DBUS_PROPERTIES].Get(
telepathy.server.CONNECTION_INTERFACE_SIMPLE_PRESENCE,
'Statuses',
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, statuses):
print "\tAvailable Statuses"
for (key, value) in statuses.iteritems():
print "\t\t - %s" % key
super(SimplePresenceOptions, self)._on_done()
class NullHandle(object):
@property
def handle(self):
return 0
@property
def handles(self):
return []
class UserHandle(Action):
def __init__(self, connAction):
super(UserHandle, self).__init__()
self._connAction = connAction
self._handle = None
@property
def handle(self):
return self._handle
@property
def handles(self):
return [self._handle]
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION].GetSelfHandle(
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, handle):
self._handle = handle
super(UserHandle, self)._on_done()
class RequestHandle(Action):
def __init__(self, connAction, handleType, handleNames):
super(RequestHandle, self).__init__()
self._connAction = connAction
self._handle = None
self._handleType = handleType
self._handleNames = handleNames
@property
def handle(self):
return self._handle
@property
def handles(self):
return [self._handle]
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION].RequestHandles(
self._handleType,
self._handleNames,
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, handles):
self._handle = handles[0]
super(RequestHandle, self)._on_done()
class RequestChannel(Action):
def __init__(self, connAction, handleAction, channelType, handleType):
super(RequestChannel, self).__init__()
self._connAction = connAction
self._handleAction = handleAction
self._channel = None
self._channelType = channelType
self._handleType = handleType
@property
def channel(self):
return self._channel
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION].RequestChannel(
self._channelType,
self._handleType,
self._handleAction.handle,
True,
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, channelObjectPath):
self._channel = telepathy.client.Channel(self._connAction.serviceName, channelObjectPath)
super(RequestChannel, self)._on_done()
class EnsureChannel(Action):
def __init__(self, connAction, channelType, handleType, handleId):
super(EnsureChannel, self).__init__()
self._connAction = connAction
self._channel = None
self._channelType = channelType
self._handleType = handleType
self._handleId = handleId
self._handle = None
@property
def channel(self):
return self._channel
@property
def handle(self):
return self._handle
@property
def handles(self):
return [self._handle]
def queue_action(self):
properties = {
telepathy.server.CHANNEL_INTERFACE+".ChannelType": self._channelType,
telepathy.server.CHANNEL_INTERFACE+".TargetHandleType": self._handleType,
telepathy.server.CHANNEL_INTERFACE+".TargetID": self._handleId,
}
self._connAction.conn[telepathy.server.CONNECTION_INTERFACE_REQUESTS].EnsureChannel(
properties,
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, yours, channelObjectPath, properties):
print "Create?", not not yours
print "Path:", channelObjectPath
print "Properties:", properties
self._channel = telepathy.client.Channel(self._connAction.serviceName, channelObjectPath)
self._handle = properties[telepathy.server.CHANNEL_INTERFACE+".TargetHandle"]
super(EnsureChannel, self)._on_done()
class CloseChannel(Action):
def __init__(self, connAction, chanAction):
super(CloseChannel, self).__init__()
self._connAction = connAction
self._chanAction = chanAction
self._handles = []
def queue_action(self):
self._chanAction.channel[telepathy.server.CHANNEL].Close(
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self):
super(CloseChannel, self)._on_done()
class ContactHandles(Action):
def __init__(self, connAction, chanAction):
|
ITPS/odoo-saas-tools
|
saas_portal_demo/controllers/__init__.py
|
Python
|
gpl-3.0
| 924
| 0
|
# -*- encoding: utf-8 -*-
##############################################################################
# Copyright (c) 2015 - Present All Rights Reserved
# Author: Cesar Lage <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
#    the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
#    A copy of the GNU General Public License is available at:
# <http://www.gnu.org/licenses/gpl.html>.
##############################################################################
import main
|
dankolbman/MarketCents
|
twitter_feed.py
|
Python
|
mit
| 920
| 0.018478
|
# authenticates with twitter, searches for microsoft, evaluates overall
# sentiment for microsoft
import numpy as np
import twitter
from textblob import TextBlob
f = open('me.auth')
keys = f.readlines()
# Read in keys
keys = [x.strip('\n') for x in keys]
# Connect
api = twitter.Api(consumer_key = keys[0],
consumer_secret = keys[1],
access_token_key = keys[2],
access_token_secret = keys[3])
print 'logged in as ', api.VerifyCredentials().name
search = api.GetSearch(term='microsoft')
# Make text blobs out of status content
blobs = [ TextBlob(status.text) for status in search ]
sentiments = [ blob.sentiment.polarity for blob in blobs ]
filtered_sentiments = filter(lambda a: a!=0.0, sentiments)
overall_sentiment = sum(filtered_sentiments)/len(filtered_sentiments)
print 'Overall sentiment for microsoft: {0}'.format(overall_sentiment)
|
wtrevino/django-listings
|
listings/syndication/views.py
|
Python
|
mit
| 619
| 0.001616
|
# -*- coding: utf-8 -*-
from django.http import HttpResponse, Http404
from django.template import Context
from django.contrib.sites.models import Site
from listings.syndication.models import Feed
from listings.models import POSTING_ACTIVE
def display_feed(request, feed_url):
site = Site.objects.get_current()
try:
feed = site.feed_set.get(feed_url=feed_url)
except Feed.DoesNotExist:
raise Http404
template = feed.get_template()
context = Context({'ads': feed.ads.filter(status=POSTING_ACTIVE)})
    return HttpResponse(template.render(context), content_type=feed.content_type)
|
CorunaDevelopers/teleTweetBot
|
teleTweetBot/handlers/TelegramMessageHandler.py
|
Python
|
gpl-3.0
| 632
| 0
|
#!/usr/bin/env python
# _*_ coding:utf-8 _*
from commands import StartCommand
from commands import StopCommand
from commands import TwitterCommand
from handlers.ExceptionHandler import ExceptionHandler
COMMANDS = {
'/start': StartCommand.process_message,
'/stop': StopCommand.process_message,
'/tweet': TwitterCommand.process_message
}
def process_message(twitter_api, telegram_message):
try:
msg_command = telegram_message.message.text.split()[0].lower()
return COMMANDS[msg_command](twitter_api, telegram_message)
except Exception as e:
ExceptionHandler.handle_exception(e, False)
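if __name__ == "__main__":
    # Illustrative, not part of the original handler: show which entry the
    # dispatch table above selects for a sample (hypothetical) message text.
    example_text = "/TWEET hello world"
    handler = COMMANDS[example_text.split()[0].lower()]
    assert handler is TwitterCommand.process_message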
|
boxed/CMi
|
web_frontend/gdata/apps/organization/client.py
|
Python
|
mit
| 20,094
| 0.003583
|
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""OrganizationUnitProvisioningClient simplifies OrgUnit Provisioning API calls.
OrganizationUnitProvisioningClient extends gdata.client.GDClient to ease
interaction with the Google Organization Unit Provisioning API.
These interactions include the ability to create, retrieve, update and delete
organization units, move users within organization units, retrieve customerId
and update and retrieve users in organization units.
"""
__author__ = 'Gunjan Sharma <[email protected]>'
import urllib
import gdata.apps.organization.data
import gdata.client
CUSTOMER_ID_URI_TEMPLATE = '/a/feeds/customer/%s/customerId'
# OrganizationUnit URI templates
# The strings in this template are eventually replaced with the feed type
# (orgunit/orguser), API version and Google Apps domain name, respectively.
ORGANIZATION_UNIT_URI_TEMPLATE = '/a/feeds/%s/%s/%s'
# The value for orgunit requests
ORGANIZATION_UNIT_FEED = 'orgunit'
# The value for orguser requests
ORGANIZATION_USER_FEED = 'orguser'
class OrganizationUnitProvisioningClient(gdata.client.GDClient):
"""Client extension for the Google Org Unit Provisioning API service.
Attributes:
    host: string The hostname for the Organization Unit Provisioning API service.
    api_version: string The version of the Organization Unit Provisioning API.
"""
host = 'apps-apis.google.com'
api_version = '2.0'
auth_service = 'apps'
auth_scopes = gdata.gauth.AUTH_SCOPES['apps']
ssl = True
def __init__(self, domain, auth_token=None, **kwargs):
"""Constructs a new client for the Organization Unit Provisioning API.
Args:
domain: string The Google Apps domain with Organization Unit
Provisioning.
auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
OAuthToken which authorizes this client to edit the Organization
Units.
"""
gdata.client.GDClient.__init__(self, auth_token=auth_token, **kwargs)
self.domain = domain
def make_organization_unit_provisioning_uri(
self, feed_type, customer_id, org_unit_path_or_user_email=None,
params=None):
"""Creates a resource feed URI for the Organization Unit Provisioning API.
Using this client's Google Apps domain, create a feed URI for organization
unit provisioning in that domain. If an org unit path or org user email
address is provided, return a URI for that specific resource.
If params are provided, append them as GET params.
Args:
feed_type: string The type of feed (orgunit/orguser)
customer_id: string The customerId of the user.
org_unit_path_or_user_email: string (optional) The org unit path or
org user email address for which to make a feed URI.
params: dict (optional) key -> value params to append as GET vars to the
URI. Example: params={'start': 'my-resource-id'}
Returns:
A string giving the URI for organization unit provisioning for this
client's Google Apps domain.
"""
    uri = ORGANIZATION_UNIT_URI_TEMPLATE % (feed_type, self.api_version,
                                            customer_id)
if org_unit_path_or_user_email:
uri += '/' + org_unit_path_or_user_email
if params:
uri += '?' + urllib.urlencode(params)
return uri
MakeOrganizationUnitProvisioningUri = make_organization_unit_provisioning_uri
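  # Illustrative worked example (not in the original source), assuming
  # api_version '2.0': feed_type 'orgunit', customer_id 'C123', org unit path
  # 'sales/emea' and params={'get': 'all'} yield the URI
  #   /a/feeds/orgunit/2.0/C123/sales/emea?get=all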
  def make_organization_unit_orgunit_provisioning_uri(self, customer_id,
                                                      org_unit_path=None,
params=None):
"""Creates a resource feed URI for the orgunit's Provisioning API calls.
Using this client's Google Apps domain, create a feed URI for organization
unit orgunit's provisioning in that domain. If an org_unit_path is
provided, return a URI for that specific resource.
If params are provided, append them as GET params.
Args:
customer_id: string The customerId of the user.
org_unit_path: string (optional) The organization unit's path for which
to make a feed URI.
params: dict (optional) key -> value params to append as GET vars to the
URI. Example: params={'start': 'my-resource-id'}
Returns:
A string giving the URI for organization unit provisioning for
given org_unit_path
"""
return self.make_organization_unit_provisioning_uri(
ORGANIZATION_UNIT_FEED, customer_id, org_unit_path, params)
MakeOrganizationUnitOrgunitProvisioningUri = make_organization_unit_orgunit_provisioning_uri
def make_organization_unit_orguser_provisioning_uri(self, customer_id,
org_user_email=None,
params=None):
"""Creates a resource feed URI for the orguser's Provisioning API calls.
Using this client's Google Apps domain, create a feed URI for organization
unit orguser's provisioning in that domain. If an org_user_email is
provided, return a URI for that specific resource.
If params are provided, append them as GET params.
Args:
customer_id: string The customerId of the user.
      org_user_email: string (optional) The org user's email address for which
        to make a feed URI.
params: dict (optional) key -> value params to append as GET vars to the
URI. Example: params={'start': 'my-resource-id'}
Returns:
A string giving the URI for organization user provisioning for
given org_user_email
"""
return self.make_organization_unit_provisioning_uri(
ORGANIZATION_USER_FEED, customer_id, org_user_email, params)
MakeOrganizationUnitOrguserProvisioningUri = make_organization_unit_orguser_provisioning_uri
def make_customer_id_feed_uri(self):
"""Creates a feed uri for retrieving customerId of the user.
Returns:
A string giving the URI for retrieving customerId of the user.
"""
uri = CUSTOMER_ID_URI_TEMPLATE % (self.api_version)
return uri
MakeCustomerIdFeedUri = make_customer_id_feed_uri
def retrieve_customer_id(self, **kwargs):
"""Retrieve the Customer ID for the customer domain.
Returns:
A gdata.apps.organization.data.CustomerIdEntry.
"""
uri = self.MakeCustomerIdFeedUri()
return self.GetEntry(
uri,
desired_class=gdata.apps.organization.data.CustomerIdEntry,
**kwargs)
RetrieveCustomerId = retrieve_customer_id
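  # Illustrative usage (not in the original source); `token` stands for a
  # real auth token object:
  #   client = OrganizationUnitProvisioningClient(domain='example.com',
  #                                               auth_token=token)
  #   customer_id_entry = client.RetrieveCustomerId()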
def create_org_unit(self, customer_id, name, parent_org_unit_path='/',
description='', block_inheritance=False, **kwargs):
"""Create a Organization Unit.
Args:
customer_id: string The ID of the Google Apps customer.
name: string The simple organization unit text name, not the full path
name.
parent_org_unit_path: string The full path of the parental tree to this
organization unit (default: '/').
[Note: Each element of the path MUST be URL encoded
(example: finance%2Forganization/suborganization)]
description: string The human readable text description of the
organization unit (optional).
block_inheritance: boolean This parameter blocks policy setting
inheritance from organization units higher in
the organization tree (default: False).
Returns:
A gdata.apps.organization.data.OrgUnitEntry representing an organization
unit.
|
fierval/retina
|
DiabeticRetinopathy/Refactoring/kobra/imaging.py
|
Python
|
mit
| 6,612
| 0.013313
|
from matplotlib import pyplot as plt
from matplotlib import cm
from os import path
import numpy as np
import cv2
import pandas as pd
from math import exp, pi, sqrt
import mahotas as mh
from numbapro import vectorize
def show_images(images,titles=None, scale=1.3):
"""Display a list of images"""
n_ims = len(images)
if titles is None: titles = ['(%d)' % i for i in range(1,n_ims + 1)]
fig = plt.figure()
n = 1
for image,title in zip(images,titles):
a = fig.add_subplot(1,n_ims,n) # Make subplot
if image.ndim == 2: # Is image grayscale?
            plt.imshow(image, cmap = cm.Greys_r)
else:
plt.imshow(cv2.cvtColor(image, cv2.COLOR_RGB2BGR))
a.set_title(title)
plt.axis("off")
n += 1
fig.set_size_inches(np.array(fig.get_size_inches(), dtype=np.float) * n_ims / scale)
plt.show()
plt.close()
# Pyramid Down & blurr
# Easy-peesy
def pyr_blurr(image):
return cv2.GaussianBlur(cv2.pyrDown(image), (7, 7), 30.)
def median_blurr(image, size = 7):
return cv2.medianBlur(image, size)
def display_contours(image, contours, color = (255, 0, 0), thickness = -1, title = None):
imShow = image.copy()
for i in range(0, len(contours)):
cv2.drawContours(imShow, contours, i, color, thickness)
show_images([imShow], scale=0.7, titles=title)
def salt_and_peper(im, fraction = 0.01):
assert (0 < fraction <= 1.), "Fraction must be in (0, 1]"
sp = np.zeros(im.shape)
percent = round(fraction * 100 / 2.)
cv2.randu(sp, 0, 100)
# quarter salt quarter pepper
im_sp = im.copy()
im_sp [sp < percent] = 0
im_sp [sp > 100 - percent] = 255
return im_sp
def remove_light_reflex(im, ksize = 5):
kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (ksize, ksize))
return cv2.morphologyEx(im, cv2.MORPH_OPEN, kernel)
def _filter_kernel_mf_fdog(L, sigma, t = 3, mf = True):
dim_y = int(L)
dim_x = 2 * int(t * sigma)
arr = np.zeros((dim_y, dim_x), 'f')
ctr_x = dim_x / 2
ctr_y = int(dim_y / 2.)
# an un-natural way to set elements of the array
# to their x coordinate
it = np.nditer(arr, flags=['multi_index'])
while not it.finished:
arr[it.multi_index] = it.multi_index[1] - ctr_x
it.iternext()
two_sigma_sq = 2 * sigma * sigma
sqrt_w_pi_sigma = 1. / (sqrt(2 * pi) * sigma)
if not mf:
sqrt_w_pi_sigma = sqrt_w_pi_sigma / sigma ** 2
@vectorize(['float32(float32)'], target='cpu')
def k_fun(x):
return sqrt_w_pi_sigma * exp(-x * x / two_sigma_sq)
@vectorize(['float32(float32)'], target='cpu')
def k_fun_derivative(x):
return -x * sqrt_w_pi_sigma * exp(-x * x / two_sigma_sq)
if mf:
kernel = k_fun(arr)
kernel = kernel - kernel.mean()
else:
kernel = k_fun_derivative(arr)
# return the correlation kernel for filter2D
return cv2.flip(kernel, -1)
def fdog_filter_kernel(L, sigma, t = 3):
'''
K = - (x/(sqrt(2 * pi) * sigma ^3)) * exp(-x^2/2sigma^2), |y| <= L/2, |x| < s * t
'''
return _filter_kernel_mf_fdog(L, sigma, t, False)
def gaussian_matched_filter_kernel(L, sigma, t = 3):
'''
K = 1/(sqrt(2 * pi) * sigma ) * exp(-x^2/2sigma^2), |y| <= L/2, |x| < s * t
'''
return _filter_kernel_mf_fdog(L, sigma, t, True)
def createMatchedFilterBank(K, n = 12):
'''
Given a kernel, create matched filter bank
'''
rotate = 180 / n
center = (K.shape[1] / 2, K.shape[0] / 2)
cur_rot = 0
kernels = [K]
for i in range(1, n):
cur_rot += rotate
r_mat = cv2.getRotationMatrix2D(center, cur_rot, 1)
k = cv2.warpAffine(K, r_mat, (K.shape[1], K.shape[0]))
kernels.append(k)
return kernels
def applyFilters(im, kernels):
'''
Given a filter bank, apply them and record maximum response
'''
images = np.array([cv2.filter2D(im, -1, k) for k in kernels])
return np.max(images, 0)
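def _example_matched_filter_usage():
    '''Illustrative sketch added for documentation (not in the original
    module): build a Gaussian matched filter bank and take the maximum
    response over a hypothetical float32 grayscale image.'''
    im = np.zeros((64, 64), np.float32)
    kernel = gaussian_matched_filter_kernel(L=9, sigma=1.5)
    bank = createMatchedFilterBank(kernel, n=12)
    return applyFilters(im, bank)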
def gabor_filters(ksize, sigma = 4.0, lmbda = 10.0, n = 16):
'''
Create a bank of Gabor filters spanning 180 degrees
'''
filters = []
for theta in np.arange(0, np.pi, np.pi / n):
kern = cv2.getGaborKernel((ksize, ksize), sigma, theta, lmbda, 0.5, 0, ktype=cv2.CV_64F)
kern /= 1.5*kern.sum()
filters.append(kern)
return filters
def saturate (v):
return np.array(map(lambda a: min(max(round(a), 0), 255), v))
def calc_hist(images, masks):
channels = map(lambda i: cv2.split(i), images)
imMask = zip(channels, masks)
nonZeros = map(lambda m: cv2.countNonZero(m), masks)
# grab three histograms - one for each channel
histPerChannel = map(lambda (c, mask): \
[cv2.calcHist([cimage], [0], mask, [256], np.array([0, 255])) for cimage in c], imMask)
# compute the cdf's.
# they are normalized & saturated: values over 255 are cut off.
cdfPerChannel = map(lambda (hChan, nz): \
[saturate(np.cumsum(h) * 255.0 / nz) for h in hChan], \
zip(histPerChannel, nonZeros))
return np.array(cdfPerChannel)
# compute color map based on minimal distances between cdf values of ref and input images
def getMin (ref, img):
l = [np.argmin(np.abs(ref - i)) for i in img]
return np.array(l)
# compute and apply color map on all channels of the image
def map_image(image, refHist, imageHist):
# each of the arguments contains histograms over 3 channels
mp = np.array([getMin(r, i) for (r, i) in zip(refHist, imageHist)])
channels = np.array(cv2.split(image))
mappedChannels = np.array([mp[i,channels[i]] for i in range(0, 3)])
return cv2.merge(mappedChannels).astype(np.uint8)
# compute the histograms on all three channels for all images
def histogram_specification(ref, images, masks):
'''
ref - reference image
images - a set of images to have color transferred via histogram specification
masks - masks to apply
'''
cdfs = calc_hist(images, masks)
mapped = [map_image(images[i], ref[0], cdfs[i, :, :]) for i in range(len(images))]
return mapped
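def _example_histogram_specification_usage(ref_image, ref_mask, images, masks):
    '''Illustrative sketch added for documentation (not in the original
    module): transfer the color distribution of `ref_image` (a BGR uint8
    array) onto `images`, matching only the regions selected by the masks.'''
    ref_cdfs = calc_hist([ref_image], [ref_mask])
    return histogram_specification(ref_cdfs, images, masks)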
def max_labelled_region(labels, Bc = None):
'''
Labelled region of maximal area
'''
return np.argmax(mh.labeled.labeled_size(labels)[1:]) + 1
def saturate (v):
return map(lambda a: min(max(round(a), 0), 255), v)
def plot_hist(hst, color):
fig = plt.figure()
plt.bar(np.arange(256), hst, width=2, color=color, edgecolor='none')
fig.set_size_inches(np.array(fig.get_size_inches(), dtype=np.float) * 2)
plt.show()
|
mrkulk/text-world
|
evennia/commands/default/general.py
|
Python
|
bsd-3-clause
| 13,667
| 0.001537
|
"""
General Character commands usually available to all characters
"""
from django.conf import settings
from evennia.utils import utils, prettytable
from evennia.commands.default.muxcommand import MuxCommand
# limit symbol import for API
__all__ = ("CmdHome", "CmdLook", "CmdNick",
"CmdInventory", "CmdGet", "CmdDrop", "CmdGive",
"CmdSay", "CmdPose", "CmdAccess")
class CmdHome(MuxCommand):
"""
move to your character's home location
Usage:
home
Teleports you to your home location.
"""
key = "home"
locks = "cmd:perm(home) or perm(Builders)"
arg_regex = r"$"
def func(self):
"Implement the command"
caller = self.caller
home = caller.home
if not home:
caller.msg("You have no home!")
elif home == caller.location:
caller.msg("You are already home!")
else:
caller.move_to(home)
caller.msg("There's no place like home ...")
class CmdLook(MuxCommand):
"""
look at location or object
Usage:
look
look <obj>
look *<player>
Observes your location or objects in your vicinity.
"""
key = "look"
aliases = ["l", "ls"]
locks = "cmd:all()"
arg_regex = r"\s|$"
def func(self):
"""
Handle the looking.
"""
caller = self.caller
args = self.args
if args:
            # Use search to handle duplicate/nonexistent results.
looking_at_obj = caller.search(args, use_nicks=True)
if not looking_at_obj:
return
else:
looking_at_obj = caller.location
if not looking_at_obj:
caller.msg("You have no location to look at!")
return
if not hasattr(looking_at_obj, 'return_appearance'):
# this is likely due to us having a player instead
looking_at_obj = looking_at_obj.character
if not looking_at_obj.access(caller, "view"):
caller.msg("Could not find '%s'." % args)
return
# get object's appearance
caller.msg(looking_at_obj.return_appearance(caller))
# the object's at_desc() method.
looking_at_obj.at_desc(looker=caller)
class CmdNick(MuxCommand):
"""
define a personal alias/nick
Usage:
nick[/switches] <nickname> = [<string>]
alias ''
Switches:
object - alias an object
player - alias a player
clearall - clear all your aliases
list - show all defined aliases (also "nicks" works)
Examples:
nick hi = say Hello, I'm Sarah!
nick/object tom = the tall man
A 'nick' is a personal shortcut you create for your own use. When
you enter the nick, the alternative string will be sent instead.
The switches control in which situations the substitution will
happen. The default is that it will happen when you enter a
command. The 'object' and 'player' nick-types kick in only when
you use commands that requires an object or player as a target -
you can then use the nick to refer to them.
Note that no objects are actually renamed or changed by this
command - the nick is only available to you. If you want to
    permanently add keywords to an object for everyone to use, you
    need build privileges and to use the @alias command.
"""
key = "nick"
aliases = ["nickname", "nicks", "@nick", "alias"]
locks = "cmd:all()"
def func(self):
"Create the nickname"
caller = self.caller
switches = self.switches
nicks = caller.nicks.get(return_obj=True)
if 'list' in switches:
table = prettytable.PrettyTable(["{wNickType",
"{wNickname",
"{wTranslates-to"])
for nick in utils.make_iter(nicks):
table.add_row([nick.db_category, nick.db_key, nick.db_strvalue])
string = "{wDefined Nicks:{n\n%s" % table
caller.msg(string)
return
if 'clearall' in switches:
caller.nicks.clear()
caller.msg("Cleared all aliases.")
return
if not self.args or not self.lhs:
caller.msg("Usage: nick[/switches] nickname = [realname]")
return
nick = self.lhs
real = self.rhs
if real == nick:
caller.msg("No point in setting nick same as the string to replace...")
return
# check so we have a suitable nick type
if not any(True for switch in switches if switch in ("object", "player", "inputline")):
switches = ["inputline"]
string = ""
for switch in switches:
oldnick = caller.nicks.get(key=nick, category=switch)
if not real:
# removal of nick
if oldnick:
# clear the alias
string += "\nNick '%s' (= '%s') was cleared." % (nick, oldnick)
caller.nicks.delete(nick, category=switch)
else:
string += "\nNo nick '%s' found, so it could not be removed." % nick
else:
# creating new nick
if oldnick:
string += "\nNick %s changed from '%s' to '%s'." % (nick, oldnick, real)
else:
string += "\nNick set: '%s' = '%s'." % (nick, real)
caller.nicks.add(nick, real, category=switch)
caller.msg(string)
class CmdInventory(MuxCommand):
"""
view inventory
Usage:
inventory
inv
Shows your inventory.
"""
key = "inventory"
aliases = ["inv", "i"]
locks = "cmd:all()"
arg_regex = r"$"
def func(self):
"check inventory"
items = self.caller.contents
if not items:
string = "You are not carrying anything."
else:
table = prettytable.PrettyTable(["name", "desc"])
table.header = False
table.border = False
for item in items:
table.add_row(["{C%s{n" % item.name, item.db.desc and item.db.desc or ""])
string = "{wYou are carrying:\n%s" % table
self.caller.msg(string)
class CmdGet(MuxCommand):
"""
pick up something
Usage:
get <obj>
Picks up an object from your location and puts it in
your inventory.
"""
key = "get"
aliases = "grab"
locks = "cmd:all()"
arg_regex = r"\s|$"
def func(self):
"implements the command."
caller = self.caller
if not self.args:
caller.msg("Get what?")
return
#print "general/get:", caller, caller.location, self.args, caller.location.contents
obj = caller.search(self.args, location=caller.location)
if not obj:
return
if caller == obj:
caller.msg("You can't get yourself.")
return
if not obj.access(caller, 'get'):
if obj.db.get_err_msg:
caller.msg(obj.db.get_err_msg)
else:
caller.msg("You can't get that.")
return
obj.move_to(caller, quiet=True)
caller.msg("You pick up %s." % obj.name)
caller.location.msg_contents("%s picks up %s." %
(caller.name,
obj.name),
exclude=caller)
# calling hook method
obj.at_get(caller)
class CmdDrop(MuxCommand):
"""
drop something
Usage:
drop <obj>
Lets you drop an object from your inventory into the
location you are currently in.
"""
key = "drop"
locks = "cmd:all()"
arg_regex = r"\s|$"
def func(self):
"Implement command"
caller = self.caller
if not self.args:
caller.msg("Drop what?")
return
# Because the DROP command by definition looks for items
# in inventory, call the search function using locat
|
kyokley/BattlePyEngine
|
src/battlePy/ship.py
|
Python
|
mit
| 1,526
| 0
|
from battlePy.utils import docprop
(UP, DOWN, LEFT, RIGHT) = SHIP_ORIENTATIONS = range(4)
VECTOR_DICT = {UP: (0, 1), DOWN: (0, -1), LEFT: (-1, 0), RIGHT: (1, 0)}
class Ship(object):
name = docprop('name', 'Name of the ship')
size = docprop('size', 'Size of the ship')
hits = docprop('hits', 'Set of current hit locations')
locations = docprop('locations', 'Set of ship coordinates')
game = docprop('game', 'The game this Ship belongs to')
def __init__(self, name, size, game):
self.name = name
self.size = size
self.hits = set()
self.locations = set()
self.game = game
self.symbol = self.name[0]
def __repr__(self):
return "<%s %s %s>" % (self.__class__.__name__, id(self), self.name)
def placeShip(self, location, orientation):
self.locations = set()
newLocation = location
self.locations.add(newLocation)
for i in range(self.size - 1):
newLocation = (
newLocation[0] + VECTOR_DICT[orientation][0],
newLocation[1] + VECTOR_DICT[orientation][1],
)
self.locations.add(newLocation)
def isPlacementValid(self):
return self.game.isValidShipPlacement(self)
def addHit(self, location):
if location not in self.locations:
return
self.hits.add(location)
def isSunk(self):
return self.hits == self.locations
def getProtoShip(self):
        # game is required by __init__, so pass this ship's game along
        return Ship(self.name, self.size, self.game)
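# Illustrative worked example (not in the original module): a size-3 ship
# placed at (2, 2) facing UP occupies {(2, 2), (2, 3), (2, 4)}, because
# VECTOR_DICT[UP] == (0, 1) is added size-1 times (`game` below stands for a
# hypothetical game instance):
#   ship = Ship('Cruiser', 3, game)
#   ship.placeShip((2, 2), UP)
#   ship.locations == {(2, 2), (2, 3), (2, 4)}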
|
tycho/yubioath-desktop
|
yubioath/gui/messages.py
|
Python
|
gpl-3.0
| 5,696
| 0.000351
|
# Copyright (c) 2014 Yubico AB
# All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Additional permission under GNU GPL version 3 section 7
#
# If you modify this program, or any covered work, by linking or
# combining it with the OpenSSL project's OpenSSL library (or a
# modified version of that library), containing parts covered by the
# terms of the OpenSSL or SSLeay licenses, We grant you additional
# permission to convey the resulting work. Corresponding Source for a
# non-source form of such a combination shall include the source code
# for the parts of OpenSSL used as well as that of the covered work.
"""
Strings for Yubico Authenticator.
Note: String names must not start with underscore (_).
"""
organization = "Yubico"
domain = "yubico.com"
app_name = "Yubico Authenticator"
win_title_1 = "Yubico Authenticator (%s)"
about_1 = "About: %s"
copyright = "Copyright © Yubico"
version_1 = "Version: %s"
wait = "Please wait..."
error = "Error"
menu_file = "&File"
menu_help = "&Help"
action_about = "&About"
action_add = "&Add..."
action_import = "&Import..."
action_reset = "&Reset"
action_password = "Set/Change &password"
action_settings = "&Settings"
action_delete = "&Delete"
action_show = "&Show credentials"
action_close = "&Close Window"
action_quit = "&Quit"
password = "Password"
settings = "Settings"
advanced = "Advanced"
search = "Search"
pass_required = "Password required"
remember = "Remember password"
no_key = "Insert a YubiKey..."
key_busy = "YubiKey already in use!"
key_present = "YubiKey found. Reading..."
key_removed = "YubiKey removed"
key_removed_desc = "There was an error communicating with the device!"
n_digits = "Number of digits"
enable_systray = "Show in system tray"
kill_scdaemon = "Kill scdaemon on show"
reader_name = "Card reader name"
no_creds = "No credentials available"
add_cred = "New credential"
cred_name = "Credential name"
cred_key = "Secret key (base32)"
cred_type = "Credential type"
cred_totp = "Time based (TOTP)"
cred_hotp = "Counter based (HOTP)"
algorithm = "Algorithm"
invalid_name = "Invalid name"
invalid_name_desc = "Name must be at least 3 characters"
invalid_key = "Invalid key"
invalid_key_desc = "Key must be base32 encoded"
set_pass = "Set password"
new_pass = "New password (blank for none)"
ver_pass = "Verify new password"
pass_mismatch = "Passwords do not match"
pass_mismatch_desc = "Please enter the same password twice"
touch_title = "Touch required"
touch_desc = "Touch your YubiKey now"
reset_title = "Confirm reset"
reset_warning_desc = """<span>Are you sure you want to delete all OATH credentials on the device?</span>
<br><br>
<b>This action cannot be undone.</b>
<br><br>
"""
imported = "Import complete"
imported_desc = "Found %d tokens, successf
|
ully imported %d tokens.%s"
delete_title = "Confirm credential deletion"
delete_desc_1 = """<span>Are you sure you want to delete the credential?</span>
<br>
This action cannot be undone.
<br><br>
<b>Delete credential: %s</b>
"""
free = "free"
in_use = "in use"
require_touch = "Require touch"
require_manual_refresh = "Require manual refresh"
overwrite_entry = "Overwrite entry?"
overwrite_entry_desc = "An entry with this username already exists.\n\nDo " \
"you wish to o
|
verwrite it? This action cannot be undone."
qr_scan = "Scan a QR code"
qr_scanning = "Scanning for QR code..."
qr_not_found = "QR code not found"
qr_not_found_desc = "No usable QR code detected. Make sure the QR code is " \
"fully visible on your primary screen and try again."
qr_invalid_type = "Invalid OTP type"
qr_invalid_type_desc = "Only TOTP and HOTP types are supported."
qr_invalid_digits = "Invalid number of digits"
qr_invalid_digits_desc = "An OTP may only contain 6 or 8 digits."
qr_invalid_algo = "Unsupported algorithm"
qr_invalid_algo_desc = "HMAC algorithm '%s' is not supported."
qr_missing_key = "Invalid QR code"
qr_missing_key_desc = "The QR code found on screen is missing the '%s' attribute."
tt_num_digits = "The number of digits to show for the credential."
tt_systray = "When checked, display an icon in the systray, and leave the " \
"application running there when closed."
tt_kill_scdaemon = "Kills any running scdaemon process when the window is " \
"shown. This is useful when using this application together with GnuPG " \
"to avoid GnuPG locking the device."
tt_reader_name = "Changes the default smartcard reader name to look for. " \
"This can be used to target a specific YubiKey when multiple are used, " \
"or to target an NFC reader."
ccid_disabled = '<b>CCID (smart card capabilities) is disabled on the ' \
'inserted YubiKey.</b><br><br>Without CCID enabled, you will only be ' \
'able to store 2 credentials.<br><br>' \
'<a href="%s">Learn how to enable CCID</a><br>'
no_space = "No space available"
no_space_desc = "There is not enough space to add another " \
"credential on your device.\n\nTo create free space to add a " \
"new credential, delete those you no longer need."
oath_backend = "OATH Storage Backend"
oath_backend_ccid = "Smart Card"
oath_backend_sqlite = "SQLite"
|
rgurevych/python_for_testers
|
tests/test_contacts_data_compliance.py
|
Python
|
apache-2.0
| 3,163
| 0.007272
|
import re
from models.contact import Contact
def test_all_contacts_on_homepage(app, db):
if app.contact.count() == 0:
app.contact.add(Contact(first_name="Mister", last_name="Muster", mobile_phone="123", email_1="[email protected]"))
contacts_from_homepage = sorted(app.contact.get_contact_list(), key = Contact.contact_id_or_max)
contacts_from_db = sorted(db.get_contact_list(), key = Contact.contact_id_or_max)
for i in range(len(contacts_from_homepage)):
hp_contact=contacts_from_homepage[i]
db_contact=contacts_from_db[i]
assert hp_contact.first_name == db_contact.first_name
assert hp_contact.last_name == db_contact.last_name
assert clear_address(hp_contact.address) == clear_address(db_contact.address)
assert clear_phone(hp_contact.all_phones_homepage) == clear_phone(merge_phones_homepage(db_contact))
assert hp_contact.all_emails_homepage == merge_emails_homepage(db_contact)
print("Successfully verified %s contacts vs Database" % str(len(contacts_from_homepage)))
"""def test_contact_on_homepage(app):
if app.contact.count() == 0:
        app.contact.add(Contact(first_name="Mister", last_name="Muster", mobile_phone="123", email_1="[email protected]"))
index = randrange(len(app.contact.get_contact_list()))
contact_from_homepage = app.contact.get_contact_list()[index]
contact_from_editpage = app.contact.get_contact_data_editpage(index)
    assert contact_from_homepage.first_name == contact_from_editpage.first_name
assert contact_from_homepage.last_name == contact_from_editpage.last_name
assert contact_from_homepage.address == contact_from_editpage.address
assert contact_from_homepage.all_phones_homepage == merge_phones_homepage(contact_from_editpage)
assert contact_from_homepage.all_emails_homepage == merge_emails_homepage(contact_from_editpage)"""
"""def test_phones_on_viewpage(app):
contact_from_viewpage = app.contact.get_contact_data_viewpage(0)
contact_from_editpage = app.contact.get_contact_data_editpage(0)
assert contact_from_viewpage.home_phone == contact_from_editpage.home_phone
assert contact_from_viewpage.work_phone == contact_from_editpage.work_phone
assert contact_from_viewpage.mobile_phone == contact_from_editpage.mobile_phone
assert contact_from_viewpage.fax == contact_from_editpage.fax"""
def clear(s):
#return "".join(symbol for symbol in s if symbol not in "[]()- 0")
return re.sub("[- ()]", "", s)
def clear_phone(number):
return re.sub("0", "", number)
def clear_address(address):
return re.sub("[\n\r\s+]", "", address)
def merge_phones_homepage(contact):
return "\n".join(filter(lambda x: x != "",
map(lambda x: clear(x),
filter(lambda x: x is not None,
[contact.home_phone, contact.mobile_phone, contact.work_phone]))))
def merge_emails_homepage(contact):
return "\n".join(filter(lambda x: x != "", filter(lambda x: x is not None,
[contact.email_1, contact.email_2, contact.email_3])))
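def test_clear_helpers_example():
    # Illustrative example added for documentation (not an original test):
    # clear() strips phone separators and clear_phone() strips zeros.
    assert clear("(044) 123-45 67") == "0441234567"
    assert clear_phone("0441234567") == "441234567"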
|
KSG-IT/ksg-nett
|
schedules/migrations/0009_auto_20190422_1627.py
|
Python
|
gpl-3.0
| 489
| 0.002045
|
# Generated by Django 2.2 on 2019-04-22 16:27
from django.db import migrations
import model_utils.fields
class Migration(migrations.Migration):
dependencies = [
('schedules', '0008_auto_20180208_1946'),
]
operations = [
migrations.AlterField(
model_name='shiftslotdayrule',
name='rule',
            field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='mo', max_length=2, no_check_for_status=True),
),
]
|
grow/pygrow
|
grow/extensions/base_extension_test.py
|
Python
|
mit
| 844
| 0
|
"""Tests for base extension."""
import unittest
from grow.extensions import base_extension
class BaseExtensionTestCase(unittest.TestCase):
"""Test the base extension."""
def test_config_disabled(self):
"""Uses the disabled config."""
ext = base_extension.BaseExtension(None, {
            'disabled': [
'a',
],
'enabled': [
'a',
],
})
self.assertFalse(ext.hooks.is_enabled('a'))
self.assertFalse(ext.hooks.is_enabled('b'))
def test_config_enabled(self):
"""Uses the enabled config."""
ext = base_extension.BaseExtension(None, {
'enabled': [
'a',
],
})
self.assertTrue(ext.hooks.is_enabled('a'))
        self.assertFalse(ext.hooks.is_enabled('b'))
|
leeclemmer/ggeocode
|
ggeocode/ggeocode.py
|
Python
|
mit
| 3,893
| 0.041613
|
import sys
import logging
import urllib
import urllib2
import json
from lxml import etree
class GGeocode():
""" Wrapper for Google Geocode API v3.
https://developers.google.com/maps/documentation/geocoding/
"""
def __init__(self, method='http',
output='json',
sensor='false',
address='',
components='',
latlng='',
client='',
signature='',
bounds='',
language='',
region=''):
# Construct base url
self.method = method.lower()
if method not in ['http','https']:
raise ValueError("""'method' is '%s' -
needs to be either 'http' or 'https'""" % (method,))
self.output = output.lower()
if output not in ['json','xml']:
raise ValueError("""'output' is '%s' -
needs to be either 'xml' or 'json'""" % (output,))
self.base_url = '%s://maps.googleapis.com/maps/api/geocode/%s?' % \
(method, output)
# Collect parameters
self.params = {}
# required parameters:
# sensor
# address or latlng or components
self.params['sensor'] = sensor.lower()
if sensor not in ['true','false']:
raise ValueError("""'sensor' is '%s' -
needs to be either 'true' or 'false'""" % (sensor,))
if (address and (latlng or components)) or (latlng and components):
raise ValueError("""Only supply one of these (not more):
address, latlng, or components""")
if not address and not latlng and not components:
raise ValueError("""Must supply one of the following:
address, latlng, or components""")
if address: self.params['address'] = address
if latlng: self.params['latlng'] = latlng
if components:
for component in components.split('|'):
if ':' not in component:
raise ValueError("""Component is %s - must be in the form
of 'component:value'""" % (component,))
                if component.split(':')[0] not in ['route',
                                                   'locality',
                                                   'administrative_area',
                                                   'postal_code',
                                                   'country']:
raise ValueError("""Component is %s - must be:
route, locality, administrative_area,
postal_code or country""" % (component.split(':')[0],))
self.params['components'] = components
# optional parameters:
# client and signature
# bounds
# language
# region
if (client and not signature) or (signature and not client):
raise ValueError("""Must supply both client and signature.""")
if client and signature:
self.params['client'] = client
self.params['signature'] = signature
if bounds: self.params['bounds'] = bounds
if language: self.params['language'] = language
# Access Google Geocoder API
try:
self.url = '%s%s' % (self.base_url,
urllib.urlencode(self.params))
self.response = urllib2.urlopen(self.url).read()
except:
e = sys.exc_info()[1]
logging.error(e)
# Get status and results
if output == 'json':
self.output = json.loads(self.response)
self.status = self.output['status']
self.results = self.output['results']
self.results_count = len(self.results)
if address or components:
self.lat = self.results[0]['geometry']['location']['lat']
self.lon = self.results[0]['geometry']['location']['lng']
elif latlng:
self.address = self.results[0]['formatted_address']
elif output == 'xml':
self.output = etree.fromstring(self.response)
self.status = self.output.xpath('/GeocodeResponse/status/text()')[0]
self.results = self.output.xpath('/GeocodeResponse/result')
self.results_count = len(self.results)
if address or components:
self.lat = self.results[0].xpath('geometry/location/lat/text()')[0]
self.lon = self.results[0].xpath('geometry/location/lng/text()')[0]
elif latlng:
self.address = self.results[0].xpath('formatted_address')[0]
if self.status != 'OK':
logging.error("Call to %s unsuccessful (Error code '%s')" % \
(self.url,self.status))
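# Illustrative usage (not part of the original module); constructing the
# object performs a live API call, so the address here is hypothetical:
#   g = GGeocode(address='1600 Amphitheatre Parkway, Mountain View, CA')
#   if g.status == 'OK':
#       print g.lat, g.lon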
|
chipx86/reviewboard
|
reviewboard/reviews/evolutions/change_descriptions.py
|
Python
|
mit
| 432
| 0
|
from __future__ import unicode_literals
from django.db import models
from django_evolution.mutations import AddField
MUTATIONS = [
AddField('ReviewRequest', 'changedescs', models.ManyToManyField,
related_model='changedescs.ChangeDescription'),
AddField('ReviewRequestDraft', 'changedesc', models.ForeignKey,
initial=None, null=True,
             related_model='changedescs.ChangeDescription')
]
|
theo-l/django
|
django/db/backends/mysql/features.py
|
Python
|
bsd-3-clause
| 6,495
| 0.002002
|
import operator
from django.db.backends.base.features import BaseDatabaseFeatures
from django.utils.functional import cached_property
class DatabaseFeatures(BaseDatabaseFeatures):
empty_fetchmany_value = ()
allows_group_by_pk = True
related_fields_match_type = True
# MySQL doesn't support sliced subqueries with IN/ALL/ANY/SOME.
allow_sliced_subqueries_with_in = False
has_select_for_update = True
supports_forward_references = False
supports_regex_backreferencing = False
supports_date_lookup_using_string = False
can_introspect_autofield = True
can_introspect_binary_field = False
can_introspect_duration_field = False
can_introspect_small_integer_field = True
can_introspect_positive_integer_field = True
introspected_boolean_field_type = 'IntegerField'
supports_index_column_ordering = False
supports_timezones = False
requires_explicit_null_ordering_when_grouping = True
allows_auto_pk_0 = False
can_release_savepoints = True
atomic_transactions = False
can_clone_databases = True
supports_temporal_subtraction = True
supports_select_intersection = False
supports_select_difference = False
supports_slicing_ordering_in_compound = True
supports_index_on_text_field = False
has_case_insensitive_like = False
create_test_procedure_without_params_sql = """
CREATE PROCEDURE test_procedure ()
BEGIN
DECLARE V_I INTEGER;
SET V_I = 1;
END;
"""
create_test_procedure_with_int_param_sql = """
CREATE PROCEDURE test_procedure (P_I INTEGER)
BEGIN
DECLARE V_I INTEGER;
SET V_I = P_I;
END;
"""
db_functions_convert_bytes_to_str = True
# Neither MySQL nor MariaDB support partial indexes.
supports_partial_indexes = False
supports_order_by_nulls_modifier = False
order_by_nulls_first = True
@cached_property
def _mysql_storage_engine(self):
"Internal method used in Django tests. Don't rely on this from your code"
with self.connection.cursor() as cursor:
cursor.execute("SELECT ENGINE FROM INFORMATION_SCHEMA.ENGINES WHERE SUPPORT = 'DEFAULT'")
result = cursor.fetchone()
return result[0]
@cached_property
def update_can_self_select(self):
return self.connection.mysql_is_mariadb and self.connection.mysql_version >= (10, 3, 2)
@cached_property
def can_introspect_foreign_keys(self):
"Confirm support for introspected foreign keys"
return self._mysql_storage_engine != 'MyISAM'
@cached_property
def can_return_columns_from_insert(self):
return self.connection.mysql_is_mariadb and self.connection.mysql_version >= (10, 5, 0)
can_return_rows_from_bulk_insert = property(operator.attrgetter('can_return_columns_from_insert'))
@cached_property
def has_zoneinfo_database(self):
# Test if the time zone definitions are installed. CONVERT_TZ returns
        # NULL if 'UTC' timezone isn't loaded into the mysql.time_zone.
with self.connection.cursor() as cursor:
cursor.execute("SELECT CONVERT_TZ('2001-01-01 01:00:00', 'UTC', 'UTC')")
return cursor.fetchone()[0] is not None
@cached_property
def is_sql_auto_is_null_enabled(self):
with self.connection.cursor() as cursor:
cursor.execute('SELECT @@SQL_AUTO_IS_NULL')
result = cursor.fetchone()
            return result and result[0] == 1
@cached_property
def supports_over_clause(self):
if self.connection.mysql_is_mariadb:
return True
return self.connection.mysql_version >= (8, 0, 2)
supports_frame_range_fixed_distance = property(operator.attrgetter('supports_over_clause'))
@cached_property
def supports_column_check_constraints(self):
if self.connection.mysql_is_mariadb:
return self.connection.mysql_version >= (10, 2, 1)
return self.connection.mysql_version >= (8, 0, 16)
supports_table_check_constraints = property(operator.attrgetter('supports_column_check_constraints'))
@cached_property
def can_introspect_check_constraints(self):
if self.connection.mysql_is_mariadb:
version = self.connection.mysql_version
return (version >= (10, 2, 22) and version < (10, 3)) or version >= (10, 3, 10)
return self.connection.mysql_version >= (8, 0, 16)
@cached_property
def has_select_for_update_skip_locked(self):
return not self.connection.mysql_is_mariadb and self.connection.mysql_version >= (8, 0, 1)
@cached_property
def has_select_for_update_nowait(self):
if self.connection.mysql_is_mariadb:
return self.connection.mysql_version >= (10, 3, 0)
return self.connection.mysql_version >= (8, 0, 1)
@cached_property
def supports_explain_analyze(self):
return self.connection.mysql_is_mariadb or self.connection.mysql_version >= (8, 0, 18)
@cached_property
def supported_explain_formats(self):
# Alias MySQL's TRADITIONAL to TEXT for consistency with other
# backends.
formats = {'JSON', 'TEXT', 'TRADITIONAL'}
if not self.connection.mysql_is_mariadb and self.connection.mysql_version >= (8, 0, 16):
formats.add('TREE')
return formats
@cached_property
def supports_transactions(self):
"""
All storage engines except MyISAM support transactions.
"""
return self._mysql_storage_engine != 'MyISAM'
@cached_property
def ignores_table_name_case(self):
with self.connection.cursor() as cursor:
cursor.execute('SELECT @@LOWER_CASE_TABLE_NAMES')
result = cursor.fetchone()
return result and result[0] != 0
@cached_property
def supports_default_in_lead_lag(self):
# To be added in https://jira.mariadb.org/browse/MDEV-12981.
return not self.connection.mysql_is_mariadb
@cached_property
def supports_json_field(self):
if self.connection.mysql_is_mariadb:
return self.connection.mysql_version >= (10, 2, 7)
return self.connection.mysql_version >= (5, 7, 8)
@cached_property
def can_introspect_json_field(self):
if self.connection.mysql_is_mariadb:
return self.supports_json_field and self.can_introspect_check_constraints
return self.supports_json_field
|
CompassionCH/compassion-modules
|
partner_communication/wizards/pdf_wizard.py
|
Python
|
agpl-3.0
| 2,115
| 0.001891
|
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <[email protected]>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import base64
import logging
from odoo import models, api, fields
_logger = logging.getLogger(__name__)
try:
from wand.image import Image
except ImportError:
_logger.warning("Please install wand to use PDF Previews")
class PdfPreviewWizard(models.TransientModel):
"""
    Generate pdf of communication.
"""
_name = "partner.communication.pdf.wizard"
_description = "Partner Communication - PDF Wizard"
communication_id = fields.Many2one(
"partner.communication.job", required=True, ondelete="cascade", readonly=False
)
preview = fields.Binary(compute="_compute_pdf")
state = fields.Selection(related="communication_id.send_mode")
    send_state = fields.Selection(related="communication_id.state")
body_html = fields.Html(compute="_compute_html")
@api.multi
def _compute_pdf(self):
if self.state != "physical":
return
comm = self.communication_id
report = comm.report_id.with_context(
lang=comm.partner_id.lang, must_skip_send_to_printer=True, bin_size=False
)
data = report.render_qweb_pdf(comm.ids)
with Image(blob=data[0], resolution=150) as pdf_image:
preview = base64.b64encode(pdf_image.make_blob(format="jpeg"))
self.preview = preview
@api.multi
def _compute_html(self):
comm = self.communication_id
template = getattr(comm.email_template_id, "sendgrid_localized_template", False)
        if template:
            body_html = template.html_content.replace("<%body%>", comm.body_html)
            self.body_html = body_html
        else:
            # fall back to the raw body when no SendGrid template is set
            self.body_html = comm.body_html
@api.multi
def send(self):
return self.communication_id.send()
|
nvbn/python-social-auth
|
examples/flask_example/models/__init__.py
|
Python
|
bsd-3-clause
| 79
| 0
|
from flask_example.models import user
from social.apps.flask_app import models
|
memsharded/conan
|
conans/test/unittests/client/build/meson_test.py
|
Python
|
mit
| 9,218
| 0.003688
|
import os
import shutil
import unittest
from parameterized.parameterized import parameterized
import six
from conans.client import defs_to_string
from conans.client.build.meson import Meson
from conans.client.conf import default_settings_yml
from conans.client.tools import args_to_string
from conans.errors import ConanException
from conans.model.settings import Settings
from conans.test.utils.conanfile import ConanFileMock, MockDepsCppInfo
from conans.test.utils.test_files import temp_folder
class MesonTest(unittest.TestCase):
def setUp(self):
self.tempdir = temp_folder(path_with_spaces=False)
def tearDown(self):
shutil.rmtree(self.tempdir)
def _check_commands(self, cmd_ref, cmd_test):
cmd_ref_splitted = cmd_ref.split(' ')
cmd_test_splitted = cmd_test.split(' ')
self.assertEqual(cmd_ref_splitted[:3], cmd_test_splitted[:3])
self.assertEqual(set(cmd_ref_splitted[3:]), set(cmd_test_splitted[3:]))
def partial_build_test(self):
conan_file = ConanFileMock()
conan_file.settings = Settings()
conan_file.should_configure = False
conan_file.should_build = False
conan_file.package_folder = os.path.join(self.tempdir, "my_cache_package_folder")
meson = Meson(conan_file)
meson.configure()
self.assertIsNone(conan_file.command)
meson.build()
self.assertIsNone(conan_file.command)
meson.test()
self.assertIsNone(conan_file.command)
meson.install()
self.assertIsNone(conan_file.command)
def folders_test(self):
settings = Settings.loads(default_settings_yml)
settings.os = "Linux"
settings.compiler = "gcc"
settings.compiler.version = "6.3"
settings.arch = "x86"
settings.build_type = "Release"
package_folder = os.path.join(self.tempdir, "my_cache_package_folder")
conan_file = ConanFileMock()
conan_file.deps_cpp_info = MockDepsCppInfo()
conan_file.settings = settings
conan_file.source_folder = os.path.join(self.tempdir, "my_cache_source_folder")
conan_file.build_folder = os.path.join(self.tempdir, "my_cache_build_folder")
conan_file.package_folder = package_folder
meson = Meson(conan_file)
defs = {
'default_library': 'shared',
'prefix': package_folder,
'libdir': 'lib',
'bindir': 'bin',
'sbindir': 'bin',
'libexecdir': 'bin',
'includedir': 'include',
'cpp_std': 'none'
}
meson.configure(source_dir=os.path.join(self.tempdir, "../subdir"),
build_dir=os.path.join(self.tempdir, "build"))
source_expected = os.path.join(self.tempdir, "../subdir")
build_expected = os.path.join(self.tempdir, "build")
cmd_expected = 'meson "%s" "%s" --backend=ninja %s --buildtype=release' \
% (source_expected, build_expected, defs_to_string(defs))
self._check_commands(cmd_expected, conan_file.command)
meson.configure(build_dir=os.path.join(self.tempdir, "build"))
source_expected = os.path.join(self.tempdir, "my_cache_source_folder")
build_expected = os.path.join(self.tempdir, "build")
cmd_expected = 'meson "%s" "%s" --backend=ninja %s --buildtype=release' \
% (source_expected, build_expected, defs_to_string(defs))
self._check_commands(cmd_expected, conan_file.command)
meson.configure()
source_expected = os.path.join(self.tempdir, "my_cache_source_folder")
build_expected = os.path.join(self.tempdir, "my_cache_build_folder")
cmd_expected = 'meson "%s" "%s" --backend=ninja %s --buildtype=release' \
% (source_expected, build_expected, defs_to_string(defs))
self._check_commands(cmd_expected, conan_file.command)
meson.configure(source_folder="source", build_folder="build")
build_expected = os.path.join(self.tempdir, "my_cache_build_folder", "build")
source_expected = os.path.join(self.tempdir, "my_cache_source_folder", "source")
cmd_expected = 'meson "%s" "%s" --backend=ninja %s --buildtype=release' \
% (source_expected, build_expected, defs_to_string(defs))
self._check_commands(cmd_expected, conan_file.command)
conan_file.in_local_cache = True
meson.configure(source_folder="source", build_folder="build",
cache_build_folder="rel_only_cache")
build_expected = os.path.join(self.tempdir, "my_cache_build_folder", "rel_only_cache")
source_expected = os.path.join(self.tempdir, "my_cache_source_folder", "source")
cmd_expected = 'meson "%s" "%s" --backend=ninja %s --buildtype=release' \
% (source_expected, build_expected, defs_to_string(defs))
self._check_commands(cmd_expected, conan_file.command)
conan_file.in_local_cache = False
meson.configure(source_folder="source", build_folder="build",
cache_build_folder="rel_only_cache")
build_expected = os.path.join(self.tempdir, "my_cache_build_folder", "build")
source_expected = os.path.join(self.tempdir, "my_cache_source_folder", "source")
cmd_expected = 'meson "%s" "%s" --backend=ninja %s --buildtype=release' \
% (source_expected, build_expected, defs_to_string(defs))
self._check_commands(cmd_expected, conan_file.command)
conan_file.in_local_cache = True
meson.configure(build_dir="build", cache_build_folder="rel_only_cache")
build_expected = os.path.join(self.tempdir, "my_cache_build_folder", "rel_only_cache")
        source_expected = os.path.join(self.tempdir, "my_cache_source_folder")
cmd_expected = 'meson "%s" "%s" --backend=ninja %s --buildtype=release' \
% (source_expected, build_expected, defs_to_string(defs))
self._check_commands(cmd_expected, conan_file.command)
args = ['--werror', '--warnlevel 3']
defs['default_library'] = 'static'
meson.configure(source_folder="source", build_folder="build", args=args,
defs={'default_library': 'static'})
build_expected = os.path.join(self.tempdir, "my_cache_build_folder", "build")
source_expected = os.path.join(self.tempdir, "my_cache_source_folder", "source")
cmd_expected = 'meson "%s" "%s" --backend=ninja %s %s --buildtype=release' \
% (source_expected, build_expected, args_to_string(args), defs_to_string(defs))
self._check_commands(cmd_expected, conan_file.command)
# Raise mixing
with six.assertRaisesRegex(self, ConanException, "Use 'build_folder'/'source_folder'"):
meson.configure(source_folder="source", build_dir="build")
meson.test()
self.assertEqual("ninja -C \"%s\" %s" % (build_expected, args_to_string(["test"])), conan_file.command)
meson.install()
self.assertEqual("ninja -C \"%s\" %s" % (build_expected, args_to_string(["install"])), conan_file.command)
def prefix_test(self):
conan_file = ConanFileMock()
conan_file.deps_cpp_info = MockDepsCppInfo()
conan_file.settings = Settings()
conan_file.package_folder = os.getcwd()
expected_prefix = '-Dprefix="%s"' % os.getcwd()
meson = Meson(conan_file)
meson.configure()
self.assertIn(expected_prefix, conan_file.command)
meson.build()
self.assertIn("ninja -C", conan_file.command)
meson.install()
self.assertIn("ninja -C", conan_file.command)
def no_prefix_test(self):
conan_file = ConanFileMock()
conan_file.deps_cpp_info = MockDepsCppInfo()
conan_file.settings = Settings()
conan_file.package_folder = None
meson = Meson(conan_file)
meson.configure()
self.assertNotIn('-Dprefix', conan_file.command)
meson.build()
self.assertIn("ninja -C", conan_file.command)
with self.assertRaises(T
|
hzlf/openbroadcast
|
website/cms/plugins/inherit/models.py
|
Python
|
gpl-3.0
| 681
| 0.005874
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from cms.models import CMSPlugin, Page
from django.conf import settings
class InheritPagePlaceholder(CMSPlugin):
"""
Provides the ability to inherit plugins for a certain placeholder from an associated "parent" page instance
"""
    from_page = models.ForeignKey(Page, null=True, blank=True, help_text=_("Choose a page to include its plugins into this placeholder, empty will choose current page"))
from_language = models.CharField(_("language"), max_length=5, choices=settings.CMS_LANGUAGES, blank=True, null=True, help_text=_("Optional: the language of the plugins you want"))
|
lanbing510/GTDWeb
|
manage.py
|
Python
|
gpl-2.0
| 249
| 0
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gtdweb.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
bastustrump/genimpro
|
rp_extract.py
|
Python
|
mit
| 39,383
| 0.013737
|
'''
RP_extract: Rhythm Patterns Audio Feature Extractor
@author: 2014-2015 Alexander Schindler, Thomas Lidy
Re-implementation by Alexander Schindler of RP_extract for Matlab
Matlab version originally by Thomas Lidy, based on Musik Analysis Toolbox by Elias Pampalk
( see http://ifs.tuwien.ac.at/mir/downloads.html )
Main function is rp_extract. See function definition and description for more information,
or example usage in main function.
Note: All required functions are provided by the two main scientific libraries numpy and scipy.
Note: In case you alter the code to use transform2mel, librosa needs to be installed: pip install librosa
'''
import numpy as np
from scipy import stats
from scipy.fftpack import fft
#from scipy.fftpack import rfft # Discrete Fourier transform of a real sequence.
from scipy import interpolate
# suppress numpy warnings (divide by 0 etc.)
np.set_printoptions(suppress=True)
# required for debugging
np.set_printoptions(precision=8,
threshold=10,
suppress=True,
linewidth=200,
edgeitems=10)
# INITIALIZATION: Constants & Mappings
# Bark Scale
bark = [100, 200, 300, 400, 510, 630, 770, 920, 1080, 1270, 1480, 1720, 2000, 2320, 2700, 3150, 3700, 4400, 5300, 6400, 7700, 9500, 12000, 15500]
n_bark_bands = len(bark)
# copy the bark vector (using [:]) and add a 0 in front (to make calculations below easier)
barks = bark[:]
barks.insert(0,0)
# Phone Scale
phon = [3, 20, 40, 60, 80, 100, 101]
# copy the bark vector (using [:]) and add a 0 in front (to make calculations below easier)
phons = phon[:]
phons.insert(0,0)
phons = np.asarray(phons)
# Loudness Curves
eq_loudness = np.array([[55, 40, 32, 24, 19, 14, 10, 6, 4, 3, 2, 2, 0,-2,-5,-4, 0, 5, 10, 14, 25, 35],
[66, 52, 43, 37, 32, 27, 23, 21, 20, 20, 20, 20,19,16,13,13,18, 22, 25, 30, 40, 50],
[76, 64, 57, 51, 47, 43, 41, 41, 40, 40, 40,39.5,38,35,33,33,35, 41, 46, 50, 60, 70],
[89, 79, 74, 70, 66, 63, 61, 60, 60, 60, 60, 59,56,53,52,53,56, 61, 65, 70, 80, 90],
[103, 96, 92, 88, 85, 83, 81, 80, 80, 80, 80, 79,76,72,70,70,75, 79, 83, 87, 95,105],
[118,110,107,105,103,102,101,100,100,100,100, 99,97,94,90,90,95,100,103,105,108,115]])
loudn_freq = np.array([31.62, 50, 70.7, 100, 141.4, 200, 316.2, 500, 707.1, 1000, 1414, 1682, 2000, 2515, 3162, 3976, 5000, 7071, 10000, 11890, 14140, 15500])
# We have the loudness values for the frequencies in loudn_freq
# now we calculate in loudn_bark a matrix of loudness sensation values for the bark bands margins
i = 0
j = 0
loudn_bark = np.zeros((eq_loudness.shape[0], len(bark)))
for bsi in bark:
while j < len(loudn_freq) and bsi > loudn_freq[j]:
j += 1
j -= 1
if np.where(loudn_freq == bsi)[0].size != 0: # loudness value for this frequency already exists
loudn_bark[:,i] = eq_loudness[:,np.where(loudn_freq == bsi)][:,0,0]
else:
w1 = 1 / np.abs(loudn_freq[j] - bsi)
w2 = 1 / np.abs(loudn_freq[j + 1] - bsi)
loudn_bark[:,i] = (eq_loudness[:,j]*w1 + eq_loudness[:,j+1]*w2) / (w1 + w2)
i += 1
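# Illustrative worked example (not in the original source): the 920 Hz bark
# margin falls between loudn_freq entries 707.1 Hz and 1000 Hz, so its value
# on the first loudness curve is interpolated with inverse-distance weights
# w1 = 1/|707.1 - 920| and w2 = 1/|1000 - 920|:
#   (4*w1 + 3*w2) / (w1 + w2) ~= 3.27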
# SPECTRAL MASKING Spreading Function
# CONST_spread contains matrix of spectral frequency masking factors
CONST_spread = np.zeros((n_bark_bands,n_bark_bands))
for i in range(n_bark_bands):
CONST_spread[i,:] = 10**((15.81+7.5*((i-np.arange(n_bark_bands))+0.474)-17.5*(1+((i-np.arange(n_bark_bands))+0.474)**2)**0.5)/10)
# UTILITY FUNCTIONS
def nextpow2(num):
'''NextPow2
find the next highest number to the power of 2 to a given number
and return the exponent to 2
(analogously to Matlab's nextpow2() function)
'''
n = 2
i = 1
while n < num:
n *= 2
i += 1
return i
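# Illustrative examples (not in the original source):
#   nextpow2(1000) == 10   # 2**10 == 1024 is the next power of 2 >= 1000
#   nextpow2(1024) == 10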
# FFT FUNCTIONS
def periodogram(x,win,Fs=None,nfft=1024):
''' Periodogram
Periodogram power spectral density estimate
Note: this function was written with 1:1 Matlab compatibility in mind.
The number of points, nfft, in the discrete Fourier transform (DFT) is the maximum of 256 or the next power of two greater than the signal length.
:param x: time series data (e.g. audio signal), ideally length matches nfft
:param win: window function to be applied (e.g. Hanning window). in this case win expects already data points of the window to be provided.
:param Fs: sampling frequency (unused)
:param nfft: number of bins for FFT (ideally matches length of x)
:return: Periodogram power spectrum (np.array)
'''
#if Fs == None:
# Fs = 2 * np.pi # commented out because unused
U = np.dot(win.conj().transpose(), win) # compensates for the power of the window.
Xx = fft((x * win),nfft) # verified
P = Xx*np.conjugate(Xx)/U
# Compute the 1-sided or 2-sided PSD [Power/freq] or mean-square [Power].
# Also, compute the corresponding freq vector & freq units.
# Generate the one-sided spectrum [Power] if so wanted
if nfft % 2 != 0:
select = np.arange((nfft+1)/2) # ODD
P = P[select,:] # Take only [0,pi] or [0,pi)
P[1:-1] = P[1:-1] * 2 # Only DC is a unique point and doesn't get doubled
else:
#select = np.arange(nfft/2+1); # EVEN
#P = P[select,:] # Take only [0,pi] or [0,pi) # TODO: why commented out?
P[1:-2] = P[1:-2] * 2
P = P / (2 * np.pi)
return P
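# Hedged usage sketch (added; the synthetic signal is a made-up example):
#   win = np.hanning(1024)
#   x = np.sin(2 * np.pi * 440 * np.arange(1024) / 44100.0)  # 440 Hz tone
#   P = periodogram(x, win, nfft=1024)  # complex dtype; np.abs(P) gives power values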
def calc_spectrogram(wavsegment,fft_window_size,fft_overlap = 0.5,real_values=True):
''' Calc_Spectrogram
calculate spectrogram using periodogram function (which performs FFT) to convert wave signal data
from time to frequency domain (applying a Hanning window and (by default) 50 % window overlap)
:param wavsegment: audio wave file data for a segment to be analyzed (mono only, i.e. a 1-dimensional vector)
:param fft_window_size: windows size to apply FFT to
:param fft_overlap: overlap to apply during FFT analysis in % fraction (e.g. default = 0.5, means 50% overlap)
:param real_values: if True, return real values by taking abs(spectrogram), if False return complex values
:return: spectrogram matrix as numpy array (fft_window_size, n_frames)
'''
# hop_size (increment step in samples, determined by fft_window_size and fft_overlap)
hop_size = int(fft_window_size*(1-fft_overlap))
# this would compute the segment length, but it's pre-defined above ...
# segment_size = fft_window_size + (frames-1) * hop_size
# ... therefore we convert the formula to give the number of frames needed to iterate over the segment:
n_frames = (wavsegment.shape[0] - fft_window_size) / hop_size + 1
# n_frames_old = wavsegment.shape[0] / fft_window_size * 2 - 1 # number of iterations with 50% overlap
# TODO: provide this as parameter for better caching?
han_window = np.hanning(fft_window_size) # verified
# initialize result matrix for spectrogram
spectrogram = np.zeros((fft_window_size, n_frames), dtype=np.complex128)
# start index for frame-wise iteration
ix = 0
for i in range(n_frames): # stepping through the wave segment, building spectrum for each window
spectrogram[:,i] = periodogram(wavsegment[ix:ix+fft_window_size], win=han_window,nfft=fft_window_size)
ix = ix + hop_size
# NOTE: tested scipy periodogram BUT it delivers totally different values AND takes 2x the time of our periodogram function (0.13 sec vs. 0.06 sec)
# from scipy.signal import periodogram # move on top
#f, spec = periodogram(x=wavsegment[idx],fs=samplerate,window='hann',nfft=fft_window_size,scaling='spectrum',return_onesided=True)
if real_values: spectrogram = np.abs(spectrogram)
return (spectrogram)
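# Hedged usage sketch (added; wav_data is a hypothetical mono signal):
#   wav_data = np.random.randn(262144)                       # ~6 s at 44.1 kHz
#   spec = calc_spectrogram(wav_data, fft_window_size=1024)  # -> (1024, n_frames)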
# FEATURE FUNCTIONS
def calc_statistical_features(matrix):
result = np.zeros((matrix.shape[0],7))
result[:,0] = np.mean(matrix, axis=1)
result[:,1] = np.var(matrix, axis=1, dtype=np.float64) # the values for variance differ between MATLAB and Numpy!
    result[:,2] = stats.skew(matrix, axis=1)
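    # Hedged sketch (added; the original file is truncated here): the remaining
    # columns of the 7-column result are assumed to be kurtosis, median, min
    # and max, computed per row like the statistics above.
    result[:,3] = stats.kurtosis(matrix, axis=1)
    result[:,4] = np.median(matrix, axis=1)
    result[:,5] = np.min(matrix, axis=1)
    result[:,6] = np.max(matrix, axis=1)
    return np.nan_to_num(result)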
|
chrisidefix/devide
|
resources/python/filename_view_module_mixin_frame.py
|
Python
|
bsd-3-clause
| 2,169
| 0.005533
|
#!/usr/bin/env python
# -*- coding: ISO-8859-1 -*-
# generated by wxGlade 0.3.5.1 on Sat Jun 04 00:11:24 2005
import wx
class FilenameViewModuleMixinFrame(wx.Frame):
def __init__(self, *args, **kwds):
# begin wxGlade: FilenameViewModuleMixinFrame.__init__
kwds["style"] = wx.CAPTION|wx.MINIMIZE_BOX|wx.MAXIMIZE_BOX|wx.SYSTEM_MENU|wx.RESIZE_BORDER
wx.Frame.__init__(self, *args, **kwds)
self.viewFramePanel = wx.Panel(self, -1)
self.label_8_copy_1 = wx.StaticText(self.viewFramePanel, -1, "Filename")
self.filenameText = wx.TextCtrl(self.viewFramePanel, -1, "")
self.browseButtonId = wx.NewId()
self.browseButton = wx.Button(self.viewFramePanel, self.browseButtonId, "Browse")
self.__set_properties()
self.__do_layout()
# end wxGlade
def __set_properties(self):
# begin wxGlade: FilenameViewModuleMixinFrame.__set_properties
self.SetTitle("SomeModule")
# end wxGlade
def __do_layout(self):
# begin wxGlade: FilenameViewModuleMixinFrame.__do_layout
sizer_1 = wx.BoxSizer(wx.VERTICAL)
sizer_5 = wx.BoxSizer(wx.VERTICAL)
sizer_3 = wx.BoxSizer(wx.HORIZONTAL)
sizer_3.Add(self.label_8_copy_1, 0, wx.LEFT|wx.RIGHT|wx.ALIGN_CENTER_VERTICAL, 2)
sizer_3.Add(self.filenameText, 1, wx.ALIGN_CENTER_VERTICAL, 0)
sizer_3.Add(self.browseButton, 0, wx.ALIGN_CENTER_VERTICAL, 0)
        sizer_5.Add(sizer_3, 1, wx.ALL|wx.EXPAND, 7)
self.viewFramePanel.SetAutoLayout(True)
self.viewFramePanel.SetSizer(sizer_5)
sizer_5.Fit(self.viewFramePanel)
sizer_5.SetSizeHints(self.viewFramePanel)
sizer_1.Add(self.viewFramePanel, 1, wx.EXPAND, 0)
self.SetAutoLayout(True)
self.SetSizer(sizer_1)
sizer_1.Fit(self)
sizer_1.SetSizeHints(self)
self.Layout()
# end wxGlade
# end of class FilenameViewModuleMixinFrame
if __name__ == "__main__":
app = wx.PySimpleApp(0)
wx.InitAllImageHandlers()
frame_1 = FilenameViewModuleMixinFrame(None, -1, "")
app.SetTopWindow(frame_1)
frame_1.Show()
app.MainLoop()
|
Gnomescroll/Gnomescroll
|
server/waflib/extras/sync_exec.py
|
Python
|
gpl-3.0
| 777
| 0.023166
|
#! /usr/bin/env python
# encoding: utf-8
"""
Force the execution output to be synchronized
May deadlock with a lot of output (subprocess limitation)
"""
import sys
from waflib.Build import BuildContext
from waflib import Utils, Logs
def exec_command(self, cmd, **kw):
subprocess = Utils.subprocess
kw['shell'] = isinstance(cmd, str)
Logs.debug('runner: %r' % cmd)
Logs.debug('runner_env: kw=%s' % kw)
    try:
kw['stdout'] = kw['stderr'] = subprocess.PIPE
p = subprocess.Popen(cmd, **kw)
(out, err) = p.communicate()
if out:
sys.stdout.write(out.decode(sys.stdout.encoding or 'iso8859-1'))
if err:
sys.stdout.write(err.decode(sys.stdout.encoding or 'iso8859-1'))
return p.returncode
except OSError:
return -1
BuildContext.exec_command = exec_command
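# Hedged usage note (added; not part of the original tool): waf extras like this
# one are usually enabled by loading them from the project wscript, e.g.
#   def options(opt):
#       opt.load('sync_exec', tooldir='waflib/extras')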
|
webcomics/dosage
|
dosagelib/plugins/namirdeiter.py
|
Python
|
mit
| 2,179
| 0.001377
|
# SPDX-License-Identifier: MIT
# Copyright (C) 2019-2020 Tobias Gruetzmacher
# Copyright (C) 2019-2020 Daniel Ring
from .common import _ParserScraper
class NamirDeiter(_ParserScraper):
imageSearch = '//img[contains(@src, "comics/")]'
prevSearch = ('//a[@rel="prev"]',
'//a[./img[contains(@src, "previous")]]',
'//a[contains(text(), "Previous")]')
def __init__(self, name, baseUrl, first=None, last=None):
if name == 'NamirDeiter':
super(NamirDeiter, self).__init__(name)
else:
            super(NamirDeiter, self).__init__('NamirDeiter/' + name)
self.url = 'https://' + baseUrl + '/'
self.stripUrl = self.url + 'comics/index.php?date=%s'
if first:
self.firstStripUrl = self.stripUrl % first
else:
self.firstStripUrl = self.url + 'comics/'
if last:
self.url = self.stripUrl % last
self.endOfLife = True
    def link_modifier(self, fromurl, tourl):
# Links are often absolute and keep jumping between http and https
return tourl.replace('http:', 'https:').replace('/www.', '/')
@classmethod
def getmodules(cls):
return (
cls('ApartmentForTwo', 'apartmentfor2.com'),
cls('NamirDeiter', 'namirdeiter.com', last='20150410'),
cls('NicoleAndDerek', 'nicoleandderek.com'),
cls('OneHundredPercentCat', 'ndunlimited.com/100cat', last='20121001'),
cls('SpareParts', 'sparepartscomics.com', first='20031022', last='20080331'),
cls('TheNDU', 'thendu.com'),
cls('WonderKittens', 'wonderkittens.com'),
cls('YouSayItFirst', 'yousayitfirst.com', first='20040220', last='20130125'),
)
class UnlikeMinerva(_ParserScraper):
name = 'NamirDeiter/UnlikeMinerva'
baseUrl = 'https://unlikeminerva.com/archive/index.php'
stripUrl = baseUrl + '?week=%s'
url = stripUrl % '127'
firstStripUrl = stripUrl % '26'
imageSearch = '//img[contains(@src, "archive/")]'
prevSearch = '//a[./img[contains(@src, "previous")]]'
multipleImagesPerStrip = True
endOfLife = True
|
hkkwok/MachOTool
|
mach_o/headers/prebind_cksum_command.py
|
Python
|
apache-2.0
| 488
| 0.002049
|
from utils.header import MagicField, Field
from load_command import LoadCommandHeader, LoadCommandCommand
class PrebindCksumCommand(LoadCommandHeader):
ENDIAN = None
FIELDS = (
        MagicField('cmd', 'I', {LoadCommandCommand.COMMANDS['LC_PREBIND_CKSUM']: 'LC_PREBIND_CKSUM'}),
Field('cmdsize', 'I'),
Field('cksum', 'I'),
)
def __init__(self, bytes_=None, **kwargs):
self.cksum = None
super(PrebindCksumCommand, self).__init__(bytes_, **kwargs)
|
tensorflow/tensorflow
|
tensorflow/python/framework/subscribe.py
|
Python
|
apache-2.0
| 12,914
| 0.006814
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Subscribe function."""
import contextlib
import re
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import tf_logging as logging
def _recursive_apply(tensors, apply_fn):
"""Helper method to recursively apply a function to structure of tensors.
The structure of the tensors should take the form similar to fetches in
`tf.compat.v1.Session` and includes single `Tensor`, `list`, nested `list`,
  `tuple`, `namedtuple`, or `dict`.
Args:
tensors: Single `Tensor`, `list`, nested `list, `tuple`, `namedtuple`, or
`dict`.
apply_fn: Function to apply to each `Tensor` and should return a `Tensor`.
Returns:
Returns the modified tensors with the same structure.
Raises:
`TypeError` if undefined type in the tensors structure.
"""
tensors_type = type(tensors)
if tensors_type is ops.Tensor:
return apply_fn(tensors)
elif isinstance(tensors, variables.Variable):
return apply_fn(tensors.value())
elif isinstance(tensors, (list, tuple)):
tensors = [_recursive_apply(t, apply_fn) for t in tensors]
if tensors_type is list:
return list(tensors)
elif tensors_type is tuple:
return tuple(tensors)
return tensors_type(*tensors) # collections.namedtuple
elif tensors_type is dict:
return dict((k, _recursive_apply(v, apply_fn)) for k, v in tensors.items())
else:
raise TypeError(f'_recursive_apply argument {tensors!r} has invalid type '
f'{tensors_type!r}')
class _ControlOutputCache(object):
"""Helper class to manage calculating and caching control_outputs in graph."""
__slots__ = ['cache']
def __init__(self):
self.cache = {}
def calc_control_outputs(self, graph):
"""Returns the map of control_outputs for a given graph.
Args:
graph: The graph to parse.
Returns:
A map of the control outputs.
"""
control_outputs = {}
for op in graph.get_operations():
for control_input in op.control_inputs:
if control_input not in control_outputs:
control_outputs[control_input] = set()
control_outputs[control_input].add(op)
return control_outputs
def get_control_outputs(self, op):
"""Return the control outputs for a given op.
Args:
op: The op to fetch control outputs for.
Returns:
Iterable of control output ops.
"""
if op.graph not in self.cache:
control_outputs = self.calc_control_outputs(op.graph)
self.cache[op.graph] = control_outputs
else:
control_outputs = self.cache[op.graph]
return control_outputs.get(op, [])
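# Hedged usage sketch (added; names are hypothetical): the cache computes the
# reverse control-dependency map once per graph and reuses it afterwards.
#   cache = _ControlOutputCache()
#   dependent_ops = cache.get_control_outputs(some_tensor.op)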
def _subscribe_new(tensor, side_effects, control_cache):
"""Helper method that subscribes a single tensor to a list of side_effects.
Args:
tensor: `tf.Tensor`
side_effects: List of side_effect functions see subscribe for details.
control_cache: `_ControlOutputCache` helper to get control_outputs faster.
Returns:
The modified replacement to the passed in tensor which triggers the side
effects.
"""
update_input = []
for consumer_op in list(tensor.consumers()): # explicit copy
update_input.append((consumer_op, list(consumer_op.inputs).index(tensor)))
update_control_input = control_cache.get_control_outputs(tensor.op)
# Trailing slash on name scope to replace the scope.
  name_scope = tensor.op.name + '/subscription/'
with ops.name_scope(name_scope):
outs = []
for s in side_effects:
outs += s(tensor)
with ops.control_dependencies(outs):
out = array_ops.identity(tensor)
for consumer_op, index in update_input:
    consumer_op._update_input(index, out)  # pylint: disable=protected-access
for consumer_op in update_control_input:
# If an op has more than one output and two or more of its output tensors
# are subscribed at the same time, we remove the control dependency from
# the original op only once and we add the dependencies to all the
# new identities.
new_control_inputs = consumer_op.control_inputs
if tensor.op in new_control_inputs:
new_control_inputs.remove(tensor.op)
new_control_inputs.append(out.op)
# pylint: disable=protected-access
consumer_op._remove_all_control_inputs()
consumer_op._add_control_inputs(new_control_inputs)
# pylint: enable=protected-access
return out
def _subscribe_extend(tensor, side_effects):
"""Helper method to extend the list of side_effects for a subscribed tensor.
Args:
tensor: A `tf.Tensor` as returned by subscribe().
side_effects: List of side_effect functions, see subscribe for details.
Returns:
The given subscribed tensor (for API consistency).
"""
assert len(tensor.op.inputs) == 1, 'Op {} must only have one input'.format(
tensor.op.name)
source_tensor = tensor.op.inputs[0]
# Build the side effect graphs and add their outputs to the list of control
# dependencies for the subscribed tensor.
outs = []
name_scope = source_tensor.op.name + '/subscription/'
with ops.name_scope(name_scope):
for s in side_effects:
outs += s(source_tensor)
out_ops = [out.op if isinstance(out, ops.Tensor) else out for out in outs]
tensor.op._add_control_inputs(out_ops) # pylint: disable=protected-access
return tensor
def _is_subscribed_identity(tensor):
"""Checks if the given tensor is an identity op returned by `subscribe()`.
Args:
tensor: A `tf.Tensor` to check.
Returns:
True if the given tensor matches the criteria for subscription identities:
its op type is `Identity`, its name matches the name of its input and
conforms to the convention for subscribed nodes.
False otherwise.
"""
  # Subscribed tensors are assumed to be identity ops.
if tensor.op.type != 'Identity':
return False
# Check that the tensor name matches the convention in place for identity ops
# created by subscribe().
match = re.match(r'(?P<prefix_name>^.*?)/subscription/Identity[^/]+',
tensor.name)
if match is None or len(match.groups()) != 1:
return False
prefix_name = match.group('prefix_name')
# Get a reference to the source tensor and check that it has a matching name.
assert len(tensor.op.inputs) == 1, 'Op {} must only have one input'.format(
tensor.op.name)
source_tensor = tensor.op.inputs[0]
if prefix_name != source_tensor.op.name:
return False
return True
def _subscribe(tensor, side_effects, control_cache):
"""Helper method that subscribes a single tensor to a list of side_effects.
This method will check if the given tensor has already been subscribed or if
it's a tensor returned by a previous call to `subscribe()` and, if so, will
reuse the existing identity op, appending the given side effects to the list
of existing ones.
Args:
tensor: The `tf.Tensor` to be subscribed.
side_effects: List of side_effect functions, see subscribe for details.
control_cache: `_ControlOutputCache` helper to get control_outputs faster.
Returns:
The modified replacement to the passed in tensor which triggers the side
    effects or the given tensor, if it was already subscribed.
"""
# Check if the given tensor has a numpy compatible type (see dtypes.py).
# If not, we cannot subscribe it, so we just return the original tensor.
if not tensor.dtype.is_numpy_compatible:
    logging.debug('Tensor {} has an un-supported {} type, skipping '
                  'subscription.'.format(tensor.name, tensor.dtype))
    return tensor
|
MariusLauge/dnd_tracker
|
dnd_tracker/settings.py
|
Python
|
gpl-3.0
| 3,300
| 0.001818
|
"""
Django settings for dnd_tracker project.
Generated by 'django-admin startproject' using Django 1.11.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'juchgjo=*=80&i=5xw18eg0-43h&wjms1wvi4j2u#8_uq0&1kc'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'frag_tracker.apps.FragTrackerConfig'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'dnd_tracker.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['./templates',],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'dnd_tracker.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Europe/Copenhagen'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
# Redirect to home URL after login (Default redirects to /accounts/profile/)
LOGIN_REDIRECT_URL = '/characterlistview'
|
renskiy/fabricio
|
examples/hello_world/fabfile.py
|
Python
|
mit
| 901
| 0.00111
|
"""
https://github.com/renskiy/fabricio/blob/master/examples/hello_world
"""
from fabricio import tasks, docker
from fabricio.misc import AvailableVagrantHosts
app = tasks.DockerTasks(
service=docker.Container(
name='app',
image='nginx:stable-alpine',
options={
# `docker run` options
'env': 'FOO=42',
},
),
hosts=AvailableVagrantHosts(),
    # rollback_command=True,  # show `rollback` command in the list
# migrate_commands=True, # show `migrate` and `migrate-back` commands in the list
# backup_commands=True, # show `backup` and `restore` commands in the list
# pull_command=True, # show `pull` command in the list
    # update_command=True,  # show `update` command in the list
# revert_command=True, # show `revert` command in the list
# destroy_command=True, # show `destroy` command in the list
)
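# Hedged usage note (added; the command names are assumptions derived from the
# `app` task collection above): after `vagrant up`, the registered tasks are
# typically listed and run with Fabric, e.g.
#   fab --list
#   fab app.deploy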
|
sdague/home-assistant
|
homeassistant/components/simulated/sensor.py
|
Python
|
apache-2.0
| 4,535
| 0.000441
|
"""Adds a simulated sensor."""
from datetime import datetime
import math
from random import Random
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
import homeassistant.util.dt as dt_util
CONF_AMP = "amplitude"
CONF_FWHM = "spread"
CONF_MEAN = "mean"
CONF_PERIOD = "period"
CONF_PHASE = "phase"
CONF_SEED = "seed"
CONF_UNIT = "unit"
CONF_RELATIVE_TO_EPOCH = "relative_to_epoch"
DEFAULT_AMP = 1
DEFAULT_FWHM = 0
DEFAULT_MEAN = 0
DEFAULT_NAME = "simulated"
DEFAULT_PERIOD = 60
DEFAULT_PHASE = 0
DEFAULT_SEED = 999
DEFAULT_UNIT = "value"
DEFAULT_RELATIVE_TO_EPOCH = True
ICON = "mdi:chart-line"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_AMP, default=DEFAULT_AMP): vol.Coerce(float),
vol.Optional(CONF_FWHM, default=DEFAULT_FWHM): vol.Coerce(float),
vol.Optional(CONF_MEAN, default=DEFAULT_MEAN): vol.Coerce(float),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PERIOD, default=DEFAULT_PERIOD): cv.positive_int,
vol.Optional(CONF_PHASE, default=DEFAULT_PHASE): vol.Coerce(float),
vol.Optional(CONF_SEED, default=DEFAULT_SEED): cv.positive_int,
vol.Optional(CONF_UNIT, default=DEFAULT_UNIT): cv.string,
vol.Optional(
CONF_RELATIVE_TO_EPOCH, default=DEFAULT_RELATIVE_TO_EPOCH
): cv.boolean,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the simulated sensor."""
name = config.get(CONF_NAME)
unit = config.get(CONF_UNIT)
amp = config.get(CONF_AMP)
mean = config.get(CONF_MEAN)
period = config.get(CONF_PERIOD)
phase = config.get(CONF_PHASE)
fwhm = config.get(CONF_FWHM)
seed = config.get(CONF_SEED)
relative_to_epoch = config.get(CONF_RELATIVE_TO_EPOCH)
sensor = SimulatedSensor(
name, unit, amp, mean, period, phase, fwhm, seed, relative_to_epoch
)
add_entities([sensor], True)
class SimulatedSensor(Entity):
"""Class for simulate
|
d sensor."""
def __init__(
self, name, unit, amp, mean, period, phase, fwhm, seed, relative_to_epoch
):
"""Init the class."""
self._name = name
self._unit = unit
self._amp = amp
self._mean = mean
self._period = period
self._phase = phase # phase in degrees
self._fwhm = fwhm
        self._seed = seed
self._random = Random(seed) # A local seeded Random
self._start_time = (
datetime(1970, 1, 1, tzinfo=dt_util.UTC)
if relative_to_epoch
else dt_util.utcnow()
)
self._relative_to_epoch = relative_to_epoch
self._state = None
def time_delta(self):
"""Return the time delta."""
dt0 = self._start_time
dt1 = dt_util.utcnow()
return dt1 - dt0
def signal_calc(self):
"""Calculate the signal."""
mean = self._mean
amp = self._amp
        time_delta = self.time_delta().total_seconds() * 1e6  # to microseconds
        period = self._period * 1e6  # to microseconds
fwhm = self._fwhm / 2
phase = math.radians(self._phase)
if period == 0:
periodic = 0
else:
periodic = amp * (math.sin((2 * math.pi * time_delta / period) + phase))
noise = self._random.gauss(mu=0, sigma=fwhm)
return round(mean + periodic + noise, 3)
async def async_update(self):
"""Update the sensor."""
self._state = self.signal_calc()
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return ICON
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return self._unit
@property
def device_state_attributes(self):
"""Return other details about the sensor state."""
return {
"amplitude": self._amp,
"mean": self._mean,
"period": self._period,
"phase": self._phase,
"spread": self._fwhm,
"seed": self._seed,
"relative_to_epoch": self._relative_to_epoch,
}
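# Hedged configuration sketch (added; the values are arbitrary examples that
# match the PLATFORM_SCHEMA above):
#
#   sensor:
#     - platform: simulated
#       name: simulated_sine
#       amplitude: 2
#       period: 90
#       spread: 0.2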
|
zamattiac/SHARE
|
providers/org/biorxiv/migrations/0001_initial.py
|
Python
|
apache-2.0
| 658
| 0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-08 15:45
from __future__ import unicode_literals
from django.db import migrations
import share.robot
class Migration(migrations.Migration):
dependencies = [
('share', '0001_initial'),
('djcelery', '0001_initial'),
]
operations = [
migrations.RunPython(
code=share.robot.RobotUserMigration('org.biorxiv'),
),
migrations.RunPython(
code=share.robot.RobotOauthTokenMigration('org.biorxiv'),
),
migrations.RunPython(
            code=share.robot.RobotScheduleMigration('org.biorxiv'),
),
]
|
morpheby/ist303-miye
|
client/__init__.py
|
Python
|
gpl-3.0
| 755
| 0.005298
|
"""
__init__.py
ist303-miye
Copyright (C) 2017
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place, Suite 330, Boston, MA 02111-1307 USA
"""
from .cwebview import *
|
informatics-isi-edu/synspy
|
hook-vispy.py
|
Python
|
bsd-3-clause
| 111
| 0
|
from PyInstaller.utils.hooks import collect_submodules, collect_data_files
datas = collect_data_files('vispy')
|
rasikapohankar/zeroclickinfo-fathead
|
lib/fathead/react_native/parse.py
|
Python
|
apache-2.0
| 15,135
| 0.004361
|
# -*- coding: utf-8 -*-
import os
import csv
from bs4 import BeautifulSoup
INFO = {'download_path': 'download/docs',
'doc_base_url':
'https://facebook.github.io/react-native/releases/0.40/docs{}',
'out_file': 'output.txt'}
HOME_LINK= 'http://facebook.github.io/react-native/docs/getting-started.html'
"""
This design is based on the python fathead
(zeroclickinfo-fathead/lib/fathead/python)
"""
class Data(object):
"""
Object responsible for loading raw HTML docs:
"""
def __init__(self, file):
"""
Initialize PythonData object. Load data from HTML.
"""
self.HTML = ""
self.FILE = file
self.load_data()
def load_data(self):
"""
Open the HTML file and load it into the object.
"""
with open(self.FILE, 'r') as data_file:
self.HTML = data_file.read()
def get_raw_data(self):
"""
Returns: The raw HTML that was loaded.
"""
return self.HTML
def get_file(self):
"""
Returns: The file path of the file being used.
"""
return self.FILE
class DataParser(object):
"""
Object responsible for parsing the raw HTML that contains data
"""
def __init__(self, data_object, info):
self.parsed_data = None
self.prop_sections = []
self.method_sections = []
self.intro_text = ''
self.title = ''
self.info = info
self.file_being_used = data_object.get_file()
soup_data = BeautifulSoup(data_object.get_raw_data(), 'html.parser')
self.title = soup_data.title.text
# Extract intro text
first_paragraph=soup_data.h1.find_next('p')
        # There is an intro text for the whole component only if there is no
        # h2 before the first paragraph
if soup_data.h1.find_next('p').find_previous('h2') is None:
self.intro_text += self._format_output(first_paragraph.text)
prop_div=soup_data.find('div', {'class': 'props'})
if prop_div:
self.prop_sections=prop_div.find_all('div')
# Methods come after a h3 with the text "Methods"
for h3 in soup_data.find_all('h3'):
if h3.text=="Methods #":
props=h3.parent.find('div', {'class': 'props'})
self.method_sections=props.find_all('div')
def parse_for_prop_name(self, section):
"""
Returns the function name
Args:
section: A section of parsed HTML that represents a function
definition
Returns:
Name of function
"""
prop_name_h4 = section.find('h4', {'class': 'propTitle'})
# The h4 prop section is consisting of the elements:
# <a class="anchor"> (Anchor-link),
# (optional) <span class="platform"> (platform span element),
# the name of the prop as clear text,
# <a class="hash-link"> (hash link)
link_to_general_props="View props... #"
if prop_name_h4 and prop_name_h4.text != link_to_general_props:
prop_name=prop_name_h4.next.next
if prop_name_h4.find('span', {'class': 'platform'}):
prop_name=prop_name_h4.find(
'span', {'class': 'platform'}).next.next
if not isinstance(prop_name, str):
# The prop_name is not a bs4.element.NavigableString
# It is probably a "ScrollView props..." link or something else
# that does not conform to the general format of the docs.
return None
return prop_name
def parse_for_first_paragraph(self, section):
"""
Returns the first paragraph of text for a given function
Fixes up some weird double spacing and newlines.
Args:
section: A section of parsed HTML that represents a function
definition
Returns:
First paragraph found with text
"""
paragraphs = section.find_all('p')
for paragraph in paragraphs:
if paragraph.text:
return self._format_output(paragraph.text)
return ''
def parse_for_anchor(self, section):
"""
Returns the anchor link to specific function doc
Args:
section: A section of parsed HTML that represents a function
definition
Returns:
The href value of the link to doc
"""
a_tag = section.find('a', {'class': 'anchor'})
if a_tag:
return a_tag['name']
return ''
def parse_for_signature(self, section, titleName):
"""
Returns the signature
Args:
section: A section of parsed HTML that represents a definition of
a property or method
Returns:
The signature
"""
h4 = section.find('h4', {'class': titleName})
contents=[]
for e in h4.strings:
contents.append(e)
# Remove the last item (and the preceding space), it is a hash link
del contents[-1]
del contents[-1]
# If platform is present, remove it - relevant for Properties
if h4.find('span', {'class': 'platform'}):
del contents[0]
# If there are two spans with class methodType, the first is not wanted,
# because it is "static".
# Relevant for methods section
if len(h4.find_all('span', {'class': 'methodType'})) > 1:
del contents[0]
if contents:
signature=''
for el in contents:
signature+=el
return '<pre><code>{}</code></pre>'.format(
self._format_output(signature))
return ''
def parse_for_method_name(self, section):
"""
Returns the name of a method
Args:
section: A section of parsed HTML that represents a method definition
Returns:
The method name
"""
method_name_h4 = section.find('h4', {'class': 'methodTitle'})
# The h4 method name section is consisting of the elements:
# <a class="anchor"> (Anchor-link),
# <span class="methodType"> (method type span element),
# the name of the prop as clear text,
# <span class="methodType"> (method signature span element),
# <a class="hash-link"> (hash link)
if method_name_h4:
method_name=method_name_h4.next.next
nbr_of_methodType_tags_in_h4=len(method_name_h4.find_all(
'span', {'class': 'methodType'}))
if nbr_of_methodType_tags_in_h4 > 1:
method_name=method_name_h4.find(
'span', {'class': 'methodType'}).next.next
return method_name
def create_url(self, anchor):
"""
Helper method to create URL back to document
Args:
anchor: #anchor
Returns:
Full URL to function on the python doc
"""
file_path = self.file_being_used.replace(self.info['download_path'], '')
return self.info['doc_base_url'].format(
'{}#{}'.format(file_path, anchor))
def parse_for_data(self):
"""
Main gateway into parsing the data. Will retrieve all necessary data
elements.
"""
data = []
if self.intro_text and self.title:
            data_elements = {
'module': self.title,
'function': '',
'method_signature': '',
'first_paragraph': self.intro_text,
'url': self.create_url('')
}
data.append(data_elements)
titleName='propTitle'
for prop_section in self.prop_sections:
prop_name = self.parse_for_prop_name(prop_section)
if prop_name:
                prop_signature = self.parse_for_signature(prop_section, titleName)
|
W0mpRat/WebDev03
|
UdacityFrameWork.py
|
Python
|
unlicense
| 781
| 0.002561
|
__author__ = 'canderson'
import os
import webapp2
import jinja2
from google.appengine.ext import db
template_dir = os.path.join(os.path.dirname(__file__), 'templates')
env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir),
autoescape=True)
class Handler(webapp2.RequestHandler):
def write(self, *a, **kw):
self.response.out.write(*a, **kw)
def render_str(self, template, **params):
        t = env.get_template(template)
return t.render(params)
def render(self, template, **kw):
self.write(self.render_str(template, **kw))
class MainPage(Handler):
def get(self):
#self.write("asciichan!")
self.render('form.html')
app = webapp2.WSGIApplication([('/', MainPage)], debug=True)
|
persandstrom/home-assistant
|
tests/components/switch/test_template.py
|
Python
|
apache-2.0
| 16,305
| 0
|
"""The tests for the Template switch platform."""
from homeassistant.core import callback
from homeassistant import setup
import homeassistant.components as core
from homeassistant.const import STATE_ON, STATE_OFF
from tests.common import (
get_test_home_assistant, assert_setup_component)
class TestTemplateSwitch:
"""Test the Template switch."""
hass = None
calls = None
# pylint: disable=invalid-name
def setup_method(self, method):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.calls = []
@callback
def record_call(service):
"""Track function calls.."""
self.calls.append(service)
self.hass.services.register('test', 'automation', record_call)
def teardown_method(self, method):
"""Stop everything that was started."""
self.hass.stop()
def test_template_state_text(self):
"""Test the state text of a template."""
with assert_setup_component(1, 'switch'):
assert setup.setup_component(self.hass, 'switch', {
'switch': {
'platform': 'template',
'switches': {
'test_template_switch': {
'value_template':
"{{ states.switch.test_state.state }}",
'turn_on': {
'service': 'switch.turn_on',
'entity_id': 'switch.test_state'
},
'turn_off': {
'service': 'switch.turn_off',
'entity_id': 'switch.test_state'
},
}
}
}
})
self.hass.start()
self.hass.block_till_done()
state = self.hass.states.set('switch.test_state', STATE_ON)
self.hass.block_till_done()
state = self.hass.states.get('switch.test_template_switch')
        assert state.state == STATE_ON
state = self.hass.states.set('switch.test_state', STATE_OFF)
self.hass.block_till_done()
state = self.hass.states.get('switch.test_template_switch')
assert state.state == STATE_OFF
def test_template_state_boolean_on(self):
"""Test the setting of the state with boolean on."""
        with assert_setup_component(1, 'switch'):
assert setup.setup_component(self.hass, 'switch', {
'switch': {
'platform': 'template',
'switches': {
'test_template_switch': {
'value_template':
"{{ 1 == 1 }}",
'turn_on': {
'service': 'switch.turn_on',
'entity_id': 'switch.test_state'
},
'turn_off': {
'service': 'switch.turn_off',
'entity_id': 'switch.test_state'
},
}
}
}
})
self.hass.start()
self.hass.block_till_done()
state = self.hass.states.get('switch.test_template_switch')
assert state.state == STATE_ON
def test_template_state_boolean_off(self):
"""Test the setting of the state with off."""
with assert_setup_component(1, 'switch'):
assert setup.setup_component(self.hass, 'switch', {
'switch': {
'platform': 'template',
'switches': {
'test_template_switch': {
'value_template':
"{{ 1 == 2 }}",
'turn_on': {
'service': 'switch.turn_on',
'entity_id': 'switch.test_state'
},
'turn_off': {
'service': 'switch.turn_off',
'entity_id': 'switch.test_state'
},
}
}
}
})
self.hass.start()
self.hass.block_till_done()
state = self.hass.states.get('switch.test_template_switch')
assert state.state == STATE_OFF
def test_icon_template(self):
"""Test icon template."""
with assert_setup_component(1, 'switch'):
assert setup.setup_component(self.hass, 'switch', {
'switch': {
'platform': 'template',
'switches': {
'test_template_switch': {
'value_template':
"{{ states.switch.test_state.state }}",
'turn_on': {
'service': 'switch.turn_on',
'entity_id': 'switch.test_state'
},
'turn_off': {
'service': 'switch.turn_off',
'entity_id': 'switch.test_state'
},
'icon_template':
"{% if states.switch.test_state.state %}"
"mdi:check"
"{% endif %}"
}
}
}
})
self.hass.start()
self.hass.block_till_done()
state = self.hass.states.get('switch.test_template_switch')
assert state.attributes.get('icon') == ''
state = self.hass.states.set('switch.test_state', STATE_ON)
self.hass.block_till_done()
state = self.hass.states.get('switch.test_template_switch')
assert state.attributes['icon'] == 'mdi:check'
def test_entity_picture_template(self):
"""Test entity_picture template."""
with assert_setup_component(1, 'switch'):
assert setup.setup_component(self.hass, 'switch', {
'switch': {
'platform': 'template',
'switches': {
'test_template_switch': {
'value_template':
"{{ states.switch.test_state.state }}",
'turn_on': {
'service': 'switch.turn_on',
'entity_id': 'switch.test_state'
},
'turn_off': {
'service': 'switch.turn_off',
'entity_id': 'switch.test_state'
},
'entity_picture_template':
"{% if states.switch.test_state.state %}"
"/local/switch.png"
"{% endif %}"
}
}
}
})
self.hass.start()
self.hass.block_till_done()
state = self.hass.states.get('switch.test_template_switch')
assert state.attributes.get('entity_picture') == ''
state = self.hass.states.set('switch.test_state', STATE_ON)
self.hass.block_till_done()
state = self.hass.states.get('switch.test_template_switch')
assert state.attributes['entity_picture'] == '/local/switch.png'
def test_template_syntax_error(self):
"""Test templating syntax error."""
with assert_setup_component(0, 'switch'):
assert setup.setup_component(self.hass, 'switch', {
'switch': {
'platform': 'template',
'switches': {
'test_template_switch': {
'value_templat
|
fabteam1/komsukomsuhuhu
|
komsukomsuhuu/komsukomsuhuu/celery.py
|
Python
|
mit
| 655
| 0.003053
|
from __future__ import absolute_import
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'komsukomsuhuu.settings')
from django.conf import settings
app = Celery('komsukomsuhuu',
broker='amqp://',
backend='amqp://',
)
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
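# Hedged usage sketch (added): from Django code the example task can be queued
# asynchronously via its .delay() shortcut, e.g.
#   from komsukomsuhuu.celery import debug_task
#   debug_task.delay()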
|
Bodidze/21v-python
|
unit_01/15.py
|
Python
|
mit
| 114
| 0.026316
|
#!/usr/bin/python
# the sum of two elements defines the next
a, b = 0, 1
while b < 10:
print b
a, b = b, a + b
|
Arcanemagus/SickRage
|
sickbeard/providers/newpct.py
|
Python
|
gpl-3.0
| 10,924
| 0.00403
|
# coding=utf-8
# Author: CristianBB
# Greetings to Mr. Pine-apple
# URL: https://sick-rage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function, unicode_literals
import re
from requests.compat import urljoin
from sickbeard import helpers, logger, tvcache
from sickbeard.bs4_parser import BS4Parser
from sickrage.helper.common import convert_size
from sickrage.providers.torrent.TorrentProvider import TorrentProvider
class newpctProvider(TorrentProvider):
def __init__(self):
TorrentProvider.__init__(self, 'Newpct')
self.onlyspasearch = None
self.url = 'http://www.newpct.com'
self.urls = {'search': urljoin(self.url, 'index.php')}
self.cache = tvcache.TVCache(self, min_time=20)
def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals
"""
Search query:
http://www.newpct.com/index.php?l=doSearch&q=fringe&category_=All&idioma_=1&bus_de_=All
q => Show name
category_ = Category 'Shows' (767)
idioma_ = Language Spanish (1), All
bus_de_ = Date from (All, mes, semana, ayer, hoy)
"""
results = []
# Only search if user conditions are true
lang_info = '' if not ep_obj or not ep_obj.show else ep_obj.show.lang
search_params = {
'l': 'doSearch',
'q': '',
'category_': 'All',
'idioma_': 1,
'bus_de_': 'All'
}
for mode in search_strings:
items = []
logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
if self.onlyspasearch:
search_params['idioma_'] = 1
else:
search_params['idioma_'] = 'All'
# Only search if user conditions are true
if self.onlyspasearch and lang_info != 'es' and mode != 'RSS':
logger.log('Show info is not spanish, skipping provider search', logger.DEBUG)
continue
search_params['bus_de_'] = 'All' if mode != 'RSS' else 'semana'
for search_string in search_strings[mode]:
if mode != 'RSS':
logger.log('Search string: {0}'.format
(search_string.decode('utf-8')), logger.DEBUG)
search_params['q'] = search_string
data = self.get_url(self.urls['search'], params=search_params, returns='text')
if not data:
continue
with BS4Parser(data, 'html5lib') as html:
torrent_table = html.find('table', id='categoryTable')
torrent_rows = torrent_table('tr') if torrent_table else []
# Continue only if at least one Release is found
if len(torrent_rows) < 3: # Headers + 1 Torrent + Pagination
logger.log('Data returned from provider does not contain any torrents', logger.DEBUG)
continue
# 'Fecha', 'Título', 'Tamaño', ''
# Date, Title, Size
labels = [label.get_text(strip=True) for label in torrent_rows[0]('th')]
for row in torrent_rows[1:-1]:
try:
cells = row('td')
                            torrent_row = row.find('a')
download_url = torrent_row.get('href', '')
title = self._processTitle(torrent_row.get('title', ''), download_url)
if not all([title, download_url]):
continue
# Provider does not provide seeders/leechers
seeders = 1
leechers = 0
#2 is the 'Tamaño' column.
torrent_size = cells[2].get_text(strip=True)
size = convert_size(torrent_size) or -1
item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': ''}
if mode != 'RSS':
logger.log('Found result: {0}'.format(title), logger.DEBUG)
items.append(item)
except (AttributeError, TypeError):
continue
results += items
return results
def get_url(self, url, post_data=None, params=None, timeout=30, **kwargs): # pylint: disable=too-many-arguments
"""
        returns='content' is used when trying to access torrent info (i.e. when calling the
        torrent client). Beforehand we must parse the URL to get the torrent file.
"""
trickery = kwargs.pop('returns', '')
if trickery == 'content':
kwargs['returns'] = 'text'
data = super(newpctProvider, self).get_url(url, post_data=post_data, params=params, timeout=timeout, **kwargs)
url = re.search(r'http://tumejorserie.com/descargar/.+\.torrent', data, re.DOTALL).group()
url = urljoin(self.url, url.rsplit('=', 1)[-1])
kwargs['returns'] = trickery
return super(newpctProvider, self).get_url(url, post_data=post_data, params=params,
timeout=timeout, **kwargs)
def download_result(self, result):
"""
Save the result to disk.
"""
# check for auth
if not self.login():
return False
urls, filename = self._make_url(result)
for url in urls:
# Search results don't return torrent files directly, it returns show sheets so we must parse showSheet to access torrent.
data = self.get_url(url, returns='text')
url_torrent = re.search(r'http://tumejorserie.com/descargar/.+\.torrent', data, re.DOTALL).group()
if url_torrent.startswith('http'):
self.headers.update({'Referer': '/'.join(url_torrent.split('/')[:3]) + '/'})
logger.log('Downloading a result from {0}'.format(url))
if helpers.download_file(url_torrent, filename, session=self.session, headers=self.headers):
if self._verify_download(filename):
logger.log('Saved result to {0}'.format(filename), logger.INFO)
return True
else:
logger.log('Could not download {0}'.format(url), logger.WARNING)
helpers.remove_file_failed(filename)
if urls:
logger.log('Failed to download any results', logger.WARNING)
return False
@staticmethod
def _processTitle(title, url):
# Remove 'Mas informacion sobre ' literal from title
title = title[22:]
title = re.sub(r'[ ]{2,}', ' ', title, flags=re.I)
# Quality - Use re module to avoid case sensitive problems with replace
title = re.sub(r'\[HDTV 1080p?[^\[]*]', '1080p HDTV x264', title, flags=re.I)
title = re.sub(r'\[HDTV 720p?[^\[]*]', '720p HDTV x264', title, flags=re.I)
title = re.sub(r'\[ALTA DEFINICION 720p?[^\[]*]', '720p HDTV x264', title, flags=re.I)
title = re.sub(r'\[HDTV]', 'HDTV x264', title, flags=re.I)
title = re.sub(r'\[DVD[^\[]*]', 'DVDrip x264', title, flags=re.I)
title = re.sub(r'\[BluRay 1080p?[^\[]*]', '1080p BluRay x264', title, flags=re.I)
        title = re.sub(r'\[BluRay Rip 1080p?[^\[]*]', '1080p BluRay x264', title, flags=re.I)
|
Aravinthu/odoo
|
addons/board/models/board.py
|
Python
|
agpl-3.0
| 1,665
| 0.002402
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models
from odoo.tools import pycompat
class Board(models.AbstractModel):
_name = 'board.board'
_description = "Board"
_auto = False
@api.model
def create(self, vals):
return self
@api.model
def fields_view_get(self, view_id=None, view_type='form', toolbar=False, submenu=False):
"""
        Overrides orm field_view_get.
@return: Dictionary of Fields, arch and toolbar.
"""
res = super(Board, self).fields_view_get(view_id=view_id, view_type=view_type, toolbar=toolbar, submenu=submenu)
custom_view = self.env['ir.ui.view.custom'].search([('user_id', '=', self.env.uid), ('ref_id', '=', view_id)], limit=1)
if custom_view:
res.update({'custom_view_id': custom_view.id,
'arch': custom_view.arch})
res.update({
'arch': self._arch_preprocessing(res['arch']),
'toolbar': {'print': [], 'action': [], 'relate': []}
})
return res
@api.model
def _arch_preprocessing(self, arch):
from lxml import etree
def remove_unauthorized_children(node):
for child in node.iterchildren():
if child.tag == 'action' and child.get('invisible'):
node.remove(child)
else:
remove_unauthorized_children(child)
return node
archnode = etree.fromstring(arch)
return etree.tostring(remove_unauthorized_children(archnode), pretty_print=True, encoding='unicode')
|
rmhyman/DataScience
|
Lesson1/titanic_data_heuristic1.py
|
Python
|
mit
| 2,937
| 0.007831
|
import numpy
import pandas
import statsmodels.api as sm
'''
In this exercise, we will perform some rudimentary practices similar to those of
an actual data scientist.
Part of a data scientist's job is to use her or his intuition and insight to
write algorithms and heuristics. A data scientist also creates mathematical models
to make predictions based on some attributes from the data that they are examining.
We would like for you to take your knowledge and intuition about the Titanic
and its passengers' attributes to predict whether or not the passengers survived
or perished. You can read more about the Titanic and specifics about this dataset at:
http://en.wikipedia.org/wiki/RMS_Titanic
http://www.kaggle.com/c/titanic-gettingStarted
In this exercise and the following ones, you are given a list of Titanic passengers
and their associated information. More information about the data can be seen at the
link below:
http://www.kaggle.com/c/titanic-gettingStarted/data.
For this exercise, you need to write a simple heuristic that will use
the passengers' gender to predict if that person survived the Titanic disaster.
Your prediction should be 78% accurate or higher.
Here's a simple heuristic to start off:
1) If the passenger is female, your heuristic should assume that the
passenger survived.
    2) If the passenger is male, your heuristic should
assume that the passenger did not survive.
You can access the gender of a passenger via passenger['Sex'].
If the passenger is male, passenger['Sex'] will return a string "male".
If the passenger is female, passenger['Sex'] will return a string "female".
Write your prediction back into the "predictions" dictionary. The
key of the dictionary should be the passenger's id (which can be accessed
via passenger["PassengerId"]) and the associated value should be 1 if the
passenger survived or 0 otherwise.
For example, if a passenger is predicted to have survived:
passenger_id = passenger['PassengerId']
predictions[passenger_id] = 1
And if a passenger is predicted to have perished in the disaster:
passenger_id = passenger['PassengerId']
predictions[passenger_id] = 0
You can also look at the Titanic data that you will be working with
at the link below:
https://www.dropbox.com/s/r5f9aos8p9ri9sa/titanic_data.csv
'''
def simple_heuristic(file_path):
predictions = {}
df = pandas.read_csv(file_path)
for passenger_index, passenger in df.iterrows():
passenger_id = passenger['PassengerId']
if passenger['Sex'] == 'female':
predictions[passenger_id] = 1
else:
predictions[passenger_id] = 0
#print predictions
return predictions
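if __name__ == '__main__':
    # Hedged usage sketch (added; the CSV path is a hypothetical local copy of
    # the Kaggle dataset linked above).
    predictions = simple_heuristic('titanic_data.csv')
    print len(predictions), 'predictions made'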
|
goal/uwsgi
|
plugins/logcrypto/uwsgiplugin.py
|
Python
|
gpl-2.0
| 80
| 0
|
NAME = 'logcrypto'
CFLAGS = []
LDFLAGS = []
LIBS = []
GCC_LIST = ['logcrypto']
|
pantaray/Analytic-Tools
|
sde_solvers.py
|
Python
|
gpl-3.0
| 29,346
| 0.015743
|
# sde_solvers.py - Collection of numerical methods to solve (vector-valued) SDEs
#
# Author: Stefan Fuertinger [[email protected]]
# Created: February 19 2014
# Last modified: <2017-09-15 11:31:25>
from __future__ import division
import numpy as np
from scipy.stats import norm
def rk_1(func,x0,tsteps,**kwargs):
r"""
Explicit first order (strong and weak) Runge--Kutta method for SDEs with additive/multiplicative (non-)autonomous scalar noise
Parameters
----------
func : callable (X,t,**kwargs)
Returns drift `A` and diffusion `B` of the SDE. See Examples for details.
x0 : NumPy 1darray
Initial condition
tsteps : NumPy 1darray
Sequence of time points for which to solve (including initial time `t0`)
**kwargs : keyword arguments
Additional keyword arguments to be passed on to `func`. See `Examples` for details.
Returns
-------
Y : NumPy 2darray
Approximate solution at timepoints given by `tsteps`. Format is
`Y[:,tk]` approximate solution at time `tk`
Thus `Y` is a `numstate`-by-`timesteps` array
Notes
-----
The general form of an SDE with additive/multiplicative (non-)autonomous scalar noise is
.. math:: (1) \qquad dX_t = A(X_t,t)dt + B(X_t,t)dW_t, \quad X(t_0) = x_0
The method for solving the SDE (1) is described in Sec. 11.1 of
Kloeden, P.E., & Platen, E. (1999). `Numerical Solution of Stochastic Differential Equations.`
Berlin: Springer.
Examples
--------
Consider the SDE system
.. math::
dV_t & = - \alpha t V_t + t Z_t \beta dW_t,\\
dZ_t & = \alpha t Z_t + t V_t \gamma dW_t,\\
V_{t_0} & = 0.5, \quad Z_{t_0} = -0.5, \quad t_0 = 1,
thus with :math:`X_t = (V_t,Z_t)` we have
.. math::
A(X_t,t) & = (-\alpha t V_t,\alpha t Z_t),\\
B(t) & = (t Z_t \beta,t V_t \gamma).
Hence `func` would look like this:
::
import numpy as np
def myrhs(Xt,t,alpha=0.2,beta=0.01,gamma=0.02):
A = np.array([-alpha*t*Xt[0],alpha*t*Xt[1]])
B = np.array([t*Xt[1]*beta,t*Xt[0]*gamma])
return A,B
Thus, the full call to `rk_1` to approximate the SDE system on :math:`[t_0,2]` could be
something like (assuming the function `myrhs` is defined in `myrhs.py`)
>>> import numpy as np
>>> from sde_solvers import rk_1
>>> from myrhs import myrhs
>>> Xt = rk_1(myrhs,np.array([0.5,-0.5]),np.arange(1,2,1e-3),beta=.02)
Hence we used :math:`\beta = 0.02` in `myrhs` instead of the default value 0.01.
See also
--------
pc_1 : an implicit first order strong Runge--Kutta method
(it uses a strong order 0.5 Euler--Maruyama method as predictor and an implicit Runge--Kutta
update formula as corrector) for stiff SDEs
"""
# Check for correctness of input and allocate common tmp variables
Y,dt,sqrtdt,zeta1,zeta2 = checkinput(func,x0,tsteps)
# Generate i.i.d. normal random variables with mean=0 (loc) and std=sqrt(delta) (scale) (Var=std^2)
DW = zeta1*sqrtdt
# Compute solution recursively
for n in xrange(tsteps.size - 1):
# Get drift/diffusion from func
t = tsteps[n]
A, B = func(Y[:,n], t,**kwargs)
BGamma = func(Y[:,n] + A*dt + B*sqrtdt, t,**kwargs)[1]
# Compute solution at next time point
Y[:,n+1] = Y[:,n] + A*dt + B*DW[n] + 0.5*(BGamma - B)*(DW[n]**2 - dt)*sqrtdt**(-1)
return Y
def pc_1(func,x0,tsteps,**kwargs):
r"""
    Predictor-Corrector solver based on an implicit first order (strong and weak) Runge--Kutta method for SDEs with additive/multiplicative (non-)autonomous scalar noise
Parameters
----------
func : callable (X,t,**kwargs)
Returns drift `A` and diffusion `B` of the SDE. See Examples for details.
x0 : NumPy 1darray
Initial condition
tsteps : NumPy 1darray
        Sequence of time points for which to solve (including initial time `t0`)
**kwargs : keyword arguments
Additional keyword arguments to be passed on to `func`. See `Examples` for details.
Returns
-------
Y : NumPy 2darray
Approximate solution at timepoints given by `tsteps`. Format is
`Y[:,tk]` approximate solution at time `tk`
Thus `Y` is a `numstate`-by-`timesteps` array
Notes
-----
The general form of an SDE with additive/multiplicative (non-)autonomous scalar noise is
.. math:: (1) \qquad dX_t = A(X_t,t)dt + B(X_t,t)dW_t, \quad X(t_0) = x_0
The code implements a two-fold approach to approximate solutions of (1). At each time-step
:math:`t_n` an order 0.5 strong Euler--Maruyama method is employed to estimate the solution
at time :math:`t_{n+1}` (predictor). This approximation is then used in the implicit
Runge--Kutta update formula (corrector).
The implicit Runge--Kutta method for solving the SDE (1) is described in Sec. 12.3 of
Kloeden, P.E., & Platen, E. (1999). `Numerical Solution of Stochastic Differential Equations.
Berlin: Springer.` The explicit Euler--Maruyama scheme is detailed in Sec. 9.1 ibid.
Examples
--------
Consider the SDE system
.. math::
dV_t & = - \alpha t V_t + t Z_t \beta dW_t,\\
dZ_t & = \alpha t Z_t + t V_t \gamma dW_t,\\
V_{t_0} & = 0.5, \quad Z_{t_0} = -0.5, \quad t_0 = 1,
thus with :math:`X_t = (V_t,Z_t)` we have
.. math::
A(X_t,t) & = (-\alpha t V_t,\alpha t Z_t),\\
B(t) & = (t Z_t \beta,t V_t \gamma).
Hence `func` would look like this:
::
import numpy as np
def myrhs(Xt,t,alpha=0.2,beta=0.01,gamma=0.02):
A = np.array([-alpha*t*Xt[0],alpha*t*Xt[1]])
B = np.array([t*Xt[1]*beta,t*Xt[0]*gamma])
return A,B
Thus, the full call to `pc_1` to approximate the SDE system on :math:`[t_0,2]` could be
something like (assuming the function `myrhs` is defined in `myrhs.py`)
>>> import numpy as np
>>> from sde_solvers import pc_1
>>> from myrhs import myrhs
>>> Xt = pc_1(myrhs,np.array([0.5,-0.5]),np.arange(1,2,1e-3),beta=.02)
Hence we used :math:`\beta = 0.02` in `myrhs` instead of the default value 0.01.
See also
--------
    pc_15 : an implicit order 1.5 strong Runge--Kutta method (it uses the method of ``rk_15``
as predictor and the corresponding implicit update formula as corrector).
"""
# Check for correctness of input and allocate common tmp variables
Y,dt,sqrtdt,zeta1,zeta2 = checkinput(func,x0,tsteps)
# Generate i.i.d. normal random variables with mean=0 (loc) and std=sqrt(delta) (scale) (Var=std^2)
DW = zeta1*sqrtdt
# Compute solution recursively
for n in xrange(tsteps.size - 1):
# Get drift/diffusion from func
t = tsteps[n]
A, B = func(Y[:,n], t,**kwargs)
BGamma = func(Y[:,n] + A*dt + B*sqrtdt, t,**kwargs)[1]
# Explicit Euler-Maruyama step
yt = Y[:,n] + A*dt + B*DW[n]
# Evaluate function at estimate yt and t_n+1
A1 = func(yt, tsteps[n+1],**kwargs)[0]
# Compute solution at next time point
Y[:,n+1] = Y[:,n] + A1*dt + B*DW[n] + 0.5*(BGamma - B)*(DW[n]**2 - dt)*sqrtdt**(-1)
return Y
def rk_15(func,x0,tsteps,**kwargs):
r"""
Explicit order 1.5 strong Runge--Kutta method for SDEs with additive (non-)autonomous scalar noise
Parameters
----------
func : callable (X,t,**kwargs)
Returns drift `A` and diffusion `B` of the SDE. See Examples for details.
x0 : NumPy 1darray
Initial condition
tsteps : NumPy 1darray
Sequence of time points for which to solve (including initial time `t0`)
**kwargs : keyword arguments
Additional keyword arguments to be passed on to `func`. See `Examples` for details.
    Returns
    -------
|
ryfeus/lambda-packs
|
pytorch/source/caffe2/python/operator_test/ngram_ops_test.py
|
Python
|
mit
| 2,472
| 0.000405
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import hypothesis.strategies as st
from caffe2.python import core, workspace
from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
import numpy as np
class TestNGramOps(hu.HypothesisTestCase):
@given(
seed=st.integers(0, 2**32 - 1),
N=st.integers(min_value=10, max_value=100),
D=st.integers(min_value=2, max_value=10),
out_of_vcb=st.floats(min_value=0, max_value=0.5),
max_categorical_limit=st.integers(min_value=5, max_value=20),
max_in_vcb_val=st.integers(min_value=1000, max_value=10000),
**hu.gcs_cpu_only
)
def test_ngram_from_categorical_op(
self,
seed,
N,
D,
out_of_vcb,
max_categorical_limit,
max_in_vcb_val,
gc,
dc,
):
np.random.seed(seed)
col_num = max(int(D / 2), 1)
col_ids = np.random.choice(D, col_num, False).astype(np.int32)
categorical_limits = np.random.randint(
2, high=max_categorical_limit, size=col_num
).astype(np.int32)
vcb = [
np.random.choice(max_in_vcb_val, x, False)
for x in categorical_limits
]
vals = np.array([x for l in vcb for x in l], dtype=np.int32)
# Draw floats in [-2, -1) so that round(floats) is always negative.
floats = np.random.rand(N, D).astype(np.float32) - 2
expected_output = []
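# Build the reference n-gram ids by hand: a mixed-radix encoding in which
# column k contributes r * prod(categorical_limits[:k]) whenever the drawn
# value stays in the vocabulary.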
for i in range(N):
val = 0
for (k, j) in enumerate(col_ids):
base = np.prod(categorical_limits[:k])
r = np.random.randint(categorical_limits[k])
p = np.random.rand()
if p > out_of_vcb:
val += base * r
floats[i][j] = vcb[k][r]
expected_output.append(val)
expected_output = np.array(expected_output, dtype=np.int32)
workspace.ResetWorkspace()
workspace.FeedBlob('floats', floats)
op = core.CreateOperator(
"NGramFromCategorical",
['floats'],
['output'],
col_ids=col_ids,
categorical_limits=categorical_limits,
vals=vals,
)
workspace.RunOperatorOnce(op)
output = workspace.blobs['output']
np.testing.assert_array_equal(output, expected_output)
|
kapt/django-oscar
|
runtests.py
|
Python
|
bsd-3-clause
| 3,114
| 0.000321
|
#!/usr/bin/env python
"""
Custom test runner
If no args or options are given, we run the testsuite as quickly as possible.
If args but no options, we default to using the spec plugin and aborting on
first error/failure.
If options, we ignore defaults and pass options onto Nose.
Examples:
Run all tests (as fast as possible)
$ ./runtests.py
Run all unit tests (using spec output)
$ ./runtests.py tests/unit
Run all checkout unit tests (using spec output)
$ ./runtests.py tests/unit/checkout
Run all tests relating to shipping
$ ./runtests.py --attr=shipping
Re-run failing tests (needs to be run twice to first build the index)
$ ./runtests.py ... --failed
Drop into pdb when a test fails
$ ./runtests.py ... --pdb-failures
"""
import sys
import logging
import warnings
from tests.config import configure
from django.utils.six.moves import map
# No logging
logging.disable(logging.CRITICAL)
def run_tests(verbosity, *test_args):
from django_nose import NoseTestSuiteRunner
test_runner = NoseTestSuiteRunner(verbosity=verbosity)
if not test_args:
test_args = ['tests']
num_failures = test_runner.run_tests(test_args)
if num_failures:
sys.exit(num_failures)
if __name__ == '__main__':
args = sys.argv[1:]
verbosity = 1
if not args:
# If run with no args, try and run the testsuite as fast as possible.
# That means across all cores and with no high-falutin' plugins.
import multiprocessing
try:
num_cores = multiprocessing.cpu_count()
except NotImplementedError:
num_cores = 4 # Guess
args = ['--nocapture', '--stop', '--processes=%s' % num_cores]
else:
# Some args/options specified. Check to see if any nose options have
# been specified. If they have, then don't set any
has_options = any(map(lambda x: x.startswith('--'), args))
if not has_options:
# Default options:
# --stop Abort on first error/failure
# --nocapture Don't capture STDOUT
args.extend(['--nocapture', '--stop'])
else:
# Remove options as nose will pick these up from sys.argv
for arg in args:
if arg.startswith('--verbosity'):
verbosity = int(arg[-1])
args = [arg for arg in args if not arg.startswith('-')]
configure()
with warnings.catch_warnings():
# The warnings module in default configuration will never cause tests
# to fail, as it never raises an exception. We alter that behaviour by
# turning DeprecationWarnings into exceptions, but exclude warnings
# triggered by third-party libs. Note: The context manager is not thread
# safe. Behaviour with multiple threads is undefined.
warnings.filterwarnings('error', category=DeprecationWarning)
warnings.filterwarnings('error', category=RuntimeWarning)
libs = r'(sorl\.thumbnail.*|bs4.*|webtest.*)'
warnings.filterwarnings(
'ignore', r'.*', DeprecationWarning, libs)
run_tests(verbosity, *args)
|
chrisnorman7/gmp3
|
application.py
|
Python
|
mpl-2.0
| 686
| 0
|
"""Application specific storage."""
import wx
from sound_lib.output import Output
from gmusicapi import Mobileclient
name = 'GMP3'
__version__ = '4.3.0'
db_version = 1
url = 'https://github.com/chrisnorman7/gmp3'
app = wx.App(False)
app.SetAppName(name)
paths = wx.StandardPaths.Get()
output = Output()
api = Mobileclient()
api.android_id = '123456789abcde'
frame = None # The main window.
track = None # The current track.
stream = None # The stream of the currently playing track.
library_size = 0 # The size of the library in bytes.
# Prevent the killer bug that makes the timer try and pop up billions of login
# windows:
logging_in = False
|
mferenca/HMS-ecommerce
|
ecommerce/extensions/offer/tests/test_utils.py
|
Python
|
agpl-3.0
| 2,446
| 0.004088
|
from decimal import Decimal
import ddt
from babel.numbers import format_currency
from django.conf import settings
from django.utils.translation import get_language, to_locale
from oscar.core.loading import get_model
from oscar.test.factories import * # pylint:disable=wildcard-import,unused-wildcard-import
from ecommerce.courses.tests.factories import CourseFactory
from ecommerce.extensions.catalogue.tests.mixins import CourseCatalogTestMixin
from ecommerce.extensions.offer.utils import _remove_exponent_and_trailing_zeros, format_benefit_value
from ecommerce.tests.testcases import TestCase
Benefit = get_model('offer', 'Benefit')
@ddt.ddt
class UtilTests(CourseCatalogTestMixin, TestCase):
def setUp(self):
super(UtilTests, self).setUp()
self.course = CourseFactory()
self.verified_seat = self.course.create_or_update_seat('verified', False, 100, self.partner)
self.stock_record = StockRecord.objects.filter(product=self.verified_seat).first()
self.seat_price = self.stock_record.price_excl_tax
self._range = RangeFactory(products=[self.verified_seat, ])
self.percentage_benefit = BenefitFactory(type=Benefit.PERCENTAGE, range=self._range, value=35.00)
self.value_benefit = BenefitFactory(type=Benefit.FIXED, range=self._range, value=self.seat_price - 10)
def test_format_benefit_value(self):
""" format_benefit_value(benefit) should format benefit value based on benefit type """
benefit_value = format_benefit_value(self.percentage_benefit)
self.assertEqual(benefit_value, '35%')
benefit_value = format_benefit_value(self.value_benefit)
expected_benefit = format_currency(
Decimal((self.seat_price - 10)), settings.OSCAR_DEFAULT_CURRENCY, format=u'#,##0.00',
locale=to_locale(get_language()))
self.assertEqual(benefit_value, '${expected_benefit}'.format(expected_benefit=expected_benefit))
@ddt.data(
('1.0', '1'),
('5000.0', '5000'),
('1.45000', '1.45'),
('5000.40000', '5000.4'),
)
@ddt.unpack
def test_remove_exponent_and_trailing_zeros(self, value, expected):
"""
_remove_exponent_and_trailing_zeros(decimal) should remove exponent and trailing zeros
from decimal number
"""
decimal = _remove_exponent_and_trailing_zeros(Decimal(value))
self.assertEqual(decimal, Decimal(expected))
|
indrajitr/ansible
|
lib/ansible/modules/group.py
|
Python
|
gpl-3.0
| 19,765
| 0.001164
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Stephen Fromm <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: group
version_added: "0.0.2"
short_description: Add or remove groups
requirements:
- groupadd
- groupdel
- groupmod
description:
- Manage presence of groups on a host.
- For Windows targets, use the M(win_group) module instead.
options:
name:
description:
- Name of the group to manage.
type: str
required: true
gid:
description:
- Optional I(GID) to set for the group.
type: int
state:
description:
- Whether the group should be present or not on the remote host.
type: str
choices: [ absent, present ]
default: present
system:
description:
- If I(yes), indicates that the group created is a system group.
type: bool
default: no
local:
description:
- Forces the use of "local" command alternatives on platforms that implement it.
- This is useful in environments that use centralized authentication when you want to manipulate the local groups.
(e.g. it uses C(lgroupadd) instead of C(groupadd)).
- This requires that these commands exist on the targeted host, otherwise it will be a fatal error.
type: bool
default: no
version_added: "2.6"
non_unique:
description:
- This option allows changing the group ID to a non-unique value. Requires C(gid).
- Not supported on macOS or BusyBox distributions.
type: bool
default: no
version_added: "2.8"
seealso:
- module: user
- module: win_group
author:
- Stephen Fromm (@sfromm)
'''
EXAMPLES = '''
- name: Ensure group "somegroup" exists
group:
name: somegroup
state: present
- name: Ensure group "docker" exists with correct gid
group:
name: docker
state: present
gid: 1750
'''
RETURN = r'''
gid:
description: Group ID of the group.
returned: When C(state) is 'present'
type: int
sample: 1001
name:
description: Group name
returned: always
type: str
sample: users
state:
description: Whether the group is present or not
returned: always
type: str
sample: 'absent'
system:
description: Whether the group is a system group or not
returned: When C(state) is 'present'
type: bool
sample: False
'''
import grp
import os
from ansible.module_utils._text import to_bytes
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.sys_info import get_platform_subclass
class Group(object):
"""
This is a generic Group manipulation class that is subclassed
based on platform.
A subclass may wish to override the following action methods:-
- group_del()
- group_add()
- group_mod()
All subclasses MUST define platform and distribution (which may be None).
"""
platform = 'Generic'
distribution = None
GROUPFILE = '/etc/group'
def __new__(cls, *args, **kwargs):
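# Dispatch instantiation to the most specific platform subclass (matched
# on the `platform`/`distribution` class attributes); fall back to this
# generic implementation when no subclass matches.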
new_cls = get_platform_subclass(Group)
return super(cls, new_cls).__new__(new_cls)
def __init__(self, module):
self.module = module
self.state = module.params['state']
self.name = module.params['name']
self.gid = module.params['gid']
self.system = module.params['system']
self.local = module.params['local']
self.non_unique = module.params['non_unique']
def execute_command(self, cmd):
return self.module.run_command(cmd)
def group_del(self):
if self.local:
command_name = 'lgroupdel'
else:
command_name = 'groupdel'
cmd = [self.module.get_bin_path(command_name, True), self.name]
return self.execute_command(cmd)
def _local_check_gid_exists(self):
if self.gid:
for gr in grp.getgrall():
if self.gid == gr.gr_gid and self.name != gr.gr_name:
self.module.fail_json(msg="GID '{0}' already exists with group '{1}'".format(self.gid, gr.gr_name))
def group_add(self, **kwargs):
if self.local:
command_name = 'lgroupadd'
self._local_check_gid_exists()
else:
command_name = 'groupadd'
cmd = [self.module.get_bin_path(command_name, True)]
for key in kwargs:
if key == 'gid' and kwargs[key] is not None:
cmd.append('-g')
cmd.append(str(kwargs[key]))
if self.non_unique:
cmd.append('-o')
elif key == 'system' and kwargs[key] is True:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def group_mod(self, **kwargs):
if self.local:
command_name = 'lgroupmod'
self._local_check_gid_exists()
else:
command_name = 'groupmod'
cmd = [self.module.get_bin_path(command_name, True)]
info = self.group_info()
for key in kwargs:
if key == 'gid':
if kwargs[key] is not None and info[2] != int(kwargs[key]):
cmd.append('-g')
cmd.append(str(kwargs[key]))
if self.non_unique:
cmd.append('-o')
if len(cmd) == 1:
return (None, '', '')
if self.module.check_mode:
return (0, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
def group_exists(self):
# The grp module does not distinguish between local and directory accounts.
# Its output cannot be used to determine whether or not a group exists locally.
# It returns True if the group exists locally or in the directory, so instead
# look in the local GROUP file for an existing account.
if self.local:
if not os.path.exists(self.GROUPFILE):
self.module.fail_json(msg="'local: true' specified but unable to find local group file {0} to parse.".format(self.GROUPFILE))
exists = False
name_test = '{0}:'.format(self.name)
with open(self.GROUPFILE, 'rb') as f:
reversed_lines = f.readlines()[::-1]
for line in reversed_lines:
if line.startswith(to_bytes(name_test)):
exists = True
break
if not exists:
self.module.warn(
"'local: true' specified and group was not found in {file}. "
"The local group may already exist if the local group database exists somewhere other than {file}.".format(file=self.GROUPFILE))
return exists
else:
try:
if grp.getgrnam(self.name):
return True
except KeyError:
return False
def group_info(self):
if not self.group_exists():
return False
try:
info = list(grp.getgrnam(self.name))
except KeyError:
return False
return info
# ===========================================
class SunOS(Group):
"""
This is a SunOS Group manipulation class. Solaris doesn't have
the 'system' group concept.
This overrides the following methods from the generic class:-
- group_add()
"""
platform = 'SunOS'
distribution = None
GROUPFILE = '/etc/group'
def group_add(self, **kwargs):
cmd = [self.module.get_bin_path('groupadd', True)]
for key in kwargs:
if key == 'gid' and kwargs[key] is not None:
cmd.append('-g')
cmd.append(str(kwargs[key]))
if self.non_unique:
cmd.append('-o')
cmd.append(self.name)
return self.execute_command(cmd)
# ==================================
|
nschloe/voropy
|
tests/test_signed_area.py
|
Python
|
mit
| 1,654
| 0.002418
|
import pathlib
import meshio
import numpy as np
import pytest
import meshplex
this_dir = pathlib.Path(__file__).resolve().parent
@pytest.mark.parametrize(
"points,cells,ref",
[
# line
([[0.0], [0.35]], [[0, 1]], [0.35]),
([[0.0], [0.35]], [[1, 0]], [-0.35]),
# triangle
([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]], [[0, 1, 2]], [0.5]),
([[0.0, 0.0], [0.0, 1.0], [1.0, 0.0]], [[0, 1, 2]], [-0.5]),
(
[[0.0, 0.0], [1.0, 0.0], [1.1, 1.0], [0.0, 1.0]],
[[0, 1, 2], [0, 3, 2]],
[0.5, -0.55],
),
# tetra
(
[[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]],
[[0, 1, 2, 3]],
[1 / 6],
),
(
[[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]],
[[0, 1, 3, 2]],
[-1 / 6],
),
],
)
def test_signed_area(points, cells, ref):
mesh = meshplex.Mesh(points, cells)
ref = np.array(ref)
assert mesh.signed_cell_volumes.shape == ref.shape
assert np.all(
np.abs(ref - mesh.signed_cell_volumes) < np.abs(ref) * 1.0e-13 + 1.0e-13
)
def test_signed_area_pacman():
mesh = meshio.read(this_dir / "meshes" / "pacman.vtu")
assert np.all(np.abs(mesh.points[:, 2]) < 1.0e-15)
X = mesh.points[:, :2]
mesh = meshplex.Mesh(X, mesh.get_cells_type("triangle"))
vols = mesh.signed_cell_volumes
# all cells are positively oriented in this mesh
assert np.all(mesh.signed_cell_volumes > 0.0)
assert np.all(abs(abs(vols) - mesh.cell_volumes) < 1.0e-12 * mesh.cell_volumes)
|
bitsteller/witica
|
witica/metadata/extractor.py
|
Python
|
mit
| 7,072
| 0.035209
|
import json, codecs, re
from abc import ABCMeta, abstractmethod
from PIL import Image, ExifTags
from witica.util import throw, sstr, suni
#regular expressions regarding item ids
RE_METAFILE = r'^meta\/[^\n]+$'
RE_FIRST_ITEMID = r'(?!meta\/)[^\n?@.]+'
RE_ITEMFILE_EXTENSION = r'[^\n?@\/]+'
RE_ITEMID = r'^' + RE_FIRST_ITEMID + '$'
RE_ITEMFILE = r'^' + RE_FIRST_ITEMID + '\.' + RE_ITEMFILE_EXTENSION + '$'
RE_ITEM_SPLIT_ITEMID_EXTENSION = r'^(' + RE_FIRST_ITEMID + ')\.(' + RE_ITEMFILE_EXTENSION + ')$'
RE_ITEM_REFERENCE = r'^!(?:.\/)?' + RE_FIRST_ITEMID + '$'
#regular expressions to be used for md files parsing
RE_MD_SPLIT_JSON_MD = "^\s*({[\s\S]*?})?[\s]*([^}\s][\s\S]*)$" #splits md file into the json metadata and markdown sections as capture groups
RE_MD_SPLIT_TITLE_BODY = "^(?:#(?!#)[\t ]*([\S][^\n\r]*)(?:\n|\r\n?|$))?([\s\S]*)$" #splits markdown section into title and body sections as capture groups
RE_MD_NOBRACKET = r'[^\]\[]*'
RE_MD_BRK = ( r'\[('
+ (RE_MD_NOBRACKET + r'(\[')*6
+ (RE_MD_NOBRACKET+ r'\])*')*6
+ RE_MD_NOBRACKET + r')\]' )
RE_MD_IMAGE_LINK = r'\!' + RE_MD_BRK + r'\s*\((?!\!)(<.*?>|([^")]+"[^"]*"|[^\)]*))\)'
#  or 
#RE_MD_ITEM_LINK = r'\!' + RE_MD_BRK + r'\s*\(\!(<.*?>|([^")]+"[^"]*"|[^\)]*))\)'
#  or 
RE_MD_ITEM_LINK = r'!({[\s\S]*?})?\((![\s\S]+?)\)'
# !{renderparametersjson}(!itemid)
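# Illustrative (hypothetical) md file the two split regexes operate on:
#   {"author": "jane"}
#   # Page title
#   Body text ...
# RE_MD_SPLIT_JSON_MD captures the JSON header and the markdown part;
# RE_MD_SPLIT_TITLE_BODY then splits "Page title" from the body.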
registered_extractors = [];
def register(extension, extractor):
"""Register new metadata extractor for file extension"""
for (ext,extr) in registered_extractors:
if extension == ext:
raise ValueError("A metadata extractor for extension '" + extension + "' is already registered.")
#TODO: check type of extractor
registered_extractors.append((extension,extractor))
#print("registered: " + extension + " " + sstr(extractor))
def register_default_extractors():
register("item", JSONExtractor)
register("json", JSONExtractor)
register("md", MDExtractor)
register("txt", MDExtractor)
register("jpg", ImageExtractor)
register("jpeg", ImageExtractor)
def is_supported(extension):
for (ext,extractor) in registered_extractors:
if extension == ext:
return True
return False
def extract_metadata(filename):
extension = filename.rpartition(".")[2]
for (ext,extractor) in registered_extractors:
if extension == ext:
return extractor().extract_metadata(filename)
raise ValueError("Could not extract metadata, because a metadata extractor for extension '" + extension + "' is not registered.")
class MetadataExtractor(object):
__metaclass__ = ABCMeta
"""Abstract class representing a metadata extractor"""
supported_extensions = [];
def __init__(self):
pass
@abstractmethod
def extract_metadata(self, filename):
"""Extract metadata from filename and return metadata as json"""
pass
class JSONExtractor(MetadataExtractor):
__metaclass__ = ABCMeta
"""Extracts metadata from item or json file"""
supported_extensions = ["item", "json"];
def __init__(self):
pass
def extract_metadata(self, filename):
"""Extract metadata from filename and return metadata as json"""
f = codecs.open(filename, mode="r", encoding="utf-8")
return json.loads(f.read())
class MDExtractor(MetadataExtractor):
__metaclass__ = ABCMeta
"""Extracts metadata from markdown file"""
supported_extensions = ["md", "txt"];
def __init__(self):
pass
def extract_metadata(self, filename):
try:
meta = {}
#split into json and markdown part
f = codecs.open(filename, mode="r", encoding="utf-8")
match = re.match(RE_MD_SPLIT_JSON_MD,f.read())
f.close()
if not match:
raise IOError("Extracting metadata from file '" + sstr(filename) + "' failed. Could not split JSON and markdown parts.")
jsonstr, mdstr = match.groups()
#get title string (first heading in markdown string) if available
title = re.match(RE_MD_SPLIT_TITLE_BODY,mdstr).group(1)
if not title == None:
meta["title"] = title
#update with explicit json
if not jsonstr == None:
meta.update(json.loads(jsonstr))
return meta
except Exception, e:
throw(IOError, "Extracting metadata from file '" + sstr(filename) + "' failed.", e)
class ImageExtractor(MetadataExtractor):
__metaclass__ = ABCMeta
"""Extracts metadata from markdown file"""
supported_extensions = ["jpg", "jpeg"];
def __init__(self):
pass
def extract_metadata(self, filename):
try:
meta = {"type": "image"}
img = Image.open(filename)
exif = {
ExifTags.TAGS[k]: v
for k, v in img._getexif().items()
if k in ExifTags.TAGS
}
if ("ImageDescription" in exif or "UserComment" in exif):
if "UserComment" in exif:
meta["title"] = exif["UserComment"]
if "ImageDescription" in exif:
meta["title"] = exif["ImageDescription"]
if ("Make" in exif or "Model" in exif):
meta["camera"] = (exif["Make"] if "Make" in exif else "") + " " + (exif["Model"] if
|
"Model" in exif else "")
if ("Orientation" in exif):
meta["orientation"] = exif["Orientation"]
if ("Artist" in exif):
meta["author"] = exif["Artist"]
if ("DateTimeOriginal" in exif):
meta["created"] = exif["DateTimeOriginal"] #TODO: convert to unix time
if ("Flash" in exif):
meta["flash"] = exif["Flash"]
if ("GPSInfo" in exif):
lat, lon = self.get_lat_lon(exif["GPSInfo"])
if lat and lon:
meta["lat"] = lat
meta["lon"] = lon
return meta
except Exception, e:
throw(IOError, "Extracting metadata from file '" + sstr(filename) + "' failed.", e)
# The remaining functions in the ImageExtractor class are originally by Eran Sandler (MIT license), see https://gist.github.com/erans/983821
def _get_if_exist(self, data, key):
if key in data:
return data[key]
return None
def _convert_to_degress(self, value):
"""Helper function to convert the GPS coordinates stored in the EXIF to degress in float format"""
d0 = value[0][0]
d1 = value[0][1]
d = float(d0) / float(d1)
m0 = value[1][0]
m1 = value[1][1]
m = float(m0) / float(m1)
s0 = value[2][0]
s1 = value[2][1]
s = float(s0) / float(s1)
return d + (m / 60.0) + (s / 3600.0)
def get_lat_lon(self, gps_info_exif):
"""Returns the latitude and longitude, if available, from the provided exif_data (obtained through get_exif_data above)"""
lat = None
lon = None
gps_info = {
ExifTags.GPSTAGS[k]: v
for k, v in gps_info_exif.items()
if k in ExifTags.GPSTAGS
}
gps_latitude = self._get_if_exist(gps_info, "GPSLatitude")
gps_latitude_ref = self._get_if_exist(gps_info, 'GPSLatitudeRef')
gps_longitude = self._get_if_exist(gps_info, 'GPSLongitude')
gps_longitude_ref = self._get_if_exist(gps_info, 'GPSLongitudeRef')
if gps_latitude and gps_latitude_ref and gps_longitude and gps_longitude_ref:
lat = self._convert_to_degress(gps_latitude)
if gps_latitude_ref != "N":
lat = 0 - lat
lon = self._convert_to_degress(gps_longitude)
if gps_longitude_ref != "E":
lon = 0 - lon
return lat, lon
|
gfarnadi/FairPSL
|
debug/compare_map/run_fpsl_cvxpy.py
|
Python
|
mit
| 1,367
| 0.010241
|
#!/usr/bin/env python
import os, sys
SCRIPTDIR = os.path.dirname(__file__)
ENGINDIR = os.path.join(SCRIPTDIR, '..', '..', 'engines')
sys.path.append(os.path.abspath(ENGINDIR))
from fpsl_cvxpy import map_inference
PROBLEMDIR = os.path.join(SCRIPTDIR, '..', '..', 'problems', 'paper_review')
sys.path.append(os.path.abspath(PROBLEMDIR))
from grounding import ground
from os.path import join as ojoin
def run_model(data_path, out_path):
rules, hard_rules, _, atoms = ground(data_path)
results = map_inference(rules, hard_rules)
reviews = atoms['review']
with open(ojoin(out_path, 'POSITIVEREVIEW.txt'), 'w') as f:
for (review, paper), (vid, _) in reviews.items():
print("'%s'\t'%s'\t%f"%(review, paper, res
|
ults[vid]), file=f)
acceptable = atoms['acceptable']
with open(ojoin(out_path, 'ACCEPTABLE.txt'), 'w') as f:
for paper, (vid, _) in acceptable.items():
print("'%s'\t%f"%(paper, results[vid]), file=f)
presents = atoms['presents']
with open(ojoin(out_path, 'PRESENTS.txt'), 'w') as f:
for author, (vid, _) in presents.items():
print("'%s'\t%f"%(author, results[vid]), file=f)
if __name__ == '__main__':
data_path = ojoin(PROBLEMDIR, 'data', '1')
out_path = ojoin('output', 'fpsl_cvxpy')
run_model(data_path, out_path)
|
jdsolucoes/Ppostit
|
printy/migrations/0002_auto_20150921_2215.py
|
Python
|
apache-2.0
| 967
| 0.002068
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('printy', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='PostItModel',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('width', models.FloatField()),
('height', models.FloatField()),
],
),
migrations.AlterField(
model_name='postit',
name='print_page',
field=models.ForeignKey(related_name='posts', to='printy.PrintPage'),
),
migrations.AddField(
model_name='printpage',
name='post_it_model',
field=models.ForeignKey(default=1, to='printy.PostItModel'),
preserve_default=False,
),
]
|
siggame/discuss
|
discuss/discuss/production.py
|
Python
|
bsd-3-clause
| 642
| 0
|
from discuss.discuss.settings import *
##########################################################################
#
# Server settings
#
##########################################################################
ALLOWED_HOSTS = ["localhost"]
WSGI_APPLICATION = 'discuss.discuss.wsgi_production.application'
##########################################################################
#
# Database settings
#
##########################################################################
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(VAR_DIR, 'db', 'production_db.sqlite3'),
}
}
|
paultag/hy
|
hy/errors.py
|
Python
|
mit
| 4,271
| 0
|
# -*- encoding: utf-8 -*-
#
# Copyright (c) 2013 Paul Tagliamonte <[email protected]>
# Copyright (c) 2013 Bob Tolbert <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import traceback
from clint.textui import colored
from hy._compat import PY3
class HyError(Exception):
"""
Generic Hy error. All internal Exceptions will be subclassed from this
Exception.
"""
pass
class HyCompileError(HyError):
def __init__(self, exception, traceback=None):
self.exception = exception
self.traceback = traceback
def __str__(self):
if isinstance(self.exception, HyTypeError):
return str(self.exception)
if self.traceback:
tb = "".join(traceback.format_tb(self.traceback)).strip()
else:
tb = "No traceback available. 😟"
return("Internal Compiler Bug 😱\n⤷ %s: %s\nCompilation traceback:\n%s"
% (self.exception.__class__.__name__,
self.exception, tb))
class HyTypeError(TypeError):
def __init__(self, expression, message):
super(HyTypeError, self).__init__(message)
self.expression = expression
self.message = message
self.source = None
self.filename = None
def __str__(self):
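# Renders a caret diagram pointing at the offending expression, roughly:
#   File "foo.hy", line 3, column 5
#
#     (foo bar)
#     ^------^
#   HyTypeError: <message>
# (illustrative sketch; the exact markers depend on the expression span)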
line = self.expression.start_line
start = self.expression.start_column
end = self.expression.end_column
source = []
if self.source is not None:
source = self.source.split("\n")[line-1:self.expression.end_line]
if line == self.expression.end_line:
length = end - start
else:
length = len(source[0]) - start
result = ""
result += ' File "%s", line %d, column %d\n\n' % (self.filename,
line,
start)
if len(source) == 1:
result += ' %s\n' % colored.red(source[0])
result += ' %s%s\n' % (' '*(start-1),
colored.green('^' + '-'*(length-1) + '^'))
if len(source) > 1:
result += ' %s\n' % colored.red(source[0])
result += ' %s%s\n' % (' '*(start-1),
colored.green('^' + '-'*length))
if len(source) > 2: # write the middle lines
for line in source[1:-1]:
result += ' %s\n' % colored.red("".join(line))
result += ' %s\n' % colored.green("-"*len(line))
# write the last line
result += ' %s\n' % colored.red("".join(source[-1]))
result += ' %s\n' % colored.green('-'*(end-1) + '^')
result += colored.yellow("%s: %s\n\n" %
(self.__class__.__name__,
self.message))
if not PY3:
return result.encode('utf-8')
else:
return result
class HyMacroExpansionError(HyTypeError):
pass
class HyIOError(HyError, IOError):
"""
Trivial subclass of IOError and HyError, to distinguish between
IOErrors raised by Hy itself as opposed to Hy programs.
"""
pass
|
bird-house/birdhousebuilder.recipe.adagucserver
|
birdhousebuilder/recipe/adagucserver/tests/test_docs.py
|
Python
|
bsd-3-clause
| 1,620
| 0.003086
|
# -*- coding: utf-8 -*-
"""
Doctest runner for 'birdhousebuilder.recipe.adagucserver'.
"""
__docformat__ = 'restructuredtext'
import os
import sys
import unittest
import zc.buildout.tests
import zc.buildout.testing
from zope.testing import doctest, renormalizing
optionflags = (doctest.ELLIPSIS |
doctest.NORMALIZE_WHITESPACE |
doctest.REPORT_ONLY_FIRST_FAILURE)
def setUp(test):
zc.buildout.testing.buildoutSetUp(test)
# Install the recipe in develop mode
zc.buildout.testing.install_develop('birdhousebuilder.recipe.adagucserver', test)
test.globs['os'] = os
test.globs['sys'] = sys
test.globs['test_dir'] = os.path.dirname(__file__)
def test_suite():
suite = unittest.TestSuite((
doctest.DocFileSuite(
'../../../../README.rst',
setUp=setUp,
tearDown=zc.buildout.testing.buildoutTearDown,
optionflags=optionflags,
checker=renormalizing.RENormalizing([
# If you want to clean up the doctest output you
# can register additional regexp normalizers
# here. The format is a two-tuple with the RE
# as the first item and the replacement as the
# second item, e.g.
# (re.compile('my-[rR]eg[eE]ps'), 'my-regexps')
zc.buildout.testing.normalize_path,
]),
),
))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')
|
googleads/google-ads-python
|
google/ads/googleads/v10/errors/types/time_zone_error.py
|
Python
|
apache-2.0
| 1,124
| 0.00089
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v10.errors",
marshal="google.ads.googleads.v10",
manifest={"TimeZoneErrorEnum",},
)
class TimeZoneErrorEnum(proto.Message):
r"""Container for enum describing possible time zone errors.
"""
class TimeZoneError(proto.Enum):
r"""Enum describing possible currency code errors."""
UNSPECIFIED = 0
UNKNOWN = 1
INVALID_TIME_ZONE = 5
__all__ = tuple(sorted(__protobuf__.manifest))
|
seraphlnWu/django-mongoengine
|
django_mongoengine/admin/__init__.py
|
Python
|
bsd-3-clause
| 451
| 0.002217
|
from django_mongoengine.admin.options import *
from django_mongoengine.admin.sites import site
from django.conf import settings
if getattr(settings, 'DJANGO_MONGOENGINE_OVERRIDE_ADMIN', False):
import django.contrib.admin
# copy already registered model admins
# without that the already registered models
# don't show up in the new admin
site._registry = django.contrib.admin.site._registry
django.contrib.admin.site = site
|
slaporte/qualityvis
|
inputs/google.py
|
Python
|
gpl-3.0
| 1,054
| 0.004744
|
from base import Input
from wapiti import get_json
class GoogleNews(Input):
prefix = 'gn'
def fetch(self):
return get_json('http://ajax.googleapis.com/ajax/services/search/news?v=1.0&q=' + self.page_title)
def process(self, f_res):
if f_res['responseStatus'] == 403 or not f_res.get('responseData', {}).get('cursor', {}).get('estimatedResultCount', {}):
return {}
else:
return super(GoogleNews, self).process(f_res['responseData']['cursor']['estimatedResultCount'])
stats = {
'count': lambda f: f
}
class GoogleSearch(Input):
prefix = 'gs'
def fetch(self):
return get_json('http://ajax.googleapis.com/ajax/services/search/web?v=1.0&q=' + self.page_title)
def process(self, f_res):
if f_res['responseStatus'] == 403 or not f_res['responseData']:
return {}
else:
return super(GoogleSearch, self).process(f_res['responseData']['cursor']['estimatedResultCount'])
stats = {
'count': lambda f: f
}
|