Dataset columns:
repo_name: string (5–100 chars)
path: string (4–231 chars)
language: string (1 class)
license: string (15 classes)
size: int64 (6–947k)
score: float64 (0–0.34)
prefix: string (0–8.16k chars)
middle: string (3–512 chars)
suffix: string (0–8.17k chars)
t794104/ansible
lib/ansible/plugins/filter/ipaddr.py
Python
gpl-3.0
32,458
0.000924
# (c) 2014, Maciej Delmanowski <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from functools import partial
import types

from ansible.module_utils import six

try:
    import netaddr
except ImportError:
    # in this case, we'll make the filters return error messages (see bottom)
    netaddr = None
else:
    class mac_linux(netaddr.mac_unix):
        pass
    mac_linux.word_fmt = '%.2x'

from ansible import errors


# ---- IP address and network query helpers ----
def _empty_ipaddr_query(v, vtype):
    # We don't have any query to process, so just check what type the user
    # expects, and return the IP address in a correct format
    if v:
        if vtype == 'address':
            return str(v.ip)
        elif vtype == 'network':
            return str(v)


def _first_last(v):
    if v.size == 2:
        first_usable = int(netaddr.IPAddress(v.first))
        last_usable = int(netaddr.IPAddress(v.last))
        return first_usable, last_usable
    elif v.size > 1:
        first_usable = int(netaddr.IPAddress(v.first + 1))
        last_usable = int(netaddr.IPAddress(v.last - 1))
        return first_usable, last_usable


def _6to4_query(v, vtype, value):
    if v.version == 4:
        if v.size == 1:
            ipconv = str(v.ip)
        elif v.size > 1:
            if v.ip != v.network:
                ipconv = str(v.ip)
            else:
                ipconv = False

        if ipaddr(ipconv, 'public'):
            numbers = list(map(int, ipconv.split('.')))

        try:
            return '2002:{:02x}{:02x}:{:02x}{:02x}::1/48'.format(*numbers)
        except Exception:
            return False

    elif v.version == 6:
        if vtype == 'address':
            if ipaddr(str(v), '2002::/16'):
                return value
        elif vtype == 'network':
            if v.ip != v.network:
                if ipaddr(str(v.ip), '2002::/16'):
                    return value
            else:
                return False


def _ip_query(v):
    if v.size == 1:
        return str(v.ip)
    if v.size > 1:
        # /31 networks in netaddr have no broadcast address
        if v.ip != v.network or not v.broadcast:
            return str(v.ip)


def _gateway_query(v):
    if v.size > 1:
        if v.ip != v.network:
            return str(v.ip) + '/' + str(v.prefixlen)


def _address_prefix_query(v):
    if v.size > 1:
        if v.ip != v.network:
            return str(v.ip) + '/' + str(v.prefixlen)


def _bool_ipaddr_query(v):
    if v:
        return True


def _broadcast_query(v):
    if v.size > 2:
        return str(v.broadcast)


def _cidr_query(v):
    return str(v)


def _cidr_lookup_query(v, iplist, value):
    try:
        if v in iplist:
            return value
    except Exception:
        return False


def _first_usable_query(v, vtype):
    if vtype == 'address':
        "Does it make sense to raise an error"
        raise errors.AnsibleFilterError('Not a network address')
    elif vtype == 'network':
        if v.size == 2:
            return str(netaddr.IPAddress(int(v.network)))
        elif v.size > 1:
            return str(netaddr.IPAddress(int(v.network) + 1))


def _host_query(v):
    if v.size == 1:
        return str(v)
    elif v.size > 1:
        if v.ip != v.network:
            return str(v.ip) + '/' + str(v.prefixlen)


def _hostmask_query(v):
    return str(v.hostmask)


def _int_query(v, vtype):
    if vtype == 'address':
        return int(v.ip)
    elif vtype == 'network':
        return str(int(v.ip)) + '/' + str(int(v.prefixlen))


def _ip_prefix_query(v):
    if v.size == 2:
        return str(v.ip) + '/' + str(v.prefixlen)
    elif v.size > 1:
        if v.ip != v.network:
            return str(v.ip) + '/' + str(v.prefixlen)


def _ip_netmask_query(v):
    if v.size == 2:
        return str(v.ip) + ' ' + str(v.netmask)
    elif v.size > 1:
        if v.ip != v.network:
            return str(v.ip) + ' ' + str(v.netmask)


'''
def _ip_wildcard_query(v):
    if v.size == 2:
        return str(v.ip) + ' ' + str(v.hostmask)
    elif v.size > 1:
        if v.ip != v.network:
            return str(v.ip) + ' ' + str(v.hostmask)
'''


def _ipv4_query(v, value):
    if v.version == 6:
        try:
            return str(v.ipv4())
        except Exception:
            return False
    else:
        return value


def _ipv6_query(v, value):
    if v.version == 4:
        return str(v.ipv6())
    else:
        return value


def _last_usable_query(v, vtype):
    if vtype == 'address':
        "Does it make sense to raise an error"
        raise errors.AnsibleFilterError('Not a network address')
    elif vtype == 'network':
        if v.size > 1:
            first_usable, last_usable = _first_last(v)
            return str(netaddr.IPAddress(last_usable))


def _link_local_query(v, value):
    v_ip = netaddr.IPAddress(str(v.ip))
    if v.version == 4:
        if ipaddr(str(v_ip), '169.254.0.0/24'):
            return value
    elif v.version == 6:
        if ipaddr(str(v_ip), 'fe80::/10'):
            return value


def _loopback_query(v, value):
    v_ip = netaddr.IPAddress(str(v.ip))
    if v_ip.is_loopback():
        return value


def _multicast_query(v, value):
    if v.is_multicast():
        return value


def _net_query(v):
    if v.size > 1:
        if v.ip == v.network:
            return str(v.network) + '/' + str(v.prefixlen)


def _netmask_query(v):
    return str(v.netmask)


def _network_query(v):
    '''Return the network of a given IP or subnet'''
    return str(v.network)


def _network_id_query(v):
    '''Return the network of a given IP or subnet'''
    return str(v.network)


def _network_netmask_query(v):
    return str(v.network) + ' ' + str(v.netmask)


def _network_wildcard_query(v):
    return str(v.network) + ' ' + str(v.hostmask)


def _next_usable_query(v, vtype):
    if vtype == 'address':
        "Does it make sense to raise an error"
        raise errors.AnsibleFilterError('Not a network address')
    elif vtype == 'network':
        if v.size > 1:
            first_usable, last_usable = _first_last(v)
            next_ip = int(netaddr.IPAddress(int(v.ip) + 1))
            if next_ip >= first_usable and next_ip <= last_usable:
                return str(netaddr.IPAddress(int(v.ip) + 1))


def _prefix_query(v):
    return int(v.prefixlen)


def _previous_usable_query(v, vtype):
    if vtype == 'address':
        "Does it make sense to raise an error"
        raise errors.AnsibleFilterError('Not a network address')
    elif vtype == 'network':
        if v.size > 1:
            first_usable, last_usable = _first_last(v)
            previous_ip = int(netaddr.IPAddress(int(v.ip) - 1))
            if previous_ip >= first_usable and previous_ip <= last_usable:
                return str(netaddr.IPAddress(int(v.ip) - 1))


def _private_query(v, value):
    if v.is_private():
        return value


def _public_query(v, value):
    v_ip = netaddr.IPAddress(str(v.ip))
    if (v_ip.is_unicast() and not v_ip.is_private() and
            not v_ip.is_loopback() and not v_ip.is_netmask() and
            not v_ip.is_hostmask()):
        return value


def _range_usable_query(v, vtype):
    if vtype == 'address':
        "Does it make sense to raise an error"
        raise errors.AnsibleFilterError('Not a network address')
    elif vtype == 'network':
        if v.size > 1:
            first_usable, last_usable = _first_last(v)
            first_usab
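A minimal sketch (not part of the original file) exercising a few of the query helpers above directly with netaddr; the sample network is illustrative:

# Hypothetical demonstration of the query helpers; assumes netaddr is
# installed and the functions above are in scope.
import netaddr

net = netaddr.IPNetwork('192.0.2.5/24')
print(_ip_query(net))         # '192.0.2.5' (host bits set, so the IP itself)
print(_netmask_query(net))    # '255.255.255.0'
print(_broadcast_query(net))  # '192.0.2.255'
print(_first_last(net))       # integer forms of 192.0.2.1 and 192.0.2.254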
aclindsa/asink-python
src/shared/daemon.py
Python
gpl-2.0
3,138
0.007648
#!/usr/bin/env python

# Copyright (C) 2011 Aaron Lindsay <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os
import sys
from time import sleep
import atexit
import logging
from signal import SIGTERM

pid_file = "" #holds the name of file holding our pid

def daemonize(pid_filename, daemon_fn):
    """Daemonize the current process, store the new pid in pid_filename,
    and call daemon_fn() to continue execution."""
    global pid_file
    pid_file = pid_filename

    try:
        #fork off a process, kill the parent
        if os.fork() > 0:
            os._exit(0)
    except:
        logging.error("Failed to fork new process.")
        os._exit(0)

    os.chdir("/")
    os.setsid() #start a new session, with this as the session leader
    os.umask(0) #reset file creation mask

    #fork again
    try:
        if os.fork() > 0:
            os._exit(0)
    except:
        logging.error("Failed to fork new process.")
        os._exit(0)

    #flush all terminal 'files' and redirect them to /dev/null
    sys.stdout.flush()
    sys.stderr.flush()
    null = os.open('/dev/null', os.O_RDWR)
    os.dup2(null, sys.stdin.fileno())
    os.dup2(null, sys.stdout.fileno())
    os.dup2(null, sys.stderr.fileno())
    os.close(null)

    #store our current pid in the given pidfile
    atexit.register(rm_pid_file) #delete pid file when current process exits
    pid = os.getpid()
    try:
        with open(pid_file, 'w') as f:
            f.write(str(pid))
    except:
        logging.error("Failed to create pid file at %s" % (pid_filename))
        os._exit(0)

    #run the function with "real work" in it
    daemon_fn()

def rm_pid_file():
    global pid_file
    os.remove(pid_file)

def aengelize(pid_filename):
    """Make the daemonized process represented by the given filename
    'go to heaven'."""
    try:
        with open(pid_filename, 'r') as f:
            pid = int(f.read().strip())
    except:
        logging.error("Failed to open pid file at %s. Process already exited?" % (pid_filename))
        sys.exit(0)

    #kill process
    try:
        #try to kill process for 11 seconds
        for i in range(0, 110):
            os.kill(pid, SIGTERM)
            sleep(0.1)
        logging.error("Failed to stop process")
    except OSError, err:
        #an OSError of "No such process" means the process died, which is
        #what we wanted; anything else is a real failure
        if "No such process" not in str(err):
            logging.error("Failed to stop process")
            sys.exit(1)
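A hedged usage sketch for the helpers above; the module path and pid-file location are assumptions for illustration:

# Hypothetical caller; 'shared.daemon' and '/tmp/asink.pid' are made up.
from shared.daemon import daemonize, aengelize
from time import sleep

def work_loop():
    # Stand-in for the daemon's real work.
    while True:
        sleep(60)

# The parent process exits inside daemonize(); work_loop() runs in the
# detached child, whose pid is written to /tmp/asink.pid.
daemonize("/tmp/asink.pid", work_loop)

# From a separate invocation, stop the daemon again:
# aengelize("/tmp/asink.pid")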
JenkinsDev/pelican-readtime
setup.py
Python
mit
2,230
0.000897
import os
import codecs

try:
    from setuptools import (setup, find_packages)
except ImportError:
    # distutils.core has no find_packages (and it is not used below)
    from distutils.core import setup

VERSION = (0, 2, 0)

__version__ = '.'.join(map(str, VERSION[:3])) + "".join(VERSION[3:])
__package_name__ = 'pelican-readtime'
__description__ = 'Plugin for Pelican that computes average read time.'
__contact_names__ = 'David Jenkins, Deepak Bhalla, Jonathan Dektiar'
__contact_emails__ = '[email protected], [email protected], [email protected]'
__homepage__ = 'https://github.com/JenkinsDev/pelican-readtime'
__repository_url__ = 'https://github.com/JenkinsDev/pelican-readtime'
__download_url__ = 'https://github.com/JenkinsDev/pelican-readtime'
__docformat__ = 'markdown'
__license__ = 'MIT'
__keywords__ = 'pelican blogging blog static webdevelopment plugin pelican-plugin readtime python python3 python2'

here = os.path.abspath(os.path.dirname(__file__))

if os.path.exists('README.rst'):
    # codec is used for consistent encoding
    long_description = codecs.open(
        os.path.join(here, 'README.rst'), 'r', 'utf-8').read()
else:
    long_description = 'See ' + __homepage__

setup(
    name=__package_name__,
    version=__version__,
    description=__description__,
    long_description=long_description,
    url=__repository_url__,
    download_url=__download_url__,
    license='MIT',
    author=__contact_names__,
    author_email=__contact_emails__,
    maintainer=__contact_names__,
    maintainer_email=__contact_emails__,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Natural Language :: English',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ],
    keywords=__keywords__,
    packages=[''],
    install_requires=['pelican>=3.6'],
    zip_safe=True,
    include_package_data=True
)
LethusTI/supportcenter
vendor/django/tests/regressiontests/forms/tests/util.py
Python
gpl-3.0
3,154
0.008566
# -*- coding: utf-8 -*-
from django.core.exceptions import ValidationError
from django.forms.util import flatatt, ErrorDict, ErrorList
from django.test import TestCase
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy


class FormsUtilTestCase(TestCase):
    # Tests for forms/util.py module.

    def test_flatatt(self):
        ###########
        # flatatt #
        ###########

        self.assertEqual(flatatt({'id': "header"}), u' id="header"')
        self.assertEqual(flatatt({'class': "news", 'title': "Read this"}),
                         u' class="news" title="Read this"')
        self.assertEqual(flatatt({}), u'')

    def test_validation_error(self):
        ###################
        # ValidationError #
        ###################

        # Can take a string.
        self.assertHTMLEqual(str(ErrorList(ValidationError("There was an error.").messages)),
                             '<ul class="errorlist"><li>There was an error.</li></ul>')

        # Can take a unicode string.
        self.assertHTMLEqual(unicode(ErrorList(ValidationError(u"Not \u03C0.").messages)),
                             u'<ul class="errorlist"><li>Not π.</li></ul>')

        # Can take a lazy string.
        self.assertHTMLEqual(str(ErrorList(ValidationError(ugettext_lazy("Error.")).messages)),
                             '<ul class="errorlist"><li>Error.</li></ul>')

        # Can take a list.
        self.assertHTMLEqual(str(ErrorList(ValidationError(["Error one.", "Error two."]).messages)),
                             '<ul class="errorlist"><li>Error one.</li><li>Error two.</li></ul>')

        # Can take a mixture in a list.
        self.assertHTMLEqual(str(ErrorList(ValidationError(["First error.", u"Not \u03C0.", ugettext_lazy("Error.")]).messages)),
                             '<ul class="errorlist"><li>First error.</li><li>Not π.</li><li>Error.</li></ul>')

        class VeryBadError:
            def __unicode__(self):
                return u"A very bad error."

        # Can take a non-string.
        self.assertHTMLEqual(str(ErrorList(ValidationError(VeryBadError()).messages)),
                             '<ul class="errorlist"><li>A very bad error.</li></ul>')

        # Escapes non-safe input but not input marked safe.
        example = 'Example of link: <a href="http://www.example.com/">example</a>'
        self.assertHTMLEqual(str(ErrorList([example])),
                             '<ul class="errorlist"><li>Example of link: &lt;a href=&quot;http://www.example.com/&quot;&gt;example&lt;/a&gt;</li></ul>')
        self.assertHTMLEqual(str(ErrorList([mark_safe(example)])),
                             '<ul class="errorlist"><li>Example of link: <a href="http://www.example.com/">example</a></li></ul>')
        self.assertHTMLEqual(str(ErrorDict({'name': example})),
                             '<ul class="errorlist"><li>nameExample of link: &lt;a href=&quot;http://www.example.com/&quot;&gt;example&lt;/a&gt;</li></ul>')
        self.assertHTMLEqual(str(ErrorDict({'name': mark_safe(example)})),
                             '<ul class="errorlist"><li>nameExample of link: <a href="http://www.example.com/">example</a></li></ul>')
pcolmant/repanier
repanier/views/logout_view.py
Python
gpl-3.0
1,016
0.000984
from django.contrib.auth import logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth.signals import user_logged_out
from django.dispatch import receiver
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect

from repanier.auth_backend import RepanierAuthBackend


@login_required()
@csrf_protect
@never_cache
def logout_view(request):
    """
    Logs out the user and displays 'You are logged out' message.
    """
    logout(request)
    # pages-root is the django cms root page.
    # pages-root may be replaced by login_form to go to the login form instead of the home page
    # The reverse may be replaced by "/" to also go to the home page
    return HttpResponseRedirect(reverse("pages-root"))


@receiver(user_logged_out)
def receiver_user_logged_out(sender, request, user, **kwargs):
    RepanierAuthBackend.remove_staff_right(user=user)
kapteyn-astro/kapteyn
doc/source/EXAMPLES/kmpfit_voigt.py
Python
bsd-3-clause
4,841
0.021483
#!/usr/bin/env python
#------------------------------------------------------------
# Script which demonstrates how to find the best-fit
# parameters of a Voigt line-shape model
#
# Vog, 26 Mar 2012
#------------------------------------------------------------
import numpy
from matplotlib.pyplot import figure, show, rc
from scipy.special import wofz
from kapteyn import kmpfit

ln2 = numpy.log(2)


def voigt(x, y):
    # The Voigt function is also the real part of
    # w(z) = exp(-z^2) erfc(iz), the complex probability function,
    # which is also known as the Faddeeva function. Scipy has
    # implemented this function under the name wofz()
    z = x + 1j*y
    I = wofz(z).real
    return I


def Voigt(nu, alphaD, alphaL, nu_0, A, a=0, b=0):
    # The Voigt line shape in terms of its physical parameters
    f = numpy.sqrt(ln2)
    x = (nu-nu_0)/alphaD * f
    y = alphaL/alphaD * f
    backg = a + b*nu
    V = A*f/(alphaD*numpy.sqrt(numpy.pi)) * voigt(x, y) + backg
    return V


def funcV(p, x):
    # Compose the Voigt line-shape
    alphaD, alphaL, nu_0, I, a, b = p
    return Voigt(x, alphaD, alphaL, nu_0, I, a, b)


def funcG(p, x):
    # Model function is a gaussian
    A, mu, sigma, zerolev = p
    return( A * numpy.exp(-(x-mu)*(x-mu)/(2*sigma*sigma)) + zerolev )


def residualsV(p, data):
    # Return weighted residuals of Voigt
    x, y, err = data
    return (y-funcV(p,x)) / err


def residualsG(p, data):
    # Return weighted residuals of Gauss
    x, y, err = data
    return (y-funcG(p,x)) / err


# Data from simulated MUSE cube
x = numpy.array([854.05,854.18,854.31,854.44,854.57,854.7,854.83,854.96,\
                 855.09,855.22,855.35,855.48,855.61,855.74,855.87,856.0,\
                 856.13,856.26,856.39,856.52,856.65,856.78,856.91])
y = numpy.array([6.31683382764,6.41273839772,6.43047296256,6.37437933311,\
                 6.34883451462,6.30711287633,6.24409954622,6.09241716936,\
                 5.75421549752,5.20381929725,4.18020502292,3.64663145132,\
                 4.25251198746,5.23945118487,5.76701752096,6.06587703526,\
                 6.15751018003,6.25985588506,6.35063433647,6.41795488447,\
                 6.42002335563,6.35883554071,6.36915982142])
N = len(y)
err = numpy.ones(N)
A = -2
alphaD = 0.5
alphaL = 0.5
a = 6
b = 0
nu_0 = 855
p0 = [alphaD, alphaL, nu_0, A, a, b]

# Do the fit
fitter = kmpfit.Fitter(residuals=residualsV, data=(x,y,err))
fitter.parinfo = [{}, {}, {}, {}, {}, {'fixed':True}]  # Take zero level fixed in fit
fitter.fit(params0=p0)

print("\n========= Fit results Voigt profile ==========")
print("Initial params:", fitter.params0)
print("Params: ", fitter.params)
print("Iterations: ", fitter.niter)
print("Function ev: ", fitter.nfev)
print("Uncertainties: ", fitter.xerror)
print("dof: ", fitter.dof)
print("chi^2, rchi2: ", fitter.chi2_min, fitter.rchi2_min)
print("stderr: ", fitter.stderr)
print("Status: ", fitter.status)

alphaD, alphaL, nu_0, I, a_back, b_back = fitter.params
c1 = 1.0692
c2 = 0.86639
hwhm = 0.5*(c1*alphaL+numpy.sqrt(c2*alphaL**2+4*alphaD**2))
print("\nFWHM Voigt profile: ", 2*hwhm)
f = numpy.sqrt(ln2)
Y = alphaL/alphaD * f
amp = I/alphaD*numpy.sqrt(ln2/numpy.pi)*voigt(0,Y)
print("Amplitude Voigt profile:", amp)
print("Area under profile: ", I)

# Fit the Gaussian model
p0 = [-3, 855, 0.5, 6.3]
fitterG = kmpfit.Fitter(residuals=residualsG, data=(x,y,err))
#fitterG.parinfo = [{}, {}, {}, {}, {}]  # Take zero level fixed in fit
fitterG.fit(params0=p0)
print("\n========= Fit results Gaussian profile ==========")
print("Initial params:", fitterG.params0)
print("Params: ", fitterG.params)
print("Iterations: ", fitterG.niter)
print("Function ev: ", fitterG.nfev)
print("Uncertainties: ", fitterG.xerror)
print("dof: ", fitterG.dof)
print("chi^2, rchi2: ", fitterG.chi2_min, fitterG.rchi2_min)
print("stderr: ", fitterG.stderr)
print("Status: ", fitterG.status)

fwhmG = 2*numpy.sqrt(2*numpy.log(2))*fitterG.params[2]
print("FWHM Gaussian: ", fwhmG)

# Plot the result
rc('legend', fontsize=6)
fig = figure()
frame1 = fig.add_subplot(1,1,1)
xd = numpy.linspace(x.min(), x.max(), 200)
frame1.plot(x, y, 'bo', label="data")
label = "Model with Voigt function"
frame1.plot(xd, funcV(fitter.params,xd), 'g', label=label)
label = "Model with Gaussian function"
frame1.plot(xd, funcG(fitterG.params,xd), 'm', ls='--', label=label)
offset = a_back+b_back*nu_0
frame1.plot((nu_0-hwhm,nu_0+hwhm), (offset+amp/2,offset+amp/2), 'r', label='fwhm')
frame1.plot(xd, a_back+b_back*xd, "y", label='Background')
frame1.set_xlabel("$\\nu$")
frame1.set_ylabel("$\\phi(\\nu)$")
vals = (fitter.chi2_min, fitter.rchi2_min, fitter.dof)
title = "Profile data with Voigt- vs. Gaussian model"
frame1.set_title(title, y=1.05)
frame1.grid(True)
leg = frame1.legend(loc=3)
show()
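The half-width computed above appears to follow the Olivero–Longbothum approximation for the Voigt FWHM, written here in terms of the Lorentzian and Doppler half-widths used in the script (c1 = 2 × 0.5346, c2 = 4 × 0.2166):

\mathrm{FWHM}_{V} \approx 1.0692\,\alpha_L + \sqrt{0.86639\,\alpha_L^{2} + 4\,\alpha_D^{2}}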
ella/django-versionedcache
versionedcache/middleware.py
Python
bsd-3-clause
647
0.004637
from django.core import cache
from django.core.exceptions import MiddlewareNotUsed

from versionedcache.debug import CacheClass


class CacheDebugMiddleware(object):
    def __init__(self):
        if not isinstance(cache.cache, CacheClass):
            raise MiddlewareNotUsed()

    def process_request(self, request):
        if request.user.is_superuser and 'cache_debug' in request.GET:
            action = request.GET['cache_debug']
            # only two actions allowed
            if action not in ('turn_off', 'write_only'):
                return
            # implement action
            getattr(cache.cache, action)()
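A sketch of how this middleware would be enabled; the surrounding settings are illustrative, not from the repository:

# settings.py (hypothetical): old-style middleware list, matching the
# pre-Django-1.10 __init__/process_request API implemented above.
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'versionedcache.middleware.CacheDebugMiddleware',
)
# A superuser can then append ?cache_debug=turn_off or
# ?cache_debug=write_only to any URL to flip the debug cache mode.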
zhaochao/fuel-web
fuel_agent/fuel_agent/objects/partition.py
Python
apache-2.0
11,745
0.000085
# Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from fuel_agent import errors
from fuel_agent.openstack.common import log as logging

LOG = logging.getLogger(__name__)


class Parted(object):
    def __init__(self, name, label):
        self.name = name
        self.label = label
        self.partitions = []
        self.install_bootloader = False

    def add_partition(self, **kwargs):
        # TODO(kozhukalov): validate before appending
        # calculating partition name based on device name and partition count
        kwargs['name'] = self.next_name()
        kwargs['count'] = self.next_count()
        kwargs['device'] = self.name
        # if begin is given use its value else use end of last partition
        kwargs['begin'] = kwargs.get('begin', self.next_begin())
        # if end is given use its value else
        # try to calculate it based on size kwarg or
        # raise KeyError
        # (kwargs.pop['size'] will raise error if size is not set)
        kwargs['end'] = kwargs.get('end') or \
            kwargs['begin'] + kwargs.pop('size')
        # if partition_type is given use its value else
        # try to calculate it automatically
        kwargs['partition_type'] = \
            kwargs.get('partition_type', self.next_type())
        partition = Partition(**kwargs)
        self.partitions.append(partition)
        return partition

    @property
    def logical(self):
        return filter(lambda x: x.type == 'logical', self.partitions)

    @property
    def primary(self):
        return filter(lambda x: x.type == 'primary', self.partitions)

    @property
    def extended(self):
        found = filter(lambda x: x.type == 'extended', self.partitions)
        if found:
            return found[0]

    def next_type(self):
        if self.label == 'gpt':
            return 'primary'
        elif self.label == 'msdos':
            if self.extended:
                return 'logical'
            elif len(self.partitions) < 3 and not self.extended:
                return 'primary'
            elif len(self.partitions) == 3 and not self.extended:
                return 'extended'
            #NOTE(agordeev): how to reach that condition?
            else:
                return 'logical'

    def next_count(self, next_type=None):
        next_type = next_type or self.next_type()
        if next_type == 'logical':
            return len(self.logical) + 5
        return len(self.partitions) + 1

    def next_begin(self):
        if not self.partitions:
            return 1
        if self.partitions[-1] == self.extended:
            return self.partitions[-1].begin
        return self.partitions[-1].end

    def next_name(self):
        if self.next_type() == 'extended':
            return None
        separator = ''
        if 'cciss' in self.name or 'loop' in self.name:
            separator = 'p'
        return '%s%s%s' % (self.name, separator, self.next_count())


class Partition(object):
    def __init__(self, name, count, device, begin, end, partition_type,
                 flags=None, guid=None, configdrive=False):
        self.name = name
        self.count = count
        self.device = device
        self.name = name
        self.begin = begin
        self.end = end
        self.type = partition_type
        self.flags = flags or []
        self.guid = guid
        self.configdrive = configdrive

    def set_flag(self, flag):
        if flag not in self.flags:
            self.flags.append(flag)

    def set_guid(self, guid):
        self.guid = guid


class Pv(object):
    def __init__(self, name, metadatasize=16, metadatacopies=2):
        self.name = name
        self.metadatasize = metadatasize
        self.metadatacopies = metadatacopies


class Vg(object):
    def __init__(self, name, pvnames=None):
        self.name = name
        self.pvnames = pvnames or []

    def add_pv(self, pvname):
        if pvname not in self.pvnames:
            self.pvnames.append(pvname)


class Lv(object):
    def __init__(self, name, vgname, size):
        self.name = name
        self.vgname = vgname
        self.size = size

    @property
    def device_name(self):
        return '/dev/mapper/%s-%s' % (self.vgname.replace('-', '--'),
                                      self.name.replace('-', '--'))


class Md(object):
    def __init__(self, name, level, devices=None, spares=None):
        self.name = name
        self.level = level
        self.devices = devices or []
        self.spares = spares or []

    def add_device(self, device):
        if device in self.devices or device in self.spares:
            raise errors.MDDeviceDuplicationError(
                'Error while attaching device to md: '
                'device %s is already attached' % device)
        self.devices.append(device)

    def add_spare(self, device):
        if device in self.devices or device in self.spares:
            raise errors.MDDeviceDuplicationError(
                'Error while attaching device to md: '
                'device %s is already attached' % device)
        self.spares.append(device)


class Fs(object):
    def __init__(self, device, mount=None, fs_type=None,
                 fs_options=None, fs_label=None):
        self.device = device
        self.mount = mount
        self.type = fs_type or 'xfs'
        self.options = fs_options or ''
        self.label = fs_label or ''


class PartitionScheme(object):
    def __init__(self):
        self.parteds = []
        self.mds = []
        self.pvs = []
        self.vgs = []
        self.lvs = []
        self.fss = []
        self.kernel_params = ''

    def add_parted(self, **kwargs):
        parted = Parted(**kwargs)
        self.parteds.append(parted)
        return parted

    def add_pv(self, **kwargs):
        pv = Pv(**kwargs)
        self.pvs.append(pv)
        return pv

    def add_vg(self, **kwargs):
        vg = Vg(**kwargs)
        self.vgs.append(vg)
        return vg

    def add_lv(self, **kwargs):
        lv = Lv(**kwargs)
        self.lvs.append(lv)
        return lv

    def add_fs(self, **kwargs):
        fs = Fs(**kwargs)
        self.fss.append(fs)
        return fs

    def add_md(self, **kwargs):
        mdkwargs = {}
        mdkwargs['name'] = kwargs.get('name') or self.md_next_name()
        mdkwargs['level'] = kwargs.get('level') or 'mirror'
        md = Md(**mdkwargs)
        self.mds.append(md)
        return md

    def md_by_name(self, name):
        found = filter(lambda x: x.name == name, self.mds)
        if found:
            return found[0]

    def md_by_mount(self, mount):
        fs = self.fs_by_mount(mount)
        if fs:
            return self.md_by_name(fs.device)

    def md_attach_by_mount(self, device, mount, spare=False, **kwargs):
        md = self.md_by_mount(mount)
        if not md:
            md = self.add_md(**kwargs)
            fskwargs = {}
            fskwargs['device'] = md.name
            fskwargs['mount'] = mount
            fskwargs['fs_type'] = kwargs.pop('fs_type', None)
            fskwargs['fs_options'] = kwargs.pop('fs_options', None)
            fskwargs['fs_label'] = kwargs.pop('fs_label', None)
            self.add_fs(**fskwargs)
        md.add_spare(device) if spare else md.add_device(device)
        return md

    def md_next_name(self):
        count = 0
        while True:
            name = '/dev/md%s' % count
            if name not in [md.name for md in self.mds]:
                return name
            if count >= 127:
                raise errors.MDAlreadyExistsError(
                    'Error while generating md name: '
                    'names from /dev/m
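A small usage sketch for the classes above; device names, sizes, and filesystem choices are illustrative:

# Hypothetical: build a scheme with a boot and a root partition.
scheme = PartitionScheme()
parted = scheme.add_parted(name='/dev/sda', label='msdos')
boot = parted.add_partition(size=200)     # becomes /dev/sda1 (primary)
root = parted.add_partition(size=10000)   # becomes /dev/sda2 (primary)
scheme.add_fs(device=boot.name, mount='/boot', fs_type='ext2')
scheme.add_fs(device=root.name, mount='/', fs_type='ext4')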
tongpa/tgext.pylogservice
tgext/pylogservice/models/__init__.py
Python
mit
623
0.008026
# -*- coding: utf-8 -*-

from sqlalchemy.ext.declarative import declarative_base
from zope.sqlalchemy import ZopeTransactionExtension
from sqlalchemy.orm import scoped_session, sessionmaker

DeclarativeBase = declarative_base()

maker = sessionmaker(autoflush=True, autocommit=False,
                     extension=ZopeTransactionExtension())
DBSession = scoped_session(maker)

metadata = DeclarativeBase.metadata


def init_model(engine1):
    """Call me before using any of the tables or classes in the model."""
    DBSession.configure(bind=engine1)
    metadata.bind = engine1

from .logsurvey import LogSurvey
olgabrani/synnefo
snf-tools/synnefo_tools/burnin/cyclades_common.py
Python
gpl-3.0
30,688
0
# Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""
Utility functions for Cyclades Tests

Cyclades require a lot of helper functions and `common' had grown too much.

"""

import time
import IPy
import base64
import socket
import random
import paramiko
import tempfile
import subprocess

from kamaki.clients import ClientError

from synnefo_tools.burnin.common import BurninTests, MB, GB, QADD, QREMOVE, \
    QDISK, QVM, QRAM, QIP, QCPU, QNET


# pylint: disable=too-many-public-methods
class CycladesTests(BurninTests):
    """Extends the BurninTests class for Cyclades"""
    def _parse_images(self):
        """Find images given to command line"""
        if self.images is None:
            self.info("No --images given. Will use the default %s",
                      "^Debian Base$")
            filters = ["name:^Debian Base$"]
        else:
            filters = self.images
        avail_images = self._find_images(filters)
        self.info("Found %s images to choose from", len(avail_images))
        return avail_images

    def _parse_flavors(self):
        """Find flavors given to command line"""
        flavors = self._get_list_of_flavors(detail=True)
        if self.flavors is None:
            self.info("No --flavors given. Will use all of them")
            avail_flavors = flavors
        else:
            avail_flavors = self._find_flavors(self.flavors, flavors=flavors)
        self.info("Found %s flavors to choose from", len(avail_flavors))
        return avail_flavors

    def _try_until_timeout_expires(self, opmsg, check_fun):
        """Try to perform an action until timeout expires"""
        assert callable(check_fun), "Not a function"

        action_timeout = self.action_timeout
        action_warning = self.action_warning
        if action_warning > action_timeout:
            action_warning = action_timeout

        start_time = int(time.time())
        end_time = start_time + action_warning
        while end_time > time.time():
            try:
                ret_value = check_fun()
                self.info("Operation `%s' finished in %s seconds",
                          opmsg, int(time.time()) - start_time)
                return ret_value
            except Retry:
                time.sleep(self.query_interval)
        self.warning("Operation `%s' is taking too long after %s seconds",
                     opmsg, int(time.time()) - start_time)

        end_time = start_time + action_timeout
        while end_time > time.time():
            try:
                ret_value = check_fun()
                self.info("Operation `%s' finished in %s seconds",
                          opmsg, int(time.time()) - start_time)
                return ret_value
            except Retry:
                time.sleep(self.query_interval)
        self.error("Operation `%s' timed out after %s seconds",
                   opmsg, int(time.time()) - start_time)
        self.fail("time out")

    def _try_once(self, opmsg, check_fun, should_fail=False):
        """Try to perform an action once"""
        assert callable(check_fun), "Not a function"
        ret_value = None
        failed = False
        try:
            ret_value = check_fun()
        except Retry:
            failed = True

        if failed and not should_fail:
            self.error("Operation `%s' failed", opmsg)
        elif not failed and should_fail:
            self.error("Operation `%s' should have failed", opmsg)
        else:
            return ret_value

    def _get_list_of_servers(self, detail=False):
        """Get (detailed) list of servers"""
        if detail:
            self.info("Getting detailed list of servers")
        else:
            self.info("Getting simple list of servers")
        return self.clients.cyclades.list_servers(detail=detail)

    def _get_list_of_networks(self, detail=False):
        """Get (detailed) list of networks"""
        if detail:
            self.info("Getting detailed list of networks")
        else:
            self.info("Getting simple list of networks")
        return self.clients.network.list_networks(detail=detail)

    def _get_server_details(self, server, quiet=False):
        """Get details for a server"""
        if not quiet:
            self.info("Getting details for server %s with id %s",
                      server['name'], server['id'])
        return self.clients.cyclades.get_server_details(server['id'])

    # pylint: disable=too-many-arguments
    def _create_server(self, image, flavor, personality=None,
                       network=False, project_id=None):
        """Create a new server"""
        if network:
            fip = self._create_floating_ip(project_id=project_id)
            port = self._create_port(fip['floating_network_id'],
                                     floating_ip=fip)
            networks = [{'port': port['id']}]
        else:
            networks = None

        name = image.get('name', image.get('display_name', ''))
        servername = "%s for %s" % (self.run_id, name)
        self.info("Creating a server with name %s", servername)
        self.info("Using image %s with id %s", name, image['id'])
        self.info("Using flavor %s with id %s", flavor['name'], flavor['id'])
        server = self.clients.cyclades.create_server(
            servername, flavor['id'], image['id'],
            personality=personality, networks=networks,
            project_id=project_id)

        self.info("Server id: %s", server['id'])
        self.info("Server password: %s", server['adminPass'])

        self.assertEqual(server['name'], servername)
        self.assertEqual(server['flavor']['id'], flavor['id'])
        self.assertEqual(server['image']['id'], image['id'])
        self.assertEqual(server['status'], "BUILD")
        if project_id is None:
            project_id = self._get_uuid()
        self.assertEqual(server['tenant_id'], project_id)

        # Verify quotas
        changes = \
            {project_id:
                [(QDISK, QADD, flavor['disk'], GB),
                 (QVM, QADD, 1, None),
                 (QRAM, QADD, flavor['ram'], MB),
                 (QCPU, QADD, flavor['vcpus'], None)]}
        self._check_quotas(changes)

        return server

    def _delete_servers(self, servers, error=False):
        """Deleting a number of servers in parallel"""
        # Disconnect floating IPs
        if not error:
            # If there is the possibility for the machine to be in
            # ERROR state we cannot delete its ports.
            for srv in servers:
                self.info(
                    "Disconnecting all floating IPs from server with id %s",
                    srv['id'])
                self._disconnect_from_network(srv)

        # Delete servers
        for srv in servers:
            self.info("Sending the delete request for server with id %s",
                      srv['id'])
            self.clients.cyclades.delete_server(srv['id'])

        if error:
            curr_states = ["ACTIVE", "ERROR", "STOPPED", "BUILD"]
        else:
            curr_states = ["ACTIVE"]
        for srv in servers:
            self._insist_on_server_transition(srv, curr_states, "DELETED")

        # Servers no longer in server list
        new_servers = [s['id'] for s in self._get_list_of_servers()]
        for srv in servers:
            self.info("Verifying that server with id %s is no longer in "
                      "server list", srv['id'])
            self.assertNotIn(srv['id'], new_servers)

        # Verify quotas
        self._verify_quotas_
ingted/crmsh
modules/ui_history.py
Python
gpl-2.0
23,658
0.001395
# Copyright (C) 2008-2011 Dejan Muhamedagic <[email protected]>
# Copyright (C) 2013 Kristoffer Gronlund <[email protected]>
# See COPYING for license information.

import os
import sys
import time
import re
import bz2
from . import config
from . import command
from . import completers as compl
from . import utils
from . import ui_utils
from . import userdir
from . import xmlutil
from . import constants
from . import options
from .cibconfig import mkset_obj, cib_factory
from .msg import common_err, common_debug, common_info
from .msg import syntax_err
from . import history
from . import cmd_status


ptest_options = ["@v+", "nograph", "scores", "actions", "utilization"]


@utils.memoize
def crm_report():
    return history.Report()


class History(command.UI):
    '''
    The history class
    '''
    name = "history"

    def __init__(self):
        command.UI.__init__(self)
        self.current_session = None
        self._source_inited = False

    def _init_source(self):
        if self._source_inited:
            return True
        self._source_inited = True
        return self._set_source(options.history)

    def _set_period(self, from_time='', to_time=''):
        '''
        parse time specs and set period
        '''
        from_dt = to_dt = None
        if from_time:
            from_dt = utils.parse_time(from_time)
            if not from_dt:
                return False
        if to_time:
            to_dt = utils.parse_time(to_time)
            if not to_dt:
                return False
        if to_dt and from_dt:
            if to_dt < from_dt:
                from_dt, to_dt = to_dt, from_dt
            elif to_dt == from_dt:
                common_err("%s - %s: To and from dates cannot be the same" %
                           (from_time, to_time))
                return False
        return crm_report().set_period(from_dt, to_dt)

    def _check_source(self, src):
        'a (very) quick source check'
        if src == "live":
            return True
        if os.path.isfile(src) or os.path.isdir(src):
            return True
        return False

    def _set_source(self, src, live_from_time=None):
        '''
        Have the last history source survive the History
        and Report instances
        '''
        common_debug("setting source to %s" % src)
        if not self._check_source(src):
            if os.path.exists(crm_report().get_session_dir(src)):
                common_debug("Interpreting %s as session" % src)
                if crm_report().load_state(crm_report().get_session_dir(src)):
                    options.history = crm_report().get_source()
                    crm_report().prepare_source()
                    self.current_session = src
                    return True
            else:
                common_err("source %s doesn't exist" % src)
            return False
        crm_report().set_source(src)
        options.history = src
        self.current_session = None
        to_time = ''
        if src == "live":
            from_time = time.ctime(live_from_time and live_from_time or
                                   (time.time() - 60*60))
        else:
            from_time = ''
        return self._set_period(from_time, to_time)

    @command.skill_level('administrator')
    def do_source(self, context, src=None):
        "usage: source {<dir>|<file>|live}"
        if src is None:
            print "Current source: %s" % (options.history)
            return True
        self._init_source()
        if src != options.history:
            return self._set_source(src)

    @command.skill_level('administrator')
    @command.alias('timeframe')
    def do_limit(self, context, from_time='', to_time=''):
        "usage: limit [<from_time> [<to_time>]]"
        self._init_source()
        if options.history == "live" and not from_time:
            from_time = time.ctime(time.time() - 60*60)
        return self._set_period(from_time, to_time)

    @command.skill_level('administrator')
    def do_refresh(self, context, force=''):
        "usage: refresh"
        self._init_source()
        if options.history != "live":
            common_info("nothing to refresh if source isn't live")
            return False
        if force:
            if force != "force" and force != "--force":
                context.fatal_error("Expected 'force' or '--force' (was '%s')" % (force))
            force = True
        return crm_report().refresh_source(force)

    @command.skill_level('administrator')
    def do_detail(self, context, detail_lvl):
        "usage: detail <detail_level>"
        self._init_source()
        detail_num = utils.convert2ints(detail_lvl)
        if detail_num is None or detail_num not in (0, 1):
            context.fatal_error("Expected '0' or '1' (was '%s')" % (detail_lvl))
        return crm_report().set_detail(detail_lvl)

    @command.skill_level('administrator')
    @command.completers_repeating(compl.call(lambda: crm_report().node_list()))
    def do_setnodes(self, context, *args):
        "usage: setnodes <node> [<node> ...]"
        self._init_source()
        if options.history != "live":
            common_info("setting nodes not necessary for existing reports, proceeding anyway")
        return crm_report().set_nodes(*args)

    @command.skill_level('administrator')
    def do_info(self, context):
        "usage: info"
        self._init_source()
        return crm_report().info()

    @command.skill_level('administrator')
    def do_latest(self, context):
        "usage: latest"
        self._init_source()
        if not utils.wait4dc("transition", not options.batch):
            return False
        self._set_source("live")
        crm_report().refresh_source()
        f = self._get_pe_byidx(-1)
        if not f:
            return False
        crm_report().show_transition_log(f)

    @command.skill_level('administrator')
    @command.completers_repeating(compl.call(lambda: crm_report().rsc_list()))
    def do_resource(self, context, *args):
        "usage: resource <rsc> [<rsc> ...]"
        self._init_source()
        return crm_report().resource(*args)

    @command.skill_level('administrator')
    @command.wait
    @command.completers_repeating(compl.call(lambda: crm_report().node_list()))
    def do_node(self, context, *args):
        "usage: node <node> [<node> ...]"
        self._init_source()
        return crm_report().node(*args)

    @command.skill_level('administrator')
    @command.completers_repeating(compl.call(lambda: crm_report().node_list()))
    def do_log(self, context, *args):
        "usage: log [<node> ...]"
        self._init_source()
        return crm_report().log(*args)

    def ptest(self, nograph, scores, utilization, actions, verbosity):
        'Send a decompressed self.pe_file to ptest'
        try:
            s = bz2.decompress(open(self.pe_file).read())
        except IOError, msg:
            common_err("open: %s" % msg)
            return False
        return utils.run_ptest(s, nograph, scores, utilization,
                               actions, verbosity)

    @command.skill_level('administrator')
    def do_events(self, context):
        "usage: events"
        self._init_source()
        return crm_report().events()

    @command.skill_level('administrator')
    @command.completers_repeating(compl.join(compl.call(lambda: crm_report().peinputs_list()),
                                             compl.choice(['v'])))
    def do_peinputs(self, context, *args):
        """usage: peinputs [{<range>|<number>} ...] [v]"""
        self._init_source()
        argl = list(args)
        opt_l = utils.fetch_opts(argl, ["v"])
        if argl:
            l = []
            for s in argl:
                a = utils.convert2ints(s.split(':'))
                if a and len(a) == 2 and not utils.check_range(a):
                    common_err("%s: invalid peinputs range" % a)
                    return False
                l += crm_report().pelist(a, long=("v" in opt_l))
        else:
            l = crm_report().pelist(long=("v" in opt_l))
        if not l:
            return False
        s = '\n'.join(l)
        utils.page_string(s)

    def _get_pe_byname(self, s):
        l = crm_report().find_pe_files(s)
        if len(
ESOedX/edx-platform
lms/djangoapps/experiments/utils.py
Python
agpl-3.0
17,007
0.003763
""" Utilities to facilitate experimentation """ from __future__ import absolute_import import logging from decimal import Decimal import six from django.utils.timezone import now from opaque_keys import InvalidKeyError from opaque_keys.edx.keys import CourseKey from course_modes.models import format_course_price, get_cosmetic_verified_display_price, CourseMode from lms.djangoapps.courseware.access import has_staff_access_to_preview_mode from lms.djangoapps.courseware.date_summary import verified_upgrade_deadline_link, verified_upgrade_link_is_valid from entitlements.models import CourseEntitlement from lms.djangoapps.commerce.utils import EcommerceService from openedx.core.djangoapps.catalog.utils import get_programs from openedx.core.djangoapps.django_comment_common.models import Role from openedx.core.djangoapps.waffle_utils import WaffleFlag, WaffleFlagNamespace from openedx.features.course_duration_limits.access import get_user_course_expiration_date from openedx.features.course_duration_limits.models import CourseDurationLimitConfig from student.models import CourseEnrollment from xmodule.partitions.partitions_service import get_all_partitions_for_course, get_user_partition_groups # Import this for backwards compatibility (so that anyone importing this function from here doesn't break) from .stable_bucketing import stable_bucketing_hash_group # pylint: disable=unused-import logger = logging.getLogger(__name__) # TODO: clean up as part of REVEM-199 (START) experiments_namespace = WaffleFlagNamespace(name=u'experiments') # .. toggle_name: experiments.add_programs # .. toggle_implementation: WaffleFlag # .. toggle_default: False # .. toggle_description: Toggle for adding the current course's program information to user metadata # .. toggle_category: experiments # .. toggle_use_cases: monitored_rollout # .. toggle_creation_date: 2019-2-25 # .. toggle_expiration_date: None # .. toggle_warnings: None # .. toggle_tickets: REVEM-63, REVEM-198 # .. toggle_status: supported PROGRAM_INFO_FLAG = WaffleFlag( waffle_namespace=experiments_namespace, flag_name=u'add_programs', flag_undefined_default=False ) # .. toggle_name: experiments.add_dashboard_info # .. toggle_implementation: WaffleFlag # .. toggle_default: False # .. toggle_description: Toggle for adding info about each course to the dashboard metadata # .. toggle_category: experiments # .. toggle_use_cases: monitored_rollout # .. toggle_creation_date: 2019-3-28 # .. toggle_expiration_date: None # .. toggle_warnings: None # .. toggle_tickets: REVEM-118 # .. toggle_status: supported DASHBOARD_INFO_FLAG = WaffleFlag(experiments_namespace, u'add_dashboard_info', flag_undefined_default=False) # TODO END: clean up as part of REVEM-199 (End) def check_and_get_upgrade_link_and_date(user, enrollment=None, course=None): """ For an authenticated user, return a link to allow them to upgrade in the specified course. Returns the upgrade link and upgrade deadline for a user in a given course given that the user is within the window to upgrade defined by our dynamic pacing feature; otherwise, returns None for both the link and date. """ if enrollment is None and course is None: logger.warn(u'Must specify either an enrollment or a course') return (None, None) if enrollment: if course is None: course = enrollment.course elif enrollment.course_id != course.id: logger.warn(u'{} refers to a different course than {} which was supplied. Enrollment course id={}, ' u'repr={!r}, deprecated={}. Course id={}, repr={!r}, deprecated={}.' 
.format(enrollment, course, enrollment.course_id, enrollment.course_id, enrollment.course_id.deprecated, course.id, course.id, course.id.deprecated ) ) return (None, None) if enrollment.user_id != user.id: logger.warn(u'{} refers to a different user than {} which was supplied. Enrollment user id={}, repr={!r}. ' u'User id={}, repr={!r}.'.format(enrollment, user, enrollment.user_id, enrollment.user_id, user.id, user.id, ) ) return (None, None) if enrollment is None: enrollment = CourseEnrollment.get_enrollment(user, course.id) if user.is_authenticated and verified_upgrade_link_is_valid(enrollment): return ( verified_upgrade_deadline_link(user, course), enrollment.upgrade_deadline ) return (None, None) # TODO: clean up as part of REVEM-
199 (START) def get_program_price_and_skus(courses): """ Get the total program price and purchase skus from these courses in the program """ program_price = 0 skus = [] for course in courses: course_price, course_sku = get_course_entitlement_price_and_sku(course) if course_price is not None and course_sku is not None: program_price = Decimal(program_price) + Decima
l(course_price) skus.append(course_sku) if program_price <= 0: program_price = None skus = None else: program_price = format_course_price(program_price) program_price = six.text_type(program_price) return program_price, skus def get_course_entitlement_price_and_sku(course): """ Get the entitlement price and sku from this course. Try to get them from the first non-expired, verified entitlement that has a price and a sku. If that doesn't work, fall back to the first non-expired, verified course run that has a price and a sku. """ for entitlement in course.get('entitlements', []): if entitlement.get('mode') == 'verified' and entitlement['price'] and entitlement['sku']: expires = entitlement.get('expires') if not expires or expires > now(): return entitlement['price'], entitlement['sku'] course_runs = course.get('course_runs', []) published_course_runs = [run for run in course_runs if run['status'] == 'published'] for published_course_run in published_course_runs: for seat in published_course_run['seats']: if seat.get('type') == 'verified' and seat['price'] and seat['sku']: price = Decimal(seat.get('price')) return price, seat.get('sku') return None, None def get_unenrolled_courses(courses, user_enrollments): """ Given a list of courses and a list of user enrollments, return the courses in which the user is not enrolled. Depending on the enrollments that are passed in, this method can be used to determine the courses in a program in which the user has not yet enrolled or the courses in a program for which the user has not yet purchased a certificate. """ # Get the enrollment course ids here, so we don't need to loop through them for every course run enrollment_course_ids = {enrollment.course_id for enrollment in user_enrollments} unenrolled_courses = [] for course in courses: if not is_enrolled_in_course(course, enrollment_course_ids): unenrolled_courses.append(course) return unenrolled_courses def is_enrolled_in_all_courses(courses, user_enrollments): """ Determine if the user is enrolled in all of the courses """ # Get the enrollment course ids here, so we don't need to loop through them for every course run enrollment_course_ids = {enrollment.course_id f
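An illustrative input for get_program_price_and_skus(); the field names mirror the entitlement structure the function reads, while every value is made up:

# Hypothetical course dicts with one verified, non-expiring entitlement each.
courses = [
    {'entitlements': [{'mode': 'verified', 'price': '49.00',
                       'sku': 'SKU-AAA', 'expires': None}]},
    {'entitlements': [{'mode': 'verified', 'price': '99.00',
                       'sku': 'SKU-BBB', 'expires': None}]},
]
# get_program_price_and_skus(courses) would sum the Decimal prices to
# 148.00, format the total via format_course_price(), and return
# (formatted_price, ['SKU-AAA', 'SKU-BBB']).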
julfla/plugin.video.freplay
resources/lib/channels/arte.py
Python
gpl-2.0
5,259
0.034417
#-*- coding: utf-8 -*-
import urllib2
import json
import CommonFunctions
common = CommonFunctions
from xml.dom import minidom
from resources.lib import utils
from resources.lib import globalvar

title=['ARTE']
img=['arte']
readyForUse=True

def fix_text(text):
    return text.replace('&amp;','&').encode('utf-8').replace('&#039;',' ')

def list_shows(channel,folder):
    shows=[]
    d=dict()
    filePath=utils.downloadCatalog('http://www.arte.tv/papi/tvguide-flow/sitemap/feeds/videos/F.xml','ARTE.XML',False)
    if folder=='none':
        xml = open(filePath).read()
        url=common.parseDOM(xml, "url")
        for i in range(0, len(url)):
            categoryTab=common.parseDOM(url[i], "video:category")
            if len(categoryTab)>0:
                category=fix_text(categoryTab[0])
                if category not in d:
                    shows.append( [channel,category,category,'','folder'] )
                    d[category]=category
    else:
        xml = open(filePath).read()
        url=common.parseDOM(xml, "url")
        for i in range(0, len(url)):
            titleTab=common.parseDOM(url[i], "video:title")
            if len(titleTab)>0:
                title=fix_text(titleTab[0])
                categoryTab=common.parseDOM(url[i], "video:category")
                if globalvar.ADDON.getSetting('arteFull')=='true':
                    videoTag=common.parseDOM(url[i], "video:tag")[0]
                else:
                    videoTag='ARTE+7'
                if len(categoryTab)>0:
                    if(fix_text(categoryTab[0])==folder and title not in d and videoTag=='ARTE+7'):
                        shows.append( [channel,title,title,'','shows'] )
                        d[title]=title
    return shows

def getVideoURL(channel,video_id):
    #Get JSON file
    jsonFile=urllib2.urlopen('http://arte.tv/papi/tvguide/videos/stream/player/F/'+ video_id + '/ALL/ALL.json').read()
    #Parse JSON
    jsoncat = json.loads(jsonFile)
    url=''
    if globalvar.ADDON.getSetting('%sQuality' % (channel))=='HD':
        #HD HTTP
        if 'HTTP_MP4_SQ_1' in jsoncat['videoJsonPlayer']['VSR']:
            url=jsoncat['videoJsonPlayer']['VSR']['HTTP_MP4_SQ_1']['url']
        #HD RTMP
        else:
            url=jsoncat['videoJsonPlayer']['VSR']['RTMP_SQ_1']['streamer'] + jsoncat['videoJsonPlayer']['VSR']['RTMP_SQ_1']['url']
    if globalvar.ADDON.getSetting('%sQuality' % (channel))=='SD' or url=='':
        #SD HTTP
        if 'HLS_SQ_1' in jsoncat['videoJsonPlayer']['VSR']:
            url=jsoncat['videoJsonPlayer']['VSR']['HLS_SQ_1']['url']
        #SD RTMP
        else:
            url=jsoncat['videoJsonPlayer']['VSR']['RTMP_MQ_1']['streamer'] + jsoncat['videoJsonPlayer']['VSR']['RTMP_MQ_1']['url']
    return url

def list_videos(channel,show_title):
    videos=[]
    filePath=utils.downloadCatalog('http://www.arte.tv/papi/tvguide-flow/sitemap/feeds/videos/F.xml','ARTE.XML',False)
    xml = open(filePath).read()
    url=common.parseDOM(xml, "url")
    for i in range(0, len(url)):
        titleTab=common.parseDOM(url[i], "video:title")
        if len(titleTab)>0:
            title=fix_text(titleTab[0])
            if(title==show_title):
                name=''
                image_url=''
                date=''
                duration=''
                views=''
                desc=''
                rating=''
                tmpTab=common.parseDOM(url[i], "video:publication_date")
                if len(tmpTab)>0:
                    date=tmpTab[0][:10]
                tmpTab=common.parseDOM(url[i], "video:duration")
                if len(tmpTab)>0:
                    duration=float(tmpTab[0])/60
                tmpTab=common.parseDOM(url[i], "video:view_count")
                if len(tmpTab)>0:
                    views=tmpTab[0]
                tmpTab=common.parseDOM(url[i], "video:rating")
                if len(tmpTab)>0:
                    rating=tmpTab[0]
                descriptionTab=common.parseDOM(url[i], "video:description")
                if len(descriptionTab)>0:
                    name=fix_text(descriptionTab[0])
                    desc=fix_text(descriptionTab[0])
                tmpTab=common.parseDOM(url[i],"video:player_loc")
                if len(tmpTab)>0:
                    if tmpTab[0]=="1":
                        tmpTab=common.parseDOM(url[i], "video:id")
                        if len(tmpTab)>0:
                            video_id=tmpTab[0][28:28+10] + "_PLUS7-F"
                    else:
                        start=tmpTab[0].find("%2Fplayer%2FF%2F")
                        end=tmpTab[0].find("%2F", start+16)
                        video_id=tmpTab[0][start+16:end]
                if video_id.find("EXTRAIT")>0 :
                    name="Extrait-" + name
                videoTag=common.parseDOM(url[i], "video:tag")[0]
                picTab=common.parseDOM(url[i], "video:thumbnail_loc")
                if len(picTab)>0:
                    image_url=picTab[0]
                infoLabels={ "Title": name,"Plot":desc,"Aired":date,"Duration": duration, "Year":date[:4]}
                if not(globalvar.ADDON.getSetting('arteFull')=='true' and videoTag!='ARTE+7'):
                    videos.append( [channel, video_id, name, image_url,infoLabels,'play'] )
    return videos
tchellomello/home-assistant
tests/components/switcher_kis/test_init.py
Python
apache-2.0
5,602
0.000893
"""Test cases for the switcher_kis component.""" from datetime import timedel
ta from typing import TYPE_CH
ECKING, Any, Generator from pytest import raises from homeassistant.components.switcher_kis import ( CONF_AUTO_OFF, DATA_DEVICE, DOMAIN, SERVICE_SET_AUTO_OFF_NAME, SERVICE_SET_AUTO_OFF_SCHEMA, SIGNAL_SWITCHER_DEVICE_UPDATE, ) from homeassistant.const import CONF_ENTITY_ID from homeassistant.core import Context, callback from homeassistant.exceptions import Unauthorized, UnknownUser from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.typing import HomeAssistantType from homeassistant.setup import async_setup_component from homeassistant.util import dt from .consts import ( DUMMY_AUTO_OFF_SET, DUMMY_DEVICE_ID, DUMMY_DEVICE_NAME, DUMMY_DEVICE_STATE, DUMMY_ELECTRIC_CURRENT, DUMMY_IP_ADDRESS, DUMMY_MAC_ADDRESS, DUMMY_PHONE_ID, DUMMY_POWER_CONSUMPTION, DUMMY_REMAINING_TIME, MANDATORY_CONFIGURATION, SWITCH_ENTITY_ID, ) from tests.common import async_fire_time_changed, async_mock_service if TYPE_CHECKING: from aioswitcher.devices import SwitcherV2Device from tests.common import MockUser async def test_failed_config( hass: HomeAssistantType, mock_failed_bridge: Generator[None, Any, None] ) -> None: """Test failed configuration.""" assert await async_setup_component(hass, DOMAIN, MANDATORY_CONFIGURATION) is False async def test_minimal_config( hass: HomeAssistantType, mock_bridge: Generator[None, Any, None] ) -> None: """Test setup with configuration minimal entries.""" assert await async_setup_component(hass, DOMAIN, MANDATORY_CONFIGURATION) async def test_discovery_data_bucket( hass: HomeAssistantType, mock_bridge: Generator[None, Any, None] ) -> None: """Test the event send with the updated device.""" assert await async_setup_component(hass, DOMAIN, MANDATORY_CONFIGURATION) await hass.async_block_till_done() device = hass.data[DOMAIN].get(DATA_DEVICE) assert device.device_id == DUMMY_DEVICE_ID assert device.ip_addr == DUMMY_IP_ADDRESS assert device.mac_addr == DUMMY_MAC_ADDRESS assert device.name == DUMMY_DEVICE_NAME assert device.state == DUMMY_DEVICE_STATE assert device.remaining_time == DUMMY_REMAINING_TIME assert device.auto_off_set == DUMMY_AUTO_OFF_SET assert device.power_consumption == DUMMY_POWER_CONSUMPTION assert device.electric_current == DUMMY_ELECTRIC_CURRENT assert device.phone_id == DUMMY_PHONE_ID async def test_set_auto_off_service( hass: HomeAssistantType, mock_bridge: Generator[None, Any, None], mock_api: Generator[None, Any, None], hass_owner_user: "MockUser", hass_read_only_user: "MockUser", ) -> None: """Test the set_auto_off service.""" assert await async_setup_component(hass, DOMAIN, MANDATORY_CONFIGURATION) await hass.async_block_till_done() assert hass.services.has_service(DOMAIN, SERVICE_SET_AUTO_OFF_NAME) await hass.services.async_call( DOMAIN, SERVICE_SET_AUTO_OFF_NAME, {CONF_ENTITY_ID: SWITCH_ENTITY_ID, CONF_AUTO_OFF: DUMMY_AUTO_OFF_SET}, blocking=True, context=Context(user_id=hass_owner_user.id), ) with raises(Unauthorized) as unauthorized_read_only_exc: await hass.services.async_call( DOMAIN, SERVICE_SET_AUTO_OFF_NAME, {CONF_ENTITY_ID: SWITCH_ENTITY_ID, CONF_AUTO_OFF: DUMMY_AUTO_OFF_SET}, blocking=True, context=Context(user_id=hass_read_only_user.id), ) assert unauthorized_read_only_exc.type is Unauthorized with raises(Unauthorized) as unauthorized_wrong_entity_exc: await hass.services.async_call( DOMAIN, SERVICE_SET_AUTO_OFF_NAME, { CONF_ENTITY_ID: "light.not_related_entity", CONF_AUTO_OFF: DUMMY_AUTO_OFF_SET, }, blocking=True, context=Context(user_id=hass_owner_user.id), ) assert unauthorized_wrong_entity_exc.type is 
Unauthorized with raises(UnknownUser) as unknown_user_exc: await hass.services.async_call( DOMAIN, SERVICE_SET_AUTO_OFF_NAME, {CONF_ENTITY_ID: SWITCH_ENTITY_ID, CONF_AUTO_OFF: DUMMY_AUTO_OFF_SET}, blocking=True, context=Context(user_id="not_real_user"), ) assert unknown_user_exc.type is UnknownUser service_calls = async_mock_service( hass, DOMAIN, SERVICE_SET_AUTO_OFF_NAME, SERVICE_SET_AUTO_OFF_SCHEMA ) await hass.services.async_call( DOMAIN, SERVICE_SET_AUTO_OFF_NAME, {CONF_ENTITY_ID: SWITCH_ENTITY_ID, CONF_AUTO_OFF: DUMMY_AUTO_OFF_SET}, ) await hass.async_block_till_done() assert len(service_calls) == 1 assert str(service_calls[0].data[CONF_AUTO_OFF]) == DUMMY_AUTO_OFF_SET.lstrip("0") async def test_signal_dispatcher( hass: HomeAssistantType, mock_bridge: Generator[None, Any, None] ) -> None: """Test signal dispatcher dispatching device updates every 4 seconds.""" assert await async_setup_component(hass, DOMAIN, MANDATORY_CONFIGURATION) await hass.async_block_till_done() @callback def verify_update_data(device: "SwitcherV2Device") -> None: """Use as callback for signal dispatcher.""" pass async_dispatcher_connect(hass, SIGNAL_SWITCHER_DEVICE_UPDATE, verify_update_data) async_fire_time_changed(hass, dt.utcnow() + timedelta(seconds=5))
130s/bloom
setup.py
Python
bsd-3-clause
2,778
0.00036
#!/usr/bin/env python

from setuptools import setup, find_packages

setup(
    name='bloom',
    version='0.4.4',
    packages=find_packages(exclude=['test']),
    package_data={
        'bloom.generators.debian': [
            'bloom/generators/debian/templates/*',
            'bloom/generators/debian/templates/source/*'
        ]
    },
    include_package_data=True,
    install_requires=[
        'argparse',
        'catkin-pkg >= 0.1.14',
        'distribute',
        'empy',
        'python-dateutil',
        'PyYAML',
        'rosdep >= 0.10.3',
        'rosdistro >= 0.2.12',
        'vcstools >= 0.1.22',
    ],
    author='Tully Foote, William Woodall',
    author_email='[email protected], [email protected]',
    maintainer='William Woodall',
    maintainer_email='[email protected]',
    url='http://www.ros.org/wiki/bloom',
    download_url='http://pr.willowgarage.com/downloads/bloom/',
    keywords=['ROS'],
    classifiers=[
        'Programming Language :: Python',
        'License :: OSI Approved :: BSD License'
    ],
    description="Bloom is a release automation tool.",
    long_description="""\
Bloom provides tools for releasing software on top of a git repository \
and leverages tools and patterns from git-buildpackage. Additionally, \
bloom leverages meta and build information from catkin \
(https://github.com/ros/catkin) to automate release branching and the \
generation of platform specific source packages, like debian's src-debs.""",
    license='BSD',
    test_suite='test',
    entry_points={
        'console_scripts': [
            'git-bloom-config = bloom.commands.git.config:main',
            'git-bloom-import-upstream = bloom.commands.git.import_upstream:main',
            'git-bloom-branch = bloom.commands.git.branch:main',
            'git-bloom-patch = bloom.commands.git.patch.patch_main:main',
            'git-bloom-generate = bloom.commands.git.generate:main',
            'git-bloom-release = bloom.commands.git.release:main',
            'bloom-export-upstream = bloom.commands.export_upstream:main',
            'bloom-update = bloom.commands.update:main',
            'bloom-release = bloom.commands.release:main',
            'bloom-generate = bloom.commands.generate:main'
        ],
        'bloom.generators': [
            'release = bloom.generators.release:ReleaseGenerator',
            'rosrelease = bloom.generators.rosrelease:RosReleaseGenerator',
            'debian = bloom.generators.debian:DebianGenerator',
            'rosdebian = bloom.generators.rosdebian:RosDebianGenerator'
        ],
        'bloom.generate_cmds': [
            'debian = bloom.generators.debian.generate_cmd:description',
            'rosdebian = bloom.generators.rosdebian:description'
        ]
    }
)
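
The 'bloom.generators' group above is an ordinary setuptools entry-point group, so the generator plugins it declares can be discovered at runtime roughly as below. This is a minimal sketch using generic pkg_resources calls; nothing here beyond the group name comes from bloom itself.

    import pkg_resources

    # Walk the plugin group declared in setup(); each entry point maps a short
    # name to a generator class that is only imported when load() is called.
    for entry_point in pkg_resources.iter_entry_points('bloom.generators'):
        generator_class = entry_point.load()
        print(entry_point.name)  # e.g. 'release', 'rosrelease', 'debian', 'rosdebian'
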
endlessm/chromium-browser
build/apply_locales.py
Python
bsd-3-clause
1,497
0.011356
#!/usr/bin/env python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# TODO: remove this script when GYP has for loops

from __future__ import print_function

import sys
import optparse


def main(argv):
    parser = optparse.OptionParser()
    usage = 'usage: %s [options ...] format_string locale_list'
    parser.set_usage(usage.replace('%s', '%prog'))
    parser.add_option('-d', dest='dash_to_underscore', action="store_true",
                      default=False,
                      help='map "en-US" to "en" and "-" to "_" in locales')

    (options, arglist) = parser.parse_args(argv)

    if len(arglist) < 3:
        print('ERROR: need string and list of locales')
        return 1

    str_template = arglist[1]
    locales = arglist[2:]

    results = []
    for locale in locales:
        # For Cocoa to find the locale at runtime, it needs to use '_' instead
        # of '-' (http://crbug.com/20441).  Also, 'en-US' should be represented
        # simply as 'en' (http://crbug.com/19165, http://crbug.com/25578).
        if options.dash_to_underscore:
            if locale == 'en-US':
                locale = 'en'
            locale = locale.replace('-', '_')
        results.append(str_template.replace('ZZLOCALE', locale))

    # Quote each element so filename spaces don't mess up GYP's attempt to parse
    # it into a list.
    print(' '.join(["'%s'" % x for x in results]))


if __name__ == '__main__':
    sys.exit(main(sys.argv))
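
A quick usage sketch implied by the option parser above; the file name is made up, and note that main() receives the full sys.argv, so the first list element stands in for the script name:

    # Equivalent to: python apply_locales.py -d "chrome_ZZLOCALE.pak" en-US pt-BR
    main(['apply_locales.py', '-d', 'chrome_ZZLOCALE.pak', 'en-US', 'pt-BR'])
    # prints: 'chrome_en.pak' 'chrome_pt_BR.pak'
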
locomatix/locomatix-python
locomatix/cli/__init__.py
Python
apache-2.0
3,074
0.006181
###############################################################################
#
# Copyright 2010 Locomatix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###############################################################################
__all__ = ['create_feed', 'delete_feed', 'list_feeds',
           'create_object', 'delete_object', 'delete_all_objects',
           'list_objects', 'query_objects',
           'update_attributes', 'get_attributes', 'update_location', 'get_location',
           'search_nearby', 'query_search_nearby',
           'search_region', 'query_search_region',
           'create_zone', 'activate_zone', 'get_zone', 'deactivate_zone',
           'delete_zone', 'delete_all_zones', 'list_zones',
           'create_fence', 'activate_fence', 'get_fence', 'deactivate_fence',
           # a comma was missing after 'list_fences' here, which silently
           # concatenated it with 'get_location_history' into one bogus name
           'delete_fence', 'delete_all_fences', 'list_fences',
           'get_location_history', 'query_location_history',
           'get_space_activity', 'query_space_activity',
           'get_histogram', 'query_histogram']

from create_feed import create_feed
from delete_feed import delete_feed
from list_feeds import list_feeds
from create_object import create_object
from delete_object import delete_object
from delete_all_objects import delete_all_objects
from list_objects import list_objects
from query_objects import query_objects
from update_attributes import update_attributes
from get_attributes import get_attributes
from update_location import update_location
from get_location import get_location
from create_zone import create_zone
from activate_zone import activate_zone
from get_zone import get_zone
from delete_zone import delete_zone
from delete_all_zones import delete_all_zones
from deactivate_zone import deactivate_zone
from list_zones import list_zones
from create_fence import create_fence
from activate_fence import activate_fence
from get_fence import get_fence
from deactivate_fence import deactivate_fence
from delete_fence import delete_fence
from delete_all_fences import delete_all_fences
from list_fences import list_fences
from search_region import search_region
from query_search_region import query_search_region
from search_nearby import search_nearby
from query_search_nearby import query_search_nearby
from get_location_history import get_location_history
from query_location_history import query_location_history
from get_space_activity import get_space_activity
from query_space_activity import query_space_activity
from get_histogram import get_histogram
from query_histogram import query_histogram
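
The comma added after 'list_fences' above matters: Python fuses adjacent string literals, so the original __all__ exported one bogus name instead of two, as this minimal illustration shows:

    # Adjacent string literals concatenate, even across a line continuation:
    broken = ['list_fences' 'get_location_history']
    assert broken == ['list_fencesget_location_history']
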
ipanova/pulp_puppet
pulp_puppet_plugins/test/unit/test_install_distributor.py
Python
gpl-2.0
24,167
0.002814
from cStringIO import StringIO import os import tarfile import unittest import tempfile import shutil import errno import mock from pulp.devel.unit.util import touch from pulp.plugins.conduits.repo_publish import RepoPublishConduit from pulp.plugins.config import PluginCallConfiguration from pulp.plugins.model import Repository, AssociatedUnit, PublishReport from pulp_puppet.common import constants from pulp_puppet.plugins.distributors import installdistributor class TestEntryPoint(unittest.TestCase): def test_everything(self): """everything isn't much""" plugin_class, config = installdistributor.entry_point() self.assertTrue(plugin_class is installdistributor.PuppetModuleInstallDistributor) # there is never a global config for this distributor self.assertEqual(config, {}) class TestValidateConfig(unittest.TestCase): def setUp(self): self.distributor = installdistributor.PuppetModuleInstallDistributor() self.repo = Repository('repo1', '', {}) def t
est_not_present(self): config = PluginCallConfiguration({}, {}) result, message = self.distributor.validate_config(self.repo, config, []) self.assertTrue(result) def test_relative_path(self): config = PluginCallConfiguration({}, {constants.CONFIG_INSTALL_PATH: 'a/b/c'}) result, message = self.distributor.validate_config(self.repo, config, [])
self.assertFalse(result) self.assertTrue(len(message) > 0) def test_with_permission(self): config = PluginCallConfiguration({}, {constants.CONFIG_INSTALL_PATH: '/tmp'}) result, message = self.distributor.validate_config(self.repo, config, []) self.assertTrue(result) class TestPublishRepo(unittest.TestCase): def setUp(self): self.distributor = installdistributor.PuppetModuleInstallDistributor() self.working_directory = tempfile.mkdtemp() self.puppet_dir = os.path.join(self.working_directory, 'puppet') os.makedirs(self.puppet_dir) self.repo = Repository('repo1', '', {}) self.conduit = RepoPublishConduit('repo1', self.distributor.metadata()['id']) self.uk1 = {'author': 'puppetlabs', 'name': 'stdlib', 'version': '1.2.0'} self.uk2 = {'author': 'puppetlabs', 'name': 'java', 'version': '1.3.1'} self.units = [ AssociatedUnit(constants.TYPE_PUPPET_MODULE, self.uk1, {}, '/a/b/x', '', '', '', ''), AssociatedUnit(constants.TYPE_PUPPET_MODULE, self.uk2, {}, '/a/b/y', '', '', '', ''), ] self.conduit.get_units = mock.MagicMock(return_value=self.units, spec_set=self.conduit.get_units) def tearDown(self): shutil.rmtree(self.working_directory) @mock.patch.object(installdistributor.PuppetModuleInstallDistributor, '_move_to_destination_directory', return_value=None) @mock.patch.object(installdistributor.PuppetModuleInstallDistributor, '_rename_directory', return_value=None) @mock.patch('tarfile.open', autospec=True) @mock.patch.object(installdistributor.PuppetModuleInstallDistributor, '_clear_destination_directory', return_value=None) @mock.patch.object(installdistributor.PuppetModuleInstallDistributor, '_create_temporary_destination_directory', return_value=None) @mock.patch.object(installdistributor.PuppetModuleInstallDistributor, '_check_for_unsafe_archive_paths', return_value=None) def test_workflow(self, mock_check_paths, mock_mkdir, mock_clear, mock_open, mock_rename, mock_move): config = PluginCallConfiguration({}, {constants.CONFIG_INSTALL_PATH: self.puppet_dir}) mock_open.return_value.getnames.return_value = ['a/b', 'a/c'] report = self.distributor.publish_repo(self.repo, self.conduit, config) self.assertTrue(isinstance(report, PublishReport)) self.assertTrue(report.success_flag) self.assertEqual(len(report.details['errors']), 0) self.assertEqual(len(report.details['success_unit_keys']), 2) self.assertTrue(self.uk1 in report.details['success_unit_keys']) self.assertTrue(self.uk2 in report.details['success_unit_keys']) self.assertEqual(mock_open.call_count, 2) mock_open.assert_any_call(self.units[0].storage_path) mock_open.assert_any_call(self.units[1].storage_path) self.assertEqual(mock_rename.call_count, 2) mock_mkdir.assert_called_once_with(self.puppet_dir) mock_clear.assert_called_once_with(self.puppet_dir) mock_check_paths.assert_called_once_with(self.units, self.puppet_dir) self.assertEqual(mock_move.call_count, 1) def test_no_destination(self): """this one should fail very early since the destination is missing""" config = PluginCallConfiguration({}, {}) report = self.distributor.publish_repo(self.repo, self.conduit, config) self.assertFalse(report.success_flag) self.assertTrue(isinstance(report.summary, basestring)) self.assertEqual(len(report.details['errors']), 0) self.assertEqual(len(report.details['success_unit_keys']), 0) def test_duplicate_unit_names(self): config = PluginCallConfiguration({}, {constants.CONFIG_INSTALL_PATH: self.puppet_dir}) uk3 = {'author': 'puppetlabs', 'name': 'stdlib', 'version': '1.3.1'} unit3 = AssociatedUnit(constants.TYPE_PUPPET_MODULE, uk3, {}, '/a/b/z', '', 
'', '', '') self.units.append(unit3) report = self.distributor.publish_repo(self.repo, self.conduit, config) self.assertFalse(report.success_flag) self.assertTrue(isinstance(report.summary, basestring)) self.assertEqual(len(report.details['errors']), 2) self.assertTrue(report.summary.find('duplicate') >= 0) @mock.patch.object(installdistributor.PuppetModuleInstallDistributor, '_check_for_unsafe_archive_paths', return_value=None) def test_unsafe_paths(self, mock_check): config = PluginCallConfiguration({}, {constants.CONFIG_INSTALL_PATH: self.puppet_dir}) mock_check.side_effect = self._add_error report = self.distributor.publish_repo(self.repo, self.conduit, config) self.assertFalse(report.success_flag) self.assertTrue(isinstance(report.summary, basestring)) self.assertTrue(len(report.summary) > 0) self.assertEqual(len(report.details['errors']), 1) self.assertEqual(len(report.details['success_unit_keys']), 0) @mock.patch.object(installdistributor.PuppetModuleInstallDistributor, '_check_for_unsafe_archive_paths', return_value=None) @mock.patch.object(installdistributor.PuppetModuleInstallDistributor, '_clear_destination_directory', side_effect=OSError) def test_cannot_remove_destination(self, mock_clear, mock_check): config = PluginCallConfiguration({}, {constants.CONFIG_INSTALL_PATH: self.puppet_dir}) report = self.distributor.publish_repo(self.repo, self.conduit, config) self.assertFalse(report.success_flag) self.assertTrue(isinstance(report.summary, basestring)) self.assertEqual(len(report.details['errors']), 2) self.assertEqual(len(report.details['success_unit_keys']), 0) @mock.patch.object(installdistributor.PuppetModuleInstallDistributor, '_check_for_unsafe_archive_paths', return_value=None) @mock.patch.object(installdistributor.PuppetModuleInstallDistributor, '_clear_destination_directory', return_value=None) def test_cannot_open_tarballs(self, mock_clear, mock_check): """ This is easy to simulate, because we can let the real tarfile module try to open the fake paths. """ config = PluginCallConfiguration({}, {c
remybaranx/qtaste
tools/jython/lib/Lib/repr.py
Python
gpl-3.0
3,151
0.009838
"""Redo the `...` (representation) but with limits on most sizes.""" __all__ = ["Repr","repr"] class Repr: def __init__(self): self.maxlevel = 6 self.maxtuple = 6 self.maxlist = 6 self.maxdict = 4 self.maxstring = 30 self.maxlong = 40 self.maxother = 20 def repr(self, x): return self.repr1(x, self.maxlevel) def repr1(self, x, level): typename = type(x).__name__ if ' ' in typename: parts = typename.split() typename = '_'.join(parts) if hasattr(self, 'repr_' + typename): return getattr(self, 'repr_' + typename)(x, level) else: s = `x` if len(s) > self.maxother: i = max(0, (self.maxother-3)//2) j = max(0, self.maxother-3-i) s = s[:i] + '...' + s[len(s)-j:] return s def repr_tuple(self, x, level): n = len(x) if n == 0: return '()' if level <= 0: return '(...)' s = '' for i in range(min(n, self.maxtuple)):
if s: s = s + ', ' s = s + self.repr1(x[i], level-1) if n > self.maxtuple: s = s + ', ...' elif n == 1: s = s + ',' return '(' + s + ')' def repr_list(self, x, level): n = len(x) if n == 0: return '[]' if level <=
0: return '[...]' s = '' for i in range(min(n, self.maxlist)): if s: s = s + ', ' s = s + self.repr1(x[i], level-1) if n > self.maxlist: s = s + ', ...' return '[' + s + ']' def repr_dict(self, x, level): n = len(x) if n == 0: return '{}' if level <= 0: return '{...}' s = '' keys = x.keys() keys.sort() for i in range(min(n, self.maxdict)): if s: s = s + ', ' key = keys[i] s = s + self.repr1(key, level-1) s = s + ': ' + self.repr1(x[key], level-1) if n > self.maxdict: s = s + ', ...' return '{' + s + '}' def repr_str(self, x, level): s = `x[:self.maxstring]` if len(s) > self.maxstring: i = max(0, (self.maxstring-3)//2) j = max(0, self.maxstring-3-i) s = `x[:i] + x[len(x)-j:]` s = s[:i] + '...' + s[len(s)-j:] return s def repr_long(self, x, level): s = `x` # XXX Hope this isn't too slow... if len(s) > self.maxlong: i = max(0, (self.maxlong-3)//2) j = max(0, self.maxlong-3-i) s = s[:i] + '...' + s[len(s)-j:] return s def repr_instance(self, x, level): try: s = `x` # Bugs in x.__repr__() can cause arbitrary # exceptions -- then make up something except: return '<' + x.__class__.__name__ + ' instance at ' + \ hex(id(x))[2:] + '>' if len(s) > self.maxstring: i = max(0, (self.maxstring-3)//2) j = max(0, self.maxstring-3-i) s = s[:i] + '...' + s[len(s)-j:] return s aRepr = Repr() repr = aRepr.repr
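
A short usage sketch of the module above; the truncated outputs follow directly from the maxlist/maxdict defaults set in __init__ (Python 2 syntax, to match the module):

    print aRepr.repr(range(100))
    # '[0, 1, 2, 3, 4, 5, ...]'
    print aRepr.repr(dict((i, i) for i in range(10)))
    # '{0: 0, 1: 1, 2: 2, 3: 3, ...}'
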
marduk191/plugin.video.movie25
resources/libs/sports/skysports.py
Python
gpl-3.0
35,627
0.028012
import urllib,urllib2,re,cookielib,string,os,sys import xbmc, xbmcgui, xbmcaddon, xbmcplugin from resources.libs import main #Mash Up - by Mash2k3 2012. from t0mm0.common.addon import Addon from resources.universal import playbackengine, watchhistory addon_id = 'plugin.video.movie25' selfAddon = xbmcaddon.Addon(id=addon_id) addon = Addon('plugin.video.movie25', sys.argv) art = main.art wh = watchhistory.WatchHistory('plugin.video.movie25') pyamfpath = xbmc.translatePath(os.path.join('special://home/addons', 'script.module.pyamf')) try: if not os.path.exists(pyamfpath): url = 'https://github.com/mash2k3/MashUpFixes/raw/master/FIXES/script.module.pyamf.zip' path = xbmc.translatePath(os.path.join('special://home/addons','packages')) lib=os.path.join(path, 'script.module.pyamf.zip') if main.downloadFile(url,lib): addonfolder = xbmc.translatePath(os.path.join('special://home/addons','')) xbmc.executebuiltin("XBMC.Extract(%s,%s)"%(lib,addonfolder)) except: pass def SKYSPORTS(): main.addDir('All Videos','http://www1.skysports.com/watch/more/5/27452/200/1',173,art+'/skysports.png') main.addDir('Sports','http://www1.skysports.com/watch/tv-shows',178,art+'/skysports.png') main.addDir('TV Shows','http://www1.skysports.com/watch/tv-shows',175,art+'/skysports.png') def SKYSPORTSCAT(): main.addDir('Sports [COLOR red]All Videos[/COLOR]','http://www1.skysports.com/watch/more/5/28461/200/1',173,art+'/skysports.png') main.addDir('Football','football',179,art+'/skysports.png') main.addDir('Formula 1','formula-1',179,art+'/skysports.png') main.addDir('Cricket','http://www1.skysports.com//watch/video/sports/cricket',176,art+'/skysports.png') main.addDir('Rugby Union','rugby-union',179,art+'/skysports.png') main.addDir('Rugby League','http://www1.skysports.com//watch/video/sports/rugby-league',176,art+'/skysports.png') main.addDir('Golf','http://www1.skysports.com//watch/video/sports/golf',176,art+'/skysports.png') main.addDir('Tennis','http://www1.skysports.com//watch/video/sports/tennis',176,art+'/skysports.png') main.addDir('Boxing','http://www1.skysports.com//watch/video/sports/boxing',176,art+'/skysports.png') main.addDir('NFL','http://www1.skysports.com//watch/video/sports/nfl',176,art+'/skysports.png') main.addDir('Racing','http://www1.skysports.com//watch/video/sports/racing',176,art+'/skysports.png') main.addDir('Darts','http://www1.skysports.com//watch/video/sports/darts',176,art+'/skysports.png') main.addDir('Basketball','http://www1.skysports.com//watch/video/sports/basketball',176,art+'/skysports.png') main.addDir('Cycling','http://www1.skysports.com//watch/video/sports/cycling',176,art+'/skysports.png') main.addDir('Speedway','http://www1.skysports.com//watch/video/sports/speedway',176,art+'/skysports.png') main.addDir('Ice Hockey','http://www1.skysports.com//watch/video/sports/ice-hockey',176,art+'/skysports.png') main.addDir('UFC','http://www1.skysports.com//watch/video/sports/ufc',176,art+'/skysports.png') main.addDir('WWE','http://www1.skysports.com//watch/video/sports/wwe',176,art+'/skysports.png') def SKYSPORTSCAT2(murl): if murl=='football': main.addDir('Football [COLOR red]All Videos[/COLOR]','http://www1.skysports.com/watch/more/5/12606/200/1',173,art+'/skysports.png') main.addDir('Premier League','premier-league',180,art+'/skysports.png') main.addDir('Championship','championship',180,art+'/skysports.png') main.addDir('League One','league-one',180,art+'/skysports.png') main.addDir('League Two','league-two',180,art+'/skysports.png') main.addDir('Scottish 
Football','scottish-football',180,art+'/skysports.png
') main.addDir('Primera Liga','primera-liga',180,art+'/skysports.png')
main.addDir('Champions League','http://www1.skysports.com/watch/video/sports/football/competitions/champions-league',176,art+'/skysports.png') main.addDir('Capital One Cup','http://www1.skysports.com/watch/video/sports/football/competitions/capital-one-cup',176,art+'/skysports.png') if murl=='formula-1': main.addDir('Formula 1 [COLOR red]All Videos[/COLOR]','http://www1.skysports.com/watch/more/5/12870/200/1',173,art+'/skysports.png') main.addDir('Grand Prix','grand-prix',180,art+'/skysports.png') main.addDir('Teams','f1Teams',180,art+'/skysports.png') if murl=='rugby-union': main.addDir('Rugby Union [COLOR red]All Videos[/COLOR]','http://www1.skysports.com/watch/more/5/12610/200/1',173,art+'/skysports.png') main.addDir('Aviva Premiership','http://www1.skysports.com/watch/video/sports/rugby-union/competitions/aviva-premiership',176,art+'/skysports.png') main.addDir('Super Rugby','http://www1.skysports.com/watch/video/sports/rugby-union/competitions/super-rugby',176,art+'/skysports.png') main.addDir('Heineken Cup','http://www1.skysports.com/watch/video/sports/rugby-union/competitions/heineken-cup',176,art+'/skysports.png') def SKYSPORTSTEAMS(murl): if murl=='premier-league': main.addDir('Premier League [COLOR red]All Videos[/COLOR]','http://www1.skysports.com/watch/more/5/16426/100/1',173,art+'/skysports.png') main.addDir('Arsenal','http://www1.skysports.com/watch/video/sports/football/teams/arsenal',176,art+'/skysports.png') main.addDir('Aston Villa','http://www1.skysports.com/watch/video/sports/football/teams/aston-villa',176,art+'/skysports.png') main.addDir('Chelsea','http://www1.skysports.com/watch/video/sports/football/teams/chelsea',176,art+'/skysports.png') main.addDir('Everton','http://www1.skysports.com/watch/video/sports/football/teams/everton',176,art+'/skysports.png') main.addDir('Fulham','http://www1.skysports.com/watch/video/sports/football/teams/fulham',176,art+'/skysports.png') main.addDir('Liverpool','http://www1.skysports.com/watch/video/sports/football/teams/liverpool',176,art+'/skysports.png') main.addDir('Manchester City','http://www1.skysports.com/watch/video/sports/football/teams/manchester-city',176,art+'/skysports.png') main.addDir('Manchester United','http://www1.skysports.com/watch/video/sports/football/teams/manchester-united',176,art+'/skysports.png') main.addDir('Newcastle United','http://www1.skysports.com/watch/video/sports/football/teams/newcastle-united',176,art+'/skysports.png') main.addDir('Norwich City','http://www1.skysports.com/watch/video/sports/football/teams/norwich-city',176,art+'/skysports.png') main.addDir('Queens Park Rangers','http://www1.skysports.com/watch/video/sports/football/teams/queens-park-rangers',176,art+'/skysports.png') main.addDir('Reading','http://www1.skysports.com/watch/video/sports/football/teams/reading',176,art+'/skysports.png') main.addDir('Southampton','http://www1.skysports.com/watch/video/sports/football/teams/southampton',176,art+'/skysports.png') main.addDir('Stoke City','http://www1.skysports.com/watch/video/sports/football/teams/stoke-city',176,art+'/skysports.png') main.addDir('Sunderland','http://www1.skysports.com/watch/video/sports/football/teams/sunderland',176,art+'/skysports.png') main.addDir('Swansea City','http://www1.skysports.com/watch/video/sports/football/teams/swansea-city',176,art+'/skysports.png') main.addDir('Tottenham Hotspur','http://www1.skysports.com/watch/video/sports/football/teams/tottenham-hotspur',176,art+'/skysports.png') main.addDir('West Bromwich 
Albion','http://www1.skysports.com/watch/video/sports/football/teams/west-bromwich-albion',176,art+'/skysports.png') main.addDir('West Ham United','http://www1.skysports.com/wa
aokolnychyi/spark
python/pyspark/sql/context.py
Python
apache-2.0
24,880
0.002854
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from __future__ import print_function import sys import warnings if sys.version >= '3': basestring = unicode = str from pyspark import since from pyspark.rdd import ignore_unicode_prefix from pyspark.sql.session import _monkey_patch_RDD, SparkSession from pyspark.sql.dataframe import DataFrame from pyspark.sql.readwriter import DataFrameReader from pyspark.sql.streaming import DataStreamReader from pyspark.sql.types import IntegerType, Row, StringType from pyspark.sql.utils import install_exception_handler __all__ = ["SQLContext", "HiveContext", "UDFRegistration"] class SQLContext(object): """The entry point for working with structured data (rows and columns) in Spark, in Spark 1.x. As of Spark 2.0, this is replaced by :class:`SparkSession`. However, we are keeping the class here for backward compatibility. A SQLContext can be used create :class:`DataFrame`, register :class:`DataFrame` as tables, execute SQL over tables, cache tables, and read parquet files. :param sparkContext: The :class:`SparkContext` backing this SQLContext. :param sparkSession: The :class:`SparkSession` around which this SQLContext wraps. :param jsqlContext: An optional JVM Scala SQLContext. If set, we do not instantiate a new SQLContext in the JVM, instead we make all calls to this object. """ _instantiatedContext = None @ignore_unicode_prefix def __init__(self, sparkContext, sparkSession=None, jsqlContext=None): """Creates a new SQLContext. >>> from datetime import datetime >>> sqlContext = SQLContext(sc) >>> allTypes = sc.parallelize([Row(i=1, s="string", d=1.0, l=1, ... b=True, list=[1, 2, 3], dict={"s": 0}, row=Row(a=1), ... time=datetime(2014, 8, 1, 14, 1, 5))]) >>> df = allTypes.toDF() >>> df.createOrReplaceTempView("allTypes") >>> sqlContext.sql('select i+1, d+1, not b, list[1], dict["s"], time, row.a ' ... 'from allTypes where b and i > 0').collect() [Row((i + CAST(1 AS BIGINT))=2, (d + CAST(1 AS DOUBLE))=2.0, (NOT b)=False, list[1]=2, \ dict[s]=0, time=datetime.datetime(2014, 8, 1, 14, 1, 5), a=1)] >>> df.rdd.map(lambda x: (x.i, x.s, x.d, x.l, x.b, x.time, x.row.a, x.list)).collect() [(1, u'string', 1.0, 1, True, datetime.datetime(2014, 8, 1, 14, 1, 5), 1, [1, 2, 3])] """ self._sc = sparkContext self._jsc = self._sc._jsc self._jvm = self._sc._jvm if sparkSession is None: sparkSession = SparkSession.builder.getOrCreate() if jsqlContext is None: jsqlContext = sparkSession._jwrapped self.sparkSession = sparkSession self._jsqlContext = jsqlContext _monkey_patch_RDD(self.sparkSession) install_exception_handler() if SQLContext._instantiatedContext is None: SQLContext._instantiatedContext = self @property def _ssql_ctx(self): """Accessor for the JVM Spark SQL context. Subclasses can override this property to provide their own JVM Contexts. 
""" return self._jsqlContext @classmethod @since(1.6) def getOrCreate(cls, sc): """ Get the existing SQLContext or create a new one with given SparkContext. :param sc: SparkContext """ if cls._instantiatedContext is None: jsqlContext = sc._jvm.SQLContext.getOrCreate(sc._jsc.sc()) sparkSession = SparkSession(sc, jsqlContext.sparkSession()) cls(sc, sparkSession, jsqlContext) return cls._instantiatedContext @since(1.6) def newSession(self): """ Returns a new SQLContext as new session, that has separate SQLConf, registered temporary views and UDFs, but shared SparkContext and table cache. """ return self.__class__(self._sc, self.sparkSession.newSession()) @since(1.3) def setConf(self, key, value): """Sets the given Spark SQL configuration property. """ self.sparkSession.conf.set(key, value) @ignore_unicode_prefix @since(1.3) def getConf(self, key, defaultValue=None): """Returns the value of Spark SQL configuration property for the given key. If the key is not set and defaultValue is not None, return defaultValue. If the key is not set and defaultValue is None, return the system default value. >>> sqlContext.getConf("spark.sql.shuffle.partitions") u'200' >>> sqlContext.getConf("spark.sql.shuffle.partitions", u"10") u'10' >>> sqlContext.setConf("spark.sql.shuffle.partitions", u"50") >>> sqlContext.getConf("spark.sql.shuffle.partitions", u"10") u'50' """ return self.sparkSession.conf.get(key, defaultValue) @property @since("1.3.1") def udf(self): """Returns a :class:`UDFRegistration` for UDF registration. :return: :class:`UDFRegistration` """ return UDFRegistration(self) @since(1.4) def range(self, start, end=None, step=1, numPartitions=None): """ Create a :class:`DataFrame` with single :class:`pyspark.sql.types.LongType` column named ``id``, containing elements in a range from ``start`` to ``end`` (exclusive) with
step value ``step``. :param start: the start value :param end: the end value (exclusive) :param step: the incremental step (default: 1) :param numPartitions: the number of partitions of the DataFrame :return: :class:`DataFrame` >>> sqlContext.range(1, 7, 2).collect() [Row(id=1), Row(id=3), Row(id=5)] If only one argument is specified, it will be used as the end value.
>>> sqlContext.range(3).collect() [Row(id=0), Row(id=1), Row(id=2)] """ return self.sparkSession.range(start, end, step, numPartitions) @ignore_unicode_prefix @since(1.2) def registerFunction(self, name, f, returnType=StringType()): """Registers a python function (including lambda function) as a UDF so it can be used in SQL statements. In addition to a name and the function itself, the return type can be optionally specified. When the return type is not given it default to a string and conversion will automatically be done. For any other return type, the produced object must match the specified type. :param name: name of the UDF :param f: python function :param returnType: a :class:`pyspark.sql.types.DataType` object :return: a wrapped :class:`UserDefinedFunction` >>> strlen = sqlContext.registerFunction("stringLengthString", lambda x: len(x)) >>> sqlContext.sql("SELECT stringLengthString('test')").collect() [Row(stringLengthString(test)=u'4')] >>> sqlContext.sql("SELECT 'foo' AS text").select(strlen("text")).collect() [Row(stringLengthString(text)=u'3')] >>> from pyspark.sql.types import IntegerType >>> _ = sqlContext.registerFunction("stringLengthInt", lambda x: len(x), IntegerType()) >>> sqlContext.sql("SELECT stringLengthInt('test')").collect() [Row(stringLengthInt(test)=4)] >>> from pyspark.sql.types import IntegerType >>> _ = sqlContext.u
ryantierney513/capirca
lib/speedway.py
Python
apache-2.0
1,410
0.00922
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Speedway iptables generator.  This is a subclass of Iptables lib."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

__author__ = '[email protected] (Tony Watson)'

from string import Template

from lib import iptables


class Error(Exception):
    pass


class Term(iptables.Term):
    """Generate Iptables policy terms."""
    _PLATFORM = 'speedway'
    _PREJUMP_FORMAT = None
    _POSTJUMP_FORMAT = Template('-A $filter -j $term')


class Speedway(iptables.Iptables):
    """Generates filters and terms from provided policy object."""
    _PLATFORM = 'speedway'
    _DEFAULT_PROTOCOL = 'all'
    SUFFIX = '.ipt'
    _RENDER_PREFIX = '*filter'
    _RENDER_SUFFIX = 'COMMIT'
    _DEFAULTACTION_FORMAT = ':%s %s'
    _TERM = Term
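
For orientation, the _POSTJUMP_FORMAT template above expands as a plain string.Template substitution; the filter and term names below are made up, and the full rendered file shape is determined by the parent iptables generator, not shown here:

    from string import Template

    # Same template as Term._POSTJUMP_FORMAT above:
    line = Template('-A $filter -j $term').substitute(filter='INPUT', term='good-term-1')
    assert line == '-A INPUT -j good-term-1'

The rendered output is then framed by _RENDER_PREFIX ('*filter') and _RENDER_SUFFIX ('COMMIT'), the wrapper iptables-restore expects.
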
queria/my-tempest
tempest/services/compute/xml/aggregates_client.py
Python
apache-2.0
5,059
0
# Copyright 2013 NEC Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in c
ompliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from lxml import etree from tem
pest.common import rest_client from tempest.common import xml_utils from tempest import config from tempest import exceptions CONF = config.CONF class AggregatesClientXML(rest_client.RestClient): TYPE = "xml" def __init__(self, auth_provider): super(AggregatesClientXML, self).__init__(auth_provider) self.service = CONF.compute.catalog_type def _format_aggregate(self, g): agg = xml_utils.xml_to_json(g) aggregate = {} for key, value in agg.items(): if key == 'hosts': aggregate['hosts'] = [] for k, v in value.items(): aggregate['hosts'].append(v) elif key == 'availability_zone': aggregate[key] = None if value == 'None' else value else: aggregate[key] = value return aggregate def _parse_array(self, node): return [self._format_aggregate(x) for x in node] def list_aggregates(self): """Get aggregate list.""" resp, body = self.get("os-aggregates") aggregates = self._parse_array(etree.fromstring(body)) return resp, aggregates def get_aggregate(self, aggregate_id): """Get details of the given aggregate.""" resp, body = self.get("os-aggregates/%s" % str(aggregate_id)) aggregate = self._format_aggregate(etree.fromstring(body)) return resp, aggregate def create_aggregate(self, name, availability_zone=None): """Creates a new aggregate.""" if availability_zone is not None: post_body = xml_utils.Element("aggregate", name=name, availability_zone=availability_zone) else: post_body = xml_utils.Element("aggregate", name=name) resp, body = self.post('os-aggregates', str(xml_utils.Document(post_body))) aggregate = self._format_aggregate(etree.fromstring(body)) return resp, aggregate def update_aggregate(self, aggregate_id, name, availability_zone=None): """Update a aggregate.""" if availability_zone is not None: put_body = xml_utils.Element("aggregate", name=name, availability_zone=availability_zone) else: put_body = xml_utils.Element("aggregate", name=name) resp, body = self.put('os-aggregates/%s' % str(aggregate_id), str(xml_utils.Document(put_body))) aggregate = self._format_aggregate(etree.fromstring(body)) return resp, aggregate def delete_aggregate(self, aggregate_id): """Deletes the given aggregate.""" return self.delete("os-aggregates/%s" % str(aggregate_id)) def is_resource_deleted(self, id): try: self.get_aggregate(id) except exceptions.NotFound: return True return False def add_host(self, aggregate_id, host): """Adds a host to the given aggregate.""" post_body = xml_utils.Element("add_host", host=host) resp, body = self.post('os-aggregates/%s/action' % aggregate_id, str(xml_utils.Document(post_body))) aggregate = self._format_aggregate(etree.fromstring(body)) return resp, aggregate def remove_host(self, aggregate_id, host): """Removes a host from the given aggregate.""" post_body = xml_utils.Element("remove_host", host=host) resp, body = self.post('os-aggregates/%s/action' % aggregate_id, str(xml_utils.Document(post_body))) aggregate = self._format_aggregate(etree.fromstring(body)) return resp, aggregate def set_metadata(self, aggregate_id, meta): """Replaces the aggregate's existing metadata with new metadata.""" post_body = xml_utils.Element("set_metadata") metadata = xml_utils.Element("metadata") post_body.append(metadata) for k, v in meta.items(): meta = xml_utils.Element(k) meta.append(xml_utils.Text(v)) metadata.append(meta) resp, body = self.post('os-aggregates/%s/action' % aggregate_id, str(xml_utils.Document(post_body))) aggregate = self._format_aggregate(etree.fromstring(body)) return resp, aggregate
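
A hedged usage sketch of the aggregates client above: the auth_provider is an assumed fixture, and the 'id' key is assumed to be present in the parsed aggregate body; the method names and (resp, body) return shape come straight from the class.

    client = AggregatesClientXML(auth_provider)  # auth_provider: assumed fixture
    resp, aggregate = client.create_aggregate('agg-1', availability_zone='nova')
    resp, aggregate = client.add_host(aggregate['id'], 'compute-1')
    resp, aggregates = client.list_aggregates()
    resp, _ = client.delete_aggregate(aggregate['id'])
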
sssllliang/edx-analytics-pipeline
edx/analytics/tasks/util/vertica_target.py
Python
agpl-3.0
5,187
0.003085
"""luigi target for writing data into an HP Vertica database""" import logging import luigi logger = logging.getLogger('luigi-interface') # pylint: disable-msg=C0103 try: import vertica_python except ImportError: logger.warning("Attempted to load Vertica interface tools without the vertica_python package; will crash if \ Vertica functionality is used.") class VerticaTarget(luigi.Target): """ Target for a resource in HP Vertica """ marker_table = 'table_updates' def __init__(self, host, user, password, schema, table, update_id): """ Initializes a VerticaTarget instance. :param host: Vertica server address. Possibly a host:port string. :type host: str :param user: database user. :type user: str :param password: password for the specified user. :type password: str :param schema: the schema being written to. :type schema: str :param table: the table within schema being written to. :type table: str :param update_id: an identifier for this data set. :type update_id: str """ if ':' in host: self.host, self.port = host.split(':') self.port = int(self.port) else: self.host = host self.port = 5433 self.user = user self.password = password self.schema = schema self.table = table self.update_id = update_id # Default to using the schema data is being inserted into as the schema for the marker table. self.marker_schema = schema def touch(self, connection=None): """ Mark this update as complete. IMPORTANT, If the marker table doesn't exist, the connection transaction will be aborted and the connection reset. Then the marker table will be created. """ self.create_marker_table() if connection is None: connection = self.connect() connection.autocommit = True # if connection created here, we commit it here connection.cursor().execute( """INSERT INTO {marker_schema}.{marker_table} (update_id, target_table) VALUES (%s, %s)""".format(marker_
schema=self.marker_schema, marker_table=self.marker_table), (self.update_id, "{schema}.{table}".format(schema=self.schema, table=self.table)) ) # make sure update is properly marked assert self.exists(connection) def exists(self, connection=None): #
pylint: disable-msg=W0221 if connection is None: connection = self.connect() connection.autocommit = True cursor = connection.cursor() try: cursor.execute("""SELECT 1 FROM {marker_schema}.{marker_table} WHERE update_id = %s LIMIT 1""".format(marker_schema=self.marker_schema, marker_table=self.marker_table), (self.update_id,) ) row = cursor.fetchone() except vertica_python.errors.Error as err: if (type(err) is vertica_python.errors.MissingRelation) or ('Sqlstate: 42V01' in err.args[0]): # If so, then our query error failed because the table doesn't exist. row = None else: raise return row is not None def connect(self, autocommit=False): """ Creates a connection to a Vertica database using the supplied credentials. :param autocommit: whether the connection should automatically commit. :type autocmommit: bool """ # vertica-python 0.5.0 changes the code for connecting to databases to use kwargs instead of a dictionary. # The 'database' parameter is included for DBAPI reasons and does not actually affect the session. connection = vertica_python.connect(user=self.user, password=self.password, host=self.host, port=self.port, database="", autocommit=autocommit) return connection def create_marker_table(self): """ Create marker table if it doesn't exist. Using a separate connection since the transaction might have to be reset. """ connection = self.connect(autocommit=True) cursor = connection.cursor() try: cursor.execute( """ CREATE TABLE {marker_schema}.{marker_table} ( id AUTO_INCREMENT, update_id VARCHAR(4096) NOT NULL, target_table VARCHAR(128), inserted TIMESTAMP DEFAULT NOW(), PRIMARY KEY (update_id, id) ) """.format(marker_schema=self.marker_schema, marker_table=self.marker_table) ) except vertica_python.errors.QueryError as err: if 'Sqlstate: 42710' in err.args[0]: # This Sqlstate will appear if the marker table already exists. pass else: raise connection.close()
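
A minimal sketch of the marker-table workflow this target implements; host and credentials are placeholders, and the exists()/touch() pairing mirrors how luigi targets are normally consumed:

    target = VerticaTarget('vertica.example.com:5433', 'etl_user', 'secret',
                           'analytics', 'fact_table',
                           update_id='fact_table-2014-01-01')
    if not target.exists():
        # ... load the data into analytics.fact_table here ...
        target.touch()  # records update_id in analytics.table_updates
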
blacksph3re/alastair
alastair_cookie/forms.py
Python
gpl-2.0
1,349
0.029652
from django import forms
from django.contrib.auth.forms import AuthenticationForm, PasswordChangeForm

from crispy_forms.bootstrap import FormActions, AppendedText, StrictButton, InlineField
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Submit, Button, Field, Hidden, HTML, Div


class MyLoginForm(AuthenticationForm):
    def __init__(self, *args, **kwargs):
        super(MyLoginForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_class = 'form-horizontal'
        self.helper.form_method = 'post'
        self.helper.form_action = ''
        self.helper.label_class = 'col-lg-3'
        self.helper.field_class = 'col-lg-6'
        self.helper.layout = Layout(
            'username',
            Field('password'),
            FormActions(Submit('login', 'Login', css_class='btn btn_success')),
        )


class MyPasswordChangeForm(PasswordChangeForm):
    def __init__(self, *args, **kwargs):
        super(MyPasswordChangeForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_class = 'form-horizontal'
        self.helper.form_method = 'post'
        self.helper.form_action = ''
        self.helper.label_class = 'col-lg-3'
        self.helper.field_class = 'col-lg-6'
        self.helper.layout = Layout(
            'old_password',
            'new_password1',
            'new_password2',
            FormActions(Submit('save', 'Save', css_class='btn btn_success')),
        )
hjwp/cookiecutter-example-project
config/wsgi.py
Python
mit
1,632
0
""" WSGI config for PythonAnywhere test project. This module contains the WSGI application used by Django's development server and any production WSGI deployments. It should expose a module-level variable named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover this application via the ``WSGI_APPLICATION`` setting. Usually you will have the standard Django WSGI application here, but it also might make sense to replace the whole Django WSGI application with a custom one that later delegates to the Django one. For example, you could introduce WSGI middleware here, or combine a Django application with an application of another framework. """ import os from django.core.wsgi import get_wsgi_application from whitenoise.django import DjangoWhiteNoise # We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks # if running multiple sites in the same mod_wsgi process. To fix this, use # mod_wsgi daemon mode with each site in its own daemon process, or use # os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production" os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production") # This application object is used by any WSGI server configured to use this #
file. This includes Django's development server, if the WSGI_APPLICATION # setting points h
ere. application = get_wsgi_application() # Use Whitenoise to serve static files # See: https://whitenoise.readthedocs.org/ application = DjangoWhiteNoise(application) # Apply WSGI middleware here. # from helloworld.wsgi import HelloWorldApplication # application = HelloWorldApplication(application)
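
Since the closing comments gesture at WSGI middleware without showing any, here is a minimal sketch of what "apply WSGI middleware here" could look like; the RequestPathLogger class is illustrative, not part of this project:

    class RequestPathLogger(object):
        """Tiny illustrative WSGI middleware: logs the path, then delegates."""

        def __init__(self, wrapped):
            self.wrapped = wrapped

        def __call__(self, environ, start_response):
            print(environ.get('PATH_INFO'))
            return self.wrapped(environ, start_response)

    # application = RequestPathLogger(application)
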
rosmo/ansible
lib/ansible/modules/network/nxos/nxos_hsrp.py
Python
gpl-3.0
15,408
0.001493
#!/usr/bin/python # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'network'} DOCUMENTATION = r''' --- module: nxos_hsrp extends_documentation_fragment: nxos version_added: "2.2" short_description: Manages HSRP configuration on NX-OS switches. description: - Manages HSRP configuration on NX-OS switches. author: - Jason Edelman (@jedelman8) - Gabriele Gerbino (@GGabriele) notes: - Tested against NXOSv 7.3.(0)D1(1) on VIRL - HSRP feature needs to be enabled first on the system. - SVIs must exist before using this module. - Interface must be a L3 port before using this module. - HSRP cannot be configured on loopback interfaces. - MD5 authentication is only possible with HSRPv2 while it is ignored if HSRPv1 is used instead, while it will not raise any error. Here we allow MD5 authentication only with HSRPv2 in order to enforce better practice. options: group: description: - HSRP group number. required: true interface: description: - Full name of interface that is being managed for HSRP. required: true version: description: - HSRP version. default: 1 choices: ['1','2'] priority: description: - HSRP priority or keyword 'default'. preempt: description: - Enable/Disable preempt. choices: ['enabled', 'disabled'] vip: description: - HSRP virtual IP address or keyword 'default' auth_string: description: - Authentication string. If this needs to be hidden(for md5 type), the string should be 7 followed by the key string. Otherwise, it can be 0 followed by key string or just key string (for backward compatibility). For text type, this should be just be a key string. if this is 'default', authentication is removed. auth_type: description: - Authentication type. choices: ['text','md5'] state: description: - Specify desired state of the resource. choices: ['present','absent'] default: 'present' ''' EXAMPLES = r''' - name: Ensure HSRP is configured with following params on a SVI nxos_hsrp: group: 10 vip: 10.1.1.1 priority: 150 interface: vlan10 preempt: enabled host: 68.170.147.165 - name: Ensure HSRP is configured with following params on a SVI with clear text authentication nxos_hsrp: group: 10 vip: 10.1.1.1 priority: 150 interface: vlan10 preempt: enabled host: 68.170.147.165 auth_type: text auth_string: CISCO - name: Ensure HSRP is configured with md5 authentication and clear authentication string nxos_hsrp: group: 10 vip: 10.1.1.1 priority: 150 interface: vlan10 preempt: enabled host: 68.170.147.165 auth_type: md5 auth_string: "0 1234" - name: Ensure HSRP is configured with md5 authentication and hidden authentication string nxos_hsrp: group: 10 vip: 10.1.1.1 priority: 150 interface: vlan10 preempt: enabled host: 68.170.147.165 auth_type: md5 auth_string: "7 1234" - name: Remove HSRP config for given interface, group, and VIP nxos_hsrp: group: 10 interface: vlan10 vip: 10.1.1.1 host: 68.170.147.165 state: absent ''' RETURN = r''' commands: description: commands sent to the device returned: always type: list sample: ["interface vlan10", "hsrp version 2", "hsrp 30", "ip 10.30.1.1"] ''' from ansible.module_utils.network.nxos.nxos import load_config, run_commands from ansible.module_utils.network.nxos.nxos import get_capabilities, nxos_argument_spec from ansible.module_utils.network.nxos.nxos import get_interface_type from ansible.module_utils.basic import AnsibleModule PARAM_TO_DEFAULT_KEYMAP = { 'vip':
None, 'priority': '100', 'auth_type': 'text', 'auth_string': 'cisco', } def apply_key_map(key_map, table): new_dict = {} for key in table: new_key = key_m
ap.get(key) if new_key: value = table.get(key) if value: new_dict[new_key] = str(value) else: new_dict[new_key] = value return new_dict def get_interface_mode(interface, intf_type, module): command = 'show interface {0} | json'.format(interface) interface = {} mode = 'unknown' try: body = run_commands(module, [command])[0] except IndexError: return None if intf_type in ['ethernet', 'portchannel']: interface_table = body['TABLE_interface']['ROW_interface'] mode = str(interface_table.get('eth_mode', 'layer3')) if mode == 'access' or mode == 'trunk': mode = 'layer2' elif intf_type == 'svi': mode = 'layer3' return mode def get_hsrp_group(group, interface, module): command = 'show hsrp group {0} all | json'.format(group) hsrp = {} hsrp_key = { 'sh_if_index': 'interface', 'sh_group_num': 'group', 'sh_group_version': 'version', 'sh_cfg_prio': 'priority', 'sh_preempt': 'preempt', 'sh_vip': 'vip', 'sh_authentication_type': 'auth_type', 'sh_keystring_attr': 'auth_enc', 'sh_authentication_data': 'auth_string' } try: body = run_commands(module, [command])[0] hsrp_table = body['TABLE_grp_detail']['ROW_grp_detail'] if 'unknown enum:' in str(hsrp_table): hsrp_table = get_hsrp_group_unknown_enum(module, command, hsrp_table) except (AttributeError, IndexError, TypeError, KeyError): return {} if isinstance(hsrp_table, dict): hsrp_table = [hsrp_table] for hsrp_group in hsrp_table: parsed_hsrp = apply_key_map(hsrp_key, hsrp_group) parsed_hsrp['interface'] = parsed_hsrp['interface'].lower() if parsed_hsrp['version'] == 'v1': parsed_hsrp['version'] = '1' elif parsed_hsrp['version'] == 'v2': parsed_hsrp['version'] = '2' if parsed_hsrp['auth_type'] == 'md5': if parsed_hsrp['auth_enc'] == 'hidden': parsed_hsrp['auth_enc'] = '7' else: parsed_hsrp['auth_enc'] = '0' if parsed_hsrp['interface'] == interface: return parsed_hsrp return hsrp def get_hsrp_group_unknown_enum(module, command, hsrp_table): '''Some older NXOS images fail to set the attr values when using structured output and instead set the values to <unknown enum>. This fallback method is a workaround that uses an unstructured (text) request to query the device a second time. 'sh_preempt' is currently the only attr affected. Add checks for other attrs as needed. ''' if 'unknown enum:' in hsrp_table['sh_preempt']: cmd = {'output': 'text', 'command': command.split('|')[0]} out = run_commands(module, cmd)[0] hsrp_table['sh_preempt'] = 'enabled' if ('may preempt' in out) else 'disabled' return hsrp_table def get_commands_remove_hsrp(group, interface): commands = ['interface {0}'.format(interface), 'no hsrp {0}'.format(group)] return commands def get_commands_config_hsrp(delta, interface, args, existing): commands = [] config_args = { 'group': 'hsrp {group}', 'priority': '{priority}', 'preempt': '{preempt}', 'vip': '{vip}' } preempt = delta.get('preempt', None) group = delta.get('group', None) vip = delta.get('vip', None) priority = delta.get('priority', None) if preempt: if preempt == 'enabled': delta['preempt'] = 'preempt' elif preempt == 'disabled': delta['preempt'] = 'no preempt' if priority: if priority == 'default': if existing and existing.get('priority') != PARAM_TO_DEFAULT_KEYMAP.get('priority'): delta['priority'] = 'no priority' else: del(delta['priority']) else: delta['priority'] = 'priority {0}'.format(delta['priority'])
c2corg/v6_api
c2corg_api/scripts/es/es_batch.py
Python
agpl-3.0
1,426
0
from elasticsearch import helpers

from c2corg_api.scripts.migration.batch import Batch

from elasticsearch.helpers import BulkIndexError

import logging
log = logging.getLogger(__name__)


class ElasticBatch(Batch):
    """A batch implementation to do bulk inserts for ElasticSearch.

    Example usage:

        batch = ElasticBatch(client, 1000)
        with batch:
            ...
            batch.add({
                '_op_type': 'index',
                '_index': index_name,
                '_type': SearchDocument._doc_type.name,
                '_id': document_id,
                'title': 'Abc'
            })
    """

    def __init__(self, client, batch_size):
        super(ElasticBatch, self).__init__(client, batch_size)
        self.client = client
        self.actions = []

    def add(self, action):
        self.actions.append(action)
        self.flush_or_not()

    def should_flush(self):
        return len(self.actions) > self.batch_size

    def flush(self):
        if self.actions:
            try:
                helpers.bulk(self.client, self.actions)
            except BulkIndexError:
                # when trying to delete a document that does not exist, an
                # error is raised, and other documents are not inserted
                log.warning(
                    'error sending bulk update to ElasticSearch',
                    exc_info=True)
            self.actions = []
Etxea/gestioneide
gestioneide/apps.py
Python
gpl-3.0
217
0
from importlib import import_module

from django.apps import AppConfig as BaseAppConfig


class AppConfig(BaseAppConfig):

    name = "gestioneide"

    def ready(self):
        import_module("gestioneide.receivers")
nachandr/cfme_tests
cfme/tests/services/test_service_performance.py
Python
gpl-2.0
1,275
0.001569
from timeit import timeit

import pytest

from cfme import test_requirements
from cfme.base.ui import navigate_to
from cfme.services.myservice import MyService
from cfme.tests.test_db_migrate import download_and_migrate_db
from cfme.utils.conf import cfme_data


@pytest.fixture
def appliance_with_performance_db(temp_appliance_extended_db):
    app = temp_appliance_extended_db
    try:
        db_backups = cfme_data['db_backups']
        performance_db = db_backups['performance_510']
    except KeyError as e:
        pytest.skip(f"Couldn't find the performance DB in the cfme_data: {e}")
    download_and_migrate_db(app, performance_db.url)
    yield app


@test_requirements.service
@pytest.mark.meta(automates=[1688937, 1686433])
def test_services_performance(appliance_with_performance_db):
    """
    Polarion:
        assignee: jhenner
        initialEstimate: 1/4h
        casecomponent: Services

    Bugzilla:
        1688937
        1686433
    """
    app = appliance_with_performance_db
    assert 50000 == app.rest_api.collections.services.count

    my_service = MyService(app)

    # Timeit seems to accept callable as well as string of Python code on cPython.
    assert timeit(lambda: navigate_to(my_service, 'All', use_resetter=False), number=1) < 180
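
The inline comment about timeit accepting a callable is easy to verify with the standard library alone:

    from timeit import timeit

    # timeit() accepts a zero-argument callable in place of a code string;
    # number=1 runs it once and returns the elapsed time in seconds.
    elapsed = timeit(lambda: sum(range(1000)), number=1)
    assert elapsed < 1.0
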
clembou/PCWG
pcwg/core/status.py
Python
mit
1,242
0.017713
import pandas as pd


class Status:

    Instance = None

    @classmethod
    def add(cls, message, red = False, verbosity = 1):
        cls.get().add_message(message, red, verbosity)

    @classmethod
    def initialize_status(cls, status_method, verbosity = 1):
        # Note: verbosity must be passed in (and not read directly from
        # preferences) to avoid a circular reference.
        status = cls.get()
        status.status_method = status_method
        status.verbosity = verbosity

    @classmethod
    def get(cls):
        if cls.Instance == None:
            cls.Instance = Status()
        return cls.Instance

    def __init__(self):
        self.verbosity = 1

    def add_message(self, message, red, verbosity):
        if verbosity <= self.verbosity:
            if isinstance(message, pd.DataFrame) or isinstance(message, pd.core.frame.DataFrame):
                text = str(message.head())
            else:
                text = str(message)
            lines = text.split("\n")
            for line in lines:
                self.status_method(line, red)

    def status_method(self, message, red):
        print message
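
A usage sketch of the singleton above; the console_sink function is a stand-in for the GUI or logging hook a real caller would register (Python 2 syntax, to match the module):

    def console_sink(line, red):
        # Stand-in status_method: real callers pass a GUI or logging hook.
        print line

    Status.initialize_status(console_sink, verbosity=2)
    Status.add("Loading dataset", verbosity=1)   # printed (1 <= 2)
    Status.add("Cache details...", verbosity=3)  # suppressed (3 > 2)
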
uclouvain/osis_louvain
assessments/signals/subscribers.py
Python
agpl-3.0
2,029
0.001479
##############################################################################
#
#    OSIS stands for Open Student Information System. It's an application
#    designed to manage the core business of higher education institutions,
#    such as universities, faculties, institutes and professional schools.
#    The core business involves the administration of students, teachers,
#    courses, programs and so on.
#
#    Copyright (C) 2015-2018 Université catholique de Louvain (http://www.uclouvain.be)
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    A copy of this license - GNU General Public License - is available
#    at the root of the source code of this program.  If not,
#    see http://www.gnu.org/licenses/.
#
##############################################################################
from django.dispatch import receiver

from assessments.business import scores_encodings_deadline
from base.signals import publisher


@receiver(publisher.compute_scores_encodings_deadlines)
def compute_scores_encodings_deadlines(sender, **kwargs):
    scores_encodings_deadline.compute_deadline(kwargs['offer_year_calendar'])


@receiver(publisher.compute_student_score_encoding_deadline)
def compute_student_score_encoding_deadline(sender, **kwargs):
    scores_encodings_deadline.compute_deadline_by_student(kwargs['session_exam_deadline'])


@receiver(publisher.compute_all_scores_encodings_deadlines)
def compute_all_scores_encodings_deadlines(sender, **kwargs):
    scores_encodings_deadline.recompute_all_deadlines(kwargs['academic_calendar'])
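
For context, the publisher side of the first receiver would fire roughly as below; the signal and kwarg names are taken from the receiver above, while the surrounding function is a made-up call site:

    from base.signals import publisher

    def on_calendar_saved(offer_year_calendar):
        # The kwarg name must match what the receiver reads from kwargs.
        publisher.compute_scores_encodings_deadlines.send(
            sender=None, offer_year_calendar=offer_year_calendar)
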
billyoverton/demerit-manager
run_server.py
Python
gpl-2.0
33
0
from server import app

app.run()
adviti/melange
thirdparty/google_appengine/google/appengine/ext/ndb/eventloop.py
Python
apache-2.0
7,479
0.009226
"""An event loop. This event loop should handle both asynchronous App Engine RPC objects (specifically urlfetch, memcache and datastore RPC objects) and arbitrary callback functions with an optional time delay. Normally, event loops are singleton objects, though there is no enforcement of this requirement. The API here is inspired by Monocle. """ import collections import logging import os import threading import time from google.appengine.api.apiproxy_rpc import RPC from google.appengine.datastore import datastore_rpc from . import utils logging_debug = utils.logging_debug IDLE = RPC.IDLE RUNNING = RPC.RUNNING FINISHING = RPC.FINISHING class EventLoop(object): """An event loop.""" def __init__(self): """Constructor.""" self.current = collections.deque() # FIFO list of (callback, args, kwds) self.idlers = collections.deque() # Cyclic list of (callback, args, kwds) self.inactive = 0 # How many idlers in a row were no-ops self.queue = [] # Sorted list of (time, callback, args, kwds) self.rpcs = {} # Map of rpc -> (callback, args, kwds) def insort_event_right(self, event, lo=0, hi=None): """Insert event in queue, and keep it sorted assuming queue is sorted. If event is already in queue, insert it to the right of the rightmost event (to keep FIFO order). Optional args lo (default 0) and hi (default len(a)) bound the slice of a to be searched. """ if lo < 0: raise ValueError('lo must be non-negative') if hi is None: hi = len(self.queue) while lo < hi: mid = (lo + hi) // 2 if event[0] < self.queue[mid][0]: hi = mid else: lo = mid + 1 self.queue.insert(lo, event) # TODO: Rename to queue_callback? def queue_call(self, delay, callback, *args, **kwds): """Schedule a function call at a specific time in the future.""" if delay is None: self.current.append((callback, args, kwds)) return if delay < 1e9: when = delay + time.time() else: # Times over a billion seconds are assumed to be absolute. when = delay self.insort_event_right((when, callback, args, kwds)) def queue_rpc(self, rpc, callback=None, *args, **kwds): """Schedule an RPC with an optional callback. The caller must have previously sent the call to the service. The optional callback is called with the remaining arguments. NOTE: If the rpc is a MultiRpc, the callback will be called once for each sub-RPC. TODO: Is this a good idea? """ if rpc is None: return if rpc.state not in (RUNNING, FINISHING): raise RuntimeError('rpc must be sent to service before queueing') if isinstance(rpc, datastore_rpc.MultiRpc): rpcs = rpc.rpcs if len(rpcs) > 1: # Don't call the callback until all sub-rpcs have completed. rpc.__done = False def help_multi_rpc_along(r=rpc, c=callback, a=args, k=kwds): if r.state == FINISHING and not r.__done: r.__done = True c(*a, **k) # TODO: And again, what about exceptions? callback = help_multi_rpc_along args = () kwds = {} else: rpcs = [rpc] for rpc in rpcs: self.rpcs[rpc] = (callback, args, kwds) def add_idle(self, callback, *args, **kwds): """Add an idle callback. An idle callback can return True, False or None. These mean: - None: remove the callback (don't reschedule) - False: the callback did no work; reschedule later - True: the callback did some work; reschedule soon If the callback raises an exception, the traceback is logged and the callback is removed. """ self.idlers.append((callback, args, kwds)) def run_idle(self): """Run one of the idle callbacks. Returns: True if one was called, False if no idle callback was called. 
""" if not self.idlers or self.inactive >= len(self.idlers): return False idler = self.idlers.popleft() callback, args, kwds = idler logging_debug('idler: %s', callback.__name__) res = callback(*args, **kwds) # See add_idle() for the meaning of the callback return value. if res is not None: if res: self.inactive = 0 else: self.inactive += 1 self.idlers.append(idler) else: logging_debug('idler %s removed', callback.__name__) return True def run0(self): """Run one item (a callback or an RPC wait_any). Returns: A time to sleep if something happened (may be 0); None if all queues are empty. """ if self.current:
self.inactive = 0 callback, args, kwds = self.current.popleft() lo
gging_debug('nowevent: %s', callback.__name__) callback(*args, **kwds) return 0 if self.run_idle(): return 0 delay = None if self.queue: delay = self.queue[0][0] - time.time() if delay <= 0: self.inactive = 0 _, callback, args, kwds = self.queue.pop(0) logging_debug('event: %s', callback.__name__) callback(*args, **kwds) # TODO: What if it raises an exception? return 0 if self.rpcs: self.inactive = 0 rpc = datastore_rpc.MultiRpc.wait_any(self.rpcs) if rpc is not None: logging_debug('rpc: %s.%s', rpc.service, rpc.method) # Yes, wait_any() may return None even for a non-empty argument. # But no, it won't ever return an RPC not in its argument. if rpc not in self.rpcs: raise RuntimeError('rpc %r was not given to wait_any as a choice %r' % (rpc, self.rpcs)) callback, args, kwds = self.rpcs[rpc] del self.rpcs[rpc] if callback is not None: callback(*args, **kwds) # TODO: Again, what about exceptions? return 0 return delay def run1(self): """Run one item (a callback or an RPC wait_any) or sleep. Returns: True if something happened; False if all queues are empty. """ delay = self.run0() if delay is None: return False if delay > 0: time.sleep(delay) return True def run(self): """Run until there's nothing left to do.""" # TODO: A way to stop running before the queue is empty. self.inactive = 0 while True: if not self.run1(): break class _State(threading.local): event_loop = None _EVENT_LOOP_KEY = '__EVENT_LOOP__' _state = _State() def get_event_loop(): """Return an EventLoop instance. A new instance is created for each new HTTP request. We determine that we're in a new request by inspecting os.environ, which is reset at the start of each request. Also, each thread gets its own loop. """ # TODO: Make sure this works with the multithreaded Python 2.7 runtime. ev = None if os.getenv(_EVENT_LOOP_KEY): ev = _state.event_loop if ev is None: ev = EventLoop() _state.event_loop = ev os.environ[_EVENT_LOOP_KEY] = '1' return ev def queue_call(*args, **kwds): ev = get_event_loop() ev.queue_call(*args, **kwds) def queue_rpc(rpc, callback=None, *args, **kwds): ev = get_event_loop() ev.queue_rpc(rpc, callback, *args, **kwds) def add_idle(callback, *args, **kwds): ev = get_event_loop() ev.add_idle(callback, *args, **kwds) def run(): ev = get_event_loop() ev.run() def run1(): ev = get_event_loop() return ev.run1() def run0(): ev = get_event_loop() return ev.run0()
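A quick way to see why EventLoop hand-rolls insort_event_right() instead of using bisect.insort_right: events are (time, callback, args, kwds) tuples, and comparing whole tuples would fall through to the callbacks whenever times tie. The standalone sketch below (names are illustrative, not from the module) compares only the timestamp, so same-time events keep FIFO order:

import time

def insort_right_by_time(queue, event):
    # Binary search on event[0] only; ties go to the right (FIFO).
    lo, hi = 0, len(queue)
    while lo < hi:
        mid = (lo + hi) // 2
        if event[0] < queue[mid][0]:
            hi = mid
        else:
            lo = mid + 1
    queue.insert(lo, event)

queue = []
now = time.time()
insort_right_by_time(queue, (now + 1, 'later'))
insort_right_by_time(queue, (now, 'first'))
insort_right_by_time(queue, (now, 'second'))  # same time: lands after 'first'
print([label for _, label in queue])  # ['first', 'second', 'later']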
foundit/Piped
contrib/zmq/piped_zmq/__init__.py
Python
mit
173
0.00578
# See http://www.python.org/dev/peps/pep
-0386/ for versio
n numbering, especially NormalizedVersion from distutils import version version = version.LooseVersion('0.7.1-dev')
renmengye/imageqa-public
src/nn/map.py
Python
mit
4,455
0.004265
from stage import * import os use_gpu = os.environ.get('GNUMPY_USE_GPU', 'yes') == 'yes' if use_gpu: import gnumpy as gpu import gnumpy as gnp class Map(Stage): def __init__(self, outputDim, activeFn, inputNames=None,
initRange=1.0, bias=True, biasInitConst=-1.0, initSeed=2, needInit=True, initWeights=0,
initType='zeroMean', learningRate=0.0, learningRateAnnealConst=0.0, momentum=0.0, deltaMomentum=0.0, weightClip=0.0, gradientClip=0.0, weightRegConst=0.0, outputdEdX=True, defaultValue=0.0, gpu=use_gpu, name=None): Stage.__init__(self, name=name, inputNames=inputNames, outputDim=outputDim, defaultValue=defaultValue, learningRate=learningRate, learningRateAnnealConst=learningRateAnnealConst, momentum=momentum, deltaMomentum=deltaMomentum, weightClip=weightClip, gradientClip=gradientClip, weightRegConst=weightRegConst, gpu=gpu, outputdEdX=outputdEdX) self.bias = bias self.activeFn = activeFn self.inputDim = None self.random = np.random.RandomState(initSeed) if not needInit: if self.gpu: self.W = gnp.as_garray(initWeights) else: self.W = initWeights else: # Lazy initialize the weights until the first data arrives self.W = None self.initRange = initRange self.biasInitConst = biasInitConst self.initType = initType self.X = 0 self.Y = 0 pass def initWeights(self): if self.initType == 'zeroMean': r0 = -self.initRange/2.0 r1 = self.initRange/2.0 elif self.initType == 'positive': r0 = 0.0 r1 = self.initRange else: raise Exception('Unknown initialization type: ' + self.initType) if self.bias: if self.biasInitConst >= 0.0: self.W = np.concatenate((self.random.uniform( r0, r1, (self.inputDim, self.outputDim)), np.ones((1, self.outputDim)) * self.biasInitConst), axis=0) else: self.W = self.random.uniform( r0, r1, (self.inputDim + 1, self.outputDim)) else: self.W = self.random.uniform( -self.initRange/2.0, self.initRange/2.0, (self.inputDim, self.outputDim)) if self.gpu: self.W = gpu.as_garray(self.W.astype('float32')) def forward(self, X): if self.inputDim is None: self.inputDim = X.shape[-1] if self.W is None: self.initWeights() if self.bias: self.X = np.concatenate((X, np.ones((X.shape[0], 1), dtype=X.dtype)), axis=-1) else: self.X = X if self.gpu: self.X = gpu.as_garray(self.X.astype('float32')) Z = gpu.dot(self.X, self.W) Z = Z.as_numpy_array(dtype='float32') self.Y = self.activeFn.forward(Z) else: Z = np.dot(self.X, self.W) self.Y = self.activeFn.forward(Z) return self.Y def backward(self, dEdY): dEdZ = self.activeFn.backward(dEdY, self.Y, 0) if self.gpu: gdEdZ = gpu.as_garray(dEdZ.astype('float32')) self.dEdW = gpu.dot(self.X.transpose(), gdEdZ) if self.bias: dEdX = gpu.dot(gdEdZ, self.W[:-1, :].transpose()) else: dEdX = gpu.dot(gdEdZ, self.W.transpose()) dEdX = gpu.as_numpy_array(dEdX) else: self.dEdW = np.dot(self.X.transpose(), dEdZ) if self.bias: dEdX = np.dot(dEdZ, self.W[:-1, :].transpose()) else: dEdX = np.dot(dEdZ, self.W.transpose()) return dEdX if self.outputdEdX else None
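One detail worth isolating from Map.forward/backward above is the bias-column trick: appending a column of ones to X lets the bias live as the last row of W, which is why backward slices W[:-1] when propagating dEdX. A minimal numpy sketch (the shapes here are made up):

import numpy as np

rng = np.random.RandomState(2)
X = rng.uniform(size=(4, 3))       # batch of 4, inputDim = 3
W = rng.uniform(size=(3 + 1, 2))   # extra row holds the bias, outputDim = 2
Xb = np.concatenate((X, np.ones((X.shape[0], 1), dtype=X.dtype)), axis=-1)
Z = np.dot(Xb, W)                  # affine map in one matmul
assert np.allclose(Z, np.dot(X, W[:-1]) + W[-1])  # same as X.W + b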
SU-ECE-17-7/ibeis
ibeis/algo/hots/match_chips4.py
Python
apache-2.0
15,985
0.001564
# -*- coding: utf-8 -*- """ Runs functions in pipeline to get query results and does some caching. """ from __future__ import absolute_import, division, print_function, unicode_literals import utool as ut import six # NOQA from os.path import exists #from ibeis.algo.hots import query_request #from ibeis.algo.hots import hots_query_result #from ibeis.algo.hots import exceptions as hsexcept from ibeis.algo.hots import chip_match from ibeis.algo.hots import pipeline from ibeis.algo.hots import _pipeline_helpers as plh # NOQA (print, rrr, profile) = ut.inject2(__name__, '[mc4]') # TODO: Move to params USE_HOTSPOTTER_CACHE = pipeline.USE_HOTSPOTTER_CACHE USE_CACHE = not ut.get_argflag(('--nocache-query', '--noqcache')) and USE_HOTSPOTTER_CACHE USE_BIGCACHE = not ut.get_argflag(('--nocache-big', '--no-bigcache-query', '--noqcache', '--nobigcache')) and ut.USE_CACHE SAVE_CACHE = not ut.get_argflag('--nocache-save') #MIN_BIGCACHE_BUNDLE = 20 #MIN_BIGCACHE_BUNDLE = 150 MIN_BIGCACHE_BUNDLE = 64 HOTS_BATCH_SIZE = ut.get_argval('--hots-batch-size', type_=int, default=None) #---------------------- # Main Query Logic #---------------------- def empty_query(ibs, qaids): r""" Hack to give an empty query a query result object Args: ibs (ibeis.IBEISController): ibeis controller object qaids (list): Returns: tuple: (qaid2_cm, qreq_) CommandLine: python -m ibeis.algo.hots.match_chips4 --test-empty_query python -m ibeis.algo.hots.match_chips4 --test-empty_query --show Example: >>> # ENABLE_DOCTEST >>> from ibeis.algo.hots.match_chips4 import * # NOQA >>> import ibeis >>> ibs = ibeis.opendb('testdb1') >>> qaids = ibs.get_valid_aids(species=ibeis.const.TEST_SPECIES.ZEB_PLAIN) >>> # execute function >>> (qaid2_cm, qreq_) = empty_query(ibs, qaids) >>> # verify results >>> result = str((qaid2_cm, qreq_)) >>> print(result) >>> cm = qaid2_cm[qaids[0]] >>> ut.assert_eq(len(cm.get_top_aids()), 0) >>> ut.quit_if_noshow() >>> cm.ishow_top(ibs, update=True, make_figtitle=True, show_query=True, sidebyside=False) >>> from matplotlib import pyplot as plt >>> plt.show() """ daids = [] qreq_ = ibs.new_query_request(qaids, daids) cm = qreq_.make_empty_chip_matches() qaid2_cm = dict(zip(qaids, cm)) return qaid2_cm, qreq_ def submit_query_request_nocache(ibs, qreq_, verbose=pipeline.VERB_PIPELINE): """ deprecated """ assert len(qreq_.qaids) > 0, ' no current query aids' if len(qreq_.daids) == 0: print('[mc4] WARNING no daids... returning empty query') qaid2_cm, qreq_ = empty_query(ibs, qreq_.qaids) return qaid2_cm save_qcache = False qaid2_cm = execute_query2(ibs, qreq_, verbose, save_qcache) return qaid2_cm @profile def submit_query_request(ibs, qaid_list, daid_list, use_cache=None, use_bigcache=None, cfgdict=None, qreq_=None, verbose=None, save_qcache=None, prog_hook=None): """ The standard query interface. TODO: rename use_cache to use_qcache Checks a big cache for qaid2_cm. If cache miss, tries to load each cm individually. On an individual cache miss, it performs the query. 
Args: ibs (ibeis.IBEISController) : ibeis control object qaid_list (list): query annotation ids daid_list (list): database annotation ids use_cache (bool): use_bigcache (bool): Returns: qaid2_cm (dict): dict of QueryResult objects CommandLine: python -m ibeis.algo.hots.match_chips4 --test-submit_query_request Examples: >>> # SLOW_DOCTEST >>> from ibeis.algo.hots.match_chips4 import * # NOQA >>> import ibeis >>> qaid_list = [1] >>> daid_list = [1, 2, 3, 4, 5] >>> use_bigcache = True >>> use_cache = True >>> ibs = ibeis.opendb(db='testdb1') >>> qreq_ = ibs.new_query_request(qaid_list, daid_list, cfgdict={}, verbose=True) >>> qaid2_cm = submit_query_request(ibs, qaid_list, daid_list, use_cache, use_bigcache, qreq_=qreq_) """ # Get flag defaults if necessary if verbose is None: verbose = pipeline.VERB_PIPELINE if use_cache is None: use_cache = USE_CACHE if save_qcache is None: save_qcache = SAVE_CACHE if use_bigcache is None: use_bigcache = USE_BIGCACHE # Create new query request object to store temporary state if verbose: #print('[mc4] --- Submit QueryRequest_ --- ') ut.colorprint('[mc4] --- Submit QueryRequest_ --- ', 'darkyellow') assert qreq_ is not None, 'query request must be prebuilt' qreq_.prog_hook = prog_hook # --- BIG CACHE --- # Do not use bigcache single queries use_bigcache_ = (use_bigcache and use_cache and len(qaid_list) > MIN_BIGCACHE_BUNDLE) if (use_bigcache_ or save_qcache) and len(qaid_list) > MIN_BIGCACHE_BUNDLE: bc_dpath, bc_fname, bc_cfgstr = qreq_.get_bigcache_info() if use_bigcache_: # Try and load directly from a big cache try: qaid2_cm = ut.load_cache(bc_dp
ath, bc_fname, bc_cfgstr) cm_list = [qaid2_cm[qaid] for qaid in qaid_list] except (IOError, AttributeError): pass else: ret
urn cm_list # ------------ # Execute query request qaid2_cm = execute_query_and_save_L1(ibs, qreq_, use_cache, save_qcache, verbose=verbose) # ------------ if save_qcache and len(qaid_list) > MIN_BIGCACHE_BUNDLE: ut.save_cache(bc_dpath, bc_fname, bc_cfgstr, qaid2_cm) cm_list = [qaid2_cm[qaid] for qaid in qaid_list] return cm_list @profile def execute_query_and_save_L1(ibs, qreq_, use_cache, save_qcache, verbose=True, batch_size=None): """ Args: ibs (ibeis.IBEISController): qreq_ (ibeis.QueryRequest): use_cache (bool): Returns: qaid2_cm CommandLine: python -m ibeis.algo.hots.match_chips4 --test-execute_query_and_save_L1:0 python -m ibeis.algo.hots.match_chips4 --test-execute_query_and_save_L1:1 python -m ibeis.algo.hots.match_chips4 --test-execute_query_and_save_L1:2 python -m ibeis.algo.hots.match_chips4 --test-execute_query_and_save_L1:3 Example0: >>> # SLOW_DOCTEST >>> from ibeis.algo.hots.match_chips4 import * # NOQA >>> cfgdict1 = dict(codename='vsmany', sv_on=True) >>> p = 'default' + ut.get_cfg_lbl(cfgdict1) >>> qreq_ = ibeis.main_helpers.testdata_qreq_(p=p, qaid_override=[1, 2, 3, 4]) >>> ibs = qreq_.ibs >>> use_cache, save_qcache, verbose = False, False, True >>> qaid2_cm = execute_query_and_save_L1(ibs, qreq_, use_cache, save_qcache, verbose) >>> print(qaid2_cm) Example1: >>> # SLOW_DOCTEST >>> from ibeis.algo.hots.match_chips4 import * # NOQA >>> cfgdict1 = dict(codename='vsone', sv_on=True) >>> p = 'default' + ut.get_cfg_lbl(cfgdict1) >>> qreq_ = ibeis.main_helpers.testdata_qreq_(p=p, qaid_override=[1, 2, 3, 4]) >>> ibs = qreq_.ibs >>> use_cache, save_qcache, verbose = False, False, True >>> qaid2_cm = execute_query_and_save_L1(ibs, qreq_, use_cache, save_qcache, verbose) >>> print(qaid2_cm) Example2: >>> # SLOW_DOCTEST >>> # TEST SAVE >>> from ibeis.algo.hots.match_chips4 import * # NOQA >>> import ibeis >>> cfgdict1 = dict(codename='vsmany', sv_on=True) >>> p = 'default' + ut.get_cfg_lbl(cfgdict1) >>> qreq_ = ibeis.main_helpers.testdata_qreq_(p=p, qaid_override=[1, 2, 3, 4]) >>> ibs = qreq_.ibs >>> use_cache, save_qcache, verbose = False, True, True >>> qaid2_cm = execute_query_and_save_L1(ibs, qreq_, use_cache, save_qcache, verbose) >>> print(qaid2_cm) Example3: >>> # SLOW_DOCTEST >>> # TEST LOAD >>> from ibei
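The big-cache handling above boils down to a load-or-compute-and-save pattern. Here is a self-contained sketch of that shape; load_cache/save_cache are pickle-based stand-ins that only mirror the (dpath, fname, cfgstr) signatures of ut.load_cache/ut.save_cache used above, and the on-disk layout is invented:

import os
import pickle

def load_cache(dpath, fname, cfgstr):
    with open(os.path.join(dpath, fname + cfgstr + '.pkl'), 'rb') as file_:
        return pickle.load(file_)

def save_cache(dpath, fname, cfgstr, data):
    with open(os.path.join(dpath, fname + cfgstr + '.pkl'), 'wb') as file_:
        pickle.dump(data, file_)

def cached_query(dpath, fname, cfgstr, qaid_list, compute):
    try:
        qaid2_cm = load_cache(dpath, fname, cfgstr)   # big-cache hit
    except IOError:
        qaid2_cm = compute()                          # miss: run the pipeline
        save_cache(dpath, fname, cfgstr, qaid2_cm)    # save for next time
    return [qaid2_cm[qaid] for qaid in qaid_list]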
spketoundi/CamODI
waespk/core/migrations/0023_merge.py
Python
mit
399
0.002506
#
-*- coding: utf-8 -*- # Generated by Django 1.9.2 on 2016-05-12 11:51 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('ossuo', '0021_merge'), ('ossuo', '0022_signupformpage_signupformpagebullet_signupformpagelogo_signupformpagequote_signupformpageresponse'), ] operations = [
]
julianwang/cinder
cinder/volume/drivers/netapp/dataontap/block_base.py
Python
apache-2.0
37,442
0
# Copyright (c) 2012 NetApp, Inc. All rights reserved. # Copyright (c) 2014 Ben Swartzlander. All rights reserved. # Copyright (c) 2014 Navneet Singh. All rights reserved. # Copyright (c) 2014 Clinton Knight. All rights reserved. # Copyright (c) 2014 Alex Meade. All rights reserved. # Copyright (c) 2014 Andrew Kerr. All rights reserved. # Copyright (c) 2014 Jeff Applewhite. All rights reserved. # Copyright (c) 2015 Tom Barron. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Volume driver library for NetApp 7/C-mode block storage systems. """ import math import sys import uuid from oslo_log import log as logging from oslo_utils import excutils from oslo_utils import units import six from cinder import exception from cinder.i18n import _, _LE, _LI, _LW from cinder.volume.drivers.netapp.dataontap.client import api as na_api from cinder.volume.drivers.netapp import options as na_opts from cinder.volume.drivers.netapp import utils as na_utils from cinder.volume import utils as volume_utils from cinder.zonemanager import utils as fczm_utils LOG = logging.getLogger(__name__) class NetAppLun(object): """Represents a LUN on NetApp storage.""" def __init__(self, handle, name, size, metadata_dict): self.handle = handle self.name = name self.size = size self.metadata = metadata_dict or {} def get_metadata_property(self, prop): """Get the metadata property of a LUN.""" if prop in self.metadata: return self.metadata[prop] name = self.name LOG.debug("No metadata property %(prop)s defined for the LUN %(name)s", {'prop': prop, 'name': name}) def __str__(self, *args, **kwargs): return 'NetApp LUN [handle:%s, name:%s, size:%s, metadata:%s]' % ( self.handle, self.name, self.size, self.metadata) class NetAppBlockStorageLibrary(object): """NetApp block storage library for Data ONTAP.""" # do not increment this as it may be used in volume type definitions VERSION = "1.0.0" REQUIRED_FLAGS = ['netapp_login', 'netapp_password', 'netapp_server_hostname'] ALLOWED_LUN_OS_TYPES = ['linux', 'aix', 'hpux', 'image', 'windows', 'windows_2008', 'windows_gpt', 'solaris', 'solaris_efi', 'netware', 'openvms', 'hyper_v'] ALLOWED_IGROUP_HOST_TYPES = ['linux', 'aix', 'hpux', 'windows', 'solaris', 'netware', 'default', 'vmware', 'openvms', 'xen', 'hyper_v'] DEFAULT_LUN_OS = 'linux' DEFAULT_HOST_TYPE = 'linux' def __init__(self, driver_name, driver_protocol, **kwargs): na_utils.validate_instantiation(**kwargs) self.driver_name = driver_name self.driver_protocol = driver_protocol self.zapi_client = None self._stats = {} self.lun_table = {} self.lun_ostype = None self.host_type = None self.lookup_service = fczm_utils.create_lookup_service() self.app_version = kwargs.get("app_version", "unknown") self.configuration = kwargs['configuration'] self.configuration.append_config_values(na_opts.netapp_connection_opts) self.configuration.append_config_values(na_opts.netapp_basicauth_opts) self.configuration.append_config_values(na_opts.netapp_transport_opts) self.configuration.append_config_values( na_opts.netapp_provisioning_opts) 
self.configuration.append_config_values(na_opts.netapp_san_opts) def do_setup(self, context): na_utils.check_flags(self.REQUIRED_FLAGS, self.configuration) self.lun_ostype = (self.configuration.netapp_lun_ostype or self.DEFAULT_LUN_OS) self.host_type = (self.configuration.netapp_host_type or self.DEFAULT_HOST_TYPE) def check_for_setup_error(self): """Check that the driver is working and can communicate. Discovers the LUNs on the NetApp server. """ if self.lun_ostype not in self.ALLOWED_LUN_OS_TYPES: msg = _("Invalid value for NetApp configuration" " option netapp_lun_ostype.") LOG.error(msg) raise exception.NetAppDriverException(msg) if self.host_type not in self.ALLOWED_IGROUP_HOST_TYPES: msg = _("Invalid value for NetApp configuration" " option netapp_host_type.") LOG.error(msg) raise exception.NetAppDriverException(msg) lun_list = self.zapi_client.get_lun_list() self._extract_and_populate_luns(lun_list) LOG.debug("Success getting list of LUNs from server.") def get_pool(self, volume): """Return pool name where volume resides. :param volume: The volume hosted by the driver. :return: Name of the pool where given volume is hosted. """ name = volume['name'] metadata = self._get_lun_attr(name, 'metadata') or dict() return metadata.get('Volume', None) def create_volume(self, volume): """Driver entry point for creating a new volume (Data ONTAP LUN).""" LOG.debug('create_volume on %s', volume['host']) # get Data ONTAP volume name as pool name pool_name = volume_utils.extract_host(volume['host'], level='pool') if pool_name is None: msg = _("Pool is not available in the volume host field.") raise exception.InvalidHost(reason=msg) extra_specs = na_utils.get_volume_extra_specs(volume) lun_name = volume['name'] size = int(volume['size']) * units.Gi metadata = {'OsType': self.lun_ostype, 'SpaceReserved': 'true', 'Path': '/vol/%s/%s' % (pool_name, lun_name)} qos_policy_group_info = self._setup_qos_for_volume(volume, extra_specs) qos_policy_group_name = ( na_utils.get_qos_policy_group_name_from_info( qos_policy_group_info)) try: self._create_lun(pool_name, lun_name, size, metadata, qos_policy_group_name) except Exception: LOG.exception(_LE("Exception creating LUN %(name)s in pool " "%(pool)s."), {'name': l
un_name, 'pool': pool_name}) self._mark_qos_policy_group_for_deletion(qos_policy_group_info) msg = _("Volume %s could not be created.") raise exception.VolumeBackendAPIException(data=msg % ( volume['name'])) LOG.debug('Created LUN with name %(name)s and QoS info %(qos)s', {'name': lun_name, 'qo
s': qos_policy_group_info}) metadata['Path'] = '/vol/%s/%s' % (pool_name, lun_name) metadata['Volume'] = pool_name metadata['Qtree'] = None handle = self._create_lun_handle(metadata) self._add_lun_to_table(NetAppLun(handle, lun_name, size, metadata)) def _setup_qos_for_volume(self, volume, extra_specs): return None def _mark_qos_policy_group_for_deletion(self, qos_policy_group_info): return def delete_volume(self, volume): """Driver entry point for destroying existing volumes.""" name = volume['name'] metadata = self._get_lun_attr(name, 'metadata') if not metadata: LOG.warning(_LW("No entry in LUN table for volume/snapshot" " %(name)s."), {'name': name}) return self.zapi_client.destroy_lun(met
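create_volume() above leans on Cinder's 'host@backend#pool' convention for volume['host']: volume_utils.extract_host(host, level='pool') returns the part after '#'. A standalone stand-in that mirrors that behaviour (assuming, as the code above does, that a missing pool yields None):

def extract_pool(host):
    # 'nodeA@netapp1#aggr1' -> 'aggr1'; no '#' -> None
    if host and '#' in host:
        return host.split('#', 1)[1]
    return None

print(extract_pool('nodeA@netapp1#aggr1'))  # aggr1
print(extract_pool('nodeA@netapp1'))        # None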
itsthejoker/Pokemon-Homage
lemonyellow/core/__init__.py
Python
mit
1,110
0
from datetime import datetime # pokemon lottery global_lotto_last_run = datetime(1969, 12, 31, 23, 59, 59, 999999) lotto_new_run = None print_speed_base = 0.03 # delay between printed characters """ Graphics Notes: The screen is 15x11 squares in dimension. Each square is 16x16 pixels. Total screen is 240x176. Since I want to at least double the scale, a good starting size would be 480x352, with each square being 32x32. The background image needs to be called at scale, then moved instead of the Player. Then obstacles and objects will be rendered as clear (or not) tiles above the background layer. """ """ General notes: When slower than an opposing wild pokemon, there is approximately a 50% chance you'll
escape. The o
nly reason that the start-button fly-away Trainer trick works is that enemy trainers face south for one frame before turning back and starting the fight sequence. Obviously, the trick does not work with trainers that are already facing south. Three Steps: After a battle, wild or Trainer, a wild battle cannot be triggered until the third step from their original location. """
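The screen arithmetic from the graphics notes above, spelled out as a quick sanity check (constants are taken straight from the notes):

TILE, COLS, ROWS, SCALE = 16, 15, 11, 2
base = (COLS * TILE, ROWS * TILE)            # (240, 176)
scaled = (base[0] * SCALE, base[1] * SCALE)  # (480, 352)
print(base, scaled, TILE * SCALE)            # each tile becomes 32x32 at 2x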
jamespcole/home-assistant
homeassistant/components/esphome/binary_sensor.py
Python
apache-2.0
2,003
0
"""Support for ESPHome binary sensors.""" import logging from typing import TYPE_CHECKING, Optional from homeassistant.components.binary_sensor import BinarySensorDevice from . import EsphomeEntity, platform_async_setup_entry if TYPE_CHECKING: # pylint: disable=unused-import from aioesphomeapi import BinarySensorInfo, BinarySensorState # noqa DEPENDENCIES = ['esphome'] _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, entry, async_add_entities): """Set up ESPHome binary sensors based on a config entry.""" # pylint: disable=redefined-outer-name from aioesphomeapi import BinarySensorInfo, BinarySensorState # noqa await platform_async_setup_entry( hass, entry, async_add_entities, component_key='binary_sensor', info_type=BinarySensorInfo, entity_type=EsphomeBinarySensor, state_type=BinarySensorState ) class EsphomeBinarySensor(EsphomeEntity, BinarySensorDevice): """A binary sensor implementation for ESPHome.""" @property def _static_info(self) -> 'BinarySens
orInfo': return super()._static_info @property def _state(self) -> Optional['BinarySensorState']: return super()._state @property def is_on(self): """Return true if the binary sensor is on.""" if self._static_info.is_status_binary_sensor: # Status binary sensors indicate connected state. # So in their case what's usually _availability_ is now state return self._entry_data.available if self._s
tate is None: return None return self._state.state @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return self._static_info.device_class @property def available(self): """Return True if entity is available.""" if self._static_info.is_status_binary_sensor: return True return super().available
rzanluchi/keyard-client
tests/test_client_integration.py
Python
mit
890
0.001124
# -*- coding: utf-8 -*- import json import pytest from keyard_client import KeyardClient, testutils @pytest.mark.skipif(not testutils.keyard_is_available(), reason="keyard is missing") class TestKeyardClient(object): def setup_method(self, method): self.client = KeyardClient('http://127.0.0.1:8000') def test_register(self): response = self.client.register('app', '0.1', '
localhost:8002') assert response is True def test_health_check(self): response = self.client.health_check('app', '0.1', 'localhost:8002') assert response is True def test_unregister(self): response = self.client.unregister('app', '0.1', 'localhost:8002') assert response is True def test_get_service(self):
result = {"result": ['localhost:8080']} value = self.client.get_service('app') assert result == value
qiuzhong/crosswalk-test-suite
misc/webdriver-w3c-tests/navigation/forward.py
Python
bsd-3-clause
962
0.002079
# -*- mode: python; fill-column: 100; comment-column: 100; -*- import unittest import sys import os sys.path.append( os.pa
th.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir))) import base_test class ForwardTest(base_test.WebDriverBaseTest): # Load two static pages, go back, then check go_forward returns to the second page def test_forward(self): self.driver.get( self.webserver.where_is('navigation/res/forwardStart.html')) self.driver.get( self.webserver.where_is('navigation/res/forwardNext.html')) nextbody = self.driver.find_element_by_css(
"body").get_text() self.driver.go_back() currbody = self.driver.find_element_by_css("body").get_text() self.assertNotEqual(nextbody, currbody) self.driver.go_forward() currbody = self.driver.find_element_by_css("body").get_text() self.assertEqual(nextbody, currbody) if __name__ == '__main__': unittest.main()
cpenv/cpenv
cpenv/cli/create.py
Python
mit
1,854
0
import os from cpenv import api, paths from cpenv.cli import core from cpenv.module import parse_module_path class Create(core.CLI): '''Create a new Module.''' def setup_parser(self, parser): parser.add_argument( 'where', help='Path to new module', ) def run(self, args): where = paths.normalize(args.where) if os.path.isdir(where): core.echo() core.echo('Error: Can not create module in existing directory.') core.exit(1) default_name, default_version = parse_module_path(where) core.echo() core.echo('This command will guide you through creating a new module.') core.echo() name = core.prompt(' Module Name [%s]: ' % default_name) version = core.prompt(' Version [%s]: ' % default_version.string) description = core.prompt(' Description []: ') author = core.prompt(' Author []: ') email = core.prompt(' Email []: ') core.echo() core.echo('- Creating your new Module...', end='') mod
ule = api.create( where=where, name=name or default_name, version=version or defaul
t_version.string, description=description, author=author, email=email, ) core.echo('OK!') core.echo() core.echo(' ' + module.path) core.echo() core.echo('Steps you might take before publishing...') core.echo() core.echo(' - Include binaries your module depends on') core.echo(' - Edit the module.yml file') core.echo(' - Add variables to the environment section') core.echo(' - Add other modules to the requires section') core.echo(' - Add python hooks like post_activate') core.echo()
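The prompts above use the "answer or default" idiom: core.prompt returns an empty string when the user just hits enter, so `name or default_name` falls back to the suggestion. A tiny standalone sketch of the same idiom (prompt_with_default is a made-up stand-in for core.prompt):

def prompt_with_default(question, default):
    answer = input(' %s [%s]: ' % (question, default))
    return answer or default  # empty answer -> keep the suggested default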
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
decksite/data/person.py
Python
gpl-3.0
17,080
0.004859
from typing import List, Optional, Sequence, Union from decksite.data import achievements, deck, preaggregation, query from decksite.data.models.person import Person from decksite.database import db from shared import dtutil, guarantee, logger from shared.container import Container from shared.database import sqlescape from shared.decorators import retry_after_calling from shared.pd_exception import AlreadyExistsException, DoesNotExistException def load_person_by_id(person_id: int, season_id: Optional[int] = None) -> Person: return load_person(f'p.id = {person_id}', season_id=season_id) def load_person_by_mtgo_username(username: str, season_id: Optional[int] = None) -> Person: return load_person('p.mtgo_username = {username}'.format(username=sqlescape(username, force_string=True)), season_id=season_id) def load_person_by_discord_id(discord_id: int, season_id: Optional[int] = None) -> Person: return load_person(f'p.discord_id = {discord_id}', season_id=season_id) # pylint: disable=invalid-name def load_person_by_discord_id_or_username(person: str, season_id: int = 0) -> Person: # It would probably be better if this method did not exist but for now it's required by the API. # The problem is that Magic Online usernames can be integers so we cannot be completely unambiguous here. # We can make a really good guess, thoug
h. # See https://discordapp.com/developers/docs/reference#snowflakes # Unix timestamp (ms) for 2015-01-01T00:00:00.0000 = 1420070400000 # Unix timestamp (ms) for 2015-01-01T00:00:00.0001 = 1420070400001 # Unix timestamp (ms) for 2015-02-01T00:00:00.0000 = 14227
48800000 # Unix timestamp (ms) for 2100-01-01T00:00:00.0000 = 4102444800000 # Discord timestamp (ms) for 2015-01-01T00:00:00.0000 = 0 # Discord timestamp (ms) for 2015-01-01T00:00:00.0001 = 1 # Discord timestamp (ms) for 2015-02-01T00:00:00.0000 = 2678400000 # Min Discord snowflake for 2015-01-01T00:00:00.0000 = 0 ( 00000000000000000000000 in binary) # Min Discord snowflake for 2015-01-01T00:00:00.0001 = 4194304 ( 10000000000000000000000 in binary) # Min Discord snowflake for 2015-02-01T00:00:00.0000 = 11234023833600000 ( 100111111010010100100100000000000000000000000000000000 in binary) # Min Discord snowflake for 2100-01-01T00:00:00.0000 = 5625346837708800000 (100111000010001001111110010010100000000000000000000000000000000 in binary) # Discord snowflakes created between 2015-01-01T00:00:00.001Z and 2100-01-01T00:00:00.000Z will therefore fall in the range 2097152-5625346837708800000 if created before the year 2100. # We use 2015-02-01T00:00:00.000Z (11234023833600000) as the start of the range instead because it greatly reduces the range and we have seen no evidence of Discord snowflakes from before December 28th 2015. # This function will fail or (very unlikely) return incorrect results if we ever have a player with a Magic Online username that falls numerically between MIN_DISCORD_ID and MAX_DISCORD_ID. MIN_DISCORD_ID = 11234023833600000 MAX_DISCORD_ID = 5625346837708800000 if person.isdigit() and int(person) >= MIN_DISCORD_ID and int(person) <= MAX_DISCORD_ID: return load_person_by_discord_id(int(person), season_id=season_id) return load_person_by_mtgo_username(person, season_id=season_id) # pylint: disable=invalid-name def maybe_load_person_by_discord_id(discord_id: Optional[int]) -> Optional[Person]: if discord_id is None: return None return guarantee.at_most_one(load_people(f'p.discord_id = {discord_id}')) # pylint: disable=invalid-name def maybe_load_person_by_tappedout_name(username: str) -> Optional[Person]: return guarantee.at_most_one(load_people('p.tappedout_username = {username}'.format(username=sqlescape(username)))) # pylint: disable=invalid-name def maybe_load_person_by_mtggoldfish_name(username: str) -> Optional[Person]: return guarantee.at_most_one(load_people('p.mtggoldfish_username = {username}'.format(username=sqlescape(username)))) def load_person(where: str, season_id: Optional[int] = None) -> Person: people = load_people(where, season_id=season_id) if len(people) == 0: # We didn't find an entry for that person with decks, what about without? person = load_person_statless(where, season_id) else: person = guarantee.exactly_one(people) set_achievements([person], season_id) return person # Sometimes (person detail page) we want to load what we know about a person even though they had no decks in the specified season. 
def load_person_statless(where: str = 'TRUE', season_id: Optional[int] = None) -> Person: person_query = query.person_query() sql = f""" SELECT p.id, {person_query} AS name, p.mtgo_username, p.tappedout_username, p.mtggoldfish_username, p.discord_id, p.elo, p.locale FROM person AS p WHERE {where} """ people = [Person(r) for r in db().select(sql)] for p in people: p.season_id = season_id return guarantee.exactly_one(people) def load_people_count(where: str = 'TRUE', season_id: Optional[Union[str, int]] = None) -> int: season_join = query.season_join() if season_id else '' season_query = query.season_query(season_id, 'season.id') sql = f""" SELECT COUNT(DISTINCT p.id) FROM person AS p LEFT JOIN deck AS d ON d.person_id = p.id LEFT JOIN deck_cache AS dc ON d.id = dc.deck_id {season_join} WHERE ({where}) AND ({season_query}) """ return db().value(sql) or 0 # Note: This only loads people who have decks in the specified season. def load_people(where: str = 'TRUE', order_by: str = 'num_decks DESC, p.name', limit: str = '', season_id: Optional[Union[str, int]] = None) -> Sequence[Person]: person_query = query.person_query() season_join = query.season_join() if season_id else '' season_query = query.season_query(season_id, 'season.id') sql = f""" SELECT p.id, {person_query} AS name, p.mtgo_username, p.tappedout_username, p.mtggoldfish_username, p.discord_id, p.elo, p.locale, SUM(1) AS num_decks, SUM(dc.wins) AS wins, SUM(dc.losses) AS losses, SUM(dc.draws) AS draws, SUM(wins - losses) AS record, SUM(CASE WHEN dc.wins >= 5 AND dc.losses = 0 AND d.source_id IN (SELECT id FROM source WHERE name = 'League') THEN 1 ELSE 0 END) AS perfect_runs, SUM(CASE WHEN d.finish = 1 THEN 1 ELSE 0 END) AS tournament_wins, SUM(CASE WHEN d.finish <= 8 THEN 1 ELSE 0 END) AS tournament_top8s, IFNULL(ROUND((SUM(dc.wins) / NULLIF(SUM(dc.wins + dc.losses), 0)) * 100, 1), '') AS win_percent, SUM(DISTINCT CASE WHEN d.competition_id IS NOT NULL THEN 1 ELSE 0 END) AS num_competitions FROM person AS p LEFT JOIN deck AS d ON d.person_id = p.id LEFT JOIN deck_cache AS dc ON d.id = dc.deck_id {season_join} WHERE ({where}) AND ({season_query}) GROUP BY p.id ORDER BY {order_by} {limit} """ people = [Person(r) for r in db().select(sql)] for p in people: p.season_id = season_id return people def seasons_active(person_id: int) -> List[int]: sql = f""" SELECT DISTINCT season.id FROM deck AS d {query.season_join()} WHERE d.person_id = {person_id} ORDER BY season.id """ return db().values(sql) def preaggregate() -> None: achievements.preaggregate_achiev
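The MIN_DISCORD_ID bound reasoned about above can be reproduced from the snowflake layout the comment links to: the top bits hold milliseconds since the Discord epoch (2015-01-01T00:00:00Z), shifted left by 22. A small sketch that reproduces the 2015-02-01 figure from the comment:

from datetime import datetime, timedelta

DISCORD_EPOCH_MS = 1420070400000  # 2015-01-01T00:00:00Z in Unix milliseconds

def min_snowflake_at(dt):
    # Smallest snowflake whose embedded timestamp is dt
    # (worker/process/increment bits all zero).
    unix_ms = (dt - datetime(1970, 1, 1)) // timedelta(milliseconds=1)
    return (unix_ms - DISCORD_EPOCH_MS) << 22

print(min_snowflake_at(datetime(2015, 2, 1)))  # 11234023833600000 == MIN_DISCORD_ID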
sjjhsjjh/blender-driver
applications/demonstration.py
Python
mit
2,992
0.003676
#!/usr/bin/python # (c) 2018 Jim Hawkins. MIT licensed, see https://opensource.org/licenses/MIT # Part of Blender Driver, see https://github.com/sjjhsjjh/blender-driver """Python module for Blender Driver demonstration application. Abstract base class for demonstration applications. This module can only be used from within the Blender Game Engine.""" # Exit if run other than as a module. if __name__ == '__main__': print(__doc__) raise SystemExit(1) # Standard library imports, in alphabetic order. # # Module for command line switches. # https://docs.python.org/3.5/library/argparse.html # The import isn't needed because this class uses the base class to get an # object. # import argparse # # Module for levelled logging messages. # Tutorial is here: https://docs.python.org/3.5/howto/logging.html # Reference is here: https://docs.python.org/3.5/library/logging.html from logging import DEBUG, INFO, WARNING, ERROR, log # # Blender library imports, in alphabetic order. # # Local imports. # # Blender Driver application with threads and locks. import blender_driver.application.thread # Diagnostic print to show when it's imported.
Only printed if all its own # imports run OK. print('"'.join(('Application module ', __name__, '.'))) class Application(blender_driver.application.thread.Application): _instructions = "Press ESC to crash BGE, or any other key to te
rminate." _bannerName = 'banner' _bannerObject = None @property def bannerObject(self): return self._bannerObject # Overridden. def data_initialise(self): # # Do common initialisation for subclasses. self._bannerObject = self.data_add_banner() self.dontDeletes.append(self._bannerName) # # Run the base class method. super().data_initialise() def data_add_banner(self): banner = "\n".join( ("Blender Driver" , self.applicationName , self._instructions)) return self.bpyutils.set_up_object( self._bannerName, {'text':banner, 'physicsType':'NO_COLLISION' , 'location': (-5, -8, 3)}) # Overridden. def game_initialise(self): super().game_initialise() self.mainLock.acquire() try: self._bannerObject = self.game_add_text(self._bannerName) log(DEBUG, "Game scene objects {}\nArguments: {}\nSettings: {}" , self.gameScene.objects, vars(self.arguments), self.settings) print(self._instructions) finally: self.mainLock.release() # Overridden. def game_keyboard(self, *args): # # Formally, run the base class method. Actually, it's a pass. super().game_keyboard(*args) # # Default is to terminate on any key press. log(DEBUG, "Terminating.") self.game_terminate() def tick_skipped(self): log(WARNING, "Skipped ticks: {:d}.", self.skippedTicks)
MrSenko/Nitrate
tcms/core/contrib/auth/migrations/0001_initial.py
Python
gpl-2.0
867
0.002307
# -*- coding: utf-8 -*- from __future__ import unicode_literals from
django.db import migrations, models from django.conf import settings class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL),
] operations = [ migrations.CreateModel( name='UserActivateKey', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('activation_key', models.CharField(max_length=40, null=True, blank=True)), ('key_expires', models.DateTimeField(null=True, blank=True)), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ], options={ 'db_table': 'tcms_user_activate_keys', }, ), ]
mr555ru/orbotor
orbotor/gameprofile.py
Python
gpl-3.0
8,679
0.004378
# Orbotor - arcade with orbit mechanics # Copyright (C) 2014 mr555ru # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import sys # NOQA import profile import ConfigParser import pygame from pygame import * from static_functions import * import camera as camera import planet as planet from orbitable import GCD_Singleton, SoundSystem_Singleton from helldebris_collection import HellDebrisCollection from team import Team from simplestats import SimpleStats wwidth = 1024 wheight = 768 p1_name = "Player1" p2_name = "Player2" config = ConfigParser.RawConfigParser() config.read('profile.cfg') wwidth = config.getint("Screen", "width") wheight = config.getint("Screen", "height") p1_name = config.get("Player", "P1_name") p2_name = config.get("Player", "P2_name") display = (wwidth, wheight) clock = pygame.time.Clock() class Profile(): def __init__(self, is_player2_present=False, is_player1_ai=False, is_player2_ai=False, player1_team="Green", player2_team="Red", greenteamsize=8, redteamsize=8, debris_min=6, debris_max=20, draw_planet=False, name=""): self.p2 = is_player2_present self.p1_ai = is_player1_ai self.p2_ai = is_player2_ai self.p1_team = player1_team self.p2_team = player2_team mingreen = int(self.p1_team == "Green") + int(self.p2_team == "Green" and self.p2) minred = int(self.p1_team == "Red") + int(self.p2_team == "Red" and self.p2) self.green = max(mingreen, greenteamsize) self.red = max(minred, redteamsize) self.hellmin = debris_min self.hellmax = debris_max self.draw_planet = draw_planet self.name = name self.ERAD = 1000 self.MAXRAD = 1700 self.ORBHEIGHT = 350 def game_init(self): pygame.init() self.PROFILESTEP = False self.UPDAE_GAME = pygame.USEREVENT + 1 pygame.time.set_timer(self.UPDAE_GAME, GAME_SPEED) self.screen = pygame.display.set_mode(display) if self.p2: self.bg1 = Surface((wwidth, wheight/2)) self.bg2 = Surface((wwidth, wheight/2)) self.cam2 = camera.Camera(self.bg2, first_in_order=False) self.bgs = (self.bg1, self.bg2) else: self.bg1 = Surface((wwidth, wheight)) self.bgs = (self.bg1,) self.cam1 = camera.Camera(self.bg1) if self.name == "": pygame.display.set_caption("Orbotor") else: pygame.display.set_caption("Orbotor - %s" % self.name) self.pl = planet.Planet(self.bgs, self.ERAD, self.MAXRAD, "planet.png" if self.draw_planet else None) GCD_Singleton.set_planet(self.pl) self.soundsys = SoundSystem_Singleton self.spawn = (self.ERAD+self.ORBHEIGHT, 0) self.team1 = Team("Green", "#009900", self.green, self.spawn, self.pl) self.team2 = Team("Red", "#880000", self.red, self.spawn, self.pl) self.team1.set_opponent_team(self.team2) self.team2.set_opponent_team(self.team1) self.hell = HellDebrisCollection(self.spawn, self.pl, self.hellmin, self.hellmax) if self.p1_team == "Green": self.player1 = self.team1.guys[0] if self.p2: if self.p2_team == "Green": self.player2 = self.team1.guys[1] elif self.p2_team == "Red": self.player2 = self.team2.guys[0] else: raise Exception("unknown team for p2: %s" % self.p2_team) 
elif self.p1_team == "Red": self.player1 = self.team2.guys[0] if self.p2: if self.p2_team == "Green": self.player2 = self.team1.guys[0] elif self.p2_team == "Red": self.player2 = self.team2.guys[1] else: raise Exception("unknown team for p2: %s" % self.p2_team) else: raise Exception("unknown team for p1: %s" % self.p1_team) self.player1.is_ai = self.p1_ai if self.p1_ai: self.player1.set_name("[bot] %s" % p1_name) else: self.player1.set_name("%s" % p1_name) if self.p2: self.player2.is_ai = self.p2_ai if self.p2_ai: self.player2.set_name("[bot] %s" % p2_name) else: self.player2.set_name("%s" % p2_name)
self.stats1 = SimpleStats(self.team1, self.team2, self.player1) if self.p2: self.stats2 = SimpleStats(self.team1, self.team2, self.player2) def game_key_listen(self,
event): if event.type == KEYDOWN and event.key == K_F1: self.PROFILESTEP = True self.game_step() elif event.type == KEYDOWN and event.key == K_F2: print len(GCD_Singleton.orbitables) elif event.type == KEYDOWN and event.key == K_F5: self.soundsys.switch() if not self.p1_ai: self.player1.catch_kb_event(event) if self.p2 and not self.p2_ai: self.player2.catch_kb_event_hotseat(event) self.cam1.keys_listen(event) if self.p2: self.cam2.keys_listen_hotseat(event) def game_step(self): if self.PROFILESTEP: profile.runctx("self._step()", globals(), {"self": self}) else: self._step() def _step(self): self.team2.step() # todo faster better stronger self.team1.step() self.hell.step() self.player1.focus(self.cam1) self.cam1.step() if self.p2: self.player2.focus(self.cam2) self.cam2.step() GCD_Singleton.step() def game_draw(self): if self.PROFILESTEP: profile.runctx("self._draw()", globals(), {"self": self}) self.PROFILESTEP = False else: self._draw() def _draw(self): clock.tick(60) tup = [self.pl, ] + self.team1.objectslist() + self.team2.objectslist()\ + self.hell.objectslist() + self.pl.cities tup = tuple(tup) self.cam1.translate_coords(*tup) if self.p2: self.cam2.translate_coords(*tup) self.stats1.draw(self.bg1) self.screen.blit(self.bg1, (0, 0)) if self.p2: self.stats2.draw(self.bg2) self.screen.blit(self.bg2, (0, wheight/2)) pygame.display.update() def DefaultProfile(draw_planet, hell): return Profile(draw_planet=draw_planet, debris_min=hell[0], debris_max=hell[1]) def HotseatProfile(draw_planet, hell): return Profile(is_player2_present=True, draw_planet=draw_planet, debris_min=hell[0], debris_max=hell[1]) def RivalProfile(draw_planet, hell): return Profile(is_player2_present=True, is_player2_ai=True, draw_planet=draw_planet, debris_min=hell[0], debris_max=hell[1]) def CoopProfile(draw_planet, hell): return Profile(is_player2_present=True, player2_team="Green", draw_planet=draw_planet, debris_min=hell[0], debris_max=hell[1]) def SpectateProfile(draw_planet, hell
openstack/tripleo-common
tripleo_common/image/exception.py
Python
apache-2.0
991
0
# Copyright 2015 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # class ImageBuilderException(Exceptio
n): pass class ImageRateLimitedException(Exception):
"""Rate Limited request""" class ImageSpecificationException(Exception): pass class ImageUploaderException(Exception): pass class ImageUploaderThreadException(Exception): """Conflict during thread processing""" pass class ImageNotFoundException(Exception): pass
takluyver/terminado
terminado/management.py
Python
bsd-2-clause
12,822
0.000468
"""Terminal management for exposing terminals to a web interface using Tornado. """ # Copyright (c) Jupyter Development Team # Copyright (c) 2014, Ramalingam Saravanan <[email protected]> # Distributed under the terms of the Simplified BSD License. from __future__ import absolute_import, print_function import asyncio from collections import deque import itertools import logging import os import signal import codecs import warnings import select try: from ptyprocess import PtyProcessUnicode def preexec_fn(): signal.signal(signal.SIGPIPE, signal.SIG_DFL) except ImportError: from winpty import PtyProcess as PtyProcessUnicode preexec_fn = None from tornado.ioloop import IOLoop ENV_PREFIX = "PYXTERM_" # Environment variable prefix DEFAULT_TERM_TYPE = "xterm" class PtyWithClients(object): def __init__(self, argv, env=[], cwd=None): self.clients = [] # If you start the process and then construct this object from it, # output generated by the process prior to the object's creation # is lost. Hence the change from 0.8.3. # Buffer output until a client connects; then let the client # drain the buffer. # We keep the same read_buffer as before self.read_buffer = deque([], maxlen=10) self.preopen_buffer = deque([]) kwargs = dict(argv=argv, env=env, cwd=cwd) if preexec_fn is not None: kwargs["preexec_fn"] = preexec_fn self.ptyproc = PtyProcessUnicode.spawn(**kwargs) # The output might not be strictly UTF-8 encoded, so # we replace the inner decoder of PtyProcessUnicode # to allow non-strict decode. self.ptyproc.decoder = codecs.getincrementaldecoder('utf-8')(errors='replace') def resize_to_smallest(self): """Set the terminal size to that of the smallest client dimensions. A terminal not using the full space available is much nicer than a terminal trying to use more than the available space, so we keep it sized to the smallest client. """ minrows = mincols = 10001 for client in self.clients: rows, cols = client.size if rows is not None and rows < minrows: minrows = rows if cols is not None and cols < mincols: mincols = cols if minrows == 10001 or mincols == 10001: return rows, cols = self.ptyproc.getwinsize() if (rows, cols) != (minrows, mincols): self.ptyproc.setwinsize(minrows, mincols) def kill(self, sig=signal.SIGTERM): """Send a signal to the process in the pty""" self.ptyproc.kill(sig) def killpg(self, sig=signal.SIGTERM): """Send a signal to the process group of the process in the pty""" if os.name == 'nt': return self.ptyproc.kill(sig) pgid = os.getpgid(self.ptyproc.pid) os.killpg(pgid, sig) async def terminate(self, force=False): '''This forces a child process to terminate. It starts nicely with SIGHUP and SIGINT. If "force" is True then moves onto SIGKILL. This returns True if the child was terminated. This returns False if the child could not be terminated. ''' if os.name == 'nt': signals = [signal.SIGINT, signal.SIGTERM] else: signals = [signal.SIGHUP, signal.SIGCONT, signal.SIGINT, signal.SIGTERM] loop = IOLoop.current() def sleep(): return asyncio.sleep(self.ptyproc.delayafterterminate) if not self.ptyproc.isalive(): return True try: for sig in signals: self.kill(sig) await sleep() if not self.ptyproc.isalive(): return True if force: self.kill(signal.SIGKILL) await sleep() if not self.ptyproc.isalive(): return True else: return False return False except OSError: # I think there are kernel timing issues that sometimes cause # this to happen. I think isalive() reports True, but the # process is dead to the kernel. # Make one last attempt to see if the kernel is up to date. 
await sleep() if not self.ptyproc.isalive(): return True else: return False def _update_removing(target, changes): """Like dict.update(), but remove keys where the value is None. """ for k, v in changes.items(): if v is None: target.pop(k, None) else: target[k] = v class TermManagerBase(object): """Base class for a terminal manager.""" def __init__(self, shell_command, server_url="", te
rm_settings={}, extra_env=None, ioloop=None): self.shell_command = shell_command self.server_url = server_url self.term_settings = term_settings self.extra_env = extra_env self.log = logging.getLogger(__name__) self.ptys_by_fd = {} if ioloop is not None: warnings.warn(
f"Setting {self.__class__.__name__}.ioloop is deprecated and ignored", DeprecationWarning, stacklevel=2, ) def make_term_env(self, height=25, width=80, winheight=0, winwidth=0, **kwargs): """Build the environment variables for the process in the terminal.""" env = os.environ.copy() env["TERM"] = self.term_settings.get("type", DEFAULT_TERM_TYPE) dimensions = "%dx%d" % (width, height) if winwidth and winheight: dimensions += ";%dx%d" % (winwidth, winheight) env[ENV_PREFIX+"DIMENSIONS"] = dimensions env["COLUMNS"] = str(width) env["LINES"] = str(height) if self.server_url: env[ENV_PREFIX+"URL"] = self.server_url if self.extra_env: _update_removing(env, self.extra_env) return env def new_terminal(self, **kwargs): """Make a new terminal, return a :class:`PtyWithClients` instance.""" options = self.term_settings.copy() options['shell_command'] = self.shell_command options.update(kwargs) argv = options['shell_command'] env = self.make_term_env(**options) cwd = options.get('cwd', None) return PtyWithClients(argv, env, cwd) def start_reading(self, ptywclients): """Connect a terminal to the tornado event loop to read data from it.""" fd = ptywclients.ptyproc.fd self.ptys_by_fd[fd] = ptywclients loop = IOLoop.current() loop.add_handler(fd, self.pty_read, loop.READ) def on_eof(self, ptywclients): """Called when the pty has closed. """ # Stop trying to read from that terminal fd = ptywclients.ptyproc.fd self.log.info("EOF on FD %d; stopping reading", fd) del self.ptys_by_fd[fd] IOLoop.current().remove_handler(fd) # This closes the fd, and should result in the process being reaped. ptywclients.ptyproc.close() def pty_read(self, fd, events=None): """Called by the event loop when there is pty data ready to read.""" r, _, _ = select.select([fd], [], [], .1) if not r: return ptywclients = self.ptys_by_fd[fd] try: s = ptywclients.ptyproc.read(65536) client_list = ptywclients.clients ptywclients.read_buffer.append(s) if not client_list: # No one to consume our output: buffer it. ptywclients.preopen_buffer.append(s) return for client in ptywclients.clients: client.on_pty_read(s) except EOFError: self.on_eof(ptywclients) for client in ptywclients.clients: client.on_pty_died() def get_terminal(self, url_component=None): """Override in a subclass to give a terminal to a new w
rebost/django
django/contrib/localflavor/it/util.py
Python
bsd-3-clause
1,800
0.001667
from django.utils.encoding import smart_unicode def ssn_check_digit(value): "Calculate Italian social security number check digit." ssn_even_chars = { '0': 0, '1': 1, '2': 2, '3': 3, '4': 4, '5': 5, '6': 6, '7': 7, '8': 8, '9': 9, 'A': 0, 'B': 1, 'C': 2, 'D': 3, 'E': 4, 'F': 5, 'G': 6, 'H': 7, 'I': 8, 'J': 9, 'K': 10, 'L': 11, 'M': 12, 'N': 13, 'O': 14, 'P': 15, 'Q': 16, 'R': 17, 'S': 18, 'T': 19, 'U': 20, 'V': 21, 'W': 22, 'X': 23, 'Y': 24, 'Z': 25 } ssn_odd_chars = { '0': 1, '1': 0, '2': 5, '3': 7, '4': 9, '5': 13, '6': 15, '7': 17, '8': 19, '9': 21, 'A': 1, 'B': 0, 'C': 5, 'D': 7, 'E': 9, 'F': 13, 'G': 15, 'H': 17, 'I': 19, 'J': 21, 'K': 2, 'L': 4, 'M': 18, 'N': 20, 'O': 11, 'P': 3, 'Q': 6, 'R': 8, 'S': 12, 'T': 14, 'U': 16, 'V': 10, 'W': 22, 'X': 25, 'Y': 24, 'Z': 23 } # Chars from 'A' to 'Z' ssn_check_digits = [chr(x) for x in range(65, 91)] ssn = value.upper() total = 0 for i in range(0, 15): try: if i % 2 == 0: total += ssn_odd_chars[ssn[i]] else: total += ssn_even_chars[ssn[i]] except KeyError: msg = "Character '%(char)s' is not allowed." % {'char': ssn[i]} raise ValueError(msg) return ssn_check_digits[total % 26] def vat_number_check_digit(vat_number): "Calculate Italian VAT number check digit." normalized_vat_number = smart_unicode(vat_number).zfill(10) total = 0
for i in range(0, 1
0, 2): total += int(normalized_vat_number[i]) for i in range(1, 11, 2): quotient, remainder = divmod(int(normalized_vat_number[i]) * 2, 10) total += quotient + remainder return smart_unicode((10 - total % 10) % 10)
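A worked run of the vat_number_check_digit() rule above, on the made-up body '0000000001': digits in odd (1-indexed) positions count as-is, digits in even positions are doubled with their decimal digits summed, and the check digit completes the total to a multiple of 10:

body = '0000000001'
total = sum(int(body[i]) for i in range(0, 10, 2))                        # 0
total += sum(sum(divmod(int(body[i]) * 2, 10)) for i in range(1, 10, 2))  # 2
print((10 - total % 10) % 10)  # 8 -> full VAT number '00000000018'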
aleksl05/IS-206
ex31.py
Python
gpl-3.0
982
0.037678
print "You enter a dark room with two doors. Do you go through door #1 or door #2?" door = raw_input("> ") if door == "1": print "There`s a giant bear here eating a chees cake. What do you do?" print "1. Take the cake." print "2. Scream at the bear." bear = raw_input("> ") if bear == "1": print "The bear eats your face
off. Good job!" elif bear == "2": print "The bear eats your legs off. Good
job!" else: print "Well, doing %s is probably better. Bear runs away." %bear elif door =="2": print "You stare into the endless abyss at Cthulhu's retina." print "1. Blueberries." print "2. Yellow jacket clothespins." print "3. Understanding revolvers yelling melodies." insanity = raw_input("> ") if insanity == "1" or insanity =="2": print "Your body survives powered by a mind of jello. Good job!" else: print "The insanity rots your eyes into a pool of muck. Good job!" else: print "You stumble around and fall on a knife and die. Good job!"
virantha/pypdfocr
pypdfocr/pypdfocr_pdffiler.py
Python
apache-2.0
2,902
0.004824
# Copyright 2013 Virantha Ekanayake All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Provides capability to search PDFs and file to a specific folder based on keywords """ from sets import Set import sys, os import re import logging import shutil from PyPDF2 import PdfFileReader from pypdfocr_filer import PyFiler from pypdfocr_filer_dirs import PyFilerDirs class PyPdfFiler(object): def __init__(self, filer): assert isinstance(filer, PyFiler) self.filer = filer # Must be a subclass of PyFiler # Whether to fall back on filename fo
r matching keywords against # if there is no match in the text self.file_using_filename = False def iter_pdf_page_text(self, filename):
self.filename = filename reader = PdfFileReader(filename) logging.info("pdf scanner found %d pages in %s" % (reader.getNumPages(), filename)) for pgnum in range(reader.getNumPages()): text = reader.getPage(pgnum).extractText() text = text.encode('ascii', 'ignore') text = text.replace('\n', ' ') yield text def _get_matching_folder(self, pdfText): searchText = pdfText.lower() for folder,strings in self.filer.folder_targets.items(): for s in strings: logging.debug("Checking string %s" % s) if s in searchText: logging.info("Matched keyword '%s'" % s) return folder # No match found, so return return None def file_original (self, original_filename): return self.filer.file_original(original_filename) def move_to_matching_folder(self, filename): for page_text in self.iter_pdf_page_text(filename): tgt_folder = self._get_matching_folder(page_text) if tgt_folder: break # Stop searching through pdf pages as soon as we find a match if not tgt_folder and self.file_using_filename: tgt_folder = self._get_matching_folder(filename) tgt_file = self.filer.move_to_matching_folder(filename, tgt_folder) return tgt_file if __name__ == '__main__': p = PyPdfFiler(PyFilerDirs()) for page_text in p.iter_pdf_page_text("scan_ocr.pdf"): print (page_text)
EricssonResearch/calvin-base
calvinextras/calvinsys/data/buffer/PersistentBuffer.py
Python
apache-2.0
7,704
0.005582
# -*- coding: utf-8 -*- # Copyright (c) 2018 Ericsson AB # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import json import os.path from twisted.enterprise import adbapi from calvin.runtime.south.async import async from calvin.utilities.calvinlogger import get_logger from calvin.runtime.south.calvinsys import base_calvinsys_object _log = get_logger(__name__) class PersistentBuffer(base_calvinsys_object.BaseCalvinsysObject): """ Asynchronous (using twisted adbapi) SQLite-based implementation of persistant queue Based on the following (from sqlite.org): 1) If no ROWID is specified on the insert [...] then an appropriate ROWID is created automatically. 2) The usual algorithm is to give the newly created row a ROWID that is one larger than the largest ROWID in the table prior to the insert. 3) If the table is initially empty, then a ROWID of 1 is used. 4) If the largest ROWID is equal to the largest possible integer (9223372036854775807) then the database engine starts picking positive candidate ROWIDs at random until it finds one that is not previously used. 5) The normal ROWID selection [...] will generate monotonically increasing unique ROWIDs as long as you never use the maximum ROWID value and you never delete the entry in the table with the largest ROWID. Since we are implementing a FIFO queue, 1) should ensure there is a row id, 2) & 5) that the ordering is correct and 3) that the rowid is reset whenever the queue is emptied, so 4) should never happen. """ init_schema = { "type": "object", "properties": { "buffer_id": { "description": "Buffer identifier,
should be unique - will be used as part of filename", "type": "string", "pattern": "^[a-zA-Z0-9]+" }, "reporting": { "description": "Log some statistics on buffer at given interval (in seconds)", "type": "nu
mber" } }, "required": ["buffer_id"], "description": "Initialize buffer" } can_write_schema = { "description": "Returns True if buffer ready for write, otherwise False", "type": "boolean" } write_schema = { "description": "Push data to buffer; always a list of json serializable items", "type": "array" } can_read_schema = { "description": "Returns True if data can be read, otherwise False", "type": "boolean" } read_schema = { "description": "Pop data from buffer, always a list", "type": "array" } def init(self, buffer_id, reporting=None, *args, **kwargs): self.db_name = buffer_id self.db_path = os.path.join(os.path.abspath(os.path.curdir), self.db_name + ".sq3") self.db = adbapi.ConnectionPool('sqlite3', self.db_path, check_same_thread=False) self._pushed_values = 0 self._popped_values = 0 self._latest_timestamp = 0 self._value = None self._changed = None self._statlogging = None def ready(length): def log_stats(): _log.info("{} : pushed {}, popped {} (latest timestamp: {}) ".format(self.db_name, self._pushed_values, self._popped_values, self._latest_timestamp)) self._statlogging.reset() self._changed = True # Something has changed, need to check if readable # install timer to report on pushing/popping if reporting: self._statlogging= async.DelayedCall(reporting, log_stats) self.scheduler_wakeup() def create(db): # Create simple queue table. Using TEXT unless there is a reason not to. db.execute("CREATE TABLE IF NOT EXISTS queue (value BLOB)") def error(e): _log.error("Error initializing queue {}: {}".format(self.db_name, e)) q = self.db.runInteraction(create) q.addCallback(ready) q.addErrback(error) def can_write(self): # Can always write after init, meaning changed is no longer None return self._changed is not None def write(self, value): def error(e): _log.warning("Error during write: {}".format(e)) done() # Call done to wake scheduler, not sure this is a good idea def done(unused=None): self._changed = True # Let can_read know there may be something new to read self.scheduler_wakeup() self._pushed_values += len(value) try: value = json.dumps(value) # Convert to string for sqlite except TypeError: _log.error("Value is not json serializable") else: q = self.db.runOperation("INSERT INTO queue (value) VALUES (?)", (value, )) q.addCallback(done) q.addErrback(error) def can_read(self): def error(e): _log.warning("Error during read: {}".format(e)) done() def done(value=None): if value: self._changed = True # alert can_read that the database has changed self._value = value self.scheduler_wakeup() def pop(db): limit = 2 # <- Not empirically/theoretically tested db.execute("SELECT value FROM queue ORDER BY rowid LIMIT (?)", (limit,)) value = db.fetchall() # a list of (value, ) tuples, or None if value: # pop values (i.e. 
delete rows with len(value) lowest row ids) db.execute("DELETE FROM queue WHERE rowid in (SELECT rowid FROM queue ORDER BY rowid LIMIT (?))", (len(value),)) return value if self._value: # There is a value to read return True elif self._changed: # Something has changed, try to pop a value self._changed = False q = self.db.runInteraction(pop) q.addCallback(done) q.addErrback(error) # Nothing to do return False def read(self): value = [] while self._value: # get an item from list of replies dbtuple = self._value.pop(0) # the item is a tuple, get the first value dbvalue = dbtuple[0] # convert value from string and return it try: value.extend(json.loads(dbvalue)) except ValueError: _log.error("No value decoded - possibly corrupt file") self._popped_values += len(value) return value def close(self): if self._statlogging: self._statlogging.cancel() def done(response): # A count response; [(cnt,)] if response[0][0] == 0: try: os.remove(self.db_path) except: # Failed for some reason _log.warning("Could not remove db file {}".format(self.db_path)) q = self.db.runQuery("SELECT COUNT(*) from queue") q.addCallback(done) self.db.close()
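# --- Illustrative sketch (not part of the original calvinsys object): the
# ROWID-ordered FIFO behaviour described in the class docstring, reproduced
# synchronously with the standard sqlite3 module so it can be tried in
# isolation. The in-memory database and the batch size of 2 are assumptions.
import sqlite3

def _fifo_demo():
    db = sqlite3.connect(':memory:')
    db.execute("CREATE TABLE IF NOT EXISTS queue (value BLOB)")
    for item in ('a', 'b', 'c'):
        db.execute("INSERT INTO queue (value) VALUES (?)", (item,))
    # Oldest rows have the lowest ROWIDs, so ordering by rowid pops FIFO.
    rows = db.execute(
        "SELECT rowid, value FROM queue ORDER BY rowid LIMIT 2").fetchall()
    db.execute("DELETE FROM queue WHERE rowid IN "
               "(SELECT rowid FROM queue ORDER BY rowid LIMIT ?)", (len(rows),))
    return [value for _, value in rows]  # -> ['a', 'b']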
keithhendry/treadmill
treadmill/sproc/appcfgmgr.py
Python
apache-2.0
452
0
"""Treadmill app configurator daemon, subscribes to eventmgr events. """ import click from .. import appcfgmgr def init(): """Top level command handler.""" @click.command() @click.option('--approot', type=click.Path(exists=Tru
e),
envvar='TREADMILL_APPROOT', required=True) def top(approot): """Starts appcfgmgr process.""" mgr = appcfgmgr.AppCfgMgr(root=approot) mgr.run() return top
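# --- Illustrative usage (assumption, not part of treadmill): exercising the
# click command returned by init() with click's test runner; asking for
# --help avoids actually starting the daemon.
if __name__ == '__main__':
    from click.testing import CliRunner
    print(CliRunner().invoke(init(), ['--help']).output)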
Azure/azure-sdk-for-python
sdk/network/azure-mgmt-network/azure/mgmt/network/v2021_02_01/aio/operations/_express_route_circuit_peerings_operations.py
Python
mit
21,960
0.005237
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from az
ure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse]
, T, Dict[str, Any]], Any]] class ExpressRouteCircuitPeeringsOperations: """ExpressRouteCircuitPeeringsOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.network.v2021_02_01.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config async def _delete_initial( self, resource_group_name: str, circuit_name: str, peering_name: str, **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2021-02-01" accept = "application/json" # Construct URL url = self._delete_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'), 'peeringName': self._serialize.url("peering_name", peering_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}'} # type: ignore async def begin_delete( self, resource_group_name: str, circuit_name: str, peering_name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Deletes the specified peering from the specified express route circuit. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param circuit_name: The name of the express route circuit. :type circuit_name: str :param peering_name: The name of the peering. :type peering_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. 
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = await self._delete_initial( resource_group_name=resource_group_name, circuit_name=circuit_name, peering_name=peering_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'), 'peeringName': self._serialize.url("peering_name", peering_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}'} # type: ignore async def get( self, resource_group_name: str, circuit_name: str, peering_name: str, **kwargs: Any ) -> "_models.ExpressRouteCircuitPeering": """Gets the specified peering for the express route circuit. :param r
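# --- Illustrative sketch (assumption, not generated code): awaiting the
# long-running delete defined above. `network_client` stands in for an
# already-constructed aio NetworkManagementClient; the resource names are
# placeholders.
async def _delete_peering_demo(network_client):
    poller = await network_client.express_route_circuit_peerings.begin_delete(
        resource_group_name="my-rg",
        circuit_name="my-circuit",
        peering_name="AzurePrivatePeering",
    )
    await poller.result()  # resolves once the operation reaches a terminal state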
relh/cathhacks
app.py
Python
mit
626
0.038339
#!/usr/bin/env python import time import json import random import re from bottle import route, hook, response, run, static_
file @route('/') def index(): return static_file('index.html', root = '.') @route('/maptweets.js') def maptweets_js(): return static_file('maptweets.js', root = '.') @route('/cross.jpg') def cross_jpg(): return static_file('cross.jpg', root = '.') @route('/light.png') def light_png(): return static_file('light.png', root = '.') @route('/event.png') def event_png(): return static_file('event.png', root = '.') run(host = '0.0.0.0', port = 80, server = 'tornado', debug = True)
Shaswat27/sympy
sympy/core/mod.py
Python
bsd-3-clause
4,488
0.000223
from __future__ import print_function, division from sympy.core.numbers import nan from .function import Function class Mod(Function): """Represents a modulo operation on symbolic expressions. Receives two arguments, dividend p and divisor q. The convention used is the same as Python's: the remainder always has the same sign as the divisor. Examples ======== >>> from sympy.abc import x, y >>> x**2 % y Mod(x**2, y) >>> _.subs({x: 5, y: 6}) 1 """ @classmethod def eval(cls, p, q): from sympy.core.add import Add from sympy.core.mul import Mul from sympy.core.singleton import S from sympy.core.exprtools import gcd_terms from sympy.polys.polytools import gcd def doit(p, q): """Try to return p % q if both are numbers or +/-p is known to be less than or equal q. """ if p.is_infinite or q.is_infinite or p is nan or q is nan: return nan if (p == q or p == -q or p.is_Pow and p.exp.is_Integer and p.base == q or p.is_integer and q == 1): return S.Zero if q.is_Number: if p.is_Number: return (p % q) if q == 2: if p.is_even: return S.Zero elif p.is_odd: return S.One # by ratio r = p/q try: d = int(r) except TypeError: pass else: if type(d) is int: rv = p - d*q if (rv*q < 0) == True: rv += q return rv # by difference d = p - q if d.is_negative: if q.is_negative: return d elif q.is_positive: return p rv = doit(p, q) if rv is not None: return rv # denest if p.func is cls: # easy qinner = p.args[1] if qinner == q: return p # XXX other possibilities? # extract gcd; any further simplification should be done by the user G = gcd(p, q) if G != 1: p, q = [ gcd_terms(i/G, clear=False, fraction=False) for i in (p, q)] pwas, qwas = p, q # simplify terms # (x + y + 2) % x -> Mod(y + 2, x) if p.is_Add: args = [] for i in p.args: a = cls(i, q) if a.count(cls) > i.count(cls): args.append(i) else: args.append(a) if args != lis
t(p.args): p = Add(*args) else: # handle coefficients if they are not Rational # since those are not handled by factor_terms # e.g. Mod(.6*x, .3*y) -> 0.3*Mod(2*x, y) cp, p = p.as_coeff_Mul() cq, q = q.as_coeff_Mul() ok =
False if not cp.is_Rational or not cq.is_Rational: r = cp % cq if r == 0: G *= cq p *= int(cp/cq) ok = True if not ok: p = cp*p q = cq*q # simple -1 extraction if p.could_extract_minus_sign() and q.could_extract_minus_sign(): G, p, q = [-i for i in (G, p, q)] # check again to see if p and q can now be handled as numbers rv = doit(p, q) if rv is not None: return rv*G # put 1.0 from G on inside if G.is_Float and G == 1: p *= G return cls(p, q, evaluate=False) elif G.is_Mul and G.args[0].is_Float and G.args[0] == 1: p = G.args[0]*p G = Mul._from_args(G.args[1:]) return G*cls(p, q, evaluate=(p, q) != (pwas, qwas)) def _eval_is_integer(self): from sympy.core.logic import fuzzy_and, fuzzy_not p, q = self.args if fuzzy_and([p.is_integer, q.is_integer, fuzzy_not(q.is_zero)]): return True def _eval_is_nonnegative(self): if self.args[1].is_positive: return True def _eval_is_nonpositive(self): if self.args[1].is_negative: return True
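# --- Illustrative check (assumption, not part of the module): the
# divisor-sign convention described in the class docstring, matching
# Python's built-in % operator.
def _mod_sign_demo():
    assert Mod(5, 3) == 2
    assert Mod(-5, 3) == 1   # result takes the (positive) sign of the divisor
    assert Mod(5, -3) == -1  # result takes the (negative) sign of the divisor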
theskyinflames/bpulse-go-client
vendor/github.com/youtube/vitess/test/initial_sharding_bytes.py
Python
apache-2.0
553
0.003617
#!/usr/bin/env python # # Copyright 2013, Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can # be found in the LICENSE file. """Re-runs initial_sharding.py with a varbinary keyspace_id.""" from vtdb import keyrange_constants import base_sharding import initial_sharding import utils # this test is just re-running an entire initial_sharding.py with a # varbinary keyspac
e_id if __name__ == '__main__': base_sharding.keyspa
ce_id_type = keyrange_constants.KIT_BYTES utils.main(initial_sharding)
bytedance/fedlearner
web_console_v2/api/fedlearner_webconsole/workflow/cronjob.py
Python
apache-2.0
3,936
0.000254
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # coding: utf-8 from typing import Tuple from time import sleep from fedlearner_webconsole.composer.interface import IItem, IRunner, ItemType from fedlearner_webconsole.composer.models import Context, RunnerStatus from fedlearner_webconsole.db import get_session from fedlearner_webconsole.workflow.models import Workflow, WorkflowState class WorkflowCronJobItem(IItem): def __init__(self, task_id: int): self.id = task_id def type(self) -> ItemType: return ItemType.WORKFLOW_CRON_JOB def get_id(self) -> int: return self.id def __eq__(self, obj: IItem): return self.id == obj.id and self.type() == obj.type() class WorkflowCronJob(IRunner): """ Start the workflow at every interval """ def __init__(self, task_id: int): self._workflow_id = task_id self._msg = None def start(self, context: Context): with get_session(context.db_engine) as session: try: workflow: Workflow = session.query(Workflow).filter_by( id=self._workflow_id).one() # TODO: This is a hack!!! Temporarily use this method # cc @hangweiqiang: Transaction State Refactor state = workflow.get_state_for_frontend() if state in ('COMPLETED', 'FAILED', 'READY', 'STOPPED', 'NEW'): if state in ('COMPLETED', 'FAILED'): workflow.update_target_state( target_state=WorkflowState.STOPPED) session.commit() # check workflow stopped # TODO: use composer timeout cc @yurunyu for _ in range(24): # use session ref
resh to get the latest info
# otherwise it'll use the identity map locally session.refresh(workflow) if workflow.state == WorkflowState.STOPPED: break sleep(5) else: self._msg = f'failed to stop \ workflow[{self._workflow_id}]' return workflow.update_target_state( target_state=WorkflowState.RUNNING) session.commit() self._msg = f'restarted workflow[{self._workflow_id}]' elif state == 'RUNNING': self._msg = f'skip restarting workflow[{self._workflow_id}]' elif state == 'INVALID': self._msg = f'current workflow[{self._workflow_id}] \ is invalid' else: self._msg = f'workflow[{self._workflow_id}] \ state is {state}, which is unexpected' except Exception as err: # pylint: disable=broad-except self._msg = f'exception of workflow[{self._workflow_id}], \ details is {err}' def result(self, context: Context) -> Tuple[RunnerStatus, dict]: del context # unused by result if self._msg is None: return RunnerStatus.RUNNING, {} output = {'msg': self._msg} return RunnerStatus.DONE, output
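# --- Illustrative sketch (assumption, not part of the webconsole): the
# poll-with-timeout idiom used in start() above, isolated. The for/else runs
# its else branch only when the loop finishes without break, i.e. on timeout.
def _wait_until(predicate, attempts=24, interval=5):
    for _ in range(attempts):
        if predicate():
            break
        sleep(interval)
    else:
        return False  # exhausted all attempts without the condition holding
    return True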
hortonworks/hortonworks-sandbox
desktop/core/ext-py/Twisted/doc/core/benchmarks/timer.py
Python
apache-2.0
401
0.009975
"""Helper stuff for things"""
import gc gc.disable() print 'Disabled GC' def timeit(func, iter = 1000, *args, **kwargs): """timeit(func, iter = 1000, *args, **kwargs) -> elapsed time calls func iter times with args and kwargs, returns time elapsed """ import time r = range(iter) t = time.time() for i in r: func(*args, **kwarg
s) return time.time() - t
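# --- Illustrative usage (assumption, not part of the original helper):
# timing 10000 calls of a tiny function with the timeit defined above.
def _square(x):
    return x * x
print(timeit(_square, 10000, 3))  # elapsed seconds for 10000 calls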
Mlieou/oj_solutions
leetcode/python/ex_631.py
Python
mit
1,437
0.004175
import collections class Excel(object): def __init__(self, H, W): """ :type H: int :type W: str """ self.table = [[{'v': 0, 'sum': None} for _ in ra
nge(ord(W) - 64)] for __ in range(H)] def set(self, r, c, v): """ :type r: int :type c: str :type v: int :rtype: void """ self.table[r - 1][ord(c) - 65] = {'v': v, 'sum': Non
e} def get(self, r, c): """ :type r: int :type c: str :rtype: int """ cell = self.table[r - 1][ord(c) - 65] if not cell['sum']: return cell['v'] return sum(self.get(*pos) * cell['sum'][pos] for pos in cell['sum']) def sum(self, r, c, strs): """ :type r: int :type c: str :type strs: List[str] :rtype: int """ self.table [r - 1][ord(c) - 65]['sum'] = self.parse(strs) return self.get(r, c) def parse(self, strs): c = collections.Counter() for s in strs: s, e = s.split(':')[0], s.split(':')[1] if ':' in s else s for i in range(int(s[1:]), int(e[1:]) + 1): for j in range(ord(s[0]) - 64, ord(e[0]) - 64 + 1): c[(i, chr(j + 64))] += 1 return c # Your Excel object will be instantiated and called as such: # obj = Excel(H, W) # obj.set(r,c,v) # param_2 = obj.get(r,c) # param_3 = obj.sum(r,c,strs)
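# --- Illustrative usage (assumption, not from the original solution): a 3x3
# sheet where C3 sums A1 twice, once directly and once via the A1:B2 range.
def _excel_demo():
    obj = Excel(3, 'C')
    obj.set(1, 'A', 2)
    assert obj.sum(3, 'C', ['A1', 'A1:B2']) == 4   # A1 + (A1+A2+B1+B2)
    obj.set(1, 'A', 5)
    assert obj.get(3, 'C') == 10                   # formula re-evaluates lazily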
ytjia/coding-practice
algorithms/python/leetcode/MergeIntervals.py
Python
mit
1,167
0
# -*- coding: utf-8 -*- # Authors: Y. Jia <[email protected]> """ Given a collection of intervals, merge all overlapping intervals. https://leetcode.com/problems/merge-intervals/description/ """ # Definition for an interval. class Interval(object): def __init__(self, s=0, e=0): self.start = s
self.end = e def __eq__(self, other): return self.start == other.start and self.end == other.end class Solution(object): def merge(self, intervals): """ :type intervals: List[Interval] :rtype: List[Interval] """ lens = len(intervals) if lens <= 1: return intervals merged_intervals = list() intervals.sort(key=lambda interval:
interval.start) i = 0 j = i + 1 while j < lens: if intervals[i].end >= intervals[j].start: intervals[i].end = max(intervals[i].end, intervals[j].end) j += 1 else: merged_intervals.append(intervals[i]) i = j j = i + 1 merged_intervals.append(intervals[i]) return merged_intervals
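# --- Illustrative usage (assumption, not from the original file): the classic
# example, where [1,3] and [2,6] overlap and are merged into [1,6].
def _merge_demo():
    merged = Solution().merge([Interval(1, 3), Interval(2, 6), Interval(8, 10)])
    assert merged == [Interval(1, 6), Interval(8, 10)]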
FreeOpcUa/python-opcua
opcua/ua/status_codes.py
Python
lgpl-3.0
38,535
0.005969
#AUTOGENERATED!!! Date: 2020-06-19 19:44:10.693270 from opcua.ua.uaerrors import UaStatusCodeError class StatusCodes: Good = 0 Uncertain = 0x40000000 Bad = 0x80000000 BadUnexpectedError = 0x80010000 BadInternalError = 0x80020000 BadOutOfMemory = 0x80030000 BadResourceUnavailable = 0x80040000 BadCommunicationError = 0x80050000 BadEncodingError = 0x80060000 BadDecodingError = 0x80070000 BadEncodingLimitsExceeded = 0x80080000 BadRequestTooLarge = 0x80B80000 BadResponseTooLarge = 0x80B90000 BadUnknownResponse = 0x80090000 BadTimeout = 0x800A0000 BadServiceUnsupported = 0x800B0000 BadShutdown = 0x800C0000 BadServerNotConnected = 0x800D0000 BadServerHalted = 0x800E0000 BadNothingToDo = 0x800F0000 BadTooManyOperations = 0x80100000 BadTooManyMonitoredItems = 0x80DB0000 BadDataTypeIdUnknown = 0x80110000 BadCertificateInvalid = 0x80120000 BadSecurityChecksFailed = 0x80130000 BadCertificatePolicyCheckFailed = 0x81140000 BadCertificateTimeInvalid = 0x80140000 BadCertificateIssuerTimeInvalid = 0x80150000 BadCertificateHostNameInvalid = 0x80160000 BadCertificateUriInvalid = 0x80170000 BadCertificateUseNotAllowed = 0x80180000 BadCertificateIssuerUseNotAllowed = 0x80190000 BadCertificateUntrusted = 0x801A0000 BadCertificateRevocationUnknown = 0x801B0000 BadCertificateIssuerRevocationUnknown = 0x801C0000 BadCertificateRevoked = 0x801D0000 BadCertificateIssuerRevoked = 0x801E0000 BadCertificateChainIncomplete = 0x810D0000 BadUserAccessDenied = 0x801F0000 BadIdentityTokenInvalid = 0x80200000 BadIdentityTokenRejected = 0x80210000 BadSecureChannelIdInvalid = 0x80220000 BadInvalidTimestamp = 0x80230000 BadNonceInvalid = 0x80240000 BadSessionIdInvalid = 0x80250000 BadSessionClosed = 0x80260000 BadSessionNotActivated = 0x80270000 BadSubscriptionIdInvalid = 0x80280000 BadRequestHeaderInvalid = 0x802A0000 BadTimestampsToReturnInvalid = 0x802B0000 BadRequestCancelledByClient = 0x802C0000 BadTooManyArguments = 0x80E50000 BadLicenseExpired = 0x810E0000 BadLicenseLimitsExceeded = 0x810F0000 BadLicenseNotAvailable = 0x81100000 GoodSubscriptionTransferred = 0x002D0000 GoodCompletesAsynchronously = 0x002E0000 GoodOverload = 0x002F0000 GoodClamped = 0x00300000 BadNoCommunication = 0x80310000 BadWaitingForInitialData = 0x80320000 BadNodeIdInvalid = 0x80330000 BadNodeIdUnknown = 0x80340000 BadAttributeIdInvalid = 0x80350000 BadIndexRangeInvalid = 0x80360000 BadIndexRangeNoData = 0x80370000 BadDataEncodingInvalid = 0x80380000 BadDataEncodingUnsupported = 0x80390000 BadNotReadable = 0x803A0000 BadNotWritable = 0x803B0000 BadOutOfRange = 0x803C0000 BadNotSupported = 0x803D0000 BadNotFound = 0x803E0000 BadObjectDeleted = 0x803F0000 BadNotImplemented = 0x80400000 BadMonitoringModeInvalid = 0x80410000 BadMonitoredItemIdInvalid = 0x80420000 BadMonitoredItemFilterInvalid = 0x80430000 BadMonitoredItemFilterUnsupported = 0x80440000 BadFilterNotAllowed = 0x80450000 BadStructureMissing = 0x80460000 BadEventFilterInvalid = 0x80470000 BadContentFilterInvalid = 0x80480000 BadFilterOperatorInvalid = 0x80C10000 BadFilterOperatorUnsupported = 0x80C20000 BadFilterOperandCountMismatch = 0x80C30000 BadFilterOperandInvalid = 0x80490000 BadFilterElementInvalid = 0x80C40000 BadFilterLiteralInvalid = 0x80C50000 BadContinuationPointInvalid = 0x804A0000 BadNoContinuationPoints = 0x804B0000 BadReferenceTypeIdInvalid = 0x804C0000 BadBrowseDirectionInvalid = 0x804D0000 BadNodeNotInView = 0x804E0000 BadNumericOverflow = 0x81120000 BadServerUriInvalid = 0x804F0000 BadServerNameMissing = 0x80500000 BadDiscoveryUrlMissing = 0x80510000 BadSempahoreFileMissing 
= 0x80520000 BadRequestTypeInvalid = 0x80530000 BadSecurityModeRejected = 0x80540000 BadSecurityPolicyRejected = 0x80550000 BadTooManySessions = 0x80560000 BadUserSignatureInvalid = 0x80570000 BadApplicationSignatureInvalid = 0x80580000 BadNoValidCertificates = 0x80590000 BadIdentityChangeNotSupported = 0x80C60000 BadRequestCancelledByRequest = 0x805A0000 BadParentNodeIdInvalid = 0x805B0000 BadReferenceNotAllowed = 0x805C0000 BadNodeIdRejected = 0x805D0000 BadNodeIdExists = 0x805E0000 BadNodeClassInvalid = 0x805F0000 BadBrowseNameInvalid = 0x80600000 BadBrowseNameDuplicated = 0x80610000 BadNodeAttributesInvalid = 0x80620000 BadTypeDefinitionInvalid = 0x80630000 BadSourceNodeIdInvalid = 0x80640000 BadTargetNodeIdInvalid = 0x80650000 BadDuplicateReferenceNotAllowed = 0x80660000 BadInvalidSelfReference = 0x80670000 BadReferenceLocalOnly = 0x80680000 BadNoDeleteRights = 0x80690000 UncertainReferenceNotDeleted = 0x40BC0000 BadServerIndexInvalid = 0x806A0000 BadViewIdUnknown = 0x806B0000 BadViewTimestampInvalid = 0x80C90000 BadViewParameterMismatch = 0x80CA0000 BadViewVersionInvalid = 0x80CB0000 UncertainNotAllNodesAvailable = 0x40C00000 GoodResultsMayBeIncomplete = 0x00BA0000 BadNotTypeDefinition = 0x80C80000 UncertainReferenceOutOfServer = 0x406C0000 BadTooManyMatches = 0x806D0000 BadQueryTooComplex = 0x806E0000 BadNoMatch = 0x806F0000 BadMaxAgeInvalid = 0x80700000 BadSecurityModeInsufficient = 0x80E60000 BadHistoryOperationInvalid = 0x80710000 BadHistoryOperationUnsupported = 0x80720000 BadInvalidTimestampArgument = 0x80BD0000 BadWriteNotSupported = 0x80730000 BadTypeMismatch = 0x80740000 BadMethodInvalid = 0x80750000 BadArgumentsMissing = 0x80760000 BadNotExecutable = 0x81110000 BadTooManySubscriptions = 0x80770000 BadTooManyPublishRequests = 0x80780000 BadNoSubscription = 0x80790000 BadSequenceNumberUnknown = 0x807A0000 BadMessageNotAvailable = 0x807B0000 BadInsufficientClientProfile = 0x807C0000 BadStateNotActive = 0x80BF0000 BadAlreadyExists = 0x81150000 BadTcpServerTooBusy = 0x807D0000 BadTcpMessageTypeInvalid = 0x807E0000 BadTcpSecureChannelUnknown = 0x807F0000 BadTcpMessageTooLarge = 0x80800000 BadTcpNotEnoughResources = 0x80810000 BadTcpInternalError = 0x80820000 BadTcp
EndpointUrlInvalid = 0x80830000 BadRequestInterrupted
= 0x80840000 BadRequestTimeout = 0x80850000 BadSecureChannelClosed = 0x80860000 BadSecureChannelTokenUnknown = 0x80870000 BadSequenceNumberInvalid = 0x80880000 BadProtocolVersionUnsupported = 0x80BE0000 BadConfigurationError = 0x80890000 BadNotConnected = 0x808A0000 BadDeviceFailure = 0x808B0000 BadSensorFailure = 0x808C0000 BadOutOfService = 0x808D0000 BadDeadbandFilterInvalid = 0x808E0000 UncertainNoCommunicationLastUsableValue = 0x408F0000 UncertainLastUsableValue = 0x40900000 UncertainSubstituteValue = 0x40910000 UncertainInitialValue = 0x40920000 UncertainSensorNotAccurate = 0x40930000 UncertainEngineeringUnitsExceeded = 0x40940000 UncertainSubNormal = 0x40950000 GoodLocalOverride = 0x00960000 BadRefreshInProgress = 0x80970000 BadConditionAlreadyDisabled = 0x80980000 BadConditionAlreadyEnabled = 0x80CC0000 BadConditionDisabled = 0x80990000 BadEventIdUnknown = 0x809A0000 BadEventNotAcknowledgeable = 0x80BB0000 BadDialogNotActive = 0x80CD0000 BadDialogResponseInvalid = 0x80CE0000 BadConditionBranchAlreadyAcked = 0x80CF0000 BadConditionBranchAlreadyConfirmed = 0x80D00000 BadConditionAlreadyShelved = 0x80D10000 BadConditionNotShelved = 0x80D20000 BadShelvingTimeOutOfRange = 0x80D30000 BadNoData = 0x809B0000 BadBoundNotFound = 0x80D70000 BadBoundNotSupported = 0x80D80000 BadDataLost = 0x809D0000 BadDataUnavailable = 0x809E0000 BadEntryExists = 0x809F0000 BadNoEntryExists = 0x80A00000 BadTimestampNotSupported = 0x80A10000 GoodEntryInserted = 0x00A20000 GoodEntryReplaced = 0x00A30000 UncertainDataSubNormal = 0x40
kdeldycke/meta-package-manager
meta_package_manager/tests/test_cli_install.py
Python
gpl-2.0
2,682
0.001864
# Copyright Kevin Deldycke <[email protected]> and contributors. # All Rights Reserved. # # This program is Free Software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Founda
tion; either version 2 # of the License, or (at yo
ur option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. import pytest from click_extra.tests.conftest import destructive from ..pool import ALL_MANAGER_IDS from .test_cli import CLISubCommandTests @pytest.fixture def subcmd(): return "install", "arrow" class TestInstall(CLISubCommandTests): strict_selection_match = False """ The install sub-command tries each user-selected manager until it finds one providing the package we seek to install, after which the process stops. This means not all managers will be called, so we allow the CLI output checks to partially match. """ def test_no_package_id(self, invoke): result = invoke("install") assert result.exit_code == 2 assert not result.stdout assert "Error: Missing argument 'PACKAGE_ID'." in result.stderr PACKAGE_IDS = { "apm": "markdown-pdf", "apt": "wget", "apt-mint": "exiftool", "brew": "jpeginfo", "cask": "pngyu", "choco": "ccleaner", "composer": "illuminate/contracts", "flatpak": "org.gnome.Dictionary", "gem": "markdown", "mas": "747648890", # Telegram "npm": "raven", "opkg": "enigma2-hotplug", "pip": "arrow", "snap": "standard-notes", "vscode": "tamasfe.even-better-toml", "yarn": "markdown", } assert set(PACKAGE_IDS) == set(ALL_MANAGER_IDS) @destructive @pytest.mark.parametrize( "mid,package_id", (pytest.param(*v, id=v[0]) for v in PACKAGE_IDS.items()) ) def test_single_manager_install(self, invoke, mid, package_id): result = invoke("--manager", mid, "install", package_id) assert result.exit_code == 0 self.check_manager_selection(result, {mid}, reference_set=ALL_MANAGER_IDS) destructive()(TestInstall.test_stats) destructive()(TestInstall.test_default_all_managers) destructive()(TestInstall.test_manager_selection)
colinbrislawn/scikit-bio
skbio/sequence/_sequence.py
Python
bsd-3-clause
83,555
0.000108
# ---------------------------------------------------------------------------- # Copyright (c) 2013--, scikit-bio development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. # ---------------------------------------------------------------------------- from __future__ import absolute_import, division, print_function from future.builtins import range, zip from future.utils import viewitems import six import itertools import math import re import collections import copy import numbers import textwrap from contextlib import contextmanager import numpy as np from scipy.spatial.distance import hamming import pandas as pd from skbio._base import SkbioObject from skbio.sequence._base import ElasticLines from skbio.util._misc import chunk_str from skbio.util._decorator import stable, experimental class Sequence(collections.Sequence, SkbioObject): """Store biological sequence data and optional associated metadata. ``Sequence`` objects do not enforce an alphabet and are thus the most generic objects for storing biological sequence data. Subclasses ``DNA``, ``RNA``, and ``Protein`` enforce the IUPAC character set [1]_ for, and provide operations specific to, each respective molecule type. ``Sequence`` objects consist of the underlying sequence data, as well as optional metadata and positional metadata. The underlying sequence is immutable, while the metdata and positional metadata are mutable. Parameters ---------- sequence : str, Sequence, or 1D np.ndarray (np.uint8 or '\|S1') Characters representing the biological sequence itself. metadata : dict, optional Arbitrary metadata which applies to the entire sequence. A shallow copy of the ``dict`` will be made (see Examples section below for details). positional_metadata : pd.DataFrame consumable, optional Arbitrary per-character metadata (e.g., sequence read quality scores). Must be able to be passed directly to ``pd.DataFrame`` constructor. Each column of metadata must be the same length as the biological sequence. A shallow copy of the positional metadata will be made if necessary (see Examples section below for details). lowercase : bool or str, optional If ``True``, lowercase sequence characters will be converted to uppercase characters. If ``False``, no characters will be converted. If a str, it will be treated as a key into the positional metadata of the object. All lowercase characters will be converted to uppercase, and a ``True`` value will be stored in a boolean array in the positional metadata under the key. Attributes ---------- values metadata positional_metadata observed_chars See Also -------- DNA RNA Protein References ---------- .. [1] Nomenclature for incompletely specified bases in nucleic acid sequences: recommendations 1984. Nucleic Acids Res. May 10, 1985; 13(9): 3021-3030. A Cornish-Bowden Examples -------- >>> from pprint import pprint >>> from skbio import Sequence **Creating sequences:** Create a sequence without any metadata: >>> seq = Sequence('GGUCGUGAAGGA') >>> seq Sequence --------------- Stats: length: 12 --------------- 0 GGUCGUGAAG GA Create a sequence with metadata and positional metadata: >>> metadata = {'id':'seq-id', 'desc':'seq desc', 'authors': ['Alice']} >>> positional_metadata = {'quality': [3, 3, 4, 10], ... 'exons': [True, True, False, True]} >>> seq = Sequence('ACGT', metadata=metadata, ... 
positional_metadata=positional_metadata) >>> seq Sequence ----------------------------- Metadata: 'authors': <class 'list'> 'desc': 'seq desc' 'id': 'seq-id' Positional metadata: 'exons': <dtype: bool> 'quality': <dtype: int64> Stats: length: 4 ----------------------------- 0 ACGT **Retrieving underlying sequence data:** Retrieve underlying sequence: >>> seq.values # doctest: +NORMALIZE_WHITESPACE array([b'A', b'C', b'G', b'T'], dtype='|S1') Underlying sequence immutable: >>> seq.values = np.array([b'T', b'C', b'G', b'A'], dtype='|S1') Traceback (most recent call last): ... AttributeError: can't set attribute >>> seq.values[0] = b'T' Traceback (most recent call last): ... ValueError: assignment destination is read-only **Retrieving sequence metadata:** Retrieve metadata: >>> pprint(seq.metadata) # using pprint to display dict in sorted order {'authors': ['Alice'], 'desc': 'seq desc', 'id': 'seq-id'} Retrieve positional metadata: >>> seq.positional_metadata exons quality 0 True 3 1 True 3 2 False 4 3 True 10 **Updating sequence metadata:** .. warning:: Be aware that a shallow copy of ``metadata`` and ``positional_metadata`` is made for performance. Since a deep copy is not made, changes made to mutable Python objects stored as metadata may affect the metadata of other ``Sequence`` objects or anything else that shares a reference to the object. The following examples illustrate this behavior. First, let's create a sequence and update its metadata: >>> metadata = {'id':'seq-id', 'desc':'seq desc', 'authors': ['Alice']} >>> seq = Sequence('ACGT', metadata=metadata) >>> seq.metadata['id'] = 'new-id' >>> seq.metadata['pubmed'] = 12345 >>> pprint(seq.metadata) {'authors': ['Alice'], 'desc': 'seq desc', 'id': 'new-id', 'pubmed': 12345} Note that the original metadata dictionary (stored in variable ``metadata``) hasn't changed because a shallow copy was made: >>> pprint(metadata) {'authors': ['Alice'], 'desc': 'seq desc', 'id': 'seq-id'} >>> seq.metadata == metadata False Note however that since only a *shallow* copy was made, updates to mutable objects will also change the original metadata dictionary: >>> seq.metadata['authors'].append('Bob') >>> seq.metadata['authors'] ['Alice', 'Bob'] >>> metadata['autho
rs'] ['Alice', 'Bob'] This behavior can also occur when manipulating a sequence that has been derived from another sequence: >>> subseq = seq[1:3] >>> subseq Sequence ----------------------------- Metadata: 'authors': <class 'list'> 'desc': 'seq desc' 'id': 'new-id' 'pubmed': 12345 Stats: length: 2 ----------------------------- 0 CG >>> pprint(subseq.metadata) {'authors': ['Alice', 'Bob']
, 'desc': 'seq desc', 'id': 'new-id', 'pubmed': 12345} The subsequence has inherited the metadata of its parent sequence. If we update the subsequence's author list, we see the changes propagated in the parent sequence and original metadata dictionary: >>> subseq.metadata['authors'].append('Carol') >>> subseq.metadata['authors'] ['Alice', 'Bob', 'Carol'] >>> seq.metadata['authors'] ['Alice', 'Bob', 'Carol'] >>> metadata['authors'] ['Alice', 'Bob', 'Carol'] The behavior for updating positional metadata is similar. Let's create a new sequence with positional metadata that is already stored in a ``pd.DataFrame``: >>> positional_metadata = pd.DataFrame( ... {'quality': [3, 3, 4, 10], 'list': [[], [], [], []]}) >>> seq = Sequence('ACGT', positional_metadata=positional_metadata) >>> seq Sequence ----------------------------- Positional metadata: 'list': <dtype: object> 'quality': <dtype: int64> Stats: length: 4 ----------------------------- 0 ACGT >>> seq.positional_metadata list quality 0 [] 3 1 [] 3 2 [] 4 3 [] 10 Now let's update the sequence's posi
Jumpscale/jumpscale6_core
lib/JumpScale/baselib/cmdrouter/CmdRouter.py
Python
bsd-2-clause
264
0.018939
from JumpScale import j import JumpScale.baselib.redis import JumpScale.grid.jumpscripts class CmdRouter(object): def __init__(self, path=None): j.core.jumpscripts.load(path) def
route(self, organization, actor, name, **args):
pass
akanuragkumar/tensorflow-basics
ex1.py
Python
gpl-3.0
110
0.009091
import pandas as pd adv = pd.read_csv('Advertising.csv') tv_budget
_x = adv.TV.to
list() print(tv_budget_x)
spyoungtech/behave-webdriver
behave_webdriver/steps/__init__.py
Python
mit
77
0
from .actions impo
rt * from .actions_r
e import * from .expectations import *
mikaelboman/home-assistant
homeassistant/remote.py
Python
mit
15,888
0
""" Support for an interface to work with a remote instance of Home Assistant. If a connection error occurs while communicating with the API a HomeAssistantError will be raised. For more details about the Python API, please refer to the documentation at https://home-assistant.io/developers/python_api/ """ from datetime import datetime import enum import json import logging import threading import urllib.parse import requests import homeassistant.bootstrap as bootstrap import homeassistant.core as ha from homeassistant.const import ( HTTP_HEADER_HA_AUTH, SERVER_PORT, URL_API, URL_API_EVENT_FORWARD, URL_API_EVENTS, URL_API_EVENTS_EVENT, URL_API_SERVICES, URL_API_SERVICES_SERVICE, URL_API_STATES, URL_API_STATES_ENTITY, HTTP_HEADER_CONTENT_TYPE, CONTENT_TYPE_JSON) from homeassistant.exceptions import HomeAssistantError METHOD_GET = "get" METHOD_POST = "post" METHOD_DELETE = "delete" _LOGGER = logging.getLogger(__name__) class APIStatus(enum.Enum): """Represent API status.""" # pylint: disable=no-init,invalid-name,too-few-public-methods OK = "ok" INVALID_PASSWORD = "invalid_password" CANNOT_CONNECT = "cannot_connect" UNKNOWN = "unknown" def __str__(self): """Return the state.""" return self.value class API(object): """Object to pass around Home Assistant API location and credentials.""" # pylint: disable=too-few-public-methods def __init__(self, host, api_password=None, port=None, use_ssl=False): """Initalize the API.""" self.host = host self.port = port or SERVER_PORT self.api_password = api_password if use_ssl: self.base_url = "https://{}:{}".format(host, self.port) else: self.base_url = "http://{}:{}".format(host, self.port) self.status = None self._headers = { HTTP_HEADER_CONTENT_TYPE: CONTENT_TYPE_JSON, } if api_password is not None: self._headers[HTTP_HEADER_HA_AUTH] = api_password def validate_api(self, force_validate=False): """Test if we can communicate with the API.""" if self.status is None or force_validate: self.status = validate_api(self) return self.status == APIStatus.OK def __call__(self, method, path, data=None): """Make a call to the Home Assistant API.""" if data is not None: data = json.dumps(data, cls=JSONEncoder) url = urllib.parse.urljoin(self.base_url, path) try: if method == METHOD_GET: return requests.get( url, params=data, timeout=5, headers=self._headers) else: return requests.request( method, url, data=data, timeout=5, headers=self._headers) except requests.exceptions.ConnectionError: _LOGGER.exception("Error connecting to server") raise HomeAssistantError("Error connecting to server") except requests.exceptions.Timeout: error = "Timeout when talking to {}".format(self.host) _LOGGER.exception(error) raise HomeAssistantError(error) def __repr__(self): """Return the representation of the API.""" return "API({}, {}, {})".format( self.host, self.api_password, self.port) class HomeAssistant(ha.HomeAssistant): """Home Assistant that forwards work.""" # pylint: disable=super-init-not-called,too-many-instance-attributes def __init__(self, remote_api, local_api=None): """Initalize the forward instance.""" if not remote_api.validate_api(): raise HomeAssistantError( "Remote API at {}:{} not valid: {}".format( remote_api.host, remote_api.port, remote_api.status)) self.remote_api = remote_api self.pool = pool = ha.create_worker_pool() self.bus = EventBus(remote_api, pool) self.services = ha.ServiceRegistry(self.bus, pool) self.states = StateMachine(self.bus, self.remote_api) self.config = ha.Config() self.config.api = local_api def start(self): """Start the instance.""" # 
Ensure a local API exists to connect with remote if 'api' not in self.config.components: if not bootstrap.setup_component(self, 'api'): raise HomeAssistantError( 'Unable to setup local API to receive events') ha.create_timer(self) self.bus.fire(ha.EVENT_HOMEASSISTANT_START, origin=ha.EventOrigin.remote) # Give eventlet time to startup import eventlet eventlet.sleep(0.1) # Setup that events from remote_api get forwarded to local_api # Do this after we fire START, otherwise HTTP is not started if not connect_remote_events(self.remote_api, self.config.api): raise HomeAssistantError(( 'Could not setup event forwarding from api {} to ' 'local api {}').format(self.remote_api, self.config.api)) def stop(self): """Stop Home Assistant and shuts down all threads.""" _LOGGER.info("Stopping") self.bus.fire(ha.EVENT_HOMEASSISTANT_STOP, origin=ha.EventOrigin.remote) self.pool.stop() # Disconnect master event forwarding disconnect_remote_events(self.remote_api, self.config.api) class EventBus(ha.EventBus): """EventBus implementation that forwards fire_event to remote API.""" # pylint: disable=too-few-public-methods def __init__(self, api, pool=None): """Initalize the eventbus.""" super().__init__(pool) self._api = api def fire(self, event_type, event_data=None, origin=ha.EventOrigin.local): """Forward local events to remote target. Handles remote event as usual. """ # All local events that are not TIME_CHANGED a
re forwarded to API if origin == ha.EventOrigin.local and \ event_type != ha.EVENT_TIME_CHANGED: fire_event(self._api, event_type, event_data) else: super().fire(event_type, event_data, origin) class EventForwarder(object): """Listens for events and forwards to specified APIs.""" def __init__(self, hass, restrict_origin=None): """Initialize the event forwarder.""" self
.hass = hass self.restrict_origin = restrict_origin # We use a tuple (host, port) as key to ensure # that we do not forward to the same host twice self._targets = {} self._lock = threading.Lock() def connect(self, api): """Attach to a Home Assistant instance and forward events. Will overwrite old target if one exists with same host/port. """ with self._lock: if len(self._targets) == 0: # First target we get, setup listener for events self.hass.bus.listen(ha.MATCH_ALL, self._event_listener) key = (api.host, api.port) self._targets[key] = api def disconnect(self, api): """Remove target from being forwarded to.""" with self._lock: key = (api.host, api.port) did_remove = self._targets.pop(key, None) is None if len(self._targets) == 0: # Remove event listener if no forwarding targets present self.hass.bus.remove_listener(ha.MATCH_ALL, self._event_listener) return did_remove def _event_listener(self, event): """Listen and forward all events.""" with self._lock: # We don't forward time events or, if enabled, non-local events if event.event_type == ha.EVENT_TIME_CHANGED or \ (self.restrict_origin and event.origin != self.restrict_origin): return for api in self._targets.values(): fire_event(api, event.event_type, event.data) class StateMachine(ha.StateMachine): """Fire set events to an API. Uses state_change events to
rom1sqr/miasm
miasm2/arch/mips32/regs.py
Python
gpl-2.0
1,927
0.00467
#!/usr/bin/env python #-*- coding:utf-8 -*- from miasm2.expression.expression import ExprId from miasm2.core.cpu import gen_reg, gen_regs gen_reg('PC', globals()) gen_reg('PC_FETCH', globals()) gen_reg('R_LO', globals()) gen_reg('R_HI', globals()) exception_flags = ExprId('exception_flags', 32) PC_init = ExprId("PC_init") PC_FETCH_init = ExprId("PC_FETCH_init") regs32_str = ["ZERO", 'AT', 'V0', 'V1'] +\ ['A%d'%i for i in xrange(4)] +\ ['T%d'%i for i in xrange(8)] +\ ['S%d'%i for i in xrange(8)] +\ ['T%d'%i for i in xrange(8, 10)] +\ ['K0', 'K1'] +\ ['GP', 'SP', 'FP', 'RA'] regs32_expr = [ExprId(x, 32) for x in regs32_str] regs_flt_str = ['F%d'%i for i in xrange(0x20)] regs_fcc_str = ['FCC%d'%i for i in xrange(8)] R_LO = ExprId('R_LO', 32) R_HI = ExprId('R_HI', 32) R_LO_init = ExprId('R_LO_init', 32) R_HI_init = ExprId('R_HI_init', 32) cpr0_str = [
"CPR0_%d"%x for x in xrange(0x100)] cpr0_str[0] = "INDEX" cpr0_str[16] = "ENTRYLO0" cpr0_str[24] = "ENTRYLO1" cpr0_str[40] = "PAGEMASK" cpr0_str[72] = "COUNT" cpr0_str[80] = "ENTRYHI" cpr0_str[104] = "CAUSE" cpr0_str[112] = "EPC" cpr0_str[128] = "CONFIG" cpr0_str[152] = "WATCHHI" regs_cpr0_e
xpr, regs_cpr0_init, regs_cpr0_info = gen_regs(cpr0_str, globals()) gpregs_expr, gpregs_init, gpregs = gen_regs(regs32_str, globals()) regs_flt_expr, regs_flt_init, fltregs = gen_regs(regs_flt_str, globals(), sz=64) regs_fcc_expr, regs_fcc_init, fccregs = gen_regs(regs_fcc_str, globals()) all_regs_ids = [PC, PC_FETCH, R_LO, R_HI] + gpregs_expr + regs_flt_expr + \ regs_fcc_expr + regs_cpr0_expr all_regs_ids_byname = dict([(x.name, x) for x in all_regs_ids]) all_regs_ids_init = [PC_init, PC_FETCH_init, R_LO_init, R_HI_init] + \ gpregs_init + regs_flt_init + regs_fcc_init + regs_cpr0_init all_regs_ids_no_alias = all_regs_ids[:] regs_init = {} for i, r in enumerate(all_regs_ids): regs_init[r] = all_regs_ids_init[i]
pkaifosh/sima
sima/motion/motion.py
Python
gpl-2.0
10,210
0
from __future__ import absolute_import from __future__ import division from builtins import next from builtins import zip from builtins import range from past.utils import old_div from builtins import object import itertools as it import abc import numpy as np import sima from . import _motion as mc from future.utils import with_metaclass def add_with_offset(array1, array2, offset): """ >>> from sima.motion.motion import add_with_offset >>> import numpy as np >>> a1 = np.zeros((4, 4)) >>> a2 = np.ones((1, 2)) >>> add_with_offset(a1, a2, (1, 2)) >>> np.array_equal(a1[1:2, 2:4], a2) True """ slices = tuple(slice(o, o + e) for o, e in zip(offset, array2.shape)) array1[slices] += array2 class MotionEstimationStrategy(with_metaclass(abc.ABCMeta, object)): @classmethod def _make_nonnegative(cls, displacements): min_displacement = np.nanmin( [np.nanmin(s.reshape(-1, s.shape[-1]), 0) for s in displacements], 0) new_displacements = [d - min_displacement for d in displacements] min_shifts = np.nanmin([np.nanmin(s.reshape(-1, s.shape[-1]), 0) for s in new_displacements], 0) assert np.all(min_shifts == 0) return new_displacements @abc.abstractmethod def _estimate(self, dataset): return def estimate(self, dataset): """Estimate the displacements for a dataset. Parameters ---------- dataset : sima.ImagingDataset Returns ------- displacements : list of ndarray of int """ shifts = self._estimate(dataset) assert np.any(np.all(x is not np.ma.masked for x in shift) for shift in it.chain.from_iterable(shifts)) assert np.all( np.all(x is np.ma.masked for x in shift) or not np.any(x is np.m
a.masked for x in shift) for shift in it.chain.from_iterable(shifts)) shifts = self._make_nonnegative(shifts) assert np.any(np.all(x is not np.ma.masked for x in shift) for shift in it.chain.from_
iterable(shifts)) assert np.all( np.all(x is np.ma.masked for x in shift) or not np.any(x is np.ma.masked for x in shift) for shift in it.chain.from_iterable(shifts)) return shifts def correct(self, dataset, savedir, channel_names=None, info=None, correction_channels=None, trim_criterion=None): """Create a motion-corrected dataset. Parameters ---------- dataset : sima.ImagingDataset or list of sima.Sequence Dataset or sequences to be motion corrected. savedir : str The directory used to store the dataset. If the directory name does not end with .sima, then this extension will be appended. channel_names : list of str, optional Names for the channels. Defaults to ['0', '1', '2', ...]. info : dict Data for the order and timing of the data acquisition. See sima.ImagingDataset for details. correction_channels : list of int, optional Information from the channels corresponding to these indices will be used for motion correction. By default, all channels will be used. trim_criterion : float, optional The required fraction of frames during which a location must be within the field of view for it to be included in the motion-corrected imaging frames. By default, only locations that are always within the field of view are retained. Returns ------- dataset : sima.ImagingDataset The motion-corrected dataset. """ sequences = [s for s in dataset] if correction_channels: correction_channels = [ sima.misc.resolve_channels(c, channel_names, len(sequences[0])) for c in correction_channels] mc_sequences = [s[:, :, :, :, correction_channels] for s in sequences] else: mc_sequences = sequences displacements = self.estimate(sima.ImagingDataset(mc_sequences, None)) disp_dim = displacements[0].shape[-1] max_disp = np.max(list(it.chain.from_iterable(d.reshape(-1, disp_dim) for d in displacements)), axis=0) frame_shape = np.array(sequences[0].shape)[1: -1] # (z, y, x) if len(max_disp) == 2: # if 2D displacements frame_shape[1:3] += max_disp else: # if 3D displacements frame_shape += max_disp corrected_sequences = [s.apply_displacements(d, frame_shape) for s, d in zip(sequences, displacements)] planes, rows, columns = _trim_coords( trim_criterion, displacements, sequences[0].shape[1:4], frame_shape) corrected_sequences = [ s[:, planes, rows, columns] for s in corrected_sequences] return sima.ImagingDataset( corrected_sequences, savedir, channel_names=channel_names) class ResonantCorrection(MotionEstimationStrategy): """Motion estimation strategy for resonant scanner data. When acquiring data imaging data with a resonant scanner, the data acquired when imaging the same positions can be substantially different depending no whether the resonant scanner is moving in one direction or the other when passing over that row. This can cause problems when trying to motion correct the data, since even rows are collected while scanning in one direction and odd rows are collected by scanning in the other direction. The class defined here addresses this issue by using only the even rows to estimate the displacements, and then uses those displacements to motion-correct the entire dataset. Parameters ---------- base_strategy : sima.motion.MotionEstimationStrategy The underlying motion estimation strategy that will be used. offset : int Horizontal displacement to be added to odd rows. Note the convention that row 0 (i.e. the "first" row) is considered even. 
""" def __init__(self, base_strategy, offset=0): self._base_strategy = base_strategy self._offset = offset def _estimate(self, dataset): if not next(iter(dataset)).shape[2] % 2 == 0: raise ValueError( 'Resonant motion correction requires an even number of rows') downsampled_dataset = sima.ImagingDataset( [sima.Sequence.join( *it.chain.from_iterable( (seq[:, :, ::2, :, c], seq[:, :, 1::2, :, c]) for c in range(seq.shape[4]))) for seq in dataset], None) downsampled_displacements = self._base_strategy.estimate( downsampled_dataset) displacements = [] for d_disps in downsampled_displacements: disps = np.repeat(d_disps, 2, axis=2) # Repeat the displacements disps[:, :, :, 0] *= 2 # multiply y-shifts by 2 disps[:, :, 1::2, -1] += self._offset # shift even rows by offset displacements.append(disps) return displacements def _trim_coords(trim_criterion, displacements, raw_shape, untrimmed_shape): """The coordinates used to trim the corrected imaging data.""" epsilon = 1e-8 assert len(raw_shape) == 3 assert len(untrimmed_shape) == 3 if trim_criterion is None: trim_criterion = 1. if trim_criterion == 0.: trim_criterion = epsilon if not isinstance(trim_criterion, (float, int)): raise TypeError('Invalid type for trim_criterion') obs_counts = sum(_observation_counts(raw_shape, d, untrimmed_shape) for d in it.chain.from_iterable(displacements)) num_frames = sum(len(x) for x in displacements) occupancy = old_div(obs_counts.astype(float), num_fram
mganeva/mantid
scripts/test/Muon/frequency_domain_context_test.py
Python
gpl-3.0
2,194
0.002735
# Mantid Repository : https://github.com/mantidproject/mantid # # Copyright &copy; 2018 ISIS Rutherford Appleton Laboratory UKRI, # NScD Oak Ridge National Laboratory, European Spallation Source # & Institut Laue - Langevin # SPDX - License - Identifier: GPL - 3.0 + import sys from Muon.GUI.Common.muon_load_data import MuonLoadData from Muon.GUI.Common.utilities.load_utils import load_workspace_from_filename from Muon.GU
I.Common.muon_data_context import MuonDataContext from Muon.GUI.FrequencyDomainAnalysis.frequency_context import FrequencyContext from mantid.api import AnalysisDataService import unittest from Muon.GUI.Common.observer_pattern import Observer from mantid.api import FileFinder import copy if sys.version_info.major
> 2: from unittest import mock else: import mock class MuonDataContextTest(unittest.TestCase): def setUp(self): self.loaded_data = MuonLoadData() self.context = MuonDataContext(self.loaded_data) self.frequency_context = FrequencyContext(self.context) self.gui_variable_observer = Observer() self.gui_variable_observer.update = mock.MagicMock() self.context.gui_variables_notifier.add_subscriber(self.gui_variable_observer) self.context.instrument = 'CHRONUS' self.gui_variable_observer = Observer() self.gui_variable_observer.update = mock.MagicMock() self.context.gui_variables_notifier.add_subscriber(self.gui_variable_observer) filepath = FileFinder.findRuns('CHRONUS00003422.nxs')[0] load_result, run, filename = load_workspace_from_filename(filepath) self.loaded_data.add_data(workspace=load_result, run=[run], filename=filename, instrument='CHRONUS') self.context.current_runs = [[run]] self.context.update_current_data() def tearDown(self): AnalysisDataService.clear() def test_get_detectors_excluded_from_default_grouping_tables_gets_correct_groups_for_CHRONUS(self): result = self.frequency_context.get_detectors_excluded_from_default_grouping_tables() self.assertEqual(result, [256, 425]) if __name__ == '__main__': unittest.main(buffer=False, verbosity=2)
tyrjola/robotframework-wiremock
resources/scripts/setup.py
Python
mit
320
0
from distutils.core import setup

setup(name='robotframework-wiremock',
      packages=['WireMockLibrary'],
      package_dir={'': 'src'},
      version='development',
      description='Robot framework library for WireMock',
      author='Timo Yrjola',
      author_email='[email protected]',
      classifiers=[])
Sorsly/subtle
google-cloud-sdk/lib/googlecloudsdk/third_party/apis/bigtableadmin/v2/bigtableadmin_v2_messages.py
Python
mit
40,478
0.004595
"""Generated message classes for bigtableadmin version v2. """ # NOTE: This file is autogenerated and should not be edited by hand. from apitools.base.protorpclite import messages as _messages from apitools.base.py import encoding from apitools.base.py import extra_types package = 'bigtableadmin' class BigtableadminOperationsCancelRequest(_messages.Message): """A BigtableadminOperationsCancelRequest object. Fields: name: The name of the operation resource to be cancelled. """ name = _messages.StringField(1, required=True) class BigtableadminOperationsDeleteRequest(_messages.Message): """A BigtableadminOperationsDeleteRequest object. Fields: name: The name of the operation resource to be deleted. """ name = _messages.StringField(1, required=True) class BigtableadminOperationsGetRequest(_messages.Message): """A BigtableadminOperationsGetRequest object. Fields: name: The name of the operation resource. """ name = _messages.StringField(1, required=True) class BigtableadminOperationsListRequest(_messages.Message): """A BigtableadminOperationsListRequest object. Fields: filter: The standard list filter. name: The name of the operation collection. pageSize: The standard list page size. pageToken: The standard list page token. """ filter = _messages.StringField(1) name = _messages.StringField(2, required=True) pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32) pageToken = _messages.StringField(4) class BigtableadminProjectsInstancesClustersCreateRequest(_messages.Message): """A BigtableadminProjectsInstancesClustersCreateRequest object. Fields: cluster: A Cluster resource to be passed as the request body. clusterId: The ID to be used when referring to the new cluster within its instance, e.g., just `mycluster` rather than `projects/myproject/instances/myinstance/clusters/mycluster`. parent: The unique name of the instance in which to create the new cluster. Values are of the form `projects/<project>/instances/<instance>`. """ cluster = _messages.MessageField('Cluster', 1) clusterId = _messages.StringField(2) parent = _messages.StringField(3, required=True) class BigtableadminProjectsInstancesClustersDeleteRequest(_messages.Message): """A BigtableadminProjectsInstancesClustersDeleteRequest object. Fields: name: The unique name of the cluster to be deleted. Values are of the form `projects/<project>/instances/<instance>/clusters/<cluster>`. """ name = _messages.StringField(1, required=True) class BigtableadminProjectsInstancesClustersGetRequest(_messages.Message): """A BigtableadminProjectsInstancesClustersGetRequest object. Fields: name: The unique name of the requested cluster. Values are of the form `projects/<project>/instances/<instance>/clusters/<cluster>`. """ name = _messages.StringField(1, required=True) class BigtableadminProjectsInstancesClustersListRequest(_messages.Message): """A BigtableadminProjectsInstancesClustersListRequest object. Fields: pageToken: The value of `next_page_token` returned by a previous call. parent: The unique name of the instance for which a list of clusters is requested. Values are of the form `projects/<project>/instances/<instance>`. Use `<instance> = '-'` to list Clusters for all Instances in a project, e.g., `projects/myproject/instances/-`. """ pageToken = _messages.StringField(1) parent = _messages.StringField(2, required=True) class BigtableadminProjectsInstancesDeleteRequest(_messages.Message): """A BigtableadminProjectsInstancesDeleteRequest object. Fields: name: The unique name of the instance to be deleted. 
Values are of the form `projects/<project>/instances/<instance>`. """ name = _messages.StringField(1, required=True) class BigtableadminProjectsInstancesGetRequest(_messages.Message): """A BigtableadminProjectsInstancesGetRequest object. Fields: name: The unique name of the requested instance. Values are of the form `projects/<project>/instances/<instance>`. """ name = _messages.StringField(1, required=True) class BigtableadminProjectsInstancesListRequest(_messages.Message): """A BigtableadminProjectsInstan
cesListRequest object. Fields: pageToken: The value of `next_page_token` returned by a previous call. parent: The unique name of the project for which a list of instances is requested. Values are of the form `pr
ojects/<project>`. """ pageToken = _messages.StringField(1) parent = _messages.StringField(2, required=True) class BigtableadminProjectsInstancesTablesCreateRequest(_messages.Message): """A BigtableadminProjectsInstancesTablesCreateRequest object. Fields: createTableRequest: A CreateTableRequest resource to be passed as the request body. parent: The unique name of the instance in which to create the table. Values are of the form `projects/<project>/instances/<instance>`. """ createTableRequest = _messages.MessageField('CreateTableRequest', 1) parent = _messages.StringField(2, required=True) class BigtableadminProjectsInstancesTablesDeleteRequest(_messages.Message): """A BigtableadminProjectsInstancesTablesDeleteRequest object. Fields: name: The unique name of the table to be deleted. Values are of the form `projects/<project>/instances/<instance>/tables/<table>`. """ name = _messages.StringField(1, required=True) class BigtableadminProjectsInstancesTablesDropRowRangeRequest(_messages.Message): """A BigtableadminProjectsInstancesTablesDropRowRangeRequest object. Fields: dropRowRangeRequest: A DropRowRangeRequest resource to be passed as the request body. name: The unique name of the table on which to drop a range of rows. Values are of the form `projects/<project>/instances/<instance>/tables/<table>`. """ dropRowRangeRequest = _messages.MessageField('DropRowRangeRequest', 1) name = _messages.StringField(2, required=True) class BigtableadminProjectsInstancesTablesGetRequest(_messages.Message): """A BigtableadminProjectsInstancesTablesGetRequest object. Enums: ViewValueValuesEnum: The view to be applied to the returned table's fields. Defaults to `SCHEMA_ONLY` if unspecified. Fields: name: The unique name of the requested table. Values are of the form `projects/<project>/instances/<instance>/tables/<table>`. view: The view to be applied to the returned table's fields. Defaults to `SCHEMA_ONLY` if unspecified. """ class ViewValueValuesEnum(_messages.Enum): """The view to be applied to the returned table's fields. Defaults to `SCHEMA_ONLY` if unspecified. Values: VIEW_UNSPECIFIED: <no description> NAME_ONLY: <no description> SCHEMA_VIEW: <no description> FULL: <no description> """ VIEW_UNSPECIFIED = 0 NAME_ONLY = 1 SCHEMA_VIEW = 2 FULL = 3 name = _messages.StringField(1, required=True) view = _messages.EnumField('ViewValueValuesEnum', 2) class BigtableadminProjectsInstancesTablesListRequest(_messages.Message): """A BigtableadminProjectsInstancesTablesListRequest object. Enums: ViewValueValuesEnum: The view to be applied to the returned tables' fields. Defaults to `NAME_ONLY` if unspecified; no others are currently supported. Fields: pageToken: The value of `next_page_token` returned by a previous call. parent: The unique name of the instance for which tables should be listed. Values are of the form `projects/<project>/instances/<instance>`. view: The view to be applied to the returned tables' fields. Defaults to `NAME_ONLY` if unspecified; no others are currently supported. """ class ViewValueValuesEnum(_messages.Enum): """The view to be applied to the returned tables' fields. Defaults to `NAME_ONLY` if unspecified; no others are currently supported. Values: VIEW_UNSPECIFIED: <no description> NAME_ONLY: <no description> SCHEMA_VIEW: <no description> FULL: <no description> "
biomodels/MODEL1006230013
MODEL1006230013/model.py
Python
cc0-1.0
427
0.009368
import os

path = os.path.dirname(os.path.realpath(__file__))
sbmlFilePath = os.path.join(path, 'MODEL1006230013.xml')
with open(sbmlFilePath, 'r') as f:
    sbmlString = f.read()

def module_exists(module_name):
    try:
        __import__(module_name)
    except ImportError:
        return False
    else:
        return True

if module_exists('libsbml'):
    import libsbml
    sbml = libsbml.readSBMLFromString(sbmlString)
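A short sketch of inspecting the document loaded above with python-libsbml; it reuses the sbmlString read above and assumes the XML parsed cleanly:

import libsbml

doc = libsbml.readSBMLFromString(sbmlString)
model = doc.getModel()
if model is not None:
    # Standard libsbml accessors for a quick summary of the model.
    print(model.getId(), model.getNumSpecies(), model.getNumReactions())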
whav/hav
src/hav/apps/tags/apps.py
Python
gpl-3.0
92
0
from django.apps import AppConfig


class TagsConfig(AppConfig):
    name = "hav.apps.tags"
psistats/linux-client
psistats/libsensors/lib/sensors.py
Python
mit
8,525
0.002346
#!/usr/bin/env python # -*- coding: utf-8 -*- ############################################################################### # MODIFIED FROM ORIGINAL VERSION # # This file is not the same as in pypi. It includes a pull request to fix py3 # incompabilities that never ended up getting merged. ############################################################################### import os from ctypes import CDLL, c_char_p, c_int, c_void_p, c_uint, c_double, byref, Structure, get_errno,\ POINTER, c_short, c_size_t, create_string_buffer from ctypes.util import find_library from psistats.libsensors.lib import stdc version_info = (0, 0, 3) __version__ = '.'.join(map(str, version_info)) __date__ = '2014-08-17' __author__ = "Marc 'BlackJack' Rintsch" __contact__ = '[email protected]' __license__ = 'LGPL v2.1' API_VERSION = 4 DEFAULT_CONFIG_FILENAME = '/etc/sensors3.conf' LIB_FILENAME = os.environ.get('SENSORS_LIB') or find_library('sensors') SENSORS_LIB = CDLL(LIB_FILENAME) VERSION = c_char_p.in_dll(SENSORS_LIB, 'libsensors_version').value MAJOR_VERSION = version_info[0] class SensorsError(Exception): def __init__(self, message, error_number=None): Exception.__init__(self, message) self.error_number = error_number def _error_check(result, _func, _arguments): if result < 0: raise SensorsError(_strerror(result), result) return result _strerror = SENSORS_LIB.sensors_strerror _strerror.argtypes = [c_int] _strerror.restype = c_char_p _init = SENSORS_LIB.sensors_init _init.argtypes = [c_void_p] _init.restype = c_int _init.errcheck = _error_check cleanup = SENSORS_LIB.sensors_cleanup cleanup.argtypes = None cleanup.restype = None SENSORS_FEATURE_IN = 0x00 SENSORS_FEATURE_FAN = 0x01 SENSORS_FEATURE_TEMP = 0x02 SENSORS_FEATURE_POWER = 0x03 SENSORS_FEATURE_ENERGY = 0x04 SENSORS_FEATURE_CURR = 0x05 SENSORS_FEATURE_HUMIDITY = 0x06 # SENSORS_FEATURE_MAX_MAIN SENSORS_FEATURE_VID = 0x10 SENSORS_FEATURE_INTRUSION = 0x11 #SENSORS_FEATURE_MAX_OTHER, SENSORS_FEATURE_BEEP_ENABLE = 0x18 #SENSORS_FEATURE_MAX, #SENSORS_FEATURE_UNKNOWN = INT_MAX def init(config_filename=DEFAULT_CONFIG_FILENAME): file_p = stdc.fopen(config_filename.encode('utf-8'), b'r') if file_p is None: error_number = get_errno() raise OSError(error_number, os.strerror(error_number), config_filename) try: _init(file_p) finally: stdc.fclose(file_p) class Subfeature(Structure): _fields_ = [ ('name', c_char_p), ('number', c_int), ('type', c_int), ('mapping', c_int), ('flags', c_uint), ] def __repr__(self): return '<%s name=%r number=%d type=%d mapping=%d flags=%08x>' % ( self.__class__.__name__, self.name, self.number, self.type, self.mapping, self.flags ) def get_value(self): result = c_double() _get_value(byref(self.parent.chip), self.number, byref(result)) return result.value SUBFEATURE_P = POINTER(Subfeature) class Feature(Structure): _fields_ = [ ('name', c_char_p), ('number', c_int), ('type', c_int), ('_first_subfeature', c_int), ('_padding1', c_int), ] def __repr__(self): return '<%s name=%r number=%r type=%r>' % ( self.__class__.__name__, self.name, self.number, self.type ) def __iter__(self): number = c_int(0) while True: result_p = _get_all_subfeatures( byref(self.chip), byref(self), byref(number) ) if not result_p: break result = result_p.contents result.chip = self.chip result.parent = self yield result @property def label(self): # # TODO Maybe this is a memory leak! # return _get_label(byref(self.chip), byref(self)).decode('utf-8') def get_value(self): # # TODO Is the first always the correct one for all feature types? 
# return next(iter(self)).get_value() FEATURE_P = POINTER(Feature) class Bus(Structure): TYPE_ANY = -1 NR_ANY = -1 _fields_ = [ ('type', c_short), ('nr', c_short), ] def __str__(self): return ( '*' if self.type == self.TYPE_ANY else _get_adapter_name(byref(self)).decode('utf-8') ) def __repr__(self): return '%s(%r, %r)' % (self.__class__.__name__, self.type, self.nr) @property def has_wildcards(self): return self.type == self.TYPE_ANY or self.nr == self.NR_ANY BUS_P = POINTER(Bus) class Chip(Structure): # # TODO Move common stuff into `AbstractChip` class. # _fields_ = [ ('prefix', c_char_p), ('bus', Bus), ('addr', c_int), ('path', c_char_p), ] PREFIX_ANY = None ADDR_ANY = -1 def __new__(cls, *args): result = super(Chip, cls).__new__(cls) if args: _parse_chip_name(args[0].encode('utf-8'), byref(result)) return result def __init__(self, *_args): Structure.__init__(self) # # Need to bind the following to the instance so it is available in # `__del__()` when the interpreter shuts down. # self._free_chip_name = _free_chip_name self.byref = byref def __del__(self): if self._b_needsfree_: self._free_chip_name(self.byref(self)) def __repr__(self): return '<%s prefix=%r bus=%r addr=%r path=%r>' % ( ( self.__class__.__name__, self.prefix, self.bus, self.addr, self.path ) ) def __str__(self): buffer_size = 200 result = create_string_buffer(buffer_size) used = _snprintf_chip_name(result, len(result), byref(self)) assert used < buffer_size return result.value.decode('utf-8') def __iter__(self): number = c_int(0) while True: result_p = _get_features(byref(self), byref(number)) if not result_p: break result = result_p.contents result.chip = self yield result @property def adapter_name(self): return str(self.bus) @property def has_wildcards(self): return ( self.prefix == self.PREFIX_ANY or self.addr == self.ADDR_ANY or self.bus.has_wildcards ) CHIP_P = POINTER(Chip) _parse_chip_name = SENSORS_LIB.sensors_parse_chip_name _parse_chip_name.argtypes = [c_char_p, CHIP_P] _parse_chip_name.restype = c_int _parse_chip_name.errcheck = _error_check _free_chip_name = SENSORS_LIB.sensors_free_chip_name _free_chip_name.argtypes = [CHIP_P] _free_chip_name.restype = None _snprintf_chip_name = SENSORS_LIB.sensors_snprintf_chip_name _snprintf_chip_name.argtypes = [c_char_p, c_size_t, CHIP_P] _snprintf_chip_name.restype = c_int _snprintf_chip_name.errcheck = _error_check _get_adapter_name = SENSORS_LIB.sensors_get_adapter_name _get_adapter_name.argtypes = [BUS_P] _get_adapter_name.restype = c_char_p _get_label = SENSORS_LIB.sensors_get_label _get_label.argtypes = [CHIP_P, FEATURE_P] _get_label.restype = c_char_p _get_value = SENSORS_LIB.sensors_get_value _get_value.argtypes = [CHIP_P, c_int, POINTER(c_double)] _get_value.restype = c_int _get_value.errcheck = _error_check #
# TODO sensors_set_value() # TODO sensors_do_chip_sets() # _get_detected_chips = SENSORS_LIB.sensors_get_detected_chips _get_detected_chips.arg
types = [CHIP_P, POINTER(c_int)] _get_detected_chips.restype = CHIP_P _get_features = SENSORS_LIB.sensors_get_features _get_features.argtypes = [CHIP_P, POINTER(c_int)] _get_features.restype = FEATURE_P _get_all_subfeatures = SENSORS_LIB.sensors_get_all_subfeatures _get_all_subfeatures.argtypes = [CHIP_P, FEATURE_P,
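A hedged usage sketch for the bindings above; it reaches into the module's private ctypes pointers (e.g. _get_detected_chips, declared above) rather than any public helper, assumes the import path matches the file location, and expects the default /etc/sensors3.conf to exist:

from ctypes import byref, c_int
from psistats.libsensors.lib import sensors  # import path assumed from the file location

sensors.init()  # parses DEFAULT_CONFIG_FILENAME
try:
    nr = c_int(0)
    while True:
        chip_p = sensors._get_detected_chips(None, byref(nr))  # private binding shown above
        if not chip_p:
            break
        chip = chip_p.contents
        for feature in chip:  # Chip.__iter__ yields Feature objects
            print('%s: %s = %.2f' % (chip, feature.label, feature.get_value()))
finally:
    sensors.cleanup()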
Ahmad31/Web_Flask_Cassandra
flask/lib/python2.7/site-packages/pony/orm/dbapiprovider.py
Python
apache-2.0
33,878
0.009947
from __future__ import absolute_import, print_function, division from pony.py23compat import PY2, basestring, unicode, buffer, int_types import os, re, json from decimal import Decimal, InvalidOperation from datetime import datetime, date, time, timedelta from uuid import uuid4, UUID import pony from pony.utils import is_utf8, decorator, throw, localbase, deprecated from pony.converting import str2date, str2time, str2datetime, str2timedelta from pony.orm.ormtypes import LongStr, LongUnicode, RawSQLType, TrackedValue, Json class DBException(Exception): def __init__(exc, original_exc, *args): args = args or getattr(original_exc, 'args', ()) Exception.__init__(exc, *args) exc.original_exc = original_exc # Exception inheritance layout of DBAPI 2.0-compatible provider: # # Exception # Warning # Error # InterfaceError # DatabaseError # DataError # OperationalError # IntegrityError # InternalError # ProgrammingError # NotSupportedError class Warning(DBException): pass class Error(DBException): pass class InterfaceError(Error): pass class DatabaseError(Error): pass class DataError(DatabaseError): pass class OperationalError(DatabaseError): pass class IntegrityError(DatabaseError): pass class InternalError(DatabaseError): pass class ProgrammingError(DatabaseError): pass class NotSupportedError(DatabaseError): pass @decorator def wrap_dbapi_exceptions(func, provider, *args, **kwargs): dbapi_module = provider.dbapi_module try: return func(provider, *args, **kwargs) except dbapi_module.NotSupportedError as e: raise NotSupportedError(e) except dbapi_module.ProgrammingError as e: raise ProgrammingError(e) except dbapi_module.InternalError as e: raise InternalError(e) except dbapi_module.IntegrityError as e: raise IntegrityError(e) except dbapi_module.OperationalError as e: raise OperationalError(e) except dbapi_module.DataError as e: raise DataError(e) except dbapi_module.DatabaseError as e: raise DatabaseError(e) except dbapi_module.InterfaceError as e: if e.args == (0, '') and getattr(dbapi_module, '__name__', None) == 'MySQLdb': throw(InterfaceError, e, 'MySQL server misconfiguration') raise InterfaceError(e) except dbapi_module.Error as e: raise Error(e) except dbapi_module.Warning as e: raise Warning(e) def unexpected_args(attr, args): throw(TypeError, 'Unexpected positional argument%s for attribute %s: %r' % ((args > 1 and 's' or ''), attr, ', '.join(repr(arg) for arg in args))) version_re = re.compile('[0-9\.]+') def get_version_tuple(s): m = version_re.match(s) if m is not None: components = m.group(0).split('.') return tuple(int(component) for component in components) return None class DBAPIProvider(object): paramstyle = 'qmark' quote_char = '"' max_params_count = 200 max_name_len = 128 table_if_not_exists_syntax = True index_if_not_exists_syntax = True max_time_precision = default_time_precision = 6 uint64_support = False select_for_update_nowait_syntax = True # SQLite and PostgreSQL does not limit varchar max length. 
varchar_default_max_len = None dialect = None dbapi_module = None dbschema_cls = None translator_cls = None sqlbuilder_cls = None name_before_table = 'schema_name' default_schema_name = None fk_types = { 'SERIAL' : 'INTEGER', 'BIGSERIAL' : 'BIGINT' } def __init__(provider, *args, **kwargs): pool_mockup = kwargs.pop('pony_pool_mockup', None) if pool_mockup: provider.pool = pool_mockup else: provider.pool = provider.get_pool(*args, **kwargs) connection = provider.connect() provider.inspect_connection(connection) provider.release(connection) @wrap_dbapi_exceptions def inspect_connection(provider, connection): pass def normalize_name(provider, name): return name[:provider.max_name_len] def get_default_entity_table_name(provider, entity): return provider.normalize_name(entity.__name__) def get_default_m2m_table_name(provider, attr, reverse): if attr.symmetric: assert reverse is attr name = attr.entity.__name__ + '_' + attr.name else: name = attr.entity.__name__ + '_' + reverse.entity.__name__ return provider.normalize_name(name) def get_default_column_names(provider, attr, reverse_pk_columns=None): normalize = provider.normalize_name if reverse_pk_columns is None: return [ normalize(attr.name) ] elif len(reverse_pk_columns) == 1: return [ normalize(attr.name) ] else: prefix = attr.name + '_' return [ normalize(prefix + column) for column in reverse_pk_columns ] def get_default_m2m_column_names(provider, entity): normalize = provider.normalize_name columns = entity._get_pk_columns_() if len(columns) == 1: return [ normalize(entity.__name__.lower()) ] else: prefix = entity.__name__.lower() + '_' return [ normalize(prefix + column) for column in columns ] def get_default_index_name(provider, table_name, column_names, is_pk=False, is_unique=False, m2m=False): if is_pk: index_name = 'pk_%s' % table_name else: if is_unique: template = 'unq_%(tname)s__%(cnames)s' elif m2m: template = 'idx_%(tname)s' else: template = 'idx_%(tname)s__%(cnames)s' index_name = template % dict(tname=table_name, cnames='_'.join(name for name in column_names)) return provider.normalize_name(index_name.lower()) def get_default_fk_name(provider, child_table_name, parent_table_name, child_column_names): fk_name = 'fk_%s__%s' % (child_table_name, '__'.join(child_column_names)) return provider.normalize_name(fk_name.lower()) def split_table_name(provider, table_name): if isinstance(table_name, basestring): return provider.default_schema_name, table_name if not table_name: throw(TypeError, 'Invalid table name: %r' % table_name) if len(table_name) != 2: size = len(table_name) throw(TypeError, '%s qualified table name must have two components: ' '%s and table_name. Got %d component%s: %s' % (provider.dialect, provider.name_before_table, size, 's' if size != 1 else '', table_name)) return table_name[0], table_name[1] def quote_name(provider, name): quote_char = provider.quote_char if isinstance(name, basestring): name = name.replace(quote_char, quote_char+quote_char) return quote_char + name + quote_cha
r return '.'.join(provider.quote_name(item) for item in name) def normalize_vars(provider, vars, vartypes): pass def ast2sql(provider, ast): builder = provider.sqlbuilder_cls(provider, ast) re
turn builder.sql, builder.adapter def should_reconnect(provider, exc): return False @wrap_dbapi_exceptions def connect(provider): return provider.pool.connect() @wrap_dbapi_exceptions def set_transaction_mode(provider, connection, cache): pass @wrap_dbapi_exceptions def commit(provider, connection, cache=None): core = pony.orm.core if core.debug: core.log_orm('COMMIT') connection.commit() if cache is not None: cache.in_transaction = False @wrap_dbapi_exceptions def rollback(provider, connection, cache=None): core = pony.orm.core if core.debug: core.log_orm('ROLLBACK') connection.rollback() if cache is not None: cache.in_transaction = False @
joequant/algobroker
algobroker/dispatcher.py
Python
bsd-2-clause
1,618
0.000618
#!/usr/bin/python3
# Copyright (C) 2015 Bitquant Research Laboratories (Asia) Limited
# Released under the Simplified BSD License
import my_path
import time
import zmq.green as zmq
import pprint
import algobroker
import msgpack

class Dispatcher(algobroker.Broker):
    def __init__(self):
        algobroker.Broker.__init__(self, "dispatcher")
        # send work
        self.sms_sender = self.socket(zmq.PUSH)
        self.sms_sender.connect(algobroker.ports['data']['broker_plivo'])
        self.bitmex_sender = self.socket(zmq.PUSH)
        self.bitmex_sender.connect(algobroker.ports['data']['broker_bitmex'])
        self.web_sender = self.socket(zmq.PUSH)
        self.web_sender.connect(algobroker.ports['data']['broker_web'])

    def process_data(self, data):
        if (data['cmd'] == "log"):
            self.warning(pprint.pformat(data))
        elif (data['cmd'] == 'alert' and data['type'] == 'sms'):
            self.debug("sending sms")
            self.debug(pprint.pformat(data))
            self.sms_sender.send(msgpack.packb(data))
        elif (data['cmd'] == 'alert' and data['type'] == 'web'):
            self.debug("sending web")
            self.debug(pprint.pformat(data))
            self.web_sender.send(msgpack.packb(data))
        elif (data.get('broker', None) == 'bitmex'):
            self.debug("sending bitmex")
            self.debug(pprint.pformat(data))
            self.bitmex_sender.send(msgpack.packb(data))
        else:
            self.error("unknown action")

if __name__ == "__main__":
    dispatcher = Dispatcher()
    dispatcher.run()
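The message shapes the dispatcher accepts can be read off process_data(); a sketch of well-formed payloads (field names beyond cmd/type/broker are illustrative):

import msgpack

sms_alert = {'cmd': 'alert', 'type': 'sms', 'text': 'price move'}   # routed to broker_plivo
web_alert = {'cmd': 'alert', 'type': 'web', 'text': 'price move'}   # routed to broker_web
bitmex_msg = {'broker': 'bitmex', 'action': 'order'}                # routed to broker_bitmex

# Payloads travel msgpack-encoded, matching the send() calls above.
wire_bytes = msgpack.packb(sms_alert)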
Svolcano/python_exercise
dianhua/worker/crawler/china_telecom/neimenggu/main.py
Python
mit
17,659
0.003293
# -*- coding: utf-8 -*- import re import json import traceback import sys import time import datetime import random # 这段代码是用于解决中文报错的问题 reload(sys) sys.setdefaultencoding("utf8") from datetime import date from scrapy.selector import Selector from dateutil.relativedelta import relativedelta if __name__ == '__main__': import sys sys.path.append('../..') sys.path.append('../../..') sys.path.append('../../../..') from base_crawler import BaseCrawler from crawler.china_telecom_tool import login_unity else: from worker.crawler.base_crawler import BaseCrawler from worker.crawler.china_telecom_tool import login_unity class Crawler(BaseCrawler): """ kwargs 包含 'tel': str, 'pin_pwd': str, 'id_card': str, 'full_name': unicode, 'sms_code': str, 'captcha_code': str 錯誤等級 0: 成功 1: 帳號密碼錯誤 2: 認證碼錯誤 9: 其他錯誤 """ def __init__(self, **kwargs): """ 初始化 """ super(Crawler, self).__init__(**kwargs) self.pin_pwd_error_times = 0 self.info_res = '' def need_parameters(self, **kwargs): return ['pin_pwd'] def get_verify_type(self, **kwargs):
return 'SMS' def login(self, **kwargs): ProvinceID = '07' code, key = login_unity(self, ProvinceID, **kwargs) if code != 0: return code, key cookie_url = 'http://nm.189.cn/selfservice/service/userLogin' cookie_data = { "number" : kwargs['tel'], "intLoginType":"4", "areaCode":"0471", "isBusinessCustType":"N", "identifyType":"B", "userLoginType":"4",
"password":"", "randomPass":"", "noCheck":"N", "isSSOLogin":"Y", "sRand":"SSOLogin" } code, key, resp = self.post(cookie_url, data=json.dumps(cookie_data)) if code != 0: return code, key personal_info_url = 'http://www.189.cn/dqmh/userCenter/userInfo.do?method=editUserInfo_new&fastcode=10000557&cityCode=nm' for retry in xrange(self.max_retry): code, key, tel_info_res = self.get(personal_info_url) if code != 0: return code, key if u'真实姓名' in tel_info_res.text: self.info_res = tel_info_res.text return 0, "success" else: pass else: self.log('crawler', "request_error", tel_info_res) return 9, "website_busy_error" def send_verify_request(self, **kwargs): """ 請求發送短信,或是下載圖片,或是同時發送請求 return status_key: str, 狀態碼金鑰,參考status_code level: int, 錯誤等級 message: unicode, 詳細的錯誤信息 image_str: str, Captcha圖片的base64字串, SMS則回空 """ send_sms_url = "http://nm.189.cn/selfservice/bill/xdQuerySMS" send_sms_data = { "phone": kwargs['tel'] } code, key, resp = self.post(send_sms_url, data=json.dumps(send_sms_data)) if code != 0: return code, key, "" if resp.text: try: resp_json_response = resp.json() except: error = traceback.format_exc() self.log('crawler', "Not json file : {}, resp:{}".format(error, resp.history), resp) return 9, 'website_busy_error', '' if resp_json_response.get('flag', '') == '0': return 0, "success", "" elif resp_json_response.get('flag', '') == '2': self.log('crawler', "send_sms_error", resp) return 9, "send_sms_error", '' else: self.log('crawler', "unknown_error", resp) return 9, "unknown_error", '' else: self.log('crawler', "send_sms_error", resp) return 9, "send_sms_error", '' def verify(self, **kwargs): """ 執行二次驗證 return status_key: str, 狀態碼金鑰,參考status_code level: int, 錯誤等級 message: unicode, 詳細的錯誤信息 """ check_sms_url = "http://nm.189.cn/selfservice/bill/xdQuerySMSCheck" check_sms_data = { 'code': kwargs['sms_code'] } code, key, resp = self.post(check_sms_url, data=json.dumps(check_sms_data)) if code != 0: return code, key try: resp_json_response = resp.json() except: error = traceback.format_exc() self.log('crawler', "json_error : %s" % error, resp) return 9, 'json_error' if resp_json_response.get('flag', '') == '0': self.log('crawler', "verify_error", resp) return 2, "verify_error" # 如果直接返回详单数据按成功处理。 elif resp_json_response.get('flag', '') == '1' or 'resultNum' in resp.text or 'items' in resp.text: return 0, "success" else: self.log('crawler', "unknown_error", resp) return 9, "unknown_error" def crawl_info(self, **kwargs): """ 爬取帳戶資訊 return status_key: str, 狀態碼金鑰,參考status_code level: int, 錯誤等級 message: unicode, 詳細的錯誤信息 info: dict, 帳戶信息,參考帳戶信息格式 """ user_info = {} selector = Selector(text=self.info_res) try: full_name = selector.xpath('//input[@name="realName"]/@value').extract() user_info['full_name'] = full_name[0] if full_name else '' id_card = selector.xpath('//input[@name="certificateNumber"]/@value').extract() user_info['id_card'] = id_card[0] if id_card else '' address = re.findall(u'id="address".*?;">(.*?)</textarea>', self.info_res) user_info['address'] = address[0] if address else '' user_info['open_date'] = "" user_info['is_realname_register'] = True except: error = traceback.format_exc() self.log('crawler', "html_error : %s" % error, '') return 9, "html_error", {} return 0, "success", user_info def random_sleep(self, tm, modulus=3): time.sleep(random.uniform(tm / modulus / 1.5, 1.5 * tm / modulus)) def crawl_call_log(self, **kwargs): """ 爬取詳單 return status_key: str, 狀態碼金鑰,參考status_code level: int, 錯誤等級 message: unicode, 詳細的錯誤信息 call_log: list, 通信詳單,參考詳單格式 """ call_log = [] crawl_num 
= 0 call_log_url = "http://nm.189.cn/selfservice/bill/xdQuery" today = date.today() missing_list = [] pos_missing = [] search_month = [x for x in range(0, -6, -1)] for each_month in search_month: query_date = today + relativedelta(months=each_month) search_month = "%d%02d" % (query_date.year, query_date.month) call_log_data = { "billingCycle": "{}{}".format(query_date.year, str(query_date.month).zfill(2)), 'accNbr': kwargs['tel'], 'accNbrType': '4', 'areaCode': '0478', 'pageNo': -1, 'pageRecords': -1, 'prodSpecId': '378', 'qtype': '0', 'isYWlQuery': 'N', } header = { 'Referer': 'http://nm.189.cn/selfservice/bill/xd', 'Host': 'nm.189.cn', 'Content-Type': 'application/json' } start_time = time.time() end_time = start_time + 10 aid_time_dict = dict() retry_times = self.max_retry log_for_retry = [] while 1: log_for_retry.append((1, retry_times)) retry_times -= 1 code, key, resp = self.post(call_log_url, data=json.dumps(call_log_data), headers=header) if code: missing_flag = True elif 'POR-2102' in resp.text: # 无查询结果,这个月没有数据 missing_flag = False
mikuyves/fashion-finder
flickr/settings.py
Python
gpl-3.0
298
0
# -*- coding: utf-8 -*-
import sys
import os
from os.path import dirname

# Set the directory for using the modules in the same project such as eshop.
PROJECT_PATH = dirname(os.path.abspath(os.path.dirname(__file__)))
ESHOP_PATH = os.path.join(PROJECT_PATH, 'eshop/')
sys.path.append(PROJECT_PATH)
pitunti/alfaPitunti
plugin.video.alfa/channels/anitoonstv.py
Python
gpl-3.0
6,699
0.005526
# -*- coding: utf-8 -*- import re from channels import renumbertools from channelselector import get_thumb from core import httptools from core import scrapertools from core import servertools from core import tmdb from core.item import Item from platformcode import config, logger from channels import autoplay IDIOMAS = {'latino': 'Latino'} list_language = IDIOMAS.values() list_servers = ['openload', 'okru', 'netutv', 'rapidvideo' ] list_quality = ['default'] host = "http://www.anitoonstv.com" def mainlist(item): logger.info() thumb_series = get_thumb("channels_tvshow.png") autoplay.init(item.channel, list_servers, list_quality) itemlist = list() itemlist.append(Item(channel=item.channel, action="lista", title="Anime", url=host, thumbnail=thumb_series)) itemlist.append(Item(channel=item.channel, action="lista", title="Series Animadas", url=host, thumbnail=thumb_series)) itemlist.append(Item(channel=item.channel, action="lista", title="Novedades", url=host, thumbnail=thumb_series)) item
list.append(Item(channel=item.channel, action="lista", title="Pokemon", url=host, thumbnail=thumb_series)) itemlist = renumbertools.show_option(item.channel, itemli
st) autoplay.show_option(item.channel, itemlist) return itemlist def lista(item): logger.info() itemlist = [] data = httptools.downloadpage(item.url).data data = re.sub(r"\n|\r|\t|\s{2}|&nbsp;", "", data) if 'Novedades' in item.title: patron_cat = '<div class="activos"><h3>(.+?)<\/h2><\/a><\/div>' patron = '<a href="(.+?)"><h2><span>(.+?)<\/span>' else: patron_cat = '<li><a href=.+?>' patron_cat += str(item.title) patron_cat += '<\/a><div>(.+?)<\/div><\/li>' patron = "<a href='(.+?)'>(.+?)<\/a>" data = scrapertools.find_single_match(data, patron_cat) matches = scrapertools.find_multiple_matches(data, patron) for link, name in matches: if "Novedades" in item.title: url = link title = name.capitalize() else: url = host + link title = name if ":" in title: cad = title.split(":") show = cad[0] else: if "(" in title: cad = title.split("(") if "Super" in title: show = cad[1] show = show.replace(")", "") else: show = cad[0] else: show = title if "&" in show: cad = title.split("xy") show = cad[0] context1=[renumbertools.context(item), autoplay.context] itemlist.append( item.clone(title=title, url=url, plot=show, action="episodios", show=show, context=context1)) tmdb.set_infoLabels(itemlist) return itemlist def episodios(item): logger.info() itemlist = [] data = httptools.downloadpage(item.url).data data = re.sub(r"\n|\r|\t|\s{2}|&nbsp;", "", data) patron = '<div class="pagina">(.+?)<\/div><div id="fade".+?>' data = scrapertools.find_single_match(data, patron) patron_caps = "<a href='(.+?)'>Capitulo: (.+?) - (.+?)<\/a>" matches = scrapertools.find_multiple_matches(data, patron_caps) show = scrapertools.find_single_match(data, '<span>Titulo.+?<\/span>(.+?)<br><span>') scrapedthumbnail = scrapertools.find_single_match(data, "<img src='(.+?)'.+?>") scrapedplot = scrapertools.find_single_match(data, '<span>Descripcion.+?<\/span>(.+?)<br>') i = 0 temp = 0 for link, cap, name in matches: if int(cap) == 1: temp = temp + 1 if int(cap) < 10: cap = "0" + cap season = temp episode = int(cap) season, episode = renumbertools.numbered_for_tratk( item.channel, item.show, season, episode) date = name title = "%sx%s %s (%s)" % (season, str(episode).zfill(2), "Episodio %s" % episode, date) # title = str(temp)+"x"+cap+" "+name url = host + "/" + link if "NO DISPONIBLE" not in name: itemlist.append(Item(channel=item.channel, action="findvideos", title=title, thumbnail=scrapedthumbnail, plot=scrapedplot, url=url, show=show)) if config.get_videolibrary_support() and len(itemlist) > 0: itemlist.append(Item(channel=item.channel, title="Añadir esta serie a la videoteca", url=item.url, action="add_serie_to_library", extra="episodios", show=show)) return itemlist def findvideos(item): logger.info() itemlist = [] data = httptools.downloadpage(item.url).data data1 = re.sub(r"\n|\r|\t|\s{2}|&nbsp;", "", data) data_vid = scrapertools.find_single_match(data1, '<div class="videos">(.+?)<\/div><div .+?>') # name = scrapertools.find_single_match(data,'<span>Titulo.+?<\/span>([^<]+)<br>') scrapedplot = scrapertools.find_single_match(data, '<br><span>Descrip.+?<\/span>([^<]+)<br>') scrapedthumbnail = scrapertools.find_single_match(data, '<div class="caracteristicas"><img src="([^<]+)">') itemla = scrapertools.find_multiple_matches(data_vid, '<div class="serv">.+?-(.+?)-(.+?)<\/div><.+? 
src="(.+?)"') for server, quality, url in itemla: if "Calidad Alta" in quality: quality = quality.replace("Calidad Alta", "HQ") server = server.lower().strip() if "ok" == server: server = 'okru' if "netu" == server: continue itemlist.append(item.clone(url=url, action="play", server=server, contentQuality=quality, thumbnail=scrapedthumbnail, plot=scrapedplot, title="Enlace encontrado en %s: [%s]" % (server.capitalize(), quality))) autoplay.start(itemlist, item) return itemlist def play(item): logger.info() itemlist = [] # Buscamos video por servidor ... devuelve = servertools.findvideosbyserver(item.url, item.server) if not devuelve: # ...sino lo encontramos buscamos en todos los servidores disponibles devuelve = servertools.findvideos(item.url, skip=True) if devuelve: # logger.debug(devuelve) itemlist.append(Item(channel=item.channel, title=item.contentTitle, action="play", server=devuelve[0][2], url=devuelve[0][1], thumbnail=item.thumbnail)) return itemlist
tbeadle/django
tests/model_fields/test_booleanfield.py
Python
bsd-3-clause
4,416
0.000226
from django.core.exceptions import ValidationError from django.db import IntegrityError, models, transaction from django.test import SimpleTestCase, TestCase from .models import BooleanModel, FksToBooleans, NullBooleanModel class BooleanFieldTests(TestCase): def _test_get_prep_value(self, f): self.assertEqual(f.get_prep_value(True), True) self.assertEqual(f.get_prep_value('1'), True) self.assertEqual(f.get_prep_value(1), True) self.assertEqual(f.get_prep_value(False), False) self.assertEqual(f.get_prep_value('0'), False) self.assertEqual(f.get_prep_value(0), False) self.assertEqual(f.get_prep_value(None), None) def _test_to_python(self, f): self.assertIs(f.to_python(1), True) self.assertIs(f.to_python(0), False) def test_booleanfield_get_prep_value(self): self._test_get_prep_value(models.BooleanField()) def test_nullbooleanfield_get_prep_value(self): self._test_get_prep_value(models.NullBooleanField()) def test_booleanfield_to_python(self): self._test_to_python(models.BooleanField()) def test_nullbooleanfield_to_python(self): self._test_to_python(models.NullBooleanField()) def test_booleanfield_choices_blank(self): """ BooleanField with choices and defaults doesn't generate a formfield with the blank option (#9640, #10549). """ choices = [(1, 'Si'), (2, 'No')] f = models.BooleanField(choices=choices, default=1, null=False) self.assertEqual(f.formfield().choices, choices) def test_return_type(self): b = BooleanModel.objects.create(bfield=True) b.refresh_from_db() self.assertEqual(b.bfield, True) b2 = Boolean
Model.objects.create(bfield=False) b2.refresh_from_db() self.assertEqual(b2.bfield, False) b3 = NullBooleanModel.objects.create(nbfield=True) b3.refresh_from_db() self.assertEqual(b3.nbfield, True) b4 = NullBooleanModel.objects.create(nbfield=False) b4.refresh_from_db() self.assertEqual(b4.nbfield, False) # When an extra
clause exists, the boolean conversions are applied with # an offset (#13293). b5 = BooleanModel.objects.all().extra(select={'string_col': 'string'})[0] self.assertNotIsInstance(b5.pk, bool) def test_select_related(self): """ Boolean fields retrieved via select_related() should return booleans. """ bmt = BooleanModel.objects.create(bfield=True) bmf = BooleanModel.objects.create(bfield=False) nbmt = NullBooleanModel.objects.create(nbfield=True) nbmf = NullBooleanModel.objects.create(nbfield=False) m1 = FksToBooleans.objects.create(bf=bmt, nbf=nbmt) m2 = FksToBooleans.objects.create(bf=bmf, nbf=nbmf) # select_related('fk_field_name') ma = FksToBooleans.objects.select_related('bf').get(pk=m1.id) self.assertEqual(ma.bf.bfield, True) self.assertEqual(ma.nbf.nbfield, True) # select_related() mb = FksToBooleans.objects.select_related().get(pk=m1.id) mc = FksToBooleans.objects.select_related().get(pk=m2.id) self.assertEqual(mb.bf.bfield, True) self.assertEqual(mb.nbf.nbfield, True) self.assertEqual(mc.bf.bfield, False) self.assertEqual(mc.nbf.nbfield, False) def test_null_default(self): """ A BooleanField defaults to None, which isn't a valid value (#15124). """ boolean_field = BooleanModel._meta.get_field('bfield') self.assertFalse(boolean_field.has_default()) b = BooleanModel() self.assertIsNone(b.bfield) with transaction.atomic(): with self.assertRaises(IntegrityError): b.save() nb = NullBooleanModel() self.assertIsNone(nb.nbfield) nb.save() # no error class ValidationTest(SimpleTestCase): def test_boolean_field_doesnt_accept_empty_input(self): f = models.BooleanField() with self.assertRaises(ValidationError): f.clean(None, None) def test_nullbooleanfield_blank(self): """ NullBooleanField shouldn't throw a validation error when given a value of None. """ nullboolean = NullBooleanModel(nbfield=None) nullboolean.full_clean()
dairdr/voteapp
voteapp/apps/vote/mixing.py
Python
mit
1,724
0.020894
# -*- coding: utf-8 -*- """Defines mixing class. You can use it for inherit from Class Base Views, it was developed by Timothée Peignier https://gist.github.com/cyberdelia/1231560 """ from django.con
trib.auth.decorators import login_required from django.utils.cache import patch_response_headers from django.utils.decorators import method_decorator from django.views.decorators.cache import cache_page, never_cache from django.views.decorators.csrf import csrf_exempt class NeverCacheMixin(object):
@method_decorator(never_cache) def dispatch(self, *args, **kwargs): return super(NeverCacheMixin, self).dispatch(*args, **kwargs) class LoginRequiredMixin(object): @method_decorator(login_required) def dispatch(self, *args, **kwargs): return super(LoginRequiredMixin, self).dispatch(*args, **kwargs) class CSRFExemptMixin(object): @method_decorator(csrf_exempt) def dispatch(self, *args, **kwargs): return super(CSRFExemptMixin, self).dispatch(*args, **kwargs) class CacheMixin(object): cache_timeout = 60 def get_cache_timeout(self): return self.cache_timeout def dispatch(self, *args, **kwargs): return cache_page(self.get_cache_timeout())(super(CacheMixin, self).dispatch)(*args, **kwargs) class CacheControlMixin(object): cache_timeout = 60 def get_cache_timeout(self): return self.cache_timeout def dispatch(self, *args, **kwargs): response = super(CacheControlMixin, self).dispatch(*args, **kwargs) patch_response_headers(response, self.get_cache_timeout()) return response class JitterCacheMixin(CacheControlMixin): cache_range = [40, 80] def get_cache_range(self): return self.cache_range def get_cache_timeout(self): return random.randint(*self.get_cache_range())
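A minimal usage sketch for these mixins with a standard Django class-based view; the view and template names are illustrative:

from django.views.generic import TemplateView

class DashboardView(LoginRequiredMixin, CacheControlMixin, TemplateView):
    # Requires login, then stamps Cache-Control headers for two minutes.
    template_name = 'dashboard.html'
    cache_timeout = 120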
sprinkler/rainmachine-developer-resources
sdk-parsers/RMUtilsFramework/rmTimeUtils.py
Python
gpl-3.0
12,413
0.01007
# Copyright (c) 2014 RainMachine, Green Electronics LLC # All rights reserved. # Authors: Nicu Pavel <[email protected]> # Codrin Juravle <[email protected]> from datetime import datetime, timedelta, tzinfo from math import sin, cos, asin, acos, sqrt import time, calendar import ctypes,os, fcntl, errno from RMUtilsFramework.rmLogging import log ZERO = timedelta(0) Y2K38_MAX_YEAR = 2037 Y2K38_MAX_TIMESTAMP = 2147483647 # For monotonic time class timespec(ctypes.Structure): _fields_ = [ ('tv_sec', ctypes.c_long), ('tv_nsec', ctypes.c_long) ] class UTC(tzinfo): """UTC""" def utcoffset(self, dt): return ZERO def tzname(self, dt): return "UTC" def dst(self, dt): return ZERO utc = UTC() utc_t0 = datetime(1970, 1, 1, tzinfo=utc) def rmYMDToTimestamp(year, month, day): if year > Y2K38_MAX_YEAR: #Y2K38 year = Y2K38_MAX_YEAR try: return int(datetime(year, month, day).strftime("%s")) except ValueError: return int(time.mktime(datetime(year, month, day).timetuple())) # Windows platform doesn't have strftime(%s) def rmYMDFromTimestamp(timestamp): if timestamp > Y2K38_MAX_TIMESTAMP: #Y2K38 timestamp = Y2K38_MAX_TIMESTAMP d = datetime.fromtimestamp(timestamp) return d.year, d.month, d.day def rmTimestampToDate(timestamp): if timestamp > Y2K38_MAX_TIMESTAMP: #Y2K38 timestamp = Y2K38_MAX_TIMESTAMP return datetime.fromtimestamp(timestamp) def rmTimestampToDateAsString(timestamp, format = None): if timestamp > Y2K38_MAX_TIMESTAMP: #Y2K38 timestamp = Y2K38_MAX_TIMESTAMP if format: return datetime.fromtimestamp(timestamp).strftime(format) return datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S') def rmCurrentTimestampToDateAsString(format = None): timestamp = int(time.time()) if format: return datetime.fromtimestamp(timestamp).strftime(format) return datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S') def rmTimestampToUtcDateAsString(timestamp, format = None): if timestamp > Y2K38_MAX_TIMESTAMP: #Y2K38 timestamp = Y2K38_MAX_TIMESTAMP if format: return datetime.utcfromtimestamp(timestamp).strftime(format) return datetime.utcfromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S') def rmTimestampFromDateAsString(dateString, format): return int(datetime.strptime(dateString, format).strftime("%s")) # Converts a date string in UTC format to a local timestamp (ex: 2019-05-20T12:00:00Z) def rmTimestampFromUTCDateAsString(dateString, format): dt = datetime.strptime(dateString, format) return int((dt - datetime.utcfromtimestamp(0)).total_seconds()) def rmTimestampFromDateAsStringWithOffset(dateString): # format in form of 2015-04-24T08:00:00-04:00 converted to UTC timestamp if dateString is None: return None try: sign = int(dateString[19:20] + '1') (hour, minute) = [int(s) for s in dateString[20:].split(':')] offset = sign * (hour * 60 * 60 + minute * 60) except: return None try: start_time = datetime.strptime(dateString[:19], "%Y-%m-%dT%H:%M:%S") timestamp = int(calendar.timegm(start_time.timetuple())) - offset except: return None return timestamp def rmTimestampToYearMonthDay(timestamp): d = datetime.fromtimestamp(timestamp) return d.year, d.month, d.day def rmNowToYearMonthDay(): d = datetime.now() return d.year, d.month, d.day def rmNormalizeTimestamp(timestamp): return int(datetime.fromtimestamp(timestamp).strftime('%s')) def rmTimestampToDayOfYear(timestamp): if timestamp is None: timestamp = rmCurrentDayTimestamp() d = datetime.fromtimestamp(timestamp).timetuple() return d.tm_yday def rmNowDateTime(): return datetime.now() def rmCurrentTimestamp(): return int(time.time()) def 
rmCurrentDayTimestamp(): return rmGetStartOfDay(int(time.time())) def rmCurrentMinuteTimestamp(): timestamp = int(time.time()) return timestamp - (timestamp % 60) def rmGetStartOfDay(timestamp): tuple = datetime.fromtimestamp(timestamp).timetuple() return int(datetime(tuple.tm_year, tuple.tm_mon, tuple.tm_mday).strftime("%s")) def rmGetStartOfDayUtc(timestamp): tuple = datetime.utcfromtimestamp(timestamp).timetuple() dt = datetime(tuple.tm_year, tuple.tm_mon, tuple.tm_mday, tzinfo=utc) return int((dt-utc_t0).total_seconds()) def rmTimestampIsLeapYear(timestamp): d = datetime.fromtimestamp(timestamp) #try: # datetime(d.year, 2, 29) # return True #except ValueError: # return False if d.year % 400 == 0: return True elif d.year % 100 == 0: return False elif d.year % 4 == 0: return True return False def rmConvertDateStringToFormat(dateString, inputFormat, outputFormat): return datetime.strptime(dateString, inputFormat).strftime(outpu
tFormat) def rmDayRange(startDayTimestamp, numDays): d = datetime.fromtimestamp(startDayTimestamp) if numDays >=0: dateList = [int(time.mktime( (d + timedelta(days=x)).timetuple() )) for x in range(0, numDays)] else: numDays = -numDays dateList = [int(time.mktim
e( (d - timedelta(days=x)).timetuple() )) for x in range(0, numDays)] return dateList def rmDeltaDayFromTimestamp(startDayTimeStamp, deltaDays): d = datetime.fromtimestamp(startDayTimeStamp) if deltaDays < 0: d = d - timedelta(days=-deltaDays) else: d = d + timedelta(days=deltaDays) return int(time.mktime(d.timetuple())) def rmGetNumberOfDaysBetweenTimestamps(startTimestamp, endTimestamp): d1 = datetime.fromtimestamp(startTimestamp) d2 = datetime.fromtimestamp(endTimestamp) delta = d2-d1 return delta.days # Sunrise and sunset for specific location and elevation def computeSuntransitAndDayLenghtForDayTs(ts, lat, lon, elevation): ts = rmGetStartOfDayUtc(ts) n = julianDayFromTimestamp(ts) J = __computeMeanSolarNoon(n, lon) M = __computeSolarMeanAnomay(J) C = __equationOfTheCenter(M) L = __computeEclipticLongitude(M, C) Jtr = computeSolarTransit(J, M, L) delta = __computeSinSunDeclination(L) w0 = computeHourAngle(lat, delta, elevation) return Jtr, w0 def rmGetSunsetTimestampForDayTimestamp(ts, lat, lon, elevation): Jtr, w0 = computeSuntransitAndDayLenghtForDayTs(ts, lat, -lon, elevation) Jset = Jtr+w0/360 tsJset = julianDayToUTC(Jset) return tsJset def rmGetSunriseTimestampForDayTimestamp(ts, lat, lon, elevation): if lat is None or lon is None: log.debug("Latitude or longitude is not set. Returning same timestamp") return ts Jtr, w0 = computeSuntransitAndDayLenghtForDayTs(ts, lat, -lon, elevation) Jrise = Jtr-w0/360 tsJrise = julianDayToUTC(Jrise) return tsJrise def julianDayFromTimestamp(ts): ts = rmGetStartOfDayUtc(ts) + 12*3600 JD = float(ts)/86400 + 2440587.5 return JD - 2451545.0 + 0.0008 def julianDayToUTC(JD): return (JD - 2440587.5)*86400 def __cosa(degree): radian = degree/180*3.14159265359 return cos(radian) def __sina(degree): radian = degree/180*3.14159265359 return sin(radian) def __acosa(x): if abs(x) > 1: return 180. if x< 0 else 0. radian = acos(x) return radian/3.14159265359*180. def __asina(x): if abs(x) > 1: return -90. if x< 0 else 90. radian = asin(x) return radian/(3.14159265359)*180. def __computeMeanSolarNoon(jd, wlon): J = wlon/360 + jd return J def __computeSolarMeanAnomay(solarNoon): #degrees return (357.5291 + 0.98560028*solarNoon)%360 def __equationOfTheCenter(solarMeanAnomaly): # constant from sine M = solarMeanAnomaly return 1.9148*__sina(M) + 0.0200*__sina(2*M) + 0.0003*__sina(3*M) def __computeEclipticLongitude(solarMeanAnomaly, eqCenter): #degrees (it adds a sum a sines) L = (solarMeanAnomaly + eqCenter + 180 + 102.9372) % 360 return L def computeSolarTransit(meanSolarNoon, solarMeanAnomaly, eclipticLongitude)
Micronaet/micronaet-production
working_bom/__openerp__.py
Python
agpl-3.0
1,492
0.00067
###############################################################################
#
# Copyright (C) 2001-2014 Micronaet SRL (<http://www.micronaet.it>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
{
    'name': 'BOM for working process',
    'version': '0.1',
    'category': '',
    'description': """
        Add extra information for manage BOM as a work BOM
        """,
    'author': 'Micronaet S.r.l. - Nicola Riolini',
    'website': 'http://www.micronaet.it',
    'license': 'AGPL-3',
    'depends': [
        'base',
        'mrp',
    ],
    'init_xml': [],
    'demo': [],
    'data': [
        'security/ir.model.access.csv',
        'bom_views.xml',
    ],
    'active': False,
    'installable': True,
    'auto_install': False,
}
open-dai/bcn-lleida-opendai-pilots
web-geo-server/admin_web/wsgi.py
Python
lgpl-3.0
1,160
0.001724
""" WSGI config for opendai_lleida_web project. This module contains the WSGI application used by Django's development server and any production WSGI deployments. It should expose a module-level variable named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover this application via the ``WSGI_APPLICATION`` setting. Usually you will have the standard Django WSGI application here, but it also might make sense to replace the whole Django WSGI application with a custom one that later delegates to the Django one. For example, you could introduce WSGI middleware here, or combine a Django application with an application of another framework. """ import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "admin_web.settings-production") # This appl
ication object is used by any WSGI server configured to use this # file. This includes
Django's development server, if the WSGI_APPLICATION # setting points here. from django.core.wsgi import get_wsgi_application application = get_wsgi_application() # Apply WSGI middleware here. # from helloworld.wsgi import HelloWorldApplication # application = HelloWorldApplication(application)
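Following the file's closing comment, a sketch of what wrapping the application in WSGI middleware could look like; the middleware class and header value are illustrative:

class HeaderMiddleware(object):
    """Illustrative WSGI middleware that tags responses with a header."""
    def __init__(self, app):
        self.app = app

    def __call__(self, environ, start_response):
        def _start_response(status, headers, exc_info=None):
            headers.append(('X-Served-By', 'opendai-lleida'))
            return start_response(status, headers, exc_info)
        return self.app(environ, _start_response)

# application = HeaderMiddleware(application)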
great-expectations/great_expectations
great_expectations/data_asset/__init__.py
Python
apache-2.0
77
0
from .data_asset import DataAsset
from .file_data_asset import FileDataAsset
doubleO8/versionone-sdk-spoon
versio9/tests/__init__.py
Python
bsd-3-clause
49
0
import connect_tests
import string_utils_tests
bjweiqm/Sele
school/pachong/video_db.py
Python
gpl-2.0
534
0.002
#!/usr/bin/env python
# encoding:utf-8
"""
@software: PyCharm
@file: video_db.py
@time: 2016/8/4 16:56
"""
import sqlite3


class Create_DB():
    def __init__(self):
        self.conn = sqlite3.connect('video.db')
        self.cn = self.conn.cursor()

    def create_table(self, table):
        # Create a table; `table` is the CREATE TABLE SQL statement
        self.cn.execute(table)

    def insert_db(self):
        # Insert data
        pass

    def select_db(self):
        # Query data
        pass


if __name__ == '__main__':
    pass
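A short usage sketch for the class above; the table schema is illustrative:

db = Create_DB()
db.create_table(
    'CREATE TABLE IF NOT EXISTS videos ('
    'id INTEGER PRIMARY KEY AUTOINCREMENT, title TEXT, url TEXT)'
)
db.conn.commit()  # sqlite3 requires an explicit commit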
j5shi/Thruster
pylibs/test/test_site.py
Python
gpl-2.0
15,991
0.001688
"""Tests for 'site'. Tests assume the initial paths in sys.path once the interpreter has begun executing have not been removed. """ import unittest from test.test_support import run_unittest, TESTFN, EnvironmentVarGuard from test.test_support import captured_output import __builtin__ import os import sys import re import encodings import subprocess import sysconfig from copy import copy # Need to make sure to not import 'site' if someone specified ``-S`` at the # command-line. Detect this by just making sure 'site' has not been imported # already. if "site" in sys.modules: import site else: raise unittest.SkipTest("importation of site.py suppressed") if site.ENABLE_USER_SITE and not os.path.isdir(site.USER_SITE): # need to add user site directory for tests os.makedirs(site.USER_SITE) site.addsitedir(site.USER_SITE) class HelperFunctionsTests(unittest.TestCase): """Tests for helper functions. The setting of the encoding (set using sys.setdefaultencoding) used by the Unicode implementation is not tested. """ def setUp(self): """Save a copy of sys.path""" self.sys_path = sys.path[:] self.old_base = site.USER_BASE self.old_site = site.USER_SITE self.old_prefixes = site.PREFIXES self.old_vars = copy(sysconfig._CONFIG_VARS) def tearDown(self): """Restore sys.path""" sys.path[:] = self.sys_path site.USER_BASE = self.old_base site.USER_SITE = self.old_site site.PREFIXES = self.old_prefixes sysconfig._CONFIG_VARS = self.old_vars def test_makepath(self): # Test makepath() have an absolute path for its first return value # and a case-normalized version of the absolute path for its # second value. path_parts = ("Beginning", "End") original_dir = os.path.join(*path_parts) abs_dir, norm_dir = site.makepath(*path_parts) self.assertEqual(os.path.abspath(original_dir), abs_dir) if original_dir == os.path.normcase(original_dir): self.assertEqual(abs_dir, norm_dir) else: self.assertEqual(os.path.normcase(abs_dir), norm_dir) def test_init_pathinfo(self): dir_set = site._init_pathinfo() for entry in [site.makepath(path)[1] for path in sys.path if path and os.path.isdir(path)]: self.assertIn(entry, dir_set, "%s from sys.path not found in set returned " "by _init_pathinfo(): %s" % (entry, dir_set)) def pth_file_tests(self, pth_file): """Contain common
code for testing results of reading a .pth file""" self.assertIn(pth_file.imported, sys.modules, "%s not in sys.modules" % pth_file.imported) self.assertIn(site.makepath(pth_file.good_dir_path)[0], sys.path) self.assertFalse(os.path.exists(pth_file.bad_dir_path)) def test_addpackage(self): # Make sure addpackage() imports if the line starts with 'import', # adds directories to sys.pat
h for any line in the file that is not a # comment or import that is a valid directory name for where the .pth # file resides; invalid directories are not added pth_file = PthFile() pth_file.cleanup(prep=True) # to make sure that nothing is # pre-existing that shouldn't be try: pth_file.create() site.addpackage(pth_file.base_dir, pth_file.filename, set()) self.pth_file_tests(pth_file) finally: pth_file.cleanup() def make_pth(self, contents, pth_dir='.', pth_name=TESTFN): # Create a .pth file and return its (abspath, basename). pth_dir = os.path.abspath(pth_dir) pth_basename = pth_name + '.pth' pth_fn = os.path.join(pth_dir, pth_basename) pth_file = open(pth_fn, 'w') self.addCleanup(lambda: os.remove(pth_fn)) pth_file.write(contents) pth_file.close() return pth_dir, pth_basename def test_addpackage_import_bad_syntax(self): # Issue 10642 pth_dir, pth_fn = self.make_pth("import bad)syntax\n") with captured_output("stderr") as err_out: site.addpackage(pth_dir, pth_fn, set()) self.assertRegexpMatches(err_out.getvalue(), "line 1") self.assertRegexpMatches(err_out.getvalue(), re.escape(os.path.join(pth_dir, pth_fn))) # XXX: the previous two should be independent checks so that the # order doesn't matter. The next three could be a single check # but my regex foo isn't good enough to write it. self.assertRegexpMatches(err_out.getvalue(), 'Traceback') self.assertRegexpMatches(err_out.getvalue(), r'import bad\)syntax') self.assertRegexpMatches(err_out.getvalue(), 'SyntaxError') def test_addpackage_import_bad_exec(self): # Issue 10642 pth_dir, pth_fn = self.make_pth("randompath\nimport nosuchmodule\n") with captured_output("stderr") as err_out: site.addpackage(pth_dir, pth_fn, set()) self.assertRegexpMatches(err_out.getvalue(), "line 2") self.assertRegexpMatches(err_out.getvalue(), re.escape(os.path.join(pth_dir, pth_fn))) # XXX: ditto previous XXX comment. self.assertRegexpMatches(err_out.getvalue(), 'Traceback') self.assertRegexpMatches(err_out.getvalue(), 'ImportError') @unittest.skipIf(sys.platform == "win32", "Windows does not raise an " "error for file paths containing null characters") def test_addpackage_import_bad_pth_file(self): # Issue 5258 pth_dir, pth_fn = self.make_pth("abc\x00def\n") with captured_output("stderr") as err_out: site.addpackage(pth_dir, pth_fn, set()) self.assertRegexpMatches(err_out.getvalue(), "line 1") self.assertRegexpMatches(err_out.getvalue(), re.escape(os.path.join(pth_dir, pth_fn))) # XXX: ditto previous XXX comment. 
self.assertRegexpMatches(err_out.getvalue(), 'Traceback') self.assertRegexpMatches(err_out.getvalue(), 'TypeError') def test_addsitedir(self): # Same tests for test_addpackage since addsitedir() essentially just # calls addpackage() for every .pth file in the directory pth_file = PthFile() pth_file.cleanup(prep=True) # Make sure that nothing is pre-existing # that is tested for try: pth_file.create() site.addsitedir(pth_file.base_dir, set()) self.pth_file_tests(pth_file) finally: pth_file.cleanup() @unittest.skipUnless(site.ENABLE_USER_SITE, "requires access to PEP 370 " "user-site (site.ENABLE_USER_SITE)") def test_s_option(self): usersite = site.USER_SITE self.assertIn(usersite, sys.path) env = os.environ.copy() rc = subprocess.call([sys.executable, '-c', 'import sys; sys.exit(%r in sys.path)' % usersite], env=env) self.assertEqual(rc, 1, "%r is not in sys.path (sys.exit returned %r)" % (usersite, rc)) env = os.environ.copy() rc = subprocess.call([sys.executable, '-s', '-c', 'import sys; sys.exit(%r in sys.path)' % usersite], env=env) self.assertEqual(rc, 0) env = os.environ.copy() env["PYTHONNOUSERSITE"] = "1" rc = subprocess.call([sys.executable, '-c', 'import sys; sys.exit(%r in sys.path)' % usersite], env=env) self.assertEqual(rc, 0) env = os.environ.copy() env["PYTHONUSERBASE"] = "/tmp" rc = subprocess.call([sys.executable, '-c', 'import sys, site; sys.exit(site.USER_BASE.startswith("/tmp"))'], env=env) self.assertE
uclouvain/OSIS-Louvain
education_group/tests/ddd/factories/domain/campus.py
Python
agpl-3.0
1,589
0.00063
############################################################################## # # OSIS stands for Open Student Information System. It's an application # designed to manage the core business of higher education institutions, # such as universities, faculti
es, institutes and professional schools. # The core business involves the administration of students, teachers, # courses, programs and so on. # # Copyri
ght (C) 2015-2020 Université catholique de Louvain (http://www.uclouvain.be) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # A copy of this license - GNU General Public License - is available # at the root of the source code of this program. If not, # see http://www.gnu.org/licenses/. # ############################################################################## import factory.fuzzy from education_group.ddd.domain._campus import Campus class CampusFactory(factory.Factory): class Meta: model = Campus abstract = False name = factory.Sequence(lambda n: 'Campus %02d' % n) university_name = factory.Sequence(lambda n: 'University %02d' % n)
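A minimal usage sketch for the factory above, assuming Campus is a plain value object whose constructor accepts name and university_name (as the factory implies); the overridden field value is illustrative only:

campus_a = CampusFactory()                             # e.g. 'Campus 00' / 'University 00'
campus_b = CampusFactory(name='Campus Saint-Louis')    # hypothetical override of one field
assert campus_a.name != campus_b.name                  # the Sequence counter keeps names unique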
wolfiex/ipython-dev-reload
setup.py
Python
mit
529
0.009452
from setuptools import setup def readme():
    with open('README.md') as f:
        return f.read()

## test with python setup.py develop
setup(
    name='ipyreload',
    packages=['ipyreload'],
    version='1.2',  # setuptools expects the version as a string, not a float
    description='ipython productivity tools',
    long_description=readme(),
    url="https://github.com/wolfiex/ipython-dev-reload",
    keywords='ipython reload'.split(' '),
    author='Dan Ellis',
    author_email='[email protected]',
    license='MIT',
    zip_safe=False)
dslackw/slpkg
slpkg/url_read.py
Python
gpl-3.0
1,692
0.000591
#!/usr/bin/python3 # -*- coding: utf-8 -*- # url_read.py file is part of slpkg. # Copyright 2014-2021 Dimitris Zlatanidis <[email protected]> # All rights reserved. # Slpkg is a user-friendly package manager for Slackware installation
s # https://gitl
ab.com/dslackw/slpkg

# Slpkg is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import urllib3

from slpkg.__metadata__ import MetaData as _meta_


class URL:
    """Urls reading class
    """
    def __init__(self, link):
        self.link = link
        self.meta = _meta_
        self.red = _meta_.color["RED"]
        self.endc = _meta_.color["ENDC"]
        if self.meta.http_proxy:
            self.http = urllib3.ProxyManager(self.meta.http_proxy)
        else:
            self.http = urllib3.PoolManager()

    def reading(self):
        """Open url and read
        """
        try:
            f = self.http.request('GET', self.link)
            return f.data.decode("utf-8", "ignore")
        # Connection failures surface as MaxRetryError (a subclass of
        # HTTPError); catching NewConnectionError directly would miss them,
        # since urllib3 wraps it before it reaches the caller.
        except urllib3.exceptions.HTTPError:
            print(f"\n{self.red}Can't read the file '{self.link.split('/')[-1]}'{self.endc}")
            return " "
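A usage sketch for the URL class above; the ChangeLog address is a hypothetical mirror, and reading() returns the decoded body or a single space on failure:

url = URL('https://mirrors.example.com/slackware/ChangeLog.txt')  # hypothetical URL
text = url.reading()
if text.strip():
    print(text.splitlines()[0])  # first ChangeLog entry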
chinmaygarde/flutter_engine
tools/dia_dll.py
Python
bsd-3-clause
2,094
0.011939
#!/usr/bin/env python # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ This script is based on chromium/chromium/master/tools/clang/scripts/update.py. It is used on Windows platforms to copy the correct msdia*.dll to the clang folder, as a "gclient hook". """ import os import shutil import stat import sys # Path constants. (All of these should be absolute paths.) THIS_DI
R = os.path.abspath(os.path.dirname(__file__)) LLVM_BUILD_DIR = os.path.abspath(os.path.join(THIS_DIR, '..', '..', 'third_party', 'llvm-build', 'Release+Asserts')) def GetDiaDll(): """Get the location of msdia*.dll for the platform.""" # Bump after VC updates. DIA_DLL = { '2013': 'msdia120.dll', '2015': 'msdia140.dll', '2017': 'msdia140.dll'
,
    '2019': 'msdia140.dll',
  }

  # Don't let vs_toolchain overwrite our environment: back up a *copy*, since
  # 'environ_bak = os.environ' would only alias the live mapping and the
  # "backup" would change along with it.
  environ_bak = os.environ.copy()

  sys.path.append(os.path.join(THIS_DIR, '..', '..', 'build'))
  import vs_toolchain
  win_sdk_dir = vs_toolchain.SetEnvironmentAndGetSDKDir()
  msvs_version = vs_toolchain.GetVisualStudioVersion()

  if bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1'))):
    dia_path = os.path.join(win_sdk_dir, '..', 'DIA SDK', 'bin', 'amd64')
  else:
    if 'GYP_MSVS_OVERRIDE_PATH' in os.environ:
      vs_path = os.environ['GYP_MSVS_OVERRIDE_PATH']
    else:
      vs_path = vs_toolchain.DetectVisualStudioPath()
    dia_path = os.path.join(vs_path, 'DIA SDK', 'bin', 'amd64')

  # Restore the saved variables in place; rebinding os.environ to a plain
  # dict would drop its putenv/unsetenv side effects.
  os.environ.clear()
  os.environ.update(environ_bak)

  return os.path.join(dia_path, DIA_DLL[msvs_version])


def CopyFile(src, dst):
  """Copy a file from src to dst."""
  print("Copying %s to %s" % (str(src), str(dst)))
  shutil.copy(src, dst)


def CopyDiaDllTo(target_dir):
  # This script always wants to use the 64-bit msdia*.dll.
  dia_dll = GetDiaDll()
  CopyFile(dia_dll, target_dir)


def main():
  CopyDiaDllTo(os.path.join(LLVM_BUILD_DIR, 'bin'))
  return 0


if __name__ == '__main__':
  sys.exit(main())
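Besides the main() entry point, the copy step can be driven directly from another hook in this module; the output directory below is a hypothetical placeholder:

CopyDiaDllTo(r'C:\src\out\Release')   # copies the 64-bit msdia*.dll into that directory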
fintech-circle/edx-platform
openedx/core/djangoapps/auth_exchange/views.py
Python
agpl-3.0
7,236
0.002349
""" Views to support exchange of authentication credentials. The following are currently implemented: 1. AccessTokenExchangeView: 3rd party (social-auth) OAuth 2.0 access token -> 1st party (open-edx) OAuth 2.0 access token 2. LoginWithAccessTokenView: 1st party (open-edx) OAuth 2.0 access token -> session cookie """ # pylint: disable=abstract-method import django.contrib.auth as auth import social.apps.django_app.utils as social_utils from django.conf import settings from django.contrib.auth import login from
django.http import HttpResponse from django.utils.decorators import method_decorator from django.views.decorators.csrf import csrf_exempt from edx_oauth2_provider.constants import SCOPE_VALUE_DICT
from oauth2_provider.settings import oauth2_settings from oauth2_provider.views.base import TokenView as DOTAccessTokenView from oauthlib.oauth2.rfc6749.tokens import BearerToken from provider import constants from provider.oauth2.views import AccessTokenView as DOPAccessTokenView from rest_framework import permissions from rest_framework.response import Response from rest_framework.views import APIView from openedx.core.djangoapps.auth_exchange.forms import AccessTokenExchangeForm from openedx.core.djangoapps.oauth_dispatch import adapters from openedx.core.lib.api.authentication import OAuth2AuthenticationAllowInactiveUser class AccessTokenExchangeBase(APIView): """ View for token exchange from 3rd party OAuth access token to 1st party OAuth access token. """ @method_decorator(csrf_exempt) @method_decorator(social_utils.strategy("social:complete")) def dispatch(self, *args, **kwargs): return super(AccessTokenExchangeBase, self).dispatch(*args, **kwargs) def get(self, request, _backend): # pylint: disable=arguments-differ """ Pass through GET requests without the _backend """ return super(AccessTokenExchangeBase, self).get(request) def post(self, request, _backend): # pylint: disable=arguments-differ """ Handle POST requests to get a first-party access token. """ form = AccessTokenExchangeForm(request=request, oauth2_adapter=self.oauth2_adapter, data=request.POST) # pylint: disable=no-member if not form.is_valid(): return self.error_response(form.errors) # pylint: disable=no-member user = form.cleaned_data["user"] scope = form.cleaned_data["scope"] client = form.cleaned_data["client"] return self.exchange_access_token(request, user, scope, client) def exchange_access_token(self, request, user, scope, client): """ Exchange third party credentials for an edx access token, and return a serialized access token response. """ if constants.SINGLE_ACCESS_TOKEN: edx_access_token = self.get_access_token(request, user, scope, client) # pylint: disable=no-member else: edx_access_token = self.create_access_token(request, user, scope, client) return self.access_token_response(edx_access_token) # pylint: disable=no-member class DOPAccessTokenExchangeView(AccessTokenExchangeBase, DOPAccessTokenView): """ View for token exchange from 3rd party OAuth access token to 1st party OAuth access token. Uses django-oauth2-provider (DOP) to manage access tokens. """ oauth2_adapter = adapters.DOPAdapter() class DOTAccessTokenExchangeView(AccessTokenExchangeBase, DOTAccessTokenView): """ View for token exchange from 3rd party OAuth access token to 1st party OAuth access token. Uses django-oauth-toolkit (DOT) to manage access tokens. """ oauth2_adapter = adapters.DOTAdapter() def get(self, request, _backend): return Response(status=400, data={ 'error': 'invalid_request', 'error_description': 'Only POST requests allowed.', }) def get_access_token(self, request, user, scope, client): """ TODO: MA-2122: Reusing access tokens is not yet supported for DOT. Just return a new access token. """ return self.create_access_token(request, user, scope, client) def create_access_token(self, request, user, scope, client): """ Create and return a new access token. 
""" _days = 24 * 60 * 60 token_generator = BearerToken( expires_in=settings.OAUTH_EXPIRE_PUBLIC_CLIENT_DAYS * _days, request_validator=oauth2_settings.OAUTH2_VALIDATOR_CLASS(), ) self._populate_create_access_token_request(request, user, scope, client) return token_generator.create_token(request, refresh_token=True) def access_token_response(self, token): """ Wrap an access token in an appropriate response """ return Response(data=token) def _populate_create_access_token_request(self, request, user, scope, client): """ django-oauth-toolkit expects certain non-standard attributes to be present on the request object. This function modifies the request object to match these expectations """ request.user = user request.scopes = [SCOPE_VALUE_DICT[scope]] request.client = client request.state = None request.refresh_token = None request.extra_credentials = None request.grant_type = client.authorization_grant_type def error_response(self, form_errors): """ Return an error response consisting of the errors in the form """ return Response(status=400, data=form_errors) class LoginWithAccessTokenView(APIView): """ View for exchanging an access token for session cookies """ authentication_classes = (OAuth2AuthenticationAllowInactiveUser,) permission_classes = (permissions.IsAuthenticated,) @staticmethod def _get_path_of_arbitrary_backend_for_user(user): """ Return the path to the first found authentication backend that recognizes the given user. """ for backend_path in settings.AUTHENTICATION_BACKENDS: backend = auth.load_backend(backend_path) if backend.get_user(user.id): return backend_path @method_decorator(csrf_exempt) def post(self, request): """ Handler for the POST method to this view. """ # The django login method stores the user's id in request.session[SESSION_KEY] and the # path to the user's authentication backend in request.session[BACKEND_SESSION_KEY]. # The login method assumes the backend path had been previously stored in request.user.backend # in the 'authenticate' call. However, not all authentication providers do so. # So we explicitly populate the request.user.backend field here. if not hasattr(request.user, 'backend'): request.user.backend = self._get_path_of_arbitrary_backend_for_user(request.user) login(request, request.user) # login generates and stores the user's cookies in the session return HttpResponse(status=204) # cookies stored in the session are returned with the response
wallarelvo/eneza-server
smsserver/poolmember.py
Python
apache-2.0
584
0.001712
import android


class SMSPoolMember:
    def __init__(self, query):
        self.droid = android.Android()
        self.query = str(query).strip()

    def wifiConnected(self):
        none = "<unknown ssid>"
        return self.droid.wifiGetConnectionInfo().result["ssid"] != none

    def dataConnected(self):
        return self.droid.getCellLocation().result["cid"] > -1

    def sendResponse(self):
        if self.query == "connecti
on": return "pool:" + str(self.wifiConnected()
or self.dataConnected()) else: return "pool: None"
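A usage sketch for the pool member above; it only runs on a device where the SL4A android module is available:

member = SMSPoolMember(' connection ')    # whitespace is stripped in __init__
print(member.sendResponse())              # e.g. 'pool:True' if wifi or mobile data is up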
xialeiliu/RankIQA
src/MyLossLayer/netloss_tid2013.py
Python
mit
1,910
0.013613
import caffe
import numpy as np


class MyLossLayer(caffe.Layer):
    """Layer of Efficient Siamese loss function."""

    def setup(self, bottom, top):
        self.margin = 10
        print('*********************** SETTING UP')

    def forward(self, bottom, top):
        """The parameters here have the same meaning as data_layer"""
        self.Num = 0
        batch = 1
        level = 5
        dis = 9
        SepSize = batch*level
        self.dis = []
        # collect the score difference for every ordered pair inside one
        # distortion group (the i-th image is less distorted than the j-th)
        for k in range(dis):
            for i in range(SepSize*k, SepSize*(k+1)-batch):
                for j in range(SepSize*k + int((i-SepSize*k)/batch+1)*batch, SepSize*(k+1)):
                    self.dis.append(bottom[0].data[i]-bottom[0].data[j])
                    self.Num += 1
        self.dis = np.asarray(self.dis)
        self.loss = np.maximum(0, self.margin-self.dis)
        # Efficient Siamese forward pass of hinge loss
        top[0].data[...] = np.sum(self.loss)/bottom[0].num

    def backward(self, top, propagate_down, bottom):
        """The parameters here have the same meaning as data_layer"""
        batch = 1
        index = 0
        level = 5
        dis = 9
        SepSize = batch*level
        self.ref = np.zeros(bottom[
0].num,dtype=np.float32) for k in range(dis)
:
            for i in range(SepSize*k, SepSize*(k+1)-batch):
                for j in range(SepSize*k + int((i-SepSize*k)/batch+1)*batch, SepSize*(k+1)):
                    if self.loss[index] > 0:
                        # hinge is active: d(loss)/d(s_i) = -1, d(loss)/d(s_j) = +1
                        self.ref[i] -= 1
                        self.ref[j] += 1
                    index += 1
        # Efficient Siamese backward pass
        bottom[0].diff[...] = np.reshape(self.ref, (bottom[0].num, 1))/bottom[0].num

    def reshape(self, bottom, top):
        """Reshaping happens during the call to forward."""
        top[0].reshape(1)
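The forward pass above is a pairwise hinge: for each within-distortion pair it adds max(0, margin - (s_i - s_j)), pushing the score of the less-distorted image at least `margin` above the other. A tiny standalone check with made-up scores:

margin = 10
s_i, s_j = 7.5, 3.0                        # higher- vs lower-quality image scores
pair_loss = max(0, margin - (s_i - s_j))   # 5.5: the pair is only 4.5 apart, short of the margin
print(pair_loss)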
qwercik/brainfuckOS
utils/align-to-full-sector.py
Python
mit
467
0
#!/usr/bin/env python3
import sys

if len(sys.argv) != 2:
    raise SystemExit('Incorrect usage. Use: ' + sys.argv[0] + ' <image.img>')

image_filename = sys.argv[1]

with open(image_filename, 'rb') as f:
    content = bytearray(f.read())

# -len % 512 is 0 for an already sector-aligned image, so nothing is appended
# in that case (512 - len % 512 would wrongly add a whole extra sector).
bytes_to_append = -len(content) % 512
content.extend(b'\x00' * bytes_to_append)

with open(image_filename, 'wb') as f:
    f.write(content)

print('Successfully aligned to sector')
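A worked check of the padding arithmetic used above (sizes are illustrative):

for size in (1000, 1024):
    padded = size + (-size % 512)
    print(size, '->', padded)   # 1000 -> 1024 (24 bytes added), 1024 -> 1024 (already aligned)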