| repo_name (string, 5–100 chars) | path (string, 4–231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, 0–8.16k chars) | middle (string, 3–512 chars) | suffix (string, 0–8.17k chars) |
|---|---|---|---|---|---|---|---|---|
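Each row below is one fill-in-the-middle (FIM) example: a Python source file split into prefix, middle, and suffix cells, together with repository metadata and a quality score. As a minimal sketch of how rows with this schema might be consumed, assuming a Hugging Face `datasets`-style corpus (the dataset path "example/fim-python-corpus" is hypothetical):

from datasets import load_dataset

# Hypothetical dataset path; any corpus exposing the columns above would work.
ds = load_dataset("example/fim-python-corpus", split="train")

for row in ds.select(range(3)):
    # Concatenating the three cells recovers the original file text.
    full_text = row["prefix"] + row["middle"] + row["suffix"]
    print(row["repo_name"], row["path"], row["license"], row["score"])
    print(full_text[:200])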

Gustry/inasafe | safe/definitions/reports/__init__.py | Python | gpl-3.0 | 2,816 | 0

# coding=utf-8
"""Definitions for basic report.
"""
from __future__ import absolute_import
from safe.utilities.i18n import tr
__copyright__ = "Copyright 2016, The InaSAFE Project"
__license__ = "GPL version 3"
__email__ = "[email protected]"
__revision__ = '$Format:%H$'
# Meta description about component
# component generation type
jinja2_component_type = {
'key': 'jinja2_component_type',
'name': 'Jinja2',
'description': tr('A component that is generated using Jinja2 API.')
}
qgis_composer_component_type = {
'key': 'qgis_composer_component_type',
'name': 'QGISComposer',
'description': tr('A component that is generated using QGISComposer API.')
}
qt_renderer_component_type = {
'key': 'qt_renderer_component_type',
'name': 'QtRenderer',
'description': tr('A component that is generated using QtRenderer API.')
}
available_component_type = [
jinja2_component_type,
qgis_composer_component_type,
qt_renderer_component_type
]
# Tags
# Tags is a way to categorize different component quickly for easy
# retrieval
final_product_tag = {
'key': 'final_product_tag',
'name': tr('Final Product'),
'description': tr(
'Tag this component as a Final Product of report generation.')
}
infographic_product_tag = {
'key': 'infographic_product_tag',
'name': tr('Infographic'),
'description': tr(
'Tag this component as an Infographic related product.')
}
map_product_tag = {
'key': 'map_product_tag',
'name': tr('Map'),
'description': tr(
'Tag this component as a product mainly to show map.')
}
table_product_tag = {
'key': 'table_product_tag',
'name': tr('Table'),
'description': tr(
'Tag this component as a product mainly with table.')
}
template_product_tag = {
    'key': 'template_product_tag',
    'name': tr('QGIS Template'),
    'description': tr(
        'Tag this component as a QGIS Template product.')
}
product_type_tag = [
table_product_tag,
map_product_tag,
template_product_tag,
infographic_product_tag
]
html_product_tag = {
'key': 'html_product_tag',
'name': tr('HTML'),
'description': tr('Tag this product as HTML output.')
}
pdf_product_tag = {
'key': 'pdf_product_tag',
'name': tr('PDF'),
'description': tr('Tag this product as PDF output.')
}
qpt_product_tag = {
'key': 'qpt_product_tag',
'name': tr('QPT'),
'description': tr('Tag this product as QPT output.')
}
png_product_tag = {
'key': 'png_product_tag',
'name': tr('PNG'),
    'description': tr('Tag this product as PNG output.')
}
svg_product_tag = {
    'key': 'svg_product_tag',
'name': tr('SVG'),
'description': tr('Tag this product as SVG output.')
}
product_output_type_tag = [
    html_product_tag,
    pdf_product_tag,
    qpt_product_tag,
    png_product_tag,
    svg_product_tag,
]

nathanbjenx/cairis | cairis/controllers/ConceptReferenceController.py | Python | apache-2.0 | 3,396 | 0.00265

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import sys
if (sys.version_info > (3,)):
import http.client
from http.client import BAD_REQUEST, CONFLICT, NOT_FOUND, OK
else:
import httplib
from httplib import BAD_REQUEST, CONFLICT, NOT_FOUND, OK
from flask import session, request, make_response
from flask_restful import Resource
from cairis.data.ConceptReferenceDAO import ConceptReferenceDAO
from cairis.tools.JsonConverter import json_serialize
from cairis.tools.MessageDefinitions import ConceptReferenceMessage
from cairis.tools.ModelDefinitions import ConceptReferenceModel
from cairis.tools.SessionValidator import get_session_id
__author__ = 'Shamal Faily'
class ConceptReferencesAPI(Resource):
def get(self):
session_id = get_session_id(session, request)
constraint_id = request.args.get('constraint_id', -1)
dao = ConceptReferenceDAO(session_id)
crs = dao.get_concept_references(constraint_id=constraint_id)
dao.close()
resp = make_response(json_serialize(crs, session_id=session_id))
resp.headers['Content-Type'] = "application/json"
return resp
def post(self):
session_id = get_session_id(session, request)
dao = ConceptReferenceDAO(session_id)
new_cr = dao.from_json(request)
dao.add_concept_reference(new_cr)
dao.close()
resp_dict = {'message': 'Concept Reference successfully added'}
resp = make_response(json_serialize(resp_dict, session_id=session_id), OK)
        resp.headers['Content-Type'] = 'application/json'
return resp
class ConceptReferenceByNameAPI(Resource):
def get(self, name):
session_id = get_session_id(session, request)
dao = ConceptReferenceDAO(session_id)
found_cr = dao.get_concept_reference(name)
dao.close()
resp = make_response(json_serialize(found_cr, session_id=session_id))
        resp.headers['Content-Type'] = "application/json"
return resp
def put(self, name):
        session_id = get_session_id(session, request)
dao = ConceptReferenceDAO(session_id)
upd_cr = dao.from_json(request)
dao.update_concept_reference(upd_cr, name)
dao.close()
resp_dict = {'message': 'Concept Reference successfully updated'}
resp = make_response(json_serialize(resp_dict), OK)
        resp.headers['Content-Type'] = 'application/json'
return resp
def delete(self, name):
session_id = get_session_id(session, request)
dao = ConceptReferenceDAO(session_id)
dao.delete_concept_reference(name)
dao.close()
resp_dict = {'message': 'Concept Reference successfully deleted'}
resp = make_response(json_serialize(resp_dict), OK)
        resp.headers['Content-Type'] = 'application/json'
return resp

mnaughto/trigger-statusbar | .trigger/module_dynamic/module.py | Python | mit | 2,707 | 0.026967

import json
import os
import shutil
import zipfile
from build import cd
def create_template(name, path, **kw):
os.makedirs(os.path.join(path, 'module'))
with open(os.path.join(path, 'module', 'manifest.json'), 'w') as manifest_file:
manifest = {
"name": name,
"version": "0.1",
"description": "My module template"
}
with open(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'platform_version.txt'))) as platform_version_file:
manifest['platform_version'] = platform_version_file.read()
json.dump(manifest, manifest_file, indent=4, sort_keys=True)
# Copy template module
template_path = os.path.abspath(os.path.join(os.path.split(__file__)[0], 'templatemodule'))
for root, dirnames, filenames in os.walk(template_path):
for filename in filenames:
relative_path = os.path.join(root, filename)[len(template_path)+1:]
with open(os.path.join(root, filename), 'r') as source:
lines = source.readlines()
new_dir = os.path.split(os.path.join(path, 'module', relative_path.replace('templatemodule', name)))[0]
if not os.path.isdir(new_dir):
os.makedirs(new_dir)
with open(os.path.join(path, 'module', relative_path.replace('templatemodule', name)), 'w') as output:
for line in lines:
output.write(line.replace('templatemodule', name))
return load(path, manifest)
def load(path, manifest, **kw):
module_model = {}
module_model['local_path'] = path
module_model['module_dynamic_path'] = os.path.join(path, ".trigger", "module_dynamic")
module_model['files'] = {
'manifest': os.path.join(path, 'module', 'manifest.json'),
'module_structure': os.path.join(path, ".trigger", "schema", "module_structure.json")
}
module_model['rawfiles'] = {
        'dynamic_platform_version': os.path.join(path, ".trigger", "platform_version.txt")
    }
module_model['directories'] = {
'module_directory': os.path.join(path, 'module')
}
return module_model
def create_upload_zip(path, subdirs=[], **kw):
module_path = os.path.abspath(os.path.join(path, 'module'))
zip_base = os.path.abspath(os.path.join(path, '.trigger', 'upload_tmp'))
if os.path.exists(zip_base+".zip"):
os.unlink(zip_base+".zip")
if len(subdirs):
zip_path = _make_partial_archive(zip_base, subdirs, root_dir=module_path)
else:
zip_path = shutil.make_archive(zip_base, 'zip', root_dir=module_path)
return zip_path
def _make_partial_archive(zip_base, subdirs, root_dir):
zip = zipfile.ZipFile(zip_base + ".zip", "w")
with cd(root_dir):
for subdir in subdirs:
if not os.path.exists(subdir):
continue
for root, dirs, files in os.walk(subdir):
for file in files:
zip.write(os.path.join(root, file))
zip.close()
return zip_base + ".zip"

KungFuLucky7/server_admin | server_admin/wsgi.py | Python | gpl-2.0 | 1,551 | 0.001289

"""
WSGI config for server_admin project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os, sys
sys.path.append('/home/terrywong/server_admin')
sys.path.append('/home/terrywong/server_admin/server_admin')
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "server_admin.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "server_admin.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)

looker/sentry | src/sentry/api/base.py | Python | bsd-3-clause | 11,066 | 0.000633

from __future__ import absolute_import
import functools
import logging
import six
import time
from datetime import datetime, timedelta
from django.conf import settings
from django.utils.http import urlquote
from django.views.decorators.csrf import csrf_exempt
from enum import Enum
from pytz import utc
from rest_framework.authentication import SessionAuthentication
from rest_framework.parsers import JSONParser
from rest_framework.renderers import JSONRenderer
from rest_framework.response import Response
from rest_framework.views import APIView
from sentry import tsdb
from sentry.app import raven
from sentry.auth import access
from sentry.models import Environment
from sentry.utils.cursors import Cursor
from sentry.utils.dates import to_datetime
from sentry.utils.http import absolute_uri, is_valid_origin
from sentry.utils.audit import create_audit_entry
from .authentication import ApiKeyAuthentication, TokenAuthentication
from .paginator import Paginator
from .permissions import NoPermission
__all__ = ['DocSection', 'Endpoint', 'EnvironmentMixin', 'StatsMixin']
ONE_MINUTE = 60
ONE_HOUR = ONE_MINUTE * 60
ONE_DAY = ONE_HOUR * 24
LINK_HEADER = '<{uri}&cursor={cursor}>; rel="{name}"; results="{has_results}"; cursor="{cursor}"'
DEFAULT_AUTHENTICATION = (
TokenAuthentication, ApiKeyAuthentication, SessionAuthentication, )
logger = logging.getLogger(__name__)
audit_logger = logging.getLogger('sentry.audit.api')
class DocSection(Enum):
ACCOUNTS = 'Accounts'
EVENTS = 'Events'
ORGANIZATIONS = 'Organizations'
PROJECTS = 'Projects'
RELEASES = 'Releases'
TEAMS = 'Teams'
class Endpoint(APIView):
authentication_classes = DEFAULT_AUTHENTICATION
renderer_classes = (JSONRenderer, )
parser_classes = (JSONParser, )
permission_classes = (NoPermission, )
def build_cursor_link(self, request, name, cursor):
querystring = u'&'.join(
u'{0}={1}'.format(urlquote(k), urlquote(v)) for k, v in six.iteritems(request.GET)
if k != 'cursor'
)
base_url = absolute_uri(urlquote(request.path))
if querystring:
base_url = '{0}?{1}'.format(base_url, querystring)
else:
base_url = base_url + '?'
return LINK_HEADER.format(
uri=base_url,
cursor=six.text_type(cursor),
name=name,
has_results='true' if bool(cursor) else 'false',
)
def convert_args(self, request, *args, **kwargs):
return (args, kwargs)
def handle_exception(self, request, exc):
try:
response = super(Endpoint, self).handle_exception(exc)
except Exception as exc:
import sys
import traceback
sys.stderr.write(traceback.format_exc())
event_id = raven.captureException(request=request)
context = {
'detail': 'Internal Error',
'errorId': event_id,
}
response = Response(context, status=500)
response.exception = True
return response
def create_audit_entry(self, request, transaction_id=None, **kwargs):
return create_audit_entry(request, transaction_id, audit_logger, **kwargs)
def initialize_request(self, request, *args, **kwargs):
rv = super(Endpoint, self).initialize_request(request, *args, **kwargs)
# If our request is being made via our internal API client, we need to
# stitch back on auth and user information
if getattr(request, '__from_api_client__', False):
if rv.auth is None:
                rv.auth = getattr(request, 'auth', None)
if rv.user is None:
rv.user = getattr(request, 'user', None)
return rv
@csrf_exempt
def dispatch(self, request, *args, **kwargs):
"""
Identical to rest framework's dispatch except we add the ability
to convert arguments (for common URL params).
"""
self.args = args
self.kwargs = kwargs
        request = self.initialize_request(request, *args, **kwargs)
self.request = request
self.headers = self.default_response_headers # deprecate?
if settings.SENTRY_API_RESPONSE_DELAY:
time.sleep(settings.SENTRY_API_RESPONSE_DELAY / 1000.0)
origin = request.META.get('HTTP_ORIGIN', 'null')
# A "null" value should be treated as no Origin for us.
# See RFC6454 for more information on this behavior.
if origin == 'null':
origin = None
try:
if origin and request.auth:
allowed_origins = request.auth.get_allowed_origins()
if not is_valid_origin(origin, allowed=allowed_origins):
response = Response('Invalid origin: %s' %
(origin, ), status=400)
self.response = self.finalize_response(
request, response, *args, **kwargs)
return self.response
self.initial(request, *args, **kwargs)
if getattr(request, 'user', None) and request.user.is_authenticated():
raven.user_context({
'id': request.user.id,
'username': request.user.username,
'email': request.user.email,
})
# Get the appropriate handler method
if request.method.lower() in self.http_method_names:
handler = getattr(self, request.method.lower(),
self.http_method_not_allowed)
(args, kwargs) = self.convert_args(request, *args, **kwargs)
self.args = args
self.kwargs = kwargs
else:
handler = self.http_method_not_allowed
if getattr(request, 'access', None) is None:
# setup default access
request.access = access.from_request(request)
response = handler(request, *args, **kwargs)
except Exception as exc:
response = self.handle_exception(request, exc)
if origin:
self.add_cors_headers(request, response)
self.response = self.finalize_response(
request, response, *args, **kwargs)
return self.response
def add_cors_headers(self, request, response):
response['Access-Control-Allow-Origin'] = request.META['HTTP_ORIGIN']
response['Access-Control-Allow-Methods'] = ', '.join(
self.http_method_names)
def add_cursor_headers(self, request, response, cursor_result):
if cursor_result.hits is not None:
response['X-Hits'] = cursor_result.hits
if cursor_result.max_hits is not None:
response['X-Max-Hits'] = cursor_result.max_hits
response['Link'] = ', '.join(
[
self.build_cursor_link(
request, 'previous', cursor_result.prev),
self.build_cursor_link(request, 'next', cursor_result.next),
]
)
def respond(self, context=None, **kwargs):
return Response(context, **kwargs)
def paginate(
self, request, on_results=None, paginator=None,
paginator_cls=Paginator, default_per_page=100, **paginator_kwargs
):
assert (paginator and not paginator_kwargs) or (paginator_cls and paginator_kwargs)
per_page = int(request.GET.get('per_page', default_per_page))
input_cursor = request.GET.get('cursor')
if input_cursor:
input_cursor = Cursor.from_string(input_cursor)
else:
input_cursor = None
assert per_page <= max(100, default_per_page)
if not paginator:
paginator = paginator_cls(**paginator_kwargs)
cursor_result = paginator.get_result(
limit=per_page,
cursor=input_cursor,
)
# map results based on callback
if on_results:
results = on_results(cursor_result.results)
response = Response(results)
        self.add_cursor_headers(request, response, cursor_result)
        return response

Ibuprofen/gizehmoviepy | gif_parsers/read_rgb.py | Python | mit | 1,281 | 0.014832

import json
from PIL import Image
import collections
with open('../config/nodes.json') as data_file:
nodes = json.load(data_file)
# pre-allocate the ordered node list
ordered_nodes = [None] * len(nodes)
# populate it with [x, y] positions
for i, pos in nodes.items():
ordered_nodes[int(i)] = [pos['x'], pos['y']]
filename = "04_rgb_vertica
|
l_lines"
im = Image.open("../gif_generators/output/"+filename+".gif") #Can be many different formats.
target_size = 400, 400
resize = False
if target_size != im.size:
resize = True
data = []
# To iterate through the entire gif
try:
    frame_num = 0
while True:
im.seek(frame_num)
frame_data = []
# do something to im
img = im.convert('RGB')
        if resize:
print "Resizing"
img.thumbnail(target_size, Image.ANTIALIAS)
for x, y in ordered_nodes:
frame_data.append(img.getpixel((x, y)))
#print r, g, b
data.append(frame_data)
# write to json
print frame_num
frame_num+=1
except EOFError:
pass # end of sequence
#print data
#print r, g, b
with open(filename+'.json', 'w') as outfile:
json.dump({
"meta": {},
"data": data
}, outfile)
print im.size  # Get the width and height of the image for iterating over
#print pix[x,y]  # Get the RGBA value of a pixel of an image

pziarsolo/bam_crumbs | bam_crumbs/utils/bin.py | Python | gpl-3.0 | 370 | 0

import os.path
from crumbs.utils.bin_utils import create_get_binary_path
from bam_crumbs.settings import get_setting
BIN_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
'..', '..', 'bin'))
get_binary_path = create_get_binary_path(os.path.split(__file__)[0],
                                         get_setting)

MirzaBaig715/DjangoURLShortner | urlshortenerapp/admin.py | Python | mit | 114 | 0

from django.contrib import admin
from .models import Line
# Register your models here.
admin.site.register(Line)

DaveBackus/Data_Bootcamp | Code/Lab/fred_CooleyRupert_run.py | Python | mit | 1,002 | 0.001996

"""
Runs peaktrough.py, which generates Cooley-Rupert figures for specified
series from FRED.
Execute peaktrough.py first, then run this program.
Written by Dave Backus under the watchful eye of Chase Coleman and Spencer Lyon
Date: July 10, 2014
"""
# import functions from peaktrough.py. * means all of them
# generates the msg "UMD has deleted: peaktrough" which means it reloads
from peaktrough import *
# do plots one at a time
manhandle_freddata("GDPC1", saveshow="show")
print("aaaa")
# do plots all at once with map
fred_series = ["GDPC1", "PCECC96", "GPDIC96", "OPHNFB"]
# uses default saveshow parameter
gdpc1, pcecc96, gpdic96, ophnfb = map(manhandle_freddata, fred_series)
print("xxxx")
# lets us change saveshow parameter
gdpc1, pcecc96, gpdic96, ophnfb = map(lambda s:
manhandle_freddata(s, saveshow="save"), fred_series)
print("yy
|
yy")
# skip lhs (doesn't run as written: in Python 3, map() is lazy, so the calls
# never execute unless the map object is consumed, e.g. wrapped in list())
list(map(lambda s:
    manhandle_freddata(s, saveshow="show"), fred_series))
print("zzzz")

mittya/duoclub | duoclub/posts/apps.py | Python | mit | 144 | 0

# -*- coding: utf-8 -*-
from django.apps import AppConfig
class PostsConfig(AppConfig):
name = 'posts'
    verbose_name = '图片列表'  # "Image list"

PatrickKennedy/pygab | common/mounts.py | Python | bsd-2-clause | 1,656 | 0.006039

#!/usr/bin/env python
#
# PyGab - Python Jabber Framework
# Copyright (c) 2008, Patrick Kennedy
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from common import utils
from core.mounts import *
try:
exec(utils.get_import(
mod=utils.get_module(), from_=['mounts'], import_=['*']))
except ImportError, e:
# If the bot module doesn't overwrite anything, no problem.
pass
#raise

renzon/appengineepython | backend/appengine/routes/updown/home.py | Python | mit | 1,716 | 0.002914

# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from google.appengine.api.app_identity.app_identity import get_default_gcs_bucket_name
from google.appengine.ext.blobstore import blobstore
from blob_app import blob_facade
from config.template_middleware import TemplateResponse
from gaecookie.decorator import no_csrf
from tekton import router
from routes.updown import upload, download
from tekton.gae.middleware.redirect import RedirectResponse
@no_csrf
def index(_logged_user):
success_url = router.to_path(upload)
bucket = get_default_gcs_bucket_name()
url = blobstore.create_upload_url(success_url, gs_bucket_name=bucket)
cmd = blob_facade.list_blob_files_cmd(_logged_user)
blob_files = cmd()
    delete_path = router.to_path(delete)
download_path = router.to_path(download)
blob_file_form = blob_facade.blob_file_form()
def localize_blob_file(blob_file):
blob_file_dct = blob_file_form.fill_with_model(blob_file, 64)
blob_file_dct['delete_path'] = router.to_path(delete_path, blob_file_dct['id'])
        blob_file_dct['download_path'] = router.to_path(download_path,
blob_file.blob_key,
blob_file_dct['filename'])
return blob_file_dct
localized_blob_files = [localize_blob_file(blob_file) for blob_file in blob_files]
context = {'upload_url': url,
'blob_files': localized_blob_files}
return TemplateResponse(context, 'updown/home.html')
def delete(blob_file_id):
blob_facade.delete_blob_file_cmd(blob_file_id).execute()
return RedirectResponse(router.to_path(index))

kiddinn/plaso | tests/containers/init_imports.py | Python | apache-2.0 | 558 | 0.007168

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests that all containers are imported
|
correctly."""
import unittest
from tests import test_lib
class ContainersImportTest(test_lib.ImportCheckTestCase):
"""Tests that container classes are imported correctly."""
_IGNORABLE_FILES = frozenset(['manager.py', 'interface.py'])
def testContainersImported(self):
"""Tests that all parsers are imported."""
self._AssertFilesImportedInInit(
test_lib.CONTAINERS_PATH, self._IGNORABLE_FILES)
if __name__ == '__main__':
unittest.main()

50wu/gpdb | gpMgmt/bin/gppylib/operations/test/regress/test_package/test_regress_simple_gppkg.py | Python | apache-2.0 | 1,745 | 0.006304

#!/usr/bin/env python3
import unittest
from gppylib.operations.test.regress.test_package import GppkgTestCase, GppkgSpec, BuildGppkg, RPMSpec, BuildRPM, run_command, run_remote_command
class SimpleGppkgTestCase(GppkgTestCase):
"""Covers simple build/install/remove/update test cases"""
    def test00_simple_build(self):
self.build(self.alpha_spec, self.A_spec)
def test01_simple_install(self):
gppkg_file = self.alpha_spec.get_filename()
self.install(gppkg_file)
        # Check RPM database
        self.check_rpm_install(self.A_spec.get_package_name())
def test02_simple_update(self):
gppkg_file = self.alpha_spec.get_filename()
self.install(gppkg_file)
update_rpm_spec = RPMSpec("A", "1", "2")
update_gppkg_spec = GppkgSpec("alpha", "1.1")
update_gppkg_file = self.build(update_gppkg_spec, update_rpm_spec)
self.update(update_gppkg_file)
#Check for the packages
self.check_rpm_install(update_rpm_spec.get_package_name())
def test03_simple_uninstall(self):
gppkg_file = self.alpha_spec.get_filename()
self.install(gppkg_file)
self.remove(gppkg_file)
results = run_command("gppkg -q --all")
results = results.split('\n')[self.start_output:self.end_output]
self.assertEqual(results, [])
def test04_help(self):
help_options = ["--help", "-h", "-?"]
for opt in help_options:
results = run_command("gppkg " + opt)
self.assertNotEqual(results, "")
def test05_version(self):
results = run_command("gppkg --version")
self.assertNotEqual(results, "")
if __name__ == "__main__":
unittest.main()

apple/swift-lldb | packages/Python/lldbsuite/test/commands/expression/import-std-module/vector-dbg-info-content/TestDbgInfoContentVector.py | Python | apache-2.0 | 1,822 | 0.001098

"""
Test basic std::vector functionality but with a declaration from
the debug info (the Foo struct) as content.
"""
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestDbgInfoContentVector(TestBase):
mydir = TestBase.compute_mydir(__file__)
# FIXME: This should work on more setups, so remove these
# skipIf's in the future.
@add_test_categories(["libc++"])
@skipIf(compiler=no_match("clang"))
@skipIf(oslist=no_match(["linux"]))
@skipIf(debug_info=no_match(["dwarf"]))
def test(self):
self.build()
lldbutil.run_to_source_breakpoint(self,
"// Set break point at this line.", lldb.SBFileSpec("main.cpp"))
self.runCmd("settings set target.import-std-module true")
self.expect("expr (size_t)a.size()", substrs=['(size_t) $0 = 3'])
self.expect("expr (int)a.front().a", substrs=['(int) $1 = 3'])
self.expect("expr (int)a[1].a", substrs=['(int) $2 = 1'])
self.expect("expr (int)a
|
.back().a", substrs=['(int) $3 = 2'])
self.expect("expr std::reverse(a.begin(), a.end())")
self.expect("expr (int)a.front().a", substrs=['(int) $4 = 2'])
self.expect("expr (int)(a.begin()->a)", substrs=['(int) $5 = 2'])
self.expect("expr (int)(a.rbegin()->a)", substrs=['(int) $6 = 3'])
self.expect("expr a.pop_back()")
|
self.expect("expr (int)a.back().a", substrs=['(int) $7 = 1'])
self.expect("expr (size_t)a.size()", substrs=['(size_t) $8 = 2'])
self.expect("expr (int)a.at(0).a", substrs=['(int) $9 = 2'])
self.expect("expr a.push_back({4})")
self.expect("expr (int)a.back().a", substrs=['(int) $10 = 4'])
self.expect("expr (size_t)a.size()", substrs=['(size_t) $11 = 3'])

NaturalHistoryMuseum/inselect | setup.py | Python | bsd-3-clause | 7,651 | 0.001699

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import inselect
REQUIREMENTS = [
# TODO How to specify OpenCV? 'cv2>=3.1.0',
'numpy>=1.11.1,<1.12',
'Pillow>=3.4.2,<3.5',
'python-dateutil>=2.6.0,<2.7',
'pytz>=2016.7',
'PyYAML>=3.12,<3.2',
'schematics>=1.1.1,<1.2',
'scikit-learn>=0.18.1,<0.19',
'scipy>=0.18.1,<0.19',
'unicodecsv>=0.14.1,<0.15',
]
SCRIPTS = ('export_metadata', 'ingest', 'read_barcodes', 'save_crops', 'segment')
setup_data = {
'name': 'inselect',
'version': inselect.__version__,
'author': (u'Lawrence Hudson, Alice Heaton, Pieter Holtzhausen, '
u'Stéfan van der Walt'),
'author_email': '[email protected]',
'maintainer': 'Lawrence Hudson',
'maintainer_email': '[email protected]',
'url': 'https://github.com/NaturalHistoryMuseum/inselect/',
'license': 'Modified BSD',
'description': inselect.__doc__,
'long_description': inselect.__doc__,
'packages': [
'inselect', 'inselect.gui', 'inselect.gui.plugins',
'inselect.gui.views', 'inselect.gui.views.boxes', 'inselect.lib',
'inselect.lib.templates', 'inselect.scripts',
],
'include_package_data': True,
'test_suite': 'inselect.tests',
'scripts': ['inselect/scripts/{0}.py'.format(script) for script in SCRIPTS],
'install_requires': REQUIREMENTS,
'extras_require': {
'gui': [
'ExifRead>=2.1.2', 'humanize>=0.5.1', 'psutil>=5.0.0',
'PyQt5>=5.6.0'
],
'barcodes': ['gouda>=0.1.13', 'pylibdmtx>=0.1.6', 'pyzbar>=0.1.3'],
'windows': ['pywin32>=220'],
'development': ['coveralls>=1.1', 'mock>=2.0.0', 'nose>=1.3.7'],
},
'entry_points': {
'gui_scripts':
['inselect = inselect.gui.app:main'],
'console_scripts':
['{0} = inselect.scripts.{0}:main'.format(script) for script in SCRIPTS],
},
'classifiers': [
'Development Status :: 4 - Beta',
'Topic :: Utilities',
        'Topic :: Scientific/Engineering :: Bio-Informatics',
        'Programming Language :: Python :: 3.5',
],
}
def setuptools_setup():
"""setuptools setup"""
from setuptools import setup
setup(**setup_data)
def _qt_files(site_packages):
"""Returns a list of tuples (src, dest) of Qt dependencies to be installed.
Elements are instances of Path.
site_packages should be an instance of Path to the site-packages directory.
IF we leave cx_Freeze to do its thing then the entirety of PyQt5, Qt5 and
uic are included in the installer. The only way to avoid horrible bloat is
to hand-tune which files we include.
    This whole system is hopelessly fragile.
"""
from pathlib import Path
return [
# Qt DLLs
(
site_packages.joinpath('PyQt5/Qt/bin').joinpath(dep),
dep
)
for dep in ('Qt5Core.dll', 'Qt5Gui.dll', 'Qt5Widgets.dll')
] + [
# Qt plugins
(
site_packages.joinpath('PyQt5/Qt/plugins/platforms').joinpath(dep),
Path('platforms').joinpath(dep)
)
for dep in ('qwindows.dll',)
] + [
# PyQt extension modules
(
site_packages.joinpath('PyQt5').joinpath(dep),
Path('PyQt5').joinpath(dep)
)
for dep in ('__init__.py', 'Qt.pyd', 'QtCore.pyd', 'QtGui.pyd', 'QtWidgets.pyd')
]
def cx_setup():
"""cx_Freeze setup. Used for building Windows installers"""
import scipy
from pathlib import Path
from distutils.sysconfig import get_python_lib
from cx_Freeze import setup, Executable
from pylibdmtx import pylibdmtx
from pyzbar import pyzbar
# Useful paths
environment_root = Path(sys.executable).parent
site_packages = Path(get_python_lib())
project_root = Path(__file__).parent
# Files as tuples (source, dest)
include_files = [
# Evil, evil, evil
# cx_Freeze breaks pywintypes and pythoncom on Python 3.5
# https://bitbucket.org/anthony_tuininga/cx_freeze/issues/194/error-with-frozen-executable-using-35-and
(site_packages.joinpath('win32/lib/pywintypes.py'), 'pywintypes.py'),
(site_packages.joinpath('pythoncom.py'), 'pythoncom.py'),
# Binary dependencies that are not detected
(environment_root.joinpath('Library/bin/mkl_core.dll'), 'mkl_core.dll'),
(environment_root.joinpath('Library/bin/mkl_intel_thread.dll'), 'mkl_intel_thread.dll'),
(environment_root.joinpath('Library/bin/libiomp5md.dll'), 'libiomp5md.dll'),
        # Stylesheet
        (project_root.joinpath('inselect/gui/inselect.qss'), 'inselect.qss'),
] + [
# DLLs that are not detected because they are loaded by ctypes
(dep._name, Path(dep._name).name)
for dep in pylibdmtx.EXTERNAL_DEPENDENCIES + pyzbar.EXTERNAL_DEPENDENCIES
] + _qt_files(site_packages)
# Convert instances of Path to strs
    include_files = [(str(source), str(dest)) for source, dest in include_files]
# Directories as strings
include_files += [
# Fixes scipy freeze
# http://stackoverflow.com/a/32822431/1773758
str(Path(scipy.__file__).parent),
]
# Packages to exclude.
exclude_packages = [
str(p.relative_to(site_packages)).replace('\\', '.') for p in
site_packages.rglob('*/tests')
]
setup(
name=setup_data['name'],
version=setup_data['version'],
options={
'build_exe': {
'packages':
setup_data.get('packages', []) + [
'urllib', 'sklearn.neighbors', 'win32com.gen_py',
'win32timezone',
],
'excludes': [
# '_bz2', # Required by sklearn
'_decimal', '_elementtree', '_hashlib', '_lzma',
'_ssl', 'curses',
'distutils', 'email', 'http', 'lib2to3', 'mock', 'nose',
'PyQt5',
# 'pydoc', # Required by sklearn
'tcl', 'Tkinter', 'ttk', 'Tkconstants',
# 'unittest', # Required by numpy.core.multiarray
'win32com.HTML', 'win32com.test', 'win32evtlog', 'win32pdh',
'win32trace', 'win32ui', 'win32wnet',
'xml', 'xmlrpc',
'inselect.tests',
] + exclude_packages,
'includes': [
],
'include_files': include_files,
'include_msvcr': True,
'optimize': 2,
},
'bdist_msi': {
'upgrade_code': '{fe2ed61d-cd5e-45bb-9d16-146f725e522f}'
}
},
executables=[
Executable(
script='inselect/scripts/inselect.py',
targetName='inselect.exe',
icon='icons/inselect.ico',
base='Win32GUI',
shortcutName='Inselect', # See http://stackoverflow.com/a/15736406
shortcutDir='ProgramMenuFolder'
)
] + [
Executable(
script='inselect/scripts/{0}.py'.format(script),
targetName='{0}.exe'.format(script),
icon='icons/inselect.ico',
base='Console'
)
for script in SCRIPTS
],
)
if (3, 5) <= sys.version_info:
if 'bdist_msi' in sys.argv:
cx_setup()
else:
setuptools_setup()
else:
sys.exit('Only Python >= 3.5 is supported')

17zuoye/luigi | test/parameter_test.py | Python | apache-2.0 | 25,868 | 0.002203

# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
from helpers import unittest
from datetime import timedelta
import luigi
import luigi.date_interval
import luigi.interface
import luigi.notifications
from helpers import with_config
from luigi.mock import MockTarget, MockFileSystem
from luigi.parameter import ParameterException
from worker_test import email_patch
luigi.notifications.DEBUG = True
class A(luigi.Task):
p = luigi.IntParameter()
class WithDefault(luigi.Task):
x = luigi.Parameter(default='xyz')
class Foo(luigi.Task):
bar = luigi.Parameter()
p2 = luigi.IntParameter()
multi = luigi.Parameter(is_list=True)
not_a_param = "lol"
class Bar(luigi.Task):
multibool = luigi.BoolParameter(is_list=True)
def run(self):
        Bar._val = self.multibool
class Baz(luigi.Task):
bool = luigi.BoolParameter()
def run(self):
Baz._val = self.bool
class ForgotParam(luigi.Task):
param = luigi.Parameter()
def run(self):
pass
class ForgotParamDep(luigi.Task):
    def requires(self):
return ForgotParam()
def run(self):
pass
class HasGlobalParam(luigi.Task):
x = luigi.Parameter()
global_param = luigi.IntParameter(is_global=True, default=123) # global parameters need default values
global_bool_param = luigi.BoolParameter(is_global=True, default=False)
def run(self):
self.complete = lambda: True
def complete(self):
return False
class HasGlobalParamDep(luigi.Task):
x = luigi.Parameter()
def requires(self):
return HasGlobalParam(self.x)
_shared_global_param = luigi.Parameter(is_global=True, default='123')
class SharedGlobalParamA(luigi.Task):
shared_global_param = _shared_global_param
class SharedGlobalParamB(luigi.Task):
shared_global_param = _shared_global_param
class BananaDep(luigi.Task):
x = luigi.Parameter()
y = luigi.Parameter(default='def')
def output(self):
return MockTarget('banana-dep-%s-%s' % (self.x, self.y))
def run(self):
self.output().open('w').close()
class Banana(luigi.Task):
x = luigi.Parameter()
y = luigi.Parameter()
style = luigi.Parameter(default=None)
def requires(self):
if self.style is None:
return BananaDep() # will fail
elif self.style == 'x-arg':
return BananaDep(self.x)
elif self.style == 'y-kwarg':
return BananaDep(y=self.y)
elif self.style == 'x-arg-y-arg':
return BananaDep(self.x, self.y)
else:
raise Exception('unknown style')
def output(self):
return MockTarget('banana-%s-%s' % (self.x, self.y))
def run(self):
self.output().open('w').close()
class MyConfig(luigi.Config):
mc_p = luigi.IntParameter()
mc_q = luigi.IntParameter(default=73)
class MyConfigWithoutSection(luigi.Config):
use_cmdline_section = False
mc_r = luigi.IntParameter()
mc_s = luigi.IntParameter(default=99)
class NoopTask(luigi.Task):
pass
class ParameterTest(unittest.TestCase):
def setUp(self):
super(ParameterTest, self).setUp()
        # Need to restore some defaults for the global params since they are overridden
HasGlobalParam.global_param.set_global(123)
HasGlobalParam.global_bool_param.set_global(False)
def test_default_param(self):
self.assertEqual(WithDefault().x, 'xyz')
def test_missing_param(self):
def create_a():
return A()
self.assertRaises(luigi.parameter.MissingParameterException, create_a)
def test_unknown_param(self):
def create_a():
return A(p=5, q=4)
self.assertRaises(luigi.parameter.UnknownParameterException, create_a)
def test_unknown_param_2(self):
def create_a():
return A(1, 2, 3)
self.assertRaises(luigi.parameter.UnknownParameterException, create_a)
def test_duplicated_param(self):
def create_a():
return A(5, p=7)
self.assertRaises(luigi.parameter.DuplicateParameterException, create_a)
def test_parameter_registration(self):
self.assertEqual(len(Foo.get_params()), 3)
def test_task_creation(self):
f = Foo("barval", p2=5, multi=('m1', 'm2'))
self.assertEqual(len(f.get_params()), 3)
self.assertEqual(f.bar, "barval")
self.assertEqual(f.p2, 5)
self.assertEqual(f.multi, ('m1', 'm2'))
self.assertEqual(f.not_a_param, "lol")
def test_multibool(self):
luigi.run(['--local-scheduler', '--no-lock', 'Bar', '--multibool', 'true', '--multibool', 'false'])
self.assertEqual(Bar._val, (True, False))
def test_multibool_empty(self):
luigi.run(['--local-scheduler', '--no-lock', 'Bar'])
self.assertEqual(Bar._val, tuple())
def test_bool_false(self):
luigi.run(['--local-scheduler', '--no-lock', 'Baz'])
self.assertEqual(Baz._val, False)
def test_bool_true(self):
luigi.run(['--local-scheduler', '--no-lock', 'Baz', '--bool'])
self.assertEqual(Baz._val, True)
def test_forgot_param(self):
self.assertRaises(luigi.parameter.MissingParameterException, luigi.run, ['--local-scheduler', '--no-lock', 'ForgotParam'],)
@email_patch
def test_forgot_param_in_dep(self, emails):
# A programmatic missing parameter will cause an error email to be sent
luigi.run(['--local-scheduler', '--no-lock', 'ForgotParamDep'])
self.assertNotEquals(emails, [])
def test_default_param_cmdline(self):
luigi.run(['--local-scheduler', '--no-lock', 'WithDefault'])
self.assertEqual(WithDefault().x, 'xyz')
def test_global_param_defaults(self):
h = HasGlobalParam(x='xyz')
self.assertEqual(h.global_param, 123)
self.assertEqual(h.global_bool_param, False)
def test_global_param_cmdline(self):
luigi.run(['--local-scheduler', '--no-lock', 'HasGlobalParam', '--x', 'xyz', '--global-param', '124'])
h = HasGlobalParam(x='xyz')
self.assertEqual(h.global_param, 124)
self.assertEqual(h.global_bool_param, False)
def test_global_param_cmdline_flipped(self):
luigi.run(['--local-scheduler', '--no-lock', '--global-param', '125', 'HasGlobalParam', '--x', 'xyz'])
h = HasGlobalParam(x='xyz')
self.assertEqual(h.global_param, 125)
self.assertEqual(h.global_bool_param, False)
def test_global_param_override(self):
h1 = HasGlobalParam(x='xyz', global_param=124)
h2 = HasGlobalParam(x='xyz')
self.assertEquals(h1.global_param, 124)
self.assertEquals(h2.global_param, 123)
def test_global_param_dep_cmdline(self):
luigi.run(['--local-scheduler', '--no-lock', 'HasGlobalParamDep', '--x', 'xyz', '--global-param', '124'])
h = HasGlobalParam(x='xyz')
self.assertEqual(h.global_param, 124)
self.assertEqual(h.global_bool_param, False)
def test_global_param_dep_cmdline_optparse(self):
luigi.run(['--local-scheduler', '--no-lock', '--task', 'HasGlobalParamDep', '--x', 'xyz', '--global-param', '124'], use_optparse=True)
h = HasGlobalParam(x='xyz')
self.assertEqual(h.global_param, 124)
self.assertEqual(h.global_bool_param, False)
def test_global_param_dep_cmdline_bool(self):
luigi.run(['--local-scheduler', '--no-lock', 'HasGlobalParamDep', '--x', 'xyz', '--global-bool-param'])
h = HasGlobalParam(x='xyz')
self.assertEqual(h.global_param, 123)
        self.assertEqual(h.global_bool_param, True)

pmghalvorsen/gramps_branch | gramps/plugins/lib/maps/dummylayer.py | Python | gpl-2.0 | 2,350 | 0.005106

# -*- python -*-
# -*- coding: utf-8 -*-
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2011-2012 Serge Noiraud
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Python modules
#
#-------------------------------------------------------------------------
import os
from gi.repository import GObject
#------------------------------------------------------------------------
#
# Set up logging
#
#------------------------------------------------------------------------
import logging
_LOG = logging.getLogger("maps.dummylayer")
#-------------------------------------------------------------------------
#
# Gramps Modules
#
#-------------------------------------------------------------------------
#-------------------------------------------------------------------------
#
# osmGpsMap
#
#-------------------------------------------------------------------------
try:
from gi.repository import OsmGpsMap as osmgpsmap
except:
raise
class DummyLayer(GObject.GObject, osmgpsmap.MapLayer):
def __init__(self):
"""
Initialize the dummy layer
"""
GObject.GObject.__init__(self)
def do_draw(self, gpsmap, gdkdrawable):
"""
Draw the layer
"""
pass
def do_render(self, gpsmap):
"""
Render the layer
"""
pass
def do_busy(self):
"""
The layer is busy
"""
return False
def do_button_press(self, gpsmap, gdkeventbutton):
"""
        Someone pressed a button
"""
return False
GObject.type_register(DummyLayer)

ychen820/microblog | y/google-cloud-sdk/platform/google_appengine/lib/django-1.4/django/utils/functional.py | Python | bsd-3-clause | 11,110 | 0.00144

import copy
import operator
from functools import wraps, update_wrapper
# You can't trivially replace this `functools.partial` because this binds to
# classes and returns bound instances, whereas functools.partial (on CPython)
# is a type and its instances don't bind.
def curry(_curried_func, *args, **kwargs):
def _curried(*moreargs, **morekwargs):
        return _curried_func(*(args + moreargs), **dict(kwargs, **morekwargs))
return _curried
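# Illustrative usage of curry (an assumption, not part of the original module):
#   from operator import add
#   add_one = curry(add, 1)
#   add_one(2)  # -> 3; later kwargs are merged over the bound ones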
def memoize(func, cache, num_args):
"""
    Wrap a function so that results for any argument tuple are stored in
'cache'. Note that the args to the function must be usable as dictionary
keys.
Only the first num_args are considered when creating the key.
"""
@wraps(func)
def wrapper(*args):
mem_args = args[:num_args]
if mem_args in cache:
return cache[mem_args]
result = func(*args)
cache[mem_args] = result
return result
return wrapper
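# Illustrative usage of memoize (an assumption, not part of the original module):
#   _cache = {}
#   def area(w, h):
#       return w * h
#   area = memoize(area, _cache, num_args=2)  # results keyed on (w, h)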
class cached_property(object):
"""
    Decorator that converts a method with a single
self argument into a property cached on the instance.
"""
def __init__(self, func):
self.func = func
def __get__(self, instance, type):
res = instance.__dict__[self.func.__name__] = self.func(instance)
return res
class Promise(object):
"""
This is just a base class for the proxy class created in
the closure of the lazy function. It can be used to recognize
promises in code.
"""
pass
def lazy(func, *resultclasses):
"""
Turns any callable into a lazy evaluated callable. You need to give result
classes or types -- at least one is needed so that the automatic forcing of
the lazy evaluation code is triggered. Results are not memoized; the
function is evaluated on every access.
"""
class __proxy__(Promise):
"""
Encapsulate a function call and act as a proxy for methods that are
called on the result of that function. The function is not evaluated
until one of the methods on the result is called.
"""
__dispatch = None
def __init__(self, args, kw):
self.__args = args
self.__kw = kw
if self.__dispatch is None:
self.__prepare_class__()
def __reduce__(self):
return (
_lazy_proxy_unpickle,
(func, self.__args, self.__kw) + resultclasses
)
def __prepare_class__(cls):
cls.__dispatch = {}
for resultclass in resultclasses:
cls.__dispatch[resultclass] = {}
for type_ in reversed(resultclass.mro()):
for (k, v) in type_.__dict__.items():
# All __promise__ return the same wrapper method, but they
# also do setup, inserting the method into the dispatch
# dict.
meth = cls.__promise__(resultclass, k, v)
if hasattr(cls, k):
continue
setattr(cls, k, meth)
cls._delegate_str = str in resultclasses
cls._delegate_unicode = unicode in resultclasses
assert not (cls._delegate_str and cls._delegate_unicode), "Cannot call lazy() with both str and unicode return types."
if cls._delegate_unicode:
cls.__unicode__ = cls.__unicode_cast
elif cls._delegate_str:
cls.__str__ = cls.__str_cast
__prepare_class__ = classmethod(__prepare_class__)
def __promise__(cls, klass, funcname, method):
# Builds a wrapper around some magic method and registers that magic
# method for the given type and method name.
def __wrapper__(self, *args, **kw):
# Automatically triggers the evaluation of a lazy value and
# applies the given magic method of the result type.
res = func(*self.__args, **self.__kw)
for t in type(res).mro():
if t in self.__dispatch:
return self.__dispatch[t][funcname](res, *args, **kw)
raise TypeError("Lazy object returned unexpected type.")
if klass not in cls.__dispatch:
cls.__dispatch[klass] = {}
cls.__dispatch[klass][funcname] = method
return __wrapper__
__promise__ = classmethod(__promise__)
def __unicode_cast(self):
return func(*self.__args, **self.__kw)
def __str_cast(self):
return str(func(*self.__args, **self.__kw))
def __cmp__(self, rhs):
if self._delegate_str:
s = str(func(*self.__args, **self.__kw))
elif self._delegate_unicode:
s = unicode(func(*self.__args, **self.__kw))
else:
s = func(*self.__args, **self.__kw)
if isinstance(rhs, Promise):
return -cmp(rhs, s)
else:
return cmp(s, rhs)
def __mod__(self, rhs):
if self._delegate_str:
return str(self) % rhs
elif self._delegate_unicode:
return unicode(self) % rhs
else:
raise AssertionError('__mod__ not supported for non-string types')
def __deepcopy__(self, memo):
# Instances of this class are effectively immutable. It's just a
# collection of functions. So we don't need to do anything
# complicated for copying.
memo[id(self)] = self
return self
@wraps(func)
def __wrapper__(*args, **kw):
# Creates the proxy object, instead of the actual value.
return __proxy__(args, kw)
return __wrapper__
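# Illustrative usage of lazy (an assumption, not part of the original module):
#   greet = lazy(lambda: u"hello", unicode)
#   msg = greet()   # returns a __proxy__; the lambda has not run yet
#   unicode(msg)    # forces evaluation on access -> u"hello"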
def _lazy_proxy_unpickle(func, args, kwargs, *resultclasses):
return lazy(func, *resultclasses)(*args, **kwargs)
def allow_lazy(func, *resultclasses):
"""
A decorator that allows a function to be called with one or more lazy
arguments. If none of the args are lazy, the function is evaluated
immediately, otherwise a __proxy__ is returned that will evaluate the
function when needed.
"""
@wraps(func)
def wrapper(*args, **kwargs):
for arg in list(args) + kwargs.values():
if isinstance(arg, Promise):
break
else:
return func(*args, **kwargs)
return lazy(func, *resultclasses)(*args, **kwargs)
return wrapper
empty = object()
def new_method_proxy(func):
def inner(self, *args):
if self._wrapped is empty:
self._setup()
return func(self._wrapped, *args)
return inner
class LazyObject(object):
"""
A wrapper for another class that can be used to delay instantiation of the
wrapped class.
By subclassing, you have the opportunity to intercept and alter the
instantiation. If you don't need to do that, use SimpleLazyObject.
"""
def __init__(self):
self._wrapped = empty
__getattr__ = new_method_proxy(getattr)
def __setattr__(self, name, value):
if name == "_wrapped":
# Assign to __dict__ to avoid infinite __setattr__ loops.
self.__dict__["_wrapped"] = value
else:
if self._wrapped is empty:
self._setup()
setattr(self._wrapped, name, value)
def __delattr__(self, name):
if name == "_wrapped":
raise TypeError("can't delete _wrapped.")
if self._wrapped is empty:
self._setup()
delattr(self._wrapped, name)
def _setup(self):
"""
Must be implemented by subclasses to initialise the wrapped object.
"""
raise NotImplementedError
# introspection support:
__members__ = property(lambda self: self.__dir__())
__dir__ = new_method_proxy(dir)
class SimpleLazyObject(LazyObject):
"""
A lazy object initialised from any function.
Designed for compound objects of unknown type. For builtins or objects of
    known type, use django.utils.functional.lazy.
    """

kaplun/Invenio-OpenAIRE | modules/webcomment/lib/webcomment_templates.py | Python | gpl-2.0 | 109,494 | 0.006694

# -*- coding: utf-8 -*-
## Comments and reviews for records.
## This file is part of Invenio.
## Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""HTML Templates for commenting features """
__revision__ = "$Id$"
import cgi
# Invenio imports
from invenio.urlutils import create_html_link
from invenio.webuser import get_user_info, collect_user_info, isGuestUser, get_email
from invenio.dateutils import convert_datetext_to_dategui
from invenio.webmessage_mailutils import email_quoted_txt2html
from invenio.webcomment_config import \
CFG_WEBCOMMENT_MAX_ATTACHED_FILES, \
CFG_WEBCOMMENT_MAX_ATTACHMENT_SIZE
from invenio.config import CFG_SITE_URL, \
CFG_SITE_SECURE_URL, \
CFG_SITE_LANG, \
CFG_SITE_NAME, \
CFG_SITE_NAME_INTL,\
CFG_SITE_SUPPORT_EMAIL,\
CFG_WEBCOMMENT_ALLOW_REVIEWS, \
CFG_WEBCOMMENT_ALLOW_COMMENTS, \
CFG_WEBCOMMENT_USE_RICH_TEXT_EDITOR, \
CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN, \
CFG_WEBCOMMENT_AUTHOR_DELETE_COMMENT_OPTION, \
CFG_CERN_SITE
from invenio.htmlutils import get_html_text_editor
from invenio.messages import gettext_set_language
from invenio.bibformat import format_record
from invenio.access_control_engine import acc_authorize_action
from invenio.websearch_templates import get_fieldvalues
class Template:
"""templating class, refer to webcomment.py for examples of call"""
def tmpl_get_first_comments_without_ranking(self, recID, ln, comments, nb_comments_total, warnings):
"""
@param recID: record id
@param ln: language
@param comments: tuple as returned from webcomment.py/query_retrieve_comments_or_remarks
@param nb_comments_total: total number of comments for this record
@param warnings: list of warning tuples (warning_msg, arg1, arg2, ...)
@return: html of comments
"""
# load the right message language
_ = gettext_set_language(ln)
# naming data fields of comments
c_nickname = 0
c_user_id = 1
c_date_creation = 2
c_body = 3
c_id = 4
warnings = self.tmpl_warnings(warnings, ln)
# comments
comment_rows = ''
max_comment_round_name = comments[-1][0]
for comment_round_name, comments_list in comments:
comment_rows += '<div id="cmtRound%i" class="cmtRound">' % (comment_round_name)
comment_rows += _('%(x_nb)i comments for round "%(x_name)s"') % {'x_nb': len(comments_list), 'x_name': comment_round_name} + "<br/>"
for comment in comments_list:
if comment[c_nickname]:
nickname = comment[c_nickname]
display = nickname
else:
                    (uid, nickname, display) = get_user_info(comment[c_user_id])
                messaging_link = self.create_messaging_link(nickname, display, ln)
comment_rows += """
<tr>
<td>"""
report_link = '%s/record/%s/comments/report?ln=%s&comid=%s' % (CFG_SITE_URL, recID, ln, comment[c_id])
reply_link = '%s/record/%s/comments/add?ln=%s&comid=%s&action=REPLY' % (CFG_SITE_URL, recID, ln, comment[c_id])
comment_rows += self.tmpl_get_comment_without_ranking(req=None, ln=ln, nickname=messaging_link, comment_uid=comment[c_user_id],
date_creation=comment[c_date_creation],
body=comment[c_body], status='', nb_reports=0,
report_link=report_link, reply_link=reply_link, recID=recID)
comment_rows += """
<br />
<br />
</td>
</tr>"""
# Close comment round
comment_rows += '</div>'
# write button
write_button_label = _("Write a comment")
write_button_link = '%s/record/%s/comments/add' % (CFG_SITE_URL, recID)
write_button_form = '<input type="hidden" name="ln" value="%s"/>' % ln
write_button_form = self.createhiddenform(action=write_button_link, method="get", text=write_button_form, button=write_button_label)
# output
if nb_comments_total > 0:
out = warnings
comments_label = len(comments) > 1 and _("Showing the latest %i comments:") % len(comments) \
or ""
out += """
<table>
<tr>
<td class="blocknote">%(comment_title)s</td>
</tr>
</table>
%(comments_label)s<br />
<table border="0" cellspacing="5" cellpadding="5" width="100%%">
%(comment_rows)s
</table>
%(view_all_comments_link)s
<br />
<br />
%(write_button_form)s<br />""" % \
{'comment_title': _("Discuss this document"),
'comments_label': comments_label,
'nb_comments_total' : nb_comments_total,
'recID': recID,
'comment_rows': comment_rows,
'tab': ' '*4,
'siteurl': CFG_SITE_URL,
's': nb_comments_total>1 and 's' or "",
'view_all_comments_link': nb_comments_total>0 and '''<a href="%s/record/%s/comments/display">View all %s comments</a>''' \
% (CFG_SITE_URL, recID, nb_comments_total) or "",
'write_button_form': write_button_form,
'nb_comments': len(comments)
}
else:
out = """
<!-- comments title table -->
<table>
<tr>
<td class="blocknote">%(discuss_label)s:</td>
</tr>
</table>
%(detailed_info)s
<br />
%(form)s
<br />""" % {'form': write_button_form,
'discuss_label': _("Discuss this document"),
'detailed_info': _("Start a discussion about any aspect of this document.")
}
return out
def tmpl_record_not_found(self, status='missing', recID="", ln=CFG_SITE_LANG):
"""
Displays a page when bad or missing record ID was given.
@param status: 'missing' : no recID was given
'inexistant': recID doesn't have an entry in the database
'nan' : recID is not a number
'invalid' : recID is an error code, i.e. in the interval [-99,-1]
@param return: body of the page
"""
_ = gettext_set_language(ln)
if status == 'inexistant':
body = _("Sorry, the record %s does not seem to exist.") % (recID,)
elif status in ('nan', 'invalid'):
body = _("Sorry, %s is not a valid ID value.") % (recID,)
else:
body = _("Sorry, no record ID was provided.")
body += "<br /><br />"
link = "<a href=\"%s?ln=%s\">%s</a>." % (CFG_SITE_URL, ln, CFG_SITE_NAME_INTL.get(ln, CFG_SITE_NAME))
body += _("You may want to start browsing from %s") % link
return body
def tmpl_get_first_comments_with_ranking(self, recID, ln, comments=None, n

ridelore/sopel-modules | rep.py | Python | apache-2.0 | 4,834 | 0.008068

from sopel import module
from sopel.tools import Identifier
import time
import re
TIMEOUT = 36000
@module.rule('^(</?3)\s+([a-zA-Z0-9\[\]\\`_\^\{\|\}-]{1,32})\s*$')
@module.intent('ACTION')
@module.require_chanmsg("You may only modify someone's rep in a channel.")
def heart_cmd(bot, trigger):
luv_h8(bot, trigger, trigger.group(2), 'h8' if '/' in trigger.group(1) else 'luv')
@module.rule('.*?(?:([a-zA-Z0-9\[\]\\`_\^\{\|\}-]{1,32})(\+{2}|-{2})).*?')
@module.require_chanmsg("You may only modify someone's rep in a channel.")
def karma_cmd(bot, trigger):
if re.match('^({prefix})({cmds})'.format(prefix=bot.config.core.prefix, cmds='|'.join(luv_h8_cmd.commands)),
trigger.group(0)):
|
return # avoid processing commands if people try to be tricky
for (nick, act) in re.findall('(?:([a-zA-Z0-9\[\]\\`_\^\{\|\}-]{1,32})(\+{2}|-{2}))', trigger.raw):
if luv_h8(bot, trigger, nick, 'luv' if act == '++' else 'h8', warn_nonexistent=False):
break
@module.commands('luv', 'h8')
@module.example(".luv Phixion")
@module.example(".h8 Thaya")
@module.require_chanmsg("You may only modify someone's rep in a channel.")
def luv_h8_cmd(bot, trigger):
if not trigger.group(3):
bot.reply("No user specified.")
return
target = Identifier(trigger.group(3))
luv_h8(bot, trigger, target, trigger.group(1))
def luv_h8(bot, trigger, target, which, warn_nonexistent=True):
target = verified_nick(bot, target, trigger.sender)
which = which.lower() # issue #18
pfx = change = selfreply = None # keep PyCharm & other linters happy
if not target:
if warn_nonexistent:
bot.reply("You can only %s someone who is here." % which)
return False
if rep_too_soon(bot, trigger.nick):
return False
if which == 'luv':
selfreply = "No narcissism allowed!"
pfx, change = 'in', 1
if which == 'h8':
selfreply = "Go to 4chan if you really hate yourself!"
pfx, change = 'de', -1
if not (pfx and change and selfreply): # safeguard against leaving something in the above mass-None assignment
bot.say("Logic error! Please report this to %s." % bot.config.core.owner)
return
if is_self(bot, trigger.nick, target):
bot.reply(selfreply)
return False
rep = mod_rep(bot, trigger.nick, target, change)
bot.say("%s has %screased %s's reputation score to %d" % (trigger.nick, pfx, target, rep))
return True
@module.commands('rep')
@module.example(".rep Phixion")
def show_rep(bot, trigger):
target = trigger.group(3) or trigger.nick
rep = get_rep(bot, target)
if rep is None:
bot.say("%s has no reputation score yet." % target)
return
bot.say("%s's current reputation score is %d." % (target, rep))
# helpers
def get_rep(bot, target):
return bot.db.get_nick_value(Identifier(target), 'rep_score')
def set_rep(bot, caller, target, newrep):
bot.db.set_nick_value(Identifier(target), 'rep_score', newrep)
bot.db.set_nick_value(Identifier(caller), 'rep_used', time.time())
def mod_rep(bot, caller, target, change):
rep = get_rep(bot, target) or 0
rep += change
set_rep(bot, caller, target, rep)
return rep
def get_rep_used(bot, nick):
return bot.db.get_nick_value(Identifier(nick), 'rep_used') or 0
def set_rep_used(bot, nick):
bot.db.set_nick_value(Identifier(nick), 'rep_used', time.time())
def rep_used_since(bot, nick):
now = time.time()
last = get_rep_used(bot, nick)
return abs(last - now)
def rep_too_soon(bot, nick):
since = rep_used_since(bot, nick)
if since < TIMEOUT:
bot.notice("You must wait %d more seconds before changing someone's rep again." % (TIMEOUT - since), nick)
return True
else:
return False
def is_self(bot, nick, target):
nick = Identifier(nick)
target = Identifier(target)
if nick == target:
return True # shortcut to catch common goofballs
try:
nick_id = bot.db.get_nick_id(nick, False)
target_id = bot.db.get_nick_id(target, False)
except ValueError:
return False # if either nick doesn't have an ID, they can't be in a group
return nick_id == target_id
def verified_nick(bot, nick, channel):
    match = re.search('([a-zA-Z0-9\[\]\\`_\^\{\|\}-]{1,32})', nick)
    if not match:  # avoid AttributeError when the pattern matches nothing
        return None
    nick = Identifier(match.group(1))
if nick.lower() not in bot.privileges[channel.lower()]:
if nick.endswith('--'):
if Identifier(nick[:-2]).lower() in bot.privileges[channel.lower()]:
return Identifier(nick[:-2])
return None
return nick
|
|
vienin/python-ufo
|
ufo/notify.py
|
Python
|
gpl-2.0
| 8,320
| 0.007452
|
# Copyright (C) 2010 Agorabox. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import utils
import os
from datetime import datetime
import new
from gettext import dgettext
from ufo.debugger import Debugger
from ufo.constants import ShareDoc
from ufo.database import *
from ufo.utils import get_user_infos
from ufo.user import user
class TranslatableText:
def __init__(self, text):
self.text = text
def __repr__(self):
return dgettext("python-ufo", self.text)
def _(message):
return TranslatableText(message)
class action:
def __init__(self, description):
self.description = description
def __call__(self, func):
func.action = True
func.description = self.description
return func
class NotificationDocument(Document, Debugger):
doctype = TextField(default="NotificationDocument")
subtype = TextField(default="")
date = DateTimeField(default=datetime.now)
initiator = TextField()
target = TextField()
by_id = ViewField('notification',
language = 'javascript',
map_fun = "function (doc) {" \
"if (doc.doctype === 'NotificationDocument') {" \
"emit(doc._id, doc);" \
"}" \
"}")
by_subtype_and_initiator = ViewField('notification',
language = 'javascript',
map_fun = "function (doc) {" \
"if (doc.doctype === 'NotificationDocument' && doc.subtype && doc.initiator) {" \
"emit([doc.subtype, doc.initiator], doc);" \
"}" \
"}")
def __init__(self, *args, **fields):
super(NotificationDocument, self).__init__(*args, **fields)
if fields.get('initiator') and fields.get('target'):
self.initiator = fields['initiator']
self.target = fields['target']
@action(_("Dismiss"))
def dismiss(self):
user.dismiss(self)
def __getitem__(self, key):
try:
value = getattr(self, "pretty_" + key)
except:
try:
value = getattr(self, key)
except:
value = super(Document, self).__getitem__(key)
if isinstance(value, TranslatableText):
return repr(value)
else:
return value
@property
def fullname(self):
return get_user_infos(login=self.initiator)['fullname']
@property
def actions(self):
actions = {}
for k, v in self.__class__.__dict__.items():
if type(v) == new.function and getattr(v, "action", False):
actions[k] = repr(v.description)
return actions
@property
def default_action(self):
        for action in self.actions:  # iterate method names (keys), not display labels
if getattr(getattr(self, action), "default", False):
return action
return "dismiss"
class NewFriendshipNotification(NotificationDocument):
subtype = TextField(default="NewFriendship")
title = _('New friendship invitation')
body = _('You have been invited by %(fullname)s to be his/her friend.')
summary = _("%(fullname)s wants to be your friend")
def __init__(self, **fields):
super(NewFriendshipNotification, self).__init__()
if fields.get('initiator') and fields.get('target'):
self.initiator = fields['initiator']
self.target = fields['target']
@action(_("Accept"))
def accept_invitation(self):
self.debug("Accepting the friend invitation from '%s' to '%s'"
% (self.initiator, self.target))
user.accept_friend(self.initiator)
@action(_("Refuse"))
def refuse_invitation(self):
self.debug("Refusing the friend invitation from '%s' to '%s'"
% (self.initiator, self.target))
user.refuse_friend(self.initiator)
@action(_("Block user"))
def block_invitation(self):
self.debug("Blocking the friend invitation from '%s' to '%s'"
% (self.initiator, self.target))
user.block_user(self.initiator)
class FollowRequestNotification(NotificationDocument):
subtype = TextField(default="FollowRequest")
title = _('New file sharing request')
body = _('%(fullname)s would like to be in your followers list.')
summary = _("%(fullname)s wants to follow you")
@action(_("Accept"))
def accept_invitation(self):
        self.debug("Accepting the follow request from '%s' to '%s'"
% (self.initiator, self.target))
user.accept_following(self.initiator)
@action(_("Refuse"))
def refuse_invitation(self):
self.debug("Refusing the follow request from '%s' to '%s'"
                   % (self.initiator, self.target))
user.refuse_following(self.initiator)
@action(_("Block user"))
def block_invitation(self):
self.debug("Blocking the follow request from '%s' to '%s'"
% (self.initiator, self.target))
user.block_user(self.initiator)
class AcceptedFriendshipNotification(NotificationDocument):
subtype = TextField(default="AcceptedFriendship")
title = _('Friendship invitation accepted')
body = _('%(fullname)s has accepted your friendship invitation, '
'you can now share some document with him/her.')
summary = _("%(fullname)s has accepted your invitation")
    @action(_("Accept"))
def accept_friend(self):
self.debug("Proceed pending shares from '%s' to '%s'" % (self.initiator, self.target))
# user.accept_friend(self.initiator)
class CanceledFriendshipNotification(NotificationDocument):
subtype = TextField(default="CanceledFriendship")
title = _('A friendship has been canceled')
body = _('%(fullname)s has removed you from his friend list, '
'you can not access his files any more.')
summary = _("%(fullname)s has canceled his friendship with you")
class RefusedFriendshipNotification(NotificationDocument):
subtype = TextField(default="RefusedFriendship")
title = _('%(fullname)s has refused your friend request')
body = _('%(fullname)s would rather be stranger than friends.')
summary = _("%(fullname)s has refused your friend request")
class NewShareNotification(NotificationDocument):
subtype = TextField(default="NewShare")
files = ListField(TextField())
title = _('Someone has shared some files with you')
body = _('%(fullname)s has shared the following files with you : %(files)s')
summary = _("%(fullname)s has shared some files with you")
def __init__(self, **fields):
super(NewShareNotification, self).__init__(**fields)
if fields.get('files'):
self.files = fields['files']
class CanceledShareNotification(NotificationDocument):
subtype = TextField(default="CanceledShare")
files = ListField(TextField())
title = _('A share has been canceled')
body = _('%(fullname)s has canceled the share of \'%(file)s\', '
'you can\'t access the file any more.')
summary = _("%(fullname)s has canceled a share with you")
    def __init__(self, **fields):
        super(CanceledShareNotification, self).__init__(**fields)
        if fields.get('files'):
            self.files = fields['files']
|
thisisshi/cloud-custodian
|
tools/c7n_mailer/c7n_mailer/utils.py
|
Python
|
apache-2.0
| 16,298
| 0.000675
|
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
import base64
from datetime import datetime, timedelta
import functools
import json
import os
import time
import yaml
import jinja2
import jmespath
from dateutil import parser
from dateutil.tz import gettz, tzutc
try:
from botocore.exceptions import ClientError
except ImportError: # pragma: no cover
pass # Azure provider
class Providers:
AWS = 0
Azure = 1
def get_jinja_env(template_folders):
env = jinja2.Environment(trim_blocks=True, autoescape=False) # nosec nosemgrep
env.filters['yaml_safe'] = functools.partial(yaml.safe_dump, default_flow_style=False)
env.filters['date_time_format'] = date_time_format
env.filters['get_date_time_delta'] = get_date_time_delta
env.filters['from_json'] = json.loads
env.filters['get_date_age'] = get_date_age
env.globals['format_resource'] = resource_format
env.globals['format_struct'] = format_struct
env.globals['resource_tag'] = get_resource_tag_value
    env.globals['get_resource_tag_value'] = get_resource_tag_value
env.globals['search'] = jmespath.search
env.loader = jinja2.FileSystemLoader(template_folders)
return env
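# Illustrative use of the environment above (the template folder path is an
# assumption, not part of this module):
#   env = get_jinja_env(['./msg-templates'])
#   env.from_string('{{ "2024-01-01T00:00:00" | date_time_format }}').render()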
def get_rendered_jinja(
target, sqs_message, resources, logger,
specified_template, default_template, template_folders):
env = get_jinja_env(template_folders)
    mail_template = sqs_message['action'].get(specified_template, default_template)
if not os.path.isabs(mail_template):
mail_template = '%s.j2' % mail_template
try:
template = env.get_template(mail_template)
except Exception as error_msg:
logger.error("Invalid template reference %s\n%s" % (mail_template, error_msg))
return
# recast seconds since epoch as utc iso datestring, template
# authors can use date_time_format helper func to convert local
# tz. if no execution start time was passed use current time.
execution_start = datetime.utcfromtimestamp(
sqs_message.get(
'execution_start',
time.mktime(
datetime.utcnow().timetuple())
)).isoformat()
rendered_jinja = template.render(
recipient=target,
resources=resources,
account=sqs_message.get('account', ''),
account_id=sqs_message.get('account_id', ''),
partition=sqs_message.get('partition', ''),
event=sqs_message.get('event', None),
action=sqs_message['action'],
policy=sqs_message['policy'],
execution_start=execution_start,
region=sqs_message.get('region', ''))
return rendered_jinja
# eg, target_tag_keys could be resource-owners ['Owners', 'SupportTeam']
# and this function would go through the resource and look for any tag keys
# that match Owners or SupportTeam, and return those values as targets
def get_resource_tag_targets(resource, target_tag_keys):
if 'Tags' not in resource:
return []
if isinstance(resource['Tags'], dict):
tags = resource['Tags']
else:
tags = {tag['Key']: tag['Value'] for tag in resource['Tags']}
targets = []
for target_tag_key in target_tag_keys:
if target_tag_key in tags:
targets.append(tags[target_tag_key])
return targets
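# Usage sketch with illustrative values (not from this module):
#   resource = {'Tags': [{'Key': 'OwnerContact', 'Value': 'team@example.com'}]}
#   get_resource_tag_targets(resource, ['OwnerContact', 'SupportTeam'])
#   # -> ['team@example.com']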
def get_message_subject(sqs_message):
default_subject = 'Custodian notification - %s' % (sqs_message['policy']['name'])
subject = sqs_message['action'].get('subject', default_subject)
jinja_template = jinja2.Template(subject)
subject = jinja_template.render(
account=sqs_message.get('account', ''),
account_id=sqs_message.get('account_id', ''),
partition=sqs_message.get('partition', ''),
event=sqs_message.get('event', None),
action=sqs_message['action'],
policy=sqs_message['policy'],
region=sqs_message.get('region', '')
)
return subject
def setup_defaults(config):
config.setdefault('region', 'us-east-1')
config.setdefault('ses_region', config.get('region'))
config.setdefault('memory', 1024)
config.setdefault('runtime', 'python3.7')
config.setdefault('timeout', 300)
config.setdefault('subnets', None)
config.setdefault('security_groups', None)
config.setdefault('contact_tags', [])
config.setdefault('ldap_uri', None)
config.setdefault('ldap_bind_dn', None)
config.setdefault('ldap_bind_user', None)
config.setdefault('ldap_bind_password', None)
config.setdefault('endpoint_url', None)
config.setdefault('datadog_api_key', None)
config.setdefault('slack_token', None)
config.setdefault('slack_webhook', None)
def date_time_format(utc_str, tz_str='US/Eastern', format='%Y %b %d %H:%M %Z'):
return parser.parse(utc_str).astimezone(gettz(tz_str)).strftime(format)
def get_date_time_delta(delta):
return str(datetime.now().replace(tzinfo=gettz('UTC')) + timedelta(delta))
def get_date_age(date):
return (datetime.now(tz=tzutc()) - parser.parse(date)).days
def format_struct(evt):
return json.dumps(evt, indent=2, ensure_ascii=False)
def get_resource_tag_value(resource, k):
for t in resource.get('Tags', []):
if t['Key'] == k:
return t['Value']
return ''
def strip_prefix(value, prefix):
if value.startswith(prefix):
return value[len(prefix):]
return value
def resource_format(resource, resource_type):
if resource_type.startswith('aws.'):
resource_type = strip_prefix(resource_type, 'aws.')
if resource_type == 'ec2':
tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())}
return "%s %s %s %s %s %s" % (
resource['InstanceId'],
resource.get('VpcId', 'NO VPC!'),
resource['InstanceType'],
resource.get('LaunchTime'),
tag_map.get('Name', ''),
resource.get('PrivateIpAddress'))
elif resource_type == 'ami':
return "%s %s %s" % (
resource.get('Name'), resource['ImageId'], resource['CreationDate'])
elif resource_type == 'sagemaker-notebook':
return "%s" % (resource['NotebookInstanceName'])
elif resource_type == 's3':
return "%s" % (resource['Name'])
elif resource_type == 'ebs':
return "%s %s %s %s" % (
resource['VolumeId'],
resource['Size'],
resource['State'],
resource['CreateTime'])
elif resource_type == 'rds':
return "%s %s %s %s" % (
resource['DBInstanceIdentifier'],
"%s-%s" % (
resource['Engine'], resource['EngineVersion']),
resource['DBInstanceClass'],
resource['AllocatedStorage'])
elif resource_type == 'rds-cluster':
return "%s %s %s" % (
resource['DBClusterIdentifier'],
"%s-%s" % (
resource['Engine'], resource['EngineVersion']),
resource['AllocatedStorage'])
elif resource_type == 'asg':
tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())}
return "%s %s %s" % (
resource['AutoScalingGroupName'],
tag_map.get('Name', ''),
"instances: %d" % (len(resource.get('Instances', []))))
elif resource_type == 'elb':
tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())}
if 'ProhibitedPolicies' in resource:
return "%s %s %s %s" % (
resource['LoadBalancerName'],
"instances: %d" % len(resource['Instances']),
"zones: %d" % len(resource['AvailabilityZones']),
"prohibited_policies: %s" % ','.join(
resource['ProhibitedPolicies']))
return "%s %s %s" % (
resource['LoadBalancerName'],
"instances: %d" % len(resource['Instances']),
"zones: %d" % len(resource['AvailabilityZones']))
elif resource_type == 'redshift':
return "%s %s %s" % (
resource['ClusterIdentifier'],
'nodes:%d' % len(resource['ClusterNodes']),
'encrypted:%s' % resource['Encrypted'])
elif resource_type == 'emr':
|
imvu/bluesteel
|
app/logic/logger/migrations/0002_auto_20191123_1904.py
|
Python
|
mit
| 679
| 0.001473
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2019-11-24 03:04
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logger', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='logentry',
name='log_type',
            field=models.IntegerField(choices=[(0, 'Debug'), (1, 'Info'), (2, 'Warning'), (3, 'Error'), (4, 'Critical')], default=1),
),
migrations.AlterField(
model_name='logentry',
name='message',
field=models.TextField(default=''),
),
]
|
RickHutten/paparazzi
|
sw/tools/px4/px_mkfw.py
|
Python
|
gpl-2.0
| 4,811
| 0.017044
|
#!/usr/bin/env python
############################################################################
#
# Copyright (C) 2012, 2013 PX4 Development Team. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# 3. Neither the name PX4 nor the names of its contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
############################################################################
#
# PX4 firmware image generator
#
# The PX4 firmware file is a JSON-encoded Python object, containing
# metadata fields and a zlib-compressed base64-encoded firmware image.
#
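# A consumer can recover the binary image by inverting the encoding applied
# below (sketch; the file name is an assumption):
#
#   import json, base64, zlib
#   desc = json.load(open('firmware.px4'))
#   image = zlib.decompress(base64.b64decode(desc['image']))
#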
import sys
import argparse
import json
import base64
import zlib
import time
import subprocess
#
# Construct a basic firmware description
#
def mkdesc():
proto = {}
proto['magic'] = "PX4FWv1"
proto['board_id'] = 0
proto['board_revision'] = 0
proto['version'] = ""
proto['summary'] = ""
proto['description'] = ""
proto['git_identity'] = ""
proto['build_time'] = 0
proto['image'] = bytes()
proto['image_size'] = 0
return proto
# Parse commandline
parser = argparse.ArgumentParser(description="Firmware generator for the PX autopilot system.")
parser.add_argument("--prototype", action="store", help="read a prototype description from a file")
parser.add_argument("--board_id", action="store", help="set the board ID required")
parser.add_argument("--board_revision", action="store", help="set the board revision required")
parser.add_argument("--version", action="store", help="set a version string")
parser.add_argument("--summary", action="store", help="set a brief description")
parser.add_argument("--description", action="store", help="set a longer description")
parser.add_argument("--git_identity", action="store", help="the working directory to check for git identity")
parser.add_argument("--parameter_xml", action="store", help="the parameters.xml file")
parser.add_argument("--airframe_xml", action="store", help="the airframes.xml file")
parser.add_argument("--image", action="store", help="th
|
e firmware image")
args = parser.parse_args()
# Fetch the firmware descriptor prototype if specified
if args.prototype != None:
f = open(args.prototype,"r")
desc = json.load(f)
f.close()
else:
desc = mkdesc()
desc['build_time'] = int(time.time())
if args.board_id != None:
desc['board_id'] = int(args.board_id)
if args.board_revision != None:
desc['board_revision'] = int(args.board_revision)
if args.version != None:
desc['version'] = str(args.version)
if args.summary != None:
desc['summary'] = str(args.summary)
if args.description != None:
desc['description'] = str(args.description)
if args.git_identity != None:
cmd = " ".join(["git", "--git-dir", args.git_identity + "/.git", "describe", "--always", "--dirty"])
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout
desc['git_identity'] = str(p.read().strip())
p.close()
if args.parameter_xml != None:
f = open(args.parameter_xml, "rb")
bytes = f.read()
desc['parameter_xml_size'] = len(bytes)
desc['parameter_xml'] = base64.b64encode(zlib.compress(bytes,9)).decode('utf-8')
if args.airframe_xml != None:
f = open(args.airframe_xml, "rb")
bytes = f.read()
desc['airframe_xml_size'] = len(bytes)
desc['airframe_xml'] = base64.b64encode(zlib.compress(bytes,9)).decode('utf-8')
if args.image != None:
f = open(args.image, "rb")
bytes = f.read()
desc['image_size'] = len(bytes)
desc['image'] = base64.b64encode(zlib.compress(bytes,9)).decode('utf-8')
print(json.dumps(desc, indent=4))
|
zalando/turnstile
|
tests/checks/test_specification_check.py
|
Python
|
apache-2.0
| 1,332
| 0.00301
|
# -*- coding: utf-8 -*-
import pytest
import turnstile.models.message as message
from turnstile.checks import CheckIgnore
from turnstile.checks.commit_msg.specification import check
def test_check():
commit_1 = message.CommitMessage('something', 'https://github.com/jmcs/turnstile/issues/42 m€sságe')
result_1 = check(None, {}, commit_1)
assert result_1.successful
assert result_1.details == []
commit_2 = message.CommitMessage('something', 'invalid-1')
result_2 = check(None, {}, commit_2)
assert not result_2.successful
    assert result_2.details == ['invalid-1 is not a valid specification.']
# Merge messages are ignored
with pytest.raises(CheckIgnore):
commit_3 = message.CommitMessage('something', 'Merge stuff')
check(None, {}, commit_3)
commit_4 = message.CommitMessage('something', 'ftp://example.com/spec')
result_4 = check(None, {'specification': {'allowed_schemes': ['https']}}, commit_4)
assert not result_4.successful
assert result_4.details == ['ftp://example.com/spec is not a valid specification.']
commit_5 = message.CommitMessage('something', 'ftp://example.com/spec')
result_5 = check(None, {'specification': {'allowed_schemes': ['https', 'ftp']}}, commit_5)
assert result_5.successful
assert result_5.details == []
|
ehabkost/virt-test
|
qemu/tests/timedrift.py
|
Python
|
gpl-2.0
| 7,552
| 0.001457
|
import logging, time, commands
from autotest.client.shared import error
from virttest import utils_test, aexpect
def run_timedrift(test, params, env):
"""
Time drift test (mainly for Windows guests):
1) Log into a guest.
2) Take a time reading from the guest and host.
3) Run load on the guest and host.
4) Take a second time reading.
5) Stop the load and rest for a while.
6) Take a third time reading.
7) If the drift immediately after load is higher than a user-
specified value (in %), fail.
If the drift after the rest period is higher than a user-specified value,
fail.
@param test: QEMU test object.
@param params: Dictionary with test parameters.
@param env: Dictionary with the test environment.
"""
# Helper functions
def set_cpu_affinity(pid, mask):
"""
Set the CPU affinity of all threads of the process with PID pid.
Do this recursively for all child processes as well.
@param pid: The process ID.
@param mask: The CPU affinity mask.
@return: A dict containing the previous mask for each thread.
"""
tids = commands.getoutput("ps -L --pid=%s -o lwp=" % pid).split()
prev_masks = {}
for tid in tids:
prev_mask = commands.getoutput("taskset -p %s" % tid).split()[-1]
prev_masks[tid] = prev_mask
commands.getoutput("taskset -p %s %s" % (mask, tid))
children = commands.getoutput("ps --ppid=%s -o pid=" % pid).split()
for child in children:
prev_masks.update(set_cpu_affinity(child, mask))
return prev_masks
def restore_cpu_affinity(prev_masks):
"""
Restore the CPU affinity of several threads.
@param prev_masks: A dict containing TIDs as keys and masks as values.
"""
for tid, mask in prev_masks.items():
commands.getoutput("taskset -p %s %s" % (mask, tid))
vm = env.get_vm(params["main_vm"])
vm.verify_alive()
timeout = int(params.get("login_timeout", 360))
    session = vm.wait_for_login(timeout=timeout)
# Collect test parameters:
# Command to run to get the current time
time_command = params.get("time_command")
# Filter which should match a string to be passed to time.strptime()
time_filter_re = params.get("time_filter_re")
# Time format for time.strptime()
time_format = params.get("time_format")
guest_load_command = params.get("guest_load_command")
    guest_load_stop_command = params.get("guest_load_stop_command")
host_load_command = params.get("host_load_command")
guest_load_instances = int(params.get("guest_load_instances", "1"))
host_load_instances = int(params.get("host_load_instances", "0"))
# CPU affinity mask for taskset
cpu_mask = params.get("cpu_mask", "0xFF")
load_duration = float(params.get("load_duration", "30"))
rest_duration = float(params.get("rest_duration", "10"))
drift_threshold = float(params.get("drift_threshold", "200"))
drift_threshold_after_rest = float(params.get("drift_threshold_after_rest",
"200"))
guest_load_sessions = []
host_load_sessions = []
try:
# Set the VM's CPU affinity
prev_affinity = set_cpu_affinity(vm.get_shell_pid(), cpu_mask)
try:
# Open shell sessions with the guest
logging.info("Starting load on guest...")
for i in range(guest_load_instances):
load_session = vm.login()
# Set output func to None to stop it from being called so we
# can change the callback function and the parameters it takes
# with no problems
load_session.set_output_func(None)
load_session.set_output_params(())
load_session.set_output_prefix("(guest load %d) " % i)
load_session.set_output_func(logging.debug)
guest_load_sessions.append(load_session)
# Get time before load
# (ht stands for host time, gt stands for guest time)
(ht0, gt0) = utils_test.get_time(session,
time_command,
time_filter_re,
time_format)
# Run some load on the guest
for load_session in guest_load_sessions:
load_session.sendline(guest_load_command)
# Run some load on the host
logging.info("Starting load on host...")
for i in range(host_load_instances):
host_load_sessions.append(
aexpect.run_bg(host_load_command,
output_func=logging.debug,
output_prefix="(host load %d) " % i,
timeout=0.5))
# Set the CPU affinity of the load process
pid = host_load_sessions[-1].get_pid()
set_cpu_affinity(pid, cpu_mask)
# Sleep for a while (during load)
logging.info("Sleeping for %s seconds...", load_duration)
time.sleep(load_duration)
# Get time delta after load
(ht1, gt1) = utils_test.get_time(session,
time_command,
time_filter_re,
time_format)
# Report results
host_delta = ht1 - ht0
guest_delta = gt1 - gt0
drift = 100.0 * (host_delta - guest_delta) / host_delta
logging.info("Host duration: %.2f", host_delta)
logging.info("Guest duration: %.2f", guest_delta)
logging.info("Drift: %.2f%%", drift)
finally:
logging.info("Cleaning up...")
# Restore the VM's CPU affinity
restore_cpu_affinity(prev_affinity)
# Stop the guest load
if guest_load_stop_command:
session.cmd_output(guest_load_stop_command)
# Close all load shell sessions
for load_session in guest_load_sessions:
load_session.close()
for load_session in host_load_sessions:
load_session.close()
# Sleep again (rest)
logging.info("Sleeping for %s seconds...", rest_duration)
time.sleep(rest_duration)
# Get time after rest
(ht2, gt2) = utils_test.get_time(session,
time_command,
time_filter_re,
time_format)
finally:
session.close()
# Report results
host_delta_total = ht2 - ht0
guest_delta_total = gt2 - gt0
    drift_total = 100.0 * (host_delta_total - guest_delta_total) / host_delta_total
logging.info("Total host duration including rest: %.2f", host_delta_total)
logging.info("Total guest duration including rest: %.2f", guest_delta_total)
logging.info("Total drift after rest: %.2f%%", drift_total)
# Fail the test if necessary
if abs(drift) > drift_threshold:
raise error.TestFail("Time drift too large: %.2f%%" % drift)
if abs(drift_total) > drift_threshold_after_rest:
raise error.TestFail("Time drift too large after rest period: %.2f%%"
% drift_total)
|
eirki/script.service.koalahbonordic
|
tests/mock_constants.py
|
Python
|
mit
| 370
| 0.002703
|
#! /usr/bin/env python2
# -*- coding: utf-8 -*-
from __future__ import (unicode_literals, absolute_import, division)
import os as os_module
import xbmc
from lib.constants import *
userdatafolder = os_module.path.join(xbmc.translatePath("special://profile").decode("utf-8"), "addon_data", addonid, "test data")
libpath = os_module.path.join(userdatafolder, "Library")
|
TwoUnderscorez/KalutServer
|
KalutServer/RESTfulAPI/SSLbottle.py
|
Python
|
apache-2.0
| 772
| 0.002591
|
from mybottle import Bottle, run, ServerAdapter, get, post, request
import KalutServer.conf as myconf
class SSLWSGIRefServer(ServerAdapter):
def run(self, handler, quiet=False):
        from wsgiref.simple_server import make_server, WSGIRequestHandler
import ssl
if quiet:
class QuietHandler(WSGIRequestHandler):
def log_request(*args, **kw): pass
self.options['handler_class'] = QuietHandler
srv = make_server(self.host, self.port, handler, **self.options)
|
srv.socket = ssl.wrap_socket (
srv.socket,
certfile=myconf.certfile, # path to chain file
keyfile=myconf.keyfile, # path to RSA private key
server_side=True)
srv.serve_forever()
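# Minimal usage sketch (assumes routes are registered on a Bottle app
# elsewhere and that myconf points at a valid certificate/key pair):
#   app = Bottle()
#   run(app, server=SSLWSGIRefServer, host='0.0.0.0', port=8443)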
|
rickyrish/rickyblog
|
publicaciones/urls.py
|
Python
|
gpl-2.0
| 234
| 0.012821
|
from django.conf.urls import patterns, url
from publicaciones import views
urlpatterns = patterns('',
    url(r'^$', views.index, name='index'),
url(r'^(?P<articulo_titulo>[\W\w]+)/$', views.ver_articulo, name='ver_articulo'),
)
|
JeffRoy/mi-dataset
|
mi/dataset/driver/optaa_dj/cspp/optaa_dj_cspp_telemetered_driver.py
|
Python
|
bsd-2-clause
| 2,238
| 0.002681
|
"""
@package mi.dataset.driver.optaa_dj.cspp
@file mi-dataset/mi/dataset/driver/optaa_dj/cspp/optaa_dj_cspp_telemetered_driver.py
@author Joe Padula
@brief Telemetered driver for the optaa_dj_cspp instrument
Release notes:
Initial Release
"""
__author__ = 'jpadula'
from mi.dataset.dataset_driver import SimpleDatasetDriver
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.parser.cspp_base import \
DATA_PARTICLE_CLASS_KEY, \
METADATA_PARTICLE_CLASS_KEY
from mi.dataset.parser.optaa_dj_cspp import \
OptaaDjCsppParser, \
OptaaDjCsppMetadataTelemeteredDataParticle, \
OptaaDjCsppInstrumentTelemeteredDataParticle
from mi.core.versioning import version
@version("15.6.1")
def parse(basePythonCodePath, sourceFilePath, particleDataHdlrObj):
"""
This is the method called by Uframe
:param basePythonCodePath This is the file system location of mi-dataset
:param sourceFilePath This is the full path and filename of the file to be parsed
:param particleDataHdlrObj Java Object to consume the output of the parser
:return particleDataHdlrObj
"""
with open(sourceFilePath, 'rU') as stream_handle:
# create an instance of the concrete driver class defined below
driver = OptaaDjCsppTelemeteredDriver(basePythonCodePath, stream_handle, particleDataHdlrObj)
driver.processFileStream()
return particleDataHdlrObj
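# Illustrative invocation (in production uFrame supplies both paths and the
# particle data handler object):
#   parse('/path/to/mi-dataset', '/path/to/input.cspp', particle_data_hdlr)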
class OptaaDjCsppTelemeteredDriver(SimpleDatasetDriver):
"""
The optaa_dj_cspp telemetered driver class extends the SimpleDatasetDriver.
"""
def _build_parser(self, stream_handle):
parser_config = {
DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.optaa_dj_cspp',
DataSetDriverConfigKeys.PARTICLE_CLASS: None,
DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
METADATA_PARTICLE_CLASS_KEY: OptaaDjCsppMetadataTelemeteredDataParticle,
DATA_PARTICLE_CLASS_KEY: OptaaDjCsppInstrumentTelemeteredDataParticle
}
}
parser = OptaaDjCsppParser(parser_config,
stream_handle,
self._exception_callback)
return parser
|
hamiltonkibbe/PyAbleton
|
pyableton/__init__.py
|
Python
|
mit
| 1,356
| 0.00885
|
#!/usr/bin/env python
#
# Copyright (c) 2014 Hamilton Kibbe <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
""" PyAbleton
A library for creating and editing Ableton Live instrument/effect presets in Python.
"""
__author__ = '[email protected]'
__version__ = '1.0'
import presets
|
VimanyuAgg/Have_a_seat
|
dbanalytics tester.py
|
Python
|
apache-2.0
| 1,275
| 0.004706
|
from pymongo import MongoClient
import schedule
import time
##############
## This script will be deployed in bluemix with --no-route set to true
##############
con = MongoClient("mongodb://abcd:[email protected]:11798/have_a_seat")
db = con.have_a_seat
#Bookings is {customerName:"", customerEmail: "", customerPhone: "", Slot: ""}
db.Exploration.delete_many({})
db.Exploitation.delete_many({})
for i in range(4): # Finding for all slots
    counts = {}  # per-slot tally; 'dict' shadowed the builtin
    # re-query each pass: a pymongo cursor is exhausted after one full iteration
    cursor = db.Bookings.find()
    for c in cursor:
        if c['Slot'] == i and c['customerEmail'] not in counts:
            counts[c['customerEmail']] = 1
        elif c['Slot'] == i and c['customerEmail'] in counts:
            counts[c['customerEmail']] += 1
    tuples_list = sorted(counts.items(), key=lambda x: x[1], reverse=True)
print tuples_list
print 'Completed for slot ', i
db.Exploitation.insert({'Slot': i, 'customerEmail': tuples_list[0][0],
'customerName': db.Bookings.find_one({'customerEmail': tuples_list[0][0]})['customerName']})
db.Exploration.insert({'Slot': i, 'customerEmail': tuples_list[len(tuples_list) - 1][0], 'customerName':
db.Bookings.find_one({'customerEmail': tuples_list[len(tuples_list) - 1][0]})['customerName']})
|
floppp/programming_challenges
|
project_euler/051-100/97.py
|
Python
|
mit
| 29
| 0
|
print 28433 * 2**7830457 + 1
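# Project Euler 97 only asks for the last ten digits; modular exponentiation
# avoids building the ~2.36-million-digit number (equivalent sketch):
#   print (28433 * pow(2, 7830457, 10**10) + 1) % 10**10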
|
j4n7/record-q
|
qualitative_variation.py
|
Python
|
gpl-3.0
| 1,382
| 0.003618
|
from collections import Counter
def unalk_coeff(l):
'''Source: https://ww2.amstat.org/publications/jse/v15n2/kader.html'''
n = len(l)
freq = Counter(l)
freqsum = 0
    for key, count in freq.items():  # avoid rebinding 'freq' inside its own loop
        p = count / n
freqsum += p**2
unalk_coeff = 1 - freqsum
return unalk_coeff
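# Worked example for the coefficient above: ['A', 'A', 'B', 'B'] gives
# p_A = p_B = 0.5, so unalikeability = 1 - (0.25 + 0.25) = 0.5.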
def IQV(l, k):
'''k = number of categories
a value can take
Source: http://sjam.selcuk.edu.tr/sjam/article/view/291'''
IQV = (k / (k - 1)) * unalk_coeff(l)
return IQV
def IQV_var(l, k):
'''k = number of categories
a value can take
Source: https://www.youtube.com/watch?v=oQCqaS1ICwk'''
freq = Counter(l)
freqsum = 0
    for key, v in freq.items():  # must not shadow 'k', the category count
        freqsum += v
    p2sum = 0
    for key, v in freq.items():
        p2sum += ((v / freqsum) * 100)**2
IQV = (k * (100**2 - p2sum)) / (((100**2) * (k - 1)))
return IQV
def simpsons_d(l):
freq = Counter(l)
n = 0
for k, v in freq.items():
n += v
s = 0
for k, v in freq.items():
s += v * (v - 1)
    d = s / (n * (n - 1))  # Simpson's index divides by N(N-1), not N*N-1
return 1 - d
# TEST, Source: https://ww2.amstat.org/publications/jse/v15n2/kader.html
# print(unalk_coeff(['A', 'A', 'A', 'A', 'A', 'A', 'A', 'B', 'B', 'B']))
# print(unalk_coeff(['A', 'A', 'A', 'A', 'A', 'B', 'B', 'B', 'B', 'B']))
# print(unalk_coeff(['A', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'B']))
|
hexlism/css_platform
|
sleepyenv/lib/python2.7/site-packages/Flask_Admin-1.2.0-py2.7.egg/flask_admin/contrib/geoa/typefmt.py
|
Python
|
apache-2.0
| 988
| 0.004049
|
from flask_admin.contrib.sqla.typefmt import DEFAULT_FORMATTERS as BASE_FORMATTERS
import json
from jinja2 import Markup
from wtforms.widgets import html_params
from geoalchemy2.shape import to_shape
from geoalchemy2.elements import WKBElement
from sqlalchemy import func
from flask import current_app
def geom_formatter(view, value):
params = html_params(**{
"data-role": "leafle
|
t",
"disabled": "disabled",
"data-width": 100,
"data-height": 70,
"data-geometry-type": to_shape(value).geom_type,
"data-zoom": 15,
})
    if value.srid == -1:  # 'is -1' compared object identity, not value
        geojson = current_app.extensions['sqlalchemy'].db.session.scalar(func.ST_AsGeoJson(value))
    else:
        geojson = current_app.extensions['sqlalchemy'].db.session.scalar(func.ST_AsGeoJson(value.ST_Transform(4326)))
return Markup('<textarea %s>%s</textarea>' % (params, geojson))
DEFAULT_FORMATTERS = BASE_FORMATTERS.copy()
DEFAULT_FORMATTERS[WKBElement] = geom_formatter
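# Illustrative wiring into a flask-admin view (class name is an assumption):
#   from flask_admin.contrib.geoa import ModelView
#   class PlaceAdmin(ModelView):
#       column_type_formatters = DEFAULT_FORMATTERS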
|
fergalmoran/Chrome2Kindle
|
server/reportlab/graphics/renderSVG.py
|
Python
|
mit
| 30,282
| 0.009412
|
__doc__="""An experimental SVG renderer for the ReportLab graphics framework.
This will create SVG code from the ReportLab Graphics API (RLG).
To read existing SVG code and convert it into ReportLab graphics
objects download the svglib module here:
http://python.net/~gherman/#svglib
"""
import math, types, sys, os
from operator import getitem
from reportlab.pdfbase.pdfmetrics import stringWidth # for font info
from reportlab.lib.utils import fp_str
from reportlab.lib.colors import black
from reportlab.graphics.renderbase import StateTracker, getStateDelta, Renderer, renderScaledDrawing
from reportlab.graphics.shapes import STATE_DEFAULTS, Path, UserNode
from reportlab.graphics.shapes import * # (only for test0)
from reportlab import rl_config
from reportlab.lib.utils import getStringIO
from xml.dom import getDOMImplementation
### some constants ###
sin = math.sin
cos = math.cos
pi = math.pi
LINE_STYLES = 'stroke-width stroke-linecap stroke fill stroke-dasharray'
TEXT_STYLES = 'font-family font-size'
### top-level user function ###
def drawToString(d, showBoundary=rl_config.showBoundary):
"Returns a SVG as a string in memory, without touching the disk"
s = getStringIO()
drawToFile(d, s, showBoundary=showBoundary)
return s.getvalue()
def drawToFile(d, fn, showBoundary=rl_config.showBoundary):
d = renderScaledDrawing(d)
c = SVGCanvas((d.width, d.height))
draw(d, c, 0, 0, showBoundary=showBoundary)
c.save(fn)
def draw(drawing, canvas, x=0, y=0, showBoundary=rl_config.showBoundary):
"""As it says."""
r = _SVGRenderer()
r.draw(renderScaledDrawing(drawing), canvas, x, y, showBoundary=showBoundary)
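# Usage sketch relying on the wildcard shapes import above (output path is
# an assumption):
#   d = Drawing(100, 100)
#   d.add(Rect(10, 10, 80, 80, fillColor=black))
#   svg_code = drawToString(d)
#   drawToFile(d, 'out.svg')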
### helper functions ###
def _pointsFromList(L):
"""
given a list of coordinates [x0, y0, x1, y1....]
    produce a list of points [(x0,y0), (x1,y1), ...]
"""
P=[]
for i in range(0,len(L), 2):
P.append((L[i], L[i+1]))
return P
def transformNode(doc, newTag, node=None, **attrDict):
"""Transform a DOM node into new node and copy selected attributes.
Creates a new DOM node with tag name 'newTag' for document 'doc'
and copies selected attributes from an existing 'node' as provided
in 'attrDict'. The source 'node' can be None. Attribute values will
be converted to strings.
E.g.
n = transformNode(doc, "node1", x="0", y="1")
-> DOM node for <node1 x="0" y="1"/>
n = transformNode(doc, "node1", x=0, y=1+1)
-> DOM node for <node1 x="0" y="2"/>
n = transformNode(doc, "node1", node0, x="x0", y="x0", zoo=bar())
-> DOM node for <node1 x="[node0.x0]" y="[node0.y0]" zoo="[bar()]"/>
"""
newNode = doc.createElement(newTag)
    for newAttr, attr in attrDict.items():
sattr = str(attr)
if not node:
newNode.setAttribute(newAttr, sattr)
else:
attrVal = node.getAttribute(sattr)
newNode.setAttribute(newAttr, attrVal or sattr)
return newNode
### classes ###
class SVGCanvas:
def __init__(self, size=(300,300)):
self.verbose = 0
self.width, self.height = self.size = size
|
# self.height = size[1]
self.code = []
self.style = {}
self.path = ''
self._strokeColor = self._fillColor = self._lineWidth = \
self._font = self._fontSize = self._lineCap = \
self._lineJoin = self._color = None
implementation = getDOMImplementation('minidom')
#Based on official example here http://www.w3.org/TR/SVG10/linking.html want:
#<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN"
# "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
#Thus,
#doctype = implementation.createDocumentType("svg",
# "-//W3C//DTD SVG 20010904//EN",
# "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd")
#
#However, putting that example through http://validator.w3.org/ recommends:
#<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN"
# "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
#So we'll use that for our SVG 1.0 output.
doctype = implementation.createDocumentType("svg",
"-//W3C//DTD SVG 1.0//EN",
"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd")
self.doc = implementation.createDocument(None,"svg",doctype)
self.svg = self.doc.documentElement
self.svg.setAttribute("width", str(size[0]))
self.svg.setAttribute("height", str(self.height))
#these suggested by Tim Roberts, as updated by [email protected]
self.svg.setAttribute("xmlns", "http://www.w3.org/2000/svg")
self.svg.setAttribute("xmlns:xlink", "http://www.w3.org/1999/xlink")
self.svg.setAttribute("version", "1.0")
#self.svg.setAttribute("baseProfile", "full") #disliked in V 1.0
title = self.doc.createElement('title')
text = self.doc.createTextNode('...')
title.appendChild(text)
self.svg.appendChild(title)
desc = self.doc.createElement('desc')
text = self.doc.createTextNode('...')
desc.appendChild(text)
self.svg.appendChild(desc)
self.setFont(STATE_DEFAULTS['fontName'], STATE_DEFAULTS['fontSize'])
self.setStrokeColor(STATE_DEFAULTS['strokeColor'])
self.setLineCap(2)
self.setLineJoin(0)
self.setLineWidth(1)
# Add a rectangular clipping path identical to view area.
clipPath = transformNode(self.doc, "clipPath", id="clip")
clipRect = transformNode(self.doc, "rect", x=0, y=0,
width=self.width, height=self.height)
clipPath.appendChild(clipRect)
self.svg.appendChild(clipPath)
self.groupTree = transformNode(self.doc, "g",
id="group",
transform="scale(1,-1) translate(0,-%d)" % self.height,
style="clip-path: url(#clip)")
self.svg.appendChild(self.groupTree)
self.currGroup = self.groupTree
def save(self, fn=None):
if isinstance(fn,str):
f = open(fn, 'w')
else:
f = fn
f.write(self.doc.toprettyxml(indent=" "))
if f is not fn:
f.close()
### helpers ###
def NOTUSED_stringWidth(self, s, font=None, fontSize=None):
"""Return the logical width of the string if it were drawn
in the current font (defaults to self.font).
"""
font = font or self._font
fontSize = fontSize or self._fontSize
return stringWidth(s, font, fontSize)
def _formatStyle(self, include=''):
include = include.split()
keys = self.style.keys()
if include:
#2.1-safe version of the line below follows:
#keys = filter(lambda k: k in include, keys)
tmp = []
for word in keys:
if word in include:
tmp.append(word)
keys = tmp
items = []
for k in keys:
items.append((k, self.style[k]))
items = map(lambda i: "%s: %s"%(i[0], i[1]), items)
return '; '.join(items) + ';'
def _escape(self, s):
"""
return a copy of string s with special characters in postscript strings
escaped with backslashes.
Have not handled characters that are converted normally in python strings
i.e. \\n -> newline
"""
return s.replace(chr(0x5C), r'\\' ).replace('(', '\(' ).replace(')', '\)')
def _genArcCode(self, x1, y1, x2, y2, startAng, extent):
"""Calculate the path for an arc inscribed in rectangle defined
by (x1,y1),(x2,y2)."""
return
#calculate semi-minor and semi-major axes of ellipse
xScale = abs((x2-x1)/2.0)
yScale = abs((y2-y1)/2.0)
#calculate centre of ellipse
x, y = (x1+x2)/2.0, (y1+y2)/2.0
codeline = 'matrix currentmatrix %s %s translate %s %s scale 0 0 1 %s %s %s setmatrix'
if extent >= 0:
arc='arc'
else:
arc='arcn'
data = (x,y, xScale, y
|
sirk390/coinpy
|
coinpy-lib/src/coinpy/lib/serialization/structures/s11n_varint.py
|
Python
|
lgpl-3.0
| 1,579
| 0.005066
|
import struct
from coinpy.lib.serialization.common.serializer import Serializer
from coinpy.lib.serialization.exceptions import MissingDataException
|
class VarintSerializer(Serializer):
def __init__(self, desc=""):
self.desc = desc
def serialize(self, value):
if (value < 0xfd):
return (struct.pack("<B", value))
if (value <= 0xffff):
return ("\xfd" + struct.pack("<H", value))
if (value <= 0xffffffff):
return ("\xfe" + struct.pack("<I", value))
return ("\xff" + struct.pack("<Q", value))
def get_size(self, value):
if (value < 0xfd):
return (1)
if (value <= 0xffff):
return (3)
if (value <= 0xffffffff):
return (5)
return (9)
def deserialize(self, data, cursor=0):
if (len(data) - cursor < 1):
raise MissingDataException("Decoding error: not enough data for varint")
prefix = struct.unpack_from("<B", data, cursor)[0]
cursor += 1
if (prefix < 0xFD):
return (prefix, cursor)
if (len(data) - cursor < {0xFD: 2, 0xFE: 4, 0xFF: 8}[prefix]):
raise MissingDataException("Decoding error: not enough data for varint of type : %d" % (prefix))
if (prefix == 0xFD):
return (struct.unpack_from("<H", data, cursor)[0], cursor + 2)
if (prefix == 0xFE):
return (struct.unpack_from("<I", data, cursor)[0], cursor + 4)
return (struct.unpack_from("<Q", data, cursor)[0], cursor + 8)
|
MSFTOSSMgmt/WPSDSCLinux
|
Providers/Scripts/2.4x-2.5x/Scripts/nxOMSAgentNPMConfig.py
|
Python
|
mit
| 15,607
| 0.006023
|
#!/usr/bin/env python
# ===================================
# Copyright (c) Microsoft Corporation. All rights reserved.
# See license.txt for license information.
# ===================================
import socket
import os
import sys
import imp
import md5
import sha
import codecs
import base64
import platform
import shutil
protocol = imp.load_source('protocol', '../protocol.py')
nxDSCLog = imp.load_source('nxDSCLog', '../nxDSCLog.py')
LG = nxDSCLog.DSCLog
# Paths
CONFIG_PATH = '/etc/opt/microsoft/omsagent/conf/'
SERVER_ADDRESS = '/var/opt/microsoft/omsagent/npm_state/npmdagent.sock'
DEST_FILE_NAME = 'npmd_agent_config.xml'
PLUGIN_PATH = '/opt/microsoft/omsagent/plugin/'
PLUGIN_CONF_PATH = '/etc/opt/microsoft/omsagent/conf/omsagent.d/'
RESOURCE_MODULE_PATH = '/opt/microsoft/omsconfig/modules/nxOMSAgentNPMConfig/DSCResources/MSFT_nxOMSAgentNPMConfigResource/NPM/'
DSC_RESOURCE_VERSION_PATH = '/opt/microsoft/omsconfig/modules/nxOMSAgentNPMConfig/VERSION'
AGENT_RESOURCE_VERSION_PATH = '/var/opt/microsoft/omsagent/npm_state/npm_version'
DSC_X64_AGENT_PATH = 'Agent/64/'
DSC_X86_AGENT_PATH = 'Agent/32/'
DSC_PLUGIN_PATH = 'Plugin/plugin/'
DSC_PLUGIN_CONF_PATH = 'Plugin/conf/'
AGENT_BINARY_PATH = '/opt/microsoft/omsagent/plugin/'
AGENT_SCRIPT_PATH = '/opt/microsoft/omsconfig/Scripts/NPMAgentBinaryCap.sh'
# Constants
X64 = '64bit'
AGENT_BINARY_NAME = 'npmd_agent'
def enum(**enums):
return type('Enum', (), enums)
Commands = enum(LogNPM = 'ErrorLog', StartNPM = 'StartNPM', StopNPM = 'StopNPM', Config = 'Config', Purge = 'Purge')
LogType = enum(Error = 'ERROR', Info = 'INFO')
class INPMDiagnosticLog:
def log(self):
pass
class NPMDiagnosticLogUtil(INPMDiagnosticLog):
def log(self, logType, logString):
# Create a UDS socket
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
try:
try:
# Connect the socket to the port where the server is listening
sock.connect(SERVER_ADDRESS)
# Send data
message = Commands.LogNPM + ':' + '[' + logType + ']' + logString
sock.sendall(message)
except Exception, msg:
LG().Log(LogType.Error, str(msg))
finally:
sock.close()
LOG_ACTION = NPMDiagnosticLogUtil()
class IOMSAgent:
def restart_oms_agent(self):
pass
class OMSAgentUtil(IOMSAgent):
def restart_oms_agent(self):
if os.system('sudo /opt/microsoft/omsagent/bin/service_control restart') == 0:
return True
else:
LOG_ACTION.log(LogType.Error, 'Error restarting omsagent.')
return False
class INPMAgent:
def binary_setcap(self):
pass
class NPMAgentUtil(IOMSAgent):
def binary_setcap(self, binaryPath):
if os.path.exists(AGENT_SCRIPT_PATH) and os.system('sudo %s %s' %(AGENT_SCRIPT_PATH, binaryPath)) == 0:
return True
else:
LOG_ACTION.log(LogType.Error, 'Error setting capabilities to npmd agent binary.')
return False
global show_mof
show_mof = False
OMS_ACTION = OMSAgentUtil()
NPM_ACTION = NPMAgentUtil()
# [key] string ConfigType;
# [write] string ConfigID;
# [write] string Contents;
# [write,ValueMap{"Present", "Absent"},Values{"Present", "Absent"}] string Ensure;
# [write] string ContentChecksum;
def init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
if ConfigType is not None and ConfigType != '':
ConfigType = ConfigType.encode('ascii', 'ignore')
else:
ConfigType = 'UpdatedAgentConfig'
if ConfigID is not None:
ConfigID = ConfigID.encode('ascii', 'ignore')
else:
|
ConfigID = ''
if Contents is not None:
Contents = base64.b64decode(Contents)#Contents.encode('ascii', 'ignore')
else:
Contents = ''
if Ensure is not None and Ensure != '':
Ensure = Ensure.encode('ascii', 'ignore')
else:
Ensure = 'Present'
if ContentChecksum is not None:
ContentChecksum = ContentChecksum.encode('ascii', 'ignore')
else:
ContentChecksum = ''
    return ConfigType, ConfigID, Contents, Ensure, ContentChecksum
def Set_Marshall(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
recvdContentChecksum = md5.md5(Contents).hexdigest().upper()
if recvdContentChecksum != ContentChecksum:
LOG_ACTION.log(LogType.Info, 'Content received did not match checksum with md5, trying with sha1')
# validate with sha1
recvdContentChecksum = sha.sha(Contents).hexdigest().upper()
if recvdContentChecksum != ContentChecksum:
# data is corrupt do not proceed further
LOG_ACTION.log(LogType.Error, 'Content received did not match checksum with sha1, exiting Set')
return [-1]
(ConfigType, ConfigID, Contents, Ensure, ContentChecksum) = init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = Set(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
return retval
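# Note: the checksum above is computed over the still-base64-encoded Contents
# (decoding happens later in init_vars). Illustrative sender-side pairing:
#   payload = base64.b64encode(config_xml)
#   checksum = md5.md5(payload).hexdigest().upper()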
def Test_Marshall(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
recvdContentChecksum = md5.md5(Contents).hexdigest().upper()
if recvdContentChecksum != ContentChecksum:
LOG_ACTION.log(LogType.Info, 'Content received did not match checksum with md5, trying with sha1')
# validate with sha1
recvdContentChecksum = sha.sha(Contents).hexdigest().upper()
if recvdContentChecksum != ContentChecksum:
# data is corrupt do not proceed further
LOG_ACTION.log(LogType.Error, 'Content received did not match checksum with sha1, exiting Set')
return [0]
(ConfigType, ConfigID, Contents, Ensure, ContentChecksum) = init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = Test(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
return retval
def Get_Marshall(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
arg_names = list(locals().keys())
(ConfigType, ConfigID, Contents, Ensure, ContentChecksum) = init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = 0
retval = Get(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
ConfigType = protocol.MI_String(ConfigType)
ConfigID = protocol.MI_String(ConfigID)
Ensure = protocol.MI_String(Ensure)
Contents = protocol.MI_String(Contents)
ContentChecksum = protocol.MI_String(ContentChecksum)
retd = {}
ld = locals()
for k in arg_names:
retd[k] = ld[k]
return retval, retd
############################################################
# Begin user defined DSC functions
############################################################
def SetShowMof(a):
global show_mof
show_mof = a
def ShowMof(op, ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
if not show_mof:
return
mof = ''
mof += op + ' nxOMSAgentNPMConfig MyNPMConfig \n'
mof += '{\n'
mof += ' ConfigType = "' + ConfigType + '"\n'
mof += ' ConfigID = "' + ConfigID + '"\n'
mof += ' Contents = "' + Contents + '"\n'
mof += ' Ensure = "' + Ensure + '"\n'
mof += ' ContentChecksum = "' + ContentChecksum + '"\n'
mof += '}\n'
f = open('./test_mofs.log', 'a')
    f.write(mof)  # 'Print' was undefined in this module
LG().Log(LogType.Info, mof)
f.close()
def Set(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
ShowMof('SET', ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = 0
if ConfigType != 'UpdatedAgentConfig':
LOG_ACTION.log(LogType.Error, 'Config type did not match, exiting set')
return [-1]
if Ensure == 'Absent':
if os.path.exists(AGENT_RESOURCE_VERSION_PATH):
LG().Log(LogType.Info, 'Ensure is absent, but resource is present, purging')
success = PurgeSolution()
if not success:
retval = -1
return [retval]
if TestConfigUpdate(Contents) != 0:
retval = SetConfigUpdate(Contents)
version = TestResourceVersion()
if version != 0:
retval = SetFilesUpdate(version)
return [retval]
def Test(Con
|
nisavid/spruce-project
|
doc/conf.tmpl.py
|
Python
|
lgpl-3.0
| 11,930
| 0.006287
|
# -*- coding: utf-8 -*-
"""API documentation build configuration file.
This file is :func:`execfile`\\ d with the current directory set to its
containing dir.
Note that not all possible configuration values are present in this
autogenerated file.
All configuration values have a default; values that are commented out
serve to show the default.
"""
from datetime import date as _date
import re as _re
from textwrap import dedent as _dedent
import docutils.parsers.rst as _rst
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.doctest',
'sphinx.ext.ifconfig',
'sphinx.ext.intersphinx',
'sphinx.ext.mathjax',
'sphinx.ext.todo',
'sphinx.ext.viewcode',
'sphinxcontrib.cheeseshop',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u''
author = u''
copyright = u'{} {}'.format(_date.today().year, author)
description = u''
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = ''
# The full version, including alpha/beta/rc tags.
release = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
rst_prolog = \
'''\
.. role:: bash(code)
:language: bash
.. role:: python(code)
:language: python
'''
rst_prolog = _dedent(rst_prolog)
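# Editor's sketch: with the prolog above prepended to every source file,
# documents can use the two inline-code roles directly, e.g.
#   :bash:`ls -la`   or   :python:`sorted(data, key=len)`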
nitpicky = True
# FIXME: encapsulate this in a Sphinx extension. make ``rfc_uri_tmpl`` a
# Sphinx config setting
rfc_uri_tmpl = 'https://tools.ietf.org/html/rfc{}.html'
def rfc_role(role, rawtext, text, lineno, inliner, options={}, content=[]):
_rst.roles.set_classes(options)
rfcrefpattern = r'(?:(?P<displaytext>[^<]*)'\
r' <)?(?P<refname>[^>]*)(?(displaytext)>|)'
match = _re.match(rfcrefpattern, _rst.roles.utils.unescape(text))
if match:
rfcnum, anchorsep, anchor = match.group('refname').partition('#')
try:
rfcnum = int(rfcnum)
if rfcnum <= 0:
raise ValueError
except ValueError:
message = \
inliner\
.reporter\
.error('invalid RFC number {!r}; expected a positive integer'
.format(rfcnum),
line=lineno)
problem = inliner.problematic(rawtext, rawtext, message)
return [problem], [message]
uri = rfc_uri_tmpl.format(rfcnum)
if anchor:
uri += anchorsep + anchor
displaytext = match.group('displaytext')
if displaytext:
refnode = _rst.nodes.reference(rawtext, displaytext, refuri=uri,
**options)
else:
displaytext = 'RFC {}'.format(rfcnum)
if anchor:
displaytext += ' ' + anchor.replace('-', ' ')
strongnode = _rst.nodes.strong(rawtext, displaytext)
refnode = _rst.nodes.reference('', '', strongnode, refuri=uri,
**options)
return [refnode], []
else:
message = \
inliner\
.reporter\
.error('invalid RFC reference {!r}'.format(text), line=lineno)
problem = inliner.problematic(rawtext, rawtext, message)
return [problem], [message]
_rst.roles.register_local_role('rfc', rfc_role)
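# Editor's sketch of what the role accepts once registered (the RFC numbers
# are illustrative only):
#   :rfc:`2616`                  -> bold "RFC 2616" linking to the rfc_uri_tmpl URL
#   :rfc:`2616#section-5`        -> bold "RFC 2616 section 5", anchored link
#   :rfc:`the HTTP spec <2616>`  -> plain reference with custom display text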
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder
|
honor6-dev/android_kernel_huawei_h60
|
drivers/vendor/hisi/build/scripts/obj_cmp_tools/vxworks_dassemble.py
|
Python
|
gpl-2.0
| 348
| 0.022989
|
import os
import sys
import string
filenames = os.listdir(os.getcwd())
for file in filenames:
    if os.path.splitext(file)[1] == ".o" or os.path.splitext(file)[1] == ".elf" :
print "objdumparm.exe -D "+file
os.system("C:/WindRiver/gnu/4.1.2-vxworks-6.8/x
|
86-win32/bin/objdumparm.exe -D "+file +" > " +file + ".txt")
os.system("pause")
|
drunken-pypers/cloudlynt
|
cloudlynt/wsgi.py
|
Python
|
mit
| 1,140
| 0.000877
|
"""
WSGI config for cloudlynt project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cloudlynt.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
andrewgailey/robogen
|
robogen/rgkit/backup bots/stupid272.py
|
Python
|
unlicense
| 6,333
| 0.011211
|
# stupid 2.7.2 by peterm, patch by Smack
# http://robotgame.org/viewrobot/5715
import random
import math
import rg
def around(l):
return rg.locs_around(l)
def around2(l):
    return [(l[0]+2, l[1]), (l[0]+1, l[1]+1), (l[0], l[1]+2), (l[0]-1, l[1]+1),
            (l[0]-2, l[1]), (l[0]-1, l[1]-1), (l[0], l[1]-2), (l[0]+1, l[1]-1)]
def diag(l1, l2):
    if rg.wdist(l1, l2) == 2:
if abs(l1[0] - l2[0]) == 1:
return True
return False
def infront(l1, l2):
if rg.wdist(l1, l2) == 2:
if diag(l1, l2):
return False
else:
return True
return False
def mid(l1, l2):
return (int((l1[0]+l2[0]) / 2), int((l1[1]+l2[1]) / 2))
def sign(x):
if x > 0:
return 1
elif x == 0:
return 0
else:
return -1
class Robot:
def act(self, game):
robots = game['robots']
##print self.location, "starts thinking"
def isenemy(l):
if robots.get(l) != None:
if robots[l]['player_id'] != self.player_id:
return True
return False
def isteammate(l):
if robots.get(l) != None:
if robots[l]['player_id'] == self.player_id:
return True
return False
def isempty(l):
if ('normal' in rg.loc_types(l)) and not ('obstacle' in rg.loc_types(l)):
if robots.get(l) == None:
return True
return False
def isspawn(l):
if 'spawn' in rg.loc_types(l):
return True
return False
# scan the area around
enemies = []
for loc in around(self.location):
if isenemy(loc):
enemies.append(loc)
moveable = []
moveable_safe = []
for loc in around(self.location):
if isempty(loc):
moveable.append(loc)
if isempty(loc) and not isspawn(loc):
moveable_safe.append(loc)
def guard():
return ['guard']
def suicide():
return ['suicide']
def canflee():
return len(moveable) > 0
def flee():
if len(moveable_safe) > 0:
return ['move', random.choice(moveable_safe)]
if len(moveable) > 0:
return ['move', random.choice(moveable)]
return guard()
def canattack():
return len(enemies) > 0
def attack():
r = enemies[0]
for loc in enemies:
if robots[loc]['hp'] > robots[r]['hp']:
r = loc
return ['attack', r]
def panic():
if canflee():
return flee()
elif canattack():
return attack()
else:
return guard()
def imove(to):
f = self.location
d = (to[0]-f[0], to[1]-f[1])
di = (sign(d[0]), sign(d[1]))
good = []
if di[0]*di[1] != 0:
good.append((di[0], 0))
good.append((0, di[1]))
else:
good.append(di)
for dmove in good:
loc = (f[0]+dmove[0], f[1]+dmove[1])
if isempty(loc):
return ['move', loc]
return flee()
##print "There are", len(enemies), "enemies close"
if len(enemies) > 1:
# we gonna die next turn if we don't move?
if self.hp <= len(enemies)*10:
# it's ok to suicide if you take someone else with you
for loc in enemies:
if robots[loc]['hp'] <= 15:
##print "Suicide!"
pass#return suicide()
##print "Too many enemies around, panic!"
return panic()
elif len(enemies) == 1:
if self.hp <= 10:
if robots[enemies[0]]['hp'] > 15:
##print "Enemy will kill me, panic!"
return panic()
elif robots[enemies[0]]['hp'] <= 10:
##print "I will kill enemy, attack!"
return attack()
#else:
# # might tweak this
# ##print "I'm too low on health, suicide!"
# return suicide()
else:
if robots[enemies[0]]['hp'] <= 10:
if self.hp <= 15:
# avoid suiciders
##print "Avoiding suicider, panic!"
return panic()
else:
##print "Attack!"
return attack()
# if we're at spawn, get out
if isspawn(self.location):
##print "I'm on spawn, panic!"
return panic()
closehelp = None
prediction = None
# are there enemies in 2 squares?
for loc in around2(self.location):
if isenemy(loc):
##print "Enemy in 2 squares:", loc
# try to help teammates
for loc2 in around(loc):
if isteammate(loc2):
##print "And a teammate close to him:", loc2
closehelp = imove(loc)
# predict and attack
if infront(loc, self.location):
prediction = ['attack', mid(loc, self.location)]
elif rg.wdist(rg.toward(loc, rg.CENTER_POINT), self.location) == 1:
prediction = ['attack', rg.toward(loc, rg.CENTER_POINT)]
else:
prediction = ['attack', (self.location[0], loc[1])]
if closehelp != None:
##print "Help teammate fight:", closehelp
return closehelp
if prediction != None:
##print "Predict:", prediction
return prediction
# move randomly
##print "Can't decide, panic!"
return panic()
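# Editor's sketch of the decision order implemented by act() above:
#   1. more than one adjacent enemy -> panic() (flee if possible, else attack/guard)
#   2. exactly one adjacent enemy   -> attack or flee, based on hp thresholds
#   3. standing on a spawn square   -> panic()
#   4. enemy two squares away       -> assist a teammate or attack the predicted square
#   5. otherwise                    -> panic() (random safe move)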
|
charles-g-young/Table2NetCDF
|
gov/noaa/gmd/table_2_netcdf/TableDataDesc.py
|
Python
|
apache-2.0
| 11,062
| 0.013741
|
'''
Given an XML file that describes a text file containing a header and a table, parse
the XML into it's descriptive elements.
Created on Feb 27, 2017
@author: cyoung
'''
import xml.etree.ElementTree as ElementTree
from gov.noaa.gmd.table_2_netcdf.Util import Util
class TableDataDesc:
#XML element names
ELEMENT_NAME="name"
ELEMENT_DATA_TYPE="data-type"
ELEMENT_GLOBAL_ATTRIBUTE="global-attribute"
ELEMENT_GLOBAL_ATTRIBUTE_STRATEGY="global-attribute-strategy"
ELEMENT_HEADER_STRATEGY="header-strategy"
ELEMENT_CLASS_NAME="class-name"
ELEMENT_VARIABLE="variable"
ELEMENT_VARIABLE_ATTRIBUTE="variable-attribute"
ELEMENT_VARIABLE_ATTRIBUTE_STRATEGY="variable-attribute-strategy"
ELEMENT_VARIABLE_NAME="variable-name"
ELEMENT_VARIABLE_STRATEGY="variable-strategy"
def __init__ (self, xmlFile):
self.xmlFile=xmlFile
self.tree = ElementTree.parse(xmlFile)
def getAllColumnDesc (self):
pass
def getAllGlobalAttributeDesc(self):
root = self.tree.getroot()
elements=root.findall(".//"+self.ELEMENT_GLOBAL_ATTRIBUTE)
gads=[]
for e in elements:
gads.append(self.__getGlobalAttributeDesc(e))
return gads
def getAllVariableAttributeDesc(self):
root = self.tree.getroot()
elements=root.findall(".//"+self.ELEMENT_VARIABLE_ATTRIBUTE)
gads=[]
for e in elements:
gads.append(self.__getVariableAttributeDesc(e))
return gads
def getAllVariableDesc(self):
root = self.tree.getroot()
elements=root.findall(".//"+self.ELEMENT_VARIABLE)
gads=[]
for e in elements:
gads.append(self.__getVariableDesc(e))
return gads
def getColumnDesc(self, columnName):
pass
def getGlobalAttributeDesc(self, attributeName):
element=self.__getGlobalAttributeElement(attributeName)
return self.__getGlobalAttributeDesc(element)
def getGlobalAttributeStrategyDesc(self, attributeName):
element=self.__getGlobalAttributeStrategyElement(attributeName)
className=element.find(self.ELEMENT_CLASS_NAME).text
return GlobalAttributeStrategyDesc(className)
def getHeaderStrategyDesc(self):
element=self.__getHeaderStrategyElement()
className=element.find(self.ELEMENT_CLASS_NAME).text
return HeaderStrategyDesc(className)
def getVariableAttributeDesc(self, variableName):
pass
def getVariableAttributeStrategyDesc(self, variableName):
pass
def getVariableDesc(self, variableName):
element=self.__getVariableElement()
name=element.find(self.ELEMENT_NAME).text
child=element.find(self.ELEMENT_VARIABLE_STRATEGY)
className=child.find(self.ELEMENT_CLASS_NAME).text
return VariableDesc(name, className)
def __getGlobalAttributeDesc(self, element):
name=element.find(self.ELEMENT_NAME).text
dataType=element.find(self.ELEMENT_DATA_TYPE).text
child=element.find(self.ELEMENT_GLOBAL_ATTRIBUTE_STRATEGY)
className=child.find(self.ELEMENT_CLASS_NAME).text
strategyDesc=GlobalAttributeStrategyDesc(className)
return GlobalAttributeDesc(name, dataType, strategyDesc)
def __getGlobalAttributeElement(self, attributeName):
root = self.tree.getroot()
elements=root.findall(".//"+self.ELEMENT_GLOBAL_ATTRIBUTE)
element=None
for e in elements:
if e.find(self.ELEMENT_NAME).text == attributeName:
element=e
break
if element is None:
raise Exception(self.ELEMENT_GLOBAL_ATTRIBUTE+" element with name '"+attributeName+
"' not found in file '"+self.xmlFile+"'.")
return element
def __getGlobalAttributeStrategyElement(self, attributeName):
globalAttributeElement=self.__getGlobalAttributeElement(attributeName)
element=globalAttributeElement.find(self.ELEMENT_GLOBAL_ATTRIBUTE_STRATEGY)
if element is None:
raise Exception(self.ELEMENT_GLOBAL_ATTRIBUTE_STRATEGY+" element with name '"+attributeName+
"' not found in file '"+self.xmlFile+"'.")
return element
def __getVariableAttributeDesc(self, element):
name=element.find(self.ELEMENT_VARIABLE_NAME).text
dataType=element.find(self.ELEMENT_DATA_TYPE).text
child=element.find(self.ELEMENT_VARIABLE_ATTRIBUTE_STRATEGY)
className=child.find(self.ELEMENT_CLASS_NAME).text
strategyDesc=VariableAttributeStrategyDesc(className)
return VariableAttributeDesc(name, dataType, "attributes", strategyDesc)
def __getVariableDesc(self, element):
name=element.find(self.ELEMENT_NAME).text
child=element.find(self.ELEMENT_VARIABLE_STRATEGY)
className=child.find(self.ELEMENT_CLASS_NAME).text
strategyDesc=VariableStrategyDesc(className)
return VariableDesc(name, strategyDesc)
def __getHeaderStrategyElement(self):
root = self.tree.getroot()
elements=root.findall(".//"+self.ELEMENT_HEADER_STRATEGY)
if len(elements) == 0:
raise Exception(self.ELEMENT_HEADER_STRATEGY+" element "+
"' not found in file '"+self.xmlFile+"'.")
return elements[0]
def __eq__(self, other):
if self.xmlFile != other.xmlFile:
return False
return True
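# Editor's sketch (hypothetical file name; the XML layout is inferred from the
# element-name constants above, not taken from project docs):
#   desc = TableDataDesc("table_desc.xml")
#   for gad in desc.getAllGlobalAttributeDesc():
#       print(gad.getAttributeName(), gad.getAttributeType())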
class ColumnDesc:
def __init__ (self, columnName, index, dataType):
self.columnName=columnName
self.index=index
self.dataType=dataType
def getColumnName(self):
return self.columnName
def getDataType(self):
return self.dataType
def getIndex(self):
return self.index
def __eq__(self, other):
        if self.columnName != other.columnName:
return False
if self.index != other.index:
return False
if self.dataType != other.dataType:
return False
return True
class GlobalAttributeDesc:
    def __init__ (self, attributeName, attributeType, globalAttributeStrategyDesc):
self.attributeName=attributeName
self.attributeType=attributeType
self.globalAttributeStrategyDesc=globalAttributeStrategyDesc
def getAttributeName(self):
return self.attributeName
def getAttributeType(self):
return self.attributeType
def getGlobalAttributeStrategyDesc(self):
return self.globalAttributeStrategyDesc
def __eq__(self, other):
if self.attributeName != other.attributeName:
return False
if self.attributeType != other.attributeType:
return False
if self.globalAttributeStrategyDesc != other.globalAttributeStrategyDesc:
return False
return True
#A base class for strategy descriptions.
class StrategyDesc(object):
#Hold the name of the strategy class to be loaded.
def __init__ (self, strategyClassName):
self.strategyClassName=strategyClassName
def getStrategyClassName(self):
return self.strategyClassName
def __eq__(self, other):
if self.strategyClassName != other.strategyClassName:
return False
return True
class GlobalAttributeStrategyDesc(StrategyDesc):
def __init__ (self, strategyClassName):
super().__init__(strategyClassName)
def getStrategyClassName(self):
return self.strategyClassName
#Return the value parsed from the header of the given global attribute
def parse (self, attributeName, header):
#Instantiate the strategy class by name.
c=Util().getClass(self.strategyClassName)
return c.parse(attributeName, header)
class HeaderStrategyDesc(StrategyDesc):
def __init__ (self, strategyClassName):
super().__init__(strategyClassName)
def getStrategyClassName(self):
        return self.strategyClassName
|
sedders123/phial
|
phial/errors.py
|
Python
|
mit
| 286
| 0
|
"""phial's custom errors."""
class ArgumentValidationError(Exception):
    """Exception indicating argument validation has failed."""
pass
class ArgumentTypeValidationError(ArgumentValidationError):
    """Exception indicating argument type validation has failed."""
pass
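# Editor's sketch: callers would typically raise the narrower subclass, e.g.
#   raise ArgumentTypeValidationError("expected int for argument 'count'")
# and catch ArgumentValidationError to handle both cases.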
|
citrix-openstack/build-python-troveclient
|
troveclient/flavors.py
|
Python
|
apache-2.0
| 1,700
| 0
|
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from troveclient import base
class Flavor(base.Resource):
"""
A Flavor is an Instance type, specifying among other things, RAM size.
"""
def __repr__(self):
return "<Flavor: %s>" % self.name
class Flavors(base.ManagerWithFind):
"""
Manage :class:`Flavor` resources.
"""
resource_class = Flavor
def __repr__(self):
return "<Flavors Manager at %s>" % id(self)
def _list(self, url, response_key):
resp, body = self.api.client.get(url)
if not body:
raise Exception("Call to " + url + " did not return a body.")
return [self.resource_class(self, res) for res in body[response_key]]
def list(self):
"""
Get a list of all flavors.
:rtype: list of :class:`Flavor`.
"""
return self._list("/flavors", "flavors")
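    # Editor's sketch (assumes an authenticated troveclient whose .flavors
    # attribute is this manager; the flavor id is hypothetical):
    #   for flavor in client.flavors.list():
    #       print(flavor.name)
    #   flavor = client.flavors.get(1)   # uses get() defined below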
def get(self, flavor):
"""
        Get a specific flavor.
:rtype: :class:`Flavor`
"""
        return self._get("/flavors/%s" % base.getid(flavor),
                         "flavor")
|
bx5974/desktop-mirror
|
lib/advanced.py
|
Python
|
apache-2.0
| 35,589
| 0.001264
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import wx
import wx.lib.newevent
from threading import Thread, Lock
import signal
import logging
from argparse import ArgumentParser, SUPPRESS
from ConfigParser import ConfigParser
from subprocess import Popen
import subprocess as sb
# CoreEventHandler
import socket
import urllib2
import json
import re
# local libraries
from common import APPNAME
from common import DEFAULT_PORT
from log import LoggingConfiguration
from command import Command
from crossplatform import CrossPlatform
from avahiservice import AvahiService
from streamserver import StreamServer
from streamreceiver import StreamReceiver
from areachooser import FrmAreaChooser
from common import VERSION
SomeNewEvent, EVT_SOME_NEW_EVENT = wx.lib.newevent.NewEvent()
class UiAdvanced(wx.Frame):
def __init__(self, parent, title, core):
super(UiAdvanced, self).__init__(parent, title=title,
size=wx.DefaultSize,
style=wx.DEFAULT_FRAME_STYLE)
self._core = core
self._core.register_listener(self)
self._input = dict()
self.Bind(EVT_SOME_NEW_EVENT, self.handler)
self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)
# ~/Downloads/png2ico/png2ico icon.ico
# desktop-mirror-64.png desktop-mirror-32.png desktop-mirror-16.png
self.SetIcon(wx.Icon(CrossPlatform.get().share_path('icon.ico'),
wx.BITMAP_TYPE_ICO))
self.InitUI()
self.ConfigLoad()
self.OnClickFullScreen(None)
self.Centre()
self.Show()
def ConfigLoad(self):
#filepath = CrossPlatform.get().user_config_path('ui.ini')
#if not os.path.exists(filepath):
filepath = CrossPlatform.get().system_config_path()
logging.info('Loading config from ' + filepath)
config = ConfigParser()
config.read(filepath)
if not config.has_section('input'):
config.add_section('input')
else:
for w in self._input:
if config.has_option('input', w):
self._input[w].SetValue(config.get('input', w))
self.config = config
def ConfigSave(self):
config = self.config
for w in self._input:
config.set('input', w, self._input[w].GetValue())
filepath = CrossPlatform.get().user_config_path('ui.ini')
logging.info('Saving config to ' + filepath)
with open(filepath, 'w') as configfile:
config.write(configfile)
def OnAvahi(self, data):
hosts = self._core.hosts
unique = []
targets = self._core.targets
widget = self._input['address']
val = widget.GetValue()
widget.Clear()
#logging.debug('val: {}'.format(val))
#logging.debug('hosts: {}'.format(hosts))
for f in targets:
for service in targets[f]:
key = service['host']
if key in unique:
continue
unique.append(key)
t = {'host': service['host'],
'service': service['service'],
'port': service['port'],
'ip': hosts[service['host']][0]}
logging.debug('Adding one {}'.format(t))
                widget.Append('{} - {}:{}'.format(t['host'],
                                                  t['ip'],
                                                  t['port']))
widget.SetClientData(widget.GetCount() - 1, t)
# After appending, widget value will be cleared
widget.SetValue(val)
def OnSelection(self, data):
self._input['x'].SetValue(str(data[0]))
self._input['y'].SetValue(str(data[1]))
self._input['w'].SetValue(str(data[2]))
self._input['h'].SetValue(str(data[3]))
self._input_rb_area.SetLabel('Area ({}x{}+{}+{})'.format(
data[2],
data[3],
data[0],
data[1]))
def OnStreamServer(self, data):
#status_str = {StreamServer.S_STOPPED: 'Stopped',
# StreamServer.S_STARTING: 'Start...',
# StreamServer.S_STARTED: 'Started',
# StreamServer.S_STOPPING: 'Stop...'}
#self.statusbar.SetStatusText(status_str[data])
if StreamServer.S_STARTED != data:
return
ip = self._target['ip']
ports = (self._target['port'],)
service = self._target['service']
if service == 'auto':
ports = (8089, DEFAULT_PORT + 1)
for port in ports:
try:
self._core.playme(ip, port, service)
break
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)
logging.warn('{} {} {}'.format(exc_type,
fname[1],
exc_tb.tb_lineno))
else:
msg = ('Connection Error\n'
' - IP: {}\n'
' - Port: {}\n'
' - Service: {}').format(ip, ports, service)
wx.MessageBox(msg, APPNAME,
style=wx.OK | wx.CENTRE | wx.ICON_ERROR)
def OnStreamReceiver(self, data):
if data[0] != StreamReceiver.EVENT_ASK_TO_PLAY:
logging.warn('Unknown event: {}'.format(data))
return
dlg = wx.MessageDialog(self,
('Stream Request. Accept?'),
APPNAME,
wx.YES_NO | wx.NO_DEFAULT | wx.ICON_QUESTION)
if dlg.ShowModal() == wx.ID_YES:
if CrossPlatform.get().is_linux():
cmdline = ['ffplay', data[1]]
Popen(cmdline)
else:
startupinfo = sb.STARTUPINFO()
startupinfo.dwFlags |= sb.STARTF_USESHOWWINDOW
startupinfo.wShowWindow = 0
cmdline = ['ffplay', data[1] + ' live=1']
Popen(cmdline, startupinfo=startupinfo)
def handler(self, evt):
logging.debug('UI event {0}: {1}'.format(evt.attr1, evt.attr2))
dispatch = {'avahi': self.OnAvahi,
'selection': self.OnSelection,
'server': self.OnStreamServer,
'srx': self.OnStreamReceiver}
if evt.attr1 in dispatch:
dispatch[evt.attr1](evt.attr2)
def InitUI(self):
def titleBox(hide=True):
font = wx.SystemSettings_GetFont(wx.SYS_SYSTEM_FONT)
font.SetPointSize(16)
hbox = wx.BoxSizer(wx.HORIZONTAL)
text1 = wx.StaticText(panel, label="Desktop Mirror")
text1.SetFont(font)
hbox.Add(text1, flag=wx.TOP | wx.LEFT | wx.BOTTOM, border=15)
#hbox = wx.BoxSizer(wx.HORIZONTAL)
#line = wx.StaticLine(panel)
#hbox.Add(line, 1, flag=wx.EXPAND | wx.ALL, border=10)
#vbox.Add(hbox, 1, wx.ALL, 5)
if hide:
map(lambda w: w.Hide(),
[w.GetWindow() for w in hbox.GetChildren()
if w.GetWindow() is not None])
return hbox
def targetBox():
hbox = wx.BoxSizer(wx.HORIZONTAL)
#hbox.Add(wx.StaticText(panel, label="Target"),
# flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL, border=5)
cb = wx.ComboBox(panel, 500, "127.0.0.1",
style=wx.CB_DROPDOWN | wx.TE_PROCESS_ENTER
)
cb.SetMinSize((250, 0))
button1 = wx.Button(panel, label="Streaming")
hbox.Add(cb, 1, flag=wx.EXPAND | wx.ALL | wx.ALIGN_RIGHT,
border=0)
hbox.Add(button1, 0, flag=wx.EXPAND | wx.LEFT | wx.ALIGN_RIGHT,
border=5)
self._input['ad
|
miracle2k/stgit
|
stgit/stack.py
|
Python
|
gpl-2.0
| 40,808
| 0.007131
|
"""Basic quilt-like functionality
"""
__copyright__ = """
Copyright (C) 2005, Catalin Marinas <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License version 2 as
published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
import sys, os, re
from email.Utils import formatdate
from stgit.exception import *
from stgit.utils import *
from stgit.out import *
from stgit.run import *
from stgit import git, basedir, templates
from stgit.config import config
from shutil import copyfile
from stgit.lib import git as libgit, stackupgrade
# stack exception class
class StackException(StgException):
pass
class FilterUntil:
def __init__(self):
self.should_print = True
def __call__(self, x, until_test, prefix):
if until_test(x):
self.should_print = False
if self.should_print:
return x[0:len(prefix)] != prefix
return False
#
# Functions
#
__comment_prefix = 'STG:'
__patch_prefix = 'STG_PATCH:'
def __clean_comments(f):
"""Removes lines marked for status in a commit file
"""
f.seek(0)
# remove status-prefixed lines
lines = f.readlines()
patch_filter = FilterUntil()
    until_test = lambda t: t == (__patch_prefix + '\n')
lines = [l for l in lines if patch_filter(l, until_test, __comment_prefix)]
# remove empty lines at the end
while len(lines) != 0 and lines[-1] == '\n':
del lines[-1]
f.seek(0); f.truncate()
f.writelines(lines)
# TODO: move this out of the stgit.stack module, it is really for
# higher level commands to handle the user interaction
def edit_file(series, line, comment, show_patch = True):
fname = '.stgitmsg.txt'
tmpl = templates.get_template('patchdescr.tmpl')
f = file(fname, 'w+')
if line:
print >> f, line
elif tmpl:
print >> f, tmpl,
else:
print >> f
print >> f, __comment_prefix, comment
print >> f, __comment_prefix, \
'Lines prefixed with "%s" will be automatically removed.' \
% __comment_prefix
print >> f, __comment_prefix, \
'Trailing empty lines will be automatically removed.'
if show_patch:
print >> f, __patch_prefix
# series.get_patch(series.get_current()).get_top()
diff_str = git.diff(rev1 = series.get_patch(series.get_current()).get_bottom())
f.write(diff_str)
#Vim modeline must be near the end.
print >> f, __comment_prefix, 'vi: set textwidth=75 filetype=diff nobackup:'
f.close()
call_editor(fname)
f = file(fname, 'r+')
__clean_comments(f)
f.seek(0)
result = f.read()
f.close()
os.remove(fname)
return result
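# Editor's sketch of the stripping behaviour above: given an edited file
#
#   fix frobnicator
#   STG: lines like this are removed
#   STG_PATCH:
#   diff --git a/foo b/foo
#
# __clean_comments() keeps only "fix frobnicator"; STG:-prefixed lines and
# everything from the STG_PATCH: marker onwards are dropped.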
#
# Classes
#
class StgitObject:
"""An object with stgit-like properties stored as files in a directory
"""
def _set_dir(self, dir):
self.__dir = dir
def _dir(self):
return self.__dir
def create_empty_field(self, name):
create_empty_file(os.path.join(self.__dir, name))
def _get_field(self, name, multiline = False):
id_file = os.path.join(self.__dir, name)
if os.path.isfile(id_file):
line = read_string(id_file, multiline)
if line == '':
return None
else:
return line
else:
return None
def _set_field(self, name, value, multiline = False):
fname = os.path.join(self.__dir, name)
if value and value != '':
write_string(fname, value, multiline)
elif os.path.isfile(fname):
os.remove(fname)
class Patch(StgitObject):
"""Basic patch implementation
"""
def __init_refs(self):
self.__top_ref = self.__refs_base + '/' + self.__name
self.__log_ref = self.__top_ref + '.log'
def __init__(self, name, series_dir, refs_base):
self.__series_dir = series_dir
self.__name = name
self._set_dir(os.path.join(self.__series_dir, self.__name))
self.__refs_base = refs_base
self.__init_refs()
def create(self):
os.mkdir(self._dir())
def delete(self, keep_log = False):
if os.path.isdir(self._dir()):
for f in os.listdir(self._dir()):
os.remove(os.path.join(self._dir(), f))
os.rmdir(self._dir())
else:
out.warn('Patch directory "%s" does not exist' % self._dir())
try:
# the reference might not exist if the repository was corrupted
git.delete_ref(self.__top_ref)
except git.GitException, e:
out.warn(str(e))
if not keep_log and git.ref_exists(self.__log_ref):
git.delete_ref(self.__log_ref)
def get_name(self):
return self.__name
def rename(self, newname):
olddir = self._dir()
old_top_ref = self.__top_ref
old_log_ref = self.__log_ref
self.__name = newname
self._set_dir(os.path.join(self.__series_dir, self.__name))
self.__init_refs()
git.rename_ref(old_top_ref, self.__top_ref)
if git.ref_exists(old_log_ref):
git.rename_ref(old_log_ref, self.__log_ref)
os.rename(olddir, self._dir())
def __update_top_ref(self, ref):
git.set_ref(self.__top_ref, ref)
self._set_field('top', ref)
self._set_field('bottom', git.get_commit(ref).get_parent())
def __update_log_ref(self, ref):
git.set_ref(self.__log_ref, ref)
def get_old_bottom(self):
return git.get_commit(self.get_old_top()).get_parent()
def get_bottom(self):
return git.get_commit(self.get_top()).get_parent()
def get_old_top(self):
return self._get_field('top.old')
def get_top(self):
return git.rev_parse(self.__top_ref)
def set_top(self, value, backup = False):
if backup:
curr_top = self.get_top()
self._set_field('top.old', curr_top)
self._set_field('bottom.old', git.get_commit(curr_top).get_parent())
self.__update_top_ref(value)
def restore_old_boundaries(self):
top = self._get_field('top.old')
if top:
self.__update_top_ref(top)
return True
else:
return False
def get_description(self):
return self._get_field('description', True)
def set_description(self, line):
self._set_field('description', line, True)
def get_authname(self):
return self._get_field('authname')
def set_authname(self, name):
self._set_field('authname', name or git.author().name)
def get_authemail(self):
return self._get_field('authemail')
def set_authemail(self, email):
self._set_field('authemail', email or git.author().email)
def get_authdate(self):
date = self._get_field('authdate')
if not date:
return date
if re.match('[0-9]+\s+[+-][0-9]+', date):
# Unix time (seconds) + time zone
secs_tz = date.split()
date = formatdate(int(secs_tz[0]))[:-5] + secs_tz[1]
return date
def set_authdate(self, date):
self._set_field('authdate', date or git.author().date)
def get_commname(self):
return self._get_field('commname')
def set_commname(self, name):
self._set_field('commname', name or git.committer().name)
def get_commemail(self):
return self._get_field('commemail')
def set_commemail(self, email):
self._set_field('commemail', email or git.committer().email)
def get_log(self):
return self._get_field('log')
def set_log(self, value, backup = False):
self._set_fiel
|
tensorflow/tensorflow
|
tensorflow/python/training/session_manager.py
|
Python
|
apache-2.0
| 23,320
| 0.004417
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Training helper that checkpoints models and creates session."""
import time
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.distribute import distribution_strategy_context
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import checkpoint_management
from tensorflow.python.util.tf_export import tf_export
def _maybe_name(obj):
"""Returns object name if it has one, or a message otherwise.
  This is useful for names that appear in error messages.
Args:
obj: Object to get the name of.
Returns:
name, "None", or a "no name" message.
"""
if obj is None:
return "None"
elif hasattr(obj, "name"):
return obj.name
else:
return "<no name for %s>" % type(obj)
def _restore_checkpoint_and_maybe_run_saved_model_initializers(
sess, saver, path):
"""Restores checkpoint values and SavedModel initializers if found."""
# NOTE: All references to SavedModel refer to SavedModels loaded from the
# load_v2 API (which does not require the `sess` argument).
# If the graph contains resources loaded from a SavedModel, they are not
# restored when calling `saver.restore`. Thus, the SavedModel initializer must
# be called with `saver.restore` to properly initialize the model.
# The SavedModel init is stored in the "saved_model_initializers" collection.
# This collection is part of the MetaGraph's default_init_op, so it is already
# called by MonitoredSession as long as the saver doesn't restore any
# checkpoints from the working dir.
saved_model_init_ops = ops.get_collection("saved_model_initializers")
if saved_model_init_ops:
sess.run(saved_model_init_ops)
# The saver must be called *after* the SavedModel init, because the SavedModel
# init will restore the variables from the SavedModel variables directory.
# Initializing/restoring twice is not ideal but there's no other way to do it.
saver.restore(sess, path)
@tf_export(v1=["train.SessionManager"])
class SessionManager(object):
"""Training helper that restores from checkpoint and creates session.
This class is a small wrapper that takes care of session creation and
  checkpoint recovery. It also provides functions to facilitate
coordination among multiple training threads or processes.
* Checkpointing trained variables as the training progresses.
* Initializing variables on startup, restoring them from the most recent
checkpoint after a crash, or wait for checkpoints to become available.
### Usage:
```python
with tf.Graph().as_default():
...add operations to the graph...
# Create a SessionManager that will checkpoint the model in '/tmp/mydir'.
sm = SessionManager()
sess = sm.prepare_session(master, init_op, saver, checkpoint_dir)
# Use the session to train the graph.
while True:
sess.run(<my_train_op>)
```
`prepare_session()` initializes or restores a model. It requires `init_op`
and `saver` as an argument.
A second process could wait for the model to be ready by doing the following:
```python
with tf.Graph().as_default():
...add operations to the graph...
# Create a SessionManager that will wait for the model to become ready.
sm = SessionManager()
sess = sm.wait_for_session(master)
# Use the session to train the graph.
while True:
sess.run(<my_train_op>)
```
`wait_for_session()` waits for a model to be initialized by other processes.
"""
def __init__(self,
local_init_op=None,
ready_op=None,
ready_for_local_init_op=None,
graph=None,
recovery_wait_secs=30,
local_init_run_options=None,
local_init_feed_dict=None):
"""Creates a SessionManager.
The `local_init_op` is an `Operation` that is run always after a new session
was created. If `None`, this step is skipped.
The `ready_op` is an `Operation` used to check if the model is ready. The
model is considered ready if that operation returns an empty 1D string
tensor. If the operation returns a non empty 1D string tensor, the elements
are concatenated and used to indicate to the user why the model is not
ready.
The `ready_for_local_init_op` is an `Operation` used to check if the model
is ready to run local_init_op. The model is considered ready if that
operation returns an empty 1D string tensor. If the operation returns a non
empty 1D string tensor, the elements are concatenated and used to indicate
to the user why the model is not ready.
If `ready_op` is `None`, the model is not checked for readiness.
`recovery_wait_secs` is the number of seconds between checks that
the model is ready. It is used by processes to wait for a model to
be initialized or restored. Defaults to 30 seconds.
Args:
      local_init_op: An `Operation` run immediately after session creation.
        Usually used to initialize tables and local variables.
      ready_op: An `Operation` to check if the model is initialized.
ready_for_local_init_op: An `Operation` to check if the model is ready
to run local_init_op.
graph: The `Graph` that the model will use.
recovery_wait_secs: Seconds between checks for the model to be ready.
local_init_run_options: RunOptions to be passed to session.run when
executing the local_init_op.
local_init_feed_dict: Optional session feed dictionary to use when running
the local_init_op.
Raises:
ValueError: If ready_for_local_init_op is not None but local_init_op is
None
"""
# Sets default values of arguments.
if graph is None:
graph = ops.get_default_graph()
self._local_init_op = local_init_op
self._ready_op = ready_op
self._ready_for_local_init_op = ready_for_local_init_op
self._graph = graph
self._recovery_wait_secs = recovery_wait_secs
self._target = None
self._local_init_run_options = local_init_run_options
self._local_init_feed_dict = local_init_feed_dict
if ready_for_local_init_op is not None and local_init_op is None:
raise ValueError("If you pass a ready_for_local_init_op "
"you must also pass a local_init_op "
", ready_for_local_init_op [%s]" %
ready_for_local_init_op)
def _restore_checkpoint(self,
master,
saver=None,
checkpoint_dir=None,
checkpoint_filename_with_path=None,
wait_for_checkpoint=False,
max_wait_secs=7200,
config=None):
"""Creates a `Session`, and tries to restore a checkpoint.
Args:
master: `String` representation of the TensorFlow master to use.
saver: A `Saver` object used to restore a model.
checkpoint_dir: Path to the checkpoint files. The latest checkpoint in the
dir will be used to restore.
checkpoint_filename_with_path: Full file name path to the checkpoint file.
wait_for_checkpoint: Whether to wait for checkpoint to become available.
max_wait_secs: Maximum time to wait for checkpoints to become available.
      config: Optional `ConfigProto` proto used to configure the session.
|
freerangerouting/frr
|
tests/topotests/bgp_multi_vrf_topo1/test_bgp_multi_vrf_topo1.py
|
Python
|
gpl-2.0
| 229,521
| 0.00132
|
#!/usr/bin/env python
#
# Copyright (c) 2020 by VMware, Inc. ("VMware")
# Used Copyright (c) 2018 by Network Device Education Foundation,
# Inc. ("NetDEF") in this file.
#
# Permission to use, copy, modify, and/or distribute this software
# for any purpose with or without fee is hereby granted, provided
# that the above copyright notice and this permission notice appear
# in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND VMWARE DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL VMWARE BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
#
"""
Following tests are covered to test BGP Multi-VRF:
FUNC_1:
Within each VRF, each address must be unambiguous on DUT.
FUNC_2:
Different VRFs can have ambiguous/overlapping
addresses on DUT.
FUNC_3:
Create static routes(IPv4+IPv6) associated to specific VRFs
and verify on DUT that same prefixes are present in corresponding
routing table.
FUNC_4_&_5:
Each VRF should be mapped with a unique VLAN on DUT
for traffic segregation, when using a single physical interface.
FUNC_6:
    Advertise same set of prefixes from different VRFs
and verify on remote router that these prefixes are not
leaking to each other
FUNC_7:
Redistribute Static routes and verify on remote routers
that routes are advertised within specific VRF instance, which
those static routes belong to.
FUNC_8:
Test end to end traffic isolation based on VRF tables.
FUNC_9:
Use static routes for inter-vrf communication
(route-leaking) on DUT.
FUNC_10:
    Verify intra-vrf and inter-vrf communication between
iBGP peers.
FUNC_11:
Verify intra-vrf and inter-vrf communication
between eBGP peers.
FUNC_12_a:
Configure route-maps within a VRF, to alter BGP attributes.
Verify that route-map doesn't affect any other VRF instances'
routing on DUT.
FUNC_12_b:
Configure route-maps within a VRF, to alter BGP attributes.
Verify that route-map doesn't affect any other VRF instances'
routing on DUT.
FUNC_12_c:
Configure route-maps within a VRF, to alter BGP attributes.
Verify that route-map doesn't affect any other VRF instances'
routing on DUT.
FUNC_12_d:
Configure route-maps within a VRF, to alter BGP attributes.
Verify that route-map doesn't affect any other VRF instances'
routing on DUT.
FUNC_12_e:
Configure route-maps within a VRF, to alter BGP attributes.
Verify that route-map doesn't affect any other VRF instances'
routing on DUT.
FUNC_12_f:
Configure route-maps within a VRF, to alter BGP attributes.
Verify that route-map doesn't affect any other VRF instances'
routing on DUT.
FUNC_13:
Configure a route-map on DUT to match traffic based
on a VRF interfaces.
FUNC_14:
Test VRF-lite with Static+BGP originated routes.
FUNC_15:
Configure prefix-lists on DUT and apply to BGP peers to
permit/deny prefixes.
FUNC_16_1:
Configure a route-map on DUT to match traffic based various
match/set causes.
FUNC_16_2:
Configure a route-map on DUT to match traffic based various
match/set causes.
FUNC_16_3:
Configure a route-map on DUT to match traffic based various
match/set causes.
"""
import os
import sys
import time
import pytest
# Save the Current Working Directory to find configuration files.
CWD = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(CWD, "../"))
sys.path.append(os.path.join(CWD, "../lib/"))
# Required to instantiate the topology builder class.
# pylint: disable=C0413
# Import topogen and topotest helpers
from lib.topogen import Topogen, get_topogen
from lib.topotest import iproute2_is_vrf_capable
from lib.common_config import (
step,
verify_rib,
start_topology,
write_test_header,
check_address_types,
write_test_footer,
reset_config_on_routers,
create_route_maps,
create_static_routes,
create_prefix_lists,
create_interface_in_kernel,
create_bgp_community_lists,
check_router_status,
apply_raw_config,
required_linux_kernel_version,
)
from lib.topolog import logger
from lib.bgp import (
verify_bgp_rib,
create_router_bgp,
verify_bgp_community,
verify_bgp_convergence,
verify_best_path_as_per_bgp_attribute,
)
from lib.topojson import build_config_from_json
pytestmark = [pytest.mark.bgpd, pytest.mark.staticd]
# Global variables
NETWORK1_1 = {"ipv4": "1.1.1.1/32", "ipv6": "1::1/128"}
NETWORK1_2 = {"ipv4": "1.1.1.2/32", "ipv6": "1::2/128"}
NETWORK2_1 = {"ipv4": "2.1.1.1/32", "ipv6": "2::1/128"}
NETWORK2_2 = {"ipv4": "2.1.1.2/32", "ipv6": "2::2/128"}
NETWORK3_1 = {"ipv4": "3.1.1.1/32", "ipv6": "3::1/128"}
NETWORK3_2 = {"ipv4": "3.1.1.2/32", "ipv6": "3::2/128"}
NETWORK4_1 = {"ipv4": "4.1.1.1/32", "ipv6": "4::1/128"}
NETWORK4_2 = {"ipv4": "4.1.1.2/32", "ipv6": "4::2/128"}
NETWORK5_1 = {"ipv4": "5.1.1.1/32", "ipv6": "5::1/128"}
NETWORK5_2 = {"ipv4": "5.1.1.2/32", "ipv6": "5::2/128"}
NETWORK6_1 = {"ipv4": "6.1.1.1/32", "ipv6": "6::1/128"}
NETWORK6_2 = {"ipv4": "6.1.1.2/32", "ipv6": "6::2/128"}
NETWORK7_1 = {"ipv4": "7.1.1.1/32", "ipv6": "7::1/128"}
NETWORK7_2 = {"ipv4": "7.1.1.2/32", "ipv6": "7::2/128"}
NETWORK8_1 = {"ipv4": "8.1.1.1/32", "ipv6": "8::1/128"}
NETWORK8_2 = {"ipv4": "8.1.1.2/32", "ipv6": "8::2/128"}
NEXT_HOP_IP = {"ipv4": "Null0", "ipv6": "Null0"}
LOOPBACK_1 = {
"ipv4": "10.10.10.10/32",
"ipv6": "10::10:10/128",
}
LOOPBACK_2 = {
"ipv4": "20.20.20.20/32",
"ipv6": "20::20:20/128",
}
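# Editor's sketch (shape of the input consumed by create_static_routes,
# inferred from the imports above; the router name and VRF are hypothetical):
#   input_dict = {"r1": {"static_routes": [{"network": NETWORK1_1["ipv4"],
#                                           "next_hop": NEXT_HOP_IP["ipv4"],
#                                           "vrf": "RED_A"}]}}
#   create_static_routes(tgen, input_dict)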
def setup_module(mod):
"""
Sets up the pytest environment
* `mod`: module name
"""
# Required linux kernel version for this suite to run.
result = required_linux_kernel_version("4.15")
if result is not True:
pytest.skip("Kernel requirements are not met")
# iproute2 needs to support VRFs for this suite to run.
if not iproute2_is_vrf_capable():
pytest.skip("Installed iproute2 version does not support VRFs")
testsuite_run_time = time.asctime(time.localtime(time.time()))
logger.info("Testsuite start time: {}".format(testsuite_run_time))
logger.info("=" * 40)
logger.info("Running setup_module to create topology")
# This function initiates the topology build with Topogen...
json_file = "{}/bgp_multi_vrf_topo1.json".format(CWD)
tgen = Topogen(json_file, mod.__name__)
global topo
topo = tgen.json_topo
# ... and here it calls Mininet initialization functions.
# Starting topology, create tmp files which are loaded to routers
    # to start daemons and then start routers
start_topology(tgen)
# Creating configuration from JSON
build_config_from_json(tgen, topo)
global BGP_CONVERGENCE
global ADDR_TYPES
ADDR_TYPES = check_address_types()
BGP_CONVERGENCE = verify_bgp_convergence(tgen, topo)
assert BGP_CONVERGENCE is True, "setup_module : Failed \n Error: {}".format(
BGP_CONVERGENCE
)
logger.info("Running setup_module() done")
def teardown_module():
"""Teardown the pytest environment"""
logger.info("Running teardown_module to delete topology")
tgen = get_topogen()
    # Stop topology and remove tmp files
tgen.stop_topology()
logger.info(
"Testsuite end time: {}".format(time.asctime(time.localtime(time.time())))
)
logger.info("=" * 40)
#####################################################
#
# Testcases
#
#####################################################
def test_address_unambiguous_within_each_vrf_p0(request):
"""
FUNC_1:
Within each VRF, each address must be unambiguous on DUT.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
if tgen.routers_have_failure():
check_router_status(tgen)
step("Configure a set of static routes(IPv4+IPv6) in " "RED_A on route
|
gameduell/duell
|
pylib/click/core.py
|
Python
|
bsd-2-clause
| 68,206
| 0.000088
|
import os
import sys
import codecs
from contextlib import contextmanager
from itertools import repeat
from functools import update_wrapper
from .types import convert_type, IntRange, BOOL
from .utils import make_str, make_default_short_help, echo
from .exceptions import ClickException, UsageError, BadParameter, Abort, \
MissingParameter
from .termui import prompt, confirm
from .formatting import HelpFormatter, join_options
from .parser import OptionParser, split_opt
from .globals import push_context, pop_context
from ._compat import PY2, isidentifier, iteritems, _check_for_unicode_literals
_missing = object()
SUBCOMMAND_METAVAR = 'COMMAND [ARGS]...'
SUBCOMMANDS_METAVAR = 'COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]...'
def _bashcomplete(cmd, prog_name, complete_var=None):
"""Internal handler for the bash completion support."""
if complete_var is None:
complete_var = '_%s_COMPLETE' % (prog_name.replace('-', '_')).upper()
complete_instr = os.environ.get(complete_var)
if not complete_instr:
return
from ._bashcomplete import bashcomplete
if bashcomplete(cmd, prog_name, complete_var, complete_instr):
sys.exit(1)
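# Editor's sketch: for a program installed as "my-tool" the hook above looks
# for the variable _MY_TOOL_COMPLETE; the shell integration is expected to
# export it (e.g. _MY_TOOL_COMPLETE=complete my-tool) before completion runs.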
def batch(iterable, batch_size):
return list(zip(*repeat(iter(iterable), batch_size)))
def invoke_param_callback(callback, ctx, param, value):
code = getattr(callback, '__code__', None)
args = getattr(code, 'co_argcount', 3)
if args < 3:
# This will become a warning in Click 3.0:
from warnings import warn
warn(Warning('Invoked legacy parameter callback "%s". The new '
'signature for such callbacks starting with '
'click 2.0 is (ctx, param, value).'
% callback), stacklevel=3)
return callback(ctx, value)
return callback(ctx, param, value)
@contextmanager
def augment_usage_errors(ctx, param=None):
"""Context manager that attaches extra information to exceptions that
fly.
"""
try:
yield
except BadParameter as e:
if e.ctx is None:
e.ctx = ctx
if param is not None and e.param is None:
e.param = param
raise
except UsageError as e:
if e.ctx is None:
e.ctx = ctx
raise
def iter_params_for_processing(invocation_order, declaration_order):
"""Given a sequence of parameters in the order as should be considered
for processing and an iterable of parameters that exist, this returns
    a list in the correct order as they should be processed.
    """
def sort_key(item):
try:
idx = invocation_order.index(item)
except ValueError:
idx = float('inf')
return (not item.is_eager, idx)
return sorted(declaration_order, key=sort_key)
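# Editor's sketch: eager parameters sort first, then by position in
# invocation_order; parameters never invoked get idx == float('inf') and sort
# last. With an eager --help option, "cmd --name x --help" therefore
# processes --help before --name.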
class Context(object):
"""The context is a special internal object that holds state relevant
for the script execution at every single level. It's normally invisible
to commands unless they opt-in to getting access to it.
The context is useful as it can pass internal objects around and can
control special execution features such as reading data from
environment variables.
A context can be used as context manager in which case it will call
:meth:`close` on teardown.
.. versionadded:: 2.0
Added the `resilient_parsing`, `help_option_names`,
`token_normalize_func` parameters.
.. versionadded:: 3.0
Added the `allow_extra_args` and `allow_interspersed_args`
parameters.
.. versionadded:: 4.0
Added the `color`, `ignore_unknown_options`, and
`max_content_width` parameters.
:param command: the command class for this context.
:param parent: the parent context.
:param info_name: the info name for this invocation. Generally this
is the most descriptive name for the script or
command. For the toplevel script it is usually
the name of the script, for commands below it it's
                      the name of the command.
:param obj: an arbitrary object of user data.
:param auto_envvar_prefix: the prefix to use for automatic environment
variables. If this is `None` then reading
from environment variables is disabled. This
does not affect manually set environment
variables which are always read.
:param default_map: a dictionary (like object) with default values
for parameters.
:param terminal_width: the width of the terminal. The default is
inherit from parent context. If no context
defines the terminal width then auto
detection will be applied.
:param max_content_width: the maximum width for content rendered by
Click (this currently only affects help
pages). This defaults to 80 characters if
not overridden. In other words: even if the
terminal is larger than that, Click will not
format things wider than 80 characters by
default. In addition to that, formatters might
add some safety mapping on the right.
:param resilient_parsing: if this flag is enabled then Click will
parse without any interactivity or callback
invocation. This is useful for implementing
things such as completion support.
:param allow_extra_args: if this is set to `True` then extra arguments
at the end will not raise an error and will be
kept on the context. The default is to inherit
from the command.
:param allow_interspersed_args: if this is set to `False` then options
and arguments cannot be mixed. The
default is to inherit from the command.
:param ignore_unknown_options: instructs click to ignore options it does
not know and keeps them for later
processing.
:param help_option_names: optionally a list of strings that define how
the default help parameter is named. The
default is ``['--help']``.
:param token_normalize_func: an optional function that is used to
normalize tokens (options, choices,
etc.). This for instance can be used to
implement case insensitive behavior.
:param color: controls if the terminal supports ANSI colors or not. The
default is autodetection. This is only needed if ANSI
codes are used in texts that Click prints which is by
default not the case. This for instance would affect
help output.
"""
def __init__(self, command, parent=None, info_name=None, obj=None,
auto_envvar_prefix=None, default_map=None,
terminal_width=None, max_content_width=None,
resilient_parsing=False, allow_extra_args=None,
allow_interspersed_args=None,
ignore_unknown_options=None, help_option_names=None,
token_normalize_func=None, color=None):
#: the parent context or `None` if none exists.
self.parent = parent
#: the :class:`Command` for this context.
self.command = command
#: the descriptive information name
self.info_name = info_name
#: the parsed parameters except if the value is hidden in which
#: case it's not remembered.
self.params = {}
#: the leftover arguments.
self.args = []
if obj is None and parent is not None:
|
openstack/sahara
|
sahara/service/edp/oozie/engine.py
|
Python
|
apache-2.0
| 19,118
| 0
|
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
import os
import xml.dom.minidom as xml
from oslo_config import cfg
from oslo_utils import uuidutils
import six
from sahara import conductor as c
from sahara import context
from sahara.service.edp import base_engine
from sahara.service.edp import hdfs_helper as h
from sahara.service.edp.job_binaries import manager as jb_manager
from sahara.service.edp import job_utils
from sahara.service.edp.oozie import oozie as o
from sahara.service.edp.oozie.workflow_creator import workflow_factory
from sahara.service.validations.edp import job_execution as j
from sahara.utils import edp
from sahara.utils import remote
from sahara.utils import xmlutils as x
CONF = cfg.CONF
conductor = c.API
@six.add_metaclass(abc.ABCMeta)
class OozieJobEngine(base_engine.JobEngine):
def __init__(self, cluster):
self.cluster = cluster
self.plugin = job_utils.get_plugin(self.cluster)
def get_remote_client(self):
return o.RemoteOozieClient(self.get_oozie_server_uri(self.cluster),
self.get_oozie_server(self.cluster),
self.get_hdfs_user())
def get_client(self):
# by default engine will return standard oozie client implementation
return o.OozieClient(self.get_oozie_server_uri(self.cluster),
self.get_oozie_server(self.cluster))
def _get_oozie_job_params(self, hdfs_user, path_to_workflow,
oozie_params, use_hbase_lib,
scheduled_params=None, job_dir=None,
job_execution_type=None):
oozie_libpath_key = "oozie.libpath"
oozie_libpath = ""
rm_path = self.get_resource_manager_uri(self.cluster)
nn_path = self.get_name_node_uri(self.cluster)
hbase_common_lib_path = "%s%s" % (nn_path, h.HBASE_COMMON_LIB_PATH)
if use_hbase_lib:
if oozie_libpath_key in oozie_params:
oozie_libpath = "%s,%s" % (oozie_params.get(oozie_libpath_key,
""), hbase_common_lib_path)
else:
oozie_libpath = hbase_common_lib_path
if job_execution_type == "scheduled":
app_path = "oozie.coord.application.path"
job_parameters = {
"start": scheduled_params.get('start'),
"end": scheduled_params.get('end'),
"frequency": scheduled_params.get('frequency'),
"workflowAppUri": "%s%s" % (nn_path, job_dir),
app_path: "%s%s" % (nn_path, job_dir)}
else:
app_path = "oozie.wf.application.path"
job_parameters = {
app_path: "%s%s" % (nn_path, path_to_workflow)}
job_parameters["nameNode"] = nn_path
job_parameters["user.name"] = hdfs_user
job_parameters["jobTracker"] = rm_path
job_parameters[oozie_libpath_key] = oozie_libpath
job_parameters["oozie.use.system.libpath"] = "true"
# Don't let the application path be overwritten, that can't
# possibly make any sense
if app_path in oozie_params:
del oozie_params[app_path]
if oozie_libpath_key in oozie_params:
del oozie_params[oozie_libpath_key]
job_parameters.update(oozie_params)
return job_parameters
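    # Illustrative only: for a non-scheduled run against hypothetical cluster
    # URIs, the dict returned above would look roughly like this (values are
    # made up for the sketch, not taken from a real deployment):
    #
    #   {
    #       "oozie.wf.application.path": "hdfs://nn:8020/user/hadoop/wf-dir",
    #       "nameNode": "hdfs://nn:8020",
    #       "user.name": "hadoop",
    #       "jobTracker": "rm:8032",
    #       "oozie.libpath": "",
    #       "oozie.use.system.libpath": "true",
    #   }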
def _upload_workflow_file(self, where, job_dir, wf_xml, hdfs_user):
with remote.get_remote(where) as r:
h.put_file_to_hdfs(r, wf_xml, "workflow.xml", job_dir, hdfs_user)
return "%s/workflow.xml" % job_dir
def _upload_coordinator_file(self, where, job_dir, wf_xml, hdfs_user):
with remote.get_remote(where) as r:
h.put_file_to_hdfs(r, wf_xml, "coordinator.xml", job_dir,
hdfs_user)
return "%s/coordinator.xml" % job_dir
def cancel_job(self, job_execution):
if job_execution.engine_job_id is not None:
client = self.get_client()
client.kill_job(job_execution)
return client.get_job_info(job_execution)
def get_job_status(self, job_execution):
if job_execution.engine_job_id is not None:
return self.get_client().get_job_info(job_executio
|
n)
def _prepare_run_job(self, job_execution):
ctx = context.ctx()
# This will be a dictionary of tuples, (native_url, runtime_url)
# keyed by data_source id
data_source_urls = {}
prepared_job_params = {}
job = conductor.job_get(ctx, job_execution.job_id)
input_source, output_source = job_utils.get_input_output_data_sources(
jo
|
b_execution, job, data_source_urls, self.cluster)
# Updated_job_configs will be a copy of job_execution.job_configs with
# any name or uuid references to data_sources resolved to paths
# assuming substitution is enabled.
# If substitution is not enabled then updated_job_configs will
# just be a reference to job_execution.job_configs to avoid a copy.
# Additional_sources will be a list of any data_sources found.
additional_sources, updated_job_configs = (
job_utils.resolve_data_source_references(job_execution.job_configs,
job_execution.id,
data_source_urls,
self.cluster)
)
job_execution = conductor.job_execution_update(
ctx, job_execution,
{"data_source_urls": job_utils.to_url_dict(data_source_urls)})
# Now that we've recorded the native urls, we can switch to the
# runtime urls
data_source_urls = job_utils.to_url_dict(data_source_urls,
runtime=True)
data_sources = additional_sources + [input_source, output_source]
job_utils.prepare_cluster_for_ds(data_sources,
self.cluster, updated_job_configs,
data_source_urls)
proxy_configs = updated_job_configs.get('proxy_configs')
configs = updated_job_configs.get('configs', {})
        use_hbase_lib = configs.get('edp.hbase_common_lib', False)
# Extract all the 'oozie.' configs so that they can be set in the
# job properties file. These are config values for Oozie itself,
# not the job code
oozie_params = {}
for k in list(configs):
if k.startswith('oozie.'):
oozie_params[k] = configs[k]
external_hdfs_urls = self._resolve_external_hdfs_urls(
job_execution.job_configs)
for url in external_hdfs_urls:
h.configure_cluster_for_hdfs(self.cluster, url)
hdfs_user = self.get_hdfs_user()
# TODO(tmckay): this should probably be "get_namenode"
# but that call does not exist in the oozie engine api now.
oozie_server = self.get_oozie_server(self.cluster)
wf_dir = self._create_hdfs_workflow_dir(oozie_server, job)
self._upload_job_files_to_hdfs(oozie_server, wf_dir, job, configs,
proxy_configs)
wf_xml = workflow_factory.get_workflow_xml(
job, self.cluster, updated_job_configs,
input_source, output_source,
hdfs_user, data_source_urls)
path_to_workflow = self._upload_workflow_file(oozie_server, wf_dir,
|
Lvl4Sword/Acedia
|
setup.py
|
Python
|
agpl-3.0
| 1,191
| 0.005877
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt'), encoding='utf-8') as f:
CHANGES = f.read()
setup(
name='sloth',
version='0.1',
description='',
long_description=README,
license='AGPLv3',
# TODO: add author info
#author='',
#author_email='',
url='https://bitbucket.org/pride/sloth/',
# TODO: add
|
keywords
#keywor
|
ds='',
install_requires = ['python-dateutil', 'arrow'],
classifiers = [
"License :: OSI Approved :: GNU Affero General Public License v3"
"Operating System :: MacOS :: MacOS X",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
packages=find_packages(include=['sloth']),
include_package_data=True,
zip_safe=False,
entry_points="""\
[console_scripts]
sloth-game = sloth.start:run
""",
)
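# Typical setuptools workflow once this file is in place (shown for reference;
# the commands below are standard pip/setuptools usage, not project-specific):
#
#   pip install -e .
#   sloth-game        # runs sloth.start:run via the console_scripts entry point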
|
marcoapintoo/Biosignal-Intermediate-Format
|
biosignalformat/test.py
|
Python
|
apache-2.0
| 5,325
| 0.005446
|
#!/usr/bin/python
import unittest
from biosignalformat import *
class TestBaseObjects(unittest.TestCase):
def test_MinimalExperiment(self):
provider = XArchiveProvider("experiment001.7z")
#provider = ZipArchiveProvider("experiment001.zip")
experiment = Experiment({
"name": "Exp!",
"description": "blah!"
})
experiment.setArchiver(provider)
experiment.write()
with self.assertRaises(Exception):
experiment.remove(provider)
metadata = experiment.readMetadata(provider)
self.assertEqual(metadata["name"], "Exp!")
self.assertEqual(metadata["description"], "blah!")
def test_MinimalStructure7z(self):
provider = XArchiveProvider("experiment002B.7z")
experiment = Experiment({
"name": "Exp!",
"description": "blah!"
})
experiment.setArchiver(provider)
subject = Subject({
"name": "Subject001",
"description": "description-subject!"
})
experiment.addSubject(subject)
session = Session({
"name": "Subject001-Session001",
"description": "description-subject-session!"
})
subject.addSession(session)
channel = Channel({
"name": "AF8"
})
session.addChannel(channel)
#channelDataset.rawData = [1e-1222, 2.344, 3.14159265358979323846264338327950288419716939937510582097494459230781640629]
channel.setData([c/1e-12 for c in range(500000)])
experiment.write()
metadata = experiment.readMetadata()
self.assertEqual(metadata["name"], "Exp!")
self.assertEqual(metadata["description"], "blah!")
def test_MinimalStructureZip(self):
provider = ZipArchiveProvider("experiment002.zip")
experiment = Experiment({
"name": "Exp!",
"description": "blah!"
})
experiment.setArchiver(provider)
subject = Subject({
"name": "Subject001",
"description": "description-subject!"
})
experiment.addSubject(subject)
session = Session({
"name": "Subject001-Session001",
"description": "description-subject-session!"
})
subject.addSession(session)
channel = Channel({
"name": "AF8"
})
session.addChannel(channel)
#channelDataset.rawData = [1e-1222, 2.344, 3.14159265358979323846264338327950288419716939937510582097494459230781640629]
channel.setData([c/1e-12 for c in range(500000)])
experiment.write()
metadata = experiment.readMetadata()
self.assertEqual(metadata["name"], "Exp!")
self.assertEqual(metadata["description"], "blah!")
class TestPlugins(unittest.TestCase):
def test_plugins(self):
from biosignalformat.external import sample
self.assertEqual(sample.ConstantVariable, 12)
class TestConverters(unittest.TestCase):
def test_single_edf(self):
from biosignalformat.external import base_converter
#importer = base_converter.EDFImporter("ExampleEDF.edf", SevenZipArchiveProvider("ExampleEDFAscii.bif.7z"))
importer = base_converter.EDFImporter("ExampleEDF.edf", XArchiveProvider("Examp
|
leEDFAs
|
cii.bif.zip"))
importer.convert()
def atest_multiple_edf(self):
from biosignalformat.external import base_converter
importer = base_converter.EDFImporter("ExampleEDF.edf", XZipArchiveProvider("ExampleMultipleEDFAscii.bif.7z"))
#importer = base_converter.EDFImporter("ExampleEDF.edf", ZipArchiveProvider("ExampleMultipleEDFAscii.bif.zip"))
importer.convert()
importer2 = base_converter.EDFImporter("ExampleEDF2.edf", experiment=importer.experiment, subject=importer.subject)
importer2.convert()
importer3 = base_converter.EDFImporter("ExampleEDF2.edf", experiment=importer.experiment)
importer3.convert()
def test_single_bdf(self):
from biosignalformat.external import base_converter
#importer = base_converter.BDFImporter("ExampleBDF.bdf", SevenZipArchiveProvider("ExampleBDFAscii.bif.7z"))
importer = base_converter.BDFImporter("ExampleBDF.bdf", XArchiveProvider("ExampleBDFAscii.bif.zip"))
importer.convert()
def test_multiple_bdf(self):
from biosignalformat.external import base_converter
#importer = base_converter.EDFImporter("ExampleBDF.bdf", SevenZipArchiveProvider("ExampleMultipleBDFAscii.bif.7z"))
importer = base_converter.EDFImporter("ExampleBDF.bdf", XArchiveProvider("ExampleMultipleBDFAscii-3.bif.zip"))
importer.convert()
importer2 = base_converter.EDFImporter("ExampleBDF.bdf", experiment=importer.experiment, subject=importer.subject)
importer2.convert()
importer3 = base_converter.EDFImporter("ExampleBDF.bdf", experiment=importer.experiment)
importer3.convert()
def test_all():
test_loader = unittest.TestLoader()
#unittest.TextTestRunner(verbosity=2).run(test_loader.loadTestsFromTestCase(TestBaseObjects))
#unittest.TextTestRunner(verbosity=2).run(test_loader.loadTestsFromTestCase(TestPlugins))
unittest.TextTestRunner(verbosity=2).run(test_loader.loadTestsFromTestCase(TestConverters))
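# To run only the converter tests, as test_all() does above, the stdlib
# unittest runner can also be invoked directly (standard usage):
#
#   python -m unittest biosignalformat.test.TestConverters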
|
szykin/django-mock-queries
|
tests/test_utils.py
|
Python
|
mit
| 11,117
| 0.001889
|
from datetime import date, datetime
from mock import patch, MagicMock
from unittest import TestCase
from django_mock_queries import utils, constants
class TestUtils(TestCase):
def test_merge_concatenates_lists(self):
l1 = [1, 2, 3]
l2 = [4, 5, 6]
result = utils.merge(l1, l2)
for x in (l1 + l2):
assert x in result
def test_merge_eliminates_duplicate_entries(self):
l1 = [1, 2]
l2 = [2, 3]
result = utils.merge(l1, l2)
for x in (l1 + l2):
assert result.count(x) == 1
def test_intersect_creates_list_with_common_elements(self):
l1 = [1, 2]
l2 = [2, 3]
result = utils.intersect(l1, l2)
for x in (l1 + l2):
if x in l1 and x in l2:
assert x in result
else:
assert x not in result
def test_get_attribute_returns_value_with_default_comparison(self):
ob
|
j = MagicMock(foo='test')
value, comparison = utils.get_attribute(obj, 'foo')
assert value == 'test'
assert comparison is None
def test_get_attribute_returns_value_with_defined_comparison(self):
obj = MagicMock(foo='test')
value, comparison = utils.get_attribute(obj, 'foo__' + constants.COMPARISON_IEXACT)
assert value == 'test'
assert comparison == constants.COMPARISON_IEXAC
|
T
def test_get_attribute_returns_none_with_isnull_comparison(self):
obj = MagicMock(foo=None)
value, comparison = utils.get_attribute(obj, 'foo__' + constants.COMPARISON_ISNULL)
assert value is None
assert comparison == constants.COMPARISON_ISNULL, comparison
def test_get_attribute_returns_nested_object_value(self):
obj = MagicMock(child=MagicMock(foo='test'))
value, comparison = utils.get_attribute(obj, 'child__foo__' + constants.COMPARISON_IEXACT)
assert value == 'test'
assert comparison == constants.COMPARISON_IEXACT
def test_get_attribute_returns_default_value_when_object_is_none(self):
obj = None
default_value = ''
value, comparison = utils.get_attribute(obj, 'foo', default_value)
assert value == default_value
assert comparison is None
def test_get_attribute_with_date(self):
obj = MagicMock(foo=date(2017, 12, 31))
value, comparison = utils.get_attribute(
obj, 'foo__' + constants.COMPARISON_YEAR + '__' + constants.COMPARISON_GT
)
assert value == date(2017, 12, 31)
assert comparison == (constants.COMPARISON_YEAR, constants.COMPARISON_GT)
def test_get_attribute_returns_tuple_with_exact_as_default_comparison(self):
obj = MagicMock(foo=datetime(2017, 1, 1))
value, comparison = utils.get_attribute(obj, 'foo__' + constants.COMPARISON_YEAR)
assert value == datetime(2017, 1, 1)
assert comparison == (constants.COMPARISON_YEAR, constants.COMPARISON_EXACT)
def test_validate_date_or_datetime_raises_value_error(self):
with self.assertRaisesRegexp(ValueError, r'13 is incorrect value for month'):
utils.validate_date_or_datetime(13, constants.COMPARISON_MONTH)
def test_is_match_equality_check_when_comparison_none(self):
result = utils.is_match(1, 1)
assert result is True
result = utils.is_match('a', 'a')
assert result is True
result = utils.is_match(1, '1')
assert result is False
def test_is_match_case_sensitive_equality_check(self):
result = utils.is_match('a', 'A', constants.COMPARISON_EXACT)
assert result is False
result = utils.is_match('a', 'a', constants.COMPARISON_EXACT)
assert result is True
def test_is_match_case_insensitive_equality_check(self):
result = utils.is_match('a', 'A', constants.COMPARISON_IEXACT)
assert result is True
result = utils.is_match('a', 'a', constants.COMPARISON_IEXACT)
assert result is True
def test_is_match_case_sensitive_contains_check(self):
result = utils.is_match('abc', 'A', constants.COMPARISON_CONTAINS)
assert result is False
result = utils.is_match('abc', 'a', constants.COMPARISON_CONTAINS)
assert result is True
def test_is_match_case_insensitive_contains_check(self):
result = utils.is_match('abc', 'A', constants.COMPARISON_ICONTAINS)
assert result is True
result = utils.is_match('abc', 'a', constants.COMPARISON_ICONTAINS)
assert result is True
def test_is_match_startswith_check(self):
result = utils.is_match('abc', 'a', constants.COMPARISON_STARTSWITH)
assert result is True
result = utils.is_match('abc', 'A', constants.COMPARISON_STARTSWITH)
assert result is False
def test_is_match_istartswith_check(self):
result = utils.is_match('abc', 'a', constants.COMPARISON_ISTARTSWITH)
assert result is True
result = utils.is_match('abc', 'A', constants.COMPARISON_ISTARTSWITH)
assert result is True
def test_is_match_endswith_check(self):
result = utils.is_match('abc', 'c', constants.COMPARISON_ENDSWITH)
assert result is True
result = utils.is_match('abc', 'C', constants.COMPARISON_ENDSWITH)
assert result is False
def test_is_match_iendswith_check(self):
result = utils.is_match('abc', 'c', constants.COMPARISON_IENDSWITH)
assert result is True
result = utils.is_match('abc', 'C', constants.COMPARISON_IENDSWITH)
assert result is True
def test_is_match_greater_than_value_check(self):
result = utils.is_match(5, 3, constants.COMPARISON_GT)
assert result is True
result = utils.is_match(3, 5, constants.COMPARISON_GT)
assert result is False
def test_is_match_greater_than_equal_to_value_check(self):
result = utils.is_match(5, 3, constants.COMPARISON_GTE)
assert result is True
result = utils.is_match(5, 5, constants.COMPARISON_GTE)
assert result is True
result = utils.is_match(3, 5, constants.COMPARISON_GTE)
assert result is False
def test_is_match_less_than_value_check(self):
result = utils.is_match(1, 2, constants.COMPARISON_LT)
assert result is True
result = utils.is_match(2, 2, constants.COMPARISON_LT)
assert result is False
def test_is_match_less_than_equal_to_value_check(self):
result = utils.is_match(1, 2, constants.COMPARISON_LTE)
assert result is True
result = utils.is_match(1, 1, constants.COMPARISON_LTE)
assert result is True
result = utils.is_match(2, 1, constants.COMPARISON_LTE)
assert result is False
def test_is_match_isnull_check(self):
result = utils.is_match(1, True, constants.COMPARISON_ISNULL)
assert result is False
result = utils.is_match(1, False, constants.COMPARISON_ISNULL)
assert result is True
result = utils.is_match(None, True, constants.COMPARISON_ISNULL)
assert result is True
result = utils.is_match(None, False, constants.COMPARISON_ISNULL)
assert result is False
result = utils.is_match(None, 1, constants.COMPARISON_ISNULL)
assert result is True
def test_is_match_in_value_check(self):
result = utils.is_match(2, [1, 3], constants.COMPARISON_IN)
assert result is False
result = utils.is_match(1, [1, 3], constants.COMPARISON_IN)
assert result is True
@patch('django_mock_queries.utils.get_attribute')
@patch('django_mock_queries.utils.is_match', MagicMock(return_value=True))
def test_matches_includes_object_in_results_when_match(self, get_attr_mock):
source = [
MagicMock(foo=1),
MagicMock(foo=2),
]
get_attr_mock.return_value = None, None
results = utils.matches(*source, foo__gt=0)
for x in source:
assert x in results
@patch('django_mock_queries.utils.get_attribute')
@patch('django_mock_queries.utils.is_match', MagicMock(return_value=False))
|
codebhendi/alfred-bot
|
speaker.py
|
Python
|
mit
| 490
| 0.004082
|
import os
import talkey
from gtts import gTTS
import vlc
import time
import wave
import contextlib
class Speaker:
def __init__(self):
        self.engine = talkey.Talkey()
def say(self, text_to_say):
self.engine.say(text_to_say)
def google_say(self, text_to_say, fname="1.mp3"):
|
tts = gTTS(text=text_to_say, lang="en")
tts.save(fname)
self.player = v
|
lc.MediaPlayer(fname)
        self.player.play()
        # block until VLC finishes playback before stopping and cleaning up
        while self.player.get_state() not in (vlc.State.Ended, vlc.State.Error):
            time.sleep(0.1)
        self.player.stop()
        os.remove(fname)
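# Usage sketch (assumes talkey, gTTS and python-vlc are installed; the Google
# TTS call needs network access; the phrases below are illustrative):
#
#   speaker = Speaker()
#   speaker.say("Hello from the offline engine")
#   speaker.google_say("Hello from Google text-to-speech")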
|
larsks/cloud-init
|
tests/cloud_tests/testcases/modules/ssh_keys_generate.py
|
Python
|
gpl-3.0
| 1,674
| 0
|
# This file is part of cloud-init. See LICENSE file for license information.
"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestSshKeysGenerate(base.CloudTestCase):
"""Test ssh keys module."""
# TODO: Check cloud-init-output for the correct keys being generated
def test_dsa_public(self):
|
"""Test dsa public key not generated."""
out = self.get_data_file('dsa_public')
self.assertEqual('', out)
def test_dsa_private(self):
"""Test dsa private key not generated."""
out = self.get_data_file('dsa_private')
self.assertEqual('', out)
def test_rsa_public(self):
"""Test rsa public key not generated."""
out = self.get_data_file('rsa_public')
self.assertEqua
|
l('', out)
def test_rsa_private(self):
"""Test rsa public key not generated."""
out = self.get_data_file('rsa_private')
self.assertEqual('', out)
def test_ecdsa_public(self):
"""Test ecdsa public key generated."""
out = self.get_data_file('ecdsa_public')
self.assertIsNotNone(out)
def test_ecdsa_private(self):
"""Test ecdsa public key generated."""
out = self.get_data_file('ecdsa_private')
self.assertIsNotNone(out)
def test_ed25519_public(self):
"""Test ed25519 public key generated."""
out = self.get_data_file('ed25519_public')
self.assertIsNotNone(out)
def test_ed25519_private(self):
"""Test ed25519 public key generated."""
out = self.get_data_file('ed25519_private')
self.assertIsNotNone(out)
# vi: ts=4 expandtab
|
texas/tx_tecreports
|
tx_tecreports/migrations/0007_auto__add_field_contributor_zipcode_short.py
|
Python
|
apache-2.0
| 13,870
| 0.007354
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Contributor.zipcode_short'
db.add_column(u'tx_tecreports_contributor', 'zipcode_short',
self.gf('django.db.models.fields.CharField')(max_length=5, null=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Contributor.zipcode_short'
db.delete_column(u'tx_tecreports_contributor', 'zipcode_short')
models = {
u'tx_tecreports.contributionsbyamount': {
'Meta': {'ordering': "['low']", 'object_name': 'ContributionsByAmount'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'high': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'low': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'name': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'}),
'report': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stats_by_amount'", 'to': u"orm['tx_tecreports.Report']"}),
'total': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'tx_tecreports.contributionsbydate': {
'Meta': {'ordering': "['date']", 'object_name': 'ContributionsByDate'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'date': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'report': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stats_by_date'", 'to': u"orm['tx_tecreports.Report']"}),
'total': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'tx_tecreports.contributionsbystate': {
'Meta': {'ordering': "['-amount']", 'object_name': 'ContributionsByState'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'report': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stats_by_state'", 'to': u"orm['tx_tecreports.Report']"}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'total': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'tx_tecreports.contributionsbyzipcode': {
'Meta': {'ordering': "['-amount']", 'object_name': 'ContributionsByZipcode'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'report': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stats_by_zipcode'", 'to': u"orm['tx_tecreports.Report']"}),
'total': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'tx_tecreports.contributor': {
'Meta': {'object_name': 'Contributor'},
'address_1': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'address_2': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'city': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'first_name': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_entity': ('django.db.models.fields.BooleanField', [], {}),
'is_individual': ('django.db.models.fields.BooleanField', [], {}),
'last_name': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'state': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'suffix': ('tx_tecreports.fields.Op
|
tionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'title': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'type_of': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'contributors'", 'to': u"orm['tx_tecreports.ContributorType']"}),
'zipcode': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
|
'zipcode_short': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True'})
},
u'tx_tecreports.contributortype': {
'Meta': {'object_name': 'ContributorType'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'})
},
u'tx_tecreports.employer': {
'Meta': {'object_name': 'Employer'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'})
},
u'tx_tecreports.filer': {
'Meta': {'object_name': 'Filer'},
'filer_id': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'}),
'filer_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'filers'", 'to': u"orm['tx_tecreports.FilerType']"}),
'first_name': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'name_prefix': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'name_suffix': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'nickname': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'})
},
u'tx_tecreports.filertype': {
'Meta': {'object_name': 'FilerType'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'})
},
u'tx_tecreports.filing': {
'Meta': {'object_name': 'Filing'},
'filer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'filings'", 'to': u"orm['tx_tecreports.Filer']"}),
'filing_method': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'filings'", 'to': u"orm['tx_tecreports.FilingMethod']"}),
'is_correction': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'report_due': ('django.db.models.fields.DateField', [], {}),
'report_filed': ('django.db.models.fields.DateField', [], {}),
'report_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250', 'primary_key': 'True'}),
'report_type': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
|
lnielsen/invenio
|
invenio/modules/documentation/__init__.py
|
Python
|
gpl-2.0
| 834
| 0.015588
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms
|
of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
|
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Integrate Sphinx documentation pages."""
|
frhumanes/consulting
|
web/src/stadistic/__init__.py
|
Python
|
apache-2.0
| 1,076
| 0.001859
|
class StadisticRouter(object):
"""A router to control all database operations on models in
the stadistic application"""
def db_for_read(self, model
|
, **hints):
"Point all operations on myapp models to 'other'"
if model._meta.app_label == 'stadistic':
return 'nonrel'
return 'default'
def db_for_write(self, model, **hints):
"Point all operations on stadistic models to 'other'"
if model._meta.app_label == 'stadistic':
return 'nonrel'
return 'default'
def allow_relation(self, ob
|
j1, obj2, **hints):
"Deny any relation if a model in stadistic is involved"
if obj1._meta.app_label == 'stadistic' or obj2._meta.app_label == 'stadistic':
return True
return True
def allow_syncdb(self, db, model):
"Make sure the stadistic app only appears on the 'nonrel' db"
if db == 'nonrel':
return model._meta.app_label == 'stadistic'
elif model._meta.app_label == 'stadistic':
return False
return True
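# Minimal sketch of wiring this router up (assumes the standard Django
# DATABASE_ROUTERS setting; the dotted path below is hypothetical and depends
# on where the class lives in your project):
#
#   # settings.py
#   DATABASE_ROUTERS = ['stadistic.StadisticRouter']
#   DATABASES = {
#       'default': {...},   # relational backend for everything else
#       'nonrel': {...},    # non-relational backend for the stadistic app
#   }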
|
benbox69/pyload
|
module/plugins/hooks/ExtractArchive.py
|
Python
|
gpl-3.0
| 21,976
| 0.008282
|
# -*- coding: utf-8 -*-
from __future__ import with_statement
import os
import sys
import traceback
# monkey patch bug in python 2.6 and lower
# http://bugs.python.org/issue6122 , http://bugs.python.org/issue1236 , http://bugs.python.org/issue1731717
if sys.version_info < (2, 7) and os.name != "nt":
import errno
import subprocess
def _eintr_retry_call(func, *args):
while True:
try:
return func(*args)
except OSError, e:
if e.errno == errno.EINTR:
continue
raise
    #: Unused timeout option for older python version
def wait(self, timeout=0):
"""
Wait for child process to terminate. Returns returncode
attribute.
"""
if self.returncode is None:
try:
pid, sts = _eintr_retry_call(os.waitpid, self.pid, 0)
except OSError, e:
if e.errno != errno.ECHILD:
raise
#: This happens if SIGCLD is set to be ignored or waiting
#: For child processes has otherwise been disabled for our
#: process. This child is dead, we can't get the status.
sts = 0
self._handle_exitstatus(sts)
return self.returncode
subprocess.Popen.wait = wait
try:
import send2trash
except ImportError:
pass
from module.plugins.internal.Addon import Addon, Expose, threaded
from module.plugins.internal.Plugin import exists, replace_patterns
from module.plugins.internal.Extractor import ArchiveError, CRCError, PasswordError
from module.utils import fs_encode, save_join as fs_join, uniqify
class ArchiveQueue(object):
def __init__(self, plugin, storage):
self.plugin = plugin
self.storage = storage
def get(self):
try:
return [int(pid) for pid in self.plugin.retrieve("ExtractArchive:%s" % self.storage, "").decode('base64').split()]
except Exception:
return []
def set(self, value):
if isinstance(value, list):
item = str(value)[1:-1].replace(' ', '').replace(',', ' ')
else:
item = str(value).strip()
return self.plugin.store("ExtractArchive:%s" % self.storage, item.encode('base64')[:-1])
def delete(self):
return self.plugin.delete("ExtractArchive:%s" % self.storage)
def add(self, item):
queue = self.get()
if item not in queue:
return self.set(queue + [item])
else:
return True
def remove(self, item):
queue = self.get()
try:
queue.remove(item)
except ValueError:
pass
        if not queue:
return self.delete()
return self.set(queue)
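# Usage sketch for ArchiveQueue (assumes a plugin exposing the store/retrieve/
# delete API used above; the package id is illustrative):
#
#   queue = ArchiveQueue(plugin, "Queue")
#   queue.add(42)        # remember package id 42 for later extraction
#   queue.get()          # -> [42]
#   queue.remove(42)     # drops the id; storage is deleted once empty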
class ExtractArchive(Addon):
__name__ = "ExtractArchive"
__type__ = "hook"
__version__ = "1.50"
__status__ = "testing"
__config__ = [("activated" , "bool" , "Activated" , True ),
("fullpath" , "bool" , "Extract with full paths" , True ),
("overwrite" , "bool" , "Overwrite files" , False ),
("keepbroken" , "bool" , "Try to extract broken archives" , False ),
("repair" , "bool" , "Repair broken archives (RAR required)" , False ),
("test" , "bool" , "Test archive before extracting" , False ),
("usepasswordfile", "bool" , "Use password file" , True ),
("passwordfile" , "file" , "Password file" , "passwords.txt" ),
("delete" , "bool" , "Delete archive after extraction" , True ),
("deltotrash" , "bool" , "Move to trash (recycle bin) instead delete", True ),
("subfolder" , "bool" , "Create subfolder for each package" , False ),
("destination" , "folder" , "Extract files to folder" , "" ),
("extensions" , "str" , "Extract archives ending with extension" , "7z,bz2,bzip2,gz,gzip,lha,lzh,lzma,rar,tar,taz,tbz,tbz2,tgz,xar,xz,z,zip"),
("excludefiles" , "str" , "Don't extract the following files" , "*.nfo,*.DS_Store,index.dat,thumb.db" ),
("recursive" , "bool" , "Extract archives in archives" , True ),
("waitall" , "bool" , "Run after all downloads was processed" , False ),
("renice" , "int" , "CPU priority" , 0 )]
__description__ = """Extract different kind of archives"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "[email protected]"),
("Immenz" , "[email protected]" )]
NAME_REPLACEMENTS = [(r'\.part\d+\.rar$', ".part.rar")]
def init(self):
self.event_map = {'allDownloadsProcessed': "all_downlo
|
ads_processed",
'packageDeleted' : "package_deleted"
|
}
self.queue = ArchiveQueue(self, "Queue")
self.failed = ArchiveQueue(self, "Failed")
self.interval = 60
self.extracting = False
self.last_package = False
self.extractors = []
self.passwords = []
self.repair = False
def activate(self):
for p in ("UnRar", "SevenZip", "UnZip"):
try:
module = self.pyload.pluginManager.loadModule("internal", p)
klass = getattr(module, p)
if klass.find():
self.extractors.append(klass)
if klass.REPAIR:
self.repair = self.get_config('repair')
except OSError, e:
if e.errno == 2:
self.log_warning(_("No %s installed") % p)
else:
self.log_warning(_("Could not activate: %s") % p, e)
if self.pyload.debug:
traceback.print_exc()
except Exception, e:
self.log_warning(_("Could not activate: %s") % p, e)
if self.pyload.debug:
traceback.print_exc()
if self.extractors:
self.log_debug(*["Found %s %s" % (Extractor.__name__, Extractor.VERSION) for Extractor in self.extractors])
self.extract_queued() #: Resume unfinished extractions
else:
self.log_info(_("No Extract plugins activated"))
@threaded
def extract_queued(self, thread):
        if self.extracting:  #@NOTE: doing the check here for safety (called by coreReady)
return
self.extracting =
|
samhoo/askbot-realworld
|
askbot/migrations/0031_synchronize_badge_slug_with_name.py
|
Python
|
gpl-3.0
| 26,451
| 0.008468
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
def deslugify(text):
in_bits = text.split('-')
out_bits = list()
for bit in in_bits:
out_bit = bit[0].upper() + bit[1:]
out_bits.append(out_bit)
return ' '.join(out_bits)
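# Example: deslugify('strunk-and-white') -> 'Strunk And White'; the backwards
# migration below then special-cases this badge to restore 'Strunk & White'.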
class Migration(DataMigration):
def forwards(self, orm):
        pass  # nothing to do
def backwards(self, orm):
"""need this reverse migration so that creation of unique
constraint (type, name) works in backwards migration 0030
"""
for badge in orm.BadgeData.objects.all():
badge.name = deslugify(badge.slug)
            if badge.name == 'Strunk And White':  # special case
badge.name = 'Strunk & White'
badge.save()
models = {
'askbot.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']", 'null': 'True'}),
'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'to': "orm['auth.User']"}),
'recipients': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'incoming_activity'", 'through': "'ActivityAuditStatus'", 'to': "orm['auth.User']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.activityauditstatus': {
'Meta': {'unique_together': "(('user', 'activity'),)", 'object_name': 'ActivityAuditStatus'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Activity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.anonymousanswer': {
'Meta': {'object_name': 'AnonymousAnswer'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_answers'", 'to': "orm['askbot.Question']"}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'askbot.anonymousquestion': {
'Meta': {'object_name': 'AnonymousQuestion'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'askbot.answer': {
'Meta': {'object_name': 'Answer', 'db_table': "u'answer'"},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', []
|
, {'related_name': "'answers'", 'to': "orm['askbot.Questi
|
on']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.answerrevision': {
'Meta': {'object_name': 'AnswerRevision', 'db_table': "u'answer_revision'"},
'answer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Answer']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answerrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length':
|
nicolewu/cerbero
|
cerbero/utils/svn.py
|
Python
|
lgpl-2.1
| 1,732
| 0.000577
|
# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
from cerbero.utils import shell
def checkout(url, dest):
'''
Checko
|
ut a url to a given destination
@param url: url t
|
o checkout
@type url: string
@param dest: path where to do the checkout
    @type dest: string
'''
shell.call('svn co %s %s' % (url, dest))
def update(repo, revision='HEAD'):
'''
    Update a repository to a given revision
    @param repo: repository path
    @type repo: str
@param revision: the revision to checkout
@type revision: str
'''
shell.call('svn up -r %s' % revision, repo)
def checkout_file(url, out_path):
'''
Checkout a single file to out_path
@param url: file URL
@type url: str
@param out_path: output path
    @type out_path: str
'''
shell.call('svn export --force %s %s' % (url, out_path))
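# Usage sketch (URLs and paths below are placeholders, not real repositories):
#
#   checkout('http://svn.example.org/project/trunk', '/tmp/project')
#   update('/tmp/project', revision='1234')
#   checkout_file('http://svn.example.org/project/README', '/tmp/README')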
|
cloudify-cosmo/softlayer-python
|
SoftLayer/CLI/virt/create.py
|
Python
|
mit
| 10,420
| 0
|
"""Manage, delete, order compute instances."""
# :license: MIT, see LICENSE for more details.
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import exceptions
from SoftLayer.CLI import formatting
from SoftLayer.CLI import helpers
from SoftLayer.CLI import template
from SoftLayer.CLI import virt
from SoftLayer import utils
import click
@click.command(epilog="See 'sl vs create-options' for valid options")
@click.option('--domain', '-D', help="Domain portion of the FQDN")
@click.option('--hostname', '-H', help="Host portion of the FQDN")
@click.option('--image',
help="Image GUID. See: 'sl image list' for reference")
@click.option('--cpu', '-c', help="Number of CPU cores", type=click.INT)
@click.option('--memory', '-m', help="Memory in mebibytes", type=virt.MEM_TYPE)
@click.option('--os', '-o',
help="OS install code. Tip: you can specify <OS>_LATEST")
@click.option('--billing',
type=click.Choice(['hourly', 'monthly']),
default='hourly',
help="""Billing rate""")
@click.option('--datacenter', '-d', help="Datacenter shortname")
@click.option('--dedicated/--public',
is_flag=True,
help="Create a dedicated Virtual Server (Private Node)")
@click.option('--san',
is_flag=True,
help="Use SAN storage instead of local disk.")
@click.option('--test',
is_flag=True,
help="Do not actually create the virtual server")
@click.option('--export',
type=click.Path(writable=True, resolve_path=True),
help="Exports options to a template file")
@click.option('--userfile', '-F',
help="Read userdata from file",
type=click.Path(exists=True, readable=True, resolve_path=True))
@click.option('--postinstall', '-i', help="Post-install script to download")
@click.option('--key', '-k',
multiple=True,
help="SSH keys to add to the root user")
@click.option('--disk', multiple=True, help="Disk sizes")
@click.option('--private',
is_flag=True,
help="Forces the VS to only have access the private network")
@click.option('--like',
is_flag=True,
help="Use the configuration from an existing VS")
@click.option('--network', '-n', help="Network port speed in Mbps")
@click.option('--tag', '-g', multiple=True, help="Tags to add to the instance")
@click.option('--template', '-t',
help="A template file that defaults the command-line options",
type=click.Path(exists=True, readable=True, resolve_path=True))
@click.option('--userdata', '-u', help="User defined metadata string")
@click.option('--vlan-public',
help="The ID of the public VLAN on which you want the virtual "
"server placed",
type=click.INT)
@click.option('--vlan-private',
help="The ID of the private VLAN on which you want the virtual "
"server placed",
type=click.INT)
@click.option('--wait',
type=click.INT,
help="Wait until VS is finished provisioning for up to X "
"seconds before returning")
@environment.pass_env
def cli(env, **args):
"""Order/create virtual servers."""
template.update_with_template_args(args, list_args=['disk', 'key'])
vsi = SoftLayer.VSManager(env.client)
    _update_with_like_args(env, args)
_validate_args(args)
# Do not create a virtual server with test or export
do_create = not (args['export'] or args['test'])
table = formatting.Table(['Item', 'cost'])
table.align['Item'] = 'r'
table.align['cost'] = 'r'
data = _parse_create_args(env.client, args)
output = []
if args.get('test'):
result = vsi.verify_create_instance(**data)
total_monthly = 0.0
total_hourly = 0.0
table = formatting.Table(['Item', 'cost'])
table.align['Item'] = 'r'
table.align['cost'] = 'r'
for price in result['prices']:
total_monthly += float(price.get('recurringFee', 0.0))
total_hourly += float(price.get('hourlyRecurringFee', 0.0))
if args.get('billing') == 'hourly':
rate = "%.2f" % float(price['hourlyRecurringFee'])
elif args.get('billing') == 'monthly':
rate = "%.2f" % float(price['recurringFee'])
table.add_row([price['item']['description'], rate])
total = 0
if args.get('billing') == 'hourly':
total = total_hourly
elif args.get('billing') == 'monthly':
total = total_monthly
        billing_rate = 'monthly'
        if args.get('billing') == 'hourly':
            billing_rate = 'hourly'
table.add_row(['Total %s cost' % billing_rate, "%.2f" % total])
output.append(table)
output.append(formatting.FormattedItem(
None,
' -- ! Prices reflected here are retail and do not '
'take account level discounts and are not guaranteed.'))
if args['export']:
export_file = args.pop('export')
template.export_to_template(export_file, args,
exclude=['wait', 'test'])
return 'Successfully exported options to a template file.'
if do_create:
if env.skip_confirmations or formatting.confirm(
"This action will incur charges on your account. Continue?"):
result = vsi.create_instance(**data)
table = formatting.KeyValueTable(['name', 'value'])
table.align['name'] = 'r'
table.align['value'] = 'l'
table.add_row(['id', result['id']])
table.add_row(['created', result['createDate']])
table.add_row(['guid', result['globalIdentifier']])
output.append(table)
if args.get('wait'):
ready = vsi.wait_for_ready(
result['id'], int(args.get('wait') or 1))
table.add_row(['ready', ready])
else:
raise exceptions.CLIAbort('Aborting virtual server order.')
return output
def _validate_args(args):
"""Raises an ArgumentError if the given arguments are not valid."""
if all([args['userdata'], args['userfile']]):
raise exceptions.ArgumentError(
'[-u | --userdata] not allowed with [-F | --userfile]')
image_args = [args['os'], args['image']]
if all(image_args):
raise exceptions.ArgumentError(
'[-o | --os] not allowed with [--image]')
if not any(image_args):
raise exceptions.ArgumentError(
'One of [--os | --image] is required')
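# For example (illustrative values only), passing both an OS code and an image
# GUID trips the mutual-exclusion check above:
#
#   _validate_args({'userdata': None, 'userfile': None,
#                   'os': 'UBUNTU_LATEST', 'image': 'some-guid'})
#   # -> ArgumentError: [-o | --os] not allowed with [--image]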
def _update_with_like_args(env, args):
"""Update arguments with options taken from a currently running VS.
    :param env: CLI environment providing the SoftLayer API client
:param dict args: CLI arguments
"""
if args['like']:
vsi = SoftLayer.VSManager(env.client)
vs_id = helpers.resolve_id(vsi.resolve_ids, args.pop('like'), 'VS')
like_details = vsi.get_instance(vs_id)
like_args = {
'hostname': like_details['hostname'],
'doma
|
in': like_details['domain'],
'cp
|
u': like_details['maxCpu'],
'memory': like_details['maxMemory'],
'hourly': like_details['hourlyBillingFlag'],
'datacenter': like_details['datacenter']['name'],
'network': like_details['networkComponents'][0]['maxSpeed'],
'user-data': like_details['userData'] or None,
'postinstall': like_details.get('postInstallScriptUri'),
'dedicated': like_details['dedicatedAccountHostOnlyFlag'],
'private': like_details['privateNetworkOnlyFlag'],
}
tag_refs = like_details.get('tagReferences', None)
if tag_refs is not None and len(tag_refs) > 0:
like_args['tag'] = [t['tag']['name'] for t in tag_refs]
# Handle mutually exclusive options
like_image = utils.lookup(like_details,
'blockDeviceTemplateGroup',
'globalIdentifier')
|
JoeJasinski/evesch
|
evesch/core/feed/views.py
|
Python
|
gpl-2.0
| 8,264
| 0.0144
|
from icalendar import Calendar, vCalAddress, vText
import icalendar
from datetime import timedelta
from django.template import RequestContext
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.core.urlresolvers import reverse
from django.core.exceptions import ObjectDoesNotExist
#from django.contrib.syndication.views import feed
from django.utils import feedgenerator
from django.template.loader import render_to_string
from django.http import HttpResponse
from evesch.org.models import Organization
from evesch.event.models import Event
from evesch.core.feed.feeds import OrgFeed
from evesch.euser.models import eUser, get_current_user
def org_rss(request,org_short_name,org_feed_hash):
try:
"""
"""
host = request.META['HTTP_HOST']
current_org, message = Organization.objects.get_current_org(org_short_name)
if message:
return HttpResponseRedirect(reverse('org_orgs_list'))
if not org_feed_hash == current_org.org_feed_hash:
return HttpResponseRedirect(reverse('org_org_view', kwargs={'org_short_name':current_org.org_short_name}))
events = current_org.event_set.all().order_by('-event_date')
orgfeed = feedgenerator.Rss201rev2Feed(title=current_org.org_name,
link="http://%s%s" % (host, reverse('event_events_list',kwargs={'org_short_name':current_org.org_short_name,})),
description=current_org.org_desc, language='en',
)
for event in events:
orgfeed.add_item(
title=event.event_name,
link="http://%s%s" % (host, reverse('event_event_view', kwargs={'org_short_name':current_org.org_short_name,'event_hash':event.event_hash})),
description="Event on: %s -- Description: %s" % (event.event_date.strftime('%d %b %Y'), event.event_desc),
categories=(event.event_type,),
author_name=event.event_creator_name,
pubdate=event.event_created_date)
response = HttpResponse()
response['Content-Type'] = 'application/rss+xml'
response.write(orgfeed.writeString('UTF-8'))
#template_name = "error.html"
return response
except ObjectDoesNotExist:
context = {'error':"Organization does not exist",}
template_name = "error.html"
return render_to_response(template_name,context,context_instance=RequestContext(request))
def org_ics(request,org_short_name,org_feed_hash):
host = request.META['HTTP_HOST']
current_org, message = Organization.objects.get_current_org(org_short_name)
if message:
return HttpResponseRedirect(reverse('org_orgs_list'))
if not org_feed_hash == current_org.org_feed_hash:
return HttpResponseRedirect(reverse('org_org_view', kwargs={'org_short_name':current_org.org_short_name}))
events = current_org.event_set.all().order_by('-event_date')
orgical = Calendar()
orgical['summary'] = "Calendar for organization %s" % (current_org.org_name)
orgical.add('prodid', '-//Evesch//NONSGML v1.0//EN')
orgical.add('version', '2.0')
for event in events:
cal_event = icalendar.Event()
cal_event.add('summary', event.event_name)
cal_event.add('dtstart', event.event_date)
cal_event.add('description', event.event_desc)
cal_event.add('categories',event.event_type)
cal_event.add('duration',timedelta(hours=1))
cal_event.add('url',"http://%s%s" % (host, reverse('event_event_view',kwargs={'org_short_name':current_org.org_short_name,'event_hash':event.event_hash,})))
if event.event_creator_name.email:
organizer_n = event.event_creator_name.email
else:
organizer_n = "%s %s" % (event.event_creator_name.first_name, event.event_creator_name.last_name)
organizer = vCalAddress('MAILTO:' + organizer_n)
organizer.params['cn'] = vText("%s %s" % (event.event_creator_name.first_name, event.event_creator_name.last_name))
organizer.params['role'] = vText('CREATOR')
cal_event.add('organizer', organizer, encode=0)
orgical.add_component(cal_event)
template_name = "core/message.html"
context = {}
response = HttpResponse()
response['Content-Type'] = 'text/calendar'
response.write(orgical.to_ical())
#template_name = "error.html"
return response
def user_rss(request,username,user_feed_hash):
try:
""" """
host = request.META['HTTP_HOST']
current_user, message = get_current_user(username)
if message:
return HttpResponseRedirect(reverse(
|
'home'))
if not user_feed_hash == current_user.user_feed_hash:
return HttpResponseRedirect(reverse('euser_user_view', kwargs={'username':current_user.username}))
|
user_events = Event.objects.filter(attendee__in=current_user.attendee_set.all()).order_by('-event_date')
orgfeed = feedgenerator.Rss201rev2Feed(title=current_user.username,
link="http://%s%s" % (host, reverse('euser_user_view', kwargs={'username':current_user.username})) ,
description=current_user.about, language='en',
)
for event in user_events:
orgfeed.add_item(
title=event.event_name,
link="http://%s%s" % (host, reverse('event_event_view', kwargs={'org_short_name':event.event_org.org_short_name,'event_hash':event.event_hash})),
description="Event on: %s -- Description: %s" % (event.event_date.strftime('%d %b %Y'), event.event_desc),
categories=(event.event_type,),
author_name=event.event_creator_name,
pubdate=event.event_created_date)
response = HttpResponse()
response['Content-Type'] = 'application/rss+xml'
response.write(orgfeed.writeString('UTF-8'))
#template_name = "error.html"
return response
except ObjectDoesNotExist:
context = {'error':"Organization does not exist",}
template_name = "error.html"
return render_to_response(template_name,context,context_instance=RequestContext(request))
def user_ics(request,username,user_feed_hash):
host = request.META['HTTP_HOST']
current_user, message = get_current_user(username)
if message:
return HttpResponseRedirect(reverse('home'))
#user_events = Event.objects.all()
if not user_feed_hash == current_user.user_feed_hash:
return HttpResponseRedirect(reverse('euser_user_view', kwargs={'username':current_user.username}))
user_events = Event.objects.filter(attendee__in=current_user.attendee_set.all()).order_by('-event_date')
userical = Calendar()
userical['summary'] = "Calendar for user %s" % (current_user.username)
userical.add('prodid', '-//Evesch//NONSGML v1.0//EN')
userical.add('version', '2.0')
for event in user_events:
cal_event = icalendar.Event()
cal_event.add('summary', event.event_name)
cal_event.add('dtstart', event.event_date)
cal_event.add('description', event.event_desc)
cal_event.add('categories',event.event_type)
cal_event.add('duration',timedelta(hours=1))
cal_event.add('url',"http://" + host + reverse('event_event_view',kwargs={'org_short_name':event.event_org.org_short_name,'event_hash':event.event_hash,}))
if event.event_creator_name.email:
organizer_n = event.event_creator_name.email
else:
organizer_n = "%s %s" % (event.event_creator_name.first_name, event.event_creator_name.last_name)
organizer = vCalAddress('MAILTO:' + organizer_n)
organizer.params['cn'] = vText("%s %s" % (event.event_creator_name.first_name, event.event_creator_name.last_name))
organizer.params['role'] = vText('CREATOR')
cal_event.add('organizer', organizer, encode=0)
userical.add_component(cal_event)
template_name = "core/message.html"
context = {}
response = HttpResponse()
response['Content-Type'] = 'text/calendar'
response.wr
|
looooo/pivy
|
scons/scons-local-1.2.0.d20090919/SCons/Tool/sgiar.py
|
Python
|
isc
| 2,570
| 0.006226
|
"""SCons.Tool.sgiar
Tool-specific initialization for SGI ar (library archive). If CC
exists, static libraries should be built with it, so the prelinker has
a chance to resolve C++ template instantiations.
There norm
|
ally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of t
|
his software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/sgiar.py 4369 2009/09/19 15:58:29 scons"
import SCons.Defaults
import SCons.Tool
import SCons.Util
def generate(env):
"""Add Builders and construction variables for ar to an Environment."""
SCons.Tool.createStaticLibBuilder(env)
if env.Detect('CC'):
env['AR'] = 'CC'
env['ARFLAGS'] = SCons.Util.CLVar('-ar')
env['ARCOM'] = '$AR $ARFLAGS -o $TARGET $SOURCES'
else:
env['AR'] = 'ar'
env['ARFLAGS'] = SCons.Util.CLVar('r')
env['ARCOM'] = '$AR $ARFLAGS $TARGET $SOURCES'
env['SHLINK'] = '$LINK'
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared')
env['SHLINKCOM'] = '$SHLINK $SHLINKFLAGS -o $TARGET $SOURCES $_LIBDIRFLAGS $_LIBFLAGS'
env['LIBPREFIX'] = 'lib'
env['LIBSUFFIX'] = '.a'
def exists(env):
return env.Detect('CC') or env.Detect('ar')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
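# A minimal sketch, assuming a standard SCons checkout, of exercising this
# module through the generic tool-selection machinery; the printed values
# depend on whether a 'CC' binary is detected on the build host.
if __name__ == '__main__':
    from SCons.Environment import Environment
    env = Environment(tools=['sgiar'])   # runs generate(env) defined above
    print("%s: %s" % (env['AR'], env['ARCOM']))   # 'CC -ar ...' if CC exists, else 'ar r ...'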
|
docker-tow/tow
|
tests/dockerfile_tests.py
|
Python
|
apache-2.0
| 4,097
| 0.004882
|
import unittest
from tow.dockerfile import Dockerfile
class DockerfileTest(unittest.TestCase):
def test_parse_spaced_envs(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ["ENV test 1"]
envs = d.envs()
self.assertEqual(envs, {"test": "1"})
def test_parse_many_envs(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ["ENV test 1", "ENV test2=2", "ENV test3 3"]
envs = d.envs()
self.assertEqual(envs, {"test": "1", "test2": "2", "test3": "3"})
def test_parse_multiline(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ['ENV myName="John Doe" myDog=Rex\\ The\\ Dog \\',
'myCat=fluffy']
envs = d.envs()
self.assertEqual(envs, {"myName": "John Doe",
"myDog": "Rex\\ The\\ Dog", "myCat": "fluffy"})
def test_add_copy(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ["FROM ubuntu"]
mapping = ("/tets1", "/test2")
d.add_copy([mapping])
self.assertListEqual(d._Dockerfile__dockerfile, ["FROM ubuntu",
"# TOW COPY BLOCK FROM MAPPING FILE START",
"COPY %s %s" % mapping,
"# TOW COPY BLOCK FROM MAPPING FILE END"])
def test_add_copy_after_from(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ["FROM ubuntu", "ENTRYPOINT [/bin/sh]"]
mapping = ("/tets1", "/test2")
d.add_copy([mapping])
self.assertListEqual(d._Dockerfile__dockerfile, ["FROM ubuntu",
"# TOW COPY BLOCK FROM MAPPING FILE START",
"COPY %s %s" % mapping,
"# TOW COPY BLOCK FROM MAPPING FILE END",
"ENTRYPOINT [/bin/sh]"])
    def test_add_copy_after_maintainer(self):
d = Dockerfile("Dockerfile")
        d._Dockerfile__dockerfile = ["FROM ubuntu", "MAINTAINER test", "ENTRYPOINT [/bin/sh]"]
mapping = ("/tets1", "/test2")
d.add_copy([mapping])
self.assertListEqual(d._Dockerfile__dockerfile, ["FROM ubuntu",
"MAINTAINER test",
"# TOW COPY BLOCK FROM MAPPING FILE START",
"COPY %s %s" % mapping,
"# TOW COPY BLOCK FROM MAPPING FILE END",
"ENTRYPOINT [/bin/sh]"])
def test_find_entrypoint_or_cmd(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ['FROM ubuntu', 'ENTRYPOINT ["/bin/sh"]', 'CMD ["-c"]']
self.assertEqual(d.find_entrypoint_or_cmd(), (["/bin/sh"], ["-c"]))
def test_find_entrypoint_or_cmd_shell_style(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ['FROM ubuntu', 'ENTRYPOINT /bin/sh', 'CMD ["-c"]']
self.assertEqual(d.find_entrypoint_or_cmd(), (["/bin/sh"], ["-c"]))
def test_find_entrypoint_or_cmd_cmd_only(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ['FROM ubuntu', 'CMD ["/bin/sh", "-c", "-x"]']
self.assertEqual(d.find_entrypoint_or_cmd(), (None, ["/bin/sh", "-c", "-x"]))
def test_find_entrypoint_or_cmd_entrypoint_only(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ['FROM ubuntu', 'ENTRYPOINT ["/bin/sh"]']
self.assertEqual(d.find_entrypoint_or_cmd(), (["/bin/sh"], None))
def test_find_entrypoint_or_cmd_none(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ['FROM ubuntu']
self.assertEqual(d.find_entrypoint_or_cmd(), (None, None))
|
GlobalBoost/GlobalBoost-Y
|
test/functional/rpc_rawtransaction.py
|
Python
|
mit
| 24,022
| 0.008659
|
#!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the rawtransaction RPCs.
Test the following RPCs:
- createrawtransaction
- signrawtransactionwithwallet
- sendrawtransaction
- decoderawtransaction
- getrawtransaction
"""
from collections import OrderedDict
from decimal import Decimal
from io import BytesIO
from test_framework.messages import CTransaction, ToHex
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, bytes_to_hex_str, connect_nodes_bi, hex_str_to_bytes
class multidict(dict):
"""Dictionary that allows duplicate keys.
Constructed with a list of (key, value) tuples. When dumped by the json module,
will output invalid json with repeated keys, eg:
    >>> json.dumps(multidict([(1,2),(1,2)]))
'{"1": 2, "1": 2}'
Used to test calls to rpc methods with repeated keys in the json object."""
def __init__(self, x):
dict.__init__(self, x)
self.x = x
def items(self):
return self.x
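# A minimal sketch, assuming the json module falls back to the overridden
# items() for dict subclasses (as the docstring above relies on): dumping a
# multidict yields JSON with the key repeated.
#
#     >>> import json
#     >>> json.dumps(multidict([("data", "aa"), ("data", "bb")]))
#     '{"data": "aa", "data": "bb"}'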
# Create one-input, one-output, no-fee transaction:
class RawTransactionsTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
self.extra_args = [["-addresstype=legacy"], ["-addresstype=legacy"], ["-addresstype=legacy"]]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self, split=False):
super().setup_network()
connect_nodes_bi(self.nodes, 0, 2)
def run_test(self):
self.log.info('prepare some coins for multiple *rawtransaction commands')
self.nodes[2].generate(1)
self.sync_all()
self.nodes[0].generate(101)
self.sync_all()
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.5)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.0)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),5.0)
self.sync_all()
self.nodes[0].generate(5)
self.sync_all()
self.log.info('Test getrawtransaction on genesis block coinbase returns an error')
block = self.nodes[0].getblock(self.nodes[0].getblockhash(0))
assert_raises_rpc_error(-5, "The genesis block coinbase is not considered an ordinary transaction", self.nodes[0].getrawtransaction, block['merkleroot'])
self.log.info('Check parameter types and required parameters of createrawtransaction')
# Test `createrawtransaction` required parameters
assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction)
assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [])
# Test `createrawtransaction` invalid extra parameters
assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [], {}, 0, False, 'foo')
# Test `createrawtransaction` invalid `inputs`
txid = '1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000'
assert_raises_rpc_error(-3, "Expected type array", self.nodes[0].createrawtransaction, 'foo', {})
assert_raises_rpc_error(-1, "JSON value is not an object as expected", self.nodes[0].createrawtransaction, ['foo'], {})
assert_raises_rpc_error(-8, "txid must be hexadecimal string", self.nodes[0].createrawtransaction, [{}], {})
assert_raises_rpc_error(-8, "txid must be hexadecimal string", self.nodes[0].createrawtransaction, [{'txid': 'foo'}], {})
assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid}], {})
assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 'foo'}], {})
assert_raises_rpc_error(-8, "Invalid parameter, vout must be positive", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': -1}], {})
assert_raises_rpc_error(-8, "Invalid parameter, sequence number is out of range", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 0, 'sequence': -1}], {})
# Test `createrawtransaction` invalid `outputs`
address = self.nodes[0].getnewaddress()
address2 = self.nodes[0].getnewaddress()
assert_raises_rpc_error(-1, "JSON value is not an array as expected", self.nodes[0].createrawtransaction, [], 'foo')
self.nodes[0].createrawtransaction(inputs=[], outputs={}) # Should not throw for backwards compatibility
self.nodes[0].createrawtransaction(inputs=[], outputs=[])
assert_raises_rpc_error(-8, "Data must be hexadecimal string", self.nodes[0].createrawtransaction, [], {'data': 'foo'})
assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].createrawtransaction, [], {'foo': 0})
assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].createrawtransaction, [], {address: 'foo'})
assert_raises_rpc_error(-3, "Amount out of range", self.nodes[0].createrawtransaction, [], {address: -1})
assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], multidict([(address, 1), (address, 1)]))
assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], [{address: 1}, {address: 1}])
assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], [{"data": 'aa'}, {"data": "bb"}])
assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], multidict([("data", 'aa'), ("data", "bb")]))
assert_raises_rpc_error(-8, "Invalid parameter, key-value pair must contain exactly one key", self.nodes[0].createrawtransaction, [], [{'a': 1, 'b': 2}])
assert_raises_rpc_error(-8, "Invalid parameter, key-value pair not an object as expected", self.nodes[0].createrawtransaction, [], [['key-value pair1'], ['2']])
# Test `createrawtransaction` invalid `locktime`
assert_raises_rpc_error(-3, "Expected type number", self.nodes[0].createrawtransaction, [], {}, 'foo')
assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, -1)
assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, 4294967296)
# Test `createrawtransaction` invalid `replaceable`
assert_raises_rpc_error(-3, "Expected type bool", self.nodes[0].createrawtransaction, [], {}, 0, 'foo')
self.log.info('Check that createrawtransaction accepts an array and object as outputs')
tx = CTransaction()
# One output
tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs={address: 99}))))
        assert_equal(len(tx.vout), 1)
assert_equal(
bytes_to_hex_str(tx.serialize()),
self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}]),
)
# Two outputs
tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=OrderedDict([(address, 99), (address2, 99)])))))
assert_equal(len(tx.vout), 2)
assert_equal(
bytes_to_hex_str(tx.serialize()),
self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}]),
)
# Multiple mixed outputs
tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=multidict([(address, 99), (address2, 99), ('data', '99')])))))
assert_equal(len(tx.vout), 3)
assert_equal(
bytes_to_hex_str(tx.serialize()),
            self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}, {'data': '99'}]),
        )
|
CI-WATER/django-tethys_wps
|
tethys_wps/views.py
|
Python
|
bsd-2-clause
| 1,115
| 0.000897
|
from inspect import getmembers
from django.shortcuts import render
from utilities import get_wps_service_engine, list_wps_service_engines, abstract_is_link
def home(request):
"""
Home page for Tethys WPS tool. Lists all the WPS services that are linked.
"""
wps_services = list_wps_service_engines()
context = {'wps_services': wps_services}
return render(request, 'tethys_wps/home.html', context)
def service(request, service):
"""
View that lists the processes for a given service.
"""
wps = get_wps_service_engine(service)
context = {'wps': wps,
'service': service}
return render(request, 'tethys_wps/service.html', context)
def process(request, service, identifier):
"""
View that displays a detailed description for a WPS process.
"""
wps = get_wps_service_engine(service)
wps_process = wps.describeprocess(identifier)
    context = {'process': wps_process,
'service': service,
'is_link': abstract_is_link(wps_process)}
return render(request, 'tethys_wps/process.html', context)
|
kiith-sa/QGIS
|
tests/src/python/test_qgscomposereffects.py
|
Python
|
gpl-2.0
| 3,073
| 0.004557
|
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsComposerEffects.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = '(C) 2012 by Dr. Horst Düster / Dr. Marco Hugentobler'
__date__ = '20/08/2012'
__copyright__ = 'Copyright 2012, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
import qgis
from PyQt4.QtCore import QFileInfo
from PyQt4.QtXml import QDomDocument
from PyQt4.QtGui import (QPainter, QColor)
from qgis.core import (QgsComposerShape,
QgsRectangle,
QgsComposition,
QgsMapRenderer
)
from utilities import (unitTestDataPath,
getQgisTestApp,
TestCase,
unittest,
expectedFailure
)
from qgscompositionchecker import QgsCompositionChecker
QGISAPP, CANVAS, IFACE, PARENT = getQgisTestApp()
TEST_DATA_DIR = unitTestDataPath()
class TestQgsComposerEffects(TestCase):
def __init__(self, methodName):
"""Run once on class initialisation."""
unittest.TestCase.__init__(self, methodName)
# create composition
self.mMapRenderer = QgsMapRenderer()
self.mComposition = QgsComposition(self.mMapRenderer)
self.mComposition.setPaperSize(297, 210)
self.mComposerRect1 = QgsComposerShape(20, 20, 150, 100, self.mComposition)
self.mComposerRect1.setShapeType(QgsComposerShape.Rectangle)
self.mComposerRect1.setBackgroundColor(QColor.fromRgb(255, 150, 0))
self.mComposition.addComposerShape(self.mComposerRect1)
self.mComposerRect2 = QgsComposerShape(50, 50, 150, 100, self.mComposition)
self.mComposerRect2.setShapeType(QgsComposerShape.Rectangle)
self.mComposerRect2.setBackgroundColor(QColor.fromRgb(0, 100, 150))
self.mComposition.addComposerShape(self.mComposerRect2)
def testBlendModes(self):
"""Test that blend modes work for composer items."""
self.mComposerRect2.setBlendMode(QPainter.CompositionMode_Multiply)
checker = QgsCompositionChecker('composereffects_blend', self.mComposition)
myTestResult, myMessage = checker.testComposition()
self.mComposerRect2.setBlendMode(QPainter.CompositionMode_SourceOver)
assert myTestResult == True, myMessage
def testTransparency(self):
"""Test that transparency works for composer items."""
self.mComposerRect2.setTransparency( 50 )
checker = QgsCompositionChecker('composereffects_transparency', self.mComposition)
myTestResult, myMessage = checker.testComposition()
self.mComposerRect2.setTransparency( 100 )
assert myTestResult == True, myMessage
if __name__ == '__main__':
unittest.main()
|
mikhtonyuk/rxpython
|
concurrent/futures/cooperative/ensure_exception_handled.py
|
Python
|
mit
| 3,261
| 0
|
import traceback
class EnsureExceptionHandledGuard:
"""Helper for ensuring that Future's exceptions were
|
handled.
This solves a nasty problem with Futures and Tasks that have an
exception set: if nobody asks for the exception, the exception is
never logged. This violates the Zen of Python: 'Errors should
never pass silently. Unless explicitly silenced.'
    However, we don't want to log the exception as soon as
set_exception() is called: if the calling code is written
properly, it will get the exception and handle it properly. But
we *do* want to log it if result() or exception() was never called
-- otherwise developers waste a lot of time wondering why their
buggy code fails silently.
An earlier attempt added a __del__() method to the Future class
itself, but this backfired because the presence of __del__()
prevents garbage collection from breaking cycles. A way out of
this catch-22 is to avoid having a __del__() method on the Future
class itself, but instead to have a reference to a helper object
with a __del__() method that logs the traceback, where we ensure
that the helper object doesn't participate in cycles, and only the
Future has a reference to it.
The helper object is added when set_exception() is called. When
the Future is collected, and the helper is present, the helper
object is also collected, and its __del__() method will log the
traceback. When the Future's result() or exception() method is
    called (and a helper object is present), it removes the helper
object, after calling its clear() method to prevent it from
logging.
One downside is that we do a fair amount of work to extract the
traceback from the exception, even when it is never logged. It
would seem cheaper to just store the exception object, but that
references the traceback, which references stack frames, which may
    reference the Future, which references the EnsureExceptionHandledGuard,
    and then the EnsureExceptionHandledGuard would be included in a cycle,
which is what we're trying to avoid! As an optimization, we don't
immediately format the exception; we only do the work when
activate() is called, which call is delayed until after all the
Future's callbacks have run. Since usually a Future has at least
one callback (typically set by 'yield from') and usually that
    callback extracts the exception, thereby removing the need to
format the exception.
PS. I don't claim credit for this solution. I first heard of it
in a discussion about closing files when they are collected.
"""
__slots__ = ['exc', 'tb', 'hndl', 'cls']
def __init__(self, exc, handler):
self.exc = exc
self.hndl = handler
self.cls = type(exc)
self.tb = None
def activate(self):
exc = self.exc
if exc is not None:
self.exc = None
self.tb = traceback.format_exception(exc.__class__, exc,
exc.__traceback__)
def clear(self):
self.exc = None
self.tb = None
def __del__(self):
if self.tb:
self.hndl(self.cls, self.tb)
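# A minimal usage sketch: _SimpleFuture and _log_unhandled below are
# hypothetical, illustrating how a Future-like class could wire in the guard
# described in the docstring above.
def _log_unhandled(exc_cls, tb_lines):
    # hypothetical handler: print the pre-formatted traceback lines
    print("Future exception was never retrieved: %s" % exc_cls.__name__)
    print("".join(tb_lines))

class _SimpleFuture(object):
    def set_exception(self, exc):
        self._exc = exc
        self._guard = EnsureExceptionHandledGuard(exc, _log_unhandled)
        self._guard.activate()   # normally delayed until all callbacks have run

    def exception(self):
        self._guard.clear()      # retrieving the exception counts as handling it
        return self._exc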
|
robertsj/poropy
|
pyqtgraph/examples/exampleLoaderTemplate.py
|
Python
|
mit
| 2,302
| 0.002172
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'exampleLoaderTemplate.ui'
#
# Created: Sat Dec 17 23:46:27 2011
# by: PyQt4 UI code generator 4.8.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(762, 302)
self.gridLayout = QtGui.QGridLayout(Form)
self.gridLayout.setMargin(0)
self.gridLayout.setSpacing(0)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.splitter = QtGui.QSplitter(Form)
self.splitter.setOrientation(QtCore.Qt.Horizontal)
self.splitter.setObjectName(_fromUtf8("splitter"))
self.layoutWidget = QtGui.QWidget(self.splitter)
self.layoutWidget.setObjectName(_fromUtf8("layoutWidget"))
self.verticalLayout = QtGui.QVBoxLayout(self.layoutWidget)
self.verticalLayout.setMargin(0)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.exampleTree = QtGui.QTreeWidget(self.layoutWidget)
self.exampleTree.setObjectName(_fromUtf8("exampleTree"))
        self.exampleTree.headerItem().setText(0, _fromUtf8("1"))
self.exampleTree.header().setVisible(False)
        self.verticalLayout.addWidget(self.exampleTree)
self.loadBtn = QtGui.QPushButton(self.layoutWidget)
self.loadBtn.setObjectName(_fromUtf8("loadBtn"))
self.verticalLayout.addWidget(self.loadBtn)
self.codeView = QtGui.QTextBrowser(self.splitter)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Monospace"))
font.setPointSize(10)
self.codeView.setFont(font)
self.codeView.setObjectName(_fromUtf8("codeView"))
self.gridLayout.addWidget(self.splitter, 0, 0, 1, 1)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8))
self.loadBtn.setText(QtGui.QApplication.translate("Form", "Load Example", None, QtGui.QApplication.UnicodeUTF8))
|
asleao/sistema-cotacao
|
project/cotacao/migrations/0004_auto_20170322_0818.py
|
Python
|
gpl-3.0
| 665
| 0.001504
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-22 11:18
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('cotacao', '0003_auto_20170312_2049'),
]
operations = [
migrations.RemoveField(
model_name='item',
name='pedido',
),
migrations.AddField(
model_name='pedido',
name='itens',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='itens', to='cotacao.Item'),
),
]
|
windflyer/apport
|
apport_python_hook.py
|
Python
|
gpl-2.0
| 7,544
| 0.001723
|
'''Python sys.excepthook hook to generate apport crash dumps.'''
# Copyright (c) 2006 - 2009 Canonical Ltd.
# Authors: Robert Collins <[email protected]>
# Martin Pitt <[email protected]>
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version. See http://www.gnu.org/copyleft/gpl.html for
# the full text of the license.
import os
import sys
CONFIG = '/etc/default/apport'
def enabled():
'''Return whether Apport should generate crash reports.'''
# This doesn't use apport.packaging.enabled() because it is too heavyweight
# See LP: #528355
import re
try:
with open(CONFIG) as f:
conf = f.read()
return re.search('^\s*enabled\s*=\s*0\s*$', conf, re.M) is None
except IOError:
# if the file does not exist, assume it's enabled
return True
def apport_excepthook(exc_type, exc_obj, exc_tb):
'''Catch an uncaught exception and make a traceback.'''
# create and save a problem report. Note that exceptions in this code
# are bad, and we probably need a per-thread reentrancy guard to
# prevent that happening. However, on Ubuntu there should never be
# a reason for an exception here, other than [say] a read only var
# or some such. So what we do is use a try - finally to ensure that
# the original excepthook is invoked, and until we get bug reports
# ignore the other issues.
# import locally here so that there is no routine overhead on python
# startup time - only when a traceback occurs will this trigger.
try:
# ignore 'safe' exit types.
if exc_type in (KeyboardInterrupt, ):
return
# do not do anything if apport was disabled
if not enabled():
return
try:
from cStringIO import StringIO
StringIO # pyflakes
except ImportError:
from io import StringIO
import re, traceback
from apport.fileutils import likely_packaged, get_recent_crashes
# apport will look up the package from the executable path.
try:
binary = os.path.realpath(os.path.join(os.getcwd(), sys.argv[0]))
except (TypeError, AttributeError, IndexError):
# the module has mutated sys.argv, plan B
try:
binary = os.readlink('/proc/%i/exe' % os.getpid())
except OSError:
return
# for interactive python sessions, sys.argv[0] == ''; catch that and
# other irregularities
if not os.access(binary, os.X_OK) or not os.path.isfile(binary):
return
# filter out binaries in user accessible paths
if not likely_packaged(binary):
return
import apport.report
pr = apport.report.Report()
# special handling of dbus-python exceptions
if hasattr(exc_obj, 'get_dbus_name'):
if exc_obj.get_dbus_name() == 'org.freedesktop.DBus.Error.NoReply':
                # NoReply is a useless crash, we do not even get the method it
# was trying to call; needs actual crash from D-BUS backend (LP #914220)
return
if exc_obj.get_dbus_name() == 'org.freedesktop.DBus.Error.ServiceUnknown':
dbus_service_unknown_analysis(exc_obj, pr)
# append a basic traceback. In future we may want to include
# additional data such as the local variables, loaded modules etc.
tb_file = StringIO()
traceback.print_exception(exc_type, exc_obj, exc_tb, file=tb_file)
pr['Traceback'] = tb_file.getvalue().strip()
pr.add_proc_info(extraenv=['PYTHONPATH', 'PYTHONHOME'])
pr.add_user_info()
# override the ExecutablePath with the script that was actually running
pr['ExecutablePath'] = binary
if 'ExecutableTimestamp' in pr:
pr['ExecutableTimestamp'] = str(int(os.stat(binary).st_mtime))
try:
pr['PythonArgs'] = '%r' % sys.argv
except AttributeError:
pass
if pr.check_ignored():
return
mangled_program = re.sub('/', '_', binary)
# get the uid for now, user name later
user = os.getuid()
pr_filename = '%s/%s.%i.crash' % (os.environ.get(
'APPORT_REPORT_DIR', '/var/crash'), mangled_program, user)
crash_counter = 0
if os.path.exists(pr_filename):
if apport.fileutils.seen_report(pr_filename):
# flood protection
with open(pr_filename, 'rb') as f:
crash_counter = get_recent_crashes(f) + 1
if crash_counter > 1:
return
# remove the old file, so that we can create the new one with
# os.O_CREAT|os.O_EXCL
os.unlink(pr_filename)
else:
# don't clobber existing report
return
if crash_counter:
pr['CrashCounter'] = str(crash_counter)
with os.fdopen(os.open(pr_filename,
os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o640), 'wb') as f:
pr.write(f)
finally:
# resume original processing to get the default behaviour,
# but do not trigger an AttributeError on interpreter shutdown.
if sys:
sys.__excepthook__(exc_type, exc_obj, exc_tb)
def dbus_service_unknown_analysis(exc_obj, report):
from glob import glob
import subprocess, re
try:
from configparser import ConfigParser, NoSectionError, NoOptionError
(ConfigParser, NoSectionError, NoOptionError) # pyflakes
except ImportError:
# Python 2
from ConfigParser import ConfigParser, NoSectionError, NoOptionError
# determine D-BUS name
m = re.search('name\s+(\S+)\s+was not provided by any .service',
exc_obj.get_dbus_message())
if not m:
if sys.stderr:
sys.stderr.write('Error: cannot parse D-BUS name from exception: '
+ exc_obj.get_dbus_message())
return
dbus_name = m.group(1)
# determine .service file and Exec name for the D-BUS name
services = [] # tuples of (service file, exe name, running)
for f in glob('/usr/share/dbus-1/*services/*.service'):
cp = ConfigParser(interpolation=None)
cp.read(f, encoding='UTF-8')
try:
if cp.get('D-BUS Service', 'Name') == dbus_name:
exe = cp.get('D-BUS Service', 'Exec')
running = (subprocess.call(['pidof', '-sx', exe], stdout=subprocess.PIPE) == 0)
services.append((f, exe, running))
except (NoSectionError, NoOptionError):
if sys.stderr:
sys.stderr.write('Invalid D-BUS .service file %s: %s' % (
f, exc_obj.get_dbus_message()))
continue
if not services:
report['DbusErrorAnalysis'] = 'no service file providing ' + dbus_name
else:
report['DbusErrorAnalysis'] = 'provided by'
for (service, exe, running) in services:
            report['DbusErrorAnalysis'] += ' %s (%s is %srunning)' % (
service, exe, ('' if running else 'not '))
def install():
'''Install the python apport hook.'''
sys.excepthook = apport_excepthook
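# A minimal sketch of wiring the hook in at interpreter startup; placing the
# call in sitecustomize.py is one common convention rather than something
# this module mandates:
#
#     # sitecustomize.py
#     import apport_python_hook
#     apport_python_hook.install()   # replaces sys.excepthook
#     # any uncaught exception now also writes a /var/crash/*.crash report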
|
streema/deployer
|
deployer/tasks/virtualenv.py
|
Python
|
mit
| 576
| 0.003472
|
from fabric.api import run
from fabric.decorators import with_settings
from fabric.colors import green, yellow
from deployer.tasks.requirements import install_requirements
@with_settings(warn_only=True)
def setup_virtualenv(python_version='', app_name='', app_dir='', repo_url=''):
print(green("Set
|
ting up virtualenv on {}".format(app_dir)))
print(green('Creating virtualenv'))
if run("pyenv virtualenv {0} {1}-{0}".format(python_version, app_name)).failed:
print(yellow("Virtualenv already exists"))
    install_requirements(app_name, python_version)
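# A minimal sketch of invoking the task through Fabric 1.x; the host string
# and argument values are illustrative:
#
#     from fabric.api import execute
#     execute(setup_virtualenv, python_version='3.6.9', app_name='myapp',
#             app_dir='/srv/myapp', repo_url='[email protected]:org/myapp.git',
#             hosts=['deploy@build-host'])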
|
evereux/flicket
|
application/flicket/views/release.py
|
Python
|
mit
| 2,064
| 0.002907
|
#! usr/bin/python3
# -*- coding: utf-8 -*-
#
# Flicket - copyright Paul Bourne: [email protected]
import datetime
from flask import redirect, url_for, flash, g
from flask_babel import gettext
from flask_login import login_required
from . import flicket_bp
from application import app, db
from application.flicket.models.flicket_models import FlicketTicket, FlicketStatus
from application.flicket.scripts.email import FlicketMail
from application.flicket.scripts.flicket_functions import add_action
# view to release a ticket user has been assigned.
@flicket_bp.route(app.config['FLICKET'] + 'release/<int:ticket_id>/', methods=['GET', 'POST'])
@login_required
def release(ticket_id=False):
if ticket_id:
ticket = FlicketTicket.query.filter_by(id=ticket_id).first()
# is ticket assigned.
if not ticket.assigned:
flash(gettext('Ticket has not been assigned'), category='warning')
return redirect(url_for('flicket_bp.ticket_view', ticket_id=ticket_id))
        # check ticket is owned by user or user is admin
if (ticket.assigned.id != g.user.id) and (not g.user.is_admin):
flash(gettext('You can not release a ticket you are not working on.'), category='warning')
return redirect(url_for('flicket_bp.ticket_view', ticket_id=ticket_id))
# set status to open
status = FlicketStatus.query.filter_by(status='Open').first()
ticket.current_status = status
ticket.last_updated = datetime.datetime.now()
user = ticket.assigned
ticket.assigned = None
user.total_assigned -= 1
db.session.commit()
# add action record
add_action(ticket, 'release')
# send email to state ticket has been released.
f_mail = FlicketMail()
f_mail.release_ticket(ticket)
flash(gettext('You released ticket: %(value)s', value=ticket.id), category='success')
return redirect(url_for('flicket_bp.ticket_view', ticket_id=ticket.id))
return redirect(url_for('flicket_bp.tickets'))
|
Azure/azure-sdk-for-python
|
sdk/peering/azure-mgmt-peering/azure/mgmt/peering/aio/_peering_management_client.py
|
Python
|
mit
| 6,754
| 0.002813
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Optional, TYPE_CHECKING
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
from msrest import Deserializer, Serializer
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
from ._configuration import PeeringManagementClientConfiguration
from .operations import PeeringManagementClientOperationsMixin
from .operations import LegacyPeeringsOperations
from .operations import Operations
from .operations import PeerAsnsOperations
from .operations import PeeringLocationsOperations
from .operations import PeeringsOperations
from .operations import PeeringServiceLocationsOperations
from .operations import PeeringServicePrefixesOperations
from .operations import PrefixesOperations
from .operations import PeeringServiceProvidersOperations
from .operations import PeeringServicesOperations
from .. import models
class PeeringManagementClient(PeeringManagementClientOperationsMixin):
"""Peering Client.
:ivar legacy_peerings: LegacyPeeringsOperations operations
:vartype legacy_peerings: azure.mgmt.peering.aio.operations.LegacyPeeringsOperations
:ivar operations: Operations operations
:vartype operations: azure.mgmt.peering.aio.operations.Operations
:ivar peer_asns: PeerAsnsOperations operations
:vartype peer_asns: azure.mgmt.peering.aio.operations.PeerAsnsOperations
:ivar peering_locations: PeeringLocationsOperations operations
:vartype peering_locations: azure.mgmt.peering.aio.operations.PeeringLocationsOperations
:ivar peerings: PeeringsOperations operations
:vartype peerings: azure.mgmt.peering.aio.operations.PeeringsOperations
:ivar peering_service_locations: PeeringServiceLocationsOperations operations
:vartype peering_service_locations: azure.mgmt.peering.aio.operations.PeeringServiceLocationsOperations
:ivar peering_service_prefixes: PeeringServicePrefixesOperations operations
:vartype peering_service_prefixes: azure.mgmt.peering.aio.operations.PeeringServicePrefixesOperations
:ivar prefixes: PrefixesOperations operations
:vartype prefixes: azure.mgmt.peering.aio.operations.PrefixesOperations
:ivar peering_service_providers: PeeringServiceProvidersOperations operations
:vartype peering_service_providers: azure.mgmt.peering.aio.operations.PeeringServiceProvidersOperations
:ivar peering_services: PeeringServicesOperations operations
:vartype peering_services: azure.mgmt.peering.aio.operations.PeeringServicesOperations
:param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: The Azure subscription ID.
:type subscription_id: str
:param str base_url: Service URL
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
base_url: Optional[str] = None,
**kwargs: Any
) -> None:
if not base_url:
base_url = 'https://management.azure.com'
        self._config = PeeringManagementClientConfiguration(credential, subscription_id, **kwargs)
self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._serialize.client_side_validation = False
self._deserialize = Deserializer(client_models)
self.legacy_peerings = LegacyPeeringsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.operations = Operations(
self._client, self._config, self._serialize, self._deserialize)
self.peer_asns = PeerAsnsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.peering_locations = PeeringLocationsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.peerings = PeeringsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.peering_service_locations = PeeringServiceLocationsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.peering_service_prefixes = PeeringServicePrefixesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.prefixes = PrefixesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.peering_service_providers = PeeringServiceProvidersOperations(
self._client, self._config, self._serialize, self._deserialize)
self.peering_services = PeeringServicesOperations(
self._client, self._config, self._serialize, self._deserialize)
async def _send_request(self, http_request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse:
"""Runs the network request through the client's chained policies.
:param http_request: The network request you want to make. Required.
:type http_request: ~azure.core.pipeline.transport.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to True.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.pipeline.transport.AsyncHttpResponse
"""
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
http_request.url = self._client.format_url(http_request.url, **path_format_arguments)
stream = kwargs.pop("stream", True)
pipeline_response = await self._client._pipeline.run(http_request, stream=stream, **kwargs)
return pipeline_response.http_response
async def close(self) -> None:
await self._client.close()
async def __aenter__(self) -> "PeeringManagementClient":
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_details) -> None:
await self._client.__aexit__(*exc_details)
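# A minimal usage sketch; the subscription id and api-version are
# placeholders, and DefaultAzureCredential comes from the separate
# azure-identity package. Kept as comments so importing this module stays
# side-effect free:
#
#     import asyncio
#     from azure.identity.aio import DefaultAzureCredential
#     from azure.core.pipeline.transport import HttpRequest
#
#     async def main():
#         async with PeeringManagementClient(DefaultAzureCredential(),
#                                            "<subscription-id>") as client:
#             url = "/subscriptions/{subscriptionId}/providers/Microsoft.Peering/peerings?api-version=<api-version>"
#             response = await client._send_request(HttpRequest("GET", url))
#             print(response.status_code)   # {subscriptionId} is filled in by _send_request
#
#     asyncio.run(main())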
|
engineerapart/TheRemoteFreelancer
|
docs/scripts/download_favicons.py
|
Python
|
unlicense
| 964
| 0.001037
|
#!python3
"""
This script downloads the favicons
Usage:
python3 update_alexa path/to/data.csv
"""
import os
import requests
favicon_path = os.path.join(os.path.dirname(__file__), "..", "icons")
def download_favicons(links):
for link in links:
netloc = link['netloc']
url = 'http://' + netloc
new_favicon_path = os.path.join(favicon_path, netloc + ".ico")
if not os.path.exists(new_favicon_path):
try:
print(url)
response = requests.get(
"https://realfavicongenerator.p.rapidapi.com/favicon/icon",
                    params={'platform': 'desktop', "site": url},
headers={'X-Mashape-Key': os.environ.get("mashape_key")}
)
            except Exception:
pass
else:
if response:
with open(new_favicon_path, 'wb') as f:
f.write(response.content)
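# A minimal sketch of calling the helper directly; the link dicts follow the
# 'netloc' key used above, and a RapidAPI key is assumed to be exported as
# the 'mashape_key' environment variable.
if __name__ == "__main__":
    download_favicons([{'netloc': 'example.com'}, {'netloc': 'example.org'}])
    # writes icons/<netloc>.ico into the icons folder for each resolved link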
|
rbuffat/pyidf
|
tests/test_outputcontrolilluminancemapstyle.py
|
Python
|
apache-2.0
| 1,023
| 0.002933
|
import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.daylighting import OutputControlIlluminanceMapStyle
log = logging.getLogger(__name__)
class TestOutputControlIlluminanceMapStyle(unittest.TestCase):
def setUp(self):
self.fd, self.path = tempfile.mkstemp()
def tearDown(self):
os.remove(self.path)
def test_create_outputcontrolilluminancemapstyle(self):
pyidf.validation_level = ValidationLevel.error
obj = OutputControlIlluminanceMapStyle()
# alpha
var_column_separator = "Comma"
obj.column_separator = var_column_separator
|
idf = IDF()
idf.add(obj)
idf.save(self.path, check=False)
with open(self.path, mode='r') as f:
for line in f:
                log.debug(line.strip())
idf2 = IDF(self.path)
self.assertEqual(idf2.outputcontrolilluminancemapstyles[0].column_separator, var_column_separator)
|
mtils/ems
|
ems/qt4/itemmodel/columnsectionmapper.py
|
Python
|
mit
| 3,948
| 0.008359
|
'''
Created on 24.03.2011
@author: michi
'''
from PyQt4.QtGui import QItemDelegate
from sqlalchemy import Table
from sqlalchemy.sql import Alias,Select
from ems import qt4
class ColumnSectionMapper(object):
def __init__(self,alchemySelect=None, parent=None):
self.__columnConfigs = []
self.__columnConfigIdByName = {}
self.__alchemySelect = alchemySelect
self.__delegate = MapperDelegate(self,parent)
pass
def addColumn(self,columnName,translatedName=None, delegate=None):
if self.__columnConfigIdByName.has_key(columnName):
raise KeyError("Column %s already assigned" % columnName)
index = len(self.__columnConfigs)
self.__columnConfigs.append({'name':columnName,
'translatedName':translatedName,
'delegate':delegate})
self.__columnConfigIdByName[columnName] = index
@property
def translatedColumnNames(self):
names = {}
for config in self.__columnConfigs:
names[config['name']] = config['translatedName']
return names
def __extractTablesFormSelect(self,alchemySelect):
tableDict = {}
for fromCond in alchemySelect.locate_all_froms():
if isinstance(fromCond, Table):
tableDict[str(fromCond.name)] = fromCond
elif isinstance(fromCond,Alias):
if isinstance(fromCond.original,Table):
tableDict[str(fromCond.name)] = fromCond
return tableDict
def getDelegate(self):
return self.__delegate
def getColConfig(self, column):
if isinstance(column, int):
index = column
else:
index = self.__columnConfigIdByName[unicode(column)]
return self.__columnConfigs[index]
def getSelectColumns(self, alchemySelect=None):
if alchemySelect is None:
alchemySelect = self.__alchemySelect
if not isinstance(alchemySelect, Select):
raise TypeError("alchemySelect has to be instanceof sqlalchemy.select")
tableDict = self.__extractTablesFormSelect(alchemySelect)
columnList = []
for config in self.__columnConfigs:
tableName,colName = config['name'].split('.')
if tableDict.has_key(tableName):
columnList.append(tableDict[tableName].c[colName])
return columnList
class MapperDelegate(QItemDelegate):
def __init__(self, mapper, parent=None):
super(MapperDelegate, self).__init__(parent)
self.__mapper = mapper
def getDelegate(self, index):
colName = index.data(qt4.ColumnNameRole).toString()
delegate = self.__mapper.getColConfig(colName)['delegate']
return delegate
def paint(self, painter, option, index):
delegate = self.getDelegate(index)
if delegate is not None:
delegate.paint(painter, option, index)
else:
QItemDelegate.paint(self, painter, option, index)
    def createEditor(self, parent, option, index):
delegate = self.getDelegate(index)
if delegate is not None:
return delegate.createEditor(parent, option, index)
else:
return QItemDelegate.createEditor(self, parent, option,
index)
def setEditorData(self, editor, index):
delegate = self.getDelegate(index)
if delegate is not None:
delegate.setEditorData(editor, index)
else:
QItemDelegate.setEditorData(self, editor, index)
def setModelData(self, editor, model, index):
delegate = self.getDelegate(index)
if delegate is not None:
delegate.setModelData(editor, model, index)
else:
QItemDelegate.setModelData(self, editor, model, index)
|
citrix-openstack-build/sahara
|
sahara/tests/unit/service/edp/test_job_manager.py
|
Python
|
apache-2.0
| 20,947
| 0
|
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import mock
import testtools
from sahara import conductor as cond
from sahara import exceptions as ex
from sahara.plugins import base as pb
from sahara.service.edp import job_manager
from sahara.service.edp import job_utils
from sahara.service.edp.oozie.workflow_creator import workflow_factory
from sahara.swift import swift_helper as sw
from sahara.tests.unit import base
from sahara.tests.unit.service.edp import edp_test_utils as u
from sahara.utils import edp
from sahara.utils import patches as p
conductor = cond.API
_java_main_class = "org.apache.hadoop.examples.WordCount"
_java_opts = "-Dparam1=val1 -Dparam2=val2"
class TestJobManager(base.SaharaWithDbTestCase):
def setUp(self):
super(TestJobManager, self).setUp()
p.patch_minidom_writexml()
pb.setup_plugins()
@mock.patch('uuid.uuid4')
@mock.patch('sahara.utils.remote.get_remote')
def test_create_workflow_dir(self, get_remote, uuid4):
job = mock.Mock()
job.name = "job"
# This is to mock "with remote.get_remote(instance) as r"
remote_instance = mock.Mock()
get_remote.return_value.__enter__ = mock.Mock(
return_value=remote_instance)
remote_instance.execute_command = mock.Mock()
remote_instance.execute_command.return_value = 0, "standard out"
uuid4.return_value = "generated_uuid"
job_utils.create_workflow_dir("where", "/tmp/somewhere", job, "uuid")
remote_instance.execute_command.assert_called_with(
"mkdir -p /tmp/somewhere/job/uuid")
remote_instance.execute_command.reset_mock()
job_utils.create_workflow_dir("where", "/tmp/somewhere", job)
remote_instance.execute_command.assert_called_with(
"mkdir -p /tmp/somewhere/job/generated_uuid")
@mock.patch('sahara.service.edp.binary_retrievers.dispatch.get_raw_binary')
@mock.patch('sahara.utils.remote.get_remote')
def test_upload_job_files(self, get_remote, get_raw_binary):
main_names = ["main1", "main2", "main3"]
lib_names = ["lib1", "lib2", "lib3"]
def make_data_objects(*args):
objs = []
for name in args:
m = mock.Mock()
m.name = name
objs.append(m)
return objs
job = mock.Mock()
job.name = "job"
job.mains = make_data_objects(*main_names)
job.libs = make_data_objects(*lib_names)
# This is to mock "with remote.get_remote(instance) as r"
remote_instance = mock.Mock()
get_remote.return_value.__enter__ = mock.Mock(
return_value=remote_instance)
get_raw_binary.return_value = "data"
paths = job_utils.upload_job_files(
"where", "/somedir", job, libs_subdir=False)
self.assertEqual(paths,
["/somedir/" + n for n in main_names + lib_names])
for path in paths:
remote_instance.write_file_to.assert_any_call(path, "data")
remote_instance.write_file_to.reset_mock()
paths = job_utils.upload_job_files(
"where", "/
|
somedir", job, libs_subdir=True)
remote_instance.execute_command.assert_called_with(
"mkdir -p /s
|
omedir/libs")
expected = ["/somedir/" + n for n in main_names]
expected += ["/somedir/libs/" + n for n in lib_names]
self.assertEqual(paths, expected)
for path in paths:
remote_instance.write_file_to.assert_any_call(path, "data")
@mock.patch('sahara.conductor.API.job_binary_get')
def test_build_workflow_for_job_pig(self, job_binary):
job, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG, configs={})
job_binary.return_value = {"name": "script.pig"}
input_data = u.create_data_source('swift://ex/i')
output_data = u.create_data_source('swift://ex/o')
res = workflow_factory.get_workflow_xml(
job, u.create_cluster(), job_exec, input_data, output_data,
'hadoop')
self.assertIn("""
<param>INPUT=swift://ex.sahara/i</param>
<param>OUTPUT=swift://ex.sahara/o</param>""", res)
self.assertIn("""
<configuration>
<property>
<name>fs.swift.service.sahara.password</name>
<value>admin1</value>
</property>
<property>
<name>fs.swift.service.sahara.username</name>
<value>admin</value>
</property>
</configuration>""", res)
self.assertIn("<script>script.pig</script>", res)
# testing workflow creation with a proxy domain
self.override_config('use_domain_for_proxy_users', True)
self.override_config("proxy_user_domain_name", 'sahara_proxy_domain')
job, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG, proxy=True)
res = workflow_factory.get_workflow_xml(
job, u.create_cluster(), job_exec, input_data, output_data,
'hadoop')
self.assertIn("""
<configuration>
<property>
<name>fs.swift.service.sahara.domain.name</name>
<value>sahara_proxy_domain</value>
</property>
<property>
<name>fs.swift.service.sahara.password</name>
<value>55555555-6666-7777-8888-999999999999</value>
</property>
<property>
<name>fs.swift.service.sahara.trust.id</name>
<value>0123456789abcdef0123456789abcdef</value>
</property>
<property>
<name>fs.swift.service.sahara.username</name>
<value>job_00000000-1111-2222-3333-4444444444444444</value>
</property>
</configuration>""", res)
@mock.patch('sahara.conductor.API.job_binary_get')
def test_build_workflow_swift_configs(self, job_binary):
# Test that swift configs come from either input or output data sources
job, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG, configs={})
job_binary.return_value = {"name": "script.pig"}
input_data = u.create_data_source('swift://ex/i')
output_data = u.create_data_source('hdfs://user/hadoop/out')
res = workflow_factory.get_workflow_xml(
job, u.create_cluster(), job_exec, input_data, output_data,
'hadoop')
self.assertIn("""
<configuration>
<property>
<name>fs.swift.service.sahara.password</name>
<value>admin1</value>
</property>
<property>
<name>fs.swift.service.sahara.username</name>
<value>admin</value>
</property>
</configuration>""", res)
input_data = u.create_data_source('hdfs://user/hadoop/in')
output_data = u.create_data_source('swift://ex/o')
res = workflow_factory.get_workflow_xml(
job, u.create_cluster(), job_exec, input_data, output_data,
'hadoop')
self.assertIn("""
<configuration>
<property>
<name>fs.swift.service.sahara.password</name>
<value>admin1</value>
</property>
<property>
<name>fs.swift.service.sahara.username</name>
<value>admin</value>
</property>
</configuration>""", res)
job, job_exec = u.create_job_exec(
edp.JOB_TYPE_PIG, configs={'configs': {'dummy': 'value'}})
input_data = u.create_data_source('hdfs://user/hadoop/in')
output_data = u.create_data_source('hdfs://user/hadoop/out')
res = workflow_factory.get_workflow_xml(
job, u.create_cluster(), job_exec, input_data, output_data,
|
mokuki082/EggDrop
|
code/build/bdist.macosx-10.6-intel/python3.4-standalone/app/temp/pygame/font.py
|
Python
|
gpl-3.0
| 393
| 0.005089
|
def __load():
import imp, os, sys
ext = 'pygame/font.so'
for path in sys.path:
        if not path.endswith('lib-dynload'):
continue
ext_path = os.path.join(path, ext)
if os.path.exists(ext_path):
mod = imp.load_dynamic(__name__, ext_path)
break
else:
        raise ImportError(repr(ext) + " not found")
__load()
del __load
|
cheery/lever
|
runtime/evaluator/optable.py
|
Python
|
mit
| 1,719
| 0.001163
|
import re
source = [
('assert', 0x00, False, 'vreg'),
('raise', 0x05, False, 'vreg'),
('constant', 0x10, True, 'constant'),
('list', 0x20, True, 'vreg*'),
('move', 0x30, False, 'vreg vreg'),
('call', 0x40, True, 'vreg vreg*'),
('not', 0x41, True, 'vreg'),
('contains', 0x42, True, 'vreg vreg'),
('callv', 0x45, True, 'vreg vreg vreg*'),
('isnull', 0x48, True, 'vreg'),
('return', 0x50, False, 'vreg'),
('yield', 0x51, False, 'vreg'),
('jump', 0x60, False, 'block'),
('cond', 0x70, False, 'vreg block block'),
('func', 0x80, True, 'function'),
('iter', 0xC0, True, 'vreg'),
#('next', 0xC1, True, 'vreg'),
#('iterstop', 0xC2, False, 'block'),
('next', 0xC3, True, 'vreg block'),
('getattr', 0xD0, True, 'vreg string'),
('setattr', 0xD1, True, 'vreg string vreg'),
('getitem', 0xD2, True, 'vreg vreg'),
('setitem', 0xD3, True, 'vreg vreg vreg'),
('getloc', 0xE0, True, 'index'),
('setloc', 0xE1, True, 'index vreg'),
('getupv', 0xE2, True, 'index index'),
('setupv', 0xE3, True, 'index index vreg'),
('getglob', 0xF0, True, 'string'),
('setglob', 0xF1, True, 'string vreg'),
    ('loglob', 0xFF, False, 'vreg'),
]
enc = {}
dec = {}
names = {}
for opname, opcode, has_result, form in source:
assert opcode not in dec, opcode
pattern = re.split(r"\s+", form.rstrip('*'))
if form.endswith('*'):
variadic = pattern.pop()
else:
variadic = None
enc[opname] = opcode, has_result, pattern, variadic
dec[opcode] = opname, has_result, pattern, variadic
names[opcode] = opname
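# A minimal sketch of consulting the generated tables (values follow from the
# source list above).
if __name__ == '__main__':
    opcode, has_result, pattern, variadic = enc['call']
    assert (opcode, has_result, pattern, variadic) == (0x40, True, ['vreg'], 'vreg')
    print(names[0x60])   # 'jump'
    print(dec[0xD0])     # ('getattr', True, ['vreg', 'string'], None)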
|
ScreamingUdder/mantid
|
Framework/PythonInterface/test/python/mantid/api/SpectrumInfoTest.py
|
Python
|
gpl-3.0
| 788
| 0.005076
|
from __future__ import (absolute_import, division, print_function)
import unittest
from testhelpers import WorkspaceCreationHelper
class SpectrumInfoTest(unittest.TestCase):
_ws = None
def setUp(self):
if self.__class__._ws is None:
self.__class__._ws = WorkspaceCreationHelper.create2DWorkspaceWithFullInstrument(2, 1, False) # no monitors
self.__class__._ws.getSpectrum(0).clearDetectorIDs()
def test_hasDetectors(self):
info = self._ws.spectrumInfo()
self.assertEquals(info.hasDetectors(0), False)
        self.assertEquals(info.hasDetectors(1), True)
def test_isMasked(self):
info = self._ws.spectrumInfo()
self.assertEquals(info.isMasked(1), False)
if __name__ == '__main__':
unittest.main()
|
TheRedLady/codebook
|
codebook/profiles/restapi/serializers.py
|
Python
|
gpl-3.0
| 4,811
| 0.001663
|
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from ..models import MyUser, Profile
from ..utils import perform_reputation_check
class CreateUserSerializer(serializers.ModelSerializer):
password = serializers.CharField(
style={'input_type': 'password'}
)
class Meta:
model = MyUser
fields = ('email', 'password', 'first_name', 'last_name')
extra_kwargs = {'password': {'write_only': True}}
def create(self, validated_data):
user = MyUser.objects.create_user(
email=validated_data['email'],
first_name=validated_data['first_name'],
last_name=validated_data['last_name'],
password=validated_data['password']
)
return user
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = MyUser
fields = [
'id',
'email',
'first_name',
'last_name',
]
extra_kwargs = {'id': {'read_only': True}, 'email': {'read_only': True}}
def create(self, validated_data):
user = MyUser.objects.create_user(
email=validated_data['email'],
first_name=validated_data['first_name'],
last_name=validated_data['last_name'],
password=validated_data['password']
)
return user
def update(self, instance, validated_data):
instance.first_name = validated_data.get('first_name', instance.first_name)
instance.last_name = validated_data.get('last_name', instance.last_name)
instance.save()
return instance
class FollowSerializer(serializers.ModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='profiles:profile-detail')
full_name = serializers.SerializerMethodField()
class Meta:
model = Profile
fields = ['user_id', 'full_name', 'url']
def get_full_name(self, obj):
return obj.user.get_full_name()
class CreateProfileSerializer(serializers.ModelSerializer):
user = CreateUserSerializer()
class Meta:
model = Profile
fields = [
'user',
'follows'
]
def create(self, validated_data):
new_user = CreateUserSerializer().create(validated_data.pop('user'))
new_profile = Profile.objects.get(user_id=new_user.id)
new_profile.save()
return new_profile
class ProfileSerializer(serializers.ModelSerializer):
    user = UserSerializer(read_only=True)
reputation = serializers.CharField(max_length=8, read_only=True)
follows = FollowSerializer(read_only=True, many=True)
url = serializers.HyperlinkedIdentityField(view_name='profiles:profile-detail')
questions_count = serializers.SerializerMethodField()
    answers_count = serializers.SerializerMethodField()
followed_by = serializers.SerializerMethodField()
class Meta:
model = Profile
fields = [
'url',
'user',
'reputation',
'follows',
'questions_count',
'answers_count',
'followed_by'
]
def get_questions_count(self, obj):
return obj.user.questions.count()
def get_answers_count(self, obj):
return obj.user.answers.count()
def get_followed_by(self, obj):
return obj.profile_set.count()
class UpdateProfileSerializer(serializers.ModelSerializer):
user = UserSerializer()
class Meta:
model = Profile
fields = [
'user',
'reputation',
'follows',
]
def validate_follows(self, value):
if self.instance in value:
raise serializers.ValidationError(_('User cannot follow self'))
return value
def validate_reputation(self, value):
if value != perform_reputation_check(self.instance.user):
raise serializers.ValidationError(_('Selected reputation is not valid for this user'))
return value
def update(self, instance, validated_data):
UserSerializer().update(instance.user, validated_data.pop('user'))
instance.reputation = validated_data.get('reputation', instance.reputation)
if validated_data['follows']:
instance.follows.add(*validated_data['follows'])
instance.save()
return instance
class AuthorSerializer(serializers.ModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='profiles:profile-detail')
full_name = serializers.SerializerMethodField()
class Meta:
model = MyUser
fields = [
'id',
'email',
'url',
'full_name',
]
def get_full_name(self, obj):
return obj.get_full_name()
|
Evidlo/django-notifications
|
notifications/tests/urls.py
|
Python
|
bsd-3-clause
| 544
| 0
|
# -*- coding: utf-8 -*-
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.views import login
import notifications.urls
import notifications.tests.views
urlpatterns = [
url(r'^login/$', login, name='login'), # needed for Django 1.6 tests
url(r'^admin/', include(admin.site.urls)),
url(r'^test_make/', notifications.tests.views.make_notification),
url(r'^test/', notifications.tests.views.live_tester),
    url(r'^', include(notifications.urls, namespace='notifications')),
]
|
davidxmoody/diary
|
tests/filler_text.py
|
Python
|
mit
| 20,162
| 0.002579
|
import random
filler_text = '''Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
Dolor laborum. et magna dolor nostrud Lorem qui deserunt do Excepteur laboris occaecat ut minim nisi dolore reprehenderit exercitation in eu quis aute aliquip laboris dolore deserunt cupidatat consequat. voluptate officia non irure commodo Duis amet, in mollit occaecat tempor ut sunt enim Excepteur sunt dolore minim do dolor fugiat voluptate Excepteur in adipisicing do undefined culpa cupidatat mollit proident, undefined commodo consectetur.
Labore deserunt aute incididunt amet, Excepteur labore qui velit ad Duis qui sint consequat. Ut dolore laboris mollit cillum reprehenderit aute non minim ad dolore Ut Excepteur reprehenderit sint aliqua. Duis reprehenderit culpa et Ut ipsum exercitation exercitation nulla ipsum ea ex in ullamco ea fugiat officia occaecat nulla nostrud cupidatat ea irure ad in ullamco culpa enim ullamco in enim in sed ad Ut velit dolor dolor eu.
Fugiat reprehenderit eiusmod adipisicing ad eiusmod sint aliquip id nostrud Duis aliquip labore ipsum mollit Ut Duis aute sit sed Ut dolor exercitation consequat. qui Duis velit aliquip nulla culpa non consequat. qui elit, amet, esse velit ea ad veniam, Excepteur aliqua. ut deserunt Ut aliquip deserunt elit, occaecat ullamco dolore aliquip voluptate laborum. elit, sit in dolore est.
Ullamco ut velit non culpa veniam, in consequat. nostrud sint amet, nulla in labore cillum non sed Ut veniam, dolor occaecat in do occaecat voluptate nostrud deserunt nisi labore in deserunt voluptate consectetur do quis exercitation nisi laboris eiusmod anim Ut reprehenderit occaecat magna nisi occaecat aliquip sed irure exercitation exercitation undefined adipisicing sint cupidatat eu labore sunt amet, officia Excepteur mollit sint magna sunt.
Aliqua. consectetur elit, Ut est officia veniam, nulla sint in ipsum dolore do aute fugiat exercitation aliquip commodo consequat. consectetur et do officia reprehenderit aute ut laboris quis culpa eu incididunt tempor reprehenderit ipsum aute veniam, aliqua. culpa Duis nostrud aute aliqua. id amet, sit aute id sunt laborum. velit nulla minim adipisicing tempor Duis est sint exercitation quis consequat. ut aliqua. eu reprehenderit.
Culpa Duis incididunt est elit, ea dolore tempor in occaecat non in pariatur. commodo sint commodo ut ut sit commodo reprehenderit ex eu laborum. magna non aliqua. eiusmod Excepteur enim deserunt velit veniam, et dolore eu cupidatat deserunt do irure eu sunt irure voluptate est officia undefined in occaecat labore dolor est mollit dolor incididunt in Excepteur.
Commodo nostrud pariatur. sit Excepteur est sunt culpa in dolore ex tempor cillum ut sint laboris Excepteur ut adipisicing laborum. enim pariatur. reprehenderit ut consectetur ad in dolore Excepteur velit ipsum adipisicing ex deserunt aliqua. cupidatat aliquip nisi sint consectetur laboris velit aliquip ex ullamco dolore in pariatur. non dolor ad velit nostrud veniam, laborum. esse laborum. dolor fugiat aute consequat. velit cillum qui in dolor.
Dolore ex nulla incididunt aute ut aute pariatur. est ipsum sunt occaecat quis ea dolor culpa aute esse id ex incididunt ad consectetur do ex mollit voluptate dolor enim dolor reprehenderit ut anim Duis dolor pariatur. aute velit consequat. in consequat. aliqua. aliquip est fugiat voluptate ad sit esse in adipisicing elit, dolor magna in dolor ullamco occaecat eu aliquip reprehenderit fugiat.
Ex qui Excepteur eiusmod Excepteur officia cillum id aliqua. ad in in minim quis ut culpa in irure nisi pariatur. magna nostrud aliquip eiusmod non occaecat dolor do quis non et ea quis dolor ut incididunt ea proident, Excepteur in tempor nisi sunt eu mollit consectetur mollit nostrud dolor in tempor ut nulla exercitation et dolore reprehenderit consectetur irure tempor sunt tempor elit, eiusmod in sit.
Sit commodo minim id eiusmod in Duis laboris Excepteur ut ea minim magna Duis deserunt velit veniam, proident, aliqua. dolore anim commodo ullamco do labore non ullamco non enim ipsum consectetur irure sint Lorem deserunt dolor commodo cillum velit dolore Excepteur laborum. in tempor anim mollit magna in quis consequat. non ex Duis undefined eiusmod pariatur. dolore dolor dolore pariatur. incididunt eiusmod Excepteur non id Duis et adipisicing in ea eu.
Sit aute nostrud ex laboris ad aliqua. est dolor commodo pariatur. anim Duis velit veniam, incididunt ullamco ad non dolore nisi esse pariatur. Excepteur ut mollit in aute sit anim tempor aliqua. cupidatat dolore ea cupidatat est consectetur Lorem nulla dolor velit ea commodo do officia incididunt nostrud in nostrud pariatur. occaecat anim.
Quis adipisicing fugiat sit sit tempor sit irure elit, consequat. non est est Ut non aute Duis magna eu labore ullamco et fugiat in veniam, dolor dolor sed tempor cupidatat proident, in ut eiusmod ad quis labore ad ipsum officia amet, non dolore nisi aute proident, deserunt Duis nulla Duis proident, sed est irure Ut minim dolor magna proident, magna ullamco commodo.
Dolor mollit ullamco aliqua. eu labore aliqua. sed officia enim qui nostrud eiusmod Excepteur aliquip quis officia in aliquip nostrud tempor proident, ea sed consequat. dolor aliqua. aliqua. in dolor in do ut eu mollit commodo nostrud amet, id Duis qui dolor velit sit cillum sit officia dolor cillum sunt in dolore consectetur tempor irure in sit dolore amet, fugiat nisi nulla sint exercitation cillum officia.
Sit velit ipsum commodo laboris cillum dolore aliquip sint laboris laborum. fugiat anim ipsum cupidatat est qui deserunt irure sit aliqua. veniam, id nisi sunt nisi occaecat mollit eiusmod et sint exercitation id Duis non sit irure cupidatat qui aliqua. do id tempor id in quis elit, fugiat dolore proident, irure do Excepteur qui non irure proident, nulla aliquip minim velit velit dolor voluptate adipisicing incididunt magna incididunt ad ad.
Laborum. in ullamco non consequat. Excepteur pariatur. fugiat eiusmod esse consectetur ea ex velit esse est voluptate tempor in dolor voluptate commodo magna consequat. nisi tempor et sit commodo ut aute cupidatat incididunt ut incididunt elit, ut ad veniam, mollit aute adipisicing consectetur ex id Excepteur ullamco esse laboris sit ad anim in amet, sunt.
Ut incididunt qui reprehenderit dolor Ut mollit tempor pariatur. tempor non commodo laboris Excepteur quis adipisicing aliqua. dolor incididunt Excepteur qui est esse sunt quis ex culpa ad consequat. voluptate sint cupidatat eiusmod minim enim sed aute Excepteur dolore incididunt cillum culpa cillum tempor pariatur. ipsum laborum. reprehenderit aliqua. Ut amet, ipsum amet, sunt veniam, sint Ut sint.
Ut in cillum consectetur adipisicing dolore Ut magna exercitation mollit pariatur. minim consequat. et in veniam, nulla enim ullamco sint Excepteur cupidatat consequat. ut sint fugiat tempor Duis eiusmod Excepteur officia qui anim eu proident, aute qui quis magna pariatur. tempor veniam, non exercitation irure dolor non proident, nisi qui pariatur. enim sint cupidatat fugiat elit, magna culpa in Duis exercitation deserunt et voluptate nostrud anim enim nisi proident, amet.
Est id ad elit, minim nulla velit incididunt ipsum deserunt sunt pariatur. sunt mollit voluptate laborum. mollit laboris voluptate dolore culpa ipsum labore in undefined voluptate cupidatat amet, sed in aliquip dolor tempor dolore in Ut dolor amet, eiusmod cupidatat in aliqua. ullamco incididunt aute Excepteur ad ullamco sit amet, mollit ex officia Duis.
Ex irure labore dolor aute reprehenderit ullamco elit, sit consectetur aliqua. non consectetur veniam, in dolor ipsum exercitation Lorem sed pariatur. laborum. consequat. culpa aliqua. Ut Duis laborum. Ut proident, aliquip adipisicing consectetur culpa magna do irure aute tempor quis incididunt cupidatat co
|
cmgrote/tapiriik
|
tapiriik/services/fit.py
|
Python
|
apache-2.0
| 22,972
| 0.030167
|
from datetime import datetime, timedelta
from .interchange import WaypointType, ActivityStatisticUnit, ActivityType, LapIntensity, LapTriggerMethod
from .devices import DeviceIdentifier, DeviceIdentifierType
import struct
import sys
import pytz
class FITFileType:
Activity = 4 # The only one we care about now.
class FITManufacturer:
DEVELOPMENT = 255 # $1500/year for one of these numbers.
class FITEvent:
Timer = 0
Lap = 9
Activity = 26
class FITEventType:
Start = 0
Stop = 1
# It's not a coincidence that these enums match the ones in interchange perfectly
class FITLapIntensity:
Active = 0
Rest = 1
Warmup = 2
Cooldown = 3
class FITLapTriggerMethod:
Manual = 0
Time = 1
Distance = 2
PositionStart = 3
PositionLap = 4
PositionWaypoint = 5
PositionMarked = 6
SessionEnd = 7
FitnessEquipment = 8
class FITActivityType:
GENERIC = 0
RUNNING = 1
CYCLING = 2
TRANSITION = 3
FITNESS_EQUIPMENT = 4
SWIMMING = 5
WALKING = 6
ALL = 254
class FITMessageDataType:
def __init__(self, name, typeField, size, packFormat, invalid, formatter=None):
self.Name = name
self.TypeField = typeField
self.Size = size
self.PackFormat = packFormat
self.Formatter = formatter
self.InvalidValue = invalid
class FITMessageTemplate:
def __init__(self, name, number, *args, fields=None):
self.Name = name
self.Number = number
self.Fields = {}
self.FieldNameSet = set()
self.FieldNameList = []
if len(args) == 1 and type(args[0]) is dict:
fields = args[0]
self.Fields = fields
self.FieldNameSet = set(fields.keys()) # It strikes me that keys might already be a set?
else:
# Supply fields in order NUM, NAME, TYPE
for x in range(0, int(len(args)/3)):
n = x * 3
self.Fields[args[n+1]] = {"Name": args[n+1], "Number": args[n], "Type": args[n+2]}
self.FieldNameSet.add(args[n+1])
sortedFields = list(self.Fields.values())
sortedFields.sort(key = lambda x: x["Number"])
self.FieldNameList = [x["Name"] for x in sortedFields] # *ordered*
class FITMessageGenerator:
def __init__(self):
self._types = {}
self._messageTemplates = {}
self._definitions = {}
self._result = []
# All our convience functions for preparing the field types to be packed.
def stringFormatter(input):
raise Exception("Not implemented")
def dateTimeFormatter(input):
# UINT32
# Seconds since UTC 00:00 Dec 31 1989. If <0x10000000 = system time
if input is None:
return struct.pack("<I", 0xFFFFFFFF)
delta = round((input - datetime(hour=0, minute=0, month=12, day=31, year=1989)).total_seconds())
return struct.pack("<I", delta)
def msecFormatter(input):
# UINT32
if input is None:
return struct.pack("<I", 0xFFFFFFFF)
return struct.pack("<I", round((input if type(input) is not timedelta else input.total_seconds()) * 1000))
def mmPerSecFormatter(input):
# UINT16
if input is None:
return struct.pack("<H", 0xFFFF)
return struct.pack("<H", round(input * 1000))
def cmFormatter(input):
# UINT32
if input is None:
return struct.pack("<I", 0xFFFFFFFF)
return struct.pack("<I", round(input * 100))
def altitudeFormatter(input):
# UINT16
if input is None:
return struct.pack("<H", 0xFFFF)
return struct.pack("<H", round((input + 500) * 5)) # Increments of 1/5, offset from -500m :S
def semicirclesFormatter(input):
# SINT32
if input is None:
return struct.pack("<i", 0x7FFFFFFF) # FIT-defined invalid value
return struct.pack("<i", round(input * (2 ** 31 / 180)))
def versionFormatter(input):
# UINT16
if input is None:
return struct.pack("<H", 0xFFFF)
return struct.pack("<H", round(input * 100))
def defType(name, *args, **kwargs):
aliases = [name] if type(name) is not list else name
# Cheap cheap cheap
for alias in aliases:
self._types[alias] = FITMessageDataType(alias, *args, **kwargs)
defType(["enum", "file"], 0x00, 1, "B", 0xFF)
defType("sint8", 0x01, 1, "b", 0x7F)
defType("uint8", 0x02, 1, "B", 0xFF)
defType("sint16", 0x83, 2, "h", 0x7FFF)
defType(["uint16", "manufacturer"], 0x84, 2, "H", 0xFFFF)
defType("sint32", 0x85, 4, "i", 0x7FFFFFFF)
defType("uint32", 0x86, 4, "I", 0xFFFFFFFF)
defType("string", 0x07, None, None, 0x0, formatter=stringFormatter)
defType("float32", 0x88, 4, "f", 0xFFFFFFFF)
defType("float64", 0x89, 8, "d", 0xFFFFFFFFFFFFFFFF)
defType("uint8z", 0x0A, 1, "B", 0x00)
defType("uint16z", 0x0B, 2, "H", 0x00)
defType("uint32z", 0x0C, 4, "I", 0x00)
defType("byte", 0x0D, 1, "B", 0xFF) # This isn't totally correct, docs say "an array of bytes"
# Not strictly FIT fields, but convenient.
defType("date_time", 0x86, 4, None, 0xFFFFFFFF, formatter=dateTimeFormatter)
defType("duration_msec", 0x86, 4, None, 0xFFFFFFFF, formatter=msecFormatter)
defType("distance_cm", 0x86, 4, None, 0xFFFFFFFF, formatter=cmFormatter)
defType("mmPerSec", 0x84, 2, None, 0xFFFF, formatter=mmPerSecFormatter)
defType("semicircles", 0x85, 4, None, 0x7FFFFFFF, formatter=semicirclesFormatter)
defType("altitude", 0x84, 2, None, 0xFFFF, formatter=altitudeFormatter)
defType("version", 0x84, 2, None, 0xFFFF, formatter=versionFormatter)
def defMsg(name, *args):
self._messageTemplates[name] = FITMessageTemplate(name, *args)
defMsg("file_id", 0,
0, "type", "file",
1, "manufacturer", "manufacturer",
2, "product", "uint16",
3, "serial_number", "uint32z",
4, "time_created", "date_time",
5, "number", "uint16")
defMsg("file_creator", 49,
0, "software_version", "uint16",
1, "hardware_version", "uint8")
defMsg("activity", 34,
253, "timestamp", "date_time",
1, "num_sessions", "uint16",
2, "type", "enum",
3, "event", "enum", # Required
4, "event_type", "enum",
5, "local_timestamp", "date_time")
defMsg("session", 18,
253, "timestamp", "date_time",
2, "start_time", "date_time", # Vs timestamp, which was whenever the record was "written"/end of the session
7, "total_elapsed_time", "duration_msec", # Including pauses
8, "total_timer_time", "duration_msec", # Excluding pauses
59, "total_moving_time", "duration_msec",
5, "sport", "enum",
6, "sub_sport", "enum",
0, "event", "enum",
1, "event_type", "enum",
9, "total_distance", "distance_cm",
11,"total_calories", "uint16",
14, "avg_speed", "mmPerSec",
15, "max_speed", "mmPerSec",
16, "avg_heart_rate", "uint8",
17, "max_heart_rate", "uint8",
18, "avg_cadence", "uint8",
19, "max_cadence", "uint8",
20, "avg_power", "uint16",
21, "max_power", "uint16",
22, "total_ascent", "uint16",
23, "total_descent", "uint16",
49, "avg_altitude", "altitude",
50, "max_altitude", "altitude",
71, "min_altitude", "altitude",
57, "avg_temperature", "sint8",
58, "max_temperature", "sint8")
defMsg("lap", 19,
253, "timestamp", "date_time",
0, "event", "enum",
1, "event_type", "enum",
25, "sport", "enum",
23, "intensity", "enum",
24, "lap_trigger", "enum",
2, "start_time", "date_time", # Vs timestamp, which was whenever the record was "written"/end of the session
7, "total_elapsed_time", "duration_msec", # Including pauses
8, "total_timer_time", "duration_msec", # Excluding pauses
52, "total_moving_time", "duration_msec",
9, "total_distance", "distance_cm",
11,"total_calories", "uint16",
13, "avg_speed", "mmPerSec",
14, "max_speed", "mmPerSec",
15, "avg_heart_rate", "uint8",
16, "max_heart_rate", "uint8",
17, "avg_cadence", "uint8", # FIT rolls run and bike cadence into one
18, "max_cadence", "uint8",
19, "avg_power", "uint16",
20, "max_power", "uint16",
21, "total_ascent", "uint16",
22, "total_descent", "uint16",
42, "avg_altitude", "altitude",
43, "max_altitude", "altitude",
62, "min_altitude", "altitude",
50, "avg_temperature", "sint8",
51, "max_temperature", "sint8"
)
defMsg("record", 20,
253, "timestamp", "date_time",
0, "position_lat", "semicircles",
1, "position_long", "semicircles",
2, "altitude", "altitude",
3, "heart_rate", "uint8",
4, "cadence", "uint8",
5, "distance", "distance_cm",
6, "speed", "mmPerSec"
|
Azure/azure-sdk-for-python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_10_01/operations/_virtual_network_peerings_operations.py
|
Python
|
mit
| 22,805
| 0.004955
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualNetworkPeeringsOperations(object):
"""VirtualNetworkPeeringsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2017_10_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
virtual_network_peering_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
virtual_network_peering_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified virtual network peering.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param virtual_network_peering_name: The name of the virtual network peering.
:type virtual_network_peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
virtual_network_peering_name=virtual_network_peering_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
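# Illustrative caller-side sketch (not part of the generated code); the
# client object and resource names below are hypothetical:
#
#   poller = network_client.virtual_network_peerings.begin_delete(
#       'my-rg', 'my-vnet', 'my-peering')
#   poller.result()  # block until the long-running delete completes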
def get(
self,
resource_
|
josquindebaz/P2Qt
|
p2gui.py
|
Python
|
lgpl-3.0
| 106,057
| 0.009045
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from PySide import QtCore
from PySide import QtGui
from PySide import QtWebKit
import sys
import re
import datetime
import os
import time
import subprocess
import threading
import atexit
import webbrowser
import functools
import operator
import Viewer
import Controller
class Principal(QtGui.QMainWindow):
def __init__(self, parent=None):
QtGui.QMainWindow.__init__(self, parent)
# create the menu
##################################################
self.menu = Viewer.MyMenu()
self.setMenuBar(self.menu)
get_remote_corpus = Controller.myxml()
if get_remote_corpus.get():
if get_remote_corpus.parse():
for corpus in get_remote_corpus.getDataCorpus():
t = QtGui.QAction(corpus[0], self)
t.triggered.connect(functools.partial(self.connect_server,
"prosperologie.org", corpus[1], corpus[0]))
self.menu.distant.addAction(t)
self.menu.local_connect.triggered.connect(self.connect_server_localhost)
"""To delete: direct access to corpus editing tab"""
self.menu.local_edit.triggered.connect(self.add_edit_corpus_tab)
""" end """
self.menu.menu_P1P2.triggered.connect(self.P1toP2)
self.menu.codex.triggered.connect(self.codex_window)
self.menu.server_vars.triggered.connect(self.display_server_vars)
self.menu.contexts.triggered.connect(self.display_contexts)
self.menu.pers.triggered.connect(self.display_pers)
self.menu.marlowe_gen.triggered.connect(self.add_gen_mrlw_tab)
self.menu.Marlowe_remote.triggered.connect(self.MarloweViewer)
self.menu.manual.triggered.connect(lambda: webbrowser.open('http://mypads.framapad.org/mypads/?/mypads/group/doxa-g71fm7ki/pad/view/interface-p2-manuel-de-l-utilisateur-hsa17wo'))
# create the status bar
##################################################
self.status = self.statusBar()
self.status.showMessage(self.tr("Ready"))
#create the progressebar
##################################################
self.PrgBar = Viewer.PrgBar(self)
self.status.addPermanentWidget(self.PrgBar.bar)
# create the toolbar
##################################################
self.toolbar = self.addToolBar("")
#self.toolbar.setIconSize(QtCore.QSize(16, 16))
self.toolbar.setMovable(0)
self.toolbar_descr_corpus = QtGui.QLabel()
self.toolbar.addWidget(self.toolbar_descr_corpus)
spacer2 = QtGui.QLabel()
spacer2.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
self.toolbar.addWidget(spacer2)
self.toolbar_name_corpus = QtGui.QLabel()
self.toolbar.addWidget(self.toolbar_name_corpus)
##################################################
#cadrans NO - NE - SO - SE
#
# ###########
# # # #
# # NO # NE #
# # # #
# ###########
# # # #
# # SO # SE #
# # # #
# ###########
##################################################
#cadran NO
##################################################
##### Tab for actants #############
##################################################
self.actantsTab = Viewer.actantsTab()
self.actantsTab.L.addAction(QtGui.QAction('texts', self,
triggered=lambda: self.show_texts_from_list(0)))
self.actantsTab.L.addAction(QtGui.QAction('copy list', self,
triggered=lambda: self.copy_lw(self.actantsTab.L)))
self.actantsTab.L1.addAction(QtGui.QAction('copy list', self,
triggered=lambda: self.copy_lw(self.actantsTab.L1)))
self.actantsTab.L2.addAction(QtGui.QAction('copy list', self,
triggered=lambda: self.copy_lw(self.actantsTab.L2)))
##### Tab for authors #############
##################################################
self.authorsTab = Viewer.authorsTab()
self.authorsTab.L.currentItemChanged.connect(self.authLchanged)
self.authorsTab.S.currentIndexChanged.connect(self.authLchanged)
##### Tab for concepts #############
##################################################
self.NOT2 = Viewer.ConceptTab()
self.NOT2.select.currentIndexChanged.connect(self.select_concept)
self.NOT2.sort_command.currentIndexChanged.connect(self.affiche_concepts_scores)
self.NOT2.dep0.listw.currentItemChanged.connect(self.cdep0_changed)
self.NOT2.depI.listw.currentItemChanged.connect(self.cdepI_changed)
self.NOT2.depII.listw.currentItemChanged.connect(self.cdepII_changed)
self.NOT2.depI.deselected.connect(lambda: self.NOT2.depII.listw.clear())
self.NOT2.dep0.deselected.connect(lambda: [self.NOT2.depI.listw.clear(),
self.NOT2.depII.listw.clear()])
#TODO add those below
for i in range(7,12):
self.NOT2.sort_command.model().item(i).setEnabled(False)
##### Tab for syntax items (Lexicon) #############
##################################################
self.NOT1 = Viewer.LexiconTab()
self.NOT1.select.currentIndexChanged.connect(self.select_liste)
self.NOT1.sort_command.currentIndexChanged.connect(self.affiche_liste_scores)
self.NOT1.dep0.listw.currentItemChanged.connect(self.ldep0_changed)
#TODO add those below
for i in range(6,11):
self.NOT1.sort_command.model().item(i).setEnabled(False)
#context menus activation
self.NOT1.dep0.listw.addAction(QtGui.QAction('texts', self,
triggered=lambda: self.show_texts_from_list(0)))
self.NOT1.dep0.listw.addAction(QtGui.QAction('network', self,
triggered=lambda: self.show_network(0)))
self.NOT1.dep0.listw.addAction(QtGui.QAction('copy list', self,
triggered=lambda: self.copy_lw(self.NOT1.dep0.listw)))
self.NOT2.dep0.listw.addAction(QtGui.QAction('texts', self,
triggered=lambda: self.show_texts_from_list(0)))
self.NOT2.dep0.listw.addAction(QtGui.QAction('network', self,
triggered=lambda: self.show_network(0)))
self.NOT2.dep0.listw.addAction(QtGui.QAction('copy list', self,
triggered=lambda: self.copy_lw(self.NOT2.dep0.listw)))
self.NOT2.depI.listw.addAction(QtGui.QAction('texts', self,
triggered=lambda: self.show_texts_from_list(1)))
self.NOT2.depI.listw.addAction(QtGui.QAction('network', self,
triggered=lambda: self.show_network(1)))
self.NOT2.depI.listw.addAction(QtGui.QAction('copy list', self,
triggered=lambda: self.copy_lw(self.NOT2.depI.listw)))
self.NOT2.depII.listw.addAction(QtGui.QAction('texts', self,
triggered=lambda: self.show_texts_from_list(2)))
self.NOT2.depII.listw.addAction(QtGui.QAction('network', self,
triggered=lambda: self.show_network(2)))
self.NOT2.depII.listw.addAction(QtGui.QAction('copy list', self,
triggered=lambda: self.copy_lw(self.NOT2.depII.listw)))
##### Tab for persons #############
##################################################
self.show_persons = Viewer.personsTab()
self.show_persons.L.listw.addAction(QtGui.QAction('texts', self,
triggered=lambda: self.show_texts_from_list("pers")))
self.show_persons.L.listw.addAction(QtGui.QAction('network', self,
triggered=lambda: self.show_network("pers")))
self.show_persons.L.listw.addAction(QtGui.QAction('copy list', self,
triggered=lambda: self.copy_lw(self.show_persons.L.listw)))
#Networks tab
##################################################
self.tabNetworks = QtGui.QTabWidget()
self.tabNetworks.setTabsClos
|
lenn0x/Milo-Tracing-Framework
|
src/py/examples/helloworld/HelloWorld.py
|
Python
|
apache-2.0
| 5,966
| 0.015924
|
#
# Autogenerated by Thrift
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
from thrift.Thrift import *
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class Iface:
def ping(self, name):
"""
Parameters:
- name
"""
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot != None:
self._oprot = oprot
self._seqid = 0
def ping(self, name):
"""
Parameters:
- name
"""
self.send_ping(name)
return self.recv_ping()
def send_ping(self, name):
self._oprot.writeMessageBegin('ping', TMessageType.CALL, self._seqid)
args = ping_args()
args.name = name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_ping(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = ping_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success != None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "ping failed: unknown result");
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["ping"] = Processor.process_ping
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
def process_ping(self, seqid, iprot, oprot):
args = ping_args()
args.read(iprot)
iprot.readMessageEnd()
result = ping_result()
result.success = self._handler.ping(args.name)
oprot.writeMessageBegin("ping", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class ping_args:
"""
Attributes:
- name
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
)
def __init__(self, name=None,):
self.name = name
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ping_args')
if self.name != None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ping_result:
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRING, 'success', None, None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ping_result')
if self.success != None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
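# --- Illustrative usage sketch (not part of the generated file) ---
# Assumes a HelloWorld server listening on localhost:9090 (hypothetical
# host/port); this is the usual Thrift client-side wiring:
#
#   from thrift.transport import TSocket
#   transport = TTransport.TBufferedTransport(TSocket.TSocket('localhost', 9090))
#   protocol = TBinaryProtocol.TBinaryProtocol(transport)
#   client = Client(protocol)
#   transport.open()
#   print(client.ping('world'))  # send_ping() + recv_ping() under the hood
#   transport.close()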
|
NitishT/minio-py
|
minio/error.py
|
Python
|
apache-2.0
| 23,884
| 0.003601
|
# -*- coding: utf-8 -*-
# Minio Python Library for Amazon S3 Compatible Cloud Storage,
# (C) 2015, 2016, 2017 Minio, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
minio.error
~~~~~~~~~~~~~~~~~~~
This module provides custom exception classes for Minio library
and API specific errors.
:copyright: (c) 2015, 2016, 2017 by Minio, Inc.
:license: Apache 2.0, see LICENSE for more details.
"""
from xml.etree import cElementTree
from xml.etree.cElementTree import ParseError
if hasattr(cElementTree, 'ParseError'):
## ParseError seems to lack .message, unlike other
## exceptions. Dynamically add a new attribute carrying
## the value from msg.
if not hasattr(ParseError, 'message'):
setattr(ParseError, 'message', ParseError.msg)
_ETREE_EXCEPTIONS = (ParseError, AttributeError, ValueError, TypeError)
else:
_ETREE_EXCEPTIONS = (SyntaxError, AttributeError, ValueError, TypeError)
class MinioError(Exception):
"""
Base class for all exceptions
:param message: User defined message.
"""
def __init__(self, message, **kwargs):
super(MinioError, self).__init__(**kwargs)
self.message = message
def __str__(self):
return "{name}: message: {message}".format(
name=self.__class__.__name__,
message=self.message
)
class InvalidEndpointError(MinioError):
"""
InvalidEndpointError is raised when input endpoint URL is invalid.
"""
pass
class InvalidBucketError(MinioError):
"""
InvalidBucketError is raised when input bucket name is invalid.
NOTE: Bucket names are validated based on Amazon S3 requirements.
"""
pass
class InvalidArgumentError(MinioError):
"""
InvalidArgumentError is raised when an unexpected
argument is received by the callee.
"""
pass
class InvalidSizeError(MinioError):
"""
InvalidSizeError is raised when an unexpected size mismatch occurs.
"""
pass
class InvalidXMLError(MinioError):
"""
InvalidXMLError is raised when an unexpected XML tag or
a missing tag is found during parsing.
"""
pass
class MultiDeleteError(object):
"""
Represents an error raised when trying to delete an object in a
Multi-Object Delete API call :class:`MultiDeleteError <MultiDeleteError>`
:object_name: Object name that had a delete error.
:error_code: Error code.
:error_message: Error message.
"""
def __init__(self, object_name, err_code, err_message):
self.object_name = object_name
self.error_code = err_code
self.error_message = err_message
def __str__(self):
string_format = '<MultiDeleteError: object_name: {} error_code: {}' \
' error_message: {}>'
return string_format.format(self.object_name,
self.error_code,
self.error_message)
class ResponseError(MinioError):
"""
ResponseError is raised when an API call does not succeed;
the failing API call raises :exc:`ResponseError` accordingly.
:param response: Response from http client :class:`urllib3.HTTPResponse`.
"""
def __init__(self, response, method, bucket_name=None,
object_name=None):
super(ResponseError, self).__init__(message='')
# initialize parameter fields
self._response = response
self._xml = response.data
self.method = method
self.bucket_name = bucket_name
self.object_name = object_name
# initialize all ResponseError fields
self.code = ''
# Amz headers
self.request_id = ''
self.host_id = ''
self.region = ''
# handle the error
self._handle_error_response(bucket_name)
def get_exception(self):
"""
Gets the error exception derived from the initialization of
an ErrorResponse object
:return: The derived exception or ResponseError exception
"""
exception = known_errors.get(self.code)
if exception:
return exception(self)
else:
return self
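# Illustrative (hypothetical) call-site pattern: API code builds a
# ResponseError from a failed HTTP response and raises the most specific
# exception registered in `known_errors`, e.g.:
#
#   if response.status != 200:
#       raise ResponseError(response, 'GET', bucket_name).get_exception()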
def _handle_error_response(self, bucket_name=None):
"""
Sets error response uses xml body if available, otherwise
relies on HTTP headers.
"""
if not self._response.data:
self._set_error_response_without_body(bucket_name)
else:
self._set_error_response_with_body(bucket_name)
def _set_error_response_with_body(self, bucket_name=None):
"""
Sets all the error response fields with a valid response body.
Raises :exc:`ValueError` if invoked on a zero length body.
:param bucket_name: Optional bucket name resource at which error
occurred.
:param object_name: Optional object name resource at which error
occurred.
"""
if len(self._response.data) == 0:
raise ValueError('response data has no body.')
try:
root = cElementTree.fromstring(self._response.data)
except _ETREE_EXCEPTIONS as error:
raise InvalidXMLError('"Error" XML is not parsable. '
'Message: {0}'.format(error.message))
for attribute in root:
if attribute.tag == 'Code':
self.code = attribute.text
elif attribute.tag == 'BucketName':
self.bucket_name = attribute.text
elif attribute.tag == 'Key':
self.object_name = attribute.text
elif attribute.tag == 'Message':
self.message = attribute.text
elif attribute.tag == 'RequestId':
self.request_id = attribute.text
elif attribute.tag == 'HostId':
self.host_id = attribute.text
# Set amz headers.
self._set_amz_headers()
def _set_error_response_without_body(self, bucket_name=None):
"""
Sets all the error response fields from response headers.
"""
if self._response.status == 404:
if bucket_name:
if self.object_name:
self.code = 'NoSuchKey'
self.message = self._response.reason
else:
self.code = 'NoSuchBucket'
self.message = self._response.reason
elif self._response.status == 409:
self.code = 'Conflict'
self.message = 'The bucket you tried to delete is not empty.'
elif self._response.status == 403:
self.code = 'AccessDenied'
self.message = self._response.reason
elif self._response.status == 400:
self.code = 'BadRequest'
self.message = self._response.reason
elif self._response.status == 301:
self.code = 'PermanentRedirect'
self.message = self._response.reason
elif self._response.status == 307:
self.code = 'Redirect'
self.message = self._response.reason
elif self._response.status in [405, 501]:
self.code = 'MethodNotAllowed'
self.message = self._response.reason
elif self._response.status == 500:
self.code = 'InternalError'
self.message = 'Internal Server Error.'
else:
self.code = 'UnknownException'
self.message = self._response.reason
# Set amz headers.
self._set_amz_headers()
def _set_amz_headers(self):
"""
Sets x-amz-* error response fields from response headers.
"""
if self._response.headers:
|
kobotoolbox/kpi
|
kobo/settings/testing.py
|
Python
|
agpl-3.0
| 664
| 0.001506
|
# coding: utf-8
from mongomock import MongoClient as MockMongoClient
from .base import *
# For tests, don't use KoBoCAT's DB
DATABASES = {
'default': dj_database_url.config(default='sqlite:///%s/db.sqlite3' % BASE_DIR),
}
DATABASE_ROUTERS = ['kpi.db_routers.TestingDatabaseRouter']
TESTING = True
# Decrease prod value to speed-up tests
SUBMISSION_LIST_LIMIT = 100
ENV = 'testing'
# Run all Celery tasks synchronously during testing
CELERY_TASK_ALWAYS_EAGER = True
MONGO_CONNECTION_URL = 'mongodb://fakehost/formhub_test'
MONGO_CONNECTION = MockMongoClient(
MONGO_CONNECTION_URL, j=True, tz_aware=True)
MONGO_DB = MONGO_CONNECTION['formhub_test']
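# Illustrative check (hypothetical collection name): the mocked client
# behaves like a real pymongo client inside tests, e.g.
#
#   MONGO_DB.instances.insert_one({'_id': 1})
#   assert MONGO_DB.instances.count_documents({}) == 1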
|
pklaus/netio230a
|
setup.py
|
Python
|
gpl-3.0
| 1,135
| 0.026432
|
# -*- coding: utf-8 -*-
"""
Copyright (c) 2015, Philipp Klaus. All rights reserved.
License: GPLv3
"""
from distutils.core import setup
setup(name='netio230a',
version = '1.1.9',
description = 'Python package to control the Koukaam NETIO-230A',
long_description = 'Python software to access the Koukaam NETIO-230A and NETIO-230B: power distribution units / controllable power outlets with Ethernet interface',
author = 'Philipp Klaus',
author_email = '[email protected]',
url = 'https://github.com/pklaus/netio230a',
license = 'GPL3+',
packages = ['netio230a'],
scripts = ['scripts/netio230a_cli', 'scripts/netio230a_discovery', 'scripts/netio230a_fakeserver'],
zip_safe = True,
platforms = 'any',
keywords = 'Netio230A Koukaam PDU',
classifiers = [
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
)
|
SpiNNakerManchester/SpiNNer
|
spinner/proxy.py
|
Python
|
gpl-2.0
| 8,272
| 0.040015
|
r"""A proxy enabling multiple wiring guide instances to interact with the same
SpiNNaker boards.
A very simple protocol is used between the client and server. Clients may send
the following new-line delimited commands to the server:
* ``VERSION,[versionstring]\n`` The server will disconnect any client with an
incompatible version number reported for ``[versionstring]``. Returns
``OK\n``.
* ``LED,[c],[f],[b],[lednum],[state]\n`` Turn on or off the specified LED. Note
that the LED remains switched on while *any* client wants it to be on.
Returns ``OK\n``.
* ``TARGET,[c],[f],[b],[link]\n`` Discover what link is at the other end of the
supplied link. Returns ``[c],[f],[b],[link]\n`` or ``None\n`` if no link is
connected. Note that links are represented by their number, not their name.
"""
import traceback
import socket
import select
from collections import defaultdict
import logging
from six import iteritems
from spinner.version import __version__
from spinner.topology import Direction
DEFAULT_PORT = 6512
class ProxyError(Exception):
"""Exception raised when the proxy cannot connect."""
pass
class ProxyServer(object):
"""A proxy server enabling multiple wiring guide instances to interact with
the same SpiNNaker boards.
"""
def __init__(self, bmp_controller, wiring_probe,
hostname="", port=DEFAULT_PORT):
self.bmp_controller = bmp_controller
self.wiring_probe = wiring_probe
# Open a TCP socket
self.server_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server_sock.setsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR, 1)
self.server_sock.bind((hostname, port))
self.server_sock.listen(5)
self.client_socks = []
# A buffer for unprocessed data received from each client
self.client_buffer = {}
# For each LED, maintains a set of clients which have turned it on
self.led_setters = defaultdict(set)
def add_client(self, sock, addr):
"""Register a new client."""
logging.info("New connection {} from {}".format(sock, addr))
self.client_socks.append(sock)
# Create buffer for received data (and schedule its deletion upon
# disconnection)
self.client_buffer[sock] = b""
def remove_client(self, sock):
"""Disconnect and cleanup after a particular child."""
logging.info("Closing socket {}".format(sock))
# Remove buffer
self.client_buffer.pop(sock)
# Turn off any LEDs left on by the client
for (c, f, b, led), socks in iteritems(self.led_setters):
if sock in socks:
self.set_led(sock, c, f, b, led, False)
# Close socket
self.client_socks.remove(sock)
sock.close()
def set_led(self, sock, c, f, b, led, state):
"""Set the state of a diagnostic LED.
An LED is turned on if at least one client has turned it on. An LED is only
turned off if all clients which have turned the LED on have also turned it
off again.
"""
setters = self.led_setters[(c, f, b, led)]
cur_led_state = bool(setters)
if state:
setters.add(sock)
else:
setters.discard(sock)
new_led_state = bool(setters)
if cur_led_state != new_led_state:
self.bmp_controller.set_led(led, new_led_state, c, f, b)
def handle_version(self, sock, args):
"""Handle "VERSION" commands.
This command contains, as the argument, the SpiNNer version number of the
remote client. If the version of the client does not match the server, the
client is disconnected.
Arguments: vX.Y.Z
Returns: OK
"""
# Check for identical version
assert args.decode("ascii") == __version__
sock.send(b"OK\n")
def handle_led(self, sock, args):
"""Handle "LED" commands.
Set the state of a diagnostic LED on a board.
Arguments: c,f,b,led,state
Returns: OK
"""
c, f, b, led, state = map(int, args.split(b","))
self.set_led(sock, c, f, b, led, state)
sock.send(b"OK\n")
def handle_target(self, sock, args):
"""Handle "TARGET" commands.
Determine what is at the other end of a given link.
Arguments: c,f,b,d
Returns: c,f,b,d or None
"""
c, f, b, d = map(int, args.split(b","))
target = self.wiring_probe.get_link_target(c, f, b, d)
if target is None:
sock.send(b"None\n")
else:
sock.send("{},{},{},{}\n".format(*map(int, target)).encode("ascii"))
def process_data(self, sock, data):
"""Process data received from a socket."""
# Prepend any previously unprocessed data
data = self.client_buffer[sock] + data
# Handle any received commands. If a command fails (or is invalid) the
# connection is dropped.
try:
while b"\n" in data:
line, _, data = data.partition(b"\n")
logging.debug("Handling command {} from {}".format(line, sock))
cmd, _, args = line.partition(b",")
# If an unrecognised command arrives, this lookup will fail and get
# caught by the exception handler, printing an error and disconnecting
# the client.
{
b"VERSION": self.handle_version,
b"LED": self.handle_led,
b"TARGET": self.handle_target,
}[cmd](sock, args)
except Exception as e:
logging.exception(
"Disconnected client {} due to bad command (above)".format(sock))
self.remove_client(sock)
return
# Retain any remaining unprocessed data
self.client_buffer[sock] = data
def main(self):
logging.info("Starting proxy server...")
try:
while True:
ready, _1, _2 = select.select([self.server_sock] + self.client_socks, [], [])
for sock in ready:
if sock is self.server_sock:
# New client connected!
self.add_client(*self.server_sock.accept())
else:
# Data arrived from a client
try:
data = sock.recv(1024)
except (IOError, OSError) as exc:
logging.error(
"Socket {} failed to receive: {}".format(sock, exc))
# Cause socket to get closed
data = b"" # pragma: no branch
if len(data) == 0:
# Connection closed
self.remove_client(sock)
else:
self.process_data(sock, data)
except KeyboardInterrupt:
# Disconnect all clients (also cleans up LED states, etc.)
for sock in self.client_socks:
self.remove_client(sock)
logging.info("Proxy server terminated cleanly.")
class ProxyClient(object):
"""A client for the ProxyServer object defined above.
This object implements a BMPController-compatible ``set_led`` method and
WiringProbe compatible ``get_link_target`` method and thus may be substituted
for the above when these functions are all that are required, e.g. for the
InteractiveWiringGuide.
"""
def __init__(self, hostname, port=DEFAULT_PORT):
"""Connect to a running ProxyServer."""
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.connect((hostname, port))
# A receive buffer
self.buf = b""
# Check for protocol version compatibility.
self.check_version()
def recvline(self):
"""Wait for a full line to be received from the server."""
while b"\n" not in self.buf:
data = self.sock.recv(1024)
self.buf += data
if len(data) == 0:
raise ProxyError("Remote server closed the connection.")
line, _, self.buf = self.buf.partition(b"\n")
return line
def check_version(self):
"""Check that the remote server has a compatible protocol version."""
self.sock.send("VERSION,{}\n".format(__version__).encode("ascii"))
if self.recvline() != b"OK":
raise ProxyError("Remote server has incompatible protocol version")
def set_led(self, led, state, c, f, b):
"""Set the state of an LED on the remote machine."""
self.sock.send("LED,{},{},{},{},{}\n".format(
c, f, b, led, int(state)).encode("ascii"))
if self.recvline() != b"OK":
raise ProxyError("Got unexpected response to LED command.")
def get_link_target(self, c, f, b, d):
"""Discover the other end of a specified link on a remote machine."""
self.sock.send("TARGET,{},{},{},{}\n".format(
c, f, b, int(d)).encode("ascii"))
response = self.recvline()
if response == b"None":
return None
else:
try:
c, f, b, d = map(int, response.split(b","))
return (c, f, b, Direction(d
|
eeucalyptus/eeDA
|
app/graphics/textrenderer.py
|
Python
|
apache-2.0
| 3,090
| 0.009385
|
from . import Renderer
from PIL import Image, ImageFont, ImageQt, ImageDraw
from PyQt5 import QtGui
'''
Renders a single line of text at a given position.
'''
class TextRenderer(Renderer):
MSFACTOR = 8
def __init__(self, gl, text, pos, size = 64):
super().__init__(gl)
self.text = text
self.pos = pos
if size > 64:
self.MSFACTOR = 4
if size > 128:
self.sizeAdjust = size / 128
self.fSize = 128
else:
self.fSize = size
self.sizeAdjust = 1
self.callList = self.genSymbolCallList()
def genSymbolCallList(self):
genList = self.gl.glGenLists(1)
try:
font = ImageFont.truetype('resources/interface/Roboto.ttf', self.fSize * self.MSFACTOR)
except OSError:
print("Font n
|
ot found, loading failsafe.")
font = ImageFont.truetype('arial.ttf', self.fSize * self.MSFACTOR)
# works on Windows; may still fail on Linux and OSX. Documentation unclear.
textSize = font.getsize(self.text)
border = 5
image = Image.new("RGBA", (textSize[0] + 2*border, textSize[1] + 2*border), None)
draw = ImageDraw.Draw(image)
draw.text((border, border), self.text, font=font, fill="white")
del draw
imgWidth = float(self.sizeAdjust * image.size[0] / self.MSFACTOR)
imgHeight = float(self.sizeAdjust * image.size[1] / self.MSFACTOR)
self.vertices =[0.0, self.fSize - imgHeight, 2.0,
0.0, float(self.fSize), 2.0,
imgWidth, float(self.fSize), 2.0,
imgWidth, self.fSize - imgHeight, 2.0]
self.texCoords=[0.0, 0.0, 2.0,
0.0, 1.0, 2.0,
1.0, 1.0, 2.0,
1.0, 0.0, 2.0]
self.texture = QtGui.QOpenGLTexture(ImageQt.ImageQt(image), True)
self.texture.setMinMagFilters(QtGui.QOpenGLTexture.LinearMipMapLinear, QtGui.QOpenGLTexture.Linear)
self.gl.glNewList(genList, self.gl.GL_COMPILE)
self.gl.glColor4f(1.0, 1.0, 1.0, 0.0)
self.gl.glMatrixMode(self.gl.GL_MODELVIEW)
self.gl.glPushMatrix()
self.gl.glTranslated(self.pos.x - self.sizeAdjust * (image.size[0] / (2 * self.MSFACTOR) - border), self.pos.y - image.size[1] / (2 * self.MSFACTOR), 0)
self.texture.bind()
self.gl.glEnableClientState(self.gl.GL_VERTEX_ARRAY)
self.gl.glEnableClientState(self.gl.GL_TEXTURE_COORD_ARRAY)
self.gl.glVertexPointer(3, self.gl.GL_FLOAT, 0, self.vertices)
self.gl.glTexCoordPointer(3, self.gl.GL_FLOAT, 0, self.texCoords)
self.gl.glEnable(self.gl.GL_TEXTURE_2D)
self.gl.glDrawArrays(self.gl.GL_QUADS, 0, 4)
self.gl.glDisable(self.gl.GL_TEXTURE_2D)
self.texture.release()
self.gl.glPopMatrix()
self.gl.glEndList()
return genList
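# Illustrative usage sketch (hypothetical caller, not part of the module):
# from a widget's paintGL, given an initialized legacy-GL context and a
# point-like `pos` object:
#
#   renderer = TextRenderer(self.gl, "R1", pos, size=32)
#   self.gl.glCallList(renderer.callList)  # draws the cached textured quad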
|
NeCTAR-RC/horizon
|
openstack_dashboard/api/_nova.py
|
Python
|
apache-2.0
| 5,498
| 0
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
This module is a special module to define functions or other resources
which need to be imported outside of openstack_dashboard.api.nova
(like cinder.py) to avoid cyclic imports.
"""
from django.conf import settings
from glanceclient import exc as glance_exceptions
from novaclient import api_versions
from novaclient import client as nova_client
from horizon import exceptions as horizon_exceptions
from horizon.utils import memoized
from openstack_dashboard.api import base
from openstack_dashboard.api import glance
from openstack_dashboard.api import microversions
from openstack_dashboard.contrib.developer.profiler import api as profiler
# Supported compute versions
VERSIONS = base.APIVersionManager("compute", preferred_version=2)
VERSIONS.load_supported_version(1.1, {"client": nova_client, "version": 1.1})
VERSIONS.load_supported_version(2, {"client": nova_client, "version": 2})
INSECURE = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False)
CACERT = getattr(settings, 'OPENSTACK_SSL_CACERT', None)
class Server(base.APIResourceWrapper):
"""Simple wrapper around novaclient.server.Server.
Preserves the request info so image name can later be retrieved.
"""
_attrs = ['addresses', 'attrs', 'id', 'image', 'links', 'description',
'metadata', 'name', 'private_ip', 'public_ip', 'status', 'uuid',
'image_name', 'VirtualInterfaces', 'flavor', 'key_name', 'fault',
'tenant_id', 'user_id', 'created', 'locked',
'OS-EXT-STS:power_state', 'OS-EXT-STS:task_state',
'OS-EXT-SRV-ATTR:instance_name', 'OS-EXT-SRV-ATTR:host',
'OS-EXT-AZ:availability_zone', 'OS-DCF:diskConfig']
def __init__(self, apiresource, request):
super(Server, self).__init__(apiresource)
self.request = request
# TODO(gabriel): deprecate making a call to Glance as a fallback.
@property
def image_name(self):
if not self.image:
return None
elif hasattr(self.image, 'name'):
return self.image.name
elif 'name' in self.image:
return self.image['name']
else:
try:
image = glance.image_get(self.request, self.image['id'])
self.image['name'] = image.name
return image.name
except (glance_exceptions.ClientException,
horizon_exceptions.ServiceCatalogException):
self.image['name'] = None
return None
@property
def internal_name(self):
return getattr(self, 'OS-EXT-SRV-ATTR:instance_name', "")
@property
def availability_zone(self):
return getattr(self, 'OS-EXT-AZ:availability_zone', "")
@property
def host_server(self):
return getattr(self, 'OS-EXT-SRV-ATTR:host', '')
@memoized.memoized
def get_microversion(request, features):
client = novaclient(request)
min_ver, max_ver = api_versions._get_server_version_range(client)
return (microversions.get_microversion_for_features(
'nova', features, api_versions.APIVersion, min_ver, max_ver))
def get_auth_params_from_request(request):
"""Extracts properties needed by novaclient call from the request object.
These will be used to memoize the calls to novaclient.
"""
return (
request.user.username,
request.user.token.id,
request.user.tenant_id,
request.user.token.project.get('domain_id'),
base.url_for(request, 'compute'),
base.url_for(request, 'identity')
)
@memoized.memoized
def cached_novaclient(request, version=None):
(
username,
token_id,
project_id,
project_domain_id,
nova_url,
auth_url
) = get_auth_params_from_request(request)
if version is None:
version = VERSIONS.get_active_version()['version']
c = nova_client.Client(version,
username,
token_id,
project_id=project_id,
project_domain_id=project_domain_id,
auth_url=auth_url,
insecure=INSECURE,
cacert=CACERT,
http_log_debug=settings.DEBUG,
auth_token=token_id,
endpoint_override=nova_url)
return c
def novaclient(request, version=None):
if isinstance(version, api_versions.APIVersion):
version = version.get_string()
return cached_novaclient(request, version)
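# Illustrative usage (a sketch, not part of the original module):
#
#   client = novaclient(request)                   # active default version
#   client = novaclient(request, version="2.26")   # explicit microversion
#   client = novaclient(request, api_versions.APIVersion("2.26"))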
def get_novaclient_with_instance_desc(request):
microversion = get_microversion(request, "instance_description")
return novaclient(request, version=microversion)
@profiler.trace
def server_get(request, instance_id):
return Server(get_novaclient_with_instance_desc(request).servers.get(
instance_id), request)
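# Illustrative sketch (the UUID is an assumption, not from this file):
#
#   server = server_get(request, "11111111-2222-3333-4444-555555555555")
#   print(server.image_name, server.availability_zone)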
|
RedHatInsights/insights-core
|
insights/parsers/tests/test_ls_var_cache_pulp.py
|
Python
|
apache-2.0
| 1,314
| 0.003805
|
import doctest
from insights.parsers import ls_var_cache_pulp
from insights.parsers.ls_var_cache_pulp import LsVarCachePulp
from insights.tests import context_wrap
LS_VAR_CACHE_PULP = """
total 0
drwxrwxr-x. 5 48 1000 216 Jan 21 12:56 .
drwxr-xr-x. 10 0 0 121 Jan 20 13:57 ..
lrwxrwxrwx. 1 0 0 19 Jan 21 12:56 cache -> /var/lib/pulp/cache
drwxr-xr-x. 2 48 48 6 Jan 21 13:03 [email protected]
drwxr-xr-x. 2 48 48 6 Jan 21 02:03 [email protected]
drwxr-xr-x. 2 48 48 6 Jan 20 14:03 [email protected]
"""
def test_ls_var_cache_pulp():
ls_var_cache_pulp = LsVarCachePulp(context_wrap(LS_VAR_CACHE_PULP, path="insights_commands/ls_-lan_.var.cache.pulp"))
assert ls_var_cache_pulp.files_of('/var/cache/pulp') == ['cache']
cache_item = ls_var_cache_pulp.dir_entry('/var/cache/pulp', 'cache')
assert cache_item is not None
    assert '/var/lib/pulp/' in cache_item['link']
def test_ls_var_lib_mongodb_doc_examples():
env = {
'ls_var_cache_pulp': LsVarCachePulp(context_wrap(LS_VAR_CACHE_PULP, path="insights_commands/ls_-lan_.var.cache.pulp")),
}
failed, total = doctest.testmod(ls_var_cache_pulp, globs=env)
assert failed == 0
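# Illustrative note (a sketch based on the assertions above): dir_entry
# returns a dict-like record, so a symlink row exposes its target under the
# 'link' key:
#
#   entry = ls_var_cache_pulp.dir_entry('/var/cache/pulp', 'cache')
#   entry['link']  # contains '/var/lib/pulp/'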
|
pcmoritz/ray-1
|
python/ray/tune/tests/test_integration_mlflow.py
|
Python
|
apache-2.0
| 11,695
| 0
|
import os
import unittest
from collections import namedtuple
from unittest.mock import patch
from ray.tune.function_runner import wrap_function
from ray.tune.integration.mlflow import MLflowLoggerCallback, MLflowLogger, \
mlflow_mixin, MLflowTrainableMixin
class MockTrial(
namedtuple("MockTrial",
["config", "trial_name", "trial_id", "logdir"])):
def __hash__(self):
return hash(self.trial_id)
def __str__(self):
return self.trial_name
MockRunInfo = namedtuple("MockRunInfo", ["run_id"])
class MockRun:
def __init__(self, run_id, tags=None):
self.run_id = run_id
self.tags = tags
self.info = MockRunInfo(run_id)
self.params = []
self.metrics = []
self.artifacts = []
def log_param(self, key, value):
self.params.append({key: value})
def log_metric(self, key, value):
self.metrics.append({key: value})
def log_artifact(self, artifact):
self.artifacts.append(artifact)
def set_terminated(self, status):
self.terminated = True
self.status = status
MockExperiment = namedtuple("MockExperiment", ["name", "experiment_id"])
class MockMlflowClient:
def __init__(self, tracking_uri=None, registry_uri=None):
self.tracking_uri = tracking_uri
self.registry_uri = registry_uri
self.experiments = [MockExperiment("existing_experiment", 0)]
self.runs = {0: []}
self.active_run = None
def set_tracking_uri(self, tracking_uri):
self.tracking_uri = tracking_uri
def get_experiment_by_name(self, name):
try:
index = self.experiment_names.index(name)
return self.experiments[index]
except ValueError:
return None
def get_experiment(self, experiment_id):
experiment_id = int(experiment_id)
try:
return self.experiments[experiment_id]
except IndexError:
return None
def create_experiment(self, name):
experiment_id = len(self.experiments)
self.experiments.append(MockExperiment(name, experiment_id))
self.runs[experiment_id] = []
return experiment_id
def create_run(self, experiment_id, tags=None):
experiment_runs = self.runs[experiment_id]
run_id = (experiment_id, len(experiment_runs))
run = MockRun(run_id=run_id, tags=tags)
experiment_runs.append(run)
return run
def start_run(self, experiment_id, run_name):
# Creates new run and sets it as active.
run = self.create_run(experiment_id)
self.active_run = run
def get_mock_run(self, run_id):
return self.runs[run_id[0]][run_id[1]]
def log_param(self, run_id, key, value):
run = self.get_mock_run(run_id)
run.log_param(key, value)
def log_metric(self, run_id, key, value, step):
run = self.get_mock_run(run_id)
run.log_metric(key, value)
def log_artifacts(self, run_id, local_dir):
run = self.get_mock_run(run_id)
run.log_artifact(local_dir)
def set_terminated(self, run_id, status):
run = self.get_mock_run(run_id)
run.set_terminated(status)
@property
def experiment_names(self):
return [e.name for e in self.experiments]
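# Illustrative note (not in the original file): MockMlflowClient identifies
# runs by (experiment_id, index) tuples, so get_mock_run can index straight
# into self.runs. For example:
#
#   client = MockMlflowClient()
#   exp_id = client.create_experiment("demo")  # -> 1 ("existing_experiment" is 0)
#   run = client.create_run(exp_id)            # run.run_id == (1, 0)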
def clear_env_vars():
if "MLF
|
LOW_EXPERIMENT_NAME" in os.environ:
del os.environ["MLFLOW_EXPERIMENT_NAME"]
if "MLFLOW_EXPERIMENT_ID" in os.environ:
del os.environ["MLFLOW
|
_EXPERIMENT_ID"]
class MLflowTest(unittest.TestCase):
@patch("mlflow.tracking.MlflowClient", MockMlflowClient)
def testMlFlowLoggerCallbackConfig(self):
# Explicitly pass in all args.
logger = MLflowLoggerCallback(
tracking_uri="test1",
registry_uri="test2",
experiment_name="test_exp")
logger.setup()
self.assertEqual(logger.client.tracking_uri, "test1")
self.assertEqual(logger.client.registry_uri, "test2")
self.assertListEqual(logger.client.experiment_names,
["existing_experiment", "test_exp"])
self.assertEqual(logger.experiment_id, 1)
# Check if client recognizes already existing experiment.
logger = MLflowLoggerCallback(experiment_name="existing_experiment")
logger.setup()
self.assertListEqual(logger.client.experiment_names,
["existing_experiment"])
self.assertEqual(logger.experiment_id, 0)
# Pass in experiment name as env var.
clear_env_vars()
os.environ["MLFLOW_EXPERIMENT_NAME"] = "test_exp"
logger = MLflowLoggerCallback()
logger.setup()
self.assertListEqual(logger.client.experiment_names,
["existing_experiment", "test_exp"])
self.assertEqual(logger.experiment_id, 1)
# Pass in existing experiment name as env var.
clear_env_vars()
os.environ["MLFLOW_EXPERIMENT_NAME"] = "existing_experiment"
logger = MLflowLoggerCallback()
logger.setup()
self.assertListEqual(logger.client.experiment_names,
["existing_experiment"])
self.assertEqual(logger.experiment_id, 0)
# Pass in existing experiment id as env var.
clear_env_vars()
os.environ["MLFLOW_EXPERIMENT_ID"] = "0"
logger = MLflowLoggerCallback()
logger.setup()
self.assertListEqual(logger.client.experiment_names,
["existing_experiment"])
self.assertEqual(logger.experiment_id, "0")
# Pass in non existing experiment id as env var.
clear_env_vars()
os.environ["MLFLOW_EXPERIMENT_ID"] = "500"
with self.assertRaises(ValueError):
logger = MLflowLoggerCallback()
logger.setup()
# Experiment name env var should take precedence over id env var.
clear_env_vars()
os.environ["MLFLOW_EXPERIMENT_NAME"] = "test_exp"
os.environ["MLFLOW_EXPERIMENT_ID"] = "0"
logger = MLflowLoggerCallback()
logger.setup()
self.assertListEqual(logger.client.experiment_names,
["existing_experiment", "test_exp"])
self.assertEqual(logger.experiment_id, 1)
@patch("mlflow.tracking.MlflowClient", MockMlflowClient)
def testMlFlowLoggerLogging(self):
clear_env_vars()
trial_config = {"par1": 4, "par2": 9.}
trial = MockTrial(trial_config, "trial1", 0, "artifact")
logger = MLflowLoggerCallback(
experiment_name="test1", save_artifact=True)
logger.setup()
# Check if run is created.
logger.on_trial_start(iteration=0, trials=[], trial=trial)
# New run should be created for this trial with correct tag.
mock_run = logger.client.runs[1][0]
self.assertDictEqual(mock_run.tags, {"trial_name": "trial1"})
self.assertTupleEqual(mock_run.run_id, (1, 0))
self.assertTupleEqual(logger._trial_runs[trial], mock_run.run_id)
# Params should be logged.
self.assertListEqual(mock_run.params, [{"par1": 4}, {"par2": 9}])
# When same trial is started again, new run should not be created.
logger.on_trial_start(iteration=0, trials=[], trial=trial)
self.assertEqual(len(logger.client.runs[1]), 1)
# Check metrics are logged properly.
result = {
"metric1": 0.8,
"metric2": 1,
"metric3": None,
"training_iteration": 0
}
logger.on_trial_result(0, [], trial, result)
mock_run = logger.client.runs[1][0]
# metric3 is not logged since it cannot be converted to float.
self.assertListEqual(mock_run.metrics, [{
"metric1": 0.8
}, {
"metric2": 1.0
}, {
"training_iteration": 0
}])
# Check that artifact is logged on termination.
logger.on_trial_complete(0, [], trial)
mock_run = logger.client.runs[1][0]
self.assertListEqual(mock_run.artifacts, ["artifact"])
        self.assertTrue(mock_run.terminated)
|
scholer/pptx-downsizer
|
pptx_downsizer/__main__.py
|
Python
|
gpl-3.0
| 170
| 0.005882
|
# __main__.py is used when a package is executed as a module, i.e.: `python -m pptx_downsizer`
if __name__ == '__main__':
    from .pptx_downsizer import cli
cli()
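# Illustrative note (the argument shape is an assumption, not from this file):
#   python -m pptx_downsizer presentation.pptx
# runs this module, which dispatches to the package's cli() entry point.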
|